Author: [log in to unmask]
Date: Wed Apr 27 11:11:32 2016
New Revision: 4346
Log:
Merge trunk into the ecal geom dev branch so it is up to date.
Added:
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/MollerMonitoring.java
- copied unchanged from r4345, java/trunk/analysis/src/main/java/org/hps/analysis/dataquality/MollerMonitoring.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/MuonCandidateMonitoring.java
- copied unchanged from r4345, java/trunk/analysis/src/main/java/org/hps/analysis/dataquality/MuonCandidateMonitoring.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/examples/VertexAnalysis.java
- copied unchanged from r4345, java/trunk/analysis/src/main/java/org/hps/analysis/examples/VertexAnalysis.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/plots/
- copied from r4345, java/trunk/analysis/src/main/java/org/hps/analysis/plots/
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/ClusterDiagnosticModule.java
- copied unchanged from r4345, java/trunk/analysis/src/main/java/org/hps/analysis/trigger/ClusterDiagnosticModule.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/DiagnosticsManagementDriver.java
- copied unchanged from r4345, java/trunk/analysis/src/main/java/org/hps/analysis/trigger/DiagnosticsManagementDriver.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/PairTriggerDiagnosticModule.java
- copied unchanged from r4345, java/trunk/analysis/src/main/java/org/hps/analysis/trigger/PairTriggerDiagnosticModule.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/SinglesTriggerDiagnosticModule.java
- copied unchanged from r4345, java/trunk/analysis/src/main/java/org/hps/analysis/trigger/SinglesTriggerDiagnosticModule.java
java/branches/HPSJAVA-409/analysis/src/test/
- copied from r4345, java/trunk/analysis/src/test/
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/beam/
- copied from r4345, java/trunk/conditions/src/main/java/org/hps/conditions/beam/
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/AbstractConditionsObjectConverter.java
- copied unchanged from r4345, java/trunk/conditions/src/main/java/org/hps/conditions/database/AbstractConditionsObjectConverter.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/EcalTimeWalk.java
- copied unchanged from r4345, java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalTimeWalk.java
java/branches/HPSJAVA-409/conditions/src/test/java/org/hps/conditions/beam/
- copied from r4345, java/trunk/conditions/src/test/java/org/hps/conditions/beam/
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/CrawlerFileVisitor.java
- copied unchanged from r4345, java/trunk/crawler/src/main/java/org/hps/crawler/CrawlerFileVisitor.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/DataType.java
- copied unchanged from r4345, java/trunk/crawler/src/main/java/org/hps/crawler/DataType.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/DatacatAddFile.java
- copied unchanged from r4345, java/trunk/crawler/src/main/java/org/hps/crawler/DatacatAddFile.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/DatacatHelper.java
- copied unchanged from r4345, java/trunk/crawler/src/main/java/org/hps/crawler/DatacatHelper.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/FileFormat.java
- copied unchanged from r4345, java/trunk/crawler/src/main/java/org/hps/crawler/FileFormat.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/FileUtilities.java
- copied unchanged from r4345, java/trunk/crawler/src/main/java/org/hps/crawler/FileUtilities.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/LcioReconMetadataReader.java
- copied unchanged from r4345, java/trunk/crawler/src/main/java/org/hps/crawler/LcioReconMetadataReader.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/MetadataWriter.java
- copied unchanged from r4345, java/trunk/crawler/src/main/java/org/hps/crawler/MetadataWriter.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/PathFilter.java
- copied unchanged from r4345, java/trunk/crawler/src/main/java/org/hps/crawler/PathFilter.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/Site.java
- copied unchanged from r4345, java/trunk/crawler/src/main/java/org/hps/crawler/Site.java
java/branches/HPSJAVA-409/crawler/src/main/python/
- copied from r4345, java/trunk/crawler/src/main/python/
java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-1_5mm-v3-4-fieldmap/HPS-EngRun2015-1_5mm-v3-4-fieldmap.lcdd
- copied unchanged from r4345, java/trunk/detector-data/detectors/HPS-EngRun2015-1_5mm-v3-4-fieldmap/HPS-EngRun2015-1_5mm-v3-4-fieldmap.lcdd
java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v3-5-0-fieldmap/
- copied from r4345, java/trunk/detector-data/detectors/HPS-EngRun2015-Nominal-v3-5-0-fieldmap/
java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v3-5-1-fieldmap/
- copied from r4345, java/trunk/detector-data/detectors/HPS-EngRun2015-Nominal-v3-5-1-fieldmap/
java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v3-5-2-fieldmap/
- copied from r4345, java/trunk/detector-data/detectors/HPS-EngRun2015-Nominal-v3-5-2-fieldmap/
java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v3-5-3-fieldmap/
- copied from r4345, java/trunk/detector-data/detectors/HPS-EngRun2015-Nominal-v3-5-3-fieldmap/
java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v4-4-fieldmap/
- copied from r4345, java/trunk/detector-data/detectors/HPS-EngRun2015-Nominal-v4-4-fieldmap/
java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v5-fieldmap/
- copied from r4345, java/trunk/detector-data/detectors/HPS-EngRun2015-Nominal-v5-fieldmap/
java/branches/HPSJAVA-409/detector-data/detectors/HPS-PhysicsRun2016-1_5mm-v4-4/
- copied from r4345, java/trunk/detector-data/detectors/HPS-PhysicsRun2016-1_5mm-v4-4/
java/branches/HPSJAVA-409/detector-data/detectors/HPS-PhysicsRun2016-2mm-v4-4/
- copied from r4345, java/trunk/detector-data/detectors/HPS-PhysicsRun2016-2mm-v4-4/
java/branches/HPSJAVA-409/detector-data/detectors/HPS-PhysicsRun2016-3mm-v4-4/
- copied from r4345, java/trunk/detector-data/detectors/HPS-PhysicsRun2016-3mm-v4-4/
java/branches/HPSJAVA-409/detector-data/detectors/HPS-PhysicsRun2016-4mm-v4-4/
- copied from r4345, java/trunk/detector-data/detectors/HPS-PhysicsRun2016-4mm-v4-4/
java/branches/HPSJAVA-409/detector-data/detectors/HPS-PhysicsRun2016-6pt6-v0/
- copied from r4345, java/trunk/detector-data/detectors/HPS-PhysicsRun2016-6pt6-v0/
java/branches/HPSJAVA-409/detector-data/detectors/HPS-PhysicsRun2016-Nominal-v4-4/
- copied from r4345, java/trunk/detector-data/detectors/HPS-PhysicsRun2016-Nominal-v4-4/
java/branches/HPSJAVA-409/detector-data/detectors/HPS-PhysicsRun2016-Nominal-v4-4-fieldmap/
- copied from r4345, java/trunk/detector-data/detectors/HPS-PhysicsRun2016-Nominal-v4-4-fieldmap/
java/branches/HPSJAVA-409/job/src/main/java/org/hps/job/DatabaseConditionsManagerSetup.java
- copied unchanged from r4345, java/trunk/job/src/main/java/org/hps/job/DatabaseConditionsManagerSetup.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SVTPulseFitPlots.java
- copied unchanged from r4345, java/trunk/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SVTPulseFitPlots.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalFeeMonitor.java
- copied unchanged from r4345, java/trunk/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalFeeMonitor.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/CosmicPMTFilter.java
- copied unchanged from r4345, java/trunk/recon/src/main/java/org/hps/recon/filtering/CosmicPMTFilter.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/PulserSingle0Pair0TriggerFilterDriver.java
- copied unchanged from r4345, java/trunk/recon/src/main/java/org/hps/recon/filtering/PulserSingle0Pair0TriggerFilterDriver.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/SvtHitMultiplicityFilter.java
- copied unchanged from r4345, java/trunk/recon/src/main/java/org/hps/recon/filtering/SvtHitMultiplicityFilter.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/SvtRawHitMultiplicityFilter.java
- copied unchanged from r4345, java/trunk/recon/src/main/java/org/hps/recon/filtering/SvtRawHitMultiplicityFilter.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/AbstractLoopAdapter.java
- copied unchanged from r4345, java/trunk/record-util/src/main/java/org/hps/record/AbstractLoopAdapter.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/AbstractRecordLoop.java
- copied unchanged from r4345, java/trunk/record-util/src/main/java/org/hps/record/AbstractRecordLoop.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/TriggerConfigEvioProcessor.java
- copied unchanged from r4345, java/trunk/record-util/src/main/java/org/hps/record/daqconfig/TriggerConfigEvioProcessor.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/epics/EpicsUtilities.java
- copied unchanged from r4345, java/trunk/record-util/src/main/java/org/hps/record/epics/EpicsUtilities.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EventTagMask.java
- copied unchanged from r4345, java/trunk/record-util/src/main/java/org/hps/record/evio/EventTagMask.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/svt/SvtConfigData.java
- copied unchanged from r4345, java/trunk/record-util/src/main/java/org/hps/record/svt/SvtConfigData.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/svt/SvtConfigEvioProcessor.java
- copied unchanged from r4345, java/trunk/record-util/src/main/java/org/hps/record/svt/SvtConfigEvioProcessor.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/TiTimeOffsetCalculator.java
- copied unchanged from r4345, java/trunk/record-util/src/main/java/org/hps/record/triggerbank/TiTimeOffsetCalculator.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/TriggerConfigData.java
- copied unchanged from r4345, java/trunk/record-util/src/main/java/org/hps/record/triggerbank/TriggerConfigData.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/TriggerType.java
- copied unchanged from r4345, java/trunk/record-util/src/main/java/org/hps/record/triggerbank/TriggerType.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/util/
- copied from r4345, java/trunk/record-util/src/main/java/org/hps/record/util/
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/AbstractRunBuilder.java
- copied unchanged from r4345, java/trunk/run-database/src/main/java/org/hps/run/database/AbstractRunBuilder.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/DaoProvider.java
- copied unchanged from r4345, java/trunk/run-database/src/main/java/org/hps/run/database/DaoProvider.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/DatacatBuilder.java
- copied unchanged from r4345, java/trunk/run-database/src/main/java/org/hps/run/database/DatacatBuilder.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/DatacatUtilities.java
- copied unchanged from r4345, java/trunk/run-database/src/main/java/org/hps/run/database/DatacatUtilities.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EvioDataBuilder.java
- copied unchanged from r4345, java/trunk/run-database/src/main/java/org/hps/run/database/EvioDataBuilder.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/LivetimeBuilder.java
- copied unchanged from r4345, java/trunk/run-database/src/main/java/org/hps/run/database/LivetimeBuilder.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java
- copied unchanged from r4345, java/trunk/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/SpreadsheetBuilder.java
- copied unchanged from r4345, java/trunk/run-database/src/main/java/org/hps/run/database/SpreadsheetBuilder.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/SvtConfigDao.java
- copied unchanged from r4345, java/trunk/run-database/src/main/java/org/hps/run/database/SvtConfigDao.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/SvtConfigDaoImpl.java
- copied unchanged from r4345, java/trunk/run-database/src/main/java/org/hps/run/database/SvtConfigDaoImpl.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/TriggerConfigBuilder.java
- copied unchanged from r4345, java/trunk/run-database/src/main/java/org/hps/run/database/TriggerConfigBuilder.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/TriggerConfigDao.java
- copied unchanged from r4345, java/trunk/run-database/src/main/java/org/hps/run/database/TriggerConfigDao.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/TriggerConfigDaoImpl.java
- copied unchanged from r4345, java/trunk/run-database/src/main/java/org/hps/run/database/TriggerConfigDaoImpl.java
java/branches/HPSJAVA-409/run-database/src/test/java/org/hps/run/database/RunBuilderTest.java
- copied unchanged from r4345, java/trunk/run-database/src/test/java/org/hps/run/database/RunBuilderTest.java
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/production/DataQualityRecon.lcsim
- copied unchanged from r4345, java/trunk/steering-files/src/main/resources/org/hps/steering/production/DataQualityRecon.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/production/FilterHitsPerSensor.lcsim
- copied unchanged from r4345, java/trunk/steering-files/src/main/resources/org/hps/steering/production/FilterHitsPerSensor.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/PhysicsRun2016TrigPair0.lcsim
- copied unchanged from r4345, java/trunk/steering-files/src/main/resources/org/hps/steering/readout/PhysicsRun2016TrigPair0.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/recon/EngineeringRun2015STRecon.lcsim
- copied unchanged from r4345, java/trunk/steering-files/src/main/resources/org/hps/steering/recon/EngineeringRun2015STRecon.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/baltzell/Cosmic.lcsim
- copied unchanged from r4345, java/trunk/steering-files/src/main/resources/org/hps/steering/users/baltzell/Cosmic.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/baltzell/EngineeringRun2015FullRecon.lcsim
- copied unchanged from r4345, java/trunk/steering-files/src/main/resources/org/hps/steering/users/baltzell/EngineeringRun2015FullRecon.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/celentan/LedOnlineOfflineComparison.lcsim
- copied unchanged from r4345, java/trunk/steering-files/src/main/resources/org/hps/steering/users/celentan/LedOnlineOfflineComparison.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/EcalFeeViewer.lcsim
- copied unchanged from r4345, java/trunk/steering-files/src/main/resources/org/hps/steering/users/holly/EcalFeeViewer.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/EngRun2015FullReconMC_FEE.lcsim
- copied unchanged from r4345, java/trunk/steering-files/src/main/resources/org/hps/steering/users/holly/EngRun2015FullReconMC_FEE.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/EngineeringRun2015EcalRecon_noTwalk.lcsim
- copied unchanged from r4345, java/trunk/steering-files/src/main/resources/org/hps/steering/users/holly/EngineeringRun2015EcalRecon_noTwalk.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/PhysicsRun2016_FEEIter_Filter.lcsim
- copied unchanged from r4345, java/trunk/steering-files/src/main/resources/org/hps/steering/users/holly/PhysicsRun2016_FEEIter_Filter.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/TridentMCSkim.lcsim
- copied unchanged from r4345, java/trunk/steering-files/src/main/resources/org/hps/steering/users/meeg/TridentMCSkim.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/phansson/EventInfo.lcsim
- copied unchanged from r4345, java/trunk/steering-files/src/main/resources/org/hps/steering/users/phansson/EventInfo.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/phansson/HPSReconNoReadout.lcsim
- copied unchanged from r4345, java/trunk/steering-files/src/main/resources/org/hps/steering/users/phansson/HPSReconNoReadout.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/spaul/MollerBeamtilt.lcsim
- copied unchanged from r4345, java/trunk/steering-files/src/main/resources/org/hps/steering/users/spaul/MollerBeamtilt.lcsim
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/HpsHelicalTrackFit.java
- copied unchanged from r4345, java/trunk/tracking/src/main/java/org/hps/recon/tracking/HpsHelicalTrackFit.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/SvtPlotUtils.java
- copied unchanged from r4345, java/trunk/tracking/src/main/java/org/hps/recon/tracking/SvtPlotUtils.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/straight/
- copied from r4345, java/trunk/tracking/src/main/java/org/hps/recon/tracking/straight/
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/byale/
- copied from r4345, java/trunk/users/src/main/java/org/hps/users/byale/
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/celentan/LedOnlineDataDumpDriver.java
- copied unchanged from r4345, java/trunk/users/src/main/java/org/hps/users/celentan/LedOnlineDataDumpDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/celentan/LedOnlineOfflineComparisonDriver.java
- copied unchanged from r4345, java/trunk/users/src/main/java/org/hps/users/celentan/LedOnlineOfflineComparisonDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/SVTPhaseOffsetReader.java
- copied unchanged from r4345, java/trunk/users/src/main/java/org/hps/users/meeg/SVTPhaseOffsetReader.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/TrackCleanupDriver.java
- copied unchanged from r4345, java/trunk/users/src/main/java/org/hps/users/meeg/TrackCleanupDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/GblResidualDriver.java
- copied unchanged from r4345, java/trunk/users/src/main/java/org/hps/users/phansson/GblResidualDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/PrintEventInfoDriver.java
- copied unchanged from r4345, java/trunk/users/src/main/java/org/hps/users/phansson/PrintEventInfoDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/alignment/
- copied from r4345, java/trunk/users/src/main/java/org/hps/users/phansson/alignment/
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/apps/
- copied from r4345, java/trunk/users/src/main/java/org/hps/users/phansson/apps/
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/daq/
- copied from r4345, java/trunk/users/src/main/java/org/hps/users/phansson/daq/
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/gbl/
- copied from r4345, java/trunk/users/src/main/java/org/hps/users/phansson/gbl/
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/testrun/
- copied from r4345, java/trunk/users/src/main/java/org/hps/users/phansson/testrun/
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/tools/
- copied from r4345, java/trunk/users/src/main/java/org/hps/users/phansson/tools/
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/FindBiasOnRange.java
- copied unchanged from r4345, java/trunk/users/src/main/java/org/hps/users/spaul/FindBiasOnRange.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/ExtractFormFactors.java
- copied unchanged from r4345, java/trunk/users/src/main/java/org/hps/users/spaul/feecc/ExtractFormFactors.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/FEESpectrumGenerator.java
- copied unchanged from r4345, java/trunk/users/src/main/java/org/hps/users/spaul/feecc/FEESpectrumGenerator.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/FormFactor.java
- copied unchanged from r4345, java/trunk/users/src/main/java/org/hps/users/spaul/feecc/FormFactor.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/MottIntegral.java
- copied unchanged from r4345, java/trunk/users/src/main/java/org/hps/users/spaul/feecc/MottIntegral.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/MultipleScattering.java
- copied unchanged from r4345, java/trunk/users/src/main/java/org/hps/users/spaul/feecc/MultipleScattering.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/RemoveDuplicateParticles.java
- copied unchanged from r4345, java/trunk/users/src/main/java/org/hps/users/spaul/feecc/RemoveDuplicateParticles.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/ShowCustomBinningXY.java
- copied unchanged from r4345, java/trunk/users/src/main/java/org/hps/users/spaul/feecc/ShowCustomBinningXY.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/moller/
- copied from r4345, java/trunk/users/src/main/java/org/hps/users/spaul/moller/
Removed:
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/AbstractConditionsObjectConverter.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/CrawlerFileUtilities.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/DatacatUtilities.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/FileSet.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/LcioMetadataReader.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/RunSummaryMap.java
java/branches/HPSJAVA-409/datacat-client/
java/branches/HPSJAVA-409/detector-model/target/classes/org/
java/branches/HPSJAVA-409/detector-model/target/test-classes/org/
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SvtPlotUtils.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/job/
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/DAQConfigEvioProcessor.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EventCountProcessor.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EventTagBitMask.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioFileMetadata.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioFileMetadataAdapter.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioFileMetadataProcessor.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunDatabaseDaoFactory.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunProcessor.java
java/branches/HPSJAVA-409/run-database/src/test/java/org/hps/run/database/TiTriggerOffsetTest.java
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/production/DataQualityRecon_Pass2.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/recon/EngineeringRun2015FullReconMC_Pass2.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/recon/EngineeringRun2015FullRecon_Pass2.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/phansson/HitRecon.lcsim
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/HPSTrack.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLDriver.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/HpsGblFitter.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/AlignmentUtils.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/BeamCurrentData.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/Count.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/DAQDeadTimeData.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/DumpAIDATextFiles.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/ECalExtrapolationDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/EcalHitMapPlots.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/GlobalParameter.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/GlobalParameters.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/MPAlignmentInputCalculator.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/ModuleMPAlignmentInput.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/PolarCount.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/ROOTFlatTupleDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/ReadSurveyRotations.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/RegExpMatcherTester.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/ResLimit.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/RunMPAlignment.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/STUtils.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/SimpleHPSConditions.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/StraightThroughAnalysisDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/StripMPAlignmentInput.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/SvtHeaderAnalysisDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/SvtHeaderMetaDataReaderDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/SvtOldHeaderAnalysisDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/SvtOldHeaderDataInfo.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/TestSort.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/TrackingGeometryChecker.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/TrigRateAna.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/TrigRateDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/WTrack.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/dataMCPlots.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/ecalPlots.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/mergeSimpleAIDA.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/trigRate.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/analysis/
Modified:
java/branches/HPSJAVA-409/ (props changed)
java/branches/HPSJAVA-409/analysis/pom.xml
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/DQMDatabaseManager.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/DataQualityMonitor.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/EcalMonitoring.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/FinalStateMonitoring.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/PlotAndFitUtilities.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/SvtMonitoring.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/TrackingMonitoring.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/TrackingResiduals.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/TridentMonitoring.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/V0Monitoring.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/EcalCellIDPrintDriver.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/EcalClusterPlots.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/EcalHitPlots.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/FEEClusterPlotter.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/HPSEcalFADCPlotsDriver.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/HPSEcalTriggerPlotsDriver.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/HPSMCParticlePlotsDriver.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/cosmic/DualThresholdSignalFitDriver.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeHitSelectionDriver.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeSignalFitDriver.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/examples/PrintGeometryDriver.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/examples/StripGoldenEventsDriver.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/examples/StripMollerEventsDriver.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/DataTriggerSimDriver.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/SimTriggerData.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/SimTriggerModule.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/TriggerDiagnosticDriver.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/TriggerTurnOnDriver.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/ClusterEvent.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/ClusterMatchedPair.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/ClusterStatModule.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/DetailedClusterEvent.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/DiagnosticSnapshot.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/GeneralStatModule.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/RunDiagStats.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerDiagStats.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerEvent.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerStatModule.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/event/TriggerPlotsModule.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/ComponentUtils.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/OutputLogger.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/Pair.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/PairTrigger.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/SinglesTrigger.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/Trigger.java
java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/TriggerDiagnosticUtil.java
java/branches/HPSJAVA-409/conditions/ (props changed)
java/branches/HPSJAVA-409/conditions/pom.xml
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ConditionsDriver.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/BaseConditionsObject.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/BaseConditionsObjectCollection.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/ConditionsObjectCollection.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/ConditionsObjectException.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/ConditionsRecord.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/package-info.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/AbstractCommand.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/AddCommand.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/CommandLineTool.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/LoadCommand.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/PrintCommand.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/RunSummaryCommand.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/TagCommand.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/ConditionsRecordConverter.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/ConditionsTagConverter.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/ConnectionParameters.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/Converter.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/ConverterRegistry.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/DatabaseConditionsManager.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/package-info.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/dummy/DummyConditionsObjectConverter.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/EcalChannel.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/EcalConditions.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsConverter.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/TestRunEcalConditionsConverter.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/package-info.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/run/RunSpreadsheet.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/AbstractSvtDaqMapping.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/DaqMapHandler.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/MotorPositionLoader.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/OpeningAngleLoader.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/SvtBiasConditionsLoader.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/SvtConditionsLoader.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMapping.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/SvtT0Shift.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtChannel.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtConditions.java
java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtT0Shift.java
java/branches/HPSJAVA-409/crawler/pom.xml
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/AidaMetadataReader.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/CrawlerConfig.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/FileFormatFilter.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/FileMetadataReader.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/RootDqmMetadataReader.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/RootDstMetadataReader.java
java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/RunFilter.java
java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-3/compact.xml
java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-4-1/compact.xml
java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-4-1/compact_millepede-milleBinaryISN_hps_005772.evio.0.gbl-L3_L4_L5_tu_rw_THEN_L2_tu_rw_THEN_L2_L3_L4_tu_rw_THEN_L2_L3_L5_tu_rw_THEN_L2_L3_L4_L5_excl_L3ST_tu_L4Sb_tu_THEN_L2_L4_L5_tu_rw.xml
java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-4/compact.xml
java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-4/compact_millepede-milleBinaryISN_hps_005772.evio.0.gbl-L2-3-4_tu_rw.xml
java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-5-1/compact_millepede-milleBinaryISN_hps_005772.evio.0.gbl-L3_L4_L5_tu_rw_THEN_L2_tu_rw_THEN_L2_L3_L4_tu_rw_THEN_L2_L3_L5_tu_rw_THEN_L2_L3_L4_L5_excl_L3ST_tu_L4Sb_tu_THEN_L2_L4_L5_tu_rw.xml
java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-5-2/compact_millepede-milleBinaryISN_hps_005772.evio.0.gbl-HPS-EngRun2015-Nominal-v1-4-1-100k-L456_L123_L234_L345_L123_L456_tu_rwIter0Iter1Iter2Iter3Iter4Iter5.xml
java/branches/HPSJAVA-409/detector-data/detectors/HPS-Proposal2014-v3-2pt2-0zOffset/compact.xml
java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v5/compact.xml
java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v6/compact.xml
java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v7-2/compact.xml
java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v7-3/compact.xml
java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v7/compact.xml
java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v8-4/compact.xml
java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v8-5/compact.xml
java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v8/compact.xml
java/branches/HPSJAVA-409/detector-data/detectors/HPSTestRunTracker2014-v0/compact.xml
java/branches/HPSJAVA-409/detector-data/pom.xml
java/branches/HPSJAVA-409/detector-model/pom.xml
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTestRunTracker2014Converter.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTracker2014Converter.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTracker2014ConverterBase.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTrackerConverter.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/HpsTracker2.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/SvtStereoLayer.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsSiSensor.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsTestRunSiSensor.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014JavaBuilder.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014LCDDBuilder.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014JavaBuilder.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014v1GeometryDefinition.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerBuilder.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerGeometryDefinition.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerJavaBuilder.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerLCDDBuilder.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/IHPSTrackerJavaBuilder.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/IHPSTrackerLCDDBuilder.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/JavaGhostSurveyVolume.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/JavaSurveyVolume.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/LCDDGhostSurveyVolume.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/LCDDSurveyVolume.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/MilleParameter.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyCoordinateSystem.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolume.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolumeImpl.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolumeVisualization.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SvtAlignmentConstantsReader.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSMuonCalorimeter2.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTestRunTracker2014.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal3.java
java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSTestRunTracker2014.java
java/branches/HPSJAVA-409/detector-model/src/test/java/org/hps/detector/svt/SvtDetectorSetupTest.java
java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/detector/converter/compact/HPSTracker2ConverterTest.java
java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTestRunTracker2014LCDDTest.java
java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014LCDDTest.java
java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1LCDDTest.java
java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1SurveyLCDDTest.java
java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/subdetector/HPSTestRunTracker2014Test.java
java/branches/HPSJAVA-409/detector-model/src/test/resources/org/lcsim/geometry/subdetector/HPSTest.xml
java/branches/HPSJAVA-409/detector-model/src/test/resources/org/lcsim/geometry/subdetector/HPSTestRunTracker2014.xml
java/branches/HPSJAVA-409/detector-model/target/antrun/build-main.xml
java/branches/HPSJAVA-409/detector-model/target/hps-detector-model-3.4.2-SNAPSHOT-bin.jar
java/branches/HPSJAVA-409/detector-model/target/hps-detector-model-3.4.2-SNAPSHOT.jar
java/branches/HPSJAVA-409/detector-model/target/maven-archiver/pom.properties
java/branches/HPSJAVA-409/detector-model/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst
java/branches/HPSJAVA-409/detector-model/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst
java/branches/HPSJAVA-409/detector-model/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst
java/branches/HPSJAVA-409/detector-model/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst
java/branches/HPSJAVA-409/distribution/ (props changed)
java/branches/HPSJAVA-409/distribution/pom.xml
java/branches/HPSJAVA-409/distribution/src/main/java/org/hps/HPSJavaProperties.java
java/branches/HPSJAVA-409/ecal-event-display/pom.xml
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/event/Association.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/event/Cluster.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/event/EcalHit.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/event/Event.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/io/EventManager.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/io/TextManager.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/lcsim/EventDisplayOutputDriver.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/lcsim/LCIOBridgeDriver.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/ActiveViewer.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/CalorimeterPanel.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/DataFileViewer.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/FileViewer.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/PDataEventViewer.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/PassiveViewer.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/Viewer.java (contents, props changed)
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/BooleanMap.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/ColorScale.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/CrystalEvent.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/CrystalListener.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/DatabaseCheck.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/GradientScale.java
java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/MultiGradientScale.java
java/branches/HPSJAVA-409/ecal-readout-sim/pom.xml
java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/ClockSingleton.java
java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCEcalReadoutDriver.java
java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCPrimaryTriggerDriver.java
java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCTriggerDriver.java
java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCTriggerVariableDriver.java
java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/MollerTriggerDriver.java
java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/OccupancyAnalysisDriver.java
java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/ReadoutTrigger.java
java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/RingBuffer.java
java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/TimeEvolutionEcalReadoutDriver.java
java/branches/HPSJAVA-409/ecal-recon/pom.xml
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalCalibrationsDriver.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalConverterDriver.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalEdepToTriggerConverterDriver.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalOnlineRawConverter.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalOnlineRawConverterDriver.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalPedestalCalculator.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRawConverter.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRawConverterDriver.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRunningPedestalDriver.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalTimeWalk.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/FADCGenericHit.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/IterateGainFactorDriver.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/AbstractClusterer.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterDriver.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterEnergyCorrection.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterPositionCorrection.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterUtilities.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClustererFactory.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/CopyClusterCollectionDriver.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPClusterDriver.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPClusterer.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPOnlineClusterDriver.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPOnlineClusterer.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ReconClusterer.java
java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/SimpleCosmicClusterer.java
java/branches/HPSJAVA-409/evio/pom.xml
java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/AbstractSvtEvioReader.java
java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/AugmentedSvtEvioReader.java
java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/BasicEvioFileReader.java
java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/DummyEventBuilder.java
java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/EcalHitWriter.java
java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/EvioReader.java
java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/EvioToLcio.java
java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/LCSimEngRunEventBuilder.java
java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/RfFitFunction.java
java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/RfFitterDriver.java
java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/RfHit.java
java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/SvtEvioReader.java
java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/TestRunReconToEvio.java
java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/TestRunSvtEvioReader.java
java/branches/HPSJAVA-409/evio/src/test/java/org/hps/evio/LCSimEngRunEventBuilderTest.java
java/branches/HPSJAVA-409/evio/src/test/java/org/hps/evio/LCSimTestRunEventBuilderTest.java
java/branches/HPSJAVA-409/evio/src/test/java/org/hps/evio/SvtEvioReaderTest.java
java/branches/HPSJAVA-409/integration-tests/ (props changed)
java/branches/HPSJAVA-409/integration-tests/pom.xml
java/branches/HPSJAVA-409/integration-tests/src/test/java/org/hps/test/it/DataQualityMonitorTest.java
java/branches/HPSJAVA-409/integration-tests/src/test/java/org/hps/test/it/EvioToLcioTest.java
java/branches/HPSJAVA-409/integration-tests/src/test/java/org/hps/test/it/ReconSteeringTest.java
java/branches/HPSJAVA-409/integration-tests/src/test/java/org/hps/test/it/SimpleSvtReadoutTest.java
java/branches/HPSJAVA-409/integration-tests/src/test/resources/org/hps/ecalreadoutsim/EcalReadoutSimTest.lcsim
java/branches/HPSJAVA-409/integration-tests/src/test/resources/org/hps/steering/test/DataQualityTest.lcsim
java/branches/HPSJAVA-409/integration-tests/src/test/resources/org/hps/steering/test/Dummy.lcsim
java/branches/HPSJAVA-409/integration-tests/src/test/resources/org/hps/steering/test/EcalReadoutSimTest.lcsim
java/branches/HPSJAVA-409/job/pom.xml
java/branches/HPSJAVA-409/job/src/main/java/org/hps/job/JobManager.java
java/branches/HPSJAVA-409/logging/pom.xml
java/branches/HPSJAVA-409/logging/src/main/resources/org/hps/logging/config/logging.properties
java/branches/HPSJAVA-409/logging/src/main/resources/org/hps/logging/config/test_logging.properties
java/branches/HPSJAVA-409/monitoring-app/ (props changed)
java/branches/HPSJAVA-409/monitoring-app/pom.xml
java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/EventProcessing.java
java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/Main.java
java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/MonitoringApplication.java
java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/MonitoringApplicationFrame.java
java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/SystemStatusPanel.java
java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/util/TableExporter.java
java/branches/HPSJAVA-409/monitoring-drivers/pom.xml
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/scalers/DeadtimePlots.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/PedestalPlots.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SVTCellIDPrintDriver.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SVTHitRecoCorrelations.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SVTHitReconstructionPlots.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SampleZeroHVBiasChecker.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SensorOccupancyPlotsDriver.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SvtClusterPlots.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SvtHitPlots.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SvtTimingInPlots.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/PlotAndFitUtilities.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/SVTOpeningAlignment.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/TrackResiduals.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/TrackTimePlots.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/TrackingReconPlots.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/V0ReconPlots.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/BasicMonitoringPlotsDriver.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalClusterPlots.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalDaqPlots.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalEventDisplay.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalEventDisplayWithRawWaveform.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalHitPlots.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalLedSequenceMonitor.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalMonitoringPlots.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalPedestalViewer.java
java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalWindowPlotsXY.java
java/branches/HPSJAVA-409/monitoring-util/pom.xml
java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/plotting/ExportPdf.java
java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/subsys/et/package-info.java
java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/AbstractTablePanel.java
java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/AbstractTriggerTablePanel.java
java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/AbstractTwoColumnTablePanel.java
java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/ClusterTablePanel.java
java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/ComponentUtils.java
java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/DiagnosticUpdatable.java
java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/EfficiencyTablePanel.java
java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/PairTablePanel.java
java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/ShifterTrigPanel.java
java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/ShifterTrigWindow.java
java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/SinglesTablePanel.java
java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/TableTextModel.java
java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/TriggerDiagnosticGUIDriver.java
java/branches/HPSJAVA-409/parent/pom.xml
java/branches/HPSJAVA-409/plugin/pom.xml
java/branches/HPSJAVA-409/pom.xml
java/branches/HPSJAVA-409/recon/pom.xml
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/EcalGainCalibFilter.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/EcalPairsFilter.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/FEEFilterDriver.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/MinimumHitsFilter.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/PulserScalerAndEpicsFilter.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/V0CandidateFilter.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/particle/HpsReconParticleDriver.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/particle/ReconParticleDriver.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/particle/SimpleParticleID.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/utils/TrackClusterMatcher.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/BaseSimpleVertexer.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/BilliorVertex.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/BilliorVertexer.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/TwoLineVertexer.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/TwoParticleVertexer.java
java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/TwoTrackFringeVertexer.java
java/branches/HPSJAVA-409/recon/src/test/java/org/hps/recon/particle/HpsReconParticleDriverTest.java
java/branches/HPSJAVA-409/record-util/pom.xml
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/AbstractRecordProcessor.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/AbstractRecordQueue.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/RecordProcessor.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/composite/RecordProcessorAdapter.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/DAQConfig.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/DAQConfigDriver.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/EvioDAQParser.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/FADCConfig.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/GTPConfig.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/IDAQConfig.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/SSPConfig.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/epics/EpicsData.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/epics/EpicsEvioProcessor.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/epics/EpicsRunProcessor.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EventTagConstant.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioBankTag.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioDetectorConditionsProcessor.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioEventSkimmer.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioEventUtilities.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioFileProducer.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioFileSource.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioFileUtilities.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioLoop.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioLoopAdapter.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/scalers/ScalerUtilities.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/svt/SvtEvioUtils.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/AbstractIntData.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/SSPData.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/SSPNumberedTrigger.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/SSPPairTrigger.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/SSPSinglesTrigger.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/TIData.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/TiTimeOffsetEvioProcessor.java
java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/TriggerModule.java
java/branches/HPSJAVA-409/run-database/pom.xml
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EpicsDataDao.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EpicsType.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EpicsVariable.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunManager.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunSummary.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunSummaryDao.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunSummaryImpl.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/ScalerDataDaoImpl.java
java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/package-info.java
java/branches/HPSJAVA-409/steering-files/pom.xml
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/analysis/StarterAnalysis.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/EcalLedSequenceMonitoringApp.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/EcalLedSequenceStandalone.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/EcalMonitoringFinal.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/EcalMonitoringOnly.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/SvtOnlineMonitoring.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/TriggerDiagnosticsMonitoring.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/production/V0CandidateFilter.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/CommRun2014TightPairs.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/EngineeringRun2014PrescaledTriggers.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/HPS2014ReadoutNoPileup.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/HPS2014ReadoutToEvio.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/HPS2014TruthReadoutToLcio.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/LcioToEvio.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/TestRunNoPileup.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/TestRunReadoutToEvio.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/recon/EngineeringRun2014EcalReconMC.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/recon/EngineeringRun2015FullRecon.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/recon/EngineeringRun2015FullReconMC.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/celentan/LedAnalysisFromEvio.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/ClusterRecon.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/EcalSimReadout.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/EngineeringRun2015EcalOnly.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/EngineeringRun2015_FEEIter_Filter.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/QuickEcalReadout.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/EcalScoring.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/HitTimes.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/PairsSkimmer.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/SmallHits.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/ecal_fadc_bkgd.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/raw_triggers.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/phansson/EngineeringRun2015FullRecon_Pass2_Gbl.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/phansson/TestRunOfflineRecon.lcsim
java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/rafo/TestSteering.lcsim
java/branches/HPSJAVA-409/tracking/pom.xml
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/readout/svt/FpgaData.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/DumbShaperFit.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/HelicalTrackHitDriver.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/HitTimeTrackCheck.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/NearestNeighborRMSClusterer.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/NoiselessReadoutChip.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/SVTBadChannelFilterDriver.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/ShaperLinearFitAlgorithm.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/StrategyType.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackData.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackDataDriver.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackQualityData.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackResidualsData.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackTimeData.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackUtils.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackerReconDriver.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/WTrack.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/axial/HelicalTrack2DHit.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/FittedGblTrajectory.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLEventData.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLFileIO.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLKinkData.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLOutput.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLOutputDriver.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLRefitterDriver.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLStripClusterData.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLTrackData.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GblData.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GblPoint.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GblTrajectory.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GblUtils.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/HelicalTrackStripGbl.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/HpsGblRefitter.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/MakeGblTracks.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/TruthResiduals.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/matrix/Matrix.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/matrix/Vector.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/svt/alignment/BuildCompact.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/svt/alignment/BuildMillepedeCompact.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/svt/alignment/HPSStrips.java
java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/svt/alignment/MillepedeCompactDump.java
java/branches/HPSJAVA-409/tracking/src/main/resources/org/hps/recon/tracking/strategies/HPS-Test-All.xml
java/branches/HPSJAVA-409/tracking/src/test/java/org/hps/recon/tracking/HelicalTrackHitDriverTest.java
java/branches/HPSJAVA-409/tracking/src/test/java/org/hps/recon/tracking/TruthResidualTest.java
java/branches/HPSJAVA-409/users/pom.xml
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/baltzell/RfFitFunction.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/baltzell/RfFitterDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/baltzell/RfHit.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/celentan/RawPedestalComputator.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/celentan/StripChartTest.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/ClusterDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/EcalClusterICPosition.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/EcalClustererCosmics.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/EcalRawConverter.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/HPSEcalClusterIC.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/jeremym/EvioFileScanner.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/ClusterAnalysisDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/CountTriggersDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/EvioAnalysis.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/FADCAnalysisDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/HPSEcalDataPlotsDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/InvariantMassPairDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/MTEAnalysis.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/MTETriggerPlotsFormatter.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/ParticleMCAnalysisDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/PlotsFormatter.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/RafoAnalysis.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/TridentTrackDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/TriggerPlotsModule.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/TriggerProcessAnalysisDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/AddPlots.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/FormattedPlot.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/FormattedPlot1D.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/FormattedPlot2D.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/PlotFormatModule.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/PlotsFormatter.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/InvariantMassPlotsFormatter.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/MTEPlotFormatter.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/MTETriggerPlotsFormatter.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/ParticleMCAnalysisPlotsFormatter.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/RafoTridentFormatter.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/SingleTriggerPlotsFormatter.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/TridentTrackFormatter.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/TriggerPlotsFormat.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/FADCVariableTriggerFEEDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/FEETrigger.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/FEETrigger2.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/LCIOReadScript.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/rate.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/ratesim.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/FilterMCBunches.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/HPSEcalAnalogPrintDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/HPSEcalDigitalPrintDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/HPSEcalRawTrackerHitPrintDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/HPSGenericRawTrackerHitPrintDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/LCIOTrackAnalysis.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/MergeMCBunches.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/SvtChargeIntegrator.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/TridentMCFilter.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/ExamplePlotter.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/HPSTrackerHit.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/HelicalTrackHitResidualsDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/SVTRawTrackerHitThresholdDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/TrackExtrapolationAnalysis.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/TwoTrackAnalysis.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/EcalScoringPlaneDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/ExtrapolationAnalysis.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/LheToStdhep.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/PlotUtils.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/ReconstructedParticleChecker.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SharedHitAnalysis.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtClusterAnalysis.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtDataRates.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtHitCorrelations.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtQA.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtTrackAnalysis.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtTrackRecoEfficiency.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/TestRunTrackReconEfficiency.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/CmpGenToFittedTracksDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/DataTrackerFakeHitDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/FastTrackResidualDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/ParticleHelixProducer.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/SimpleResiduals.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/TrackingReconstructionPlots.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/TriggerTurnOnAnalysis.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/TruthMomentumResolutionDriver.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/rafo/test1.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/GetChargeFromScalersMultirun.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/HitrateHistograms.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/PulserFilter.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/StyleUtil.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/SumEverything.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/BinGenerator.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/CustomBinning.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/DisplayHistograms.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/EcalUtil.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/MakeHistograms.java
java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/ShowCustomBinning.java
java/branches/HPSJAVA-409/util/pom.xml
java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/CalculateAcceptanceFromMadGraph.java
java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/ConvertToStdhep.java
java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/DumpLHEEventsToASCII.java
java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/LCIOFilterDriver.java
java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/MergeBunches.java
Modified: java/branches/HPSJAVA-409/analysis/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/analysis/pom.xml (original)
+++ java/branches/HPSJAVA-409/analysis/pom.xml Wed Apr 27 11:11:32 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/analysis/</url>
@@ -21,4 +21,17 @@
<artifactId>hps-recon</artifactId>
</dependency>
</dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <excludes>
+ <exclude>**/VertexAnalysisTest.java</exclude>
+ </excludes>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
</project>
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/DQMDatabaseManager.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/DQMDatabaseManager.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/DQMDatabaseManager.java Wed Apr 27 11:11:32 2016
@@ -7,10 +7,7 @@
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.ConsoleHandler;
-import java.util.logging.Formatter;
import java.util.logging.Level;
-import java.util.logging.LogRecord;
import java.util.logging.Logger;
import org.hps.conditions.api.TableMetaData;
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/DataQualityMonitor.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/DataQualityMonitor.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/DataQualityMonitor.java Wed Apr 27 11:11:32 2016
@@ -1,5 +1,7 @@
package org.hps.analysis.dataquality;
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
@@ -7,7 +9,7 @@
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
-
+import org.apache.commons.lang3.StringUtils;
import org.hps.record.triggerbank.AbstractIntData;
import org.hps.record.triggerbank.TIData;
import org.lcsim.event.EventHeader;
@@ -23,8 +25,8 @@
* calculateEndOfRunQuantities & printDQMData i.e. useful methods
*/
public class DataQualityMonitor extends Driver {
-
- private static Logger LOGGER = Logger.getLogger(DataQualityMonitor.class.getPackage().getName());
+
+ private static final Logger LOGGER = Logger.getLogger(DataQualityMonitor.class.getPackage().getName());
protected AIDA aida = AIDA.defaultInstance();
protected DQMDatabaseManager manager;
@@ -38,6 +40,11 @@
protected boolean outputPlots = false;
protected String outputPlotDir = "DQMOutputPlots/";
+ protected PrintWriter tupleWriter = null;
+ protected String[] tupleVariables = {};
+ protected final Map<String, Double> tupleMap = new HashMap<String, Double>();
+ protected boolean cutTuple = false;
+
String triggerType = "all";//allowed types are "" (blank) or "all", singles0, singles1, pairs0,pairs1
public boolean isGBL = false;
@@ -58,7 +65,7 @@
}
public void setRunNumber(int run) {
- this.runNumber = run;
+ DataQualityMonitor.runNumber = run;
}
public void setOverwriteDB(boolean overwrite) {
@@ -81,17 +88,15 @@
this.outputPlotDir = dir;
}
- public void DataQualityMonitor() {
-
- }
-
+ @Override
public void endOfData() {
calculateEndOfRunQuantities();
fillEndOfRunPlots();
printDQMData();
- if (printDQMStrings)
+ if (printDQMStrings) {
printDQMStrings();
- LOGGER.info("Should I write to the database? " + connectToDB);
+ }
+ LOGGER.info("Write to database = " + connectToDB);
if (connectToDB) {
LOGGER.info("Connecting To Database...getting DQMDBManager");
manager = DQMDatabaseManager.getInstance();
@@ -99,17 +104,21 @@
boolean entryExists = false;
try {
entryExists = checkRowExists();
- if (entryExists)
+ if (entryExists) {
LOGGER.info("Found an existing run/reco entry in the dqm database; overwrite = " + overwriteDB);
+ }
} catch (SQLException ex) {
Logger.getLogger(DataQualityMonitor.class.getName()).log(Level.SEVERE, null, ex);
}
- if (!entryExists)
+ if (!entryExists) {
makeNewRow();
+ }
dumpDQMData();
}
-
+ if (tupleWriter != null) {
+ tupleWriter.close();
+ }
}
private void makeNewRow() {
@@ -127,9 +136,7 @@
private boolean checkRowExists() throws SQLException {
String ins = "select * from dqm where " + getRunRecoString();
ResultSet res = manager.selectQuery(ins);
- if (res.next()) //this is a funny way of determining if the ResultSet has any entries
- return true;
- return false;
+ return res.next(); //this is a funny way of determining if the ResultSet has any entries
}
public boolean checkSelectionIsNULL(String var) throws SQLException {
@@ -137,8 +144,9 @@
ResultSet res = manager.selectQuery(ins);
res.next();
double result = res.getDouble(var);
- if (res.wasNull())
- return true;
+ if (res.wasNull()) {
+ return true;
+ }
LOGGER.info("checkSelectionIsNULL::" + var + " = " + result);
return false;
}
@@ -183,16 +191,21 @@
}
public boolean matchTriggerType(TIData triggerData) {
- if (triggerType.contentEquals("") || triggerType.contentEquals("all"))
- return true;
- if (triggerData.isSingle0Trigger() && triggerType.contentEquals("singles0"))
- return true;
- if (triggerData.isSingle1Trigger() && triggerType.contentEquals("singles1"))
- return true;
- if (triggerData.isPair0Trigger() && triggerType.contentEquals("pairs0"))
- return true;
- if (triggerData.isPair1Trigger() && triggerType.contentEquals("pairs1"))
- return true;
+ if (triggerType.contentEquals("") || triggerType.contentEquals("all")) {
+ return true;
+ }
+ if (triggerData.isSingle0Trigger() && triggerType.contentEquals("singles0")) {
+ return true;
+ }
+ if (triggerData.isSingle1Trigger() && triggerType.contentEquals("singles1")) {
+ return true;
+ }
+ if (triggerData.isPair0Trigger() && triggerType.contentEquals("pairs0")) {
+ return true;
+ }
+ if (triggerData.isPair1Trigger() && triggerType.contentEquals("pairs1")) {
+ return true;
+ }
return false;
}
@@ -201,14 +214,18 @@
boolean match = true;
if (event.hasCollection(GenericObject.class, "TriggerBank")) {
List<GenericObject> triggerList = event.get(GenericObject.class, "TriggerBank");
- for (GenericObject data : triggerList)
+ for (GenericObject data : triggerList) {
if (AbstractIntData.getTag(data) == TIData.BANK_TAG) {
TIData triggerData = new TIData(data);
if (!matchTriggerType(triggerData))//only process singles0 triggers...
+ {
match = false;
+ }
}
- } else if (debug)
+ }
+ } else if (debug) {
LOGGER.info(this.getClass().getSimpleName() + ": No trigger bank found...running over all trigger types");
+ }
return match;
}
@@ -223,4 +240,42 @@
public void printDQMStrings() {
}
+ protected void writeTuple() {
+ for (String variable : tupleVariables) {
+ Double value = tupleMap.get(variable);
+ if (value == null) {
+ value = -9999.0;
+ }
+ if (variable.endsWith("/I") || variable.endsWith("/B")) {
+ tupleWriter.format("%d\t", Math.round(value));
+ } else {
+ tupleWriter.format("%f\t", value);
+ }
+ }
+ tupleWriter.println();
+ tupleMap.clear();
+ }
+
+ public void setTupleFile(String tupleFile) {
+ try {
+ tupleWriter = new PrintWriter(tupleFile);
+ } catch (FileNotFoundException e) {
+ tupleWriter = null;
+ }
+ tupleWriter.println(StringUtils.join(tupleVariables, ":"));
+// for (String variable : tupleVariables) {
+// tupleWriter.format("%s:", variable);
+// }
+// tupleWriter.println();
+ }
+
+ /**
+ * apply loose cuts to the tuple (cuts to be defined in the specific DQM
+ * driver)
+ *
+ * @param cutTuple
+ */
+ public void setCutTuple(boolean cutTuple) {
+ this.cutTuple = cutTuple;
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/EcalMonitoring.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/EcalMonitoring.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/EcalMonitoring.java Wed Apr 27 11:11:32 2016
@@ -8,6 +8,7 @@
import java.util.logging.Logger;
import org.apache.commons.math.stat.StatUtils;
+import org.hps.conditions.beam.BeamEnergy.BeamEnergyCollection;
import org.hps.recon.ecal.cluster.ClusterUtilities;
import org.lcsim.event.CalorimeterHit;
import org.lcsim.event.Cluster;
@@ -73,7 +74,11 @@
boolean fillHitPlots = true;
String[] ecalQuantNames = {"avg_N_hits", "avg_Hit_Energy",
"avg_N_clusters", "avg_N_hitsPerCluster", "avg_Cluster_Energy", "avg_ClusterTime"};
- double maxE = 1.1;
+
+ double maxFactor = 1.5;
+
+
+
private final String plotHitsDir = "EcalHits/";
private final String plotClustersDir = "EcalClusters/";
private final String plotFidCutDir = "FiducialCut/";
@@ -90,44 +95,49 @@
this.clusterCollectionName = clusterCollectionName;
}
+
public void setFillHitPlots(boolean fill) {
this.fillHitPlots = fill;
}
@Override
protected void detectorChanged(Detector detector) {
+ BeamEnergyCollection beamEnergyCollection =
+ this.getConditionsManager().getCachedConditions(BeamEnergyCollection.class, "beam_energies").getCachedData();
+ double beamEnergy = beamEnergyCollection.get(0).getBeamEnergy();
+ //this.getConditionsManager().getCachedConditions(org.hps.conditions.EcalChannelCollection.class, "ecal_channels").
LOGGER.info("EcalMonitoring::detectorChanged Setting up the plotter");
aida.tree().cd("/");
if (fillHitPlots) {
// Setup hit plots.
hitCountPlot = aida.histogram1D(plotHitsDir + triggerType + "/"+ calibratedHitCollectionName + " Hit Count In Event", 40, -0.5, 39.5);
hitTimePlot = aida.histogram1D(plotHitsDir + triggerType + "/"+calibratedHitCollectionName + " Hit Time", 50, 0 * 4.0, 50 * 4.0);
- hitEnergyPlot = aida.histogram1D(plotHitsDir + triggerType + "/"+calibratedHitCollectionName + " Hit Energy", 100, -0.1, maxE);
+ hitEnergyPlot = aida.histogram1D(plotHitsDir + triggerType + "/"+calibratedHitCollectionName + " Hit Energy", 100, -0.1, beamEnergy*maxFactor);
fiducialHitCountPlot = aida.histogram1D(plotHitsDir + triggerType + "/"+calibratedHitCollectionName + " Hit Count with Fiducial Cut", 10, -0.5, 9.5);
- fiducialEnergyPlot = aida.histogram1D(plotHitsDir + triggerType + "/"+calibratedHitCollectionName + " Hit Energy with Fiducial Cut", 100, -0.1, maxE);
+ fiducialEnergyPlot = aida.histogram1D(plotHitsDir + triggerType + "/"+calibratedHitCollectionName + " Hit Energy with Fiducial Cut", 100, -0.1, beamEnergy*maxFactor);
}
// Setup cluster plots
clusterCountPlot = aida.histogram1D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Cluster Count per Event", 10, -0.5, 9.5);
clusterSizePlot = aida.histogram1D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Cluster Size", 10, -0.5, 9.5);
- clusterEnergyPlot = aida.histogram1D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Cluster Energy", 100, -0.1, maxE);
+ clusterEnergyPlot = aida.histogram1D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Cluster Energy", 100, -0.1, beamEnergy*maxFactor);
clusterTimes = aida.histogram1D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Cluster Seed Times", 400, 0, 4.0 * 50);
clusterTimeMean = aida.histogram1D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Cluster Time Mean", 400, 0, 4.0 * 50);
clusterTimeSigma = aida.histogram1D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Cluster Time Sigma", 100, 0, 10);
- twoclusterTotEnergy = aida.histogram1D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Two Cluster Energy Sum", 100, 0, maxE);
- twoclusterEnergyAsymmetry = aida.histogram1D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Two Cluster Energy Asymmetry", 100, 0, 1.0);
- energyVsT = aida.histogram2D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Energy vs time", 400, 0.0, 200.0, 100, -0.1, maxE);
+ twoclusterTotEnergy = aida.histogram1D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Two Cluster Energy Sum", 100, 0, beamEnergy*maxFactor);
+ twoclusterEnergyAsymmetry = aida.histogram1D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Two Cluster Energy Asymmetry", 100, 0, beamEnergy*maxFactor);
+ energyVsT = aida.histogram2D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Energy vs time", 400, 0.0, 200.0, 100, -0.1, beamEnergy*maxFactor);
xVsY = aida.histogram2D(plotClustersDir + triggerType + "/"+clusterCollectionName + " X vs Y (NHits >1)", 200, -200.0, 200.0, 85, -85.0, 85.0);
- energyVsX = aida.histogram2D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Energy vs X", 50, 0, 1.6, 50, .0, 200.0);
- energyVsY = aida.histogram2D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Energy vs Y", 50, 0, 1.6, 50, 20.0, 85.0);
- pairsE1vsE2 = aida.histogram2D(plotClustersDir + triggerType + "/"+clusterCollectionName + "Pair E1 vs E2", 50, 0, 2, 50, 0, 2);
+ energyVsX = aida.histogram2D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Energy vs X", 50, 0, maxFactor*beamEnergy, 50, .0, 200.0);
+ energyVsY = aida.histogram2D(plotClustersDir + triggerType + "/"+clusterCollectionName + " Energy vs Y", 50, 0, maxFactor*beamEnergy, 50, 20.0, 85.0);
+ pairsE1vsE2 = aida.histogram2D(plotClustersDir + triggerType + "/"+clusterCollectionName + "Pair E1 vs E2", 50, 0, beamEnergy*maxFactor, 50, 0, beamEnergy*maxFactor);
pairsT1vsT2 = aida.histogram2D(plotClustersDir + triggerType + "/"+clusterCollectionName + "Pair T1 vs T2", 200, 0, 100, 200, 0, 100);
pairsDeltaT = aida.histogram1D(plotClustersDir + triggerType + "/" + clusterCollectionName + " Pair Time Difference", 100, -20.0, 20.0);
fiducialClusterCountPlot = aida.histogram1D(plotClustersDir + triggerType + "/"+plotFidCutDir + clusterCollectionName + " Cluster Count with Fiducal Cut", 10, -0.5, 9.5);
fiducialClusterSizePlot = aida.histogram1D(plotClustersDir+ triggerType + "/" +plotFidCutDir + clusterCollectionName + " Cluster Size with Fiducal Cut", 10, -0.5, 9.5);
- fiducialClusterEnergyPlot = aida.histogram1D(plotClustersDir + triggerType + "/"+plotFidCutDir + clusterCollectionName + " Cluster Energy with Fiducal Cut", 100, -0.1, maxE);
- fiducialenergyVsY = aida.histogram2D(plotClustersDir + triggerType + "/"+plotFidCutDir + clusterCollectionName + " Energy vs Y with Fiducial Cuts", 50, 0, 1.6, 50, 45.0, 85.0);
- fiducialenergyVsX = aida.histogram2D(plotClustersDir+ triggerType + "/" +plotFidCutDir + clusterCollectionName + " Energy vs X with Fiducial Cuts", 50, 0, 1.6, 50, 0.0, 200.0);
+ fiducialClusterEnergyPlot = aida.histogram1D(plotClustersDir + triggerType + "/"+plotFidCutDir + clusterCollectionName + " Cluster Energy with Fiducal Cut", 100, -0.1, beamEnergy*maxFactor);
+ fiducialenergyVsY = aida.histogram2D(plotClustersDir + triggerType + "/"+plotFidCutDir + clusterCollectionName + " Energy vs Y with Fiducial Cuts", 50, 0, beamEnergy*maxFactor, 50, 45.0, 85.0);
+ fiducialenergyVsX = aida.histogram2D(plotClustersDir+ triggerType + "/" +plotFidCutDir + clusterCollectionName + " Energy vs X with Fiducial Cuts", 50, 0, beamEnergy*maxFactor, 50, 0.0, 200.0);
}
@@ -265,5 +275,5 @@
public void printDQMStrings() {
}
-
+
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/FinalStateMonitoring.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/FinalStateMonitoring.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/FinalStateMonitoring.java Wed Apr 27 11:11:32 2016
@@ -17,6 +17,7 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.hps.conditions.beam.BeamEnergy.BeamEnergyCollection;
import org.hps.recon.ecal.cluster.ClusterUtilities;
import org.hps.recon.tracking.TrackType;
import org.hps.recon.tracking.TrackUtils;
@@ -60,9 +61,9 @@
double sumdelY = 0.0;
double sumEoverP = 0.0;
private final String plotDir = "FinalStateParticles/";
- double beamEnergy = 1.05; //GeV
+ // double beamEnergy = 1.05; //GeV
double maxFactor = 1.5;
- double feeMomentumCut = 0.8; //GeV
+ double feeMomentumCut = 0.75; //this number, multiplied by the beam energy, is the actual cut
IHistogram1D elePx;
IHistogram1D elePy;
@@ -97,12 +98,19 @@
/* number of unassocaited tracks/event */
IHistogram1D nUnAssTracksHisto;
+
+
public void setFinalStateParticlesColName(String fsp) {
this.finalStateParticlesColName = fsp;
}
@Override
protected void detectorChanged(Detector detector) {
+ BeamEnergyCollection beamEnergyCollection =
+ this.getConditionsManager().getCachedConditions(BeamEnergyCollection.class, "beam_energies").getCachedData();
+ double beamEnergy = beamEnergyCollection.get(0).getBeamEnergy();
+
+
LOGGER.info("Setting up the plotter");
aida.tree().cd("/");
String trkType="SeedTrack/";
@@ -111,24 +119,24 @@
/* Final State Particle Quantities */
/* plot electron & positron momentum separately */
- elePx = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Electron Px (GeV)", 100, -0.1, 0.200);
- elePy = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Electron Py (GeV)", 100, -0.1, 0.1);
+ elePx = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Electron Px (GeV)", 100, -0.1*beamEnergy, 0.200*beamEnergy);
+ elePy = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Electron Py (GeV)", 100, -0.1*beamEnergy, 0.1*beamEnergy);
elePz = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Electron Pz (GeV)", 100, 0, beamEnergy * maxFactor);
- elePzBeam = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Beam Electrons Total P (GeV)", 100, feeMomentumCut, beamEnergy * maxFactor);
- elePzBeamTop = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Beam Electrons Total P (GeV): Top", 100, feeMomentumCut, beamEnergy * maxFactor);
- elePzBeamBottom = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Beam Electrons Total P (GeV): Bottom", 100, feeMomentumCut, beamEnergy * maxFactor);
+ elePzBeam = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Beam Electrons Total P (GeV)", 100, feeMomentumCut*beamEnergy, beamEnergy * maxFactor);
+ elePzBeamTop = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Beam Electrons Total P (GeV): Top", 100, feeMomentumCut*beamEnergy, beamEnergy * maxFactor);
+ elePzBeamBottom = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Beam Electrons Total P (GeV): Bottom", 100, feeMomentumCut*beamEnergy, beamEnergy * maxFactor);
elePTop = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Electron Total P (GeV): Top", 100, 0, beamEnergy * maxFactor);
elePBottom = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Electron Total P (GeV): Bottom", 100, 0, beamEnergy * maxFactor);
- posPx = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Positron Px (GeV)", 50, -0.1, 0.200);
- posPy = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Positron Py (GeV)", 50, -0.1, 0.1);
+ posPx = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Positron Px (GeV)", 50, -0.1*beamEnergy, 0.200*beamEnergy);
+ posPy = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Positron Py (GeV)", 50, -0.1*beamEnergy, 0.1*beamEnergy);
posPz = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Positron Pz (GeV)", 50, 0, beamEnergy * maxFactor);
posPTop = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Positron Total P (GeV): Top", 100, 0, beamEnergy * maxFactor);
posPBottom = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Positron Total P (GeV): Bottom", 100, 0, beamEnergy * maxFactor);
/* photon quanties (...right now, just unassociated clusters) */
nPhotonsHisto = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Number of photons per event", 15, 0, 15);
- enePhoton = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Photon Energy (GeV)", 50, 0, 2.4);
+ enePhoton = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Photon Energy (GeV)", 50, 0, 2.4*beamEnergy);
xPhoton = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Photon X position (mm)", 50, -200, 200);
yPhoton = aida.histogram1D(plotDir +trkType+ triggerType + "/" + "Photon Y position (mm)", 50, -100, 100);
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/PlotAndFitUtilities.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/PlotAndFitUtilities.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/PlotAndFitUtilities.java Wed Apr 27 11:11:32 2016
@@ -19,6 +19,7 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.util.aida.AIDA;
@@ -29,7 +30,7 @@
public class PlotAndFitUtilities {
private static Logger LOGGER = Logger.getLogger(PlotAndFitUtilities.class.getPackage().getName());
-
+
static private AIDA aida = AIDA.defaultInstance();
/*
@@ -37,13 +38,15 @@
* copied from org.hps.monitoring.drivers.ecal.EcalMonitoringUtilities.java
*/
static IPlotter plot(IPlotterFactory plotterFactory, IBaseHistogram histogram, IPlotterStyle style, boolean show) {
- if (style == null)
+ if (style == null) {
style = getPlotterStyle(histogram);
+ }
IPlotter plotter = plotterFactory.create(histogram.title());
plotter.createRegion();
plotter.region(0).plot(histogram, style);
- if (show)
+ if (show) {
plotter.show();
+ }
return plotter;
}
@@ -52,8 +55,9 @@
* copied from org.hps.monitoring.drivers.ecal.EcalMonitoringUtilities.java
*/
static void plot(IPlotter plotter, IBaseHistogram histogram, IPlotterStyle style, int region) {
- if (style == null)
+ if (style == null) {
style = getPlotterStyle(histogram);
+ }
LOGGER.info("Putting plot in region " + region);
plotter.region(region).plot(histogram, style);
@@ -64,8 +68,9 @@
*/
static void plot(IPlotter plotter, IFunction function, IPlotterStyle style, int region) {
- if (style == null)
+ if (style == null) {
style = getPlotterStyle(function);
+ }
LOGGER.info("Putting function in region " + region);
plotter.region(region).plot(function, style);
}
@@ -78,16 +83,17 @@
StyleRegistry styleRegistry = StyleRegistry.getStyleRegistry();
IStyleStore store = styleRegistry.getStore("DefaultStyleStore");
IPlotterStyle style = null;
- if ((histogram instanceof IHistogram1D) || (histogram instanceof ICloud1D) || (histogram instanceof IProfile1D))
+ if ((histogram instanceof IHistogram1D) || (histogram instanceof ICloud1D) || (histogram instanceof IProfile1D)) {
style = store.getStyle("DefaultHistogram1DStyle");
- else if ((histogram instanceof IHistogram2D) || (histogram instanceof IProfile2D)) {
+ } else if ((histogram instanceof IHistogram2D) || (histogram instanceof IProfile2D)) {
style = store.getStyle("DefaultColorMapStyle");
style.statisticsBoxStyle().setVisible(false);
style.setParameter("hist2DStyle", "colorMap");
style.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
}
- if (style == null)
+ if (style == null) {
throw new RuntimeException("A default style could not be found for " + histogram.title());
+ }
//custom stuff...mg
style.dataStyle().errorBarStyle().setVisible(false);
@@ -106,8 +112,9 @@
IStyleStore store = styleRegistry.getStore("DefaultStyleStore");
IPlotterStyle style = null;
style = store.getStyle("DefaultFunctionStyle");
- if (style == null)
+ if (style == null) {
throw new RuntimeException("A default style could not be found for " + func.title());
+ }
return style;
}
@@ -122,10 +129,10 @@
parameters[2] = histogram.rms();
function.setParameters(parameters);
IFitResult fitResult = null;
- Logger minuitLogger = Logger.getLogger("org.freehep.math.minuit");
+ Logger minuitLogger = Logger.getLogger("org.freehep.math.minuit");
minuitLogger.setLevel(Level.OFF);
minuitLogger.info("minuit logger test");
-
+
try {
fitResult = fitter.fit(histogram, function);
} catch (RuntimeException e) {
@@ -142,4 +149,44 @@
}
}
+ private static final String nameStrip = "Tracker_TestRunModule_";
+
+ private static String getNiceSensorName(HpsSiSensor sensor) {
+ return sensor.getName().replaceAll(nameStrip, "")
+ .replace("module", "mod")
+ .replace("layer", "lyr")
+ .replace("sensor", "sens");
+ }
+
+ public static IHistogram1D getSensorPlot(String prefix, HpsSiSensor sensor) {
+ String hname = prefix + getNiceSensorName(sensor);
+ return aida.histogram1D(hname);
+ }
+
+// private static IHistogram1D getSensorPlot(String prefix, String sensorName) {
+// return aida.histogram1D(prefix + sensorName);
+// }
+ public static IHistogram1D createSensorPlot(String prefix, HpsSiSensor sensor, int nchan, double min, double max) {
+ String hname = prefix + getNiceSensorName(sensor);
+ IHistogram1D hist = aida.histogram1D(hname, nchan, min, max);
+ hist.setTitle(getNiceSensorName(sensor));
+
+ return hist;
+ }
+
+ public static IHistogram2D getSensorPlot2D(String prefix, HpsSiSensor sensor) {
+ String hname = prefix + getNiceSensorName(sensor);
+ return aida.histogram2D(hname);
+ }
+
+ public static IHistogram2D createSensorPlot2D(String prefix, HpsSiSensor sensor, int nchanX, double minX, double maxX, int nchanY, double minY, double maxY) {
+ String hname = prefix + getNiceSensorName(sensor);
+ IHistogram2D hist = aida.histogram2D(hname, nchanX, minX, maxX, nchanY, minY, maxY);
+ hist.setTitle(sensor.getName().replaceAll(nameStrip, "")
+ .replace("module", "mod")
+ .replace("layer", "lyr")
+ .replace("sensor", "sens"));
+
+ return hist;
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/SvtMonitoring.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/SvtMonitoring.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/SvtMonitoring.java Wed Apr 27 11:11:32 2016
@@ -38,7 +38,7 @@
*/
//TODO: add some more quantities to DQM database: <t0> or <sigma>_t0 for intime events; <chi^2>, <amplitude> etc
public class SvtMonitoring extends DataQualityMonitor {
-
+
private static Logger LOGGER = Logger.getLogger(SvtMonitoring.class.getPackage().getName());
private String rawTrackerHitCollectionName = "SVTRawTrackerHits";
@@ -90,22 +90,22 @@
aida.tree().cd("/");
for (HpsSiSensor sensor : sensors) {
//IHistogram1D occupancyPlot = aida.histogram1D(sensor.getName().replaceAll("Tracker_TestRunModule_", ""), 640, 0, 639);
- IHistogram1D occupancyPlot = createSensorPlot(plotDir + triggerType + "/"+"occupancy_", sensor, maxChannels, 0, maxChannels - 1);
- IHistogram1D t0Plot = createSensorPlot(plotDir + triggerType + "/"+"t0Hit_", sensor, 400, -100., 100.);
- IHistogram1D nHits = createSensorPlot(plotDir + triggerType + "/"+"nHitsPerEvent_", sensor, 100, -0.5, 99.5);
- IHistogram1D pileup = createSensorPlot(plotDir + triggerType + "/"+"nFitsPerHit_", sensor, 3, 0.5, 3.5);
-
- IHistogram1D amplitudePlot = createSensorPlot(plotDir + triggerType + "/"+"amplitude_", sensor, 50, 0, 4000.0);
- IHistogram2D t0AmpPlot = createSensorPlot2D(plotDir + triggerType + "/"+"t0AmpHit_", sensor, 200, -100., 100., 50, 0, 4000.0);
- IHistogram2D t0ChanPlot = createSensorPlot2D(plotDir + triggerType + "/"+"t0ChanBigHit_", sensor, 640, -0.5, 639.5, 200, -100., 100.);
- IHistogram2D ampChanPlot = createSensorPlot2D(plotDir + triggerType + "/"+"ampChanHit_", sensor, 640, -0.5, 639.5, 50, 0, 4000);
- IHistogram2D chiprobChanPlot = createSensorPlot2D(plotDir + triggerType + "/"+"chiprobChanBigHit_", sensor, 640, -0.5, 639.5, 50, 0, 1.0);
- IHistogram2D t0TrigTimeHitPlot = createSensorPlot2D(plotDir + triggerType + "/"+"t0BigHitTrigTime_", sensor, 400, -100., 100., 6, 0, 24);
-
- IHistogram1D chiProbPlot = createSensorPlot(plotDir + triggerType + "/"+"chiProb_", sensor, 50, 0, 1.0);
- IHistogram1D t0ClusterPlot = createSensorPlot(plotDir + triggerType + "/"+"t0Cluster_", sensor, 400, -100., 100.);
- IHistogram2D t0TrigTimePlot = createSensorPlot2D(plotDir + triggerType + "/"+"t0ClusterTrigTime_", sensor, 400, -100., 100., 6, 0, 24);
- IHistogram1D dedxClusterPlot = createSensorPlot(plotDir + triggerType + "/"+"electrons_", sensor, 50, 0., 10.);
+ IHistogram1D occupancyPlot = PlotAndFitUtilities.createSensorPlot(plotDir + triggerType + "/" + "occupancy_", sensor, maxChannels, 0, maxChannels - 1);
+ IHistogram1D t0Plot = PlotAndFitUtilities.createSensorPlot(plotDir + triggerType + "/" + "t0Hit_", sensor, 400, -100., 100.);
+ IHistogram1D nHits = PlotAndFitUtilities.createSensorPlot(plotDir + triggerType + "/" + "nHitsPerEvent_", sensor, 100, -0.5, 99.5);
+ IHistogram1D pileup = PlotAndFitUtilities.createSensorPlot(plotDir + triggerType + "/" + "nFitsPerHit_", sensor, 3, 0.5, 3.5);
+
+ IHistogram1D amplitudePlot = PlotAndFitUtilities.createSensorPlot(plotDir + triggerType + "/" + "amplitude_", sensor, 50, 0, 4000.0);
+ IHistogram2D t0AmpPlot = PlotAndFitUtilities.createSensorPlot2D(plotDir + triggerType + "/" + "t0AmpHit_", sensor, 200, -100., 100., 50, 0, 4000.0);
+ IHistogram2D t0ChanPlot = PlotAndFitUtilities.createSensorPlot2D(plotDir + triggerType + "/" + "t0ChanBigHit_", sensor, 640, -0.5, 639.5, 200, -100., 100.);
+ IHistogram2D ampChanPlot = PlotAndFitUtilities.createSensorPlot2D(plotDir + triggerType + "/" + "ampChanHit_", sensor, 640, -0.5, 639.5, 50, 0, 4000);
+ IHistogram2D chiprobChanPlot = PlotAndFitUtilities.createSensorPlot2D(plotDir + triggerType + "/" + "chiprobChanBigHit_", sensor, 640, -0.5, 639.5, 50, 0, 1.0);
+ IHistogram2D t0TrigTimeHitPlot = PlotAndFitUtilities.createSensorPlot2D(plotDir + triggerType + "/" + "t0BigHitTrigTime_", sensor, 400, -100., 100., 6, 0, 24);
+
+ IHistogram1D chiProbPlot = PlotAndFitUtilities.createSensorPlot(plotDir + triggerType + "/" + "chiProb_", sensor, 50, 0, 1.0);
+ IHistogram1D t0ClusterPlot = PlotAndFitUtilities.createSensorPlot(plotDir + triggerType + "/" + "t0Cluster_", sensor, 400, -100., 100.);
+ IHistogram2D t0TrigTimePlot = PlotAndFitUtilities.createSensorPlot2D(plotDir + triggerType + "/" + "t0ClusterTrigTime_", sensor, 400, -100., 100., 6, 0, 24);
+ IHistogram1D dedxClusterPlot = PlotAndFitUtilities.createSensorPlot(plotDir + triggerType + "/" + "electrons_", sensor, 50, 0., 10.);
occupancyPlot.reset();
}
@@ -116,10 +116,11 @@
public void process(EventHeader event) {
- //check to see if this event is from the correct trigger (or "all");
- if (!matchTrigger(event))
+ //check to see if this event is from the correct trigger (or "all");
+ if (!matchTrigger(event)) {
return;
-
+ }
+
/* increment the strip occupancy arrays */
Map<String, Integer> hitsPerSensor = new HashMap<String, Integer>();
@@ -140,7 +141,7 @@
++eventCountRaw;
}
for (HpsSiSensor sensor : sensors) {
- IHistogram1D sensorHist = getSensorPlot(plotDir + triggerType + "/"+"nHitsPerEvent_", sensor);
+ IHistogram1D sensorHist = PlotAndFitUtilities.getSensorPlot(plotDir + triggerType + "/" + "nHitsPerEvent_", sensor);
Integer nHits = hitsPerSensor.get(sensor.getName());
if (nHits == null) {
sensorHist.fill(0);
@@ -161,22 +162,22 @@
RawTrackerHit rth = (RawTrackerHit) hit.getFrom();
GenericObject pars = (GenericObject) hit.getTo();
- String sensorName = getNiceSensorName((HpsSiSensor) rth.getDetectorElement());
+ HpsSiSensor sensor = ((HpsSiSensor) rth.getDetectorElement());
//this is a clever way to get the parameters we want from the generic object
double t0 = ShapeFitParameters.getT0(pars);
double amp = ShapeFitParameters.getAmp(pars);
double chiProb = ShapeFitParameters.getChiProb(pars);
int channel = rth.getIdentifierFieldValue("strip");
- getSensorPlot(plotDir + triggerType + "/"+"nFitsPerHit_", sensorName).fill(rthtofit.allFrom(rth).size());
- getSensorPlot(plotDir + triggerType + "/"+"t0Hit_", sensorName).fill(t0);
- getSensorPlot(plotDir + triggerType + "/"+"amplitude_", sensorName).fill(amp);
- getSensorPlot2D(plotDir + triggerType + "/"+"t0AmpHit_", sensorName).fill(t0, amp);
- getSensorPlot(plotDir + triggerType + "/"+"chiProb_", sensorName).fill(chiProb);
- getSensorPlot2D(plotDir + triggerType + "/"+"ampChanHit_", sensorName).fill(channel, amp);
+ PlotAndFitUtilities.getSensorPlot(plotDir + triggerType + "/" + "nFitsPerHit_", sensor).fill(rthtofit.allFrom(rth).size());
+ PlotAndFitUtilities.getSensorPlot(plotDir + triggerType + "/" + "t0Hit_", sensor).fill(t0);
+ PlotAndFitUtilities.getSensorPlot(plotDir + triggerType + "/" + "amplitude_", sensor).fill(amp);
+ PlotAndFitUtilities.getSensorPlot2D(plotDir + triggerType + "/" + "t0AmpHit_", sensor).fill(t0, amp);
+ PlotAndFitUtilities.getSensorPlot(plotDir + triggerType + "/" + "chiProb_", sensor).fill(chiProb);
+ PlotAndFitUtilities.getSensorPlot2D(plotDir + triggerType + "/" + "ampChanHit_", sensor).fill(channel, amp);
if (amp > 1000.0) {
- getSensorPlot2D(plotDir + triggerType + "/"+"t0ChanBigHit_", sensorName).fill(channel, t0);
- getSensorPlot2D(plotDir + triggerType + "/"+"chiprobChanBigHit_", sensorName).fill(channel, chiProb);
- getSensorPlot2D(plotDir + triggerType + "/"+"t0BigHitTrigTime_", sensorName).fill(t0, event.getTimeStamp() % 24);
+ PlotAndFitUtilities.getSensorPlot2D(plotDir + triggerType + "/" + "t0ChanBigHit_", sensor).fill(channel, t0);
+ PlotAndFitUtilities.getSensorPlot2D(plotDir + triggerType + "/" + "chiprobChanBigHit_", sensor).fill(channel, chiProb);
+ PlotAndFitUtilities.getSensorPlot2D(plotDir + triggerType + "/" + "t0BigHitTrigTime_", sensor).fill(t0, event.getTimeStamp() % 24);
}
}
++eventCountFit;
@@ -186,55 +187,15 @@
// LOGGER.info("Found a Si cluster collection");
List<TrackerHit> siClusters = (List<TrackerHit>) event.get(trackerHitCollectionName);
for (TrackerHit cluster : siClusters) {
- String sensorName = getNiceSensorName((HpsSiSensor) ((RawTrackerHit) cluster.getRawHits().get(0)).getDetectorElement());
+ HpsSiSensor sensor = (HpsSiSensor) ((RawTrackerHit) cluster.getRawHits().get(0)).getDetectorElement();
double t0 = cluster.getTime();
double dedx = cluster.getdEdx() * 1e6;
// LOGGER.info("dedx = "+dedx);
- getSensorPlot(plotDir + triggerType + "/"+"t0Cluster_", sensorName).fill(t0);
- getSensorPlot2D(plotDir + triggerType + "/"+"t0ClusterTrigTime_", sensorName).fill(t0, event.getTimeStamp() % 24);
- getSensorPlot(plotDir + triggerType + "/"+"electrons_", sensorName).fill(dedx);
- }
- }
- }
-
- private IHistogram1D getSensorPlot(String prefix, HpsSiSensor sensor) {
- String hname = prefix + getNiceSensorName(sensor);
- return aida.histogram1D(hname);
- }
-
- private IHistogram1D getSensorPlot(String prefix, String sensorName) {
- return aida.histogram1D(prefix + sensorName);
- }
-
- private IHistogram1D createSensorPlot(String prefix, HpsSiSensor sensor, int nchan, double min, double max) {
- String hname = prefix + getNiceSensorName(sensor);
- IHistogram1D hist = aida.histogram1D(hname, nchan, min, max);
- hist.setTitle(sensor.getName().replaceAll(nameStrip, "")
- .replace("module", "mod")
- .replace("layer", "lyr")
- .replace("sensor", "sens"));
-
- return hist;
- }
-
- private IHistogram2D getSensorPlot2D(String prefix, HpsSiSensor sensor) {
- String hname = prefix + getNiceSensorName(sensor);
- return aida.histogram2D(hname);
- }
-
- private IHistogram2D getSensorPlot2D(String prefix, String sensorName) {
- return aida.histogram2D(prefix + sensorName);
- }
-
- private IHistogram2D createSensorPlot2D(String prefix, HpsSiSensor sensor, int nchanX, double minX, double maxX, int nchanY, double minY, double maxY) {
- String hname = prefix + getNiceSensorName(sensor);
- IHistogram2D hist = aida.histogram2D(hname, nchanX, minX, maxX, nchanY, minY, maxY);
- hist.setTitle(sensor.getName().replaceAll(nameStrip, "")
- .replace("module", "mod")
- .replace("layer", "lyr")
- .replace("sensor", "sens"));
-
- return hist;
+ PlotAndFitUtilities.getSensorPlot(plotDir + triggerType + "/" + "t0Cluster_", sensor).fill(t0);
+ PlotAndFitUtilities.getSensorPlot2D(plotDir + triggerType + "/" + "t0ClusterTrigTime_", sensor).fill(t0, event.getTimeStamp() % 24);
+ PlotAndFitUtilities.getSensorPlot(plotDir + triggerType + "/" + "electrons_", sensor).fill(dedx);
+ }
+ }
}
private void resetOccupancyMap() {
@@ -279,7 +240,7 @@
for (HpsSiSensor sensor : sensors) {
Double avg = 0.0;
//IHistogram1D sensorHist = aida.histogram1D(sensor.getName());
- IHistogram1D sensorHist = getSensorPlot(plotDir + triggerType + "/"+"occupancy_", sensor);
+ IHistogram1D sensorHist = PlotAndFitUtilities.getSensorPlot(plotDir + triggerType + "/" + "occupancy_", sensor);
sensorHist.reset();
int[] strips = occupancyMap.get(sensor.getName());
for (int i = 0; i < strips.length; i++) {
@@ -314,7 +275,7 @@
int irTop = 0;
int irBot = 0;
for (HpsSiSensor sensor : sensors) {
- IHistogram1D sensPlot = getSensorPlot(plotDir + triggerType + "/"+"t0Hit_", sensor);
+ IHistogram1D sensPlot = PlotAndFitUtilities.getSensorPlot(plotDir + triggerType + "/" + "t0Hit_", sensor);
IFitResult result = fitGaussian(sensPlot, fitter, "range=\"(-8.0,8.0)\"");
boolean isTop = sensor.isTopLayer();
@@ -362,9 +323,9 @@
@Override
public void printDQMData() {
for (HpsSiSensor sensor : sensors) {
- LOGGER.info(avgOccupancyNames.get(sensor.getName()) + " " +triggerType+" " + avgOccupancyMap.get(sensor.getName()));
- LOGGER.info(avgt0Names.get(sensor.getName()) + " " +triggerType+" " + avgt0Map.get(sensor.getName()));
- LOGGER.info(sigt0Names.get(sensor.getName()) + " " +triggerType+" " + sigt0Map.get(sensor.getName()));
+ LOGGER.info(avgOccupancyNames.get(sensor.getName()) + " " + triggerType + " " + avgOccupancyMap.get(sensor.getName()));
+ LOGGER.info(avgt0Names.get(sensor.getName()) + " " + triggerType + " " + avgt0Map.get(sensor.getName()));
+ LOGGER.info(sigt0Names.get(sensor.getName()) + " " + triggerType + " " + sigt0Map.get(sensor.getName()));
}
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/TrackingMonitoring.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/TrackingMonitoring.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/TrackingMonitoring.java Wed Apr 27 11:11:32 2016
@@ -8,16 +8,17 @@
import hep.physics.vec.BasicHep3Matrix;
import hep.physics.vec.Hep3Vector;
import hep.physics.vec.VecOp;
-
+import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
-
import org.hps.recon.tracking.CoordinateTransformations;
import org.hps.recon.tracking.TrackUtils;
+import org.hps.recon.tracking.gbl.GBLKinkData;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.event.EventHeader;
+import org.lcsim.event.GenericObject;
import org.lcsim.event.LCRelation;
import org.lcsim.event.RawTrackerHit;
import org.lcsim.event.RelationalTable;
@@ -37,7 +38,7 @@
public class TrackingMonitoring extends DataQualityMonitor {
private static Logger LOGGER = Logger.getLogger(SvtMonitoring.class.getPackage().getName());
-
+
private String helicalTrackHitCollectionName = "HelicalTrackHits";
private final String rotatedTrackHitCollectionName = "RotatedHelicalTrackHits";
private final String helicalTrackHitRelationsCollectionName = "HelicalTrackHitRelations";
@@ -64,6 +65,7 @@
private final String botDir = "Bottom/";
private final String hthplotDir = "HelicalTrackHits/";
private final String timeresidDir = "HitTimeResiduals/";
+ private final String kinkDir = "Kinks/";
String[] trackingQuantNames = {"avg_N_tracks", "avg_N_hitsPerTrack", "avg_d0", "avg_z0", "avg_absslope", "avg_chi2"};
int nmodules = 6;
IHistogram1D[] hthTop = new IHistogram1D[nmodules];
@@ -163,7 +165,8 @@
IHistogram2D chi2VsLambda;
IHistogram2D chi2VsZ0;
- IHistogram2D beamAngle2D;
+ IHistogram2D beamAngleXY;
+ IHistogram2D beamAngleThetaPhi;
IHistogram1D L1Iso;
IHistogram1D L12Iso;
@@ -286,7 +289,8 @@
chi2VsLambda = aida.histogram2D(plotDir + trackCollectionName + "/" + triggerType + "/" + "chi2 vs lambda", 50, -lambdaCut, lambdaCut, 50, 0.0, 50.0);
chi2VsZ0 = aida.histogram2D(plotDir + trackCollectionName + "/" + triggerType + "/" + "chi2 vs z0", 50, -z0Cut, z0Cut, 50, 0.0, 50.0);
- beamAngle2D = aida.histogram2D(plotDir + trackCollectionName + "/" + triggerType + "/" + "angles around beam axis: theta vs phi", 100, -Math.PI, Math.PI, 100, 0, 0.25);
+ beamAngleXY = aida.histogram2D(plotDir + trackCollectionName + "/" + triggerType + "/" + "angles around beam axis: theta_y vs theta_x", 100, -0.1, 0.1, 100, -0.1, 0.1);
+ beamAngleThetaPhi = aida.histogram2D(plotDir + trackCollectionName + "/" + triggerType + "/" + "angles around beam axis: theta vs phi", 100, -Math.PI, Math.PI, 100, 0, 0.25);
L1Iso = aida.histogram1D(plotDir + trackCollectionName + "/" + triggerType + "/" + "L1 isolation", 100, -5.0, 5.0);
L12Iso = aida.histogram1D(plotDir + trackCollectionName + "/" + triggerType + "/" + "L1-2 isolation", 100, -5.0, 5.0);
@@ -300,9 +304,12 @@
aida.tree().cd("/");
for (HpsSiSensor sensor : sensors) {
//IHistogram1D occupancyPlot = aida.histogram1D(sensor.getName().replaceAll("Tracker_TestRunModule_", ""), 640, 0, 639);
- IHistogram1D hitTimeResidual = createSensorPlot(plotDir + trackCollectionName + "/" + triggerType + "/" + timeresidDir + "hitTimeResidual_", sensor, 100, -20, 20);
- }
-
+ IHistogram1D hitTimeResidual = PlotAndFitUtilities.createSensorPlot(plotDir + trackCollectionName + "/" + triggerType + "/" + timeresidDir + "hitTimeResidual_", sensor, 100, -20, 20);
+ IHistogram1D lambdaKink = PlotAndFitUtilities.createSensorPlot(plotDir + trackCollectionName + "/" + triggerType + "/" + kinkDir + "lambdaKink_", sensor, 100, -5e-3, 5e-3);
+ IHistogram1D phiKink = PlotAndFitUtilities.createSensorPlot(plotDir + trackCollectionName + "/" + triggerType + "/" + kinkDir + "phiKink_", sensor, 100, -5e-3, 5e-3);
+ IHistogram2D lambdaKink2D = PlotAndFitUtilities.createSensorPlot2D(plotDir + trackCollectionName + "/" + triggerType + "/" + kinkDir + "lambdaKinkVsOmega_", sensor, 100, -omegaCut, omegaCut, 100, -5e-3, 5e-3);
+ IHistogram2D phiKink2D = PlotAndFitUtilities.createSensorPlot2D(plotDir + trackCollectionName + "/" + triggerType + "/" + kinkDir + "phiKinkVsOmega_", sensor, 100, -omegaCut, omegaCut, 100, -5e-3, 5e-3);
+ }
}
@Override
@@ -366,9 +373,8 @@
int cntPos = 0;
int cntTop = 0;
int cntBot = 0;
- double ecalFace = 1393.0;//mm
for (Track trk : tracks) {
- Hep3Vector trackPosAtEcalFace = TrackUtils.extrapolateTrack(trk, ecalFace);
+ Hep3Vector trackPosAtEcalFace = TrackUtils.getTrackPositionAtEcal(trk);
double xAtECal = trackPosAtEcalFace.x();
double yAtECal = trackPosAtEcalFace.y();
if (yAtECal > 0) {
@@ -385,7 +391,7 @@
double sinphi0 = Math.sin(trk.getTrackStates().get(0).getPhi());
double omega = trk.getTrackStates().get(0).getOmega();
double lambda = trk.getTrackStates().get(0).getTanLambda();
- double z0 = trk.getTrackStates().get(0).getZ0();
+ double z0 = trk.getTrackStates().get(0).getZ0();
trkChi2.fill(trk.getChi2());
nHits.fill(trk.getTrackerHits().size());
trackNhitsVsChi2.fill(trk.getChi2(), trk.getTrackerHits().size());
@@ -418,7 +424,8 @@
double beamPhi = Math.atan2(dirRotated.y(), dirRotated.x());
double beamTheta = Math.acos(dirRotated.z());
- beamAngle2D.fill(beamPhi, beamTheta);
+ beamAngleXY.fill(dirRotated.x(), dirRotated.y());
+ beamAngleThetaPhi.fill(beamPhi, beamTheta);
Double[] isolations = TrackUtils.getIsolations(trk, hitToStrips, hitToRotated);
double l1Iso = Double.MAX_VALUE;
@@ -447,8 +454,12 @@
int nSeedStrips = 0;
double meanTime = 0;
double meanSeedTime = 0;
+
+ List<TrackerHit> stripHits = new ArrayList<TrackerHit>();
+
for (TrackerHit hit : trk.getTrackerHits()) {
Collection<TrackerHit> htsList = hitToStrips.allFrom(hitToRotated.from(hit));
+ stripHits.addAll(htsList);
double hitTimes[] = new double[2];
for (TrackerHit hts : htsList) {
int stripLayer = ((HpsSiSensor) ((RawTrackerHit) hts.getRawHits().get(0)).getDetectorElement()).getLayerNumber();
@@ -479,18 +490,16 @@
double rmsTime = 0;
double rmsSeedTime = 0;
- for (TrackerHit hit : trk.getTrackerHits()) {
- Collection<TrackerHit> htsList = hitToStrips.allFrom(hitToRotated.from(hit));
- for (TrackerHit hts : htsList) {
- rmsTime += Math.pow(hts.getTime() - meanTime, 2);
- HpsSiSensor sensor = (HpsSiSensor) ((RawTrackerHit) hts.getRawHits().get(0)).getDetectorElement();
- int layer = sensor.getLayerNumber();
- if (layer <= 6) {
- rmsSeedTime += Math.pow(hts.getTime() - meanSeedTime, 2);
- }
- String sensorName = getNiceSensorName(sensor);
- getSensorPlot(plotDir + trackCollectionName + "/" + triggerType + "/" + timeresidDir + "hitTimeResidual_", sensorName).fill((hts.getTime() - meanTime) * nStrips / (nStrips - 1)); //correct residual for bias
+
+ stripHits = TrackUtils.sortHits(stripHits);
+ for (TrackerHit hts : stripHits) {
+ rmsTime += Math.pow(hts.getTime() - meanTime, 2);
+ HpsSiSensor sensor = (HpsSiSensor) ((RawTrackerHit) hts.getRawHits().get(0)).getDetectorElement();
+ int layer = sensor.getLayerNumber();
+ if (layer <= 6) {
+ rmsSeedTime += Math.pow(hts.getTime() - meanSeedTime, 2);
}
+ PlotAndFitUtilities.getSensorPlot(plotDir + trackCollectionName + "/" + triggerType + "/" + timeresidDir + "hitTimeResidual_", sensor).fill((hts.getTime() - meanTime) * nStrips / (nStrips - 1)); //correct residual for bias
}
rmsTime = Math.sqrt(rmsTime / nStrips);
trackMeanTime.fill(meanTime);
@@ -501,6 +510,23 @@
rmsSeedTime = Math.sqrt(rmsSeedTime / nSeedStrips);
seedRMSTime.fill(rmsSeedTime);
+
+ GenericObject kinkData = GBLKinkData.getKinkData(event, trk);
+ if (kinkData != null) {
+ for (int i = 0; i < stripHits.size(); i++) {
+ TrackerHit hts = stripHits.get(i);
+ HpsSiSensor sensor = (HpsSiSensor) ((RawTrackerHit) hts.getRawHits().get(0)).getDetectorElement();
+// int layer = sensor.getLayerNumber();
+ double lambdaKink = GBLKinkData.getLambdaKink(kinkData, i);
+ double phiKink = GBLKinkData.getPhiKink(kinkData, i);
+// System.out.format("%d %d %f %f\n", i, layer, lambdaKink, phiKink);
+
+ PlotAndFitUtilities.getSensorPlot(plotDir + trackCollectionName + "/" + triggerType + "/" + kinkDir + "lambdaKink_", sensor).fill(lambdaKink);
+ PlotAndFitUtilities.getSensorPlot(plotDir + trackCollectionName + "/" + triggerType + "/" + kinkDir + "phiKink_", sensor).fill(phiKink);
+ PlotAndFitUtilities.getSensorPlot2D(plotDir + trackCollectionName + "/" + triggerType + "/" + kinkDir + "lambdaKinkVsOmega_", sensor).fill(trk.getTrackStates().get(0).getOmega(), lambdaKink);
+ PlotAndFitUtilities.getSensorPlot2D(plotDir + trackCollectionName + "/" + triggerType + "/" + kinkDir + "phiKinkVsOmega_", sensor).fill(trk.getTrackStates().get(0).getOmega(), phiKink);
+ }
+ }
if (trk.getTrackStates().get(0).getOmega() < 0) {//positrons
trkChi2Pos.fill(trk.getChi2());
@@ -568,10 +594,10 @@
for (HpsSiSensor sensor : sensors) {
//IHistogram1D occupancyPlot = aida.histogram1D(sensor.getName().replaceAll("Tracker_TestRunModule_", ""), 640, 0, 639);
- IHistogram1D hitTimeResidual = getSensorPlot(plotDir + trackCollectionName + "/" + triggerType + "/" + timeresidDir + "hitTimeResidual_", getNiceSensorName(sensor));
+ IHistogram1D hitTimeResidual = PlotAndFitUtilities.getSensorPlot(plotDir + trackCollectionName + "/" + triggerType + "/" + timeresidDir + "hitTimeResidual_", sensor);
IFitResult result = fitGaussian(hitTimeResidual, fitter, "range=\"(-20.0,20.0)\"");
if (result != null) {
- LOGGER.info(String.format("%s\t%f\t%f\t%d\t%d", getNiceSensorName(sensor), result.fittedParameters()[1], result.fittedParameters()[2], sensor.getFebID(), sensor.getFebHybridID()));
+ System.out.format("%s\t%f\t%f\t%d\t%d\t%f\n", getNiceSensorName(sensor), result.fittedParameters()[1], result.fittedParameters()[2], sensor.getFebID(), sensor.getFebHybridID(), sensor.getT0Shift());
}
}
@@ -611,26 +637,6 @@
}
}
- private IHistogram1D getSensorPlot(String prefix, HpsSiSensor sensor) {
- String hname = prefix + getNiceSensorName(sensor);
- return aida.histogram1D(hname);
- }
-
- private IHistogram1D getSensorPlot(String prefix, String sensorName) {
- return aida.histogram1D(prefix + sensorName);
- }
-
- private IHistogram1D createSensorPlot(String prefix, HpsSiSensor sensor, int nchan, double min, double max) {
- String hname = prefix + getNiceSensorName(sensor);
- IHistogram1D hist = aida.histogram1D(hname, nchan, min, max);
- hist.setTitle(sensor.getName().replaceAll(nameStrip, "")
- .replace("module", "mod")
- .replace("layer", "lyr")
- .replace("sensor", "sens"));
-
- return hist;
- }
-
private String getNiceSensorName(HpsSiSensor sensor) {
return sensor.getName().replaceAll(nameStrip, "")
.replace("module", "mod")
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/TrackingResiduals.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/TrackingResiduals.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/TrackingResiduals.java Wed Apr 27 11:11:32 2016
@@ -28,8 +28,8 @@
// TODO: Add some quantities for DQM monitoring:
public class TrackingResiduals extends DataQualityMonitor {
- private static Logger LOGGER = Logger.getLogger(TrackingResiduals.class.getPackage().getName());
-
+ private static final Logger LOGGER = Logger.getLogger(TrackingResiduals.class.getPackage().getName());
+
// Collection Names
String trackTimeDataCollectionName = "TrackTimeData";
String trackResidualsCollectionName = "TrackResiduals";
@@ -82,8 +82,9 @@
yresidbot[i - 1] = aida.histogram1D(plotDir + triggerType + "/" + posresDir + "Module " + i + " Bot y Residual", 50, -getRange(i, false), getRange(i, false));
}
- for (int i = 1; i <= nmodules * 2; i++)
+ for (int i = 1; i <= nmodules * 2; i++) {
tresid[i - 1] = aida.histogram1D(plotDir + triggerType + "/" + timeresDir + "HalfModule " + i + " t Residual", 50, -20, 20);
+ }
for (int i = 1; i <= nsensors; i++) {
// IHistogram1D utopresid = aida.histogram1D(plotDir + triggerType + "/"+uresDir + "HalfModule " + i + " Top u Residual", 50, -getRange((i + 1) / 2, false), getRange((i + 1) / 2, false));
// IHistogram1D ubotresid = aida.histogram1D(plotDir + triggerType + "/"+uresDir + "HalfModule " + i + " Bot u Residual", 50, -getRange((i + 1) / 2, false), getRange((i + 1) / 2, false));
@@ -106,17 +107,19 @@
@Override
public void process(EventHeader event) {
aida.tree().cd("/");
- if (!event.hasCollection(GenericObject.class, trackResidualsCollectionName))
+ if (!event.hasCollection(GenericObject.class, trackResidualsCollectionName)) {
return;
+ }
//check to see if this event is from the correct trigger (or "all");
- if (!matchTrigger(event))
+ if (!matchTrigger(event)) {
return;
+ }
nEvents++;
List<GenericObject> trdList = event.get(GenericObject.class, trackResidualsCollectionName);
for (GenericObject trd : trdList) {
int nResid = trd.getNDouble();
int isBot = trd.getIntVal(trd.getNInt() - 1);//last Int is the top/bottom flag
- for (int i = 1; i <= nResid; i++)
+ for (int i = 1; i <= nResid; i++) {
if (isBot == 1) {
xresidbot[i - 1].fill(trd.getDoubleVal(i - 1));//x is the double value in the generic object
yresidbot[i - 1].fill(trd.getFloatVal(i - 1));//y is the float value in the generic object
@@ -124,19 +127,22 @@
xresidtop[i - 1].fill(trd.getDoubleVal(i - 1));//x is the double value in the generic object
yresidtop[i - 1].fill(trd.getFloatVal(i - 1));//y is the float value in the generic object
}
- }
-
+ }
+ }
+
if (event.hasCollection(GenericObject.class, trackTimeDataCollectionName)) {
List<GenericObject> ttdList = event.get(GenericObject.class, trackTimeDataCollectionName);
for (GenericObject ttd : ttdList) {
int nResid = ttd.getNDouble();
- for (int i = 1; i <= nResid; i++)
+ for (int i = 1; i <= nResid; i++) {
tresid[i - 1].fill(ttd.getDoubleVal(i - 1));//x is the double value in the generic object
- }
- }
-
- if (!event.hasCollection(GenericObject.class, gblStripClusterDataCollectionName))
+ }
+ }
+ }
+
+ if (!event.hasCollection(GenericObject.class, gblStripClusterDataCollectionName)) {
return;
+ }
List<GenericObject> gblSCDList = event.get(GenericObject.class, gblStripClusterDataCollectionName);
for (GenericObject gblSCD : gblSCDList) {
double umeas = gblSCD.getDoubleVal(GBLStripClusterData.GBLDOUBLE.UMEAS);//TODO: implement generic methods into GBLStripClusterData so this isn't hard coded
@@ -146,23 +152,25 @@
double tanlambda = gblSCD.getDoubleVal(GBLStripClusterData.GBLDOUBLE.TLAMBDA);//use the slope as a proxy for the top/bottom half of tracker
int i = gblSCD.getIntVal(GBLStripClusterData.GBLINT.ID);//implement generic methods into GBLStripClusterData so this isn't hard coded
- if (i == 666)
- if (tanlambda > 0)
+ if (i == 666) {
+ if (tanlambda > 0) {
xtopresidBS.fill(resid);
- else
+ } else {
xbotresidBS.fill(resid);
- else if (i == 667)
- if (tanlambda > 0)
+ }
+ } else if (i == 667) {
+ if (tanlambda > 0) {
ytopresidBS.fill(resid);
- else
+ } else {
ybotresidBS.fill(resid);
- else if (tanlambda > 0)
+ }
+ } else if (tanlambda > 0) {
utopresid[i - 1].fill(resid);//x is the double value in the generic object
- // aida.histogram2D(plotDir + triggerType + "/"+uresDir + "HalfModule " + i + " Top u Residual vs. u").fill(utrk,resid);//x is the double value in the generic object
+ } // aida.histogram2D(plotDir + triggerType + "/"+uresDir + "HalfModule " + i + " Top u Residual vs. u").fill(utrk,resid);//x is the double value in the generic object
// aida.histogram2D(plotDir + triggerType + "/"+uresDir + "HalfModule " + i + " Top u Residual vs. v").fill(vtrk,resid);//x is the double value in the generic object
- else
+ else {
ubotresid[i - 1].fill(resid);//x is the double value in the generic object
- // aida.histogram2D(plotDir + triggerType + "/"+uresDir + "HalfModule " + i + " Bot u Residual vs. u").fill(utrk,resid);//x is the double value in the generic object
+ } // aida.histogram2D(plotDir + triggerType + "/"+uresDir + "HalfModule " + i + " Bot u Residual vs. u").fill(utrk,resid);//x is the double value in the generic object
// aida.histogram2D(plotDir + triggerType + "/"+uresDir + "HalfModule " + i + " Bot u Residual vs. v").fill(vtrk,resid);//x is the double value in the generic object
}
}
@@ -287,20 +295,25 @@
private String getQuantityName(int itype, int iquant, int top, int nlayer) {
String typeString = "position_resid";
String quantString = "mean_";
- if (itype == 1)
+ if (itype == 1) {
typeString = "time_resid";
- if (iquant == 1)
+ }
+ if (iquant == 1) {
quantString = "sigma_";
+ }
String botString = "bot_";
- if (top == 1)
+ if (top == 1) {
botString = "top_";
- if (top == 2)
+ }
+ if (top == 2) {
botString = "";
+ }
String layerString = "module" + nlayer;
- if (itype == 1)
+ if (itype == 1) {
layerString = "halfmodule" + nlayer;
+ }
return typeString + quantString + botString + layerString;
}
@@ -308,51 +321,71 @@
@Override
public void printDQMData() {
LOGGER.info("TrackingResiduals::printDQMData");
- for (Map.Entry<String, Double> entry : xposTopMeanResidMap.entrySet())
- LOGGER.info(entry.getKey() + " = " + entry.getValue());
- for (Map.Entry<String, Double> entry : xposBotMeanResidMap.entrySet())
- LOGGER.info(entry.getKey() + " = " + entry.getValue());
- for (Map.Entry<String, Double> entry : xposTopSigmaResidMap.entrySet())
- LOGGER.info(entry.getKey() + " = " + entry.getValue());
- for (Map.Entry<String, Double> entry : xposBotSigmaResidMap.entrySet())
- LOGGER.info(entry.getKey() + " = " + entry.getValue());
- for (Map.Entry<String, Double> entry : yposTopMeanResidMap.entrySet())
- LOGGER.info(entry.getKey() + " = " + entry.getValue());
- for (Map.Entry<String, Double> entry : yposBotMeanResidMap.entrySet())
- LOGGER.info(entry.getKey() + " = " + entry.getValue());
- for (Map.Entry<String, Double> entry : yposTopSigmaResidMap.entrySet())
- LOGGER.info(entry.getKey() + " = " + entry.getValue());
- for (Map.Entry<String, Double> entry : yposBotSigmaResidMap.entrySet())
- LOGGER.info(entry.getKey() + " = " + entry.getValue());
- for (Map.Entry<String, Double> entry : timeMeanResidMap.entrySet())
- LOGGER.info(entry.getKey() + " = " + entry.getValue());
- for (Map.Entry<String, Double> entry : timeSigmaResidMap.entrySet())
- LOGGER.info(entry.getKey() + " = " + entry.getValue());
+ for (Map.Entry<String, Double> entry : xposTopMeanResidMap.entrySet()) {
+ LOGGER.info(entry.getKey() + " = " + entry.getValue());
+ }
+ for (Map.Entry<String, Double> entry : xposBotMeanResidMap.entrySet()) {
+ LOGGER.info(entry.getKey() + " = " + entry.getValue());
+ }
+ for (Map.Entry<String, Double> entry : xposTopSigmaResidMap.entrySet()) {
+ LOGGER.info(entry.getKey() + " = " + entry.getValue());
+ }
+ for (Map.Entry<String, Double> entry : xposBotSigmaResidMap.entrySet()) {
+ LOGGER.info(entry.getKey() + " = " + entry.getValue());
+ }
+ for (Map.Entry<String, Double> entry : yposTopMeanResidMap.entrySet()) {
+ LOGGER.info(entry.getKey() + " = " + entry.getValue());
+ }
+ for (Map.Entry<String, Double> entry : yposBotMeanResidMap.entrySet()) {
+ LOGGER.info(entry.getKey() + " = " + entry.getValue());
+ }
+ for (Map.Entry<String, Double> entry : yposTopSigmaResidMap.entrySet()) {
+ LOGGER.info(entry.getKey() + " = " + entry.getValue());
+ }
+ for (Map.Entry<String, Double> entry : yposBotSigmaResidMap.entrySet()) {
+ LOGGER.info(entry.getKey() + " = " + entry.getValue());
+ }
+ for (Map.Entry<String, Double> entry : timeMeanResidMap.entrySet()) {
+ LOGGER.info(entry.getKey() + " = " + entry.getValue());
+ }
+ for (Map.Entry<String, Double> entry : timeSigmaResidMap.entrySet()) {
+ LOGGER.info(entry.getKey() + " = " + entry.getValue());
+ }
LOGGER.info("*******************************");
}
@Override
public void printDQMStrings() {
- for (Map.Entry<String, Double> entry : xposTopMeanResidMap.entrySet())
- LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
- for (Map.Entry<String, Double> entry : xposBotMeanResidMap.entrySet())
- LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
- for (Map.Entry<String, Double> entry : xposTopSigmaResidMap.entrySet())
- LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
- for (Map.Entry<String, Double> entry : xposBotSigmaResidMap.entrySet())
- LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
- for (Map.Entry<String, Double> entry : yposTopMeanResidMap.entrySet())
- LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
- for (Map.Entry<String, Double> entry : yposBotMeanResidMap.entrySet())
- LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
- for (Map.Entry<String, Double> entry : yposTopSigmaResidMap.entrySet())
- LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
- for (Map.Entry<String, Double> entry : yposBotSigmaResidMap.entrySet())
- LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
- for (Map.Entry<String, Double> entry : timeMeanResidMap.entrySet())
- LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
- for (Map.Entry<String, Double> entry : timeSigmaResidMap.entrySet())
- LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
+ for (Map.Entry<String, Double> entry : xposTopMeanResidMap.entrySet()) {
+ LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
+ }
+ for (Map.Entry<String, Double> entry : xposBotMeanResidMap.entrySet()) {
+ LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
+ }
+ for (Map.Entry<String, Double> entry : xposTopSigmaResidMap.entrySet()) {
+ LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
+ }
+ for (Map.Entry<String, Double> entry : xposBotSigmaResidMap.entrySet()) {
+ LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
+ }
+ for (Map.Entry<String, Double> entry : yposTopMeanResidMap.entrySet()) {
+ LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
+ }
+ for (Map.Entry<String, Double> entry : yposBotMeanResidMap.entrySet()) {
+ LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
+ }
+ for (Map.Entry<String, Double> entry : yposTopSigmaResidMap.entrySet()) {
+ LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
+ }
+ for (Map.Entry<String, Double> entry : yposBotSigmaResidMap.entrySet()) {
+ LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
+ }
+ for (Map.Entry<String, Double> entry : timeMeanResidMap.entrySet()) {
+ LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
+ }
+ for (Map.Entry<String, Double> entry : timeSigmaResidMap.entrySet()) {
+ LOGGER.info("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
+ }
}
private void resetOccupancyMap() {
@@ -399,31 +432,35 @@
private double getRange(int layer, boolean isX) {
double range = 2.5;
if (isX) {
- if (layer == 1)
- return 0.2;
- if (layer == 2)
- return 0.5;
- if (layer == 3)
- return 0.5;
- if (layer == 4)
- return 1.0;
- if (layer == 5)
- return 1.0;
- if (layer == 6)
- return 1.0;
+ switch (layer) {
+ case 1:
+ return 0.2;
+ case 2:
+ return 0.5;
+ case 3:
+ return 0.5;
+ case 4:
+ return 1.0;
+ case 5:
+ return 1.0;
+ case 6:
+ return 1.0;
+ }
} else {
- if (layer == 1)
- return 0.005;
- if (layer == 2)
- return 0.5;
- if (layer == 3)
- return 0.5;
- if (layer == 4)
- return 1.0;
- if (layer == 5)
- return 1.0;
- if (layer == 6)
- return 1.5;
+ switch (layer) {
+ case 1:
+ return 0.005;
+ case 2:
+ return 0.5;
+ case 3:
+ return 0.5;
+ case 4:
+ return 1.0;
+ case 5:
+ return 1.0;
+ case 6:
+ return 1.5;
+ }
}
return range;
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/TridentMonitoring.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/TridentMonitoring.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/TridentMonitoring.java Wed Apr 27 11:11:32 2016
@@ -9,17 +9,21 @@
import hep.physics.vec.BasicHep3Matrix;
import hep.physics.vec.Hep3Vector;
import hep.physics.vec.VecOp;
-
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.logging.Logger;
-
+import org.hps.conditions.beam.BeamEnergy.BeamEnergyCollection;
import org.hps.recon.ecal.cluster.ClusterUtilities;
+import org.hps.recon.particle.HpsReconParticleDriver;
import org.hps.recon.particle.ReconParticleDriver;
import org.hps.recon.tracking.TrackType;
import org.hps.recon.tracking.TrackUtils;
+import org.hps.recon.vertexing.BilliorTrack;
+import org.hps.recon.vertexing.BilliorVertex;
+import org.hps.recon.vertexing.BilliorVertexer;
+import org.lcsim.event.Cluster;
import org.lcsim.event.EventHeader;
import org.lcsim.event.ReconstructedParticle;
import org.lcsim.event.RelationalTable;
@@ -72,7 +76,6 @@
private final static Logger LOGGER = Logger.getLogger(TridentMonitoring.class.getPackage().getName());
- private double ebeam = 1.05;
private final BasicHep3Matrix beamAxisRotation = new BasicHep3Matrix();
// private static final int nCuts = 9;
// private final String[] cutNames = {"Trk Quality",
@@ -97,8 +100,8 @@
private final String plotDir = "TridentMonitoring/";
- private IHistogram2D triTrackTime2D;
- private IHistogram1D triTrackTimeDiff;
+// private IHistogram2D triTrackTime2D;
+// private IHistogram1D triTrackTimeDiff;
private IHistogram2D triMassMomentum;
private IHistogram2D triZVsMomentum;
private IHistogram2D triTrackMomentum2D;
@@ -108,22 +111,22 @@
private IHistogram1D triSumP;
private IHistogram1D triMass;
private IHistogram2D triZVsMass;
- private IHistogram1D triX;
- private IHistogram1D triY;
- private IHistogram1D triZ;
- private IHistogram2D triZY;
- private IHistogram2D triXY;
- private IHistogram1D triPx;
- private IHistogram1D triPy;
- private IHistogram1D triPz;
- private IHistogram2D triPxPy;
- private IHistogram1D triU;
- private IHistogram1D triV;
+// private IHistogram1D triX;
+// private IHistogram1D triY;
+// private IHistogram1D triZ;
+// private IHistogram2D triZY;
+// private IHistogram2D triXY;
+// private IHistogram1D triPx;
+// private IHistogram1D triPy;
+// private IHistogram1D triPz;
+// private IHistogram2D triPxPy;
+// private IHistogram1D triU;
+// private IHistogram1D triV;
private IHistogram2D triRadTrackTime2D;
private IHistogram1D triRadTrackTimeDiff;
- private IHistogram2D triRadMassMomentum;
- private IHistogram2D triRadZVsMomentum;
+// private IHistogram2D triRadMassMomentum;
+// private IHistogram2D triRadZVsMomentum;
private IHistogram2D triRadTrackMomentum2D;
private IHistogram2D triRadPyEleVsPyPos;
private IHistogram2D triRadPxEleVsPxPos;
@@ -131,11 +134,11 @@
private IHistogram1D triRadSumP;
private IHistogram1D triRadMass;
private IHistogram2D triRadZVsMass;
- private IHistogram1D triRadX;
- private IHistogram1D triRadY;
- private IHistogram1D triRadZ;
- private IHistogram2D triRadZY;
- private IHistogram2D triRadXY;
+// private IHistogram1D triRadX;
+// private IHistogram1D triRadY;
+// private IHistogram1D triRadZ;
+// private IHistogram2D triRadZY;
+// private IHistogram2D triRadXY;
private IHistogram1D triRadPx;
private IHistogram1D triRadPy;
private IHistogram1D triRadPz;
@@ -143,8 +146,8 @@
private IHistogram1D triRadU;
private IHistogram1D triRadV;
- private IHistogram2D vertTrackTime2D;
- private IHistogram1D vertTrackTimeDiff;
+// private IHistogram2D vertTrackTime2D;
+// private IHistogram1D vertTrackTimeDiff;
private IHistogram2D vertMassMomentum;
private IHistogram2D vertZVsMomentum;
private IHistogram2D vertTrackMomentum2D;
@@ -154,17 +157,17 @@
private IHistogram1D vertSumP;
private IHistogram1D vertMass;
private IHistogram2D vertZVsMass;
- private IHistogram1D vertX;
+// private IHistogram1D vertX;
private IHistogram1D vertY;
- private IHistogram1D vertZ;
+// private IHistogram1D vertZ;
private IHistogram2D vertZY;
private IHistogram2D vertXY;
- private IHistogram1D vertPx;
- private IHistogram1D vertPy;
- private IHistogram1D vertPz;
- private IHistogram2D vertPxPy;
- private IHistogram1D vertU;
- private IHistogram1D vertV;
+// private IHistogram1D vertPx;
+// private IHistogram1D vertPy;
+// private IHistogram1D vertPz;
+// private IHistogram2D vertPxPy;
+// private IHistogram1D vertU;
+// private IHistogram1D vertV;
private IHistogram2D vertRadTrackTime2D;
private IHistogram1D vertRadTrackTimeDiff;
@@ -189,6 +192,8 @@
private IHistogram1D vertRadU;
private IHistogram1D vertRadV;
+ private IHistogram2D vertRadUnconBsconChi2;
+
private IHistogram1D nTriCand;
private IHistogram1D nVtxCand;
// IHistogram1D vertexW;
@@ -197,7 +202,11 @@
private IHistogram1D maxTrkChi2;
private IHistogram2D zVsMaxTrkChi2;
private IHistogram1D v0Chi2;
+ private IHistogram1D bsconV0Chi2;
private IHistogram2D zVsV0Chi2;
+ private IHistogram2D zVsBsconV0Chi2;
+ private IHistogram1D v0Chi2Diff;
+ private IHistogram2D zVsV0Chi2Diff;
private IHistogram1D trackTimeDiff;
private IHistogram2D zVsTrackTimeDiff;
private IHistogram1D hitTimeStdDev;
@@ -213,55 +222,149 @@
private final IHistogram1D[][] cutVertexZ = new IHistogram1D[Cut.nCuts][2];
private final IHistogram2D[][] cutVertexZVsMass = new IHistogram2D[Cut.nCuts][2];
- private final double plotsMinMass = 0.03 * ebeam;
- private final double plotsMaxMass = 0.04 * ebeam;
+ private final double plotsMinMass = 0.03;
+ private final double plotsMaxMass = 0.04;
//clean up event first
private final int nTrkMax = 5;
private final int nPosMax = 1;
private final double maxChi2SeedTrack = 7.0;
- private final double maxChi2GBLTrack = 15.0;
- private final double maxVertChi2 = 7.0;
-
- //v0 cuts
- private final double v0PzMax = 1.25 * ebeam;//GeV
+ private double maxChi2GBLTrack = 15.0;
+ private double maxUnconVertChi2 = 7.0;
+ private double maxBsconVertChi2 = 1000.0; //disable by default
+
+ //v0 plot ranges
+ private final double v0PzMax = 1.25;//GeV
private final double v0PzMin = 0.1;// GeV
private final double v0PyMax = 0.04;//GeV absolute value
private final double v0PxMax = 0.04;//GeV absolute value
private final double v0VzMax = 50.0;// mm from target...someday make mass dependent
- private final double v0VyMax = 1.0;// mm from target...someday make mass dependent
+ private final double v0VyMax = 2.0;// mm from target...someday make mass dependent
private final double v0VxMax = 2.0;// mm from target...someday make mass dependent
- // track quality cuts
+
+ //v0 cuts
+ private final double v0PzMaxCut = 1.25;//GeV
+ private final double v0PzMinCut = 0.1;// GeV
+ private final double v0PyCut = 0.04;//GeV absolute value
+ private final double v0PxCut = 0.04;//GeV absolute value
+ private final double v0UnconVzCut = 50.0;// mm from target...someday make mass dependent
+ private double v0UnconVyCut = 2.0;// mm from target...someday make mass dependent
+ private double v0UnconVxCut = 2.0;// mm from target...someday make mass dependent
+ private double v0BsconVyCut = 10.0; //disable by default
+ private double v0BsconVxCut = 10.0; //disable by default
+
+// track quality cuts
private final double beamPCut = 0.85;
private final double minPCut = 0.05;
// private double trkPyMax = 0.2;
// private double trkPxMax = 0.2;
- private final double radCut = 0.8 * ebeam;
+ private final double radCut = 0.8;
private final double trkTimeDiff = 5.0;
private final double clusterTimeDiffCut = 2.5;
- private final double l1IsoMin = 1.0;
+ private double l1IsoMin = 0.5;
+
+ private final double tupleTrkPCut = 0.9;
+ private final double tupleMaxSumCut = 1.3;
+
+ private final double[] beamSize = {0.001, 0.130, 0.050}; //rough estimate from harp scans during engineering run production running
+ private final double[] beamPos = {0.0, 0.0, 0.0};
+ private final double[] vzcBeamSize = {0.001, 100, 100};
+
//cluster matching
// private boolean reqCluster = false;
// private int nClustMax = 3;
// private double eneLossFactor = 0.7; //average E/p roughly
// private double eneOverPCut = 0.3; //|(E/p)_meas - (E/p)_mean|<eneOverPCut
-
//counters
private float nEvents = 0;
private float nRecoV0 = 0;
private final float[] nPassCut = new float[Cut.nCuts];
- public void setEbeam(double ebeam) {
- this.ebeam = ebeam;
- }
+ public TridentMonitoring() {
+ this.tupleVariables = new String[]{"run/I", "event/I",
+ "nTrk/I", "nPos/I",
+ "uncPX/D", "uncPY/D", "uncPZ/D", "uncP/D",
+ "uncVX/D", "uncVY/D", "uncVZ/D", "uncChisq/D", "uncM/D",
+ "bscPX/D", "bscPY/D", "bscPZ/D", "bscP/D",
+ "bscVX/D", "bscVY/D", "bscVZ/D", "bscChisq/D", "bscM/D",
+ "tarPX/D", "tarPY/D", "tarPZ/D", "tarP/D",
+ "tarVX/D", "tarVY/D", "tarVZ/D", "tarChisq/D", "tarM/D",
+ "vzcPX/D", "vzcPY/D", "vzcPZ/D", "vzcP/D",
+ "vzcVX/D", "vzcVY/D", "vzcVZ/D", "vzcChisq/D", "vzcM/D",
+ "elePX/D", "elePY/D", "elePZ/D", "eleP/D",
+ "eleTrkChisq/D", "eleTrkHits/I", "eleTrkType/I", "eleTrkT/D",
+ "eleTrkD0/D", "eleTrkZ0/D", "eleTrkEcalX/D", "eleTrkEcalY/D",
+ "eleHasL1/B", "eleHasL2/B",
+ "eleMatchChisq/D", "eleClT/D", "eleClE/D", "eleClHits/I",
+ "posPX/D", "posPY/D", "posPZ/D", "posP/D",
+ "posTrkChisq/D", "posTrkHits/I", "posTrkType/I", "posTrkT/D",
+ "posTrkD0/D", "posTrkZ0/D", "posTrkEcalX/D", "posTrkEcalY/D",
+ "posHasL1/B", "posHasL2/B",
+ "posMatchChisq/D", "posClT/D", "posClE/D", "posClHits/I",
+ "minL1Iso/D"
+ };
+ }
+
+ public void setMaxChi2GBLTrack(double maxChi2GBLTrack) {
+ this.maxChi2GBLTrack = maxChi2GBLTrack;
+ }
+
+ public void setMaxUnconVertChi2(double maxUnconVertChi2) {
+ this.maxUnconVertChi2 = maxUnconVertChi2;
+ }
+
+ public void setMaxBsconVertChi2(double maxBsconVertChi2) {
+ this.maxBsconVertChi2 = maxBsconVertChi2;
+ }
+
+ public void setV0UnconVyCut(double v0UnconVyCut) {
+ this.v0UnconVyCut = v0UnconVyCut;
+ }
+
+ public void setV0UnconVxCut(double v0UnconVxCut) {
+ this.v0UnconVxCut = v0UnconVxCut;
+ }
+
+ public void setV0BsconVyCut(double v0BsconVyCut) {
+ this.v0BsconVyCut = v0BsconVyCut;
+ }
+
+ public void setV0BsconVxCut(double v0BsconVxCut) {
+ this.v0BsconVxCut = v0BsconVxCut;
+ }
+
+ public void setL1IsoMin(double l1IsoMin) {
+ this.l1IsoMin = l1IsoMin;
+ }
+
+ public void setBeamSizeX(double beamSizeX) {
+ this.beamSize[1] = beamSizeX;
+ }
+
+ public void setBeamSizeY(double beamSizeY) {
+ this.beamSize[2] = beamSizeY;
+ }
+
+ public void setBeamPosX(double beamPosX) {
+ this.beamPos[1] = beamPosX;
+ }
+
+ public void setBeamPosY(double beamPosY) {
+ this.beamPos[2] = beamPosY;
+ }
+
+ double ebeam;
@Override
protected void detectorChanged(Detector detector) {
LOGGER.info("TridendMonitoring::detectorChanged Setting up the plotter");
beamAxisRotation.setActiveEuler(Math.PI / 2, -0.0305, -Math.PI / 2);
+ BeamEnergyCollection beamEnergyCollection
+ = this.getConditionsManager().getCachedConditions(BeamEnergyCollection.class, "beam_energies").getCachedData();
+ ebeam = beamEnergyCollection.get(0).getBeamEnergy();
aida.tree().cd("/");
String trkType = "SeedTrack/";
if (isGBL) {
@@ -284,106 +387,106 @@
// IHistogram1D tarconChi2 = aida.histogram1D(plotDir + triggerType + "/"+ triggerType + "/"+"Target Constrained Chi2", 25, 0, 25);
nTriCand = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Number of Trident Candidates", 5, 0, 4);
- triTrackTimeDiff = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Track time difference", 100, -10, 10);
- triTrackTime2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Track time vs. track time", 100, -10, 10, 100, -10, 10);
-
- triTrackMomentum2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Positron vs. electron momentum", 100, 0, v0PzMax, 100, 0, v0PzMax);
- triDeltaP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Positron - electron momentum", 100, -1., 1.0);
- triSumP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Positron + electron momentum", 100, v0PzMin, v0PzMax);
- triPyEleVsPyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Py(e) vs Py(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
- triPxEleVsPxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Px(e) vs Px(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
-
- triMassMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex mass vs. vertex momentum", 100, v0PzMin, v0PzMax, 100, 0, 0.1);
- triZVsMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Z vs. vertex momentum", 100, v0PzMin, v0PzMax, 100, -v0VzMax, v0VzMax);
- triMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex mass", 100, 0, 0.11);
- triZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Z vs. mass", 100, 0, 0.11, 100, -v0VzMax, v0VzMax);
- triX = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex X", 100, -v0VxMax, v0VxMax);
- triY = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Y", 100, -v0VyMax, v0VyMax);
- triZ = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Z", 100, -v0VzMax, v0VzMax);
- triXY = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Y vs. X", 100, -v0VxMax, v0VxMax, 100, -v0VyMax, v0VyMax);
- triZY = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Z vs. Y", 100, -v0VyMax, v0VyMax, 100, -v0VzMax, v0VzMax);
- triPx = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Px", 100, -v0PxMax, v0PxMax);
- triPy = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Py", 100, -v0PyMax, v0PyMax);
- triPz = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Pz", 100, v0PzMin, v0PzMax);
- triPxPy = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Py vs. Px", 100, -v0PxMax, v0PxMax, 100, -v0PyMax, v0PyMax);
- triU = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Px over Ptot", 100, -0.1, 0.1);
- triV = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Py over Ptot", 100, -0.1, 0.1);
+// triTrackTimeDiff = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Track time difference", 100, -10, 10);
+// triTrackTime2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Track time vs. track time", 100, -10, 10, 100, -10, 10);
+ triTrackMomentum2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Positron vs. electron momentum", 100, 0, v0PzMax * ebeam, 100, 0, v0PzMax * ebeam);
+ triDeltaP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Positron - electron momentum", 100, -ebeam, ebeam);
+ triSumP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Positron + electron momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam);
+ triPyEleVsPyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Py(e) vs Py(p)", 50, -v0PyMax * ebeam, v0PyMax * ebeam, 50, -v0PyMax * ebeam, v0PyMax * ebeam);
+ triPxEleVsPxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Px(e) vs Px(p)", 50, -v0PxMax * ebeam, v0PxMax * ebeam, 50, -v0PxMax * ebeam, v0PxMax * ebeam);
+
+ triMassMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex mass vs. vertex momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam, 100, 0, 0.1 * ebeam);
+ triZVsMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Z vs. vertex momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam, 100, -v0VzMax, v0VzMax);
+ triMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex mass", 100, 0, 0.1 * ebeam);
+ triZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Z vs. mass", 100, 0, 0.1 * ebeam, 100, -v0VzMax, v0VzMax);
+// triX = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex X", 100, -v0VxMax, v0VxMax);
+// triY = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Y", 100, -v0VyMax, v0VyMax);
+// triZ = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Z", 100, -v0VzMax, v0VzMax);
+// triXY = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Y vs. X", 100, -v0VxMax, v0VxMax, 100, -v0VyMax, v0VyMax);
+// triZY = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Z vs. Y", 100, -v0VyMax, v0VyMax, 100, -v0VzMax, v0VzMax);
+// triPx = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Px", 100, -v0PxMax, v0PxMax);
+// triPy = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Py", 100, -v0PyMax, v0PyMax);
+// triPz = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Pz", 100, v0PzMin, v0PzMax);
+// triPxPy = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Py vs. Px", 100, -v0PxMax, v0PxMax, 100, -v0PyMax, v0PyMax);
+// triU = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Px over Ptot", 100, -0.1, 0.1);
+// triV = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Py over Ptot", 100, -0.1, 0.1);
triRadTrackTimeDiff = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Track time difference", 100, -10, 10);
triRadTrackTime2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Track time vs. track time", 100, -10, 10, 100, -10, 10);
- triRadTrackMomentum2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Positron vs. electron momentum", 100, 0, v0PzMax, 100, 0, v0PzMax);
- triRadDeltaP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Positron - electron momentum", 100, -1., 1.0);
- triRadSumP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Positron + electron momentum", 100, v0PzMin, v0PzMax);
- triRadPyEleVsPyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Py(e) vs Py(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
- triRadPxEleVsPxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Px(e) vs Px(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
-
- triRadMassMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex mass vs. vertex momentum", 100, v0PzMin, v0PzMax, 100, 0, 0.1);
- triRadZVsMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Z vs. vertex momentum", 100, v0PzMin, v0PzMax, 100, -v0VzMax, v0VzMax);
- triRadMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex mass", 100, 0, 0.11);
- triRadZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Z vs. mass", 100, 0, 0.11, 100, -v0VzMax, v0VzMax);
- triRadX = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex X", 100, -v0VxMax, v0VxMax);
- triRadY = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Y", 100, -v0VyMax, v0VyMax);
- triRadZ = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Z", 100, -v0VzMax, v0VzMax);
- triRadXY = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Y vs. X", 100, -v0VxMax, v0VxMax, 100, -v0VyMax, v0VyMax);
- triRadZY = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Z vs. Y", 100, -v0VyMax, v0VyMax, 100, -v0VzMax, v0VzMax);
- triRadPx = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Px", 100, -v0PxMax, v0PxMax);
- triRadPy = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Py", 100, -v0PyMax, v0PyMax);
- triRadPz = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Pz", 100, v0PzMin, v0PzMax);
- triRadPxPy = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Py vs. Px", 100, -v0PxMax, v0PxMax, 100, -v0PyMax, v0PyMax);
+ triRadTrackMomentum2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Positron vs. electron momentum", 100, 0, v0PzMax * ebeam, 100, 0, v0PzMax * ebeam);
+ triRadDeltaP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Positron - electron momentum", 100, -ebeam, ebeam);
+ triRadSumP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Positron + electron momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam);
+ triRadPyEleVsPyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Py(e) vs Py(p)", 50, -v0PyMax * ebeam, v0PyMax * ebeam, 50, -v0PyMax * ebeam, v0PyMax * ebeam);
+ triRadPxEleVsPxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Px(e) vs Px(p)", 50, -v0PxMax * ebeam, v0PxMax * ebeam, 50, -v0PxMax * ebeam, v0PxMax * ebeam);
+
+// triRadMassMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex mass vs. vertex momentum", 100, v0PzMin, v0PzMax, 100, 0, 0.1);
+// triRadZVsMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Z vs. vertex momentum", 100, v0PzMin, v0PzMax, 100, -v0VzMax, v0VzMax);
+ triRadMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex mass", 100, 0, 0.1 * ebeam);
+ triRadZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Z vs. mass", 100, 0, 0.1 * ebeam, 100, -v0VzMax, v0VzMax);
+// triRadX = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex X", 100, -v0VxMax, v0VxMax);
+// triRadY = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Y", 100, -v0VyMax, v0VyMax);
+// triRadZ = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Z", 100, -v0VzMax, v0VzMax);
+// triRadXY = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Y vs. X", 100, -v0VxMax, v0VxMax, 100, -v0VyMax, v0VyMax);
+// triRadZY = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Z vs. Y", 100, -v0VyMax, v0VyMax, 100, -v0VzMax, v0VzMax);
+ triRadPx = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Px", 100, -v0PxMax * ebeam, v0PxMax * ebeam);
+ triRadPy = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Py", 100, -v0PyMax * ebeam, v0PyMax * ebeam);
+ triRadPz = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Pz", 100, v0PzMin * ebeam, v0PzMax * ebeam);
+ triRadPxPy = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Py vs. Px", 100, -v0PxMax * ebeam, v0PxMax * ebeam, 100, -v0PyMax * ebeam, v0PyMax * ebeam);
triRadU = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Px over Ptot", 100, -0.1, 0.1);
triRadV = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Py over Ptot", 100, -0.1, 0.1);
- vertTrackTimeDiff = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Track time difference", 100, -10, 10);
- vertTrackTime2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Track time vs. track time", 100, -10, 10, 100, -10, 10);
-
- vertTrackMomentum2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Positron vs. electron momentum", 100, 0, v0PzMax, 100, 0, v0PzMax);
- vertDeltaP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Positron - electron momentum", 100, -1., 1.0);
- vertSumP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Positron + electron momentum", 100, v0PzMin, v0PzMax);
- vertPyEleVsPyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Py(e) vs Py(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
- vertPxEleVsPxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Px(e) vs Px(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
-
- vertMassMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex mass vs. vertex momentum", 100, v0PzMin, v0PzMax, 100, 0, 0.1);
- vertZVsMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Z vs. vertex momentum", 100, v0PzMin, v0PzMax, 100, -v0VzMax, v0VzMax);
- vertMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex mass", 100, 0, 0.11);
- vertZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Z vs. mass", 100, 0, 0.11, 100, -v0VzMax, v0VzMax);
- vertX = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex X", 100, -v0VxMax, v0VxMax);
+// vertTrackTimeDiff = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Track time difference", 100, -10, 10);
+// vertTrackTime2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Track time vs. track time", 100, -10, 10, 100, -10, 10);
+ vertTrackMomentum2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Positron vs. electron momentum", 100, 0, v0PzMax * ebeam, 100, 0, v0PzMax * ebeam);
+ vertDeltaP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Positron - electron momentum", 100, -ebeam, ebeam);
+ vertSumP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Positron + electron momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam);
+ vertPyEleVsPyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Py(e) vs Py(p)", 50, -v0PyMax * ebeam, v0PyMax * ebeam, 50, -v0PyMax * ebeam, v0PyMax * ebeam);
+ vertPxEleVsPxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Px(e) vs Px(p)", 50, -v0PxMax * ebeam, v0PxMax * ebeam, 50, -v0PxMax * ebeam, v0PxMax * ebeam);
+
+ vertMassMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex mass vs. vertex momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam, 100, 0, 0.1 * ebeam);
+ vertZVsMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Z vs. vertex momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam, 100, -v0VzMax, v0VzMax);
+ vertMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex mass", 100, 0, 0.1 * ebeam);
+ vertZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Z vs. mass", 100, 0, 0.1 * ebeam, 100, -v0VzMax, v0VzMax);
+// vertX = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex X", 100, -v0VxMax, v0VxMax);
vertY = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Y", 100, -v0VyMax, v0VyMax);
- vertZ = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Z", 100, -v0VzMax, v0VzMax);
+// vertZ = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Z", 100, -v0VzMax, v0VzMax);
vertXY = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Y vs. X", 100, -v0VxMax, v0VxMax, 100, -v0VyMax, v0VyMax);
vertZY = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Z vs. Y", 100, -v0VyMax, v0VyMax, 100, -v0VzMax, v0VzMax);
- vertPx = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Px", 100, -v0PxMax, v0PxMax);
- vertPy = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Py", 100, -v0PyMax, v0PyMax);
- vertPz = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Pz", 100, v0PzMin, v0PzMax);
- vertPxPy = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Py vs. Px", 100, -v0PxMax, v0PxMax, 100, -v0PyMax, v0PyMax);
- vertU = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Px over Ptot", 100, -0.1, 0.1);
- vertV = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Py over Ptot", 100, -0.1, 0.1);
+// vertPx = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Px", 100, -v0PxMax, v0PxMax);
+// vertPy = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Py", 100, -v0PyMax, v0PyMax);
+// vertPz = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Pz", 100, v0PzMin, v0PzMax);
+// vertPxPy = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Py vs. Px", 100, -v0PxMax, v0PxMax, 100, -v0PyMax, v0PyMax);
+// vertU = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Px over Ptot", 100, -0.1, 0.1);
+// vertV = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Py over Ptot", 100, -0.1, 0.1);
vertRadTrackTimeDiff = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Track time difference", 100, -10, 10);
vertRadTrackTime2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Track time vs. track time", 100, -10, 10, 100, -10, 10);
- vertRadTrackMomentum2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Positron vs. electron momentum", 100, 0, v0PzMax, 100, 0, v0PzMax);
- vertRadDeltaP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Positron - electron momentum", 100, -1., 1.0);
- vertRadSumP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Positron + electron momentum", 100, v0PzMin, v0PzMax);
- vertRadPyEleVsPyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Py(e) vs Py(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
- vertRadPxEleVsPxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Px(e) vs Px(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
-
- vertRadMassMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex mass vs. vertex momentum", 100, v0PzMin, v0PzMax, 100, 0, 0.1);
- vertRadZVsMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Z vs. vertex momentum", 100, v0PzMin, v0PzMax, 100, -v0VzMax, v0VzMax);
- vertRadMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex mass", 100, 0, 0.11);
- vertRadZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Z vs. mass", 100, 0, 0.11, 100, -v0VzMax, v0VzMax);
+ vertRadTrackMomentum2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Positron vs. electron momentum", 100, 0, v0PzMax * ebeam, 100, 0, v0PzMax * ebeam);
+ vertRadDeltaP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Positron - electron momentum", 100, -ebeam, ebeam);
+ vertRadSumP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Positron + electron momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam);
+ vertRadPyEleVsPyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Py(e) vs Py(p)", 50, -v0PyMax * ebeam, v0PyMax * ebeam, 50, -v0PyMax * ebeam, v0PyMax * ebeam);
+ vertRadPxEleVsPxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Px(e) vs Px(p)", 50, -v0PxMax * ebeam, v0PxMax * ebeam, 50, -v0PxMax * ebeam, v0PxMax * ebeam);
+
+ vertRadMassMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex mass vs. vertex momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam, 100, 0, 0.1 * ebeam);
+ vertRadZVsMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Z vs. vertex momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam, 100, -v0VzMax, v0VzMax);
+ vertRadMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex mass", 100, 0, 0.1 * ebeam);
+ vertRadZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Z vs. mass", 100, 0, 0.1 * ebeam, 100, -v0VzMax, v0VzMax);
vertRadX = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex X", 100, -v0VxMax, v0VxMax);
vertRadY = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Y", 100, -v0VyMax, v0VyMax);
vertRadZ = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Z", 100, -v0VzMax, v0VzMax);
vertRadXY = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Y vs. X", 100, -v0VxMax, v0VxMax, 100, -v0VyMax, v0VyMax);
vertRadZY = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Z vs. Y", 100, -v0VyMax, v0VyMax, 100, -v0VzMax, v0VzMax);
- vertRadPx = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Px", 100, -v0PxMax, v0PxMax);
- vertRadPy = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Py", 100, -v0PyMax, v0PyMax);
- vertRadPz = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Pz", 100, v0PzMin, v0PzMax);
- vertRadPxPy = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Py vs. Px", 100, -v0PxMax, v0PxMax, 100, -v0PyMax, v0PyMax);
+ vertRadPx = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Px", 100, -v0PxMax * ebeam, v0PxMax * ebeam);
+ vertRadPy = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Py", 100, -v0PyMax * ebeam, v0PyMax * ebeam);
+ vertRadPz = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Pz", 100, v0PzMin * ebeam, v0PzMax * ebeam);
+ vertRadPxPy = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Py vs. Px", 100, -v0PxMax * ebeam, v0PxMax * ebeam, 100, -v0PyMax * ebeam, v0PyMax * ebeam);
vertRadU = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Px over Ptot", 100, -0.1, 0.1);
vertRadV = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Py over Ptot", 100, -0.1, 0.1);
+ vertRadUnconBsconChi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: beamspot chi2 vs. uncon chi2", 100, 0, 25.0, 100, 0, 25.0);
+
nVtxCand = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Number of Vertexing Candidates", 5, 0, 4);
maxTrkChi2 = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Cut: Trk Chi2", 50, 0.0, 50.0);
@@ -391,6 +494,10 @@
v0Chi2 = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Cut: V0 Chi2", 50, 0.0, 25.0);
zVsV0Chi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Cut: Vz vs V0 Chi2", 50, 0.0, 25.0, 50, -v0VzMax, v0VzMax);
+ bsconV0Chi2 = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Cut: Bscon V0 Chi2", 50, 0.0, 25.0);
+ zVsBsconV0Chi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Cut: Vz vs Bscon V0 Chi2", 50, 0.0, 25.0, 50, -v0VzMax, v0VzMax);
+ v0Chi2Diff = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Cut: Bscon-Uncon V0 Chi2 Diff", 50, 0.0, 25.0);
+ zVsV0Chi2Diff = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Cut: Vz vs Bscon-Uncon V0 Chi2 Diff", 50, 0.0, 25.0, 50, -v0VzMax, v0VzMax);
trackTimeDiff = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Cut: Trk Time Diff", 50, 0.0, 10.0);
hitTimeStdDev = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Cut: Hit Time Std Dev", 50, 0.0, 10.0);
@@ -402,16 +509,16 @@
zVsEventTrkCount = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Cut: Vz vs Num Tracks", 10, 0.5, 10.5, 50, -v0VzMax, v0VzMax);
zVsEventPosCount = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Cut: Vz vs Num Positrons", 5, 0.5, 5.5, 50, -v0VzMax, v0VzMax);
- l1Iso = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Cut: L1 Isolation", 50, 0.0, 5.0);
- zVsL1Iso = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Cut: Vz vs L1 Isolation", 50, 0.0, 5.0, 50, -v0VzMax, v0VzMax);
+ l1Iso = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Cut: L1 Isolation", 100, 0.0, 5.0);
+ zVsL1Iso = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Cut: Vz vs L1 Isolation", 100, 0.0, 5.0, 50, -v0VzMax, v0VzMax);
for (Cut cut : Cut.values()) {
for (int i = 0; i < 2; i++) {
- cutVertexZ[cut.ordinal()][i] = aida.histogram1D(String.format("%s%s%s/failed cut: %s/%s: Vertex Z position (mm)", plotDir, trkType, triggerType, cut.ordinal(), cut.name, i == VERTEX ? "vertex" : "trident"),
+ cutVertexZ[cut.ordinal()][i] = aida.histogram1D(String.format("%s%s%s/failed cut %d: %s/%s: Vertex Z position (mm)", plotDir, trkType, triggerType, cut.ordinal(), cut.name, i == VERTEX ? "vertex" : "trident"),
100, -v0VzMax, v0VzMax);
- cutVertexMass[cut.ordinal()][i] = aida.histogram1D(String.format("%s%s%s/failed cut: %s/%s: Vertex mass (GeV)", plotDir, trkType, triggerType, cut.ordinal(), cut.name, i == VERTEX ? "vertex" : "trident"),
+ cutVertexMass[cut.ordinal()][i] = aida.histogram1D(String.format("%s%s%s/failed cut %d: %s/%s: Vertex mass (GeV)", plotDir, trkType, triggerType, cut.ordinal(), cut.name, i == VERTEX ? "vertex" : "trident"),
100, 0, 0.1 * ebeam);
- cutVertexZVsMass[cut.ordinal()][i] = aida.histogram2D(String.format("%s%s%s/failed cut: %s/%s: Vertex Z vs. mass", plotDir, trkType, triggerType, cut.ordinal(), cut.name, i == VERTEX ? "vertex" : "trident"),
+ cutVertexZVsMass[cut.ordinal()][i] = aida.histogram2D(String.format("%s%s%s/failed cut %d: %s/%s: Vertex Z vs. mass", plotDir, trkType, triggerType, cut.ordinal(), cut.name, i == VERTEX ? "vertex" : "trident"),
100, 0, 0.1 * ebeam, 100, -v0VzMax, v0VzMax);
}
}
@@ -489,11 +596,9 @@
if (tracks.size() != 2) {
throw new RuntimeException("expected two tracks in vertex, got " + tracks.size());
}
- List<Double> trackTimes = new ArrayList<Double>();
List<Double> hitTimes = new ArrayList<Double>();
double mean = 0;
for (Track track : tracks) {
- trackTimes.add(TrackUtils.getTrackTime(track, hitToStrips, hitToRotated));
for (TrackerHit hit : TrackUtils.getStripHits(track, hitToStrips, hitToRotated)) {
mean += hit.getTime();
hitTimes.add(hit.getTime());
@@ -516,57 +621,180 @@
minL1Iso = Math.min(eleL1Iso, posL1Iso);
}
+ double tEle = TrackUtils.getTrackTime(electron.getTracks().get(0), hitToStrips, hitToRotated);
+ double tPos = TrackUtils.getTrackTime(positron.getTracks().get(0), hitToStrips, hitToRotated);
+ Hep3Vector pEleRot = VecOp.mult(beamAxisRotation, electron.getMomentum());
+ Hep3Vector pPosRot = VecOp.mult(beamAxisRotation, positron.getMomentum());
+
+ Hep3Vector eleAtEcal = TrackUtils.getTrackPositionAtEcal(electron.getTracks().get(0));
+ Hep3Vector posAtEcal = TrackUtils.getTrackPositionAtEcal(positron.getTracks().get(0));
+
+ BilliorVertexer vtxFitter = new BilliorVertexer(TrackUtils.getBField(event.getDetector()).y());
+ vtxFitter.setBeamSize(beamSize);
+ vtxFitter.setBeamPosition(beamPos);
+ List<BilliorTrack> billiorTracks = new ArrayList<BilliorTrack>();
+ billiorTracks.add(new BilliorTrack(electron.getTracks().get(0)));
+ billiorTracks.add(new BilliorTrack(positron.getTracks().get(0)));
+
+ vtxFitter.doBeamSpotConstraint(true);
+ BilliorVertex bsconVertex = vtxFitter.fitVertex(billiorTracks);
+ ReconstructedParticle bscV0 = HpsReconParticleDriver.makeReconstructedParticle(electron, positron, bsconVertex);
+ Hep3Vector bscMomRot = VecOp.mult(beamAxisRotation, bscV0.getMomentum());
+ Hep3Vector bscVtx = VecOp.mult(beamAxisRotation, bscV0.getStartVertex().getPosition());
+
+ vtxFitter.doTargetConstraint(true);
+ BilliorVertex tarVertex = vtxFitter.fitVertex(billiorTracks);
+ ReconstructedParticle tarV0 = HpsReconParticleDriver.makeReconstructedParticle(electron, positron, tarVertex);
+ Hep3Vector tarMomRot = VecOp.mult(beamAxisRotation, tarV0.getMomentum());
+ Hep3Vector tarVtx = VecOp.mult(beamAxisRotation, tarV0.getStartVertex().getPosition());
+
+ vtxFitter.setBeamSize(vzcBeamSize);
+ vtxFitter.doTargetConstraint(true);
+ BilliorVertex vzcVertex = vtxFitter.fitVertex(billiorTracks);
+ ReconstructedParticle vzcV0 = HpsReconParticleDriver.makeReconstructedParticle(electron, positron, vzcVertex);
+ Hep3Vector vzcMomRot = VecOp.mult(beamAxisRotation, vzcV0.getMomentum());
+ Hep3Vector vzcVtx = VecOp.mult(beamAxisRotation, vzcV0.getStartVertex().getPosition());
+
+ if (tupleWriter != null) {
+ boolean trkCut = electron.getMomentum().magnitude() < tupleTrkPCut * ebeam && positron.getMomentum().magnitude() < tupleTrkPCut * ebeam;
+ boolean sumCut = electron.getMomentum().magnitude() + positron.getMomentum().magnitude() < tupleMaxSumCut * ebeam;
+ if (!cutTuple || (trkCut && sumCut)) {
+
+ tupleMap.put("run/I", (double) event.getRunNumber());
+ tupleMap.put("event/I", (double) event.getEventNumber());
+
+ tupleMap.put("uncPX/D", v0MomRot.x());
+ tupleMap.put("uncPY/D", v0MomRot.y());
+ tupleMap.put("uncPZ/D", v0MomRot.z());
+ tupleMap.put("uncP/D", v0MomRot.magnitude());
+ tupleMap.put("uncVX/D", v0Vtx.x());
+ tupleMap.put("uncVY/D", v0Vtx.y());
+ tupleMap.put("uncVZ/D", v0Vtx.z());
+ tupleMap.put("uncChisq/D", uncV0.getStartVertex().getChi2());
+ tupleMap.put("uncM/D", uncV0.getMass());
+
+ tupleMap.put("bscPX/D", bscMomRot.x());
+ tupleMap.put("bscPY/D", bscMomRot.y());
+ tupleMap.put("bscPZ/D", bscMomRot.z());
+ tupleMap.put("bscP/D", bscMomRot.magnitude());
+ tupleMap.put("bscVX/D", bscVtx.x());
+ tupleMap.put("bscVY/D", bscVtx.y());
+ tupleMap.put("bscVZ/D", bscVtx.z());
+ tupleMap.put("bscChisq/D", bscV0.getStartVertex().getChi2());
+ tupleMap.put("bscM/D", bscV0.getMass());
+
+ tupleMap.put("tarPX/D", tarMomRot.x());
+ tupleMap.put("tarPY/D", tarMomRot.y());
+ tupleMap.put("tarPZ/D", tarMomRot.z());
+ tupleMap.put("tarP/D", tarMomRot.magnitude());
+ tupleMap.put("tarVX/D", tarVtx.x());
+ tupleMap.put("tarVY/D", tarVtx.y());
+ tupleMap.put("tarVZ/D", tarVtx.z());
+ tupleMap.put("tarChisq/D", tarV0.getStartVertex().getChi2());
+ tupleMap.put("tarM/D", tarV0.getMass());
+
+ tupleMap.put("vzcPX/D", vzcMomRot.x());
+ tupleMap.put("vzcPY/D", vzcMomRot.y());
+ tupleMap.put("vzcPZ/D", vzcMomRot.z());
+ tupleMap.put("vzcP/D", vzcMomRot.magnitude());
+ tupleMap.put("vzcVX/D", vzcVtx.x());
+ tupleMap.put("vzcVY/D", vzcVtx.y());
+ tupleMap.put("vzcVZ/D", vzcVtx.z());
+ tupleMap.put("vzcChisq/D", vzcV0.getStartVertex().getChi2());
+ tupleMap.put("vzcM/D", vzcV0.getMass());
+
+ tupleMap.put("elePX/D", pEleRot.x());
+ tupleMap.put("elePY/D", pEleRot.y());
+ tupleMap.put("elePZ/D", pEleRot.z());
+ tupleMap.put("eleP/D", pEleRot.magnitude());
+ tupleMap.put("eleTrkD0/D", electron.getTracks().get(0).getTrackStates().get(0).getD0());
+ tupleMap.put("eleTrkZ0/D", electron.getTracks().get(0).getTrackStates().get(0).getZ0());
+ tupleMap.put("eleTrkEcalX/D", eleAtEcal.x());
+ tupleMap.put("eleTrkEcalY/D", eleAtEcal.y());
+ tupleMap.put("eleTrkChisq/D", electron.getTracks().get(0).getChi2());
+ tupleMap.put("eleTrkHits/I", (double) electron.getTracks().get(0).getTrackerHits().size());
+ tupleMap.put("eleTrkType/I", (double) electron.getType());
+ tupleMap.put("eleTrkT/D", tEle);
+ tupleMap.put("eleHasL1/B", eleIso[0] != null ? 1.0 : 0.0);
+ tupleMap.put("eleHasL2/B", eleIso[2] != null ? 1.0 : 0.0);
+ tupleMap.put("eleMatchChisq/D", electron.getGoodnessOfPID());
+ if (!electron.getClusters().isEmpty()) {
+ Cluster eleC = electron.getClusters().get(0);
+ tupleMap.put("eleClT/D", ClusterUtilities.getSeedHitTime(eleC));
+ tupleMap.put("eleClE/D", eleC.getEnergy());
+ tupleMap.put("eleClHits/I", (double) eleC.getCalorimeterHits().size());
+ }
+
+ tupleMap.put("posPX/D", pPosRot.x());
+ tupleMap.put("posPY/D", pPosRot.y());
+ tupleMap.put("posPZ/D", pPosRot.z());
+ tupleMap.put("posP/D", pPosRot.magnitude());
+ tupleMap.put("posTrkD0/D", positron.getTracks().get(0).getTrackStates().get(0).getD0());
+ tupleMap.put("posTrkZ0/D", positron.getTracks().get(0).getTrackStates().get(0).getZ0());
+ tupleMap.put("posTrkEcalX/D", posAtEcal.x());
+ tupleMap.put("posTrkEcalY/D", posAtEcal.y());
+ tupleMap.put("posTrkChisq/D", positron.getTracks().get(0).getChi2());
+ tupleMap.put("posTrkHits/I", (double) positron.getTracks().get(0).getTrackerHits().size());
+ tupleMap.put("posTrkType/I", (double) positron.getType());
+ tupleMap.put("posTrkT/D", tPos);
+ tupleMap.put("posHasL1/B", posIso[0] != null ? 1.0 : 0.0);
+ tupleMap.put("posHasL2/B", posIso[2] != null ? 1.0 : 0.0);
+ tupleMap.put("posMatchChisq/D", positron.getGoodnessOfPID());
+ if (!positron.getClusters().isEmpty()) {
+ Cluster posC = positron.getClusters().get(0);
+ tupleMap.put("posClT/D", ClusterUtilities.getSeedHitTime(posC));
+ tupleMap.put("posClE/D", posC.getEnergy());
+ tupleMap.put("posClHits/I", (double) posC.getCalorimeterHits().size());
+ }
+
+ tupleMap.put("minL1Iso/D", minL1Iso);
+
+ tupleMap.put("nTrk/I", (double) ntrk);
+ tupleMap.put("nPos/I", (double) npos);
+ writeTuple();
+ }
+ }
+
//start applying cuts
EnumSet<Cut> bits = EnumSet.noneOf(Cut.class);
- boolean trackQualityCut = Math.max(tracks.get(0).getChi2(), tracks.get(1).getChi2()) < (isGBL ? maxChi2GBLTrack : maxChi2SeedTrack);
- maxTrkChi2.fill(Math.max(tracks.get(0).getChi2(), tracks.get(1).getChi2()));
- zVsMaxTrkChi2.fill(Math.max(tracks.get(0).getChi2(), tracks.get(1).getChi2()), v0Vtx.z());
+ boolean trackQualityCut = Math.max(electron.getTracks().get(0).getChi2(), positron.getTracks().get(0).getChi2()) < (isGBL ? maxChi2GBLTrack : maxChi2SeedTrack);
if (trackQualityCut) {
bits.add(Cut.TRK_QUALITY);
}
- boolean v0QualityCut = uncVert.getChi2() < maxVertChi2;
- v0Chi2.fill(uncVert.getChi2());
- zVsV0Chi2.fill(uncVert.getChi2(), v0Vtx.z());
+ boolean v0QualityCut = uncVert.getChi2() < maxUnconVertChi2 && bsconVertex.getChi2() < maxBsconVertChi2;
if (v0QualityCut) {
bits.add(Cut.VTX_QUALITY);
}
- boolean vertexMomentumCut = v0MomRot.z() < v0PzMax && v0MomRot.z() > v0PzMin && Math.abs(v0MomRot.x()) < v0PxMax && Math.abs(v0MomRot.y()) < v0PyMax;
- boolean vertexPositionCut = Math.abs(v0Vtx.x()) < v0VxMax && Math.abs(v0Vtx.y()) < v0VyMax && Math.abs(v0Vtx.z()) < v0VzMax;
+ boolean vertexMomentumCut = v0MomRot.z() < v0PzMaxCut * ebeam && v0MomRot.z() > v0PzMinCut * ebeam && Math.abs(v0MomRot.x()) < v0PxCut * ebeam && Math.abs(v0MomRot.y()) < v0PyCut * ebeam;
+ boolean vertexPositionCut = Math.abs(v0Vtx.x()) < v0UnconVxCut && Math.abs(v0Vtx.y()) < v0UnconVyCut && Math.abs(v0Vtx.z()) < v0UnconVzCut && Math.abs(bscVtx.x()) < v0BsconVxCut && Math.abs(bscVtx.y()) < v0BsconVyCut;
if (vertexMomentumCut && vertexPositionCut) {
bits.add(Cut.VERTEX_CUTS);
}
- boolean trackTimeDiffCut = Math.abs(trackTimes.get(0) - trackTimes.get(1)) < trkTimeDiff;
- trackTimeDiff.fill(Math.abs(trackTimes.get(0) - trackTimes.get(1)));
- hitTimeStdDev.fill(stdDev);
- zVsTrackTimeDiff.fill(Math.abs(trackTimes.get(0) - trackTimes.get(1)), v0Vtx.z());
- zVsHitTimeStdDev.fill(stdDev, v0Vtx.z());
+ boolean trackTimeDiffCut = Math.abs(tEle - tPos) < trkTimeDiff;
if (trackTimeDiffCut) {
bits.add(Cut.TIMING);
}
boolean topBottomCut = electron.getMomentum().y() * positron.getMomentum().y() < 0;
- boolean pMinCut = electron.getMomentum().magnitude() > minPCut && positron.getMomentum().magnitude() > minPCut;
- boolean pMaxCut = electron.getMomentum().magnitude() < beamPCut && positron.getMomentum().magnitude() < beamPCut;
+ boolean pMinCut = electron.getMomentum().magnitude() > minPCut * ebeam && positron.getMomentum().magnitude() > minPCut * ebeam;
+ boolean pMaxCut = electron.getMomentum().magnitude() < beamPCut * ebeam && positron.getMomentum().magnitude() < beamPCut * ebeam;
if (topBottomCut && pMaxCut && pMinCut) {
bits.add(Cut.TRACK_CUTS);
}
boolean clusterMatchCut = !electron.getClusters().isEmpty() && !positron.getClusters().isEmpty();
boolean clusterTimeCut = clusterMatchCut && Math.abs(ClusterUtilities.getSeedHitTime(electron.getClusters().get(0)) - ClusterUtilities.getSeedHitTime(positron.getClusters().get(0))) < clusterTimeDiffCut;
- if (clusterMatchCut && clusterTimeCut) {
- bits.add(Cut.CLUSTER_CUTS);
- }
+//disable cut for now
+// if (clusterMatchCut && clusterTimeCut) {
+ bits.add(Cut.CLUSTER_CUTS);
+// }
boolean eventTrkCountCut = ntrk >= 2 && ntrk <= nTrkMax;
boolean eventPosCountCut = npos >= 1 && npos <= nPosMax;
- eventTrkCount.fill(ntrk);
- eventPosCount.fill(npos);
- zVsEventTrkCount.fill(ntrk, v0Vtx.z());
- zVsEventPosCount.fill(npos, v0Vtx.z());
if (eventTrkCountCut && eventPosCountCut) {
bits.add(Cut.EVENT_QUALITY);
}
@@ -576,10 +804,8 @@
bits.add(Cut.FRONT_HITS);
}
- l1Iso.fill(minL1Iso);
- zVsL1Iso.fill(minL1Iso, v0Vtx.z());
boolean isoCut = minL1Iso > l1IsoMin;
- if (isoCut) {
+ if (!frontHitsCut || isoCut) { //diagnostic plots look better if failing the front hits cut makes you pass this one
bits.add(Cut.ISOLATION);
}
@@ -597,22 +823,58 @@
for (Cut cut : Cut.values()) {
EnumSet<Cut> allButThisCut = EnumSet.allOf(Cut.class);
allButThisCut.remove(cut);
- if (bits.equals(allButThisCut)) {
- if (uncV0.getMass() > plotsMinMass && uncV0.getMass() < plotsMaxMass) {
- cutVertexZ[cut.ordinal()][VERTEX].fill(v0Vtx.z());
+ if (bits.containsAll(allButThisCut)) {
+ if (uncV0.getMass() > plotsMinMass * ebeam && uncV0.getMass() < plotsMaxMass * ebeam && uncV0.getMomentum().magnitude() > radCut * ebeam) {
+ switch (cut) {
+ case ISOLATION:
+ l1Iso.fill(minL1Iso);
+ zVsL1Iso.fill(minL1Iso, v0Vtx.z());
+ break;
+ case EVENT_QUALITY:
+ eventTrkCount.fill(ntrk);
+ eventPosCount.fill(npos);
+ zVsEventTrkCount.fill(ntrk, v0Vtx.z());
+ zVsEventPosCount.fill(npos, v0Vtx.z());
+ break;
+ case TIMING:
+ trackTimeDiff.fill(Math.abs(tEle - tPos));
+ hitTimeStdDev.fill(stdDev);
+ zVsTrackTimeDiff.fill(Math.abs(tEle - tPos), v0Vtx.z());
+ zVsHitTimeStdDev.fill(stdDev, v0Vtx.z());
+ break;
+ case VTX_QUALITY:
+ v0Chi2.fill(uncVert.getChi2());
+ zVsV0Chi2.fill(uncVert.getChi2(), v0Vtx.z());
+ bsconV0Chi2.fill(bsconVertex.getChi2());
+ zVsBsconV0Chi2.fill(bsconVertex.getChi2(), v0Vtx.z());
+ v0Chi2Diff.fill(bsconVertex.getChi2() - uncVert.getChi2());
+ zVsV0Chi2Diff.fill(bsconVertex.getChi2() - uncVert.getChi2(), v0Vtx.z());
+ break;
+ case TRK_QUALITY:
+ maxTrkChi2.fill(Math.max(tracks.get(0).getChi2(), tracks.get(1).getChi2()));
+ zVsMaxTrkChi2.fill(Math.max(tracks.get(0).getChi2(), tracks.get(1).getChi2()), v0Vtx.z());
+ break;
+ }
}
- cutVertexMass[cut.ordinal()][VERTEX].fill(uncV0.getMass());
- cutVertexZVsMass[cut.ordinal()][VERTEX].fill(uncV0.getMass(), v0Vtx.z());
+ if (!bits.contains(cut)) {
+ if (uncV0.getMass() > plotsMinMass * ebeam && uncV0.getMass() < plotsMaxMass * ebeam) {
+ cutVertexZ[cut.ordinal()][VERTEX].fill(v0Vtx.z());
+ }
+ cutVertexMass[cut.ordinal()][VERTEX].fill(uncV0.getMass());
+ cutVertexZVsMass[cut.ordinal()][VERTEX].fill(uncV0.getMass(), v0Vtx.z());
+ }
}
EnumSet<Cut> allTriCutsButThisCut = EnumSet.range(Cut.values()[0], Cut.values()[Cut.firstVertexingCut - 1]);
allTriCutsButThisCut.remove(cut);
- if (bits.containsAll(allTriCutsButThisCut) && !bits.contains(cut)) {
- if (uncV0.getMass() > plotsMinMass && uncV0.getMass() < plotsMaxMass) {
- cutVertexZ[cut.ordinal()][TRIDENT].fill(v0Vtx.z());
+ if (bits.containsAll(allTriCutsButThisCut)) {
+ if (!bits.contains(cut)) {
+ if (uncV0.getMass() > plotsMinMass * ebeam && uncV0.getMass() < plotsMaxMass * ebeam) {
+ cutVertexZ[cut.ordinal()][TRIDENT].fill(v0Vtx.z());
+ }
+ cutVertexMass[cut.ordinal()][TRIDENT].fill(uncV0.getMass());
+ cutVertexZVsMass[cut.ordinal()][TRIDENT].fill(uncV0.getMass(), v0Vtx.z());
}
- cutVertexMass[cut.ordinal()][TRIDENT].fill(uncV0.getMass());
- cutVertexZVsMass[cut.ordinal()][TRIDENT].fill(uncV0.getMass(), v0Vtx.z());
}
}
@@ -645,8 +907,8 @@
Hep3Vector pPosRot = VecOp.mult(beamAxisRotation, positron.getMomentum());
Hep3Vector v0Vtx = VecOp.mult(beamAxisRotation, bestCandidate.getStartVertex().getPosition());
- triTrackTime2D.fill(tEle, tPos);
- triTrackTimeDiff.fill(tEle - tPos);
+// triTrackTime2D.fill(tEle, tPos);
+// triTrackTimeDiff.fill(tEle - tPos);
triZVsMomentum.fill(bestCandidate.getMomentum().magnitude(), v0Vtx.z());
triMassMomentum.fill(bestCandidate.getMomentum().magnitude(), bestCandidate.getMass());
triTrackMomentum2D.fill(electron.getMomentum().magnitude(), positron.getMomentum().magnitude());
@@ -655,24 +917,24 @@
triSumP.fill(bestCandidate.getMomentum().magnitude());
triDeltaP.fill(positron.getMomentum().magnitude() - electron.getMomentum().magnitude());
- triPxPy.fill(pBestV0Rot.x(), pBestV0Rot.y());
+// triPxPy.fill(pBestV0Rot.x(), pBestV0Rot.y());
triMass.fill(bestCandidate.getMass());
triZVsMass.fill(bestCandidate.getMass(), v0Vtx.z());
- triX.fill(v0Vtx.x());
- triY.fill(v0Vtx.y());
- triZ.fill(v0Vtx.z());
- triPx.fill(pBestV0Rot.x());
- triPy.fill(pBestV0Rot.y());
- triPz.fill(pBestV0Rot.z());
- triU.fill(pBestV0Rot.x() / pBestV0Rot.magnitude());
- triV.fill(pBestV0Rot.y() / pBestV0Rot.magnitude());
- triXY.fill(v0Vtx.x(), v0Vtx.y());
- triZY.fill(v0Vtx.y(), v0Vtx.z());
- if (bestCandidate.getMomentum().magnitude() > radCut) {
+// triX.fill(v0Vtx.x());
+// triY.fill(v0Vtx.y());
+// triZ.fill(v0Vtx.z());
+// triPx.fill(pBestV0Rot.x());
+// triPy.fill(pBestV0Rot.y());
+// triPz.fill(pBestV0Rot.z());
+// triU.fill(pBestV0Rot.x() / pBestV0Rot.magnitude());
+// triV.fill(pBestV0Rot.y() / pBestV0Rot.magnitude());
+// triXY.fill(v0Vtx.x(), v0Vtx.y());
+// triZY.fill(v0Vtx.y(), v0Vtx.z());
+ if (bestCandidate.getMomentum().magnitude() > radCut * ebeam) {
triRadTrackTime2D.fill(tEle, tPos);
triRadTrackTimeDiff.fill(tEle - tPos);
- triRadZVsMomentum.fill(bestCandidate.getMomentum().magnitude(), v0Vtx.z());
- triRadMassMomentum.fill(bestCandidate.getMomentum().magnitude(), bestCandidate.getMass());
+// triRadZVsMomentum.fill(bestCandidate.getMomentum().magnitude(), v0Vtx.z());
+// triRadMassMomentum.fill(bestCandidate.getMomentum().magnitude(), bestCandidate.getMass());
triRadTrackMomentum2D.fill(electron.getMomentum().magnitude(), positron.getMomentum().magnitude());
triRadPyEleVsPyPos.fill(pEleRot.y(), pPosRot.y());
triRadPxEleVsPxPos.fill(pEleRot.x(), pPosRot.x());
@@ -682,22 +944,23 @@
triRadPxPy.fill(pBestV0Rot.x(), pBestV0Rot.y());
triRadMass.fill(bestCandidate.getMass());
triRadZVsMass.fill(bestCandidate.getMass(), v0Vtx.z());
- triRadX.fill(v0Vtx.x());
- triRadY.fill(v0Vtx.y());
- triRadZ.fill(v0Vtx.z());
+// triRadX.fill(v0Vtx.x());
+// triRadY.fill(v0Vtx.y());
+// triRadZ.fill(v0Vtx.z());
triRadPx.fill(pBestV0Rot.x());
triRadPy.fill(pBestV0Rot.y());
triRadPz.fill(pBestV0Rot.z());
triRadU.fill(pBestV0Rot.x() / pBestV0Rot.magnitude());
triRadV.fill(pBestV0Rot.y() / pBestV0Rot.magnitude());
- triRadXY.fill(v0Vtx.x(), v0Vtx.y());
- triRadZY.fill(v0Vtx.y(), v0Vtx.z());
+// triRadXY.fill(v0Vtx.x(), v0Vtx.y());
+// triRadZY.fill(v0Vtx.y(), v0Vtx.z());
}
}
if (!vertCandidateList.isEmpty()) {
// pick the best candidate...for now just pick a random one.
ReconstructedParticle bestCandidate = vertCandidateList.get((int) (Math.random() * vertCandidateList.size()));
+ Vertex unconVertex = bestCandidate.getStartVertex();
//fill some stuff:
ReconstructedParticle electron = bestCandidate.getParticles().get(ReconParticleDriver.ELECTRON);
@@ -711,10 +974,10 @@
Hep3Vector pBestV0Rot = VecOp.mult(beamAxisRotation, bestCandidate.getMomentum());
Hep3Vector pEleRot = VecOp.mult(beamAxisRotation, electron.getMomentum());
Hep3Vector pPosRot = VecOp.mult(beamAxisRotation, positron.getMomentum());
- Hep3Vector v0Vtx = VecOp.mult(beamAxisRotation, bestCandidate.getStartVertex().getPosition());
-
- vertTrackTime2D.fill(tEle, tPos);
- vertTrackTimeDiff.fill(tEle - tPos);
+ Hep3Vector v0Vtx = VecOp.mult(beamAxisRotation, unconVertex.getPosition());
+
+// vertTrackTime2D.fill(tEle, tPos);
+// vertTrackTimeDiff.fill(tEle - tPos);
vertZVsMomentum.fill(bestCandidate.getMomentum().magnitude(), v0Vtx.z());
vertMassMomentum.fill(bestCandidate.getMomentum().magnitude(), bestCandidate.getMass());
vertTrackMomentum2D.fill(electron.getMomentum().magnitude(), positron.getMomentum().magnitude());
@@ -723,20 +986,34 @@
vertSumP.fill(bestCandidate.getMomentum().magnitude());
vertDeltaP.fill(positron.getMomentum().magnitude() - electron.getMomentum().magnitude());
- vertPxPy.fill(pBestV0Rot.x(), pBestV0Rot.y());
+// vertPxPy.fill(pBestV0Rot.x(), pBestV0Rot.y());
vertMass.fill(bestCandidate.getMass());
vertZVsMass.fill(bestCandidate.getMass(), v0Vtx.z());
- vertX.fill(v0Vtx.x());
+// vertX.fill(v0Vtx.x());
vertY.fill(v0Vtx.y());
- vertZ.fill(v0Vtx.z());
- vertPx.fill(pBestV0Rot.x());
- vertPy.fill(pBestV0Rot.y());
- vertPz.fill(pBestV0Rot.z());
- vertU.fill(pBestV0Rot.x() / pBestV0Rot.magnitude());
- vertV.fill(pBestV0Rot.y() / pBestV0Rot.magnitude());
+// vertZ.fill(v0Vtx.z());
+// vertPx.fill(pBestV0Rot.x());
+// vertPy.fill(pBestV0Rot.y());
+// vertPz.fill(pBestV0Rot.z());
+// vertU.fill(pBestV0Rot.x() / pBestV0Rot.magnitude());
+// vertV.fill(pBestV0Rot.y() / pBestV0Rot.magnitude());
vertXY.fill(v0Vtx.x(), v0Vtx.y());
vertZY.fill(v0Vtx.y(), v0Vtx.z());
- if (bestCandidate.getMomentum().magnitude() > radCut) {
+ if (bestCandidate.getMomentum().magnitude() > radCut * ebeam) {
+
+ BilliorVertexer vtxFitter = new BilliorVertexer(TrackUtils.getBField(event.getDetector()).y());
+ vtxFitter.setBeamSize(beamSize);
+ vtxFitter.setBeamPosition(beamPos);
+// vtxFitter.setDebug(false);
+ List<BilliorTrack> billiorTracks = new ArrayList<BilliorTrack>();
+ billiorTracks.add(new BilliorTrack(electron.getTracks().get(0)));
+ billiorTracks.add(new BilliorTrack(positron.getTracks().get(0)));
+ vtxFitter.doBeamSpotConstraint(true);
+ BilliorVertex bsconVertex = vtxFitter.fitVertex(billiorTracks);
+ vtxFitter.doTargetConstraint(true);
+ BilliorVertex tarconVertex = vtxFitter.fitVertex(billiorTracks);
+ vertRadUnconBsconChi2.fill(unconVertex.getChi2(), bsconVertex.getChi2());
+
vertRadTrackTime2D.fill(tEle, tPos);
vertRadTrackTimeDiff.fill(tEle - tPos);
vertRadZVsMomentum.fill(bestCandidate.getMomentum().magnitude(), v0Vtx.z());
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/V0Monitoring.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/V0Monitoring.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/dataquality/V0Monitoring.java Wed Apr 27 11:11:32 2016
@@ -8,8 +8,8 @@
import hep.aida.IHistogram2D;
import hep.aida.IPlotter;
import hep.aida.IPlotterStyle;
+import hep.physics.vec.BasicHep3Matrix;
import hep.physics.vec.Hep3Vector;
-import hep.physics.vec.BasicHep3Matrix;
import hep.physics.vec.VecOp;
import java.io.IOException;
@@ -19,6 +19,7 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.hps.conditions.beam.BeamEnergy.BeamEnergyCollection;
import org.hps.recon.tracking.TrackType;
import org.hps.recon.tracking.TrackUtils;
import org.hps.recon.vertexing.BilliorTrack;
@@ -33,252 +34,493 @@
import org.lcsim.recon.tracking.seedtracker.SeedTrack;
/**
- * DQM driver V0 particles (i.e. e+e- pars) plots things like number of vertex
- * position an mass
+ * DQM driver V0 particles (i.e. e+e- pars) plots things like number of vertex position an mass
*
* @author mgraham on May 14, 2014
- *
*/
public class V0Monitoring extends DataQualityMonitor {
private static Logger LOGGER = Logger.getLogger(V0Monitoring.class.getPackage().getName());
-
- String finalStateParticlesColName = "FinalStateParticles";
- String unconstrainedV0CandidatesColName = "UnconstrainedV0Candidates";
- String beamConV0CandidatesColName = "BeamspotConstrainedV0Candidates";
- String targetV0ConCandidatesColName = "TargetConstrainedV0Candidates";
- String[] fpQuantNames = {"nV0_per_Event", "avg_BSCon_mass", "avg_BSCon_Vx", "avg_BSCon_Vy", "avg_BSCon_Vz", "sig_BSCon_Vx", "sig_BSCon_Vy", "sig_BSCon_Vz", "avg_BSCon_Chi2"};
- //some counters
- int nRecoEvents = 0;
- int nTotV0 = 0;
- int nTot2Ele = 0;
- //some summers
- double sumMass = 0.0;
- double sumVx = 0.0;
- double sumVy = 0.0;
- double sumVz = 0.0;
- double sumChi2 = 0.0;
-
- /* V0 Quantities */
- /* Mass, vertex, chi^2 of fit */
- /* unconstrained */
- IHistogram1D unconMass;
- IHistogram1D unconVx;
- IHistogram1D unconVy;
- IHistogram1D unconVz;
- IHistogram1D unconChi2;
- IHistogram2D unconVzVsChi2;
- IHistogram2D unconChi2VsTrkChi2;
+
+ private static boolean hasSharedStrips(final ReconstructedParticle fs1, final ReconstructedParticle fs2,
+ final RelationalTable hittostrip, final RelationalTable hittorotated) {
+ return TrackUtils.hasSharedStrips(fs1.getTracks().get(0), fs2.getTracks().get(0), hittostrip, hittorotated);
+ }
+
+ private final BasicHep3Matrix beamAxisRotation = new BasicHep3Matrix();
+ private final String beamConV0CandidatesColName = "BeamspotConstrainedV0Candidates";
+ private IHistogram1D bsconChi2;
+ private IHistogram2D bsconChi2VsTrkChi2;
+ private IHistogram1D bsconMass;
+ private IHistogram1D bsconVx;
+ private IHistogram1D bsconVy;
+ private IHistogram1D bsconVz;
+ private IHistogram2D bsconVzVsChi2;
+ private final String finalStateParticlesColName = "FinalStateParticles";
+ private final String[] fpQuantNames = {"nV0_per_Event", "avg_BSCon_mass", "avg_BSCon_Vx", "avg_BSCon_Vy",
+ "avg_BSCon_Vz", "sig_BSCon_Vx", "sig_BSCon_Vy", "sig_BSCon_Vz", "avg_BSCon_Chi2"};
+
+ private final double maxFactor = 1.25;
+ private IHistogram1D mollerHiP, mollerLoP, mollerEitherP, mollerPsum;
+ private IHistogram1D mollerMass;
+ private IHistogram1D mollerMassVtxCut;
+ private IHistogram1D mollerUx;
+ private IHistogram1D mollerUy;
+ private IHistogram1D mollerVx;
+
+ private IHistogram1D mollerVy;
+
+ private IHistogram1D mollerVz;
+ private IHistogram1D mollerVzVtxCut;
+ private IHistogram2D mollerXVsVtxY;
+ private IHistogram2D mollerXVsVtxZ;
+
+ private IHistogram2D mollerYVsVtxZ;
+ // some counters
+ private int nRecoEvents = 0;
+ private int nTotV0 = 0;
+ private IHistogram1D numChargeHisto;
+ private IHistogram1D nV0;
+ private IHistogram1D pEle;
+ private IHistogram2D pEleVspEle;
+ private IHistogram2D pEleVspEleBeamBeam;
+ private IHistogram2D pEleVspEleMoller;
+ private IHistogram2D pEleVspEleNoBeam;
+ private IHistogram2D pEleVspPos;
+ private IHistogram2D pEleVspPosWithCut;
+ private IHistogram2D pEleVsthetaBeamBeam;
+ private IHistogram2D pEleVsthetaMoller;
+
+ private IHistogram2D phiEleVsphiEle;
+ private final String plotDir = "V0Monitoring/";
+ private IHistogram1D pPos;
+
+ private IHistogram2D pxEleVspxEle;
+ private IHistogram2D pxEleVspxEleNoBeam;
+ private IHistogram2D pxEleVspxPos;
+
+ private IHistogram2D pyEleVspyEle;
+ private IHistogram2D pyEleVspyEleNoBeam;
+ private IHistogram2D pyEleVspyPos;
+ private IHistogram1D sumChargeHisto;
+ private double sumChi2 = 0.0;
+ // some summers
+ private double sumMass = 0.0;
+ private double sumVx = 0.0;
+ private double sumVy = 0.0;
+ private double sumVz = 0.0;
+
+ private IHistogram1D tarconChi2;
+ private IHistogram2D tarconChi2VsTrkChi2;
+
+ /* target constrained */
+ private IHistogram1D tarconMass;
+ private IHistogram1D tarconVx;
+ private IHistogram1D tarconVy;
+ private IHistogram1D tarconVz;
+ private IHistogram2D tarconVzVsChi2;
+ private final String targetV0ConCandidatesColName = "TargetConstrainedV0Candidates";
+ private IHistogram2D thetaEleVsthetaBeamBeam;
+ private IHistogram2D thetaEleVsthetaMoller;
+ private final double thetaMax = 0.06;
+ private final double thetaMin = 0.015;
+ private IHistogram1D trigTime;
+ private IHistogram2D trigTimeV0Time;
+ private IHistogram1D unconChi2;
+
+ private IHistogram2D unconChi2VsTrkChi2;
/* beamspot constrained */
-
- IHistogram1D nV0;
-
- IHistogram1D v0Time;
- IHistogram1D v0Dt;
- IHistogram2D trigTimeV0Time;
- IHistogram1D trigTime;
-
- IHistogram1D bsconMass;
- IHistogram1D bsconVx;
- IHistogram1D bsconVy;
- IHistogram1D bsconVz;
- IHistogram1D bsconChi2;
- IHistogram2D bsconVzVsChi2;
- IHistogram2D bsconChi2VsTrkChi2;
- /* target constrained */
- IHistogram1D tarconMass;
- IHistogram1D tarconVx;
- IHistogram1D tarconVy;
- IHistogram1D tarconVz;
- IHistogram1D tarconChi2;
- IHistogram2D tarconVzVsChi2;
- IHistogram2D tarconChi2VsTrkChi2;
-
- IHistogram2D pEleVspPos;
- IHistogram2D pEleVspPosWithCut;
- IHistogram2D pyEleVspyPos;
- IHistogram2D pxEleVspxPos;
-
- IHistogram2D VtxZVsMass;
- IHistogram2D VtxYVsVtxZ;
- IHistogram2D VtxXVsVtxZ;
- IHistogram2D VtxXVsVtxY;
- IHistogram2D VtxXVsVtxPx;
- IHistogram2D VtxYVsVtxPy;
- IHistogram2D VtxZVsVtxPx;
- IHistogram2D VtxZVsVtxPy;
- IHistogram2D VtxZVsVtxPz;
-
- IHistogram2D VtxZVsL1Iso;
- IHistogram2D VtxZVsTrkChi2;
-
- IHistogram2D pEleVspEle;
- IHistogram2D phiEleVsphiEle;
- IHistogram2D pyEleVspyEle;
- IHistogram2D pxEleVspxEle;
- IHistogram2D pEleVspEleNoBeam;
- IHistogram2D pyEleVspyEleNoBeam;
- IHistogram2D pxEleVspxEleNoBeam;
- IHistogram2D pEleVspEleMoller;
- IHistogram2D pEleVsthetaMoller;
- IHistogram2D thetaEleVsthetaMoller;
- IHistogram2D pEleVspEleBeamBeam;
- IHistogram2D pEleVsthetaBeamBeam;
- IHistogram2D thetaEleVsthetaBeamBeam;
-
- IHistogram1D mollerMass;
- IHistogram1D mollerMassVtxCut;
- IHistogram1D mollerVx;
- IHistogram1D mollerVy;
- IHistogram1D mollerVz;
- IHistogram1D mollerVzVtxCut;
- IHistogram2D mollerXVsVtxZ;
- IHistogram2D mollerYVsVtxZ;
- IHistogram2D mollerXVsVtxY;
-
- IHistogram1D sumChargeHisto;
- IHistogram1D numChargeHisto;
-
- private final String plotDir = "V0Monitoring/";
-
- double beamEnergy = 1.05; //GeV
- private final BasicHep3Matrix beamAxisRotation = new BasicHep3Matrix();
-
- double maxFactor = 1.25;
- double feeMomentumCut = 0.8; //GeV
-
- double v0ESumMinCut = 0.8 * beamEnergy;
- double v0ESumMaxCut = 1.25 * beamEnergy;
- double v0MaxPCut = 1.1;//GeV
- double molPSumMin = 0.85;
- double molPSumMax = 1.3;
- double beambeamCut = 0.85;
- double thetaMax = 0.06;
- double thetaMin = 0.015;
-
+ /* V0 Quantities */
+ /* Mass, vertex, chi^2 of fit */
+ /* unconstrained */
+ private IHistogram1D unconMass;
+ private final String unconstrainedV0CandidatesColName = "UnconstrainedV0Candidates";
+ private IHistogram1D unconVx;
+ private IHistogram1D unconVy;
+ private IHistogram1D unconVz;
+ private IHistogram2D unconVzVsChi2;
+ private IHistogram1D v0Dt;
+ private double v0ESumMinCut, v0MaxPCut, v0ESumMaxCut, molPSumMin, molPSumMax, beambeamCut;
+
+ private IHistogram1D v0Time;
+ private IHistogram2D VtxXVsVtxPx;
+
+ private IHistogram2D VtxXVsVtxY;
+ private IHistogram2D VtxXVsVtxZ;
+
+ private IHistogram2D VtxYVsVtxPy;
+
+ private IHistogram2D VtxYVsVtxZ;
+
+ private IHistogram2D VtxZVsL1Iso;
+
+ private IHistogram2D VtxZVsMass;
+ private IHistogram2D VtxZVsTrkChi2;
+
+ private IHistogram2D VtxZVsVtxPx;
+
+ private IHistogram2D VtxZVsVtxPy;
+
+ private IHistogram2D VtxZVsVtxPz;
+
+ /**
+ * Calculate the averages here and fill the map
+ */
@Override
- protected void detectorChanged(Detector detector) {
+ public void calculateEndOfRunQuantities() {
+
+ final IAnalysisFactory analysisFactory = IAnalysisFactory.create();
+ final IFitFactory fitFactory = analysisFactory.createFitFactory();
+ final IFitter fitter = fitFactory.createFitter("chi2");
+ final double[] init = {50.0, 0.0, 0.2, 1.0, 0.0};
+ final IFitResult resVx = this.fitVertexPosition(bsconVx, fitter, init, "range=\"(-0.5,0.5)\"");
+ final double[] init2 = {50.0, 0.0, 0.04, 1.0, 0.0};
+ final IFitResult resVy = this.fitVertexPosition(bsconVy, fitter, init2, "range=\"(-0.2,0.2)\"");
+ final double[] init3 = {50.0, 0.0, 3.0, 1.0, 0.0};
+ final IFitResult resVz = this.fitVertexPosition(bsconVz, fitter, init3, "range=\"(-6,6)\"");
+
+ if (resVx != null && resVy != null & resVz != null) {
+ final double[] parsVx = resVx.fittedParameters();
+ final double[] parsVy = resVy.fittedParameters();
+ final double[] parsVz = resVz.fittedParameters();
+
+ for (int i = 0; i < 5; i++) {
+ LOGGER.info("Vertex Fit Parameters: " + resVx.fittedParameterNames()[i] + " = " + parsVx[i] + "; "
+ + parsVy[i] + "; " + parsVz[i]);
+ }
+
+ final IPlotter plotter = analysisFactory.createPlotterFactory().create("Vertex Position");
+ plotter.createRegions(1, 3);
+ final IPlotterStyle pstyle = plotter.style();
+ pstyle.legendBoxStyle().setVisible(false);
+ pstyle.dataStyle().fillStyle().setColor("green");
+ pstyle.dataStyle().lineStyle().setColor("black");
+ plotter.region(0).plot(bsconVx);
+ plotter.region(0).plot(resVx.fittedFunction());
+ plotter.region(1).plot(bsconVy);
+ plotter.region(1).plot(resVy.fittedFunction());
+ plotter.region(2).plot(bsconVz);
+ plotter.region(2).plot(resVz.fittedFunction());
+ if (outputPlots) {
+ try {
+ plotter.writeToFile(outputPlotDir + "vertex.png");
+ } catch (final IOException ex) {
+ Logger.getLogger(V0Monitoring.class.getName()).log(Level.SEVERE, null, ex);
+ }
+ }
+
+ // monitoredQuantityMap.put(fpQuantNames[2], sumVx / nTotV0);
+ // monitoredQuantityMap.put(fpQuantNames[3], sumVy / nTotV0);
+ // monitoredQuantityMap.put(fpQuantNames[4], sumVz / nTotV0);
+ monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[2], parsVx[1]);
+ monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[3], parsVy[1]);
+ monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[4], parsVz[1]);
+ monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[5], parsVx[2]);
+ monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[6], parsVy[2]);
+ monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[7], parsVz[2]);
+ }
+ monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[0],
+ (double) nTotV0 / nRecoEvents);
+ monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[1], sumMass
+ / nTotV0);
+ monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[8], sumChi2
+ / nTotV0);
+
+ }
+
+ @Override
+ protected void detectorChanged(final Detector detector) {
+
+ final BeamEnergyCollection beamEnergyCollection = this.getConditionsManager()
+ .getCachedConditions(BeamEnergyCollection.class, "beam_energies").getCachedData();
+ final double beamEnergy = beamEnergyCollection.get(0).getBeamEnergy();
+ v0ESumMinCut = 0.8 * beamEnergy;
+ v0ESumMaxCut = 1.25 * beamEnergy;
+
+ v0MaxPCut = 1.05 * beamEnergy;// GeV
+ molPSumMin = 0.80 * beamEnergy;
+ molPSumMax = 1.25 * beamEnergy;
+ beambeamCut = 0.80 * beamEnergy;
+
beamAxisRotation.setActiveEuler(Math.PI / 2, -0.0305, -Math.PI / 2);
- LOGGER.info("Setting up the plotter");
+ // LOGGER.info("Setting up the plotter");
aida.tree().cd("/");
- String xtra = "Extras";
+ final String xtra = "Extras";
String trkType = "SeedTrack/";
- if (isGBL)
+ if (isGBL) {
trkType = "GBLTrack/";
- /* V0 Quantities */
- /* Mass, vertex, chi^2 of fit */
- /* unconstrained */
- unconMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + unconstrainedV0CandidatesColName + "/" + "Invariant Mass (GeV)", 100, 0, 0.200);
- unconVx = aida.histogram1D(plotDir + trkType + triggerType + "/" + unconstrainedV0CandidatesColName + "/" + "Vx (mm)", 50, -10, 10);
- unconVy = aida.histogram1D(plotDir + trkType + triggerType + "/" + unconstrainedV0CandidatesColName + "/" + "Vy (mm)", 50, -10, 10);
- unconVz = aida.histogram1D(plotDir + trkType + triggerType + "/" + unconstrainedV0CandidatesColName + "/" + "Vz (mm)", 50, -50, 50);
- unconChi2 = aida.histogram1D(plotDir + trkType + triggerType + "/" + unconstrainedV0CandidatesColName + "/" + "Chi2", 25, 0, 25);
- unconVzVsChi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + unconstrainedV0CandidatesColName + "/" + "Vz vs. Chi2", 25, 0, 25, 50, -50, 50);
- unconChi2VsTrkChi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + unconstrainedV0CandidatesColName + "/" + "Chi2 vs. total track chi2", 50, 0, 50, 50, 0, 25);
+ }
+
+ final double maxMass = .2 * beamEnergy;
+ final double maxMassMoller = .1 * Math.sqrt(beamEnergy);
+ /* V0 Quantities */
+ /* Mass, vertex, chi^2 of fit */
+ /* unconstrained */
+ unconMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + unconstrainedV0CandidatesColName + "/"
+ + "Invariant Mass (GeV)", 100, 0, maxMass);
+ unconVx = aida.histogram1D(plotDir + trkType + triggerType + "/" + unconstrainedV0CandidatesColName + "/"
+ + "Vx (mm)", 50, -10, 10);
+ unconVy = aida.histogram1D(plotDir + trkType + triggerType + "/" + unconstrainedV0CandidatesColName + "/"
+ + "Vy (mm)", 50, -10, 10);
+ unconVz = aida.histogram1D(plotDir + trkType + triggerType + "/" + unconstrainedV0CandidatesColName + "/"
+ + "Vz (mm)", 50, -50, 50);
+ unconChi2 = aida.histogram1D(plotDir + trkType + triggerType + "/" + unconstrainedV0CandidatesColName + "/"
+ + "Chi2", 25, 0, 25);
+ unconVzVsChi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + unconstrainedV0CandidatesColName + "/"
+ + "Vz vs. Chi2", 25, 0, 25, 50, -50, 50);
+ unconChi2VsTrkChi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + unconstrainedV0CandidatesColName
+ + "/" + "Chi2 vs. total track chi2", 50, 0, 50, 50, 0, 25);
/* beamspot constrained */
- bsconMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + beamConV0CandidatesColName + "/" + "Mass (GeV)", 100, 0, 0.200);
- bsconVx = aida.histogram1D(plotDir + trkType + triggerType + "/" + beamConV0CandidatesColName + "/" + "Vx (mm)", 50, -10, 10);
- bsconVy = aida.histogram1D(plotDir + trkType + triggerType + "/" + beamConV0CandidatesColName + "/" + "Vy (mm)", 50, -10, 10);
- bsconVz = aida.histogram1D(plotDir + trkType + triggerType + "/" + beamConV0CandidatesColName + "/" + "Vz (mm)", 50, -50, 50);
- bsconChi2 = aida.histogram1D(plotDir + trkType + triggerType + "/" + beamConV0CandidatesColName + "/" + "Chi2", 25, 0, 25);
- bsconVzVsChi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + beamConV0CandidatesColName + "/" + "Vz vs. Chi2", 25, 0, 25, 50, -50, 50);
- bsconChi2VsTrkChi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + beamConV0CandidatesColName + "/" + "Chi2 vs. total track chi2", 50, 0, 50, 50, 0, 25);
+ bsconMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + beamConV0CandidatesColName + "/"
+ + "Mass (GeV)", 100, 0, maxMass);
+ bsconVx = aida.histogram1D(
+ plotDir + trkType + triggerType + "/" + beamConV0CandidatesColName + "/" + "Vx (mm)", 50, -10, 10);
+ bsconVy = aida.histogram1D(
+ plotDir + trkType + triggerType + "/" + beamConV0CandidatesColName + "/" + "Vy (mm)", 50, -10, 10);
+ bsconVz = aida.histogram1D(
+ plotDir + trkType + triggerType + "/" + beamConV0CandidatesColName + "/" + "Vz (mm)", 50, -50, 50);
+ bsconChi2 = aida.histogram1D(plotDir + trkType + triggerType + "/" + beamConV0CandidatesColName + "/" + "Chi2",
+ 25, 0, 25);
+ bsconVzVsChi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + beamConV0CandidatesColName + "/"
+ + "Vz vs. Chi2", 25, 0, 25, 50, -50, 50);
+ bsconChi2VsTrkChi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + beamConV0CandidatesColName + "/"
+ + "Chi2 vs. total track chi2", 50, 0, 50, 50, 0, 25);
/* target constrained */
- tarconMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + targetV0ConCandidatesColName + "/" + "Mass (GeV)", 100, 0, 0.200);
- tarconVx = aida.histogram1D(plotDir + trkType + triggerType + "/" + targetV0ConCandidatesColName + "/" + "Vx (mm)", 50, -1, 1);
- tarconVy = aida.histogram1D(plotDir + trkType + triggerType + "/" + targetV0ConCandidatesColName + "/" + "Vy (mm)", 50, -1, 1);
- tarconVz = aida.histogram1D(plotDir + trkType + triggerType + "/" + targetV0ConCandidatesColName + "/" + "Vz (mm)", 50, -10, 10);
- tarconChi2 = aida.histogram1D(plotDir + trkType + triggerType + "/" + targetV0ConCandidatesColName + "/" + "Chi2", 25, 0, 25);
- tarconVzVsChi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + targetV0ConCandidatesColName + "/" + "Vz vs. Chi2", 25, 0, 25, 50, -50, 50);
- tarconChi2VsTrkChi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + targetV0ConCandidatesColName + "/" + "Chi2 vs. total track chi2", 50, 0, 50, 50, 0, 25);
-
- nV0 = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Number of V0 per event", 10, 0, 10);
+ tarconMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + targetV0ConCandidatesColName + "/"
+ + "Mass (GeV)", 100, 0, maxMass);
+ tarconVx = aida.histogram1D(plotDir + trkType + triggerType + "/" + targetV0ConCandidatesColName + "/"
+ + "Vx (mm)", 50, -1, 1);
+ tarconVy = aida.histogram1D(plotDir + trkType + triggerType + "/" + targetV0ConCandidatesColName + "/"
+ + "Vy (mm)", 50, -1, 1);
+ tarconVz = aida.histogram1D(plotDir + trkType + triggerType + "/" + targetV0ConCandidatesColName + "/"
+ + "Vz (mm)", 50, -10, 10);
+ tarconChi2 = aida.histogram1D(plotDir + trkType + triggerType + "/" + targetV0ConCandidatesColName + "/"
+ + "Chi2", 25, 0, 25);
+ tarconVzVsChi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + targetV0ConCandidatesColName + "/"
+ + "Vz vs. Chi2", 25, 0, 25, 50, -50, 50);
+ tarconChi2VsTrkChi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + targetV0ConCandidatesColName
+ + "/" + "Chi2 vs. total track chi2", 50, 0, 50, 50, 0, 25);
+
+ nV0 = aida
+ .histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Number of V0 per event", 10, 0, 10);
v0Time = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "V0 mean time", 100, -25, 25);
- v0Dt = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "V0 time difference", 100, -25, 25);
- trigTimeV0Time = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Trigger phase vs. V0 mean time", 100, -25, 25, 6, 0, 24);
+ v0Dt = aida
+ .histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "V0 time difference", 100, -25, 25);
+ trigTimeV0Time = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "Trigger phase vs. V0 mean time", 100, -25, 25, 6, 0, 24);
trigTime = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Trigger phase", 6, 0, 24);
- pEleVspPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "P(e) vs P(p)", 50, 0, beamEnergy * maxFactor, 50, 0, beamEnergy * maxFactor);
- pEleVspPosWithCut = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "P(e) vs P(p): Radiative", 50, 0, beamEnergy * maxFactor, 50, 0, beamEnergy * maxFactor);
- pyEleVspyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Py(e) vs Py(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
- pxEleVspxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Px(e) vs Px(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
- VtxZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vz vs Mass", 50, 0, 0.15, 50, -50, 80);
- VtxXVsVtxZ = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vx vs Vz", 100, -10, 10, 100, -50, 80);
- VtxYVsVtxZ = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vy vs Vz", 100, -5, 5, 100, -50, 80);
- VtxXVsVtxY = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vx vs Vy", 100, -10, 10, 100, -5, 5);
- VtxXVsVtxPx = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vx vs Px", 100, -0.1, 0.1, 100, -10, 10);
- VtxYVsVtxPy = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vy vs Py", 100, -0.1, 0.1, 100, -5, 5);
- VtxZVsVtxPx = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vz vs Px", 100, -0.1, 0.1, 100, -50, 80);
- VtxZVsVtxPy = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vz vs Py", 100, -0.1, 0.1, 100, -50, 80);
- VtxZVsVtxPz = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vz vs Pz", 100, 0.0, beamEnergy * maxFactor, 100, -50, 80);
- VtxZVsL1Iso = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vz vs L1 Isolation", 100, 0.0, 5.0, 50, -50, 80);
- VtxZVsTrkChi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vz vs Track Chi2", 50, 0, 50, 50, -50, 80);
- pEleVspEle = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/P(e) vs P(e)", 50, 0, beamEnergy * maxFactor, 50, 0, beamEnergy * maxFactor);
- phiEleVsphiEle = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/phi(e) vs phi(e)", 50, -Math.PI, Math.PI, 50, -Math.PI, Math.PI);
- pyEleVspyEle = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Py(e) vs Py(e)", 50, -0.04, 0.04, 50, -0.04, 0.04);
- pxEleVspxEle = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Px(e) vs Px(e)", 50, -0.02, 0.06, 50, -0.02, 0.06);
- pEleVspEleNoBeam = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/P(e) vs P(e) NoBeam", 50, 0, beambeamCut, 50, 0, beambeamCut);
- pEleVspEleMoller = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/P(e) vs P(e) Moller", 50, 0, beambeamCut, 50, 0, beambeamCut);
- pEleVspEleBeamBeam = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/P(e) vs P(e) BeamBeam", 50, beambeamCut, beamEnergy * maxFactor, 50, beambeamCut, beamEnergy * maxFactor);
- pyEleVspyEleNoBeam = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Py(e) vs Py(e) NoBeam", 50, -0.04, 0.04, 50, -0.04, 0.04);
- pxEleVspxEleNoBeam = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Px(e) vs Px(e) NoBeam", 50, -0.02, 0.06, 50, -0.02, 0.06);
- sumChargeHisto = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Total Charge of Event", 5, -2, 3);
- numChargeHisto = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Number of Charged Particles", 6, 0, 6);
-
- pEleVsthetaMoller = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/P(e) vs Theta Moller", 50, 0, beambeamCut, 50, thetaMin, thetaMax);
- thetaEleVsthetaMoller = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Theta vs Theta Moller", 50, thetaMin, thetaMax, 50, thetaMin, thetaMax);
- pEleVsthetaBeamBeam = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/P(e) vs Theta BeamBeam", 50, beambeamCut, beamEnergy * maxFactor, 50, thetaMin, thetaMax);
- thetaEleVsthetaBeamBeam = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Theta vs Theta BeamBeam", 50, thetaMin, thetaMax, 50, thetaMin, thetaMax);
-
- mollerMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Moller Mass (GeV)", 100, 0, 0.100);
- mollerMassVtxCut = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Moller Mass (GeV): VtxCut", 100, 0, 0.100);
- mollerVx = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Moller Vx (mm)", 50, -10, 10);
- mollerVy = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Moller Vy (mm)", 50, -2, 2);
- mollerVz = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Moller Vz (mm)", 50, -50, 50);
- mollerVzVtxCut = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Moller Vz (mm): VtxCut", 50, -50, 50);
- mollerXVsVtxZ = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Moller Vx vs Vz", 100, -5, 5, 100, -50, 50);
- mollerYVsVtxZ = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Moller Vy vs Vz", 100, -2, 2, 100, -50, 50);
- mollerXVsVtxY = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Moller Vx vs Vy", 100, -5, 5, 100, -2, 2);
+ pEleVspPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "P(e) vs P(p)", 50, 0,
+ beamEnergy * maxFactor, 50, 0, beamEnergy * maxFactor);
+
+ pEle = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "P(e)", 50, 0, beamEnergy
+ * maxFactor);
+ pPos = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "P(p)", 50, 0, beamEnergy
+ * maxFactor);
+
+ pEleVspPosWithCut = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "P(e) vs P(p): Radiative", 50, 0, beamEnergy * maxFactor, 50, 0, beamEnergy * maxFactor);
+ pyEleVspyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Py(e) vs Py(p)", 50,
+ -0.04 * beamEnergy, 0.04 * beamEnergy, 50, -0.04 * beamEnergy, 0.04 * beamEnergy);
+ pxEleVspxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Px(e) vs Px(p)", 50,
+ -0.04 * beamEnergy, 0.04 * beamEnergy, 50, -0.04 * beamEnergy, 0.04 * beamEnergy);
+ VtxZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vz vs Mass", 50, 0,
+ maxMass, 50, -50, 80);
+ VtxXVsVtxZ = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vx vs Vz", 100, -10, 10,
+ 100, -50, 80);
+ VtxYVsVtxZ = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vy vs Vz", 100, -5, 5, 100,
+ -50, 80);
+ VtxXVsVtxY = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vx vs Vy", 100, -10, 10,
+ 100, -5, 5);
+ VtxXVsVtxPx = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vx vs Px", 100, -0.1, 0.1,
+ 100, -10, 10);
+ VtxYVsVtxPy = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vy vs Py", 100, -0.1, 0.1,
+ 100, -5, 5);
+ VtxZVsVtxPx = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vz vs Px", 100, -0.1, 0.1,
+ 100, -50, 80);
+ VtxZVsVtxPy = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vz vs Py", 100, -0.1, 0.1,
+ 100, -50, 80);
+ VtxZVsVtxPz = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vz vs Pz", 100, 0.0,
+ beamEnergy * maxFactor, 100, -50, 80);
+ VtxZVsL1Iso = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vz vs L1 Isolation", 100,
+ 0.0, 5.0, 50, -50, 80);
+ VtxZVsTrkChi2 = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "Vz vs Track Chi2", 50,
+ 0, 50, 50, -50, 80);
+ phiEleVsphiEle = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/phi(e) vs phi(e)", 50, -Math.PI, Math.PI, 50, -Math.PI, Math.PI);
+ pyEleVspyEle = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/Py(e) vs Py(e)", 50, -0.04 * beamEnergy, 0.04 * beamEnergy, 50, -0.04 * beamEnergy,
+ 0.04 * beamEnergy);
+ pxEleVspxEle = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/Px(e) vs Px(e)", 50, -0.02 * beamEnergy, 0.06 * beamEnergy, 50, -0.02 * beamEnergy,
+ 0.06 * beamEnergy);
+
+ // electron vs electron momentum with different cuts
+ // 1) no cut
+ // 2) cut out FEE
+ // 3) cut out FEE and also cut on momentum sum
+ // 4) cut out everything except FEE coincidentals
+ pEleVspEle = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/P(e) vs P(e)",
+ 50, 0, beamEnergy * maxFactor, 50, 0, beamEnergy * maxFactor);
+ pEleVspEleNoBeam = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/P(e) vs P(e) NoBeam", 50, 0, beambeamCut, 50, 0, beambeamCut);
+ pEleVspEleMoller = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/P(e) vs P(e) Moller", 50, 0, beambeamCut, 50, 0, beambeamCut);
+ pEleVspEleBeamBeam = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/P(e) vs P(e) BeamBeam", 50, beambeamCut, beamEnergy * maxFactor, 50, beambeamCut,
+ beamEnergy * maxFactor);
+
+ pyEleVspyEleNoBeam = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/Py(e) vs Py(e) NoBeam", 50, -0.04 * beamEnergy, 0.04 * beamEnergy, 50,
+ -0.04 * beamEnergy, 0.04 * beamEnergy);
+ pxEleVspxEleNoBeam = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/Px(e) vs Px(e) NoBeam", 50, -0.02 * beamEnergy, 0.06 * beamEnergy, 50,
+ -0.02 * beamEnergy, 0.06 * beamEnergy);
+ sumChargeHisto = aida.histogram1D(
+ plotDir + trkType + triggerType + "/" + xtra + "/" + "Total Charge of Event", 5, -2, 3);
+ numChargeHisto = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "Number of Charged Particles", 6, 0, 6);
+
+ pEleVsthetaMoller = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/P(e) vs Theta Moller", 50, 0, beambeamCut, 50, thetaMin, thetaMax);
+ thetaEleVsthetaMoller = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/Theta vs Theta Moller", 50, thetaMin, thetaMax, 50, thetaMin, thetaMax);
+ pEleVsthetaBeamBeam = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/P(e) vs Theta BeamBeam", 50, beambeamCut, beamEnergy * maxFactor, 50, thetaMin, thetaMax);
+ thetaEleVsthetaBeamBeam = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/Theta vs Theta BeamBeam", 50, thetaMin, thetaMax, 50, thetaMin, thetaMax);
+
+ mollerMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/Moller Mass (GeV)", 100, 0, maxMassMoller);
+ mollerMassVtxCut = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/Moller Mass (GeV): VtxCut", 100, 0, maxMassMoller);
+ mollerVx = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Moller Vx (mm)",
+ 50, -10, 10);
+ mollerVy = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Moller Vy (mm)",
+ 50, -2, 2);
+ mollerVz = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Moller Vz (mm)",
+ 50, -50, 50);
+ mollerVzVtxCut = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/Moller Vz (mm): VtxCut", 50, -50, 50);
+ mollerXVsVtxZ = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/Moller Vx vs Vz", 100, -5, 5, 100, -50, 50);
+ mollerYVsVtxZ = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/Moller Vy vs Vz", 100, -2, 2, 100, -50, 50);
+ mollerXVsVtxY = aida.histogram2D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/Moller Vx vs Vy", 100, -5, 5, 100, -2, 2);
+
+ mollerUx = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/Moller Pair Momentum Direction Ux", 100, .015, .045);
+ mollerUy = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/"
+ + "2 Electron/Moller Pair Momentum Direction Uy", 100, -.01, .01);
+
+ mollerHiP = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/P(high)", 100, 0,
+ beamEnergy * maxFactor);
+ mollerLoP = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/P(low)", 100, 0,
+ beamEnergy * maxFactor);
+
+ mollerEitherP = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/P(either)",
+ 100, 0, beamEnergy * maxFactor);
+ mollerPsum = aida.histogram1D(plotDir + trkType + triggerType + "/" + xtra + "/" + "2 Electron/Psum", 100, 0,
+ beamEnergy * maxFactor);
+
}
+ private BilliorVertex fitVertex(final BilliorTrack electron, final BilliorTrack positron, final double bField) {
+ // Create a vertex fitter from the magnetic field.
+ final double[] beamSize = {0.001, 0.2, 0.02};
+ final BilliorVertexer vtxFitter = new BilliorVertexer(bField);
+ // TODO: The beam size should come from the conditions database.
+ vtxFitter.setBeamSize(beamSize);
+
+ // Perform the vertexing based on the specified constraint.
+ vtxFitter.doBeamSpotConstraint(false);
+
+ // Add the electron and positron tracks to a track list for
+ // the vertex fitter.
+ final List<BilliorTrack> billiorTracks = new ArrayList<BilliorTrack>();
+
+ billiorTracks.add(electron);
+
+ billiorTracks.add(positron);
+
+ // Find and return a vertex based on the tracks.
+ return vtxFitter.fitVertex(billiorTracks);
+ }
+
+ IFitResult fitVertexPosition(final IHistogram1D h1d, final IFitter fitter, final double[] init, final String range) {
+ IFitResult ifr = null;
+ try {
+ ifr = fitter.fit(h1d, "g+p1", init, range);
+ } catch (final RuntimeException ex) {
+ LOGGER.info(this.getClass().getSimpleName() + ": caught exception in fitGaussian");
+ }
+ return ifr;
+ }
+
@Override
- public void process(EventHeader event) {
- /* make sure everything is there */
- if (!event.hasCollection(ReconstructedParticle.class, finalStateParticlesColName))
+ public void printDQMData() {
+ LOGGER.info("V0Monitoring::printDQMData");
+ for (final Entry<String, Double> entry : monitoredQuantityMap.entrySet()) {
+ LOGGER.info(entry.getKey() + " = " + entry.getValue());
+ }
+ LOGGER.info("*******************************");
+ }
+
+ @Override
+ public void printDQMStrings() {
+ for (int i = 0; i < 9; i++) {
+ LOGGER.info("ALTER TABLE dqm ADD " + fpQuantNames[i] + " double;");
+ }
+ }
+
+ @Override
+ public void process(final EventHeader event) {
+ /* make sure everything is there */
+ if (!event.hasCollection(ReconstructedParticle.class, finalStateParticlesColName)) {
return;
- if (!event.hasCollection(ReconstructedParticle.class, unconstrainedV0CandidatesColName))
+ }
+ if (!event.hasCollection(ReconstructedParticle.class, unconstrainedV0CandidatesColName)) {
return;
- if (!event.hasCollection(ReconstructedParticle.class, beamConV0CandidatesColName))
+ }
+ if (!event.hasCollection(ReconstructedParticle.class, beamConV0CandidatesColName)) {
return;
- if (!event.hasCollection(ReconstructedParticle.class, targetV0ConCandidatesColName))
+ }
+ if (!event.hasCollection(ReconstructedParticle.class, targetV0ConCandidatesColName)) {
return;
-
- //check to see if this event is from the correct trigger (or "all");
- if (!matchTrigger(event))
+ }
+
+ // check to see if this event is from the correct trigger (or "all");
+ if (!this.matchTrigger(event)) {
return;
+ }
nRecoEvents++;
- RelationalTable hitToStrips = TrackUtils.getHitToStripsTable(event);
- RelationalTable hitToRotated = TrackUtils.getHitToRotatedTable(event);
-
- List<ReconstructedParticle> unonstrainedV0List = event.get(ReconstructedParticle.class, unconstrainedV0CandidatesColName);
- for (ReconstructedParticle uncV0 : unonstrainedV0List) {
- if (isGBL != TrackType.isGBL(uncV0.getType()))
+ final RelationalTable<?, ?> hitToStrips = TrackUtils.getHitToStripsTable(event);
+ final RelationalTable<?, ?> hitToRotated = TrackUtils.getHitToRotatedTable(event);
+
+ final List<ReconstructedParticle> unonstrainedV0List = event.get(ReconstructedParticle.class,
+ unconstrainedV0CandidatesColName);
+ for (final ReconstructedParticle uncV0 : unonstrainedV0List) {
+ if (isGBL != TrackType.isGBL(uncV0.getType())) {
continue;
- Vertex uncVert = uncV0.getStartVertex();
- Hep3Vector pVtxRot = VecOp.mult(beamAxisRotation, uncV0.getMomentum());
- Hep3Vector vtxPosRot = VecOp.mult(beamAxisRotation, uncVert.getPosition());
- double theta = Math.acos(pVtxRot.z() / pVtxRot.magnitude());
- double phi = Math.atan2(pVtxRot.y(), pVtxRot.x());
+ }
+ final Vertex uncVert = uncV0.getStartVertex();
+ final Hep3Vector pVtxRot = VecOp.mult(beamAxisRotation, uncV0.getMomentum());
+ final Hep3Vector vtxPosRot = VecOp.mult(beamAxisRotation, uncVert.getPosition());
+ final double theta = Math.acos(pVtxRot.z() / pVtxRot.magnitude());
+ final double phi = Math.atan2(pVtxRot.y(), pVtxRot.x());
unconVx.fill(vtxPosRot.x());
unconVy.fill(vtxPosRot.y());
unconVz.fill(vtxPosRot.z());
unconMass.fill(uncV0.getMass());
unconChi2.fill(uncVert.getChi2());
unconVzVsChi2.fill(uncVert.getChi2(), vtxPosRot.z());
- unconChi2VsTrkChi2.fill(Math.max(uncV0.getParticles().get(0).getTracks().get(0).getChi2(), uncV0.getParticles().get(1).getTracks().get(0).getChi2()), uncVert.getChi2());
+ unconChi2VsTrkChi2.fill(
+ Math.max(uncV0.getParticles().get(0).getTracks().get(0).getChi2(), uncV0.getParticles().get(1)
+ .getTracks().get(0).getChi2()), uncVert.getChi2());
VtxZVsMass.fill(uncV0.getMass(), vtxPosRot.z());
VtxXVsVtxZ.fill(vtxPosRot.x(), vtxPosRot.z());
@@ -290,76 +532,90 @@
VtxZVsVtxPy.fill(pVtxRot.y(), vtxPosRot.z());
VtxZVsVtxPz.fill(pVtxRot.z(), vtxPosRot.z());
- //this always has 2 tracks.
- List<ReconstructedParticle> trks = uncV0.getParticles();
-// Track ele = trks.get(0).getTracks().get(0);
-// Track pos = trks.get(1).getTracks().get(0);
-// //if track #0 has charge>0 it's the electron! This seems mixed up, but remember the track
-// //charge is assigned assuming a positive B-field, while ours is negative
-// if (trks.get(0).getCharge() > 0) {
-// pos = trks.get(0).getTracks().get(0);
-// ele = trks.get(1).getTracks().get(0);
-// }
-// aida.histogram2D(plotDir + trkType + triggerType + "/" + "P(e) vs P(p)").fill(getMomentum(ele), getMomentum(pos));
-// aida.histogram2D(plotDir + trkType + triggerType + "/" + "Px(e) vs Px(p)").fill(ele.getTrackStates().get(0).getMomentum()[1], pos.getTrackStates().get(0).getMomentum()[1]);
-// aida.histogram2D(plotDir + trkType + triggerType + "/" + "Py(e) vs Py(p)").fill(ele.getTrackStates().get(0).getMomentum()[2], pos.getTrackStates().get(0).getMomentum()[2]);
+ // this always has 2 tracks.
+ final List<ReconstructedParticle> trks = uncV0.getParticles();
+ // Track ele = trks.get(0).getTracks().get(0);
+ // Track pos = trks.get(1).getTracks().get(0);
+ // //if track #0 has charge>0 it's the electron! This seems mixed up, but remember the track
+ // //charge is assigned assuming a positive B-field, while ours is negative
+ // if (trks.get(0).getCharge() > 0) {
+ // pos = trks.get(0).getTracks().get(0);
+ // ele = trks.get(1).getTracks().get(0);
+ // }
+ // aida.histogram2D(plotDir + trkType + triggerType + "/" + "P(e) vs P(p)").fill(getMomentum(ele),
+ // getMomentum(pos));
+ // aida.histogram2D(plotDir + trkType + triggerType + "/" +
+ // "Px(e) vs Px(p)").fill(ele.getTrackStates().get(0).getMomentum()[1],
+ // pos.getTrackStates().get(0).getMomentum()[1]);
+ // aida.histogram2D(plotDir + trkType + triggerType + "/" +
+ // "Py(e) vs Py(p)").fill(ele.getTrackStates().get(0).getMomentum()[2],
+ // pos.getTrackStates().get(0).getMomentum()[2]);
ReconstructedParticle ele = trks.get(0);
ReconstructedParticle pos = trks.get(1);
- //ReconParticles have the charge correct.
+ // ReconParticles have the charge correct.
if (trks.get(0).getCharge() > 0) {
pos = trks.get(0);
ele = trks.get(1);
}
if (ele.getCharge() < 0 && pos.getCharge() > 0) {
- VtxZVsTrkChi2.fill(Math.max(uncV0.getParticles().get(0).getTracks().get(0).getChi2(), uncV0.getParticles().get(1).getTracks().get(0).getChi2()), uncVert.getPosition().z());
-
- Double[] eleIso = TrackUtils.getIsolations(ele.getTracks().get(0), hitToStrips, hitToRotated);
- Double[] posIso = TrackUtils.getIsolations(pos.getTracks().get(0), hitToStrips, hitToRotated);
+ VtxZVsTrkChi2.fill(
+ Math.max(uncV0.getParticles().get(0).getTracks().get(0).getChi2(), uncV0.getParticles().get(1)
+ .getTracks().get(0).getChi2()), uncVert.getPosition().z());
+
+ final Double[] eleIso = TrackUtils.getIsolations(ele.getTracks().get(0), hitToStrips, hitToRotated);
+ final Double[] posIso = TrackUtils.getIsolations(pos.getTracks().get(0), hitToStrips, hitToRotated);
if (eleIso[0] != null && posIso[0] != null) {
- double eleL1Iso = Math.min(Math.abs(eleIso[0]), Math.abs(eleIso[1]));
- double posL1Iso = Math.min(Math.abs(posIso[0]), Math.abs(posIso[1]));
- double minL1Iso = Math.min(eleL1Iso, posL1Iso);
+ final double eleL1Iso = Math.min(Math.abs(eleIso[0]), Math.abs(eleIso[1]));
+ final double posL1Iso = Math.min(Math.abs(posIso[0]), Math.abs(posIso[1]));
+ final double minL1Iso = Math.min(eleL1Iso, posL1Iso);
VtxZVsL1Iso.fill(minL1Iso, uncVert.getPosition().z());
}
- double pe = ele.getMomentum().magnitude();
- double pp = pos.getMomentum().magnitude();
- Hep3Vector pEleRot = VecOp.mult(beamAxisRotation, ele.getMomentum());
- Hep3Vector pPosRot = VecOp.mult(beamAxisRotation, pos.getMomentum());
+ final double pe = ele.getMomentum().magnitude();
+ final double pp = pos.getMomentum().magnitude();
+ final Hep3Vector pEleRot = VecOp.mult(beamAxisRotation, ele.getMomentum());
+ final Hep3Vector pPosRot = VecOp.mult(beamAxisRotation, pos.getMomentum());
pEleVspPos.fill(pe, pp);
+ pEle.fill(pe);
+ pPos.fill(pp);
+
pxEleVspxPos.fill(pEleRot.x(), pPosRot.x());
pyEleVspyPos.fill(pEleRot.y(), pPosRot.y());
- if (pe < v0MaxPCut && pp < v0MaxPCut && (pe + pp) > v0ESumMinCut && (pe + pp) < v0ESumMaxCut)//enrich radiative-like events
-
+ if (pe < v0MaxPCut && pp < v0MaxPCut && pe + pp > v0ESumMinCut && pe + pp < v0ESumMaxCut) {
pEleVspPosWithCut.fill(pe, pp);
- }
-
- double eleT = TrackUtils.getTrackTime(ele.getTracks().get(0), hitToStrips, hitToRotated);
- double posT = TrackUtils.getTrackTime(pos.getTracks().get(0), hitToStrips, hitToRotated);
- double meanT = (eleT + posT) / 2.0;
+ }
+ }
+
+ final double eleT = TrackUtils.getTrackTime(ele.getTracks().get(0), hitToStrips, hitToRotated);
+ final double posT = TrackUtils.getTrackTime(pos.getTracks().get(0), hitToStrips, hitToRotated);
+ final double meanT = (eleT + posT) / 2.0;
v0Time.fill(meanT);
v0Dt.fill(eleT - posT);
trigTimeV0Time.fill(meanT, event.getTimeStamp() % 24);
trigTime.fill(event.getTimeStamp() % 24);
}
- List<ReconstructedParticle> beamConstrainedV0List = event.get(ReconstructedParticle.class, beamConV0CandidatesColName);
+ final List<ReconstructedParticle> beamConstrainedV0List = event.get(ReconstructedParticle.class,
+ beamConV0CandidatesColName);
nV0.fill(beamConstrainedV0List.size());
- for (ReconstructedParticle bsV0 : beamConstrainedV0List) {
-
- if (isGBL != TrackType.isGBL(bsV0.getType()))
+ for (final ReconstructedParticle bsV0 : beamConstrainedV0List) {
+
+ if (isGBL != TrackType.isGBL(bsV0.getType())) {
continue;
+ }
nTotV0++;
- Vertex bsVert = bsV0.getStartVertex();
- Hep3Vector vtxPosRot = VecOp.mult(beamAxisRotation, bsVert.getPosition());
+ final Vertex bsVert = bsV0.getStartVertex();
+ final Hep3Vector vtxPosRot = VecOp.mult(beamAxisRotation, bsVert.getPosition());
bsconVx.fill(vtxPosRot.x());
bsconVy.fill(vtxPosRot.y());
bsconVz.fill(vtxPosRot.z());
bsconMass.fill(bsV0.getMass());
bsconChi2.fill(bsVert.getChi2());
bsconVzVsChi2.fill(bsVert.getChi2(), vtxPosRot.z());
- bsconChi2VsTrkChi2.fill(Math.max(bsV0.getParticles().get(0).getTracks().get(0).getChi2(), bsV0.getParticles().get(1).getTracks().get(0).getChi2()), bsVert.getChi2());
+ bsconChi2VsTrkChi2.fill(
+ Math.max(bsV0.getParticles().get(0).getTracks().get(0).getChi2(), bsV0.getParticles().get(1)
+ .getTracks().get(0).getChi2()), bsVert.getChi2());
sumMass += bsV0.getMass();
sumVx += vtxPosRot.x();
sumVy += vtxPosRot.y();
@@ -367,71 +623,82 @@
sumChi2 += bsVert.getChi2();
}
- List<ReconstructedParticle> targetConstrainedV0List = event.get(ReconstructedParticle.class, targetV0ConCandidatesColName);
- for (ReconstructedParticle tarV0 : targetConstrainedV0List) {
-
- if (isGBL != TrackType.isGBL(tarV0.getType()))
+ final List<ReconstructedParticle> targetConstrainedV0List = event.get(ReconstructedParticle.class,
+ targetV0ConCandidatesColName);
+ for (final ReconstructedParticle tarV0 : targetConstrainedV0List) {
+
+ if (isGBL != TrackType.isGBL(tarV0.getType())) {
continue;
-
- Vertex tarVert = tarV0.getStartVertex();
- Hep3Vector vtxPosRot = VecOp.mult(beamAxisRotation, tarVert.getPosition());
+ }
+
+ final Vertex tarVert = tarV0.getStartVertex();
+ final Hep3Vector vtxPosRot = VecOp.mult(beamAxisRotation, tarVert.getPosition());
tarconVx.fill(vtxPosRot.x());
tarconVy.fill(vtxPosRot.y());
tarconVz.fill(vtxPosRot.z());
tarconMass.fill(tarV0.getMass());
tarconChi2.fill(tarVert.getChi2());
tarconVzVsChi2.fill(tarVert.getChi2(), vtxPosRot.z());
- tarconChi2VsTrkChi2.fill(Math.max(tarV0.getParticles().get(0).getTracks().get(0).getChi2(), tarV0.getParticles().get(1).getTracks().get(0).getChi2()), tarVert.getChi2());
- }
- List<ReconstructedParticle> finalStateParticles = event.get(ReconstructedParticle.class, finalStateParticlesColName);
- if (debug)
+ tarconChi2VsTrkChi2.fill(
+ Math.max(tarV0.getParticles().get(0).getTracks().get(0).getChi2(), tarV0.getParticles().get(1)
+ .getTracks().get(0).getChi2()), tarVert.getChi2());
+ }
+ final List<ReconstructedParticle> finalStateParticles = event.get(ReconstructedParticle.class,
+ finalStateParticlesColName);
+ if (debug) {
LOGGER.info("This events has " + finalStateParticles.size() + " final state particles");
+ }
ReconstructedParticle ele1 = null;
ReconstructedParticle ele2 = null;
int sumCharge = 0;
int numChargedParticles = 0;
- for (ReconstructedParticle fsPart : finalStateParticles) {
- if (isGBL != TrackType.isGBL(fsPart.getType()))
+ for (final ReconstructedParticle fsPart : finalStateParticles) {
+ if (isGBL != TrackType.isGBL(fsPart.getType())) {
continue;
- if (debug)
- LOGGER.info("PDGID = " + fsPart.getParticleIDUsed() + "; charge = " + fsPart.getCharge() + "; pz = " + fsPart.getMomentum().x());
- double charge = fsPart.getCharge();
+ }
+ if (debug) {
+ LOGGER.info("PDGID = " + fsPart.getParticleIDUsed() + "; charge = " + fsPart.getCharge() + "; pz = "
+ + fsPart.getMomentum().x());
+ }
+ final double charge = fsPart.getCharge();
sumCharge += charge;
if (charge != 0) {
numChargedParticles++;
- if (charge < 1)
- if (ele1 == null)
+ if (charge < 1) {
+ if (ele1 == null) {
ele1 = fsPart;
- else if (!hasSharedStrips(ele1, fsPart, hitToStrips, hitToRotated))
+ } else if (!hasSharedStrips(ele1, fsPart, hitToStrips, hitToRotated)) {
ele2 = fsPart;
+ }
+ }
}
}
sumChargeHisto.fill(sumCharge);
numChargeHisto.fill(numChargedParticles);
if (ele1 != null && ele2 != null) {
- Hep3Vector p1 = VecOp.mult(beamAxisRotation, ele1.getMomentum());
- Hep3Vector p2 = VecOp.mult(beamAxisRotation, ele2.getMomentum());
-// Hep3Vector beamAxis = new BasicHep3Vector(Math.sin(0.0305), 0, Math.cos(0.0305));
-// LOGGER.info(p1);
-// LOGGER.info(VecOp.mult(rot, p1));
-
- double theta1 = Math.acos(p1.z() / p1.magnitude());
- double theta2 = Math.acos(p2.z() / p2.magnitude());
- double phi1 = Math.atan2(p1.y(), p1.x());
- double phi2 = Math.atan2(p2.y(), p2.x());
+ final Hep3Vector p1 = VecOp.mult(beamAxisRotation, ele1.getMomentum());
+ final Hep3Vector p2 = VecOp.mult(beamAxisRotation, ele2.getMomentum());
+ // Hep3Vector beamAxis = new BasicHep3Vector(Math.sin(0.0305), 0, Math.cos(0.0305));
+ // LOGGER.info(p1);
+ // LOGGER.info(VecOp.mult(rot, p1));
+
+ final double theta1 = Math.acos(p1.z() / p1.magnitude());
+ final double theta2 = Math.acos(p2.z() / p2.magnitude());
+ final double phi1 = Math.atan2(p1.y(), p1.x());
+ final double phi2 = Math.atan2(p2.y(), p2.x());
phiEleVsphiEle.fill(phi1, phi2);
pEleVspEle.fill(ele1.getMomentum().magnitude(), ele2.getMomentum().magnitude());
pyEleVspyEle.fill(ele1.getMomentum().y(), ele2.getMomentum().y());
pxEleVspxEle.fill(ele1.getMomentum().x(), ele2.getMomentum().x());
- //remove beam electrons
+ // remove beam electrons
if (ele1.getMomentum().magnitude() < beambeamCut && ele2.getMomentum().magnitude() < beambeamCut) {
pEleVspEleNoBeam.fill(ele1.getMomentum().magnitude(), ele2.getMomentum().magnitude());
pyEleVspyEleNoBeam.fill(ele1.getMomentum().y(), ele2.getMomentum().y());
pxEleVspxEleNoBeam.fill(ele1.getMomentum().x(), ele2.getMomentum().x());
}
- //look at beam-beam events
+ // look at beam-beam events
if (ele1.getMomentum().magnitude() > beambeamCut && ele2.getMomentum().magnitude() > beambeamCut) {
pEleVspEleBeamBeam.fill(ele1.getMomentum().magnitude(), ele2.getMomentum().magnitude());
pEleVsthetaBeamBeam.fill(p1.magnitude(), theta1);
@@ -439,29 +706,51 @@
thetaEleVsthetaBeamBeam.fill(theta1, theta2);
}
- //look at "Moller" events (if that's what they really are
+ // look at "Moller" events (if that's what they really are
if (ele1.getMomentum().magnitude() + ele2.getMomentum().magnitude() > molPSumMin
&& ele1.getMomentum().magnitude() + ele2.getMomentum().magnitude() < molPSumMax
- && (p1.magnitude() < beambeamCut && p2.magnitude() < beambeamCut)) {
-
- Track ele1trk = ele1.getTracks().get(0);
- Track ele2trk = ele2.getTracks().get(0);
- SeedTrack stEle1 = TrackUtils.makeSeedTrackFromBaseTrack(ele1trk);
- SeedTrack stEle2 = TrackUtils.makeSeedTrackFromBaseTrack(ele2trk);
- BilliorTrack btEle1 = new BilliorTrack(stEle1.getSeedCandidate().getHelix());
- BilliorTrack btEle2 = new BilliorTrack(stEle2.getSeedCandidate().getHelix());
- BilliorVertex bv = fitVertex(btEle1, btEle2);
-// LOGGER.info("ee vertex: "+bv.toString());
- mollerMass.fill(bv.getParameters().get("invMass"));
+ && p1.magnitude() < beambeamCut && p2.magnitude() < beambeamCut) {
+
+ final Track ele1trk = ele1.getTracks().get(0);
+ final Track ele2trk = ele2.getTracks().get(0);
+ final SeedTrack stEle1 = TrackUtils.makeSeedTrackFromBaseTrack(ele1trk);
+ final SeedTrack stEle2 = TrackUtils.makeSeedTrackFromBaseTrack(ele2trk);
+ final BilliorTrack btEle1 = new BilliorTrack(stEle1.getSeedCandidate().getHelix());
+ final BilliorTrack btEle2 = new BilliorTrack(stEle2.getSeedCandidate().getHelix());
+ final BilliorVertex bv = this.fitVertex(btEle1, btEle2, TrackUtils.getBField(event.getDetector())
+ .magnitude());
+ // LOGGER.info("ee vertex: "+bv.toString());
+ final double invMass = bv.getParameters().get("invMass");
+ mollerMass.fill(invMass);
mollerVx.fill(bv.getPosition().x());
mollerVy.fill(bv.getPosition().y());
mollerVz.fill(bv.getPosition().z());
mollerXVsVtxZ.fill(bv.getPosition().x(), bv.getPosition().z());
mollerYVsVtxZ.fill(bv.getPosition().y(), bv.getPosition().z());
mollerXVsVtxY.fill(bv.getPosition().x(), bv.getPosition().y());
- if (Math.abs(bv.getPosition().x()) < 2
- && Math.abs(bv.getPosition().y()) < 0.5) {
- mollerMassVtxCut.fill(bv.getParameters().get("invMass"));
+
+ final double ux = (ele1.getMomentum().x() + ele2.getMomentum().x())
+ / (ele1.getMomentum().z() + ele2.getMomentum().z());
+ final double uy = (ele1.getMomentum().y() + ele2.getMomentum().y())
+ / (ele1.getMomentum().z() + ele2.getMomentum().z());
+ mollerUx.fill(ux);
+ mollerUy.fill(uy);
+
+ // higher and lower energy electrons in moller pair
+ final double pt1 = ele1.getMomentum().magnitude();
+ final double pt2 = ele2.getMomentum().magnitude();
+ final double ph = pt1 > pt2 ? pt1 : pt2;
+ final double pl = pt1 > pt2 ? pt2 : pt1;
+
+ mollerHiP.fill(ph);
+ mollerLoP.fill(pl);
+
+ mollerEitherP.fill(ph);
+ mollerEitherP.fill(pl);
+ mollerPsum.fill(pt1 + pt2);
+
+ if (Math.abs(bv.getPosition().x()) < 2 && Math.abs(bv.getPosition().y()) < 0.5) {
+ mollerMassVtxCut.fill(invMass);
mollerVzVtxCut.fill(bv.getPosition().z());
}
pEleVspEleMoller.fill(p1.magnitude(), p2.magnitude());
@@ -471,119 +760,4 @@
}
}
}
-
- @Override
- public void printDQMData() {
- LOGGER.info("V0Monitoring::printDQMData");
- for (Entry<String, Double> entry : monitoredQuantityMap.entrySet())
- LOGGER.info(entry.getKey() + " = " + entry.getValue());
- LOGGER.info("*******************************");
- }
-
- /**
- * Calculate the averages here and fill the map
- */
- @Override
- public void calculateEndOfRunQuantities() {
-
- IAnalysisFactory analysisFactory = IAnalysisFactory.create();
- IFitFactory fitFactory = analysisFactory.createFitFactory();
- IFitter fitter = fitFactory.createFitter("chi2");
- double[] init = {50.0, 0.0, 0.2, 1.0, 0.0};
- IFitResult resVx = fitVertexPosition(bsconVx, fitter, init, "range=\"(-0.5,0.5)\"");
- double[] init2 = {50.0, 0.0, 0.04, 1.0, 0.0};
- IFitResult resVy = fitVertexPosition(bsconVy, fitter, init2, "range=\"(-0.2,0.2)\"");
- double[] init3 = {50.0, 0.0, 3.0, 1.0, 0.0};
- IFitResult resVz = fitVertexPosition(bsconVz, fitter, init3, "range=\"(-6,6)\"");
-
- if (resVx != null && resVy != null & resVz != null) {
- double[] parsVx = resVx.fittedParameters();
- double[] parsVy = resVy.fittedParameters();
- double[] parsVz = resVz.fittedParameters();
-
- for (int i = 0; i < 5; i++)
- LOGGER.info("Vertex Fit Parameters: " + resVx.fittedParameterNames()[i] + " = " + parsVx[i] + "; " + parsVy[i] + "; " + parsVz[i]);
-
- IPlotter plotter = analysisFactory.createPlotterFactory().create("Vertex Position");
- plotter.createRegions(1, 3);
- IPlotterStyle pstyle = plotter.style();
- pstyle.legendBoxStyle().setVisible(false);
- pstyle.dataStyle().fillStyle().setColor("green");
- pstyle.dataStyle().lineStyle().setColor("black");
- plotter.region(0).plot(bsconVx);
- plotter.region(0).plot(resVx.fittedFunction());
- plotter.region(1).plot(bsconVy);
- plotter.region(1).plot(resVy.fittedFunction());
- plotter.region(2).plot(bsconVz);
- plotter.region(2).plot(resVz.fittedFunction());
- if (outputPlots)
- try {
- plotter.writeToFile(outputPlotDir + "vertex.png");
- } catch (IOException ex) {
- Logger.getLogger(V0Monitoring.class.getName()).log(Level.SEVERE, null, ex);
- }
-
-// monitoredQuantityMap.put(fpQuantNames[2], sumVx / nTotV0);
-// monitoredQuantityMap.put(fpQuantNames[3], sumVy / nTotV0);
-// monitoredQuantityMap.put(fpQuantNames[4], sumVz / nTotV0);
- monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[2], parsVx[1]);
- monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[3], parsVy[1]);
- monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[4], parsVz[1]);
- monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[5], parsVx[2]);
- monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[6], parsVy[2]);
- monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[7], parsVz[2]);
- }
- monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[0], (double) nTotV0 / nRecoEvents);
- monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[1], sumMass / nTotV0);
- monitoredQuantityMap.put(beamConV0CandidatesColName + " " + triggerType + " " + fpQuantNames[8], sumChi2 / nTotV0);
-
- }
-
- @Override
- public void printDQMStrings() {
- for (int i = 0; i < 9; i++)//TODO: do this in a smarter way...loop over the map
- LOGGER.info("ALTER TABLE dqm ADD " + fpQuantNames[i] + " double;");
- }
-
- IFitResult fitVertexPosition(IHistogram1D h1d, IFitter fitter, double[] init, String range
- ) {
- IFitResult ifr = null;
- try {
- ifr = fitter.fit(h1d, "g+p1", init, range);
- } catch (RuntimeException ex) {
- LOGGER.info(this.getClass().getSimpleName() + ": caught exception in fitGaussian");
- }
- return ifr;
- }
-
- private BilliorVertex fitVertex(BilliorTrack electron, BilliorTrack positron) {
- // Create a vertex fitter from the magnetic field.
- double bField = 0.24;
- double[] beamSize = {0.001, 0.2, 0.02};
- BilliorVertexer vtxFitter = new BilliorVertexer(bField);
- // TODO: The beam size should come from the conditions database.
- vtxFitter.setBeamSize(beamSize);
-
- // Perform the vertexing based on the specified constraint.
- vtxFitter.doBeamSpotConstraint(false);
-
- // Add the electron and positron tracks to a track list for
- // the vertex fitter.
- List<BilliorTrack> billiorTracks = new ArrayList<BilliorTrack>();
-
- billiorTracks.add(electron);
-
- billiorTracks.add(positron);
-
- // Find and return a vertex based on the tracks.
- return vtxFitter.fitVertex(billiorTracks);
- }
-
- private static boolean hasSharedStrips(ReconstructedParticle vertex, RelationalTable hittostrip, RelationalTable hittorotated) {
- return hasSharedStrips(vertex.getParticles().get(0), vertex.getParticles().get(1), hittostrip, hittorotated);
- }
-
- private static boolean hasSharedStrips(ReconstructedParticle fs1, ReconstructedParticle fs2, RelationalTable hittostrip, RelationalTable hittorotated) {
- return TrackUtils.hasSharedStrips(fs1.getTracks().get(0), fs2.getTracks().get(0), hittostrip, hittorotated);
- }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/EcalCellIDPrintDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/EcalCellIDPrintDriver.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/EcalCellIDPrintDriver.java Wed Apr 27 11:11:32 2016
@@ -19,63 +19,63 @@
*/
public class EcalCellIDPrintDriver extends Driver {
- Subdetector ecal;
- IDDecoder dec;
- String ecalName = "Ecal";
- String ecalCollectionName = "EcalReadoutHits";
- String outputFileName;
- PrintWriter outputStream = null;
+ Subdetector ecal;
+ IDDecoder dec;
+ String ecalName = "Ecal";
+ String ecalCollectionName = "EcalReadoutHits";
+ String outputFileName;
+ PrintWriter outputStream = null;
- public EcalCellIDPrintDriver() {
- }
+ public EcalCellIDPrintDriver() {
+ }
- public void setEcalCollectionName(String ecalCollectionName) {
- this.ecalCollectionName = ecalCollectionName;
- }
+ public void setEcalCollectionName(String ecalCollectionName) {
+ this.ecalCollectionName = ecalCollectionName;
+ }
- public void setEcalName(String ecalName) {
- this.ecalName = ecalName;
- }
+ public void setEcalName(String ecalName) {
+ this.ecalName = ecalName;
+ }
- public void setOutputFileName(String outputFileName) {
- this.outputFileName = outputFileName;
- }
+ public void setOutputFileName(String outputFileName) {
+ this.outputFileName = outputFileName;
+ }
- public void startOfData() {
- if (outputFileName != null) {
- try {
- outputStream = new PrintWriter(outputFileName);
- } catch (IOException ex) {
- throw new RuntimeException("Invalid outputFilePath!");
- }
- } else {
- outputStream = new PrintWriter(System.out, true);
- }
- }
+ public void startOfData() {
+ if (outputFileName != null) {
+ try {
+ outputStream = new PrintWriter(outputFileName);
+ } catch (IOException ex) {
+ throw new RuntimeException("Invalid outputFilePath!");
+ }
+ } else {
+ outputStream = new PrintWriter(System.out, true);
+ }
+ }
- public void detectorChanged(Detector detector) {
- // Get the Subdetector.
- ecal = (Subdetector) detector.getSubdetector(ecalName);
- dec = ecal.getIDDecoder();
- }
+ public void detectorChanged(Detector detector) {
+ // Get the Subdetector.
+ ecal = (Subdetector) detector.getSubdetector(ecalName);
+ dec = ecal.getIDDecoder();
+ }
- public void process(EventHeader event) {
- // Get the list of ECal hits.
- if (event.hasCollection(RawCalorimeterHit.class, ecalCollectionName)) {
- List<RawCalorimeterHit> hits = event.get(RawCalorimeterHit.class, ecalCollectionName);
- //outputStream.println("Reading RawCalorimeterHit from event " + event.getEventNumber());
- for (RawCalorimeterHit hit : hits) {
- dec.setID(hit.getCellID());
- outputStream.printf("x=%d\ty=%d\n", dec.getValue("ix"), dec.getValue("iy"));
- }
- }
- if (event.hasCollection(RawTrackerHit.class, ecalCollectionName)) {
- List<RawTrackerHit> hits = event.get(RawTrackerHit.class, ecalCollectionName);
- //outputStream.println("Reading RawCalorimeterHit from event " + event.getEventNumber());
- for (RawTrackerHit hit : hits) {
- dec.setID(hit.getCellID());
- outputStream.printf("x=%d\ty=%d\n", dec.getValue("ix"), dec.getValue("iy"));
- }
- }
- }
+ public void process(EventHeader event) {
+ // Get the list of ECal hits.
+ if (event.hasCollection(RawCalorimeterHit.class, ecalCollectionName)) {
+ List<RawCalorimeterHit> hits = event.get(RawCalorimeterHit.class, ecalCollectionName);
+ //outputStream.println("Reading RawCalorimeterHit from event " + event.getEventNumber());
+ for (RawCalorimeterHit hit : hits) {
+ dec.setID(hit.getCellID());
+ outputStream.printf("x=%d\ty=%d\n", dec.getValue("ix"), dec.getValue("iy"));
+ }
+ }
+ if (event.hasCollection(RawTrackerHit.class, ecalCollectionName)) {
+ List<RawTrackerHit> hits = event.get(RawTrackerHit.class, ecalCollectionName);
+ //outputStream.println("Reading RawCalorimeterHit from event " + event.getEventNumber());
+ for (RawTrackerHit hit : hits) {
+ dec.setID(hit.getCellID());
+ outputStream.printf("x=%d\ty=%d\n", dec.getValue("ix"), dec.getValue("iy"));
+ }
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/EcalClusterPlots.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/EcalClusterPlots.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/EcalClusterPlots.java Wed Apr 27 11:11:32 2016
@@ -17,7 +17,7 @@
public class EcalClusterPlots extends Driver {
- //AIDAFrame plotterFrame;
+ //AIDAFrame plotterFrame;
String inputCollection = "EcalClusters";
AIDA aida = AIDA.defaultInstance();
IPlotter plotter, plotter2, plotter3, plotter4;
@@ -47,7 +47,7 @@
@Override
protected void detectorChanged(Detector detector) {
- //plotterFrame = new AIDAFrame();
+ //plotterFrame = new AIDAFrame();
//plotterFrame.setTitle("HPS ECal Cluster Plots");
// Setup the plotter.
@@ -172,6 +172,6 @@
@Override
public void endOfData() {
- //plotterFrame.dispose();
+ //plotterFrame.dispose();
}
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/EcalHitPlots.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/EcalHitPlots.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/EcalHitPlots.java Wed Apr 27 11:11:32 2016
@@ -192,8 +192,8 @@
} else if (AbstractIntData.getTag(data) == SSPData.BANK_TAG) {
//SSPData triggerData = new SSPData(data);
// TODO: TOP, BOTTOM, AND, and OR trigger are test
- // run-specific parameters and are not supported by
- // SSPData.
+ // run-specific parameters and are not supported by
+ // SSPData.
int orTrig = 0; //triggerData.getOrTrig();
if(orTrig != 0) {
for (int i = 0; i < 32; i++) {
@@ -237,7 +237,7 @@
double botTime = Double.POSITIVE_INFINITY;
double orTime = Double.POSITIVE_INFINITY;
for (CalorimeterHit hit : hits) {
- /*
+ /*
if (hit.getIdentifierFieldValue("iy") > 0) {
topX.fill(hit.getIdentifierFieldValue("ix"),hit.getPosition()[0]);
topY.fill(hit.getIdentifierFieldValue("iy"),hit.getPosition()[1]);
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/FEEClusterPlotter.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/FEEClusterPlotter.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/FEEClusterPlotter.java Wed Apr 27 11:11:32 2016
@@ -28,8 +28,8 @@
*/
public class FEEClusterPlotter extends Driver {
-
- //private AIDAFrame plotterFrame;
+
+ //private AIDAFrame plotterFrame;
private AIDA aida = AIDA.defaultInstance();
IPlotter plotter;
IAnalysisFactory fac = aida.analysisFactory();
@@ -43,6 +43,28 @@
private String histoNameFormat = "%3d";
private String outputPlots = null;
+
+ //Set min energy in histo
+ private double minHistoE = 0.5;
+
+ //Set max energy in histo
+ private double maxHistoE = 1.3;
+
+ /**
+ * Set the minimum histogram energy
+ * @param minHistoE
+ */
+ public void setMinHistoE(double minHistoE) {
+ this.minHistoE = minHistoE;
+ }
+
+ /**
+ * Set the maximum histogram energy
+ * @param maxHistoE
+ */
+ public void setMaxHistoE(double maxHistoE) {
+ this.maxHistoE = maxHistoE;
+ }
@Override
protected void detectorChanged(Detector detector) {
@@ -52,7 +74,7 @@
aida.tree().cd("/");
for (EcalChannel cc : ecalConditions.getChannelCollection()) {
- aida.histogram1D(getHistoName(cc),200,0.5,1.3);
+ aida.histogram1D(getHistoName(cc),200,minHistoE,maxHistoE);
}
}
@@ -61,43 +83,121 @@
return String.format(histoNameFormat,cc.getChannelId());
}
+
+ //Set min seed energy value, default to 2015 run
+ private double seedCut = 0.4;
+
+ //set min cluster time in window, default to 2015 run
+ private double minTime = 30;
+
+ //set max cluster time in window, default to 2015 run
+ private double maxTime = 70;
+
+ //set min number of hits in a cluster in row 1, default to 2015 run
+ private int hitCut = 5;
+
+ //hit cut is only used in 2016 data, not 2015
+ private boolean useHitCut = false;
+
+
+ /**
+ * Set the cut value for seed energy in GeV
+ * @param seedCut
+ */
+ public void setSeedCut(double seedCut) {
+ this.seedCut = seedCut;
+ }
+
+ /**
+ * Set the min time in window to look for cluster
+ * @param minTime
+ */
+ public void setMinTime(double minTime) {
+ this.minTime = minTime;
+ }
+
+ /**
+ * Set the max time in window to look for cluster
+ * @param maxTime
+ */
+ public void setMaxTime(double maxTime) {
+ this.maxTime = maxTime;
+ }
+
+ /**
+ * Set the hit cut value for hits in cluster
+ * This cut is used in 2016 running (not 2015)
+ * @param hitCut
+ */
+ public void setHitCut(int hitCut) {
+ this.hitCut = hitCut;
+ }
+
+ /**
+ * Set the hit cut value for hits in cluster
+ * This cut is used in 2016 running (not 2015)
+ * @param hitCut
+ */
+ public void setUseHitCut(boolean useHitCut) {
+ this.useHitCut = useHitCut;
+ }
+
public void process(EventHeader event) {
aida.tree().cd("/");
//only keep singles triggers:
if (!event.hasCollection(GenericObject.class,"TriggerBank"))
- throw new Driver.NextEventException();
+ throw new Driver.NextEventException();
boolean isSingles=false;
for (GenericObject gob : event.get(GenericObject.class,"TriggerBank"))
- {
- if (!(AbstractIntData.getTag(gob) == TIData.BANK_TAG)) continue;
- TIData tid = new TIData(gob);
- if (tid.isSingle0Trigger() || tid.isSingle1Trigger())
- {
- isSingles=true;
- break;
- }
+ {
+ if (!(AbstractIntData.getTag(gob) == TIData.BANK_TAG)) continue;
+ TIData tid = new TIData(gob);
+ if (tid.isSingle0Trigger() || tid.isSingle1Trigger())
+ {
+ isSingles=true;
+ break;
+ }
}
if (isSingles){
- List<Cluster> clusters = event.get(Cluster.class, inputCollection);
- for (Cluster clus : clusters) {
- List<CalorimeterHit> hits = clus.getCalorimeterHits();
- CalorimeterHit seed = hits.get(0);
-
- double seedE = seed.getCorrectedEnergy();
- double clusE = clus.getEnergy();
- double time = seed.getTime();
-
- if ((seedE/clusE > 0.6) && seedE >0.45 && time>30 && time <70){
-
- EcalChannel cc = findChannel(seed);
- aida.histogram1D(getHistoName(cc)).fill(clusE);
- }
- }
- }
- }
-
+ List<Cluster> clusters = event.get(Cluster.class, inputCollection);
+ for (Cluster clus : clusters) {
+ List<CalorimeterHit> hits = clus.getCalorimeterHits();
+ CalorimeterHit seed = hits.get(0);
+
+ double seedE = seed.getCorrectedEnergy();
+ double clusE = clus.getEnergy();
+ double time = seed.getTime();
+
+ //in 2015, not hit count cut used at all
+ if (useHitCut){
+ if (Math.abs(seed.getIdentifierFieldValue("iy"))==1 && (seedE/clusE > 0.6) && seedE >seedCut
+ && time>minTime && time <maxTime && hits.size()>(hitCut+2) ){
+
+ EcalChannel cc = findChannel(seed);
+ aida.histogram1D(getHistoName(cc)).fill(clusE);
+ }
+ else if (Math.abs(seed.getIdentifierFieldValue("iy"))>1 && (seedE/clusE > 0.6) && seedE >seedCut
+ && time>minTime && time <maxTime && hits.size()>(hitCut) ){
+
+ EcalChannel cc = findChannel(seed);
+ aida.histogram1D(getHistoName(cc)).fill(clusE);
+ }
+ }
+ else {
+ if ((seedE/clusE > 0.6) && seedE >seedCut
+ && time>minTime && time <maxTime ){
+
+ EcalChannel cc = findChannel(seed);
+ aida.histogram1D(getHistoName(cc)).fill(clusE);
+
+ }
+ }
+ }
+ }
+ }
+
public void setOutputPlots(String output) {
this.outputPlots = output;
}
@@ -119,5 +219,5 @@
Logger.getLogger(FEEClusterPlotter.class.getName()).log(Level.SEVERE, null, ex);
}
}
- }
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/HPSEcalFADCPlotsDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/HPSEcalFADCPlotsDriver.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/HPSEcalFADCPlotsDriver.java Wed Apr 27 11:11:32 2016
@@ -20,90 +20,90 @@
*/
public class HPSEcalFADCPlotsDriver extends Driver {
- String edepCollectionName = "EcalHits";
- String rawCollectionName = null;
- String ecalCollectionName = null;
- String clusterCollectionName = "EcalClusters";
- AIDA aida = AIDA.defaultInstance();
- IHistogram1D edepE;
- IHistogram1D rawE;
- IHistogram1D ecalE;
- IHistogram1D clusterE;
- ICloud2D window_E;
- double edepThreshold = 0.05;
+ String edepCollectionName = "EcalHits";
+ String rawCollectionName = null;
+ String ecalCollectionName = null;
+ String clusterCollectionName = "EcalClusters";
+ AIDA aida = AIDA.defaultInstance();
+ IHistogram1D edepE;
+ IHistogram1D rawE;
+ IHistogram1D ecalE;
+ IHistogram1D clusterE;
+ ICloud2D window_E;
+ double edepThreshold = 0.05;
- public void setEdepThreshold(double edepThreshold) {
- this.edepThreshold = edepThreshold;
- }
+ public void setEdepThreshold(double edepThreshold) {
+ this.edepThreshold = edepThreshold;
+ }
- public void setRawCollectionName(String rawCollectionName) {
- this.rawCollectionName = rawCollectionName;
- }
+ public void setRawCollectionName(String rawCollectionName) {
+ this.rawCollectionName = rawCollectionName;
+ }
- public void setEcalCollectionName(String ecalCollectionName) {
- this.ecalCollectionName = ecalCollectionName;
- }
+ public void setEcalCollectionName(String ecalCollectionName) {
+ this.ecalCollectionName = ecalCollectionName;
+ }
- public void setClusterCollectionName(String clusterCollectionName) {
- this.clusterCollectionName = clusterCollectionName;
- }
+ public void setClusterCollectionName(String clusterCollectionName) {
+ this.clusterCollectionName = clusterCollectionName;
+ }
- public void startOfData() {
- edepE = aida.histogram1D(
- "FADC plots: " + edepCollectionName + " : Hits",
- 500, 0.0, 5.0);
- if (rawCollectionName != null) {
- rawE = aida.histogram1D(
- "FADC plots: " + rawCollectionName + " : Hits",
- 500, 0.0, 500.0);
- window_E = aida.cloud2D("FADC plots: " + rawCollectionName + " : Window vs. E");
- }
- if (ecalCollectionName != null) {
- ecalE = aida.histogram1D(
- "FADC plots: " + ecalCollectionName + " : Hits",
- 500, 0.0, 5.0);
- }
- clusterE = aida.histogram1D(
- "FADC plots: " + clusterCollectionName + " : Clusters",
- 500, 0.0, 5.0);
- }
+ public void startOfData() {
+ edepE = aida.histogram1D(
+ "FADC plots: " + edepCollectionName + " : Hits",
+ 500, 0.0, 5.0);
+ if (rawCollectionName != null) {
+ rawE = aida.histogram1D(
+ "FADC plots: " + rawCollectionName + " : Hits",
+ 500, 0.0, 500.0);
+ window_E = aida.cloud2D("FADC plots: " + rawCollectionName + " : Window vs. E");
+ }
+ if (ecalCollectionName != null) {
+ ecalE = aida.histogram1D(
+ "FADC plots: " + ecalCollectionName + " : Hits",
+ 500, 0.0, 5.0);
+ }
+ clusterE = aida.histogram1D(
+ "FADC plots: " + clusterCollectionName + " : Clusters",
+ 500, 0.0, 5.0);
+ }
- public void process(EventHeader event) {
- List<Cluster> clusters = event.get(Cluster.class, clusterCollectionName);
- if (clusters == null)
- throw new RuntimeException("Missing cluster collection!");
+ public void process(EventHeader event) {
+ List<Cluster> clusters = event.get(Cluster.class, clusterCollectionName);
+ if (clusters == null)
+ throw new RuntimeException("Missing cluster collection!");
- List<CalorimeterHit> edepHits = event.get(CalorimeterHit.class, edepCollectionName);
- if (edepHits == null)
- throw new RuntimeException("Missing hit collection!");
+ List<CalorimeterHit> edepHits = event.get(CalorimeterHit.class, edepCollectionName);
+ if (edepHits == null)
+ throw new RuntimeException("Missing hit collection!");
- if (rawCollectionName != null) {
- List<RawCalorimeterHit> rawHits = event.get(RawCalorimeterHit.class, rawCollectionName);
- if (rawHits == null)
- throw new RuntimeException("Missing hit collection!");
+ if (rawCollectionName != null) {
+ List<RawCalorimeterHit> rawHits = event.get(RawCalorimeterHit.class, rawCollectionName);
+ if (rawHits == null)
+ throw new RuntimeException("Missing hit collection!");
- for (RawCalorimeterHit hit : rawHits) {
- rawE.fill(hit.getAmplitude());
- //window_E.fill(hit.getAmplitude(),hit.getWindowSize());
- }
- }
+ for (RawCalorimeterHit hit : rawHits) {
+ rawE.fill(hit.getAmplitude());
+ //window_E.fill(hit.getAmplitude(),hit.getWindowSize());
+ }
+ }
- if (ecalCollectionName != null) {
- List<CalorimeterHit> ecalHits = event.get(CalorimeterHit.class, ecalCollectionName);
- if (ecalHits == null)
- throw new RuntimeException("Missing hit collection!");
+ if (ecalCollectionName != null) {
+ List<CalorimeterHit> ecalHits = event.get(CalorimeterHit.class, ecalCollectionName);
+ if (ecalHits == null)
+ throw new RuntimeException("Missing hit collection!");
- for (CalorimeterHit hit : ecalHits) {
- ecalE.fill(hit.getRawEnergy());
- }
- }
+ for (CalorimeterHit hit : ecalHits) {
+ ecalE.fill(hit.getRawEnergy());
+ }
+ }
- for (CalorimeterHit hit : edepHits) {
- if (hit.getRawEnergy() > edepThreshold)
- edepE.fill(hit.getRawEnergy());
- }
- for (Cluster cluster : clusters) {
- clusterE.fill(cluster.getEnergy());
- }
- }
+ for (CalorimeterHit hit : edepHits) {
+ if (hit.getRawEnergy() > edepThreshold)
+ edepE.fill(hit.getRawEnergy());
+ }
+ for (Cluster cluster : clusters) {
+ clusterE.fill(cluster.getEnergy());
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/HPSEcalTriggerPlotsDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/HPSEcalTriggerPlotsDriver.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/HPSEcalTriggerPlotsDriver.java Wed Apr 27 11:11:32 2016
@@ -23,7 +23,7 @@
* Exp $
*/
public class HPSEcalTriggerPlotsDriver extends Driver {
- // LCSim collection names.
+ // LCSim collection names.
String ecalCollectionName = "EcalHits";
String clusterCollectionName = "EcalClusters";
@@ -55,46 +55,46 @@
}
public void startOfData() {
- // Initialize a hit histogram for each declared energy.
- for(int e = 0; e < energyCut.length; e++) {
- hitXYPlot[e] = aida.histogram2D("Trigger Plots: " + ecalCollectionName +
- " : Hits above " + energyCut[e] + " MeV", 46, -23, 23, 11, -5.5, 5.5);
- }
- // Initialize the remaining plots.
+ // Initialize a hit histogram for each declared energy.
+ for(int e = 0; e < energyCut.length; e++) {
+ hitXYPlot[e] = aida.histogram2D("Trigger Plots: " + ecalCollectionName +
+ " : Hits above " + energyCut[e] + " MeV", 46, -23, 23, 11, -5.5, 5.5);
+ }
+ // Initialize the remaining plots.
crystalDeadTime = aida.histogram2D("Trigger Plots: " + ecalCollectionName +
- " : Crystal dead time", 46, -23, 23, 11, -5.5, 5.5);
+ " : Crystal dead time", 46, -23, 23, 11, -5.5, 5.5);
clusterHitXYPlot = aida.histogram2D("Trigger Plots: " + clusterCollectionName +
- " : Crystals in clusters", 47, -23.5, 23.5, 11, -5.5, 5.5);
+ " : Crystals in clusters", 47, -23.5, 23.5, 11, -5.5, 5.5);
seedHitXYPlot = aida.histogram2D("Trigger Plots: " + clusterCollectionName +
- " : Seed hits", 47, -23.5, 23.5, 11, -5.5, 5.5);
+ " : Seed hits", 47, -23.5, 23.5, 11, -5.5, 5.5);
triggerClusterHitXYPlot = aida.histogram2D("Trigger Plots: " + clusterCollectionName +
" : Crystals in clusters, with trigger", 47, -23.5, 23.5, 11, -5.5, 5.5);
triggerSeedHitXYPlot = aida.histogram2D("Trigger Plots: " + clusterCollectionName +
- " : Seed hits, with trigger", 47, -23.5, 23.5, 11, -5.5, 5.5);
+ " : Seed hits, with trigger", 47, -23.5, 23.5, 11, -5.5, 5.5);
}
public void process(EventHeader event) {
- // If the current event has the indicated hit collection,
- // use it as the hit list.
- List<CalorimeterHit> hits;
- if(event.hasCollection(CalorimeterHit.class, ecalCollectionName)) {
- hits = event.get(CalorimeterHit.class, ecalCollectionName);
- }
- // If it does not, then use an empty list to avoid crashing.
- else { hits = new ArrayList<CalorimeterHit>(0); }
-
- // If the current event has the indicated cluster collection,
- // use it as the cluster list.
- List<Cluster> clusters;
- if(event.hasCollection(Cluster.class, clusterCollectionName)) {
- clusters = event.get(Cluster.class, clusterCollectionName);
- }
- // If it does not, then use an empty list to avoid crashing.
- else { clusters = new ArrayList<Cluster>(0); }
+ // If the current event has the indicated hit collection,
+ // use it as the hit list.
+ List<CalorimeterHit> hits;
+ if(event.hasCollection(CalorimeterHit.class, ecalCollectionName)) {
+ hits = event.get(CalorimeterHit.class, ecalCollectionName);
+ }
+ // If it does not, then use an empty list to avoid crashing.
+ else { hits = new ArrayList<CalorimeterHit>(0); }
+
+ // If the current event has the indicated cluster collection,
+ // use it as the cluster list.
+ List<Cluster> clusters;
+ if(event.hasCollection(Cluster.class, clusterCollectionName)) {
+ clusters = event.get(Cluster.class, clusterCollectionName);
+ }
+ // If it does not, then use an empty list to avoid crashing.
+ else { clusters = new ArrayList<Cluster>(0); }
// Populate hit plots.
for (CalorimeterHit hit : hits) {
- // Get the hit crystal position.
+ // Get the hit crystal position.
int ix = hit.getIdentifierFieldValue("ix");
int iy = hit.getIdentifierFieldValue("iy");
double energy = hit.getRawEnergy();
@@ -102,9 +102,9 @@
// Loop through the energy plots and fill them if the hit
// is over the current energy threshold/
for(int e = 0; e < energyCut.length; e++) {
- if(energy > energyCut[e] * EcalUtils.MeV) {
- hitXYPlot[e].fill(ix - 0.5 * Math.signum(ix), iy);
- }
+ if(energy > energyCut[e] * EcalUtils.MeV) {
+ hitXYPlot[e].fill(ix - 0.5 * Math.signum(ix), iy);
+ }
}
// Generate the dead time plot.
@@ -121,8 +121,8 @@
// Populate cluster based plots.
for (Cluster cluster : clusters) {
- // Get the cluster's seed hit position.
- CalorimeterHit seed = cluster.getCalorimeterHits().get(0);
+ // Get the cluster's seed hit position.
+ CalorimeterHit seed = cluster.getCalorimeterHits().get(0);
int ix = seed.getIdentifierFieldValue("ix");
int iy = seed.getIdentifierFieldValue("iy");
@@ -135,7 +135,7 @@
// Populate the component hit histogram.
for (CalorimeterHit hit : cluster.getCalorimeterHits()) {
- // Get the component hit location.
+ // Get the component hit location.
ix = hit.getIdentifierFieldValue("ix");
iy = hit.getIdentifierFieldValue("iy");
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/HPSMCParticlePlotsDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/HPSMCParticlePlotsDriver.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/HPSMCParticlePlotsDriver.java Wed Apr 27 11:11:32 2016
@@ -25,138 +25,138 @@
*/
public class HPSMCParticlePlotsDriver extends Driver {
- AIDA aida = AIDA.defaultInstance();
+ AIDA aida = AIDA.defaultInstance();
//private AIDAFrame pFrame;
IAnalysisFactory af = aida.analysisFactory();
public boolean _hideFrame = false;
// MCParticle plots.
- ICloud1D primaryEPlot;
- ICloud1D fsCountPlot;
- IHistogram1D fsCountVsEventPlot;
- ICloud1D fsCountTypePlot;
- ICloud1D fsCountEventTypePlot;
- ICloud1D fsCountEventTypePlot2;
- ICloud1D fsCountTypePlot500;
- IHistogram1D fsEPlot;
- IHistogram1D fsGammaEPlot;
- IHistogram1D fsElectronEPlot;
- IHistogram1D fsPositronEPlot;
- IHistogram1D fsThetayPlot;
- ICloud1D fsGammaThetaPlot;
- IHistogram1D fsGammaThetayPlot;
- IHistogram1D fsGammaThetayTrigPlot;
- ICloud2D fsGammaThetayEPlot;
- ICloud1D fsElectronThetaPlot;
- IHistogram1D fsElectronThetayPlot;
- IHistogram1D fsElectronThetayTrigPlot;
- ICloud2D fsElectronThetayEPlot;
- ICloud1D fsPositronThetaPlot;
- IHistogram1D fsPositronThetayPlot;
- IHistogram1D fsPositronThetayTrigPlot;
- ICloud2D fsPositronThetayEPlot;
+ ICloud1D primaryEPlot;
+ ICloud1D fsCountPlot;
+ IHistogram1D fsCountVsEventPlot;
+ ICloud1D fsCountTypePlot;
+ ICloud1D fsCountEventTypePlot;
+ ICloud1D fsCountEventTypePlot2;
+ ICloud1D fsCountTypePlot500;
+ IHistogram1D fsEPlot;
+ IHistogram1D fsGammaEPlot;
+ IHistogram1D fsElectronEPlot;
+ IHistogram1D fsPositronEPlot;
+ IHistogram1D fsThetayPlot;
+ ICloud1D fsGammaThetaPlot;
+ IHistogram1D fsGammaThetayPlot;
+ IHistogram1D fsGammaThetayTrigPlot;
+ ICloud2D fsGammaThetayEPlot;
+ ICloud1D fsElectronThetaPlot;
+ IHistogram1D fsElectronThetayPlot;
+ IHistogram1D fsElectronThetayTrigPlot;
+ ICloud2D fsElectronThetayEPlot;
+ ICloud1D fsPositronThetaPlot;
+ IHistogram1D fsPositronThetayPlot;
+ IHistogram1D fsPositronThetayTrigPlot;
+ ICloud2D fsPositronThetayEPlot;
ICloud1D eventEPlot;
- class MCParticleEComparator implements Comparator<MCParticle> {
-
- public int compare(MCParticle p1, MCParticle p2) {
- double e1 = p1.getEnergy();
- double e2 = p2.getEnergy();
- if (e1 < e2) {
- return -1;
- } else if (e1 == e2) {
- return 0;
- } else {
- return 1;
- }
- }
- }
+ class MCParticleEComparator implements Comparator<MCParticle> {
+
+ public int compare(MCParticle p1, MCParticle p2) {
+ double e1 = p1.getEnergy();
+ double e2 = p2.getEnergy();
+ if (e1 < e2) {
+ return -1;
+ } else if (e1 == e2) {
+ return 0;
+ } else {
+ return 1;
+ }
+ }
+ }
public void setHideFrame(boolean hideFrame) {
this._hideFrame = hideFrame;
}
- @Override
- public void startOfData() {
- fsCountPlot = aida.cloud1D("MCParticle: Number of Final State Particles");
- fsCountPlot.annotation().addItem("xAxisLabel", "Number of FS Particles");
+ @Override
+ public void startOfData() {
+ fsCountPlot = aida.cloud1D("MCParticle: Number of Final State Particles");
+ fsCountPlot.annotation().addItem("xAxisLabel", "Number of FS Particles");
fsCountVsEventPlot = aida.histogram1D("MCParticle: Number of Final State Particles vs Event Nr", 501, -0.5, 500.5);
- fsCountVsEventPlot.annotation().addItem("xAxisLabel", "Event Number");
+ fsCountVsEventPlot.annotation().addItem("xAxisLabel", "Event Number");
fsCountTypePlot = aida.cloud1D("MCParticle: Number of Final State Particles Type");
- fsCountTypePlot.annotation().addItem("xAxisLabel", "Number of FS Particles of Type");
+ fsCountTypePlot.annotation().addItem("xAxisLabel", "Number of FS Particles of Type");
fsCountTypePlot500 = aida.cloud1D("MCParticle: Number of Final State Particles Type E>0.5GeV");
- fsCountTypePlot500.annotation().addItem("xAxisLabel", "Number of FS Particles of Type E>0.5GeV");
+ fsCountTypePlot500.annotation().addItem("xAxisLabel", "Number of FS Particles of Type E>0.5GeV");
fsCountEventTypePlot = aida.cloud1D("MCParticle: Number of Final State Types");
- fsCountEventTypePlot.annotation().addItem("xAxisLabel", "Number of FS Types");
+ fsCountEventTypePlot.annotation().addItem("xAxisLabel", "Number of FS Types");
fsCountEventTypePlot2 = aida.cloud1D("MCParticle: Number of Final State Types Gamma E>500");
- fsCountEventTypePlot2.annotation().addItem("xAxisLabel", "Number of FS Types Gamma E>500");
-
- fsEPlot = aida.histogram1D("MCParticle: FS Particle E",100,0,3);
- fsEPlot.annotation().addItem("xAxisLabel", "Particle E [GeV]");
-
- fsGammaEPlot = aida.histogram1D("MCParticle: FS Gamma E",100,0,3);
- fsGammaEPlot.annotation().addItem("xAxisLabel", "Particle E [GeV]");
-
- fsElectronEPlot = aida.histogram1D("MCParticle: FS Electron E",100,0,3);
- fsElectronEPlot.annotation().addItem("xAxisLabel", "Particle E [GeV]");
-
- fsPositronEPlot = aida.histogram1D("MCParticle: FS Positron E",100,0,3);
- fsPositronEPlot.annotation().addItem("xAxisLabel", "Particle E [GeV]");
-
- fsGammaThetaPlot = aida.cloud1D("MCParticle: FS Gamma Theta");
- fsGammaThetaPlot.annotation().addItem("xAxisLabel", "Particle angle [rad]");
+ fsCountEventTypePlot2.annotation().addItem("xAxisLabel", "Number of FS Types Gamma E>500");
+
+ fsEPlot = aida.histogram1D("MCParticle: FS Particle E",100,0,3);
+ fsEPlot.annotation().addItem("xAxisLabel", "Particle E [GeV]");
+
+ fsGammaEPlot = aida.histogram1D("MCParticle: FS Gamma E",100,0,3);
+ fsGammaEPlot.annotation().addItem("xAxisLabel", "Particle E [GeV]");
+
+ fsElectronEPlot = aida.histogram1D("MCParticle: FS Electron E",100,0,3);
+ fsElectronEPlot.annotation().addItem("xAxisLabel", "Particle E [GeV]");
+
+ fsPositronEPlot = aida.histogram1D("MCParticle: FS Positron E",100,0,3);
+ fsPositronEPlot.annotation().addItem("xAxisLabel", "Particle E [GeV]");
+
+ fsGammaThetaPlot = aida.cloud1D("MCParticle: FS Gamma Theta");
+ fsGammaThetaPlot.annotation().addItem("xAxisLabel", "Particle angle [rad]");
fsThetayPlot = aida.histogram1D("MCParticle: FS Particle Thetay",100,0,0.1);
- fsThetayPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
+ fsThetayPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
fsGammaThetayPlot = aida.histogram1D("MCParticle: FS Gamma Thetay",100,0,0.1);
- fsGammaThetayPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
+ fsGammaThetayPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
fsGammaThetayTrigPlot = aida.histogram1D("MCParticle: FS Gamma Thetay Trig",100,0,0.1);
- fsGammaThetayTrigPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
+ fsGammaThetayTrigPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
fsGammaThetayEPlot = aida.cloud2D("MCParticle: FS Gamma Thetay vs E");
- fsGammaThetayEPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
- fsGammaThetayEPlot.annotation().addItem("yAxisLabel", "Particle Energy [GeV]");
-
- fsElectronThetaPlot = aida.cloud1D("MCParticle: FS Electron Theta");
- fsElectronThetaPlot.annotation().addItem("xAxisLabel", "Particle angle [rad]");
+ fsGammaThetayEPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
+ fsGammaThetayEPlot.annotation().addItem("yAxisLabel", "Particle Energy [GeV]");
+
+ fsElectronThetaPlot = aida.cloud1D("MCParticle: FS Electron Theta");
+ fsElectronThetaPlot.annotation().addItem("xAxisLabel", "Particle angle [rad]");
fsElectronThetayPlot = aida.histogram1D("MCParticle: FS Electron Thetay",100,0,0.1);
- fsElectronThetayPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
+ fsElectronThetayPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
fsElectronThetayTrigPlot = aida.histogram1D("MCParticle: FS Electron Thetay Trig",100,0,0.1);
- fsElectronThetayTrigPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
+ fsElectronThetayTrigPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
fsElectronThetayEPlot = aida.cloud2D("MCParticle: FS Electron Thetay vs E");
- fsElectronThetayEPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
- fsElectronThetayEPlot.annotation().addItem("yAxisLabel", "Particle Energy [GeV]");
-
- fsPositronThetaPlot = aida.cloud1D("MCParticle: FS Positron Theta");
- fsPositronThetaPlot.annotation().addItem("xAxisLabel", "Particle angle [rad]");
+ fsElectronThetayEPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
+ fsElectronThetayEPlot.annotation().addItem("yAxisLabel", "Particle Energy [GeV]");
+
+ fsPositronThetaPlot = aida.cloud1D("MCParticle: FS Positron Theta");
+ fsPositronThetaPlot.annotation().addItem("xAxisLabel", "Particle angle [rad]");
fsPositronThetayPlot = aida.histogram1D("MCParticle: FS Positron Thetay",100,0,0.1);
- fsPositronThetayPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
+ fsPositronThetayPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
fsPositronThetayTrigPlot = aida.histogram1D("MCParticle: FS Positron Thetay Trig",100,0,0.1);
- fsPositronThetayTrigPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
+ fsPositronThetayTrigPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
fsPositronThetayEPlot = aida.cloud2D("MCParticle: FS Positron Thetay vs E");
- fsPositronThetayEPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
- fsPositronThetayEPlot.annotation().addItem("yAxisLabel", "Particle Energy [GeV]");
-
-
- primaryEPlot = aida.cloud1D("MCParticle: Highest Primary E in Event");
- primaryEPlot.annotation().addItem("xAxisLabel", "E [GeV]");
-
- eventEPlot = aida.cloud1D("MCParticle: Total Gen FS Electron E in Event");
- eventEPlot.annotation().addItem("xAxisLabel", "E [GeV]");
+ fsPositronThetayEPlot.annotation().addItem("xAxisLabel", "Particle Thetay angle [rad]");
+ fsPositronThetayEPlot.annotation().addItem("yAxisLabel", "Particle Energy [GeV]");
+
+
+ primaryEPlot = aida.cloud1D("MCParticle: Highest Primary E in Event");
+ primaryEPlot.annotation().addItem("xAxisLabel", "E [GeV]");
+
+ eventEPlot = aida.cloud1D("MCParticle: Total Gen FS Electron E in Event");
+ eventEPlot.annotation().addItem("xAxisLabel", "E [GeV]");
//pFrame = new AIDAFrame();
@@ -205,19 +205,19 @@
- }
-
- @Override
- public void process(EventHeader event) {
-
- // MCParticles
- List<MCParticle> mcparticles = event.get(MCParticle.class).get(0);
-
- // Final State particles.
- List<MCParticle> fsParticles = makeGenFSParticleList(mcparticles);
-
- //System.out.println("fsParticles="+fsParticles.size());
- fsCountPlot.fill(fsParticles.size());
+ }
+
+ @Override
+ public void process(EventHeader event) {
+
+ // MCParticles
+ List<MCParticle> mcparticles = event.get(MCParticle.class).get(0);
+
+ // Final State particles.
+ List<MCParticle> fsParticles = makeGenFSParticleList(mcparticles);
+
+ //System.out.println("fsParticles="+fsParticles.size());
+ fsCountPlot.fill(fsParticles.size());
for (int i=0;i<fsParticles.size();++i) fsCountVsEventPlot.fill(event.getEventNumber());
@@ -229,69 +229,69 @@
int[] ngammas = {0,0};
int count = 0;
double trigThr = 0.2;
- for (MCParticle fs : fsParticles) {
+ for (MCParticle fs : fsParticles) {
//System.out.println("Index " + count);
- double fsE = fs.getEnergy();
- double theta = Math.atan2(Math.sqrt(fs.getPX() * fs.getPX() + fs.getPY() * fs.getPY()), fs.getPZ());
- double thetay = Math.atan2(fs.getPY(), fs.getPZ());
- int fsPdg = fs.getPDGID();
- fsEPlot.fill(fsE);
+ double fsE = fs.getEnergy();
+ double theta = Math.atan2(Math.sqrt(fs.getPX() * fs.getPX() + fs.getPY() * fs.getPY()), fs.getPZ());
+ double thetay = Math.atan2(fs.getPY(), fs.getPZ());
+ int fsPdg = fs.getPDGID();
+ fsEPlot.fill(fsE);
this.fsThetayPlot.fill(Math.abs(thetay));
fsCountTypePlot.fill(fsPdg);
if(fsE>0.5) fsCountTypePlot500.fill(fsPdg);
- if (ParticleTypeClassifier.isElectron(fsPdg)) {
- fsElectronEPlot.fill(fsE);
- fsElectronThetaPlot.fill(theta);
- fsElectronThetayPlot.fill(Math.abs(thetay));
- if(fsE>trigThr) fsElectronThetayTrigPlot.fill(Math.abs(thetay));
- fsElectronThetayEPlot.fill(Math.abs(thetay),fsE);
+ if (ParticleTypeClassifier.isElectron(fsPdg)) {
+ fsElectronEPlot.fill(fsE);
+ fsElectronThetaPlot.fill(theta);
+ fsElectronThetayPlot.fill(Math.abs(thetay));
+ if(fsE>trigThr) fsElectronThetayTrigPlot.fill(Math.abs(thetay));
+ fsElectronThetayEPlot.fill(Math.abs(thetay),fsE);
nelectrons[0]++;
if(fsGammaEmax>0.5) nelectrons[1]++;
- } else if (ParticleTypeClassifier.isPositron(fsPdg)) {
- fsPositronEPlot.fill(fsE);
- fsPositronThetaPlot.fill(theta);
- fsPositronThetayPlot.fill(Math.abs(thetay));
- if(fsE>trigThr) fsPositronThetayTrigPlot.fill(Math.abs(thetay));
- fsPositronThetayEPlot.fill(Math.abs(thetay),fsE);
+ } else if (ParticleTypeClassifier.isPositron(fsPdg)) {
+ fsPositronEPlot.fill(fsE);
+ fsPositronThetaPlot.fill(theta);
+ fsPositronThetayPlot.fill(Math.abs(thetay));
+ if(fsE>trigThr) fsPositronThetayTrigPlot.fill(Math.abs(thetay));
+ fsPositronThetayEPlot.fill(Math.abs(thetay),fsE);
npositrons[0]++;
if(fsGammaEmax>0.5) npositrons[1]++;
- } else if (ParticleTypeClassifier.isPhoton(fsPdg)) {
- fsGammaEPlot.fill(fsE);
- fsGammaThetaPlot.fill(theta);
- fsGammaThetayPlot.fill(Math.abs(thetay));
- if(fsE>trigThr) fsGammaThetayTrigPlot.fill(Math.abs(thetay));
- fsGammaThetayEPlot.fill(Math.abs(thetay),fsE);
+ } else if (ParticleTypeClassifier.isPhoton(fsPdg)) {
+ fsGammaEPlot.fill(fsE);
+ fsGammaThetaPlot.fill(theta);
+ fsGammaThetayPlot.fill(Math.abs(thetay));
+ if(fsE>trigThr) fsGammaThetayTrigPlot.fill(Math.abs(thetay));
+ fsGammaThetayEPlot.fill(Math.abs(thetay),fsE);
ngammas[0]++;
if(fsGammaEmax>0.5) {
ngammas[1]++;
//System.out.println("Counting high E gamma at count "+ count);
}
- }
- }
+ }
+ }
fsCountEventTypePlot.fill(getEventTypeId(nelectrons[0],npositrons[0],ngammas[0]));
fsCountEventTypePlot2.fill(getEventTypeId(nelectrons[1],npositrons[1],ngammas[1]));
- // Sort MCParticles on energy.
- //Collections.sort(fsParticles, new MCParticleEComparator());
-
- // Energy of top two FS particles.
- //double e2 = fsParticles.get(0).getEnergy() + fsParticles.get(1).getEnergy();
-
- // Energy of top three FS particles.
- //double e3 = e2 + fsParticles.get(2).getEnergy();
-
- if (!fsParticles.isEmpty()) {
- // primary particle with most E
- double primaryE = getPrimary(fsParticles).getEnergy();
- primaryEPlot.fill(primaryE);
- }
-
- // event electron energy
- double eventE = getPrimaryElectronE(fsParticles);
- eventEPlot.fill(eventE);
- }
+ // Sort MCParticles on energy.
+ //Collections.sort(fsParticles, new MCParticleEComparator());
+
+ // Energy of top two FS particles.
+ //double e2 = fsParticles.get(0).getEnergy() + fsParticles.get(1).getEnergy();
+
+ // Energy of top three FS particles.
+ //double e3 = e2 + fsParticles.get(2).getEnergy();
+
+ if (!fsParticles.isEmpty()) {
+ // primary particle with most E
+ double primaryE = getPrimary(fsParticles).getEnergy();
+ primaryEPlot.fill(primaryE);
+ }
+
+ // event electron energy
+ double eventE = getPrimaryElectronE(fsParticles);
+ eventEPlot.fill(eventE);
+ }
public int getEventTypeId(int ne, int np, int ng) {
@@ -314,57 +314,57 @@
return 0;
}
- public double getHighestPhotonE(List<MCParticle> particles) {
- double Emax = -1;
+ public double getHighestPhotonE(List<MCParticle> particles) {
+ double Emax = -1;
double E=0;
int count = 0;
- for (MCParticle particle : particles) {
- if (ParticleTypeClassifier.isPhoton(particle.getPDGID())) {
- E = particle.getEnergy();
+ for (MCParticle particle : particles) {
+ if (ParticleTypeClassifier.isPhoton(particle.getPDGID())) {
+ E = particle.getEnergy();
if(E>Emax) {
Emax = E;
//System.out.println("Emax from photon with index " + count);
}
- count++;
+ count++;
}
- }
- return Emax;
- }
+ }
+ return Emax;
+ }
private double getPrimaryElectronE(List<MCParticle> particles) {
- double totalE = 0;
- for (MCParticle particle : particles) {
- if (Math.abs(particle.getPDGID()) == 11) {
- totalE += particle.getEnergy();
- }
- }
- return totalE;
- }
-
- private MCParticle getPrimary(List<MCParticle> particles) {
- double maxE = 0;
- MCParticle primary = null;
- for (MCParticle particle : particles) {
- if (particle.getEnergy() > maxE) {
- maxE = particle.getEnergy();
- primary = particle;
- }
- }
- return primary;
- }
-
- public static List<MCParticle> makeGenFSParticleList(List<MCParticle> mcparticles) {
- List<MCParticle> fsParticles = new ArrayList<MCParticle>();
- for (MCParticle mcparticle : mcparticles) {
- if (mcparticle.getGeneratorStatus() == MCParticle.FINAL_STATE) {
- double theta = Math.atan2(Math.sqrt(mcparticle.getPX() * mcparticle.getPX() + mcparticle.getPY() * mcparticle.getPY()), mcparticle.getPZ());
- if (theta > 1e-3) {
- fsParticles.add(mcparticle);
- }
- }
- }
- return fsParticles;
- }
+ double totalE = 0;
+ for (MCParticle particle : particles) {
+ if (Math.abs(particle.getPDGID()) == 11) {
+ totalE += particle.getEnergy();
+ }
+ }
+ return totalE;
+ }
+
+ private MCParticle getPrimary(List<MCParticle> particles) {
+ double maxE = 0;
+ MCParticle primary = null;
+ for (MCParticle particle : particles) {
+ if (particle.getEnergy() > maxE) {
+ maxE = particle.getEnergy();
+ primary = particle;
+ }
+ }
+ return primary;
+ }
+
+ public static List<MCParticle> makeGenFSParticleList(List<MCParticle> mcparticles) {
+ List<MCParticle> fsParticles = new ArrayList<MCParticle>();
+ for (MCParticle mcparticle : mcparticles) {
+ if (mcparticle.getGeneratorStatus() == MCParticle.FINAL_STATE) {
+ double theta = Math.atan2(Math.sqrt(mcparticle.getPX() * mcparticle.getPX() + mcparticle.getPY() * mcparticle.getPY()), mcparticle.getPZ());
+ if (theta > 1e-3) {
+ fsParticles.add(mcparticle);
+ }
+ }
+ }
+ return fsParticles;
+ }
public void endOfData() {
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/cosmic/DualThresholdSignalFitDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/cosmic/DualThresholdSignalFitDriver.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/cosmic/DualThresholdSignalFitDriver.java Wed Apr 27 11:11:32 2016
@@ -182,7 +182,7 @@
* Process the event, performing a signal fit for every raw data hit in the input collection.
* The hits that pass the sigma selection cut are added to a new hits collection, which can be
* converted to a CalorimeterHit collection and then clustered.
- * @throw NextEventException if there are not enough hits that pass the selection cut.
+ * @throws NextEventException if there are not enough hits that pass the selection cut.
*/
public void process(EventHeader event) {
if (event.hasCollection(RawTrackerHit.class, inputHitsCollectionName)) {
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeHitSelectionDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeHitSelectionDriver.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeHitSelectionDriver.java Wed Apr 27 11:11:32 2016
@@ -54,7 +54,7 @@
/**
* Set the number of ADC samples in a row which must be above the threshold.
- * @param selectedHits The minimum number of samples above threshold.
+ * @param minimumSelectedSamples The minimum number of samples above threshold.
*/
public void setMinimumSelectedSamples(int minimumSelectedSamples) {
this.minimumSelectedSamples = minimumSelectedSamples;
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeSignalFitDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeSignalFitDriver.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeSignalFitDriver.java Wed Apr 27 11:11:32 2016
@@ -168,7 +168,7 @@
* Process the event, performing a signal fit for every raw data hit in the input collection.
* The hits that pass the sigma selection cut are added to a new hits collection, which can be
* converted to a CalorimeterHit collection and then clustered.
- * @throw NextEventException if there are not enough hits that pass the selection cut.
+ * @throws NextEventException if there are not enough hits that pass the selection cut.
*/
public void process(EventHeader event) {
if (event.hasCollection(RawTrackerHit.class, inputHitsCollectionName)) {
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/examples/PrintGeometryDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/examples/PrintGeometryDriver.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/examples/PrintGeometryDriver.java Wed Apr 27 11:11:32 2016
@@ -2,8 +2,9 @@
import hep.physics.vec.BasicHep3Vector;
import hep.physics.vec.Hep3Vector;
-import java.util.ArrayList;
+
import java.util.List;
+
import org.lcsim.detector.IDetectorElement;
import org.lcsim.detector.ITransform3D;
import org.lcsim.detector.Transform3D;
@@ -13,13 +14,7 @@
import org.lcsim.detector.tracker.silicon.SiSensor;
import org.lcsim.detector.tracker.silicon.SiSensorElectrodes;
import org.lcsim.detector.tracker.silicon.SiTrackerIdentifierHelper;
-import org.lcsim.event.RawTrackerHit;
-import org.lcsim.event.SimTrackerHit;
-import org.lcsim.event.base.BaseRawTrackerHit;
import org.lcsim.geometry.Detector;
-import org.hps.recon.tracking.MaterialSupervisor;
-import org.hps.recon.tracking.TrackerHitUtils;
-import org.lcsim.recon.tracking.digitization.sisim.TrackerHitType;
import org.lcsim.util.Driver;
/**
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/examples/StripGoldenEventsDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/examples/StripGoldenEventsDriver.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/examples/StripGoldenEventsDriver.java Wed Apr 27 11:11:32 2016
@@ -1,11 +1,11 @@
package org.hps.analysis.examples;
import java.util.List;
+
import org.lcsim.event.Cluster;
import org.lcsim.event.EventHeader;
import org.lcsim.event.ReconstructedParticle;
import org.lcsim.event.Track;
-import org.lcsim.event.Vertex;
import org.lcsim.util.Driver;
/**
@@ -27,7 +27,7 @@
boolean skipEvent = false;
if (event.hasCollection(ReconstructedParticle.class, "UnconstrainedV0Candidates")) {
List<ReconstructedParticle> vertices = event.get(ReconstructedParticle.class, "UnconstrainedV0Candidates");
- //System.out.println("Thete are: "+vertices.size()+" Unconstrained V0 candidates");
+ //System.out.println("Thete are: "+vertices.size()+" Unconstrained V0 candidates");
if (vertices.size() > 1 || vertices.isEmpty()) {
skipEvent = true;
} else {
@@ -38,14 +38,14 @@
List<Track> trks = rp.getTracks();
// require each track to have six hits
if (trks.get(0).getTrackerHits().size() != 6) {
- //System.out.println("Thete are: "+trks.get(0).getTrackerHits().size()+" hits on Track");
+ //System.out.println("Thete are: "+trks.get(0).getTrackerHits().size()+" hits on Track");
skipEvent = true;
}
}
// require no other tracks in the event
if (event.get(Track.class, "MatchedTracks").size() > 2) {
skipEvent = true;
- //System.out.println("Thete are: "+event.get(Track.class, "MatchedTracks").size()+" Matched tracks");
+ //System.out.println("Thete are: "+event.get(Track.class, "MatchedTracks").size()+" Matched tracks");
}
// require no other clusters in the event
if (event.get(Cluster.class, "EcalClustersGTP").size() > 2) {
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/examples/StripMollerEventsDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/examples/StripMollerEventsDriver.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/examples/StripMollerEventsDriver.java Wed Apr 27 11:11:32 2016
@@ -94,7 +94,7 @@
skipEvent = false;
}
}
- //System.out.println("Thete are: "+event.get(Track.class, "MatchedTracks").size()+" Matched tracks");
+ //System.out.println("Thete are: "+event.get(Track.class, "MatchedTracks").size()+" Matched tracks");
}
}
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/DataTriggerSimDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/DataTriggerSimDriver.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/DataTriggerSimDriver.java Wed Apr 27 11:11:32 2016
@@ -48,454 +48,454 @@
* output object is not persisted into LCIO after runtime.
*
* @author Kyle McCarty <[log in to unmask]>
- * @see DAQConfigDriver
- * @see EcalOnlineRawConverterDriver
- * @see GTPOnlineClusterDriver
+ * @see org.hps.record.daqconfig.DAQConfigDriver
+ * @see org.hps.recon.ecal.EcalOnlineRawConverterDriver
+ * @see org.hps.recon.ecal.cluster.GTPOnlineClusterDriver
* @see SimTriggerData
*/
public class DataTriggerSimDriver extends Driver {
- // Store the LCIO collection names for the needed objects.
- private boolean filterUnverifiable = false;
- private String bankCollectionName = "TriggerBank";
- private String clusterCollectionName = "EcalClusters";
- private String simTriggerCollectionName = "SimTriggers";
-
- // Store the SSP bank.
- private SSPData sspBank = null;
-
- // Store cluster verifiability parameters.
- private int nsa = 0;
- private int nsb = 0;
- private int windowWidth = 0;
-
- // Define trigger simulation modules.
- private boolean[] pairTriggerEnabled = new boolean[2];
- private boolean[] singlesTriggerEnabled = new boolean[2];
- private boolean[][] pairCutsEnabled = new boolean[2][7];
- private boolean[][] singlesCutsEnabled = new boolean[2][3];
- private TriggerModule[] pairsTrigger = new TriggerModule[2];
- private TriggerModule[] singlesTrigger = new TriggerModule[2];
-
- // Reference variables.
- private static final int ENERGY_MIN = TriggerDiagnosticUtil.SINGLES_ENERGY_MIN;
- private static final int ENERGY_MAX = TriggerDiagnosticUtil.SINGLES_ENERGY_MAX;
- private static final int HIT_COUNT = TriggerDiagnosticUtil.SINGLES_HIT_COUNT;
- private static final int ENERGY_SUM = TriggerDiagnosticUtil.PAIR_ENERGY_SUM;
- private static final int ENERGY_DIFF = TriggerDiagnosticUtil.PAIR_ENERGY_DIFF;
- private static final int ENERGY_SLOPE = TriggerDiagnosticUtil.PAIR_ENERGY_SLOPE;
- private static final int COPLANARITY = TriggerDiagnosticUtil.PAIR_COPLANARITY;
-
- /**
- * Connects the driver to the the <code>ConfigurationManager</code>
- * in order to obtain the correct trigger information. Trigger
- * settings are stored in the <code>TriggerModule</code> objects.
- */
- @Override
- public void startOfData() {
- // Define the first singles trigger.
- singlesTrigger[0] = new TriggerModule();
- singlesTrigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.500);
- singlesTrigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 8.191);
- singlesTrigger[0].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 0);
-
- // Define the second singles trigger.
- singlesTrigger[1] = new TriggerModule();
- singlesTrigger[1].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.000);
- singlesTrigger[1].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 8.191);
- singlesTrigger[1].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 0);
-
- // Define the first pairs trigger.
- pairsTrigger[0] = new TriggerModule();
- pairsTrigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.000);
- pairsTrigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 8.191);
- pairsTrigger[0].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 0);
- pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.000);
- pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 8.191);
- pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 8.191);
- pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.000);
- pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F, 0.001);
- pairsTrigger[0].setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 180);
- pairsTrigger[0].setCutValue(TriggerModule.PAIR_TIME_COINCIDENCE, 8);
-
- // Define the second pairs trigger.
- pairsTrigger[1] = new TriggerModule();
- pairsTrigger[1].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.000);
- pairsTrigger[1].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 8.191);
- pairsTrigger[1].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 0);
- pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.000);
- pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 8.191);
- pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 8.191);
- pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.000);
- pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F, 0.001);
- pairsTrigger[1].setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 180);
- pairsTrigger[1].setCutValue(TriggerModule.PAIR_TIME_COINCIDENCE, 8);
-
- // Listen for the configuration manager to provide the real
- // trigger settings.
- ConfigurationManager.addActionListener(new ActionListener() {
- @Override
- public void actionPerformed(ActionEvent e) {
- // Get the DAQ configuration.
- DAQConfig daq = ConfigurationManager.getInstance();
-
- // Get cluster verifiability parameters.
- nsa = daq.getFADCConfig().getNSA();
- nsb = daq.getFADCConfig().getNSB();
- windowWidth = daq.getFADCConfig().getWindowWidth();
-
- // Load the DAQ settings from the configuration manager.
- singlesTrigger[0].loadDAQConfiguration(daq.getSSPConfig().getSingles1Config());
- singlesTrigger[1].loadDAQConfiguration(daq.getSSPConfig().getSingles2Config());
- pairsTrigger[0].loadDAQConfiguration(daq.getSSPConfig().getPair1Config());
- pairsTrigger[1].loadDAQConfiguration(daq.getSSPConfig().getPair2Config());
-
- // Get the trigger configurations from the DAQ.
- SinglesTriggerConfig[] singles = { daq.getSSPConfig().getSingles1Config(),
- daq.getSSPConfig().getSingles2Config() };
- PairTriggerConfig[] pairs = { daq.getSSPConfig().getPair1Config(),
- daq.getSSPConfig().getPair2Config() };
-
- // Update the enabled/disabled statuses.
- for(int i = 0; i < 2; i++) {
- // Set the trigger enabled status.
- pairTriggerEnabled[i] = pairs[i].isEnabled();
- singlesTriggerEnabled[i] = singles[i].isEnabled();
-
- // Set the singles cut statuses.
- singlesCutsEnabled[i][ENERGY_MIN] = singles[i].getEnergyMinCutConfig().isEnabled();
- singlesCutsEnabled[i][ENERGY_MAX] = singles[i].getEnergyMaxCutConfig().isEnabled();
- singlesCutsEnabled[i][HIT_COUNT] = singles[i].getHitCountCutConfig().isEnabled();
-
- // Set the pair cut statuses.
- pairCutsEnabled[i][ENERGY_MIN] = pairs[i].getEnergyMinCutConfig().isEnabled();
- pairCutsEnabled[i][ENERGY_MAX] = pairs[i].getEnergyMaxCutConfig().isEnabled();
- pairCutsEnabled[i][HIT_COUNT] = pairs[i].getHitCountCutConfig().isEnabled();
- pairCutsEnabled[i][3 + ENERGY_SUM] = pairs[i].getEnergySumCutConfig().isEnabled();
- pairCutsEnabled[i][3 + ENERGY_DIFF] = pairs[i].getEnergyDifferenceCutConfig().isEnabled();
- pairCutsEnabled[i][3 + ENERGY_SLOPE] = pairs[i].getEnergySlopeCutConfig().isEnabled();
- pairCutsEnabled[i][3 + COPLANARITY] = pairs[i].getCoplanarityCutConfig().isEnabled();
- }
- }
- });
- }
-
- /**
- * Processes an LCIO event and simulates triggers in the same manner
- * as the hardware for both <code>SSPCluster</code> objects as well
- * as <code>Cluster</code> objects reconstructed from FADC hits.
- * Triggers are then output to the data stream.
- * @param event - The <code>EventHeader</code> object representing
- * the current LCIO event.
- */
- @Override
- public void process(EventHeader event) {
- // If the DAQ configuration manager has not been initialized,
- // then no action can be performed.
- if(!ConfigurationManager.isInitialized()) {
- // Put an empty trigger results module into the data stream.
- SimTriggerData triggerData = new SimTriggerData();
- List<SimTriggerData> dataList = new ArrayList<SimTriggerData>(1);
- dataList.add(triggerData);
- event.put(simTriggerCollectionName, dataList, SimTriggerData.class, 0);
-
- // Nothing further can be done, since trigger settings are
- // not yet defined.
- return;
- }
-
- // Get the SSP bank.
- if(event.hasCollection(GenericObject.class, bankCollectionName)) {
- // Get the bank list.
- List<GenericObject> bankList = event.get(GenericObject.class, bankCollectionName);
-
- // Search through the banks and get the SSP and TI banks.
- for(GenericObject obj : bankList) {
- // If this is an SSP bank, parse it.
- if(AbstractIntData.getTag(obj) == SSPData.BANK_TAG) {
- sspBank = new SSPData(obj);
- }
- }
- }
-
- // Get a list of SSPClusters.
- List<SSPCluster> sspClusters = null;
- if(sspBank != null) { sspClusters = sspBank.getClusters(); }
- else { sspClusters = new ArrayList<SSPCluster>(0); }
-
- // Get reconstructed clusters.
- List<Cluster> reconClusters = null;
- if(event.hasCollection(Cluster.class, clusterCollectionName)) {
- reconClusters = event.get(Cluster.class, clusterCollectionName);
- }
- else { reconClusters = new ArrayList<Cluster>(0); }
-
- // If only "verifiable" clusters should be used, test all the
- // reconstructed clusters for verifiability.
- if(filterUnverifiable) {
- // Create a list to store the verifiable clusters.
- List<Cluster> goodClusters = new ArrayList<Cluster>();
-
- // Iterate over all the clusters and test them to see if
- // they are verifiable.
- for(Cluster cluster : reconClusters) {
- if(TriggerDiagnosticUtil.isVerifiable(cluster, nsa, nsb, windowWidth)) {
- goodClusters.add(cluster);
- }
- }
-
- // Replace the old cluster list with the new one.
- reconClusters = goodClusters;
- }
-
- // Generate simulated triggers.
- SimTriggerModule<Cluster> reconModule = constructTriggers(reconClusters, Cluster.class);
- SimTriggerModule<SSPCluster> sspModule = constructTriggers(sspClusters, SSPCluster.class);
-
- // Insert the trigger results in the data stream.
- SimTriggerData triggerData = new SimTriggerData(reconModule, sspModule);
- List<SimTriggerData> dataList = new ArrayList<SimTriggerData>(1);
- dataList.add(triggerData);
- event.put(simTriggerCollectionName, dataList, SimTriggerData.class, 0);
- }
-
- /**
- * Constructs simulated triggers in the same manner as the hardware.
- * Method can accept either <code>Cluster</code> objects, any object
- * that is a subclass of <code>Cluster</code>, or objects of type
- * <code>SSPCluster</code>.
- * @param clusters - A <code>List</code> collection of the cluster
- * objects from which triggers are to be derived.
- * @param clusterType - The class of the cluster objects from which
- * triggers are to be derived. This can be <code>Cluster</code>,
- * <code>SSPCluster</code>, or a subclass thereof.
- * @return Returns a <code>SimTriggerModule</code> object containing
- * the simulated trigger results.
- * @throws IllegalArgumentException Occurs if the class of the
- * cluster objects is not of a supported type.
- *
- */
- private <E> SimTriggerModule<E> constructTriggers(List<E> clusters, Class<E> clusterType) throws IllegalArgumentException {
- // Verify that the cluster type is supported.
- if(!clusterType.equals(Cluster.class) && !clusterType.equals(SSPCluster.class)) {
- throw new IllegalArgumentException("Class \"" + clusterType.getSimpleName() + "\" is not a supported cluster type.");
- }
-
- // Store the singles and pair triggers.
- List<List<PairTrigger<E[]>>> pairTriggers = new ArrayList<List<PairTrigger<E[]>>>(2);
- pairTriggers.add(new ArrayList<PairTrigger<E[]>>());
- pairTriggers.add(new ArrayList<PairTrigger<E[]>>());
- List<List<SinglesTrigger<E>>> singlesTriggers = new ArrayList<List<SinglesTrigger<E>>>(2);
- singlesTriggers.add(new ArrayList<SinglesTrigger<E>>());
- singlesTriggers.add(new ArrayList<SinglesTrigger<E>>());
-
- // Run the clusters through the singles trigger to determine
- // whether or not they pass it.
- for(E cluster : clusters) {
- // Simulate each of the cluster singles triggers.
- triggerLoop:
- for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
- // Track whether the cluster passed each singles cut.
- boolean passSeedLow = true;
- boolean passSeedHigh = true;
- boolean passClusterLow = false;
- boolean passClusterHigh = false;
- boolean passHitCount = false;
-
- // Perform the trigger cuts appropriately for the type
- // of cluster.
- if(cluster instanceof Cluster) {
- // Cast the cluster to the appropriate type.
- Cluster c = (Cluster) cluster;
-
- // Perform each trigger cut.
- passClusterLow = singlesTrigger[triggerNum].clusterTotalEnergyCutLow(c);
- passClusterHigh = singlesTrigger[triggerNum].clusterTotalEnergyCutHigh(c);
- passHitCount = singlesTrigger[triggerNum].clusterHitCountCut(c);
- } else if(cluster instanceof SSPCluster) {
- // Cast the cluster to the appropriate type.
- SSPCluster c = (SSPCluster) cluster;
-
- // Perform each trigger cut.
- passClusterLow = singlesTrigger[triggerNum].clusterTotalEnergyCutLow(c);
- passClusterHigh = singlesTrigger[triggerNum].clusterTotalEnergyCutHigh(c);
- passHitCount = singlesTrigger[triggerNum].clusterHitCountCut(c);
- }
-
- // Make a trigger to store the results.
- SinglesTrigger<E> trigger = new SinglesTrigger<E>(cluster, triggerNum);
- trigger.setStateSeedEnergyLow(passSeedLow);
- trigger.setStateSeedEnergyHigh(passSeedHigh);
- trigger.setStateClusterEnergyLow(passClusterLow);
- trigger.setStateClusterEnergyHigh(passClusterHigh);
- trigger.setStateHitCount(passHitCount);
-
- // A trigger will only be reported by the SSP if it
- // passes all of the enabled cuts for that trigger.
- // Check whether this trigger meets these conditions.
- if(singlesCutsEnabled[triggerNum][ENERGY_MIN] && !trigger.getStateClusterEnergyLow()) {
- continue triggerLoop;
- } if(singlesCutsEnabled[triggerNum][ENERGY_MAX] && !trigger.getStateClusterEnergyHigh()) {
- continue triggerLoop;
- } if(singlesCutsEnabled[triggerNum][HIT_COUNT] && !trigger.getStateHitCount()) {
- continue triggerLoop;
- }
-
- // Store the trigger.
- singlesTriggers.get(triggerNum).add(trigger);
- }
- }
-
- // Store cluster pairs.
- List<E[]> pairs = TriggerModule.getTopBottomPairs(clusters, clusterType);
-
- // Simulate the pair triggers and record the results.
- for(E[] pair : pairs) {
- // Simulate each of the cluster pair triggers.
- pairTriggerLoop:
- for(int triggerIndex = 0; triggerIndex < 2; triggerIndex++) {
- // Track whether the cluster passed each singles cut.
- boolean passSeedLow = true;
- boolean passSeedHigh = true;
- boolean passClusterLow = false;
- boolean passClusterHigh = false;
- boolean passHitCount = false;
- boolean passPairEnergySumLow = false;
- boolean passPairEnergySumHigh = false;
- boolean passPairEnergyDifference = false;
- boolean passPairEnergySlope = false;
- boolean passPairCoplanarity = false;
- boolean passTimeCoincidence = false;
-
- // Apply the trigger cuts appropriately according to the
- // cluster type.
- if(clusterType.equals(Cluster.class)) {
- // Cast the cluster object.
- Cluster[] reconPair = { (Cluster) pair[0], (Cluster) pair[1] };
-
- // Check that the pair passes the time coincidence cut.
- // If it does not, it is not a valid pair and should be
- // destroyed.
- if(!pairsTrigger[triggerIndex].pairTimeCoincidenceCut(reconPair)) {
- continue pairTriggerLoop;
- }
-
- passClusterLow = pairsTrigger[triggerIndex].clusterTotalEnergyCutLow(reconPair[0])
- && pairsTrigger[triggerIndex].clusterTotalEnergyCutLow(reconPair[1]);
- passClusterHigh = pairsTrigger[triggerIndex].clusterTotalEnergyCutHigh(reconPair[0])
- && pairsTrigger[triggerIndex].clusterTotalEnergyCutHigh(reconPair[1]);
- passHitCount = pairsTrigger[triggerIndex].clusterHitCountCut(reconPair[0])
- && pairsTrigger[triggerIndex].clusterHitCountCut(reconPair[1]);
- passPairEnergySumLow = pairsTrigger[triggerIndex].pairEnergySumCutLow(reconPair);
- passPairEnergySumHigh = pairsTrigger[triggerIndex].pairEnergySumCutHigh(reconPair);
- passPairEnergyDifference = pairsTrigger[triggerIndex].pairEnergyDifferenceCut(reconPair);
- passPairEnergySlope = pairsTrigger[triggerIndex].pairEnergySlopeCut(reconPair);
- passPairCoplanarity = pairsTrigger[triggerIndex].pairCoplanarityCut(reconPair);
- passTimeCoincidence = pairsTrigger[triggerIndex].pairTimeCoincidenceCut(reconPair);
- } else if(clusterType.equals(SSPCluster.class)) {
- // Cast the cluster object.
- SSPCluster[] sspPair = { (SSPCluster) pair[0], (SSPCluster) pair[1] };
-
- // Check that the pair passes the time coincidence cut.
- // If it does not, it is not a valid pair and should be
- // destroyed.
- if(!pairsTrigger[triggerIndex].pairTimeCoincidenceCut(sspPair)) {
- continue pairTriggerLoop;
- }
-
- // Perform each trigger cut.
- passClusterLow = pairsTrigger[triggerIndex].clusterTotalEnergyCutLow(sspPair[0])
- && pairsTrigger[triggerIndex].clusterTotalEnergyCutLow(sspPair[1]);
- passClusterHigh = pairsTrigger[triggerIndex].clusterTotalEnergyCutHigh(sspPair[0])
- && pairsTrigger[triggerIndex].clusterTotalEnergyCutHigh(sspPair[1]);
- passHitCount = pairsTrigger[triggerIndex].clusterHitCountCut(sspPair[0])
- && pairsTrigger[triggerIndex].clusterHitCountCut(sspPair[1]);
- passPairEnergySumLow = pairsTrigger[triggerIndex].pairEnergySumCutLow(sspPair);
- passPairEnergySumHigh = pairsTrigger[triggerIndex].pairEnergySumCutHigh(sspPair);
- passPairEnergyDifference = pairsTrigger[triggerIndex].pairEnergyDifferenceCut(sspPair);
- passPairEnergySlope = pairsTrigger[triggerIndex].pairEnergySlopeCut(sspPair);
- passPairCoplanarity = pairsTrigger[triggerIndex].pairCoplanarityCut(sspPair);
- passTimeCoincidence = pairsTrigger[triggerIndex].pairTimeCoincidenceCut(sspPair);
- }
-
- // Create a trigger from the results.
- PairTrigger<E[]> trigger = new PairTrigger<E[]>(pair, triggerIndex);
- trigger.setStateSeedEnergyLow(passSeedLow);
- trigger.setStateSeedEnergyHigh(passSeedHigh);
- trigger.setStateClusterEnergyLow(passClusterLow);
- trigger.setStateClusterEnergyHigh(passClusterHigh);
- trigger.setStateHitCount(passHitCount);
- trigger.setStateEnergySumLow(passPairEnergySumLow);
- trigger.setStateEnergySumHigh(passPairEnergySumHigh);
- trigger.setStateEnergyDifference(passPairEnergyDifference);
- trigger.setStateEnergySlope(passPairEnergySlope);
- trigger.setStateCoplanarity(passPairCoplanarity);
- trigger.setStateTimeCoincidence(passTimeCoincidence);
-
- // A trigger will only be reported by the SSP if it
- // passes all of the enabled cuts for that trigger.
- // Check whether this trigger meets these conditions.
- if(pairCutsEnabled[triggerIndex][ENERGY_MIN] && !trigger.getStateClusterEnergyLow()) {
- continue pairTriggerLoop;
- } if(pairCutsEnabled[triggerIndex][ENERGY_MAX] && !trigger.getStateClusterEnergyHigh()) {
- continue pairTriggerLoop;
- } if(pairCutsEnabled[triggerIndex][HIT_COUNT] && !trigger.getStateHitCount()) {
- continue pairTriggerLoop;
- } if(pairCutsEnabled[triggerIndex][3 + ENERGY_SUM] && !trigger.getStateEnergySum()) {
- continue pairTriggerLoop;
- } if(pairCutsEnabled[triggerIndex][3 + ENERGY_DIFF] && !trigger.getStateEnergyDifference()) {
- continue pairTriggerLoop;
- } if(pairCutsEnabled[triggerIndex][3 + ENERGY_SLOPE] && !trigger.getStateEnergySlope()) {
- continue pairTriggerLoop;
- } if(pairCutsEnabled[triggerIndex][3 + COPLANARITY] && !trigger.getStateCoplanarity()) {
- continue pairTriggerLoop;
- }
-
- // Add the trigger to the list.
- pairTriggers.get(triggerIndex).add(trigger);
- }
- }
-
- // Create a new simulated trigger module to contain the results.
- return new SimTriggerModule<E>(singlesTriggers.get(0), singlesTriggers.get(1),
- pairTriggers.get(0), pairTriggers.get(1));
- }
-
- /**
- * Sets the name of the LCIO collection containing the TI and SSP
- * banks.
- * @param bankCollectionName - The bank collection name.
- */
- public void setBankCollectionName(String bankCollectionName) {
- this.bankCollectionName = bankCollectionName;
- }
-
- /**
- * Sets the name of the LCIO collection containing the simulated
- * reconstructed clusters.
- * @param clusterCollectionName - The cluster collection name.
- */
- public void setClusterCollectionName(String clusterCollectionName) {
- this.clusterCollectionName = clusterCollectionName;
- }
-
- /**
- * Sets whether or not triggers should be formed using all clusters,
- * or only those that where the integration window for the cluster
- * is completely within the bounds of the event window.
- * @param state - <code>true</code> means that only clusters where
- * the entire cluster integration window is within the event time
- * window will be used, while <code>false</code> means that all
- * clusters will be used.
- */
- public void setFilterUnverifiableClusters(boolean state) {
- this.filterUnverifiable = state;
- }
-
- /**
- * Sets the name of the LCIO collection containing simulated triggers.
- * @param triggerCollection - The trigger collection name.
- */
- public void setTriggerCollectionName(String triggerCollection) {
- this.simTriggerCollectionName = triggerCollection;
- }
+ // Store the LCIO collection names for the needed objects.
+ private boolean filterUnverifiable = false;
+ private String bankCollectionName = "TriggerBank";
+ private String clusterCollectionName = "EcalClusters";
+ private String simTriggerCollectionName = "SimTriggers";
+
+ // Store the SSP bank.
+ private SSPData sspBank = null;
+
+ // Store cluster verifiability parameters.
+ private int nsa = 0;
+ private int nsb = 0;
+ private int windowWidth = 0;
+
+ // Define trigger simulation modules.
+ private boolean[] pairTriggerEnabled = new boolean[2];
+ private boolean[] singlesTriggerEnabled = new boolean[2];
+ private boolean[][] pairCutsEnabled = new boolean[2][7];
+ private boolean[][] singlesCutsEnabled = new boolean[2][3];
+ private TriggerModule[] pairsTrigger = new TriggerModule[2];
+ private TriggerModule[] singlesTrigger = new TriggerModule[2];
+
+ // Reference variables.
+ private static final int ENERGY_MIN = TriggerDiagnosticUtil.SINGLES_ENERGY_MIN;
+ private static final int ENERGY_MAX = TriggerDiagnosticUtil.SINGLES_ENERGY_MAX;
+ private static final int HIT_COUNT = TriggerDiagnosticUtil.SINGLES_HIT_COUNT;
+ private static final int ENERGY_SUM = TriggerDiagnosticUtil.PAIR_ENERGY_SUM;
+ private static final int ENERGY_DIFF = TriggerDiagnosticUtil.PAIR_ENERGY_DIFF;
+ private static final int ENERGY_SLOPE = TriggerDiagnosticUtil.PAIR_ENERGY_SLOPE;
+ private static final int COPLANARITY = TriggerDiagnosticUtil.PAIR_COPLANARITY;
+
+ /**
+ * Connects the driver to the the <code>ConfigurationManager</code>
+ * in order to obtain the correct trigger information. Trigger
+ * settings are stored in the <code>TriggerModule</code> objects.
+ */
+ @Override
+ public void startOfData() {
+ // Define the first singles trigger.
+ singlesTrigger[0] = new TriggerModule();
+ singlesTrigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.500);
+ singlesTrigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 8.191);
+ singlesTrigger[0].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 0);
+
+ // Define the second singles trigger.
+ singlesTrigger[1] = new TriggerModule();
+ singlesTrigger[1].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.000);
+ singlesTrigger[1].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 8.191);
+ singlesTrigger[1].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 0);
+
+ // Define the first pairs trigger.
+ pairsTrigger[0] = new TriggerModule();
+ pairsTrigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.000);
+ pairsTrigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 8.191);
+ pairsTrigger[0].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 0);
+ pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.000);
+ pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 8.191);
+ pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 8.191);
+ pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.000);
+ pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F, 0.001);
+ pairsTrigger[0].setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 180);
+ pairsTrigger[0].setCutValue(TriggerModule.PAIR_TIME_COINCIDENCE, 8);
+
+ // Define the second pairs trigger.
+ pairsTrigger[1] = new TriggerModule();
+ pairsTrigger[1].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.000);
+ pairsTrigger[1].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 8.191);
+ pairsTrigger[1].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 0);
+ pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.000);
+ pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 8.191);
+ pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 8.191);
+ pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.000);
+ pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F, 0.001);
+ pairsTrigger[1].setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 180);
+ pairsTrigger[1].setCutValue(TriggerModule.PAIR_TIME_COINCIDENCE, 8);
+
+ // Listen for the configuration manager to provide the real
+ // trigger settings.
+ ConfigurationManager.addActionListener(new ActionListener() {
+ @Override
+ public void actionPerformed(ActionEvent e) {
+ // Get the DAQ configuration.
+ DAQConfig daq = ConfigurationManager.getInstance();
+
+ // Get cluster verifiability parameters.
+ nsa = daq.getFADCConfig().getNSA();
+ nsb = daq.getFADCConfig().getNSB();
+ windowWidth = daq.getFADCConfig().getWindowWidth();
+
+ // Load the DAQ settings from the configuration manager.
+ singlesTrigger[0].loadDAQConfiguration(daq.getSSPConfig().getSingles1Config());
+ singlesTrigger[1].loadDAQConfiguration(daq.getSSPConfig().getSingles2Config());
+ pairsTrigger[0].loadDAQConfiguration(daq.getSSPConfig().getPair1Config());
+ pairsTrigger[1].loadDAQConfiguration(daq.getSSPConfig().getPair2Config());
+
+ // Get the trigger configurations from the DAQ.
+ SinglesTriggerConfig[] singles = { daq.getSSPConfig().getSingles1Config(),
+ daq.getSSPConfig().getSingles2Config() };
+ PairTriggerConfig[] pairs = { daq.getSSPConfig().getPair1Config(),
+ daq.getSSPConfig().getPair2Config() };
+
+ // Update the enabled/disabled statuses.
+ for(int i = 0; i < 2; i++) {
+ // Set the trigger enabled status.
+ pairTriggerEnabled[i] = pairs[i].isEnabled();
+ singlesTriggerEnabled[i] = singles[i].isEnabled();
+
+ // Set the singles cut statuses.
+ singlesCutsEnabled[i][ENERGY_MIN] = singles[i].getEnergyMinCutConfig().isEnabled();
+ singlesCutsEnabled[i][ENERGY_MAX] = singles[i].getEnergyMaxCutConfig().isEnabled();
+ singlesCutsEnabled[i][HIT_COUNT] = singles[i].getHitCountCutConfig().isEnabled();
+
+ // Set the pair cut statuses.
+ pairCutsEnabled[i][ENERGY_MIN] = pairs[i].getEnergyMinCutConfig().isEnabled();
+ pairCutsEnabled[i][ENERGY_MAX] = pairs[i].getEnergyMaxCutConfig().isEnabled();
+ pairCutsEnabled[i][HIT_COUNT] = pairs[i].getHitCountCutConfig().isEnabled();
+ pairCutsEnabled[i][3 + ENERGY_SUM] = pairs[i].getEnergySumCutConfig().isEnabled();
+ pairCutsEnabled[i][3 + ENERGY_DIFF] = pairs[i].getEnergyDifferenceCutConfig().isEnabled();
+ pairCutsEnabled[i][3 + ENERGY_SLOPE] = pairs[i].getEnergySlopeCutConfig().isEnabled();
+ pairCutsEnabled[i][3 + COPLANARITY] = pairs[i].getCoplanarityCutConfig().isEnabled();
+ }
+ }
+ });
+ }
+
+ /**
+ * Processes an LCIO event and simulates triggers in the same manner
+ * as the hardware for both <code>SSPCluster</code> objects as well
+ * as <code>Cluster</code> objects reconstructed from FADC hits.
+ * Triggers are then output to the data stream.
+ * @param event - The <code>EventHeader</code> object representing
+ * the current LCIO event.
+ */
+ @Override
+ public void process(EventHeader event) {
+ // If the DAQ configuration manager has not been initialized,
+ // then no action can be performed.
+ if(!ConfigurationManager.isInitialized()) {
+ // Put an empty trigger results module into the data stream.
+ SimTriggerData triggerData = new SimTriggerData();
+ List<SimTriggerData> dataList = new ArrayList<SimTriggerData>(1);
+ dataList.add(triggerData);
+ event.put(simTriggerCollectionName, dataList, SimTriggerData.class, 0);
+
+ // Nothing further can be done, since trigger settings are
+ // not yet defined.
+ return;
+ }
+
+ // Get the SSP bank.
+ if(event.hasCollection(GenericObject.class, bankCollectionName)) {
+ // Get the bank list.
+ List<GenericObject> bankList = event.get(GenericObject.class, bankCollectionName);
+
+ // Search through the banks and get the SSP and TI banks.
+ for(GenericObject obj : bankList) {
+ // If this is an SSP bank, parse it.
+ if(AbstractIntData.getTag(obj) == SSPData.BANK_TAG) {
+ sspBank = new SSPData(obj);
+ }
+ }
+ }
+
+ // Get a list of SSPClusters.
+ List<SSPCluster> sspClusters = null;
+ if(sspBank != null) { sspClusters = sspBank.getClusters(); }
+ else { sspClusters = new ArrayList<SSPCluster>(0); }
+
+ // Get reconstructed clusters.
+ List<Cluster> reconClusters = null;
+ if(event.hasCollection(Cluster.class, clusterCollectionName)) {
+ reconClusters = event.get(Cluster.class, clusterCollectionName);
+ }
+ else { reconClusters = new ArrayList<Cluster>(0); }
+
+ // If only "verifiable" clusters should be used, test all the
+ // reconstructed clusters for verifiability.
+ if(filterUnverifiable) {
+ // Create a list to store the verifiable clusters.
+ List<Cluster> goodClusters = new ArrayList<Cluster>();
+
+ // Iterate over all the clusters and test them to see if
+ // they are verifiable.
+ for(Cluster cluster : reconClusters) {
+ if(TriggerDiagnosticUtil.isVerifiable(cluster, nsa, nsb, windowWidth)) {
+ goodClusters.add(cluster);
+ }
+ }
+
+ // Replace the old cluster list with the new one.
+ reconClusters = goodClusters;
+ }
+
+ // Generate simulated triggers.
+ SimTriggerModule<Cluster> reconModule = constructTriggers(reconClusters, Cluster.class);
+ SimTriggerModule<SSPCluster> sspModule = constructTriggers(sspClusters, SSPCluster.class);
+
+ // Insert the trigger results in the data stream.
+ SimTriggerData triggerData = new SimTriggerData(reconModule, sspModule);
+ List<SimTriggerData> dataList = new ArrayList<SimTriggerData>(1);
+ dataList.add(triggerData);
+ event.put(simTriggerCollectionName, dataList, SimTriggerData.class, 0);
+ }
+
+ /**
+ * Constructs simulated triggers in the same manner as the hardware.
+ * Method can accept either <code>Cluster</code> objects, any object
+ * that is a subclass of <code>Cluster</code>, or objects of type
+ * <code>SSPCluster</code>.
+ * @param clusters - A <code>List</code> collection of the cluster
+ * objects from which triggers are to be derived.
+ * @param clusterType - The class of the cluster objects from which
+ * triggers are to be derived. This can be <code>Cluster</code>,
+ * <code>SSPCluster</code>, or a subclass thereof.
+ * @return Returns a <code>SimTriggerModule</code> object containing
+ * the simulated trigger results.
+ * @throws IllegalArgumentException Occurs if the class of the
+ * cluster objects is not of a supported type.
+ *
+ */
+ private <E> SimTriggerModule<E> constructTriggers(List<E> clusters, Class<E> clusterType) throws IllegalArgumentException {
+ // Verify that the cluster type is supported.
+ if(!clusterType.equals(Cluster.class) && !clusterType.equals(SSPCluster.class)) {
+ throw new IllegalArgumentException("Class \"" + clusterType.getSimpleName() + "\" is not a supported cluster type.");
+ }
+
+ // Store the singles and pair triggers.
+ List<List<PairTrigger<E[]>>> pairTriggers = new ArrayList<List<PairTrigger<E[]>>>(2);
+ pairTriggers.add(new ArrayList<PairTrigger<E[]>>());
+ pairTriggers.add(new ArrayList<PairTrigger<E[]>>());
+ List<List<SinglesTrigger<E>>> singlesTriggers = new ArrayList<List<SinglesTrigger<E>>>(2);
+ singlesTriggers.add(new ArrayList<SinglesTrigger<E>>());
+ singlesTriggers.add(new ArrayList<SinglesTrigger<E>>());
+
+ // Run the clusters through the singles trigger to determine
+ // whether or not they pass it.
+ for(E cluster : clusters) {
+ // Simulate each of the cluster singles triggers.
+ triggerLoop:
+ for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
+ // Track whether the cluster passed each singles cut.
+ boolean passSeedLow = true;
+ boolean passSeedHigh = true;
+ boolean passClusterLow = false;
+ boolean passClusterHigh = false;
+ boolean passHitCount = false;
+
+ // Perform the trigger cuts appropriately for the type
+ // of cluster.
+ if(cluster instanceof Cluster) {
+ // Cast the cluster to the appropriate type.
+ Cluster c = (Cluster) cluster;
+
+ // Perform each trigger cut.
+ passClusterLow = singlesTrigger[triggerNum].clusterTotalEnergyCutLow(c);
+ passClusterHigh = singlesTrigger[triggerNum].clusterTotalEnergyCutHigh(c);
+ passHitCount = singlesTrigger[triggerNum].clusterHitCountCut(c);
+ } else if(cluster instanceof SSPCluster) {
+ // Cast the cluster to the appropriate type.
+ SSPCluster c = (SSPCluster) cluster;
+
+ // Perform each trigger cut.
+ passClusterLow = singlesTrigger[triggerNum].clusterTotalEnergyCutLow(c);
+ passClusterHigh = singlesTrigger[triggerNum].clusterTotalEnergyCutHigh(c);
+ passHitCount = singlesTrigger[triggerNum].clusterHitCountCut(c);
+ }
+
+ // Make a trigger to store the results.
+ SinglesTrigger<E> trigger = new SinglesTrigger<E>(cluster, triggerNum);
+ trigger.setStateSeedEnergyLow(passSeedLow);
+ trigger.setStateSeedEnergyHigh(passSeedHigh);
+ trigger.setStateClusterEnergyLow(passClusterLow);
+ trigger.setStateClusterEnergyHigh(passClusterHigh);
+ trigger.setStateHitCount(passHitCount);
+
+ // A trigger will only be reported by the SSP if it
+ // passes all of the enabled cuts for that trigger.
+ // Check whether this trigger meets these conditions.
+ if(singlesCutsEnabled[triggerNum][ENERGY_MIN] && !trigger.getStateClusterEnergyLow()) {
+ continue triggerLoop;
+ } if(singlesCutsEnabled[triggerNum][ENERGY_MAX] && !trigger.getStateClusterEnergyHigh()) {
+ continue triggerLoop;
+ } if(singlesCutsEnabled[triggerNum][HIT_COUNT] && !trigger.getStateHitCount()) {
+ continue triggerLoop;
+ }
+
+ // Store the trigger.
+ singlesTriggers.get(triggerNum).add(trigger);
+ }
+ }
+
+ // Store cluster pairs.
+ List<E[]> pairs = TriggerModule.getTopBottomPairs(clusters, clusterType);
+
+ // Simulate the pair triggers and record the results.
+ for(E[] pair : pairs) {
+ // Simulate each of the cluster pair triggers.
+ pairTriggerLoop:
+ for(int triggerIndex = 0; triggerIndex < 2; triggerIndex++) {
+ // Track whether the cluster pair passed each trigger cut.
+ boolean passSeedLow = true;
+ boolean passSeedHigh = true;
+ boolean passClusterLow = false;
+ boolean passClusterHigh = false;
+ boolean passHitCount = false;
+ boolean passPairEnergySumLow = false;
+ boolean passPairEnergySumHigh = false;
+ boolean passPairEnergyDifference = false;
+ boolean passPairEnergySlope = false;
+ boolean passPairCoplanarity = false;
+ boolean passTimeCoincidence = false;
+
+ // Apply the trigger cuts appropriately according to the
+ // cluster type.
+ if(clusterType.equals(Cluster.class)) {
+ // Cast the cluster object.
+ Cluster[] reconPair = { (Cluster) pair[0], (Cluster) pair[1] };
+
+ // Check that the pair passes the time coincidence cut.
+ // If it does not, it is not a valid pair and should be
+ // destroyed.
+ if(!pairsTrigger[triggerIndex].pairTimeCoincidenceCut(reconPair)) {
+ continue pairTriggerLoop;
+ }
+
+ passClusterLow = pairsTrigger[triggerIndex].clusterTotalEnergyCutLow(reconPair[0])
+ && pairsTrigger[triggerIndex].clusterTotalEnergyCutLow(reconPair[1]);
+ passClusterHigh = pairsTrigger[triggerIndex].clusterTotalEnergyCutHigh(reconPair[0])
+ && pairsTrigger[triggerIndex].clusterTotalEnergyCutHigh(reconPair[1]);
+ passHitCount = pairsTrigger[triggerIndex].clusterHitCountCut(reconPair[0])
+ && pairsTrigger[triggerIndex].clusterHitCountCut(reconPair[1]);
+ passPairEnergySumLow = pairsTrigger[triggerIndex].pairEnergySumCutLow(reconPair);
+ passPairEnergySumHigh = pairsTrigger[triggerIndex].pairEnergySumCutHigh(reconPair);
+ passPairEnergyDifference = pairsTrigger[triggerIndex].pairEnergyDifferenceCut(reconPair);
+ passPairEnergySlope = pairsTrigger[triggerIndex].pairEnergySlopeCut(reconPair);
+ passPairCoplanarity = pairsTrigger[triggerIndex].pairCoplanarityCut(reconPair);
+ passTimeCoincidence = pairsTrigger[triggerIndex].pairTimeCoincidenceCut(reconPair);
+ } else if(clusterType.equals(SSPCluster.class)) {
+ // Cast the cluster object.
+ SSPCluster[] sspPair = { (SSPCluster) pair[0], (SSPCluster) pair[1] };
+
+ // Check that the pair passes the time coincidence cut.
+ // If it does not, it is not a valid pair and should be
+ // destroyed.
+ if(!pairsTrigger[triggerIndex].pairTimeCoincidenceCut(sspPair)) {
+ continue pairTriggerLoop;
+ }
+
+ // Perform each trigger cut.
+ passClusterLow = pairsTrigger[triggerIndex].clusterTotalEnergyCutLow(sspPair[0])
+ && pairsTrigger[triggerIndex].clusterTotalEnergyCutLow(sspPair[1]);
+ passClusterHigh = pairsTrigger[triggerIndex].clusterTotalEnergyCutHigh(sspPair[0])
+ && pairsTrigger[triggerIndex].clusterTotalEnergyCutHigh(sspPair[1]);
+ passHitCount = pairsTrigger[triggerIndex].clusterHitCountCut(sspPair[0])
+ && pairsTrigger[triggerIndex].clusterHitCountCut(sspPair[1]);
+ passPairEnergySumLow = pairsTrigger[triggerIndex].pairEnergySumCutLow(sspPair);
+ passPairEnergySumHigh = pairsTrigger[triggerIndex].pairEnergySumCutHigh(sspPair);
+ passPairEnergyDifference = pairsTrigger[triggerIndex].pairEnergyDifferenceCut(sspPair);
+ passPairEnergySlope = pairsTrigger[triggerIndex].pairEnergySlopeCut(sspPair);
+ passPairCoplanarity = pairsTrigger[triggerIndex].pairCoplanarityCut(sspPair);
+ passTimeCoincidence = pairsTrigger[triggerIndex].pairTimeCoincidenceCut(sspPair);
+ }
+
+ // Create a trigger from the results.
+ PairTrigger<E[]> trigger = new PairTrigger<E[]>(pair, triggerIndex);
+ trigger.setStateSeedEnergyLow(passSeedLow);
+ trigger.setStateSeedEnergyHigh(passSeedHigh);
+ trigger.setStateClusterEnergyLow(passClusterLow);
+ trigger.setStateClusterEnergyHigh(passClusterHigh);
+ trigger.setStateHitCount(passHitCount);
+ trigger.setStateEnergySumLow(passPairEnergySumLow);
+ trigger.setStateEnergySumHigh(passPairEnergySumHigh);
+ trigger.setStateEnergyDifference(passPairEnergyDifference);
+ trigger.setStateEnergySlope(passPairEnergySlope);
+ trigger.setStateCoplanarity(passPairCoplanarity);
+ trigger.setStateTimeCoincidence(passTimeCoincidence);
+
+ // A trigger will only be reported by the SSP if it
+ // passes all of the enabled cuts for that trigger.
+ // Check whether this trigger meets these conditions.
+ if(pairCutsEnabled[triggerIndex][ENERGY_MIN] && !trigger.getStateClusterEnergyLow()) {
+ continue pairTriggerLoop;
+ } if(pairCutsEnabled[triggerIndex][ENERGY_MAX] && !trigger.getStateClusterEnergyHigh()) {
+ continue pairTriggerLoop;
+ } if(pairCutsEnabled[triggerIndex][HIT_COUNT] && !trigger.getStateHitCount()) {
+ continue pairTriggerLoop;
+ } if(pairCutsEnabled[triggerIndex][3 + ENERGY_SUM] && !trigger.getStateEnergySum()) {
+ continue pairTriggerLoop;
+ } if(pairCutsEnabled[triggerIndex][3 + ENERGY_DIFF] && !trigger.getStateEnergyDifference()) {
+ continue pairTriggerLoop;
+ } if(pairCutsEnabled[triggerIndex][3 + ENERGY_SLOPE] && !trigger.getStateEnergySlope()) {
+ continue pairTriggerLoop;
+ } if(pairCutsEnabled[triggerIndex][3 + COPLANARITY] && !trigger.getStateCoplanarity()) {
+ continue pairTriggerLoop;
+ }
+
+ // Add the trigger to the list.
+ pairTriggers.get(triggerIndex).add(trigger);
+ }
+ }
+
+ // Create a new simulated trigger module to contain the results.
+ return new SimTriggerModule<E>(singlesTriggers.get(0), singlesTriggers.get(1),
+ pairTriggers.get(0), pairTriggers.get(1));
+ }
+
+ /**
+ * Sets the name of the LCIO collection containing the TI and SSP
+ * banks.
+ * @param bankCollectionName - The bank collection name.
+ */
+ public void setBankCollectionName(String bankCollectionName) {
+ this.bankCollectionName = bankCollectionName;
+ }
+
+ /**
+ * Sets the name of the LCIO collection containing the simulated
+ * reconstructed clusters.
+ * @param clusterCollectionName - The cluster collection name.
+ */
+ public void setClusterCollectionName(String clusterCollectionName) {
+ this.clusterCollectionName = clusterCollectionName;
+ }
+
+ /**
+ * Sets whether triggers should be formed using all clusters, or
+ * only those where the cluster's integration window is completely
+ * within the bounds of the event window.
+ * @param state - <code>true</code> means that only clusters where
+ * the entire cluster integration window is within the event time
+ * window will be used, while <code>false</code> means that all
+ * clusters will be used.
+ */
+ public void setFilterUnverifiableClusters(boolean state) {
+ this.filterUnverifiable = state;
+ }
+
+ /**
+ * Sets the name of the LCIO collection containing simulated triggers.
+ * @param triggerCollection - The trigger collection name.
+ */
+ public void setTriggerCollectionName(String triggerCollection) {
+ this.simTriggerCollectionName = triggerCollection;
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/SimTriggerData.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/SimTriggerData.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/SimTriggerData.java Wed Apr 27 11:11:32 2016
@@ -7,53 +7,76 @@
* Class <code>SimTriggerData</code> is a container class that holds
* simulated trigger data modules. It is intended to be placed in the
* LCIO data stream by the <code>DataTriggerSimDriver</code> to allow
- * other classes to access triggers simulated from SSP and reconstructed
+ * other classes to access triggers simulated from hardware and software
* cluster data.
*
* @author Kyle McCarty <[log in to unmask]>
*/
public class SimTriggerData {
- private final SimTriggerModule<Cluster> reconTriggers;
- private final SimTriggerModule<SSPCluster> sspTriggers;
-
- /**
- * Instantiates a new <code>SimTriggerData</code> object with empty
- * trigger results modules.
- */
- SimTriggerData() {
- reconTriggers = new SimTriggerModule<Cluster>();
- sspTriggers = new SimTriggerModule<SSPCluster>();
- }
-
- /**
- * Instantiates a new <code>SimTriggerData</code> object that will
- * contain the argument trigger modules.
- * @param reconTriggers - The simulated reconstructed cluster
- * triggers module.
- * @param sspTriggers - The simulated SSP cluster triggers module.
- */
- SimTriggerData(SimTriggerModule<Cluster> reconTriggers, SimTriggerModule<SSPCluster> sspTriggers) {
- this.reconTriggers = reconTriggers;
- this.sspTriggers = sspTriggers;
- }
-
- /**
- * Gets the module containing all simulated SSP trigger data for
- * each of the four primary triggers.
- * @return Returns the trigger data in a <code>SimTriggerModule</code>
- * object.
- */
- public SimTriggerModule<SSPCluster> getSimSSPTriggers() {
- return sspTriggers;
- }
-
- /**
- * Gets the module containing all simulated LCSim trigger data for
- * each of the four primary triggers.
- * @return Returns the trigger data in a <code>SimTriggerModule</code>
- * object.
- */
- public SimTriggerModule<Cluster> getSimReconTriggers() {
- return reconTriggers;
- }
+ private final SimTriggerModule<Cluster> softwareClusterTriggers;
+ private final SimTriggerModule<SSPCluster> hardwareClusterTriggers;
+
+ /**
+ * Instantiates a new <code>SimTriggerData</code> object with empty
+ * trigger results modules.
+ */
+ SimTriggerData() {
+ softwareClusterTriggers = new SimTriggerModule<Cluster>();
+ hardwareClusterTriggers = new SimTriggerModule<SSPCluster>();
+ }
+
+ /**
+ * Instantiates a new <code>SimTriggerData</code> object that will
+ * contain the argument trigger modules.
+ * @param softwareClusterTriggers - The module containing triggers
+ * simulated from software simulated clusters.
+ * @param hardwareClusterTriggers - The module containing triggers
+ * simulated from hardware reported clusters.
+ */
+ SimTriggerData(SimTriggerModule<Cluster> softwareClusterTriggers, SimTriggerModule<SSPCluster> hardwareClusterTriggers) {
+ this.softwareClusterTriggers = softwareClusterTriggers;
+ this.hardwareClusterTriggers = hardwareClusterTriggers;
+ }
+
+ /**
+ * Gets the module containing all simulated SSP trigger data for
+ * each of the four primary triggers.
+ * @return Returns the trigger data in a <code>SimTriggerModule</code>
+ * object.
+ */
+ @Deprecated
+ public SimTriggerModule<SSPCluster> getSimSSPTriggers() {
+ return hardwareClusterTriggers;
+ }
+
+ /**
+ * Gets the module containing all simulated LCSim trigger data for
+ * each of the four primary triggers.
+ * @return Returns the trigger data in a <code>SimTriggerModule</code>
+ * object.
+ */
+ @Deprecated
+ public SimTriggerModule<Cluster> getSimReconTriggers() {
+ return softwareClusterTriggers;
+ }
+
+ /**
+ * Gets the module containing all triggers simulated from hardware
+ * reported clusters for each of the four production triggers.
+ * @return Returns the trigger data in a <code>SimTriggerModule</code>
+ * object.
+ */
+ public SimTriggerModule<SSPCluster> getSimHardwareClusterTriggers() {
+ return hardwareClusterTriggers;
+ }
+
+ /**
+ * Gets the module containing all triggers simulated from software
+ * simulated clusters for each of the four production triggers.
+ * @return Returns the trigger data in a <code>SimTriggerModule</code>
+ * object.
+ */
+ public SimTriggerModule<Cluster> getSimSoftwareClusterTriggers() {
+ return softwareClusterTriggers;
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/SimTriggerModule.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/SimTriggerModule.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/SimTriggerModule.java Wed Apr 27 11:11:32 2016
@@ -18,71 +18,115 @@
* @see DataTriggerSimDriver
*/
public class SimTriggerModule<E> {
- private final List<SinglesTrigger<E>> singles0;
- private final List<SinglesTrigger<E>> singles1;
- private final List<PairTrigger<E[]>> pair0;
- private final List<PairTrigger<E[]>> pair1;
-
- /**
- * Constructs a new <code>SimTriggerModule</code> with the no
- * triggers results for any triggers.
- */
- SimTriggerModule() {
- singles0 = new ArrayList<SinglesTrigger<E>>(0);
- singles1 = new ArrayList<SinglesTrigger<E>>(0);
- pair0 = new ArrayList<PairTrigger<E[]>>(0);
- pair1 = new ArrayList<PairTrigger<E[]>>(0);
- }
-
- /**
- * Constructs a new <code>SimTriggerModule</code> with the specified
- * trigger results for each of the four primary triggers.
- * @param singles0Triggers - The results for the singles 0 trigger.
- * @param singles1Triggers - The results for the singles 1 trigger.
- * @param pair0Triggers - The results for the pair 0 trigger.
- * @param pair1Triggers - The results for the pair 1 trigger.
- */
- SimTriggerModule(List<SinglesTrigger<E>> singles0Triggers, List<SinglesTrigger<E>> singles1Triggers,
- List<PairTrigger<E[]>> pair0Triggers, List<PairTrigger<E[]>> pair1Triggers) {
- this.singles0 = singles0Triggers;
- this.singles1 = singles1Triggers;
- this.pair0 = pair0Triggers;
- this.pair1 = pair1Triggers;
- }
-
- /**
- * Gets the simulated trigger results for the singles 0 trigger.
- * @return Returns the trigger results as a <code>List</code> of
- * <code>SinglesTrigger</code> objects.
- */
- public List<SinglesTrigger<E>> getSingles0Triggers() {
- return singles0;
- }
-
- /**
- * Gets the simulated trigger results for the singles 1 trigger.
- * @return Returns the trigger results as a <code>List</code> of
- * <code>SinglesTrigger</code> objects.
- */
- public List<SinglesTrigger<E>> getSingles1Triggers() {
- return singles1;
- }
-
- /**
- * Gets the simulated trigger results for the pair 0 trigger.
- * @return Returns the trigger results as a <code>List</code> of
- * <code>PairTrigger</code> objects.
- */
- public List<PairTrigger<E[]>> getPair0Triggers() {
- return pair0;
- }
-
- /**
- * Gets the simulated trigger results for the pair 1 trigger.
- * @return Returns the trigger results as a <code>List</code> of
- * <code>PairTrigger</code> objects.
- */
- public List<PairTrigger<E[]>> getPair1Triggers() {
- return pair1;
- }
-}
+ private final List<SinglesTrigger<E>> singles0;
+ private final List<SinglesTrigger<E>> singles1;
+ private final List<PairTrigger<E[]>> pair0;
+ private final List<PairTrigger<E[]>> pair1;
+
+ /**
+ * Constructs a new <code>SimTriggerModule</code> with no trigger
+ * results for any triggers.
+ */
+ SimTriggerModule() {
+ singles0 = new ArrayList<SinglesTrigger<E>>(0);
+ singles1 = new ArrayList<SinglesTrigger<E>>(0);
+ pair0 = new ArrayList<PairTrigger<E[]>>(0);
+ pair1 = new ArrayList<PairTrigger<E[]>>(0);
+ }
+
+ /**
+ * Constructs a new <code>SimTriggerModule</code> with the specified
+ * trigger results for each of the four primary triggers.
+ * @param singles0Triggers - The results for the singles 0 trigger.
+ * @param singles1Triggers - The results for the singles 1 trigger.
+ * @param pair0Triggers - The results for the pair 0 trigger.
+ * @param pair1Triggers - The results for the pair 1 trigger.
+ */
+ SimTriggerModule(List<SinglesTrigger<E>> singles0Triggers, List<SinglesTrigger<E>> singles1Triggers,
+ List<PairTrigger<E[]>> pair0Triggers, List<PairTrigger<E[]>> pair1Triggers) {
+ this.singles0 = singles0Triggers;
+ this.singles1 = singles1Triggers;
+ this.pair0 = pair0Triggers;
+ this.pair1 = pair1Triggers;
+ }
+
+ /**
+ * Gets the simulated trigger results for the indicated singles
+ * trigger. Note that only inputs of <code>0</code> and <code>1</code>
+ * are allowed.
+ * @param triggerNumber - A value of either <code>0</code>, to
+ * obtain the singles 0 trigger results, or <code>1</code>, to
+ * obtain the singles 1 trigger results.
+ * @return Returns the trigger results as a <code>List</code> of
+ * <code>SinglesTrigger</code> objects.
+ * @throws IllegalArgumentException Occurs if the input argument
+ * is not either <code>0</code> or <code>1</code>.
+ */
+ public List<SinglesTrigger<E>> getSinglesTriggers(int triggerNumber) {
+ // Return the appropriate trigger list.
+ if(triggerNumber == 0) { return getSingles0Triggers(); }
+ else if(triggerNumber == 1) { return getSingles1Triggers(); }
+
+ // Any other trigger number is not valid and should produce an
+ // exception.
+ throw new IllegalArgumentException("Trigger number " + triggerNumber + " is not valid.");
+ }
+
+ /**
+ * Gets the simulated trigger results for the singles 0 trigger.
+ * @return Returns the trigger results as a <code>List</code> of
+ * <code>SinglesTrigger</code> objects.
+ */
+ public List<SinglesTrigger<E>> getSingles0Triggers() {
+ return singles0;
+ }
+
+ /**
+ * Gets the simulated trigger results for the singles 1 trigger.
+ * @return Returns the trigger results as a <code>List</code> of
+ * <code>SinglesTrigger</code> objects.
+ */
+ public List<SinglesTrigger<E>> getSingles1Triggers() {
+ return singles1;
+ }
+
+ /**
+ * Gets the simulated trigger results for the indicated pair trigger.
+ * Note that only inputs of <code>0</code> and <code>1</code> are
+ * allowed.
+ * @param triggerNumber - A value of either <code>0</code>, to
+ * obtain the pair 0 trigger results, or <code>1</code>, to obtain
+ * the pair 1 trigger results.
+ * @return Returns the trigger results as a <code>List</code> of
+ * <code>PairTrigger</code> objects.
+ * @throws IllegalArgumentException Occurs if the input argument
+ * is not either <code>0</code> or <code>1</code>.
+ */
+ public List<PairTrigger<E[]>> getPairTriggers(int triggerNumber) {
+ // Return the appropriate trigger list.
+ if(triggerNumber == 0) { return getPair0Triggers(); }
+ else if(triggerNumber == 1) { return getPair1Triggers(); }
+
+ // Any other trigger number is not valid and should produce an
+ // exception.
+ throw new IllegalArgumentException("Trigger number " + triggerNumber + " is not valid.");
+ }
+
+ /**
+ * Gets the simulated trigger results for the pair 0 trigger.
+ * @return Returns the trigger results as a <code>List</code> of
+ * <code>PairTrigger</code> objects.
+ */
+ public List<PairTrigger<E[]>> getPair0Triggers() {
+ return pair0;
+ }
+
+ /**
+ * Gets the simulated trigger results for the pair 1 trigger.
+ * @return Returns the trigger results as a <code>List</code> of
+ * <code>PairTrigger</code> objects.
+ */
+ public List<PairTrigger<E[]>> getPair1Triggers() {
+ return pair1;
+ }
+}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/TriggerDiagnosticDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/TriggerDiagnosticDriver.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/TriggerDiagnosticDriver.java Wed Apr 27 11:11:32 2016
@@ -52,54 +52,54 @@
import org.lcsim.util.aida.AIDA;
public class TriggerDiagnosticDriver extends Driver {
- // Store the LCIO collection names for the needed objects.
- private String hitCollectionName = "EcalCalHits";
- private String bankCollectionName = "TriggerBank";
- private String clusterCollectionName = "EcalClusters";
- private String diagnosticCollectionName = "DiagnosticSnapshot";
- private static final int clusterCollectionFlag = 1 << LCIOConstants.CLBIT_HITS;
- private String[] singlesCandidateCollectionName = { "Singles0TriggerCandidates", "Singles1TriggerCandidates" };
- private String[] pairCandidateCollectionName = { "Pair0TriggerCandidates", "Pair1TriggerCandidates" };
-
- // Store the lists of parsed objects.
- private TIData tiBank;
- private SSPData sspBank;
- private List<SSPCluster> sspClusters;
- private List<Cluster> reconClusters = new ArrayList<Cluster>();
- private SimTriggerData triggerData = null;
-
- // Trigger modules for performing trigger analysis.
- //private int activeTrigger = -1;
- private boolean[] tiFlags = new boolean[6];
- private TriggerModule[] singlesTrigger = new TriggerModule[2];
- private TriggerModule[] pairsTrigger = new TriggerModule[2];
- private boolean[][] singlesCutsEnabled = new boolean[2][3];
- private boolean[][] pairCutsEnabled = new boolean[2][7];
- private boolean[] singlesTriggerEnabled = new boolean[2];
- private boolean[] pairTriggerEnabled = new boolean[2];
-
- // Verification settings.
- private int nsa = 100;
- private int nsb = 20;
- private int windowWidth = 200;
- private int hitAcceptance = 1;
- private int noiseThreshold = 50;
- private double energyAcceptance = 0.003;
- private boolean readDAQConfig = false;
- private int localWindowThreshold = 1000000000;
- private boolean performClusterVerification = true;
- private boolean performSinglesTriggerVerification = true;
- private boolean performPairTriggerVerification = true;
- private boolean enforceTimeCompliance = false;
-
- // Efficiency tracking variables.
- private RunDiagStats localStats = new RunDiagStats();
- private RunDiagStats globalStats = new RunDiagStats();
-
- // Track which clusters/pairs are trigger candidates.
- private List<List<Cluster>> singlesCandidates = new ArrayList<List<Cluster>>(2);
- private List<List<LCRelation>> pairCandidates = new ArrayList<List<LCRelation>>(2);
-
+ // Store the LCIO collection names for the needed objects.
+ private String hitCollectionName = "EcalCalHits";
+ private String bankCollectionName = "TriggerBank";
+ private String clusterCollectionName = "EcalClusters";
+ private String diagnosticCollectionName = "DiagnosticSnapshot";
+ private static final int clusterCollectionFlag = 1 << LCIOConstants.CLBIT_HITS;
+ private String[] singlesCandidateCollectionName = { "Singles0TriggerCandidates", "Singles1TriggerCandidates" };
+ private String[] pairCandidateCollectionName = { "Pair0TriggerCandidates", "Pair1TriggerCandidates" };
+
+ // Store the lists of parsed objects.
+ private TIData tiBank;
+ private SSPData sspBank;
+ private List<SSPCluster> sspClusters;
+ private List<Cluster> reconClusters = new ArrayList<Cluster>();
+ private SimTriggerData triggerData = null;
+
+ // Trigger modules for performing trigger analysis.
+ //private int activeTrigger = -1;
+ private boolean[] tiFlags = new boolean[6];
+ private TriggerModule[] singlesTrigger = new TriggerModule[2];
+ private TriggerModule[] pairsTrigger = new TriggerModule[2];
+ private boolean[][] singlesCutsEnabled = new boolean[2][3];
+ private boolean[][] pairCutsEnabled = new boolean[2][7];
+ private boolean[] singlesTriggerEnabled = new boolean[2];
+ private boolean[] pairTriggerEnabled = new boolean[2];
+
+ // Verification settings.
+ private int nsa = 100;
+ private int nsb = 20;
+ private int windowWidth = 200;
+ private int hitAcceptance = 1;
+ private int noiseThreshold = 50;
+ private double energyAcceptance = 0.003;
+ private boolean readDAQConfig = false;
+ private int localWindowThreshold = 1000000000;
+ private boolean performClusterVerification = true;
+ private boolean performSinglesTriggerVerification = true;
+ private boolean performPairTriggerVerification = true;
+ private boolean enforceTimeCompliance = false;
+
+ // Efficiency tracking variables.
+ private RunDiagStats localStats = new RunDiagStats();
+ private RunDiagStats globalStats = new RunDiagStats();
+
+ // Track which clusters/pairs are trigger candidates.
+ private List<List<Cluster>> singlesCandidates = new ArrayList<List<Cluster>>(2);
+ private List<List<LCRelation>> pairCandidates = new ArrayList<List<LCRelation>>(2);
+
// Verbose settings.
private boolean clusterFail = false;
private boolean singlesEfficiencyFail = false;
@@ -115,2125 +115,2125 @@
private int statPrintInterval = Integer.MAX_VALUE;
// Cut index arrays for trigger verification.
- private static final int ENERGY_MIN = TriggerDiagnosticUtil.SINGLES_ENERGY_MIN;
- private static final int ENERGY_MAX = TriggerDiagnosticUtil.SINGLES_ENERGY_MAX;
- private static final int HIT_COUNT = TriggerDiagnosticUtil.SINGLES_HIT_COUNT;
- private static final int ENERGY_SUM = TriggerDiagnosticUtil.PAIR_ENERGY_SUM;
- private static final int ENERGY_DIFF = TriggerDiagnosticUtil.PAIR_ENERGY_DIFF;
- private static final int ENERGY_SLOPE = TriggerDiagnosticUtil.PAIR_ENERGY_SLOPE;
- private static final int COPLANARITY = TriggerDiagnosticUtil.PAIR_COPLANARITY;
-
- // Track the total run time.
- private long startTime = -1;
- private long endTime = -1;
-
- // Cut names for logging.
- private static final String[][] cutNames = {
- { "E_min", "E_max", "hit count", "null" },
- { "E_sum", "E_diff", "E_slope", "coplanar" }
- };
-
- // Temporary AIDA Plots
- private TriggerPlotsModule globalTriggerPlots = new TriggerPlotsModule(0, 0);
- private static final int RECON = 0;
- private static final int SSP = 1;
- private static final int ALL = 0;
- private static final int MATCHED = 1;
- private static final int FAILED = 2;
- private AIDA aida = AIDA.defaultInstance();
- private IHistogram1D[][] clusterHitPlot = {
- {
- aida.histogram1D("Clustering/Recon Cluster Hit Count (All)", 9, 0.5, 9.5),
- aida.histogram1D("Clustering/Recon Cluster Hit Count (Matched)", 9, 0.5, 9.5),
- aida.histogram1D("Clustering/Recon Cluster Hit Count (Failed)", 9, 0.5, 9.5)
- },
- {
- aida.histogram1D("Clustering/SSP Cluster Hit Count (All)", 9, 0.5, 9.5),
- aida.histogram1D("Clustering/SSP Cluster Hit Count (Matched)", 9, 0.5, 9.5),
- aida.histogram1D("Clustering/SSP Cluster Hit Count (Failed)", 9, 0.5, 9.5)
- }
- };
- private IHistogram1D[][] clusterEnergyPlot = {
- {
- aida.histogram1D("Clustering/Recon Cluster Energy (All)", 300, 0.0, 3.0),
- aida.histogram1D("Clustering/Recon Cluster Energy (Matched)", 300, 0.0, 3.0),
- aida.histogram1D("Clustering/Recon Cluster Energy (Failed)", 300, 0.0, 3.0)
- },
- {
- aida.histogram1D("Clustering/SSP Cluster Energy (All)", 300, 0.0, 3.0),
- aida.histogram1D("Clustering/SSP Cluster Energy (Matched)", 300, 0.0, 3.0),
- aida.histogram1D("Clustering/SSP Cluster Energy (Failed)", 300, 0.0, 3.0)
- }
- };
- private IHistogram1D[][] clusterTimePlot = {
- {
- aida.histogram1D("Clustering/Recon Cluster Time (All)", 115, 0, 460),
- aida.histogram1D("Clustering/Recon Cluster Time (Matched)", 115, 0, 460),
- aida.histogram1D("Clustering/Recon Cluster Time (Failed)", 115, 0, 460)
- },
- {
- aida.histogram1D("Clustering/SSP Cluster Time (All)", 115, 0, 460),
- aida.histogram1D("Clustering/SSP Cluster Time (Matched)", 115, 0, 460),
- aida.histogram1D("Clustering/SSP Cluster Time (Failed)", 115, 0, 460)
- }
- };
- private IHistogram2D[][] clusterPositionPlot = {
- {
- aida.histogram2D("Clustering/Recon Cluster Position (All)", 47, -23.5, 23.5, 11, -5.5, 5.5),
- aida.histogram2D("Clustering/Recon Cluster Position (Matched)", 47, -23.5, 23.5, 11, -5.5, 5.5),
- aida.histogram2D("Clustering/Recon Cluster Position (Failed)", 47, -23.5, 23.5, 11, -5.5, 5.5)
- },
- {
- aida.histogram2D("Clustering/SSP Cluster Position (All)", 47, -23.5, 23.5, 11, -5.5, 5.5),
- aida.histogram2D("Clustering/SSP Cluster Position (Matched)", 47, -23.5, 23.5, 11, -5.5, 5.5),
- aida.histogram2D("Clustering/SSP Cluster Position (Failed)", 47, -23.5, 23.5, 11, -5.5, 5.5)
- }
- };
- private IHistogram2D[] energyhitDiffPlot = {
- aida.histogram2D("Clustering/Recon-SSP Energy-Hit Difference (All)", 21, -0.010, 0.010, 6, -3, 3),
- aida.histogram2D("Clustering/Recon-SSP Energy-Hit Difference (Matched)", 21, -0.010, 0.010, 6, -3, 3),
- aida.histogram2D("Clustering/Recon-SSP Energy-Hit Difference (Failed)", 21, -0.010, 0.010, 6, -3, 3)
- };
- private ICloud2D[] efficiencyTimeHist = {
- aida.cloud2D("Clustering/Cluster Efficiency vs. Time"),
- aida.cloud2D("Singles Trigger 0/Cluster Efficiency vs. Time"),
- aida.cloud2D("Singles Trigger 1/Cluster Efficiency vs. Time"),
- aida.cloud2D("Pair Trigger 0/Cluster Efficiency vs. Time"),
- aida.cloud2D("Pair Trigger 1/Cluster Efficiency vs. Time")
- };
-
- /**
- * Define the trigger modules. This should be replaced by parsing
- * the DAQ configuration at some point.
- */
- @Override
- public void startOfData() {
- // By default, all triggers and cuts are enabled.
- for(int i = 0; i < 2; i++) {
- // Enable the triggers.
- pairTriggerEnabled[i] = true;
- singlesTriggerEnabled[i] = true;
-
- // Enable the singles cuts.
- for(int j = 0; j < singlesCutsEnabled.length; j++) {
- singlesCutsEnabled[i][j] = true;
- }
-
- // Enable the pair cuts.
- for(int j = 0; j < pairCutsEnabled.length; j++) {
- pairCutsEnabled[i][j] = true;
- }
- }
-
- // If the DAQ configuration should be read, attach a listener
- // to track when it updates.
- if(readDAQConfig) {
- ConfigurationManager.addActionListener(new ActionListener() {
- @Override
- public void actionPerformed(ActionEvent e) {
- // Get the DAQ configuration.
- DAQConfig daq = ConfigurationManager.getInstance();
-
- // Update the plotting energy slope values.
- globalTriggerPlots.setEnergySlopeParamF(0, daq.getSSPConfig().getPair1Config().getEnergySlopeCutConfig().getParameterF());
- globalTriggerPlots.setEnergySlopeParamF(1, daq.getSSPConfig().getPair2Config().getEnergySlopeCutConfig().getParameterF());
-
- // Load the DAQ settings from the configuration manager.
- singlesTrigger[0].loadDAQConfiguration(daq.getSSPConfig().getSingles1Config());
- singlesTrigger[1].loadDAQConfiguration(daq.getSSPConfig().getSingles2Config());
- pairsTrigger[0].loadDAQConfiguration(daq.getSSPConfig().getPair1Config());
- pairsTrigger[1].loadDAQConfiguration(daq.getSSPConfig().getPair2Config());
- nsa = daq.getFADCConfig().getNSA();
- nsb = daq.getFADCConfig().getNSB();
- windowWidth = daq.getFADCConfig().getWindowWidth();
-
- // Get the trigger configurations from the DAQ.
- SinglesTriggerConfig[] singles = { daq.getSSPConfig().getSingles1Config(),
- daq.getSSPConfig().getSingles2Config() };
- PairTriggerConfig[] pairs = { daq.getSSPConfig().getPair1Config(),
- daq.getSSPConfig().getPair2Config() };
-
- // Update the enabled/disabled statuses.
- for(int i = 0; i < 2; i++) {
- // Set the trigger enabled status.
- pairTriggerEnabled[i] = pairs[i].isEnabled();
- singlesTriggerEnabled[i] = singles[i].isEnabled();
-
- // Set the singles cut statuses.
- singlesCutsEnabled[i][ENERGY_MIN] = singles[i].getEnergyMinCutConfig().isEnabled();
- singlesCutsEnabled[i][ENERGY_MAX] = singles[i].getEnergyMaxCutConfig().isEnabled();
- singlesCutsEnabled[i][HIT_COUNT] = singles[i].getHitCountCutConfig().isEnabled();
-
- // Set the pair cut statuses.
- pairCutsEnabled[i][ENERGY_MIN] = pairs[i].getEnergyMinCutConfig().isEnabled();
- pairCutsEnabled[i][ENERGY_MAX] = pairs[i].getEnergyMaxCutConfig().isEnabled();
- pairCutsEnabled[i][HIT_COUNT] = pairs[i].getHitCountCutConfig().isEnabled();
- pairCutsEnabled[i][3 + ENERGY_SUM] = pairs[i].getEnergySumCutConfig().isEnabled();
- pairCutsEnabled[i][3 + ENERGY_DIFF] = pairs[i].getEnergyDifferenceCutConfig().isEnabled();
- pairCutsEnabled[i][3 + ENERGY_SLOPE] = pairs[i].getEnergySlopeCutConfig().isEnabled();
- pairCutsEnabled[i][3 + COPLANARITY] = pairs[i].getCoplanarityCutConfig().isEnabled();
- }
-
- // Update the trigger plots values.
- globalTriggerPlots.setEnergySlopeParamF(0, daq.getSSPConfig().getPair1Config().getEnergySlopeCutConfig().getParameterF());
- globalTriggerPlots.setEnergySlopeParamF(1, daq.getSSPConfig().getPair2Config().getEnergySlopeCutConfig().getParameterF());
-
- // Print a DAQ configuration settings header.
- System.out.println();
- System.out.println();
- System.out.println("======================================================================");
- System.out.println("=== DAQ Configuration Settings =======================================");
- System.out.println("======================================================================");
- logSettings();
- }
- });
- }
-
- // Print the cluster verification header.
- System.out.println();
- System.out.println();
- System.out.println("======================================================================");
- System.out.println("=== Cluster/Trigger Verification Settings ============================");
- System.out.println("======================================================================");
-
- // Define the first singles trigger.
- singlesTrigger[0] = new TriggerModule();
- singlesTrigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.500);
- singlesTrigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 8.191);
- singlesTrigger[0].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 0);
-
- // Define the second singles trigger.
- singlesTrigger[1] = new TriggerModule();
- singlesTrigger[1].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.000);
- singlesTrigger[1].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 8.191);
- singlesTrigger[1].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 0);
-
- // Define the first pairs trigger.
- pairsTrigger[0] = new TriggerModule();
- pairsTrigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.000);
- pairsTrigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 8.191);
- pairsTrigger[0].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 0);
- pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.000);
- pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 8.191);
- pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 8.191);
- pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.000);
- pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F, 0.001);
- pairsTrigger[0].setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 180);
- pairsTrigger[0].setCutValue(TriggerModule.PAIR_TIME_COINCIDENCE, 8);
-
- // Define the second pairs trigger.
- pairsTrigger[1] = new TriggerModule();
- pairsTrigger[1].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.000);
- pairsTrigger[1].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 8.191);
- pairsTrigger[1].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 0);
- pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.000);
- pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 8.191);
- pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 8.191);
- pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.000);
- pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F, 0.001);
- pairsTrigger[1].setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 180);
- pairsTrigger[1].setCutValue(TriggerModule.PAIR_TIME_COINCIDENCE, 8);
-
- // Print the initial settings.
- logSettings();
- }
-
- /**
- * Prints the total run statistics.
- */
- @Override
- public void endOfData() {
- // Output the statistics.
- logStatistics();
-
- /*
- // Calculate the values needed for the efficiency histogram.
- long totalTime = entryList.get(entryList.size()).time / 1000000000;
- int entries = (int) (totalTime / (localWindowThreshold / 1000000000)) + 1;
-
- // Generate a histogram containing the efficiencies.
- IHistogram1D[] efficiencyHist = new IHistogram1D[5];
- for(int i = 0; i < 5; i++) {
- efficiencyHist[i] = aida.histogram1D("Efficiency " + i, entries, 0.0, totalTime + (localWindowThreshold / 1000000000));
- }
-
- // Input the efficiencies.
- for(EfficiencyEntry entry : entryList) {
- for(int i = 0; i < 5; i++) {
- efficiencyHist[i].fill(entry.time / 1000000000, entry.efficiency[i]);
- }
- }
- */
- }
-
- /**
- * Gets the banks and clusters from the event.
- */
- @Override
- public void process(EventHeader event) {
- // ==========================================================
- // ==== Event Pre-Initialization ============================
- // ==========================================================
-
- // If DAQ settings are to be used, check if they are initialized
- // yet. If not, skip the event.
- if(readDAQConfig) {
- if(!ConfigurationManager.isInitialized()) {
- return;
- }
- }
-
- // Reset the candidate cluster lists.
- singlesCandidates.clear();
- singlesCandidates.add(new ArrayList<Cluster>());
- singlesCandidates.add(new ArrayList<Cluster>());
- pairCandidates.clear();
- pairCandidates.add(new ArrayList<LCRelation>());
- pairCandidates.add(new ArrayList<LCRelation>());
-
- // Increment the total event count.
- localStats.sawEvent(event.getTimeStamp());
- globalStats.sawEvent(event.getTimeStamp());
-
- // Print the statistics every so often during a run.
- if(globalStats.getEventCount() % statPrintInterval == 0) {
- logStatistics();
- }
-
- // Reset the output buffer and print flags.
- clusterFail = false;
- singlesInternalFail = false;
- singlesEfficiencyFail = false;
- pairInternalFail = false;
- pairEfficiencyFail = false;
- OutputLogger.clearLog();
-
- // Track the times.
- if(startTime == -1) { startTime = event.getTimeStamp(); }
- else { endTime = event.getTimeStamp(); }
-
-
-
- // ==========================================================
- // ==== Output GTP Information ==============================
- // ==========================================================
-
+ private static final int ENERGY_MIN = TriggerDiagnosticUtil.SINGLES_ENERGY_MIN;
+ private static final int ENERGY_MAX = TriggerDiagnosticUtil.SINGLES_ENERGY_MAX;
+ private static final int HIT_COUNT = TriggerDiagnosticUtil.SINGLES_HIT_COUNT;
+ private static final int ENERGY_SUM = TriggerDiagnosticUtil.PAIR_ENERGY_SUM;
+ private static final int ENERGY_DIFF = TriggerDiagnosticUtil.PAIR_ENERGY_DIFF;
+ private static final int ENERGY_SLOPE = TriggerDiagnosticUtil.PAIR_ENERGY_SLOPE;
+ private static final int COPLANARITY = TriggerDiagnosticUtil.PAIR_COPLANARITY;
+
+ // Track the total run time.
+ private long startTime = -1;
+ private long endTime = -1;
+
+ // Cut names for logging.
+ private static final String[][] cutNames = {
+ { "E_min", "E_max", "hit count", "null" },
+ { "E_sum", "E_diff", "E_slope", "coplanar" }
+ };
+
+ // Temporary AIDA Plots
+ private TriggerPlotsModule globalTriggerPlots = new TriggerPlotsModule(0, 0);
+ private static final int RECON = 0;
+ private static final int SSP = 1;
+ private static final int ALL = 0;
+ private static final int MATCHED = 1;
+ private static final int FAILED = 2;
+ private AIDA aida = AIDA.defaultInstance();
+ private IHistogram1D[][] clusterHitPlot = {
+ {
+ aida.histogram1D("Clustering/Recon Cluster Hit Count (All)", 9, 0.5, 9.5),
+ aida.histogram1D("Clustering/Recon Cluster Hit Count (Matched)", 9, 0.5, 9.5),
+ aida.histogram1D("Clustering/Recon Cluster Hit Count (Failed)", 9, 0.5, 9.5)
+ },
+ {
+ aida.histogram1D("Clustering/SSP Cluster Hit Count (All)", 9, 0.5, 9.5),
+ aida.histogram1D("Clustering/SSP Cluster Hit Count (Matched)", 9, 0.5, 9.5),
+ aida.histogram1D("Clustering/SSP Cluster Hit Count (Failed)", 9, 0.5, 9.5)
+ }
+ };
+ private IHistogram1D[][] clusterEnergyPlot = {
+ {
+ aida.histogram1D("Clustering/Recon Cluster Energy (All)", 300, 0.0, 3.0),
+ aida.histogram1D("Clustering/Recon Cluster Energy (Matched)", 300, 0.0, 3.0),
+ aida.histogram1D("Clustering/Recon Cluster Energy (Failed)", 300, 0.0, 3.0)
+ },
+ {
+ aida.histogram1D("Clustering/SSP Cluster Energy (All)", 300, 0.0, 3.0),
+ aida.histogram1D("Clustering/SSP Cluster Energy (Matched)", 300, 0.0, 3.0),
+ aida.histogram1D("Clustering/SSP Cluster Energy (Failed)", 300, 0.0, 3.0)
+ }
+ };
+ private IHistogram1D[][] clusterTimePlot = {
+ {
+ aida.histogram1D("Clustering/Recon Cluster Time (All)", 115, 0, 460),
+ aida.histogram1D("Clustering/Recon Cluster Time (Matched)", 115, 0, 460),
+ aida.histogram1D("Clustering/Recon Cluster Time (Failed)", 115, 0, 460)
+ },
+ {
+ aida.histogram1D("Clustering/SSP Cluster Time (All)", 115, 0, 460),
+ aida.histogram1D("Clustering/SSP Cluster Time (Matched)", 115, 0, 460),
+ aida.histogram1D("Clustering/SSP Cluster Time (Failed)", 115, 0, 460)
+ }
+ };
+ private IHistogram2D[][] clusterPositionPlot = {
+ {
+ aida.histogram2D("Clustering/Recon Cluster Position (All)", 47, -23.5, 23.5, 11, -5.5, 5.5),
+ aida.histogram2D("Clustering/Recon Cluster Position (Matched)", 47, -23.5, 23.5, 11, -5.5, 5.5),
+ aida.histogram2D("Clustering/Recon Cluster Position (Failed)", 47, -23.5, 23.5, 11, -5.5, 5.5)
+ },
+ {
+ aida.histogram2D("Clustering/SSP Cluster Position (All)", 47, -23.5, 23.5, 11, -5.5, 5.5),
+ aida.histogram2D("Clustering/SSP Cluster Position (Matched)", 47, -23.5, 23.5, 11, -5.5, 5.5),
+ aida.histogram2D("Clustering/SSP Cluster Position (Failed)", 47, -23.5, 23.5, 11, -5.5, 5.5)
+ }
+ };
+ private IHistogram2D[] energyhitDiffPlot = {
+ aida.histogram2D("Clustering/Recon-SSP Energy-Hit Difference (All)", 21, -0.010, 0.010, 6, -3, 3),
+ aida.histogram2D("Clustering/Recon-SSP Energy-Hit Difference (Matched)", 21, -0.010, 0.010, 6, -3, 3),
+ aida.histogram2D("Clustering/Recon-SSP Energy-Hit Difference (Failed)", 21, -0.010, 0.010, 6, -3, 3)
+ };
+ private ICloud2D[] efficiencyTimeHist = {
+ aida.cloud2D("Clustering/Cluster Efficiency vs. Time"),
+ aida.cloud2D("Singles Trigger 0/Cluster Efficiency vs. Time"),
+ aida.cloud2D("Singles Trigger 1/Cluster Efficiency vs. Time"),
+ aida.cloud2D("Pair Trigger 0/Cluster Efficiency vs. Time"),
+ aida.cloud2D("Pair Trigger 1/Cluster Efficiency vs. Time")
+ };
+
+ /**
+ * Define the trigger modules. This should be replaced by parsing
+ * the DAQ configuration at some point.
+ */
+ @Override
+ public void startOfData() {
+ // By default, all triggers and cuts are enabled.
+ for(int i = 0; i < 2; i++) {
+ // Enable the triggers.
+ pairTriggerEnabled[i] = true;
+ singlesTriggerEnabled[i] = true;
+
+ // Enable the singles cuts.
+ for(int j = 0; j < singlesCutsEnabled.length; j++) {
+ singlesCutsEnabled[i][j] = true;
+ }
+
+ // Enable the pair cuts.
+ for(int j = 0; j < pairCutsEnabled.length; j++) {
+ pairCutsEnabled[i][j] = true;
+ }
+ }
+
+ // If the DAQ configuration should be read, attach a listener
+ // to track when it updates.
+ if(readDAQConfig) {
+ ConfigurationManager.addActionListener(new ActionListener() {
+ @Override
+ public void actionPerformed(ActionEvent e) {
+ // Get the DAQ configuration.
+ DAQConfig daq = ConfigurationManager.getInstance();
+
+ // Update the plotting energy slope values.
+ globalTriggerPlots.setEnergySlopeParamF(0, daq.getSSPConfig().getPair1Config().getEnergySlopeCutConfig().getParameterF());
+ globalTriggerPlots.setEnergySlopeParamF(1, daq.getSSPConfig().getPair2Config().getEnergySlopeCutConfig().getParameterF());
+
+ // Load the DAQ settings from the configuration manager.
+ singlesTrigger[0].loadDAQConfiguration(daq.getSSPConfig().getSingles1Config());
+ singlesTrigger[1].loadDAQConfiguration(daq.getSSPConfig().getSingles2Config());
+ pairsTrigger[0].loadDAQConfiguration(daq.getSSPConfig().getPair1Config());
+ pairsTrigger[1].loadDAQConfiguration(daq.getSSPConfig().getPair2Config());
+ nsa = daq.getFADCConfig().getNSA();
+ nsb = daq.getFADCConfig().getNSB();
+ windowWidth = daq.getFADCConfig().getWindowWidth();
+
+ // Get the trigger configurations from the DAQ.
+ SinglesTriggerConfig[] singles = { daq.getSSPConfig().getSingles1Config(),
+ daq.getSSPConfig().getSingles2Config() };
+ PairTriggerConfig[] pairs = { daq.getSSPConfig().getPair1Config(),
+ daq.getSSPConfig().getPair2Config() };
+
+ // Update the enabled/disabled statuses.
+ for(int i = 0; i < 2; i++) {
+ // Set the trigger enabled status.
+ pairTriggerEnabled[i] = pairs[i].isEnabled();
+ singlesTriggerEnabled[i] = singles[i].isEnabled();
+
+ // Set the singles cut statuses.
+ singlesCutsEnabled[i][ENERGY_MIN] = singles[i].getEnergyMinCutConfig().isEnabled();
+ singlesCutsEnabled[i][ENERGY_MAX] = singles[i].getEnergyMaxCutConfig().isEnabled();
+ singlesCutsEnabled[i][HIT_COUNT] = singles[i].getHitCountCutConfig().isEnabled();
+
+ // Set the pair cut statuses.
+ pairCutsEnabled[i][ENERGY_MIN] = pairs[i].getEnergyMinCutConfig().isEnabled();
+ pairCutsEnabled[i][ENERGY_MAX] = pairs[i].getEnergyMaxCutConfig().isEnabled();
+ pairCutsEnabled[i][HIT_COUNT] = pairs[i].getHitCountCutConfig().isEnabled();
+ pairCutsEnabled[i][3 + ENERGY_SUM] = pairs[i].getEnergySumCutConfig().isEnabled();
+ pairCutsEnabled[i][3 + ENERGY_DIFF] = pairs[i].getEnergyDifferenceCutConfig().isEnabled();
+ pairCutsEnabled[i][3 + ENERGY_SLOPE] = pairs[i].getEnergySlopeCutConfig().isEnabled();
+ pairCutsEnabled[i][3 + COPLANARITY] = pairs[i].getCoplanarityCutConfig().isEnabled();
+ }
+
+ // Update the trigger plots values.
+ globalTriggerPlots.setEnergySlopeParamF(0, daq.getSSPConfig().getPair1Config().getEnergySlopeCutConfig().getParameterF());
+ globalTriggerPlots.setEnergySlopeParamF(1, daq.getSSPConfig().getPair2Config().getEnergySlopeCutConfig().getParameterF());
+
+ // Print a DAQ configuration settings header.
+ System.out.println();
+ System.out.println();
+ System.out.println("======================================================================");
+ System.out.println("=== DAQ Configuration Settings =======================================");
+ System.out.println("======================================================================");
+ logSettings();
+ }
+ });
+ }
+
+ // Print the cluster verification header.
+ System.out.println();
+ System.out.println();
+ System.out.println("======================================================================");
+ System.out.println("=== Cluster/Trigger Verification Settings ============================");
+ System.out.println("======================================================================");
+
+ // Define the first singles trigger.
+ singlesTrigger[0] = new TriggerModule();
+ singlesTrigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.500);
+ singlesTrigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 8.191);
+ singlesTrigger[0].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 0);
+
+ // Define the second singles trigger.
+ singlesTrigger[1] = new TriggerModule();
+ singlesTrigger[1].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.000);
+ singlesTrigger[1].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 8.191);
+ singlesTrigger[1].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 0);
+
+ // Define the first pairs trigger.
+ pairsTrigger[0] = new TriggerModule();
+ pairsTrigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.000);
+ pairsTrigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 8.191);
+ pairsTrigger[0].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 0);
+ pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.000);
+ pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 8.191);
+ pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 8.191);
+ pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.000);
+ pairsTrigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F, 0.001);
+ pairsTrigger[0].setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 180);
+ pairsTrigger[0].setCutValue(TriggerModule.PAIR_TIME_COINCIDENCE, 8);
+
+ // Define the second pairs trigger.
+ pairsTrigger[1] = new TriggerModule();
+ pairsTrigger[1].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.000);
+ pairsTrigger[1].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 8.191);
+ pairsTrigger[1].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 0);
+ pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.000);
+ pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 8.191);
+ pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 8.191);
+ pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.000);
+ pairsTrigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F, 0.001);
+ pairsTrigger[1].setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 180);
+ pairsTrigger[1].setCutValue(TriggerModule.PAIR_TIME_COINCIDENCE, 8);
+
+ // Print the initial settings.
+ logSettings();
+ }
+
+ /**
+ * Prints the total run statistics.
+ */
+ @Override
+ public void endOfData() {
+ // Output the statistics.
+ logStatistics();
+
+ /*
+ // Calculate the values needed for the efficiency histogram.
+ long totalTime = entryList.get(entryList.size()).time / 1000000000;
+ int entries = (int) (totalTime / (localWindowThreshold / 1000000000)) + 1;
+
+ // Generate a histogram containing the efficiencies.
+ IHistogram1D[] efficiencyHist = new IHistogram1D[5];
+ for(int i = 0; i < 5; i++) {
+ efficiencyHist[i] = aida.histogram1D("Efficiency " + i, entries, 0.0, totalTime + (localWindowThreshold / 1000000000));
+ }
+
+ // Input the efficiencies.
+ for(EfficiencyEntry entry : entryList) {
+ for(int i = 0; i < 5; i++) {
+ efficiencyHist[i].fill(entry.time / 1000000000, entry.efficiency[i]);
+ }
+ }
+ */
+ }
+
+ /**
+ * Gets the banks and clusters from the event.
+ */
+ @Override
+ public void process(EventHeader event) {
+ // ==========================================================
+ // ==== Event Pre-Initialization ============================
+ // ==========================================================
+
+ // If DAQ settings are to be used, check if they are initialized
+ // yet. If not, skip the event.
+ if(readDAQConfig) {
+ if(!ConfigurationManager.isInitialized()) {
+ return;
+ }
+ }
+
+ // Reset the candidate cluster lists.
+ singlesCandidates.clear();
+ singlesCandidates.add(new ArrayList<Cluster>());
+ singlesCandidates.add(new ArrayList<Cluster>());
+ pairCandidates.clear();
+ pairCandidates.add(new ArrayList<LCRelation>());
+ pairCandidates.add(new ArrayList<LCRelation>());
+
+ // Increment the total event count.
+ localStats.sawEvent(event.getTimeStamp());
+ globalStats.sawEvent(event.getTimeStamp());
+
+ // Print the statistics every so often during a run.
+ if(globalStats.getEventCount() % statPrintInterval == 0) {
+ logStatistics();
+ }
+
+ // Reset the output buffer and print flags.
+ clusterFail = false;
+ singlesInternalFail = false;
+ singlesEfficiencyFail = false;
+ pairInternalFail = false;
+ pairEfficiencyFail = false;
+ OutputLogger.clearLog();
+
+ // Track the times.
+ if(startTime == -1) { startTime = event.getTimeStamp(); }
+ else { endTime = event.getTimeStamp(); }
+
+
+
+ // ==========================================================
+ // ==== Output GTP Information ==============================
+ // ==========================================================
+
// Print the verification header.
- OutputLogger.printNewLine(2);
- OutputLogger.println("======================================================================");
- OutputLogger.println("==== FADC/GTP Readout ================================================");
- OutputLogger.println("======================================================================");
-
- OutputLogger.println("FADC Hits:");
- for(CalorimeterHit hit : event.get(CalorimeterHit.class, "EcalCalHits")) {
- int ix = hit.getIdentifierFieldValue("ix");
- int iy = hit.getIdentifierFieldValue("iy");
- OutputLogger.printf("\tHit at (%3d, %3d) with %7.3f GeV at time %3.0f ns%n", ix, iy, hit.getCorrectedEnergy(), hit.getTime());
- }
- OutputLogger.printNewLine(2);
- OutputLogger.println("GTP Clusters:");
- for(Cluster cluster : event.get(Cluster.class, clusterCollectionName)) {
- OutputLogger.printf("\t%s%n", TriggerDiagnosticUtil.clusterToString(cluster));
- for(CalorimeterHit hit : cluster.getCalorimeterHits()) {
- int ix = hit.getIdentifierFieldValue("ix");
- int iy = hit.getIdentifierFieldValue("iy");
- OutputLogger.printf("\t\t> (%3d, %3d) :: %7.3f GeV%n", ix, iy, hit.getCorrectedEnergy());
- }
- }
-
-
-
- // ==========================================================
- // ==== Initialize the Event ================================
- // ==========================================================
-
+ OutputLogger.printNewLine(2);
+ OutputLogger.println("======================================================================");
+ OutputLogger.println("==== FADC/GTP Readout ================================================");
+ OutputLogger.println("======================================================================");
+
+ OutputLogger.println("FADC Hits:");
+ for(CalorimeterHit hit : event.get(CalorimeterHit.class, "EcalCalHits")) {
+ int ix = hit.getIdentifierFieldValue("ix");
+ int iy = hit.getIdentifierFieldValue("iy");
+ OutputLogger.printf("\tHit at (%3d, %3d) with %7.3f GeV at time %3.0f ns%n", ix, iy, hit.getCorrectedEnergy(), hit.getTime());
+ }
+ OutputLogger.printNewLine(2);
+ OutputLogger.println("GTP Clusters:");
+ for(Cluster cluster : event.get(Cluster.class, clusterCollectionName)) {
+ OutputLogger.printf("\t%s%n", TriggerDiagnosticUtil.clusterToString(cluster));
+ for(CalorimeterHit hit : cluster.getCalorimeterHits()) {
+ int ix = hit.getIdentifierFieldValue("ix");
+ int iy = hit.getIdentifierFieldValue("iy");
+ OutputLogger.printf("\t\t> (%3d, %3d) :: %7.3f GeV%n", ix, iy, hit.getCorrectedEnergy());
+ }
+ }
+
+
+
+ // ==========================================================
+ // ==== Initialize the Event ================================
+ // ==========================================================
+
// Print the verification header.
- OutputLogger.printNewLine(2);
- OutputLogger.println("======================================================================");
- OutputLogger.println("==== Cluster/Trigger Verification ====================================");
- OutputLogger.println("======================================================================");
-
-
-
- // ==========================================================
- // ==== Obtain SSP and TI Banks =============================
- // ==========================================================
-
- // Get the SSP clusters.
- if(event.hasCollection(GenericObject.class, bankCollectionName)) {
- // Get the bank list.
- List<GenericObject> bankList = event.get(GenericObject.class, bankCollectionName);
-
- // Search through the banks and get the SSP and TI banks.
- for(GenericObject obj : bankList) {
- // If this is an SSP bank, parse it.
- if(AbstractIntData.getTag(obj) == SSPData.BANK_TAG) {
- sspBank = new SSPData(obj);
- }
-
- // Otherwise, if this is a TI bank, parse it.
- else if(AbstractIntData.getTag(obj) == TIData.BANK_TAG) {
- tiBank = new TIData(obj);
-
- tiFlags = new boolean[6];
- if(tiBank.isPulserTrigger()) {
- OutputLogger.println("Trigger type :: Pulser");
- tiFlags[TriggerDiagStats.PULSER] = true;
- } else if(tiBank.isSingle0Trigger()) {
- OutputLogger.println("Trigger type :: Singles 1");
- tiFlags[TriggerDiagStats.SINGLES0] = true;
- } else if(tiBank.isSingle1Trigger()) {
- OutputLogger.println("Trigger type :: Singles 2");
- tiFlags[TriggerDiagStats.SINGLES1] = true;
- } else if(tiBank.isPair0Trigger()) {
- OutputLogger.println("Trigger type :: Pair 1");
- tiFlags[TriggerDiagStats.PAIR0] = true;
- } else if(tiBank.isPair1Trigger()) {
- OutputLogger.println("Trigger type :: Pair 2");
- tiFlags[TriggerDiagStats.PAIR1] = true;
- } else if(tiBank.isCalibTrigger()) {
- OutputLogger.println("Trigger type :: Cosmic");
- tiFlags[TriggerDiagStats.COSMIC] = true;
- } else {
- System.err.println("TriggerDiagnosticDriver: Skipping event; no TI trigger source found.");
- return;
- }
-
- // Pass the TI triggers to the run statistical data
- // manager object.
- localStats.getTriggerStats().sawTITriggers(tiFlags);
- globalStats.getTriggerStats().sawTITriggers(tiFlags);
- }
- }
-
- // If there is an SSP bank, get the list of SSP clusters.
- if(sspBank != null) {
- sspClusters = sspBank.getClusters();
- if(sspClusters.size() == 1) {
- OutputLogger.println("1 SSP cluster found.");
- } else {
- OutputLogger.printf("%d SSP clusters found.%n", sspClusters.size());
- }
- }
- }
-
- // Make sure that both an SSP bank and a TI bank were found.
- if(tiBank == null || sspBank == null) {
- System.err.println("TriggerDiagnosticDriver :: SEVERE WARNING :: TI bank or SSP bank missing from event!");
- return;
- }
-
- // Output the event number and information.
- OutputLogger.printf("Event Number %d (%d)%n", sspBank.getEventNumber(), event.getEventNumber());
-
-
-
- // ==========================================================
- // ==== Establish Event Integrity ===========================
- // ==========================================================
-
- // Check that all of the required objects are present.
- if(sspBank == null) {
- OutputLogger.println("No SSP bank found for this event. No verification will be performed.");
- if(verbose) { OutputLogger.printLog(); }
- return;
- } if(tiBank == null) {
- OutputLogger.println("No TI bank found for this event. No verification will be performed.");
- if(verbose) { OutputLogger.printLog(); }
- return;
- }
-
-
-
- // ==========================================================
- // ==== Check the Noise Level ===============================
- // ==========================================================
-
- // Check if there are hits.
- if(event.hasCollection(CalorimeterHit.class, hitCollectionName)) {
- // Check if there are more hits than the noise threshold.
- if(event.get(CalorimeterHit.class, hitCollectionName).size() >= noiseThreshold) {
- localStats.sawNoiseEvent();
- globalStats.sawNoiseEvent();
- OutputLogger.println("Noise event detected. Skipping event...");
- if(verbose) { OutputLogger.printLog(); }
- return;
- }
- }
-
-
-
- // ==========================================================
- // ==== Obtain Reconstructed Clusters =======================
- // ==========================================================
-
- // Get the reconstructed clusters.
- if(event.hasCollection(Cluster.class, clusterCollectionName)) {
- // Get the reconstructed clusters.
- List<Cluster> allClusters = event.get(Cluster.class, clusterCollectionName);
-
- // Keep only the clusters that can be verified.
- OutputLogger.println();
- OutputLogger.println("Process cluster for verifiability:");
- reconClusters.clear();
- for(Cluster reconCluster : allClusters) {
- // Check that the cluster is within the safe region of the
- // FADC readout window. If it is not, it will likely have
- // inaccurate energy or hit values and may not produce the
- // expected results.
- OutputLogger.printf("\t%s", TriggerDiagnosticUtil.clusterToString(reconCluster));
- if(isVerifiable(reconCluster)) {
- reconClusters.add(reconCluster);
- OutputLogger.println(" [ verifiable ]");
- } else { OutputLogger.println(" [ unverifiable ]"); }
- }
-
- // Output the number of verifiable clusters found.
- if(reconClusters.size() == 1) { OutputLogger.println("1 verifiable reconstructed cluster found."); }
- else { OutputLogger.printf("%d verifiable reconstructed clusters found.%n", reconClusters.size()); }
-
- // Output the number of unverifiable clusters found.
- int unverifiableClusters = allClusters.size() - reconClusters.size();
- if(unverifiableClusters == 1) { OutputLogger.println("1 unverifiable reconstructed cluster found."); }
- else { OutputLogger.printf("%d unverifiable reconstructed clusters found.%n", unverifiableClusters); }
- } else {
- reconClusters = new ArrayList<Cluster>(0);
- OutputLogger.printf("No reconstructed clusters were found for collection \"%s\" in this event.%n", clusterCollectionName);
- }
-
-
-
- // ==========================================================
- // ==== Perform Event Verification ==========================
- // ==========================================================
-
- // Perform the cluster verification step.
- if(performClusterVerification) { clusterVerification(); }
-
- // Get the simulated triggers.
- if(event.hasCollection(SimTriggerData.class, "SimTriggers")) {
- List<SimTriggerData> stdList = event.get(SimTriggerData.class, "SimTriggers");
- triggerData = stdList.get(0);
- }
-
- // Construct lists of triggers for the SSP clusters and the
- // reconstructed clusters.
- if(performSinglesTriggerVerification) {
- singlesTriggerVerification();
- }
- if(performPairTriggerVerification) {
- pairTriggerVerification();
- }
-
- // Track how many events failed due to each type of verification.
- if(clusterFail) {
- localStats.failedClusterEvent();
- globalStats.failedClusterEvent();
- } if(pairInternalFail || pairEfficiencyFail) {
- localStats.failedPairEvent();
- globalStats.failedPairEvent();
- } if(singlesInternalFail || singlesEfficiencyFail) {
- localStats.failedSinglesEvent();
- globalStats.failedSinglesEvent();
- }
-
-
-
- // ==========================================================
- // ==== Perform Event Write-Out =============================
- // ==========================================================
-
- if(verbose ||(clusterFail && printClusterFail) ||
- (singlesInternalFail && printSinglesTriggerInternalFail) ||
- (singlesEfficiencyFail && printSinglesTriggerEfficiencyFail) ||
- (pairInternalFail && printPairTriggerInternalFail) ||
- (pairEfficiencyFail && printPairTriggerEfficiencyFail)) {
- OutputLogger.printLog();
- }
-
-
-
- // ==========================================================
- // ==== Process Local Tracked Variables =====================
- // ==========================================================
- if(localStats.getDuration() > localWindowThreshold) {
- // Write a snapshot of the driver to the event stream.
- List<DiagnosticSnapshot> snapshotList = new ArrayList<DiagnosticSnapshot>(2);
- snapshotList.add(localStats.getSnapshot());
- snapshotList.add(globalStats.getSnapshot());
-
- // Push the snapshot to the data stream.
- event.put(diagnosticCollectionName, snapshotList);
-
- // Store values needed to calculate efficiency.
- int[] matched = {
- localStats.getClusterStats().getMatches(),
- localStats.getTriggerStats().getSingles0Stats().getMatchedReconSimulatedTriggers(),
- localStats.getTriggerStats().getSingles1Stats().getMatchedReconSimulatedTriggers(),
- localStats.getTriggerStats().getPair0Stats().getMatchedReconSimulatedTriggers(),
- localStats.getTriggerStats().getPair1Stats().getMatchedReconSimulatedTriggers()
- };
- int[] total = {
- localStats.getClusterStats().getReconClusterCount(),
- localStats.getTriggerStats().getSingles0Stats().getReconSimulatedTriggers(),
- localStats.getTriggerStats().getSingles1Stats().getReconSimulatedTriggers(),
- localStats.getTriggerStats().getPair0Stats().getReconSimulatedTriggers(),
- localStats.getTriggerStats().getPair1Stats().getReconSimulatedTriggers()
- };
-
- // Calculate the efficiencies and upper/lower errors.
- double[] efficiency = new double[5];
- for(int i = 0; i < 5; i++) {
- efficiency[i] = 1.0 * matched[i] / total[i];
- }
-
- // Get the time for the current snapshot. This is the total
- // run time before the snapshot plus half of the snapshot.
- long time = globalStats.getDuration() - (localStats.getDuration() / 2);
-
- // Add them to the appropriate cloud plot.
- for(int i = 0; i < 5; i++) { efficiencyTimeHist[i].fill(time, efficiency[i]); }
-
- // Clear the local statistical data.
- localStats.clear();
- }
-
-
-
- // ==========================================================
- // ==== Write the Candidate Triggers ========================
- // ==========================================================
-
- // Write the candidates to a collection.
- event.put(pairCandidateCollectionName[0], pairCandidates.get(0), LCRelation.class, 0);
- event.put(pairCandidateCollectionName[1], pairCandidates.get(1), LCRelation.class, 0);
- event.put(singlesCandidateCollectionName[0], singlesCandidates.get(0), Cluster.class, clusterCollectionFlag);
- event.put(singlesCandidateCollectionName[1], singlesCandidates.get(1), Cluster.class, clusterCollectionFlag);
- }
+ OutputLogger.printNewLine(2);
+ OutputLogger.println("======================================================================");
+ OutputLogger.println("==== Cluster/Trigger Verification ====================================");
+ OutputLogger.println("======================================================================");
+
+
+
+ // ==========================================================
+ // ==== Obtain SSP and TI Banks =============================
+ // ==========================================================
+
+ // Get the SSP clusters.
+ if(event.hasCollection(GenericObject.class, bankCollectionName)) {
+ // Get the bank list.
+ List<GenericObject> bankList = event.get(GenericObject.class, bankCollectionName);
+
+ // Search through the banks and get the SSP and TI banks.
+ for(GenericObject obj : bankList) {
+ // If this is an SSP bank, parse it.
+ if(AbstractIntData.getTag(obj) == SSPData.BANK_TAG) {
+ sspBank = new SSPData(obj);
+ }
+
+ // Otherwise, if this is a TI bank, parse it.
+ else if(AbstractIntData.getTag(obj) == TIData.BANK_TAG) {
+ tiBank = new TIData(obj);
+
+ tiFlags = new boolean[6];
+ if(tiBank.isPulserTrigger()) {
+ OutputLogger.println("Trigger type :: Pulser");
+ tiFlags[TriggerDiagStats.PULSER] = true;
+ } else if(tiBank.isSingle0Trigger()) {
+ OutputLogger.println("Trigger type :: Singles 1");
+ tiFlags[TriggerDiagStats.SINGLES0] = true;
+ } else if(tiBank.isSingle1Trigger()) {
+ OutputLogger.println("Trigger type :: Singles 2");
+ tiFlags[TriggerDiagStats.SINGLES1] = true;
+ } else if(tiBank.isPair0Trigger()) {
+ OutputLogger.println("Trigger type :: Pair 1");
+ tiFlags[TriggerDiagStats.PAIR0] = true;
+ } else if(tiBank.isPair1Trigger()) {
+ OutputLogger.println("Trigger type :: Pair 2");
+ tiFlags[TriggerDiagStats.PAIR1] = true;
+ } else if(tiBank.isCalibTrigger()) {
+ OutputLogger.println("Trigger type :: Cosmic");
+ tiFlags[TriggerDiagStats.COSMIC] = true;
+ } else {
+ System.err.println("TriggerDiagnosticDriver: Skipping event; no TI trigger source found.");
+ return;
+ }
+
+ // Pass the TI triggers to the run statistical data
+ // manager object.
+ localStats.getTriggerStats().sawTITriggers(tiFlags);
+ globalStats.getTriggerStats().sawTITriggers(tiFlags);
+ }
+ }
+
+ // If there is an SSP bank, get the list of SSP clusters.
+ if(sspBank != null) {
+ sspClusters = sspBank.getClusters();
+ if(sspClusters.size() == 1) {
+ OutputLogger.println("1 SSP cluster found.");
+ } else {
+ OutputLogger.printf("%d SSP clusters found.%n", sspClusters.size());
+ }
+ }
+ }
+
+ // Make sure that both an SSP bank and a TI bank were found.
+ if(tiBank == null || sspBank == null) {
+ System.err.println("TriggerDiagnosticDriver :: SEVERE WARNING :: TI bank or SSP bank missing from event!");
+ return;
+ }
+
+ // Output the event number and information.
+ OutputLogger.printf("Event Number %d (%d)%n", sspBank.getEventNumber(), event.getEventNumber());
+
+
+
+ // ==========================================================
+ // ==== Establish Event Integrity ===========================
+ // ==========================================================
+
+ // Check that all of the required objects are present.
+ if(sspBank == null) {
+ OutputLogger.println("No SSP bank found for this event. No verification will be performed.");
+ if(verbose) { OutputLogger.printLog(); }
+ return;
+ } if(tiBank == null) {
+ OutputLogger.println("No TI bank found for this event. No verification will be performed.");
+ if(verbose) { OutputLogger.printLog(); }
+ return;
+ }
+
+
+
+ // ==========================================================
+ // ==== Check the Noise Level ===============================
+ // ==========================================================
+
+ // Check if there are hits.
+ if(event.hasCollection(CalorimeterHit.class, hitCollectionName)) {
+ // Check if there are more hits than the noise threshold.
+ if(event.get(CalorimeterHit.class, hitCollectionName).size() >= noiseThreshold) {
+ localStats.sawNoiseEvent();
+ globalStats.sawNoiseEvent();
+ OutputLogger.println("Noise event detected. Skipping event...");
+ if(verbose) { OutputLogger.printLog(); }
+ return;
+ }
+ }
+
+
+
+ // ==========================================================
+ // ==== Obtain Reconstructed Clusters =======================
+ // ==========================================================
+
+ // Get the reconstructed clusters.
+ if(event.hasCollection(Cluster.class, clusterCollectionName)) {
+ // Get the reconstructed clusters.
+ List<Cluster> allClusters = event.get(Cluster.class, clusterCollectionName);
+
+ // Keep only the clusters that can be verified.
+ OutputLogger.println();
+ OutputLogger.println("Process cluster for verifiability:");
+ reconClusters.clear();
+ for(Cluster reconCluster : allClusters) {
+ // Check that the cluster is within the safe region of the
+ // FADC readout window. If it is not, it will likely have
+ // inaccurate energy or hit values and may not produce the
+ // expected results.
+ OutputLogger.printf("\t%s", TriggerDiagnosticUtil.clusterToString(reconCluster));
+ if(isVerifiable(reconCluster)) {
+ reconClusters.add(reconCluster);
+ OutputLogger.println(" [ verifiable ]");
+ } else { OutputLogger.println(" [ unverifiable ]"); }
+ }
+
+ // Output the number of verifiable clusters found.
+ if(reconClusters.size() == 1) { OutputLogger.println("1 verifiable reconstructed cluster found."); }
+ else { OutputLogger.printf("%d verifiable reconstructed clusters found.%n", reconClusters.size()); }
+
+ // Output the number of unverifiable clusters found.
+ int unverifiableClusters = allClusters.size() - reconClusters.size();
+ if(unverifiableClusters == 1) { OutputLogger.println("1 unverifiable reconstructed cluster found."); }
+ else { OutputLogger.printf("%d unverifiable reconstructed clusters found.%n", unverifiableClusters); }
+ } else {
+ reconClusters = new ArrayList<Cluster>(0);
+ OutputLogger.printf("No reconstructed clusters were found for collection \"%s\" in this event.%n", clusterCollectionName);
+ }
+
+
+
+ // ==========================================================
+ // ==== Perform Event Verification ==========================
+ // ==========================================================
+
+ // Perform the cluster verification step.
+ if(performClusterVerification) { clusterVerification(); }
+
+ // Get the simulated triggers.
+ if(event.hasCollection(SimTriggerData.class, "SimTriggers")) {
+ List<SimTriggerData> stdList = event.get(SimTriggerData.class, "SimTriggers");
+ triggerData = stdList.get(0);
+ }
+
+ // Construct lists of triggers for the SSP clusters and the
+ // reconstructed clusters.
+ if(performSinglesTriggerVerification) {
+ singlesTriggerVerification();
+ }
+ if(performPairTriggerVerification) {
+ pairTriggerVerification();
+ }
+
+ // Track how many events failed due to each type of verification.
+ if(clusterFail) {
+ localStats.failedClusterEvent();
+ globalStats.failedClusterEvent();
+ } if(pairInternalFail || pairEfficiencyFail) {
+ localStats.failedPairEvent();
+ globalStats.failedPairEvent();
+ } if(singlesInternalFail || singlesEfficiencyFail) {
+ localStats.failedSinglesEvent();
+ globalStats.failedSinglesEvent();
+ }
+
+
+
+ // ==========================================================
+ // ==== Perform Event Write-Out =============================
+ // ==========================================================
+
+ if(verbose ||(clusterFail && printClusterFail) ||
+ (singlesInternalFail && printSinglesTriggerInternalFail) ||
+ (singlesEfficiencyFail && printSinglesTriggerEfficiencyFail) ||
+ (pairInternalFail && printPairTriggerInternalFail) ||
+ (pairEfficiencyFail && printPairTriggerEfficiencyFail)) {
+ OutputLogger.printLog();
+ }
+
+
+
+ // ==========================================================
+ // ==== Process Local Tracked Variables =====================
+ // ==========================================================
+ if(localStats.getDuration() > localWindowThreshold) {
+ // Write a snapshot of the driver to the event stream.
+ List<DiagnosticSnapshot> snapshotList = new ArrayList<DiagnosticSnapshot>(2);
+ snapshotList.add(localStats.getSnapshot());
+ snapshotList.add(globalStats.getSnapshot());
+
+ // Push the snapshot to the data stream.
+ event.put(diagnosticCollectionName, snapshotList);
+
+ // Store values needed to calculate efficiency.
+ int[] matched = {
+ localStats.getClusterStats().getMatches(),
+ localStats.getTriggerStats().getSingles0Stats().getMatchedReconSimulatedTriggers(),
+ localStats.getTriggerStats().getSingles1Stats().getMatchedReconSimulatedTriggers(),
+ localStats.getTriggerStats().getPair0Stats().getMatchedReconSimulatedTriggers(),
+ localStats.getTriggerStats().getPair1Stats().getMatchedReconSimulatedTriggers()
+ };
+ int[] total = {
+ localStats.getClusterStats().getReconClusterCount(),
+ localStats.getTriggerStats().getSingles0Stats().getReconSimulatedTriggers(),
+ localStats.getTriggerStats().getSingles1Stats().getReconSimulatedTriggers(),
+ localStats.getTriggerStats().getPair0Stats().getReconSimulatedTriggers(),
+ localStats.getTriggerStats().getPair1Stats().getReconSimulatedTriggers()
+ };
+
+ // Calculate the efficiencies and upper/lower errors.
+ double[] efficiency = new double[5];
+ for(int i = 0; i < 5; i++) {
+ efficiency[i] = 1.0 * matched[i] / total[i];
+ }
+
+ // Get the time for the current snapshot. This is the total
+ // run time before the snapshot plus half of the snapshot.
+ long time = globalStats.getDuration() - (localStats.getDuration() / 2);
+
+ // Add them to the appropriate cloud plot.
+ for(int i = 0; i < 5; i++) { efficiencyTimeHist[i].fill(time, efficiency[i]); }
+
+ // Clear the local statistical data.
+ localStats.clear();
+ }
+
+
+
+ // ==========================================================
+ // ==== Write the Candidate Triggers ========================
+ // ==========================================================
+
+ // Write the candidates to a collection.
+ event.put(pairCandidateCollectionName[0], pairCandidates.get(0), LCRelation.class, 0);
+ event.put(pairCandidateCollectionName[1], pairCandidates.get(1), LCRelation.class, 0);
+ event.put(singlesCandidateCollectionName[0], singlesCandidates.get(0), Cluster.class, clusterCollectionFlag);
+ event.put(singlesCandidateCollectionName[1], singlesCandidates.get(1), Cluster.class, clusterCollectionFlag);
+ }
- public void setPrintResultsEveryNEvents(int n) {
- statPrintInterval = n;
- }
-
- public void setPrintOnClusterFailure(boolean state) {
- printClusterFail = state;
- }
-
- public void setPrintOnSinglesEfficiencyFailure(boolean state) {
- printSinglesTriggerEfficiencyFail = state;
- }
-
- public void setPrintOnSinglesSSPFailure(boolean state) {
- printSinglesTriggerInternalFail = state;
- }
-
- public void setPrintOnPairEfficiencyFailure(boolean state) {
- printPairTriggerEfficiencyFail = state;
- }
-
- public void setPrintOnPairSSPFailure(boolean state) {
- printPairTriggerInternalFail = state;
- }
-
- public void setVerbose(boolean state) {
- verbose = state;
- }
-
- public void setHitCollectionName(String hitCollectionName) {
- this.hitCollectionName = hitCollectionName;
- }
-
- public void setClusterCollectionName(String clusterCollectionName) {
- this.clusterCollectionName = clusterCollectionName;
- }
-
- public void setBankCollectionName(String bankCollectionName) {
- this.bankCollectionName = bankCollectionName;
- }
-
- public void setNoiseThresholdCount(int noiseHits) {
- noiseThreshold = noiseHits;
- }
-
- public void setHitAcceptanceWindow(int window) {
- hitAcceptance = window;
- }
-
- public void setEnergyAcceptanceWindow(double window) {
- energyAcceptance = window;
- }
-
- public void setEnforceStrictTimeCompliance(boolean state) {
- enforceTimeCompliance = state;
- }
-
- public void setReadDAQConfig(boolean state) {
- readDAQConfig = state;
- }
-
- public void setLocalWindowThresholdMilliseconds(int localWindowThreshold) {
- this.localWindowThreshold = localWindowThreshold;
- }
-
- /**
- * Attempts to match all reconstructed clusters that are safely
- * within the integration window with clusters reported by the SSP.
- * Method also tracks the ratio of valid reconstructed clusters to
- * matches found.<br/>
- * <br/>
- * Note that unmatched SSP clusters are ignored. Since these may
- * or may not correspond to reconstructed clusters that occur in
- * the forbidden time region, it is impossible to say whether or
- * not these legitimately failed to match or not.
- */
- private void clusterVerification() {
- // ==========================================================
- // ==== Initialize Cluster Verification =====================
- // ==========================================================
-
- // Print the cluster verification header.
- OutputLogger.printNewLine(2);
- OutputLogger.println("======================================================================");
- OutputLogger.println("=== Cluster Verification =============================================");
- OutputLogger.println("======================================================================");
-
-
-
- // ==========================================================
- // ==== Perform Cluster Matching ============================
- // ==========================================================
-
- // Track the number of cluster pairs that were matched and that
- // failed by failure type.
- DetailedClusterEvent event;
-
- if(enforceTimeCompliance) {
- event = matchClustersTimeCompliant(reconClusters, sspClusters, energyAcceptance, hitAcceptance);
- } else {
- event = matchClusters(reconClusters, sspClusters, energyAcceptance, hitAcceptance);
- }
-
- // Add the event results to the global results.
- localStats.getClusterStats().addEvent(event);
- globalStats.getClusterStats().addEvent(event);
- localStats.getClusterStats().sawSSPClusters(sspClusters.size());
- globalStats.getClusterStats().sawSSPClusters(sspClusters.size());
- localStats.getClusterStats().sawReconClusters(reconClusters.size());
- globalStats.getClusterStats().sawReconClusters(reconClusters.size());
-
-
-
- // ==========================================================
- // ==== Output Event Summary ================================
- // ==========================================================
-
- // Print the valid reconstructed clusters and populate their
- // distribution graphs.
- OutputLogger.println();
- OutputLogger.println("Verified Reconstructed Clusters:");
- if(!reconClusters.isEmpty()) {
- for(Cluster reconCluster : reconClusters) {
- OutputLogger.printf("\t%s%n", TriggerDiagnosticUtil.clusterToString(reconCluster));
- }
- } else { OutputLogger.println("\tNone"); }
-
- // Print the SSP clusters and populate their distribution graphs.
- OutputLogger.println("SSP Clusters:");
- if(!sspClusters.isEmpty()) {
- for(SSPCluster sspCluster : sspClusters) {
- OutputLogger.printf("\t%s%n", TriggerDiagnosticUtil.clusterToString(sspCluster));
- }
- } else { OutputLogger.println("\tNone"); }
-
- // Print the matched clusters.
- OutputLogger.println("Matched Clusters:");
- if(event.getMatches() != 0) {
- // Iterate over the matched pairs.
- for(ClusterMatchedPair pair : event.getClusterPairs()) {
- // If the pair is a match, print it out.
- if(pair.isMatch()) {
- OutputLogger.printf("\t%s --> %s%n",
- TriggerDiagnosticUtil.clusterToString(pair.getReconstructedCluster()),
- TriggerDiagnosticUtil.clusterToString(pair.getSSPCluster()));
- }
- }
- }
- else { OutputLogger.println("\tNone"); }
-
- // Print event statistics.
- OutputLogger.println();
- OutputLogger.println("Event Statistics:");
- OutputLogger.printf("\tRecon Clusters :: %d%n", reconClusters.size());
- OutputLogger.printf("\tClusters Matched :: %d%n", event.getMatches());
- OutputLogger.printf("\tFailed (Position) :: %d%n", event.getPositionFailures());
- OutputLogger.printf("\tFailed (Time) :: %d%n", event.getTimeFailures());
- OutputLogger.printf("\tFailed (Energy) :: %d%n", event.getEnergyFailures());
- OutputLogger.printf("\tFailed (Hit Count) :: %d%n", event.getHitCountFailures());
- OutputLogger.printf("\tCluster Efficiency :: %3.0f%%%n", 100.0 * event.getMatches() / reconClusters.size());
-
- // Note whether there was a cluster match failure.
- if(event.isFailState() || event.getMatches() - reconClusters.size() != 0) {
- clusterFail = true;
- }
-
-
-
- // TEMP :: Populate the cluster diagnostic plots.
-
- // Populate the ALL cluster plots.
- for(Cluster cluster : reconClusters) {
- clusterHitPlot[RECON][ALL].fill(cluster.getCalorimeterHits().size());
- clusterEnergyPlot[RECON][ALL].fill(cluster.getEnergy());
- clusterTimePlot[RECON][ALL].fill(cluster.getCalorimeterHits().get(0).getTime());
- Point position = TriggerDiagnosticUtil.getClusterPosition(cluster);
- clusterPositionPlot[RECON][ALL].fill(position.x, position.y);
- }
- for(SSPCluster cluster : sspClusters) {
- clusterHitPlot[SSP][ALL].fill(cluster.getHitCount());
- clusterEnergyPlot[SSP][ALL].fill(cluster.getEnergy());
- clusterTimePlot[SSP][ALL].fill(cluster.getTime());
- clusterPositionPlot[SSP][ALL].fill(cluster.getXIndex(), cluster.getYIndex());
- }
-
- // Populate the matched and failed plots.
- for(ClusterMatchedPair pair : event.getClusterPairs()) {
- if(pair.getFirstElement() != null && pair.getSecondElement() != null) {
- double energyDiff = pair.getSecondElement().getEnergy() - pair.getFirstElement().getEnergy();
- int hitDiff = pair.getSecondElement().getHitCount() - pair.getFirstElement().getCalorimeterHits().size();
- energyhitDiffPlot[ALL].fill(energyDiff, hitDiff);
- }
-
- if(pair.isMatch()) {
- if(pair.getFirstElement() != null) {
- clusterHitPlot[RECON][MATCHED].fill(pair.getFirstElement().getCalorimeterHits().size());
- clusterEnergyPlot[RECON][MATCHED].fill(pair.getFirstElement().getEnergy());
- clusterTimePlot[RECON][MATCHED].fill(pair.getFirstElement().getCalorimeterHits().get(0).getTime());
- Point position = TriggerDiagnosticUtil.getClusterPosition(pair.getFirstElement());
- clusterPositionPlot[RECON][MATCHED].fill(position.x, position.y);
- } if(pair.getSecondElement() != null) {
- clusterHitPlot[SSP][MATCHED].fill(pair.getSecondElement().getHitCount());
- clusterEnergyPlot[SSP][MATCHED].fill(pair.getSecondElement().getEnergy());
- clusterTimePlot[SSP][MATCHED].fill(pair.getSecondElement().getTime());
- clusterPositionPlot[SSP][MATCHED].fill(pair.getSecondElement().getXIndex(), pair.getSecondElement().getYIndex());
- } if(pair.getFirstElement() != null && pair.getSecondElement() != null) {
- double energyDiff = pair.getSecondElement().getEnergy() - pair.getFirstElement().getEnergy();
- int hitDiff = pair.getSecondElement().getHitCount() - pair.getFirstElement().getCalorimeterHits().size();
- energyhitDiffPlot[MATCHED].fill(energyDiff, hitDiff);
- }
- } else {
- if(pair.getFirstElement() != null) {
- clusterHitPlot[RECON][FAILED].fill(pair.getFirstElement().getCalorimeterHits().size());
- clusterEnergyPlot[RECON][FAILED].fill(pair.getFirstElement().getEnergy());
- clusterTimePlot[RECON][FAILED].fill(pair.getFirstElement().getCalorimeterHits().get(0).getTime());
- Point position = TriggerDiagnosticUtil.getClusterPosition(pair.getFirstElement());
- clusterPositionPlot[RECON][FAILED].fill(position.x, position.y);
- } if(pair.getSecondElement() != null) {
- clusterHitPlot[SSP][FAILED].fill(pair.getSecondElement().getHitCount());
- clusterEnergyPlot[SSP][FAILED].fill(pair.getSecondElement().getEnergy());
- clusterTimePlot[SSP][FAILED].fill(pair.getSecondElement().getTime());
- clusterPositionPlot[SSP][FAILED].fill(pair.getSecondElement().getXIndex(), pair.getSecondElement().getYIndex());
- } if(pair.getFirstElement() != null && pair.getSecondElement() != null) {
- double energyDiff = pair.getSecondElement().getEnergy() - pair.getFirstElement().getEnergy();
- int hitDiff = pair.getSecondElement().getHitCount() - pair.getFirstElement().getCalorimeterHits().size();
- energyhitDiffPlot[FAILED].fill(energyDiff, hitDiff);
- }
- }
- }
- }
-
- /**
+ public void setPrintResultsEveryNEvents(int n) {
+ statPrintInterval = n;
+ }
+
+ public void setPrintOnClusterFailure(boolean state) {
+ printClusterFail = state;
+ }
+
+ public void setPrintOnSinglesEfficiencyFailure(boolean state) {
+ printSinglesTriggerEfficiencyFail = state;
+ }
+
+ public void setPrintOnSinglesSSPFailure(boolean state) {
+ printSinglesTriggerInternalFail = state;
+ }
+
+ public void setPrintOnPairEfficiencyFailure(boolean state) {
+ printPairTriggerEfficiencyFail = state;
+ }
+
+ public void setPrintOnPairSSPFailure(boolean state) {
+ printPairTriggerInternalFail = state;
+ }
+
+ public void setVerbose(boolean state) {
+ verbose = state;
+ }
+
+ public void setHitCollectionName(String hitCollectionName) {
+ this.hitCollectionName = hitCollectionName;
+ }
+
+ public void setClusterCollectionName(String clusterCollectionName) {
+ this.clusterCollectionName = clusterCollectionName;
+ }
+
+ public void setBankCollectionName(String bankCollectionName) {
+ this.bankCollectionName = bankCollectionName;
+ }
+
+ public void setNoiseThresholdCount(int noiseHits) {
+ noiseThreshold = noiseHits;
+ }
+
+ public void setHitAcceptanceWindow(int window) {
+ hitAcceptance = window;
+ }
+
+ public void setEnergyAcceptanceWindow(double window) {
+ energyAcceptance = window;
+ }
+
+ public void setEnforceStrictTimeCompliance(boolean state) {
+ enforceTimeCompliance = state;
+ }
+
+ public void setReadDAQConfig(boolean state) {
+ readDAQConfig = state;
+ }
+
+ public void setLocalWindowThresholdMilliseconds(int localWindowThreshold) {
+ this.localWindowThreshold = localWindowThreshold;
+ }
+
+ /**
+ * Attempts to match all reconstructed clusters that are safely
+ * within the integration window with clusters reported by the SSP.
+ * Method also tracks the ratio of valid reconstructed clusters to
+ * matches found.<br/>
+ * <br/>
+ * Note that unmatched SSP clusters are ignored. Since these may
+ * or may not correspond to reconstructed clusters that occur in
+ * the forbidden time region, it is impossible to say whether or
+ * not these legitimately failed to match or not.
+ */
+ private void clusterVerification() {
+ // ==========================================================
+ // ==== Initialize Cluster Verification =====================
+ // ==========================================================
+
+ // Print the cluster verification header.
+ OutputLogger.printNewLine(2);
+ OutputLogger.println("======================================================================");
+ OutputLogger.println("=== Cluster Verification =============================================");
+ OutputLogger.println("======================================================================");
+
+
+
+ // ==========================================================
+ // ==== Perform Cluster Matching ============================
+ // ==========================================================
+
+ // Track the number of cluster pairs that were matched and that
+ // failed by failure type.
+ DetailedClusterEvent event;
+
+ if(enforceTimeCompliance) {
+ event = matchClustersTimeCompliant(reconClusters, sspClusters, energyAcceptance, hitAcceptance);
+ } else {
+ event = matchClusters(reconClusters, sspClusters, energyAcceptance, hitAcceptance);
+ }
+
+ // Add the event results to the global results.
+ localStats.getClusterStats().addEvent(event);
+ globalStats.getClusterStats().addEvent(event);
+ localStats.getClusterStats().sawSSPClusters(sspClusters.size());
+ globalStats.getClusterStats().sawSSPClusters(sspClusters.size());
+ localStats.getClusterStats().sawReconClusters(reconClusters.size());
+ globalStats.getClusterStats().sawReconClusters(reconClusters.size());
+
+
+
+ // ==========================================================
+ // ==== Output Event Summary ================================
+ // ==========================================================
+
+ // Print the valid reconstructed clusters and populate their
+ // distribution graphs.
+ OutputLogger.println();
+ OutputLogger.println("Verified Reconstructed Clusters:");
+ if(!reconClusters.isEmpty()) {
+ for(Cluster reconCluster : reconClusters) {
+ OutputLogger.printf("\t%s%n", TriggerDiagnosticUtil.clusterToString(reconCluster));
+ }
+ } else { OutputLogger.println("\tNone"); }
+
+ // Print the SSP clusters and populate their distribution graphs.
+ OutputLogger.println("SSP Clusters:");
+ if(!sspClusters.isEmpty()) {
+ for(SSPCluster sspCluster : sspClusters) {
+ OutputLogger.printf("\t%s%n", TriggerDiagnosticUtil.clusterToString(sspCluster));
+ }
+ } else { OutputLogger.println("\tNone"); }
+
+ // Print the matched clusters.
+ OutputLogger.println("Matched Clusters:");
+ if(event.getMatches() != 0) {
+ // Iterate over the matched pairs.
+ for(ClusterMatchedPair pair : event.getClusterPairs()) {
+ // If the pair is a match, print it out.
+ if(pair.isMatch()) {
+ OutputLogger.printf("\t%s --> %s%n",
+ TriggerDiagnosticUtil.clusterToString(pair.getReconstructedCluster()),
+ TriggerDiagnosticUtil.clusterToString(pair.getSSPCluster()));
+ }
+ }
+ }
+ else { OutputLogger.println("\tNone"); }
+
+ // Print event statistics.
+ OutputLogger.println();
+ OutputLogger.println("Event Statistics:");
+ OutputLogger.printf("\tRecon Clusters :: %d%n", reconClusters.size());
+ OutputLogger.printf("\tClusters Matched :: %d%n", event.getMatches());
+ OutputLogger.printf("\tFailed (Position) :: %d%n", event.getPositionFailures());
+ OutputLogger.printf("\tFailed (Time) :: %d%n", event.getTimeFailures());
+ OutputLogger.printf("\tFailed (Energy) :: %d%n", event.getEnergyFailures());
+ OutputLogger.printf("\tFailed (Hit Count) :: %d%n", event.getHitCountFailures());
+ OutputLogger.printf("\tCluster Efficiency :: %3.0f%%%n", 100.0 * event.getMatches() / reconClusters.size());
+
+ // Note whether there was a cluster match failure.
+ if(event.isFailState() || event.getMatches() - reconClusters.size() != 0) {
+ clusterFail = true;
+ }
+
+
+
+ // TEMP :: Populate the cluster diagnostic plots.
+
+ // Populate the ALL cluster plots.
+ for(Cluster cluster : reconClusters) {
+ clusterHitPlot[RECON][ALL].fill(cluster.getCalorimeterHits().size());
+ clusterEnergyPlot[RECON][ALL].fill(cluster.getEnergy());
+ clusterTimePlot[RECON][ALL].fill(cluster.getCalorimeterHits().get(0).getTime());
+ Point position = TriggerDiagnosticUtil.getClusterPosition(cluster);
+ clusterPositionPlot[RECON][ALL].fill(position.x, position.y);
+ }
+ for(SSPCluster cluster : sspClusters) {
+ clusterHitPlot[SSP][ALL].fill(cluster.getHitCount());
+ clusterEnergyPlot[SSP][ALL].fill(cluster.getEnergy());
+ clusterTimePlot[SSP][ALL].fill(cluster.getTime());
+ clusterPositionPlot[SSP][ALL].fill(cluster.getXIndex(), cluster.getYIndex());
+ }
+
+ // Populate the matched and failed plots.
+ for(ClusterMatchedPair pair : event.getClusterPairs()) {
+ if(pair.getFirstElement() != null && pair.getSecondElement() != null) {
+ double energyDiff = pair.getSecondElement().getEnergy() - pair.getFirstElement().getEnergy();
+ int hitDiff = pair.getSecondElement().getHitCount() - pair.getFirstElement().getCalorimeterHits().size();
+ energyhitDiffPlot[ALL].fill(energyDiff, hitDiff);
+ }
+
+ if(pair.isMatch()) {
+ if(pair.getFirstElement() != null) {
+ clusterHitPlot[RECON][MATCHED].fill(pair.getFirstElement().getCalorimeterHits().size());
+ clusterEnergyPlot[RECON][MATCHED].fill(pair.getFirstElement().getEnergy());
+ clusterTimePlot[RECON][MATCHED].fill(pair.getFirstElement().getCalorimeterHits().get(0).getTime());
+ Point position = TriggerDiagnosticUtil.getClusterPosition(pair.getFirstElement());
+ clusterPositionPlot[RECON][MATCHED].fill(position.x, position.y);
+ } if(pair.getSecondElement() != null) {
+ clusterHitPlot[SSP][MATCHED].fill(pair.getSecondElement().getHitCount());
+ clusterEnergyPlot[SSP][MATCHED].fill(pair.getSecondElement().getEnergy());
+ clusterTimePlot[SSP][MATCHED].fill(pair.getSecondElement().getTime());
+ clusterPositionPlot[SSP][MATCHED].fill(pair.getSecondElement().getXIndex(), pair.getSecondElement().getYIndex());
+ } if(pair.getFirstElement() != null && pair.getSecondElement() != null) {
+ double energyDiff = pair.getSecondElement().getEnergy() - pair.getFirstElement().getEnergy();
+ int hitDiff = pair.getSecondElement().getHitCount() - pair.getFirstElement().getCalorimeterHits().size();
+ energyhitDiffPlot[MATCHED].fill(energyDiff, hitDiff);
+ }
+ } else {
+ if(pair.getFirstElement() != null) {
+ clusterHitPlot[RECON][FAILED].fill(pair.getFirstElement().getCalorimeterHits().size());
+ clusterEnergyPlot[RECON][FAILED].fill(pair.getFirstElement().getEnergy());
+ clusterTimePlot[RECON][FAILED].fill(pair.getFirstElement().getCalorimeterHits().get(0).getTime());
+ Point position = TriggerDiagnosticUtil.getClusterPosition(pair.getFirstElement());
+ clusterPositionPlot[RECON][FAILED].fill(position.x, position.y);
+ } if(pair.getSecondElement() != null) {
+ clusterHitPlot[SSP][FAILED].fill(pair.getSecondElement().getHitCount());
+ clusterEnergyPlot[SSP][FAILED].fill(pair.getSecondElement().getEnergy());
+ clusterTimePlot[SSP][FAILED].fill(pair.getSecondElement().getTime());
+ clusterPositionPlot[SSP][FAILED].fill(pair.getSecondElement().getXIndex(), pair.getSecondElement().getYIndex());
+ } if(pair.getFirstElement() != null && pair.getSecondElement() != null) {
+ double energyDiff = pair.getSecondElement().getEnergy() - pair.getFirstElement().getEnergy();
+ int hitDiff = pair.getSecondElement().getHitCount() - pair.getFirstElement().getCalorimeterHits().size();
+ energyhitDiffPlot[FAILED].fill(energyDiff, hitDiff);
+ }
+ }
+ }
+ }
+
+ /**
* Performs cluster matching between a collection of reconstructed
- * clusters and a collection of SSP clusters with an algorithm that
- * ignores the times reported for each cluster.
- * @param reconClusters - A collection of reconstructed clusters.
- * @param sspClusters - A collection of SSP clusters.
- * @param energyWindow - The window of allowed deviation between
- * the reconstructed cluster and SSP cluster energies.
- * @param hitWindow - The window of allowed deviation between
- * the reconstructed cluster and SSP cluster hit counts.
- * @return Returns the cluster matching results stored inside a
- * <code>clusterMatchEvent</code> object.
- */
- private static final DetailedClusterEvent matchClusters(Collection<Cluster> reconClusters,
- Collection<SSPCluster> sspClusters, double energyWindow, int hitWindow) {
- // Track the number of cluster pairs that were matched and that
- // failed by failure type.
- DetailedClusterEvent event = new DetailedClusterEvent();
-
- // Create maps to link cluster position to the list of clusters
- // that were found at that location.
- Map<Point, List<Cluster>> reconClusterMap = new HashMap<Point, List<Cluster>>(reconClusters.size());
- Map<Point, List<SSPCluster>> sspClusterMap = new HashMap<Point, List<SSPCluster>>(reconClusters.size());
-
- // Populate the reconstructed cluster map.
- for(Cluster reconCluster : reconClusters) {
- // Get the cluster position.
- Point position = new Point(TriggerDiagnosticUtil.getXIndex(reconCluster),
- TriggerDiagnosticUtil.getYIndex(reconCluster));
-
- // Get the list for this cluster position.
- List<Cluster> reconList = reconClusterMap.get(position);
- if(reconList == null) {
- reconList = new ArrayList<Cluster>();
- reconClusterMap.put(position, reconList);
- }
-
- // Add the cluster to the list.
- reconList.add(reconCluster);
- }
-
- // Populate the SSP cluster map.
- for(SSPCluster sspCluster : sspClusters) {
- // Get the cluster position.
- Point position = new Point(sspCluster.getXIndex(), sspCluster.getYIndex());
-
- // Get the list for this cluster position.
- List<SSPCluster> sspList = sspClusterMap.get(position);
- if(sspList == null) {
- sspList = new ArrayList<SSPCluster>();
- sspClusterMap.put(position, sspList);
- }
-
- // Add the cluster to the list.
- sspList.add(sspCluster);
- }
-
- // For each reconstructed cluster, attempt to match the clusters
- // with SSP clusters at the same position.
- positionLoop:
- for(Entry<Point, List<Cluster>> clusterSet : reconClusterMap.entrySet()) {
- // Get the reconstructed and SSP clusters at this position.
- List<Cluster> reconList = clusterSet.getValue();
- List<SSPCluster> sspList = sspClusterMap.get(clusterSet.getKey());
-
- // Print the crystal position header.
- OutputLogger.println();
- OutputLogger.printf("Considering clusters at (%3d, %3d)%n", clusterSet.getKey().x, clusterSet.getKey().y);
-
- // If there are no SSP clusters, then matching fails by
- // reason of position. The remainder of the loop may be
- // skipped, since there is nothing to check.
- if(sspList == null || sspList.isEmpty()) {
- event.pairFailPosition(reconList.size());
- continue positionLoop;
- }
-
- // Get all possible permutations of SSP clusters.
- List<List<Pair<Cluster, SSPCluster>>> permutations = getPermutations(reconList, sspList);
-
- // Print the information for this crystal position.
- OutputLogger.printf("\tRecon Clusters :: %d%n", reconList.size());
- OutputLogger.printf("\tSSP Clusters :: %d%n", sspList.size());
- OutputLogger.printf("\tPermutations :: %d%n", permutations.size());
-
- // Track the plotted values for the current best permutation.
- DetailedClusterEvent bestPerm = null;
-
- // Iterate over the permutations and find the permutation
- // that produces the best possible result when compared to
- // the reconstructed clusters.
- int permIndex = 0;
- for(List<Pair<Cluster, SSPCluster>> pairs : permutations) {
- // Update the current permutation number.
- permIndex++;
-
- // Track the plot values for this permutation.
- DetailedClusterEvent perm = new DetailedClusterEvent();
-
- // Try to match each pair.
- pairLoop:
- for(Pair<Cluster, SSPCluster> pair : pairs) {
- // Print the current reconstructed/SSP cluster pair.
- OutputLogger.printf("\tP%d :: %s --> %s", permIndex,
- pair.getFirstElement() == null ? "None" : TriggerDiagnosticUtil.clusterToString(pair.getFirstElement()),
- pair.getSecondElement() == null ? "None" : TriggerDiagnosticUtil.clusterToString(pair.getSecondElement()));
-
- // If either cluster in the pair is null, there
- // are not enough clusters to perform this match.
- if(pair.getFirstElement() == null || pair.getSecondElement() == null) {
- // Log the result.
- OutputLogger.printf(" [ %18s ]%n", "failure: unpaired");
-
- // An unpaired SSP cluster does not necessarily
- // represent a problem. Often, this just means
- // that the SSP cluster's matching reconstructed
- // cluster is outside the verification window.
- if(pair.getSecondElement() == null) {
- perm.pairFailPosition(pair.getFirstElement(), pair.getSecondElement());
- }
-
- // Skip the rest of the checks.
- continue pairLoop;
- }
-
- // Check if the reconstructed cluster has an energy
- // within the allotted threshold of the SSP cluster.
- if(pair.getSecondElement().getEnergy() >= pair.getFirstElement().getEnergy() - energyWindow &&
- pair.getSecondElement().getEnergy() <= pair.getFirstElement().getEnergy() + energyWindow) {
-
- // Check that the hit count of the reconstructed
- // is within the allotted threshold of the SSP
- // cluster.
- if(pair.getSecondElement().getHitCount() >= pair.getFirstElement().getCalorimeterHits().size() - hitWindow &&
- pair.getSecondElement().getHitCount() <= pair.getFirstElement().getCalorimeterHits().size() + hitWindow) {
- // Designate the pair as a match.
- perm.pairMatch(pair.getFirstElement(), pair.getSecondElement());
- OutputLogger.printf(" [ %18s ]%n", "success: matched");
- } else {
- perm.pairFailHitCount(pair.getFirstElement(), pair.getSecondElement());
- OutputLogger.printf(" [ %18s ]%n", "failure: hit count");
- } // End hit count check.
- } else {
- perm.pairFailEnergy(pair.getFirstElement(), pair.getSecondElement());
- OutputLogger.printf(" [ %18s ]%n", "failure: energy");
- } // End energy check.
- } // End Pair Loop
-
- // Print the results of the permutation.
- OutputLogger.printf("\t\tPermutation Matched :: %d%n", perm.getMatches());
- OutputLogger.printf("\t\tPermutation Energy :: %d%n", perm.getEnergyFailures());
- OutputLogger.printf("\t\tPermutation Hit Count :: %d%n", perm.getHitCountFailures());
-
- // Check whether the results from this permutation
- // exceed the quality of the last best results. A
- // greater number of matches is always better. If the
- // matches are the same, select the one with fewer
- // failures due to energy.
- bestPerm = getBestPermutation(bestPerm, perm);
- } // End Permutation Loop
-
- // Print the final results for the position.
- OutputLogger.printf("\tPosition Matched :: %d%n", bestPerm.getMatches());
- OutputLogger.printf("\tPosition Energy :: %d%n", bestPerm.getEnergyFailures());
- OutputLogger.printf("\tPosition Hit Count :: %d%n", bestPerm.getHitCountFailures());
-
- // Add the results from the best-matched permutation
- // to the event efficiency results.
- event.addEvent(bestPerm);
- } // End Crystal Position Loop
-
- // Return the cluster match summary.
- return event;
- }
-
- /**
- * Performs cluster matching between a collection of reconstructed
- * clusters and a collection of SSP clusters using the strictly
- * time-compliant algorithm.
- * @param reconClusters - A collection of reconstructed clusters.
- * @param sspClusters - A collection of SSP clusters.
- * @param energyWindow - The window of allowed deviation between
- * the reconstructed cluster and SSP cluster energies.
- * @param hitWindow - The window of allowed deviation between
- * the reconstructed cluster and SSP cluster hit counts.
- * @return Returns the cluster matching results stored inside a
- * <code>clusterMatchEvent</code> object.
- */
- private static final DetailedClusterEvent matchClustersTimeCompliant(Collection<Cluster> reconClusters,
- Collection<SSPCluster> sspClusters, double energyWindow, int hitWindow) {
- // Track the number of cluster pairs that were matched and that
- // failed by failure type.
- DetailedClusterEvent event = new DetailedClusterEvent();
-
- // Store the clusters which have been successfully paired.
- Set<SSPCluster> sspMatched = new HashSet<SSPCluster>(sspClusters.size());
-
- // Find reconstructed/SSP cluster matched pairs.
- reconLoop:
- for(Cluster reconCluster : reconClusters) {
- // Track whether a position-matched cluster was found.
- boolean matchedPosition = false;
-
- // VERBOSE :: Output the cluster being matched.
- OutputLogger.printf("Considering %s%n", TriggerDiagnosticUtil.clusterToString(reconCluster));
-
- // Search through the SSP clusters for a matching cluster.
- sspLoop:
- for(SSPCluster sspCluster : sspClusters) {
- // VERBOSE :: Output the SSP cluster being considered.
- OutputLogger.printf("\t%s ", TriggerDiagnosticUtil.clusterToString(sspCluster));
-
- // If this cluster has been paired, skip it.
- if(sspMatched.contains(sspCluster)) {
- OutputLogger.printf("[ %7s; %9s ]%n", "fail", "matched");
- continue sspLoop;
- }
-
- // Matched clusters must have the same position.
- if(TriggerDiagnosticUtil.getXIndex(reconCluster) != sspCluster.getXIndex()
- || TriggerDiagnosticUtil.getYIndex(reconCluster) != sspCluster.getYIndex()) {
- OutputLogger.printf("[ %7s; %9s ]%n", "fail", "position");
- continue sspLoop;
- }
-
- // Note that a cluster was found at this position.
- matchedPosition = true;
-
- // Matched clusters must have the same time-stamp.
- if(reconCluster.getCalorimeterHits().get(0).getTime() != sspCluster.getTime()) {
- OutputLogger.printf("[ %7s; %9s ]%n", "fail", "time");
- continue sspLoop;
- }
-
- // Clusters that pass all of the above checks are the
- // same cluster.
- sspMatched.add(sspCluster);
-
- // Check that the clusters are sufficiently close in
- // energy to one another.
- if(sspCluster.getEnergy() >= reconCluster.getEnergy() - energyWindow
- && sspCluster.getEnergy() <= reconCluster.getEnergy() + energyWindow) {
- // If a cluster matches in energy, check that it
- // is also sufficiently close in hit count.
- if(sspCluster.getHitCount() >= reconCluster.getCalorimeterHits().size() - hitWindow &&
- sspCluster.getHitCount() <= reconCluster.getCalorimeterHits().size() + hitWindow) {
- // The cluster is a match.
- event.pairMatch(reconCluster, sspCluster);
- OutputLogger.printf("[ %7s; %9s ]%n", "success", "matched");
- continue reconLoop;
- } else {
- event.pairFailHitCount(reconCluster, sspCluster);
- OutputLogger.printf("[ %7s; %9s ]%n", "fail", "hit count");
- continue reconLoop;
- } // End hit count check.
- } else {
- event.pairFailEnergy(reconCluster, sspCluster);
- OutputLogger.printf("[ %7s; %9s ]%n", "fail", "energy");
- continue reconLoop;
- } // End energy check.
- }// End SSP loop.
-
- // If the reconstructed cluster has not been matched, check
- // if a cluster was found at the same position. If not, then
- // the cluster fails by reason of position.
- if(!matchedPosition) {
- event.pairFailPosition(reconCluster, null);
- }
-
- // Otherwise, the cluster had a potential matched, but the
- // time-stamps were off. The cluster fails by reason of time.
- else {
- event.pairFailTime(reconCluster, null);
- }
- } // End recon loop.
-
- // Return the populated match event.
- return event;
- }
-
- /**
- * Checks triggers simulated on SSP clusters against the SSP bank's
- * reported triggers to verify that the trigger is correctly applying
- * cuts to the clusters it sees. Additionally compares triggers
- * simulated on reconstructed clusters to measure trigger efficiency.
- */
- private void singlesTriggerVerification() {
- // Create lists of generic triggers.
- List<List<? extends Trigger<?>>> sspTriggerList = new ArrayList<List<? extends Trigger<?>>>(2);
- List<List<? extends Trigger<?>>> reconTriggerList = new ArrayList<List<? extends Trigger<?>>>(2);
-
- // Convert the simulated triggers to generic versions and add
- // them to the generic list.
- sspTriggerList.add(triggerData.getSimSSPTriggers().getSingles0Triggers());
- sspTriggerList.add(triggerData.getSimSSPTriggers().getSingles1Triggers());
- reconTriggerList.add(triggerData.getSimReconTriggers().getSingles0Triggers());
- reconTriggerList.add(triggerData.getSimReconTriggers().getSingles1Triggers());
-
- // Run generic trigger verification.
- triggerVerification(sspTriggerList, reconTriggerList, true);
- }
-
- /**
- * Checks triggers simulated on SSP clusters against the SSP bank's
- * reported triggers to verify that the trigger is correctly applying
- * cuts to the clusters it sees. Additionally compares triggers
- * simulated on reconstructed clusters to measure trigger efficiency.
- */
- private void pairTriggerVerification() {
- // Create lists of generic triggers.
- List<List<? extends Trigger<?>>> sspTriggerList = new ArrayList<List<? extends Trigger<?>>>(2);
- List<List<? extends Trigger<?>>> reconTriggerList = new ArrayList<List<? extends Trigger<?>>>(2);
-
- // Convert the simulated triggers to generic versions and add
- // them to the generic list.
- sspTriggerList.add(triggerData.getSimSSPTriggers().getPair0Triggers());
- sspTriggerList.add(triggerData.getSimSSPTriggers().getPair1Triggers());
- reconTriggerList.add(triggerData.getSimReconTriggers().getPair0Triggers());
- reconTriggerList.add(triggerData.getSimReconTriggers().getPair1Triggers());
-
- // Run generic trigger verification.
- triggerVerification(sspTriggerList, reconTriggerList, false);
- }
-
- /**
- * Performs trigger verification for both trigger types.
- * @param sspTriggerList - The list of SSP triggers.
- * @param reconTriggerList - The list of reconstructed triggers.
- * @param isSingles - Whether or not this is a singles trigger
- * verification.
- */
- private void triggerVerification(List<List<? extends Trigger<?>>> sspTriggerList,
- List<List<? extends Trigger<?>>> reconTriggerList, boolean isSingles) {
-
- // ==========================================================
- // ==== Initialize Trigger Verification =====================
- // ==========================================================
-
- // Print the cluster verification header.
- OutputLogger.println();
- OutputLogger.println();
- OutputLogger.println("======================================================================");
- if(isSingles) { OutputLogger.println("=== Singles Trigger Verification ====================================="); }
- else { OutputLogger.println("=== Pair Trigger Verification ========================================"); }
- OutputLogger.println("======================================================================");
-
- // Track the number of triggers seen and the number found.
- TriggerEvent[] triggerEvent = { new TriggerEvent(), new TriggerEvent() };
-
- // ==========================================================
- // ==== Output Event Summary ================================
- // ==========================================================
-
- // Get the list of triggers reported by the SSP.
- List<? extends SSPNumberedTrigger> sspTriggers;
- if(isSingles) { sspTriggers = sspBank.getSinglesTriggers(); }
- else { sspTriggers = sspBank.getPairTriggers(); }
-
- // Output the SSP cluster triggers.
- OutputLogger.println();
- OutputLogger.println("SSP Cluster " + (isSingles ? "Singles" : "Pair") + " Triggers");
- for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
- for(Trigger<?> simTrigger : sspTriggerList.get(triggerNum)) {
- OutputLogger.printf("\tTrigger %d :: %s :: %3.0f :: %s%n",
- (triggerNum + 1), triggerPositionString(simTrigger),
- getTriggerTime(simTrigger), simTrigger.toString());
- }
- }
- if(sspTriggerList.get(0).size() + sspTriggerList.get(1).size() == 0) {
- OutputLogger.println("\tNone");
- }
-
- // Output the reconstructed cluster singles triggers.
- OutputLogger.println("Reconstructed Cluster " + (isSingles ? "Singles" : "Pair") + " Triggers");
- for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
- for(Trigger<?> simTrigger : reconTriggerList.get(triggerNum)) {
- OutputLogger.printf("\tTrigger %d :: %s :: %3.0f :: %s%n",
- (triggerNum + 1), triggerPositionString(simTrigger),
- getTriggerTime(simTrigger), simTrigger.toString());
- }
- }
- if(reconTriggerList.get(0).size() + reconTriggerList.get(1).size() == 0) {
- OutputLogger.println("\tNone");
- }
-
- // Output the SSP reported triggers.
- OutputLogger.println("SSP Reported " + (isSingles ? "Singles" : "Pair") + " Triggers");
- for(SSPTrigger sspTrigger : sspTriggers) {
- OutputLogger.printf("\t%s%n", sspTrigger.toString());
- }
- if(sspTriggers.size() == 0) { OutputLogger.println("\tNone"); }
-
- // Update the trigger event with the counts for each type of
- // simulated trigger. Reported triggers are counted later when
- // already iterating over them.
- for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
- triggerEvent[triggerNum].sawSSPSimulatedTriggers(tiFlags, sspTriggerList.get(triggerNum).size());
- triggerEvent[triggerNum].sawReconSimulatedTriggers(tiFlags, reconTriggerList.get(triggerNum).size());
- }
-
-
-
- // ==========================================================
- // ==== SSP Internal Logic Verification =====================
- // ==========================================================
-
- // Track which SSP triggers have been matched to avoid matching
- // multiple reconstructed SSP cluster triggers to the same SSP
- // trigger.
- Set<SSPNumberedTrigger> sspTriggerSet = new HashSet<SSPNumberedTrigger>();
- Set<Trigger<?>> simTriggerSet = new HashSet<Trigger<?>>();
-
- // Track the number of SSP reported triggers that are found in
- // excess of the SSP simulated triggers.
- int sspReportedExtras = sspTriggers.size() - (sspTriggerList.get(0).size() + sspTriggerList.get(1).size());
- if(sspReportedExtras > 0) {
- if(isSingles) { singlesInternalFail = true; }
- else { pairInternalFail = true; }
- } else { sspReportedExtras = 0; }
-
- // Iterate over the triggers.
- OutputLogger.println();
- OutputLogger.println("Matching SSP Reported Triggers to SSP Simulated Triggers:");
- for(SSPNumberedTrigger sspTrigger : sspTriggers) {
- // Get the trigger information.
- int triggerNum = sspTrigger.isFirstTrigger() ? 0 : 1;
- OutputLogger.printf("\t%s%n", sspTrigger.toString());
-
- // Note that a bank trigger was seen.
- triggerEvent[triggerNum].sawReportedTrigger();
-
- // Iterate over the SSP cluster simulated triggers and
- // look for a trigger that matches.
- matchLoop:
- for(Trigger<?> simTrigger : sspTriggerList.get(triggerNum)) {
- // VERBOSE :: Output the trigger being considered for
- // matching.
- OutputLogger.printf("\t\tTrigger %d :: %s :: %3.0f :: %s ",
- (triggerNum + 1), triggerPositionString(simTrigger),
- getTriggerTime(simTrigger), simTrigger.toString());
-
- // If the current SSP trigger has already been matched,
- // skip it.
- if(simTriggerSet.contains(simTrigger)) {
- OutputLogger.printf("[ %-15s ]%n", "failed; matched");
- continue matchLoop;
- }
-
- // Check that the triggers have the same time. Triggers
- // generated from SSP bank clusters should always align
- // in time.
- if(sspTrigger.getTime() != getTriggerTime(simTrigger)) {
- OutputLogger.printf("[ %-15s ]%n", "failed; time");
- continue matchLoop;
- }
-
- // Check whether the trigger cuts match.
- boolean[] matchedCuts = triggerCutMatch(simTrigger, sspTrigger);
- for(int i = 0; i < matchedCuts.length; i++) {
- if(!matchedCuts[i]) {
- int typeIndex = isSingles ? 0 : 1;
- OutputLogger.printf("[ %-15s ]%n", String.format("failed; %s", cutNames[typeIndex][i]));
- continue matchLoop;
- }
- }
-
- // If all the cuts match, along with the time and the
- // trigger number, than these triggers are a match.
- sspTriggerSet.add(sspTrigger);
- simTriggerSet.add(simTrigger);
- triggerEvent[triggerNum].matchedSSPTrigger(tiFlags);
- OutputLogger.printf("[ %-15s ]%n", "success");
- break matchLoop;
- }
- }
-
- for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
- for(Trigger<?> simTrigger : sspTriggerList.get(triggerNum)) {
- globalTriggerPlots.sawTrigger(simTrigger);
- if(simTriggerSet.contains(simTrigger)) {
- globalTriggerPlots.matchedTrigger(simTrigger);
- } else {
- globalTriggerPlots.failedTrigger(simTrigger);
- }
- }
- }
-
- // Iterate over the unmatched simulated triggers again and the
- // unmatched SSP reported trigger that most closely matches it.
- OutputLogger.println();
- OutputLogger.println("Matching Failed SSP Reported Triggers to Remaining SSP Simulated Triggers:");
- for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
- simLoop:
- for(Trigger<?> simTrigger : sspTriggerList.get(triggerNum)) {
- OutputLogger.printf("\tTrigger %d :: %s :: %3.0f :: %s%n",
- (triggerNum + 1), triggerPositionString(simTrigger),
- getTriggerTime(simTrigger), simTrigger.toString());
-
- // Check whether this trigger has already been matched
- // or not. If it has been matched, skip it.
- if(simTriggerSet.contains(simTrigger)) {
- OutputLogger.println("\t\tSkipping; already matched successfully");
- continue simLoop;
- }
-
- // Get the trigger time for the simulated trigger.
- double simTime = getTriggerTime(simTrigger);
-
- // Track the match statistics for each reported trigger
- // so that the closest match may be found.
- int numMatched = -1;
- boolean[] matchedCut = null;
- SSPNumberedTrigger bestMatch = null;
-
- // Store the readout for the best match.
- String bestMatchText = null;
-
- // Iterate over the reported triggers to find a match.
- reportedLoop:
- for(SSPNumberedTrigger sspTrigger : sspTriggers) {
- OutputLogger.printf("\t\t%s ", sspTrigger.toString());
-
- // If the two triggers have different times, this
- // trigger should be skipped.
- if(sspTrigger.getTime() != simTime) {
- OutputLogger.printf("[ %-15s ]%n", "failed; time");
- continue reportedLoop;
- }
-
- // If this reported trigger has been matched then
- // it should be skipped.
- if(sspTriggerSet.contains(sspTrigger)) {
- OutputLogger.printf("[ %-15s ]%n", "failed; matched");
- continue reportedLoop;
- }
-
- // Check each of the cuts.
- boolean[] tempMatchedCut = triggerCutMatch(simTrigger, sspTrigger);
-
- // Check each cut and see if this is a closer match
- // than the previous best match.
- int tempNumMatched = 0;
- for(boolean passed : tempMatchedCut) { if(passed) { tempNumMatched++; } }
- OutputLogger.printf("[ %-15s ]%n", String.format("maybe; %d failed", tempNumMatched));
-
- // If the number of matched cuts exceeds the old
- // best result, this becomes the new best result.
- if(tempNumMatched > numMatched) {
- numMatched = tempNumMatched;
- matchedCut = tempMatchedCut;
- bestMatch = sspTrigger;
- bestMatchText = String.format("%s%n", sspTrigger.toString());
- }
- }
-
- // If there was no match found, it means that there were
- // no triggers that were both unmatched and at the same
- // time as this simulated trigger.
- if(bestMatch == null) {
- if(isSingles) { singlesInternalFail = true; }
- else { pairInternalFail = true; }
- triggerEvent[triggerNum].failedSSPTrigger();
- OutputLogger.printf("\t\tTrigger %d :: %s :: %3.0f :: %s",
- (triggerNum + 1), triggerPositionString(simTrigger),
- getTriggerTime(simTrigger), simTrigger.toString());
- OutputLogger.println(" --> No Valid Match Found");
- } else {
- triggerEvent[triggerNum].matchedSSPTrigger(tiFlags, matchedCut);
- OutputLogger.printf("\t\tTrigger %d :: %s :: %3.0f :: %s",
- (triggerNum + 1), triggerPositionString(simTrigger),
- getTriggerTime(simTrigger), simTrigger.toString());
- OutputLogger.println(" --> " + bestMatchText);
- }
- }
- }
-
-
-
- // ==========================================================
- // ==== Trigger Efficiency ==================================
- // ==========================================================
-
- // Reset the SSP matched trigger set.
- sspTriggerSet.clear();
-
- // Iterate over the reconstructed cluster singles triggers.
- OutputLogger.println();
- OutputLogger.println("Recon Cluster Trigger --> SSP Reported Trigger Match Status");
- for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
- for(Trigger<?> simTrigger : reconTriggerList.get(triggerNum)) {
- OutputLogger.printf("\tTrigger %d :: %s :: %s%n", (triggerNum + 1),
- triggerPositionString(simTrigger), simTrigger.toString());
-
- // TEMP :: Populate the recon ALL pairs plots.
- globalTriggerPlots.sawTrigger(simTrigger);
-
- // Iterate over the SSP reported triggers and compare
- // them to the reconstructed cluster simulated trigger.
- boolean matched = false;
- matchLoop:
- for(SSPNumberedTrigger sspTrigger : sspTriggers) {
- OutputLogger.printf("\t\t\t%s", sspTrigger.toString());
-
- // Only compare triggers if they are from the
- // same trigger source.
- if((triggerNum == 0 && sspTrigger.isSecondTrigger())
- || (triggerNum == 1 && sspTrigger.isFirstTrigger())) {
- OutputLogger.print(" [ fail; source ]%n");
- continue matchLoop;
- }
-
- // Only compare the singles trigger if it was
- // not already matched to another trigger.
- if(sspTriggerSet.contains(sspTrigger)) {
- OutputLogger.print(" [ fail; matched ]%n");
- continue matchLoop;
- }
-
- // Test each cut.
- int typeIndex = isSingles ? 0 : 1;
- boolean[] matchedCuts = triggerCutMatch(simTrigger, sspTrigger);
- for(int cutIndex = 0; cutIndex < matchedCuts.length; cutIndex++) {
- if(!matchedCuts[cutIndex]) {
- OutputLogger.printf(" [ fail; %-9s ]%n", cutNames[typeIndex][cutIndex]);
- continue matchLoop;
- }
- }
-
- // If all the trigger flags match, then the
- // triggers are a match.
- sspTriggerSet.add(sspTrigger);
- triggerEvent[triggerNum].matchedReconTrigger(tiFlags);
- OutputLogger.print(" [ success ]%n");
- globalTriggerPlots.matchedTrigger(simTrigger);
- matched = true;
- break matchLoop;
- }
-
- if(!matched) { globalTriggerPlots.failedTrigger(simTrigger); }
- }
- }
-
-
-
- // ==========================================================
- // ==== Output Event Results ================================
- // ==========================================================
-
- // Get the number of SSP and reconstructed cluster simulated
- // triggers.
- int sspSimTriggers = sspTriggerList.get(0).size() + sspTriggerList.get(1).size();
- int reconSimTriggers = reconTriggerList.get(0).size() + reconTriggerList.get(1).size();
- int[] sspTriggerCount = { sspTriggerList.get(0).size(), sspTriggerList.get(1).size() };
-
- // Print event statistics.
- OutputLogger.println();
- OutputLogger.println("Event Statistics:");
- OutputLogger.printf("\tSSP Cluster Sim Triggers :: %d%n", sspSimTriggers);
- OutputLogger.printf("\tRecon Cluster Sim Triggers :: %d%n", reconSimTriggers);
- OutputLogger.printf("\tSSP Reported Triggers :: %d%n", sspTriggers.size());
-
- int matchedSSPTriggers = triggerEvent[0].getMatchedSSPSimulatedTriggers() + triggerEvent[1].getMatchedSSPSimulatedTriggers();
- OutputLogger.printf("\tInternal Efficiency :: %d / %d ", matchedSSPTriggers, sspSimTriggers);
- if(sspSimTriggers == 0) { OutputLogger.printf("(N/A)%n"); }
- else { OutputLogger.printf("(%3.0f%%)%n", (100.0 * matchedSSPTriggers / sspSimTriggers)); }
-
- int matchedReconTriggers = triggerEvent[0].getMatchedReconSimulatedTriggers() + triggerEvent[1].getMatchedReconSimulatedTriggers();
- OutputLogger.printf("\tTrigger Efficiency :: %d / %d", matchedReconTriggers, reconSimTriggers);
- if(reconSimTriggers == 0) { OutputLogger.printf("(N/A)%n"); }
- else { OutputLogger.printf("(%3.0f%%)%n", (100.0 * matchedReconTriggers / reconSimTriggers)); }
-
- // Print the individual cut performances.
- if(isSingles) {
- OutputLogger.println();
- for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
- OutputLogger.printf("Trigger %d Individual Cut Failure Rate:%n", (triggerNum + 1));
- if(sspSimTriggers == 0) {
- OutputLogger.printf("\tCluster Energy Lower Bound :: %d / %d%n", triggerEvent[triggerNum].getSSPCutFailures(ENERGY_MIN), sspTriggerCount[triggerNum]);
- OutputLogger.printf("\tCluster Energy Upper Bound :: %d / %d%n", triggerEvent[triggerNum].getSSPCutFailures(ENERGY_MAX), sspTriggerCount[triggerNum]);
- OutputLogger.printf("\tCluster Hit Count :: %d / %d%n", triggerEvent[triggerNum].getSSPCutFailures(HIT_COUNT), sspTriggerCount[triggerNum]);
- } else {
- OutputLogger.printf("\tCluster Energy Lower Bound :: %d / %d (%3.0f%%)%n",
- triggerEvent[triggerNum].getSSPCutFailures(ENERGY_MIN), sspTriggerCount[triggerNum],
- (100.0 * triggerEvent[triggerNum].getSSPCutFailures(ENERGY_MIN) / sspTriggerCount[triggerNum]));
- OutputLogger.printf("\tCluster Energy Upper Bound :: %d / %d (%3.0f%%)%n",
- triggerEvent[triggerNum].getSSPCutFailures(ENERGY_MAX), sspTriggerCount[triggerNum],
- (100.0 * triggerEvent[triggerNum].getSSPCutFailures(ENERGY_MAX) / sspTriggerCount[triggerNum]));
- OutputLogger.printf("\tCluster Hit Count :: %d / %d (%3.0f%%)%n",
- triggerEvent[triggerNum].getSSPCutFailures(HIT_COUNT), sspTriggerCount[triggerNum],
- (100.0 * triggerEvent[triggerNum].getSSPCutFailures(HIT_COUNT) / sspTriggerCount[triggerNum]));
- }
- }
-
- // Update the global trigger tracking variables.
- localStats.getTriggerStats().getSingles0Stats().addEvent(triggerEvent[0]);
- localStats.getTriggerStats().getSingles1Stats().addEvent(triggerEvent[1]);
- globalStats.getTriggerStats().getSingles0Stats().addEvent(triggerEvent[0]);
- globalStats.getTriggerStats().getSingles1Stats().addEvent(triggerEvent[1]);
- } else {
- for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
- OutputLogger.println();
- OutputLogger.printf("Trigger %d Individual Cut Failure Rate:%n", (triggerNum + 1));
- if(sspTriggerCount[triggerNum] == 0) {
- OutputLogger.printf("\tPair Energy Sum :: %d / %d%n", triggerEvent[triggerNum].getSSPCutFailures(ENERGY_SUM), sspTriggerCount[triggerNum]);
- OutputLogger.printf("\tPair Energy Difference :: %d / %d%n", triggerEvent[triggerNum].getSSPCutFailures(ENERGY_DIFF), sspTriggerCount[triggerNum]);
- OutputLogger.printf("\tPair Energy Slope :: %d / %d%n", triggerEvent[triggerNum].getSSPCutFailures(ENERGY_SLOPE), sspTriggerCount[triggerNum]);
- OutputLogger.printf("\tPair Coplanarity :: %d / %d%n", triggerEvent[triggerNum].getSSPCutFailures(COPLANARITY), sspTriggerCount[triggerNum]);
- } else {
- OutputLogger.printf("\tPair Energy Sum :: %d / %d (%3.0f%%)%n",
- triggerEvent[triggerNum].getSSPCutFailures(ENERGY_SUM), sspTriggerCount[triggerNum],
- (100.0 * triggerEvent[triggerNum].getSSPCutFailures(ENERGY_SUM) / sspTriggerCount[triggerNum]));
- OutputLogger.printf("\tPair Energy Difference :: %d / %d (%3.0f%%)%n",
- triggerEvent[triggerNum].getSSPCutFailures(ENERGY_DIFF), sspTriggerCount[triggerNum],
- (100.0 * triggerEvent[triggerNum].getSSPCutFailures(ENERGY_DIFF) / sspTriggerCount[triggerNum]));
- OutputLogger.printf("\tPair Energy Slope :: %d / %d (%3.0f%%)%n",
- triggerEvent[triggerNum].getSSPCutFailures(ENERGY_SLOPE), sspTriggerCount[triggerNum],
- (100.0 * triggerEvent[triggerNum].getSSPCutFailures(ENERGY_SLOPE) / sspTriggerCount[triggerNum]));
- OutputLogger.printf("\tPair Coplanarity :: %d / %d (%3.0f%%)%n",
- triggerEvent[triggerNum].getSSPCutFailures(COPLANARITY), sspTriggerCount[triggerNum],
- (100.0 * triggerEvent[triggerNum].getSSPCutFailures(COPLANARITY) / sspTriggerCount[triggerNum]));
- }
- }
-
- // Update the global trigger tracking variables.
- localStats.getTriggerStats().getPair0Stats().addEvent(triggerEvent[0]);
- localStats.getTriggerStats().getPair1Stats().addEvent(triggerEvent[1]);
- globalStats.getTriggerStats().getPair0Stats().addEvent(triggerEvent[0]);
- globalStats.getTriggerStats().getPair1Stats().addEvent(triggerEvent[1]);
- }
-
- // Note whether the was a trigger match failure.
- if(triggerEvent[0].getFailedReconSimulatedTriggers() != 0 && triggerEvent[1].getFailedReconSimulatedTriggers() != 0) {
- if(isSingles) { singlesEfficiencyFail = true; }
- else { pairEfficiencyFail = true; }
- } if(triggerEvent[0].getFailedSSPSimulatedTriggers() != 0 && triggerEvent[1].getFailedSSPSimulatedTriggers() != 0) {
- if(isSingles) { singlesInternalFail = true; }
- else { pairInternalFail = true; }
- }
- }
-
- /**
- * Outputs all of the verification parameters currently in use by
- * the software. A warning will be issued if the values for NSA and
- * NSB, along with the FADC window, preclude clusters from being
- * verified.
- */
- private void logSettings() {
- // Output general settings.
- System.out.println("Cluster Verification Settings");
- System.out.printf("\tHit Threshold :: %1d hit(s)%n", hitAcceptance);
- System.out.printf("\tEnergy Threshold :: %5.3f GeV%n", energyAcceptance);
- System.out.println();
-
- // Output window settings.
- System.out.println("FADC Timing Window Settings");
- System.out.printf("\tNSB :: %3d ns%n", nsb);
- System.out.printf("\tNSA :: %3d ns%n", nsa);
- System.out.printf("\tFADC Window :: %3d ns%n", windowWidth);
-
- // Calculate the valid clustering window.
- int start = nsb;
- int end = windowWidth - nsa;
- if(start < end) {
- System.out.printf("\tValid Cluster Window :: [ %3d ns, %3d ns ]%n", start, end);
- performClusterVerification = true;
- } else {
- System.out.println("\tNSB, NSA, and FADC window preclude a valid cluster verification window.");
- System.out.println("\tCluster verification will not be performed!");
- performClusterVerification = false;
- }
- System.out.println();
-
- // Output the singles trigger settings.
- for(int i = 0; i < 2; i++) {
- // Print the settings.
- System.out.printf("Singles Trigger %d Settings%23s[%5b]%n", (i + 1), "", singlesTriggerEnabled[i]);
- System.out.printf("\tCluster Energy Low :: %.3f GeV [%5b]%n",
- singlesTrigger[i].getCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW), singlesCutsEnabled[i][0]);
- System.out.printf("\tCluster Energy High :: %.3f GeV [%5b]%n",
- singlesTrigger[i].getCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH), singlesCutsEnabled[i][1]);
- System.out.printf("\tCluster Hit Count :: %.0f hit(s) [%5b]%n",
- singlesTrigger[i].getCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW), singlesCutsEnabled[i][2]);
- System.out.println();
- }
-
- // Output the pair trigger settings.
- for(int i = 0; i < 2; i++) {
- System.out.printf("Pairs Trigger %d Settings%25s[%5b]%n", (i + 1), "", pairTriggerEnabled[i]);
- System.out.printf("\tCluster Energy Low :: %.3f GeV [%5b]%n",
- pairsTrigger[i].getCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW), pairCutsEnabled[i][0]);
- System.out.printf("\tCluster Energy High :: %.3f GeV [%5b]%n",
- pairsTrigger[i].getCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH), pairCutsEnabled[i][1]);
- System.out.printf("\tCluster Hit Count :: %.0f hit(s) [%5b]%n",
- pairsTrigger[i].getCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW), pairCutsEnabled[i][2]);
- System.out.printf("\tPair Energy Sum Low :: %.3f GeV [%5b]%n",
- pairsTrigger[i].getCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW), pairCutsEnabled[i][3]);
- System.out.printf("\tPair Energy Sum High :: %.3f GeV [%5b]%n",
- pairsTrigger[i].getCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH), pairCutsEnabled[i][3]);
- System.out.printf("\tPair Energy Difference :: %.3f GeV [%5b]%n",
- pairsTrigger[i].getCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH), pairCutsEnabled[i][4]);
- System.out.printf("\tPair Energy Slope :: %.3f GeV [%5b]%n",
- pairsTrigger[i].getCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW), pairCutsEnabled[i][5]);
- System.out.printf("\tPair Energy Slope F :: %.4f GeV / mm%n",
- pairsTrigger[i].getCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F));
- System.out.printf("\tPair Coplanarity :: %3.0f Degrees [%5b]%n",
- pairsTrigger[i].getCutValue(TriggerModule.PAIR_COPLANARITY_HIGH), pairCutsEnabled[i][6]);
- System.out.printf("\tPair Time Coincidence :: %2.0f ns [%5b]%n",
- pairsTrigger[i].getCutValue(TriggerModule.PAIR_TIME_COINCIDENCE), true);
- System.out.println();
- }
- }
-
- /**
- * Summarizes the global run statistics in a log to the terminal.
- */
- private void logStatistics() {
- // Print the cluster/trigger verification header.
- System.out.println();
- System.out.println();
- System.out.println("======================================================================");
- System.out.println("=== Cluster/Trigger Verification Results =============================");
- System.out.println("======================================================================");
-
- // Print the general event failure rate.
- int headSpaces = getPrintSpaces(globalStats.getEventCount());
- System.out.println("General Event Statistics:");
- System.out.printf("\tEvent Start Time :: %.3f s%n", (startTime / Math.pow(10, 9)));
- System.out.printf("\tEvent End Time :: %.3f%n", (endTime / Math.pow(10, 9)));
- System.out.printf("\tEvent Run Time :: %.3f%n", ((endTime - startTime) / Math.pow(10, 9)));
- System.out.printf("\tNoise Events :: %" + headSpaces + "d / %" + headSpaces + "d (%7.3f%%)%n",
- globalStats.getNoiseEvents(), globalStats.getEventCount(), (100.0 * globalStats.getNoiseEvents() / globalStats.getEventCount()));
- System.out.printf("\tCluster Events Failed :: %" + headSpaces + "d / %" + headSpaces + "d (%7.3f%%)%n",
- globalStats.getFailedClusterEventCount(), globalStats.getEventCount(), (100.0 * globalStats.getFailedClusterEventCount() / globalStats.getEventCount()));
- System.out.printf("\tSingles Events Failed :: %" + headSpaces + "d / %" + headSpaces + "d (%7.3f%%)%n",
- globalStats.getFailedSinglesEventCount(), globalStats.getEventCount(), (100.0 * globalStats.getFailedSinglesEventCount() / globalStats.getEventCount()));
- System.out.printf("\tPair Events Failed :: %" + headSpaces + "d / %" + headSpaces + "d (%7.3f%%)%n",
- globalStats.getFailedPairEventCount(), globalStats.getEventCount(), (100.0 * globalStats.getFailedPairEventCount() / globalStats.getEventCount()));
-
- // Print out how many events reported a given TI type, both in
- // total and hierarchically.
- System.out.println();
- System.out.println("Event Triggering Type Verification:");
- System.out.printf("\t%15s\t%15s\t%15s%n", "Trigger", "Total", "Hierarchical");
- System.out.printf("\t%15s\t%15s\t%15s%n", "Pulser", globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.PULSER, false),
- globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.PULSER, true));
- System.out.printf("\t%15s\t%15s\t%15s%n", "Cosmic", globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.COSMIC, false),
- globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.COSMIC, true));
- System.out.printf("\t%15s\t%15s\t%15s%n", "Singles 1", globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.SINGLES0, false),
- globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.SINGLES0, true));
- System.out.printf("\t%15s\t%15s\t%15s%n", "Singles 2", globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.SINGLES1, false),
- globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.SINGLES1, true));
- System.out.printf("\t%15s\t%15s\t%15s%n", "Pair 1", globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.PAIR0, false),
- globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.PAIR0, true));
- System.out.printf("\t%15s\t%15s\t%15s%n", "Pair 2", globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.PAIR1, false),
- globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.PAIR1, true));
-
- // Print the cluster verification data.
- System.out.println();
- System.out.println("Cluster Verification:");
- System.out.printf("\tRecon Clusters :: %d%n", globalStats.getClusterStats().getReconClusterCount());
- System.out.printf("\tSSP Clusters :: %d%n", globalStats.getClusterStats().getSSPClusterCount());
- System.out.printf("\tClusters Matched :: %d%n", globalStats.getClusterStats().getMatches());
- System.out.printf("\tFailed (Position) :: %d%n", globalStats.getClusterStats().getPositionFailures());
- System.out.printf("\tFailed (Energy) :: %d%n", globalStats.getClusterStats().getEnergyFailures());
- System.out.printf("\tFailed (Hit Count) :: %d%n", globalStats.getClusterStats().getHitCountFailures());
- if(globalStats.getClusterStats().getReconClusterCount() == 0) {
- System.out.printf("\tCluster Efficiency :: N/A%n");
- } else {
- System.out.printf("\tCluster Efficiency :: %7.3f%%%n",
- 100.0 * globalStats.getClusterStats().getMatches() / globalStats.getClusterStats().getReconClusterCount());
- }
-
- // Print the trigger verification data.
- for(int triggerType = 0; triggerType < 2; triggerType++) {
- // Get the trigger data. Type 0 represents singles triggers.
- TriggerEvent[] triggerData = new TriggerEvent[2];
- if(triggerType == 0) {
- triggerData[0] = globalStats.getTriggerStats().getSingles0Stats();
- triggerData[1] = globalStats.getTriggerStats().getSingles1Stats();
- } else {
- triggerData[0] = globalStats.getTriggerStats().getPair0Stats();
- triggerData[1] = globalStats.getTriggerStats().getPair1Stats();
- }
-
- // Get the basic trigger data.
- int sspSimTriggers = triggerData[0].getSSPSimulatedTriggers() + triggerData[1].getSSPSimulatedTriggers();
- int reconSimTriggers = triggerData[0].getReconSimulatedTriggers() + triggerData[1].getReconSimulatedTriggers();
- int sspReportedTriggers = triggerData[0].getReportedTriggers() + triggerData[1].getReportedTriggers();
- int sspMatchedTriggers = triggerData[0].getMatchedSSPSimulatedTriggers() + triggerData[1].getMatchedSSPSimulatedTriggers();
- int reconMatchedTriggers = triggerData[0].getMatchedReconSimulatedTriggers() + triggerData[1].getMatchedReconSimulatedTriggers();
-
- // Print the basic trigger statistics.
- int spaces = getPrintSpaces(sspSimTriggers, reconSimTriggers, sspReportedTriggers);
- System.out.println();
- if(triggerType == 0) { System.out.println("Singles Trigger Verification:"); }
- else { System.out.println("Pair Trigger Verification:"); }
- System.out.printf("\tSSP Cluster Sim Triggers :: %" + spaces + "d%n", sspSimTriggers);
- System.out.printf("\tRecon Cluster Sim Triggers :: %" + spaces + "d%n", reconSimTriggers);
- System.out.printf("\tSSP Reported Triggers :: %" + spaces + "d%n", sspReportedTriggers);
-
- System.out.printf("\tInternal Efficiency :: %" + spaces + "d / %" + spaces + "d ", sspMatchedTriggers, sspSimTriggers);
- if(sspSimTriggers == 0) { System.out.printf("(N/A)%n"); }
- else { System.out.printf("(%7.3f%%)%n", (100.0 * sspMatchedTriggers / sspSimTriggers)); }
-
- System.out.printf("\tTrigger Efficiency :: %" + spaces + "d / %" + spaces + "d ", reconMatchedTriggers, reconSimTriggers);
- if(reconSimTriggers == 0) { System.out.printf("(N/A)%n"); }
- else { System.out.printf("(%7.3f%%)%n" , (100.0 * reconMatchedTriggers / reconSimTriggers)); }
-
- // Print the individual cut performances.
- if(triggerType == 0) {
- for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
- // Get the appropriate trigger statistics module.
- TriggerEvent triggerStats;
- if(triggerNum == 0) { triggerStats = globalStats.getTriggerStats().getSingles0Stats(); }
- else { triggerStats = globalStats.getTriggerStats().getSingles1Stats(); }
-
- // Get the number of SSP triggers for this trigger number.
- int sspTriggerCount = triggerStats.getSSPSimulatedTriggers();
- //int sspTriggerCount = triggerRunStats[0].getTotalSSPTriggers(triggerNum);
-
- System.out.println();
- System.out.printf("\tTrigger %d Individual Cut Failure Rate:%n", (triggerNum + 1));
- System.out.printf("\t\tUmatched Triggers :: %" + spaces + "d%n", triggerStats.getUnmatchedSSPSimulatedTriggers());
- //System.out.printf("\t\tUmatched Triggers :: %" + spaces + "d%n", triggerRunStats[0].getUnmatchedTriggers(triggerNum));
- if(sspTriggerCount == 0) {
- System.out.printf("\t\tCluster Energy Lower Bound :: %" + spaces + "d / %" + spaces + "d%n",
- triggerStats.getSSPCutFailures(ENERGY_MIN), sspTriggerCount);
- System.out.printf("\t\tCluster Energy Upper Bound :: %" + spaces + "d / %" + spaces + "d%n",
- triggerStats.getSSPCutFailures(ENERGY_MAX), sspTriggerCount);
- System.out.printf("\t\tCluster Hit Count :: %" + spaces + "d / %" + spaces + "d%n",
- triggerStats.getSSPCutFailures(HIT_COUNT), sspTriggerCount);
- } else {
- System.out.printf("\t\tCluster Energy Lower Bound :: %" + spaces + "d / %" + spaces + "d (%7.3f%%)%n",
- triggerStats.getSSPCutFailures(ENERGY_MIN), sspTriggerCount,
- (100.0 * triggerStats.getSSPCutFailures(ENERGY_MIN) / sspTriggerCount));
- System.out.printf("\t\tCluster Energy Upper Bound :: %" + spaces + "d / %" + spaces + "d (%7.3f%%)%n",
- triggerStats.getSSPCutFailures(ENERGY_MAX), sspTriggerCount,
- (100.0 * triggerStats.getSSPCutFailures(ENERGY_MAX) / sspTriggerCount));
- System.out.printf("\t\tCluster Hit Count :: %" + spaces + "d / %" + spaces + "d (%7.3f%%)%n",
- triggerStats.getSSPCutFailures(HIT_COUNT), sspTriggerCount,
- (100.0 * triggerStats.getSSPCutFailures(HIT_COUNT) / sspTriggerCount));
- }
- }
- } else {
- for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
- // Get the appropriate trigger statistics module.
- TriggerEvent triggerStats;
- if(triggerNum == 0) { triggerStats = globalStats.getTriggerStats().getPair0Stats(); }
- else { triggerStats = globalStats.getTriggerStats().getPair1Stats(); }
-
- // Get the number of SSP triggers for this trigger number.
- int sspTriggerCount = triggerStats.getSSPSimulatedTriggers();
-
- System.out.println();
- System.out.printf("\tTrigger %d Individual Cut Failure Rate:%n", (triggerNum + 1));
- System.out.printf("\t\tUmatched Triggers :: %" + spaces + "d%n", triggerStats.getUnmatchedSSPSimulatedTriggers());
- if(sspTriggerCount == 0) {
- System.out.printf("\t\tPair Energy Sum :: %" + spaces + "d / %" + spaces + "d%n",
- triggerStats.getSSPCutFailures(ENERGY_SUM), sspTriggerCount);
- System.out.printf("\t\tPair Energy Difference :: %" + spaces + "d / %" + spaces + "d%n",
- triggerStats.getSSPCutFailures(ENERGY_DIFF), sspTriggerCount);
- System.out.printf("\t\tPair Energy Slope :: %" + spaces + "d / %" + spaces + "d%n",
- triggerStats.getSSPCutFailures(ENERGY_SLOPE), sspTriggerCount);
- System.out.printf("\t\tPair Coplanarity :: %" + spaces + "d / %" + spaces + "d%n",
- triggerStats.getSSPCutFailures(COPLANARITY), sspTriggerCount);
- } else {
- System.out.printf("\t\tPair Energy Sum :: %" + spaces + "d / %" + spaces + "d (%7.3f%%)%n",
- triggerStats.getSSPCutFailures(ENERGY_SUM), sspTriggerCount,
- (100.0 * triggerStats.getSSPCutFailures(ENERGY_SUM) / sspTriggerCount));
- System.out.printf("\t\tPair Energy Difference :: %" + spaces + "d / %" + spaces + "d (%7.3f%%)%n",
- triggerStats.getSSPCutFailures(ENERGY_DIFF), sspTriggerCount,
- (100.0 * triggerStats.getSSPCutFailures(ENERGY_DIFF) / sspTriggerCount));
- System.out.printf("\t\tPair Energy Slope :: %" + spaces + "d / %" + spaces + "d (%7.3f%%)%n",
- triggerStats.getSSPCutFailures(ENERGY_SLOPE), sspTriggerCount,
- (100.0 * triggerStats.getSSPCutFailures(ENERGY_SLOPE) / sspTriggerCount));
- System.out.printf("\t\tPair Coplanarity :: %" + spaces + "d / %" + spaces + "d (%7.3f%%)%n",
- triggerStats.getSSPCutFailures(COPLANARITY), sspTriggerCount,
- (100.0 * triggerStats.getSSPCutFailures(COPLANARITY) / sspTriggerCount));
- }
- }
- }
- }
-
- // Print out the trigger efficiency table.
- System.out.println();
- globalStats.getTriggerStats().printEfficiencyTable();
- }
-
- /**
- * Checks whether all of the hits in a cluster are within the safe
- * region of the FADC output window.
- * @param reconCluster - The cluster to check.
- * @return Returns <code>true</code> if the cluster is safe and
- * returns <code>false</code> otherwise.
- */
- private final boolean isVerifiable(Cluster reconCluster) {
- return TriggerDiagnosticUtil.isVerifiable(reconCluster, nsa, nsb, windowWidth);
- }
-
- /**
- * Generates a <code>List</code> collection that contains a set
- * of <code>ArrayList</code> collections representing a unique
- * permutation of the entries in the argument.
- * @param values - A collection of the entries to be permuted.
- * @return Returns a list of lists representing the permutations.
- */
- private static final List<List<Pair<Cluster, SSPCluster>>> getPermutations(List<Cluster> reconClusters, List<SSPCluster> sspClusters) {
- // Store the SSP cluster permutations.
- List<List<SSPCluster>> permList = new ArrayList<List<SSPCluster>>();
-
- // Make sure that the two lists are the same size.
- int reconSize = reconClusters.size();
- int sspSize = sspClusters.size();
- while(sspClusters.size() < reconClusters.size()) {
- sspClusters.add(null);
- }
- while(reconClusters.size() < sspClusters.size()) {
- reconClusters.add(null);
- }
-
- // Get the SSP cluster permutations.
- permute(new ArrayList<SSPCluster>(0), sspClusters, permList);
-
- // Create pairs from the permutations.
- List<List<Pair<Cluster, SSPCluster>>> pairList = new ArrayList<List<Pair<Cluster, SSPCluster>>>();
- for(List<SSPCluster> permutation : permList) {
- List<Pair<Cluster, SSPCluster>> pairs = new ArrayList<Pair<Cluster, SSPCluster>>(reconClusters.size());
-
- for(int clusterIndex = 0; (clusterIndex < reconClusters.size() && clusterIndex < permutation.size()); clusterIndex++) {
- pairs.add(new Pair<Cluster, SSPCluster>(reconClusters.get(clusterIndex), permutation.get(clusterIndex)));
- }
-
- pairList.add(pairs);
- }
-
- // Remove the extra values.
- for(int i = sspClusters.size() - 1; i >= sspSize; i--) { sspClusters.remove(i); }
- for(int i = reconClusters.size() - 1; i >= reconSize; i--) { reconClusters.remove(i); }
-
- // Return the pairs.
- return pairList;
- }
-
- /**
- * Recursive method for permuting all entries in the argument
- * collection <code>remainingValues</code> into the argument
- * <code>permutedValues</code> values. Completed permutations are
- * placed in the argument <code>permList</code>.
- * @param permutedValues - List to store entries that have already
- * been permuted.
- * @param remainingValues - List to store entries that need to be
- * permuted.
- * @param permList - List to store completed permutations.
- */
- private static final void permute(List<SSPCluster> permutedValues, List<SSPCluster> remainingValues, List<List<SSPCluster>> permList) {
- // If the list of entries that still need to be sorted is empty,
- // then there is nothing to sort. Just return and empty list.
- if(remainingValues.isEmpty()) { return; }
-
- // If there is only one value left in the list of entries that
- // still need to be sorted, then just add it to the permutation
- // list and return it.
- else if(remainingValues.size() <= 1) {
- // Add the last entry.
- permutedValues.add(remainingValues.get(0));
-
- // Add the permutation to the list of completed permutations.
- permList.add(permutedValues);
- }
-
- // Otherwise, continue to get all possible permutations.
- else {
- // Iterate over the entries that have not been permuted.
- for(int i = 0; i < remainingValues.size(); i++) {
- // Make new lists to contain the permutations.
- List<SSPCluster> newPermList = new ArrayList<SSPCluster>(permutedValues.size() + 1);
- List<SSPCluster> newRemainList = new ArrayList<SSPCluster>(remainingValues.size());
-
- // Copy the current permuted entries to the new list
- // and one value from the list of entries that have
- // not been permuted yet.
- newPermList.addAll(permutedValues);
- newPermList.add(remainingValues.get(i));
-
- // The new list of entries that have not been permuted
- // should be identical, except it should now be missing
- // the entry that was moved.
- for(int index = 0; index < remainingValues.size(); index++) {
- if(index != i) { newRemainList.add(remainingValues.get(index)); }
- }
-
- // Repeat the process with the new lists.
- permute(newPermList, newRemainList, permList);
- }
- }
- }
-
- /**
- * Compares two cluster matching events and finds the one that has
- * the better results. Note that this will only return results that
- * make sense if both of the events represent different permutations
- * of the same set of clusters. Comparing events with different sets
- * of clusters will produce meaningless results.
- * @param firstEvent - The first cluster matching event,
- * @param secondEvent - The second cluster matching event.
- * @return Returns the cluster matching event that is better.
- */
- private static final DetailedClusterEvent getBestPermutation(DetailedClusterEvent firstEvent, DetailedClusterEvent secondEvent) {
- // If both permutations are null, return that.
- if(firstEvent == null && secondEvent == null) {
- return null;
- }
-
- // If one permutation is null, it is not the best.
- if(firstEvent == null) { return secondEvent; }
- else if(secondEvent == null) { return firstEvent; }
-
- // A permutation is better if it has more matches.
- if(firstEvent.getMatches() > secondEvent.getMatches()) { return firstEvent; }
- else if(secondEvent.getMatches() > firstEvent.getMatches()) { return secondEvent; }
-
- // Otherwise, the permutation with the least energy failures is
- // the better permutation.
- if(firstEvent.getEnergyFailures() < secondEvent.getEnergyFailures()) { return firstEvent; }
- else if(secondEvent.getEnergyFailures() < firstEvent.getEnergyFailures()) { return secondEvent; }
-
- // If both these values are the same, then the events are identical.
- return firstEvent;
- }
-
- /**
- * Determines the number of spaces needed to render the longest of
- * a series of integers as a string.
- * @param vals - The series of integers.
- * @return Returns the number of spaces needed to render the longest
- * integer as a base-10 string.
- */
- private static final int getPrintSpaces(int... vals) {
- // Track the largest value.
- int largest = 0;
-
- // Iterate over the arguments and find the largest.
- for(int val : vals) {
- // Get the length of the string.
- int length = TriggerDiagnosticUtil.getDigits(val);
-
- // If it is larger, track it.
- if(length > largest) { largest = length; }
- }
-
- // Return the longer one.
- return largest;
- }
-
- /**
- * Gets the position of the source of a <code>Trigger</code> object
- * as text. This method only supports trigger sources of the types
- * <code>SSPCluster</code>, <code>Cluster</code>, and arrays of size
- * two of either type.
- * @param trigger - The trigger from which to obtain the source.
- * @return Returns the source of the trigger as a <code>String</code>
- * object.
- * @throws IllegalArgumentException Occurs if the source of the
- * trigger is not any of the supported types.
- */
- private static final String triggerPositionString(Trigger<?> trigger) throws IllegalArgumentException {
- // Get the trigger source.
- Object source = trigger.getTriggerSource();
-
- // Handle valid trigger sources.
- if(source instanceof SSPCluster) {
- return TriggerDiagnosticUtil.clusterPositionString((SSPCluster) source);
- } else if(source instanceof Cluster) {
- return TriggerDiagnosticUtil.clusterPositionString((Cluster) source);
- } else if(source instanceof SSPCluster[]) {
- SSPCluster[] sourcePair = (SSPCluster[]) source;
- if(sourcePair.length == 2) {
- return String.format("%s, %s", TriggerDiagnosticUtil.clusterPositionString(sourcePair[0]),
- TriggerDiagnosticUtil.clusterPositionString(sourcePair[1]));
- }
- } else if(source instanceof Cluster[]) {
- Cluster[] sourcePair = (Cluster[]) source;
- if(sourcePair.length == 2) {
- return String.format("%s, %s", TriggerDiagnosticUtil.clusterPositionString(sourcePair[0]),
- TriggerDiagnosticUtil.clusterPositionString(sourcePair[1]));
- }
- }
-
- // Otherwise, the source type is unrecognized. Throw an error.
- throw new IllegalArgumentException(String.format("Trigger source type \"%s\" is not supported.",
- trigger.getTriggerSource().getClass().getSimpleName()));
- }
-
- /**
- * Gets the time of a simulated trigger object. Method supports
- * triggers with source objects of type <code>SSPCluster</code>,
- * <code>Cluster</code>, and arrays of size two composed of either
- * object type.
- * @param trigger - The trigger.
- * @return Returns the time at which the trigger occurred.
- * @throws IllegalArgumentException Occurs if the trigger source
- * is not a supported type.
- */
- private static final double getTriggerTime(Trigger<?> trigger) throws IllegalArgumentException {
- // Get the trigger source.
- Object source = trigger.getTriggerSource();
-
- // Get the trigger time for supported trigger types.
- if(source instanceof SSPCluster) {
- return ((SSPCluster) source).getTime();
- } else if(source instanceof Cluster) {
- return TriggerDiagnosticUtil.getClusterTime((Cluster) source);
- } else if(source instanceof SSPCluster[]) {
- // Get the pair.
- SSPCluster[] sourcePair = (SSPCluster[]) source;
-
- // Get the time of the bottom cluster.
- if(sourcePair.length == 2) {
- if(sourcePair[0].getYIndex() < 0) { return sourcePair[0].getTime(); }
- else if(sourcePair[1].getYIndex() < 0) { return sourcePair[1].getTime(); }
- else { throw new IllegalArgumentException("Cluster pairs must be formed of a top/bottom pair."); }
- }
- else { throw new IllegalArgumentException("Cluster pairs must be of size 2."); }
- } else if(source instanceof Cluster[]) {
- // Get the pair.
- Cluster[] sourcePair = (Cluster[]) source;
- int[] iy = {
- TriggerDiagnosticUtil.getYIndex(sourcePair[0]),
- TriggerDiagnosticUtil.getYIndex(sourcePair[1])
- };
-
- // Get the time of the bottom cluster.
- if(sourcePair.length == 2) {
- if(iy[0] < 0) { return TriggerDiagnosticUtil.getClusterTime(sourcePair[0]); }
- else if(iy[1] < 0) { return TriggerDiagnosticUtil.getClusterTime(sourcePair[1]); }
- else { throw new IllegalArgumentException("Cluster pairs must be formed of a top/bottom pair."); }
- }
- else { throw new IllegalArgumentException("Cluster pairs must be of size 2."); }
- }
-
- // If the source type is unrecognized, throw an exception.
- throw new IllegalArgumentException(String.format("Trigger source type \"%\" is not supported.",
- source.getClass().getSimpleName()));
- }
-
- /**
- * Checks if a simulated trigger and an SSP trigger match. Note
- * that only certain types can be compared. These are:
- * <ul><li><code>SinglesTrigger<?> --> SSPSinglesTrigger</code></li>
- * <li><code>PairTrigger<?> --> SSPPairTrigger</code></li></ul>
- * @param simTrigger - The simulated trigger.
- * @param sspTrigger - The SSP bank trigger.
- * @return Returns an array of <code>boolean</code> primitives that
- * indicate which cuts passed and which failed.
- */
- private static final boolean[] triggerCutMatch(Trigger<?> simTrigger, SSPTrigger sspTrigger) {
- // Check that the cuts match for supported trigger types.
- if(simTrigger instanceof SinglesTrigger && sspTrigger instanceof SSPSinglesTrigger) {
- // Create an array to store the cut checks.
- boolean[] cutMatch = new boolean[3];
-
- // Cast the triggers.
- SinglesTrigger<?> simSingles = (SinglesTrigger<?>) simTrigger;
- SSPSinglesTrigger sspSingles = (SSPSinglesTrigger) sspTrigger;
-
- // Perform the check.
- cutMatch[ENERGY_MIN] = (simSingles.getStateClusterEnergyLow() == sspSingles.passCutEnergyMin());
- cutMatch[ENERGY_MAX] = (simSingles.getStateClusterEnergyHigh() == sspSingles.passCutEnergyMax());
- cutMatch[HIT_COUNT] = (simSingles.getStateHitCount() == sspSingles.passCutHitCount());
-
- // Return the match array.
- return cutMatch;
- } else if(simTrigger instanceof PairTrigger && sspTrigger instanceof SSPPairTrigger) {
- // Create an array to store the cut checks.
- boolean[] cutMatch = new boolean[4];
-
- // Cast the triggers.
- PairTrigger<?> simPair = (PairTrigger<?>) simTrigger;
- SSPPairTrigger sspPair = (SSPPairTrigger) sspTrigger;
-
- // Perform the check.
- cutMatch[ENERGY_SUM] = (simPair.getStateEnergySum() == sspPair.passCutEnergySum());
- cutMatch[ENERGY_DIFF] = (simPair.getStateEnergyDifference() == sspPair.passCutEnergyDifference());
- cutMatch[ENERGY_SLOPE] = (simPair.getStateEnergySlope() == sspPair.passCutEnergySlope());
- cutMatch[COPLANARITY] = (simPair.getStateCoplanarity() == sspPair.passCutCoplanarity());
-
- // Return the match array.
- return cutMatch;
- }
-
- // If this point is reached, the triggers are not of a supported
- // type for cut comparison. Produce an exception.
- throw new IllegalArgumentException(String.format("Triggers of type \"%s\" can not be cut-matched with triggers of type \"%s\".",
- simTrigger.getClass().getSimpleName(), sspTrigger.getClass().getSimpleName()));
- }
+ * clusters and a collection of SSP clusters with an algorithm that
+ * ignores the times reported for each cluster.
+ * @param reconClusters - A collection of reconstructed clusters.
+ * @param sspClusters - A collection of SSP clusters.
+ * @param energyWindow - The window of allowed deviation between
+ * the reconstructed cluster and SSP cluster energies.
+ * @param hitWindow - The window of allowed deviation between
+ * the reconstructed cluster and SSP cluster hit counts.
+ * @return Returns the cluster matching results stored inside a
+ * <code>DetailedClusterEvent</code> object.
+ */
+ private static final DetailedClusterEvent matchClusters(Collection<Cluster> reconClusters,
+ Collection<SSPCluster> sspClusters, double energyWindow, int hitWindow) {
+ // Track the number of cluster pairs that were matched and that
+ // failed by failure type.
+ DetailedClusterEvent event = new DetailedClusterEvent();
+
+ // Create maps to link cluster position to the list of clusters
+ // that were found at that location.
+ Map<Point, List<Cluster>> reconClusterMap = new HashMap<Point, List<Cluster>>(reconClusters.size());
+ Map<Point, List<SSPCluster>> sspClusterMap = new HashMap<Point, List<SSPCluster>>(reconClusters.size());
+
+ // Populate the reconstructed cluster map.
+ for(Cluster reconCluster : reconClusters) {
+ // Get the cluster position.
+ Point position = new Point(TriggerDiagnosticUtil.getXIndex(reconCluster),
+ TriggerDiagnosticUtil.getYIndex(reconCluster));
+
+ // Get the list for this cluster position.
+ List<Cluster> reconList = reconClusterMap.get(position);
+ if(reconList == null) {
+ reconList = new ArrayList<Cluster>();
+ reconClusterMap.put(position, reconList);
+ }
+
+ // Add the cluster to the list.
+ reconList.add(reconCluster);
+ }
+
+ // Populate the SSP cluster map.
+ for(SSPCluster sspCluster : sspClusters) {
+ // Get the cluster position.
+ Point position = new Point(sspCluster.getXIndex(), sspCluster.getYIndex());
+
+ // Get the list for this cluster position.
+ List<SSPCluster> sspList = sspClusterMap.get(position);
+ if(sspList == null) {
+ sspList = new ArrayList<SSPCluster>();
+ sspClusterMap.put(position, sspList);
+ }
+
+ // Add the cluster to the list.
+ sspList.add(sspCluster);
+ }
+
+ // For each reconstructed cluster, attempt to match the clusters
+ // with SSP clusters at the same position.
+ positionLoop:
+ for(Entry<Point, List<Cluster>> clusterSet : reconClusterMap.entrySet()) {
+ // Get the reconstructed and SSP clusters at this position.
+ List<Cluster> reconList = clusterSet.getValue();
+ List<SSPCluster> sspList = sspClusterMap.get(clusterSet.getKey());
+
+ // Print the crystal position header.
+ OutputLogger.println();
+ OutputLogger.printf("Considering clusters at (%3d, %3d)%n", clusterSet.getKey().x, clusterSet.getKey().y);
+
+ // If there are no SSP clusters, then matching fails by
+ // reason of position. The remainder of the loop may be
+ // skipped, since there is nothing to check.
+ if(sspList == null || sspList.isEmpty()) {
+ event.pairFailPosition(reconList.size());
+ continue positionLoop;
+ }
+
+ // Get all possible permutations of SSP clusters.
+ List<List<Pair<Cluster, SSPCluster>>> permutations = getPermutations(reconList, sspList);
+
+ // Print the information for this crystal position.
+ OutputLogger.printf("\tRecon Clusters :: %d%n", reconList.size());
+ OutputLogger.printf("\tSSP Clusters :: %d%n", sspList.size());
+ OutputLogger.printf("\tPermutations :: %d%n", permutations.size());
+
+ // Track the plotted values for the current best permutation.
+ DetailedClusterEvent bestPerm = null;
+
+ // Iterate over the permutations and find the permutation
+ // that produces the best possible result when compared to
+ // the reconstructed clusters.
+ int permIndex = 0;
+ for(List<Pair<Cluster, SSPCluster>> pairs : permutations) {
+ // Update the current permutation number.
+ permIndex++;
+
+ // Track the plot values for this permutation.
+ DetailedClusterEvent perm = new DetailedClusterEvent();
+
+ // Try to match each pair.
+ pairLoop:
+ for(Pair<Cluster, SSPCluster> pair : pairs) {
+ // Print the current reconstructed/SSP cluster pair.
+ OutputLogger.printf("\tP%d :: %s --> %s", permIndex,
+ pair.getFirstElement() == null ? "None" : TriggerDiagnosticUtil.clusterToString(pair.getFirstElement()),
+ pair.getSecondElement() == null ? "None" : TriggerDiagnosticUtil.clusterToString(pair.getSecondElement()));
+
+ // If either cluster in the pair is null, there
+ // are not enough clusters to perform this match.
+ if(pair.getFirstElement() == null || pair.getSecondElement() == null) {
+ // Log the result.
+ OutputLogger.printf(" [ %18s ]%n", "failure: unpaired");
+
+ // An unpaired SSP cluster does not necessarily
+ // represent a problem. Often, this just means
+ // that the SSP cluster's matching reconstructed
+ // cluster is outside the verification window.
+ if(pair.getSecondElement() == null) {
+ perm.pairFailPosition(pair.getFirstElement(), pair.getSecondElement());
+ }
+
+ // Skip the rest of the checks.
+ continue pairLoop;
+ }
+
+ // Check if the reconstructed cluster has an energy
+ // within the allotted threshold of the SSP cluster.
+ if(pair.getSecondElement().getEnergy() >= pair.getFirstElement().getEnergy() - energyWindow &&
+ pair.getSecondElement().getEnergy() <= pair.getFirstElement().getEnergy() + energyWindow) {
+
+ // Check that the hit count of the reconstructed
+ // is within the allotted threshold of the SSP
+ // cluster.
+ if(pair.getSecondElement().getHitCount() >= pair.getFirstElement().getCalorimeterHits().size() - hitWindow &&
+ pair.getSecondElement().getHitCount() <= pair.getFirstElement().getCalorimeterHits().size() + hitWindow) {
+ // Designate the pair as a match.
+ perm.pairMatch(pair.getFirstElement(), pair.getSecondElement());
+ OutputLogger.printf(" [ %18s ]%n", "success: matched");
+ } else {
+ perm.pairFailHitCount(pair.getFirstElement(), pair.getSecondElement());
+ OutputLogger.printf(" [ %18s ]%n", "failure: hit count");
+ } // End hit count check.
+ } else {
+ perm.pairFailEnergy(pair.getFirstElement(), pair.getSecondElement());
+ OutputLogger.printf(" [ %18s ]%n", "failure: energy");
+ } // End energy check.
+ } // End Pair Loop
+
+ // Print the results of the permutation.
+ OutputLogger.printf("\t\tPermutation Matched :: %d%n", perm.getMatches());
+ OutputLogger.printf("\t\tPermutation Energy :: %d%n", perm.getEnergyFailures());
+ OutputLogger.printf("\t\tPermutation Hit Count :: %d%n", perm.getHitCountFailures());
+
+ // Check whether the results from this permutation
+ // exceed the quality of the last best results. A
+ // greater number of matches is always better. If the
+ // matches are the same, select the one with fewer
+ // failures due to energy.
+ bestPerm = getBestPermutation(bestPerm, perm);
+ } // End Permutation Loop
+
+ // Print the final results for the position.
+ OutputLogger.printf("\tPosition Matched :: %d%n", bestPerm.getMatches());
+ OutputLogger.printf("\tPosition Energy :: %d%n", bestPerm.getEnergyFailures());
+ OutputLogger.printf("\tPosition Hit Count :: %d%n", bestPerm.getHitCountFailures());
+
+ // Add the results from the best-matched permutation
+ // to the event efficiency results.
+ event.addEvent(bestPerm);
+ } // End Crystal Position Loop
+
+ // Return the cluster match summary.
+ return event;
+ }
+
+ /**
+ * Performs cluster matching between a collection of reconstructed
+ * clusters and a collection of SSP clusters using the strictly
+ * time-compliant algorithm.
+ * @param reconClusters - A collection of reconstructed clusters.
+ * @param sspClusters - A collection of SSP clusters.
+ * @param energyWindow - The window of allowed deviation between
+ * the reconstructed cluster and SSP cluster energies.
+ * @param hitWindow - The window of allowed deviation between
+ * the reconstructed cluster and SSP cluster hit counts.
+ * @return Returns the cluster matching results stored inside a
+ * <code>DetailedClusterEvent</code> object.
+ */
+ private static final DetailedClusterEvent matchClustersTimeCompliant(Collection<Cluster> reconClusters,
+ Collection<SSPCluster> sspClusters, double energyWindow, int hitWindow) {
+ // Track the number of cluster pairs that were matched and that
+ // failed by failure type.
+ DetailedClusterEvent event = new DetailedClusterEvent();
+
+ // Store the clusters which have been successfully paired.
+ Set<SSPCluster> sspMatched = new HashSet<SSPCluster>(sspClusters.size());
+
+ // Find reconstructed/SSP cluster matched pairs.
+ reconLoop:
+ for(Cluster reconCluster : reconClusters) {
+ // Track whether a position-matched cluster was found.
+ boolean matchedPosition = false;
+
+ // VERBOSE :: Output the cluster being matched.
+ OutputLogger.printf("Considering %s%n", TriggerDiagnosticUtil.clusterToString(reconCluster));
+
+ // Search through the SSP clusters for a matching cluster.
+ sspLoop:
+ for(SSPCluster sspCluster : sspClusters) {
+ // VERBOSE :: Output the SSP cluster being considered.
+ OutputLogger.printf("\t%s ", TriggerDiagnosticUtil.clusterToString(sspCluster));
+
+ // If this cluster has been paired, skip it.
+ if(sspMatched.contains(sspCluster)) {
+ OutputLogger.printf("[ %7s; %9s ]%n", "fail", "matched");
+ continue sspLoop;
+ }
+
+ // Matched clusters must have the same position.
+ if(TriggerDiagnosticUtil.getXIndex(reconCluster) != sspCluster.getXIndex()
+ || TriggerDiagnosticUtil.getYIndex(reconCluster) != sspCluster.getYIndex()) {
+ OutputLogger.printf("[ %7s; %9s ]%n", "fail", "position");
+ continue sspLoop;
+ }
+
+ // Note that a cluster was found at this position.
+ matchedPosition = true;
+
+ // Matched clusters must have the same time-stamp.
+ if(reconCluster.getCalorimeterHits().get(0).getTime() != sspCluster.getTime()) {
+ OutputLogger.printf("[ %7s; %9s ]%n", "fail", "time");
+ continue sspLoop;
+ }
+
+ // Clusters that pass all of the above checks are the
+ // same cluster.
+ sspMatched.add(sspCluster);
+
+ // Check that the clusters are sufficiently close in
+ // energy to one another.
+ if(sspCluster.getEnergy() >= reconCluster.getEnergy() - energyWindow
+ && sspCluster.getEnergy() <= reconCluster.getEnergy() + energyWindow) {
+ // If a cluster matches in energy, check that it
+ // is also sufficiently close in hit count.
+ if(sspCluster.getHitCount() >= reconCluster.getCalorimeterHits().size() - hitWindow &&
+ sspCluster.getHitCount() <= reconCluster.getCalorimeterHits().size() + hitWindow) {
+ // The cluster is a match.
+ event.pairMatch(reconCluster, sspCluster);
+ OutputLogger.printf("[ %7s; %9s ]%n", "success", "matched");
+ continue reconLoop;
+ } else {
+ event.pairFailHitCount(reconCluster, sspCluster);
+ OutputLogger.printf("[ %7s; %9s ]%n", "fail", "hit count");
+ continue reconLoop;
+ } // End hit count check.
+ } else {
+ event.pairFailEnergy(reconCluster, sspCluster);
+ OutputLogger.printf("[ %7s; %9s ]%n", "fail", "energy");
+ continue reconLoop;
+ } // End energy check.
+ }// End SSP loop.
+
+ // If the reconstructed cluster has not been matched, check
+ // if a cluster was found at the same position. If not, then
+ // the cluster fails by reason of position.
+ if(!matchedPosition) {
+ event.pairFailPosition(reconCluster, null);
+ }
+
+ // Otherwise, the cluster had a potential matched, but the
+ // time-stamps were off. The cluster fails by reason of time.
+ else {
+ event.pairFailTime(reconCluster, null);
+ }
+ } // End recon loop.
+
+ // Return the populated match event.
+ return event;
+ }
+
+ /**
+ * Checks triggers simulated on SSP clusters against the SSP bank's
+ * reported triggers to verify that the trigger is correctly applying
+ * cuts to the clusters it sees. Additionally compares triggers
+ * simulated on reconstructed clusters to measure trigger efficiency.
+ */
+ private void singlesTriggerVerification() {
+ // Create lists of generic triggers.
+ List<List<? extends Trigger<?>>> sspTriggerList = new ArrayList<List<? extends Trigger<?>>>(2);
+ List<List<? extends Trigger<?>>> reconTriggerList = new ArrayList<List<? extends Trigger<?>>>(2);
+
+ // Convert the simulated triggers to generic versions and add
+ // them to the generic list.
+ sspTriggerList.add(triggerData.getSimSSPTriggers().getSingles0Triggers());
+ sspTriggerList.add(triggerData.getSimSSPTriggers().getSingles1Triggers());
+ reconTriggerList.add(triggerData.getSimReconTriggers().getSingles0Triggers());
+ reconTriggerList.add(triggerData.getSimReconTriggers().getSingles1Triggers());
+
+ // Run generic trigger verification.
+ triggerVerification(sspTriggerList, reconTriggerList, true);
+ }
+
+ /**
+ * Checks triggers simulated on SSP clusters against the SSP bank's
+ * reported triggers to verify that the trigger is correctly applying
+ * cuts to the clusters it sees. Additionally compares triggers
+ * simulated on reconstructed clusters to measure trigger efficiency.
+ */
+ private void pairTriggerVerification() {
+ // Create lists of generic triggers.
+ List<List<? extends Trigger<?>>> sspTriggerList = new ArrayList<List<? extends Trigger<?>>>(2);
+ List<List<? extends Trigger<?>>> reconTriggerList = new ArrayList<List<? extends Trigger<?>>>(2);
+
+ // Convert the simulated triggers to generic versions and add
+ // them to the generic list.
+ sspTriggerList.add(triggerData.getSimSSPTriggers().getPair0Triggers());
+ sspTriggerList.add(triggerData.getSimSSPTriggers().getPair1Triggers());
+ reconTriggerList.add(triggerData.getSimReconTriggers().getPair0Triggers());
+ reconTriggerList.add(triggerData.getSimReconTriggers().getPair1Triggers());
+
+ // Run generic trigger verification.
+ triggerVerification(sspTriggerList, reconTriggerList, false);
+ }
+
+ /**
+ * Performs trigger verification for both trigger types.
+ * @param sspTriggerList - The list of SSP triggers.
+ * @param reconTriggerList - The list of reconstructed triggers.
+ * @param isSingles - Whether or not this is a singles trigger
+ * verification.
+ */
+ private void triggerVerification(List<List<? extends Trigger<?>>> sspTriggerList,
+ List<List<? extends Trigger<?>>> reconTriggerList, boolean isSingles) {
+
+ // ==========================================================
+ // ==== Initialize Trigger Verification =====================
+ // ==========================================================
+
+ // Print the cluster verification header.
+ OutputLogger.println();
+ OutputLogger.println();
+ OutputLogger.println("======================================================================");
+ if(isSingles) { OutputLogger.println("=== Singles Trigger Verification ====================================="); }
+ else { OutputLogger.println("=== Pair Trigger Verification ========================================"); }
+ OutputLogger.println("======================================================================");
+
+ // Track the number of triggers seen and the number found.
+ TriggerEvent[] triggerEvent = { new TriggerEvent(), new TriggerEvent() };
+
+ // ==========================================================
+ // ==== Output Event Summary ================================
+ // ==========================================================
+
+ // Get the list of triggers reported by the SSP.
+ List<? extends SSPNumberedTrigger> sspTriggers;
+ if(isSingles) { sspTriggers = sspBank.getSinglesTriggers(); }
+ else { sspTriggers = sspBank.getPairTriggers(); }
+
+ // Output the SSP cluster triggers.
+ OutputLogger.println();
+ OutputLogger.println("SSP Cluster " + (isSingles ? "Singles" : "Pair") + " Triggers");
+ for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
+ for(Trigger<?> simTrigger : sspTriggerList.get(triggerNum)) {
+ OutputLogger.printf("\tTrigger %d :: %s :: %3.0f :: %s%n",
+ (triggerNum + 1), triggerPositionString(simTrigger),
+ getTriggerTime(simTrigger), simTrigger.toString());
+ }
+ }
+ if(sspTriggerList.get(0).size() + sspTriggerList.get(1).size() == 0) {
+ OutputLogger.println("\tNone");
+ }
+
+ // Output the reconstructed cluster singles triggers.
+ OutputLogger.println("Reconstructed Cluster " + (isSingles ? "Singles" : "Pair") + " Triggers");
+ for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
+ for(Trigger<?> simTrigger : reconTriggerList.get(triggerNum)) {
+ OutputLogger.printf("\tTrigger %d :: %s :: %3.0f :: %s%n",
+ (triggerNum + 1), triggerPositionString(simTrigger),
+ getTriggerTime(simTrigger), simTrigger.toString());
+ }
+ }
+ if(reconTriggerList.get(0).size() + reconTriggerList.get(1).size() == 0) {
+ OutputLogger.println("\tNone");
+ }
+
+ // Output the SSP reported triggers.
+ OutputLogger.println("SSP Reported " + (isSingles ? "Singles" : "Pair") + " Triggers");
+ for(SSPTrigger sspTrigger : sspTriggers) {
+ OutputLogger.printf("\t%s%n", sspTrigger.toString());
+ }
+ if(sspTriggers.size() == 0) { OutputLogger.println("\tNone"); }
+
+ // Update the trigger event with the counts for each type of
+ // simulated trigger. Reported triggers are counted later when
+ // already iterating over them.
+ for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
+ triggerEvent[triggerNum].sawSSPSimulatedTriggers(tiFlags, sspTriggerList.get(triggerNum).size());
+ triggerEvent[triggerNum].sawReconSimulatedTriggers(tiFlags, reconTriggerList.get(triggerNum).size());
+ }
+
+
+
+ // ==========================================================
+ // ==== SSP Internal Logic Verification =====================
+ // ==========================================================
+
+ // Track which SSP triggers have been matched to avoid matching
+ // multiple reconstructed SSP cluster triggers to the same SSP
+ // trigger.
+ Set<SSPNumberedTrigger> sspTriggerSet = new HashSet<SSPNumberedTrigger>();
+ Set<Trigger<?>> simTriggerSet = new HashSet<Trigger<?>>();
+
+ // Track the number of SSP reported triggers that are found in
+ // excess of the SSP simulated triggers.
+ int sspReportedExtras = sspTriggers.size() - (sspTriggerList.get(0).size() + sspTriggerList.get(1).size());
+ if(sspReportedExtras > 0) {
+ if(isSingles) { singlesInternalFail = true; }
+ else { pairInternalFail = true; }
+ } else { sspReportedExtras = 0; }
+
+ // Iterate over the triggers.
+ OutputLogger.println();
+ OutputLogger.println("Matching SSP Reported Triggers to SSP Simulated Triggers:");
+ for(SSPNumberedTrigger sspTrigger : sspTriggers) {
+ // Get the trigger information.
+ int triggerNum = sspTrigger.isFirstTrigger() ? 0 : 1;
+ OutputLogger.printf("\t%s%n", sspTrigger.toString());
+
+ // Note that a bank trigger was seen.
+ triggerEvent[triggerNum].sawReportedTrigger();
+
+ // Iterate over the SSP cluster simulated triggers and
+ // look for a trigger that matches.
+ matchLoop:
+ for(Trigger<?> simTrigger : sspTriggerList.get(triggerNum)) {
+ // VERBOSE :: Output the trigger being considered for
+ // matching.
+ OutputLogger.printf("\t\tTrigger %d :: %s :: %3.0f :: %s ",
+ (triggerNum + 1), triggerPositionString(simTrigger),
+ getTriggerTime(simTrigger), simTrigger.toString());
+
+ // If the current SSP trigger has already been matched,
+ // skip it.
+ if(simTriggerSet.contains(simTrigger)) {
+ OutputLogger.printf("[ %-15s ]%n", "failed; matched");
+ continue matchLoop;
+ }
+
+ // Check that the triggers have the same time. Triggers
+ // generated from SSP bank clusters should always align
+ // in time.
+ if(sspTrigger.getTime() != getTriggerTime(simTrigger)) {
+ OutputLogger.printf("[ %-15s ]%n", "failed; time");
+ continue matchLoop;
+ }
+
+ // Check whether the trigger cuts match.
+ boolean[] matchedCuts = triggerCutMatch(simTrigger, sspTrigger);
+ for(int i = 0; i < matchedCuts.length; i++) {
+ if(!matchedCuts[i]) {
+ int typeIndex = isSingles ? 0 : 1;
+ OutputLogger.printf("[ %-15s ]%n", String.format("failed; %s", cutNames[typeIndex][i]));
+ continue matchLoop;
+ }
+ }
+
+ // If all the cuts match, along with the time and the
+ // trigger number, than these triggers are a match.
+ sspTriggerSet.add(sspTrigger);
+ simTriggerSet.add(simTrigger);
+ triggerEvent[triggerNum].matchedSSPTrigger(tiFlags);
+ OutputLogger.printf("[ %-15s ]%n", "success");
+ break matchLoop;
+ }
+ }
+
+ for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
+ for(Trigger<?> simTrigger : sspTriggerList.get(triggerNum)) {
+ globalTriggerPlots.sawTrigger(simTrigger);
+ if(simTriggerSet.contains(simTrigger)) {
+ globalTriggerPlots.matchedTrigger(simTrigger);
+ } else {
+ globalTriggerPlots.failedTrigger(simTrigger);
+ }
+ }
+ }
+
+ // Iterate over the unmatched simulated triggers again and the
+ // unmatched SSP reported trigger that most closely matches it.
+ OutputLogger.println();
+ OutputLogger.println("Matching Failed SSP Reported Triggers to Remaining SSP Simulated Triggers:");
+ for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
+ simLoop:
+ for(Trigger<?> simTrigger : sspTriggerList.get(triggerNum)) {
+ OutputLogger.printf("\tTrigger %d :: %s :: %3.0f :: %s%n",
+ (triggerNum + 1), triggerPositionString(simTrigger),
+ getTriggerTime(simTrigger), simTrigger.toString());
+
+ // Check whether this trigger has already been matched
+ // or not. If it has been matched, skip it.
+ if(simTriggerSet.contains(simTrigger)) {
+ OutputLogger.println("\t\tSkipping; already matched successfully");
+ continue simLoop;
+ }
+
+ // Get the trigger time for the simulated trigger.
+ double simTime = getTriggerTime(simTrigger);
+
+ // Track the match statistics for each reported trigger
+ // so that the closest match may be found.
+ int numMatched = -1;
+ boolean[] matchedCut = null;
+ SSPNumberedTrigger bestMatch = null;
+
+ // Store the readout for the best match.
+ String bestMatchText = null;
+
+ // Iterate over the reported triggers to find a match.
+ reportedLoop:
+ for(SSPNumberedTrigger sspTrigger : sspTriggers) {
+ OutputLogger.printf("\t\t%s ", sspTrigger.toString());
+
+ // If the two triggers have different times, this
+ // trigger should be skipped.
+ if(sspTrigger.getTime() != simTime) {
+ OutputLogger.printf("[ %-15s ]%n", "failed; time");
+ continue reportedLoop;
+ }
+
+ // If this reported trigger has been matched then
+ // it should be skipped.
+ if(sspTriggerSet.contains(sspTrigger)) {
+ OutputLogger.printf("[ %-15s ]%n", "failed; matched");
+ continue reportedLoop;
+ }
+
+ // Check each of the cuts.
+ boolean[] tempMatchedCut = triggerCutMatch(simTrigger, sspTrigger);
+
+ // Check each cut and see if this is a closer match
+ // than the previous best match.
+ int tempNumMatched = 0;
+ for(boolean passed : tempMatchedCut) { if(passed) { tempNumMatched++; } }
+ OutputLogger.printf("[ %-15s ]%n", String.format("maybe; %d failed", tempNumMatched));
+
+ // If the number of matched cuts exceeds the old
+ // best result, this becomes the new best result.
+ if(tempNumMatched > numMatched) {
+ numMatched = tempNumMatched;
+ matchedCut = tempMatchedCut;
+ bestMatch = sspTrigger;
+ bestMatchText = String.format("%s%n", sspTrigger.toString());
+ }
+ }
+
+ // If there was no match found, it means that there were
+ // no triggers that were both unmatched and at the same
+ // time as this simulated trigger.
+ if(bestMatch == null) {
+ if(isSingles) { singlesInternalFail = true; }
+ else { pairInternalFail = true; }
+ triggerEvent[triggerNum].failedSSPTrigger();
+ OutputLogger.printf("\t\tTrigger %d :: %s :: %3.0f :: %s",
+ (triggerNum + 1), triggerPositionString(simTrigger),
+ getTriggerTime(simTrigger), simTrigger.toString());
+ OutputLogger.println(" --> No Valid Match Found");
+ } else {
+ triggerEvent[triggerNum].matchedSSPTrigger(tiFlags, matchedCut);
+ OutputLogger.printf("\t\tTrigger %d :: %s :: %3.0f :: %s",
+ (triggerNum + 1), triggerPositionString(simTrigger),
+ getTriggerTime(simTrigger), simTrigger.toString());
+ OutputLogger.println(" --> " + bestMatchText);
+ }
+ }
+ }
+
+
+
+ // ==========================================================
+ // ==== Trigger Efficiency ==================================
+ // ==========================================================
+
+ // Reset the SSP matched trigger set.
+ sspTriggerSet.clear();
+
+ // Iterate over the reconstructed cluster singles triggers.
+ OutputLogger.println();
+ OutputLogger.println("Recon Cluster Trigger --> SSP Reported Trigger Match Status");
+ for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
+ for(Trigger<?> simTrigger : reconTriggerList.get(triggerNum)) {
+ OutputLogger.printf("\tTrigger %d :: %s :: %s%n", (triggerNum + 1),
+ triggerPositionString(simTrigger), simTrigger.toString());
+
+ // TEMP :: Populate the recon ALL pairs plots.
+ globalTriggerPlots.sawTrigger(simTrigger);
+
+ // Iterate over the SSP reported triggers and compare
+ // them to the reconstructed cluster simulated trigger.
+ boolean matched = false;
+ matchLoop:
+ for(SSPNumberedTrigger sspTrigger : sspTriggers) {
+ OutputLogger.printf("\t\t\t%s", sspTrigger.toString());
+
+ // Only compare triggers if they are from the
+ // same trigger source.
+ if((triggerNum == 0 && sspTrigger.isSecondTrigger())
+ || (triggerNum == 1 && sspTrigger.isFirstTrigger())) {
+ OutputLogger.printf(" [ fail; source ]%n");
+ continue matchLoop;
+ }
+
+ // Only compare the singles trigger if it was
+ // not already matched to another trigger.
+ if(sspTriggerSet.contains(sspTrigger)) {
+ OutputLogger.printf(" [ fail; matched ]%n");
+ continue matchLoop;
+ }
+
+ // Test each cut.
+ int typeIndex = isSingles ? 0 : 1;
+ boolean[] matchedCuts = triggerCutMatch(simTrigger, sspTrigger);
+ for(int cutIndex = 0; cutIndex < matchedCuts.length; cutIndex++) {
+ if(!matchedCuts[cutIndex]) {
+ OutputLogger.printf(" [ fail; %-9s ]%n", cutNames[typeIndex][cutIndex]);
+ continue matchLoop;
+ }
+ }
+
+ // If all the trigger flags match, then the
+ // triggers are a match.
+ sspTriggerSet.add(sspTrigger);
+ triggerEvent[triggerNum].matchedReconTrigger(tiFlags);
+ OutputLogger.printf(" [ success ]%n");
+ globalTriggerPlots.matchedTrigger(simTrigger);
+ matched = true;
+ break matchLoop;
+ }
+
+ if(!matched) { globalTriggerPlots.failedTrigger(simTrigger); }
+ }
+ }
+
+
+
+ // ==========================================================
+ // ==== Output Event Results ================================
+ // ==========================================================
+
+ // Get the number of SSP and reconstructed cluster simulated
+ // triggers.
+ int sspSimTriggers = sspTriggerList.get(0).size() + sspTriggerList.get(1).size();
+ int reconSimTriggers = reconTriggerList.get(0).size() + reconTriggerList.get(1).size();
+ int[] sspTriggerCount = { sspTriggerList.get(0).size(), sspTriggerList.get(1).size() };
+
+ // Print event statistics.
+ OutputLogger.println();
+ OutputLogger.println("Event Statistics:");
+ OutputLogger.printf("\tSSP Cluster Sim Triggers :: %d%n", sspSimTriggers);
+ OutputLogger.printf("\tRecon Cluster Sim Triggers :: %d%n", reconSimTriggers);
+ OutputLogger.printf("\tSSP Reported Triggers :: %d%n", sspTriggers.size());
+
+ int matchedSSPTriggers = triggerEvent[0].getMatchedSSPSimulatedTriggers() + triggerEvent[1].getMatchedSSPSimulatedTriggers();
+ OutputLogger.printf("\tInternal Efficiency :: %d / %d ", matchedSSPTriggers, sspSimTriggers);
+ if(sspSimTriggers == 0) { OutputLogger.printf("(N/A)%n"); }
+ else { OutputLogger.printf("(%3.0f%%)%n", (100.0 * matchedSSPTriggers / sspSimTriggers)); }
+
+ int matchedReconTriggers = triggerEvent[0].getMatchedReconSimulatedTriggers() + triggerEvent[1].getMatchedReconSimulatedTriggers();
+ OutputLogger.printf("\tTrigger Efficiency :: %d / %d ", matchedReconTriggers, reconSimTriggers);
+ if(reconSimTriggers == 0) { OutputLogger.printf("(N/A)%n"); }
+ else { OutputLogger.printf("(%3.0f%%)%n", (100.0 * matchedReconTriggers / reconSimTriggers)); }
+
+ // Print the individual cut performances.
+ if(isSingles) {
+ OutputLogger.println();
+ for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
+ OutputLogger.printf("Trigger %d Individual Cut Failure Rate:%n", (triggerNum + 1));
+ if(sspTriggerCount[triggerNum] == 0) {
+ OutputLogger.printf("\tCluster Energy Lower Bound :: %d / %d%n", triggerEvent[triggerNum].getSSPCutFailures(ENERGY_MIN), sspTriggerCount[triggerNum]);
+ OutputLogger.printf("\tCluster Energy Upper Bound :: %d / %d%n", triggerEvent[triggerNum].getSSPCutFailures(ENERGY_MAX), sspTriggerCount[triggerNum]);
+ OutputLogger.printf("\tCluster Hit Count :: %d / %d%n", triggerEvent[triggerNum].getSSPCutFailures(HIT_COUNT), sspTriggerCount[triggerNum]);
+ } else {
+ OutputLogger.printf("\tCluster Energy Lower Bound :: %d / %d (%3.0f%%)%n",
+ triggerEvent[triggerNum].getSSPCutFailures(ENERGY_MIN), sspTriggerCount[triggerNum],
+ (100.0 * triggerEvent[triggerNum].getSSPCutFailures(ENERGY_MIN) / sspTriggerCount[triggerNum]));
+ OutputLogger.printf("\tCluster Energy Upper Bound :: %d / %d (%3.0f%%)%n",
+ triggerEvent[triggerNum].getSSPCutFailures(ENERGY_MAX), sspTriggerCount[triggerNum],
+ (100.0 * triggerEvent[triggerNum].getSSPCutFailures(ENERGY_MAX) / sspTriggerCount[triggerNum]));
+ OutputLogger.printf("\tCluster Hit Count :: %d / %d (%3.0f%%)%n",
+ triggerEvent[triggerNum].getSSPCutFailures(HIT_COUNT), sspTriggerCount[triggerNum],
+ (100.0 * triggerEvent[triggerNum].getSSPCutFailures(HIT_COUNT) / sspTriggerCount[triggerNum]));
+ }
+ }
+
+ // Update the global trigger tracking variables.
+ localStats.getTriggerStats().getSingles0Stats().addEvent(triggerEvent[0]);
+ localStats.getTriggerStats().getSingles1Stats().addEvent(triggerEvent[1]);
+ globalStats.getTriggerStats().getSingles0Stats().addEvent(triggerEvent[0]);
+ globalStats.getTriggerStats().getSingles1Stats().addEvent(triggerEvent[1]);
+ } else {
+ for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
+ OutputLogger.println();
+ OutputLogger.printf("Trigger %d Individual Cut Failure Rate:%n", (triggerNum + 1));
+ if(sspTriggerCount[triggerNum] == 0) {
+ OutputLogger.printf("\tPair Energy Sum :: %d / %d%n", triggerEvent[triggerNum].getSSPCutFailures(ENERGY_SUM), sspTriggerCount[triggerNum]);
+ OutputLogger.printf("\tPair Energy Difference :: %d / %d%n", triggerEvent[triggerNum].getSSPCutFailures(ENERGY_DIFF), sspTriggerCount[triggerNum]);
+ OutputLogger.printf("\tPair Energy Slope :: %d / %d%n", triggerEvent[triggerNum].getSSPCutFailures(ENERGY_SLOPE), sspTriggerCount[triggerNum]);
+ OutputLogger.printf("\tPair Coplanarity :: %d / %d%n", triggerEvent[triggerNum].getSSPCutFailures(COPLANARITY), sspTriggerCount[triggerNum]);
+ } else {
+ OutputLogger.printf("\tPair Energy Sum :: %d / %d (%3.0f%%)%n",
+ triggerEvent[triggerNum].getSSPCutFailures(ENERGY_SUM), sspTriggerCount[triggerNum],
+ (100.0 * triggerEvent[triggerNum].getSSPCutFailures(ENERGY_SUM) / sspTriggerCount[triggerNum]));
+ OutputLogger.printf("\tPair Energy Difference :: %d / %d (%3.0f%%)%n",
+ triggerEvent[triggerNum].getSSPCutFailures(ENERGY_DIFF), sspTriggerCount[triggerNum],
+ (100.0 * triggerEvent[triggerNum].getSSPCutFailures(ENERGY_DIFF) / sspTriggerCount[triggerNum]));
+ OutputLogger.printf("\tPair Energy Slope :: %d / %d (%3.0f%%)%n",
+ triggerEvent[triggerNum].getSSPCutFailures(ENERGY_SLOPE), sspTriggerCount[triggerNum],
+ (100.0 * triggerEvent[triggerNum].getSSPCutFailures(ENERGY_SLOPE) / sspTriggerCount[triggerNum]));
+ OutputLogger.printf("\tPair Coplanarity :: %d / %d (%3.0f%%)%n",
+ triggerEvent[triggerNum].getSSPCutFailures(COPLANARITY), sspTriggerCount[triggerNum],
+ (100.0 * triggerEvent[triggerNum].getSSPCutFailures(COPLANARITY) / sspTriggerCount[triggerNum]));
+ }
+ }
+
+ // Update the global trigger tracking variables.
+ localStats.getTriggerStats().getPair0Stats().addEvent(triggerEvent[0]);
+ localStats.getTriggerStats().getPair1Stats().addEvent(triggerEvent[1]);
+ globalStats.getTriggerStats().getPair0Stats().addEvent(triggerEvent[0]);
+ globalStats.getTriggerStats().getPair1Stats().addEvent(triggerEvent[1]);
+ }
+
+ // Note whether there was a trigger match failure.
+ if(triggerEvent[0].getFailedReconSimulatedTriggers() != 0 && triggerEvent[1].getFailedReconSimulatedTriggers() != 0) {
+ if(isSingles) { singlesEfficiencyFail = true; }
+ else { pairEfficiencyFail = true; }
+ } if(triggerEvent[0].getFailedSSPSimulatedTriggers() != 0 && triggerEvent[1].getFailedSSPSimulatedTriggers() != 0) {
+ if(isSingles) { singlesInternalFail = true; }
+ else { pairInternalFail = true; }
+ }
+ }
+
+ /**
+ * Outputs all of the verification parameters currently in use by
+ * the software. A warning will be issued if the values for NSA and
+ * NSB, along with the FADC window, preclude clusters from being
+ * verified.
+ */
+ private void logSettings() {
+ // Output general settings.
+ System.out.println("Cluster Verification Settings");
+ System.out.printf("\tHit Threshold :: %1d hit(s)%n", hitAcceptance);
+ System.out.printf("\tEnergy Threshold :: %5.3f GeV%n", energyAcceptance);
+ System.out.println();
+
+ // Output window settings.
+ System.out.println("FADC Timing Window Settings");
+ System.out.printf("\tNSB :: %3d ns%n", nsb);
+ System.out.printf("\tNSA :: %3d ns%n", nsa);
+ System.out.printf("\tFADC Window :: %3d ns%n", windowWidth);
+
+ // Calculate the valid clustering window.
+ int start = nsb;
+ int end = windowWidth - nsa;
+ if(start < end) {
+ System.out.printf("\tValid Cluster Window :: [ %3d ns, %3d ns ]%n", start, end);
+ performClusterVerification = true;
+ } else {
+ System.out.println("\tNSB, NSA, and FADC window preclude a valid cluster verification window.");
+ System.out.println("\tCluster verification will not be performed!");
+ performClusterVerification = false;
+ }
+ System.out.println();
+
+ // Output the singles trigger settings.
+ for(int i = 0; i < 2; i++) {
+ // Print the settings.
+ System.out.printf("Singles Trigger %d Settings%23s[%5b]%n", (i + 1), "", singlesTriggerEnabled[i]);
+ System.out.printf("\tCluster Energy Low :: %.3f GeV [%5b]%n",
+ singlesTrigger[i].getCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW), singlesCutsEnabled[i][0]);
+ System.out.printf("\tCluster Energy High :: %.3f GeV [%5b]%n",
+ singlesTrigger[i].getCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH), singlesCutsEnabled[i][1]);
+ System.out.printf("\tCluster Hit Count :: %.0f hit(s) [%5b]%n",
+ singlesTrigger[i].getCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW), singlesCutsEnabled[i][2]);
+ System.out.println();
+ }
+
+ // Output the pair trigger settings.
+ for(int i = 0; i < 2; i++) {
+ System.out.printf("Pairs Trigger %d Settings%25s[%5b]%n", (i + 1), "", pairTriggerEnabled[i]);
+ System.out.printf("\tCluster Energy Low :: %.3f GeV [%5b]%n",
+ pairsTrigger[i].getCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW), pairCutsEnabled[i][0]);
+ System.out.printf("\tCluster Energy High :: %.3f GeV [%5b]%n",
+ pairsTrigger[i].getCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH), pairCutsEnabled[i][1]);
+ System.out.printf("\tCluster Hit Count :: %.0f hit(s) [%5b]%n",
+ pairsTrigger[i].getCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW), pairCutsEnabled[i][2]);
+ System.out.printf("\tPair Energy Sum Low :: %.3f GeV [%5b]%n",
+ pairsTrigger[i].getCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW), pairCutsEnabled[i][3]);
+ System.out.printf("\tPair Energy Sum High :: %.3f GeV [%5b]%n",
+ pairsTrigger[i].getCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH), pairCutsEnabled[i][3]);
+ System.out.printf("\tPair Energy Difference :: %.3f GeV [%5b]%n",
+ pairsTrigger[i].getCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH), pairCutsEnabled[i][4]);
+ System.out.printf("\tPair Energy Slope :: %.3f GeV [%5b]%n",
+ pairsTrigger[i].getCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW), pairCutsEnabled[i][5]);
+ System.out.printf("\tPair Energy Slope F :: %.4f GeV / mm%n",
+ pairsTrigger[i].getCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F));
+ System.out.printf("\tPair Coplanarity :: %3.0f Degrees [%5b]%n",
+ pairsTrigger[i].getCutValue(TriggerModule.PAIR_COPLANARITY_HIGH), pairCutsEnabled[i][6]);
+ System.out.printf("\tPair Time Coincidence :: %2.0f ns [%5b]%n",
+ pairsTrigger[i].getCutValue(TriggerModule.PAIR_TIME_COINCIDENCE), true);
+ System.out.println();
+ }
+ }
+
+ /**
+ * Summarizes the global run statistics in a log to the terminal.
+ */
+ private void logStatistics() {
+ // Print the cluster/trigger verification header.
+ System.out.println();
+ System.out.println();
+ System.out.println("======================================================================");
+ System.out.println("=== Cluster/Trigger Verification Results =============================");
+ System.out.println("======================================================================");
+
+ // Print the general event failure rate.
+ int headSpaces = getPrintSpaces(globalStats.getEventCount());
+ System.out.println("General Event Statistics:");
+ System.out.printf("\tEvent Start Time :: %.3f s%n", (startTime / Math.pow(10, 9)));
+ System.out.printf("\tEvent End Time :: %.3f s%n", (endTime / Math.pow(10, 9)));
+ System.out.printf("\tEvent Run Time :: %.3f s%n", ((endTime - startTime) / Math.pow(10, 9)));
+ System.out.printf("\tNoise Events :: %" + headSpaces + "d / %" + headSpaces + "d (%7.3f%%)%n",
+ globalStats.getNoiseEvents(), globalStats.getEventCount(), (100.0 * globalStats.getNoiseEvents() / globalStats.getEventCount()));
+ System.out.printf("\tCluster Events Failed :: %" + headSpaces + "d / %" + headSpaces + "d (%7.3f%%)%n",
+ globalStats.getFailedClusterEventCount(), globalStats.getEventCount(), (100.0 * globalStats.getFailedClusterEventCount() / globalStats.getEventCount()));
+ System.out.printf("\tSingles Events Failed :: %" + headSpaces + "d / %" + headSpaces + "d (%7.3f%%)%n",
+ globalStats.getFailedSinglesEventCount(), globalStats.getEventCount(), (100.0 * globalStats.getFailedSinglesEventCount() / globalStats.getEventCount()));
+ System.out.printf("\tPair Events Failed :: %" + headSpaces + "d / %" + headSpaces + "d (%7.3f%%)%n",
+ globalStats.getFailedPairEventCount(), globalStats.getEventCount(), (100.0 * globalStats.getFailedPairEventCount() / globalStats.getEventCount()));
+
+ // Print out how many events reported a given TI type, both in
+ // total and hierarchically.
+ System.out.println();
+ System.out.println("Event Triggering Type Verification:");
+ System.out.printf("\t%15s\t%15s\t%15s%n", "Trigger", "Total", "Hierarchical");
+ System.out.printf("\t%15s\t%15s\t%15s%n", "Pulser", globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.PULSER, false),
+ globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.PULSER, true));
+ System.out.printf("\t%15s\t%15s\t%15s%n", "Cosmic", globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.COSMIC, false),
+ globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.COSMIC, true));
+ System.out.printf("\t%15s\t%15s\t%15s%n", "Singles 1", globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.SINGLES0, false),
+ globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.SINGLES0, true));
+ System.out.printf("\t%15s\t%15s\t%15s%n", "Singles 2", globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.SINGLES1, false),
+ globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.SINGLES1, true));
+ System.out.printf("\t%15s\t%15s\t%15s%n", "Pair 1", globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.PAIR0, false),
+ globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.PAIR0, true));
+ System.out.printf("\t%15s\t%15s\t%15s%n", "Pair 2", globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.PAIR1, false),
+ globalStats.getTriggerStats().getTITriggers(TriggerDiagStats.PAIR1, true));
+
+ // Print the cluster verification data.
+ System.out.println();
+ System.out.println("Cluster Verification:");
+ System.out.printf("\tRecon Clusters :: %d%n", globalStats.getClusterStats().getReconClusterCount());
+ System.out.printf("\tSSP Clusters :: %d%n", globalStats.getClusterStats().getSSPClusterCount());
+ System.out.printf("\tClusters Matched :: %d%n", globalStats.getClusterStats().getMatches());
+ System.out.printf("\tFailed (Position) :: %d%n", globalStats.getClusterStats().getPositionFailures());
+ System.out.printf("\tFailed (Energy) :: %d%n", globalStats.getClusterStats().getEnergyFailures());
+ System.out.printf("\tFailed (Hit Count) :: %d%n", globalStats.getClusterStats().getHitCountFailures());
+ if(globalStats.getClusterStats().getReconClusterCount() == 0) {
+ System.out.printf("\tCluster Efficiency :: N/A%n");
+ } else {
+ System.out.printf("\tCluster Efficiency :: %7.3f%%%n",
+ 100.0 * globalStats.getClusterStats().getMatches() / globalStats.getClusterStats().getReconClusterCount());
+ }
+
+ // Print the trigger verification data.
+ for(int triggerType = 0; triggerType < 2; triggerType++) {
+ // Get the trigger data. Type 0 represents singles triggers.
+ TriggerEvent[] triggerData = new TriggerEvent[2];
+ if(triggerType == 0) {
+ triggerData[0] = globalStats.getTriggerStats().getSingles0Stats();
+ triggerData[1] = globalStats.getTriggerStats().getSingles1Stats();
+ } else {
+ triggerData[0] = globalStats.getTriggerStats().getPair0Stats();
+ triggerData[1] = globalStats.getTriggerStats().getPair1Stats();
+ }
+
+ // Get the basic trigger data.
+ int sspSimTriggers = triggerData[0].getSSPSimulatedTriggers() + triggerData[1].getSSPSimulatedTriggers();
+ int reconSimTriggers = triggerData[0].getReconSimulatedTriggers() + triggerData[1].getReconSimulatedTriggers();
+ int sspReportedTriggers = triggerData[0].getReportedTriggers() + triggerData[1].getReportedTriggers();
+ int sspMatchedTriggers = triggerData[0].getMatchedSSPSimulatedTriggers() + triggerData[1].getMatchedSSPSimulatedTriggers();
+ int reconMatchedTriggers = triggerData[0].getMatchedReconSimulatedTriggers() + triggerData[1].getMatchedReconSimulatedTriggers();
+
+ // Print the basic trigger statistics.
+ int spaces = getPrintSpaces(sspSimTriggers, reconSimTriggers, sspReportedTriggers);
+ System.out.println();
+ if(triggerType == 0) { System.out.println("Singles Trigger Verification:"); }
+ else { System.out.println("Pair Trigger Verification:"); }
+ System.out.printf("\tSSP Cluster Sim Triggers :: %" + spaces + "d%n", sspSimTriggers);
+ System.out.printf("\tRecon Cluster Sim Triggers :: %" + spaces + "d%n", reconSimTriggers);
+ System.out.printf("\tSSP Reported Triggers :: %" + spaces + "d%n", sspReportedTriggers);
+
+ System.out.printf("\tInternal Efficiency :: %" + spaces + "d / %" + spaces + "d ", sspMatchedTriggers, sspSimTriggers);
+ if(sspSimTriggers == 0) { System.out.printf("(N/A)%n"); }
+ else { System.out.printf("(%7.3f%%)%n", (100.0 * sspMatchedTriggers / sspSimTriggers)); }
+
+ System.out.printf("\tTrigger Efficiency :: %" + spaces + "d / %" + spaces + "d ", reconMatchedTriggers, reconSimTriggers);
+ if(reconSimTriggers == 0) { System.out.printf("(N/A)%n"); }
+ else { System.out.printf("(%7.3f%%)%n" , (100.0 * reconMatchedTriggers / reconSimTriggers)); }
+
+ // Print the individual cut performances.
+ if(triggerType == 0) {
+ for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
+ // Get the appropriate trigger statistics module.
+ TriggerEvent triggerStats;
+ if(triggerNum == 0) { triggerStats = globalStats.getTriggerStats().getSingles0Stats(); }
+ else { triggerStats = globalStats.getTriggerStats().getSingles1Stats(); }
+
+ // Get the number of SSP triggers for this trigger number.
+ int sspTriggerCount = triggerStats.getSSPSimulatedTriggers();
+ //int sspTriggerCount = triggerRunStats[0].getTotalSSPTriggers(triggerNum);
+
+ System.out.println();
+ System.out.printf("\tTrigger %d Individual Cut Failure Rate:%n", (triggerNum + 1));
+ System.out.printf("\t\tUnmatched Triggers :: %" + spaces + "d%n", triggerStats.getUnmatchedSSPSimulatedTriggers());
+ //System.out.printf("\t\tUnmatched Triggers :: %" + spaces + "d%n", triggerRunStats[0].getUnmatchedTriggers(triggerNum));
+ if(sspTriggerCount == 0) {
+ System.out.printf("\t\tCluster Energy Lower Bound :: %" + spaces + "d / %" + spaces + "d%n",
+ triggerStats.getSSPCutFailures(ENERGY_MIN), sspTriggerCount);
+ System.out.printf("\t\tCluster Energy Upper Bound :: %" + spaces + "d / %" + spaces + "d%n",
+ triggerStats.getSSPCutFailures(ENERGY_MAX), sspTriggerCount);
+ System.out.printf("\t\tCluster Hit Count :: %" + spaces + "d / %" + spaces + "d%n",
+ triggerStats.getSSPCutFailures(HIT_COUNT), sspTriggerCount);
+ } else {
+ System.out.printf("\t\tCluster Energy Lower Bound :: %" + spaces + "d / %" + spaces + "d (%7.3f%%)%n",
+ triggerStats.getSSPCutFailures(ENERGY_MIN), sspTriggerCount,
+ (100.0 * triggerStats.getSSPCutFailures(ENERGY_MIN) / sspTriggerCount));
+ System.out.printf("\t\tCluster Energy Upper Bound :: %" + spaces + "d / %" + spaces + "d (%7.3f%%)%n",
+ triggerStats.getSSPCutFailures(ENERGY_MAX), sspTriggerCount,
+ (100.0 * triggerStats.getSSPCutFailures(ENERGY_MAX) / sspTriggerCount));
+ System.out.printf("\t\tCluster Hit Count :: %" + spaces + "d / %" + spaces + "d (%7.3f%%)%n",
+ triggerStats.getSSPCutFailures(HIT_COUNT), sspTriggerCount,
+ (100.0 * triggerStats.getSSPCutFailures(HIT_COUNT) / sspTriggerCount));
+ }
+ }
+ } else {
+ for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
+ // Get the appropriate trigger statistics module.
+ TriggerEvent triggerStats;
+ if(triggerNum == 0) { triggerStats = globalStats.getTriggerStats().getPair0Stats(); }
+ else { triggerStats = globalStats.getTriggerStats().getPair1Stats(); }
+
+ // Get the number of SSP triggers for this trigger number.
+ int sspTriggerCount = triggerStats.getSSPSimulatedTriggers();
+
+ System.out.println();
+ System.out.printf("\tTrigger %d Individual Cut Failure Rate:%n", (triggerNum + 1));
+ System.out.printf("\t\tUnmatched Triggers :: %" + spaces + "d%n", triggerStats.getUnmatchedSSPSimulatedTriggers());
+ if(sspTriggerCount == 0) {
+ System.out.printf("\t\tPair Energy Sum :: %" + spaces + "d / %" + spaces + "d%n",
+ triggerStats.getSSPCutFailures(ENERGY_SUM), sspTriggerCount);
+ System.out.printf("\t\tPair Energy Difference :: %" + spaces + "d / %" + spaces + "d%n",
+ triggerStats.getSSPCutFailures(ENERGY_DIFF), sspTriggerCount);
+ System.out.printf("\t\tPair Energy Slope :: %" + spaces + "d / %" + spaces + "d%n",
+ triggerStats.getSSPCutFailures(ENERGY_SLOPE), sspTriggerCount);
+ System.out.printf("\t\tPair Coplanarity :: %" + spaces + "d / %" + spaces + "d%n",
+ triggerStats.getSSPCutFailures(COPLANARITY), sspTriggerCount);
+ } else {
+ System.out.printf("\t\tPair Energy Sum :: %" + spaces + "d / %" + spaces + "d (%7.3f%%)%n",
+ triggerStats.getSSPCutFailures(ENERGY_SUM), sspTriggerCount,
+ (100.0 * triggerStats.getSSPCutFailures(ENERGY_SUM) / sspTriggerCount));
+ System.out.printf("\t\tPair Energy Difference :: %" + spaces + "d / %" + spaces + "d (%7.3f%%)%n",
+ triggerStats.getSSPCutFailures(ENERGY_DIFF), sspTriggerCount,
+ (100.0 * triggerStats.getSSPCutFailures(ENERGY_DIFF) / sspTriggerCount));
+ System.out.printf("\t\tPair Energy Slope :: %" + spaces + "d / %" + spaces + "d (%7.3f%%)%n",
+ triggerStats.getSSPCutFailures(ENERGY_SLOPE), sspTriggerCount,
+ (100.0 * triggerStats.getSSPCutFailures(ENERGY_SLOPE) / sspTriggerCount));
+ System.out.printf("\t\tPair Coplanarity :: %" + spaces + "d / %" + spaces + "d (%7.3f%%)%n",
+ triggerStats.getSSPCutFailures(COPLANARITY), sspTriggerCount,
+ (100.0 * triggerStats.getSSPCutFailures(COPLANARITY) / sspTriggerCount));
+ }
+ }
+ }
+ }
+
+ // Print out the trigger efficiency table.
+ System.out.println();
+ globalStats.getTriggerStats().printEfficiencyTable();
+ }
+
+ /**
+ * Checks whether all of the hits in a cluster are within the safe
+ * region of the FADC output window.
+ * @param reconCluster - The cluster to check.
+ * @return Returns <code>true</code> if the cluster is safe and
+ * returns <code>false</code> otherwise.
+ */
+ private final boolean isVerifiable(Cluster reconCluster) {
+ return TriggerDiagnosticUtil.isVerifiable(reconCluster, nsa, nsb, windowWidth);
+ }
+
+ /**
+ * Generates a <code>List</code> collection that contains a set
+ * of <code>ArrayList</code> collections representing a unique
+ * permutation of the entries in the argument.
+ * @param values - A collection of the entries to be permuted.
+ * @return Returns a list of lists representing the permutations.
+ */
+ private static final List<List<Pair<Cluster, SSPCluster>>> getPermutations(List<Cluster> reconClusters, List<SSPCluster> sspClusters) {
+ // Store the SSP cluster permutations.
+ List<List<SSPCluster>> permList = new ArrayList<List<SSPCluster>>();
+
+ // Make sure that the two lists are the same size.
+ int reconSize = reconClusters.size();
+ int sspSize = sspClusters.size();
+ while(sspClusters.size() < reconClusters.size()) {
+ sspClusters.add(null);
+ }
+ while(reconClusters.size() < sspClusters.size()) {
+ reconClusters.add(null);
+ }
+
+ // Get the SSP cluster permutations.
+ permute(new ArrayList<SSPCluster>(0), sspClusters, permList);
+
+ // Create pairs from the permutations.
+ List<List<Pair<Cluster, SSPCluster>>> pairList = new ArrayList<List<Pair<Cluster, SSPCluster>>>();
+ for(List<SSPCluster> permutation : permList) {
+ List<Pair<Cluster, SSPCluster>> pairs = new ArrayList<Pair<Cluster, SSPCluster>>(reconClusters.size());
+
+ for(int clusterIndex = 0; (clusterIndex < reconClusters.size() && clusterIndex < permutation.size()); clusterIndex++) {
+ pairs.add(new Pair<Cluster, SSPCluster>(reconClusters.get(clusterIndex), permutation.get(clusterIndex)));
+ }
+
+ pairList.add(pairs);
+ }
+
+ // Remove the extra values.
+ for(int i = sspClusters.size() - 1; i >= sspSize; i--) { sspClusters.remove(i); }
+ for(int i = reconClusters.size() - 1; i >= reconSize; i--) { reconClusters.remove(i); }
+
+ // Return the pairs.
+ return pairList;
+ }
+
+ /**
+ * Recursive method for permuting all entries in the argument
+ * collection <code>remainingValues</code> into the argument
+ * <code>permutedValues</code> values. Completed permutations are
+ * placed in the argument <code>permList</code>.
+ * @param permutedValues - List to store entries that have already
+ * been permuted.
+ * @param remainingValues - List to store entries that need to be
+ * permuted.
+ * @param permList - List to store completed permutations.
+ */
+ private static final void permute(List<SSPCluster> permutedValues, List<SSPCluster> remainingValues, List<List<SSPCluster>> permList) {
+ // If the list of entries that still need to be sorted is empty,
+ // then there is nothing to sort. Just return an empty list.
+ if(remainingValues.isEmpty()) { return; }
+
+ // If there is only one value left in the list of entries that
+ // still need to be sorted, then just add it to the permutation
+ // list and return it.
+ else if(remainingValues.size() <= 1) {
+ // Add the last entry.
+ permutedValues.add(remainingValues.get(0));
+
+ // Add the permutation to the list of completed permutations.
+ permList.add(permutedValues);
+ }
+
+ // Otherwise, continue to get all possible permutations.
+ else {
+ // Iterate over the entries that have not been permuted.
+ for(int i = 0; i < remainingValues.size(); i++) {
+ // Make new lists to contain the permutations.
+ List<SSPCluster> newPermList = new ArrayList<SSPCluster>(permutedValues.size() + 1);
+ List<SSPCluster> newRemainList = new ArrayList<SSPCluster>(remainingValues.size());
+
+ // Copy the current permuted entries to the new list
+ // and one value from the list of entries that have
+ // not been permuted yet.
+ newPermList.addAll(permutedValues);
+ newPermList.add(remainingValues.get(i));
+
+ // The new list of entries that have not been permuted
+ // should be identical, except it should now be missing
+ // the entry that was moved.
+ for(int index = 0; index < remainingValues.size(); index++) {
+ if(index != i) { newRemainList.add(remainingValues.get(index)); }
+ }
+
+ // Repeat the process with the new lists.
+ permute(newPermList, newRemainList, permList);
+ }
+ }
+ }
+
+ /**
+ * Compares two cluster matching events and finds the one that has
+ * the better results. Note that this will only return results that
+ * make sense if both of the events represent different permutations
+ * of the same set of clusters. Comparing events with different sets
+ * of clusters will produce meaningless results.
+ * @param firstEvent - The first cluster matching event,
+ * @param secondEvent - The second cluster matching event.
+ * @return Returns the cluster matching event that is better.
+ */
+ private static final DetailedClusterEvent getBestPermutation(DetailedClusterEvent firstEvent, DetailedClusterEvent secondEvent) {
+ // If both permutations are null, return that.
+ if(firstEvent == null && secondEvent == null) {
+ return null;
+ }
+
+ // If one permutation is null, it is not the best.
+ if(firstEvent == null) { return secondEvent; }
+ else if(secondEvent == null) { return firstEvent; }
+
+ // A permutation is better if it has more matches.
+ if(firstEvent.getMatches() > secondEvent.getMatches()) { return firstEvent; }
+ else if(secondEvent.getMatches() > firstEvent.getMatches()) { return secondEvent; }
+
+ // Otherwise, the permutation with the least energy failures is
+ // the better permutation.
+ if(firstEvent.getEnergyFailures() < secondEvent.getEnergyFailures()) { return firstEvent; }
+ else if(secondEvent.getEnergyFailures() < firstEvent.getEnergyFailures()) { return secondEvent; }
+
+ // If both these values are the same, then the events are identical.
+ return firstEvent;
+ }
+
+ /**
+ * Determines the number of spaces needed to render the longest of
+ * a series of integers as a string.
+ * @param vals - The series of integers.
+ * @return Returns the number of spaces needed to render the longest
+ * integer as a base-10 string.
+ */
+ private static final int getPrintSpaces(int... vals) {
+ // Track the largest value.
+ int largest = 0;
+
+ // Iterate over the arguments and find the largest.
+ for(int val : vals) {
+ // Get the length of the string.
+ int length = TriggerDiagnosticUtil.getDigits(val);
+
+ // If it is larger, track it.
+ if(length > largest) { largest = length; }
+ }
+
+ // Return the longer one.
+ return largest;
+ }
+
+ /**
+ * Gets the position of the source of a <code>Trigger</code> object
+ * as text. This method only supports trigger sources of the types
+ * <code>SSPCluster</code>, <code>Cluster</code>, and arrays of size
+ * two of either type.
+ * @param trigger - The trigger from which to obtain the source.
+ * @return Returns the source of the trigger as a <code>String</code>
+ * object.
+ * @throws IllegalArgumentException Occurs if the source of the
+ * trigger is not any of the supported types.
+ */
+ private static final String triggerPositionString(Trigger<?> trigger) throws IllegalArgumentException {
+ // Get the trigger source.
+ Object source = trigger.getTriggerSource();
+
+ // Handle valid trigger sources.
+ if(source instanceof SSPCluster) {
+ return TriggerDiagnosticUtil.clusterPositionString((SSPCluster) source);
+ } else if(source instanceof Cluster) {
+ return TriggerDiagnosticUtil.clusterPositionString((Cluster) source);
+ } else if(source instanceof SSPCluster[]) {
+ SSPCluster[] sourcePair = (SSPCluster[]) source;
+ if(sourcePair.length == 2) {
+ return String.format("%s, %s", TriggerDiagnosticUtil.clusterPositionString(sourcePair[0]),
+ TriggerDiagnosticUtil.clusterPositionString(sourcePair[1]));
+ }
+ } else if(source instanceof Cluster[]) {
+ Cluster[] sourcePair = (Cluster[]) source;
+ if(sourcePair.length == 2) {
+ return String.format("%s, %s", TriggerDiagnosticUtil.clusterPositionString(sourcePair[0]),
+ TriggerDiagnosticUtil.clusterPositionString(sourcePair[1]));
+ }
+ }
+
+ // Otherwise, the source type is unrecognized. Throw an error.
+ throw new IllegalArgumentException(String.format("Trigger source type \"%s\" is not supported.",
+ trigger.getTriggerSource().getClass().getSimpleName()));
+ }
+
+ /**
+ * Gets the time of a simulated trigger object. Method supports
+ * triggers with source objects of type <code>SSPCluster</code>,
+ * <code>Cluster</code>, and arrays of size two composed of either
+ * object type.
+ * @param trigger - The trigger.
+ * @return Returns the time at which the trigger occurred.
+ * @throws IllegalArgumentException Occurs if the trigger source
+ * is not a supported type.
+ */
+ private static final double getTriggerTime(Trigger<?> trigger) throws IllegalArgumentException {
+ // Get the trigger source.
+ Object source = trigger.getTriggerSource();
+
+ // Get the trigger time for supported trigger types.
+ if(source instanceof SSPCluster) {
+ return ((SSPCluster) source).getTime();
+ } else if(source instanceof Cluster) {
+ return TriggerDiagnosticUtil.getClusterTime((Cluster) source);
+ } else if(source instanceof SSPCluster[]) {
+ // Get the pair.
+ SSPCluster[] sourcePair = (SSPCluster[]) source;
+
+ // Get the time of the bottom cluster.
+ if(sourcePair.length == 2) {
+ if(sourcePair[0].getYIndex() < 0) { return sourcePair[0].getTime(); }
+ else if(sourcePair[1].getYIndex() < 0) { return sourcePair[1].getTime(); }
+ else { throw new IllegalArgumentException("Cluster pairs must be formed of a top/bottom pair."); }
+ }
+ else { throw new IllegalArgumentException("Cluster pairs must be of size 2."); }
+ } else if(source instanceof Cluster[]) {
+ // Get the pair.
+ Cluster[] sourcePair = (Cluster[]) source;
+ int[] iy = {
+ TriggerDiagnosticUtil.getYIndex(sourcePair[0]),
+ TriggerDiagnosticUtil.getYIndex(sourcePair[1])
+ };
+
+ // Get the time of the bottom cluster.
+ if(sourcePair.length == 2) {
+ if(iy[0] < 0) { return TriggerDiagnosticUtil.getClusterTime(sourcePair[0]); }
+ else if(iy[1] < 0) { return TriggerDiagnosticUtil.getClusterTime(sourcePair[1]); }
+ else { throw new IllegalArgumentException("Cluster pairs must be formed of a top/bottom pair."); }
+ }
+ else { throw new IllegalArgumentException("Cluster pairs must be of size 2."); }
+ }
+
+ // If the source type is unrecognized, throw an exception.
+ throw new IllegalArgumentException(String.format("Trigger source type \"%s\" is not supported.",
+ source.getClass().getSimpleName()));
+ }
+
+ /**
+ * Checks if a simulated trigger and an SSP trigger match. Note
+ * that only certain types can be compared. These are:
+ * <ul><li><code>SinglesTrigger<?> --> SSPSinglesTrigger</code></li>
+ * <li><code>PairTrigger<?> --> SSPPairTrigger</code></li></ul>
+ * @param simTrigger - The simulated trigger.
+ * @param sspTrigger - The SSP bank trigger.
+ * @return Returns an array of <code>boolean</code> primitives that
+ * indicate which cuts passed and which failed.
+ */
+ private static final boolean[] triggerCutMatch(Trigger<?> simTrigger, SSPTrigger sspTrigger) {
+ // Check that the cuts match for supported trigger types.
+ if(simTrigger instanceof SinglesTrigger && sspTrigger instanceof SSPSinglesTrigger) {
+ // Create an array to store the cut checks.
+ boolean[] cutMatch = new boolean[3];
+
+ // Cast the triggers.
+ SinglesTrigger<?> simSingles = (SinglesTrigger<?>) simTrigger;
+ SSPSinglesTrigger sspSingles = (SSPSinglesTrigger) sspTrigger;
+
+ // Perform the check.
+ cutMatch[ENERGY_MIN] = (simSingles.getStateClusterEnergyLow() == sspSingles.passCutEnergyMin());
+ cutMatch[ENERGY_MAX] = (simSingles.getStateClusterEnergyHigh() == sspSingles.passCutEnergyMax());
+ cutMatch[HIT_COUNT] = (simSingles.getStateHitCount() == sspSingles.passCutHitCount());
+
+ // Return the match array.
+ return cutMatch;
+ } else if(simTrigger instanceof PairTrigger && sspTrigger instanceof SSPPairTrigger) {
+ // Create an array to store the cut checks.
+ boolean[] cutMatch = new boolean[4];
+
+ // Cast the triggers.
+ PairTrigger<?> simPair = (PairTrigger<?>) simTrigger;
+ SSPPairTrigger sspPair = (SSPPairTrigger) sspTrigger;
+
+ // Perform the check.
+ cutMatch[ENERGY_SUM] = (simPair.getStateEnergySum() == sspPair.passCutEnergySum());
+ cutMatch[ENERGY_DIFF] = (simPair.getStateEnergyDifference() == sspPair.passCutEnergyDifference());
+ cutMatch[ENERGY_SLOPE] = (simPair.getStateEnergySlope() == sspPair.passCutEnergySlope());
+ cutMatch[COPLANARITY] = (simPair.getStateCoplanarity() == sspPair.passCutCoplanarity());
+
+ // Return the match array.
+ return cutMatch;
+ }
+
+ // If this point is reached, the triggers are not of a supported
+ // type for cut comparison. Produce an exception.
+ throw new IllegalArgumentException(String.format("Triggers of type \"%s\" can not be cut-matched with triggers of type \"%s\".",
+ simTrigger.getClass().getSimpleName(), sspTrigger.getClass().getSimpleName()));
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/TriggerTurnOnDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/TriggerTurnOnDriver.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/TriggerTurnOnDriver.java Wed Apr 27 11:11:32 2016
@@ -70,9 +70,9 @@
public TriggerTurnOnDriver() {
}
- public void setShowPlots(boolean showPlots) {
- this.showPlots = showPlots;
- }
+ public void setShowPlots(boolean showPlots) {
+ this.showPlots = showPlots;
+ }
@Override
protected void detectorChanged(Detector detector) {
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/ClusterEvent.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/ClusterEvent.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/ClusterEvent.java Wed Apr 27 11:11:32 2016
@@ -9,104 +9,104 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public class ClusterEvent extends ClusterStatModule {
- /**
- * Fuses another <code>ClusterEvent</code> with this object. The
- * other event's cluster pairs and states will be added to those
- * already in this event.
- * @param event - The event to fuse.
- */
- public void addEvent(ClusterEvent event) {
- // If the event is null, do nothing.
- if(event == null) { return; }
-
- // Add the values stored in the argument event to the counters
- // in this event.
- sspClusters += event.sspClusters;
- reconClusters += event.reconClusters;
- matches += event.matches;
- failEnergy += event.failEnergy;
- failPosition += event.failPosition;
- failHitCount += event.failHitCount;
- }
-
- /**
- * Indicates whether at least one cluster pair in the event created
- * a fail state.
- * @return Returns <code>true</code> if not all clusters matched and
- * <code>false</code> otherwise.
- */
- public boolean isFailState() {
- return (failEnergy > 0) || (failHitCount > 0) || (failTime > 0) || (failPosition > 0);
- }
-
- /**
- * Notes that a reconstructed cluster and SSP cluster pair failed
- * due to energy.
- */
- public void pairFailEnergy() {
- failEnergy++;
- }
-
- /**
- * Notes that a reconstructed cluster and SSP cluster pair failed
- * due to hit count.
- */
- public void pairFailHitCount() {
- failHitCount++;
- }
-
- /**
- * Notes that a reconstructed cluster and SSP cluster pair failed
- * due to position.
- */
- public void pairFailPosition() {
- failPosition++;
- }
-
- /**
- * Notes that one or more reconstructed cluster and SSP cluster pair
- * failed due to position.
- * @param count - The number of events that failed in this manner.
- */
- public void pairFailPosition(int count) {
- // negative values are non-physical.
- if(count < 0) {
- throw new IllegalArgumentException("Cluster failure counts must be non-negative.");
- }
-
- // Increment the count.
- failPosition += count;
- }
-
- /**
- * Notes that a reconstructed cluster and SSP cluster pair failed
- * due to time.
- */
- public void pairFailTime() {
- failTime++;
- }
-
- /**
- * Notes that a reconstructed cluster and SSP cluster pair was
- * successfully matched.
- */
- public void pairMatch() {
- matches++;
- }
-
- /**
- * Increments the number of reconstructed FADC clusters seen.
- * @param count - The number of clusters seen.
- */
- public void sawReconClusters(int count) {
- reconClusters += count;
- }
-
- /**
- * Increments the number of SSP bank clusters seen.
- * @param count - The number of clusters seen.
- */
- public void sawSSPClusters(int count) {
- sspClusters += count;
- }
+ /**
+ * Fuses another <code>ClusterEvent</code> with this object. The
+ * other event's cluster pairs and states will be added to those
+ * already in this event.
+ * @param event - The event to fuse.
+ */
+ public void addEvent(ClusterEvent event) {
+ // If the event is null, do nothing.
+ if(event == null) { return; }
+
+ // Add the values stored in the argument event to the counters
+ // in this event.
+ sspClusters += event.sspClusters;
+ reconClusters += event.reconClusters;
+ matches += event.matches;
+ failEnergy += event.failEnergy;
+ failPosition += event.failPosition;
+ failHitCount += event.failHitCount;
+ }
+
+ /**
+ * Indicates whether at least one cluster pair in the event created
+ * a fail state.
+ * @return Returns <code>true</code> if not all clusters matched and
+ * <code>false</code> otherwise.
+ */
+ public boolean isFailState() {
+ return (failEnergy > 0) || (failHitCount > 0) || (failTime > 0) || (failPosition > 0);
+ }
+
+ /**
+ * Notes that a reconstructed cluster and SSP cluster pair failed
+ * due to energy.
+ */
+ public void pairFailEnergy() {
+ failEnergy++;
+ }
+
+ /**
+ * Notes that a reconstructed cluster and SSP cluster pair failed
+ * due to hit count.
+ */
+ public void pairFailHitCount() {
+ failHitCount++;
+ }
+
+ /**
+ * Notes that a reconstructed cluster and SSP cluster pair failed
+ * due to position.
+ */
+ public void pairFailPosition() {
+ failPosition++;
+ }
+
+ /**
+ * Notes that one or more reconstructed cluster and SSP cluster pairs
+ * failed due to position.
+ * @param count - The number of events that failed in this manner.
+ */
+ public void pairFailPosition(int count) {
+ // negative values are non-physical.
+ if(count < 0) {
+ throw new IllegalArgumentException("Cluster failure counts must be non-negative.");
+ }
+
+ // Increment the count.
+ failPosition += count;
+ }
+
+ /**
+ * Notes that a reconstructed cluster and SSP cluster pair failed
+ * due to time.
+ */
+ public void pairFailTime() {
+ failTime++;
+ }
+
+ /**
+ * Notes that a reconstructed cluster and SSP cluster pair was
+ * successfully matched.
+ */
+ public void pairMatch() {
+ matches++;
+ }
+
+ /**
+ * Increments the number of reconstructed FADC clusters seen.
+ * @param count - The number of clusters seen.
+ */
+ public void sawReconClusters(int count) {
+ reconClusters += count;
+ }
+
+ /**
+ * Increments the number of SSP bank clusters seen.
+ * @param count - The number of clusters seen.
+ */
+ public void sawSSPClusters(int count) {
+ sspClusters += count;
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/ClusterMatchedPair.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/ClusterMatchedPair.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/ClusterMatchedPair.java Wed Apr 27 11:11:32 2016
@@ -14,114 +14,114 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public class ClusterMatchedPair extends Pair<Cluster, SSPCluster> {
- // CLass variables.
- private final byte state;
-
- /**
- * Instantiates a new <code>ClusterMatchedPair</code> object from
- * the two indicated clusters and marks their match state.
- * @param reconCluster - The reconstructed cluster.
- * @param sspCluster - The SSP cluster.
- * @param state - The pair match state.
- */
- public ClusterMatchedPair(Cluster reconCluster, SSPCluster sspCluster, byte state) {
- // Set the cluster pairs.
- super(reconCluster, sspCluster);
-
- // If the state is defined, set it. Otherwise, it is unknown.
- if(state == TriggerDiagnosticUtil.CLUSTER_STATE_MATCHED
- || state == TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_POSITION
- || state == TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_ENERGY
- || state == TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_HIT_COUNT) {
- this.state = state;
- } else {
- this.state = TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_UNKNOWN;
- }
- }
-
- /**
- * Gets the reconstructed cluster of the pair.
- * @return Returns the reconstructed cluster a <code>Cluster</cod>
- * object.
- */
- public Cluster getReconstructedCluster() {
- return getFirstElement();
- }
-
- /**
- * Gets the SSP cluster of the pair.
- * @return Returns the SSP cluster as an <code>SSPCluster</code>
- * object.
- */
- public SSPCluster getSSPCluster() {
- return getSecondElement();
- }
-
- /**
- * Gets the raw state identifier.
- * @return Returns the state identifier as a <code>byte</code>
- * primitive. Valid identifiers are defined in the class
- * <code>TriggerDiagnosticUtil</code>.
- */
- public byte getState() {
- return state;
- }
-
- /**
- * Indicates whether the recon/SSP pair failed to not being close
- * enough in energy.
- * @return Returns <code>true</code> if the pair match state is an
- * energy fail state and <code>false</code> otherwise.
- */
- public boolean isEnergyFailState() {
- return (state == TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_ENERGY);
- }
-
- /**
- * Indicates whether the recon/SSP pair failed to match due to not
- * being close enough in hit count.
- * @return Returns <code>true</code> if the pair match state is a
- * hit count fail state and <code>false</code> otherwise.
- */
- public boolean isHitCountFailState() {
- return (state == TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_HIT_COUNT);
- }
-
- /**
- * Indicates whether the recon/SSP pair matched.
- * @return Returns <code>true</code> if the pair match state is a
- * match state and <code>false</code> otherwise.
- */
- public boolean isMatch() {
- return (state == TriggerDiagnosticUtil.CLUSTER_STATE_MATCHED);
- }
-
- /**
- * Indicates whether the recon/SSP pair failed to match due to the
- * cluster positions not aligning.
- * @return Returns <code>true</code> if the pair match state is a
- * position fail state and <code>false</code> otherwise.
- */
- public boolean isPositionFailState() {
- return (state == TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_POSITION);
- }
-
- /**
- * Indicates whether the recon/SSP pair failed to match due to the
- * cluster time-stamps not aligning.
- * @return Returns <code>true</code> if the pair match state is a
- * time fail state and <code>false</code> otherwise.
- */
- public boolean isTimeFailState() {
- return (state == TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_TIME);
- }
-
- /**
- * Indicates whether the recon/SSP pair has no known match state.
- * @return Returns <code>true</code> if the pair match state is
- * unknown and <code>false</code> otherwise.
- */
- public boolean isUnknownState() {
- return (state == TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_UNKNOWN);
- }
+ // Class variables.
+ private final byte state;
+
+ /**
+ * Instantiates a new <code>ClusterMatchedPair</code> object from
+ * the two indicated clusters and marks their match state.
+ * @param reconCluster - The reconstructed cluster.
+ * @param sspCluster - The SSP cluster.
+ * @param state - The pair match state.
+ */
+ public ClusterMatchedPair(Cluster reconCluster, SSPCluster sspCluster, byte state) {
+ // Set the cluster pairs.
+ super(reconCluster, sspCluster);
+
+ // If the state is defined, set it. Otherwise, it is unknown.
+ if(state == TriggerDiagnosticUtil.CLUSTER_STATE_MATCHED
+ || state == TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_POSITION
+ || state == TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_ENERGY
+ || state == TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_HIT_COUNT) {
+ this.state = state;
+ } else {
+ this.state = TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_UNKNOWN;
+ }
+ }
+
+ /**
+ * Gets the reconstructed cluster of the pair.
+ * @return Returns the reconstructed cluster as a <code>Cluster</code>
+ * object.
+ */
+ public Cluster getReconstructedCluster() {
+ return getFirstElement();
+ }
+
+ /**
+ * Gets the SSP cluster of the pair.
+ * @return Returns the SSP cluster as an <code>SSPCluster</code>
+ * object.
+ */
+ public SSPCluster getSSPCluster() {
+ return getSecondElement();
+ }
+
+ /**
+ * Gets the raw state identifier.
+ * @return Returns the state identifier as a <code>byte</code>
+ * primitive. Valid identifiers are defined in the class
+ * <code>TriggerDiagnosticUtil</code>.
+ */
+ public byte getState() {
+ return state;
+ }
+
+ /**
+ * Indicates whether the recon/SSP pair failed to match due to
+ * not being close enough in energy.
+ * @return Returns <code>true</code> if the pair match state is an
+ * energy fail state and <code>false</code> otherwise.
+ */
+ public boolean isEnergyFailState() {
+ return (state == TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_ENERGY);
+ }
+
+ /**
+ * Indicates whether the recon/SSP pair failed to match due to not
+ * being close enough in hit count.
+ * @return Returns <code>true</code> if the pair match state is a
+ * hit count fail state and <code>false</code> otherwise.
+ */
+ public boolean isHitCountFailState() {
+ return (state == TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_HIT_COUNT);
+ }
+
+ /**
+ * Indicates whether the recon/SSP pair matched.
+ * @return Returns <code>true</code> if the pair match state is a
+ * match state and <code>false</code> otherwise.
+ */
+ public boolean isMatch() {
+ return (state == TriggerDiagnosticUtil.CLUSTER_STATE_MATCHED);
+ }
+
+ /**
+ * Indicates whether the recon/SSP pair failed to match due to the
+ * cluster positions not aligning.
+ * @return Returns <code>true</code> if the pair match state is a
+ * position fail state and <code>false</code> otherwise.
+ */
+ public boolean isPositionFailState() {
+ return (state == TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_POSITION);
+ }
+
+ /**
+ * Indicates whether the recon/SSP pair failed to match due to the
+ * cluster time-stamps not aligning.
+ * @return Returns <code>true</code> if the pair match state is a
+ * time fail state and <code>false</code> otherwise.
+ */
+ public boolean isTimeFailState() {
+ return (state == TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_TIME);
+ }
+
+ /**
+ * Indicates whether the recon/SSP pair has no known match state.
+ * @return Returns <code>true</code> if the pair match state is
+ * unknown and <code>false</code> otherwise.
+ */
+ public boolean isUnknownState() {
+ return (state == TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_UNKNOWN);
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/ClusterStatModule.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/ClusterStatModule.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/ClusterStatModule.java Wed Apr 27 11:11:32 2016
@@ -7,100 +7,100 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public class ClusterStatModule {
- // Track cluster statistics.
- protected int sspClusters = 0;
- protected int reconClusters = 0;
- protected int matches = 0;
- protected int failEnergy = 0;
- protected int failPosition = 0;
- protected int failHitCount = 0;
- protected int failTime = 0;
-
- /**
- * Instantiates a <code>ClusterStatModule</code> with no statistics
- * stored.
- */
- ClusterStatModule() { }
-
- /**
- * Clears all statistical information and resets the object of its
- * default, empty state.
- */
- void clear() {
- sspClusters = 0;
- reconClusters = 0;
- matches = 0;
- failEnergy = 0;
- failPosition = 0;
- failHitCount = 0;
- failTime = 0;
- }
-
- @Override
- public ClusterStatModule clone() {
- // Create a clone.
- ClusterStatModule clone = new ClusterStatModule();
-
- // Copy the statistical values to the clone.
- clone.sspClusters = sspClusters;
- clone.reconClusters = reconClusters;
- clone.matches = matches;
- clone.failEnergy = failEnergy;
- clone.failPosition = failPosition;
- clone.failHitCount = failHitCount;
- clone.failTime = failTime;
-
- // Return the clone.
- return clone;
- }
-
- /**
- * Gets the number of cluster pairs stored in this event that are
- * marked with energy fail states.
- * @return Returns the number of instances of this state as an
- * <code>int</code> primitive.
- */
- public int getEnergyFailures() {
- return failEnergy;
- }
-
- /**
- * Gets the number of cluster pairs stored in this event that are
- * marked with hit count fail states.
- * @return Returns the number of instances of this state as an
- * <code>int</code> primitive.
- */
- public int getHitCountFailures() {
- return failHitCount;
- }
-
- /**
- * Gets the number of cluster pairs stored in this event that are
- * marked with position fail states.
- * @return Returns the number of instances of this state as an
- * <code>int</code> primitive.
- */
- public int getMatches() {
- return matches;
- }
-
- /**
- * Gets the number of cluster pairs stored in this event that are
- * marked with position fail states.
- * @return Returns the number of instances of this state as an
- * <code>int</code> primitive.
- */
- public int getPositionFailures() {
- return failPosition;
- }
-
- /**
- * Gets the total number of verifiable reconstructed clusters seen.
+ // Track cluster statistics.
+ protected int sspClusters = 0;
+ protected int reconClusters = 0;
+ protected int matches = 0;
+ protected int failEnergy = 0;
+ protected int failPosition = 0;
+ protected int failHitCount = 0;
+ protected int failTime = 0;
+
+ /**
+ * Instantiates a <code>ClusterStatModule</code> with no statistics
+ * stored.
+ */
+ ClusterStatModule() { }
+
+ /**
+ * Clears all statistical information and resets the object to its
+ * default, empty state.
+ */
+ void clear() {
+ sspClusters = 0;
+ reconClusters = 0;
+ matches = 0;
+ failEnergy = 0;
+ failPosition = 0;
+ failHitCount = 0;
+ failTime = 0;
+ }
+
+ @Override
+ public ClusterStatModule clone() {
+ // Create a clone.
+ ClusterStatModule clone = new ClusterStatModule();
+
+ // Copy the statistical values to the clone.
+ clone.sspClusters = sspClusters;
+ clone.reconClusters = reconClusters;
+ clone.matches = matches;
+ clone.failEnergy = failEnergy;
+ clone.failPosition = failPosition;
+ clone.failHitCount = failHitCount;
+ clone.failTime = failTime;
+
+ // Return the clone.
+ return clone;
+ }
+
+ /**
+ * Gets the number of cluster pairs stored in this event that are
+ * marked with energy fail states.
+ * @return Returns the number of instances of this state as an
+ * <code>int</code> primitive.
+ */
+ public int getEnergyFailures() {
+ return failEnergy;
+ }
+
+ /**
+ * Gets the number of cluster pairs stored in this event that are
+ * marked with hit count fail states.
+ * @return Returns the number of instances of this state as an
+ * <code>int</code> primitive.
+ */
+ public int getHitCountFailures() {
+ return failHitCount;
+ }
+
+ /**
+ * Gets the number of cluster pairs stored in this event that are
+ * successfully matched.
+ * @return Returns the number of instances of this state as an
+ * <code>int</code> primitive.
+ */
+ public int getMatches() {
+ return matches;
+ }
+
+ /**
+ * Gets the number of cluster pairs stored in this event that are
+ * marked with position fail states.
+ * @return Returns the number of instances of this state as an
+ * <code>int</code> primitive.
+ */
+ public int getPositionFailures() {
+ return failPosition;
+ }
+
+ /**
+ * Gets the total number of verifiable reconstructed clusters seen.
* @return Returns the cluster count as an <code>int</code>
* primitive.
- */
+ */
public int getReconClusterCount() {
- return reconClusters;
+ return reconClusters;
}
/**
@@ -109,16 +109,16 @@
* primitive.
*/
public int getSSPClusterCount() {
- return sspClusters;
+ return sspClusters;
}
-
- /**
- * Gets the number of cluster pairs stored in this event that are
- * marked with time fail states.
- * @return Returns the number of instances of this state as an
- * <code>int</code> primitive.
- */
- public int getTimeFailures() {
- return failTime;
- }
+
+ /**
+ * Gets the number of cluster pairs stored in this event that are
+ * marked with time fail states.
+ * @return Returns the number of instances of this state as an
+ * <code>int</code> primitive.
+ */
+ public int getTimeFailures() {
+ return failTime;
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/DetailedClusterEvent.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/DetailedClusterEvent.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/DetailedClusterEvent.java Wed Apr 27 11:11:32 2016
@@ -8,89 +8,89 @@
import org.lcsim.event.Cluster;
public class DetailedClusterEvent extends ClusterEvent {
- // Store all of the pairs.
- private List<ClusterMatchedPair> pairList = new ArrayList<ClusterMatchedPair>();
-
- /**
- * Fuses another <code>ClusterEvent</code> with this object. The
- * other event's cluster pairs and states will be added to those
- * already in this event.
- * @param event - The event to fuse.
- */
- public void addEvent(ClusterEvent event) {
- // Run the superclass method.
- super.addEvent(event);
-
- // If the event is null, do nothing.
- if(event == null) { return; }
-
- // Merge the list of cluster pairs, if applicable.
- if(event instanceof DetailedClusterEvent) {
- pairList.addAll(((DetailedClusterEvent) event).pairList);
- }
- }
-
- /**
- * Adds a reconstructed/SSP cluster pair and marks it as having an
- * energy fail state.
- * @param reconCluster - The reconstructed cluster.
- * @param sspCluster - The SSP cluster.
- */
- public void pairFailEnergy(Cluster reconCluster, SSPCluster sspCluster) {
- pairFailEnergy();
- pairList.add(new ClusterMatchedPair(reconCluster, sspCluster, TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_ENERGY));
- }
-
- /**
- * Adds a reconstructed/SSP cluster pair and marks it as having a
- * hit count fail state.
- * @param reconCluster - The reconstructed cluster.
- * @param sspCluster - The SSP cluster.
- */
- public void pairFailHitCount(Cluster reconCluster, SSPCluster sspCluster) {
- pairFailHitCount();
- pairList.add(new ClusterMatchedPair(reconCluster, sspCluster, TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_HIT_COUNT));
- }
-
- /**
- * Adds a reconstructed/SSP cluster pair and marks it as having a
- * position fail state.
- * @param reconCluster - The reconstructed cluster.
- * @param sspCluster - The SSP cluster.
- */
- public void pairFailPosition(Cluster reconCluster, SSPCluster sspCluster) {
- pairFailPosition();
- pairList.add(new ClusterMatchedPair(reconCluster, sspCluster, TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_POSITION));
- }
-
- /**
- * Adds a reconstructed/SSP cluster pair and marks it as having a
- * time fail state.
- * @param reconCluster - The reconstructed cluster.
- * @param sspCluster - The SSP cluster.
- */
- public void pairFailTime(Cluster reconCluster, SSPCluster sspCluster) {
- pairFailTime();
- pairList.add(new ClusterMatchedPair(reconCluster, sspCluster, TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_TIME));
- }
-
- /**
- * Adds a reconstructed/SSP cluster pair and marks it as having a
- * match state.
- * @param reconCluster - The reconstructed cluster.
- * @param sspCluster - The SSP cluster.
- */
- public void pairMatch(Cluster reconCluster, SSPCluster sspCluster) {
- pairMatch();
- pairList.add(new ClusterMatchedPair(reconCluster, sspCluster, TriggerDiagnosticUtil.CLUSTER_STATE_MATCHED));
- }
-
- /**
- * Gets a list of all matched cluster pairs and their match states.
- * @return Returns the matched cluster pairs as a <code>List</code>
- * of <code>ClusterMatchedPair</code> objects.
- */
- public List<ClusterMatchedPair> getClusterPairs() {
- return pairList;
- }
+ // Store all of the pairs.
+ private List<ClusterMatchedPair> pairList = new ArrayList<ClusterMatchedPair>();
+
+ /**
+ * Fuses another <code>ClusterEvent</code> with this object. The
+ * other event's cluster pairs and states will be added to those
+ * already in this event.
+ * @param event - The event to fuse.
+ */
+ public void addEvent(ClusterEvent event) {
+ // Run the superclass method.
+ super.addEvent(event);
+
+ // If the event is null, do nothing.
+ if(event == null) { return; }
+
+ // Merge the list of cluster pairs, if applicable.
+ if(event instanceof DetailedClusterEvent) {
+ pairList.addAll(((DetailedClusterEvent) event).pairList);
+ }
+ }
+
+ /**
+ * Adds a reconstructed/SSP cluster pair and marks it as having an
+ * energy fail state.
+ * @param reconCluster - The reconstructed cluster.
+ * @param sspCluster - The SSP cluster.
+ */
+ public void pairFailEnergy(Cluster reconCluster, SSPCluster sspCluster) {
+ pairFailEnergy();
+ pairList.add(new ClusterMatchedPair(reconCluster, sspCluster, TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_ENERGY));
+ }
+
+ /**
+ * Adds a reconstructed/SSP cluster pair and marks it as having a
+ * hit count fail state.
+ * @param reconCluster - The reconstructed cluster.
+ * @param sspCluster - The SSP cluster.
+ */
+ public void pairFailHitCount(Cluster reconCluster, SSPCluster sspCluster) {
+ pairFailHitCount();
+ pairList.add(new ClusterMatchedPair(reconCluster, sspCluster, TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_HIT_COUNT));
+ }
+
+ /**
+ * Adds a reconstructed/SSP cluster pair and marks it as having a
+ * position fail state.
+ * @param reconCluster - The reconstructed cluster.
+ * @param sspCluster - The SSP cluster.
+ */
+ public void pairFailPosition(Cluster reconCluster, SSPCluster sspCluster) {
+ pairFailPosition();
+ pairList.add(new ClusterMatchedPair(reconCluster, sspCluster, TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_POSITION));
+ }
+
+ /**
+ * Adds a reconstructed/SSP cluster pair and marks it as having a
+ * time fail state.
+ * @param reconCluster - The reconstructed cluster.
+ * @param sspCluster - The SSP cluster.
+ */
+ public void pairFailTime(Cluster reconCluster, SSPCluster sspCluster) {
+ pairFailTime();
+ pairList.add(new ClusterMatchedPair(reconCluster, sspCluster, TriggerDiagnosticUtil.CLUSTER_STATE_FAIL_TIME));
+ }
+
+ /**
+ * Adds a reconstructed/SSP cluster pair and marks it as having a
+ * match state.
+ * @param reconCluster - The reconstructed cluster.
+ * @param sspCluster - The SSP cluster.
+ */
+ public void pairMatch(Cluster reconCluster, SSPCluster sspCluster) {
+ pairMatch();
+ pairList.add(new ClusterMatchedPair(reconCluster, sspCluster, TriggerDiagnosticUtil.CLUSTER_STATE_MATCHED));
+ }
+
+ /**
+ * Gets a list of all matched cluster pairs and their match states.
+ * @return Returns the matched cluster pairs as a <code>List</code>
+ * of <code>ClusterMatchedPair</code> objects.
+ */
+ public List<ClusterMatchedPair> getClusterPairs() {
+ return pairList;
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/DiagnosticSnapshot.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/DiagnosticSnapshot.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/DiagnosticSnapshot.java Wed Apr 27 11:11:32 2016
@@ -8,117 +8,117 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public class DiagnosticSnapshot {
- // Store the TI trigger information.
- private int[] tiSeenAll = new int[6];
- private int[] tiSeenHierarchical = new int[6];
-
- // Store the statistical modules.
- private final GeneralStatModule generalStats;
- private final ClusterStatModule clusterStats;
- private final TriggerStatModule[] triggerStats = new TriggerStatModule[4];
-
- /**
- * Creates a snapshot of the trigger diagnostic results.
- * @param stats - The run statistical object.
- */
- DiagnosticSnapshot(RunDiagStats stats) {
- // Store the statistical modules.
- generalStats = stats.clone();
- clusterStats = stats.getClusterStats().clone();
- triggerStats[0] = stats.getTriggerStats().getSingles0Stats().clone();
- triggerStats[1] = stats.getTriggerStats().getSingles1Stats().clone();
- triggerStats[2] = stats.getTriggerStats().getPair0Stats().clone();
- triggerStats[3] = stats.getTriggerStats().getPair1Stats().clone();
-
- // Copy the TI trigger data.
- for(int triggerType = 0; triggerType < 6; triggerType++) {
- tiSeenAll[triggerType] = stats.getTriggerStats().getTITriggers(triggerType, false);
- tiSeenHierarchical[triggerType] = stats.getTriggerStats().getTITriggers(triggerType, true);
- }
- }
-
- /**
- * Gets the general run statistics.
- * @return Returns a <code>GeneralStatModule</code> object that
- * contains the statistics.
- */
- public GeneralStatModule getGeneralStats() {
- return generalStats;
- }
-
- /**
- * Gets the cluster statistics.
- * @return Returns a <code>ClusterStatModule</code> object that
- * contains the statistics.
- */
- public ClusterStatModule getClusterStats() {
- return clusterStats;
- }
-
- /**
- * Gets the singles 0 trigger statistics.
- * @return Returns a <code>TriggerStatModule</code> object that
- * contains the statistics.
- */
- public TriggerStatModule getSingles0Stats() {
- return triggerStats[0];
- }
-
- /**
- * Gets the singles 1 trigger statistics.
- * @return Returns a <code>TriggerStatModule</code> object that
- * contains the statistics.
- */
- public TriggerStatModule getSingles1Stats() {
- return triggerStats[1];
- }
-
- /**
- * Gets the pair 0 trigger statistics.
- * @return Returns a <code>TriggerStatModule</code> object that
- * contains the statistics.
- */
- public TriggerStatModule getPair0Stats() {
- return triggerStats[2];
- }
-
- /**
- * Gets the pair 1 trigger statistics.
- * @return Returns a <code>TriggerStatModule</code> object that
- * contains the statistics.
- */
- public TriggerStatModule getPair1Stats() {
- return triggerStats[3];
- }
-
- /**
- * Gets the total number of events where the TI reported a trigger
- * of the specified type.
- * @param triggerID - The identifier for the type of trigger.
- * @param unique - <code>true</code> returns only the number of
- * events where this trigger type was the <i>only</i> type seen by
- * the TI while <code>false</code> returns the number of events
- * that saw this trigger type without regards for other trigger
- * flags.
- * @return Returns the count as an <code>int</code>.
- */
- public int getTITriggers(int triggerID, boolean hierarchical) {
- // Verify the trigger type.
- validateTriggerType(triggerID);
-
- // Increment the counters.
- if(hierarchical) { return tiSeenHierarchical[triggerID]; }
- else { return tiSeenAll[triggerID]; }
- }
-
- /**
- * Produces an exception if the argument trigger type is not of a
- * supported type.
- * @param triggerType - The trigger type to verify.
- */
- private static final void validateTriggerType(int triggerType) {
- if(triggerType < 0 || triggerType > 5) {
- throw new IndexOutOfBoundsException(String.format("Trigger type \"%d\" is not supported.", triggerType));
- }
- }
+ // Store the TI trigger information.
+ private int[] tiSeenAll = new int[6];
+ private int[] tiSeenHierarchical = new int[6];
+
+ // Store the statistical modules.
+ private final GeneralStatModule generalStats;
+ private final ClusterStatModule clusterStats;
+ private final TriggerStatModule[] triggerStats = new TriggerStatModule[4];
+
+ /**
+ * Creates a snapshot of the trigger diagnostic results.
+ * @param stats - The run statistical object.
+ */
+ DiagnosticSnapshot(RunDiagStats stats) {
+ // Store the statistical modules.
+ generalStats = stats.clone();
+ clusterStats = stats.getClusterStats().clone();
+ triggerStats[0] = stats.getTriggerStats().getSingles0Stats().clone();
+ triggerStats[1] = stats.getTriggerStats().getSingles1Stats().clone();
+ triggerStats[2] = stats.getTriggerStats().getPair0Stats().clone();
+ triggerStats[3] = stats.getTriggerStats().getPair1Stats().clone();
+
+ // Copy the TI trigger data.
+ for(int triggerType = 0; triggerType < 6; triggerType++) {
+ tiSeenAll[triggerType] = stats.getTriggerStats().getTITriggers(triggerType, false);
+ tiSeenHierarchical[triggerType] = stats.getTriggerStats().getTITriggers(triggerType, true);
+ }
+ }
+
+ /**
+ * Gets the general run statistics.
+ * @return Returns a <code>GeneralStatModule</code> object that
+ * contains the statistics.
+ */
+ public GeneralStatModule getGeneralStats() {
+ return generalStats;
+ }
+
+ /**
+ * Gets the cluster statistics.
+ * @return Returns a <code>ClusterStatModule</code> object that
+ * contains the statistics.
+ */
+ public ClusterStatModule getClusterStats() {
+ return clusterStats;
+ }
+
+ /**
+ * Gets the singles 0 trigger statistics.
+ * @return Returns a <code>TriggerStatModule</code> object that
+ * contains the statistics.
+ */
+ public TriggerStatModule getSingles0Stats() {
+ return triggerStats[0];
+ }
+
+ /**
+ * Gets the singles 1 trigger statistics.
+ * @return Returns a <code>TriggerStatModule</code> object that
+ * contains the statistics.
+ */
+ public TriggerStatModule getSingles1Stats() {
+ return triggerStats[1];
+ }
+
+ /**
+ * Gets the pair 0 trigger statistics.
+ * @return Returns a <code>TriggerStatModule</code> object that
+ * contains the statistics.
+ */
+ public TriggerStatModule getPair0Stats() {
+ return triggerStats[2];
+ }
+
+ /**
+ * Gets the pair 1 trigger statistics.
+ * @return Returns a <code>TriggerStatModule</code> object that
+ * contains the statistics.
+ */
+ public TriggerStatModule getPair1Stats() {
+ return triggerStats[3];
+ }
+
+ /**
+ * Gets the total number of events where the TI reported a trigger
+ * of the specified type.
+ * @param triggerID - The identifier for the type of trigger.
+ * @param hierarchical - <code>true</code> returns only the number of
+ * events where this trigger type was the <i>only</i> type seen by
+ * the TI while <code>false</code> returns the number of events
+ * that saw this trigger type without regards for other trigger
+ * flags.
+ * @return Returns the count as an <code>int</code>.
+ */
+ public int getTITriggers(int triggerID, boolean hierarchical) {
+ // Verify the trigger type.
+ validateTriggerType(triggerID);
+
+ // Increment the counters.
+ if(hierarchical) { return tiSeenHierarchical[triggerID]; }
+ else { return tiSeenAll[triggerID]; }
+ }
+
+ /**
+ * Produces an exception if the argument trigger type is not of a
+ * supported type.
+ * @param triggerType - The trigger type to verify.
+ */
+ private static final void validateTriggerType(int triggerType) {
+ if(triggerType < 0 || triggerType > 5) {
+ throw new IndexOutOfBoundsException(String.format("Trigger type \"%d\" is not supported.", triggerType));
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/GeneralStatModule.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/GeneralStatModule.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/GeneralStatModule.java Wed Apr 27 11:11:32 2016
@@ -7,97 +7,97 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public class GeneralStatModule {
- // Store general run statistics.
- protected long endTime = -1;
- protected long startTime = -1;
- protected int totalEvents = 0;
- protected int noiseEvents = 0;
- protected int failedPairEvents = 0;
- protected int failedClusterEvents = 0;
- protected int failedSinglesEvents = 0;
-
- /**
- * Clears all of the statistical counters in the object.
- */
- void clear() {
- endTime = -1;
- startTime = -1;
- totalEvents = 0;
- noiseEvents = 0;
- failedPairEvents = 0;
- failedClusterEvents = 0;
- failedSinglesEvents = 0;
- }
-
- @Override
- public GeneralStatModule clone() {
- // Create the a cloned object.
- GeneralStatModule clone = new GeneralStatModule();
-
- // Copy the tracked statistical data to the clone.
- clone.endTime = endTime;
- clone.startTime = startTime;
- clone.totalEvents = totalEvents;
- clone.noiseEvents = noiseEvents;
- clone.failedPairEvents = failedPairEvents;
- clone.failedClusterEvents = failedClusterEvents;
- clone.failedSinglesEvents = failedSinglesEvents;
-
- // Return the clone.
- return clone;
- }
-
- /**
- * Gets the length of time, in nanoseconds, over which the events
- * represented by this object occurred.
- * @return Returns the length of time as a <code>long</code>.
- */
- public long getDuration() {
- return endTime - startTime;
- }
-
- /**
- * Gets the number of events seen.
- * @return Returns the number of events as an <code>int</code>.
- */
- public int getEventCount() {
- return totalEvents;
- }
-
- /**
- * Gets the number of events in which at least one cluster was
- * not matched.
- * @return Returns the number of events as an <code>int</code>.
- */
- public int getFailedClusterEventCount() {
- return failedClusterEvents;
- }
-
- /**
- * Gets the number of events in which at least one pair trigger
- * was not matched.
- * @return Returns the number of events as an <code>int</code>.
- */
- public int getFailedPairEventCount() {
- return failedPairEvents;
- }
+ // Store general run statistics.
+ protected long endTime = -1;
+ protected long startTime = -1;
+ protected int totalEvents = 0;
+ protected int noiseEvents = 0;
+ protected int failedPairEvents = 0;
+ protected int failedClusterEvents = 0;
+ protected int failedSinglesEvents = 0;
+
+ /**
+ * Clears all of the statistical counters in the object.
+ */
+ void clear() {
+ endTime = -1;
+ startTime = -1;
+ totalEvents = 0;
+ noiseEvents = 0;
+ failedPairEvents = 0;
+ failedClusterEvents = 0;
+ failedSinglesEvents = 0;
+ }
+
+ @Override
+ public GeneralStatModule clone() {
+ // Create the cloned object.
+ GeneralStatModule clone = new GeneralStatModule();
+
+ // Copy the tracked statistical data to the clone.
+ clone.endTime = endTime;
+ clone.startTime = startTime;
+ clone.totalEvents = totalEvents;
+ clone.noiseEvents = noiseEvents;
+ clone.failedPairEvents = failedPairEvents;
+ clone.failedClusterEvents = failedClusterEvents;
+ clone.failedSinglesEvents = failedSinglesEvents;
+
+ // Return the clone.
+ return clone;
+ }
+
+ /**
+ * Gets the length of time, in nanoseconds, over which the events
+ * represented by this object occurred.
+ * @return Returns the length of time as a <code>long</code>.
+ */
+ public long getDuration() {
+ return endTime - startTime;
+ }
+
+ /**
+ * Gets the number of events seen.
+ * @return Returns the number of events as an <code>int</code>.
+ */
+ public int getEventCount() {
+ return totalEvents;
+ }
+
+ /**
+ * Gets the number of events in which at least one cluster was
+ * not matched.
+ * @return Returns the number of events as an <code>int</code>.
+ */
+ public int getFailedClusterEventCount() {
+ return failedClusterEvents;
+ }
+
+ /**
+ * Gets the number of events in which at least one pair trigger
+ * was not matched.
+ * @return Returns the number of events as an <code>int</code>.
+ */
+ public int getFailedPairEventCount() {
+ return failedPairEvents;
+ }
- /**
- * Gets the number of events in which at least one singles trigger
- * was not matched.
- * @return Returns the number of events as an <code>int</code>.
- */
- public int getFailedSinglesEventCount() {
- return failedSinglesEvents;
- }
-
- /**
- * Gets the number of events which were ignored due to having too
- * many hits in them.
- * @return Returns the number of events as an <code>int</code>.
- */
- public int getNoiseEvents() {
- return noiseEvents;
- }
+ /**
+ * Gets the number of events in which at least one singles trigger
+ * was not matched.
+ * @return Returns the number of events as an <code>int</code>.
+ */
+ public int getFailedSinglesEventCount() {
+ return failedSinglesEvents;
+ }
+
+ /**
+ * Gets the number of events which were ignored due to having too
+ * many hits in them.
+ * @return Returns the number of events as an <code>int</code>.
+ */
+ public int getNoiseEvents() {
+ return noiseEvents;
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/RunDiagStats.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/RunDiagStats.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/RunDiagStats.java Wed Apr 27 11:11:32 2016
@@ -9,89 +9,89 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public class RunDiagStats extends GeneralStatModule {
- // Store the statistics for trigger matching.
- private TriggerDiagStats triggerStats = new TriggerDiagStats();
-
- // Store the statistics for cluster matching.
- private ClusterEvent clusterStats = new ClusterEvent();
-
- /**
- * Clears all of the statistical counters in the object.
- */
- public void clear() {
- super.clear();
- clusterStats.clear();
- triggerStats.clear();
- }
-
- /**
- * Notes that an event failed to match all clusters.
- */
- public void failedClusterEvent() {
- failedClusterEvents++;
- }
-
- /**
- * Notes that an event failed to match all pair triggers.
- */
- public void failedPairEvent() {
- failedPairEvents++;
- }
+ // Store the statistics for trigger matching.
+ private TriggerDiagStats triggerStats = new TriggerDiagStats();
+
+ // Store the statistics for cluster matching.
+ private ClusterEvent clusterStats = new ClusterEvent();
+
+ /**
+ * Clears all of the statistical counters in the object.
+ */
+ public void clear() {
+ super.clear();
+ clusterStats.clear();
+ triggerStats.clear();
+ }
+
+ /**
+ * Notes that an event failed to match all clusters.
+ */
+ public void failedClusterEvent() {
+ failedClusterEvents++;
+ }
+
+ /**
+ * Notes that an event failed to match all pair triggers.
+ */
+ public void failedPairEvent() {
+ failedPairEvents++;
+ }
- /**
- * Notes that an event failed to match all singles triggers.
- */
- public void failedSinglesEvent() {
- failedSinglesEvents++;
- }
-
- /**
- * Gets the cluster data.
- * @return Returns the <code>ClusterEvent</code> object that holds
- * the cluster data.
- */
- public ClusterEvent getClusterStats() {
- return clusterStats;
- }
-
- /**
- * Gets a snapshot of the statistical data at the present time. The
- * snapshot will remain static and unchanged even if the generating
- * object itself is updated.
- * @return Returns a snapshot as a <code>DiagnosticSnapshot</code>
- * object.
- */
- public DiagnosticSnapshot getSnapshot() {
- return new DiagnosticSnapshot(this);
- }
-
- /**
- * Gets the trigger data.
- * @return Returns the <code>TriggerDiagStats</code> object that holds
- * the cluster data.
- */
- public TriggerDiagStats getTriggerStats() {
- return triggerStats;
- }
-
- /**
- * Notes that an event occurred.
- */
- public void sawEvent(long eventTime) {
- // Increment the event count.
- totalEvents++;
-
- // If the start time is not defined, use this as the start time.
- if(startTime == -1) { startTime = eventTime; }
-
- // The end time should always match the most recent event.
- endTime = eventTime;
- }
+ /**
+ * Notes that an event failed to match all singles triggers.
+ */
+ public void failedSinglesEvent() {
+ failedSinglesEvents++;
+ }
+
+ /**
+ * Gets the cluster data.
+ * @return Returns the <code>ClusterEvent</code> object that holds
+ * the cluster data.
+ */
+ public ClusterEvent getClusterStats() {
+ return clusterStats;
+ }
+
+ /**
+ * Gets a snapshot of the statistical data at the present time. The
+ * snapshot will remain static and unchanged even if the generating
+ * object itself is updated.
+ * @return Returns a snapshot as a <code>DiagnosticSnapshot</code>
+ * object.
+ */
+ public DiagnosticSnapshot getSnapshot() {
+ return new DiagnosticSnapshot(this);
+ }
+
+ /**
+ * Gets the trigger data.
+ * @return Returns the <code>TriggerDiagStats</code> object that holds
+ * the trigger data.
+ */
+ public TriggerDiagStats getTriggerStats() {
+ return triggerStats;
+ }
+
+ /**
+ * Notes that an event occurred.
+ */
+ public void sawEvent(long eventTime) {
+ // Increment the event count.
+ totalEvents++;
+
+ // If the start time is not defined, use this as the start time.
+ if(startTime == -1) { startTime = eventTime; }
+
+ // The end time should always match the most recent event.
+ endTime = eventTime;
+ }
- /**
- * Notes that an event was labeled as noise.
- */
- public void sawNoiseEvent() {
- noiseEvents++;
- }
+ /**
+ * Notes that an event was labeled as noise.
+ */
+ public void sawNoiseEvent() {
+ noiseEvents++;
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerDiagStats.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerDiagStats.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerDiagStats.java Wed Apr 27 11:11:32 2016
@@ -4,304 +4,304 @@
import org.hps.analysis.trigger.util.TriggerDiagnosticUtil;
public class TriggerDiagStats {
- // Define TI trigger type identifiers.
- public static final int SINGLES0 = TriggerStatModule.SINGLES_0;
- public static final int SINGLES1 = TriggerStatModule.SINGLES_1;
- public static final int PAIR0 = TriggerStatModule.PAIR_0;
- public static final int PAIR1 = TriggerStatModule.PAIR_1;
- public static final int PULSER = TriggerStatModule.PULSER;
- public static final int COSMIC = TriggerStatModule.COSMIC;
-
- // Tracks the number of TI triggers seen across all events for only
- // the TI trigger with the highest priority in the event.
- private int[] tiSeenHierarchical = new int[6];
-
- // Tracks the number of TI triggers across all events.
- private int[] tiSeenAll = new int[6];
-
- // Store the statistics modules for each of the regular triggers.
- private TriggerEvent[] triggerStats = new TriggerEvent[4];
-
- /**
- * Instantiates a new <code>TriggerDiagStats</code> object.
- */
- public TriggerDiagStats() {
- // Instantiate a trigger statistics module for each of the
- // triggers for which statistics are supported.
- for(int triggerType = 0; triggerType < 4; triggerType++) {
- triggerStats[triggerType] = new TriggerEvent();
- }
- }
-
- /**
- * Clears all of the statistical counters in the object.
- */
- void clear() {
- // Clear the tracked TI trigger data.
- for(int tiType = 0; tiType < 6; tiType++) {
- tiSeenAll[tiType] = 0;
- tiSeenHierarchical[tiType] = 0;
- }
-
- // Clear the trigger statistical modules.
- for(int triggerType = 0; triggerType < 4; triggerType++) {
- triggerStats[triggerType].clear();
- }
- }
-
- /**
- * Gets the trigger data for the pair 0 trigger.
- * @return Returns the <code>TriggerEvent</code> object that holds
- * the trigger data for the pair 0 trigger.
- */
- public TriggerEvent getPair0Stats() {
- return triggerStats[PAIR0];
- }
-
- /**
- * Gets the trigger data for the pair 1 trigger.
- * @return Returns the <code>TriggerEvent</code> object that holds
- * the trigger data for the pair 1 trigger.
- */
- public TriggerEvent getPair1Stats() {
- return triggerStats[PAIR1];
- }
-
- /**
- * Gets the trigger data for the singles 0 trigger.
- * @return Returns the <code>TriggerEvent</code> object that holds
- * the trigger data for the singles 0 trigger.
- */
- public TriggerEvent getSingles0Stats() {
- return triggerStats[SINGLES0];
- }
-
- /**
- * Gets the trigger data for the singles 1 trigger.
- * @return Returns the <code>TriggerEvent</code> object that holds
- * the trigger data for the singles 1 trigger.
- */
- public TriggerEvent getSingles1Stats() {
- return triggerStats[SINGLES1];
- }
-
- /**
- * Gets the total number of events where the TI reported a trigger
- * of the specified type.
- * @param triggerID - The identifier for the type of trigger.
- * @param unique - <code>true</code> returns only the number of
- * events where this trigger type was the <i>only</i> type seen by
- * the TI while <code>false</code> returns the number of events
- * that saw this trigger type without regards for other trigger
- * flags.
- * @return Returns the count as an <code>int</code>.
- */
- public int getTITriggers(int triggerID, boolean hierarchical) {
- // Verify the trigger type.
- validateTriggerType(triggerID);
-
- // Increment the counters.
- if(hierarchical) { return tiSeenHierarchical[triggerID]; }
- else { return tiSeenAll[triggerID]; }
- }
-
- /**
- * Increments the counts tracking the number of TI flags seen.
- * @param flags - An array of <code>boolean</code> values of size
- * six. This represents one flag for each possible TI trigger type.
- */
- public void sawTITriggers(boolean[] flags) {
- // There must be six trigger flags and the array must not be
- // null.
- if(flags == null) {
- throw new NullPointerException("TI trigger flags can not be null.");
- } if(flags.length != 6) {
- throw new IllegalArgumentException("TI trigger flags must be of size six.");
- }
-
- // Check each TI flag in the order of the flag hierarchy. The
- // first flag in the hierarchy that is true is recorded in the
- // hierarchical count. All flags are recorded in the all count.
- boolean foundHierarchical = false;
- if(flags[PAIR1]) {
- tiSeenAll[PAIR1]++;
- if(!foundHierarchical) {
- tiSeenHierarchical[PAIR1]++;
- foundHierarchical = true;
- }
- } if(flags[PAIR0]) {
- tiSeenAll[PAIR0]++;
- if(!foundHierarchical) {
- tiSeenHierarchical[PAIR0]++;
- foundHierarchical = true;
- }
- } if(flags[SINGLES1]) {
- tiSeenAll[SINGLES1]++;
- if(!foundHierarchical) {
- tiSeenHierarchical[SINGLES1]++;
- foundHierarchical = true;
- }
- } if(flags[SINGLES0]) {
- tiSeenAll[SINGLES0]++;
- if(!foundHierarchical) {
- tiSeenHierarchical[SINGLES0]++;
- foundHierarchical = true;
- }
- } if(flags[PULSER]) {
- tiSeenAll[PULSER]++;
- if(!foundHierarchical) {
- tiSeenHierarchical[PULSER]++;
- foundHierarchical = true;
- }
- } if(flags[COSMIC]) {
- tiSeenAll[COSMIC]++;
- if(!foundHierarchical) {
- tiSeenHierarchical[COSMIC]++;
- foundHierarchical = true;
- }
- }
- }
-
- /**
- * Prints the trigger statistics to the terminal as a table.
- */
- public void printEfficiencyTable() {
- // Get the trigger statistics tables.
- int[][] seenStats = new int[6][4];
- int[][] matchedStats = new int[6][4];
- TriggerEvent[] triggerEvents = { getSingles0Stats(), getSingles1Stats(), getPair0Stats(), getPair1Stats() };
- for(int i = 0; i < 4; i++) {
- for(int j = 0; j < 6; j++) {
- seenStats[j][i] = triggerEvents[i].getReconSimulatedTriggers(j);
- matchedStats[j][i] = triggerEvents[i].getMatchedReconSimulatedTriggers(j);
- }
- }
-
- // Define constant spacing variables.
- int columnSpacing = 3;
-
- // Define table headers.
- String sourceName = "Source";
- String seenName = "Trigger Efficiency";
-
- // Get the longest column header name.
- int longestHeader = -1;
- String[] headerNames = {
- TriggerDiagnosticUtil.TRIGGER_NAME[0],
- TriggerDiagnosticUtil.TRIGGER_NAME[1],
- TriggerDiagnosticUtil.TRIGGER_NAME[2],
- TriggerDiagnosticUtil.TRIGGER_NAME[3],
- "TI Highest Type"
- };
- for(String triggerName : headerNames) {
- longestHeader = ComponentUtils.max(longestHeader, triggerName.length());
- }
- longestHeader = ComponentUtils.max(longestHeader, sourceName.length());
-
- // Determine the spacing needed to display the largest numerical
- // cell value.
- int numWidth = -1;
- int longestCell = -1;
- for(int eventTriggerID = 0; eventTriggerID < 6; eventTriggerID++) {
- for(int seenTriggerID = 0; seenTriggerID < 4; seenTriggerID++) {
- int valueSize = ComponentUtils.getDigits(seenStats[eventTriggerID][seenTriggerID]);
- int cellSize = valueSize * 2 + 13;
- if(cellSize > longestCell) {
- longestCell = cellSize;
- numWidth = valueSize;
- }
- }
- }
-
- // The total column width can then be calculated from the
- // longer of the header and cell values.
- int columnWidth = ComponentUtils.max(longestCell, longestHeader);
- int sourceWidth = ComponentUtils.max(
- TriggerDiagnosticUtil.TRIGGER_NAME[0].length(), TriggerDiagnosticUtil.TRIGGER_NAME[1].length(),
- TriggerDiagnosticUtil.TRIGGER_NAME[2].length(), TriggerDiagnosticUtil.TRIGGER_NAME[3].length(),
- TriggerDiagnosticUtil.TRIGGER_NAME[4].length(), TriggerDiagnosticUtil.TRIGGER_NAME[5].length(),
- sourceName.length() );
-
- // Calculate the total width of the table value header columns.
- int headerTotalWidth = (headerNames.length * columnWidth)
- + ((headerNames.length - 1) * columnSpacing);
-
- // Write the table header.
- String spacingText = ComponentUtils.getChars(' ', columnSpacing);
- System.out.println(ComponentUtils.getChars(' ', sourceWidth) + spacingText
- + getCenteredString(seenName, headerTotalWidth));
-
- // Create the format strings for the cell values.
- String headerFormat = "%-" + sourceWidth + "s" + spacingText;
- String cellFormat = "%" + numWidth + "d / %" + numWidth + "d (%7.3f)";
- String nullText = getCenteredString(ComponentUtils.getChars('-', numWidth) + " / "
- + ComponentUtils.getChars('-', numWidth) + " ( N/A )", columnWidth) + spacingText;
-
- // Print the column headers.
- System.out.printf(headerFormat, sourceName);
- for(String header : headerNames) {
- System.out.print(getCenteredString(header, columnWidth) + spacingText);
- }
- System.out.println();
-
- // Write out the value columns.
- for(int eventTriggerID = 0; eventTriggerID < 6; eventTriggerID++) {
- // Print out the row header.
- System.out.printf(headerFormat, TriggerDiagnosticUtil.TRIGGER_NAME[eventTriggerID]);
-
- // Print the cell values.
- for(int seenTriggerID = 0; seenTriggerID < 4; seenTriggerID++) {
- if(seenTriggerID == eventTriggerID) { System.out.print(nullText); }
- else {
- String cellText = String.format(cellFormat, matchedStats[eventTriggerID][seenTriggerID],
- seenStats[eventTriggerID][seenTriggerID],
- (100.0 * matchedStats[eventTriggerID][seenTriggerID] / seenStats[eventTriggerID][seenTriggerID]));
- System.out.print(getCenteredString(cellText, columnWidth) + spacingText);
- }
- }
-
- // Output the number of events that had only the trigger
- // type ID for the current trigger type flagged by the TI.
- System.out.print(getCenteredString("" + getTITriggers(eventTriggerID, true), columnWidth) + spacingText);
-
- // Start a new line.
- System.out.println();
- }
- }
-
- /**
- * Produces a <code>String</code> of the indicated length with the
- * text <code>value</code> centered in the middle. Extra length is
- * filled through spaces before and after the text.
- * @param value - The text to display.
- * @param width - The number of spaces to include.
- * @return Returns a <code>String</code> of the specified length,
- * or the argument text if it is longer.
- */
- private static final String getCenteredString(String value, int width) {
- // The method can not perform as intended if the argument text
- // exceeds the requested string length. Just return the text.
- if(width <= value.length()) {
- return value;
- }
-
- // Otherwise, get the amount of buffering needed to center the
- // text and add it around the text to produce the string.
- else {
- int buffer = (width - value.length()) / 2;
- return ComponentUtils.getChars(' ', buffer) + value
- + ComponentUtils.getChars(' ', width - buffer - value.length());
- }
- }
-
- /**
- * Produces an exception if the argument trigger type is not of a
- * supported type.
- * @param triggerType - The trigger type to verify.
- */
- private static final void validateTriggerType(int triggerType) {
- if(triggerType < 0 || triggerType > 5) {
- throw new IndexOutOfBoundsException(String.format("Trigger type \"%d\" is not supported.", triggerType));
- }
- }
+ // Define TI trigger type identifiers.
+ public static final int SINGLES0 = TriggerStatModule.SINGLES_0;
+ public static final int SINGLES1 = TriggerStatModule.SINGLES_1;
+ public static final int PAIR0 = TriggerStatModule.PAIR_0;
+ public static final int PAIR1 = TriggerStatModule.PAIR_1;
+ public static final int PULSER = TriggerStatModule.PULSER;
+ public static final int COSMIC = TriggerStatModule.COSMIC;
+
+ // Tracks the number of TI triggers seen across all events for only
+ // the TI trigger with the highest priority in the event.
+ private int[] tiSeenHierarchical = new int[6];
+
+ // Tracks the number of TI triggers across all events.
+ private int[] tiSeenAll = new int[6];
+
+ // Store the statistics modules for each of the regular triggers.
+ private TriggerEvent[] triggerStats = new TriggerEvent[4];
+
+ /**
+ * Instantiates a new <code>TriggerDiagStats</code> object.
+ */
+ public TriggerDiagStats() {
+ // Instantiate a trigger statistics module for each of the
+ // triggers for which statistics are supported.
+ for(int triggerType = 0; triggerType < 4; triggerType++) {
+ triggerStats[triggerType] = new TriggerEvent();
+ }
+ }
+
+ /**
+ * Clears all of the statistical counters in the object.
+ */
+ void clear() {
+ // Clear the tracked TI trigger data.
+ for(int tiType = 0; tiType < 6; tiType++) {
+ tiSeenAll[tiType] = 0;
+ tiSeenHierarchical[tiType] = 0;
+ }
+
+ // Clear the trigger statistical modules.
+ for(int triggerType = 0; triggerType < 4; triggerType++) {
+ triggerStats[triggerType].clear();
+ }
+ }
+
+ /**
+ * Gets the trigger data for the pair 0 trigger.
+ * @return Returns the <code>TriggerEvent</code> object that holds
+ * the trigger data for the pair 0 trigger.
+ */
+ public TriggerEvent getPair0Stats() {
+ return triggerStats[PAIR0];
+ }
+
+ /**
+ * Gets the trigger data for the pair 1 trigger.
+ * @return Returns the <code>TriggerEvent</code> object that holds
+ * the trigger data for the pair 1 trigger.
+ */
+ public TriggerEvent getPair1Stats() {
+ return triggerStats[PAIR1];
+ }
+
+ /**
+ * Gets the trigger data for the singles 0 trigger.
+ * @return Returns the <code>TriggerEvent</code> object that holds
+ * the trigger data for the singles 0 trigger.
+ */
+ public TriggerEvent getSingles0Stats() {
+ return triggerStats[SINGLES0];
+ }
+
+ /**
+ * Gets the trigger data for the singles 1 trigger.
+ * @return Returns the <code>TriggerEvent</code> object that holds
+ * the trigger data for the singles 1 trigger.
+ */
+ public TriggerEvent getSingles1Stats() {
+ return triggerStats[SINGLES1];
+ }
+
+ /**
+ * Gets the total number of events where the TI reported a trigger
+ * of the specified type.
+ * @param triggerID - The identifier for the type of trigger.
+ * @param hierarchical - <code>true</code> returns only the number of
+ * events where this trigger type was the <i>highest-priority</i> trigger
+ * type seen by the TI, while <code>false</code> returns the number of
+ * events that saw this trigger type without regard to any other
+ * trigger flags.
+ * @return Returns the count as an <code>int</code>.
+ */
+ public int getTITriggers(int triggerID, boolean hierarchical) {
+ // Verify the trigger type.
+ validateTriggerType(triggerID);
+
+ // Return the requested counter.
+ if(hierarchical) { return tiSeenHierarchical[triggerID]; }
+ else { return tiSeenAll[triggerID]; }
+ }
+
+ /**
+ * Increments the counts tracking the number of TI flags seen.
+ * @param flags - An array of <code>boolean</code> values of size
+ * six. This represents one flag for each possible TI trigger type.
+ */
+ public void sawTITriggers(boolean[] flags) {
+ // There must be six trigger flags and the array must not be
+ // null.
+ if(flags == null) {
+ throw new NullPointerException("TI trigger flags can not be null.");
+ } if(flags.length != 6) {
+ throw new IllegalArgumentException("TI trigger flags must be of size six.");
+ }
+
+ // Check each TI flag in the order of the flag hierarchy. The
+ // first flag in the hierarchy that is true is recorded in the
+ // hierarchical count. All flags are recorded in the all count.
+ boolean foundHierarchical = false;
+ if(flags[PAIR1]) {
+ tiSeenAll[PAIR1]++;
+ if(!foundHierarchical) {
+ tiSeenHierarchical[PAIR1]++;
+ foundHierarchical = true;
+ }
+ } if(flags[PAIR0]) {
+ tiSeenAll[PAIR0]++;
+ if(!foundHierarchical) {
+ tiSeenHierarchical[PAIR0]++;
+ foundHierarchical = true;
+ }
+ } if(flags[SINGLES1]) {
+ tiSeenAll[SINGLES1]++;
+ if(!foundHierarchical) {
+ tiSeenHierarchical[SINGLES1]++;
+ foundHierarchical = true;
+ }
+ } if(flags[SINGLES0]) {
+ tiSeenAll[SINGLES0]++;
+ if(!foundHierarchical) {
+ tiSeenHierarchical[SINGLES0]++;
+ foundHierarchical = true;
+ }
+ } if(flags[PULSER]) {
+ tiSeenAll[PULSER]++;
+ if(!foundHierarchical) {
+ tiSeenHierarchical[PULSER]++;
+ foundHierarchical = true;
+ }
+ } if(flags[COSMIC]) {
+ tiSeenAll[COSMIC]++;
+ if(!foundHierarchical) {
+ tiSeenHierarchical[COSMIC]++;
+ foundHierarchical = true;
+ }
+ }
+ }
+
+ /**
+ * Prints the trigger statistics to the terminal as a table.
+ */
+ public void printEfficiencyTable() {
+ // Get the trigger statistics tables.
+ int[][] seenStats = new int[6][4];
+ int[][] matchedStats = new int[6][4];
+ TriggerEvent[] triggerEvents = { getSingles0Stats(), getSingles1Stats(), getPair0Stats(), getPair1Stats() };
+ for(int i = 0; i < 4; i++) {
+ for(int j = 0; j < 6; j++) {
+ seenStats[j][i] = triggerEvents[i].getReconSimulatedTriggers(j);
+ matchedStats[j][i] = triggerEvents[i].getMatchedReconSimulatedTriggers(j);
+ }
+ }
+
+ // Define constant spacing variables.
+ int columnSpacing = 3;
+
+ // Define table headers.
+ String sourceName = "Source";
+ String seenName = "Trigger Efficiency";
+
+ // Get the longest column header name.
+ int longestHeader = -1;
+ String[] headerNames = {
+ TriggerDiagnosticUtil.TRIGGER_NAME[0],
+ TriggerDiagnosticUtil.TRIGGER_NAME[1],
+ TriggerDiagnosticUtil.TRIGGER_NAME[2],
+ TriggerDiagnosticUtil.TRIGGER_NAME[3],
+ "TI Highest Type"
+ };
+ for(String triggerName : headerNames) {
+ longestHeader = ComponentUtils.max(longestHeader, triggerName.length());
+ }
+ longestHeader = ComponentUtils.max(longestHeader, sourceName.length());
+
+ // Determine the spacing needed to display the largest numerical
+ // cell value.
+ int numWidth = -1;
+ int longestCell = -1;
+ for(int eventTriggerID = 0; eventTriggerID < 6; eventTriggerID++) {
+ for(int seenTriggerID = 0; seenTriggerID < 4; seenTriggerID++) {
+ int valueSize = ComponentUtils.getDigits(seenStats[eventTriggerID][seenTriggerID]);
+ int cellSize = valueSize * 2 + 13;
+ if(cellSize > longestCell) {
+ longestCell = cellSize;
+ numWidth = valueSize;
+ }
+ }
+ }
+
+ // The total column width can then be calculated from the
+ // longer of the header and cell values.
+ int columnWidth = ComponentUtils.max(longestCell, longestHeader);
+ int sourceWidth = ComponentUtils.max(
+ TriggerDiagnosticUtil.TRIGGER_NAME[0].length(), TriggerDiagnosticUtil.TRIGGER_NAME[1].length(),
+ TriggerDiagnosticUtil.TRIGGER_NAME[2].length(), TriggerDiagnosticUtil.TRIGGER_NAME[3].length(),
+ TriggerDiagnosticUtil.TRIGGER_NAME[4].length(), TriggerDiagnosticUtil.TRIGGER_NAME[5].length(),
+ sourceName.length() );
+
+ // Calculate the total width of the table value header columns.
+ int headerTotalWidth = (headerNames.length * columnWidth)
+ + ((headerNames.length - 1) * columnSpacing);
+
+ // Write the table header.
+ String spacingText = ComponentUtils.getChars(' ', columnSpacing);
+ System.out.println(ComponentUtils.getChars(' ', sourceWidth) + spacingText
+ + getCenteredString(seenName, headerTotalWidth));
+
+ // Create the format strings for the cell values.
+ String headerFormat = "%-" + sourceWidth + "s" + spacingText;
+ String cellFormat = "%" + numWidth + "d / %" + numWidth + "d (%7.3f)";
+ String nullText = getCenteredString(ComponentUtils.getChars('-', numWidth) + " / "
+ + ComponentUtils.getChars('-', numWidth) + " ( N/A )", columnWidth) + spacingText;
+
+ // Print the column headers.
+ System.out.printf(headerFormat, sourceName);
+ for(String header : headerNames) {
+ System.out.print(getCenteredString(header, columnWidth) + spacingText);
+ }
+ System.out.println();
+
+ // Write out the value columns.
+ for(int eventTriggerID = 0; eventTriggerID < 6; eventTriggerID++) {
+ // Print out the row header.
+ System.out.printf(headerFormat, TriggerDiagnosticUtil.TRIGGER_NAME[eventTriggerID]);
+
+ // Print the cell values.
+ for(int seenTriggerID = 0; seenTriggerID < 4; seenTriggerID++) {
+ if(seenTriggerID == eventTriggerID) { System.out.print(nullText); }
+ else {
+ String cellText = String.format(cellFormat, matchedStats[eventTriggerID][seenTriggerID],
+ seenStats[eventTriggerID][seenTriggerID],
+ (100.0 * matchedStats[eventTriggerID][seenTriggerID] / seenStats[eventTriggerID][seenTriggerID]));
+ System.out.print(getCenteredString(cellText, columnWidth) + spacingText);
+ }
+ }
+
+ // Output the number of events that had only the trigger
+ // type ID for the current trigger type flagged by the TI.
+ System.out.print(getCenteredString("" + getTITriggers(eventTriggerID, true), columnWidth) + spacingText);
+
+ // Start a new line.
+ System.out.println();
+ }
+ }
+
+ /**
+ * Produces a <code>String</code> of the indicated length with the
+ * text <code>value</code> centered in the middle. Extra length is
+ * filled through spaces before and after the text.
+ * @param value - The text to display.
+ * @param width - The number of spaces to include.
+ * @return Returns a <code>String</code> of the specified length,
+ * or the argument text if it is longer.
+ */
+ private static final String getCenteredString(String value, int width) {
+ // The method can not perform as intended if the argument text
+ // exceeds the requested string length. Just return the text.
+ if(width <= value.length()) {
+ return value;
+ }
+
+ // Otherwise, get the amount of buffering needed to center the
+ // text and add it around the text to produce the string.
+ else {
+ int buffer = (width - value.length()) / 2;
+ return ComponentUtils.getChars(' ', buffer) + value
+ + ComponentUtils.getChars(' ', width - buffer - value.length());
+ }
+ }
+
+ /**
+ * Produces an exception if the argument trigger type is not of a
+ * supported type.
+ * @param triggerType - The trigger type to verify.
+ */
+ private static final void validateTriggerType(int triggerType) {
+ if(triggerType < 0 || triggerType > 5) {
+ throw new IndexOutOfBoundsException(String.format("Trigger type \"%d\" is not supported.", triggerType));
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerEvent.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerEvent.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerEvent.java Wed Apr 27 11:11:32 2016
@@ -9,263 +9,255 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public class TriggerEvent extends TriggerStatModule {
- /**
- * Adds the statistics from one event object into this one.
- * @param event - The event data to add.
- */
- public void addEvent(TriggerStatModule event) {
- // Merge the values that do not depend on trigger source type.
- reportedTriggers += event.reportedTriggers;
-
- // Merge each value that depends on the trigger source type.
- for(int sourceType = 0; sourceType < 2; sourceType++) {
- simTriggers[sourceType] += event.simTriggers[sourceType];
- matchedTriggers[sourceType] += event.matchedTriggers[sourceType];
- unmatchedTriggers[sourceType] += event.unmatchedTriggers[sourceType];
-
- // Merge the number of times each cut failed.
- for(int cutID = 0; cutID < 4; cutID++) {
- failedCuts[sourceType][cutID] += event.failedCuts[sourceType][cutID];
- }
-
- // Copy the values for the TI flag trigger counters.
- for(int tiType = 0; tiType < 6; tiType++) {
- tiTriggersSeen[sourceType][tiType] += event.tiTriggersSeen[sourceType][tiType];
- tiTriggersMatched[sourceType][tiType] += event.tiTriggersMatched[sourceType][tiType];
- }
- }
- }
-
- /**
- * Indicates that a reconstructed trigger could not be matched, even
- * partially, to an SSP bank trigger.
- */
- public void failedReconTrigger() {
- unmatchedTriggers[RECON]++;
- }
-
- /**
- * Indicates that an SSP simulated trigger could not be matched, even
- * partially, to an SSP bank trigger.
- */
- public void failedSSPTrigger() {
- unmatchedTriggers[SSP]++;
- }
+ /**
+ * Adds the statistics from one event object into this one.
+ * @param event - The event data to add.
+ */
+ public void addEvent(TriggerStatModule event) {
+ // Merge the values that do not depend on trigger source type.
+ reportedTriggers += event.reportedTriggers;
+
+ // Merge each value that depends on the trigger source type.
+ for(int sourceType = 0; sourceType < 2; sourceType++) {
+ simTriggers[sourceType] += event.simTriggers[sourceType];
+ matchedTriggers[sourceType] += event.matchedTriggers[sourceType];
+ unmatchedTriggers[sourceType] += event.unmatchedTriggers[sourceType];
+
+ // Merge the number of times each cut failed.
+ for(int cutID = 0; cutID < 4; cutID++) {
+ failedCuts[sourceType][cutID] += event.failedCuts[sourceType][cutID];
+ }
+
+ // Copy the values for the TI flag trigger counters.
+ for(int tiType = 0; tiType < 6; tiType++) {
+ tiTriggersSeen[sourceType][tiType] += event.tiTriggersSeen[sourceType][tiType];
+ tiTriggersMatched[sourceType][tiType] += event.tiTriggersMatched[sourceType][tiType];
+ }
+ }
+ }
+
+ /**
+ * Indicates that a reconstructed trigger could not be matched, even
+ * partially, to an SSP bank trigger.
+ */
+ public void failedReconTrigger() {
+ unmatchedTriggers[RECON]++;
+ }
+
+ /**
+ * Indicates that an SSP simulated trigger could not be matched, even
+ * partially, to an SSP bank trigger.
+ */
+ public void failedSSPTrigger() {
+ unmatchedTriggers[SSP]++;
+ }
- /**
- * Indicates that a trigger simulated from a reconstructed cluster
- * was successfully matched to a trigger in the SSP bank.
- * @param tiFlags - An array of size 6 indicating which TI bank
- * flags are active and which are not.
- * @param triggerTypeID - An identifier indicating the type of
- * trigger that was matched.
- */
- public void matchedReconTrigger(boolean[] tiFlags) {
- matchedTriggers(tiFlags, RECON);
- }
-
- /**
- * Indicates that a trigger simulated from a reconstructed cluster
- * was partially matched to a trigger in the SSP bank, and notes
- * which cuts did and did not match.
- * @param tiFlags - An array of size 6 indicating which TI bank
- * flags are active and which are not.
- * @param triggerTypeID - An identifier indicating the type of
- * trigger that was matched.
- * @param matchedCuts - An array of size 3 or 4 indicating which
- * cuts did and did not align between the triggers.
- */
- public void matchedReconTrigger(boolean[] tiFlags, boolean[] matchedCuts) {
- matchedTriggers(tiFlags, matchedCuts, RECON);
- }
-
- /**
- * Indicates that a trigger simulated from an SSP bank cluster was
- * successfully matched to a trigger in the SSP bank.
- * @param tiFlags - An array of size 6 indicating which TI bank
- * flags are active and which are not.
- * @param triggerTypeID - An identifier indicating the type of
- * trigger that was matched.
- */
- public void matchedSSPTrigger(boolean[] tiFlags) {
- matchedTriggers(tiFlags, SSP);
- }
-
- /**
- * Indicates that a trigger simulated from an SSP bank cluster was
- * partially matched to a trigger in the SSP bank, and notes which
- * cuts did and did not match.
- * @param tiFlags - An array of size 6 indicating which TI bank
- * flags are active and which are not.
- * @param triggerTypeID - An identifier indicating the type of
- * trigger that was matched.
- * @param matchedCuts - An array of size 3 or 4 indicating which
- * cuts did and did not align between the triggers.
- */
- public void matchedSSPTrigger(boolean[] tiFlags, boolean[] matchedCuts) {
- matchedTriggers(tiFlags, matchedCuts, SSP);
- }
-
- /**
- * Indicates that a trigger simulated from a reconstructed cluster
- * was seen and increments the count for this type of trigger by one.
- * @param tiFlags - Whether or not each of the TI bank flags is
- * active or not.
- */
- public void sawReconSimulatedTrigger(boolean[] tiFlags) {
- sawReconSimulatedTriggers(tiFlags, 1);
- }
-
- /**
- * Indicates that a number triggers simulated from reconstructed
- * clusters were seen and increments the count for this type of
- * trigger by the indicated number.
- * @param tiFlags - Whether or not each of the TI bank flags is
- * active or not.
- * @param count - The number of simulated triggers seen.
- */
- public void sawReconSimulatedTriggers(boolean[] tiFlags, int count) {
- // Increment the total count.
- simTriggers[RECON] += count;
-
- // Increment the TI flag counters.
- for(int tiType = 0; tiType < 6; tiType++) {
- if(tiFlags[tiType]) {
- tiTriggersSeen[RECON][tiType] += count;
- }
- }
- }
-
- /**
- * Indicates that a trigger from the SSP trigger bank was seen and
- * increments the count for this type of trigger by one.
- */
- public void sawReportedTrigger() {
- sawReportedTriggers(1);
- }
-
- /**
- * Indicates that a number triggers from the SSP trigger bank were
- * seen and increments the count for this type of trigger by the
- * indicated number.
- * @param count - The number of simulated triggers seen.
- */
- public void sawReportedTriggers(int count) {
- reportedTriggers += count;
- }
-
- /**
- * Indicates that a trigger simulated from an SSP bank cluster was
- * seen and increments the count for this type of trigger by one.
- * @param tiFlags - Whether or not each of the TI bank flags is
- * active or not.
- */
- public void sawSSPSimulatedTrigger(boolean[] tiFlags) {
- sawSSPSimulatedTriggers(tiFlags, 1);
- }
-
- /**
- * Indicates that a number triggers simulated from SSP bank clusters
- * were seen and increments the count for this type of trigger by
- * the indicated number.
- * @param tiFlags - Whether or not each of the TI bank flags is
- * active or not.
- * @param count - The number of simulated triggers seen.
- */
- public void sawSSPSimulatedTriggers(boolean[] tiFlags, int count) {
- // Increment the total count.
- simTriggers[SSP] += count;
-
- // Increment the TI flag counters.
- for(int tiType = 0; tiType < 6; tiType++) {
- if(tiFlags[tiType]) {
- tiTriggersSeen[SSP][tiType] += count;
- }
- }
- }
-
- /**
- * Indicates that a simulated trigger was successfully matched to
- * an SSP bank trigger.
- * @param tiFlags - An array of size 6 indicating which TI bank
- * flags are active and which are not.
- * @param sourceType - Uses <code>SSP</code> for triggers simulated
- * from an SSP bank cluster and <code>RECON</code> for triggers that
- * were simulated from a reconstructed cluster.
- */
- private final void matchedTriggers(boolean[] tiFlags, int sourceType) {
- // Increment the total triggers matched.
- matchedTriggers[sourceType]++;
-
- // Increment the triggers matched for this type for each if
- // the active TI bank flags.
- for(int tiType = 0; tiType < 6; tiType++) {
- if(tiFlags[tiType]) {
- tiTriggersMatched[sourceType][tiType]++;
- }
- }
- }
-
- /**
- * Indicates that a simulated trigger was partially matched to a
- * trigger in the SSP bank, and notes which cuts did and did not
- * match.
- * @param tiFlags - An array of size 6 indicating which TI bank
- * flags are active and which are not.
- * @param sourceType - Uses <code>SSP</code> for triggers simulated
- * from an SSP bank cluster and <code>RECON</code> for triggers that
- * were simulated from a reconstructed cluster.
- */
- private void matchedTriggers(boolean[] tiFlags, boolean[] matchedCuts, int sourceType) {
- // The matched cuts must be defined.
- if(matchedCuts == null) {
- throw new NullPointerException("The matched cuts array must be defined.");
- }
-
- // The matched cuts array must be of either size 3 or 4.
- if(matchedCuts.length != 3 && matchedCuts.length != 4) {
- throw new IllegalArgumentException("All triggers must use either three or four cuts.");
- }
-
- // Increment the counters for each cut that was no matched. Also
- // track whether or not a cut actually failed.
- boolean cutFailed = false;
- for(int cutIndex = 0; cutIndex < matchedCuts.length; cutIndex++) {
- if(!matchedCuts[cutIndex]) {
- failedCuts[sourceType][cutIndex]++;
- cutFailed = true;
- }
- }
-
- // If no cut failed, this is actually a match. Increment the
- // appropriate counters.
- if(!cutFailed) {
- matchedTriggers(tiFlags, sourceType);
- }
- }
-
- @Deprecated
- public String getPrintData() {
- StringBuffer out = new StringBuffer();
+ /**
+ * Indicates that a trigger simulated from a reconstructed cluster
+ * was successfully matched to a trigger in the SSP bank.
+ * @param tiFlags - An array of size 6 indicating which TI bank
+ * flags are active and which are not.
+ */
+ public void matchedReconTrigger(boolean[] tiFlags) {
+ matchedTriggers(tiFlags, RECON);
+ }
+
+ /**
+ * Indicates that a trigger simulated from a reconstructed cluster
+ * was partially matched to a trigger in the SSP bank, and notes
+ * which cuts did and did not match.
+ * @param tiFlags - An array of size 6 indicating which TI bank
+ * flags are active and which are not.
+ * @param matchedCuts - An array of size 3 or 4 indicating which
+ * cuts did and did not align between the triggers.
+ */
+ public void matchedReconTrigger(boolean[] tiFlags, boolean[] matchedCuts) {
+ matchedTriggers(tiFlags, matchedCuts, RECON);
+ }
+
+ /**
+ * Indicates that a trigger simulated from an SSP bank cluster was
+ * successfully matched to a trigger in the SSP bank.
+ * @param tiFlags - An array of size 6 indicating which TI bank
+ * flags are active and which are not.
+ */
+ public void matchedSSPTrigger(boolean[] tiFlags) {
+ matchedTriggers(tiFlags, SSP);
+ }
+
+ /**
+ * Indicates that a trigger simulated from an SSP bank cluster was
+ * partially matched to a trigger in the SSP bank, and notes which
+ * cuts did and did not match.
+ * @param tiFlags - An array of size 6 indicating which TI bank
+ * flags are active and which are not.
+ * @param matchedCuts - An array of size 3 or 4 indicating which
+ * cuts did and did not align between the triggers.
+ */
+ public void matchedSSPTrigger(boolean[] tiFlags, boolean[] matchedCuts) {
+ matchedTriggers(tiFlags, matchedCuts, SSP);
+ }
+
+ /**
+ * Indicates that a trigger simulated from a reconstructed cluster
+ * was seen and increments the count for this type of trigger by one.
+ * @param tiFlags - Whether or not each of the TI bank flags is
+ * active or not.
+ */
+ public void sawReconSimulatedTrigger(boolean[] tiFlags) {
+ sawReconSimulatedTriggers(tiFlags, 1);
+ }
+
+ /**
+ * Indicates that a number of triggers simulated from reconstructed
+ * clusters were seen and increments the count for this type of
+ * trigger by the indicated number.
+ * @param tiFlags - Whether or not each of the TI bank flags is
+ * active or not.
+ * @param count - The number of simulated triggers seen.
+ */
+ public void sawReconSimulatedTriggers(boolean[] tiFlags, int count) {
+ // Increment the total count.
+ simTriggers[RECON] += count;
+
+ // Increment the TI flag counters.
+ for(int tiType = 0; tiType < 6; tiType++) {
+ if(tiFlags[tiType]) {
+ tiTriggersSeen[RECON][tiType] += count;
+ }
+ }
+ }
+
+ /**
+ * Indicates that a trigger from the SSP trigger bank was seen and
+ * increments the count for this type of trigger by one.
+ */
+ public void sawReportedTrigger() {
+ sawReportedTriggers(1);
+ }
+
+ /**
+ * Indicates that a number of triggers from the SSP trigger bank were
+ * seen and increments the count for this type of trigger by the
+ * indicated number.
+ * @param count - The number of reported triggers seen.
+ */
+ public void sawReportedTriggers(int count) {
+ reportedTriggers += count;
+ }
+
+ /**
+ * Indicates that a trigger simulated from an SSP bank cluster was
+ * seen and increments the count for this type of trigger by one.
+ * @param tiFlags - Whether or not each of the TI bank flags is
+ * active or not.
+ */
+ public void sawSSPSimulatedTrigger(boolean[] tiFlags) {
+ sawSSPSimulatedTriggers(tiFlags, 1);
+ }
+
+ /**
+ * Indicates that a number of triggers simulated from SSP bank clusters
+ * were seen and increments the count for this type of trigger by
+ * the indicated number.
+ * @param tiFlags - Whether or not each of the TI bank flags is
+ * active or not.
+ * @param count - The number of simulated triggers seen.
+ */
+ public void sawSSPSimulatedTriggers(boolean[] tiFlags, int count) {
+ // Increment the total count.
+ simTriggers[SSP] += count;
+
+ // Increment the TI flag counters.
+ for(int tiType = 0; tiType < 6; tiType++) {
+ if(tiFlags[tiType]) {
+ tiTriggersSeen[SSP][tiType] += count;
+ }
+ }
+ }
+
+ /**
+ * Indicates that a simulated trigger was successfully matched to
+ * an SSP bank trigger.
+ * @param tiFlags - An array of size 6 indicating which TI bank
+ * flags are active and which are not.
+ * @param sourceType - Uses <code>SSP</code> for triggers simulated
+ * from an SSP bank cluster and <code>RECON</code> for triggers that
+ * were simulated from a reconstructed cluster.
+ */
+ private final void matchedTriggers(boolean[] tiFlags, int sourceType) {
+ // Increment the total triggers matched.
+ matchedTriggers[sourceType]++;
+
+ // Increment the triggers matched for this type for each of
+ // the active TI bank flags.
+ for(int tiType = 0; tiType < 6; tiType++) {
+ if(tiFlags[tiType]) {
+ tiTriggersMatched[sourceType][tiType]++;
+ }
+ }
+ }
+
+ /**
+ * Indicates that a simulated trigger was partially matched to a
+ * trigger in the SSP bank, and notes which cuts did and did not
+ * match.
+ * @param tiFlags - An array of size 6 indicating which TI bank
+ * flags are active and which are not.
+ * @param sourceType - Uses <code>SSP</code> for triggers simulated
+ * from an SSP bank cluster and <code>RECON</code> for triggers that
+ * were simulated from a reconstructed cluster.
+ */
+ private void matchedTriggers(boolean[] tiFlags, boolean[] matchedCuts, int sourceType) {
+ // The matched cuts must be defined.
+ if(matchedCuts == null) {
+ throw new NullPointerException("The matched cuts array must be defined.");
+ }
+
+ // The matched cuts array must be of either size 3 or 4.
+ if(matchedCuts.length != 3 && matchedCuts.length != 4) {
+ throw new IllegalArgumentException("All triggers must use either three or four cuts.");
+ }
+
+ // Increment the counters for each cut that was not matched. Also
+ // track whether or not a cut actually failed.
+ boolean cutFailed = false;
+ for(int cutIndex = 0; cutIndex < matchedCuts.length; cutIndex++) {
+ if(!matchedCuts[cutIndex]) {
+ failedCuts[sourceType][cutIndex]++;
+ cutFailed = true;
+ }
+ }
+
+ // If no cut failed, this is actually a match. Increment the
+ // appropriate counters.
+ if(!cutFailed) {
+ matchedTriggers(tiFlags, sourceType);
+ }
+ }
+
+ @Deprecated
+ public String getPrintData() {
+ StringBuffer out = new StringBuffer();
- out.append("\n");
- out.append("Trigger Result\n");
- out.append("SSP Sim Triggers :: " + simTriggers[SSP] + "\n");
- out.append("Recon Sim Triggers :: " + simTriggers[RECON] + "\n");
- out.append("Reported Triggers :: " + reportedTriggers + "\n");
- out.append(String.format("Internal Efficiency :: %d / %d (%7.3f)%n", matchedTriggers[SSP], simTriggers[SSP],
- (100.0 * matchedTriggers[SSP] / simTriggers[SSP])));
- out.append(String.format("Trigger Efficiency :: %d / %d (%7.3f)%n", matchedTriggers[RECON], simTriggers[RECON],
- (100.0 * matchedTriggers[RECON] / simTriggers[RECON])));
-
- out.append("\n");
- out.append("Individual Cut Failure Rates\n");
- out.append("Unmatched Triggers :: " + unmatchedTriggers[SSP] + "\n");
- for(int i = 0; i < 4; i++) {
- out.append(String.format("\tCut %d :: %d / %d (%7.3f)%n", i, failedCuts[SSP][i], simTriggers[SSP],
- (100.0 * failedCuts[SSP][i] / simTriggers[SSP])));
- }
-
- return out.toString();
- }
+ out.append("\n");
+ out.append("Trigger Result\n");
+ out.append("SSP Sim Triggers :: " + simTriggers[SSP] + "\n");
+ out.append("Recon Sim Triggers :: " + simTriggers[RECON] + "\n");
+ out.append("Reported Triggers :: " + reportedTriggers + "\n");
+ out.append(String.format("Internal Efficiency :: %d / %d (%7.3f)%n", matchedTriggers[SSP], simTriggers[SSP],
+ (100.0 * matchedTriggers[SSP] / simTriggers[SSP])));
+ out.append(String.format("Trigger Efficiency :: %d / %d (%7.3f)%n", matchedTriggers[RECON], simTriggers[RECON],
+ (100.0 * matchedTriggers[RECON] / simTriggers[RECON])));
+
+ out.append("\n");
+ out.append("Individual Cut Failure Rates\n");
+ out.append("Unmatched Triggers :: " + unmatchedTriggers[SSP] + "\n");
+ for(int i = 0; i < 4; i++) {
+ out.append(String.format("\tCut %d :: %d / %d (%7.3f)%n", i, failedCuts[SSP][i], simTriggers[SSP],
+ (100.0 * failedCuts[SSP][i] / simTriggers[SSP])));
+ }
+
+ return out.toString();
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerStatModule.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerStatModule.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerStatModule.java Wed Apr 27 11:11:32 2016
@@ -10,285 +10,281 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public class TriggerStatModule {
- // Store the reference index for SSP simulated triggers and recon
- // simulated triggers.
- protected static final int SSP = 0;
- protected static final int RECON = 1;
-
- // Define TI trigger type identifiers.
- public static final int SINGLES_0 = 0;
- public static final int SINGLES_1 = 1;
- public static final int PAIR_0 = 2;
- public static final int PAIR_1 = 3;
- public static final int PULSER = 4;
- public static final int COSMIC = 5;
-
- // Track the number of simulated triggers seen for each source type.
- // SSP simulated triggers from the SSP bank clusters. Reconstructed
- // simulated triggers come from clusters built from FADC data.
- protected int[] simTriggers = new int[2];
-
- // Also track the number of triggers reported by the SSP bank.
- protected int reportedTriggers = 0;
-
- // Track the number of simulated triggers of each type that were
- // successfully matched.
- protected int[] matchedTriggers = new int[2];
-
- // Track the number of simulated triggers that could not be matched
- // at all.
- protected int[] unmatchedTriggers = new int[2];
-
- // Track which cuts succeeded and which cuts failed for each type.
- // Note that this is currently only tracked for SSP cluster triggers.
- protected int[][] failedCuts = new int[2][4];
-
- // Store the number of trigger matches seen over all events that
- // contain a given TI flag.
- protected int[][] tiTriggersSeen = new int[2][6];
- protected int[][] tiTriggersMatched = new int[2][6];
-
- /**
- * Clears all of the statistical counters in the object.
- */
- void clear() {
- // Clear all values.
- for(int sourceType = 0; sourceType < 2; sourceType++) {
- // Clear the general statistics.
- simTriggers[sourceType] = 0;
- matchedTriggers[sourceType] = 0;
- unmatchedTriggers[sourceType] = 0;
-
- // Clear the cut failure statistics.
- for(int cutID = 0; cutID < 4; cutID++) {
- failedCuts[sourceType][cutID] = 0;
- }
-
- // Clear the TI flag statistics.
- for(int tiType = 0; tiType < 6; tiType++) {
- tiTriggersSeen[sourceType][tiType] = 0;
- tiTriggersMatched[sourceType][tiType] = 0;
- }
- }
- }
-
- @Override
- public TriggerStatModule clone() {
- // Make a new statistics module.
- TriggerStatModule clone = new TriggerStatModule();
-
- // Copy the values that do not depend on trigger source type.
- clone.reportedTriggers = reportedTriggers;
-
- // Set each value that depends on the trigger source type.
- for(int sourceType = 0; sourceType < 2; sourceType++) {
- clone.simTriggers[sourceType] = simTriggers[sourceType];
- clone.matchedTriggers[sourceType] = matchedTriggers[sourceType];
- clone.unmatchedTriggers[sourceType] = unmatchedTriggers[sourceType];
-
- // Set the number of times each cut failed.
- for(int cutID = 0; cutID < 4; cutID++) {
- clone.failedCuts[sourceType][cutID] = failedCuts[sourceType][cutID];
- }
-
- // Copy the values for the TI flag trigger counters.
- for(int tiType = 0; tiType < 6; tiType++) {
- clone.tiTriggersSeen[sourceType][tiType] = tiTriggersSeen[sourceType][tiType];
- clone.tiTriggersMatched[sourceType][tiType] = tiTriggersMatched[sourceType][tiType];
- }
- }
-
- // Return the copied clone.
- return clone;
- }
-
- /**
- * Gets the number of simulated triggers from reconstructed clusters
- * that were not matched.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getFailedReconSimulatedTriggers() {
- return simTriggers[RECON] - matchedTriggers[RECON];
- }
-
- /**
- * Gets the number of simulated triggers from SSP bank clusters
- * that were not matched.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getFailedSSPSimulatedTriggers() {
- return simTriggers[SSP] - matchedTriggers[SSP];
- }
-
- /**
- * Gets the number of simulated triggers from reconstructed clusters
- * that were matched.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getMatchedReconSimulatedTriggers() {
- return matchedTriggers[RECON];
- }
-
- /**
- * Gets the number of simulated triggers from reconstructed clusters
- * that were matched for a given type of trigger when a given TI
- * bank flag was active.
- * @param tiTypeID - The identifier for the type of TI bank trigger
- * that should be active.
- * @param triggerTypeID - The identifier for the type of trigger.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getMatchedReconSimulatedTriggers(int tiTypeID) {
- return tiTriggersMatched[RECON][tiTypeID];
- }
-
- /**
- * Gets the number of simulated triggers from SSP bank clusters
- * that were matched.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getMatchedSSPSimulatedTriggers() {
- return matchedTriggers[SSP];
- }
-
- /**
- * Gets the number of simulated triggers from SSP bank clusters
- * that were matched for a given type of trigger when a given TI
- * bank flag was active.
- * @param tiTypeID - The identifier for the type of TI bank trigger
- * that should be active.
- * @param triggerTypeID - The identifier for the type of trigger.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getMatchedSSPSimulatedTriggers(int tiTypeID) {
- return tiTriggersMatched[SSP][tiTypeID];
- }
-
- /**
- * Gets the number of simulated triggers from reconstructed clusters
- * that were matched, but did not see full cut alignment.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getPartiallyMatchedReconSimulatedTriggers() {
- return simTriggers[RECON] - (matchedTriggers[RECON] + unmatchedTriggers[RECON]);
- }
-
- /**
- * Gets the number of simulated triggers from SSP bank clusters
- * that were matched, but did not see full cut alignment.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getPartiallyMatchedSSPSimulatedTriggers() {
- return simTriggers[SSP] - (matchedTriggers[SSP] + unmatchedTriggers[SSP]);
- }
-
- /**
- * Gets the number of times the specified cut failed for triggers
- * that were partially matched for triggers simulated from FADC
- * reconstructed clusters.
- * @param cutIndex - The numerical cut identifier.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getReconCutFailures(int cutIndex) {
- return getCutFailures(RECON, cutIndex);
- }
-
- /**
- * Gets the number of simulated triggers from reconstructed clusters
- * that were seen.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getReconSimulatedTriggers() {
- return simTriggers[RECON];
- }
-
- /**
- * Gets the number of simulated triggers from reconstructed clusters
- * that were seen for a given trigger type when a given TI bank
- * flag was active.
- * @param tiTypeID - The identifier for the type of TI bank trigger
- * that should be active.
- * @param triggerTypeID - The identifier for the type of trigger.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getReconSimulatedTriggers(int tiTypeID) {
- return tiTriggersSeen[RECON][tiTypeID];
- }
-
- /**
- * Gets the number of triggers reported by the SSP bank.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getReportedTriggers() {
- return reportedTriggers;
- }
-
- /**
- * Gets the number of times the specified cut failed for triggers
- * that were partially matched for triggers simulated from SSP
- * bank clusters.
- * @param cutIndex - The numerical cut identifier.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getSSPCutFailures(int cutIndex) {
- return getCutFailures(SSP, cutIndex);
- }
-
- /**
- * Gets the number of simulated triggers from SSP bank clusters
- * that were seen.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getSSPSimulatedTriggers() {
- return simTriggers[SSP];
- }
-
- /**
- * Gets the number of simulated triggers from SSP bank clusters
- * that were seen for a given trigger type when a given TI bank
- * flag was active.
- * @param tiTypeID - The identifier for the type of TI bank trigger
- * that should be active.
- * @param triggerTypeID - The identifier for the type of trigger.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getSSPSimulatedTriggers(int tiTypeID) {
- return tiTriggersSeen[SSP][tiTypeID];
- }
-
- /**
- * Gets the number of simulated triggers from reconstructed clusters
- * that were completely unmatched.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getUnmatchedReconSimulatedTriggers() {
- return unmatchedTriggers[RECON];
- }
-
- /**
- * Gets the number of simulated triggers from SSP bank clusters
- * that were completely unmatched.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- public int getUnmatchedSSPSimulatedTriggers() {
- return unmatchedTriggers[SSP];
- }
-
- /**
- * Gets the number of times the specified cut failed for triggers
- * that were partially matched for triggers simulated from the type
- * of cluster indicated.
- * @param type - Either <code>SSP</code> or <code>RECON</code>.
- * @param cutIndex - The numerical cut identifier.
- * @return Returns the number of triggers as an <code>int</code>.
- */
- private int getCutFailures(int type, int cutIndex) {
- // Ensure that the cut index is valid.
- if(cutIndex < 0 || cutIndex >= 4) {
- throw new IndexOutOfBoundsException(String.format("Cut index \"%d\" is not recognized.", cutIndex));
- }
-
- // Return the cut failures.
- return failedCuts[type][cutIndex];
- }
+ // Store the reference index for SSP simulated triggers and recon
+ // simulated triggers.
+ protected static final int SSP = 0;
+ protected static final int RECON = 1;
+
+ // Define TI trigger type identifiers.
+ public static final int SINGLES_0 = 0;
+ public static final int SINGLES_1 = 1;
+ public static final int PAIR_0 = 2;
+ public static final int PAIR_1 = 3;
+ public static final int PULSER = 4;
+ public static final int COSMIC = 5;
+
+ // Track the number of simulated triggers seen for each source type.
+ // SSP simulated triggers from the SSP bank clusters. Reconstructed
+ // simulated triggers come from clusters built from FADC data.
+ protected int[] simTriggers = new int[2];
+
+ // Also track the number of triggers reported by the SSP bank.
+ protected int reportedTriggers = 0;
+
+ // Track the number of simulated triggers of each type that were
+ // successfully matched.
+ protected int[] matchedTriggers = new int[2];
+
+ // Track the number of simulated triggers that could not be matched
+ // at all.
+ protected int[] unmatchedTriggers = new int[2];
+
+ // Track which cuts succeeded and which cuts failed for each type.
+ // Note that this is currently only tracked for SSP cluster triggers.
+ protected int[][] failedCuts = new int[2][4];
+
+ // Store the number of trigger matches seen over all events that
+ // contain a given TI flag.
+ protected int[][] tiTriggersSeen = new int[2][6];
+ protected int[][] tiTriggersMatched = new int[2][6];
+
+ /**
+ * Clears all of the statistical counters in the object.
+ */
+ void clear() {
+ // Clear all values.
+ for(int sourceType = 0; sourceType < 2; sourceType++) {
+ // Clear the general statistics.
+ simTriggers[sourceType] = 0;
+ matchedTriggers[sourceType] = 0;
+ unmatchedTriggers[sourceType] = 0;
+
+ // Clear the cut failure statistics.
+ for(int cutID = 0; cutID < 4; cutID++) {
+ failedCuts[sourceType][cutID] = 0;
+ }
+
+ // Clear the TI flag statistics.
+ for(int tiType = 0; tiType < 6; tiType++) {
+ tiTriggersSeen[sourceType][tiType] = 0;
+ tiTriggersMatched[sourceType][tiType] = 0;
+ }
+ }
+ }
+
+ @Override
+ public TriggerStatModule clone() {
+ // Make a new statistics module.
+ TriggerStatModule clone = new TriggerStatModule();
+
+ // Copy the values that do not depend on trigger source type.
+ clone.reportedTriggers = reportedTriggers;
+
+ // Set each value that depends on the trigger source type.
+ for(int sourceType = 0; sourceType < 2; sourceType++) {
+ clone.simTriggers[sourceType] = simTriggers[sourceType];
+ clone.matchedTriggers[sourceType] = matchedTriggers[sourceType];
+ clone.unmatchedTriggers[sourceType] = unmatchedTriggers[sourceType];
+
+ // Set the number of times each cut failed.
+ for(int cutID = 0; cutID < 4; cutID++) {
+ clone.failedCuts[sourceType][cutID] = failedCuts[sourceType][cutID];
+ }
+
+ // Copy the values for the TI flag trigger counters.
+ for(int tiType = 0; tiType < 6; tiType++) {
+ clone.tiTriggersSeen[sourceType][tiType] = tiTriggersSeen[sourceType][tiType];
+ clone.tiTriggersMatched[sourceType][tiType] = tiTriggersMatched[sourceType][tiType];
+ }
+ }
+
+ // Return the copied clone.
+ return clone;
+ }
+
+ /**
+ * Gets the number of simulated triggers from reconstructed clusters
+ * that were not matched.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getFailedReconSimulatedTriggers() {
+ return simTriggers[RECON] - matchedTriggers[RECON];
+ }
+
+ /**
+ * Gets the number of simulated triggers from SSP bank clusters
+ * that were not matched.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getFailedSSPSimulatedTriggers() {
+ return simTriggers[SSP] - matchedTriggers[SSP];
+ }
+
+ /**
+ * Gets the number of simulated triggers from reconstructed clusters
+ * that were matched.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getMatchedReconSimulatedTriggers() {
+ return matchedTriggers[RECON];
+ }
+
+ /**
+ * Gets the number of simulated triggers from reconstructed clusters
+ * that were matched for a given type of trigger when a given TI
+ * bank flag was active.
+ * @param tiTypeID - The identifier for the type of TI bank trigger
+ * that should be active.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getMatchedReconSimulatedTriggers(int tiTypeID) {
+ return tiTriggersMatched[RECON][tiTypeID];
+ }
+
+ /**
+ * Gets the number of simulated triggers from SSP bank clusters
+ * that were matched.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getMatchedSSPSimulatedTriggers() {
+ return matchedTriggers[SSP];
+ }
+
+ /**
+ * Gets the number of simulated triggers from SSP bank clusters
+ * that were matched for a given type of trigger when a given TI
+ * bank flag was active.
+ * @param tiTypeID - The identifier for the type of TI bank trigger
+ * that should be active.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getMatchedSSPSimulatedTriggers(int tiTypeID) {
+ return tiTriggersMatched[SSP][tiTypeID];
+ }
+
+ /**
+ * Gets the number of simulated triggers from reconstructed clusters
+ * that were matched, but did not see full cut alignment.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getPartiallyMatchedReconSimulatedTriggers() {
+ return simTriggers[RECON] - (matchedTriggers[RECON] + unmatchedTriggers[RECON]);
+ }
+
+ /**
+ * Gets the number of simulated triggers from SSP bank clusters
+ * that were matched, but did not see full cut alignment.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getPartiallyMatchedSSPSimulatedTriggers() {
+ return simTriggers[SSP] - (matchedTriggers[SSP] + unmatchedTriggers[SSP]);
+ }
+
+ /**
+ * Gets the number of times the specified cut failed for triggers
+ * that were partially matched for triggers simulated from FADC
+ * reconstructed clusters.
+ * @param cutIndex - The numerical cut identifier.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getReconCutFailures(int cutIndex) {
+ return getCutFailures(RECON, cutIndex);
+ }
+
+ /**
+ * Gets the number of simulated triggers from reconstructed clusters
+ * that were seen.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getReconSimulatedTriggers() {
+ return simTriggers[RECON];
+ }
+
+ /**
+ * Gets the number of simulated triggers from reconstructed clusters
+ * that were seen for a given trigger type when a given TI bank
+ * flag was active.
+ * @param tiTypeID - The identifier for the type of TI bank trigger
+ * that should be active.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getReconSimulatedTriggers(int tiTypeID) {
+ return tiTriggersSeen[RECON][tiTypeID];
+ }
+
+ /**
+ * Gets the number of triggers reported by the SSP bank.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getReportedTriggers() {
+ return reportedTriggers;
+ }
+
+ /**
+ * Gets the number of times the specified cut failed for triggers
+ * that were partially matched for triggers simulated from SSP
+ * bank clusters.
+ * @param cutIndex - The numerical cut identifier.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getSSPCutFailures(int cutIndex) {
+ return getCutFailures(SSP, cutIndex);
+ }
+
+ /**
+ * Gets the number of simulated triggers from SSP bank clusters
+ * that were seen.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getSSPSimulatedTriggers() {
+ return simTriggers[SSP];
+ }
+
+ /**
+ * Gets the number of simulated triggers from SSP bank clusters
+ * that were seen for a given trigger type when a given TI bank
+ * flag was active.
+ * @param tiTypeID - The identifier for the type of TI bank trigger
+ * that should be active.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getSSPSimulatedTriggers(int tiTypeID) {
+ return tiTriggersSeen[SSP][tiTypeID];
+ }
+
+ /**
+ * Gets the number of simulated triggers from reconstructed clusters
+ * that were completely unmatched.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getUnmatchedReconSimulatedTriggers() {
+ return unmatchedTriggers[RECON];
+ }
+
+ /**
+ * Gets the number of simulated triggers from SSP bank clusters
+ * that were completely unmatched.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ public int getUnmatchedSSPSimulatedTriggers() {
+ return unmatchedTriggers[SSP];
+ }
+
+ /**
+ * Gets the number of times the specified cut failed for triggers
+ * that were partially matched for triggers simulated from the type
+ * of cluster indicated.
+ * @param type - Either <code>SSP</code> or <code>RECON</code>.
+ * @param cutIndex - The numerical cut identifier.
+ * @return Returns the number of triggers as an <code>int</code>.
+ */
+ private int getCutFailures(int type, int cutIndex) {
+ // Ensure that the cut index is valid.
+ if(cutIndex < 0 || cutIndex >= 4) {
+ throw new IndexOutOfBoundsException(String.format("Cut index \"%d\" is not recognized.", cutIndex));
+ }
+
+ // Return the cut failures.
+ return failedCuts[type][cutIndex];
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/event/TriggerPlotsModule.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/event/TriggerPlotsModule.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/event/TriggerPlotsModule.java Wed Apr 27 11:11:32 2016
@@ -16,383 +16,383 @@
* @author Kyle McCarty
*/
public class TriggerPlotsModule {
- // Reference variables.
- private static final int RECON = 0;
- private static final int SSP = 1;
- private static final int ALL = 0;
- private static final int MATCHED = 1;
- private static final int FAILED = 2;
- private static final int TRIGGERED = 3;
- private static final int NO_CUTS = 4;
-
- // Class variables.
- private final double[] energySlopeParamF;
- private static final double MØLLER_SUM_THRESHOLD = 0.750;
-
- // Plots.
- private AIDA aida = AIDA.defaultInstance();
- private IHistogram1D[][][] singlesClusterEnergyPlot = new IHistogram1D[2][2][5];
- private IHistogram1D[][][] singlesHitCountPlot = new IHistogram1D[2][2][5];
- private IHistogram1D[][][] singlesTriggerTimePlot = new IHistogram1D[2][2][5];
-
- private IHistogram1D[][][] pairClusterEnergyPlot = new IHistogram1D[2][2][5];
- private IHistogram1D[][][] pairHitCountPlot = new IHistogram1D[2][2][5];
- private IHistogram1D[][][] pairTimePlot = new IHistogram1D[2][2][5];
- private IHistogram1D[][][] pairSumPlot = new IHistogram1D[2][2][5];
+ // Reference variables.
+ private static final int RECON = 0;
+ private static final int SSP = 1;
+ private static final int ALL = 0;
+ private static final int MATCHED = 1;
+ private static final int FAILED = 2;
+ private static final int TRIGGERED = 3;
+ private static final int NO_CUTS = 4;
+
+ // Class variables.
+ private final double[] energySlopeParamF;
+ private static final double MØLLER_SUM_THRESHOLD = 0.750;
+
+ // Plots.
+ private AIDA aida = AIDA.defaultInstance();
+ private IHistogram1D[][][] singlesClusterEnergyPlot = new IHistogram1D[2][2][5];
+ private IHistogram1D[][][] singlesHitCountPlot = new IHistogram1D[2][2][5];
+ private IHistogram1D[][][] singlesTriggerTimePlot = new IHistogram1D[2][2][5];
+
+ private IHistogram1D[][][] pairClusterEnergyPlot = new IHistogram1D[2][2][5];
+ private IHistogram1D[][][] pairHitCountPlot = new IHistogram1D[2][2][5];
+ private IHistogram1D[][][] pairTimePlot = new IHistogram1D[2][2][5];
+ private IHistogram1D[][][] pairSumPlot = new IHistogram1D[2][2][5];
private IHistogram2D[][][] pairSumEnergiesPlot = new IHistogram2D[2][2][5];
- private IHistogram1D[][][] pairDiffPlot = new IHistogram1D[2][2][5];
- private IHistogram1D[][][] pairSlopePlot = new IHistogram1D[2][2][5];
- private IHistogram1D[][][] pairCoplanarityPlot = new IHistogram1D[2][2][5];
- private IHistogram1D[][][] pairTriggerTimePlot = new IHistogram1D[2][2][5];
-
- private IHistogram1D[] møllerClusterEnergyPlot = new IHistogram1D[2];
- private IHistogram1D[] møllerHitCountPlot = new IHistogram1D[2];
- private IHistogram1D[] møllerTimePlot = new IHistogram1D[2];
- private IHistogram1D[] møllerSumPlot = new IHistogram1D[2];
+ private IHistogram1D[][][] pairDiffPlot = new IHistogram1D[2][2][5];
+ private IHistogram1D[][][] pairSlopePlot = new IHistogram1D[2][2][5];
+ private IHistogram1D[][][] pairCoplanarityPlot = new IHistogram1D[2][2][5];
+ private IHistogram1D[][][] pairTriggerTimePlot = new IHistogram1D[2][2][5];
+
+ private IHistogram1D[] møllerClusterEnergyPlot = new IHistogram1D[2];
+ private IHistogram1D[] møllerHitCountPlot = new IHistogram1D[2];
+ private IHistogram1D[] møllerTimePlot = new IHistogram1D[2];
+ private IHistogram1D[] møllerSumPlot = new IHistogram1D[2];
private IHistogram2D[] møllerSumEnergiesPlot = new IHistogram2D[2];
- private IHistogram1D[] møllerDiffPlot = new IHistogram1D[2];
- private IHistogram1D[] møllerSlopePlot = new IHistogram1D[2];
- private IHistogram1D[] møllerCoplanarityPlot = new IHistogram1D[2];
- private IHistogram1D[] møllerTriggerTimePlot = new IHistogram1D[2];
+ private IHistogram1D[] møllerDiffPlot = new IHistogram1D[2];
+ private IHistogram1D[] møllerSlopePlot = new IHistogram1D[2];
+ private IHistogram1D[] møllerCoplanarityPlot = new IHistogram1D[2];
+ private IHistogram1D[] møllerTriggerTimePlot = new IHistogram1D[2];
private IHistogram2D[] møllerPositionPlot = new IHistogram2D[2];
-
- private IHistogram1D[] tridentClusterEnergyPlot = new IHistogram1D[2];
- private IHistogram1D[] tridentHitCountPlot = new IHistogram1D[2];
+
+ private IHistogram1D[] tridentClusterEnergyPlot = new IHistogram1D[2];
+ private IHistogram1D[] tridentHitCountPlot = new IHistogram1D[2];
private IHistogram2D[] tridentPositionPlot = new IHistogram2D[2];
-
- /**
- * Instantiates a new <code>TriggerPlotsModule</code> that will use
- * the indicated values for the energy slope conversion factor when
- * plotting energy slope values. Plots will be attached to the
- * default AIDA instance.
- * @param trigger0F - The energy slope conversion factor for the
- * first trigger.
- * @param trigger1F - The energy slope conversion factor for the
- * second trigger.
- */
- public TriggerPlotsModule(double trigger0F, double trigger1F) {
- // Store the energy slope parameter.
- energySlopeParamF = new double[2];
- energySlopeParamF[0] = trigger0F;
- energySlopeParamF[1] = trigger1F;
-
- // Define type string values.
- String[] sourceType = { "Recon", "SSP" };
- String[] resultType = { "All", "Matched", "Failed", "Triggered", "No Cuts" };
-
- // Instantiate the trigger result plots for each trigger.
- for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
- // Get the directory for the current triggers.
- String pairDir = "Pair Trigger " + triggerNum;
- String singlesDir = "Singles Trigger " + triggerNum;
-
- // Instantiate the trigger result plots for each type of
- // trigger source object.
- for(int source = 0; source < 2; source++) {
- // Instantiate the trigger result plots for each type
- // of trigger match result.
- for(int result = 0; result < 5; result++) {
- // Instantiate the singles trigger plots.
- singlesClusterEnergyPlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Singles Cluster Energy (%s)",
- singlesDir, sourceType[source], resultType[result]), 300, 0.0, 3.0);
- singlesHitCountPlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Singles Hit Count (%s)",
- singlesDir, sourceType[source], resultType[result]), 9, 0.5, 9.5);
- singlesTriggerTimePlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Singles Trigger Time (%s)",
- singlesDir, sourceType[source], resultType[result]), 100, 0, 400);
-
- // Instantiate the pair trigger plots.
- pairHitCountPlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Pair Hit Count (%s)",
- pairDir, sourceType[source], resultType[result]), 9, 0.5, 9.5);
- pairClusterEnergyPlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Pair Cluster Energy (%s)",
- pairDir, sourceType[source], resultType[result]), 300, 0.0, 3.0);
- pairTimePlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Pair Time Coincidence (%s)",
- pairDir, sourceType[source], resultType[result]), 8, 0, 32);
- pairSumPlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Pair Energy Sum (%s)",
- pairDir, sourceType[source], resultType[result]), 300, 0.0, 3.0);
+
+ /**
+ * Instantiates a new <code>TriggerPlotsModule</code> that will use
+ * the indicated values for the energy slope conversion factor when
+ * plotting energy slope values. Plots will be attached to the
+ * default AIDA instance.
+ * @param trigger0F - The energy slope conversion factor for the
+ * first trigger.
+ * @param trigger1F - The energy slope conversion factor for the
+ * second trigger.
+ */
+ public TriggerPlotsModule(double trigger0F, double trigger1F) {
+ // Store the energy slope parameter.
+ energySlopeParamF = new double[2];
+ energySlopeParamF[0] = trigger0F;
+ energySlopeParamF[1] = trigger1F;
+
+ // Define type string values.
+ String[] sourceType = { "Recon", "SSP" };
+ String[] resultType = { "All", "Matched", "Failed", "Triggered", "No Cuts" };
+
+ // Instantiate the trigger result plots for each trigger.
+ for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
+ // Get the directory for the current triggers.
+ String pairDir = "Pair Trigger " + triggerNum;
+ String singlesDir = "Singles Trigger " + triggerNum;
+
+ // Instantiate the trigger result plots for each type of
+ // trigger source object.
+ for(int source = 0; source < 2; source++) {
+ // Instantiate the trigger result plots for each type
+ // of trigger match result.
+ for(int result = 0; result < 5; result++) {
+ // Instantiate the singles trigger plots.
+ singlesClusterEnergyPlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Singles Cluster Energy (%s)",
+ singlesDir, sourceType[source], resultType[result]), 300, 0.0, 3.0);
+ singlesHitCountPlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Singles Hit Count (%s)",
+ singlesDir, sourceType[source], resultType[result]), 9, 0.5, 9.5);
+ singlesTriggerTimePlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Singles Trigger Time (%s)",
+ singlesDir, sourceType[source], resultType[result]), 100, 0, 400);
+
+ // Instantiate the pair trigger plots.
+ pairHitCountPlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Pair Hit Count (%s)",
+ pairDir, sourceType[source], resultType[result]), 9, 0.5, 9.5);
+ pairClusterEnergyPlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Pair Cluster Energy (%s)",
+ pairDir, sourceType[source], resultType[result]), 300, 0.0, 3.0);
+ pairTimePlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Pair Time Coincidence (%s)",
+ pairDir, sourceType[source], resultType[result]), 8, 0, 32);
+ pairSumPlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Pair Energy Sum (%s)",
+ pairDir, sourceType[source], resultType[result]), 300, 0.0, 3.0);
pairSumEnergiesPlot[triggerNum][source][result] = aida.histogram2D(String.format("%s/%s/Pair 2D Energy Sum (%s)",
pairDir, sourceType[source], resultType[result]), 300, 0.0, 3.0, 300, 0.0, 3.0);
- pairDiffPlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Pair Energy Difference (%s)",
- pairDir, sourceType[source], resultType[result]), 300, 0.0, 3.0);
- pairSlopePlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Pair Energy Slope (%s)",
- pairDir, sourceType[source], resultType[result]), 300, 0.0, 3.0);
- pairCoplanarityPlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Pair Coplanarity (%s)",
- pairDir, sourceType[source], resultType[result]), 180, 0, 180);
- pairTriggerTimePlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Pair Trigger Time (%s)",
- pairDir, sourceType[source], resultType[result]), 100, 0, 400);
- }
- }
-
- // Instantiate the Møller plots.
- møllerHitCountPlot[triggerNum] = aida.histogram1D(String.format("%s/Møller/Møller-like Pair Hit Count",
- pairDir), 9, 0.5, 9.5);
- møllerClusterEnergyPlot[triggerNum] = aida.histogram1D(String.format("%s/Møller/Møller-like Pair Cluster Energy",
- pairDir), 300, 0.0, 3.0);
- møllerTimePlot[triggerNum] = aida.histogram1D(String.format("%s/Møller/Møller-like Pair Time Coincidence",
- pairDir), 8, 0, 32);
- møllerSumPlot[triggerNum] = aida.histogram1D(String.format("%s/Møller/Møller-like Pair Energy Sum",
- pairDir), 300, 0.0, 3.0);
- møllerSumEnergiesPlot[triggerNum] = aida.histogram2D(String.format("%s/Møller/Møller-like Pair 2D Energy Sum",
+ pairDiffPlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Pair Energy Difference (%s)",
+ pairDir, sourceType[source], resultType[result]), 300, 0.0, 3.0);
+ pairSlopePlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Pair Energy Slope (%s)",
+ pairDir, sourceType[source], resultType[result]), 300, 0.0, 3.0);
+ pairCoplanarityPlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Pair Coplanarity (%s)",
+ pairDir, sourceType[source], resultType[result]), 180, 0, 180);
+ pairTriggerTimePlot[triggerNum][source][result] = aida.histogram1D(String.format("%s/%s/Pair Trigger Time (%s)",
+ pairDir, sourceType[source], resultType[result]), 100, 0, 400);
+ }
+ }
+
+ // Instantiate the Møller plots.
+ møllerHitCountPlot[triggerNum] = aida.histogram1D(String.format("%s/Møller/Møller-like Pair Hit Count",
+ pairDir), 9, 0.5, 9.5);
+ møllerClusterEnergyPlot[triggerNum] = aida.histogram1D(String.format("%s/Møller/Møller-like Pair Cluster Energy",
+ pairDir), 300, 0.0, 3.0);
+ møllerTimePlot[triggerNum] = aida.histogram1D(String.format("%s/Møller/Møller-like Pair Time Coincidence",
+ pairDir), 8, 0, 32);
+ møllerSumPlot[triggerNum] = aida.histogram1D(String.format("%s/Møller/Møller-like Pair Energy Sum",
+ pairDir), 300, 0.0, 3.0);
+ møllerSumEnergiesPlot[triggerNum] = aida.histogram2D(String.format("%s/Møller/Møller-like Pair 2D Energy Sum",
pairDir), 300, 0.0, 3.0, 300, 0.0, 3.0);
møllerDiffPlot[triggerNum] = aida.histogram1D(String.format("%s/Møller/Møller-like Pair Energy Difference",
- pairDir), 300, 0.0, 3.0);
+ pairDir), 300, 0.0, 3.0);
møllerSlopePlot[triggerNum] = aida.histogram1D(String.format("%s/Møller/Møller-like Pair Energy Slope",
- pairDir), 300, 0.0, 3.0);
+ pairDir), 300, 0.0, 3.0);
møllerCoplanarityPlot[triggerNum] = aida.histogram1D(String.format("%s/Møller/Møller-like Pair Coplanarity",
- pairDir), 180, 0, 180);
+ pairDir), 180, 0, 180);
møllerTriggerTimePlot[triggerNum] = aida.histogram1D(String.format("%s/Møller/Møller-like Pair Trigger Time",
- pairDir), 100, 0, 400);
+ pairDir), 100, 0, 400);
møllerPositionPlot[triggerNum] = aida.histogram2D(String.format("%s/Møller/Møller-like Pair Position",
- pairDir), 46, -23, 23, 11, -5.5, 5.5);
+ pairDir), 46, -23, 23, 11, -5.5, 5.5);
// Instantiate the trident plots.
tridentHitCountPlot[triggerNum] = aida.histogram1D(String.format("%s/Trident/Trident-like Pair Hit Count",
- singlesDir), 9, 0.5, 9.5);
- tridentClusterEnergyPlot[triggerNum] = aida.histogram1D(String.format("%s/Trident/Trident-like Pair Cluster Energy",
- singlesDir), 300, 0.0, 3.0);
+ singlesDir), 9, 0.5, 9.5);
+ tridentClusterEnergyPlot[triggerNum] = aida.histogram1D(String.format("%s/Trident/Trident-like Pair Cluster Energy",
+ singlesDir), 300, 0.0, 3.0);
tridentPositionPlot[triggerNum] = aida.histogram2D(String.format("%s/Trident/Trident-like Pair Position",
- singlesDir), 46, -23, 23, 11, -5.5, 5.5);
- }
- }
-
- /**
- * Populates the "failed" plots of the appropriate type with the
- * cut results from the argument trigger.
- * @param trigger - The trigger from which to populate the plots.
- */
- public void failedTrigger(Trigger<?> trigger) {
- processTrigger(trigger, FAILED);
- }
-
- /**
- * Populates the "matched" plots of the appropriate type with the
- * cut results from the argument trigger.
- * @param trigger - The trigger from which to populate the plots.
- */
- public void matchedTrigger(Trigger<?> trigger) {
- processTrigger(trigger, MATCHED);
- }
-
- /**
- * Populates the "triggered" plots of the appropriate type with the
- * cut results from the argument trigger.
- * @param trigger - The trigger from which to populate the plots.
- */
- public void passedTrigger(Trigger<?> trigger) {
- processTrigger(trigger, TRIGGERED);
- }
-
- /**
- * Indicates that a cluster was seen by a trigger and adds it to
- * the "no cuts" plots.
- * @param triggerNum - The number of the trigger.
- * @param cluster - The cluster that was seen.
- */
- public void sawCluster(int triggerNum, Cluster cluster) {
- processSingles(triggerNum, NO_CUTS, cluster);
- }
-
- /**
- * Indicates that a cluster was seen by a trigger and adds it to
- * the "no cuts" plots.
- * @param triggerNum - The number of the trigger.
- * @param cluster - The cluster that was seen.
- */
- public void sawCluster(int triggerNum, SSPCluster cluster) {
- processSingles(triggerNum, NO_CUTS, cluster);
- }
-
- /**
- * Indicates that a cluster pair was seen by a trigger and adds it
- * to the "no cuts" plots.
- * @param triggerNum - The number of the trigger.
- * @param pair - The cluster pair that was seen.
- */
- public void sawPair(int triggerNum, Cluster[] pair) {
- processPair(triggerNum, NO_CUTS, pair);
- }
-
- /**
- * Indicates that a cluster pair was seen by a trigger and adds it
- * to the "no cuts" plots.
- * @param triggerNum - The number of the trigger.
- * @param pair - The cluster pair that was seen.
- */
- public void sawPair(int triggerNum, SSPCluster[] pair) {
- processPair(triggerNum, NO_CUTS, pair);
- }
-
- /**
- * Populates the "all" plots of the appropriate type with the cut
- * results from the argument trigger.
- * @param trigger - The trigger from which to populate the plots.
- */
- public void sawTrigger(Trigger<?> trigger) {
- processTrigger(trigger, ALL);
- }
-
- /**
- * Sets the energy slope conversion factor to be used to calculate
- * the energy slope value for plots.
- * @param triggerNum - The trigger for which the conversion factor
- * should be used.
- * @param value - The conversion factor in units of GeV/mm.
- */
- public void setEnergySlopeParamF(int triggerNum, double value) {
- // Make sure that the trigger number is valid.
- if(triggerNum < 0 || triggerNum > 1) {
- throw new IllegalArgumentException(String.format("Trigger number %d is not valid.", triggerNum));
- }
-
- // Set the parameter.
- energySlopeParamF[triggerNum] = value;
- }
-
- /**
- * Populates the indicated type of plots of the appropriate type
- * for the argument trigger.
- * @param trigger - The trigger from which to populate the plots.
- * @param plotType - The type of plot to populate. This must be one
- * of <code>ALL</code>, <code>MATCHED</code>, or <code>FAILED</code>.
- */
- private void processTrigger(Trigger<?> trigger, int plotType) {
- // Get the trigger number and source.
- Object source = trigger.getTriggerSource();
- int triggerNum = trigger.getTriggerNumber();
-
- // Populate the plots using the appropriate method.
- if(source instanceof Cluster) {
- processSingles(triggerNum, plotType, (Cluster) source);
- }
- else if(source instanceof SSPCluster) {
- processSingles(triggerNum, plotType, (SSPCluster) source);
- }
- else if(source instanceof Cluster[]) {
- processPair(triggerNum, plotType, (Cluster[]) source);
- }
- else if(source instanceof SSPCluster[]) {
- processPair(triggerNum, plotType, (SSPCluster[]) source);
- }
-
- // If the trigger source is unsupported, produce an error.
- else {
- throw new IllegalArgumentException(String.format("Trigger source \"%s\" is not supported.", source.getClass().getSimpleName()));
- }
- }
-
- /**
- * Populates the trigger singles plots for the indicated type for
- * reconstructed clusters.
- * @param triggerNum - The trigger number of the source trigger.
- * @param plotType - The type of plot to populate. This must be one
- * of <code>ALL</code>, <code>MATCHED</code>, or <code>FAILED</code>.
- * @param pair - The triggering cluster.
- */
- private void processSingles(int triggerNum, int plotType, Cluster cluster) {
- // Fill the cluster singles plots.
- singlesHitCountPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueClusterHitCount(cluster));
- singlesClusterEnergyPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueClusterTotalEnergy(cluster));
- singlesTriggerTimePlot[triggerNum][RECON][plotType].fill(cluster.getCalorimeterHits().get(0).getTime());
- }
-
- /**
- * Populates the trigger singles plots for the indicated type for SSP
- * clusters.
- * @param triggerNum - The trigger number of the source trigger.
- * @param plotType - The type of plot to populate. This must be one
- * of <code>ALL</code>, <code>MATCHED</code>, or <code>FAILED</code>.
- * @param pair - The triggering cluster.
- */
- private void processSingles(int triggerNum, int plotType, SSPCluster cluster) {
- // Fill the cluster singles plots.
- singlesHitCountPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueClusterHitCount(cluster));
- singlesClusterEnergyPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueClusterTotalEnergy(cluster));
- singlesTriggerTimePlot[triggerNum][SSP][plotType].fill(cluster.getTime());
-
- // Check if this cluster is "trident-like."
- // TODO: Define "trident-like."
- boolean processTrident = false;
-
- // If this is a trident-like event, add it to the trident plots.
- if(processTrident) {
- tridentHitCountPlot[triggerNum].fill(TriggerModule.getValueClusterHitCount(cluster));
- tridentClusterEnergyPlot[triggerNum].fill(TriggerModule.getValueClusterTotalEnergy(cluster));
- tridentPositionPlot[triggerNum].fill(cluster.getXIndex() > 0 ? cluster.getXIndex() - 1 : cluster.getXIndex(), cluster.getYIndex());
- }
- }
-
- /**
- * Populates the trigger pair plots for the indicated type for
- * reconstructed cluster pairs.
- * @param triggerNum - The trigger number of the source trigger.
- * @param plotType - The type of plot to populate. This must be one
- * of <code>ALL</code>, <code>MATCHED</code>, or <code>FAILED</code>.
- * @param pair - The triggering pair.
- */
- private void processPair(int triggerNum, int plotType, Cluster[] pair) {
- // Fill the cluster singles plots.
- pairHitCountPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueClusterHitCount(pair[0]));
- pairHitCountPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueClusterHitCount(pair[1]));
- pairClusterEnergyPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueClusterTotalEnergy(pair[0]));
- pairClusterEnergyPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueClusterTotalEnergy(pair[1]));
- pairTriggerTimePlot[triggerNum][RECON][plotType].fill(pair[0].getCalorimeterHits().get(0).getTime());
- pairTriggerTimePlot[triggerNum][RECON][plotType].fill(pair[1].getCalorimeterHits().get(0).getTime());
-
- // Fill the cluster pair plots.
- pairTimePlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueTimeCoincidence(pair));
- pairSumPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueEnergySum(pair));
+ singlesDir), 46, -23, 23, 11, -5.5, 5.5);
+ }
+ }
+
+ /**
+ * Populates the "failed" plots of the appropriate type with the
+ * cut results from the argument trigger.
+ * @param trigger - The trigger from which to populate the plots.
+ */
+ public void failedTrigger(Trigger<?> trigger) {
+ processTrigger(trigger, FAILED);
+ }
+
+ /**
+ * Populates the "matched" plots of the appropriate type with the
+ * cut results from the argument trigger.
+ * @param trigger - The trigger from which to populate the plots.
+ */
+ public void matchedTrigger(Trigger<?> trigger) {
+ processTrigger(trigger, MATCHED);
+ }
+
+ /**
+ * Populates the "triggered" plots of the appropriate type with the
+ * cut results from the argument trigger.
+ * @param trigger - The trigger from which to populate the plots.
+ */
+ public void passedTrigger(Trigger<?> trigger) {
+ processTrigger(trigger, TRIGGERED);
+ }
+
+ /**
+ * Indicates that a cluster was seen by a trigger and adds it to
+ * the "no cuts" plots.
+ * @param triggerNum - The number of the trigger.
+ * @param cluster - The cluster that was seen.
+ */
+ public void sawCluster(int triggerNum, Cluster cluster) {
+ processSingles(triggerNum, NO_CUTS, cluster);
+ }
+
+ /**
+ * Indicates that a cluster was seen by a trigger and adds it to
+ * the "no cuts" plots.
+ * @param triggerNum - The number of the trigger.
+ * @param cluster - The cluster that was seen.
+ */
+ public void sawCluster(int triggerNum, SSPCluster cluster) {
+ processSingles(triggerNum, NO_CUTS, cluster);
+ }
+
+ /**
+ * Indicates that a cluster pair was seen by a trigger and adds it
+ * to the "no cuts" plots.
+ * @param triggerNum - The number of the trigger.
+ * @param pair - The cluster pair that was seen.
+ */
+ public void sawPair(int triggerNum, Cluster[] pair) {
+ processPair(triggerNum, NO_CUTS, pair);
+ }
+
+ /**
+ * Indicates that a cluster pair was seen by a trigger and adds it
+ * to the "no cuts" plots.
+ * @param triggerNum - The number of the trigger.
+ * @param pair - The cluster pair that was seen.
+ */
+ public void sawPair(int triggerNum, SSPCluster[] pair) {
+ processPair(triggerNum, NO_CUTS, pair);
+ }
+
+ /**
+ * Populates the "all" plots of the appropriate type with the cut
+ * results from the argument trigger.
+ * @param trigger - The trigger from which to populate the plots.
+ */
+ public void sawTrigger(Trigger<?> trigger) {
+ processTrigger(trigger, ALL);
+ }
+
+ /**
+ * Sets the energy slope conversion factor to be used to calculate
+ * the energy slope value for plots.
+ * @param triggerNum - The trigger for which the conversion factor
+ * should be used.
+ * @param value - The conversion factor in units of GeV/mm.
+ */
+ public void setEnergySlopeParamF(int triggerNum, double value) {
+ // Make sure that the trigger number is valid.
+ if(triggerNum < 0 || triggerNum > 1) {
+ throw new IllegalArgumentException(String.format("Trigger number %d is not valid.", triggerNum));
+ }
+
+ // Set the parameter.
+ energySlopeParamF[triggerNum] = value;
+ }
+
+ /**
+ * Populates the indicated type of plots of the appropriate type
+ * for the argument trigger.
+ * @param trigger - The trigger from which to populate the plots.
+ * @param plotType - The type of plot to populate. This must be one
+ * of <code>ALL</code>, <code>MATCHED</code>, or <code>FAILED</code>.
+ */
+ private void processTrigger(Trigger<?> trigger, int plotType) {
+ // Get the trigger number and source.
+ Object source = trigger.getTriggerSource();
+ int triggerNum = trigger.getTriggerNumber();
+
+ // Populate the plots using the appropriate method.
+ if(source instanceof Cluster) {
+ processSingles(triggerNum, plotType, (Cluster) source);
+ }
+ else if(source instanceof SSPCluster) {
+ processSingles(triggerNum, plotType, (SSPCluster) source);
+ }
+ else if(source instanceof Cluster[]) {
+ processPair(triggerNum, plotType, (Cluster[]) source);
+ }
+ else if(source instanceof SSPCluster[]) {
+ processPair(triggerNum, plotType, (SSPCluster[]) source);
+ }
+
+ // If the trigger source is unsupported, produce an error.
+ else {
+ throw new IllegalArgumentException(String.format("Trigger source \"%s\" is not supported.", source.getClass().getSimpleName()));
+ }
+ }
+
+ /**
+ * Populates the trigger singles plots for the indicated type for
+ * reconstructed clusters.
+ * @param triggerNum - The trigger number of the source trigger.
+ * @param plotType - The type of plot to populate. This must be one
+ * of <code>ALL</code>, <code>MATCHED</code>, or <code>FAILED</code>.
+ * @param cluster - The triggering cluster.
+ */
+ private void processSingles(int triggerNum, int plotType, Cluster cluster) {
+ // Fill the cluster singles plots.
+ singlesHitCountPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueClusterHitCount(cluster));
+ singlesClusterEnergyPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueClusterTotalEnergy(cluster));
+ singlesTriggerTimePlot[triggerNum][RECON][plotType].fill(cluster.getCalorimeterHits().get(0).getTime());
+ }
+
+ /**
+ * Populates the trigger singles plots for the indicated type for SSP
+ * clusters.
+ * @param triggerNum - The trigger number of the source trigger.
+ * @param plotType - The type of plot to populate. This must be one
+ * of <code>ALL</code>, <code>MATCHED</code>, or <code>FAILED</code>.
+ * @param cluster - The triggering cluster.
+ */
+ private void processSingles(int triggerNum, int plotType, SSPCluster cluster) {
+ // Fill the cluster singles plots.
+ singlesHitCountPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueClusterHitCount(cluster));
+ singlesClusterEnergyPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueClusterTotalEnergy(cluster));
+ singlesTriggerTimePlot[triggerNum][SSP][plotType].fill(cluster.getTime());
+
+ // Check if this cluster is "trident-like."
+ // TODO: Define "trident-like."
+ boolean processTrident = false;
+
+ // If this is a trident-like event, add it to the trident plots.
+ if(processTrident) {
+ tridentHitCountPlot[triggerNum].fill(TriggerModule.getValueClusterHitCount(cluster));
+ tridentClusterEnergyPlot[triggerNum].fill(TriggerModule.getValueClusterTotalEnergy(cluster));
+ tridentPositionPlot[triggerNum].fill(cluster.getXIndex() > 0 ? cluster.getXIndex() - 1 : cluster.getXIndex(), cluster.getYIndex());
+ }
+ }
+
+ /**
+ * Populates the trigger pair plots for the indicated type for
+ * reconstructed cluster pairs.
+ * @param triggerNum - The trigger number of the source trigger.
+ * @param plotType - The type of plot to populate. This must be one
+ * of <code>ALL</code>, <code>MATCHED</code>, or <code>FAILED</code>.
+ * @param pair - The triggering pair.
+ */
+ private void processPair(int triggerNum, int plotType, Cluster[] pair) {
+ // Fill the cluster singles plots.
+ pairHitCountPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueClusterHitCount(pair[0]));
+ pairHitCountPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueClusterHitCount(pair[1]));
+ pairClusterEnergyPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueClusterTotalEnergy(pair[0]));
+ pairClusterEnergyPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueClusterTotalEnergy(pair[1]));
+ pairTriggerTimePlot[triggerNum][RECON][plotType].fill(pair[0].getCalorimeterHits().get(0).getTime());
+ pairTriggerTimePlot[triggerNum][RECON][plotType].fill(pair[1].getCalorimeterHits().get(0).getTime());
+
+ // Fill the cluster pair plots.
+ pairTimePlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueTimeCoincidence(pair));
+ pairSumPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueEnergySum(pair));
pairSumEnergiesPlot[triggerNum][RECON][plotType].fill(pair[0].getEnergy(), pair[1].getEnergy());
- pairDiffPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueEnergyDifference(pair));
- pairSlopePlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueEnergySlope(pair, energySlopeParamF[triggerNum]));
- pairCoplanarityPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueCoplanarity(pair));
- }
-
- /**
- * Populates the trigger pair plots for the indicated type for SSP
- * cluster pairs.
- * @param triggerNum - The trigger number of the source trigger.
- * @param plotType - The type of plot to populate. This must be one
- * of <code>ALL</code>, <code>MATCHED</code>, or <code>FAILED</code>.
- * @param pair - The triggering pair.
- */
- private void processPair(int triggerNum, int plotType, SSPCluster[] pair) {
- // Fill the cluster singles plots.
- pairHitCountPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueClusterHitCount(pair[0]));
- pairHitCountPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueClusterHitCount(pair[1]));
- pairClusterEnergyPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueClusterTotalEnergy(pair[0]));
- pairClusterEnergyPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueClusterTotalEnergy(pair[1]));
- pairTriggerTimePlot[triggerNum][SSP][plotType].fill(pair[0].getTime());
- pairTriggerTimePlot[triggerNum][SSP][plotType].fill(pair[1].getTime());
-
- // Fill the cluster pair plots.
- pairTimePlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueTimeCoincidence(pair));
- pairSumPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueEnergySum(pair));
+ pairDiffPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueEnergyDifference(pair));
+ pairSlopePlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueEnergySlope(pair, energySlopeParamF[triggerNum]));
+ pairCoplanarityPlot[triggerNum][RECON][plotType].fill(TriggerModule.getValueCoplanarity(pair));
+ }
+
+ /**
+ * Populates the trigger pair plots for the indicated type for SSP
+ * cluster pairs.
+ * @param triggerNum - The trigger number of the source trigger.
+ * @param plotType - The type of plot to populate. This must be one
+ * of <code>ALL</code>, <code>MATCHED</code>, or <code>FAILED</code>.
+ * @param pair - The triggering pair.
+ */
+ private void processPair(int triggerNum, int plotType, SSPCluster[] pair) {
+ // Fill the cluster singles plots.
+ pairHitCountPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueClusterHitCount(pair[0]));
+ pairHitCountPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueClusterHitCount(pair[1]));
+ pairClusterEnergyPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueClusterTotalEnergy(pair[0]));
+ pairClusterEnergyPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueClusterTotalEnergy(pair[1]));
+ pairTriggerTimePlot[triggerNum][SSP][plotType].fill(pair[0].getTime());
+ pairTriggerTimePlot[triggerNum][SSP][plotType].fill(pair[1].getTime());
+
+ // Fill the cluster pair plots.
+ pairTimePlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueTimeCoincidence(pair));
+ pairSumPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueEnergySum(pair));
pairSumEnergiesPlot[triggerNum][SSP][plotType].fill(pair[0].getEnergy(), pair[1].getEnergy());
- pairDiffPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueEnergyDifference(pair));
- pairSlopePlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueEnergySlope(pair, energySlopeParamF[triggerNum]));
- pairCoplanarityPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueCoplanarity(pair));
-
- // Check if this pair is "Møller-like."
- boolean processMøller = TriggerModule.getValueEnergySum(pair) >= MØLLER_SUM_THRESHOLD;
-
- // If the pair is Møller-like, populate the Møller plots.
- if(processMøller) {
- // Fill the cluster singles plots.
- møllerHitCountPlot[triggerNum].fill(TriggerModule.getValueClusterHitCount(pair[0]));
- møllerHitCountPlot[triggerNum].fill(TriggerModule.getValueClusterHitCount(pair[1]));
- møllerClusterEnergyPlot[triggerNum].fill(TriggerModule.getValueClusterTotalEnergy(pair[0]));
- møllerClusterEnergyPlot[triggerNum].fill(TriggerModule.getValueClusterTotalEnergy(pair[1]));
- møllerTriggerTimePlot[triggerNum].fill(pair[0].getTime());
- møllerTriggerTimePlot[triggerNum].fill(pair[1].getTime());
- møllerPositionPlot[triggerNum].fill(pair[0].getXIndex() > 0 ? pair[0].getXIndex() - 1 : pair[0].getXIndex(), pair[0].getYIndex());
- møllerPositionPlot[triggerNum].fill(pair[1].getXIndex() > 0 ? pair[1].getXIndex() - 1 : pair[1].getXIndex(), pair[1].getYIndex());
-
- // Fill the cluster pair plots.
- møllerTimePlot[triggerNum].fill(TriggerModule.getValueTimeCoincidence(pair));
- møllerSumPlot[triggerNum].fill(TriggerModule.getValueEnergySum(pair));
- møllerSumEnergiesPlot[triggerNum].fill(pair[0].getEnergy(), pair[1].getEnergy());
- møllerDiffPlot[triggerNum].fill(TriggerModule.getValueEnergyDifference(pair));
- møllerSlopePlot[triggerNum].fill(TriggerModule.getValueEnergySlope(pair, energySlopeParamF[triggerNum]));
- møllerCoplanarityPlot[triggerNum].fill(TriggerModule.getValueCoplanarity(pair));
- }
- }
+ pairDiffPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueEnergyDifference(pair));
+ pairSlopePlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueEnergySlope(pair, energySlopeParamF[triggerNum]));
+ pairCoplanarityPlot[triggerNum][SSP][plotType].fill(TriggerModule.getValueCoplanarity(pair));
+
+ // Check if this pair is "Møller-like."
+ boolean processMøller = TriggerModule.getValueEnergySum(pair) >= MØLLER_SUM_THRESHOLD;
+
+ // If the pair is Møller-like, populate the Møller plots.
+ if(processMøller) {
+ // Fill the cluster singles plots.
+ møllerHitCountPlot[triggerNum].fill(TriggerModule.getValueClusterHitCount(pair[0]));
+ møllerHitCountPlot[triggerNum].fill(TriggerModule.getValueClusterHitCount(pair[1]));
+ møllerClusterEnergyPlot[triggerNum].fill(TriggerModule.getValueClusterTotalEnergy(pair[0]));
+ møllerClusterEnergyPlot[triggerNum].fill(TriggerModule.getValueClusterTotalEnergy(pair[1]));
+ møllerTriggerTimePlot[triggerNum].fill(pair[0].getTime());
+ møllerTriggerTimePlot[triggerNum].fill(pair[1].getTime());
+ møllerPositionPlot[triggerNum].fill(pair[0].getXIndex() > 0 ? pair[0].getXIndex() - 1 : pair[0].getXIndex(), pair[0].getYIndex());
+ møllerPositionPlot[triggerNum].fill(pair[1].getXIndex() > 0 ? pair[1].getXIndex() - 1 : pair[1].getXIndex(), pair[1].getYIndex());
+
+ // Fill the cluster pair plots.
+ møllerTimePlot[triggerNum].fill(TriggerModule.getValueTimeCoincidence(pair));
+ møllerSumPlot[triggerNum].fill(TriggerModule.getValueEnergySum(pair));
+ møllerSumEnergiesPlot[triggerNum].fill(pair[0].getEnergy(), pair[1].getEnergy());
+ møllerDiffPlot[triggerNum].fill(TriggerModule.getValueEnergyDifference(pair));
+ møllerSlopePlot[triggerNum].fill(TriggerModule.getValueEnergySlope(pair, energySlopeParamF[triggerNum]));
+ møllerCoplanarityPlot[triggerNum].fill(TriggerModule.getValueCoplanarity(pair));
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/ComponentUtils.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/ComponentUtils.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/ComponentUtils.java Wed Apr 27 11:11:32 2016
@@ -1,8 +1,6 @@
package org.hps.analysis.trigger.util;
import java.awt.Component;
-
-import org.hps.analysis.trigger.util.TriggerDiagnosticUtil;
/**
* Class <code>ComponentUtils</code> is a list of utility methods used
@@ -11,116 +9,116 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public class ComponentUtils {
- /** The default spacing used between a horizontal edge of one
- * component and the horizontal edge of another. */
- public static final int hinternal = 10;
- /** The default spacing used between a vertical edge of one
- * component and the vertical edge of another. */
- public static final int vinternal = 10;
- /** The default spacing used between a horizontal edge of one
- * component and the edge of its parent component. */
- public static final int hexternal = 0;
- /** The default spacing used between a vertical edge of one
- * component and the edge of its parent component. */
- public static final int vexternal = 0;
-
- /**
- * Gets a <code>String</code> composed of a number of instances of
- * character <code>c</code> equal to <code>number</code>.
- * @param c - The character to repeat.
- * @param number - The number of repetitions.
- * @return Returns the repeated character as a <code>String</code>.
- */
- public static final String getChars(char c, int number) {
- // Create a buffer to store the characters in.
- StringBuffer s = new StringBuffer();
-
- // Add the indicated number of instances.
- for(int i = 0; i < number; i++) {
- s.append(c);
- }
-
- // Return the string.
- return s.toString();
- }
-
- /**
- * Gets the number of digits in the base-10 String representation
- * of an integer primitive. Negative signs are not included in the
- * digit count.
- * @param value - The value of which to obtain the length.
- * @return Returns the number of digits in the String representation
- * of the argument value.
- */
- public static final int getDigits(int value) {
- return TriggerDiagnosticUtil.getDigits(value);
- }
-
- /**
- * Gets the maximum value from a list of values.
- * @param values - The values to compare.
- * @return Returns the largest of the argument values.
- * @throws IllegalArgumentException Occurs if no values are given.
- */
- public static final int max(int... values) throws IllegalArgumentException {
- // Throw an error if no arguments are provided.
- if(values == null || values.length == 0) {
- throw new IllegalArgumentException("Can not determine maximum value from a list of 0 values.");
- }
-
- // If there is only one value, return it.
- if(values.length == 1) { return values[0]; }
-
- // Otherwise, get the largest value.
- int largest = Integer.MIN_VALUE;
- for(int value : values) {
- if(value > largest) { largest = value; }
- }
-
- // Return the result.
- return largest;
- }
-
- /**
- * Gets the x-coordinate immediately to the right of the given
- * component.
- * @param c - The component of which to find the edge.
- * @return Returns the x-coordinate as an <code>int</code> value.
- */
- public static final int getNextX(Component c) {
- return getNextX(c, 0);
- }
-
- /**
- * Gets the x-coordinate a given distance to the right edge of the
- * argument component.
- * @param c - The component of which to find the edge.
- * @param spacing - The additional spacing past the edge of the
- * component to add.
- * @return Returns the x-coordinate as an <code>int</code> value.
- */
- public static final int getNextX(Component c, int spacing) {
- return c.getX() + c.getWidth() + spacing;
- }
-
- /**
- * Gets the y-coordinate immediately below the given component.
- * @param c - The component of which to find the edge.
- * @return Returns the y-coordinate as an <code>int</code> value.
- */
- public static final int getNextY(Component c) {
- return getNextY(c, 0);
- }
-
- /**
- * Gets the y-coordinate a given distance below the bottom edge
- * of the argument component.
- * @param c - The component of which to find the edge.
- * @param spacing - The additional spacing past the edge of the
- * component to add.
- * @return Returns the y-coordinate as an <code>int</code> value.
- */
- public static final int getNextY(Component c, int spacing) {
- return c.getY() + c.getHeight() + spacing;
- }
+ /** The default spacing used between a horizontal edge of one
+ * component and the horizontal edge of another. */
+ public static final int hinternal = 10;
+ /** The default spacing used between a vertical edge of one
+ * component and the vertical edge of another. */
+ public static final int vinternal = 10;
+ /** The default spacing used between a horizontal edge of one
+ * component and the edge of its parent component. */
+ public static final int hexternal = 0;
+ /** The default spacing used between a vertical edge of one
+ * component and the edge of its parent component. */
+ public static final int vexternal = 0;
+
+ /**
+ * Gets a <code>String</code> composed of a number of instances of
+ * character <code>c</code> equal to <code>number</code>.
+ * @param c - The character to repeat.
+ * @param number - The number of repetitions.
+ * @return Returns the repeated character as a <code>String</code>.
+ */
+ public static final String getChars(char c, int number) {
+ // Create a buffer to store the characters in.
+ StringBuffer s = new StringBuffer();
+
+ // Add the indicated number of instances.
+ for(int i = 0; i < number; i++) {
+ s.append(c);
+ }
+
+ // Return the string.
+ return s.toString();
+ }
+
+ /**
+ * Gets the number of digits in the base-10 String representation
+ * of an integer primitive. Negative signs are not included in the
+ * digit count.
+ * @param value - The value of which to obtain the length.
+ * @return Returns the number of digits in the String representation
+ * of the argument value.
+ */
+ public static final int getDigits(int value) {
+ return TriggerDiagnosticUtil.getDigits(value);
+ }
+
+ /**
+ * Gets the maximum value from a list of values.
+ * @param values - The values to compare.
+ * @return Returns the largest of the argument values.
+ * @throws IllegalArgumentException Occurs if no values are given.
+ */
+ public static final int max(int... values) throws IllegalArgumentException {
+ // Throw an error if no arguments are provided.
+ if(values == null || values.length == 0) {
+ throw new IllegalArgumentException("Can not determine maximum value from a list of 0 values.");
+ }
+
+ // If there is only one value, return it.
+ if(values.length == 1) { return values[0]; }
+
+ // Otherwise, get the largest value.
+ int largest = Integer.MIN_VALUE;
+ for(int value : values) {
+ if(value > largest) { largest = value; }
+ }
+
+ // Return the result.
+ return largest;
+ }
+
+ /**
+ * Gets the x-coordinate immediately to the right of the given
+ * component.
+ * @param c - The component of which to find the edge.
+ * @return Returns the x-coordinate as an <code>int</code> value.
+ */
+ public static final int getNextX(Component c) {
+ return getNextX(c, 0);
+ }
+
+ /**
+ * Gets the x-coordinate a given distance to the right edge of the
+ * argument component.
+ * @param c - The component of which to find the edge.
+ * @param spacing - The additional spacing past the edge of the
+ * component to add.
+ * @return Returns the x-coordinate as an <code>int</code> value.
+ */
+ public static final int getNextX(Component c, int spacing) {
+ return c.getX() + c.getWidth() + spacing;
+ }
+
+ /**
+ * Gets the y-coordinate immediately below the given component.
+ * @param c - The component of which to find the edge.
+ * @return Returns the y-coordinate as an <code>int</code> value.
+ */
+ public static final int getNextY(Component c) {
+ return getNextY(c, 0);
+ }
+
+ /**
+ * Gets the y-coordinate a given distance below the bottom edge
+ * of the argument component.
+ * @param c - The component of which to find the edge.
+ * @param spacing - The additional spacing past the edge of the
+ * component to add.
+ * @return Returns the y-coordinate as an <code>int</code> value.
+ */
+ public static final int getNextY(Component c, int spacing) {
+ return c.getY() + c.getHeight() + spacing;
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/OutputLogger.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/OutputLogger.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/OutputLogger.java Wed Apr 27 11:11:32 2016
@@ -3,28 +3,28 @@
public class OutputLogger {
private static StringBuffer outputBuffer = new StringBuffer();
- public static final void printf(String text, Object... args) {
- outputBuffer.append(String.format(text, args));
- }
-
- public static final void println() { printf(String.format("%n")); }
-
- public static final void println(String text) { printf(String.format("%s%n", text)); }
-
- public static final void print(String text) { printf(text); }
-
- public static final void printLog() {
- System.out.println(outputBuffer.toString());
- clearLog();
- }
-
- public static final void printNewLine() { println(); }
-
- public static final void printNewLine(int quantity) {
- for(int i = 0; i < quantity; i++) { println(); }
- }
-
- public static final void clearLog() {
- outputBuffer = new StringBuffer();
- }
+ public static final void printf(String text, Object... args) {
+ outputBuffer.append(String.format(text, args));
+ }
+
+ public static final void println() { printf(String.format("%n")); }
+
+ public static final void println(String text) { printf(String.format("%s%n", text)); }
+
+ public static final void print(String text) { printf(text); }
+
+ public static final void printLog() {
+ System.out.println(outputBuffer.toString());
+ clearLog();
+ }
+
+ public static final void printNewLine() { println(); }
+
+ public static final void printNewLine(int quantity) {
+ for(int i = 0; i < quantity; i++) { println(); }
+ }
+
+ public static final void clearLog() {
+ outputBuffer = new StringBuffer();
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/Pair.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/Pair.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/Pair.java Wed Apr 27 11:11:32 2016
@@ -8,32 +8,32 @@
* @param <F> - The object type of the second element in the pair.
*/
public class Pair<E, F> {
- private final E firstObject;
- private final F secondObject;
-
- /**
- * Creates a pair of the two indicated objects.
- * @param firstObject - The first object.
- * @param secondObject - The second object.
- */
- public Pair(E firstElement, F secondElement) {
- this.firstObject = firstElement;
- this.secondObject = secondElement;
- }
-
- /**
- * Gets the first element of the pair.
- * @return Returns the first element.
- */
- public E getFirstElement() {
- return firstObject;
- }
-
- /**
- * Gets the second element of the pair.
- * @return Returns the second element.
- */
- public F getSecondElement() {
- return secondObject;
- }
+ private final E firstObject;
+ private final F secondObject;
+
+ /**
+ * Creates a pair of the two indicated objects.
+ * @param firstElement - The first object.
+ * @param secondElement - The second object.
+ */
+ public Pair(E firstElement, F secondElement) {
+ this.firstObject = firstElement;
+ this.secondObject = secondElement;
+ }
+
+ /**
+ * Gets the first element of the pair.
+ * @return Returns the first element.
+ */
+ public E getFirstElement() {
+ return firstObject;
+ }
+
+ /**
+ * Gets the second element of the pair.
+ * @return Returns the second element.
+ */
+ public F getSecondElement() {
+ return secondObject;
+ }
}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/PairTrigger.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/PairTrigger.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/PairTrigger.java Wed Apr 27 11:11:32 2016
@@ -3,164 +3,164 @@
import org.hps.record.triggerbank.TriggerModule;
public class PairTrigger<E> extends SinglesTrigger<E> {
- // Define the supported trigger cuts.
- private static final String PAIR_ENERGY_SUM_LOW = TriggerModule.PAIR_ENERGY_SUM_LOW;
- private static final String PAIR_ENERGY_SUM_HIGH = TriggerModule.PAIR_ENERGY_SUM_HIGH;
- private static final String PAIR_ENERGY_DIFFERENCE_HIGH = TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH;
- private static final String PAIR_ENERGY_SLOPE_LOW = TriggerModule.PAIR_ENERGY_SLOPE_LOW;
- private static final String PAIR_COPLANARITY_HIGH = TriggerModule.PAIR_COPLANARITY_HIGH;
+ // Define the supported trigger cuts.
+ private static final String PAIR_ENERGY_SUM_LOW = TriggerModule.PAIR_ENERGY_SUM_LOW;
+ private static final String PAIR_ENERGY_SUM_HIGH = TriggerModule.PAIR_ENERGY_SUM_HIGH;
+ private static final String PAIR_ENERGY_DIFFERENCE_HIGH = TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH;
+ private static final String PAIR_ENERGY_SLOPE_LOW = TriggerModule.PAIR_ENERGY_SLOPE_LOW;
+ private static final String PAIR_COPLANARITY_HIGH = TriggerModule.PAIR_COPLANARITY_HIGH;
private static final String PAIR_TIME_COINCIDENCE = "pairTimeCoincidence";
-
- /**
- * Instantiates a new <code>PairTrigger</code> with all cut
- * states set to <code>false</code> and with the trigger source
- * defined according to the specified object.
- * @param source - The object from which the trigger cut states
- * are derived.
- */
- public PairTrigger(E source, int triggerNum) {
- // Instantiate the superclass.
- super(source, triggerNum);
-
- // Add the supported cuts types.
- addValidCut(PAIR_ENERGY_SUM_LOW);
- addValidCut(PAIR_ENERGY_SUM_HIGH);
- addValidCut(PAIR_ENERGY_DIFFERENCE_HIGH);
- addValidCut(PAIR_ENERGY_SLOPE_LOW);
- addValidCut(PAIR_COPLANARITY_HIGH);
- addValidCut(PAIR_TIME_COINCIDENCE);
- }
-
- /**
- * Gets whether the pair energy sum lower bound cut was met.
- * @return Returns <code>true</code> if the cut was met and
- * <code>false</code> otherwise.
- */
- public boolean getStateEnergySumLow() {
- return getCutState(PAIR_ENERGY_SUM_LOW);
- }
-
- /**
- * Gets whether the pair energy sum upper bound cut was met.
- * @return Returns <code>true</code> if the cut was met and
- * <code>false</code> otherwise.
- */
- public boolean getStateEnergySumHigh() {
- return getCutState(PAIR_ENERGY_SUM_HIGH);
- }
-
- /**
- * Gets whether both the pair energy sum upper and lower bound cuts
- * were met.
- * @return Returns <code>true</code> if the cuts were met and
- * <code>false</code> otherwise.
- */
- public boolean getStateEnergySum() {
- return getCutState(PAIR_ENERGY_SUM_HIGH);
- }
-
- /**
- * Gets whether the pair energy difference cut was met.
- * @return Returns <code>true</code> if the cut was met and
- * <code>false</code> otherwise.
- */
- public boolean getStateEnergyDifference() {
- return getCutState(PAIR_ENERGY_DIFFERENCE_HIGH);
- }
-
- /**
- * Gets whether the pair energy slope cut was met.
- * @return Returns <code>true</code> if the cut was met and
- * <code>false</code> otherwise.
- */
- public boolean getStateEnergySlope() {
- return getCutState(PAIR_ENERGY_SLOPE_LOW);
- }
-
- /**
- * Gets whether the pair coplanarity cut was met.
- * @return Returns <code>true</code> if the cut was met and
- * <code>false</code> otherwise.
- */
- public boolean getStateCoplanarity() {
- return getCutState(PAIR_COPLANARITY_HIGH);
- }
-
- /**
- * Gets whether the time coincidence cut was met.
- * @return Returns <code>true</code> if the cut was met and
- * <code>false</code> otherwise.
- */
- public boolean getStateTimeCoincidence() {
- return getCutState(PAIR_TIME_COINCIDENCE);
- }
-
- /**
- * Sets whether the conditions for the pair energy sum lower bound
- * cut were met.
- * @param state - <code>true</code> indicates that the cut conditions
- * were met and <code>false</code> that they were not.
- */
- public void setStateEnergySumLow(boolean state) {
- setCutState(PAIR_ENERGY_SUM_LOW, state);
- }
-
- /**
- * Sets whether the conditions for the pair energy sum upper bound
- * cut were met.
- * @param state - <code>true</code> indicates that the cut conditions
- * were met and <code>false</code> that they were not.
- */
- public void setStateEnergySumHigh(boolean state) {
- setCutState(PAIR_ENERGY_SUM_HIGH, state);
- }
-
- /**
- * Sets whether the conditions for the pair energy difference cut
- * were met.
- * @param state - <code>true</code> indicates that the cut conditions
- * were met and <code>false</code> that they were not.
- */
- public void setStateEnergyDifference(boolean state) {
- setCutState(PAIR_ENERGY_DIFFERENCE_HIGH, state);
- }
-
- /**
- * Sets whether the conditions for the pair energy slope cut were
- * met.
- * @param state - <code>true</code> indicates that the cut conditions
- * were met and <code>false</code> that they were not.
- */
- public void setStateEnergySlope(boolean state) {
- setCutState(PAIR_ENERGY_SLOPE_LOW, state);
- }
-
- /**
- * Sets whether the conditions for the pair coplanarity cut were
- * met.
- * @param state - <code>true</code> indicates that the cut conditions
- * were met and <code>false</code> that they were not.
- */
- public void setStateCoplanarity(boolean state) {
- setCutState(PAIR_COPLANARITY_HIGH, state);
- }
-
- /**
- * Sets whether the conditions for the time coincidence cut were
- * met.
- * @param state - <code>true</code> indicates that the cut conditions
- * were met and <code>false</code> that they were not.
- */
- public void setStateTimeCoincidence(boolean state) {
- setCutState(PAIR_TIME_COINCIDENCE, state);
- }
-
- @Override
- public String toString() {
- return String.format("EClusterLow: %d; EClusterHigh %d; HitCount: %d; ESumLow: %d, ESumHigh: %d, EDiff: %d, ESlope: %d, Coplanarity: %d",
- getStateClusterEnergyLow() ? 1 : 0, getStateClusterEnergyHigh() ? 1 : 0,
- getStateHitCount() ? 1 : 0, getStateEnergySumLow() ? 1 : 0,
- getStateEnergySumHigh() ? 1 : 0, getStateEnergyDifference() ? 1 : 0,
- getStateEnergySlope() ? 1 : 0, getStateCoplanarity() ? 1 : 0);
- }
-}
+
+ /**
+ * Instantiates a new <code>PairTrigger</code> with all cut
+ * states set to <code>false</code> and with the trigger source
+ * defined according to the specified object.
+ * @param source - The object from which the trigger cut states
+ * are derived.
+ */
+ public PairTrigger(E source, int triggerNum) {
+ // Instantiate the superclass.
+ super(source, triggerNum);
+
+ // Add the supported cuts types.
+ addValidCut(PAIR_ENERGY_SUM_LOW);
+ addValidCut(PAIR_ENERGY_SUM_HIGH);
+ addValidCut(PAIR_ENERGY_DIFFERENCE_HIGH);
+ addValidCut(PAIR_ENERGY_SLOPE_LOW);
+ addValidCut(PAIR_COPLANARITY_HIGH);
+ addValidCut(PAIR_TIME_COINCIDENCE);
+ }
+
+ /**
+ * Gets whether the pair energy sum lower bound cut was met.
+ * @return Returns <code>true</code> if the cut was met and
+ * <code>false</code> otherwise.
+ */
+ public boolean getStateEnergySumLow() {
+ return getCutState(PAIR_ENERGY_SUM_LOW);
+ }
+
+ /**
+ * Gets whether the pair energy sum upper bound cut was met.
+ * @return Returns <code>true</code> if the cut was met and
+ * <code>false</code> otherwise.
+ */
+ public boolean getStateEnergySumHigh() {
+ return getCutState(PAIR_ENERGY_SUM_HIGH);
+ }
+
+ /**
+ * Gets whether both the pair energy sum upper and lower bound cuts
+ * were met.
+ * @return Returns <code>true</code> if the cuts were met and
+ * <code>false</code> otherwise.
+ */
+ public boolean getStateEnergySum() {
+ return getCutState(PAIR_ENERGY_SUM_HIGH);
+ }
+
+ /**
+ * Gets whether the pair energy difference cut was met.
+ * @return Returns <code>true</code> if the cut was met and
+ * <code>false</code> otherwise.
+ */
+ public boolean getStateEnergyDifference() {
+ return getCutState(PAIR_ENERGY_DIFFERENCE_HIGH);
+ }
+
+ /**
+ * Gets whether the pair energy slope cut was met.
+ * @return Returns <code>true</code> if the cut was met and
+ * <code>false</code> otherwise.
+ */
+ public boolean getStateEnergySlope() {
+ return getCutState(PAIR_ENERGY_SLOPE_LOW);
+ }
+
+ /**
+ * Gets whether the pair coplanarity cut was met.
+ * @return Returns <code>true</code> if the cut was met and
+ * <code>false</code> otherwise.
+ */
+ public boolean getStateCoplanarity() {
+ return getCutState(PAIR_COPLANARITY_HIGH);
+ }
+
+ /**
+ * Gets whether the time coincidence cut was met.
+ * @return Returns <code>true</code> if the cut was met and
+ * <code>false</code> otherwise.
+ */
+ public boolean getStateTimeCoincidence() {
+ return getCutState(PAIR_TIME_COINCIDENCE);
+ }
+
+ /**
+ * Sets whether the conditions for the pair energy sum lower bound
+ * cut were met.
+ * @param state - <code>true</code> indicates that the cut conditions
+ * were met and <code>false</code> that they were not.
+ */
+ public void setStateEnergySumLow(boolean state) {
+ setCutState(PAIR_ENERGY_SUM_LOW, state);
+ }
+
+ /**
+ * Sets whether the conditions for the pair energy sum upper bound
+ * cut were met.
+ * @param state - <code>true</code> indicates that the cut conditions
+ * were met and <code>false</code> that they were not.
+ */
+ public void setStateEnergySumHigh(boolean state) {
+ setCutState(PAIR_ENERGY_SUM_HIGH, state);
+ }
+
+ /**
+ * Sets whether the conditions for the pair energy difference cut
+ * were met.
+ * @param state - <code>true</code> indicates that the cut conditions
+ * were met and <code>false</code> that they were not.
+ */
+ public void setStateEnergyDifference(boolean state) {
+ setCutState(PAIR_ENERGY_DIFFERENCE_HIGH, state);
+ }
+
+ /**
+ * Sets whether the conditions for the pair energy slope cut were
+ * met.
+ * @param state - <code>true</code> indicates that the cut conditions
+ * were met and <code>false</code> that they were not.
+ */
+ public void setStateEnergySlope(boolean state) {
+ setCutState(PAIR_ENERGY_SLOPE_LOW, state);
+ }
+
+ /**
+ * Sets whether the conditions for the pair coplanarity cut were
+ * met.
+ * @param state - <code>true</code> indicates that the cut conditions
+ * were met and <code>false</code> that they were not.
+ */
+ public void setStateCoplanarity(boolean state) {
+ setCutState(PAIR_COPLANARITY_HIGH, state);
+ }
+
+ /**
+ * Sets whether the conditions for the time coincidence cut were
+ * met.
+ * @param state - <code>true</code> indicates that the cut conditions
+ * were met and <code>false</code> that they were not.
+ */
+ public void setStateTimeCoincidence(boolean state) {
+ setCutState(PAIR_TIME_COINCIDENCE, state);
+ }
+
+ @Override
+ public String toString() {
+ return String.format("EClusterLow: %d; EClusterHigh %d; HitCount: %d; ESumLow: %d, ESumHigh: %d, EDiff: %d, ESlope: %d, Coplanarity: %d",
+ getStateClusterEnergyLow() ? 1 : 0, getStateClusterEnergyHigh() ? 1 : 0,
+ getStateHitCount() ? 1 : 0, getStateEnergySumLow() ? 1 : 0,
+ getStateEnergySumHigh() ? 1 : 0, getStateEnergyDifference() ? 1 : 0,
+ getStateEnergySlope() ? 1 : 0, getStateCoplanarity() ? 1 : 0);
+ }
+}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/SinglesTrigger.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/SinglesTrigger.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/SinglesTrigger.java Wed Apr 27 11:11:32 2016
@@ -3,151 +3,151 @@
import org.hps.record.triggerbank.TriggerModule;
public class SinglesTrigger<E> extends Trigger<E> {
- // Define the supported trigger cuts.
- private static final String CLUSTER_HIT_COUNT_LOW = TriggerModule.CLUSTER_HIT_COUNT_LOW;
- private static final String CLUSTER_SEED_ENERGY_LOW = TriggerModule.CLUSTER_SEED_ENERGY_LOW;
- private static final String CLUSTER_SEED_ENERGY_HIGH = TriggerModule.CLUSTER_SEED_ENERGY_HIGH;
- private static final String CLUSTER_TOTAL_ENERGY_LOW = TriggerModule.CLUSTER_TOTAL_ENERGY_LOW;
- private static final String CLUSTER_TOTAL_ENERGY_HIGH = TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH;
-
- /**
- * Instantiates a new <code>SinglesTrigger</code> with all cut
- * states set to <code>false</code> and with the trigger source
- * defined according to the specified object.
- * @param source - The object from which the trigger cut states
- * are derived.
- */
- public SinglesTrigger(E source, int triggerNum) {
- // Instantiate the superclass.
- super(source, triggerNum);
-
- // Add the supported cuts types.
- addValidCut(CLUSTER_HIT_COUNT_LOW);
- addValidCut(CLUSTER_SEED_ENERGY_LOW);
- addValidCut(CLUSTER_SEED_ENERGY_HIGH);
- addValidCut(CLUSTER_TOTAL_ENERGY_LOW);
- addValidCut(CLUSTER_TOTAL_ENERGY_HIGH);
- }
-
- /**
- * Gets whether the cluster hit count cut was met.
- * @return Returns <code>true</code> if the cut was met and
- * <code>false</code> otherwise.
- */
- public boolean getStateHitCount() {
- return getCutState(CLUSTER_HIT_COUNT_LOW);
- }
-
- /**
- * Gets whether the cluster seed energy lower bound cut was met.
- * @return Returns <code>true</code> if the cut was met and
- * <code>false</code> otherwise.
- */
- public boolean getStateSeedEnergyLow() {
- return getCutState(CLUSTER_SEED_ENERGY_LOW);
- }
-
- /**
- * Gets whether the cluster seed energy upper bound cut was met.
- * @return Returns <code>true</code> if the cut was met and
- * <code>false</code> otherwise.
- */
- public boolean getStateSeedEnergyHigh() {
- return getCutState(CLUSTER_SEED_ENERGY_HIGH);
- }
-
- /**
- * Gets whether both the cluster seed energy upper and lower bound
- * cuts were met.
- * @return Returns <code>true</code> if the cuts were met and
- * <code>false</code> otherwise.
- */
- public boolean getStateSeedEnergy() {
- return getCutState(CLUSTER_SEED_ENERGY_LOW) && getCutState(CLUSTER_SEED_ENERGY_HIGH);
- }
-
- /**
- * Gets whether the cluster total energy lower bound cut was met.
- * @return Returns <code>true</code> if the cut was met and
- * <code>false</code> otherwise.
- */
- public boolean getStateClusterEnergyLow() {
- return getCutState(CLUSTER_TOTAL_ENERGY_LOW);
- }
-
- /**
- * Gets whether the cluster total energy upper bound cut was met.
- * @return Returns <code>true</code> if the cut was met and
- * <code>false</code> otherwise.
- */
- public boolean getStateClusterEnergyHigh() {
- return getCutState(CLUSTER_TOTAL_ENERGY_HIGH);
- }
-
- /**
- * Gets whether both the cluster total energy upper and lower bound
- * cuts were met.
- * @return Returns <code>true</code> if the cuts were met and
- * <code>false</code> otherwise.
- */
- public boolean getStateClusterEnergy() {
- return getCutState(CLUSTER_TOTAL_ENERGY_LOW) && getCutState(CLUSTER_TOTAL_ENERGY_HIGH);
- }
-
- /**
- * Sets whether the conditions for the cluster hit count cut were
- * met.
- * @param state - <code>true</code> indicates that the cut conditions
- * were met and <code>false</code> that they were not.
- */
- public void setStateHitCount(boolean state) {
- setCutState(CLUSTER_HIT_COUNT_LOW, state);
- }
-
- /**
- * Sets whether the conditions for the cluster seed energy lower
- * bound cut were met.
- * @param state - <code>true</code> indicates that the cut conditions
- * were met and <code>false</code> that they were not.
- */
- public void setStateSeedEnergyLow(boolean state) {
- setCutState(CLUSTER_SEED_ENERGY_LOW, state);
- }
-
- /**
- * Sets whether the conditions for the cluster seed energy upper
- * bound cut were met.
- * @param state - <code>true</code> indicates that the cut conditions
- * were met and <code>false</code> that they were not.
- */
- public void setStateSeedEnergyHigh(boolean state) {
- setCutState(CLUSTER_SEED_ENERGY_HIGH, state);
- }
-
- /**
- * Sets whether the conditions for the cluster total energy lower
- * bound cut were met.
- * @param state - <code>true</code> indicates that the cut conditions
- * were met and <code>false</code> that they were not.
- */
- public void setStateClusterEnergyLow(boolean state) {
- setCutState(CLUSTER_TOTAL_ENERGY_LOW, state);
- }
-
- /**
- * Sets whether the conditions for the cluster total energy upper
- * bound cut were met.
- * @param state - <code>true</code> indicates that the cut conditions
- * were met and <code>false</code> that they were not.
- */
- public void setStateClusterEnergyHigh(boolean state) {
- setCutState(CLUSTER_TOTAL_ENERGY_HIGH, state);
- }
-
- @Override
- public String toString() {
- return String.format("EClusterLow: %d; EClusterHigh %d; HitCount: %d",
- getStateClusterEnergyLow() ? 1 : 0, getStateClusterEnergyHigh() ? 1 : 0,
- getStateHitCount() ? 1 : 0);
- }
-}
+ // Define the supported trigger cuts.
+ private static final String CLUSTER_HIT_COUNT_LOW = TriggerModule.CLUSTER_HIT_COUNT_LOW;
+ private static final String CLUSTER_SEED_ENERGY_LOW = TriggerModule.CLUSTER_SEED_ENERGY_LOW;
+ private static final String CLUSTER_SEED_ENERGY_HIGH = TriggerModule.CLUSTER_SEED_ENERGY_HIGH;
+ private static final String CLUSTER_TOTAL_ENERGY_LOW = TriggerModule.CLUSTER_TOTAL_ENERGY_LOW;
+ private static final String CLUSTER_TOTAL_ENERGY_HIGH = TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH;
+
+ /**
+ * Instantiates a new <code>SinglesTrigger</code> with all cut
+ * states set to <code>false</code> and with the trigger source
+ * defined according to the specified object.
+ * @param source - The object from which the trigger cut states
+ * are derived.
+ */
+ public SinglesTrigger(E source, int triggerNum) {
+ // Instantiate the superclass.
+ super(source, triggerNum);
+
+ // Add the supported cuts types.
+ addValidCut(CLUSTER_HIT_COUNT_LOW);
+ addValidCut(CLUSTER_SEED_ENERGY_LOW);
+ addValidCut(CLUSTER_SEED_ENERGY_HIGH);
+ addValidCut(CLUSTER_TOTAL_ENERGY_LOW);
+ addValidCut(CLUSTER_TOTAL_ENERGY_HIGH);
+ }
+
+ /**
+ * Gets whether the cluster hit count cut was met.
+ * @return Returns <code>true</code> if the cut was met and
+ * <code>false</code> otherwise.
+ */
+ public boolean getStateHitCount() {
+ return getCutState(CLUSTER_HIT_COUNT_LOW);
+ }
+
+ /**
+ * Gets whether the cluster seed energy lower bound cut was met.
+ * @return Returns <code>true</code> if the cut was met and
+ * <code>false</code> otherwise.
+ */
+ public boolean getStateSeedEnergyLow() {
+ return getCutState(CLUSTER_SEED_ENERGY_LOW);
+ }
+
+ /**
+ * Gets whether the cluster seed energy upper bound cut was met.
+ * @return Returns <code>true</code> if the cut was met and
+ * <code>false</code> otherwise.
+ */
+ public boolean getStateSeedEnergyHigh() {
+ return getCutState(CLUSTER_SEED_ENERGY_HIGH);
+ }
+
+ /**
+ * Gets whether both the cluster seed energy upper and lower bound
+ * cuts were met.
+ * @return Returns <code>true</code> if the cuts were met and
+ * <code>false</code> otherwise.
+ */
+ public boolean getStateSeedEnergy() {
+ return getCutState(CLUSTER_SEED_ENERGY_LOW) && getCutState(CLUSTER_SEED_ENERGY_HIGH);
+ }
+
+ /**
+ * Gets whether the cluster total energy lower bound cut was met.
+ * @return Returns <code>true</code> if the cut was met and
+ * <code>false</code> otherwise.
+ */
+ public boolean getStateClusterEnergyLow() {
+ return getCutState(CLUSTER_TOTAL_ENERGY_LOW);
+ }
+
+ /**
+ * Gets whether the cluster total energy upper bound cut was met.
+ * @return Returns <code>true</code> if the cut was met and
+ * <code>false</code> otherwise.
+ */
+ public boolean getStateClusterEnergyHigh() {
+ return getCutState(CLUSTER_TOTAL_ENERGY_HIGH);
+ }
+
+ /**
+ * Gets whether both the cluster total energy upper and lower bound
+ * cuts were met.
+ * @return Returns <code>true</code> if the cuts were met and
+ * <code>false</code> otherwise.
+ */
+ public boolean getStateClusterEnergy() {
+ return getCutState(CLUSTER_TOTAL_ENERGY_LOW) && getCutState(CLUSTER_TOTAL_ENERGY_HIGH);
+ }
+
+ /**
+ * Sets whether the conditions for the cluster hit count cut were
+ * met.
+ * @param state - <code>true</code> indicates that the cut conditions
+ * were met and <code>false</code> that they were not.
+ */
+ public void setStateHitCount(boolean state) {
+ setCutState(CLUSTER_HIT_COUNT_LOW, state);
+ }
+
+ /**
+ * Sets whether the conditions for the cluster seed energy lower
+ * bound cut were met.
+ * @param state - <code>true</code> indicates that the cut conditions
+ * were met and <code>false</code> that they were not.
+ */
+ public void setStateSeedEnergyLow(boolean state) {
+ setCutState(CLUSTER_SEED_ENERGY_LOW, state);
+ }
+
+ /**
+ * Sets whether the conditions for the cluster seed energy upper
+ * bound cut were met.
+ * @param state - <code>true</code> indicates that the cut conditions
+ * were met and <code>false</code> that they were not.
+ */
+ public void setStateSeedEnergyHigh(boolean state) {
+ setCutState(CLUSTER_SEED_ENERGY_HIGH, state);
+ }
+
+ /**
+ * Sets whether the conditions for the cluster total energy lower
+ * bound cut were met.
+ * @param state - <code>true</code> indicates that the cut conditions
+ * were met and <code>false</code> that they were not.
+ */
+ public void setStateClusterEnergyLow(boolean state) {
+ setCutState(CLUSTER_TOTAL_ENERGY_LOW, state);
+ }
+
+ /**
+ * Sets whether the conditions for the cluster total energy upper
+ * bound cut were met.
+ * @param state - <code>true</code> indicates that the cut conditions
+ * were met and <code>false</code> that they were not.
+ */
+ public void setStateClusterEnergyHigh(boolean state) {
+ setCutState(CLUSTER_TOTAL_ENERGY_HIGH, state);
+ }
+
+ @Override
+ public String toString() {
+ return String.format("EClusterLow: %d; EClusterHigh %d; HitCount: %d",
+ getStateClusterEnergyLow() ? 1 : 0, getStateClusterEnergyHigh() ? 1 : 0,
+ getStateHitCount() ? 1 : 0);
+ }
+}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/Trigger.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/Trigger.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/Trigger.java Wed Apr 27 11:11:32 2016
@@ -14,139 +14,139 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public abstract class Trigger<E> {
- // Track whether the trigger conditions were met.
- private boolean passTrigger = false;
- // Store the cut condition states.
- private Map<String, Boolean> passMap = new HashMap<String, Boolean>();
- // Store the cluster associated with the trigger.
- private final E source;
- // Store the trigger number.
- private final int triggerNum;
-
- /**
- * Creates a new <code>Trigger</code> object with the argument
- * specifying the object from whence the trigger state is derived.
- * @param source - The trigger source object.
- */
- protected Trigger(E source) {
- this(source, -1);
- }
-
- /**
- * Creates a new <code>Trigger</code> object with the argument
- * specifying the object from whence the trigger state is derived.
- * @param source - The trigger source object.
- * @param triggerNum - The number of the trigger.
- */
- protected Trigger(E source, int triggerNum) {
- this.source = source;
- this.triggerNum = triggerNum;
- }
-
- /**
- * Adds a cut to the set of cuts tracked by this trigger.
- * @param cut - The identifier for the cut.
- */
- protected void addValidCut(String cut) {
- passMap.put(cut, new Boolean(false));
- }
-
- /**
- * Gets the state of the specified cut.
- * @param cut - The identifier for the cut.
- * @return Returns <code>true</code> if the conditions for the
- * specified cut were met and <code>false</code> otherwise.
- * @throws IllegalArgumentException Occurs if the specified cut
- * is not supported by the object.
- */
- protected boolean getCutState(String cut) throws IllegalArgumentException {
- if(passMap.containsKey(cut)) {
- return passMap.get(cut);
- } else {
- throw new IllegalArgumentException(String.format("Trigger cut \"%s\" is not a supported trigger cut.", cut));
- }
- }
-
- /**
- * Gets the number of the trigger. If the trigger has no number,
- * it will return <code>-1</code>.
- * @return Returns the trigger number as an <code>int</code>.
- */
- public int getTriggerNumber() {
- return triggerNum;
- }
-
- /**
- * Gets the object to which the trigger cuts are applied.
- * @return Returns the trigger source object.
- */
- public E getTriggerSource() { return source; }
-
- /**
- * Gets whether the conditions for the trigger were met.
- * @return Returns <code>true</code> if the conditions for the
- * trigger were met and <code>false</code> if they were not.
- */
- public boolean getTriggerState() {
- return passTrigger;
- }
-
- /**
- * Removes a cut from the set of cuts tracked by the trigger.
- * @param cut - The identifier for the cut.
- */
- protected void removeValidCut(String cut) {
- passMap.remove(cut);
- }
-
- /**
- * Checks whether the all of the trigger cut conditions were met.
- * @return Returns <code>true</code> if all of the cut conditions
- * were met and <code>false</code> otherwise.
- */
- private boolean isValidTrigger() {
- // Iterate over all of the cuts and look for any that have not
- // been met.
- for(Entry<String, Boolean> cut : passMap.entrySet()) {
- if(!cut.getValue()) { return false; }
- }
-
- // If there are no cut conditions that have not been met, then
- // the trigger is valid.
- return true;
- }
-
- /**
- * Sets whether the conditions for the specified cut were met.
- * @param cut - The identifier for the cut.
- * @param state - <code>true</code> indicates that the conditions
- * for the cut were met and <code>false</code> that they were not.
- * @throws IllegalArgumentException Occurs if the specified cut
- * is not supported by the object.
- */
- protected void setCutState(String cut, boolean state) throws IllegalArgumentException {
- if(passMap.containsKey(cut)) {
- // Set the cut state.
- passMap.put(cut, state);
-
- // If the cut state is true, then all cut conditions may have
- // been met. Check whether this is true and, if so, set the
- // trigger state accordingly.
- if(state && isValidTrigger()) { passTrigger = true; }
- else { passTrigger = false; }
- } else {
- throw new IllegalArgumentException(String.format("Trigger cut \"%s\" is not a supported trigger cut.", cut));
- }
- }
-
- /**
- * Indicates whether the specified cut state is tracked by this
- * object or not.
- * @param cut - The identifier for the cut.
- * @return Returns <code>true</code> if the cut state is tracked
- * by this object and <code>false</code> otherwise.
- */
- protected boolean supportsCut(String cut) {
- return passMap.containsKey(cut);
- }
-}
+ // Track whether the trigger conditions were met.
+ private boolean passTrigger = false;
+ // Store the cut condition states.
+ private Map<String, Boolean> passMap = new HashMap<String, Boolean>();
+ // Store the cluster associated with the trigger.
+ private final E source;
+ // Store the trigger number.
+ private final int triggerNum;
+
+ /**
+ * Creates a new <code>Trigger</code> object with the argument
+ * specifying the object from whence the trigger state is derived.
+ * @param source - The trigger source object.
+ */
+ protected Trigger(E source) {
+ this(source, -1);
+ }
+
+ /**
+ * Creates a new <code>Trigger</code> object with the argument
+ * specifying the object from whence the trigger state is derived.
+ * @param source - The trigger source object.
+ * @param triggerNum - The number of the trigger.
+ */
+ protected Trigger(E source, int triggerNum) {
+ this.source = source;
+ this.triggerNum = triggerNum;
+ }
+
+ /**
+ * Adds a cut to the set of cuts tracked by this trigger.
+ * @param cut - The identifier for the cut.
+ */
+ protected void addValidCut(String cut) {
+ passMap.put(cut, new Boolean(false));
+ }
+
+ /**
+ * Gets the state of the specified cut.
+ * @param cut - The identifier for the cut.
+ * @return Returns <code>true</code> if the conditions for the
+ * specified cut were met and <code>false</code> otherwise.
+ * @throws IllegalArgumentException Occurs if the specified cut
+ * is not supported by the object.
+ */
+ protected boolean getCutState(String cut) throws IllegalArgumentException {
+ if(passMap.containsKey(cut)) {
+ return passMap.get(cut);
+ } else {
+ throw new IllegalArgumentException(String.format("Trigger cut \"%s\" is not a supported trigger cut.", cut));
+ }
+ }
+
+ /**
+ * Gets the number of the trigger. If the trigger has no number,
+ * it will return <code>-1</code>.
+ * @return Returns the trigger number as an <code>int</code>.
+ */
+ public int getTriggerNumber() {
+ return triggerNum;
+ }
+
+ /**
+ * Gets the object to which the trigger cuts are applied.
+ * @return Returns the trigger source object.
+ */
+ public E getTriggerSource() { return source; }
+
+ /**
+ * Gets whether the conditions for the trigger were met.
+ * @return Returns <code>true</code> if the conditions for the
+ * trigger were met and <code>false</code> if they were not.
+ */
+ public boolean getTriggerState() {
+ return passTrigger;
+ }
+
+ /**
+ * Removes a cut from the set of cuts tracked by the trigger.
+ * @param cut - The identifier for the cut.
+ */
+ protected void removeValidCut(String cut) {
+ passMap.remove(cut);
+ }
+
+ /**
+ * Checks whether the all of the trigger cut conditions were met.
+ * @return Returns <code>true</code> if all of the cut conditions
+ * were met and <code>false</code> otherwise.
+ */
+ private boolean isValidTrigger() {
+ // Iterate over all of the cuts and look for any that have not
+ // been met.
+ for(Entry<String, Boolean> cut : passMap.entrySet()) {
+ if(!cut.getValue()) { return false; }
+ }
+
+ // If there are no cut conditions that have not been met, then
+ // the trigger is valid.
+ return true;
+ }
+
+ /**
+ * Sets whether the conditions for the specified cut were met.
+ * @param cut - The identifier for the cut.
+ * @param state - <code>true</code> indicates that the conditions
+ * for the cut were met and <code>false</code> that they were not.
+ * @throws IllegalArgumentException Occurs if the specified cut
+ * is not supported by the object.
+ */
+ protected void setCutState(String cut, boolean state) throws IllegalArgumentException {
+ if(passMap.containsKey(cut)) {
+ // Set the cut state.
+ passMap.put(cut, state);
+
+ // If the cut state is true, then all cut conditions may have
+ // been met. Check whether this is true and, if so, set the
+ // trigger state accordingly.
+ if(state && isValidTrigger()) { passTrigger = true; }
+ else { passTrigger = false; }
+ } else {
+ throw new IllegalArgumentException(String.format("Trigger cut \"%s\" is not a supported trigger cut.", cut));
+ }
+ }
+
+ /**
+ * Indicates whether the specified cut state is tracked by this
+ * object or not.
+ * @param cut - The identifier for the cut.
+ * @return Returns <code>true</code> if the cut state is tracked
+ * by this object and <code>false</code> otherwise.
+ */
+ protected boolean supportsCut(String cut) {
+ return passMap.containsKey(cut);
+ }
+}
Modified: java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/TriggerDiagnosticUtil.java
=============================================================================
--- java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/TriggerDiagnosticUtil.java (original)
+++ java/branches/HPSJAVA-409/analysis/src/main/java/org/hps/analysis/trigger/util/TriggerDiagnosticUtil.java Wed Apr 27 11:11:32 2016
@@ -15,208 +15,208 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public class TriggerDiagnosticUtil {
- // Cluster match state variables.
- public static final byte CLUSTER_STATE_MATCHED = 0;
- public static final byte CLUSTER_STATE_FAIL_POSITION = 1;
- public static final byte CLUSTER_STATE_FAIL_ENERGY = 2;
- public static final byte CLUSTER_STATE_FAIL_HIT_COUNT = 3;
- public static final byte CLUSTER_STATE_FAIL_TIME = 4;
- public static final byte CLUSTER_STATE_FAIL_UNKNOWN = 5;
-
- // Trigger match cut IDs.
- public static final int SINGLES_ENERGY_MIN = 0;
- public static final int SINGLES_ENERGY_MAX = 1;
- public static final int SINGLES_HIT_COUNT = 2;
- public static final int PAIR_ENERGY_SUM = 0;
- public static final int PAIR_ENERGY_DIFF = 1;
- public static final int PAIR_ENERGY_SLOPE = 2;
- public static final int PAIR_COPLANARITY = 3;
-
- // Trigger type variables.
- public static final int TRIGGER_PULSER = TriggerStatModule.PULSER;
- public static final int TRIGGER_COSMIC = TriggerStatModule.COSMIC;
- public static final int TRIGGER_SINGLES_0 = TriggerStatModule.SINGLES_0;
- public static final int TRIGGER_SINGLES_1 = TriggerStatModule.SINGLES_1;
- public static final int TRIGGER_PAIR_0 = TriggerStatModule.PAIR_0;
- public static final int TRIGGER_PAIR_1 = TriggerStatModule.PAIR_1;
- public static final String[] TRIGGER_NAME = { "Singles 0", "Singles 1", "Pair 0", "Pair 1", "Pulser", "Cosmic" };
-
- /**
- * Convenience method that writes the position of a cluster in the
- * form (ix, iy).
- * @param cluster - The cluster.
- * @return Returns the cluster position as a <code>String</code>.
- */
- public static final String clusterPositionString(Cluster cluster) {
- return String.format("(%3d, %3d)",
- cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix"),
- cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy"));
- }
-
- /**
- * Convenience method that writes the position of a cluster in the
- * form (ix, iy).
- * @param cluster - The cluster.
- * @return Returns the cluster position as a <code>String</code>.
- */
- public static final String clusterPositionString(SSPCluster cluster) {
- return String.format("(%3d, %3d)", cluster.getXIndex(), cluster.getYIndex());
- }
-
- /**
- * Convenience method that writes the information in a cluster to
- * a <code>String</code>.
- * @param cluster - The cluster.
- * @return Returns the cluster information as a <code>String</code>.
- */
- public static final String clusterToString(Cluster cluster) {
- return String.format("Cluster at (%3d, %3d) with %.3f GeV and %d hits at %4.0f ns.",
- cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix"),
- cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy"),
- cluster.getEnergy(), cluster.getCalorimeterHits().size(),
- cluster.getCalorimeterHits().get(0).getTime());
- }
-
- /**
- * Convenience method that writes the information in a cluster to
- * a <code>String</code>.
- * @param cluster - The cluster.
- * @return Returns the cluster information as a <code>String</code>.
- */
- public static final String clusterToString(SSPCluster cluster) {
- return String.format("Cluster at (%3d, %3d) with %.3f GeV and %d hits at %4d ns.",
- cluster.getXIndex(), cluster.getYIndex(), cluster.getEnergy(),
- cluster.getHitCount(), cluster.getTime());
- }
-
- /**
- * Gets the x/y-indices of the cluster.
- * @param cluster - The cluster of which to obtain the indices.
- * @return Returns the indices as a <code>Point</code> object.
- */
- public static final Point getClusterPosition(Cluster cluster) {
- return new Point(getXIndex(cluster), getYIndex(cluster));
- }
-
- /**
- * Gets the x/y-indices of the cluster.
- * @param cluster - The cluster of which to obtain the indices.
- * @return Returns the indices as a <code>Point</code> object.
- */
- public static final Point getClusterPosition(SSPCluster cluster) {
- return new Point(cluster.getXIndex(), cluster.getYIndex());
- }
-
- /**
- * Gets the time stamp of the cluster in nanoseconds.
- * @param cluster - The cluster.
- * @return Returns the time-stamp.
- */
- public static final double getClusterTime(Cluster cluster) {
- return cluster.getCalorimeterHits().get(0).getTime();
- }
-
- /**
- * Gets the time stamp of the cluster in nanoseconds.
- * @param cluster - The cluster.
- * @return Returns the time-stamp.
- */
- public static final int getClusterTime(SSPCluster cluster) {
- return cluster.getTime();
- }
-
- /**
- * Gets the number of digits in the base-10 String representation
- * of an integer primitive. Negative signs are not included in the
- * digit count.
- * @param value - The value of which to obtain the length.
- * @return Returns the number of digits in the String representation
- * of the argument value.
- */
- public static final int getDigits(int value) {
- if(value < 0) { return Integer.toString(value).length() - 1; }
- else { return Integer.toString(value).length(); }
- }
-
- /**
- * Gets the number of hits in a cluster.
- * @param cluster - The cluster.
- * @return Returns the number of hits in the cluster.
- */
- public static final int getHitCount(Cluster cluster) {
- return cluster.getCalorimeterHits().size();
- }
-
- /**
- * Gets the number of hits in a cluster.
- * @param cluster - The cluster.
- * @return Returns the number of hits in the cluster.
- */
- public static final int getHitCount(SSPCluster cluster) {
- return cluster.getHitCount();
- }
-
- /**
- * Gets the x-index of the cluster's seed hit.
- * @param cluster - The cluster.
- * @return Returns the x-index.
- */
- public static final int getXIndex(Cluster cluster) {
- return cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix");
- }
-
- /**
- * Gets the x-index of the cluster's seed hit.
- * @param cluster - The cluster.
- * @return Returns the x-index.
- */
- public static final int getXIndex(SSPCluster cluster) {
- return cluster.getXIndex();
- }
-
- /**
- * Gets the y-index of the cluster's seed hit.
- * @param cluster - The cluster.
- * @return Returns the y-index.
- */
- public static final int getYIndex(Cluster cluster) {
- return cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy");
- }
-
- /**
- * Gets the y-index of the cluster's seed hit.
- * @param cluster - The cluster.
- * @return Returns the y-index.
- */
- public static final int getYIndex(SSPCluster cluster) {
- return cluster.getYIndex();
- }
-
- /**
- * Checks whether all of the hits in a cluster are within the safe
- * region of the FADC output window.
- * @param reconCluster - The cluster to check.
- * @return Returns <code>true</code> if the cluster is safe and
- * returns <code>false</code> otherwise.
- */
- public static final boolean isVerifiable(Cluster reconCluster, int nsa, int nsb, int windowWidth) {
- // Iterate over the hits in the cluster.
- for(CalorimeterHit hit : reconCluster.getCalorimeterHits()) {
- // Check that none of the hits are within the disallowed
- // region of the FADC readout window.
- if(hit.getTime() <= nsb || hit.getTime() >= (windowWidth - nsa)) {
- return false;
- }
-
- // Also check to make sure that the cluster does not have
- // any negative energy hits. These are, obviously, wrong.
- if(hit.getCorrectedEnergy() < 0.0) {
- return false;
- }
- }
-
- // If all of the cluster hits pass the time cut, the cluster
- // is valid.
- return true;
- }
+ // Cluster match state variables.
+ public static final byte CLUSTER_STATE_MATCHED = 0;
+ public static final byte CLUSTER_STATE_FAIL_POSITION = 1;
+ public static final byte CLUSTER_STATE_FAIL_ENERGY = 2;
+ public static final byte CLUSTER_STATE_FAIL_HIT_COUNT = 3;
+ public static final byte CLUSTER_STATE_FAIL_TIME = 4;
+ public static final byte CLUSTER_STATE_FAIL_UNKNOWN = 5;
+
+ // Trigger match cut IDs.
+ public static final int SINGLES_ENERGY_MIN = 0;
+ public static final int SINGLES_ENERGY_MAX = 1;
+ public static final int SINGLES_HIT_COUNT = 2;
+ public static final int PAIR_ENERGY_SUM = 0;
+ public static final int PAIR_ENERGY_DIFF = 1;
+ public static final int PAIR_ENERGY_SLOPE = 2;
+ public static final int PAIR_COPLANARITY = 3;
+
+ // Trigger type variables.
+ public static final int TRIGGER_PULSER = TriggerStatModule.PULSER;
+ public static final int TRIGGER_COSMIC = TriggerStatModule.COSMIC;
+ public static final int TRIGGER_SINGLES_0 = TriggerStatModule.SINGLES_0;
+ public static final int TRIGGER_SINGLES_1 = TriggerStatModule.SINGLES_1;
+ public static final int TRIGGER_PAIR_0 = TriggerStatModule.PAIR_0;
+ public static final int TRIGGER_PAIR_1 = TriggerStatModule.PAIR_1;
+ public static final String[] TRIGGER_NAME = { "Singles 0", "Singles 1", "Pair 0", "Pair 1", "Pulser", "Cosmic" };
+
+ /**
+ * Convenience method that writes the position of a cluster in the
+ * form (ix, iy).
+ * @param cluster - The cluster.
+ * @return Returns the cluster position as a <code>String</code>.
+ */
+ public static final String clusterPositionString(Cluster cluster) {
+ return String.format("(%3d, %3d)",
+ cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix"),
+ cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy"));
+ }
+
+ /**
+ * Convenience method that writes the position of a cluster in the
+ * form (ix, iy).
+ * @param cluster - The cluster.
+ * @return Returns the cluster position as a <code>String</code>.
+ */
+ public static final String clusterPositionString(SSPCluster cluster) {
+ return String.format("(%3d, %3d)", cluster.getXIndex(), cluster.getYIndex());
+ }
+
+ /**
+ * Convenience method that writes the information in a cluster to
+ * a <code>String</code>.
+ * @param cluster - The cluster.
+ * @return Returns the cluster information as a <code>String</code>.
+ */
+ public static final String clusterToString(Cluster cluster) {
+ return String.format("Cluster at (%3d, %3d) with %.3f GeV and %d hits at %4.0f ns.",
+ cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix"),
+ cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy"),
+ cluster.getEnergy(), cluster.getCalorimeterHits().size(),
+ cluster.getCalorimeterHits().get(0).getTime());
+ }
+
+ /**
+ * Convenience method that writes the information in a cluster to
+ * a <code>String</code>.
+ * @param cluster - The cluster.
+ * @return Returns the cluster information as a <code>String</code>.
+ */
+ public static final String clusterToString(SSPCluster cluster) {
+ return String.format("Cluster at (%3d, %3d) with %.3f GeV and %d hits at %4d ns.",
+ cluster.getXIndex(), cluster.getYIndex(), cluster.getEnergy(),
+ cluster.getHitCount(), cluster.getTime());
+ }
+
+ /**
+ * Gets the x/y-indices of the cluster.
+ * @param cluster - The cluster of which to obtain the indices.
+ * @return Returns the indices as a <code>Point</code> object.
+ */
+ public static final Point getClusterPosition(Cluster cluster) {
+ return new Point(getXIndex(cluster), getYIndex(cluster));
+ }
+
+ /**
+ * Gets the x/y-indices of the cluster.
+ * @param cluster - The cluster of which to obtain the indices.
+ * @return Returns the indices as a <code>Point</code> object.
+ */
+ public static final Point getClusterPosition(SSPCluster cluster) {
+ return new Point(cluster.getXIndex(), cluster.getYIndex());
+ }
+
+ /**
+ * Gets the time stamp of the cluster in nanoseconds.
+ * @param cluster - The cluster.
+ * @return Returns the time-stamp.
+ */
+ public static final double getClusterTime(Cluster cluster) {
+ return cluster.getCalorimeterHits().get(0).getTime();
+ }
+
+ /**
+ * Gets the time stamp of the cluster in nanoseconds.
+ * @param cluster - The cluster.
+ * @return Returns the time-stamp.
+ */
+ public static final int getClusterTime(SSPCluster cluster) {
+ return cluster.getTime();
+ }
+
+ /**
+ * Gets the number of digits in the base-10 String representation
+ * of an integer primitive. Negative signs are not included in the
+ * digit count.
+ * @param value - The value of which to obtain the length.
+ * @return Returns the number of digits in the String representation
+ * of the argument value.
+ */
+ public static final int getDigits(int value) {
+ if(value < 0) { return Integer.toString(value).length() - 1; }
+ else { return Integer.toString(value).length(); }
+ }
+
+ /**
+ * Gets the number of hits in a cluster.
+ * @param cluster - The cluster.
+ * @return Returns the number of hits in the cluster.
+ */
+ public static final int getHitCount(Cluster cluster) {
+ return cluster.getCalorimeterHits().size();
+ }
+
+ /**
+ * Gets the number of hits in a cluster.
+ * @param cluster - The cluster.
+ * @return Returns the number of hits in the cluster.
+ */
+ public static final int getHitCount(SSPCluster cluster) {
+ return cluster.getHitCount();
+ }
+
+ /**
+ * Gets the x-index of the cluster's seed hit.
+ * @param cluster - The cluster.
+ * @return Returns the x-index.
+ */
+ public static final int getXIndex(Cluster cluster) {
+ return cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix");
+ }
+
+ /**
+ * Gets the x-index of the cluster's seed hit.
+ * @param cluster - The cluster.
+ * @return Returns the x-index.
+ */
+ public static final int getXIndex(SSPCluster cluster) {
+ return cluster.getXIndex();
+ }
+
+ /**
+ * Gets the y-index of the cluster's seed hit.
+ * @param cluster - The cluster.
+ * @return Returns the y-index.
+ */
+ public static final int getYIndex(Cluster cluster) {
+ return cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy");
+ }
+
+ /**
+ * Gets the y-index of the cluster's seed hit.
+ * @param cluster - The cluster.
+ * @return Returns the y-index.
+ */
+ public static final int getYIndex(SSPCluster cluster) {
+ return cluster.getYIndex();
+ }
+
+ /**
+ * Checks whether all of the hits in a cluster are within the safe
+ * region of the FADC output window.
+ * @param reconCluster - The cluster to check.
+ * @return Returns <code>true</code> if the cluster is safe and
+ * returns <code>false</code> otherwise.
+ */
+ public static final boolean isVerifiable(Cluster reconCluster, int nsa, int nsb, int windowWidth) {
+ // Iterate over the hits in the cluster.
+ for(CalorimeterHit hit : reconCluster.getCalorimeterHits()) {
+ // Check that none of the hits are within the disallowed
+ // region of the FADC readout window.
+ if(hit.getTime() <= nsb || hit.getTime() >= (windowWidth - nsa)) {
+ return false;
+ }
+
+ // Also check to make sure that the cluster does not have
+ // any negative energy hits. These are, obviously, wrong.
+ if(hit.getCorrectedEnergy() < 0.0) {
+ return false;
+ }
+ }
+
+ // If all of the cluster hits pass the time cut, the cluster
+ // is valid.
+ return true;
+ }
}
Modified: java/branches/HPSJAVA-409/conditions/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/conditions/pom.xml (original)
+++ java/branches/HPSJAVA-409/conditions/pom.xml Wed Apr 27 11:11:32 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/conditions/</url>
@@ -37,6 +37,7 @@
<exclude>org/hps/conditions/api/ConditionsTagTest.java</exclude>
<exclude>org/hps/conditions/HPSJAVA_529_Test.java</exclude>
<exclude>org/hps/conditions/dummy/**.java</exclude>
+ <exclude>org/hps/conditions/beam/BeamEnergyTest.java</exclude>
</excludes>
</configuration>
</plugin>
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ConditionsDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ConditionsDriver.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ConditionsDriver.java Wed Apr 27 11:11:32 2016
@@ -28,12 +28,15 @@
* This is a "special" Driver which must have its initialization occur at the right time. It has a custom initialization
* method {@link #initialize()} which should be called after all Driver setup has occurred, but before the job actually
* begins. This is so the conditions system functions properly, including the activation of registered listeners. The
- * setup is performed by in the class {@link org.hps.job.JobManager}, which is used in the default command line front
- * end of the hps-distribution. If that class is not being used, then the method must be executed manually at the right
+ * setup is performed by the <code>JobManager</code>, which is used in the default command line front end of the
+ * hps-distribution. If that class is not being used, then the method must be executed manually at the right
* time to achieve the proper behavior.
*
* @author Jeremy McCormick, SLAC
+ *
+ * @deprecated Use built-in options of job manager.
*/
+@Deprecated
public class ConditionsDriver extends Driver {
/** The name of the detector model. */
@@ -142,4 +145,12 @@
public final void setXmlConfigResource(final String xmlConfigResource) {
this.xmlConfigResource = xmlConfigResource;
}
+
+ public int getRunNumber() {
+ return this.runNumber;
+ }
+
+ public String getDetectorName() {
+ return this.detectorName;
+ }
}
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/BaseConditionsObject.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/BaseConditionsObject.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/BaseConditionsObject.java Wed Apr 27 11:11:32 2016
@@ -15,7 +15,7 @@
*
* @author Jeremy McCormick, SLAC
*/
-public class BaseConditionsObject implements ConditionsObject {
+public abstract class BaseConditionsObject implements ConditionsObject {
/**
* Field name for collection ID.
@@ -206,8 +206,8 @@
* Get a field value.
*
* @param name the field name
- * @param T the field value
- * @param <T> the implicit return return
+ * @param <T> the implicit return type
+ * @return the value of field cast to given type
*/
@Override
public <T> T getFieldValue(final String name) {
@@ -247,7 +247,7 @@
/**
* Return <code>true</code> if collection ID is valid.
*
- * @param <code>true</code> if collection ID is valid
+ * @return <code>true</code> if collection ID is valid
*/
@Override
public boolean hasValidCollectionId() {
@@ -440,4 +440,22 @@
}
return rowsUpdated != 0;
}
+
+ public boolean equals(Object object) {
+ // Is it the same object?
+ if (object == this) {
+ return true;
+ }
+ // Are these objects the same class?
+ if (object.getClass().equals(this.getClass())) {
+ BaseConditionsObject otherObject = BaseConditionsObject.class.cast(object);
+ // Do the row IDs and database table name match?
+ if (otherObject.getTableMetaData().getTableName().equals(this.getTableMetaData().getTableName()) &&
+ this.getRowId() == otherObject.getRowId()) {
+ // These are considered the same object (same database table and row ID).
+ return true;
+ }
+ }
+ return false;
+ }
}
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/BaseConditionsObjectCollection.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/BaseConditionsObjectCollection.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/BaseConditionsObjectCollection.java Wed Apr 27 11:11:32 2016
@@ -102,6 +102,15 @@
if (object == null) {
throw new IllegalArgumentException("The object argument is null.");
}
+ //checkCollectionId(object);
+ final boolean added = this.objects.add(object);
+ if (!added) {
+ throw new RuntimeException("Failed to add object.");
+ }
+ return added;
+ }
+
+ private void checkCollectionId(final ObjectType object) {
// Does this collection have a valid ID yet?
if (this.getCollectionId() != BaseConditionsObject.UNSET_COLLECTION_ID) {
// Does the object that is being added have a collection ID?
@@ -122,11 +131,6 @@
}
}
}
- final boolean added = this.objects.add(object);
- if (!added) {
- throw new RuntimeException("Failed to add object.");
- }
- return added;
}
/**
@@ -344,7 +348,7 @@
} else {
// If the collection already exists in the database with this ID then it cannot be inserted.
if (this.exists()) {
- throw new DatabaseObjectException("The collection " + this.collectionId
+ throw new DatabaseObjectException("The collection ID " + this.collectionId
+ " cannot be inserted because it already exists in the " + this.tableMetaData.getTableName()
+ " table.", this);
}
@@ -605,7 +609,7 @@
/**
* Set the table meta data of the collection.
*
- * @param the table meta data of the collection
+ * @param tableMetaData the table meta data of the collection
* @see TableMetaData
*/
@Override
@@ -703,7 +707,6 @@
public void writeCsv(final File file) throws IOException {
FileWriter fileWriter = null;
CSVPrinter csvFilePrinter = null;
-
try {
fileWriter = new FileWriter(file);
csvFilePrinter = new CSVPrinter(fileWriter, CSVFormat.DEFAULT);
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/ConditionsObjectCollection.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/ConditionsObjectCollection.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/ConditionsObjectCollection.java Wed Apr 27 11:11:32 2016
@@ -64,7 +64,6 @@
* Load collection from a CSV file.
*
* @param file the input CSV file
- * @param delimiter the field delimiter (leave blank for default which is comma-delimited)
* @throws IOException if there is an error closing the reader
* @throws FileNotFoundException if the input file does not exist
* @throws ConditionsObjectException if there is an error creating a conditions object
@@ -104,7 +103,6 @@
* Write the collection contents to a text file.
*
* @param file the output text file
- * @param delimiter the field delimiter (leave blank for default which is comma-delimited)
*/
void writeCsv(File file) throws IOException;
}
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/ConditionsObjectException.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/ConditionsObjectException.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/ConditionsObjectException.java Wed Apr 27 11:11:32 2016
@@ -16,7 +16,7 @@
/**
* Error with a message.
*
- * @param message the error message
+ * @param e the original exception
*/
public ConditionsObjectException(Exception e) {
super(e);
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/ConditionsRecord.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/ConditionsRecord.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/ConditionsRecord.java Wed Apr 27 11:11:32 2016
@@ -8,6 +8,7 @@
import org.hps.conditions.database.ConditionsRecordConverter;
import org.hps.conditions.database.Converter;
import org.hps.conditions.database.Field;
+import org.hps.conditions.database.MultipleCollectionsAction;
import org.hps.conditions.database.Table;
/**
@@ -233,6 +234,33 @@
public final ConditionsRecordCollection sortedByUpdated() {
return (ConditionsRecordCollection) this.sorted(new UpdatedComparator());
}
+
+ /**
+ * Find a unique record using the selected action for disambiguating conditions with the same key.
+ * @param key the name of the key
+ * @param action the disambiguation action
+ * @return the unique conditions record or <code>null</code> if does not exist
+ */
+ public ConditionsRecord findUniqueRecord(String key, MultipleCollectionsAction action) {
+ ConditionsRecord record = null;
+ ConditionsRecordCollection keyRecords = this.findByKey(key);
+ if (keyRecords.size() > 0) {
+ if (keyRecords.size() == 1) {
+ record = keyRecords.get(0);
+ } else {
+ if (action.equals(MultipleCollectionsAction.LAST_UPDATED)) {
+ record = keyRecords.sortedByUpdated().get(keyRecords.size() - 1);
+ } else if (action.equals(MultipleCollectionsAction.LAST_CREATED)) {
+ record = keyRecords.sortedByCreated().get(keyRecords.size() - 1);
+ } else if (action.equals(MultipleCollectionsAction.LATEST_RUN_START)) {
+ record = keyRecords.sortedByRunStart().get(keyRecords.size() - 1);
+ } else if (action.equals(MultipleCollectionsAction.ERROR)) {
+ throw new RuntimeException("Multiple ConditionsRecord object found for conditions key " + key + ".");
+ }
+ }
+ }
+ return record;
+ }
}
/**
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/package-info.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/package-info.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/api/package-info.java Wed Apr 27 11:11:32 2016
@@ -1,11 +1,13 @@
/**
- * User interface to the database conditions system
+ * User API to the database conditions system
*
* @author Jeremy McCormick, SLAC
- * @see ConditionsObject
- * @see ConditionsObjectCollection
- * @see ConditionsSeries
- * @see ConditionsRecord
+ *
+ * @see org.hps.conditions.api.ConditionsObject
+ * @see org.hps.conditions.api.ConditionsObjectCollection
+ * @see org.hps.conditions.api.ConditionsSeries
+ * @see org.hps.conditions.api.ConditionsRecord
+ * @see org.hps.conditions.api.ConditionsTag
*/
package org.hps.conditions.api;
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/AbstractCommand.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/AbstractCommand.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/AbstractCommand.java Wed Apr 27 11:11:32 2016
@@ -1,7 +1,7 @@
package org.hps.conditions.cli;
import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
@@ -32,7 +32,7 @@
/**
* The parser for the options.
*/
- private final DefaultParser parser = new DefaultParser();
+ private final PosixParser parser = new PosixParser();
/**
* Class constructor.
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/AddCommand.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/AddCommand.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/AddCommand.java Wed Apr 27 11:11:32 2016
@@ -32,16 +32,13 @@
*/
private static final Options OPTIONS = new Options();
static {
- OPTIONS.addOption(new Option("h", false, "print help for add command"));
- OPTIONS.addOption("r", true, "starting run number (required)");
- OPTIONS.getOption("r").setRequired(true);
- OPTIONS.addOption("e", true, "ending run number (default is starting run number)");
- OPTIONS.addOption("t", true, "table name (required)");
- OPTIONS.getOption("t").setRequired(true);
- OPTIONS.addOption("c", true, "collection ID (required)");
- OPTIONS.getOption("c").setRequired(true);
- OPTIONS.addOption("u", true, "user name (optional)");
- OPTIONS.addOption("m", true, "notes about this conditions set (optional)");
+ OPTIONS.addOption(new Option("h", "help", false, "print help for add command"));
+ OPTIONS.addOption("r", "run-start", true, "starting run number (required)");
+ OPTIONS.addOption("e", "run-end", true, "ending run number (default is starting run number)");
+ OPTIONS.addOption("t", "table", true, "table name (required)");
+ OPTIONS.addOption("c", "collection", true, "collection ID (required)");
+ OPTIONS.addOption("u", "user", true, "user name (optional)");
+ OPTIONS.addOption("m", "notes", true, "notes about this conditions set (optional)");
}
/**
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/CommandLineTool.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/CommandLineTool.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/CommandLineTool.java Wed Apr 27 11:11:32 2016
@@ -11,7 +11,7 @@
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.hps.conditions.database.DatabaseConditionsManager;
import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
@@ -34,13 +34,12 @@
private static Options OPTIONS = new Options();
static {
- OPTIONS.addOption(new Option("h", false, "print help"));
- OPTIONS.addOption(new Option("d", true, "detector name"));
- OPTIONS.addOption(new Option("r", true, "run number"));
- OPTIONS.addOption(new Option("p", true, "database connection properties file"));
- OPTIONS.addOption(new Option("x", true, "conditions XML configuration file"));
- OPTIONS.addOption(new Option("t", true, "conditions tag to use for filtering records"));
- OPTIONS.addOption(new Option("l", true, "log level of the conditions manager (INFO, FINE, etc.)"));
+ OPTIONS.addOption(new Option("h", "help", false, "print help"));
+ OPTIONS.addOption(new Option("d", "detector", true, "detector name"));
+ OPTIONS.addOption(new Option("r", "run", true, "run number"));
+ OPTIONS.addOption(new Option("p", "connection", true, "database connection properties file"));
+ OPTIONS.addOption(new Option("x", "xml", true, "conditions XML configuration file"));
+ OPTIONS.addOption(new Option("t", "tag", true, "conditions tag to use for filtering records"));
}
/**
@@ -80,7 +79,7 @@
/**
* The options parser.
*/
- private final DefaultParser parser = new DefaultParser();
+ private final PosixParser parser = new PosixParser();
/**
* Exit with the given status.
@@ -176,13 +175,6 @@
// Create new manager.
this.conditionsManager = DatabaseConditionsManager.getInstance();
-
- // Set the conditions manager log level (does not affect logger of this class or sub-commands).
- if (commandLine.hasOption("l")) {
- final Level newLevel = Level.parse(commandLine.getOptionValue("l"));
- Logger.getLogger(DatabaseConditionsManager.class.getPackage().getName()).setLevel(newLevel);
- LOGGER.config("conditions manager log level will be set to " + newLevel.toString());
- }
// Connection properties.
if (commandLine.hasOption("p")) {
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/LoadCommand.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/LoadCommand.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/LoadCommand.java Wed Apr 27 11:11:32 2016
@@ -33,12 +33,10 @@
*/
private static final Options OPTIONS = new Options();
static {
- OPTIONS.addOption(new Option("h", false, "print help for load command"));
- OPTIONS.addOption(new Option("t", true, "name of the target table (required)"));
- OPTIONS.getOption("t").setRequired(true);
- OPTIONS.addOption(new Option("f", true, "input data file path (required)"));
- OPTIONS.getOption("f").setRequired(true);
- OPTIONS.addOption(new Option("d", true, "description for the collection log"));
+ OPTIONS.addOption(new Option("h", "help", false, "print help for load command"));
+ OPTIONS.addOption(new Option("t", "table", true, "name of the target table (required)"));
+ OPTIONS.addOption(new Option("f", "file", true, "input data file path (required)"));
+ OPTIONS.addOption(new Option("d", "description", true, "description for the collection log"));
}
/**
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/PrintCommand.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/PrintCommand.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/PrintCommand.java Wed Apr 27 11:11:32 2016
@@ -38,18 +38,17 @@
static Options options = new Options();
static {
- options.addOption(new Option("h", false, "print help for print command"));
- options.addOption(new Option("t", true, "table name"));
- options.addOption(new Option("i", false, "print the ID for the records (off by default)"));
- options.addOption(new Option("f", true, "write print output to a file (must be used with -t option)"));
- options.addOption(new Option("H", false, "suppress printing of conditions record and table info"));
- options.addOption(new Option("d", false, "use tabs for field delimiter instead of spaces"));
+ options.addOption(new Option("h", "help", false, "print help for print command"));
+ options.addOption(new Option("t", "table", true, "table name"));
+ options.addOption(new Option("i", "print-id", false, "include the row IDs in printouts"));
+ options.addOption(new Option("f", "file", true, "write output to a file (requires -t option)"));
+ options.addOption(new Option("H", "no-header", false, "suppress printing of conditions record and table info"));
}
/**
* The field delimiter for print output.
*/
- private char fieldDelimiter = ' ';
+ private static final char DELIMITER = ',';
/**
* Output file if printing to a file.
@@ -125,11 +124,6 @@
// Print header info. Option turns this off.
if (commandLine.hasOption("h")) {
this.printHeaders = false;
- }
-
- // Use tabs instead of spaces for field delimiter.
- if (commandLine.hasOption("d")) {
- this.fieldDelimiter = '\t';
}
// List of conditions records to print.
@@ -173,7 +167,7 @@
for (final String columnName : collection.getTableMetaData().getFieldNames()) {
if (!"collection_id".equals(columnName)) {
buffer.append(((ConditionsObject) object).getFieldValue(columnName));
- buffer.append(this.fieldDelimiter);
+ buffer.append(DELIMITER);
}
}
buffer.setLength(buffer.length() - 1);
@@ -221,15 +215,17 @@
private void printColumnNames(final TableMetaData tableMetaData) {
if (this.printIDs) {
this.ps.print("id");
- this.ps.print(this.fieldDelimiter);
- }
+ this.ps.print(DELIMITER);
+ }
+ StringBuffer sb = new StringBuffer();
for (final String columnName : tableMetaData.getFieldNames()) {
if (!"collection_id".equals(columnName)) {
- this.ps.print(columnName);
- this.ps.print(this.fieldDelimiter);
- }
- }
- this.ps.println();
+ sb.append(columnName);
+ sb.append(DELIMITER);
+ }
+ }
+ sb.setLength(sb.length() - 1);
+ this.ps.println(sb.toString());
}
/**
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/RunSummaryCommand.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/RunSummaryCommand.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/RunSummaryCommand.java Wed Apr 27 11:11:32 2016
@@ -35,8 +35,8 @@
*/
static Options options = new Options();
static {
- options.addOption(new Option("h", false, "Show help for run-summary command"));
- options.addOption(new Option("a", false, "Print all collections found for the run"));
+ options.addOption(new Option("h", "help", false, "Show help for run-summary command"));
+ options.addOption(new Option("a", "all", false, "Print all collections found for the run"));
}
/**
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/TagCommand.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/TagCommand.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/cli/TagCommand.java Wed Apr 27 11:11:32 2016
@@ -1,10 +1,9 @@
package org.hps.conditions.cli;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
import java.sql.SQLException;
-import java.util.logging.Level;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.TreeSet;
import java.util.logging.Logger;
import org.apache.commons.cli.CommandLine;
@@ -16,17 +15,16 @@
import org.hps.conditions.api.ConditionsTag;
import org.hps.conditions.api.ConditionsTag.ConditionsTagCollection;
import org.hps.conditions.api.DatabaseObjectException;
-import org.hps.conditions.api.TableMetaData;
import org.hps.conditions.api.TableRegistry;
+import org.hps.conditions.database.DatabaseConditionsManager;
import org.hps.conditions.database.MultipleCollectionsAction;
+import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
/**
* Create a conditions system tag.
* <p>
* The tag groups together conditions records from the <i>conditions</i> database table with a run validity range that
* is between a specified starting and ending run.
- * <p>
- * Tagging will not disambiguate overlapping conditions, which is done at run-time based on the current run number.
*
* @author Jeremy McCormick, SLAC
*/
@@ -41,20 +39,29 @@
* Defines command options.
*/
private static Options OPTIONS = new Options();
+
+ private MultipleCollectionsAction multipleCollectionsAction = MultipleCollectionsAction.LAST_CREATED;
+
+ private static String getMultipleCollectionsActionString() {
+ StringBuffer sb = new StringBuffer();
+ for (MultipleCollectionsAction action : MultipleCollectionsAction.values()) {
+ sb.append(action.name() + " ");
+ }
+ sb.setLength(sb.length() - 1);
+ return sb.toString();
+ }
/**
* Define all command options.
*/
static {
- OPTIONS.addOption(new Option("h", false, "Show help for tag command"));
- OPTIONS.addOption(new Option("t", true, "Conditions tag name"));
- OPTIONS.addOption(new Option("s", true, "Starting run number (required)"));
- OPTIONS.getOption("s").setRequired(true);
- OPTIONS.addOption(new Option("e", true, "Ending run number (default is unlimited)"));
- OPTIONS.getOption("t").setRequired(true);
- OPTIONS.addOption(new Option("m", true,
- "MultipleCollectionsAction to use for disambiguation (default is LAST_CREATED)"));
- OPTIONS.addOption(new Option("d", false, "Don't prompt before making tag (be careful!)"));
+ OPTIONS.addOption(new Option("h", "help", false, "Show help for tag command"));
+ OPTIONS.addOption(new Option("t", "tag", true, "Conditions tag name"));
+ OPTIONS.addOption(new Option("s", "run-start", true, "Starting run number (required)"));
+ OPTIONS.addOption(new Option("e", "run-end", true, "Ending run number (required)"));
+ OPTIONS.addOption(new Option("m", "multiple", true,
+ "set run overlap handling (" + getMultipleCollectionsActionString() + ")"));
+ OPTIONS.addOption(new Option("D", "dont-prompt", false, "Don't prompt before making tag (careful!)"));
}
/**
@@ -103,6 +110,11 @@
} else {
throw new RuntimeException("Missing required -t argument with the tag name.");
}
+
+ // Check if tag exists already.
+ if (getManager().getAvailableTags().contains(tagName)) {
+ throw new RuntimeException("The tag '" + tagName + "' already exists in the database.");
+ }
// Starting run number (required).
int runStart = -1;
@@ -110,19 +122,16 @@
runStart = Integer.parseInt(commandLine.getOptionValue("s"));
LOGGER.config("run start set to " + runStart);
} else {
- throw new RuntimeException("missing require -s argument with starting run number");
- }
-
- // Ending run number (max integer is default).
- int runEnd = Integer.MAX_VALUE;
+ throw new RuntimeException("Missing required -s argument with starting run number.");
+ }
+
+ // Ending run number (required).
+ int runEnd = -1;
if (commandLine.hasOption("e")) {
runEnd = Integer.parseInt(commandLine.getOptionValue("e"));
LOGGER.config("run end set to " + runEnd);
- }
-
- // Run end must be greater than or equal to run start.
- if (runEnd < runStart) {
- throw new IllegalArgumentException("runEnd < runStart");
+ } else {
+ throw new RuntimeException("Missing required -e argument with starting run number.");
}
// Action for disambiguating overlapping collections (default is to use the most recent creation date).
@@ -131,20 +140,24 @@
multipleCollectionsAction = MultipleCollectionsAction
.valueOf(commandLine.getOptionValue("m").toUpperCase());
}
- LOGGER.config("multiple collections action set tco " + multipleCollectionsAction);
+ LOGGER.config("run overlaps will be disambiguated using " + multipleCollectionsAction);
// Whether to prompt before tagging (default is yes).
boolean promptBeforeTagging = true;
if (commandLine.hasOption("d")) {
promptBeforeTagging = false;
}
- LOGGER.config("prompt before tagging: " + promptBeforeTagging);
+ LOGGER.config("prompt before tagging = " + promptBeforeTagging);
// Conditions system configuration.
this.getManager().setXmlConfig("/org/hps/conditions/config/conditions_database_no_svt.xml");
// Find all the applicable conditions records by their run number ranges.
ConditionsRecordCollection tagConditionsRecordCollection = this.findConditionsRecords(runStart, runEnd);
+
+ if (tagConditionsRecordCollection.size() == 0) {
+ throw new RuntimeException("No records found for tag.");
+ }
LOGGER.info("found " + tagConditionsRecordCollection.size() + " conditions records for the tag");
@@ -152,8 +165,8 @@
final ConditionsTagCollection conditionsTagCollection = this.createConditionsTagCollection(
tagConditionsRecordCollection, tagName);
- LOGGER.info("created " + conditionsTagCollection.size() + " tag records ..." + '\n' + conditionsTagCollection);
-
+ printConditionsRecords(tagConditionsRecordCollection);
+
// Prompt user to verify tag creation.
boolean createTag = true;
if (promptBeforeTagging) {
@@ -178,68 +191,85 @@
LOGGER.info("done!");
}
-
- /**
- * Find all the conditions records that are applicable for the given run range.
- * <p>
- * Overlapping run numbers in conditions with the same key are not disambiguated.
- * This must be done in the user's job at runtime; usually the most recently created
- * conditions record will be used if multiple one's are applicable to the current run.
- *
- * @param runStart the start run
- * @param runEnd the end run (must be greater than or equal to <code>runStart</code>)
- * @return the conditions records that fall in the run range
+
+ /**
+ * Print information about conditions records in the tag to the log.
+ *
+ * @param collection the conditions tag collection
+ */
+ private void printConditionsRecords(ConditionsRecordCollection records) {
+ StringBuffer sb = new StringBuffer();
+ Set<String> keys = new TreeSet<String>(records.getConditionsKeys());
+ for (String key : keys) {
+ ConditionsRecordCollection keyRecords = records.findByKey(key);
+ keyRecords.sortByKey();
+ for (ConditionsRecord record : keyRecords) {
+ sb.append("conditions_id: " + record.getRowId() + ", name: " + record.getName() + ", collection_id: "
+ + record.getCollectionId() + ", run_start: " + record.getRunStart()
+ + ", run_end: " + record.getRunEnd() + ", notes: " + record.getNotes() + '\n');
+
+ }
+ }
+ LOGGER.info("including " + records.size() + " records in tag ..." + '\n' + sb.toString());
+ }
+
+ /**
+ * Scan through a run range to find conditions records for the tag.
+ *
+ * @param runStart the starting run number
+ * @param runEnd the ending run number
+ * @return the conditions records for the tag
*/
private ConditionsRecordCollection findConditionsRecords(final int runStart, final int runEnd) {
- if (runStart > runEnd) {
- throw new IllegalArgumentException("runStart > runEnd");
- }
- if (runStart < 0) {
- throw new IllegalArgumentException("invalid runStart: " + runStart);
- }
- if (runEnd < 0) {
- throw new IllegalArgumentException("invalid runEnd: " + runEnd);
- }
- final Connection connection = this.getManager().getConnection();
- final ConditionsRecordCollection conditionsRecordCollection = new ConditionsRecordCollection();
- final TableMetaData tableMetaData = TableRegistry.getTableRegistry().findByTableName("conditions");
- PreparedStatement statement = null;
- try {
- /*
- * SQL statement handles 3 cases:
- * 1) condition's run_start in range
- * 2) condition's run_end in range
- * 3) condition's run_start and run_end enclose the range
- */
- statement = connection
- .prepareStatement("SELECT id FROM conditions WHERE (run_start >= ? and run_start <= ?) or (run_end >= ? and run_end <= ?)"
- + " or (run_start <= ? and run_end >= ?)");
- statement.setInt(1, runStart);
- statement.setInt(2, runEnd);
- statement.setInt(3, runStart);
- statement.setInt(4, runEnd);
- statement.setInt(5, runStart);
- statement.setInt(6, runEnd);
-
- final ResultSet resultSet = statement.executeQuery();
- while (resultSet.next()) {
- final ConditionsRecord record = new ConditionsRecord();
- record.setConnection(connection);
- record.setTableMetaData(tableMetaData);
- record.select(resultSet.getInt(1));
- conditionsRecordCollection.add(record);
- }
- } catch (DatabaseObjectException | ConditionsObjectException | SQLException e) {
- throw new RuntimeException(e);
- } finally {
+ if (runStart < 0 ) {
+ throw new IllegalArgumentException("The run start " + runStart + " is invalid.");
+ }
+ if (runEnd < 0 ) {
+ throw new IllegalArgumentException("The run end " + runEnd + " is invalid.");
+ }
+ if (runStart > runEnd ) {
+ throw new IllegalArgumentException("The run start is greater than the run end.");
+ }
+ DatabaseConditionsManager dbManager = this.getManager();
+ if (dbManager.isFrozen()) {
+ dbManager.unfreeze();
+ }
+ if (!dbManager.getActiveTags().isEmpty()) {
+ dbManager.clearTags();
+ }
+ final String detectorName = "HPS-dummy-detector";
+ ConditionsRecordCollection tagRecords = new ConditionsRecordCollection();
+ Set<Integer> ids = new HashSet<Integer>();
+ for (int run = runStart; run <= runEnd; run++) {
+ LOGGER.info("loading run " + run);
try {
- if (statement != null) {
- statement.close();
+ dbManager.setDetector(detectorName, run);
+ } catch (ConditionsNotFoundException e) {
+ throw new RuntimeException(e);
+ }
+ ConditionsRecordCollection runRecords = dbManager.getConditionsRecords();
+ Set<String> keys = runRecords.getConditionsKeys();
+ LOGGER.fine("run has " + runRecords.size() + " conditions records");
+ for (String key : keys) {
+ ConditionsRecord record = runRecords.findUniqueRecord(key, this.multipleCollectionsAction);
+ if (record == null) {
+ throw new RuntimeException("Missing expected unique condition record for " + key + ".");
}
- } catch (final SQLException e) {
- e.printStackTrace();
- }
- }
- return conditionsRecordCollection;
+ if (!ids.contains(record.getRowId())) {
+ try {
+ LOGGER.fine("adding conditions to tag ..." + '\n' + record.toString());
+ tagRecords.add(record);
+ ids.add(record.getRowId());
+ } catch (ConditionsObjectException e) {
+ throw new RuntimeException(e);
+ }
+ } else {
+ LOGGER.fine("Conditions record with row id " + record.getRowId() + " is already in the tag.");
+ }
+ }
+ LOGGER.info("done processing run " + run);
+ }
+ LOGGER.info("Found " + tagRecords.size() + " conditions records for tag.");
+ return tagRecords;
}
}
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/ConditionsRecordConverter.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/ConditionsRecordConverter.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/ConditionsRecordConverter.java Wed Apr 27 11:11:32 2016
@@ -3,7 +3,6 @@
import java.sql.ResultSet;
import java.sql.SQLException;
-import org.hps.conditions.api.AbstractConditionsObjectConverter;
import org.hps.conditions.api.ConditionsObject;
import org.hps.conditions.api.ConditionsObjectCollection;
import org.hps.conditions.api.ConditionsObjectException;
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/ConditionsTagConverter.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/ConditionsTagConverter.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/ConditionsTagConverter.java Wed Apr 27 11:11:32 2016
@@ -5,7 +5,6 @@
import java.sql.ResultSet;
import java.sql.SQLException;
-import org.hps.conditions.api.AbstractConditionsObjectConverter;
import org.hps.conditions.api.ConditionsObjectException;
import org.hps.conditions.api.ConditionsTag;
import org.hps.conditions.api.ConditionsTag.ConditionsTagCollection;
@@ -24,14 +23,13 @@
private static final String SELECT_SQL = "SELECT conditions_id, tag from conditions_tags where tag = ?";
/**
- * Get a {@link org.hps.conditions.api.ConditionsTagCollection} which specifies a group of collections
- * that are tagged in the <i>conditions_tags</i> table in the database.
- * <p>
- * The run number is not used, and the <code>name</code> argument specifies the tag name.
+ * Get a {@link org.hps.conditions.api.ConditionsTag.ConditionsTagCollection} which specifies a group of
+ * collections that are tagged in the <i>conditions_tags</i> table in the database. The <code>name</code>
+ * argument is the tag name.
*
- * @param manager The current conditions manager.
- * @param name The name of the conditions set.
- * @return The matching ConditionsRecords.
+ * @param manager the current conditions manager
+ * @param name the name of the conditions set
+ * @return the matching <code>ConditionsRecord</code> objects
*/
@Override
public ConditionsTagCollection getData(final ConditionsManager manager, final String name) {
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/ConnectionParameters.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/ConnectionParameters.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/ConnectionParameters.java Wed Apr 27 11:11:32 2016
@@ -13,7 +13,8 @@
import java.util.logging.Logger;
/**
- * This class encapsulates the parameters for connecting to a database, including host name, port, user and password.
+ * This class encapsulates the parameters for connecting to a database,
+ * including host name, port, user and password.
*
* @author Jeremy McCormick, SLAC
*/
@@ -28,6 +29,12 @@
* Number of connection retries allowed.
*/
private static final int MAX_ATTEMPTS = 10;
+
+ /**
+ * Wait time (in millis) for the first retry. The nth retry waits for
+ * n*RETRY_WAIT millis.
+ */
+ private static final int RETRY_WAIT = 5000;
/**
* Configure the connection parameters from a properties file.
@@ -46,7 +53,8 @@
}
/**
- * Configure the connection parameters from an <code>InputStream</code> of properties.
+ * Configure the connection parameters from an <code>InputStream</code> of
+ * properties.
*
* @param in the InputStream of the properties
* @return the connection parameters
@@ -71,7 +79,8 @@
}
/**
- * Configure the connection parameters from an embedded classpath resource which should be a properties file.
+ * Configure the connection parameters from an embedded classpath resource
+ * which should be a properties file.
*
* @param resource the resource path
* @return the connection parameters
@@ -146,8 +155,8 @@
}
/**
- * Create a database connection from these parameters. The caller becomes the "owner" and is responsible for closing
- * it when finished.
+ * Create a database connection from these parameters. The caller becomes
+ * the "owner" and is responsible for closing it when finished.
*
* @return the new <code>Connection</code> object
*/
@@ -166,7 +175,7 @@
throw new RuntimeException("Failed to connect to database after " + attempt + " attempts: " + this.getConnectionString(), x);
}
try {
- Thread.sleep(attempt * 1000);
+ Thread.sleep(attempt * RETRY_WAIT);
} catch (InterruptedException ex) {
Logger.getLogger(ConnectionParameters.class.getName()).log(Level.SEVERE, null, ex);
}
@@ -247,13 +256,13 @@
String getUser() {
return this.user;
}
-
+
/**
* Convert to human readable string.
- *
+ *
* @return this object converted to a string
*/
- public String toString() {
+ public String toString() {
return "ConnectionParameters { database: " + database + ", hostname: " + hostname + ", password: " + password
+ ", port: " + port + ", user: " + user + " }";
}
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/Converter.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/Converter.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/Converter.java Wed Apr 27 11:11:32 2016
@@ -4,8 +4,6 @@
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
-
-import org.hps.conditions.api.AbstractConditionsObjectConverter;
/**
* This is an annotation for providing converter configuration for {@link org.hps.conditions.api.ConditionsObject}
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/ConverterRegistry.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/ConverterRegistry.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/ConverterRegistry.java Wed Apr 27 11:11:32 2016
@@ -6,7 +6,6 @@
import javassist.Modifier;
-import org.hps.conditions.api.AbstractConditionsObjectConverter;
import org.hps.conditions.api.BaseConditionsObjectCollection;
import org.hps.conditions.api.ConditionsObject;
import org.hps.conditions.api.TableRegistry;
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/DatabaseConditionsManager.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/DatabaseConditionsManager.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/DatabaseConditionsManager.java Wed Apr 27 11:11:32 2016
@@ -20,7 +20,6 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.hps.conditions.api.AbstractConditionsObjectConverter;
import org.hps.conditions.api.ConditionsObject;
import org.hps.conditions.api.ConditionsObjectCollection;
import org.hps.conditions.api.ConditionsRecord.ConditionsRecordCollection;
@@ -338,7 +337,7 @@
}
/**
- * Clear the tags used to filter the {@link org.hps.conditons.api.ConditionsRecord}s.
+ * Clear the tags used to filter the {@link org.hps.conditions.api.ConditionsRecord}s.
*/
public void clearTags() {
this.tags.clear();
@@ -505,9 +504,8 @@
/**
* Add a row for a new collection and return the new collection ID assigned to it.
- *
- * @param tableName the name of the table
- * @param comment an optional comment about this new collection
+ * @param collection the conditions object collection
+ * @param description text description for the new collection ID record in the database
* @return the collection's ID
* @throws SQLException
*/
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/package-info.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/package-info.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/database/package-info.java Wed Apr 27 11:11:32 2016
@@ -1,5 +1,10 @@
/**
- * Implementation of database API to detector conditions
+ * Implementation of database API for detector conditions
+ * <p>
+ * The {@link org.hps.conditions.database.DatabaseConditionsManager} has a set of converters for handling the
+ * conversion of conditions table data to typed collections. The converters are created automatically using
+ * introspection of {@link org.hps.conditions.api.ConditionsObject} classes that have the
+ * {@link org.hps.conditions.database.Table} and {@link org.hps.conditions.database.Field} annotations.
*
* @author Jeremy McCormick, SLAC
*/
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/dummy/DummyConditionsObjectConverter.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/dummy/DummyConditionsObjectConverter.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/dummy/DummyConditionsObjectConverter.java Wed Apr 27 11:11:32 2016
@@ -1,6 +1,6 @@
package org.hps.conditions.dummy;
-import org.hps.conditions.api.AbstractConditionsObjectConverter;
+import org.hps.conditions.database.AbstractConditionsObjectConverter;
import org.hps.conditions.dummy.DummyConditionsObject.DummyConditionsObjectCollection;
/**
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/EcalChannel.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/EcalChannel.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/EcalChannel.java Wed Apr 27 11:11:32 2016
@@ -4,12 +4,12 @@
import java.util.HashMap;
import java.util.Map;
-import org.hps.conditions.api.AbstractConditionsObjectConverter;
import org.hps.conditions.api.AbstractIdentifier;
import org.hps.conditions.api.BaseConditionsObject;
import org.hps.conditions.api.BaseConditionsObjectCollection;
import org.hps.conditions.api.ConditionsObjectCollection;
import org.hps.conditions.api.ConditionsObjectException;
+import org.hps.conditions.database.AbstractConditionsObjectConverter;
import org.hps.conditions.database.Converter;
import org.hps.conditions.database.DatabaseConditionsManager;
import org.hps.conditions.database.Field;
@@ -290,11 +290,16 @@
public EcalChannelCollection getData(final ConditionsManager conditionsManager, final String name) {
final EcalChannelCollection collection = super.getData(conditionsManager, name);
final Subdetector ecal = DatabaseConditionsManager.getInstance().getEcalSubdetector();
- if (ecal.getDetectorElement() != null) {
- collection.buildGeometryMap(ecal.getDetectorElement().getIdentifierHelper(), ecal.getSystemID());
+ if (ecal != null) {
+ if (ecal.getDetectorElement() != null) {
+ collection.buildGeometryMap(ecal.getDetectorElement().getIdentifierHelper(), ecal.getSystemID());
+ } else {
+ // This can happen when not running with the detector-framework jar in the classpath.
+ throw new IllegalStateException("The ECal subdetector's detector element is not setup.");
+ }
} else {
- // This can happen when not running with the detector-framework jar in the classpath.
- throw new IllegalStateException("The ECal subdetector's detector element is not setup.");
+ // Bad detector or conditions system not initialized properly.
+ throw new IllegalStateException("The ECal subdetector object is null.");
}
return collection;
}
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/EcalConditions.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/EcalConditions.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/EcalConditions.java Wed Apr 27 11:11:32 2016
@@ -11,7 +11,7 @@
* settings, per crystal.
* <p>
* Unlike most conditions data types, it does not extend {@link org.hps.conditions.api.ConditionsObject}, because it is
- * a composite object containing data assembled from many other {@link org.hps.conditions.ConditionsObjects} and has a
+ * a composite object containing data assembled from many other {@link org.hps.conditions.api.ConditionsObject} and has a
* special data converter {@link EcalConditionsConverter}.
*
* @author Jeremy McCormick, SLAC
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsConverter.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsConverter.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsConverter.java Wed Apr 27 11:11:32 2016
@@ -22,6 +22,7 @@
*
* @author Jeremy McCormick, SLAC
* @author Omar Moreno, UCSC
+ *
* @see EcalConditions
* @see EcalChannel
* @see EcalGain
@@ -38,8 +39,7 @@
/**
* Create combined ECAL conditions object containing all data for the current run.
- *
- * @param manager the conditions manager
+ *
* @param name the conditions set name (unused but must satisfy conditions API)
*/
@Override
@@ -129,7 +129,6 @@
/**
* Get the default collections of {@link EcalBadChannel} objects.
*
- * @param manager the conditions manager
* @return the collections of ECAL bad channel objects
*/
protected ConditionsSeries<EcalBadChannel, EcalBadChannelCollection> getEcalBadChannelSeries() {
@@ -139,7 +138,6 @@
/**
* Get the default {@link EcalCalibration} collection.
*
- * @param manager the conditions manager
* @return the collection of ECAL channel calibration objects
*/
protected EcalCalibrationCollection getEcalCalibrationCollection() {
@@ -149,7 +147,6 @@
/**
* Get the default {@link EcalChannel} collection.
*
- * @param manager the conditions manager
* @return the default ECAL channel object collection
*/
protected EcalChannelCollection getEcalChannelCollection() {
@@ -159,7 +156,6 @@
/**
* Get the default {@link EcalGain} collection.
*
- * @param manager the conditions manager
* @return the ECAL channel gain collection
*/
protected EcalGainCollection getEcalGainCollection() {
@@ -169,7 +165,6 @@
/**
* Get the default {@link EcalTimeShift} collection.
*
- * @param manager the conditions manager
* @return the collection of ECAL time shift objects
*/
protected EcalTimeShiftCollection getEcalTimeShiftCollection() {
@@ -177,9 +172,8 @@
}
/**
- * Get the default {@link EcalPulseWith} collection.
- *
- * @param manager the conditions manager
+ * Get the default {@link EcalPulseWidth} collection.
+ *
* @return the collection of ECAL pulse widths
*/
protected EcalPulseWidthCollection getEcalPulseWidthCollection() {
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/TestRunEcalConditionsConverter.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/TestRunEcalConditionsConverter.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/ecal/TestRunEcalConditionsConverter.java Wed Apr 27 11:11:32 2016
@@ -20,7 +20,6 @@
/**
* Get the collections of {@link EcalBadChannel} objects for Test Run.
*
- * @param manager the conditions manager
* @return the Test Run bad channel collections
*/
@Override
@@ -31,7 +30,6 @@
/**
* Get the {@link EcalCalibration} collection for Test Run.
*
- * @param manager the conditions manager
* @return the Test Run ECAL calibration collection
*/
@Override
@@ -43,7 +41,6 @@
/**
* Get the {@link EcalChannel} collection for Test Run.
*
- * @param manager the conditions manager
* @return the Test Run ECAL channel collection
*/
@Override
@@ -54,7 +51,6 @@
/**
* Get the {@link EcalGain} collection for Test Run.
*
- * @param manager the conditions manager
* @return the Test Run ECAL gain collection
*/
@Override
@@ -65,7 +61,6 @@
/**
* Get the {@link EcalTimeShift} collection for Test Run.
*
- * @param manager the conditions manager
* @return the Test Run ECAL time shift collection
*/
@Override
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/package-info.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/package-info.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/package-info.java Wed Apr 27 11:11:32 2016
@@ -2,8 +2,7 @@
* Database conditions system
* <p>
* The HPS conditions module provides facilities for accessing time dependent conditions for a detector at runtime using
- * a framework built on the LCSim conditions system. The {@link DatabaseConditionsReader} has a set of converters for
- * reading data from tables using SQL queries and creating appropriate, typed objects for them.
+ * a framework built on the LCSim conditions system.
*
* @author Jeremy McCormick, SLAC
* @see org.hps.conditions.api
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/run/RunSpreadsheet.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/run/RunSpreadsheet.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/run/RunSpreadsheet.java Wed Apr 27 11:11:32 2016
@@ -25,7 +25,7 @@
* The rows are accessible as raw CSV data through the Apache Commons CSV library, and this data must be manually cleaned up and converted
* to the correct data type before being inserted into the conditions database.
*
- * @author Jeremy McCormick
+ * @author Jeremy McCormick, SLAC
*/
public final class RunSpreadsheet {
@@ -38,23 +38,23 @@
"start_time",
"end_time",
"to_tape",
- "n_events",
+ "events",
"files",
"trigger_rate",
"target",
"beam_current",
"beam_x",
- "beam_y",
- "trigger_config",
- /*
- "ecal_fadc_mode",
+ "beam_y",
+ "trigger_config",
+ /* Next 7 are actually hidden in the spreadsheet! */
+ "ecal_fadc_mode",
"ecal_fadc_thresh",
"ecal_fadc_window",
"ecal_cluster_thresh_seed",
"ecal_cluster_thresh_cluster",
"ecal_cluster_window_hits",
- "ecal_cluster_window_pairs",
- */
+ "ecal_cluster_window_pairs",
+ /* End hidden fields. */
"ecal_scalers_fadc",
"ecal_scalers_dsc",
"svt_y_position",
@@ -62,8 +62,7 @@
"svt_offset_time",
"ecal_temp",
"ecal_lv_current",
- "notes"
- };
+ "notes"};
/**
* Read the CSV file from the command line and print the data to the terminal (just a basic test).
@@ -100,11 +99,14 @@
* @param file the CSV file
*/
public RunSpreadsheet(final File file) {
+ if (file == null) {
+ throw new IllegalArgumentException("The file argument is null.");
+ }
this.file = file;
try {
this.fromCsv(this.file);
} catch (final Exception e) {
- throw new RuntimeException();
+ throw new RuntimeException("Failed to parse run spreadsheet.", e);
}
}
@@ -161,14 +163,15 @@
return records;
}
- public static final AnotherSimpleDateFormat DATE_FORMAT = new AnotherSimpleDateFormat("MM/dd/yyyy H:mm");
+ public static final RunSpreadsheetDateFormat DATE_FORMAT = new RunSpreadsheetDateFormat("MM/dd/yyyy H:mm");
private static final TimeZone TIME_ZONE = TimeZone.getTimeZone("EST");
@SuppressWarnings("serial")
public
- static class AnotherSimpleDateFormat extends SimpleDateFormat {
- public AnotherSimpleDateFormat(String formatstring) {
+ static class RunSpreadsheetDateFormat extends SimpleDateFormat {
+
+ public RunSpreadsheetDateFormat(String formatstring) {
super(formatstring);
//Calendar c = Calendar.getInstance(TIME_ZONE,Locale.US);
//setTimeZone(TIME_ZONE);
@@ -264,7 +267,7 @@
try {
addRunData(new RunData(record));
} catch (NumberFormatException e) {
- e.printStackTrace();
+ //e.printStackTrace();
}
}
}
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/AbstractSvtDaqMapping.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/AbstractSvtDaqMapping.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/AbstractSvtDaqMapping.java Wed Apr 27 11:11:32 2016
@@ -20,7 +20,7 @@
@SuppressWarnings("serial")
public static abstract class AbstractSvtDaqMappingCollection<T extends AbstractSvtDaqMapping> extends
BaseConditionsObjectCollection<T> {
-
+
/**
* Get the orientation of a sensor.
*
@@ -65,8 +65,8 @@
/**
* Get the orientation of an SVT sensor (AXIAL or STEREO).
*
- * @see AXIAL
- * @see STEREO
+ * @see #AXIAL
+ * @see #STEREO
* @return the orientation of the SVT sensor
*/
@Field(names = {"orientation"})
@@ -87,7 +87,7 @@
/**
* Set the SVT sensor layer number (1-10 for test run and 1-12 for engineering run).
*
- * @param layer : SVT sensor layer number
+ * @param layer SVT sensor layer number
*/
public final void setLayerNumber(final int layer) {
this.setFieldValue("layer", layer);
@@ -109,8 +109,8 @@
* Set the SVT half that the sensor belongs to.
*
* @param svtHalf the SVT half (TOP or BOTTOM)
- * @see TOP_HALF
- * @see BOTTOM_HALF
+ * @see #TOP_HALF
+ * @see #BOTTOM_HALF
*/
public final void setSvtHalf(final String svtHalf) {
if (!svtHalf.equals(AbstractSvtDaqMapping.TOP_HALF) && !svtHalf.equals(AbstractSvtDaqMapping.BOTTOM_HALF)) {
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/DaqMapHandler.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/DaqMapHandler.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/DaqMapHandler.java Wed Apr 27 11:11:32 2016
@@ -96,7 +96,7 @@
* Method that is triggered when the end of a tag is encountered.
*
* @param uri the Namespace URI.
- * @param locaName the local name (without prefix)
+ * @param localName the local name (without prefix)
* @param qName the qualified name (with prefix)
* @throws SAXException if there is an error processing the element
*/
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/MotorPositionLoader.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/MotorPositionLoader.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/MotorPositionLoader.java Wed Apr 27 11:11:32 2016
@@ -16,7 +16,7 @@
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
/**
* Load SVT motor positions from a MYA dump, figure out time ranges (same position for > 10 seconds), and then convert
@@ -195,7 +195,7 @@
*/
void run(final String args[]) {
- final DefaultParser parser = new DefaultParser();
+ final PosixParser parser = new PosixParser();
CommandLine cl = null;
try {
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/OpeningAngleLoader.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/OpeningAngleLoader.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/OpeningAngleLoader.java Wed Apr 27 11:11:32 2016
@@ -30,7 +30,7 @@
* Check if the run record looks good.
*
* @param data
- * @return
+ * @return whether to accept the run or not
*/
private static boolean acceptRun(final RunData data) {
return !data.getRecord().get("to_tape").equals("JUNK")
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/SvtBiasConditionsLoader.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/SvtBiasConditionsLoader.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/SvtBiasConditionsLoader.java Wed Apr 27 11:11:32 2016
@@ -23,7 +23,7 @@
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.hps.conditions.api.ConditionsRecord;
import org.hps.conditions.database.DatabaseConditionsManager;
import org.hps.conditions.run.RunRange;
@@ -176,7 +176,7 @@
// options.addOption(new Option("b", true, "beam current file"));
options.addOption(new Option("s", false, "Show plots"));
- final CommandLineParser parser = new DefaultParser();
+ final CommandLineParser parser = new PosixParser();
CommandLine cl = null;
try {
cl = parser.parse(options, args);
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/SvtConditionsLoader.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/SvtConditionsLoader.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/SvtConditionsLoader.java Wed Apr 27 11:11:32 2016
@@ -8,7 +8,7 @@
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.hps.conditions.api.ConditionsRecord;
import org.hps.conditions.api.TableMetaData;
import org.hps.conditions.database.DatabaseConditionsManager;
@@ -60,7 +60,7 @@
final Options options = setupCommandLineOptions();
// Parse the command line arguments
- final CommandLineParser parser = new DefaultParser();
+ final CommandLineParser parser = new PosixParser();
final CommandLine commandLine;
try {
commandLine = parser.parse(options, args);
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMapping.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMapping.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMapping.java Wed Apr 27 11:11:32 2016
@@ -129,8 +129,8 @@
/**
* Get the side of the sensor (ELECTRON or POSITRON).
*
- * @see ELECTRON
- * @see POSITRON
+ * @see #ELECTRON
+ * @see #POSITRON
* @return sensor side (ELECTRON or POSITRON)
*/
@Field(names = {"side"})
@@ -160,8 +160,8 @@
* Set the side of the sensor (ELECTRON or POSITRON).
*
* @param side the sensor side (ELECTRON or POSITRON)
- * @see {@link #ELECTRON}
- * @see {@link #POSITRON}
+ * @see #ELECTRON
+ * @see #POSITRON
*/
public final void setSide(final String side) {
if (!side.equals(SvtDaqMapping.ELECTRON) && !side.equals(SvtDaqMapping.POSITRON)) {
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/SvtT0Shift.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/SvtT0Shift.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/SvtT0Shift.java Wed Apr 27 11:11:32 2016
@@ -23,7 +23,7 @@
/**
* Get the {@link SvtT0Shift} associated with a given DAQ pair.
*
- * @param DAQ pair for a given sensor
+ * @param pair DAQ pair for a given sensor
* @return the {@link SvtT0Shift} associated with the DAQ pair or null if does not exist
*/
@Override
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtChannel.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtChannel.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtChannel.java Wed Apr 27 11:11:32 2016
@@ -26,7 +26,7 @@
/**
* Find a collection of channels by their DAQ pair assignment.
*
- * @param the DAQ pair (FEB ID and FEB Hybrid ID)
+ * @param pair the DAQ pair (FEB ID and FEB Hybrid ID)
* @return the collection of channels
*/
@Override
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtConditions.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtConditions.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtConditions.java Wed Apr 27 11:11:32 2016
@@ -10,7 +10,7 @@
/**
* This class contains all test run SVT conditions data by readout channel. {@link TestRunSvtChannel} objects from the
- * SVT channel map should be used to lookup the conditions using the {@link #getChannelConstants(TestRunSvtChannel)}
+ * SVT channel map should be used to lookup the conditions using the {@link #getChannelConstants(AbstractSvtChannel)}
* method.
*
* @author Jeremy McCormick, SLAC
Modified: java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtT0Shift.java
=============================================================================
--- java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtT0Shift.java (original)
+++ java/branches/HPSJAVA-409/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtT0Shift.java Wed Apr 27 11:11:32 2016
@@ -22,7 +22,7 @@
/**
* Get the {@link TestRunSvtT0Shift} associated with a given DAQ pair
*
- * @param DAQ pair for a given sensor
+ * @param pair DAQ pair for a given sensor
* @return the {@link TestRunSvtT0Shift} associated with the DAQ pair or null if does not exist
*/
@Override
Modified: java/branches/HPSJAVA-409/crawler/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/crawler/pom.xml (original)
+++ java/branches/HPSJAVA-409/crawler/pom.xml Wed Apr 27 11:11:32 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/crawler/</url>
@@ -17,7 +17,11 @@
<dependencies>
<dependency>
<groupId>org.hps</groupId>
- <artifactId>hps-run-database</artifactId>
+ <artifactId>hps-record-util</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>srs</groupId>
+ <artifactId>org-srs-datacat-client</artifactId>
</dependency>
</dependencies>
</project>
Modified: java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/AidaMetadataReader.java
=============================================================================
--- java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/AidaMetadataReader.java (original)
+++ java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/AidaMetadataReader.java Wed Apr 27 11:11:32 2016
@@ -12,7 +12,7 @@
*
* @author Jeremy McCormick, SLAC
*/
-public class AidaMetadataReader implements FileMetadataReader {
+final class AidaMetadataReader implements FileMetadataReader {
/**
* Get the metadata for a ROOT DQM file.
@@ -22,7 +22,7 @@
@Override
public Map<String, Object> getMetadata(final File file) throws IOException {
final Map<String, Object> metadata = new HashMap<String, Object>();
- final int run = CrawlerFileUtilities.getRunFromFileName(file);
+ final Long run = FileUtilities.getRunFromFileName(file);
metadata.put("runMin", run);
metadata.put("runMax", run);
return metadata;
Modified: java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/CrawlerConfig.java
=============================================================================
--- java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/CrawlerConfig.java (original)
+++ java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/CrawlerConfig.java Wed Apr 27 11:11:32 2016
@@ -11,13 +11,9 @@
import java.util.Set;
import org.hps.conditions.database.ConnectionParameters;
-import org.hps.datacat.client.DatasetFileFormat;
-import org.hps.datacat.client.DatasetSite;
/**
* Full configuration information for the {@link Crawler} class.
- * <p>
- * Method chaining of setters is supported.
*
* @author Jeremy McCormick, SLAC
*/
@@ -41,20 +37,13 @@
/**
* The name of the folder in the data catalog for inserting data (under "/HPS" root folder).
- * <p>
- * Default provided for Eng Run 2015 data.
*/
private String datacatFolder = null;
/**
- * Set whether extraction of metadata from files is enabled.
- */
- private boolean enableMetadata;
-
- /**
- * Set of file formats for filtering files.
- */
- Set<DatasetFileFormat> formats = new HashSet<DatasetFileFormat>();
+ * Set of accepted file formats.
+ */
+ private Set<FileFormat> formats = new HashSet<FileFormat>();
/**
* The maximum depth to crawl.
@@ -69,7 +58,7 @@
/**
* The dataset site for the datacat.
*/
- private DatasetSite site;
+ private Site site = Site.JLAB;
/**
* A timestamp to use for filtering input files on their creation date.
@@ -80,6 +69,21 @@
* A file to use for getting the timestamp date.
*/
private File timestampFile = null;
+
+ /**
+ * Dry run for not actually executing updates.
+ */
+ private boolean dryRun = false;
+
+ /**
+ * Base URL of datacat client.
+ */
+ private String baseUrl = DatacatHelper.DATACAT_URL;
+
+ /**
+ * Set of paths used for filtering files (file's path must match one of these).
+ */
+ private Set<String> paths = new HashSet<String>();
/**
* Get the set of runs that will be accepted for the job.
@@ -94,7 +98,7 @@
* Add the default file formats.
*/
CrawlerConfig addDefaultFileFormats() {
- final List<DatasetFileFormat> defaultFormats = Arrays.asList(DatasetFileFormat.values());
+ final List<FileFormat> defaultFormats = Arrays.asList(FileFormat.values());
this.formats.addAll(defaultFormats);
return this;
}
@@ -104,9 +108,8 @@
*
* @param format the file format
*/
- CrawlerConfig addFileFormat(final DatasetFileFormat format) {
+ void addFileFormat(final FileFormat format) {
this.formats.add(format);
- return this;
}
/**
@@ -123,7 +126,7 @@
*
* @return the data catalog folder
*/
- String datacatFolder() {
+ String folder() {
return this.datacatFolder;
}
@@ -132,25 +135,16 @@
*
* @return the dataset site
*/
- DatasetSite datasetSite() {
+ Site site() {
return this.site;
}
/**
- * Return <code>true</code> if metadata extraction from files is enabled.
- *
- * @return <code>true</code> if metadata extraction is enabled
- */
- boolean enableMetaData() {
- return this.enableMetadata;
- }
-
- /**
* Get the file formats for filtering.
*
* @return the file formats for filtering
*/
- Set<DatasetFileFormat> getFileFormats() {
+ Set<FileFormat> getFileFormats() {
return this.formats;
}
@@ -164,7 +158,7 @@
}
/**
- * Get the root directory for the file search.
+ * Get the root directory in the file catalog.
*
* @return the root directory for the file search
*/
@@ -178,9 +172,8 @@
* @param acceptRuns the list of acceptable run numbers
* @return this object
*/
- CrawlerConfig setAcceptRuns(final Set<Integer> acceptRuns) {
+ void setAcceptRuns(final Set<Integer> acceptRuns) {
this.acceptRuns = acceptRuns;
- return this;
}
/**
@@ -189,9 +182,8 @@
* @param connectionParameters the database connection parameters
* @return this object
*/
- CrawlerConfig setConnection(final ConnectionParameters connectionParameters) {
+ void setConnection(final ConnectionParameters connectionParameters) {
this.connectionParameters = connectionParameters;
- return this;
}
/**
@@ -199,9 +191,8 @@
*
* @param datacatFolder the data catalog folder
*/
- CrawlerConfig setDatacatFolder(final String datacatFolder) {
+ void setDatacatFolder(final String datacatFolder) {
this.datacatFolder = datacatFolder;
- return this;
}
/**
@@ -209,29 +200,28 @@
*
* @return this object
*/
- void setDatasetSite(final DatasetSite site) {
+ void setSite(final Site site) {
this.site = site;
}
-
- /**
- * Set whether metadata extraction is enabled.
- *
- * @param enableMetadata <code>true</code> to enable metadata
- * @return this object
- */
- CrawlerConfig setEnableMetadata(final boolean enableMetadata) {
- this.enableMetadata = enableMetadata;
- return this;
- }
+
+ /**
+ * Enable dry run.
+ *
+ * @param dryRun set to <code>true</code> to enable dry run
+ * @return this object
+ */
+ void setDryRun(boolean dryRun) {
+ this.dryRun = dryRun;
+ }
+
/**
* Set the max depth.
*
* @param maxDepth the max depth
*/
- CrawlerConfig setMaxDepth(final Integer maxDepth) {
+ void setMaxDepth(final Integer maxDepth) {
this.maxDepth = maxDepth;
- return this;
}
/**
@@ -240,9 +230,8 @@
* @param rootDir the root directory for the file search
* @return this object
*/
- CrawlerConfig setRootDir(final File rootDir) {
+ void setRootDir(final File rootDir) {
this.rootDir = rootDir;
- return this;
}
/**
@@ -253,9 +242,8 @@
* @param timestamp the date for filtering files
* @return this object
*/
- CrawlerConfig setTimestamp(final Date timestamp) {
+ void setTimestamp(final Date timestamp) {
this.timestamp = timestamp;
- return this;
}
/**
@@ -267,9 +255,8 @@
* @param timestamp the date string for filtering files
* @return this object
*/
- CrawlerConfig setTimestamp(final String timestampString) throws ParseException {
+ void setTimestamp(final String timestampString) throws ParseException {
TIMESTAMP_FORMAT.parse(timestampString);
- return this;
}
/**
@@ -278,9 +265,8 @@
* @param timestampFile the timestamp file for date filtering
* @return this object
*/
- CrawlerConfig setTimestampFile(final File timestampFile) {
+ void setTimestampFile(final File timestampFile) {
this.timestampFile = timestampFile;
- return this;
}
/**
@@ -302,4 +288,49 @@
File timestampFile() {
return timestampFile;
}
+
+ /**
+ * Returns <code>true</code> if dry run which means no updates will occur.
+ *
+ * @return <code>true</code> if dry run
+ */
+ boolean dryRun() {
+ return this.dryRun;
+ }
+
+ /**
+ * Set the data catalog URL.
+ *
+ * @param baseUrl the data catalog URL
+ */
+ void setDatacatUrl(String baseUrl) {
+ this.baseUrl = baseUrl;
+ }
+
+ /**
+ * Get the data catalog URL.
+ *
+ * @return the data catalog URL
+ */
+ String datacatUrl() {
+ return this.baseUrl;
+ }
+
+ /**
+ * Add a path for filtering files.
+ *
+ * @param path the path for filtering
+ */
+ void addPath(String path) {
+ this.paths.add(path);
+ }
+
+ /**
+ * Get the list of paths for filtering.
+ *
+ * @return the list of paths for filtering
+ */
+ Set<String> paths() {
+ return this.paths;
+ }
}
Modified: java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java
=============================================================================
--- java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java (original)
+++ java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java Wed Apr 27 11:11:32 2016
@@ -1,112 +1,30 @@
package org.hps.crawler;
import java.io.File;
-import java.io.FileFilter;
import java.io.IOException;
import java.nio.file.FileVisitOption;
-import java.nio.file.FileVisitResult;
import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
-import java.util.ArrayList;
import java.util.Date;
import java.util.EnumSet;
-import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
-import java.util.Map;
import java.util.Set;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
-import org.hps.datacat.client.DatacatClient;
-import org.hps.datacat.client.DatacatClientFactory;
-import org.hps.datacat.client.DatasetFileFormat;
+import org.srs.datacat.model.DatasetModel;
/**
* Command line file crawler for populating the data catalog.
*
* @author Jeremy McCormick, SLAC
*/
-public class DatacatCrawler {
-
- /**
- * Visitor which creates a {@link FileSet} from walking a directory tree.
- * <p>
- * Any number of {@link java.io.FileFilter} objects can be registered with this visitor to restrict which files are
- * accepted.
- *
- * @author Jeremy McCormick, SLAC
- */
- final class DatacatFileVisitor extends SimpleFileVisitor<Path> {
-
- /**
- * The run log containing information about files from each run.
- */
- private final FileSet fileSet = new FileSet();
-
- /**
- * A list of file filters to apply.
- */
- private final List<FileFilter> filters = new ArrayList<FileFilter>();
-
- /**
- * Run the filters on the file to tell whether it should be accepted or not.
- *
- * @param file the EVIO file
- * @return <code>true</code> if file should be accepted
- */
- private boolean accept(final File file) {
- boolean accept = true;
- for (final FileFilter filter : this.filters) {
- accept = filter.accept(file);
- if (!accept) {
- break;
- }
- }
- return accept;
- }
-
- /**
- * Add a file filter.
- *
- * @param filter the file filter
- */
- void addFilter(final FileFilter filter) {
- this.filters.add(filter);
- }
-
- /**
- * Get the file set created by visiting the directory tree.
- *
- * @return the file set from visiting the directory tree
- */
- FileSet getFileSet() {
- return this.fileSet;
- }
-
- /**
- * Visit a single file.
- *
- * @param path the file to visit
- * @param attrs the file attributes
- */
- @Override
- public FileVisitResult visitFile(final Path path, final BasicFileAttributes attrs) {
- final File file = path.toFile();
- if (this.accept(file)) {
- final DatasetFileFormat format = DatacatUtilities.getFileFormat(file);
- fileSet.addFile(format, file);
- }
- return FileVisitResult.CONTINUE;
- }
- }
+public final class DatacatCrawler {
/**
* Make a list of available file formats for printing help.
@@ -117,14 +35,14 @@
* Setup the logger.
*/
private static final Logger LOGGER = Logger.getLogger(DatacatCrawler.class.getPackage().getName());
-
+
/**
* Command line options for the crawler.
*/
private static final Options OPTIONS = new Options();
static {
final StringBuffer buffer = new StringBuffer();
- for (final DatasetFileFormat format : DatasetFileFormat.values()) {
+ for (final FileFormat format : FileFormat.values()) {
buffer.append(format.name() + " ");
}
buffer.setLength(buffer.length() - 1);
@@ -135,17 +53,17 @@
* Statically define the command options.
*/
static {
- OPTIONS.addOption("L", "log-level", true, "set the log level (INFO, FINE, etc.)");
OPTIONS.addOption("b", "min-date", true, "min date for a file (example \"2015-03-26 11:28:59\")");
OPTIONS.addOption("d", "directory", true, "root directory to crawl");
OPTIONS.addOption("f", "folder", true, "datacat folder");
OPTIONS.addOption("h", "help", false, "print help and exit (overrides all other arguments)");
OPTIONS.addOption("o", "format", true, "add a file format for filtering: " + AVAILABLE_FORMATS);
- OPTIONS.addOption("m", "metadata", false, "create metadata for datasets");
OPTIONS.addOption("r", "run", true, "add a run number to accept");
OPTIONS.addOption("s", "site", true, "datacat site");
OPTIONS.addOption("t", "timestamp-file", true, "existing or new timestamp file name");
OPTIONS.addOption("x", "max-depth", true, "max depth to crawl");
+ OPTIONS.addOption("D", "dry-run", false, "dry run which will not update the datacat");
+ OPTIONS.addOption("u", "base-url", true, "provide a base URL of the datacat server");
}
/**
@@ -165,33 +83,16 @@
/**
* The options parser.
*/
- private final DefaultParser parser = new DefaultParser();
-
- /**
- * Throw an exception if the path doesn't exist in the data catalog or it is not a folder.
- *
- * @param folder the folder in the datacat
- * @throws RuntimeException if the given path does not exist or it is not a folder
- */
- void checkFolder(final String folder) {
- final DatacatClient datacatClient = new DatacatClientFactory().createClient();
- if (!datacatClient.exists(folder)) {
- throw new RuntimeException("The folder " + folder + " does not exist in the data catalog.");
- }
- if (!datacatClient.isFolder(folder)) {
- throw new RuntimeException("The path " + folder + " is not a folder.");
- }
- }
-
+ private final PosixParser parser = new PosixParser();
+
/**
* Parse command line options.
*
* @param args the command line arguments
* @return this object (for method chaining)
*/
- public DatacatCrawler parse(final String[] args) {
- config = new CrawlerConfig();
-
+ private DatacatCrawler parse(final String[] args) {
+
LOGGER.config("parsing command line options");
this.config = new CrawlerConfig();
@@ -202,13 +103,6 @@
// Print help.
if (cl.hasOption("h") || args.length == 0) {
this.printUsage();
- }
-
- // Log level.
- if (cl.hasOption("L")) {
- final Level level = Level.parse(cl.getOptionValue("L"));
- LOGGER.config("setting log level to " + level);
- LOGGER.setLevel(level);
}
// Root directory for file crawling.
@@ -221,7 +115,7 @@
throw new IllegalArgumentException("The specified path is not a directory.");
}
config.setRootDir(rootDir);
- LOGGER.config("root dir set to " + config.rootDir());
+ LOGGER.config("root dir " + config.rootDir());
}
// Timestamp file for date filtering.
@@ -278,9 +172,9 @@
// Configure enabled file formats.
if (cl.hasOption("o")) {
for (final String arg : cl.getOptionValues("o")) {
- DatasetFileFormat format = null;
+ FileFormat format = null;
try {
- format = DatasetFileFormat.valueOf(arg);
+ format = FileFormat.valueOf(arg);
} catch (IllegalArgumentException | NullPointerException e) {
throw new IllegalArgumentException("The format " + arg + " is not valid.", e);
}
@@ -288,19 +182,22 @@
this.config.addFileFormat(format);
}
} else {
- throw new RuntimeException("The -o argument with data format must be supplied at least once.");
- }
-
- // Enable metadata extraction from files.
- if (cl.hasOption("m")) {
- config.setEnableMetadata(true);
- LOGGER.config("metadata extraction enabled");
+ for (FileFormat format : FileFormat.values()) {
+ this.config.addFileFormat(format);
+ LOGGER.config("adding default format " + format);
+ }
+ }
+
+ // Enable the default set of file formats.
+ if (this.config.getFileFormats().isEmpty()) {
+ LOGGER.config("enabling default file formats");
+ this.config.addDefaultFileFormats();
}
// Datacat folder.
if (cl.hasOption("f")) {
config.setDatacatFolder(cl.getOptionValue("f"));
- LOGGER.config("set datacat folder to " + config.datacatFolder());
+ LOGGER.config("set datacat folder to " + config.folder());
} else {
throw new RuntimeException("The -f argument with the datacat folder is required.");
}
@@ -313,20 +210,43 @@
}
config.setAcceptRuns(acceptRuns);
}
+
+ // Dry run.
+ if (cl.hasOption("D")) {
+ config.setDryRun(true);
+ }
+
+ // List of paths.
+ if (!cl.getArgList().isEmpty()) {
+ for (String arg : cl.getArgList()) {
+ config.addPath(arg);
+ }
+ }
+
+ // Dataset site (defaults to JLAB).
+ Site site = Site.JLAB;
+ if (cl.hasOption("s")) {
+ site = Site.valueOf(cl.getOptionValue("s"));
+ }
+ LOGGER.config("dataset site " + site);
+ config.setSite(site);
+
+ // Data catalog URL.
+ if (cl.hasOption("u")) {
+ config.setDatacatUrl(cl.getOptionValue("u"));
+ LOGGER.config("datacat URL " + config.datacatUrl());
+ }
} catch (final ParseException e) {
throw new RuntimeException("Error parsing options.", e);
}
- // Check the datacat folder which must already exist.
- this.checkFolder(config.datacatFolder());
-
// Check that there is at least one file format enabled for filtering.
if (this.config.getFileFormats().isEmpty()) {
- throw new IllegalStateException("At least one file format must be provided with the -f switch.");
- }
-
- LOGGER.info("done parsing command line options");
+ throw new IllegalStateException("At least one file format must be provided with the -o switch.");
+ }
+
+ LOGGER.info("Done parsing command line options.");
return this;
}
@@ -336,72 +256,65 @@
*/
private void printUsage() {
final HelpFormatter help = new HelpFormatter();
- help.printHelp(70, "DatacatCrawler [options]", "", OPTIONS, "");
+ help.printHelp(70, "DatacatCrawler [options] path ...", "", OPTIONS, "");
System.exit(0);
}
/**
* Run the crawler job.
*/
- void run() {
-
+ private void run() {
+
// Create the file visitor for crawling the root directory with the given date filter.
- final DatacatFileVisitor visitor = new DatacatFileVisitor();
+ final CrawlerFileVisitor visitor = new CrawlerFileVisitor();
// Add date filter if timestamp is supplied.
if (config.timestamp() != null) {
visitor.addFilter(new DateFileFilter(config.timestamp()));
+ LOGGER.config("added timestamp filter " + config.timestamp());
+ }
+
+ // Add path filter.
+ if (!config.paths().isEmpty()) {
+ visitor.addFilter(new PathFilter(config.paths()));
+ StringBuffer sb = new StringBuffer();
+ for (String path : config.paths()) {
+ sb.append(path + ":");
+ }
+ sb.setLength(sb.length() - 1);
+ LOGGER.config("added paths " + sb.toString());
}
// Add file format filter.
- for (final DatasetFileFormat fileFormat : config.getFileFormats()) {
- LOGGER.info("adding file format filter for " + fileFormat.name());
- }
visitor.addFilter(new FileFormatFilter(config.getFileFormats()));
- // Run number filter.
+ // Add run number filter.
if (!config.acceptRuns().isEmpty()) {
visitor.addFilter(new RunFilter(config.acceptRuns()));
}
- // Walk the file tree using the visitor.
+ // Walk the file tree and get list of files.
this.walk(visitor);
-
- // Update the data catalog.
- this.updateDatacat(visitor.getFileSet());
- }
-
- /**
- * Update the data catalog.
- *
- * @param runMap the map of run information including the EVIO file list
- */
- private void updateDatacat(final FileSet fileSet) {
- final DatacatClient datacatClient = new DatacatClientFactory().createClient();
- for (final DatasetFileFormat fileFormat : config.getFileFormats()) {
- LOGGER.info("adding files to datacat with format " + fileFormat.name());
- for (final File file : fileSet.get(fileFormat)) {
-
- LOGGER.info("adding file " + file.getAbsolutePath() + " to datacat");
-
- // Create metadata if this is enabled (takes awhile).
- Map<String, Object> metadata = new HashMap<String, Object>();
- if (config.enableMetaData()) {
- metadata = DatacatUtilities.createMetadata(file);
- }
-
- // Register file in the catalog.
- DatacatUtilities.addFile(datacatClient, config.datacatFolder(), file, metadata);
- }
- }
- }
-
- /**
- * Walk the directory tree to find EVIO files for the runs that are being processed in the job.
+
+ // Insert datasets if files were found.
+ if (!visitor.getFiles().isEmpty()) {
+ List<DatasetModel> datasets = DatacatHelper.createDatasets(visitor.getFiles(), config.folder(), config.site().toString());
+ LOGGER.info("built " + datasets.size() + " datasets");
+ DatacatHelper.addDatasets(datasets, config.folder(), config.datacatUrl());
+ LOGGER.info("added datasets to datacat");
+ } else {
+ LOGGER.warning("No files were found by the crawler.");
+ }
+
+ LOGGER.info("Done!");
+ }
+
+ /**
+ * Walk the directory tree to find files for the runs that are being processed in the job.
*
* @param visitor the file visitor
*/
- private void walk(final DatacatFileVisitor visitor) {
+ private void walk(final CrawlerFileVisitor visitor) {
try {
// Walk the file tree from the root directory.
final EnumSet<FileVisitOption> options = EnumSet.noneOf(FileVisitOption.class);
Modified: java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java
=============================================================================
--- java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java (original)
+++ java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java Wed Apr 27 11:11:32 2016
@@ -2,147 +2,330 @@
import java.io.File;
import java.io.IOException;
-import java.util.Date;
-import java.util.HashMap;
+import java.math.RoundingMode;
+import java.text.DecimalFormat;
+import java.util.LinkedHashMap;
import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.logging.Level;
import java.util.logging.Logger;
import org.hps.record.evio.EventTagConstant;
import org.hps.record.evio.EvioEventUtilities;
import org.hps.record.evio.EvioFileUtilities;
+import org.hps.record.triggerbank.AbstractIntData.IntBankDefinition;
+import org.hps.record.triggerbank.HeadBankData;
+import org.hps.record.triggerbank.TiTimeOffsetEvioProcessor;
+import org.hps.record.triggerbank.TriggerType;
+import org.jlab.coda.jevio.BaseStructure;
import org.jlab.coda.jevio.EvioEvent;
import org.jlab.coda.jevio.EvioException;
import org.jlab.coda.jevio.EvioReader;
/**
- * Reads metadata from EVIO files.
- *
+ * Creates detailed metadata for the datacat from an EVIO input file.
+ *
* @author Jeremy McCormick, SLAC
*/
-public class EvioMetadataReader implements FileMetadataReader {
+final class EvioMetadataReader implements FileMetadataReader {
/**
- * Initialize the logger.
+ * Initialize the package logger.
*/
private static Logger LOGGER = Logger.getLogger(EvioMetadataReader.class.getPackage().getName());
/**
+ * Head bank definition.
+ */
+ private static IntBankDefinition HEAD_BANK = new IntBankDefinition(HeadBankData.class, new int[] {0x2e, 0xe10f});
+
+ /**
* Get the EVIO file metadata.
- *
+ *
* @param file the EVIO file
* @return the metadata map of key and value pairs
*/
@Override
public Map<String, Object> getMetadata(final File file) throws IOException {
-
- Date startDate = null;
- Date endDate = null;
- int badEventCount = 0;
- int eventCount = 0;
- int byteCount = 0;
- boolean hasPrestart = false;
- boolean hasEnd = false;
- int[] eventIdData = null;
- Integer run = null;
- Integer endEvent = null;
- Integer startEvent = null;
- Long lastTimestamp = null;
+
+ long totalEvents = 0;
+ int physicsEvents = 0;
+ int badEvents = 0;
+ int blinded = 0;
+ Long run = null;
+ Integer firstHeadTimestamp = null;
+ Integer lastHeadTimestamp = null;
+ Integer lastPhysicsEvent = null;
+ Integer firstPhysicsEvent = null;
+ Integer prestartTimestamp = null;
+ Integer endTimestamp = null;
+ Integer goTimestamp = null;
+ Double triggerRate = null;
+
+ // Processor for calculating TI time offsets.
+ TiTimeOffsetEvioProcessor tiProcessor = new TiTimeOffsetEvioProcessor();
+
+ // Create map for counting trigger types.
+ Map<TriggerType, Integer> triggerCounts = new LinkedHashMap<TriggerType, Integer>();
+ for (TriggerType triggerType : TriggerType.values()) {
+ triggerCounts.put(triggerType, 0);
+ }
+
+ // Get the file number from the name.
+ final int fileNumber = EvioFileUtilities.getSequenceFromName(file);
+
+ // File numbers indivisible by 10 are blinded (Eng Run 2015 scheme).
+ if (!(fileNumber % 10 == 0)) {
+ blinded = 1;
+ }
+
+ // Get file size.
+ long size = 0;
+ File cacheFile = file;
+ if (FileUtilities.isMssFile(file)) {
+ cacheFile = FileUtilities.getCachedFile(file);
+ }
+ size = cacheFile.length();
+
+ // Compute MD5 checksum string.
+ String checksum = FileUtilities.createMD5Checksum(cacheFile);
EvioReader evioReader = null;
try {
- evioReader = EvioFileUtilities.open(file, false);
+ // Open file in sequential mode.
+ evioReader = EvioFileUtilities.open(file, true);
+ EvioEvent evioEvent = null;
+
+ // Event read loop.
+ eventLoop: while (true) {
+ try {
+ // Parse next event.
+ evioEvent = evioReader.parseNextEvent();
+
+ // End of file.
+ if (evioEvent == null) {
+ LOGGER.fine("EOF after " + totalEvents + " events.");
+ break eventLoop;
+ }
+
+ // Increment event count (doesn't count events that can't be parsed).
+ ++totalEvents;
+
+ // Debug print event number and tag.
+ LOGGER.finest("Parsed event " + evioEvent.getEventNumber() + " with tag 0x"
+ + String.format("%08x", evioEvent.getHeader().getTag()));
+
+ // Get head bank.
+ BaseStructure headBank = HEAD_BANK.findBank(evioEvent);
+
+ // Current timestamp.
+ int thisTimestamp = 0;
+
+ // Process head bank if not null.
+ if (headBank != null) {
+ if (headBank != null) {
+ final int[] headBankData = headBank.getIntData();
+ thisTimestamp = headBankData[3];
+ if (thisTimestamp != 0) {
+ // First header timestamp.
+ if (firstHeadTimestamp == null) {
+ firstHeadTimestamp = thisTimestamp;
+ LOGGER.finer("First head timestamp " + firstHeadTimestamp + " from event "
+ + evioEvent.getEventNumber());
+ }
+
+ // Last header timestamp.
+ lastHeadTimestamp = thisTimestamp;
+ }
+
+ // Run number.
+ if (run == null) {
+ if (headBankData[1] != 0) {
+ run = (long) headBankData[1];
+ LOGGER.finer("Run number " + run + " from event " + evioEvent.getEventNumber());
+ }
+ }
+ }
+ }
+
+ if (EvioEventUtilities.isPhysicsEvent(evioEvent)) {
+
+ final int[] eventIdData = EvioEventUtilities.getEventIdData(evioEvent);
+
+ if (eventIdData != null) {
+
+ // Set the last physics event.
+ lastPhysicsEvent = eventIdData[0];
+
+ // Set the first physics event.
+ if (firstPhysicsEvent == null) {
+ firstPhysicsEvent = eventIdData[0];
+ LOGGER.finer("Set first physics event " + firstPhysicsEvent);
+ }
+ }
+
+ ++physicsEvents;
+ } else if (EvioEventUtilities.isControlEvent(evioEvent)) {
+ int[] controlData = EvioEventUtilities.getControlEventData(evioEvent);
+ if (controlData[0] != 0) {
+ if (EventTagConstant.PRESTART.isEventTag(evioEvent)) {
+ prestartTimestamp = controlData[0];
+ }
+ if (EventTagConstant.GO.isEventTag(evioEvent)) {
+ goTimestamp = controlData[0];
+ }
+ if (EventTagConstant.END.isEventTag(evioEvent)) {
+ endTimestamp = controlData[0];
+ }
+ }
+ }
+
+ // Count trigger types for this event.
+ Set<TriggerType> triggerTypes = TriggerType.getTriggerTypes(evioEvent);
+ for (TriggerType mask : triggerTypes) {
+ int count = triggerCounts.get(mask) + 1;
+ triggerCounts.put(mask, count);
+ LOGGER.finest("Incremented " + mask.name() + " to " + count);
+ }
+
+ // Activate TI time offset processor.
+ tiProcessor.process(evioEvent);
+
+ } catch (Exception e) {
+ // Trap all event processing errors.
+ badEvents++;
+ LOGGER.warning("Error processing EVIO event " + evioEvent.getEventNumber());
+ }
+ }
} catch (final EvioException e) {
- throw new IOException(e);
- }
-
- final int fileNumber = EvioFileUtilities.getSequenceFromName(file);
-
- EvioEvent evioEvent = null;
-
- while (true) {
- try {
- evioEvent = evioReader.parseNextEvent();
- } catch (IOException | EvioException e) {
- ++badEventCount;
- continue;
+ // Error reading the EVIO file.
+ throw new IOException("Error reading EVIO file.", e);
+ } finally {
+ // Close the reader.
+ if (evioReader != null) {
+ try {
+ evioReader.close();
+ } catch (IOException e) {
+ LOGGER.log(Level.WARNING, "Error closing EVIO reader", e);
+ }
}
- if (evioEvent == null) {
- break;
+ }
+
+ LOGGER.info("Done reading " + totalEvents + " events from " + file.getPath());
+
+ // Rough trigger rate calculation.
+ try {
+ if (firstHeadTimestamp != null && lastHeadTimestamp != null && totalEvents > 0
+ && (firstHeadTimestamp - lastHeadTimestamp != 0)) {
+ triggerRate = calculateTriggerRate(firstHeadTimestamp, lastHeadTimestamp, totalEvents);
+ } else {
+ LOGGER.log(Level.WARNING, "Missing information for calculating trigger rate.");
}
- byteCount += evioEvent.getTotalBytes();
- if (EventTagConstant.PRESTART.equals(evioEvent)) {
- LOGGER.info("found PRESTART");
- hasPrestart = true;
- final int[] controlEventData = EvioEventUtilities.getControlEventData(evioEvent);
- final long timestamp = controlEventData[0] * 1000L;
- startDate = new Date(timestamp);
- LOGGER.info("set start date to " + startDate + " from PRESTART");
- if (run == null) {
- run = controlEventData[1];
- LOGGER.info("set run to " + run);
- }
- } else if (EventTagConstant.END.equals(evioEvent)) {
- LOGGER.info("found END event");
- hasEnd = true;
- final int[] controlEventData = EvioEventUtilities.getControlEventData(evioEvent);
- final long timestamp = controlEventData[0] * 1000L;
- endDate = new Date(timestamp);
- LOGGER.info("set end date to " + endDate);
- if (run == null) {
- run = controlEventData[1];
- LOGGER.info("set run to " + run);
- }
- } else if (EvioEventUtilities.isPhysicsEvent(evioEvent)) {
- final int[] headBankData = EvioEventUtilities.getHeadBankData(evioEvent);
- if (startDate == null) {
- if (headBankData[3] != 0) {
- startDate = new Date(headBankData[3] * 1000L);
- LOGGER.info("set start date to " + startDate + " from physics event");
- }
- }
- if (run == null) {
- run = headBankData[1];
- LOGGER.info("set run to " + run + " from physics event");
- }
- eventIdData = EvioEventUtilities.getEventIdData(evioEvent);
- if (startEvent == null) {
- startEvent = eventIdData[0];
- LOGGER.info("set start event " + startEvent);
- }
- if (headBankData[3] != 0) {
- lastTimestamp = headBankData[3] * 1000L;
- }
- ++eventCount;
+ } catch (Exception e) {
+ LOGGER.log(Level.WARNING, "Error calculating the trigger rate.", e);
+ }
+
+ // Create and fill the metadata map.
+ final Map<String, Object> metadataMap = new LinkedHashMap<String, Object>();
+
+ try {
+ if (run == null) {
+ run = new Long(EvioFileUtilities.getRunFromName(file));
}
- }
-
- // Set end date from last valid timestamp.
- if (endDate == null) {
- endDate = new Date(lastTimestamp);
- LOGGER.info("set end date to " + endDate + " from last timestamp " + lastTimestamp);
- }
-
- // Set end event number.
- if (eventIdData != null) {
- endEvent = eventIdData[0];
- LOGGER.info("set end event " + endEvent);
- }
-
- final Map<String, Object> metaDataMap = new HashMap<String, Object>();
-
- metaDataMap.put("runMin", run);
- metaDataMap.put("runMax", run);
- metaDataMap.put("eventCount", eventCount);
- metaDataMap.put("size", byteCount);
- metaDataMap.put("fileNumber", fileNumber);
- metaDataMap.put("badEventCount", badEventCount);
- metaDataMap.put("endTimestamp", endDate.getTime());
- metaDataMap.put("startTimestamp", startDate.getTime());
- metaDataMap.put("startEvent", startEvent);
- metaDataMap.put("endEvent", endEvent);
- metaDataMap.put("hasEnd", hasEnd ? 1 : 0);
- metaDataMap.put("hasPrestart", hasPrestart ? 1 : 0);
-
- return metaDataMap;
+ } catch (Exception e) {
+ throw new RuntimeException("Failed to get run number from event data or file name.", e);
+ }
+
+ // Set locationExtras metadata.
+ metadataMap.put("runMin", run);
+ metadataMap.put("runMax", run);
+ metadataMap.put("eventCount", totalEvents);
+ metadataMap.put("size", size);
+ metadataMap.put("checksum", checksum);
+
+ // File sequence number.
+ metadataMap.put("FILE", fileNumber);
+
+ // Blinded flag.
+ metadataMap.put("BLINDED", blinded);
+
+ // First and last timestamps which may come from control or physics events.
+ if (firstHeadTimestamp != null) {
+ metadataMap.put("FIRST_HEAD_TIMESTAMP", firstHeadTimestamp);
+ }
+
+ if (lastHeadTimestamp != null) {
+ metadataMap.put("LAST_HEAD_TIMESTAMP", lastHeadTimestamp);
+ }
+
+ // First and last physics event numbers.
+ if (firstPhysicsEvent != null) {
+ metadataMap.put("FIRST_PHYSICS_EVENT", firstPhysicsEvent);
+ }
+
+ if (lastPhysicsEvent != null) {
+ metadataMap.put("LAST_PHYSICS_EVENT", lastPhysicsEvent);
+ }
+
+ // Timestamps which are only set if the corresponding control events were found in the file.
+ if (prestartTimestamp != null) {
+ metadataMap.put("PRESTART_TIMESTAMP", prestartTimestamp);
+ }
+ if (endTimestamp != null) {
+ metadataMap.put("END_TIMESTAMP", endTimestamp);
+ }
+ if (goTimestamp != null) {
+ metadataMap.put("GO_TIMESTAMP", goTimestamp);
+ }
+
+ // TI times and offset.
+ metadataMap.put("TI_TIME_MIN_OFFSET", new Long(tiProcessor.getMinOffset()).toString());
+ metadataMap.put("TI_TIME_MAX_OFFSET", new Long(tiProcessor.getMaxOffset()).toString());
+ metadataMap.put("TI_TIME_N_OUTLIERS", tiProcessor.getNumOutliers());
+
+ // Event counts.
+ metadataMap.put("BAD_EVENTS", badEvents);
+
+ // Physics event count.
+ metadataMap.put("PHYSICS_EVENTS", physicsEvents);
+
+ // Rough trigger rate.
+ if (triggerRate != null && !Double.isInfinite(triggerRate) && !Double.isNaN(triggerRate)) {
+ DecimalFormat df = new DecimalFormat("#.##");
+ df.setRoundingMode(RoundingMode.CEILING);
+ LOGGER.info("Setting trigger rate to " + triggerRate + " Hz.");
+ metadataMap.put("TRIGGER_RATE", Double.parseDouble(df.format(triggerRate)));
+ } else {
+ LOGGER.warning("Failed to calculate trigger rate.");
+ }
+
+ // Trigger type counts.
+ for (Entry<TriggerType, Integer> entry : triggerCounts.entrySet()) {
+ metadataMap.put(entry.getKey().name(), entry.getValue());
+ }
+
+ // Print the file metadata to log.
+ StringBuffer sb = new StringBuffer();
+ sb.append('\n');
+ for (Entry<String, Object> entry : metadataMap.entrySet()) {
+ sb.append(" " + entry.getKey() + " = " + entry.getValue() + '\n');
+ }
+ LOGGER.info("File metadata ..." + '\n' + sb.toString());
+
+ // Return the completed metadata map.
+ return metadataMap;
+ }
+
+ /**
+ * Calculate the trigger rate in Hz.
+ *
+ * @param firstTimestamp the first physics timestamp
+ * @param lastTimestamp the last physics timestamp
+ * @param events the number of physics events
+ * @return the trigger rate calculation in KHz
+ */
+ private double calculateTriggerRate(Integer firstTimestamp, Integer lastTimestamp, long events) {
+ return ((double) events / ((double) lastTimestamp - (double) firstTimestamp));
}
}
Modified: java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/FileFormatFilter.java
=============================================================================
--- java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/FileFormatFilter.java (original)
+++ java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/FileFormatFilter.java Wed Apr 27 11:11:32 2016
@@ -3,9 +3,6 @@
import java.io.File;
import java.io.FileFilter;
import java.util.Set;
-import java.util.logging.Logger;
-
-import org.hps.datacat.client.DatasetFileFormat;
/**
* Filter files on their format.
@@ -17,21 +14,16 @@
public class FileFormatFilter implements FileFilter {
/**
- * Initialize the logger.
- */
- private static final Logger LOGGER = Logger.getLogger(FileFormatFilter.class.getPackage().getName());
-
- /**
* The file format.
*/
- private final Set<DatasetFileFormat> formats;
+ private final Set<FileFormat> formats;
/**
* Create a new filter with the given format.
*
* @param format the file format
*/
- FileFormatFilter(final Set<DatasetFileFormat> formats) {
+ FileFormatFilter(final Set<FileFormat> formats) {
if (formats == null) {
throw new IllegalArgumentException("The formats collection is null.");
}
@@ -48,13 +40,10 @@
*/
@Override
public boolean accept(final File pathname) {
- LOGGER.info(pathname.getPath());
- final DatasetFileFormat fileFormat = DatacatUtilities.getFileFormat(pathname);
+ final FileFormat fileFormat = DatacatHelper.getFileFormat(pathname);
if (fileFormat != null) {
- LOGGER.info("file " + pathname.getPath() + " has format " + fileFormat.name());
return formats.contains(fileFormat);
} else {
- LOGGER.info("rejected file " + pathname.getPath() + " with unknown format");
return false;
}
}
Modified: java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/FileMetadataReader.java
=============================================================================
--- java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/FileMetadataReader.java (original)
+++ java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/FileMetadataReader.java Wed Apr 27 11:11:32 2016
@@ -4,8 +4,19 @@
import java.io.IOException;
import java.util.Map;
-
+/**
+ * Interface for reading metadata for the datacat from files.
+ *
+ * @author Jeremy McCormick, SLAC
+ */
public interface FileMetadataReader {
- public Map<String, Object> getMetadata(File file) throws IOException;
+ /**
+ * Create a metadata map with keys and values from the contents of a file.
+ *
+ * @param file the input file from which to extract metadata
+ * @return the metadata map of field names to values
+ * @throws IOException if there is an error reading the file
+ */
+ Map<String, Object> getMetadata(File file) throws IOException;
}
Modified: java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/RootDqmMetadataReader.java
=============================================================================
--- java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/RootDqmMetadataReader.java (original)
+++ java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/RootDqmMetadataReader.java Wed Apr 27 11:11:32 2016
@@ -22,7 +22,7 @@
@Override
public Map<String, Object> getMetadata(final File file) throws IOException {
final Map<String, Object> metadata = new HashMap<String, Object>();
- final int run = CrawlerFileUtilities.getRunFromFileName(file);
+ final Long run = FileUtilities.getRunFromFileName(file);
metadata.put("runMin", run);
metadata.put("runMax", run);
return metadata;
Modified: java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/RootDstMetadataReader.java
=============================================================================
--- java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/RootDstMetadataReader.java (original)
+++ java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/RootDstMetadataReader.java Wed Apr 27 11:11:32 2016
@@ -1,10 +1,10 @@
package org.hps.crawler;
-import hep.io.root.RootClassNotFound;
-import hep.io.root.RootFileReader;
-import hep.io.root.interfaces.TLeafElement;
-import hep.io.root.interfaces.TObjArray;
-import hep.io.root.interfaces.TTree;
+//import hep.io.root.RootClassNotFound;
+//import hep.io.root.RootFileReader;
+//import hep.io.root.interfaces.TLeafElement;
+//import hep.io.root.interfaces.TObjArray;
+//import hep.io.root.interfaces.TTree;
import java.io.File;
import java.io.IOException;
@@ -28,6 +28,10 @@
@Override
public Map<String, Object> getMetadata(final File file) throws IOException {
final Map<String, Object> metadata = new HashMap<String, Object>();
+ Long run = FileUtilities.getRunFromFileName(file);
+ metadata.put("runMin", run);
+ metadata.put("runMax", run);
+ /*
RootFileReader rootReader = null;
long eventCount = 0;
int runMin = 0;
@@ -60,6 +64,7 @@
metadata.put("runMin", runMin);
metadata.put("runMax", runMax);
metadata.put("size", size);
+ */
return metadata;
}
}
Modified: java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/RunFilter.java
=============================================================================
--- java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/RunFilter.java (original)
+++ java/branches/HPSJAVA-409/crawler/src/main/java/org/hps/crawler/RunFilter.java Wed Apr 27 11:11:32 2016
@@ -3,8 +3,6 @@
import java.io.File;
import java.io.FileFilter;
import java.util.Set;
-
-import org.hps.record.evio.EvioFileUtilities;
/**
* A filter which rejects files with run numbers not in a specified set.
@@ -25,7 +23,7 @@
*/
RunFilter(final Set<Integer> acceptRuns) {
if (acceptRuns.isEmpty()) {
- throw new IllegalArgumentException("the acceptRuns collection is empty");
+ throw new IllegalArgumentException("The acceptRuns collection is empty.");
}
this.acceptRuns = acceptRuns;
}
@@ -38,6 +36,11 @@
*/
@Override
public boolean accept(final File file) {
- return this.acceptRuns.contains(EvioFileUtilities.getRunFromName(file));
+ try {
+ int run = Integer.parseInt(file.getName().substring(5, 10));
+ return this.acceptRuns.contains(run);
+ } catch (Exception e) {
+ return false;
+ }
}
}
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-3/compact.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-3/compact.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-3/compact.xml Wed Apr 27 11:11:32 2016
@@ -9,8 +9,8 @@
1) float L2_tu, 2) float L2_tu, L4_tu, L5_tu
2nd: round
1) float L2_tu, L4_tu, L5_tu
- 3rd: round
- 1) float L2_tu_rw, L4_tu_rw, L5_tu_rw
+ 3rd: round
+ 1) float L2_tu_rw, L4_tu_rw, L5_tu_rw
</comment>
</info>
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-4-1/compact.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-4-1/compact.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-4-1/compact.xml Wed Apr 27 11:11:32 2016
@@ -9,8 +9,8 @@
1) float L2_tu, 2) float L2_tu, L4_tu, L5_tu
2nd: round
1) float L2_tu, L4_tu, L5_tu
- 3rd: round
- 1) float L2_tu_rw, L4_tu_rw, L5_tu_rw
+ 3rd: round
+ 1) float L2_tu_rw, L4_tu_rw, L5_tu_rw
4th: round start with v1-3
L3_L4_L5_tu_rw_THEN_L2_tu_rw_THEN_L2_L3_L4_tu_rw_THEN_L2_L3_L5_tu_rw_THEN_L2_L3_L4_L5_excl_L3ST_tu_L4Sb_tu_THEN_L2_L4_L5_tu_rw
</comment>
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-4-1/compact_millepede-milleBinaryISN_hps_005772.evio.0.gbl-L3_L4_L5_tu_rw_THEN_L2_tu_rw_THEN_L2_L3_L4_tu_rw_THEN_L2_L3_L5_tu_rw_THEN_L2_L3_L4_L5_excl_L3ST_tu_L4Sb_tu_THEN_L2_L4_L5_tu_rw.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-4-1/compact_millepede-milleBinaryISN_hps_005772.evio.0.gbl-L3_L4_L5_tu_rw_THEN_L2_tu_rw_THEN_L2_L3_L4_tu_rw_THEN_L2_L3_L5_tu_rw_THEN_L2_L3_L4_L5_excl_L3ST_tu_L4Sb_tu_THEN_L2_L4_L5_tu_rw.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-4-1/compact_millepede-milleBinaryISN_hps_005772.evio.0.gbl-L3_L4_L5_tu_rw_THEN_L2_tu_rw_THEN_L2_L3_L4_tu_rw_THEN_L2_L3_L5_tu_rw_THEN_L2_L3_L4_L5_excl_L3ST_tu_L4Sb_tu_THEN_L2_L4_L5_tu_rw.xml Wed Apr 27 11:11:32 2016
@@ -9,8 +9,8 @@
1) float L2_tu, 2) float L2_tu, L4_tu, L5_tu
2nd: round
1) float L2_tu, L4_tu, L5_tu
- 3rd: round
- 1) float L2_tu_rw, L4_tu_rw, L5_tu_rw
+ 3rd: round
+ 1) float L2_tu_rw, L4_tu_rw, L5_tu_rw
</comment>
</info>
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-4/compact.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-4/compact.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-4/compact.xml Wed Apr 27 11:11:32 2016
@@ -9,8 +9,8 @@
1) float L2_tu, 2) float L2_tu, L4_tu, L5_tu
2nd: round
1) float L2_tu, L4_tu, L5_tu
- 3rd: round
- 1) float L2_tu_rw, L4_tu_rw, L5_tu_rw
+ 3rd: round
+ 1) float L2_tu_rw, L4_tu_rw, L5_tu_rw
4th: round start with v1-3
1) float L2_tu_rw, L3_tu_rw, L4_tu_rw
</comment>
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-4/compact_millepede-milleBinaryISN_hps_005772.evio.0.gbl-L2-3-4_tu_rw.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-4/compact_millepede-milleBinaryISN_hps_005772.evio.0.gbl-L2-3-4_tu_rw.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-4/compact_millepede-milleBinaryISN_hps_005772.evio.0.gbl-L2-3-4_tu_rw.xml Wed Apr 27 11:11:32 2016
@@ -9,8 +9,8 @@
1) float L2_tu, 2) float L2_tu, L4_tu, L5_tu
2nd: round
1) float L2_tu, L4_tu, L5_tu
- 3rd: round
- 1) float L2_tu_rw, L4_tu_rw, L5_tu_rw
+ 3rd: round
+ 1) float L2_tu_rw, L4_tu_rw, L5_tu_rw
</comment>
</info>
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-5-1/compact_millepede-milleBinaryISN_hps_005772.evio.0.gbl-L3_L4_L5_tu_rw_THEN_L2_tu_rw_THEN_L2_L3_L4_tu_rw_THEN_L2_L3_L5_tu_rw_THEN_L2_L3_L4_L5_excl_L3ST_tu_L4Sb_tu_THEN_L2_L4_L5_tu_rw.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-5-1/compact_millepede-milleBinaryISN_hps_005772.evio.0.gbl-L3_L4_L5_tu_rw_THEN_L2_tu_rw_THEN_L2_L3_L4_tu_rw_THEN_L2_L3_L5_tu_rw_THEN_L2_L3_L4_L5_excl_L3ST_tu_L4Sb_tu_THEN_L2_L4_L5_tu_rw.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-5-1/compact_millepede-milleBinaryISN_hps_005772.evio.0.gbl-L3_L4_L5_tu_rw_THEN_L2_tu_rw_THEN_L2_L3_L4_tu_rw_THEN_L2_L3_L5_tu_rw_THEN_L2_L3_L4_L5_excl_L3ST_tu_L4Sb_tu_THEN_L2_L4_L5_tu_rw.xml Wed Apr 27 11:11:32 2016
@@ -9,8 +9,8 @@
1) float L2_tu, 2) float L2_tu, L4_tu, L5_tu
2nd: round
1) float L2_tu, L4_tu, L5_tu
- 3rd: round
- 1) float L2_tu_rw, L4_tu_rw, L5_tu_rw
+ 3rd: round
+ 1) float L2_tu_rw, L4_tu_rw, L5_tu_rw
</comment>
</info>
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-5-2/compact_millepede-milleBinaryISN_hps_005772.evio.0.gbl-HPS-EngRun2015-Nominal-v1-4-1-100k-L456_L123_L234_L345_L123_L456_tu_rwIter0Iter1Iter2Iter3Iter4Iter5.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-5-2/compact_millepede-milleBinaryISN_hps_005772.evio.0.gbl-HPS-EngRun2015-Nominal-v1-4-1-100k-L456_L123_L234_L345_L123_L456_tu_rwIter0Iter1Iter2Iter3Iter4Iter5.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPS-EngRun2015-Nominal-v1-5-2/compact_millepede-milleBinaryISN_hps_005772.evio.0.gbl-HPS-EngRun2015-Nominal-v1-4-1-100k-L456_L123_L234_L345_L123_L456_tu_rwIter0Iter1Iter2Iter3Iter4Iter5.xml Wed Apr 27 11:11:32 2016
@@ -9,8 +9,8 @@
1) float L2_tu, 2) float L2_tu, L4_tu, L5_tu
2nd: round
1) float L2_tu, L4_tu, L5_tu
- 3rd: round
- 1) float L2_tu_rw, L4_tu_rw, L5_tu_rw
+ 3rd: round
+ 1) float L2_tu_rw, L4_tu_rw, L5_tu_rw
4th: round start with v1-3
L3_L4_L5_tu_rw_THEN_L2_tu_rw_THEN_L2_L3_L4_tu_rw_THEN_L2_L3_L5_tu_rw_THEN_L2_L3_L4_L5_excl_L3ST_tu_L4Sb_tu_THEN_L2_L4_L5_tu_rw
</comment>
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPS-Proposal2014-v3-2pt2-0zOffset/compact.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPS-Proposal2014-v3-2pt2-0zOffset/compact.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPS-Proposal2014-v3-2pt2-0zOffset/compact.xml Wed Apr 27 11:11:32 2016
@@ -305,7 +305,7 @@
</layer>
<layer id="2">
<module_placement name="TestRunModuleFieldDef" id="0" x="0" y="0" z="endOfFieldZ" rx="0" ry="0" rz="-PI/2"/>
- </layer>
+ </layer>
</detector>
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v5/compact.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v5/compact.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v5/compact.xml Wed Apr 27 11:11:32 2016
@@ -488,7 +488,7 @@
</define>
<materials>
- <!-- Set the world material to vacuum. -->
+ <!-- Set the world material to vacuum. -->
<material name="WorldMaterial">
<D type="density" unit="g/cm3" value="0.0000000000000001"/>
<fraction n="1.0" ref="Vacuum" />
@@ -578,7 +578,7 @@
</layer>
<layer id="2">
<module_placement name="TestRunModuleFieldDef" id="0" x="0" y="0" z="endOfFieldZ" rx="0" ry="0" rz="-PI/2"/>
- </layer>
+ </layer>
</detector>
<detector id="13" name="Ecal" type="HPSEcal3" insideTrackingVolume="false" readout="EcalHits" vis="ECALVis">
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v6/compact.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v6/compact.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v6/compact.xml Wed Apr 27 11:11:32 2016
@@ -488,7 +488,7 @@
</define>
<materials>
- <!-- Set the world material to vacuum. -->
+ <!-- Set the world material to vacuum. -->
<material name="WorldMaterial">
<D type="density" unit="g/cm3" value="0.0000000000000001"/>
<fraction n="1.0" ref="Vacuum" />
@@ -578,7 +578,7 @@
</layer>
<layer id="2">
<module_placement name="TestRunModuleFieldDef" id="0" x="0" y="0" z="endOfFieldZ" rx="0" ry="0" rz="-PI/2"/>
- </layer>
+ </layer>
</detector>
<detector id="13" name="Ecal" type="HPSEcal3" insideTrackingVolume="false" readout="EcalHits" vis="ECALVis">
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v7-2/compact.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v7-2/compact.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v7-2/compact.xml Wed Apr 27 11:11:32 2016
@@ -86,127 +86,127 @@
<constant name="y9b_misalign" value="0.0"/>
<constant name="y10b_misalign" value="0.0"/>
- <!-- alignment corrections -->
- <constant name="x1t_align" value="0.000000"/>
- <constant name="x2t_align" value="0.000000"/>
- <constant name="x3t_align" value="0.000000"/>
- <constant name="x4t_align" value="0.000000"/>
- <constant name="x5t_align" value="0.000000"/>
- <constant name="x6t_align" value="0.000000"/>
- <constant name="x7t_align" value="0.000000"/>
- <constant name="x8t_align" value="0.000000"/>
- <constant name="x9t_align" value="0.000000"/>
- <constant name="x10t_align" value="0.000000"/>
- <constant name="y1t_align" value="0.000000"/>
- <constant name="y2t_align" value="0.000000"/>
- <constant name="y3t_align" value="0.0"/>
- <constant name="y4t_align" value="0.0"/>
- <constant name="y5t_align" value="0.000000"/>
- <constant name="y6t_align" value="0.000000"/>
- <constant name="y7t_align" value="0.000000"/>
- <constant name="y8t_align" value="0.000000"/>
- <constant name="y9t_align" value="0.000000"/>
- <constant name="y10t_align" value="0.000000"/>
- <constant name="z1t_align" value="0.000000"/>
- <constant name="z2t_align" value="0.000000"/>
- <constant name="z3t_align" value="0.000000"/>
- <constant name="z4t_align" value="0.000000"/>
- <constant name="z5t_align" value="0.000000"/>
- <constant name="z6t_align" value="0.000000"/>
- <constant name="z7t_align" value="0.000000"/>
- <constant name="z8t_align" value="0.000000"/>
- <constant name="z9t_align" value="0.000000"/>
- <constant name="z10t_align" value="0.000000"/>
- <constant name="rx1t_align" value="0.000000"/>
- <constant name="rx2t_align" value="0.000000"/>
- <constant name="rx3t_align" value="0.000000"/>
- <constant name="rx4t_align" value="0.000000"/>
- <constant name="rx5t_align" value="0.000000"/>
- <constant name="rx6t_align" value="0.000000"/>
- <constant name="rx7t_align" value="0.000000"/>
- <constant name="rx8t_align" value="0.000000"/>
- <constant name="rx9t_align" value="0.000000"/>
- <constant name="rx10t_align" value="0.000000"/>
- <constant name="ry1t_align" value="0.000000"/>
- <constant name="ry2t_align" value="0.000000"/>
- <constant name="ry3t_align" value="0.000000"/>
- <constant name="ry4t_align" value="0.000000"/>
- <constant name="ry5t_align" value="0.000000"/>
- <constant name="ry6t_align" value="0.000000"/>
- <constant name="ry7t_align" value="0.000000"/>
- <constant name="ry8t_align" value="0.000000"/>
- <constant name="ry9t_align" value="0.000000"/>
- <constant name="ry10t_align" value="0.000000"/>
- <constant name="rz1t_align" value="0.000000"/>
- <constant name="rz2t_align" value="0.000000"/>
- <constant name="rz3t_align" value="0.000000"/>
- <constant name="rz4t_align" value="0.000000"/>
- <constant name="rz5t_align" value="0.000000"/>
- <constant name="rz6t_align" value="0.000000"/>
- <constant name="rz7t_align" value="0.000000"/>
- <constant name="rz8t_align" value="0.000000"/>
- <constant name="rz9t_align" value="0.000000"/>
- <constant name="rz10t_align" value="0.000000"/>
- <constant name="x1b_align" value="0.000000"/>
- <constant name="x2b_align" value="0.000000"/>
- <constant name="x3b_align" value="0.000000"/>
- <constant name="x4b_align" value="0.000000"/>
- <constant name="x5b_align" value="0.000000"/>
- <constant name="x6b_align" value="0.00000"/>
- <constant name="x7b_align" value="0.000000"/>
- <constant name="x8b_align" value="0.000000"/>
- <constant name="x9b_align" value="0.000000"/>
- <constant name="x10b_align" value="0.000000"/>
- <constant name="y1b_align" value="0.000000"/>
- <constant name="y2b_align" value="0.000000"/>
- <constant name="y3b_align" value="0.000000"/>
- <constant name="y4b_align" value="0.000000"/>
- <constant name="y5b_align" value="0.0"/>
- <constant name="y6b_align" value="0.0"/>
- <constant name="y7b_align" value="0.000000"/>
- <constant name="y8b_align" value="0.000000"/>
- <constant name="y9b_align" value="0.000000"/>
- <constant name="y10b_align" value="0.000000"/>
- <constant name="z1b_align" value="0.000000"/>
- <constant name="z2b_align" value="0.000000"/>
- <constant name="z3b_align" value="0.000000"/>
- <constant name="z4b_align" value="0.000000"/>
- <constant name="z5b_align" value="0.000000"/>
- <constant name="z6b_align" value="0.000000"/>
- <constant name="z7b_align" value="0.000000"/>
- <constant name="z8b_align" value="0.000000"/>
- <constant name="z9b_align" value="0.000000"/>
- <constant name="z10b_align" value="0.000000"/>
- <constant name="rx1b_align" value="0.000000"/>
- <constant name="rx2b_align" value="0.000000"/>
- <constant name="rx3b_align" value="0.000000"/>
- <constant name="rx4b_align" value="0.000000"/>
- <constant name="rx5b_align" value="0.000000"/>
- <constant name="rx6b_align" value="0.000000"/>
- <constant name="rx7b_align" value="0.000000"/>
- <constant name="rx8b_align" value="0.000000"/>
- <constant name="rx9b_align" value="0.000000"/>
- <constant name="rx10b_align" value="0.000000"/>
- <constant name="ry1b_align" value="0.000000"/>
- <constant name="ry2b_align" value="0.000000"/>
- <constant name="ry3b_align" value="0.000000"/>
- <constant name="ry4b_align" value="0.000000"/>
- <constant name="ry5b_align" value="0.000000"/>
- <constant name="ry6b_align" value="0.000000"/>
- <constant name="ry7b_align" value="0.000000"/>
- <constant name="ry8b_align" value="0.000000"/>
- <constant name="ry9b_align" value="0.000000"/>
- <constant name="ry10b_align" value="0.000000"/>
- <constant name="rz1b_align" value="0.000000"/>
- <constant name="rz2b_align" value="0.000000"/>
- <constant name="rz3b_align" value="0.000000"/>
- <constant name="rz4b_align" value="0.000000"/>
- <constant name="rz5b_align" value="0.000000"/>
- <constant name="rz6b_align" value="0.000000"/>
- <constant name="rz7b_align" value="0.000000"/>
- <constant name="rz8b_align" value="0.000000"/>
- <constant name="rz9b_align" value="0.000000"/>
- <constant name="rz10b_align" value="0.000000"/>
+ <!-- alignment corrections -->
+ <constant name="x1t_align" value="0.000000"/>
+ <constant name="x2t_align" value="0.000000"/>
+ <constant name="x3t_align" value="0.000000"/>
+ <constant name="x4t_align" value="0.000000"/>
+ <constant name="x5t_align" value="0.000000"/>
+ <constant name="x6t_align" value="0.000000"/>
+ <constant name="x7t_align" value="0.000000"/>
+ <constant name="x8t_align" value="0.000000"/>
+ <constant name="x9t_align" value="0.000000"/>
+ <constant name="x10t_align" value="0.000000"/>
+ <constant name="y1t_align" value="0.000000"/>
+ <constant name="y2t_align" value="0.000000"/>
+ <constant name="y3t_align" value="0.0"/>
+ <constant name="y4t_align" value="0.0"/>
+ <constant name="y5t_align" value="0.000000"/>
+ <constant name="y6t_align" value="0.000000"/>
+ <constant name="y7t_align" value="0.000000"/>
+ <constant name="y8t_align" value="0.000000"/>
+ <constant name="y9t_align" value="0.000000"/>
+ <constant name="y10t_align" value="0.000000"/>
+ <constant name="z1t_align" value="0.000000"/>
+ <constant name="z2t_align" value="0.000000"/>
+ <constant name="z3t_align" value="0.000000"/>
+ <constant name="z4t_align" value="0.000000"/>
+ <constant name="z5t_align" value="0.000000"/>
+ <constant name="z6t_align" value="0.000000"/>
+ <constant name="z7t_align" value="0.000000"/>
+ <constant name="z8t_align" value="0.000000"/>
+ <constant name="z9t_align" value="0.000000"/>
+ <constant name="z10t_align" value="0.000000"/>
+ <constant name="rx1t_align" value="0.000000"/>
+ <constant name="rx2t_align" value="0.000000"/>
+ <constant name="rx3t_align" value="0.000000"/>
+ <constant name="rx4t_align" value="0.000000"/>
+ <constant name="rx5t_align" value="0.000000"/>
+ <constant name="rx6t_align" value="0.000000"/>
+ <constant name="rx7t_align" value="0.000000"/>
+ <constant name="rx8t_align" value="0.000000"/>
+ <constant name="rx9t_align" value="0.000000"/>
+ <constant name="rx10t_align" value="0.000000"/>
+ <constant name="ry1t_align" value="0.000000"/>
+ <constant name="ry2t_align" value="0.000000"/>
+ <constant name="ry3t_align" value="0.000000"/>
+ <constant name="ry4t_align" value="0.000000"/>
+ <constant name="ry5t_align" value="0.000000"/>
+ <constant name="ry6t_align" value="0.000000"/>
+ <constant name="ry7t_align" value="0.000000"/>
+ <constant name="ry8t_align" value="0.000000"/>
+ <constant name="ry9t_align" value="0.000000"/>
+ <constant name="ry10t_align" value="0.000000"/>
+ <constant name="rz1t_align" value="0.000000"/>
+ <constant name="rz2t_align" value="0.000000"/>
+ <constant name="rz3t_align" value="0.000000"/>
+ <constant name="rz4t_align" value="0.000000"/>
+ <constant name="rz5t_align" value="0.000000"/>
+ <constant name="rz6t_align" value="0.000000"/>
+ <constant name="rz7t_align" value="0.000000"/>
+ <constant name="rz8t_align" value="0.000000"/>
+ <constant name="rz9t_align" value="0.000000"/>
+ <constant name="rz10t_align" value="0.000000"/>
+ <constant name="x1b_align" value="0.000000"/>
+ <constant name="x2b_align" value="0.000000"/>
+ <constant name="x3b_align" value="0.000000"/>
+ <constant name="x4b_align" value="0.000000"/>
+ <constant name="x5b_align" value="0.000000"/>
+ <constant name="x6b_align" value="0.00000"/>
+ <constant name="x7b_align" value="0.000000"/>
+ <constant name="x8b_align" value="0.000000"/>
+ <constant name="x9b_align" value="0.000000"/>
+ <constant name="x10b_align" value="0.000000"/>
+ <constant name="y1b_align" value="0.000000"/>
+ <constant name="y2b_align" value="0.000000"/>
+ <constant name="y3b_align" value="0.000000"/>
+ <constant name="y4b_align" value="0.000000"/>
+ <constant name="y5b_align" value="0.0"/>
+ <constant name="y6b_align" value="0.0"/>
+ <constant name="y7b_align" value="0.000000"/>
+ <constant name="y8b_align" value="0.000000"/>
+ <constant name="y9b_align" value="0.000000"/>
+ <constant name="y10b_align" value="0.000000"/>
+ <constant name="z1b_align" value="0.000000"/>
+ <constant name="z2b_align" value="0.000000"/>
+ <constant name="z3b_align" value="0.000000"/>
+ <constant name="z4b_align" value="0.000000"/>
+ <constant name="z5b_align" value="0.000000"/>
+ <constant name="z6b_align" value="0.000000"/>
+ <constant name="z7b_align" value="0.000000"/>
+ <constant name="z8b_align" value="0.000000"/>
+ <constant name="z9b_align" value="0.000000"/>
+ <constant name="z10b_align" value="0.000000"/>
+ <constant name="rx1b_align" value="0.000000"/>
+ <constant name="rx2b_align" value="0.000000"/>
+ <constant name="rx3b_align" value="0.000000"/>
+ <constant name="rx4b_align" value="0.000000"/>
+ <constant name="rx5b_align" value="0.000000"/>
+ <constant name="rx6b_align" value="0.000000"/>
+ <constant name="rx7b_align" value="0.000000"/>
+ <constant name="rx8b_align" value="0.000000"/>
+ <constant name="rx9b_align" value="0.000000"/>
+ <constant name="rx10b_align" value="0.000000"/>
+ <constant name="ry1b_align" value="0.000000"/>
+ <constant name="ry2b_align" value="0.000000"/>
+ <constant name="ry3b_align" value="0.000000"/>
+ <constant name="ry4b_align" value="0.000000"/>
+ <constant name="ry5b_align" value="0.000000"/>
+ <constant name="ry6b_align" value="0.000000"/>
+ <constant name="ry7b_align" value="0.000000"/>
+ <constant name="ry8b_align" value="0.000000"/>
+ <constant name="ry9b_align" value="0.000000"/>
+ <constant name="ry10b_align" value="0.000000"/>
+ <constant name="rz1b_align" value="0.000000"/>
+ <constant name="rz2b_align" value="0.000000"/>
+ <constant name="rz3b_align" value="0.000000"/>
+ <constant name="rz4b_align" value="0.000000"/>
+ <constant name="rz5b_align" value="0.000000"/>
+ <constant name="rz6b_align" value="0.000000"/>
+ <constant name="rz7b_align" value="0.000000"/>
+ <constant name="rz8b_align" value="0.000000"/>
+ <constant name="rz9b_align" value="0.000000"/>
+ <constant name="rz10b_align" value="0.000000"/>
<!-- Positions of sensor centers above/below nominal beam -->
<constant name="y1t" value="36.894" />
@@ -606,134 +606,134 @@
<constant name="mod2_rx10b" value="mod_rx10b"/>
<constant name="mod2_ry10b" value="mod_ry10b+y_rot_bot_pivot"/>
<constant name="mod2_rz10b" value="mod_rz10b"/>
-
- <!-- final constants -->
- <constant name="final_x1t" value="mod2_x1t+x1t_align"/>
- <constant name="final_x2t" value="mod2_x2t+x2t_align"/>
- <constant name="final_x3t" value="mod2_x3t+x3t_align"/>
- <constant name="final_x4t" value="mod2_x4t+x4t_align"/>
- <constant name="final_x5t" value="mod2_x5t+x5t_align"/>
- <constant name="final_x6t" value="mod2_x6t+x6t_align"/>
- <constant name="final_x7t" value="mod2_x7t+x7t_align"/>
- <constant name="final_x8t" value="mod2_x8t+x8t_align"/>
- <constant name="final_x9t" value="mod2_x9t+x9t_align"/>
- <constant name="final_x10t" value="mod2_x10t+x10t_align"/>
- <constant name="final_y1t" value="mod2_y1t+y1t_align"/>
- <constant name="final_y2t" value="mod2_y2t+y2t_align"/>
- <constant name="final_y3t" value="mod2_y3t+y3t_align"/>
- <constant name="final_y4t" value="mod2_y4t+y4t_align"/>
- <constant name="final_y5t" value="mod2_y5t+y5t_align"/>
- <constant name="final_y6t" value="mod2_y6t+y6t_align"/>
- <constant name="final_y7t" value="mod2_y7t+y7t_align"/>
- <constant name="final_y8t" value="mod2_y8t+y8t_align"/>
- <constant name="final_y9t" value="mod2_y9t+y9t_align"/>
- <constant name="final_y10t" value="mod2_y10t+y10t_align"/>
- <constant name="final_z1t" value="mod2_z1t+z1t_align"/>
- <constant name="final_z2t" value="mod2_z2t+z2t_align"/>
- <constant name="final_z3t" value="mod2_z3t+z3t_align"/>
- <constant name="final_z4t" value="mod2_z4t+z4t_align"/>
- <constant name="final_z5t" value="mod2_z5t+z5t_align"/>
- <constant name="final_z6t" value="mod2_z6t+z6t_align"/>
- <constant name="final_z7t" value="mod2_z7t+z7t_align"/>
- <constant name="final_z8t" value="mod2_z8t+z8t_align"/>
- <constant name="final_z9t" value="mod2_z9t+z9t_align"/>
- <constant name="final_z10t" value="mod2_z10t+z10t_align"/>
- <constant name="final_rx1t" value="mod2_rx1t+rx1t_align"/>
- <constant name="final_rx2t" value="mod2_rx2t+rx2t_align"/>
- <constant name="final_rx3t" value="mod2_rx3t+rx3t_align"/>
- <constant name="final_rx4t" value="mod2_rx4t+rx4t_align"/>
- <constant name="final_rx5t" value="mod2_rx5t+rx5t_align"/>
- <constant name="final_rx6t" value="mod2_rx6t+rx6t_align"/>
- <constant name="final_rx7t" value="mod2_rx7t+rx7t_align"/>
- <constant name="final_rx8t" value="mod2_rx8t+rx8t_align"/>
- <constant name="final_rx9t" value="mod2_rx9t+rx9t_align"/>
- <constant name="final_rx10t" value="mod2_rx10t+rx10t_align"/>
- <constant name="final_ry1t" value="mod2_ry1t+ry1t_align"/>
- <constant name="final_ry2t" value="mod2_ry2t+ry2t_align"/>
- <constant name="final_ry3t" value="mod2_ry3t+ry3t_align"/>
- <constant name="final_ry4t" value="mod2_ry4t+ry4t_align"/>
- <constant name="final_ry5t" value="mod2_ry5t+ry5t_align"/>
- <constant name="final_ry6t" value="mod2_ry6t+ry6t_align"/>
- <constant name="final_ry7t" value="mod2_ry7t+ry7t_align"/>
- <constant name="final_ry8t" value="mod2_ry8t+ry8t_align"/>
- <constant name="final_ry9t" value="mod2_ry9t+ry9t_align"/>
- <constant name="final_ry10t" value="mod2_ry10t+ry10t_align"/>
- <constant name="final_rz1t" value="mod2_rz1t+rz1t_align"/>
- <constant name="final_rz2t" value="mod2_rz2t+rz2t_align"/>
- <constant name="final_rz3t" value="mod2_rz3t+rz3t_align"/>
- <constant name="final_rz4t" value="mod2_rz4t+rz4t_align"/>
- <constant name="final_rz5t" value="mod2_rz5t+rz5t_align"/>
- <constant name="final_rz6t" value="mod2_rz6t+rz6t_align"/>
- <constant name="final_rz7t" value="mod2_rz7t+rz7t_align"/>
- <constant name="final_rz8t" value="mod2_rz8t+rz8t_align"/>
- <constant name="final_rz9t" value="mod2_rz9t+rz9t_align"/>
- <constant name="final_rz10t" value="mod2_rz10t+rz10t_align"/>
- <constant name="final_x1b" value="mod2_x1b+x1b_align"/>
- <constant name="final_x2b" value="mod2_x2b+x2b_align"/>
- <constant name="final_x3b" value="mod2_x3b+x3b_align"/>
- <constant name="final_x4b" value="mod2_x4b+x4b_align"/>
- <constant name="final_x5b" value="mod2_x5b+x5b_align"/>
- <constant name="final_x6b" value="mod2_x6b+x6b_align"/>
- <constant name="final_x7b" value="mod2_x7b+x7b_align"/>
- <constant name="final_x8b" value="mod2_x8b+x8b_align"/>
- <constant name="final_x9b" value="mod2_x9b+x9b_align"/>
- <constant name="final_x10b" value="mod2_x10b+x10b_align"/>
- <constant name="final_y1b" value="mod2_y1b+y1b_align"/>
- <constant name="final_y2b" value="mod2_y2b+y2b_align"/>
- <constant name="final_y3b" value="mod2_y3b+y3b_align"/>
- <constant name="final_y4b" value="mod2_y4b+y4b_align"/>
- <constant name="final_y5b" value="mod2_y5b+y5b_align"/>
- <constant name="final_y6b" value="mod2_y6b+y6b_align"/>
- <constant name="final_y7b" value="mod2_y7b+y7b_align"/>
- <constant name="final_y8b" value="mod2_y8b+y8b_align"/>
- <constant name="final_y9b" value="mod2_y9b+y9b_align"/>
- <constant name="final_y10b" value="mod2_y10b+y10b_align"/>
- <constant name="final_z1b" value="mod2_z1b+z1b_align"/>
- <constant name="final_z2b" value="mod2_z2b+z2b_align"/>
- <constant name="final_z3b" value="mod2_z3b+z3b_align"/>
- <constant name="final_z4b" value="mod2_z4b+z4b_align"/>
- <constant name="final_z5b" value="mod2_z5b+z5b_align"/>
- <constant name="final_z6b" value="mod2_z6b+z6b_align"/>
- <constant name="final_z7b" value="mod2_z7b+z7b_align"/>
- <constant name="final_z8b" value="mod2_z8b+z8b_align"/>
- <constant name="final_z9b" value="mod2_z9b+z9b_align"/>
- <constant name="final_z10b" value="mod2_z10b+z10b_align"/>
- <constant name="final_rx1b" value="mod2_rx1b+rx1b_align"/>
- <constant name="final_rx2b" value="mod2_rx2b+rx2b_align"/>
- <constant name="final_rx3b" value="mod2_rx3b+rx3b_align"/>
- <constant name="final_rx4b" value="mod2_rx4b+rx4b_align"/>
- <constant name="final_rx5b" value="mod2_rx5b+rx5b_align"/>
- <constant name="final_rx6b" value="mod2_rx6b+rx6b_align"/>
- <constant name="final_rx7b" value="mod2_rx7b+rx7b_align"/>
- <constant name="final_rx8b" value="mod2_rx8b+rx8b_align"/>
- <constant name="final_rx9b" value="mod2_rx9b+rx9b_align"/>
- <constant name="final_rx10b" value="mod2_rx10b+rx10b_align"/>
- <constant name="final_ry1b" value="mod2_ry1b+ry1b_align"/>
- <constant name="final_ry2b" value="mod2_ry2b+ry2b_align"/>
- <constant name="final_ry3b" value="mod2_ry3b+ry3b_align"/>
- <constant name="final_ry4b" value="mod2_ry4b+ry4b_align"/>
- <constant name="final_ry5b" value="mod2_ry5b+ry5b_align"/>
- <constant name="final_ry6b" value="mod2_ry6b+ry6b_align"/>
- <constant name="final_ry7b" value="mod2_ry7b+ry7b_align"/>
- <constant name="final_ry8b" value="mod2_ry8b+ry8b_align"/>
- <constant name="final_ry9b" value="mod2_ry9b+ry9b_align"/>
- <constant name="final_ry10b" value="mod2_ry10b+ry10b_align"/>
- <constant name="final_rz1b" value="mod2_rz1b+rz1b_align"/>
- <constant name="final_rz2b" value="mod2_rz2b+rz2b_align"/>
- <constant name="final_rz3b" value="mod2_rz3b+rz3b_align"/>
- <constant name="final_rz4b" value="mod2_rz4b+rz4b_align"/>
- <constant name="final_rz5b" value="mod2_rz5b+rz5b_align"/>
- <constant name="final_rz6b" value="mod2_rz6b+rz6b_align"/>
- <constant name="final_rz7b" value="mod2_rz7b+rz7b_align"/>
- <constant name="final_rz8b" value="mod2_rz8b+rz8b_align"/>
- <constant name="final_rz9b" value="mod2_rz9b+rz9b_align"/>
- <constant name="final_rz10b" value="mod2_rz10b+rz10b_align"/>
-
-
+
+ <!-- final constants -->
+ <constant name="final_x1t" value="mod2_x1t+x1t_align"/>
+ <constant name="final_x2t" value="mod2_x2t+x2t_align"/>
+ <constant name="final_x3t" value="mod2_x3t+x3t_align"/>
+ <constant name="final_x4t" value="mod2_x4t+x4t_align"/>
+ <constant name="final_x5t" value="mod2_x5t+x5t_align"/>
+ <constant name="final_x6t" value="mod2_x6t+x6t_align"/>
+ <constant name="final_x7t" value="mod2_x7t+x7t_align"/>
+ <constant name="final_x8t" value="mod2_x8t+x8t_align"/>
+ <constant name="final_x9t" value="mod2_x9t+x9t_align"/>
+ <constant name="final_x10t" value="mod2_x10t+x10t_align"/>
+ <constant name="final_y1t" value="mod2_y1t+y1t_align"/>
+ <constant name="final_y2t" value="mod2_y2t+y2t_align"/>
+ <constant name="final_y3t" value="mod2_y3t+y3t_align"/>
+ <constant name="final_y4t" value="mod2_y4t+y4t_align"/>
+ <constant name="final_y5t" value="mod2_y5t+y5t_align"/>
+ <constant name="final_y6t" value="mod2_y6t+y6t_align"/>
+ <constant name="final_y7t" value="mod2_y7t+y7t_align"/>
+ <constant name="final_y8t" value="mod2_y8t+y8t_align"/>
+ <constant name="final_y9t" value="mod2_y9t+y9t_align"/>
+ <constant name="final_y10t" value="mod2_y10t+y10t_align"/>
+ <constant name="final_z1t" value="mod2_z1t+z1t_align"/>
+ <constant name="final_z2t" value="mod2_z2t+z2t_align"/>
+ <constant name="final_z3t" value="mod2_z3t+z3t_align"/>
+ <constant name="final_z4t" value="mod2_z4t+z4t_align"/>
+ <constant name="final_z5t" value="mod2_z5t+z5t_align"/>
+ <constant name="final_z6t" value="mod2_z6t+z6t_align"/>
+ <constant name="final_z7t" value="mod2_z7t+z7t_align"/>
+ <constant name="final_z8t" value="mod2_z8t+z8t_align"/>
+ <constant name="final_z9t" value="mod2_z9t+z9t_align"/>
+ <constant name="final_z10t" value="mod2_z10t+z10t_align"/>
+ <constant name="final_rx1t" value="mod2_rx1t+rx1t_align"/>
+ <constant name="final_rx2t" value="mod2_rx2t+rx2t_align"/>
+ <constant name="final_rx3t" value="mod2_rx3t+rx3t_align"/>
+ <constant name="final_rx4t" value="mod2_rx4t+rx4t_align"/>
+ <constant name="final_rx5t" value="mod2_rx5t+rx5t_align"/>
+ <constant name="final_rx6t" value="mod2_rx6t+rx6t_align"/>
+ <constant name="final_rx7t" value="mod2_rx7t+rx7t_align"/>
+ <constant name="final_rx8t" value="mod2_rx8t+rx8t_align"/>
+ <constant name="final_rx9t" value="mod2_rx9t+rx9t_align"/>
+ <constant name="final_rx10t" value="mod2_rx10t+rx10t_align"/>
+ <constant name="final_ry1t" value="mod2_ry1t+ry1t_align"/>
+ <constant name="final_ry2t" value="mod2_ry2t+ry2t_align"/>
+ <constant name="final_ry3t" value="mod2_ry3t+ry3t_align"/>
+ <constant name="final_ry4t" value="mod2_ry4t+ry4t_align"/>
+ <constant name="final_ry5t" value="mod2_ry5t+ry5t_align"/>
+ <constant name="final_ry6t" value="mod2_ry6t+ry6t_align"/>
+ <constant name="final_ry7t" value="mod2_ry7t+ry7t_align"/>
+ <constant name="final_ry8t" value="mod2_ry8t+ry8t_align"/>
+ <constant name="final_ry9t" value="mod2_ry9t+ry9t_align"/>
+ <constant name="final_ry10t" value="mod2_ry10t+ry10t_align"/>
+ <constant name="final_rz1t" value="mod2_rz1t+rz1t_align"/>
+ <constant name="final_rz2t" value="mod2_rz2t+rz2t_align"/>
+ <constant name="final_rz3t" value="mod2_rz3t+rz3t_align"/>
+ <constant name="final_rz4t" value="mod2_rz4t+rz4t_align"/>
+ <constant name="final_rz5t" value="mod2_rz5t+rz5t_align"/>
+ <constant name="final_rz6t" value="mod2_rz6t+rz6t_align"/>
+ <constant name="final_rz7t" value="mod2_rz7t+rz7t_align"/>
+ <constant name="final_rz8t" value="mod2_rz8t+rz8t_align"/>
+ <constant name="final_rz9t" value="mod2_rz9t+rz9t_align"/>
+ <constant name="final_rz10t" value="mod2_rz10t+rz10t_align"/>
+ <constant name="final_x1b" value="mod2_x1b+x1b_align"/>
+ <constant name="final_x2b" value="mod2_x2b+x2b_align"/>
+ <constant name="final_x3b" value="mod2_x3b+x3b_align"/>
+ <constant name="final_x4b" value="mod2_x4b+x4b_align"/>
+ <constant name="final_x5b" value="mod2_x5b+x5b_align"/>
+ <constant name="final_x6b" value="mod2_x6b+x6b_align"/>
+ <constant name="final_x7b" value="mod2_x7b+x7b_align"/>
+ <constant name="final_x8b" value="mod2_x8b+x8b_align"/>
+ <constant name="final_x9b" value="mod2_x9b+x9b_align"/>
+ <constant name="final_x10b" value="mod2_x10b+x10b_align"/>
+ <constant name="final_y1b" value="mod2_y1b+y1b_align"/>
+ <constant name="final_y2b" value="mod2_y2b+y2b_align"/>
+ <constant name="final_y3b" value="mod2_y3b+y3b_align"/>
+ <constant name="final_y4b" value="mod2_y4b+y4b_align"/>
+ <constant name="final_y5b" value="mod2_y5b+y5b_align"/>
+ <constant name="final_y6b" value="mod2_y6b+y6b_align"/>
+ <constant name="final_y7b" value="mod2_y7b+y7b_align"/>
+ <constant name="final_y8b" value="mod2_y8b+y8b_align"/>
+ <constant name="final_y9b" value="mod2_y9b+y9b_align"/>
+ <constant name="final_y10b" value="mod2_y10b+y10b_align"/>
+ <constant name="final_z1b" value="mod2_z1b+z1b_align"/>
+ <constant name="final_z2b" value="mod2_z2b+z2b_align"/>
+ <constant name="final_z3b" value="mod2_z3b+z3b_align"/>
+ <constant name="final_z4b" value="mod2_z4b+z4b_align"/>
+ <constant name="final_z5b" value="mod2_z5b+z5b_align"/>
+ <constant name="final_z6b" value="mod2_z6b+z6b_align"/>
+ <constant name="final_z7b" value="mod2_z7b+z7b_align"/>
+ <constant name="final_z8b" value="mod2_z8b+z8b_align"/>
+ <constant name="final_z9b" value="mod2_z9b+z9b_align"/>
+ <constant name="final_z10b" value="mod2_z10b+z10b_align"/>
+ <constant name="final_rx1b" value="mod2_rx1b+rx1b_align"/>
+ <constant name="final_rx2b" value="mod2_rx2b+rx2b_align"/>
+ <constant name="final_rx3b" value="mod2_rx3b+rx3b_align"/>
+ <constant name="final_rx4b" value="mod2_rx4b+rx4b_align"/>
+ <constant name="final_rx5b" value="mod2_rx5b+rx5b_align"/>
+ <constant name="final_rx6b" value="mod2_rx6b+rx6b_align"/>
+ <constant name="final_rx7b" value="mod2_rx7b+rx7b_align"/>
+ <constant name="final_rx8b" value="mod2_rx8b+rx8b_align"/>
+ <constant name="final_rx9b" value="mod2_rx9b+rx9b_align"/>
+ <constant name="final_rx10b" value="mod2_rx10b+rx10b_align"/>
+ <constant name="final_ry1b" value="mod2_ry1b+ry1b_align"/>
+ <constant name="final_ry2b" value="mod2_ry2b+ry2b_align"/>
+ <constant name="final_ry3b" value="mod2_ry3b+ry3b_align"/>
+ <constant name="final_ry4b" value="mod2_ry4b+ry4b_align"/>
+ <constant name="final_ry5b" value="mod2_ry5b+ry5b_align"/>
+ <constant name="final_ry6b" value="mod2_ry6b+ry6b_align"/>
+ <constant name="final_ry7b" value="mod2_ry7b+ry7b_align"/>
+ <constant name="final_ry8b" value="mod2_ry8b+ry8b_align"/>
+ <constant name="final_ry9b" value="mod2_ry9b+ry9b_align"/>
+ <constant name="final_ry10b" value="mod2_ry10b+ry10b_align"/>
+ <constant name="final_rz1b" value="mod2_rz1b+rz1b_align"/>
+ <constant name="final_rz2b" value="mod2_rz2b+rz2b_align"/>
+ <constant name="final_rz3b" value="mod2_rz3b+rz3b_align"/>
+ <constant name="final_rz4b" value="mod2_rz4b+rz4b_align"/>
+ <constant name="final_rz5b" value="mod2_rz5b+rz5b_align"/>
+ <constant name="final_rz6b" value="mod2_rz6b+rz6b_align"/>
+ <constant name="final_rz7b" value="mod2_rz7b+rz7b_align"/>
+ <constant name="final_rz8b" value="mod2_rz8b+rz8b_align"/>
+ <constant name="final_rz9b" value="mod2_rz9b+rz9b_align"/>
+ <constant name="final_rz10b" value="mod2_rz10b+rz10b_align"/>
+
+
</define>
<materials>
- <!-- Set the world material to vacuum. -->
+ <!-- Set the world material to vacuum. -->
<material name="WorldMaterial">
<D type="density" unit="g/cm3" value="0.0000000000000001"/>
<fraction n="1.0" ref="Vacuum" />
@@ -822,7 +822,7 @@
</layer>
<layer id="2">
<module_placement name="TestRunModuleFieldDef" id="0" x="0" y="0" z="dipoleMagnetPositionZ+dipoleMagnetLength/2" rx="0" ry="0" rz="-PI/2"/>
- </layer>
+ </layer>
</detector>
<detector id="13" name="Ecal" type="HPSEcal3" insideTrackingVolume="false" readout="EcalHits" vis="ECALVis">
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v7-3/compact.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v7-3/compact.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v7-3/compact.xml Wed Apr 27 11:11:32 2016
@@ -84,127 +84,127 @@
<constant name="y9b_misalign" value="0.0" />
<constant name="y10b_misalign" value="0.0" />
- <!-- alignment corrections -->
- <constant name="x1t_align" value="0.000000" />
- <constant name="x2t_align" value="-0.000000" />
- <constant name="x3t_align" value="-0.000012" />
- <constant name="x4t_align" value="-0.017488" />
- <constant name="x5t_align" value="-0.000064" />
- <constant name="x6t_align" value="-0.000141" />
- <constant name="x7t_align" value="0.000002" />
- <constant name="x8t_align" value="0.000573" />
- <constant name="x9t_align" value="0.000000" />
- <constant name="x10t_align" value="-0.000000" />
- <constant name="y1t_align" value="0.000000" />
- <constant name="y2t_align" value="0.000000" />
- <constant name="y3t_align" value="-0.094679" />
- <constant name="y4t_align" value="-0.174440" />
- <constant name="y5t_align" value="0.118670" />
- <constant name="y6t_align" value="-0.001400" />
- <constant name="y7t_align" value="0.047526" />
- <constant name="y8t_align" value="0.011511" />
- <constant name="y9t_align" value="0.000000" />
- <constant name="y10t_align" value="0.000000" />
- <constant name="z1t_align" value="0.000000" />
- <constant name="z2t_align" value="0.000000" />
- <constant name="z3t_align" value="-0.002463" />
- <constant name="z4t_align" value="-0.003986" />
- <constant name="z5t_align" value="0.003089" />
- <constant name="z6t_align" value="-0.000032" />
- <constant name="z7t_align" value="0.001236" />
- <constant name="z8t_align" value="0.000281" />
- <constant name="z9t_align" value="0.000000" />
- <constant name="z10t_align" value="0.000000" />
- <constant name="rx1t_align" value="0.000000" />
- <constant name="rx2t_align" value="-0.000000" />
- <constant name="rx3t_align" value="0.000000" />
- <constant name="rx4t_align" value="-0.000000" />
- <constant name="rx5t_align" value="0.000000" />
- <constant name="rx6t_align" value="-0.000000" />
- <constant name="rx7t_align" value="0.000000" />
- <constant name="rx8t_align" value="-0.000000" />
- <constant name="rx9t_align" value="0.000000" />
- <constant name="rx10t_align" value="-0.000000" />
- <constant name="ry1t_align" value="0.000000" />
- <constant name="ry2t_align" value="0.000000" />
- <constant name="ry3t_align" value="0.000000" />
- <constant name="ry4t_align" value="0.000000" />
- <constant name="ry5t_align" value="0.000000" />
- <constant name="ry6t_align" value="0.000000" />
- <constant name="ry7t_align" value="0.000000" />
- <constant name="ry8t_align" value="0.000000" />
- <constant name="ry9t_align" value="0.000000" />
- <constant name="ry10t_align" value="0.000000" />
- <constant name="rz1t_align" value="0.000000" />
- <constant name="rz2t_align" value="0.000000" />
- <constant name="rz3t_align" value="0.000000" />
- <constant name="rz4t_align" value="0.000000" />
- <constant name="rz5t_align" value="0.000000" />
- <constant name="rz6t_align" value="0.000000" />
- <constant name="rz7t_align" value="0.000000" />
- <constant name="rz8t_align" value="0.000000" />
- <constant name="rz9t_align" value="0.000000" />
- <constant name="rz10t_align" value="0.000000" />
- <constant name="x1b_align" value="0.000000" />
- <constant name="x2b_align" value="-0.000000" />
- <constant name="x3b_align" value="-0.068351" />
- <constant name="x4b_align" value="0.000012" />
- <constant name="x5b_align" value="-0.100385" />
- <constant name="x6b_align" value="0.000012" />
- <constant name="x7b_align" value="-0.025606" />
- <constant name="x8b_align" value="0.000007" />
- <constant name="x9b_align" value="0.000000" />
- <constant name="x10b_align" value="-0.000000" />
- <constant name="y1b_align" value="0.000000" />
- <constant name="y2b_align" value="0.000000" />
- <constant name="y3b_align" value="0.681109" />
- <constant name="y4b_align" value="0.061551" />
- <constant name="y5b_align" value="1.004204" />
- <constant name="y6b_align" value="0.037831" />
- <constant name="y7b_align" value="0.506463" />
- <constant name="y8b_align" value="0.015775" />
- <constant name="y9b_align" value="0.000000" />
- <constant name="y10b_align" value="0.000000" />
- <constant name="z1b_align" value="0.000000" />
- <constant name="z2b_align" value="0.000000" />
- <constant name="z3b_align" value="-0.013322" />
- <constant name="z4b_align" value="-0.001371" />
- <constant name="z5b_align" value="-0.019651" />
- <constant name="z6b_align" value="-0.000843" />
- <constant name="z7b_align" value="-0.010587" />
- <constant name="z8b_align" value="-0.000352" />
- <constant name="z9b_align" value="0.000000" />
- <constant name="z10b_align" value="0.000000" />
- <constant name="rx1b_align" value="0.000000" />
- <constant name="rx2b_align" value="-0.000000" />
- <constant name="rx3b_align" value="0.000000" />
- <constant name="rx4b_align" value="-0.000000" />
- <constant name="rx5b_align" value="0.000000" />
- <constant name="rx6b_align" value="-0.000000" />
- <constant name="rx7b_align" value="0.000000" />
- <constant name="rx8b_align" value="-0.000000" />
- <constant name="rx9b_align" value="0.000000" />
- <constant name="rx10b_align" value="-0.000000" />
- <constant name="ry1b_align" value="0.000000" />
- <constant name="ry2b_align" value="0.000000" />
- <constant name="ry3b_align" value="0.000000" />
- <constant name="ry4b_align" value="0.000000" />
- <constant name="ry5b_align" value="0.000000" />
- <constant name="ry6b_align" value="0.000000" />
- <constant name="ry7b_align" value="0.000000" />
- <constant name="ry8b_align" value="0.000000" />
- <constant name="ry9b_align" value="0.000000" />
- <constant name="ry10b_align" value="0.000000" />
- <constant name="rz1b_align" value="0.000000" />
- <constant name="rz2b_align" value="0.000000" />
- <constant name="rz3b_align" value="0.000000" />
- <constant name="rz4b_align" value="0.000000" />
- <constant name="rz5b_align" value="0.000000" />
- <constant name="rz6b_align" value="0.000000" />
- <constant name="rz7b_align" value="0.000000" />
- <constant name="rz8b_align" value="0.000000" />
- <constant name="rz9b_align" value="0.000000" />
- <constant name="rz10b_align" value="0.000000" />
+ <!-- alignment corrections -->
+ <constant name="x1t_align" value="0.000000" />
+ <constant name="x2t_align" value="-0.000000" />
+ <constant name="x3t_align" value="-0.000012" />
+ <constant name="x4t_align" value="-0.017488" />
+ <constant name="x5t_align" value="-0.000064" />
+ <constant name="x6t_align" value="-0.000141" />
+ <constant name="x7t_align" value="0.000002" />
+ <constant name="x8t_align" value="0.000573" />
+ <constant name="x9t_align" value="0.000000" />
+ <constant name="x10t_align" value="-0.000000" />
+ <constant name="y1t_align" value="0.000000" />
+ <constant name="y2t_align" value="0.000000" />
+ <constant name="y3t_align" value="-0.094679" />
+ <constant name="y4t_align" value="-0.174440" />
+ <constant name="y5t_align" value="0.118670" />
+ <constant name="y6t_align" value="-0.001400" />
+ <constant name="y7t_align" value="0.047526" />
+ <constant name="y8t_align" value="0.011511" />
+ <constant name="y9t_align" value="0.000000" />
+ <constant name="y10t_align" value="0.000000" />
+ <constant name="z1t_align" value="0.000000" />
+ <constant name="z2t_align" value="0.000000" />
+ <constant name="z3t_align" value="-0.002463" />
+ <constant name="z4t_align" value="-0.003986" />
+ <constant name="z5t_align" value="0.003089" />
+ <constant name="z6t_align" value="-0.000032" />
+ <constant name="z7t_align" value="0.001236" />
+ <constant name="z8t_align" value="0.000281" />
+ <constant name="z9t_align" value="0.000000" />
+ <constant name="z10t_align" value="0.000000" />
+ <constant name="rx1t_align" value="0.000000" />
+ <constant name="rx2t_align" value="-0.000000" />
+ <constant name="rx3t_align" value="0.000000" />
+ <constant name="rx4t_align" value="-0.000000" />
+ <constant name="rx5t_align" value="0.000000" />
+ <constant name="rx6t_align" value="-0.000000" />
+ <constant name="rx7t_align" value="0.000000" />
+ <constant name="rx8t_align" value="-0.000000" />
+ <constant name="rx9t_align" value="0.000000" />
+ <constant name="rx10t_align" value="-0.000000" />
+ <constant name="ry1t_align" value="0.000000" />
+ <constant name="ry2t_align" value="0.000000" />
+ <constant name="ry3t_align" value="0.000000" />
+ <constant name="ry4t_align" value="0.000000" />
+ <constant name="ry5t_align" value="0.000000" />
+ <constant name="ry6t_align" value="0.000000" />
+ <constant name="ry7t_align" value="0.000000" />
+ <constant name="ry8t_align" value="0.000000" />
+ <constant name="ry9t_align" value="0.000000" />
+ <constant name="ry10t_align" value="0.000000" />
+ <constant name="rz1t_align" value="0.000000" />
+ <constant name="rz2t_align" value="0.000000" />
+ <constant name="rz3t_align" value="0.000000" />
+ <constant name="rz4t_align" value="0.000000" />
+ <constant name="rz5t_align" value="0.000000" />
+ <constant name="rz6t_align" value="0.000000" />
+ <constant name="rz7t_align" value="0.000000" />
+ <constant name="rz8t_align" value="0.000000" />
+ <constant name="rz9t_align" value="0.000000" />
+ <constant name="rz10t_align" value="0.000000" />
+ <constant name="x1b_align" value="0.000000" />
+ <constant name="x2b_align" value="-0.000000" />
+ <constant name="x3b_align" value="-0.068351" />
+ <constant name="x4b_align" value="0.000012" />
+ <constant name="x5b_align" value="-0.100385" />
+ <constant name="x6b_align" value="0.000012" />
+ <constant name="x7b_align" value="-0.025606" />
+ <constant name="x8b_align" value="0.000007" />
+ <constant name="x9b_align" value="0.000000" />
+ <constant name="x10b_align" value="-0.000000" />
+ <constant name="y1b_align" value="0.000000" />
+ <constant name="y2b_align" value="0.000000" />
+ <constant name="y3b_align" value="0.681109" />
+ <constant name="y4b_align" value="0.061551" />
+ <constant name="y5b_align" value="1.004204" />
+ <constant name="y6b_align" value="0.037831" />
+ <constant name="y7b_align" value="0.506463" />
+ <constant name="y8b_align" value="0.015775" />
+ <constant name="y9b_align" value="0.000000" />
+ <constant name="y10b_align" value="0.000000" />
+ <constant name="z1b_align" value="0.000000" />
+ <constant name="z2b_align" value="0.000000" />
+ <constant name="z3b_align" value="-0.013322" />
+ <constant name="z4b_align" value="-0.001371" />
+ <constant name="z5b_align" value="-0.019651" />
+ <constant name="z6b_align" value="-0.000843" />
+ <constant name="z7b_align" value="-0.010587" />
+ <constant name="z8b_align" value="-0.000352" />
+ <constant name="z9b_align" value="0.000000" />
+ <constant name="z10b_align" value="0.000000" />
+ <constant name="rx1b_align" value="0.000000" />
+ <constant name="rx2b_align" value="-0.000000" />
+ <constant name="rx3b_align" value="0.000000" />
+ <constant name="rx4b_align" value="-0.000000" />
+ <constant name="rx5b_align" value="0.000000" />
+ <constant name="rx6b_align" value="-0.000000" />
+ <constant name="rx7b_align" value="0.000000" />
+ <constant name="rx8b_align" value="-0.000000" />
+ <constant name="rx9b_align" value="0.000000" />
+ <constant name="rx10b_align" value="-0.000000" />
+ <constant name="ry1b_align" value="0.000000" />
+ <constant name="ry2b_align" value="0.000000" />
+ <constant name="ry3b_align" value="0.000000" />
+ <constant name="ry4b_align" value="0.000000" />
+ <constant name="ry5b_align" value="0.000000" />
+ <constant name="ry6b_align" value="0.000000" />
+ <constant name="ry7b_align" value="0.000000" />
+ <constant name="ry8b_align" value="0.000000" />
+ <constant name="ry9b_align" value="0.000000" />
+ <constant name="ry10b_align" value="0.000000" />
+ <constant name="rz1b_align" value="0.000000" />
+ <constant name="rz2b_align" value="0.000000" />
+ <constant name="rz3b_align" value="0.000000" />
+ <constant name="rz4b_align" value="0.000000" />
+ <constant name="rz5b_align" value="0.000000" />
+ <constant name="rz6b_align" value="0.000000" />
+ <constant name="rz7b_align" value="0.000000" />
+ <constant name="rz8b_align" value="0.000000" />
+ <constant name="rz9b_align" value="0.000000" />
+ <constant name="rz10b_align" value="0.000000" />
<!-- Positions of sensor centers above/below nominal beam -->
<constant name="y1t" value="36.894" />
@@ -604,134 +604,134 @@
<constant name="mod2_rx10b" value="mod_rx10b" />
<constant name="mod2_ry10b" value="mod_ry10b+y_rot_bot_pivot" />
<constant name="mod2_rz10b" value="mod_rz10b" />
-
- <!-- final constants -->
- <constant name="final_x1t" value="mod2_x1t+x1t_align" />
- <constant name="final_x2t" value="mod2_x2t+x2t_align" />
- <constant name="final_x3t" value="mod2_x3t+x3t_align" />
- <constant name="final_x4t" value="mod2_x4t+x4t_align" />
- <constant name="final_x5t" value="mod2_x5t+x5t_align" />
- <constant name="final_x6t" value="mod2_x6t+x6t_align" />
- <constant name="final_x7t" value="mod2_x7t+x7t_align" />
- <constant name="final_x8t" value="mod2_x8t+x8t_align" />
- <constant name="final_x9t" value="mod2_x9t+x9t_align" />
- <constant name="final_x10t" value="mod2_x10t+x10t_align" />
- <constant name="final_y1t" value="mod2_y1t+y1t_align" />
- <constant name="final_y2t" value="mod2_y2t+y2t_align" />
- <constant name="final_y3t" value="mod2_y3t+y3t_align" />
- <constant name="final_y4t" value="mod2_y4t+y4t_align" />
- <constant name="final_y5t" value="mod2_y5t+y5t_align" />
- <constant name="final_y6t" value="mod2_y6t+y6t_align" />
- <constant name="final_y7t" value="mod2_y7t+y7t_align" />
- <constant name="final_y8t" value="mod2_y8t+y8t_align" />
- <constant name="final_y9t" value="mod2_y9t+y9t_align" />
- <constant name="final_y10t" value="mod2_y10t+y10t_align" />
- <constant name="final_z1t" value="mod2_z1t+z1t_align" />
- <constant name="final_z2t" value="mod2_z2t+z2t_align" />
- <constant name="final_z3t" value="mod2_z3t+z3t_align" />
- <constant name="final_z4t" value="mod2_z4t+z4t_align" />
- <constant name="final_z5t" value="mod2_z5t+z5t_align" />
- <constant name="final_z6t" value="mod2_z6t+z6t_align" />
- <constant name="final_z7t" value="mod2_z7t+z7t_align" />
- <constant name="final_z8t" value="mod2_z8t+z8t_align" />
- <constant name="final_z9t" value="mod2_z9t+z9t_align" />
- <constant name="final_z10t" value="mod2_z10t+z10t_align" />
- <constant name="final_rx1t" value="mod2_rx1t+rx1t_align" />
- <constant name="final_rx2t" value="mod2_rx2t+rx2t_align" />
- <constant name="final_rx3t" value="mod2_rx3t+rx3t_align" />
- <constant name="final_rx4t" value="mod2_rx4t+rx4t_align" />
- <constant name="final_rx5t" value="mod2_rx5t+rx5t_align" />
- <constant name="final_rx6t" value="mod2_rx6t+rx6t_align" />
- <constant name="final_rx7t" value="mod2_rx7t+rx7t_align" />
- <constant name="final_rx8t" value="mod2_rx8t+rx8t_align" />
- <constant name="final_rx9t" value="mod2_rx9t+rx9t_align" />
- <constant name="final_rx10t" value="mod2_rx10t+rx10t_align" />
- <constant name="final_ry1t" value="mod2_ry1t+ry1t_align" />
- <constant name="final_ry2t" value="mod2_ry2t+ry2t_align" />
- <constant name="final_ry3t" value="mod2_ry3t+ry3t_align" />
- <constant name="final_ry4t" value="mod2_ry4t+ry4t_align" />
- <constant name="final_ry5t" value="mod2_ry5t+ry5t_align" />
- <constant name="final_ry6t" value="mod2_ry6t+ry6t_align" />
- <constant name="final_ry7t" value="mod2_ry7t+ry7t_align" />
- <constant name="final_ry8t" value="mod2_ry8t+ry8t_align" />
- <constant name="final_ry9t" value="mod2_ry9t+ry9t_align" />
- <constant name="final_ry10t" value="mod2_ry10t+ry10t_align" />
- <constant name="final_rz1t" value="mod2_rz1t+rz1t_align" />
- <constant name="final_rz2t" value="mod2_rz2t+rz2t_align" />
- <constant name="final_rz3t" value="mod2_rz3t+rz3t_align" />
- <constant name="final_rz4t" value="mod2_rz4t+rz4t_align" />
- <constant name="final_rz5t" value="mod2_rz5t+rz5t_align" />
- <constant name="final_rz6t" value="mod2_rz6t+rz6t_align" />
- <constant name="final_rz7t" value="mod2_rz7t+rz7t_align" />
- <constant name="final_rz8t" value="mod2_rz8t+rz8t_align" />
- <constant name="final_rz9t" value="mod2_rz9t+rz9t_align" />
- <constant name="final_rz10t" value="mod2_rz10t+rz10t_align" />
- <constant name="final_x1b" value="mod2_x1b+x1b_align" />
- <constant name="final_x2b" value="mod2_x2b+x2b_align" />
- <constant name="final_x3b" value="mod2_x3b+x3b_align" />
- <constant name="final_x4b" value="mod2_x4b+x4b_align" />
- <constant name="final_x5b" value="mod2_x5b+x5b_align" />
- <constant name="final_x6b" value="mod2_x6b+x6b_align" />
- <constant name="final_x7b" value="mod2_x7b+x7b_align" />
- <constant name="final_x8b" value="mod2_x8b+x8b_align" />
- <constant name="final_x9b" value="mod2_x9b+x9b_align" />
- <constant name="final_x10b" value="mod2_x10b+x10b_align" />
- <constant name="final_y1b" value="mod2_y1b+y1b_align" />
- <constant name="final_y2b" value="mod2_y2b+y2b_align" />
- <constant name="final_y3b" value="mod2_y3b+y3b_align" />
- <constant name="final_y4b" value="mod2_y4b+y4b_align" />
- <constant name="final_y5b" value="mod2_y5b+y5b_align" />
- <constant name="final_y6b" value="mod2_y6b+y6b_align" />
- <constant name="final_y7b" value="mod2_y7b+y7b_align" />
- <constant name="final_y8b" value="mod2_y8b+y8b_align" />
- <constant name="final_y9b" value="mod2_y9b+y9b_align" />
- <constant name="final_y10b" value="mod2_y10b+y10b_align" />
- <constant name="final_z1b" value="mod2_z1b+z1b_align" />
- <constant name="final_z2b" value="mod2_z2b+z2b_align" />
- <constant name="final_z3b" value="mod2_z3b+z3b_align" />
- <constant name="final_z4b" value="mod2_z4b+z4b_align" />
- <constant name="final_z5b" value="mod2_z5b+z5b_align" />
- <constant name="final_z6b" value="mod2_z6b+z6b_align" />
- <constant name="final_z7b" value="mod2_z7b+z7b_align" />
- <constant name="final_z8b" value="mod2_z8b+z8b_align" />
- <constant name="final_z9b" value="mod2_z9b+z9b_align" />
- <constant name="final_z10b" value="mod2_z10b+z10b_align" />
- <constant name="final_rx1b" value="mod2_rx1b+rx1b_align" />
- <constant name="final_rx2b" value="mod2_rx2b+rx2b_align" />
- <constant name="final_rx3b" value="mod2_rx3b+rx3b_align" />
- <constant name="final_rx4b" value="mod2_rx4b+rx4b_align" />
- <constant name="final_rx5b" value="mod2_rx5b+rx5b_align" />
- <constant name="final_rx6b" value="mod2_rx6b+rx6b_align" />
- <constant name="final_rx7b" value="mod2_rx7b+rx7b_align" />
- <constant name="final_rx8b" value="mod2_rx8b+rx8b_align" />
- <constant name="final_rx9b" value="mod2_rx9b+rx9b_align" />
- <constant name="final_rx10b" value="mod2_rx10b+rx10b_align" />
- <constant name="final_ry1b" value="mod2_ry1b+ry1b_align" />
- <constant name="final_ry2b" value="mod2_ry2b+ry2b_align" />
- <constant name="final_ry3b" value="mod2_ry3b+ry3b_align" />
- <constant name="final_ry4b" value="mod2_ry4b+ry4b_align" />
- <constant name="final_ry5b" value="mod2_ry5b+ry5b_align" />
- <constant name="final_ry6b" value="mod2_ry6b+ry6b_align" />
- <constant name="final_ry7b" value="mod2_ry7b+ry7b_align" />
- <constant name="final_ry8b" value="mod2_ry8b+ry8b_align" />
- <constant name="final_ry9b" value="mod2_ry9b+ry9b_align" />
- <constant name="final_ry10b" value="mod2_ry10b+ry10b_align" />
- <constant name="final_rz1b" value="mod2_rz1b+rz1b_align" />
- <constant name="final_rz2b" value="mod2_rz2b+rz2b_align" />
- <constant name="final_rz3b" value="mod2_rz3b+rz3b_align" />
- <constant name="final_rz4b" value="mod2_rz4b+rz4b_align" />
- <constant name="final_rz5b" value="mod2_rz5b+rz5b_align" />
- <constant name="final_rz6b" value="mod2_rz6b+rz6b_align" />
- <constant name="final_rz7b" value="mod2_rz7b+rz7b_align" />
- <constant name="final_rz8b" value="mod2_rz8b+rz8b_align" />
- <constant name="final_rz9b" value="mod2_rz9b+rz9b_align" />
- <constant name="final_rz10b" value="mod2_rz10b+rz10b_align" />
-
-
+
+ <!-- final constants -->
+ <constant name="final_x1t" value="mod2_x1t+x1t_align" />
+ <constant name="final_x2t" value="mod2_x2t+x2t_align" />
+ <constant name="final_x3t" value="mod2_x3t+x3t_align" />
+ <constant name="final_x4t" value="mod2_x4t+x4t_align" />
+ <constant name="final_x5t" value="mod2_x5t+x5t_align" />
+ <constant name="final_x6t" value="mod2_x6t+x6t_align" />
+ <constant name="final_x7t" value="mod2_x7t+x7t_align" />
+ <constant name="final_x8t" value="mod2_x8t+x8t_align" />
+ <constant name="final_x9t" value="mod2_x9t+x9t_align" />
+ <constant name="final_x10t" value="mod2_x10t+x10t_align" />
+ <constant name="final_y1t" value="mod2_y1t+y1t_align" />
+ <constant name="final_y2t" value="mod2_y2t+y2t_align" />
+ <constant name="final_y3t" value="mod2_y3t+y3t_align" />
+ <constant name="final_y4t" value="mod2_y4t+y4t_align" />
+ <constant name="final_y5t" value="mod2_y5t+y5t_align" />
+ <constant name="final_y6t" value="mod2_y6t+y6t_align" />
+ <constant name="final_y7t" value="mod2_y7t+y7t_align" />
+ <constant name="final_y8t" value="mod2_y8t+y8t_align" />
+ <constant name="final_y9t" value="mod2_y9t+y9t_align" />
+ <constant name="final_y10t" value="mod2_y10t+y10t_align" />
+ <constant name="final_z1t" value="mod2_z1t+z1t_align" />
+ <constant name="final_z2t" value="mod2_z2t+z2t_align" />
+ <constant name="final_z3t" value="mod2_z3t+z3t_align" />
+ <constant name="final_z4t" value="mod2_z4t+z4t_align" />
+ <constant name="final_z5t" value="mod2_z5t+z5t_align" />
+ <constant name="final_z6t" value="mod2_z6t+z6t_align" />
+ <constant name="final_z7t" value="mod2_z7t+z7t_align" />
+ <constant name="final_z8t" value="mod2_z8t+z8t_align" />
+ <constant name="final_z9t" value="mod2_z9t+z9t_align" />
+ <constant name="final_z10t" value="mod2_z10t+z10t_align" />
+ <constant name="final_rx1t" value="mod2_rx1t+rx1t_align" />
+ <constant name="final_rx2t" value="mod2_rx2t+rx2t_align" />
+ <constant name="final_rx3t" value="mod2_rx3t+rx3t_align" />
+ <constant name="final_rx4t" value="mod2_rx4t+rx4t_align" />
+ <constant name="final_rx5t" value="mod2_rx5t+rx5t_align" />
+ <constant name="final_rx6t" value="mod2_rx6t+rx6t_align" />
+ <constant name="final_rx7t" value="mod2_rx7t+rx7t_align" />
+ <constant name="final_rx8t" value="mod2_rx8t+rx8t_align" />
+ <constant name="final_rx9t" value="mod2_rx9t+rx9t_align" />
+ <constant name="final_rx10t" value="mod2_rx10t+rx10t_align" />
+ <constant name="final_ry1t" value="mod2_ry1t+ry1t_align" />
+ <constant name="final_ry2t" value="mod2_ry2t+ry2t_align" />
+ <constant name="final_ry3t" value="mod2_ry3t+ry3t_align" />
+ <constant name="final_ry4t" value="mod2_ry4t+ry4t_align" />
+ <constant name="final_ry5t" value="mod2_ry5t+ry5t_align" />
+ <constant name="final_ry6t" value="mod2_ry6t+ry6t_align" />
+ <constant name="final_ry7t" value="mod2_ry7t+ry7t_align" />
+ <constant name="final_ry8t" value="mod2_ry8t+ry8t_align" />
+ <constant name="final_ry9t" value="mod2_ry9t+ry9t_align" />
+ <constant name="final_ry10t" value="mod2_ry10t+ry10t_align" />
+ <constant name="final_rz1t" value="mod2_rz1t+rz1t_align" />
+ <constant name="final_rz2t" value="mod2_rz2t+rz2t_align" />
+ <constant name="final_rz3t" value="mod2_rz3t+rz3t_align" />
+ <constant name="final_rz4t" value="mod2_rz4t+rz4t_align" />
+ <constant name="final_rz5t" value="mod2_rz5t+rz5t_align" />
+ <constant name="final_rz6t" value="mod2_rz6t+rz6t_align" />
+ <constant name="final_rz7t" value="mod2_rz7t+rz7t_align" />
+ <constant name="final_rz8t" value="mod2_rz8t+rz8t_align" />
+ <constant name="final_rz9t" value="mod2_rz9t+rz9t_align" />
+ <constant name="final_rz10t" value="mod2_rz10t+rz10t_align" />
+ <constant name="final_x1b" value="mod2_x1b+x1b_align" />
+ <constant name="final_x2b" value="mod2_x2b+x2b_align" />
+ <constant name="final_x3b" value="mod2_x3b+x3b_align" />
+ <constant name="final_x4b" value="mod2_x4b+x4b_align" />
+ <constant name="final_x5b" value="mod2_x5b+x5b_align" />
+ <constant name="final_x6b" value="mod2_x6b+x6b_align" />
+ <constant name="final_x7b" value="mod2_x7b+x7b_align" />
+ <constant name="final_x8b" value="mod2_x8b+x8b_align" />
+ <constant name="final_x9b" value="mod2_x9b+x9b_align" />
+ <constant name="final_x10b" value="mod2_x10b+x10b_align" />
+ <constant name="final_y1b" value="mod2_y1b+y1b_align" />
+ <constant name="final_y2b" value="mod2_y2b+y2b_align" />
+ <constant name="final_y3b" value="mod2_y3b+y3b_align" />
+ <constant name="final_y4b" value="mod2_y4b+y4b_align" />
+ <constant name="final_y5b" value="mod2_y5b+y5b_align" />
+ <constant name="final_y6b" value="mod2_y6b+y6b_align" />
+ <constant name="final_y7b" value="mod2_y7b+y7b_align" />
+ <constant name="final_y8b" value="mod2_y8b+y8b_align" />
+ <constant name="final_y9b" value="mod2_y9b+y9b_align" />
+ <constant name="final_y10b" value="mod2_y10b+y10b_align" />
+ <constant name="final_z1b" value="mod2_z1b+z1b_align" />
+ <constant name="final_z2b" value="mod2_z2b+z2b_align" />
+ <constant name="final_z3b" value="mod2_z3b+z3b_align" />
+ <constant name="final_z4b" value="mod2_z4b+z4b_align" />
+ <constant name="final_z5b" value="mod2_z5b+z5b_align" />
+ <constant name="final_z6b" value="mod2_z6b+z6b_align" />
+ <constant name="final_z7b" value="mod2_z7b+z7b_align" />
+ <constant name="final_z8b" value="mod2_z8b+z8b_align" />
+ <constant name="final_z9b" value="mod2_z9b+z9b_align" />
+ <constant name="final_z10b" value="mod2_z10b+z10b_align" />
+ <constant name="final_rx1b" value="mod2_rx1b+rx1b_align" />
+ <constant name="final_rx2b" value="mod2_rx2b+rx2b_align" />
+ <constant name="final_rx3b" value="mod2_rx3b+rx3b_align" />
+ <constant name="final_rx4b" value="mod2_rx4b+rx4b_align" />
+ <constant name="final_rx5b" value="mod2_rx5b+rx5b_align" />
+ <constant name="final_rx6b" value="mod2_rx6b+rx6b_align" />
+ <constant name="final_rx7b" value="mod2_rx7b+rx7b_align" />
+ <constant name="final_rx8b" value="mod2_rx8b+rx8b_align" />
+ <constant name="final_rx9b" value="mod2_rx9b+rx9b_align" />
+ <constant name="final_rx10b" value="mod2_rx10b+rx10b_align" />
+ <constant name="final_ry1b" value="mod2_ry1b+ry1b_align" />
+ <constant name="final_ry2b" value="mod2_ry2b+ry2b_align" />
+ <constant name="final_ry3b" value="mod2_ry3b+ry3b_align" />
+ <constant name="final_ry4b" value="mod2_ry4b+ry4b_align" />
+ <constant name="final_ry5b" value="mod2_ry5b+ry5b_align" />
+ <constant name="final_ry6b" value="mod2_ry6b+ry6b_align" />
+ <constant name="final_ry7b" value="mod2_ry7b+ry7b_align" />
+ <constant name="final_ry8b" value="mod2_ry8b+ry8b_align" />
+ <constant name="final_ry9b" value="mod2_ry9b+ry9b_align" />
+ <constant name="final_ry10b" value="mod2_ry10b+ry10b_align" />
+ <constant name="final_rz1b" value="mod2_rz1b+rz1b_align" />
+ <constant name="final_rz2b" value="mod2_rz2b+rz2b_align" />
+ <constant name="final_rz3b" value="mod2_rz3b+rz3b_align" />
+ <constant name="final_rz4b" value="mod2_rz4b+rz4b_align" />
+ <constant name="final_rz5b" value="mod2_rz5b+rz5b_align" />
+ <constant name="final_rz6b" value="mod2_rz6b+rz6b_align" />
+ <constant name="final_rz7b" value="mod2_rz7b+rz7b_align" />
+ <constant name="final_rz8b" value="mod2_rz8b+rz8b_align" />
+ <constant name="final_rz9b" value="mod2_rz9b+rz9b_align" />
+ <constant name="final_rz10b" value="mod2_rz10b+rz10b_align" />
+
+
</define>
<materials>
- <!-- Set the world material to vacuum. -->
+ <!-- Set the world material to vacuum. -->
<material name="WorldMaterial">
<D type="density" unit="g/cm3" value="0.0000000000000001" />
<fraction n="1.0" ref="Vacuum" />
@@ -820,7 +820,7 @@
</layer>
<layer id="2">
<module_placement name="TestRunModuleFieldDef" id="0" x="0" y="0" z="dipoleMagnetPositionZ+dipoleMagnetLength/2" rx="0" ry="0" rz="-PI/2" />
- </layer>
+ </layer>
</detector>
<detector id="13" name="Ecal" type="HPSEcal3" insideTrackingVolume="false" readout="EcalHits" vis="ECALVis">
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v7/compact.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v7/compact.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v7/compact.xml Wed Apr 27 11:11:32 2016
@@ -488,7 +488,7 @@
</define>
<materials>
- <!-- Set the world material to vacuum. -->
+ <!-- Set the world material to vacuum. -->
<material name="WorldMaterial">
<D type="density" unit="g/cm3" value="0.0000000000000001"/>
<fraction n="1.0" ref="Vacuum" />
@@ -577,7 +577,7 @@
</layer>
<layer id="2">
<module_placement name="TestRunModuleFieldDef" id="0" x="0" y="0" z="dipoleMagnetPositionZ+dipoleMagnetLength/2" rx="0" ry="0" rz="-PI/2"/>
- </layer>
+ </layer>
</detector>
<detector id="13" name="Ecal" type="HPSEcal3" insideTrackingVolume="false" readout="EcalHits" vis="ECALVis">
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v8-4/compact.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v8-4/compact.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v8-4/compact.xml Wed Apr 27 11:11:32 2016
@@ -3,9 +3,9 @@
<info name="HPS-TestRun-v8-4">
<comment>
- HPS JLab Test Run detector with dipole field starting at z=0
- Alignment based on millepede: compact-u3-6-float-1351-v8-10k.xml
- </comment>
+ HPS JLab Test Run detector with dipole field starting at z=0
+ Alignment based on millepede: compact-u3-6-float-1351-v8-10k.xml
+ </comment>
</info>
<define>
@@ -65,127 +65,127 @@
<!-- <constant name="x_off" value = "-15.0"/> -->
<constant name="x_off" value="0.0" />
- <!-- alignment corrections -->
- <constant name="x1t_align" value="0.000000" />
- <constant name="x2t_align" value="0.000000" />
- <constant name="x3t_align" value="0.000014" />
- <constant name="x4t_align" value="0.015024" />
- <constant name="x5t_align" value="0.000045" />
- <constant name="x6t_align" value="-0.003411" />
- <constant name="x7t_align" value="0.000000" />
- <constant name="x8t_align" value="0.000000" />
- <constant name="x9t_align" value="0.000000" />
- <constant name="x10t_align" value="0.000000" />
- <constant name="y1t_align" value="0.000000" />
- <constant name="y2t_align" value="0.000000" />
- <constant name="y3t_align" value="0.112932" />
- <constant name="y4t_align" value="0.149860" />
- <constant name="y5t_align" value="-0.082467" />
- <constant name="y6t_align" value="-0.033966" />
- <constant name="y7t_align" value="0.000000" />
- <constant name="y8t_align" value="0.000000" />
- <constant name="y9t_align" value="0.000000" />
- <constant name="y10t_align" value="0.000000" />
- <constant name="z1t_align" value="-0.000000" />
- <constant name="z2t_align" value="0.000000" />
- <constant name="z3t_align" value="0.002938" />
- <constant name="z4t_align" value="0.003424" />
- <constant name="z5t_align" value="-0.002147" />
- <constant name="z6t_align" value="-0.000776" />
- <constant name="z7t_align" value="-0.000000" />
- <constant name="z8t_align" value="0.000000" />
- <constant name="z9t_align" value="-0.000000" />
- <constant name="z10t_align" value="0.000000" />
- <constant name="rx1t_align" value="0.000000" />
- <constant name="rx2t_align" value="0.000000" />
- <constant name="rx3t_align" value="0.000000" />
- <constant name="rx4t_align" value="0.000000" />
- <constant name="rx5t_align" value="0.000000" />
- <constant name="rx6t_align" value="0.000000" />
- <constant name="rx7t_align" value="0.000000" />
- <constant name="rx8t_align" value="0.000000" />
- <constant name="rx9t_align" value="0.000000" />
- <constant name="rx10t_align" value="0.000000" />
- <constant name="ry1t_align" value="0.000000" />
- <constant name="ry2t_align" value="0.000000" />
- <constant name="ry3t_align" value="0.000000" />
- <constant name="ry4t_align" value="0.000000" />
- <constant name="ry5t_align" value="0.000000" />
- <constant name="ry6t_align" value="0.000000" />
- <constant name="ry7t_align" value="0.000000" />
- <constant name="ry8t_align" value="0.000000" />
- <constant name="ry9t_align" value="0.000000" />
- <constant name="ry10t_align" value="0.000000" />
- <constant name="rz1t_align" value="-0.000000" />
- <constant name="rz2t_align" value="0.000000" />
- <constant name="rz3t_align" value="-0.000000" />
- <constant name="rz4t_align" value="0.000000" />
- <constant name="rz5t_align" value="-0.000000" />
- <constant name="rz6t_align" value="0.000000" />
- <constant name="rz7t_align" value="-0.000000" />
- <constant name="rz8t_align" value="0.000000" />
- <constant name="rz9t_align" value="-0.000000" />
- <constant name="rz10t_align" value="0.000000" />
- <constant name="x1b_align" value="0.000000" />
- <constant name="x2b_align" value="0.000000" />
- <constant name="x3b_align" value="0.008443" />
- <constant name="x4b_align" value="-0.000012" />
- <constant name="x5b_align" value="0.004437" />
- <constant name="x6b_align" value="-0.000010" />
- <constant name="x7b_align" value="0.000000" />
- <constant name="x8b_align" value="0.000000" />
- <constant name="x9b_align" value="0.000000" />
- <constant name="x10b_align" value="0.000000" />
- <constant name="y1b_align" value="0.000000" />
- <constant name="y2b_align" value="0.000000" />
- <constant name="y3b_align" value="-0.084138" />
- <constant name="y4b_align" value="-0.061835" />
- <constant name="y5b_align" value="-0.044388" />
- <constant name="y6b_align" value="-0.032317" />
- <constant name="y7b_align" value="0.000000" />
- <constant name="y8b_align" value="0.000000" />
- <constant name="y9b_align" value="0.000000" />
- <constant name="y10b_align" value="0.000000" />
- <constant name="z1b_align" value="0.000000" />
- <constant name="z2b_align" value="0.000000" />
- <constant name="z3b_align" value="0.001646" />
- <constant name="z4b_align" value="0.001377" />
- <constant name="z5b_align" value="0.000869" />
- <constant name="z6b_align" value="0.000720" />
- <constant name="z7b_align" value="0.000000" />
- <constant name="z8b_align" value="0.000000" />
- <constant name="z9b_align" value="0.000000" />
- <constant name="z10b_align" value="0.000000" />
- <constant name="rx1b_align" value="0.000000" />
- <constant name="rx2b_align" value="0.000000" />
- <constant name="rx3b_align" value="0.000000" />
- <constant name="rx4b_align" value="0.000000" />
- <constant name="rx5b_align" value="0.000000" />
- <constant name="rx6b_align" value="0.000000" />
- <constant name="rx7b_align" value="0.000000" />
- <constant name="rx8b_align" value="0.000000" />
- <constant name="rx9b_align" value="0.000000" />
- <constant name="rx10b_align" value="0.000000" />
- <constant name="ry1b_align" value="0.000000" />
- <constant name="ry2b_align" value="0.000000" />
- <constant name="ry3b_align" value="0.000000" />
- <constant name="ry4b_align" value="0.000000" />
- <constant name="ry5b_align" value="0.000000" />
- <constant name="ry6b_align" value="0.000000" />
- <constant name="ry7b_align" value="0.000000" />
- <constant name="ry8b_align" value="0.000000" />
- <constant name="ry9b_align" value="0.000000" />
- <constant name="ry10b_align" value="0.000000" />
- <constant name="rz1b_align" value="0.000000" />
- <constant name="rz2b_align" value="0.000000" />
- <constant name="rz3b_align" value="0.000000" />
- <constant name="rz4b_align" value="0.000000" />
- <constant name="rz5b_align" value="0.000000" />
- <constant name="rz6b_align" value="0.000000" />
- <constant name="rz7b_align" value="0.000000" />
- <constant name="rz8b_align" value="0.000000" />
- <constant name="rz9b_align" value="0.000000" />
- <constant name="rz10b_align" value="0.000000" />
+ <!-- alignment corrections -->
+ <constant name="x1t_align" value="0.000000" />
+ <constant name="x2t_align" value="0.000000" />
+ <constant name="x3t_align" value="0.000014" />
+ <constant name="x4t_align" value="0.015024" />
+ <constant name="x5t_align" value="0.000045" />
+ <constant name="x6t_align" value="-0.003411" />
+ <constant name="x7t_align" value="0.000000" />
+ <constant name="x8t_align" value="0.000000" />
+ <constant name="x9t_align" value="0.000000" />
+ <constant name="x10t_align" value="0.000000" />
+ <constant name="y1t_align" value="0.000000" />
+ <constant name="y2t_align" value="0.000000" />
+ <constant name="y3t_align" value="0.112932" />
+ <constant name="y4t_align" value="0.149860" />
+ <constant name="y5t_align" value="-0.082467" />
+ <constant name="y6t_align" value="-0.033966" />
+ <constant name="y7t_align" value="0.000000" />
+ <constant name="y8t_align" value="0.000000" />
+ <constant name="y9t_align" value="0.000000" />
+ <constant name="y10t_align" value="0.000000" />
+ <constant name="z1t_align" value="-0.000000" />
+ <constant name="z2t_align" value="0.000000" />
+ <constant name="z3t_align" value="0.002938" />
+ <constant name="z4t_align" value="0.003424" />
+ <constant name="z5t_align" value="-0.002147" />
+ <constant name="z6t_align" value="-0.000776" />
+ <constant name="z7t_align" value="-0.000000" />
+ <constant name="z8t_align" value="0.000000" />
+ <constant name="z9t_align" value="-0.000000" />
+ <constant name="z10t_align" value="0.000000" />
+ <constant name="rx1t_align" value="0.000000" />
+ <constant name="rx2t_align" value="0.000000" />
+ <constant name="rx3t_align" value="0.000000" />
+ <constant name="rx4t_align" value="0.000000" />
+ <constant name="rx5t_align" value="0.000000" />
+ <constant name="rx6t_align" value="0.000000" />
+ <constant name="rx7t_align" value="0.000000" />
+ <constant name="rx8t_align" value="0.000000" />
+ <constant name="rx9t_align" value="0.000000" />
+ <constant name="rx10t_align" value="0.000000" />
+ <constant name="ry1t_align" value="0.000000" />
+ <constant name="ry2t_align" value="0.000000" />
+ <constant name="ry3t_align" value="0.000000" />
+ <constant name="ry4t_align" value="0.000000" />
+ <constant name="ry5t_align" value="0.000000" />
+ <constant name="ry6t_align" value="0.000000" />
+ <constant name="ry7t_align" value="0.000000" />
+ <constant name="ry8t_align" value="0.000000" />
+ <constant name="ry9t_align" value="0.000000" />
+ <constant name="ry10t_align" value="0.000000" />
+ <constant name="rz1t_align" value="-0.000000" />
+ <constant name="rz2t_align" value="0.000000" />
+ <constant name="rz3t_align" value="-0.000000" />
+ <constant name="rz4t_align" value="0.000000" />
+ <constant name="rz5t_align" value="-0.000000" />
+ <constant name="rz6t_align" value="0.000000" />
+ <constant name="rz7t_align" value="-0.000000" />
+ <constant name="rz8t_align" value="0.000000" />
+ <constant name="rz9t_align" value="-0.000000" />
+ <constant name="rz10t_align" value="0.000000" />
+ <constant name="x1b_align" value="0.000000" />
+ <constant name="x2b_align" value="0.000000" />
+ <constant name="x3b_align" value="0.008443" />
+ <constant name="x4b_align" value="-0.000012" />
+ <constant name="x5b_align" value="0.004437" />
+ <constant name="x6b_align" value="-0.000010" />
+ <constant name="x7b_align" value="0.000000" />
+ <constant name="x8b_align" value="0.000000" />
+ <constant name="x9b_align" value="0.000000" />
+ <constant name="x10b_align" value="0.000000" />
+ <constant name="y1b_align" value="0.000000" />
+ <constant name="y2b_align" value="0.000000" />
+ <constant name="y3b_align" value="-0.084138" />
+ <constant name="y4b_align" value="-0.061835" />
+ <constant name="y5b_align" value="-0.044388" />
+ <constant name="y6b_align" value="-0.032317" />
+ <constant name="y7b_align" value="0.000000" />
+ <constant name="y8b_align" value="0.000000" />
+ <constant name="y9b_align" value="0.000000" />
+ <constant name="y10b_align" value="0.000000" />
+ <constant name="z1b_align" value="0.000000" />
+ <constant name="z2b_align" value="0.000000" />
+ <constant name="z3b_align" value="0.001646" />
+ <constant name="z4b_align" value="0.001377" />
+ <constant name="z5b_align" value="0.000869" />
+ <constant name="z6b_align" value="0.000720" />
+ <constant name="z7b_align" value="0.000000" />
+ <constant name="z8b_align" value="0.000000" />
+ <constant name="z9b_align" value="0.000000" />
+ <constant name="z10b_align" value="0.000000" />
+ <constant name="rx1b_align" value="0.000000" />
+ <constant name="rx2b_align" value="0.000000" />
+ <constant name="rx3b_align" value="0.000000" />
+ <constant name="rx4b_align" value="0.000000" />
+ <constant name="rx5b_align" value="0.000000" />
+ <constant name="rx6b_align" value="0.000000" />
+ <constant name="rx7b_align" value="0.000000" />
+ <constant name="rx8b_align" value="0.000000" />
+ <constant name="rx9b_align" value="0.000000" />
+ <constant name="rx10b_align" value="0.000000" />
+ <constant name="ry1b_align" value="0.000000" />
+ <constant name="ry2b_align" value="0.000000" />
+ <constant name="ry3b_align" value="0.000000" />
+ <constant name="ry4b_align" value="0.000000" />
+ <constant name="ry5b_align" value="0.000000" />
+ <constant name="ry6b_align" value="0.000000" />
+ <constant name="ry7b_align" value="0.000000" />
+ <constant name="ry8b_align" value="0.000000" />
+ <constant name="ry9b_align" value="0.000000" />
+ <constant name="ry10b_align" value="0.000000" />
+ <constant name="rz1b_align" value="0.000000" />
+ <constant name="rz2b_align" value="0.000000" />
+ <constant name="rz3b_align" value="0.000000" />
+ <constant name="rz4b_align" value="0.000000" />
+ <constant name="rz5b_align" value="0.000000" />
+ <constant name="rz6b_align" value="0.000000" />
+ <constant name="rz7b_align" value="0.000000" />
+ <constant name="rz8b_align" value="0.000000" />
+ <constant name="rz9b_align" value="0.000000" />
+ <constant name="rz10b_align" value="0.000000" />
<!-- Positions of sensor centers above/below nominal beam -->
<constant name="y1t" value="36.894" />
@@ -585,134 +585,134 @@
<constant name="mod2_rx10b" value="mod_rx10b" />
<constant name="mod2_ry10b" value="mod_ry10b+y_rot_bot_pivot" />
<constant name="mod2_rz10b" value="mod_rz10b" />
-
- <!-- final constants -->
- <constant name="final_x1t" value="mod2_x1t+x1t_align" />
- <constant name="final_x2t" value="mod2_x2t+x2t_align" />
- <constant name="final_x3t" value="mod2_x3t+x3t_align" />
- <constant name="final_x4t" value="mod2_x4t+x4t_align" />
- <constant name="final_x5t" value="mod2_x5t+x5t_align" />
- <constant name="final_x6t" value="mod2_x6t+x6t_align" />
- <constant name="final_x7t" value="mod2_x7t+x7t_align" />
- <constant name="final_x8t" value="mod2_x8t+x8t_align" />
- <constant name="final_x9t" value="mod2_x9t+x9t_align" />
- <constant name="final_x10t" value="mod2_x10t+x10t_align" />
- <constant name="final_y1t" value="mod2_y1t+y1t_align" />
- <constant name="final_y2t" value="mod2_y2t+y2t_align" />
- <constant name="final_y3t" value="mod2_y3t+y3t_align" />
- <constant name="final_y4t" value="mod2_y4t+y4t_align" />
- <constant name="final_y5t" value="mod2_y5t+y5t_align" />
- <constant name="final_y6t" value="mod2_y6t+y6t_align" />
- <constant name="final_y7t" value="mod2_y7t+y7t_align" />
- <constant name="final_y8t" value="mod2_y8t+y8t_align" />
- <constant name="final_y9t" value="mod2_y9t+y9t_align" />
- <constant name="final_y10t" value="mod2_y10t+y10t_align" />
- <constant name="final_z1t" value="mod2_z1t+z1t_align" />
- <constant name="final_z2t" value="mod2_z2t+z2t_align" />
- <constant name="final_z3t" value="mod2_z3t+z3t_align" />
- <constant name="final_z4t" value="mod2_z4t+z4t_align" />
- <constant name="final_z5t" value="mod2_z5t+z5t_align" />
- <constant name="final_z6t" value="mod2_z6t+z6t_align" />
- <constant name="final_z7t" value="mod2_z7t+z7t_align" />
- <constant name="final_z8t" value="mod2_z8t+z8t_align" />
- <constant name="final_z9t" value="mod2_z9t+z9t_align" />
- <constant name="final_z10t" value="mod2_z10t+z10t_align" />
- <constant name="final_rx1t" value="mod2_rx1t+rx1t_align" />
- <constant name="final_rx2t" value="mod2_rx2t+rx2t_align" />
- <constant name="final_rx3t" value="mod2_rx3t+rx3t_align" />
- <constant name="final_rx4t" value="mod2_rx4t+rx4t_align" />
- <constant name="final_rx5t" value="mod2_rx5t+rx5t_align" />
- <constant name="final_rx6t" value="mod2_rx6t+rx6t_align" />
- <constant name="final_rx7t" value="mod2_rx7t+rx7t_align" />
- <constant name="final_rx8t" value="mod2_rx8t+rx8t_align" />
- <constant name="final_rx9t" value="mod2_rx9t+rx9t_align" />
- <constant name="final_rx10t" value="mod2_rx10t+rx10t_align" />
- <constant name="final_ry1t" value="mod2_ry1t+ry1t_align" />
- <constant name="final_ry2t" value="mod2_ry2t+ry2t_align" />
- <constant name="final_ry3t" value="mod2_ry3t+ry3t_align" />
- <constant name="final_ry4t" value="mod2_ry4t+ry4t_align" />
- <constant name="final_ry5t" value="mod2_ry5t+ry5t_align" />
- <constant name="final_ry6t" value="mod2_ry6t+ry6t_align" />
- <constant name="final_ry7t" value="mod2_ry7t+ry7t_align" />
- <constant name="final_ry8t" value="mod2_ry8t+ry8t_align" />
- <constant name="final_ry9t" value="mod2_ry9t+ry9t_align" />
- <constant name="final_ry10t" value="mod2_ry10t+ry10t_align" />
- <constant name="final_rz1t" value="mod2_rz1t+rz1t_align" />
- <constant name="final_rz2t" value="mod2_rz2t+rz2t_align" />
- <constant name="final_rz3t" value="mod2_rz3t+rz3t_align" />
- <constant name="final_rz4t" value="mod2_rz4t+rz4t_align" />
- <constant name="final_rz5t" value="mod2_rz5t+rz5t_align" />
- <constant name="final_rz6t" value="mod2_rz6t+rz6t_align" />
- <constant name="final_rz7t" value="mod2_rz7t+rz7t_align" />
- <constant name="final_rz8t" value="mod2_rz8t+rz8t_align" />
- <constant name="final_rz9t" value="mod2_rz9t+rz9t_align" />
- <constant name="final_rz10t" value="mod2_rz10t+rz10t_align" />
- <constant name="final_x1b" value="mod2_x1b+x1b_align" />
- <constant name="final_x2b" value="mod2_x2b+x2b_align" />
- <constant name="final_x3b" value="mod2_x3b+x3b_align" />
- <constant name="final_x4b" value="mod2_x4b+x4b_align" />
- <constant name="final_x5b" value="mod2_x5b+x5b_align" />
- <constant name="final_x6b" value="mod2_x6b+x6b_align" />
- <constant name="final_x7b" value="mod2_x7b+x7b_align" />
- <constant name="final_x8b" value="mod2_x8b+x8b_align" />
- <constant name="final_x9b" value="mod2_x9b+x9b_align" />
- <constant name="final_x10b" value="mod2_x10b+x10b_align" />
- <constant name="final_y1b" value="mod2_y1b+y1b_align" />
- <constant name="final_y2b" value="mod2_y2b+y2b_align" />
- <constant name="final_y3b" value="mod2_y3b+y3b_align" />
- <constant name="final_y4b" value="mod2_y4b+y4b_align" />
- <constant name="final_y5b" value="mod2_y5b+y5b_align" />
- <constant name="final_y6b" value="mod2_y6b+y6b_align" />
- <constant name="final_y7b" value="mod2_y7b+y7b_align" />
- <constant name="final_y8b" value="mod2_y8b+y8b_align" />
- <constant name="final_y9b" value="mod2_y9b+y9b_align" />
- <constant name="final_y10b" value="mod2_y10b+y10b_align" />
- <constant name="final_z1b" value="mod2_z1b+z1b_align" />
- <constant name="final_z2b" value="mod2_z2b+z2b_align" />
- <constant name="final_z3b" value="mod2_z3b+z3b_align" />
- <constant name="final_z4b" value="mod2_z4b+z4b_align" />
- <constant name="final_z5b" value="mod2_z5b+z5b_align" />
- <constant name="final_z6b" value="mod2_z6b+z6b_align" />
- <constant name="final_z7b" value="mod2_z7b+z7b_align" />
- <constant name="final_z8b" value="mod2_z8b+z8b_align" />
- <constant name="final_z9b" value="mod2_z9b+z9b_align" />
- <constant name="final_z10b" value="mod2_z10b+z10b_align" />
- <constant name="final_rx1b" value="mod2_rx1b+rx1b_align" />
- <constant name="final_rx2b" value="mod2_rx2b+rx2b_align" />
- <constant name="final_rx3b" value="mod2_rx3b+rx3b_align" />
- <constant name="final_rx4b" value="mod2_rx4b+rx4b_align" />
- <constant name="final_rx5b" value="mod2_rx5b+rx5b_align" />
- <constant name="final_rx6b" value="mod2_rx6b+rx6b_align" />
- <constant name="final_rx7b" value="mod2_rx7b+rx7b_align" />
- <constant name="final_rx8b" value="mod2_rx8b+rx8b_align" />
- <constant name="final_rx9b" value="mod2_rx9b+rx9b_align" />
- <constant name="final_rx10b" value="mod2_rx10b+rx10b_align" />
- <constant name="final_ry1b" value="mod2_ry1b+ry1b_align" />
- <constant name="final_ry2b" value="mod2_ry2b+ry2b_align" />
- <constant name="final_ry3b" value="mod2_ry3b+ry3b_align" />
- <constant name="final_ry4b" value="mod2_ry4b+ry4b_align" />
- <constant name="final_ry5b" value="mod2_ry5b+ry5b_align" />
- <constant name="final_ry6b" value="mod2_ry6b+ry6b_align" />
- <constant name="final_ry7b" value="mod2_ry7b+ry7b_align" />
- <constant name="final_ry8b" value="mod2_ry8b+ry8b_align" />
- <constant name="final_ry9b" value="mod2_ry9b+ry9b_align" />
- <constant name="final_ry10b" value="mod2_ry10b+ry10b_align" />
- <constant name="final_rz1b" value="mod2_rz1b+rz1b_align" />
- <constant name="final_rz2b" value="mod2_rz2b+rz2b_align" />
- <constant name="final_rz3b" value="mod2_rz3b+rz3b_align" />
- <constant name="final_rz4b" value="mod2_rz4b+rz4b_align" />
- <constant name="final_rz5b" value="mod2_rz5b+rz5b_align" />
- <constant name="final_rz6b" value="mod2_rz6b+rz6b_align" />
- <constant name="final_rz7b" value="mod2_rz7b+rz7b_align" />
- <constant name="final_rz8b" value="mod2_rz8b+rz8b_align" />
- <constant name="final_rz9b" value="mod2_rz9b+rz9b_align" />
- <constant name="final_rz10b" value="mod2_rz10b+rz10b_align" />
-
-
+
+ <!-- final constants -->
+ <constant name="final_x1t" value="mod2_x1t+x1t_align" />
+ <constant name="final_x2t" value="mod2_x2t+x2t_align" />
+ <constant name="final_x3t" value="mod2_x3t+x3t_align" />
+ <constant name="final_x4t" value="mod2_x4t+x4t_align" />
+ <constant name="final_x5t" value="mod2_x5t+x5t_align" />
+ <constant name="final_x6t" value="mod2_x6t+x6t_align" />
+ <constant name="final_x7t" value="mod2_x7t+x7t_align" />
+ <constant name="final_x8t" value="mod2_x8t+x8t_align" />
+ <constant name="final_x9t" value="mod2_x9t+x9t_align" />
+ <constant name="final_x10t" value="mod2_x10t+x10t_align" />
+ <constant name="final_y1t" value="mod2_y1t+y1t_align" />
+ <constant name="final_y2t" value="mod2_y2t+y2t_align" />
+ <constant name="final_y3t" value="mod2_y3t+y3t_align" />
+ <constant name="final_y4t" value="mod2_y4t+y4t_align" />
+ <constant name="final_y5t" value="mod2_y5t+y5t_align" />
+ <constant name="final_y6t" value="mod2_y6t+y6t_align" />
+ <constant name="final_y7t" value="mod2_y7t+y7t_align" />
+ <constant name="final_y8t" value="mod2_y8t+y8t_align" />
+ <constant name="final_y9t" value="mod2_y9t+y9t_align" />
+ <constant name="final_y10t" value="mod2_y10t+y10t_align" />
+ <constant name="final_z1t" value="mod2_z1t+z1t_align" />
+ <constant name="final_z2t" value="mod2_z2t+z2t_align" />
+ <constant name="final_z3t" value="mod2_z3t+z3t_align" />
+ <constant name="final_z4t" value="mod2_z4t+z4t_align" />
+ <constant name="final_z5t" value="mod2_z5t+z5t_align" />
+ <constant name="final_z6t" value="mod2_z6t+z6t_align" />
+ <constant name="final_z7t" value="mod2_z7t+z7t_align" />
+ <constant name="final_z8t" value="mod2_z8t+z8t_align" />
+ <constant name="final_z9t" value="mod2_z9t+z9t_align" />
+ <constant name="final_z10t" value="mod2_z10t+z10t_align" />
+ <constant name="final_rx1t" value="mod2_rx1t+rx1t_align" />
+ <constant name="final_rx2t" value="mod2_rx2t+rx2t_align" />
+ <constant name="final_rx3t" value="mod2_rx3t+rx3t_align" />
+ <constant name="final_rx4t" value="mod2_rx4t+rx4t_align" />
+ <constant name="final_rx5t" value="mod2_rx5t+rx5t_align" />
+ <constant name="final_rx6t" value="mod2_rx6t+rx6t_align" />
+ <constant name="final_rx7t" value="mod2_rx7t+rx7t_align" />
+ <constant name="final_rx8t" value="mod2_rx8t+rx8t_align" />
+ <constant name="final_rx9t" value="mod2_rx9t+rx9t_align" />
+ <constant name="final_rx10t" value="mod2_rx10t+rx10t_align" />
+ <constant name="final_ry1t" value="mod2_ry1t+ry1t_align" />
+ <constant name="final_ry2t" value="mod2_ry2t+ry2t_align" />
+ <constant name="final_ry3t" value="mod2_ry3t+ry3t_align" />
+ <constant name="final_ry4t" value="mod2_ry4t+ry4t_align" />
+ <constant name="final_ry5t" value="mod2_ry5t+ry5t_align" />
+ <constant name="final_ry6t" value="mod2_ry6t+ry6t_align" />
+ <constant name="final_ry7t" value="mod2_ry7t+ry7t_align" />
+ <constant name="final_ry8t" value="mod2_ry8t+ry8t_align" />
+ <constant name="final_ry9t" value="mod2_ry9t+ry9t_align" />
+ <constant name="final_ry10t" value="mod2_ry10t+ry10t_align" />
+ <constant name="final_rz1t" value="mod2_rz1t+rz1t_align" />
+ <constant name="final_rz2t" value="mod2_rz2t+rz2t_align" />
+ <constant name="final_rz3t" value="mod2_rz3t+rz3t_align" />
+ <constant name="final_rz4t" value="mod2_rz4t+rz4t_align" />
+ <constant name="final_rz5t" value="mod2_rz5t+rz5t_align" />
+ <constant name="final_rz6t" value="mod2_rz6t+rz6t_align" />
+ <constant name="final_rz7t" value="mod2_rz7t+rz7t_align" />
+ <constant name="final_rz8t" value="mod2_rz8t+rz8t_align" />
+ <constant name="final_rz9t" value="mod2_rz9t+rz9t_align" />
+ <constant name="final_rz10t" value="mod2_rz10t+rz10t_align" />
+ <constant name="final_x1b" value="mod2_x1b+x1b_align" />
+ <constant name="final_x2b" value="mod2_x2b+x2b_align" />
+ <constant name="final_x3b" value="mod2_x3b+x3b_align" />
+ <constant name="final_x4b" value="mod2_x4b+x4b_align" />
+ <constant name="final_x5b" value="mod2_x5b+x5b_align" />
+ <constant name="final_x6b" value="mod2_x6b+x6b_align" />
+ <constant name="final_x7b" value="mod2_x7b+x7b_align" />
+ <constant name="final_x8b" value="mod2_x8b+x8b_align" />
+ <constant name="final_x9b" value="mod2_x9b+x9b_align" />
+ <constant name="final_x10b" value="mod2_x10b+x10b_align" />
+ <constant name="final_y1b" value="mod2_y1b+y1b_align" />
+ <constant name="final_y2b" value="mod2_y2b+y2b_align" />
+ <constant name="final_y3b" value="mod2_y3b+y3b_align" />
+ <constant name="final_y4b" value="mod2_y4b+y4b_align" />
+ <constant name="final_y5b" value="mod2_y5b+y5b_align" />
+ <constant name="final_y6b" value="mod2_y6b+y6b_align" />
+ <constant name="final_y7b" value="mod2_y7b+y7b_align" />
+ <constant name="final_y8b" value="mod2_y8b+y8b_align" />
+ <constant name="final_y9b" value="mod2_y9b+y9b_align" />
+ <constant name="final_y10b" value="mod2_y10b+y10b_align" />
+ <constant name="final_z1b" value="mod2_z1b+z1b_align" />
+ <constant name="final_z2b" value="mod2_z2b+z2b_align" />
+ <constant name="final_z3b" value="mod2_z3b+z3b_align" />
+ <constant name="final_z4b" value="mod2_z4b+z4b_align" />
+ <constant name="final_z5b" value="mod2_z5b+z5b_align" />
+ <constant name="final_z6b" value="mod2_z6b+z6b_align" />
+ <constant name="final_z7b" value="mod2_z7b+z7b_align" />
+ <constant name="final_z8b" value="mod2_z8b+z8b_align" />
+ <constant name="final_z9b" value="mod2_z9b+z9b_align" />
+ <constant name="final_z10b" value="mod2_z10b+z10b_align" />
+ <constant name="final_rx1b" value="mod2_rx1b+rx1b_align" />
+ <constant name="final_rx2b" value="mod2_rx2b+rx2b_align" />
+ <constant name="final_rx3b" value="mod2_rx3b+rx3b_align" />
+ <constant name="final_rx4b" value="mod2_rx4b+rx4b_align" />
+ <constant name="final_rx5b" value="mod2_rx5b+rx5b_align" />
+ <constant name="final_rx6b" value="mod2_rx6b+rx6b_align" />
+ <constant name="final_rx7b" value="mod2_rx7b+rx7b_align" />
+ <constant name="final_rx8b" value="mod2_rx8b+rx8b_align" />
+ <constant name="final_rx9b" value="mod2_rx9b+rx9b_align" />
+ <constant name="final_rx10b" value="mod2_rx10b+rx10b_align" />
+ <constant name="final_ry1b" value="mod2_ry1b+ry1b_align" />
+ <constant name="final_ry2b" value="mod2_ry2b+ry2b_align" />
+ <constant name="final_ry3b" value="mod2_ry3b+ry3b_align" />
+ <constant name="final_ry4b" value="mod2_ry4b+ry4b_align" />
+ <constant name="final_ry5b" value="mod2_ry5b+ry5b_align" />
+ <constant name="final_ry6b" value="mod2_ry6b+ry6b_align" />
+ <constant name="final_ry7b" value="mod2_ry7b+ry7b_align" />
+ <constant name="final_ry8b" value="mod2_ry8b+ry8b_align" />
+ <constant name="final_ry9b" value="mod2_ry9b+ry9b_align" />
+ <constant name="final_ry10b" value="mod2_ry10b+ry10b_align" />
+ <constant name="final_rz1b" value="mod2_rz1b+rz1b_align" />
+ <constant name="final_rz2b" value="mod2_rz2b+rz2b_align" />
+ <constant name="final_rz3b" value="mod2_rz3b+rz3b_align" />
+ <constant name="final_rz4b" value="mod2_rz4b+rz4b_align" />
+ <constant name="final_rz5b" value="mod2_rz5b+rz5b_align" />
+ <constant name="final_rz6b" value="mod2_rz6b+rz6b_align" />
+ <constant name="final_rz7b" value="mod2_rz7b+rz7b_align" />
+ <constant name="final_rz8b" value="mod2_rz8b+rz8b_align" />
+ <constant name="final_rz9b" value="mod2_rz9b+rz9b_align" />
+ <constant name="final_rz10b" value="mod2_rz10b+rz10b_align" />
+
+
</define>
<materials>
- <!-- Set the world material to vacuum. -->
+ <!-- Set the world material to vacuum. -->
<material name="WorldMaterial">
<D type="density" unit="g/cm3" value="0.0000000000000001" />
<fraction n="1.0" ref="Vacuum" />
@@ -801,7 +801,7 @@
</layer>
<layer id="2">
<module_placement name="TestRunModuleFieldDef" id="0" x="0" y="0" z="dipoleMagnetPositionZ+dipoleMagnetLength/2" rx="0" ry="0" rz="-PI/2" />
- </layer>
+ </layer>
</detector>
<detector id="13" name="Ecal" type="HPSEcal3" insideTrackingVolume="false" readout="EcalHits" vis="ECALVis">
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v8-5/compact.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v8-5/compact.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v8-5/compact.xml Wed Apr 27 11:11:32 2016
@@ -3,9 +3,9 @@
<info name="HPS-TestRun-v8-5">
<comment>
- HPS JLab Test Run detector with dipole field starting at z=0
- Based on millepede-u3-6-float-then-u7-8float-then-u3-6-float-then-u1-2-float-then-u3-6-float-then-u9-10-float-then-u7-10-float-then-u3-6-float-then-u1-4-float-then-u9-10-float-1351-v8-10k.res.txt
- </comment>
+ HPS JLab Test Run detector with dipole field starting at z=0
+ Based on millepede-u3-6-float-then-u7-8float-then-u3-6-float-then-u1-2-float-then-u3-6-float-then-u9-10-float-then-u7-10-float-then-u3-6-float-then-u1-4-float-then-u9-10-float-1351-v8-10k.res.txt
+ </comment>
</info>
<define>
@@ -65,127 +65,127 @@
<!-- <constant name="x_off" value = "-15.0"/> -->
<constant name="x_off" value="0.0" />
- <!-- alignment corrections -->
- <constant name="x1t_align" value="0.000000" />
- <constant name="x2t_align" value="0.000338" />
- <constant name="x3t_align" value="0.000013" />
- <constant name="x4t_align" value="0.014360" />
- <constant name="x5t_align" value="0.000051" />
- <constant name="x6t_align" value="-0.004610" />
- <constant name="x7t_align" value="-0.000000" />
- <constant name="x8t_align" value="-0.000200" />
- <constant name="x9t_align" value="0.000023" />
- <constant name="x10t_align" value="0.003314" />
- <constant name="y1t_align" value="0.003643" />
- <constant name="y2t_align" value="0.003372" />
- <constant name="y3t_align" value="0.106214" />
- <constant name="y4t_align" value="0.143235" />
- <constant name="y5t_align" value="-0.094779" />
- <constant name="y6t_align" value="-0.045906" />
- <constant name="y7t_align" value="-0.004270" />
- <constant name="y8t_align" value="-0.004014" />
- <constant name="y9t_align" value="0.069062" />
- <constant name="y10t_align" value="0.066775" />
- <constant name="z1t_align" value="0.000095" />
- <constant name="z2t_align" value="0.000077" />
- <constant name="z3t_align" value="0.002763" />
- <constant name="z4t_align" value="0.003273" />
- <constant name="z5t_align" value="-0.002467" />
- <constant name="z6t_align" value="-0.001049" />
- <constant name="z7t_align" value="-0.000111" />
- <constant name="z8t_align" value="-0.000098" />
- <constant name="z9t_align" value="0.001796" />
- <constant name="z10t_align" value="0.001633" />
- <constant name="rx1t_align" value="0.000000" />
- <constant name="rx2t_align" value="0.000000" />
- <constant name="rx3t_align" value="0.000000" />
- <constant name="rx4t_align" value="0.000000" />
- <constant name="rx5t_align" value="0.000000" />
- <constant name="rx6t_align" value="0.000000" />
- <constant name="rx7t_align" value="0.000000" />
- <constant name="rx8t_align" value="0.000000" />
- <constant name="rx9t_align" value="0.000000" />
- <constant name="rx10t_align" value="0.000000" />
- <constant name="ry1t_align" value="0.000000" />
- <constant name="ry2t_align" value="0.000000" />
- <constant name="ry3t_align" value="0.000000" />
- <constant name="ry4t_align" value="0.000000" />
- <constant name="ry5t_align" value="0.000000" />
- <constant name="ry6t_align" value="0.000000" />
- <constant name="ry7t_align" value="0.000000" />
- <constant name="ry8t_align" value="0.000000" />
- <constant name="ry9t_align" value="0.000000" />
- <constant name="ry10t_align" value="0.000000" />
- <constant name="rz1t_align" value="-0.000000" />
- <constant name="rz2t_align" value="0.000000" />
- <constant name="rz3t_align" value="-0.000000" />
- <constant name="rz4t_align" value="0.000000" />
- <constant name="rz5t_align" value="-0.000000" />
- <constant name="rz6t_align" value="0.000000" />
- <constant name="rz7t_align" value="-0.000000" />
- <constant name="rz8t_align" value="0.000000" />
- <constant name="rz9t_align" value="-0.000000" />
- <constant name="rz10t_align" value="0.000000" />
- <constant name="x1b_align" value="-0.000240" />
- <constant name="x2b_align" value="0.000001" />
- <constant name="x3b_align" value="0.008881" />
- <constant name="x4b_align" value="-0.000013" />
- <constant name="x5b_align" value="0.005242" />
- <constant name="x6b_align" value="-0.000013" />
- <constant name="x7b_align" value="0.000171" />
- <constant name="x8b_align" value="-0.000002" />
- <constant name="x9b_align" value="-0.002390" />
- <constant name="x10b_align" value="0.000002" />
- <constant name="y1b_align" value="0.002386" />
- <constant name="y2b_align" value="0.002507" />
- <constant name="y3b_align" value="-0.088503" />
- <constant name="y4b_align" value="-0.066488" />
- <constant name="y5b_align" value="-0.052443" />
- <constant name="y6b_align" value="-0.040809" />
- <constant name="y7b_align" value="-0.003389" />
- <constant name="y8b_align" value="-0.003634" />
- <constant name="y9b_align" value="0.047254" />
- <constant name="y10b_align" value="0.047194" />
- <constant name="z1b_align" value="-0.000047" />
- <constant name="z2b_align" value="-0.000056" />
- <constant name="z3b_align" value="0.001731" />
- <constant name="z4b_align" value="0.001481" />
- <constant name="z5b_align" value="0.001026" />
- <constant name="z6b_align" value="0.000909" />
- <constant name="z7b_align" value="0.000071" />
- <constant name="z8b_align" value="0.000081" />
- <constant name="z9b_align" value="-0.000988" />
- <constant name="z10b_align" value="-0.001051" />
- <constant name="rx1b_align" value="0.000000" />
- <constant name="rx2b_align" value="0.000000" />
- <constant name="rx3b_align" value="0.000000" />
- <constant name="rx4b_align" value="0.000000" />
- <constant name="rx5b_align" value="0.000000" />
- <constant name="rx6b_align" value="0.000000" />
- <constant name="rx7b_align" value="0.000000" />
- <constant name="rx8b_align" value="0.000000" />
- <constant name="rx9b_align" value="0.000000" />
- <constant name="rx10b_align" value="0.000000" />
- <constant name="ry1b_align" value="0.000000" />
- <constant name="ry2b_align" value="0.000000" />
- <constant name="ry3b_align" value="0.000000" />
- <constant name="ry4b_align" value="0.000000" />
- <constant name="ry5b_align" value="0.000000" />
- <constant name="ry6b_align" value="0.000000" />
- <constant name="ry7b_align" value="0.000000" />
- <constant name="ry8b_align" value="0.000000" />
- <constant name="ry9b_align" value="0.000000" />
- <constant name="ry10b_align" value="0.000000" />
- <constant name="rz1b_align" value="0.000000" />
- <constant name="rz2b_align" value="0.000000" />
- <constant name="rz3b_align" value="0.000000" />
- <constant name="rz4b_align" value="0.000000" />
- <constant name="rz5b_align" value="0.000000" />
- <constant name="rz6b_align" value="0.000000" />
- <constant name="rz7b_align" value="0.000000" />
- <constant name="rz8b_align" value="0.000000" />
- <constant name="rz9b_align" value="0.000000" />
- <constant name="rz10b_align" value="0.000000" />
+ <!-- alignment corrections -->
+ <constant name="x1t_align" value="0.000000" />
+ <constant name="x2t_align" value="0.000338" />
+ <constant name="x3t_align" value="0.000013" />
+ <constant name="x4t_align" value="0.014360" />
+ <constant name="x5t_align" value="0.000051" />
+ <constant name="x6t_align" value="-0.004610" />
+ <constant name="x7t_align" value="-0.000000" />
+ <constant name="x8t_align" value="-0.000200" />
+ <constant name="x9t_align" value="0.000023" />
+ <constant name="x10t_align" value="0.003314" />
+ <constant name="y1t_align" value="0.003643" />
+ <constant name="y2t_align" value="0.003372" />
+ <constant name="y3t_align" value="0.106214" />
+ <constant name="y4t_align" value="0.143235" />
+ <constant name="y5t_align" value="-0.094779" />
+ <constant name="y6t_align" value="-0.045906" />
+ <constant name="y7t_align" value="-0.004270" />
+ <constant name="y8t_align" value="-0.004014" />
+ <constant name="y9t_align" value="0.069062" />
+ <constant name="y10t_align" value="0.066775" />
+ <constant name="z1t_align" value="0.000095" />
+ <constant name="z2t_align" value="0.000077" />
+ <constant name="z3t_align" value="0.002763" />
+ <constant name="z4t_align" value="0.003273" />
+ <constant name="z5t_align" value="-0.002467" />
+ <constant name="z6t_align" value="-0.001049" />
+ <constant name="z7t_align" value="-0.000111" />
+ <constant name="z8t_align" value="-0.000098" />
+ <constant name="z9t_align" value="0.001796" />
+ <constant name="z10t_align" value="0.001633" />
+ <constant name="rx1t_align" value="0.000000" />
+ <constant name="rx2t_align" value="0.000000" />
+ <constant name="rx3t_align" value="0.000000" />
+ <constant name="rx4t_align" value="0.000000" />
+ <constant name="rx5t_align" value="0.000000" />
+ <constant name="rx6t_align" value="0.000000" />
+ <constant name="rx7t_align" value="0.000000" />
+ <constant name="rx8t_align" value="0.000000" />
+ <constant name="rx9t_align" value="0.000000" />
+ <constant name="rx10t_align" value="0.000000" />
+ <constant name="ry1t_align" value="0.000000" />
+ <constant name="ry2t_align" value="0.000000" />
+ <constant name="ry3t_align" value="0.000000" />
+ <constant name="ry4t_align" value="0.000000" />
+ <constant name="ry5t_align" value="0.000000" />
+ <constant name="ry6t_align" value="0.000000" />
+ <constant name="ry7t_align" value="0.000000" />
+ <constant name="ry8t_align" value="0.000000" />
+ <constant name="ry9t_align" value="0.000000" />
+ <constant name="ry10t_align" value="0.000000" />
+ <constant name="rz1t_align" value="-0.000000" />
+ <constant name="rz2t_align" value="0.000000" />
+ <constant name="rz3t_align" value="-0.000000" />
+ <constant name="rz4t_align" value="0.000000" />
+ <constant name="rz5t_align" value="-0.000000" />
+ <constant name="rz6t_align" value="0.000000" />
+ <constant name="rz7t_align" value="-0.000000" />
+ <constant name="rz8t_align" value="0.000000" />
+ <constant name="rz9t_align" value="-0.000000" />
+ <constant name="rz10t_align" value="0.000000" />
+ <constant name="x1b_align" value="-0.000240" />
+ <constant name="x2b_align" value="0.000001" />
+ <constant name="x3b_align" value="0.008881" />
+ <constant name="x4b_align" value="-0.000013" />
+ <constant name="x5b_align" value="0.005242" />
+ <constant name="x6b_align" value="-0.000013" />
+ <constant name="x7b_align" value="0.000171" />
+ <constant name="x8b_align" value="-0.000002" />
+ <constant name="x9b_align" value="-0.002390" />
+ <constant name="x10b_align" value="0.000002" />
+ <constant name="y1b_align" value="0.002386" />
+ <constant name="y2b_align" value="0.002507" />
+ <constant name="y3b_align" value="-0.088503" />
+ <constant name="y4b_align" value="-0.066488" />
+ <constant name="y5b_align" value="-0.052443" />
+ <constant name="y6b_align" value="-0.040809" />
+ <constant name="y7b_align" value="-0.003389" />
+ <constant name="y8b_align" value="-0.003634" />
+ <constant name="y9b_align" value="0.047254" />
+ <constant name="y10b_align" value="0.047194" />
+ <constant name="z1b_align" value="-0.000047" />
+ <constant name="z2b_align" value="-0.000056" />
+ <constant name="z3b_align" value="0.001731" />
+ <constant name="z4b_align" value="0.001481" />
+ <constant name="z5b_align" value="0.001026" />
+ <constant name="z6b_align" value="0.000909" />
+ <constant name="z7b_align" value="0.000071" />
+ <constant name="z8b_align" value="0.000081" />
+ <constant name="z9b_align" value="-0.000988" />
+ <constant name="z10b_align" value="-0.001051" />
+ <constant name="rx1b_align" value="0.000000" />
+ <constant name="rx2b_align" value="0.000000" />
+ <constant name="rx3b_align" value="0.000000" />
+ <constant name="rx4b_align" value="0.000000" />
+ <constant name="rx5b_align" value="0.000000" />
+ <constant name="rx6b_align" value="0.000000" />
+ <constant name="rx7b_align" value="0.000000" />
+ <constant name="rx8b_align" value="0.000000" />
+ <constant name="rx9b_align" value="0.000000" />
+ <constant name="rx10b_align" value="0.000000" />
+ <constant name="ry1b_align" value="0.000000" />
+ <constant name="ry2b_align" value="0.000000" />
+ <constant name="ry3b_align" value="0.000000" />
+ <constant name="ry4b_align" value="0.000000" />
+ <constant name="ry5b_align" value="0.000000" />
+ <constant name="ry6b_align" value="0.000000" />
+ <constant name="ry7b_align" value="0.000000" />
+ <constant name="ry8b_align" value="0.000000" />
+ <constant name="ry9b_align" value="0.000000" />
+ <constant name="ry10b_align" value="0.000000" />
+ <constant name="rz1b_align" value="0.000000" />
+ <constant name="rz2b_align" value="0.000000" />
+ <constant name="rz3b_align" value="0.000000" />
+ <constant name="rz4b_align" value="0.000000" />
+ <constant name="rz5b_align" value="0.000000" />
+ <constant name="rz6b_align" value="0.000000" />
+ <constant name="rz7b_align" value="0.000000" />
+ <constant name="rz8b_align" value="0.000000" />
+ <constant name="rz9b_align" value="0.000000" />
+ <constant name="rz10b_align" value="0.000000" />
<!-- Positions of sensor centers above/below nominal beam -->
<constant name="y1t" value="36.894" />
@@ -585,134 +585,134 @@
<constant name="mod2_rx10b" value="mod_rx10b" />
<constant name="mod2_ry10b" value="mod_ry10b+y_rot_bot_pivot" />
<constant name="mod2_rz10b" value="mod_rz10b" />
-
- <!-- final constants -->
- <constant name="final_x1t" value="mod2_x1t+x1t_align" />
- <constant name="final_x2t" value="mod2_x2t+x2t_align" />
- <constant name="final_x3t" value="mod2_x3t+x3t_align" />
- <constant name="final_x4t" value="mod2_x4t+x4t_align" />
- <constant name="final_x5t" value="mod2_x5t+x5t_align" />
- <constant name="final_x6t" value="mod2_x6t+x6t_align" />
- <constant name="final_x7t" value="mod2_x7t+x7t_align" />
- <constant name="final_x8t" value="mod2_x8t+x8t_align" />
- <constant name="final_x9t" value="mod2_x9t+x9t_align" />
- <constant name="final_x10t" value="mod2_x10t+x10t_align" />
- <constant name="final_y1t" value="mod2_y1t+y1t_align" />
- <constant name="final_y2t" value="mod2_y2t+y2t_align" />
- <constant name="final_y3t" value="mod2_y3t+y3t_align" />
- <constant name="final_y4t" value="mod2_y4t+y4t_align" />
- <constant name="final_y5t" value="mod2_y5t+y5t_align" />
- <constant name="final_y6t" value="mod2_y6t+y6t_align" />
- <constant name="final_y7t" value="mod2_y7t+y7t_align" />
- <constant name="final_y8t" value="mod2_y8t+y8t_align" />
- <constant name="final_y9t" value="mod2_y9t+y9t_align" />
- <constant name="final_y10t" value="mod2_y10t+y10t_align" />
- <constant name="final_z1t" value="mod2_z1t+z1t_align" />
- <constant name="final_z2t" value="mod2_z2t+z2t_align" />
- <constant name="final_z3t" value="mod2_z3t+z3t_align" />
- <constant name="final_z4t" value="mod2_z4t+z4t_align" />
- <constant name="final_z5t" value="mod2_z5t+z5t_align" />
- <constant name="final_z6t" value="mod2_z6t+z6t_align" />
- <constant name="final_z7t" value="mod2_z7t+z7t_align" />
- <constant name="final_z8t" value="mod2_z8t+z8t_align" />
- <constant name="final_z9t" value="mod2_z9t+z9t_align" />
- <constant name="final_z10t" value="mod2_z10t+z10t_align" />
- <constant name="final_rx1t" value="mod2_rx1t+rx1t_align" />
- <constant name="final_rx2t" value="mod2_rx2t+rx2t_align" />
- <constant name="final_rx3t" value="mod2_rx3t+rx3t_align" />
- <constant name="final_rx4t" value="mod2_rx4t+rx4t_align" />
- <constant name="final_rx5t" value="mod2_rx5t+rx5t_align" />
- <constant name="final_rx6t" value="mod2_rx6t+rx6t_align" />
- <constant name="final_rx7t" value="mod2_rx7t+rx7t_align" />
- <constant name="final_rx8t" value="mod2_rx8t+rx8t_align" />
- <constant name="final_rx9t" value="mod2_rx9t+rx9t_align" />
- <constant name="final_rx10t" value="mod2_rx10t+rx10t_align" />
- <constant name="final_ry1t" value="mod2_ry1t+ry1t_align" />
- <constant name="final_ry2t" value="mod2_ry2t+ry2t_align" />
- <constant name="final_ry3t" value="mod2_ry3t+ry3t_align" />
- <constant name="final_ry4t" value="mod2_ry4t+ry4t_align" />
- <constant name="final_ry5t" value="mod2_ry5t+ry5t_align" />
- <constant name="final_ry6t" value="mod2_ry6t+ry6t_align" />
- <constant name="final_ry7t" value="mod2_ry7t+ry7t_align" />
- <constant name="final_ry8t" value="mod2_ry8t+ry8t_align" />
- <constant name="final_ry9t" value="mod2_ry9t+ry9t_align" />
- <constant name="final_ry10t" value="mod2_ry10t+ry10t_align" />
- <constant name="final_rz1t" value="mod2_rz1t+rz1t_align" />
- <constant name="final_rz2t" value="mod2_rz2t+rz2t_align" />
- <constant name="final_rz3t" value="mod2_rz3t+rz3t_align" />
- <constant name="final_rz4t" value="mod2_rz4t+rz4t_align" />
- <constant name="final_rz5t" value="mod2_rz5t+rz5t_align" />
- <constant name="final_rz6t" value="mod2_rz6t+rz6t_align" />
- <constant name="final_rz7t" value="mod2_rz7t+rz7t_align" />
- <constant name="final_rz8t" value="mod2_rz8t+rz8t_align" />
- <constant name="final_rz9t" value="mod2_rz9t+rz9t_align" />
- <constant name="final_rz10t" value="mod2_rz10t+rz10t_align" />
- <constant name="final_x1b" value="mod2_x1b+x1b_align" />
- <constant name="final_x2b" value="mod2_x2b+x2b_align" />
- <constant name="final_x3b" value="mod2_x3b+x3b_align" />
- <constant name="final_x4b" value="mod2_x4b+x4b_align" />
- <constant name="final_x5b" value="mod2_x5b+x5b_align" />
- <constant name="final_x6b" value="mod2_x6b+x6b_align" />
- <constant name="final_x7b" value="mod2_x7b+x7b_align" />
- <constant name="final_x8b" value="mod2_x8b+x8b_align" />
- <constant name="final_x9b" value="mod2_x9b+x9b_align" />
- <constant name="final_x10b" value="mod2_x10b+x10b_align" />
- <constant name="final_y1b" value="mod2_y1b+y1b_align" />
- <constant name="final_y2b" value="mod2_y2b+y2b_align" />
- <constant name="final_y3b" value="mod2_y3b+y3b_align" />
- <constant name="final_y4b" value="mod2_y4b+y4b_align" />
- <constant name="final_y5b" value="mod2_y5b+y5b_align" />
- <constant name="final_y6b" value="mod2_y6b+y6b_align" />
- <constant name="final_y7b" value="mod2_y7b+y7b_align" />
- <constant name="final_y8b" value="mod2_y8b+y8b_align" />
- <constant name="final_y9b" value="mod2_y9b+y9b_align" />
- <constant name="final_y10b" value="mod2_y10b+y10b_align" />
- <constant name="final_z1b" value="mod2_z1b+z1b_align" />
- <constant name="final_z2b" value="mod2_z2b+z2b_align" />
- <constant name="final_z3b" value="mod2_z3b+z3b_align" />
- <constant name="final_z4b" value="mod2_z4b+z4b_align" />
- <constant name="final_z5b" value="mod2_z5b+z5b_align" />
- <constant name="final_z6b" value="mod2_z6b+z6b_align" />
- <constant name="final_z7b" value="mod2_z7b+z7b_align" />
- <constant name="final_z8b" value="mod2_z8b+z8b_align" />
- <constant name="final_z9b" value="mod2_z9b+z9b_align" />
- <constant name="final_z10b" value="mod2_z10b+z10b_align" />
- <constant name="final_rx1b" value="mod2_rx1b+rx1b_align" />
- <constant name="final_rx2b" value="mod2_rx2b+rx2b_align" />
- <constant name="final_rx3b" value="mod2_rx3b+rx3b_align" />
- <constant name="final_rx4b" value="mod2_rx4b+rx4b_align" />
- <constant name="final_rx5b" value="mod2_rx5b+rx5b_align" />
- <constant name="final_rx6b" value="mod2_rx6b+rx6b_align" />
- <constant name="final_rx7b" value="mod2_rx7b+rx7b_align" />
- <constant name="final_rx8b" value="mod2_rx8b+rx8b_align" />
- <constant name="final_rx9b" value="mod2_rx9b+rx9b_align" />
- <constant name="final_rx10b" value="mod2_rx10b+rx10b_align" />
- <constant name="final_ry1b" value="mod2_ry1b+ry1b_align" />
- <constant name="final_ry2b" value="mod2_ry2b+ry2b_align" />
- <constant name="final_ry3b" value="mod2_ry3b+ry3b_align" />
- <constant name="final_ry4b" value="mod2_ry4b+ry4b_align" />
- <constant name="final_ry5b" value="mod2_ry5b+ry5b_align" />
- <constant name="final_ry6b" value="mod2_ry6b+ry6b_align" />
- <constant name="final_ry7b" value="mod2_ry7b+ry7b_align" />
- <constant name="final_ry8b" value="mod2_ry8b+ry8b_align" />
- <constant name="final_ry9b" value="mod2_ry9b+ry9b_align" />
- <constant name="final_ry10b" value="mod2_ry10b+ry10b_align" />
- <constant name="final_rz1b" value="mod2_rz1b+rz1b_align" />
- <constant name="final_rz2b" value="mod2_rz2b+rz2b_align" />
- <constant name="final_rz3b" value="mod2_rz3b+rz3b_align" />
- <constant name="final_rz4b" value="mod2_rz4b+rz4b_align" />
- <constant name="final_rz5b" value="mod2_rz5b+rz5b_align" />
- <constant name="final_rz6b" value="mod2_rz6b+rz6b_align" />
- <constant name="final_rz7b" value="mod2_rz7b+rz7b_align" />
- <constant name="final_rz8b" value="mod2_rz8b+rz8b_align" />
- <constant name="final_rz9b" value="mod2_rz9b+rz9b_align" />
- <constant name="final_rz10b" value="mod2_rz10b+rz10b_align" />
-
-
+
+ <!-- final constants -->
+ <constant name="final_x1t" value="mod2_x1t+x1t_align" />
+ <constant name="final_x2t" value="mod2_x2t+x2t_align" />
+ <constant name="final_x3t" value="mod2_x3t+x3t_align" />
+ <constant name="final_x4t" value="mod2_x4t+x4t_align" />
+ <constant name="final_x5t" value="mod2_x5t+x5t_align" />
+ <constant name="final_x6t" value="mod2_x6t+x6t_align" />
+ <constant name="final_x7t" value="mod2_x7t+x7t_align" />
+ <constant name="final_x8t" value="mod2_x8t+x8t_align" />
+ <constant name="final_x9t" value="mod2_x9t+x9t_align" />
+ <constant name="final_x10t" value="mod2_x10t+x10t_align" />
+ <constant name="final_y1t" value="mod2_y1t+y1t_align" />
+ <constant name="final_y2t" value="mod2_y2t+y2t_align" />
+ <constant name="final_y3t" value="mod2_y3t+y3t_align" />
+ <constant name="final_y4t" value="mod2_y4t+y4t_align" />
+ <constant name="final_y5t" value="mod2_y5t+y5t_align" />
+ <constant name="final_y6t" value="mod2_y6t+y6t_align" />
+ <constant name="final_y7t" value="mod2_y7t+y7t_align" />
+ <constant name="final_y8t" value="mod2_y8t+y8t_align" />
+ <constant name="final_y9t" value="mod2_y9t+y9t_align" />
+ <constant name="final_y10t" value="mod2_y10t+y10t_align" />
+ <constant name="final_z1t" value="mod2_z1t+z1t_align" />
+ <constant name="final_z2t" value="mod2_z2t+z2t_align" />
+ <constant name="final_z3t" value="mod2_z3t+z3t_align" />
+ <constant name="final_z4t" value="mod2_z4t+z4t_align" />
+ <constant name="final_z5t" value="mod2_z5t+z5t_align" />
+ <constant name="final_z6t" value="mod2_z6t+z6t_align" />
+ <constant name="final_z7t" value="mod2_z7t+z7t_align" />
+ <constant name="final_z8t" value="mod2_z8t+z8t_align" />
+ <constant name="final_z9t" value="mod2_z9t+z9t_align" />
+ <constant name="final_z10t" value="mod2_z10t+z10t_align" />
+ <constant name="final_rx1t" value="mod2_rx1t+rx1t_align" />
+ <constant name="final_rx2t" value="mod2_rx2t+rx2t_align" />
+ <constant name="final_rx3t" value="mod2_rx3t+rx3t_align" />
+ <constant name="final_rx4t" value="mod2_rx4t+rx4t_align" />
+ <constant name="final_rx5t" value="mod2_rx5t+rx5t_align" />
+ <constant name="final_rx6t" value="mod2_rx6t+rx6t_align" />
+ <constant name="final_rx7t" value="mod2_rx7t+rx7t_align" />
+ <constant name="final_rx8t" value="mod2_rx8t+rx8t_align" />
+ <constant name="final_rx9t" value="mod2_rx9t+rx9t_align" />
+ <constant name="final_rx10t" value="mod2_rx10t+rx10t_align" />
+ <constant name="final_ry1t" value="mod2_ry1t+ry1t_align" />
+ <constant name="final_ry2t" value="mod2_ry2t+ry2t_align" />
+ <constant name="final_ry3t" value="mod2_ry3t+ry3t_align" />
+ <constant name="final_ry4t" value="mod2_ry4t+ry4t_align" />
+ <constant name="final_ry5t" value="mod2_ry5t+ry5t_align" />
+ <constant name="final_ry6t" value="mod2_ry6t+ry6t_align" />
+ <constant name="final_ry7t" value="mod2_ry7t+ry7t_align" />
+ <constant name="final_ry8t" value="mod2_ry8t+ry8t_align" />
+ <constant name="final_ry9t" value="mod2_ry9t+ry9t_align" />
+ <constant name="final_ry10t" value="mod2_ry10t+ry10t_align" />
+ <constant name="final_rz1t" value="mod2_rz1t+rz1t_align" />
+ <constant name="final_rz2t" value="mod2_rz2t+rz2t_align" />
+ <constant name="final_rz3t" value="mod2_rz3t+rz3t_align" />
+ <constant name="final_rz4t" value="mod2_rz4t+rz4t_align" />
+ <constant name="final_rz5t" value="mod2_rz5t+rz5t_align" />
+ <constant name="final_rz6t" value="mod2_rz6t+rz6t_align" />
+ <constant name="final_rz7t" value="mod2_rz7t+rz7t_align" />
+ <constant name="final_rz8t" value="mod2_rz8t+rz8t_align" />
+ <constant name="final_rz9t" value="mod2_rz9t+rz9t_align" />
+ <constant name="final_rz10t" value="mod2_rz10t+rz10t_align" />
+ <constant name="final_x1b" value="mod2_x1b+x1b_align" />
+ <constant name="final_x2b" value="mod2_x2b+x2b_align" />
+ <constant name="final_x3b" value="mod2_x3b+x3b_align" />
+ <constant name="final_x4b" value="mod2_x4b+x4b_align" />
+ <constant name="final_x5b" value="mod2_x5b+x5b_align" />
+ <constant name="final_x6b" value="mod2_x6b+x6b_align" />
+ <constant name="final_x7b" value="mod2_x7b+x7b_align" />
+ <constant name="final_x8b" value="mod2_x8b+x8b_align" />
+ <constant name="final_x9b" value="mod2_x9b+x9b_align" />
+ <constant name="final_x10b" value="mod2_x10b+x10b_align" />
+ <constant name="final_y1b" value="mod2_y1b+y1b_align" />
+ <constant name="final_y2b" value="mod2_y2b+y2b_align" />
+ <constant name="final_y3b" value="mod2_y3b+y3b_align" />
+ <constant name="final_y4b" value="mod2_y4b+y4b_align" />
+ <constant name="final_y5b" value="mod2_y5b+y5b_align" />
+ <constant name="final_y6b" value="mod2_y6b+y6b_align" />
+ <constant name="final_y7b" value="mod2_y7b+y7b_align" />
+ <constant name="final_y8b" value="mod2_y8b+y8b_align" />
+ <constant name="final_y9b" value="mod2_y9b+y9b_align" />
+ <constant name="final_y10b" value="mod2_y10b+y10b_align" />
+ <constant name="final_z1b" value="mod2_z1b+z1b_align" />
+ <constant name="final_z2b" value="mod2_z2b+z2b_align" />
+ <constant name="final_z3b" value="mod2_z3b+z3b_align" />
+ <constant name="final_z4b" value="mod2_z4b+z4b_align" />
+ <constant name="final_z5b" value="mod2_z5b+z5b_align" />
+ <constant name="final_z6b" value="mod2_z6b+z6b_align" />
+ <constant name="final_z7b" value="mod2_z7b+z7b_align" />
+ <constant name="final_z8b" value="mod2_z8b+z8b_align" />
+ <constant name="final_z9b" value="mod2_z9b+z9b_align" />
+ <constant name="final_z10b" value="mod2_z10b+z10b_align" />
+ <constant name="final_rx1b" value="mod2_rx1b+rx1b_align" />
+ <constant name="final_rx2b" value="mod2_rx2b+rx2b_align" />
+ <constant name="final_rx3b" value="mod2_rx3b+rx3b_align" />
+ <constant name="final_rx4b" value="mod2_rx4b+rx4b_align" />
+ <constant name="final_rx5b" value="mod2_rx5b+rx5b_align" />
+ <constant name="final_rx6b" value="mod2_rx6b+rx6b_align" />
+ <constant name="final_rx7b" value="mod2_rx7b+rx7b_align" />
+ <constant name="final_rx8b" value="mod2_rx8b+rx8b_align" />
+ <constant name="final_rx9b" value="mod2_rx9b+rx9b_align" />
+ <constant name="final_rx10b" value="mod2_rx10b+rx10b_align" />
+ <constant name="final_ry1b" value="mod2_ry1b+ry1b_align" />
+ <constant name="final_ry2b" value="mod2_ry2b+ry2b_align" />
+ <constant name="final_ry3b" value="mod2_ry3b+ry3b_align" />
+ <constant name="final_ry4b" value="mod2_ry4b+ry4b_align" />
+ <constant name="final_ry5b" value="mod2_ry5b+ry5b_align" />
+ <constant name="final_ry6b" value="mod2_ry6b+ry6b_align" />
+ <constant name="final_ry7b" value="mod2_ry7b+ry7b_align" />
+ <constant name="final_ry8b" value="mod2_ry8b+ry8b_align" />
+ <constant name="final_ry9b" value="mod2_ry9b+ry9b_align" />
+ <constant name="final_ry10b" value="mod2_ry10b+ry10b_align" />
+ <constant name="final_rz1b" value="mod2_rz1b+rz1b_align" />
+ <constant name="final_rz2b" value="mod2_rz2b+rz2b_align" />
+ <constant name="final_rz3b" value="mod2_rz3b+rz3b_align" />
+ <constant name="final_rz4b" value="mod2_rz4b+rz4b_align" />
+ <constant name="final_rz5b" value="mod2_rz5b+rz5b_align" />
+ <constant name="final_rz6b" value="mod2_rz6b+rz6b_align" />
+ <constant name="final_rz7b" value="mod2_rz7b+rz7b_align" />
+ <constant name="final_rz8b" value="mod2_rz8b+rz8b_align" />
+ <constant name="final_rz9b" value="mod2_rz9b+rz9b_align" />
+ <constant name="final_rz10b" value="mod2_rz10b+rz10b_align" />
+
+
</define>
<materials>
- <!-- Set the world material to vacuum. -->
+ <!-- Set the world material to vacuum. -->
<material name="WorldMaterial">
<D type="density" unit="g/cm3" value="0.0000000000000001" />
<fraction n="1.0" ref="Vacuum" />
@@ -801,7 +801,7 @@
</layer>
<layer id="2">
<module_placement name="TestRunModuleFieldDef" id="0" x="0" y="0" z="dipoleMagnetPositionZ+dipoleMagnetLength/2" rx="0" ry="0" rz="-PI/2" />
- </layer>
+ </layer>
</detector>
<detector id="13" name="Ecal" type="HPSEcal3" insideTrackingVolume="false" readout="EcalHits" vis="ECALVis">
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v8/compact.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v8/compact.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPS-TestRun-v8/compact.xml Wed Apr 27 11:11:32 2016
@@ -64,127 +64,127 @@
<!-- <constant name="x_off" value = "-15.0"/> -->
<constant name="x_off" value = "0.0"/>
- <!-- alignment corrections -->
- <constant name="x1t_align" value="0.000000"/>
- <constant name="x2t_align" value="0.000000"/>
- <constant name="x3t_align" value="0.000000"/>
- <constant name="x4t_align" value="0.000000"/>
- <constant name="x5t_align" value="0.000000"/>
- <constant name="x6t_align" value="0.000000"/>
- <constant name="x7t_align" value="0.000000"/>
- <constant name="x8t_align" value="0.000000"/>
- <constant name="x9t_align" value="0.000000"/>
- <constant name="x10t_align" value="0.000000"/>
- <constant name="y1t_align" value="0.000000"/>
- <constant name="y2t_align" value="0.000000"/>
- <constant name="y3t_align" value="0.000000"/>
- <constant name="y4t_align" value="0.000000"/>
- <constant name="y5t_align" value="0.000000"/>
- <constant name="y6t_align" value="0.000000"/>
- <constant name="y7t_align" value="0.000000"/>
- <constant name="y8t_align" value="0.000000"/>
- <constant name="y9t_align" value="0.000000"/>
- <constant name="y10t_align" value="0.000000"/>
- <constant name="z1t_align" value="0.000000"/>
- <constant name="z2t_align" value="0.000000"/>
- <constant name="z3t_align" value="0.000000"/>
- <constant name="z4t_align" value="0.000000"/>
- <constant name="z5t_align" value="0.000000"/>
- <constant name="z6t_align" value="0.000000"/>
- <constant name="z7t_align" value="0.000000"/>
- <constant name="z8t_align" value="0.000000"/>
- <constant name="z9t_align" value="0.000000"/>
- <constant name="z10t_align" value="0.000000"/>
- <constant name="rx1t_align" value="0.000000"/>
- <constant name="rx2t_align" value="0.000000"/>
- <constant name="rx3t_align" value="0.000000"/>
- <constant name="rx4t_align" value="0.000000"/>
- <constant name="rx5t_align" value="0.000000"/>
- <constant name="rx6t_align" value="0.000000"/>
- <constant name="rx7t_align" value="0.000000"/>
- <constant name="rx8t_align" value="0.000000"/>
- <constant name="rx9t_align" value="0.000000"/>
- <constant name="rx10t_align" value="0.000000"/>
- <constant name="ry1t_align" value="0.000000"/>
- <constant name="ry2t_align" value="0.000000"/>
- <constant name="ry3t_align" value="0.000000"/>
- <constant name="ry4t_align" value="0.000000"/>
- <constant name="ry5t_align" value="0.000000"/>
- <constant name="ry6t_align" value="0.000000"/>
- <constant name="ry7t_align" value="0.000000"/>
- <constant name="ry8t_align" value="0.000000"/>
- <constant name="ry9t_align" value="0.000000"/>
- <constant name="ry10t_align" value="0.000000"/>
- <constant name="rz1t_align" value="0.000000"/>
- <constant name="rz2t_align" value="0.000000"/>
- <constant name="rz3t_align" value="0.000000"/>
- <constant name="rz4t_align" value="0.000000"/>
- <constant name="rz5t_align" value="0.000000"/>
- <constant name="rz6t_align" value="0.000000"/>
- <constant name="rz7t_align" value="0.000000"/>
- <constant name="rz8t_align" value="0.000000"/>
- <constant name="rz9t_align" value="0.000000"/>
- <constant name="rz10t_align" value="0.000000"/>
- <constant name="x1b_align" value="0.000000"/>
- <constant name="x2b_align" value="0.000000"/>
- <constant name="x3b_align" value="0.000000"/>
- <constant name="x4b_align" value="0.000000"/>
- <constant name="x5b_align" value="0.000000"/>
- <constant name="x6b_align" value="0.000000"/>
- <constant name="x7b_align" value="0.000000"/>
- <constant name="x8b_align" value="0.000000"/>
- <constant name="x9b_align" value="0.000000"/>
- <constant name="x10b_align" value="0.000000"/>
- <constant name="y1b_align" value="0.000000"/>
- <constant name="y2b_align" value="0.000000"/>
- <constant name="y3b_align" value="0.000000"/>
- <constant name="y4b_align" value="0.000000"/>
- <constant name="y5b_align" value="0.000000"/>
- <constant name="y6b_align" value="0.000000"/>
- <constant name="y7b_align" value="0.000000"/>
- <constant name="y8b_align" value="0.000000"/>
- <constant name="y9b_align" value="0.000000"/>
- <constant name="y10b_align" value="0.000000"/>
- <constant name="z1b_align" value="0.000000"/>
- <constant name="z2b_align" value="0.000000"/>
- <constant name="z3b_align" value="0.000000"/>
- <constant name="z4b_align" value="0.000000"/>
- <constant name="z5b_align" value="0.000000"/>
- <constant name="z6b_align" value="0.000000"/>
- <constant name="z7b_align" value="0.000000"/>
- <constant name="z8b_align" value="0.000000"/>
- <constant name="z9b_align" value="0.000000"/>
- <constant name="z10b_align" value="0.000000"/>
- <constant name="rx1b_align" value="0.000000"/>
- <constant name="rx2b_align" value="0.000000"/>
- <constant name="rx3b_align" value="0.000000"/>
- <constant name="rx4b_align" value="0.000000"/>
- <constant name="rx5b_align" value="0.000000"/>
- <constant name="rx6b_align" value="0.000000"/>
- <constant name="rx7b_align" value="0.000000"/>
- <constant name="rx8b_align" value="0.000000"/>
- <constant name="rx9b_align" value="0.000000"/>
- <constant name="rx10b_align" value="0.000000"/>
- <constant name="ry1b_align" value="0.000000"/>
- <constant name="ry2b_align" value="0.000000"/>
- <constant name="ry3b_align" value="0.000000"/>
- <constant name="ry4b_align" value="0.000000"/>
- <constant name="ry5b_align" value="0.000000"/>
- <constant name="ry6b_align" value="0.000000"/>
- <constant name="ry7b_align" value="0.000000"/>
- <constant name="ry8b_align" value="0.000000"/>
- <constant name="ry9b_align" value="0.000000"/>
- <constant name="ry10b_align" value="0.000000"/>
- <constant name="rz1b_align" value="0.000000"/>
- <constant name="rz2b_align" value="0.000000"/>
- <constant name="rz3b_align" value="0.000000"/>
- <constant name="rz4b_align" value="0.000000"/>
- <constant name="rz5b_align" value="0.000000"/>
- <constant name="rz6b_align" value="0.000000"/>
- <constant name="rz7b_align" value="0.000000"/>
- <constant name="rz8b_align" value="0.000000"/>
- <constant name="rz9b_align" value="0.000000"/>
- <constant name="rz10b_align" value="0.000000"/>
+ <!-- alignment corrections -->
+ <constant name="x1t_align" value="0.000000"/>
+ <constant name="x2t_align" value="0.000000"/>
+ <constant name="x3t_align" value="0.000000"/>
+ <constant name="x4t_align" value="0.000000"/>
+ <constant name="x5t_align" value="0.000000"/>
+ <constant name="x6t_align" value="0.000000"/>
+ <constant name="x7t_align" value="0.000000"/>
+ <constant name="x8t_align" value="0.000000"/>
+ <constant name="x9t_align" value="0.000000"/>
+ <constant name="x10t_align" value="0.000000"/>
+ <constant name="y1t_align" value="0.000000"/>
+ <constant name="y2t_align" value="0.000000"/>
+ <constant name="y3t_align" value="0.000000"/>
+ <constant name="y4t_align" value="0.000000"/>
+ <constant name="y5t_align" value="0.000000"/>
+ <constant name="y6t_align" value="0.000000"/>
+ <constant name="y7t_align" value="0.000000"/>
+ <constant name="y8t_align" value="0.000000"/>
+ <constant name="y9t_align" value="0.000000"/>
+ <constant name="y10t_align" value="0.000000"/>
+ <constant name="z1t_align" value="0.000000"/>
+ <constant name="z2t_align" value="0.000000"/>
+ <constant name="z3t_align" value="0.000000"/>
+ <constant name="z4t_align" value="0.000000"/>
+ <constant name="z5t_align" value="0.000000"/>
+ <constant name="z6t_align" value="0.000000"/>
+ <constant name="z7t_align" value="0.000000"/>
+ <constant name="z8t_align" value="0.000000"/>
+ <constant name="z9t_align" value="0.000000"/>
+ <constant name="z10t_align" value="0.000000"/>
+ <constant name="rx1t_align" value="0.000000"/>
+ <constant name="rx2t_align" value="0.000000"/>
+ <constant name="rx3t_align" value="0.000000"/>
+ <constant name="rx4t_align" value="0.000000"/>
+ <constant name="rx5t_align" value="0.000000"/>
+ <constant name="rx6t_align" value="0.000000"/>
+ <constant name="rx7t_align" value="0.000000"/>
+ <constant name="rx8t_align" value="0.000000"/>
+ <constant name="rx9t_align" value="0.000000"/>
+ <constant name="rx10t_align" value="0.000000"/>
+ <constant name="ry1t_align" value="0.000000"/>
+ <constant name="ry2t_align" value="0.000000"/>
+ <constant name="ry3t_align" value="0.000000"/>
+ <constant name="ry4t_align" value="0.000000"/>
+ <constant name="ry5t_align" value="0.000000"/>
+ <constant name="ry6t_align" value="0.000000"/>
+ <constant name="ry7t_align" value="0.000000"/>
+ <constant name="ry8t_align" value="0.000000"/>
+ <constant name="ry9t_align" value="0.000000"/>
+ <constant name="ry10t_align" value="0.000000"/>
+ <constant name="rz1t_align" value="0.000000"/>
+ <constant name="rz2t_align" value="0.000000"/>
+ <constant name="rz3t_align" value="0.000000"/>
+ <constant name="rz4t_align" value="0.000000"/>
+ <constant name="rz5t_align" value="0.000000"/>
+ <constant name="rz6t_align" value="0.000000"/>
+ <constant name="rz7t_align" value="0.000000"/>
+ <constant name="rz8t_align" value="0.000000"/>
+ <constant name="rz9t_align" value="0.000000"/>
+ <constant name="rz10t_align" value="0.000000"/>
+ <constant name="x1b_align" value="0.000000"/>
+ <constant name="x2b_align" value="0.000000"/>
+ <constant name="x3b_align" value="0.000000"/>
+ <constant name="x4b_align" value="0.000000"/>
+ <constant name="x5b_align" value="0.000000"/>
+ <constant name="x6b_align" value="0.000000"/>
+ <constant name="x7b_align" value="0.000000"/>
+ <constant name="x8b_align" value="0.000000"/>
+ <constant name="x9b_align" value="0.000000"/>
+ <constant name="x10b_align" value="0.000000"/>
+ <constant name="y1b_align" value="0.000000"/>
+ <constant name="y2b_align" value="0.000000"/>
+ <constant name="y3b_align" value="0.000000"/>
+ <constant name="y4b_align" value="0.000000"/>
+ <constant name="y5b_align" value="0.000000"/>
+ <constant name="y6b_align" value="0.000000"/>
+ <constant name="y7b_align" value="0.000000"/>
+ <constant name="y8b_align" value="0.000000"/>
+ <constant name="y9b_align" value="0.000000"/>
+ <constant name="y10b_align" value="0.000000"/>
+ <constant name="z1b_align" value="0.000000"/>
+ <constant name="z2b_align" value="0.000000"/>
+ <constant name="z3b_align" value="0.000000"/>
+ <constant name="z4b_align" value="0.000000"/>
+ <constant name="z5b_align" value="0.000000"/>
+ <constant name="z6b_align" value="0.000000"/>
+ <constant name="z7b_align" value="0.000000"/>
+ <constant name="z8b_align" value="0.000000"/>
+ <constant name="z9b_align" value="0.000000"/>
+ <constant name="z10b_align" value="0.000000"/>
+ <constant name="rx1b_align" value="0.000000"/>
+ <constant name="rx2b_align" value="0.000000"/>
+ <constant name="rx3b_align" value="0.000000"/>
+ <constant name="rx4b_align" value="0.000000"/>
+ <constant name="rx5b_align" value="0.000000"/>
+ <constant name="rx6b_align" value="0.000000"/>
+ <constant name="rx7b_align" value="0.000000"/>
+ <constant name="rx8b_align" value="0.000000"/>
+ <constant name="rx9b_align" value="0.000000"/>
+ <constant name="rx10b_align" value="0.000000"/>
+ <constant name="ry1b_align" value="0.000000"/>
+ <constant name="ry2b_align" value="0.000000"/>
+ <constant name="ry3b_align" value="0.000000"/>
+ <constant name="ry4b_align" value="0.000000"/>
+ <constant name="ry5b_align" value="0.000000"/>
+ <constant name="ry6b_align" value="0.000000"/>
+ <constant name="ry7b_align" value="0.000000"/>
+ <constant name="ry8b_align" value="0.000000"/>
+ <constant name="ry9b_align" value="0.000000"/>
+ <constant name="ry10b_align" value="0.000000"/>
+ <constant name="rz1b_align" value="0.000000"/>
+ <constant name="rz2b_align" value="0.000000"/>
+ <constant name="rz3b_align" value="0.000000"/>
+ <constant name="rz4b_align" value="0.000000"/>
+ <constant name="rz5b_align" value="0.000000"/>
+ <constant name="rz6b_align" value="0.000000"/>
+ <constant name="rz7b_align" value="0.000000"/>
+ <constant name="rz8b_align" value="0.000000"/>
+ <constant name="rz9b_align" value="0.000000"/>
+ <constant name="rz10b_align" value="0.000000"/>
<!-- Positions of sensor centers above/below nominal beam -->
<constant name="y1t" value="36.894" />
@@ -584,134 +584,134 @@
<constant name="mod2_rx10b" value="mod_rx10b"/>
<constant name="mod2_ry10b" value="mod_ry10b+y_rot_bot_pivot"/>
<constant name="mod2_rz10b" value="mod_rz10b"/>
-
- <!-- final constants -->
- <constant name="final_x1t" value="mod2_x1t+x1t_align"/>
- <constant name="final_x2t" value="mod2_x2t+x2t_align"/>
- <constant name="final_x3t" value="mod2_x3t+x3t_align"/>
- <constant name="final_x4t" value="mod2_x4t+x4t_align"/>
- <constant name="final_x5t" value="mod2_x5t+x5t_align"/>
- <constant name="final_x6t" value="mod2_x6t+x6t_align"/>
- <constant name="final_x7t" value="mod2_x7t+x7t_align"/>
- <constant name="final_x8t" value="mod2_x8t+x8t_align"/>
- <constant name="final_x9t" value="mod2_x9t+x9t_align"/>
- <constant name="final_x10t" value="mod2_x10t+x10t_align"/>
- <constant name="final_y1t" value="mod2_y1t+y1t_align"/>
- <constant name="final_y2t" value="mod2_y2t+y2t_align"/>
- <constant name="final_y3t" value="mod2_y3t+y3t_align"/>
- <constant name="final_y4t" value="mod2_y4t+y4t_align"/>
- <constant name="final_y5t" value="mod2_y5t+y5t_align"/>
- <constant name="final_y6t" value="mod2_y6t+y6t_align"/>
- <constant name="final_y7t" value="mod2_y7t+y7t_align"/>
- <constant name="final_y8t" value="mod2_y8t+y8t_align"/>
- <constant name="final_y9t" value="mod2_y9t+y9t_align"/>
- <constant name="final_y10t" value="mod2_y10t+y10t_align"/>
- <constant name="final_z1t" value="mod2_z1t+z1t_align"/>
- <constant name="final_z2t" value="mod2_z2t+z2t_align"/>
- <constant name="final_z3t" value="mod2_z3t+z3t_align"/>
- <constant name="final_z4t" value="mod2_z4t+z4t_align"/>
- <constant name="final_z5t" value="mod2_z5t+z5t_align"/>
- <constant name="final_z6t" value="mod2_z6t+z6t_align"/>
- <constant name="final_z7t" value="mod2_z7t+z7t_align"/>
- <constant name="final_z8t" value="mod2_z8t+z8t_align"/>
- <constant name="final_z9t" value="mod2_z9t+z9t_align"/>
- <constant name="final_z10t" value="mod2_z10t+z10t_align"/>
- <constant name="final_rx1t" value="mod2_rx1t+rx1t_align"/>
- <constant name="final_rx2t" value="mod2_rx2t+rx2t_align"/>
- <constant name="final_rx3t" value="mod2_rx3t+rx3t_align"/>
- <constant name="final_rx4t" value="mod2_rx4t+rx4t_align"/>
- <constant name="final_rx5t" value="mod2_rx5t+rx5t_align"/>
- <constant name="final_rx6t" value="mod2_rx6t+rx6t_align"/>
- <constant name="final_rx7t" value="mod2_rx7t+rx7t_align"/>
- <constant name="final_rx8t" value="mod2_rx8t+rx8t_align"/>
- <constant name="final_rx9t" value="mod2_rx9t+rx9t_align"/>
- <constant name="final_rx10t" value="mod2_rx10t+rx10t_align"/>
- <constant name="final_ry1t" value="mod2_ry1t+ry1t_align"/>
- <constant name="final_ry2t" value="mod2_ry2t+ry2t_align"/>
- <constant name="final_ry3t" value="mod2_ry3t+ry3t_align"/>
- <constant name="final_ry4t" value="mod2_ry4t+ry4t_align"/>
- <constant name="final_ry5t" value="mod2_ry5t+ry5t_align"/>
- <constant name="final_ry6t" value="mod2_ry6t+ry6t_align"/>
- <constant name="final_ry7t" value="mod2_ry7t+ry7t_align"/>
- <constant name="final_ry8t" value="mod2_ry8t+ry8t_align"/>
- <constant name="final_ry9t" value="mod2_ry9t+ry9t_align"/>
- <constant name="final_ry10t" value="mod2_ry10t+ry10t_align"/>
- <constant name="final_rz1t" value="mod2_rz1t+rz1t_align"/>
- <constant name="final_rz2t" value="mod2_rz2t+rz2t_align"/>
- <constant name="final_rz3t" value="mod2_rz3t+rz3t_align"/>
- <constant name="final_rz4t" value="mod2_rz4t+rz4t_align"/>
- <constant name="final_rz5t" value="mod2_rz5t+rz5t_align"/>
- <constant name="final_rz6t" value="mod2_rz6t+rz6t_align"/>
- <constant name="final_rz7t" value="mod2_rz7t+rz7t_align"/>
- <constant name="final_rz8t" value="mod2_rz8t+rz8t_align"/>
- <constant name="final_rz9t" value="mod2_rz9t+rz9t_align"/>
- <constant name="final_rz10t" value="mod2_rz10t+rz10t_align"/>
- <constant name="final_x1b" value="mod2_x1b+x1b_align"/>
- <constant name="final_x2b" value="mod2_x2b+x2b_align"/>
- <constant name="final_x3b" value="mod2_x3b+x3b_align"/>
- <constant name="final_x4b" value="mod2_x4b+x4b_align"/>
- <constant name="final_x5b" value="mod2_x5b+x5b_align"/>
- <constant name="final_x6b" value="mod2_x6b+x6b_align"/>
- <constant name="final_x7b" value="mod2_x7b+x7b_align"/>
- <constant name="final_x8b" value="mod2_x8b+x8b_align"/>
- <constant name="final_x9b" value="mod2_x9b+x9b_align"/>
- <constant name="final_x10b" value="mod2_x10b+x10b_align"/>
- <constant name="final_y1b" value="mod2_y1b+y1b_align"/>
- <constant name="final_y2b" value="mod2_y2b+y2b_align"/>
- <constant name="final_y3b" value="mod2_y3b+y3b_align"/>
- <constant name="final_y4b" value="mod2_y4b+y4b_align"/>
- <constant name="final_y5b" value="mod2_y5b+y5b_align"/>
- <constant name="final_y6b" value="mod2_y6b+y6b_align"/>
- <constant name="final_y7b" value="mod2_y7b+y7b_align"/>
- <constant name="final_y8b" value="mod2_y8b+y8b_align"/>
- <constant name="final_y9b" value="mod2_y9b+y9b_align"/>
- <constant name="final_y10b" value="mod2_y10b+y10b_align"/>
- <constant name="final_z1b" value="mod2_z1b+z1b_align"/>
- <constant name="final_z2b" value="mod2_z2b+z2b_align"/>
- <constant name="final_z3b" value="mod2_z3b+z3b_align"/>
- <constant name="final_z4b" value="mod2_z4b+z4b_align"/>
- <constant name="final_z5b" value="mod2_z5b+z5b_align"/>
- <constant name="final_z6b" value="mod2_z6b+z6b_align"/>
- <constant name="final_z7b" value="mod2_z7b+z7b_align"/>
- <constant name="final_z8b" value="mod2_z8b+z8b_align"/>
- <constant name="final_z9b" value="mod2_z9b+z9b_align"/>
- <constant name="final_z10b" value="mod2_z10b+z10b_align"/>
- <constant name="final_rx1b" value="mod2_rx1b+rx1b_align"/>
- <constant name="final_rx2b" value="mod2_rx2b+rx2b_align"/>
- <constant name="final_rx3b" value="mod2_rx3b+rx3b_align"/>
- <constant name="final_rx4b" value="mod2_rx4b+rx4b_align"/>
- <constant name="final_rx5b" value="mod2_rx5b+rx5b_align"/>
- <constant name="final_rx6b" value="mod2_rx6b+rx6b_align"/>
- <constant name="final_rx7b" value="mod2_rx7b+rx7b_align"/>
- <constant name="final_rx8b" value="mod2_rx8b+rx8b_align"/>
- <constant name="final_rx9b" value="mod2_rx9b+rx9b_align"/>
- <constant name="final_rx10b" value="mod2_rx10b+rx10b_align"/>
- <constant name="final_ry1b" value="mod2_ry1b+ry1b_align"/>
- <constant name="final_ry2b" value="mod2_ry2b+ry2b_align"/>
- <constant name="final_ry3b" value="mod2_ry3b+ry3b_align"/>
- <constant name="final_ry4b" value="mod2_ry4b+ry4b_align"/>
- <constant name="final_ry5b" value="mod2_ry5b+ry5b_align"/>
- <constant name="final_ry6b" value="mod2_ry6b+ry6b_align"/>
- <constant name="final_ry7b" value="mod2_ry7b+ry7b_align"/>
- <constant name="final_ry8b" value="mod2_ry8b+ry8b_align"/>
- <constant name="final_ry9b" value="mod2_ry9b+ry9b_align"/>
- <constant name="final_ry10b" value="mod2_ry10b+ry10b_align"/>
- <constant name="final_rz1b" value="mod2_rz1b+rz1b_align"/>
- <constant name="final_rz2b" value="mod2_rz2b+rz2b_align"/>
- <constant name="final_rz3b" value="mod2_rz3b+rz3b_align"/>
- <constant name="final_rz4b" value="mod2_rz4b+rz4b_align"/>
- <constant name="final_rz5b" value="mod2_rz5b+rz5b_align"/>
- <constant name="final_rz6b" value="mod2_rz6b+rz6b_align"/>
- <constant name="final_rz7b" value="mod2_rz7b+rz7b_align"/>
- <constant name="final_rz8b" value="mod2_rz8b+rz8b_align"/>
- <constant name="final_rz9b" value="mod2_rz9b+rz9b_align"/>
- <constant name="final_rz10b" value="mod2_rz10b+rz10b_align"/>
-
-
+
+ <!-- final constants -->
+ <constant name="final_x1t" value="mod2_x1t+x1t_align"/>
+ <constant name="final_x2t" value="mod2_x2t+x2t_align"/>
+ <constant name="final_x3t" value="mod2_x3t+x3t_align"/>
+ <constant name="final_x4t" value="mod2_x4t+x4t_align"/>
+ <constant name="final_x5t" value="mod2_x5t+x5t_align"/>
+ <constant name="final_x6t" value="mod2_x6t+x6t_align"/>
+ <constant name="final_x7t" value="mod2_x7t+x7t_align"/>
+ <constant name="final_x8t" value="mod2_x8t+x8t_align"/>
+ <constant name="final_x9t" value="mod2_x9t+x9t_align"/>
+ <constant name="final_x10t" value="mod2_x10t+x10t_align"/>
+ <constant name="final_y1t" value="mod2_y1t+y1t_align"/>
+ <constant name="final_y2t" value="mod2_y2t+y2t_align"/>
+ <constant name="final_y3t" value="mod2_y3t+y3t_align"/>
+ <constant name="final_y4t" value="mod2_y4t+y4t_align"/>
+ <constant name="final_y5t" value="mod2_y5t+y5t_align"/>
+ <constant name="final_y6t" value="mod2_y6t+y6t_align"/>
+ <constant name="final_y7t" value="mod2_y7t+y7t_align"/>
+ <constant name="final_y8t" value="mod2_y8t+y8t_align"/>
+ <constant name="final_y9t" value="mod2_y9t+y9t_align"/>
+ <constant name="final_y10t" value="mod2_y10t+y10t_align"/>
+ <constant name="final_z1t" value="mod2_z1t+z1t_align"/>
+ <constant name="final_z2t" value="mod2_z2t+z2t_align"/>
+ <constant name="final_z3t" value="mod2_z3t+z3t_align"/>
+ <constant name="final_z4t" value="mod2_z4t+z4t_align"/>
+ <constant name="final_z5t" value="mod2_z5t+z5t_align"/>
+ <constant name="final_z6t" value="mod2_z6t+z6t_align"/>
+ <constant name="final_z7t" value="mod2_z7t+z7t_align"/>
+ <constant name="final_z8t" value="mod2_z8t+z8t_align"/>
+ <constant name="final_z9t" value="mod2_z9t+z9t_align"/>
+ <constant name="final_z10t" value="mod2_z10t+z10t_align"/>
+ <constant name="final_rx1t" value="mod2_rx1t+rx1t_align"/>
+ <constant name="final_rx2t" value="mod2_rx2t+rx2t_align"/>
+ <constant name="final_rx3t" value="mod2_rx3t+rx3t_align"/>
+ <constant name="final_rx4t" value="mod2_rx4t+rx4t_align"/>
+ <constant name="final_rx5t" value="mod2_rx5t+rx5t_align"/>
+ <constant name="final_rx6t" value="mod2_rx6t+rx6t_align"/>
+ <constant name="final_rx7t" value="mod2_rx7t+rx7t_align"/>
+ <constant name="final_rx8t" value="mod2_rx8t+rx8t_align"/>
+ <constant name="final_rx9t" value="mod2_rx9t+rx9t_align"/>
+ <constant name="final_rx10t" value="mod2_rx10t+rx10t_align"/>
+ <constant name="final_ry1t" value="mod2_ry1t+ry1t_align"/>
+ <constant name="final_ry2t" value="mod2_ry2t+ry2t_align"/>
+ <constant name="final_ry3t" value="mod2_ry3t+ry3t_align"/>
+ <constant name="final_ry4t" value="mod2_ry4t+ry4t_align"/>
+ <constant name="final_ry5t" value="mod2_ry5t+ry5t_align"/>
+ <constant name="final_ry6t" value="mod2_ry6t+ry6t_align"/>
+ <constant name="final_ry7t" value="mod2_ry7t+ry7t_align"/>
+ <constant name="final_ry8t" value="mod2_ry8t+ry8t_align"/>
+ <constant name="final_ry9t" value="mod2_ry9t+ry9t_align"/>
+ <constant name="final_ry10t" value="mod2_ry10t+ry10t_align"/>
+ <constant name="final_rz1t" value="mod2_rz1t+rz1t_align"/>
+ <constant name="final_rz2t" value="mod2_rz2t+rz2t_align"/>
+ <constant name="final_rz3t" value="mod2_rz3t+rz3t_align"/>
+ <constant name="final_rz4t" value="mod2_rz4t+rz4t_align"/>
+ <constant name="final_rz5t" value="mod2_rz5t+rz5t_align"/>
+ <constant name="final_rz6t" value="mod2_rz6t+rz6t_align"/>
+ <constant name="final_rz7t" value="mod2_rz7t+rz7t_align"/>
+ <constant name="final_rz8t" value="mod2_rz8t+rz8t_align"/>
+ <constant name="final_rz9t" value="mod2_rz9t+rz9t_align"/>
+ <constant name="final_rz10t" value="mod2_rz10t+rz10t_align"/>
+ <constant name="final_x1b" value="mod2_x1b+x1b_align"/>
+ <constant name="final_x2b" value="mod2_x2b+x2b_align"/>
+ <constant name="final_x3b" value="mod2_x3b+x3b_align"/>
+ <constant name="final_x4b" value="mod2_x4b+x4b_align"/>
+ <constant name="final_x5b" value="mod2_x5b+x5b_align"/>
+ <constant name="final_x6b" value="mod2_x6b+x6b_align"/>
+ <constant name="final_x7b" value="mod2_x7b+x7b_align"/>
+ <constant name="final_x8b" value="mod2_x8b+x8b_align"/>
+ <constant name="final_x9b" value="mod2_x9b+x9b_align"/>
+ <constant name="final_x10b" value="mod2_x10b+x10b_align"/>
+ <constant name="final_y1b" value="mod2_y1b+y1b_align"/>
+ <constant name="final_y2b" value="mod2_y2b+y2b_align"/>
+ <constant name="final_y3b" value="mod2_y3b+y3b_align"/>
+ <constant name="final_y4b" value="mod2_y4b+y4b_align"/>
+ <constant name="final_y5b" value="mod2_y5b+y5b_align"/>
+ <constant name="final_y6b" value="mod2_y6b+y6b_align"/>
+ <constant name="final_y7b" value="mod2_y7b+y7b_align"/>
+ <constant name="final_y8b" value="mod2_y8b+y8b_align"/>
+ <constant name="final_y9b" value="mod2_y9b+y9b_align"/>
+ <constant name="final_y10b" value="mod2_y10b+y10b_align"/>
+ <constant name="final_z1b" value="mod2_z1b+z1b_align"/>
+ <constant name="final_z2b" value="mod2_z2b+z2b_align"/>
+ <constant name="final_z3b" value="mod2_z3b+z3b_align"/>
+ <constant name="final_z4b" value="mod2_z4b+z4b_align"/>
+ <constant name="final_z5b" value="mod2_z5b+z5b_align"/>
+ <constant name="final_z6b" value="mod2_z6b+z6b_align"/>
+ <constant name="final_z7b" value="mod2_z7b+z7b_align"/>
+ <constant name="final_z8b" value="mod2_z8b+z8b_align"/>
+ <constant name="final_z9b" value="mod2_z9b+z9b_align"/>
+ <constant name="final_z10b" value="mod2_z10b+z10b_align"/>
+ <constant name="final_rx1b" value="mod2_rx1b+rx1b_align"/>
+ <constant name="final_rx2b" value="mod2_rx2b+rx2b_align"/>
+ <constant name="final_rx3b" value="mod2_rx3b+rx3b_align"/>
+ <constant name="final_rx4b" value="mod2_rx4b+rx4b_align"/>
+ <constant name="final_rx5b" value="mod2_rx5b+rx5b_align"/>
+ <constant name="final_rx6b" value="mod2_rx6b+rx6b_align"/>
+ <constant name="final_rx7b" value="mod2_rx7b+rx7b_align"/>
+ <constant name="final_rx8b" value="mod2_rx8b+rx8b_align"/>
+ <constant name="final_rx9b" value="mod2_rx9b+rx9b_align"/>
+ <constant name="final_rx10b" value="mod2_rx10b+rx10b_align"/>
+ <constant name="final_ry1b" value="mod2_ry1b+ry1b_align"/>
+ <constant name="final_ry2b" value="mod2_ry2b+ry2b_align"/>
+ <constant name="final_ry3b" value="mod2_ry3b+ry3b_align"/>
+ <constant name="final_ry4b" value="mod2_ry4b+ry4b_align"/>
+ <constant name="final_ry5b" value="mod2_ry5b+ry5b_align"/>
+ <constant name="final_ry6b" value="mod2_ry6b+ry6b_align"/>
+ <constant name="final_ry7b" value="mod2_ry7b+ry7b_align"/>
+ <constant name="final_ry8b" value="mod2_ry8b+ry8b_align"/>
+ <constant name="final_ry9b" value="mod2_ry9b+ry9b_align"/>
+ <constant name="final_ry10b" value="mod2_ry10b+ry10b_align"/>
+ <constant name="final_rz1b" value="mod2_rz1b+rz1b_align"/>
+ <constant name="final_rz2b" value="mod2_rz2b+rz2b_align"/>
+ <constant name="final_rz3b" value="mod2_rz3b+rz3b_align"/>
+ <constant name="final_rz4b" value="mod2_rz4b+rz4b_align"/>
+ <constant name="final_rz5b" value="mod2_rz5b+rz5b_align"/>
+ <constant name="final_rz6b" value="mod2_rz6b+rz6b_align"/>
+ <constant name="final_rz7b" value="mod2_rz7b+rz7b_align"/>
+ <constant name="final_rz8b" value="mod2_rz8b+rz8b_align"/>
+ <constant name="final_rz9b" value="mod2_rz9b+rz9b_align"/>
+ <constant name="final_rz10b" value="mod2_rz10b+rz10b_align"/>
+
+
</define>
<materials>
- <!-- Set the world material to vacuum. -->
+ <!-- Set the world material to vacuum. -->
<material name="WorldMaterial">
<D type="density" unit="g/cm3" value="0.0000000000000001"/>
<fraction n="1.0" ref="Vacuum" />
@@ -800,7 +800,7 @@
</layer>
<layer id="2">
<module_placement name="TestRunModuleFieldDef" id="0" x="0" y="0" z="dipoleMagnetPositionZ+dipoleMagnetLength/2" rx="0" ry="0" rz="-PI/2"/>
- </layer>
+ </layer>
</detector>
<detector id="13" name="Ecal" type="HPSEcal3" insideTrackingVolume="false" readout="EcalHits" vis="ECALVis">
Modified: java/branches/HPSJAVA-409/detector-data/detectors/HPSTestRunTracker2014-v0/compact.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/detectors/HPSTestRunTracker2014-v0/compact.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/detectors/HPSTestRunTracker2014-v0/compact.xml Wed Apr 27 11:11:32 2016
@@ -7,9 +7,9 @@
</info>
<define>
- <!-- units -->
- <constant name="mm" value="0.1*cm"/>
- <constant name="inch" value="25.4*mm"/>
+ <!-- units -->
+ <constant name="mm" value="0.1*cm"/>
+ <constant name="inch" value="25.4*mm"/>
<!-- world -->
<constant name="world_side" value="500.0*cm" />
@@ -17,48 +17,48 @@
<constant name="world_y" value="world_side" />
<constant name="world_z" value="world_side" />
- <!-- tracking region -->
- <constant name="tracking_region_radius" value="200.0*cm"/>
+ <!-- tracking region -->
+ <constant name="tracking_region_radius" value="200.0*cm"/>
<constant name="tracking_region_min" value="5.0*cm"/>
<constant name="tracking_region_zmax" value="131.8*cm"/>
<!-- dipole magnet and B-field -->
- <constant name="dipoleMagnetPositionZ" value="45.72*cm" />
- <constant name="dipoleMagnetHeight" value="17.78*cm" />
- <constant name="dipoleMagnetWidth" value="41.6052*cm" />
- <constant name="dipoleMagnetLength" value="dipoleMagnetPositionZ*2.0" />
- <constant name="constBFieldY" value="-0.491" />
+ <constant name="dipoleMagnetPositionZ" value="45.72*cm" />
+ <constant name="dipoleMagnetHeight" value="17.78*cm" />
+ <constant name="dipoleMagnetWidth" value="41.6052*cm" />
+ <constant name="dipoleMagnetLength" value="dipoleMagnetPositionZ*2.0" />
+ <constant name="constBFieldY" value="-0.491" />
- <!-- ECAL -->
+ <!-- ECAL -->
<constant name="ecal_front" value="13.3/2*mm" />
- <constant name="ecal_back" value="16/2*mm" />
- <constant name="ecal_z" value="160/2*mm" />
- <constant name="ecal_dx" value="41.27*mm" />
+ <constant name="ecal_back" value="16/2*mm" />
+ <constant name="ecal_z" value="160/2*mm" />
+ <constant name="ecal_dx" value="41.27*mm" />
</define>
<materials>
- <!-- Set the world material to vacuum. -->
+ <!-- Set the world material to vacuum. -->
<material name="WorldMaterial">
- <D type="density" unit="g/cm3" value="0.0000000000000001"/>
- <fraction n="1.0" ref="Vacuum" />
+ <D type="density" unit="g/cm3" value="0.0000000000000001"/>
+ <fraction n="1.0" ref="Vacuum" />
</material>
<!-- Set tracking material to vacuum. -->
<material name="TrackingMaterial">
<D type="density" unit="g/cm3" value="0.0000000000000001" />
<fraction n="1.0" ref="Vacuum" />
</material>
- <!-- ECal crystal material. -->
- <material name="LeadTungstate">
- <D value="8.28" unit="g/cm3" />
- <composite n="1" ref="Pb" />
- <composite n="1" ref="W" />
- <composite n="4" ref="O" />
- </material>
+ <!-- ECal crystal material. -->
+ <material name="LeadTungstate">
+ <D value="8.28" unit="g/cm3" />
+ <composite n="1" ref="Pb" />
+ <composite n="1" ref="W" />
+ <composite n="4" ref="O" />
+ </material>
</materials>
<display>
-
+
<vis name="SensorVis" alpha="1.0" r="1.0" g="0.0" b="0.0" drawingStyle="wireframe" lineStyle="unbroken" showDaughters="true" visible="true"/>
<vis name="ActiveSensorVis" alpha="1.0" r="1.0" g="0.0" b="0.0" drawingStyle="solid" lineStyle="unbroken" showDaughters="true" visible="true"/>
<vis name="CarbonFiberVis" alpha="1.0" r="0.88" g="0.88" b="0.88" drawingStyle="solid" lineStyle="unbroken" showDaughters="true" visible="true"/>
@@ -72,14 +72,14 @@
<vis name="BasePlateVis" alpha="1.0" r="0.35" g="0.35" b="0.35" drawingStyle="solid" lineStyle="dashed" showDaughters="true" visible="true"/>
<vis name="LayerVis" alpha="0.0" r="0.0" g="0.0" b="1.0" drawingStyle="wireframe" showDaughters="true" visible="false"/>
<vis name="ComponentVis" alpha="0.0" r="0.0" g="0.2" b="0.4" drawingStyle="solid" showDaughters="false" visible="false"/>
- <vis name="BeamPlaneVis" alpha="1.0" r="1.0" g="1.0" b="1.0" drawingStyle="solid" lineStyle="unbroken" showDaughters="false" visible="true"/>
+ <vis name="BeamPlaneVis" alpha="1.0" r="1.0" g="1.0" b="1.0" drawingStyle="solid" lineStyle="unbroken" showDaughters="false" visible="true"/>
<vis name="ECALVis" r="0.8" g="0.5" b="0.1" />
</display>
<detectors>
<detector id="1" name="Tracker" type="HPSTestRunTracker2014" readout="TrackerHits">
- <millepede_constants>
+ <millepede_constants>
<!-- top translations -->
<millepede_constant name="11101" value="0.0"/>
<millepede_constant name="11102" value="0.0"/>
@@ -234,43 +234,43 @@
</detector>
- <detector id="30" name="TrackerFieldDef" type="HPSTracker2"
- readout="TrackerHitsThin">
- <comment>The Silicon Vertex Tracker</comment>
- <module name="TestRunModuleFieldDef">
- <box x="dipoleMagnetWidth*4+1" y="dipoleMagnetHeight*4+1" />
- <module_component thickness="0.000000001*cm"
- material="Vacuum" sensitive="true">
- <dimensions x="dipoleMagnetWidth*4" y="dipoleMagnetHeight*4" />
- </module_component>
- </module>
- <layer id="1">
- <module_placement name="TestRunModuleFieldDef" id="0"
- x="0" y="0" z="dipoleMagnetPositionZ-dipoleMagnetLength/2" rx="0"
- ry="0" rz="-PI/2" />
- </layer>
- <layer id="2">
- <module_placement name="TestRunModuleFieldDef" id="0"
- x="0" y="0" z="dipoleMagnetPositionZ+dipoleMagnetLength/2" rx="0"
- ry="0" rz="-PI/2" />
- </layer>
- </detector>
-
- <detector id="13" name="Ecal" type="HPSEcal3"
- insideTrackingVolume="false" readout="EcalHits" vis="ECALVis">
- <comment>The crystal ECal</comment>
- <material name="LeadTungstate" />
- <dimensions x1="ecal_front" y1="ecal_front" x2="ecal_back"
- y2="ecal_back" z="ecal_z" />
- <!-- Measurements as found in installation by Stepan -->
- <!-- Moving ecal halves out by 6.5mm from SVT ecal-track matching - Pelle -->
- <!-- Moving ecal face to reflect latest survey calculation 2/17/14 - Pelle -->
- <layout beamgap="43.5*mm" nx="46" ny="5" dface="147.0*cm">
- <remove ixmin="-10" ixmax="-2" iymin="-1" iymax="1" />
- <top dx="ecal_dx" dy="0." dz="0." />
- <bottom dx="ecal_dx" dy="0." dz="0." />
- </layout>
- </detector>
+ <detector id="30" name="TrackerFieldDef" type="HPSTracker2"
+ readout="TrackerHitsThin">
+ <comment>The Silicon Vertex Tracker</comment>
+ <module name="TestRunModuleFieldDef">
+ <box x="dipoleMagnetWidth*4+1" y="dipoleMagnetHeight*4+1" />
+ <module_component thickness="0.000000001*cm"
+ material="Vacuum" sensitive="true">
+ <dimensions x="dipoleMagnetWidth*4" y="dipoleMagnetHeight*4" />
+ </module_component>
+ </module>
+ <layer id="1">
+ <module_placement name="TestRunModuleFieldDef" id="0"
+ x="0" y="0" z="dipoleMagnetPositionZ-dipoleMagnetLength/2" rx="0"
+ ry="0" rz="-PI/2" />
+ </layer>
+ <layer id="2">
+ <module_placement name="TestRunModuleFieldDef" id="0"
+ x="0" y="0" z="dipoleMagnetPositionZ+dipoleMagnetLength/2" rx="0"
+ ry="0" rz="-PI/2" />
+ </layer>
+ </detector>
+
+ <detector id="13" name="Ecal" type="HPSEcal3"
+ insideTrackingVolume="false" readout="EcalHits" vis="ECALVis">
+ <comment>The crystal ECal</comment>
+ <material name="LeadTungstate" />
+ <dimensions x1="ecal_front" y1="ecal_front" x2="ecal_back"
+ y2="ecal_back" z="ecal_z" />
+ <!-- Measurements as found in installation by Stepan -->
+ <!-- Moving ecal halves out by 6.5mm from SVT ecal-track matching - Pelle -->
+ <!-- Moving ecal face to reflect latest survey calculation 2/17/14 - Pelle -->
+ <layout beamgap="43.5*mm" nx="46" ny="5" dface="147.0*cm">
+ <remove ixmin="-10" ixmax="-2" iymin="-1" iymax="1" />
+ <top dx="ecal_dx" dy="0." dz="0." />
+ <bottom dx="ecal_dx" dy="0." dz="0." />
+ </layout>
+ </detector>
@@ -279,28 +279,28 @@
<readout name="TrackerHits">
<id>system:6,barrel:3,layer:4,module:12,sensor:1,side:32:-2,strip:12</id>
</readout>
- <readout name="TrackerHitsThin">
- <id>system:6,barrel:3,layer:4,module:12,sensor:1,side:32:-2,strip:12</id>
- </readout>
-
- <readout name="EcalHits">
- <segmentation type="GridXYZ" gridSizeX="0.0" gridSizeY="0.0"
- gridSizeZ="0.0" />
- <id>system:6,layer:2,ix:-8,iy:-6</id>
- </readout>
+ <readout name="TrackerHitsThin">
+ <id>system:6,barrel:3,layer:4,module:12,sensor:1,side:32:-2,strip:12</id>
+ </readout>
+
+ <readout name="EcalHits">
+ <segmentation type="GridXYZ" gridSizeX="0.0" gridSizeY="0.0"
+ gridSizeZ="0.0" />
+ <id>system:6,layer:2,ix:-8,iy:-6</id>
+ </readout>
</readouts>
- <fields>
- <field type="BoxDipole" name="AnalyzingDipole" x="0*cm" y="0*cm"
- z="dipoleMagnetPositionZ" dx="dipoleMagnetWidth/2.0" dy="dipoleMagnetHeight/2.0"
- dz="dipoleMagnetLength/2.0" bx="0.0" by="constBFieldY" bz="0.0" />
- </fields>
+ <fields>
+ <field type="BoxDipole" name="AnalyzingDipole" x="0*cm" y="0*cm"
+ z="dipoleMagnetPositionZ" dx="dipoleMagnetWidth/2.0" dy="dipoleMagnetHeight/2.0"
+ dz="dipoleMagnetLength/2.0" bx="0.0" by="constBFieldY" bz="0.0" />
+ </fields>
<!--
- <includes>
- <gdmlFile
- ref="http://www.lcsim.org/test/gdml/testRunDownstreamVacuumFlange.gdml" />
- </includes>
+ <includes>
+ <gdmlFile
+ ref="http://www.lcsim.org/test/gdml/testRunDownstreamVacuumFlange.gdml" />
+ </includes>
-->
</lccdd>
Modified: java/branches/HPSJAVA-409/detector-data/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-data/pom.xml (original)
+++ java/branches/HPSJAVA-409/detector-data/pom.xml Wed Apr 27 11:11:32 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/detector-data/</url>
Modified: java/branches/HPSJAVA-409/detector-model/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/pom.xml (original)
+++ java/branches/HPSJAVA-409/detector-model/pom.xml Wed Apr 27 11:11:32 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<build>
<plugins>
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTestRunTracker2014Converter.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTestRunTracker2014Converter.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTestRunTracker2014Converter.java Wed Apr 27 11:11:32 2016
@@ -11,7 +11,6 @@
import org.lcsim.geometry.compact.converter.HPSTestRunTracker2014JavaBuilder;
import org.lcsim.geometry.compact.converter.HPSTrackerBuilder;
import org.lcsim.geometry.compact.converter.HPSTrackerJavaBuilder;
-import org.lcsim.geometry.compact.converter.JavaSurveyVolume;
import org.lcsim.geometry.subdetector.HPSTestRunTracker2014;
/**
@@ -21,21 +20,21 @@
*/
public class HPSTestRunTracker2014Converter extends HPSTracker2014ConverterBase {
- public HPSTestRunTracker2014Converter() {
- super();
- }
-
- /* (non-Javadoc)
- * @see org.lcsim.detector.converter.compact.HPSTracker2014ConverterBase#initializeBuilder(org.jdom.Element)
- */
- protected HPSTrackerJavaBuilder initializeBuilder(Element node) {
- return new HPSTestRunTracker2014JavaBuilder(_debug,node);
- }
+ public HPSTestRunTracker2014Converter() {
+ super();
+ }
- /* (non-Javadoc)
- * @see org.lcsim.detector.converter.compact.AbstractSubdetectorConverter#getSubdetectorType()
- */
- public Class getSubdetectorType() {
+ /* (non-Javadoc)
+ * @see org.lcsim.detector.converter.compact.HPSTracker2014ConverterBase#initializeBuilder(org.jdom.Element)
+ */
+ protected HPSTrackerJavaBuilder initializeBuilder(Element node) {
+ return new HPSTestRunTracker2014JavaBuilder(_debug,node);
+ }
+
+ /* (non-Javadoc)
+ * @see org.lcsim.detector.converter.compact.AbstractSubdetectorConverter#getSubdetectorType()
+ */
+ public Class getSubdetectorType() {
return HPSTestRunTracker2014.class;
}
@@ -53,5 +52,5 @@
protected int getModuleNumber(String surveyVolume) {
return HPSTrackerBuilder.getHalfFromName(surveyVolume).equals("top") ? 0 : 1;
}
-
+
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTracker2014Converter.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTracker2014Converter.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTracker2014Converter.java Wed Apr 27 11:11:32 2016
@@ -7,7 +7,6 @@
import org.lcsim.geometry.compact.converter.HPSTracker2014JavaBuilder;
import org.lcsim.geometry.compact.converter.HPSTrackerBuilder;
import org.lcsim.geometry.compact.converter.HPSTrackerJavaBuilder;
-import org.lcsim.geometry.compact.converter.JavaSurveyVolume;
import org.lcsim.geometry.subdetector.HPSTracker2014;
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTracker2014ConverterBase.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTracker2014ConverterBase.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTracker2014ConverterBase.java Wed Apr 27 11:11:32 2016
@@ -71,7 +71,7 @@
* @see org.lcsim.detector.converter.compact.AbstractSubdetectorConverter#makeIdentifierHelper(org.lcsim.geometry.compact.Subdetector, org.lcsim.detector.DetectorIdentifierHelper.SystemMap)
*/
public IIdentifierHelper makeIdentifierHelper(Subdetector subdetector, SystemMap systemMap) {
- return new SiTrackerIdentifierHelper(subdetector.getDetectorElement(), makeIdentifierDictionary(subdetector), systemMap);
+ return new SiTrackerIdentifierHelper(subdetector.getDetectorElement(), makeIdentifierDictionary(subdetector), systemMap);
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTrackerConverter.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTrackerConverter.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTrackerConverter.java Wed Apr 27 11:11:32 2016
@@ -1,8 +1,4 @@
package org.lcsim.detector.converter.compact;
-
-import hep.physics.matrix.BasicMatrix;
-import hep.physics.vec.BasicHep3Vector;
-import hep.physics.vec.VecOp;
import java.util.ArrayList;
import java.util.HashMap;
@@ -16,15 +12,11 @@
import org.lcsim.detector.DetectorIdentifierHelper;
import org.lcsim.detector.DetectorIdentifierHelper.SystemMap;
import org.lcsim.detector.IDetectorElement;
-
import org.lcsim.detector.IPhysicalVolume;
import org.lcsim.detector.IPhysicalVolumePath;
-import org.lcsim.detector.IRotation3D;
-import org.lcsim.detector.ITranslation3D;
import org.lcsim.detector.LogicalVolume;
import org.lcsim.detector.PhysicalVolume;
import org.lcsim.detector.RotationGeant;
-import org.lcsim.detector.RotationPassiveXYZ;
import org.lcsim.detector.Transform3D;
import org.lcsim.detector.Translation3D;
import org.lcsim.detector.identifier.ExpandedIdentifier;
@@ -35,10 +27,7 @@
import org.lcsim.detector.material.IMaterial;
import org.lcsim.detector.material.MaterialStore;
import org.lcsim.detector.solids.Box;
-import org.lcsim.detector.solids.Polygon3D;
-import org.lcsim.detector.tracker.silicon.ChargeCarrier;
import org.lcsim.detector.tracker.silicon.SiSensor;
-import org.lcsim.detector.tracker.silicon.SiStrips;
import org.lcsim.detector.tracker.silicon.SiTrackerIdentifierHelper;
import org.lcsim.detector.tracker.silicon.SiTrackerModule;
import org.lcsim.geometry.compact.Detector;
@@ -238,7 +227,7 @@
{
//double moduleX = moduleParameters.getDimension(0);
//double moduleY = moduleParameters.getDimension(1);
- double moduleY = moduleParameters.getDimension(0);
+ double moduleY = moduleParameters.getDimension(0);
double moduleX = moduleParameters.getDimension(1);
Box box = (Box)moduleVolume.getSolid();
double moduleZ = box.getZHalfLength() * 2;
@@ -375,7 +364,7 @@
/*
private void configSensor(SiSensor sensor)
{
- //
+ //
Box sensorSolid = (Box) sensor.getGeometry().getLogicalVolume().getSolid();
Polygon3D pside = sensorSolid.getFacesNormalTo(new BasicHep3Vector(0, 0, 1)).get(0);
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/HpsTracker2.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/HpsTracker2.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/HpsTracker2.java Wed Apr 27 11:11:32 2016
@@ -8,7 +8,7 @@
import org.lcsim.detector.converter.compact.SubdetectorDetectorElement;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
/**
- * Class describing an {@link HPSTracker2} subdetector.
+ * Detector element for <code>HPSTracker2</code> type.
*
* @author Omar Moreno <[log in to unmask]>
* @author Jeremy McCormick <[log in to unmask]>
@@ -19,89 +19,89 @@
private boolean debug = false;
// List of stereo layers composing the SVT
- List<SvtStereoLayer> stereoPairs = new ArrayList<SvtStereoLayer>();
-
- public HpsTracker2(String name, IDetectorElement parent) {
- super(name, parent);
- }
-
- /**
- * Get a collection of stereo pairs ({@link SvtStereoLayer}) composing the SVT.
- *
- * @return List of stereo pairs
- */
- public List<SvtStereoLayer> getStereoPairs(){
- return Collections.unmodifiableList(stereoPairs);
- }
+ List<SvtStereoLayer> stereoPairs = new ArrayList<SvtStereoLayer>();
+
+ public HpsTracker2(String name, IDetectorElement parent) {
+ super(name, parent);
+ }
+
+ /**
+ * Get a collection of stereo pairs ({@link SvtStereoLayer}) composing the SVT.
+ *
+ * @return List of stereo pairs
+ */
+ public List<SvtStereoLayer> getStereoPairs(){
+ return Collections.unmodifiableList(stereoPairs);
+ }
- /**
- * Get the collection of {@link HpsSiSensor} composing the SVT.
- *
- * @return List of sensors
- */
- public List<HpsSiSensor> getSensors(){
- List<HpsSiSensor> list = this.findDescendants(HpsSiSensor.class);
- if(debug) {
- System.out.printf("%s: found %d HpsSiSensors\n",getClass().getSimpleName(), list.size());
- System.out.printf("%s: %45s %5s %5s\n",getClass().getSimpleName(), "<name>", "<layerID>", "<moduleID>");
- for(HpsSiSensor sensor : list) {
- System.out.printf("%s: %45s %5d %5d\n",getClass().getSimpleName(), sensor.getName(), sensor.getLayerNumber(), sensor.getModuleNumber());
- }
- }
- return list;
- }
-
- /**
- * Get a {@link HpsSiSensor} by layer and module number.
- *
- * @param layer The SVT layer number
- * @param module The SVT module number
- * @return Corresponding sensor
- */
- public HpsSiSensor getSensor(int layer, int module){
- for(HpsSiSensor sensor : this.getSensors()){
- if(sensor.getLayerNumber() == layer && sensor.getModuleNumber() == module)
- return sensor;
- }
- return null;
- }
-
- /**
- * Get the maximum layer number present in the collection of {@link HpsSiSensor}.
- *
- * @return maximum layer number
- */
- private int getMaxLayerNumber(){
- int maxLayerNumber = 0;
- for(HpsSiSensor sensor : this.getSensors()){
- if(sensor.getLayerNumber() > maxLayerNumber) maxLayerNumber = sensor.getLayerNumber();
- }
- return maxLayerNumber;
- }
+ /**
+ * Get the collection of {@link HpsSiSensor} composing the SVT.
+ *
+ * @return List of sensors
+ */
+ public List<HpsSiSensor> getSensors(){
+ List<HpsSiSensor> list = this.findDescendants(HpsSiSensor.class);
+ if(debug) {
+ System.out.printf("%s: found %d HpsSiSensors\n",getClass().getSimpleName(), list.size());
+ System.out.printf("%s: %45s %5s %5s\n",getClass().getSimpleName(), "<name>", "<layerID>", "<moduleID>");
+ for(HpsSiSensor sensor : list) {
+ System.out.printf("%s: %45s %5d %5d\n",getClass().getSimpleName(), sensor.getName(), sensor.getLayerNumber(), sensor.getModuleNumber());
+ }
+ }
+ return list;
+ }
+
+ /**
+ * Get a {@link HpsSiSensor} by layer and module number.
+ *
+ * @param layer The SVT layer number
+ * @param module The SVT module number
+ * @return Corresponding sensor
+ */
+ public HpsSiSensor getSensor(int layer, int module){
+ for(HpsSiSensor sensor : this.getSensors()){
+ if(sensor.getLayerNumber() == layer && sensor.getModuleNumber() == module)
+ return sensor;
+ }
+ return null;
+ }
+
+ /**
+ * Get the maximum layer number present in the collection of {@link HpsSiSensor}.
+ *
+ * @return maximum layer number
+ */
+ private int getMaxLayerNumber(){
+ int maxLayerNumber = 0;
+ for(HpsSiSensor sensor : this.getSensors()){
+ if(sensor.getLayerNumber() > maxLayerNumber) maxLayerNumber = sensor.getLayerNumber();
+ }
+ return maxLayerNumber;
+ }
- /**
- * Get the maximum module number present in the collection of {@link HpsSiSensor}.
- *
- * @return maximum module number
- */
- private int getMaxModuleNumber(){
- int maxModuleID = 0;
- for(HpsSiSensor sensor : this.getSensors()){
- if(sensor.getModuleNumber() > maxModuleID) maxModuleID = sensor.getModuleNumber();
- }
- return maxModuleID;
- }
-
- /**
- * Method that loops through the collection of {@link HpsSiSensor} and creates
- * stereo layers. A stereo layer is composed of two adjacent sensors (stereo and axial)
- * with the same module number.
- */
- public void createStereoLayers(){
+ /**
+ * Get the maximum module number present in the collection of {@link HpsSiSensor}.
+ *
+ * @return maximum module number
+ */
+ private int getMaxModuleNumber(){
+ int maxModuleID = 0;
+ for(HpsSiSensor sensor : this.getSensors()){
+ if(sensor.getModuleNumber() > maxModuleID) maxModuleID = sensor.getModuleNumber();
+ }
+ return maxModuleID;
+ }
+
+ /**
+ * Method that loops through the collection of {@link HpsSiSensor} and creates
+ * stereo layers. A stereo layer is composed of two adjacent sensors (stereo and axial)
+ * with the same module number.
+ */
+ public void createStereoLayers(){
- //System.out.printf("%s: create stereo layers\n",getClass().getSimpleName());
+ //System.out.printf("%s: create stereo layers\n",getClass().getSimpleName());
- HpsSiSensor firstSensor = null;
+ HpsSiSensor firstSensor = null;
HpsSiSensor secondSensor = null;
//System.out.printf("%s: %10s %10s %42s %42s\n",getClass().getSimpleName(), "layerID/moduleID", "layerID/moduleID", "sensor1", "sensor2");
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/SvtStereoLayer.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/SvtStereoLayer.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/SvtStereoLayer.java Wed Apr 27 11:11:32 2016
@@ -9,76 +9,76 @@
*/
public class SvtStereoLayer {
-
- // Layer number to which the stereo pair belongs to
- private int stereoLayerNumber = 0;
-
- // The axial and stereo sensors
- private HpsSiSensor axialSensor = null;
- private HpsSiSensor stereoSensor = null;
-
-
- /**
- * Ctor
- *
- * @param layerNumber Layer number to which the stereo pair belongs to
- * @param firstSensor The first sensor in the stereo layer
- * @param secondSensor The second sensor in the stereo layer
- */
- public SvtStereoLayer(int stereoLayerNumber, HpsSiSensor firstSensor, HpsSiSensor secondSensor){
- this.stereoLayerNumber = stereoLayerNumber;
- if(firstSensor.isAxial()){
- this.axialSensor = firstSensor;
- this.stereoSensor = secondSensor;
- } else {
- this.axialSensor = secondSensor;
- this.stereoSensor = firstSensor;
- }
- }
-
-
- /**
- * Get the axial sensor of the stereo pair
- *
- * @return Axial sensor. Returns null if it hasn't been set yet.
- */
- public HpsSiSensor getAxialSensor(){
- return axialSensor;
- }
-
- /**
- * Get the stereo sensor of the stereo pair
- *
- * @return Stereo sensor. Returns null if it hasn't been set yet.
- */
- public HpsSiSensor getStereoSensor(){
- return stereoSensor;
- }
+
+ // Layer number to which the stereo pair belongs to
+ private int stereoLayerNumber = 0;
+
+ // The axial and stereo sensors
+ private HpsSiSensor axialSensor = null;
+ private HpsSiSensor stereoSensor = null;
+
+
+ /**
+ * Ctor
+ *
+ * @param stereoLayerNumber Layer number to which the stereo pair belongs
+ * @param firstSensor The first sensor in the stereo layer
+ * @param secondSensor The second sensor in the stereo layer
+ */
+ public SvtStereoLayer(int stereoLayerNumber, HpsSiSensor firstSensor, HpsSiSensor secondSensor){
+ this.stereoLayerNumber = stereoLayerNumber;
+ if(firstSensor.isAxial()){
+ this.axialSensor = firstSensor;
+ this.stereoSensor = secondSensor;
+ } else {
+ this.axialSensor = secondSensor;
+ this.stereoSensor = firstSensor;
+ }
+ }
+
+
+ /**
+ * Get the axial sensor of the stereo pair
+ *
+ * @return Axial sensor. Returns null if it hasn't been set yet.
+ */
+ public HpsSiSensor getAxialSensor(){
+ return axialSensor;
+ }
+
+ /**
+ * Get the stereo sensor of the stereo pair
+ *
+ * @return Stereo sensor. Returns null if it hasn't been set yet.
+ */
+ public HpsSiSensor getStereoSensor(){
+ return stereoSensor;
+ }
- /**
- * Get the layer number to which the stereo pair belongs to.
- *
- * @return stereo layer number
- */
- public int getLayerNumber(){
- return stereoLayerNumber;
- }
-
- /**
- * Return a string describing the stereo pair
- *
- * @return stereo pair description
- */
- @Override
- public String toString(){
- StringBuffer buffer = new StringBuffer();
- buffer.append("[ Stereo Pair ]: Layer number: " + this.getLayerNumber() + "\n");
- buffer.append("\t\tAxial Sensor: ");
- buffer.append(axialSensor == null ? "None" : axialSensor.getName());
- buffer.append("\tStereo Sensor: ");
- buffer.append(stereoSensor == null ? "None" : stereoSensor.getName());
- return buffer.toString();
- }
+ /**
+ * Get the layer number to which the stereo pair belongs.
+ *
+ * @return stereo layer number
+ */
+ public int getLayerNumber(){
+ return stereoLayerNumber;
+ }
+
+ /**
+ * Return a string describing the stereo pair
+ *
+ * @return stereo pair description
+ */
+ @Override
+ public String toString(){
+ StringBuffer buffer = new StringBuffer();
+ buffer.append("[ Stereo Pair ]: Layer number: " + this.getLayerNumber() + "\n");
+ buffer.append("\t\tAxial Sensor: ");
+ buffer.append(axialSensor == null ? "None" : axialSensor.getName());
+ buffer.append("\tStereo Sensor: ");
+ buffer.append(stereoSensor == null ? "None" : stereoSensor.getName());
+ return buffer.toString();
+ }
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsSiSensor.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsSiSensor.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsSiSensor.java Wed Apr 27 11:11:32 2016
@@ -303,9 +303,8 @@
/**
* Generate an ID for a channel (strip) on a sensor.
*
- * @param sensor
* @param channel : Physical channel number
- * @return ID
+ * @return the channel ID
*/
public long makeChannelID(final int channel) {
final int sideNumber = this.hasElectrodesOnSide(ChargeCarrier.HOLE) ? ChargeCarrier.HOLE.charge()
@@ -385,7 +384,7 @@
/**
* Set the front end board (FEB) ID of the sensor.
*
- * @param FEB ID The FEB ID of the sensor.
+ * @param febID The FEB ID of the sensor.
*/
public void setFebID(final int febID) {
this.febID = febID;
@@ -394,7 +393,7 @@
/**
* Set the FEB hybrid ID of the sensor.
*
- * @param FEB hybrid ID The FEB hybrid ID.
+ * @param febHybridID The FEB hybrid ID.
*/
public void setFebHybridID(final int febHybridID) {
this.febHybridID = febHybridID;
@@ -412,7 +411,7 @@
/**
* Flag the sensor as being axial.
*
- * @param true if the sensor is Axial, false otherwise
+ * @param isAxial true if the sensor is axial, false otherwise
*/
public void setAxial(final boolean isAxial) {
this.isAxial = isAxial;
@@ -421,7 +420,7 @@
/**
* Flag the sensor as being stereo
*
- * @param true is the sensor is stereo, false otherwise
+ * @param isStereo true if the sensor is stereo, false otherwise
*/
public void setStereo(final boolean isStereo) {
this.isStereo = isStereo;
@@ -449,6 +448,8 @@
buffer.append("\n");
buffer.append("----------------------------------");
buffer.append("\n");
+ buffer.append("Feb ID: " + this.getFebID() + "\n");
+ buffer.append("Feb Hybrid ID: " + this.getFebHybridID() + "\n");
buffer.append("Layer: " + this.getLayerNumber() + "\n");
buffer.append("Module: " + this.getModuleNumber() + "\n");
buffer.append("Number of readout strips: " + this.getReadoutElectrodes(ChargeCarrier.HOLE).getNCells() + "\n");
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsTestRunSiSensor.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsTestRunSiSensor.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsTestRunSiSensor.java Wed Apr 27 11:11:32 2016
@@ -14,85 +14,85 @@
* @author Omar Moreno <[log in to unmask]>
*/
public class HpsTestRunSiSensor extends HpsSiSensor {
-
-
- protected int fpgaID;
- protected int hybridID;
-
-
- /**
- * This class constructor matches the signature of <code>SiSensor</code>.
- * @param sensorid The sensor ID.
- * @param name The name of the sensor.
- * @param parent The parent DetectorElement.
- * @param support The physical support path.
- * @param id The identifier of the sensor.
- */
- public HpsTestRunSiSensor(
+
+
+ protected int fpgaID;
+ protected int hybridID;
+
+
+ /**
+ * This class constructor matches the signature of <code>SiSensor</code>.
+ * @param sensorid The sensor ID.
+ * @param name The name of the sensor.
+ * @param parent The parent DetectorElement.
+ * @param support The physical support path.
+ * @param id The identifier of the sensor.
+ */
+ public HpsTestRunSiSensor(
int sensorid,
String name,
IDetectorElement parent,
String support,
IIdentifier id)
{
- super(sensorid, name, parent, support, id);
+ super(sensorid, name, parent, support, id);
}
-
-
+
+
- /**
- * Get the FPGA ID associated with this sensor.
- *
- * @return The FPGA ID
- */
- public int getFpgaID() {
- return fpgaID;
- }
+ /**
+ * Get the FPGA ID associated with this sensor.
+ *
+ * @return The FPGA ID
+ */
+ public int getFpgaID() {
+ return fpgaID;
+ }
- /**
- * Get the hybrid ID associated with this sensor.
- *
- * @return The hybrid ID
- */
- public int getHybridID() {
- return hybridID;
- }
+ /**
+ * Get the hybrid ID associated with this sensor.
+ *
+ * @return The hybrid ID
+ */
+ public int getHybridID() {
+ return hybridID;
+ }
- @Override
- public int getFebID(){
- throw new RuntimeException("This method is not supported for the HpsTestRunSiSensor.");
- }
-
- @Override
- public int getFebHybridID(){
- throw new RuntimeException("This method is not supported for the HpsTestRunSiSensor.");
- }
+ @Override
+ public int getFebID(){
+ throw new RuntimeException("This method is not supported for the HpsTestRunSiSensor.");
+ }
+
+ @Override
+ public int getFebHybridID(){
+ throw new RuntimeException("This method is not supported for the HpsTestRunSiSensor.");
+ }
- /**
- * Set the FPGA ID associated with this sensor.
- *
- * @param The FPGA ID
- */
- public void setFpgaID(int fpgaID) {
- this.fpgaID = fpgaID;
- }
+ /**
+ * Set the FPGA ID associated with this sensor.
+ *
+ * @param fpgaID The FPGA ID
+ */
+ public void setFpgaID(int fpgaID) {
+ this.fpgaID = fpgaID;
+ }
- /**
- * Set the hybrid ID associated with this sensor.
- *
- * @param The hybrid ID.
- */
- public void setHybridID(int hybridID) {
- this.hybridID = hybridID;
- }
-
- @Override
- public void setFebID(int febID) {
- throw new RuntimeException("This method is not supported for the HpsTestRunSiSensor.");
- }
-
- @Override
- public void setFebHybridID(int febHybridID) {
- throw new RuntimeException("This method is not supported for the HpsTestRunSiSensor.");
- }
+ /**
+ * Set the hybrid ID associated with this sensor.
+ *
+ * @param hybridID The hybrid ID.
+ */
+ public void setHybridID(int hybridID) {
+ this.hybridID = hybridID;
+ }
+
+ @Override
+ public void setFebID(int febID) {
+ throw new RuntimeException("This method is not supported for the HpsTestRunSiSensor.");
+ }
+
+ @Override
+ public void setFebHybridID(int febHybridID) {
+ throw new RuntimeException("This method is not supported for the HpsTestRunSiSensor.");
+ }
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition.java Wed Apr 27 11:11:32 2016
@@ -29,7 +29,7 @@
protected static final boolean use30mradRotation = true;
protected static final boolean useFakeHalfModuleAxialPos = false;
- // Global position references
+ // Global position references
protected static final double target_pos_wrt_base_plate_x = 162.3; //from Marco's 3D model
protected static final double target_pos_wrt_base_plate_y = 80.55; //from Tim's sketchup //68.75; //from Marco's 3D model
protected static final double target_pos_wrt_base_plate_z = 926.59; //from Marco's 3D model
@@ -208,7 +208,7 @@
protected void setPos() {
final double ball_pos_base_x = -1.0*target_pos_wrt_base_plate_x;
final double ball_pos_base_y = -1.0*target_pos_wrt_base_plate_y;
- final double ball_pos_base_z = target_pos_wrt_base_plate_z;
+ final double ball_pos_base_z = target_pos_wrt_base_plate_z;
final double vee_pos_base_x = ball_pos_base_x + BasePlate.base_plate_width;
final double vee_pos_base_y = ball_pos_base_y;
final double vee_pos_base_z = ball_pos_base_z;
@@ -230,7 +230,7 @@
public static class BasePlate extends SurveyVolume {
- // Base plate references
+ // Base plate references
public static final double base_plate_thickness = 0.25*inch;
public static final double base_plate_width = 385.00;
public static final double base_plate_length = 1216.00;
@@ -281,7 +281,7 @@
public CSupport(String name, SurveyVolume mother) {
super(name,mother, null);
init();
- }
+ }
private void calcAndSetFlatPos() {
if(use30mradRotation) {
// find the rotation to place the flat point
@@ -462,7 +462,7 @@
// this is referenced to the pin position of the c-support
private static final double ball_pos_csup_bearings_bottom_x = 240.0 - 265.0 + 14.0;
private static final double ball_pos_csup_bearings_bottom_y = (-6.0 + 22.0);
- private static final double ball_pos_csup_bearings_bottom_z = 14.7;
+ private static final double ball_pos_csup_bearings_bottom_z = 14.7;
private static final double vee_pos_csup_bearings_bottom_x = 240.0- 129.0;
private static final double vee_pos_csup_bearings_bottom_y = (-6.0 + 22.0);
private static final double vee_pos_csup_bearings_bottom_z = 14.7;
@@ -499,12 +499,12 @@
// make vectors
setBallPos(ball_pos_csup_bearings_bottom_x,ball_pos_csup_bearings_bottom_y,ball_pos_csup_bearings_bottom_z);
- setVeePos(vee_pos_csup_bearings_bottom_x,vee_pos_csup_bearings_bottom_y,vee_pos_csup_bearings_bottom_z);
+ setVeePos(vee_pos_csup_bearings_bottom_x,vee_pos_csup_bearings_bottom_y,vee_pos_csup_bearings_bottom_z);
setFlatPos(flat_pos_csup_bearings_bottom_x,flat_pos_csup_bearings_bottom_y,flat_pos_csup_bearings_bottom_z);
// create the coordinate system of the c-support bearings
- //HPSTestRunTracker2014GeomDef.Coord csup_bearings_bottom_coord = new HPSTestRunTracker2014GeomDef.Coord(ball_pos_csup_bearings_bottom, vee_pos_csup_bearings_bottom, flat_pos_csup_bearings_bottom);
+ //HPSTestRunTracker2014GeomDef.Coord csup_bearings_bottom_coord = new HPSTestRunTracker2014GeomDef.Coord(ball_pos_csup_bearings_bottom, vee_pos_csup_bearings_bottom, flat_pos_csup_bearings_bottom);
// since we don't care (no volume is built) about the local position of the bearings in the pin coord system we'll get rid of it
// and find the bearings position in the base coordinate system directly
@@ -579,11 +579,11 @@
public SupportPlateBottom(String name, SurveyVolume mother, SurveyVolume referenceGeom, String material) {
super(mother, referenceGeom, name, material);
- init();
+ init();
}
public SupportPlateBottom(String name, SurveyVolume mother, List<SurveyVolume> referenceGeom, String material) {
super(mother, referenceGeom, name, material);
- init();
+ init();
}
protected void setPos() {
if(debug) System.out.printf("%s: setPos for %s\n",this.getClass().getSimpleName(),getName());
@@ -755,11 +755,11 @@
public TestRunModule(String name, SurveyVolume mother, int layer, String half) {
super(name, mother,null,layer, half);
init();
- }
+ }
public TestRunModule(String name, SurveyVolume mother, SurveyVolume ref, int layer, String half) {
super(name, mother,null,ref,layer, half);
init();
- }
+ }
protected abstract double getColdBlockThickness();
protected abstract double getModuleBoxLength();
protected abstract double getModuleBoxWidth();
@@ -771,7 +771,7 @@
}
protected void setCenter() {
setCenter(getModuleBoxLength()/2.0-5.0, 0.0, getModuleBoxWidth()/2.0-box_extra_width/5.0);
- }
+ }
protected void setPos() {
if(debug) System.out.printf("%s: setPos for %s\n",this.getClass().getSimpleName(),getName());
@@ -787,7 +787,7 @@
case 2:
ballPos = new BasicHep3Vector(25.0, 561.1, SupportPlateBottom.pedestal_height_L2-SupportPlateBottom.support_plate_pocket_depth);
veePos = new BasicHep3Vector(95.0, 561.1, SupportPlateBottom.pedestal_height_L2-SupportPlateBottom.support_plate_pocket_depth);
- flatPos = new BasicHep3Vector(60.0, 567.10, SupportPlateBottom.pedestal_height_L2-SupportPlateBottom.support_plate_pocket_depth);
+ flatPos = new BasicHep3Vector(60.0, 567.10, SupportPlateBottom.pedestal_height_L2-SupportPlateBottom.support_plate_pocket_depth);
break;
case 3:
ballPos = new BasicHep3Vector(25.0, 461.1, SupportPlateBottom.pedestal_height_L3-SupportPlateBottom.support_plate_pocket_depth);
@@ -938,7 +938,7 @@
// Distance from CF edge to screw hole: 30mm
// Distance from screw hole to edge of cold block: 33.75mm
// Distance from edge of cold block to hole/ball position: 5mm
- protected static final double dist_sensor_center_to_coldblock_hole_vdir = (180.0 - 30.0 + (33.75 - 5.0)) - Sensor.length/2.0;
+ protected static final double dist_sensor_center_to_coldblock_hole_vdir = (180.0 - 30.0 + (33.75 - 5.0)) - Sensor.length/2.0;
protected static final double half_module_thickness = TestRunHalfModule.getHybridThickness() + TestRunHalfModule.getCFThickness() + HalfModuleLamination.thickness;
protected static final double half_module_length = TestRunHalfModule.getCFLength();
protected static final double half_module_width = 6.83 + Sensor.width;
@@ -1038,7 +1038,7 @@
if(useFakeHalfModuleAxialPos) {
ball_pos_halfmod_local_x = ball_pos_halfmod_local_x*2.0;
ball_pos_halfmod_local_y = -2.0*ball_pos_halfmod_local_y;
- }
+ }
final double ball_pos_halfmod_local_z = dist_lower_sensor_edge_to_cold_block_mounting_surface + Sensor.width/2.0;
@@ -1118,7 +1118,7 @@
// final double vee_pos_halfmod_local_z = ball_pos_halfmod_local_z;
// final double flat_pos_halfmod_local_x = ball_pos_halfmod_local_x;
// final double flat_pos_halfmod_local_y = ball_pos_halfmod_local_y + Sensor.getSensorThickness()/2.0;
-// final double flat_pos_halfmod_local_z = ball_pos_halfmod_local_z;
+// final double flat_pos_halfmod_local_z = ball_pos_halfmod_local_z;
// ballPos = new BasicHep3Vector(ball_pos_halfmod_local_x, ball_pos_halfmod_local_y, ball_pos_halfmod_local_z);
// veePos = new BasicHep3Vector(vee_pos_halfmod_local_x, vee_pos_halfmod_local_y,vee_pos_halfmod_local_z);
// flatPos = new BasicHep3Vector(flat_pos_halfmod_local_x, flat_pos_halfmod_local_y,flat_pos_halfmod_local_z);
@@ -1235,7 +1235,7 @@
}
- public static abstract class TestRunColdBlock extends SurveyVolume {
+ public static abstract class TestRunColdBlock extends SurveyVolume {
private int layer;
public TestRunColdBlock(String name, SurveyVolume mother, int layer) {
super(name, mother, null);
@@ -1284,7 +1284,7 @@
}
}
- public static class TestRunColdBlockL13 extends TestRunColdBlock {
+ public static class TestRunColdBlockL13 extends TestRunColdBlock {
protected static final double coldblock_L13_length = 82.00;
protected static final double coldblock_L13_width = 52.50;
protected static final double coldblock_L13_thickness = 6.00;
@@ -1306,7 +1306,7 @@
}
}
- public static class TestRunColdBlockL45 extends TestRunColdBlock {
+ public static class TestRunColdBlockL45 extends TestRunColdBlock {
protected static final double coldblock_L45_length = 82.00;
protected static final double coldblock_L45_width = 51.00;
protected static final double coldblock_L45_thickness = 6.00;
@@ -1358,7 +1358,7 @@
// if(useSiStripsConvention) {
// setBallPos(0,0,0);
// setVeePos(ballPos.x(), ballPos.y(), ballPos.z() + getSensorWidth()/2.0);
-// setFlatPos(ballPos.x() + getSensorLength()/2.0,ballPos.y(), ballPos.z());
+// setFlatPos(ballPos.x() + getSensorLength()/2.0,ballPos.y(), ballPos.z());
// } else {
// setBallPos(0,0,0);
// setVeePos(ballPos.x() + getSensorLength()/2.0, ballPos.y(), ballPos.z());
@@ -1392,7 +1392,7 @@
}
protected double getLength() {
return length;
- }
+ }
}
/**
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014JavaBuilder.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014JavaBuilder.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014JavaBuilder.java Wed Apr 27 11:11:32 2016
@@ -28,175 +28,175 @@
*/
public class HPSTestRunTracker2014JavaBuilder extends HPSTrackerJavaBuilder {
-
-
- /**
- * Default constructor
- * @param node
- */
- public HPSTestRunTracker2014JavaBuilder(boolean debugFlag, Element node) {
- super(debugFlag, node);
- }
-
-
-
- /**
- * Build the JAVA geometry objects from the geometry definition.
- * @param trackingVolume - the reference volume.
- */
- public void build(ILogicalVolume trackingVolume) {
-
- // build geometry
+
+
+ /**
+ * Default constructor
+ * @param node
+ */
+ public HPSTestRunTracker2014JavaBuilder(boolean debugFlag, Element node) {
+ super(debugFlag, node);
+ }
+
+
+
+ /**
+ * Build the JAVA geometry objects from the geometry definition.
+ * @param trackingVolume - the reference volume.
+ */
+ public void build(ILogicalVolume trackingVolume) {
+
+ // build geometry
setBuilder(createGeometryDefinition(_debug, node));
-
- if(_builder==null) throw new RuntimeException("need to set builder class before calling build!");
-
- if(isDebug()) System.out.printf("%s: build the base geometry objects\n", getClass().getSimpleName());
-
- _builder.build();
-
- if(isDebug()) System.out.printf("%s: DONE build the base geometry objects\n", getClass().getSimpleName());
-
- if(isDebug()) System.out.printf("%s: build the JAVA geometry objects\n", getClass().getSimpleName());
-
- // initialize the list to store a reference to each object
- javaSurveyVolumes = new ArrayList<JavaSurveyVolume>();
-
- // Go through the list of volumes to build that is created in the generic builder class
- JavaSurveyVolume trackingGeometry = new JavaSurveyVolume(_builder.getSurveyVolume(org.lcsim.geometry.compact.converter.HPSTrackerGeometryDefinition.TrackingVolume.class), trackingVolume);
- add(trackingGeometry);
- //setBaseTrackerGeometry(new GhostJavaBaseGeom(_builder.getBaseGeometry(Base.class), trackingGeometry));
+
+ if(_builder==null) throw new RuntimeException("need to set builder class before calling build!");
+
+ if(isDebug()) System.out.printf("%s: build the base geometry objects\n", getClass().getSimpleName());
+
+ _builder.build();
+
+ if(isDebug()) System.out.printf("%s: DONE build the base geometry objects\n", getClass().getSimpleName());
+
+ if(isDebug()) System.out.printf("%s: build the JAVA geometry objects\n", getClass().getSimpleName());
+
+ // initialize the list to store a reference to each object
+ javaSurveyVolumes = new ArrayList<JavaSurveyVolume>();
+
+ // Go through the list of volumes to build that is created in the generic builder class
+ JavaSurveyVolume trackingGeometry = new JavaSurveyVolume(_builder.getSurveyVolume(org.lcsim.geometry.compact.converter.HPSTrackerGeometryDefinition.TrackingVolume.class), trackingVolume);
+ add(trackingGeometry);
+ //setBaseTrackerGeometry(new GhostJavaBaseGeom(_builder.getBaseGeometry(Base.class), trackingGeometry));
setBaseTrackerGeometry(new JavaSurveyVolume(_builder.getSurveyVolume(TrackerEnvelope.class), trackingGeometry,1));
add(getBaseTrackerGeometry());
- JavaSurveyVolume basePlateGeometry = new JavaGhostSurveyVolume(_builder.getSurveyVolume(BasePlate.class), getBaseTrackerGeometry());
- add(basePlateGeometry);
- // skip the c-support, this is purely a reference volume in the builder so should have no use here!?
- //JavaBaseGeometry cSupportGeometry = new GhostJavaBaseGeom(_builder.getBaseGeometry(CSupport.class), baseTrackerGeometry);
- //add(cSupportGeometry);
- JavaSurveyVolume supportBottomGeometry = new JavaGhostSurveyVolume(_builder.getSurveyVolume(SupportBottom.class), getBaseTrackerGeometry());
- add(supportBottomGeometry);
- JavaSurveyVolume supportPlateBottomGeometry = new JavaGhostSurveyVolume(_builder.getSurveyVolume(SupportPlateBottom.class), getBaseTrackerGeometry());
- add(supportPlateBottomGeometry);
- JavaSurveyVolume supportTopGeometry = new JavaGhostSurveyVolume(_builder.getSurveyVolume(SupportTop.class), getBaseTrackerGeometry());
- add(supportTopGeometry);
- JavaSurveyVolume supportPlateTopGeometry = new JavaGhostSurveyVolume(_builder.getSurveyVolume(SupportPlateTop.class), getBaseTrackerGeometry());
- add(supportPlateTopGeometry);
-
- // build modules
-
- if(isDebug()) System.out.printf("%s: build JAVA modules\n", getClass().getSimpleName());
-
- // Loop over all modules created
- for(BaseModuleBundle mod : _builder.modules) {
- TestRunModuleBundle m = (TestRunModuleBundle) mod;
- if(isDebug()) {
- System.out.printf("%s: build module %s (layer %d half %s)\n", getClass().getSimpleName(),m.module.getName(),m.getLayer(),m.getHalf());
- m.print();
- }
-
- // Find the mother among the objects using its name, should probably have a better way...
- String name_mother = m.getMother().getName();
- JavaSurveyVolume mother = null;
- for(JavaSurveyVolume g : javaSurveyVolumes) {
- if(g.getName().equals(name_mother)) {
- mother = g;
- break;
- }
- }
- // Check that it had a mother
- if(mother==null) throw new RuntimeException("Cound't find mother to module " + m.module.getName());
-
- if(isDebug()) System.out.printf("%s: found mother %s to module %s\n", getClass().getSimpleName(),mother.getName(),m.module.getName());
-
- // put the module in the list of objects that will be added to LCDD
- addTestRunModule(m, mother);
-
- if(isDebug()) System.out.printf("%s: DONE build module %s\n", getClass().getSimpleName(), m.module.getName());
-
-
- }
-
- if(isDebug()) System.out.printf("%s: DONE build JAVA modules\n", getClass().getSimpleName());
-
-
- if(isDebug()) System.out.printf("%s: DONE building the JAVA geometry objects\n", getClass().getSimpleName());
- if(isDebug()) {
- System.out.printf("%s: DONE building the JAVA geometry objects\n", getClass().getSimpleName());
- System.out.printf("%s: List of all the JAVA geometry objects built\n", this.getClass().getSimpleName());
- for(JavaSurveyVolume bg : javaSurveyVolumes) {
- System.out.printf("-------\n%s\n", bg.toString());
- }
- }
-
-
- // Set visualization features
- //setVis();
-
-
- }
-
- /**
- * Rules for adding the JAVA module geometry.
- * @param bundle - module to be added
- * @param mother - mother JAVA geometry object
- */
- protected void addTestRunModule(TestRunModuleBundle bundle, JavaSurveyVolume mother) {
-
- if(isDebug()) {
- System.out.printf("%s: addModule %s containing:\n",this.getClass().getSimpleName(), bundle.module.getName());
- bundle.print();
- }
-
- // Create the module
- JavaSurveyVolume lcddM = new JavaGhostSurveyVolume(bundle.module, mother);
- add(lcddM);
-
- // add half modules
- if(bundle.halfModuleAxial!=null) addHalfModule((TestRunHalfModuleBundle)bundle.halfModuleAxial,lcddM);
+ JavaSurveyVolume basePlateGeometry = new JavaGhostSurveyVolume(_builder.getSurveyVolume(BasePlate.class), getBaseTrackerGeometry());
+ add(basePlateGeometry);
+ // skip the c-support, this is purely a reference volume in the builder so should have no use here!?
+ //JavaBaseGeometry cSupportGeometry = new GhostJavaBaseGeom(_builder.getBaseGeometry(CSupport.class), baseTrackerGeometry);
+ //add(cSupportGeometry);
+ JavaSurveyVolume supportBottomGeometry = new JavaGhostSurveyVolume(_builder.getSurveyVolume(SupportBottom.class), getBaseTrackerGeometry());
+ add(supportBottomGeometry);
+ JavaSurveyVolume supportPlateBottomGeometry = new JavaGhostSurveyVolume(_builder.getSurveyVolume(SupportPlateBottom.class), getBaseTrackerGeometry());
+ add(supportPlateBottomGeometry);
+ JavaSurveyVolume supportTopGeometry = new JavaGhostSurveyVolume(_builder.getSurveyVolume(SupportTop.class), getBaseTrackerGeometry());
+ add(supportTopGeometry);
+ JavaSurveyVolume supportPlateTopGeometry = new JavaGhostSurveyVolume(_builder.getSurveyVolume(SupportPlateTop.class), getBaseTrackerGeometry());
+ add(supportPlateTopGeometry);
+
+ // build modules
+
+ if(isDebug()) System.out.printf("%s: build JAVA modules\n", getClass().getSimpleName());
+
+ // Loop over all modules created
+ for(BaseModuleBundle mod : _builder.modules) {
+ TestRunModuleBundle m = (TestRunModuleBundle) mod;
+ if(isDebug()) {
+ System.out.printf("%s: build module %s (layer %d half %s)\n", getClass().getSimpleName(),m.module.getName(),m.getLayer(),m.getHalf());
+ m.print();
+ }
+
+ // Find the mother among the objects using its name, should probably have a better way...
+ String name_mother = m.getMother().getName();
+ JavaSurveyVolume mother = null;
+ for(JavaSurveyVolume g : javaSurveyVolumes) {
+ if(g.getName().equals(name_mother)) {
+ mother = g;
+ break;
+ }
+ }
+ // Check that it had a mother
+ if(mother==null) throw new RuntimeException("Cound't find mother to module " + m.module.getName());
+
+ if(isDebug()) System.out.printf("%s: found mother %s to module %s\n", getClass().getSimpleName(),mother.getName(),m.module.getName());
+
+ // put the module in the list of objects that will be added to LCDD
+ addTestRunModule(m, mother);
+
+ if(isDebug()) System.out.printf("%s: DONE build module %s\n", getClass().getSimpleName(), m.module.getName());
+
+
+ }
+
+ if(isDebug()) System.out.printf("%s: DONE build JAVA modules\n", getClass().getSimpleName());
+
+
+ if(isDebug()) System.out.printf("%s: DONE building the JAVA geometry objects\n", getClass().getSimpleName());
+ if(isDebug()) {
+ System.out.printf("%s: DONE building the JAVA geometry objects\n", getClass().getSimpleName());
+ System.out.printf("%s: List of all the JAVA geometry objects built\n", this.getClass().getSimpleName());
+ for(JavaSurveyVolume bg : javaSurveyVolumes) {
+ System.out.printf("-------\n%s\n", bg.toString());
+ }
+ }
+
+
+ // Set visualization features
+ //setVis();
+
+
+ }
+
+ /**
+ * Rules for adding the JAVA module geometry.
+ * @param bundle - module to be added
+ * @param mother - mother JAVA geometry object
+ */
+ protected void addTestRunModule(TestRunModuleBundle bundle, JavaSurveyVolume mother) {
+
+ if(isDebug()) {
+ System.out.printf("%s: addModule %s containing:\n",this.getClass().getSimpleName(), bundle.module.getName());
+ bundle.print();
+ }
+
+ // Create the module
+ JavaSurveyVolume lcddM = new JavaGhostSurveyVolume(bundle.module, mother);
+ add(lcddM);
+
+ // add half modules
+ if(bundle.halfModuleAxial!=null) addHalfModule((TestRunHalfModuleBundle)bundle.halfModuleAxial,lcddM);
if(bundle.halfModuleStereo!=null) addHalfModule((TestRunHalfModuleBundle)bundle.halfModuleStereo,lcddM);
-
-
- if(isDebug()) {
- System.out.printf("%s: DONE addModule %s \n",this.getClass().getSimpleName(), bundle.module.getName());
- }
-
- }
-
-
- /**
- * Rules for adding the JAVA half module geometry.
- * @param bundle - module to be added
- * @param mother - mother JAVA geometry object
- */
- private void addHalfModule(TestRunHalfModuleBundle bundle, JavaSurveyVolume mother) {
- // Create the half-module
- // This is not a ghost element but reflects the module
- // concept in the old compact description
- // TODO fix the layer IDs
- int oldCompactModuleId = 0;
- JavaSurveyVolume lcddHM = new JavaSurveyVolume(bundle.halfModule, mother,oldCompactModuleId);
- add(lcddHM);
-
- // ComponentNumber is taken from old geometry where it is simply a counter when adding the xml daughters to the TestRunModule.
- // It is simply 0 for sensor and 1 for carbon fiber in the old geometry
- int componentNumber = ((Sensor)bundle.sensor).getId();
-
- // create the sensor
- JavaSurveyVolume lcddS = new JavaSurveyVolume(bundle.sensor, lcddHM, componentNumber);
- add(lcddS);
-
- // create the active sensor
- JavaSurveyVolume lcddAS = new JavaSurveyVolume(bundle.activeSensor, lcddS, componentNumber);
- add(lcddAS);
-
-// if(isDebug()) {
-// System.out.printf("%s: added sensor %s \n",this.getClass().getSimpleName(), lcddS.getName());
-// System.out.printf("%s: local coordinate system\n%s\n",this.getClass().getSimpleName(), bundle.sensor.getCoord().toString());
-// dsd
-// }
-
-
- }
+
+
+ if(isDebug()) {
+ System.out.printf("%s: DONE addModule %s \n",this.getClass().getSimpleName(), bundle.module.getName());
+ }
+
+ }
+
+
+ /**
+ * Rules for adding the JAVA half module geometry.
+ * @param bundle - module to be added
+ * @param mother - mother JAVA geometry object
+ */
+ private void addHalfModule(TestRunHalfModuleBundle bundle, JavaSurveyVolume mother) {
+ // Create the half-module
+ // This is not a ghost element but reflects the module
+ // concept in the old compact description
+ // TODO fix the layer IDs
+ int oldCompactModuleId = 0;
+ JavaSurveyVolume lcddHM = new JavaSurveyVolume(bundle.halfModule, mother,oldCompactModuleId);
+ add(lcddHM);
+
+ // ComponentNumber is taken from old geometry where it is simply a counter when adding the xml daughters to the TestRunModule.
+ // It is simply 0 for sensor and 1 for carbon fiber in the old geometry
+ int componentNumber = ((Sensor)bundle.sensor).getId();
+
+ // create the sensor
+ JavaSurveyVolume lcddS = new JavaSurveyVolume(bundle.sensor, lcddHM, componentNumber);
+ add(lcddS);
+
+ // create the active sensor
+ JavaSurveyVolume lcddAS = new JavaSurveyVolume(bundle.activeSensor, lcddS, componentNumber);
+ add(lcddAS);
+
+// if(isDebug()) {
+// System.out.printf("%s: added sensor %s \n",this.getClass().getSimpleName(), lcddS.getName());
+// System.out.printf("%s: local coordinate system\n%s\n",this.getClass().getSimpleName(), bundle.sensor.getCoord().toString());
+// dsd
+// }
+
+
+ }
@@ -206,8 +206,8 @@
}
-
-
-
+
+
+
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014LCDDBuilder.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014LCDDBuilder.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014LCDDBuilder.java Wed Apr 27 11:11:32 2016
@@ -26,157 +26,157 @@
public class HPSTestRunTracker2014LCDDBuilder extends HPSTrackerLCDDBuilder {
- public HPSTestRunTracker2014LCDDBuilder(boolean debugFlag, Element node, LCDD lcdd, SensitiveDetector sens) {
- super(debugFlag, node, lcdd, sens);
- }
+ public HPSTestRunTracker2014LCDDBuilder(boolean debugFlag, Element node, LCDD lcdd, SensitiveDetector sens) {
+ super(debugFlag, node, lcdd, sens);
+ }
-
- public void setBuilder() {
- setBuilder();
- }
-
- public void build(Volume worldVolume) {
-
- // set and build geometry
+
+ public void setBuilder() {
+ setBuilder();
+ }
+
+ public void build(Volume worldVolume) {
+
+ // set and build geometry
setBuilder(createGeometryDefinition(_debug, node));
- if(_builder==null) throw new RuntimeException("need to set builder class before calling build!");
+ if(_builder==null) throw new RuntimeException("need to set builder class before calling build!");
- if(isDebug()) System.out.printf("%s: build the base geometry objects\n", getClass().getSimpleName());
+ if(isDebug()) System.out.printf("%s: build the base geometry objects\n", getClass().getSimpleName());
- _builder.build();
+ _builder.build();
- if(isDebug()) System.out.printf("%s: DONE build the base geometry objects\n", getClass().getSimpleName());
+ if(isDebug()) System.out.printf("%s: DONE build the base geometry objects\n", getClass().getSimpleName());
- if(isDebug()) System.out.printf("%s: build the LCDD geometry objects\n", getClass().getSimpleName());
+ if(isDebug()) System.out.printf("%s: build the LCDD geometry objects\n", getClass().getSimpleName());
-
- // Go through the list of volumes to build that is created in the generic builder class
- // TODO this is manual now since I don't have a way of knowing in the generic builder class what is a ghost volume at this point.
- LCDDSurveyVolume trackingGeometry = new LCDDSurveyVolume(_builder.getSurveyVolume(org.lcsim.geometry.compact.converter.HPSTrackerGeometryDefinition.TrackingVolume.class), worldVolume);
- add(trackingGeometry);
- baseSurveyVolume = new LCDDSurveyVolume(_builder.getSurveyVolume(TrackerEnvelope.class), lcdd, trackingGeometry);
- add(baseSurveyVolume);
- LCDDSurveyVolume basePlateGeometry = new LCDDSurveyVolume(_builder.getSurveyVolume(BasePlate.class), lcdd, baseSurveyVolume);
- add(basePlateGeometry);
- // TODO I don't think this c-support has any use at all since the coordinates of it has been already used in the builder. Should remove?
- LCDDSurveyVolume cSupportGeometry = new LCDDGhostSurveyVolume(_builder.getSurveyVolume(CSupport.class), baseSurveyVolume);
- add(cSupportGeometry);
- LCDDSurveyVolume supportBottomGeometry = new LCDDGhostSurveyVolume(_builder.getSurveyVolume(SupportBottom.class), baseSurveyVolume);
- add(supportBottomGeometry);
- LCDDSurveyVolume supportPlateBottomGeometry = new LCDDSurveyVolume(_builder.getSurveyVolume(SupportPlateBottom.class), lcdd, baseSurveyVolume);
- add(supportPlateBottomGeometry);
- LCDDSurveyVolume supportTopGeometry = new LCDDGhostSurveyVolume(_builder.getSurveyVolume(SupportTop.class), baseSurveyVolume);
- add(supportTopGeometry);
- LCDDSurveyVolume supportPlateTopGeometry = new LCDDSurveyVolume(_builder.getSurveyVolume(SupportPlateTop.class), lcdd, baseSurveyVolume);
- add(supportPlateTopGeometry);
+
+ // Go through the list of volumes to build that is created in the generic builder class
+ // TODO this is manual now since I don't have a way of knowing in the generic builder class what is a ghost volume at this point.
+ LCDDSurveyVolume trackingGeometry = new LCDDSurveyVolume(_builder.getSurveyVolume(org.lcsim.geometry.compact.converter.HPSTrackerGeometryDefinition.TrackingVolume.class), worldVolume);
+ add(trackingGeometry);
+ baseSurveyVolume = new LCDDSurveyVolume(_builder.getSurveyVolume(TrackerEnvelope.class), lcdd, trackingGeometry);
+ add(baseSurveyVolume);
+ LCDDSurveyVolume basePlateGeometry = new LCDDSurveyVolume(_builder.getSurveyVolume(BasePlate.class), lcdd, baseSurveyVolume);
+ add(basePlateGeometry);
+ // TODO I don't think this c-support has any use at all since the coordinates of it has been already used in the builder. Should remove?
+ LCDDSurveyVolume cSupportGeometry = new LCDDGhostSurveyVolume(_builder.getSurveyVolume(CSupport.class), baseSurveyVolume);
+ add(cSupportGeometry);
+ LCDDSurveyVolume supportBottomGeometry = new LCDDGhostSurveyVolume(_builder.getSurveyVolume(SupportBottom.class), baseSurveyVolume);
+ add(supportBottomGeometry);
+ LCDDSurveyVolume supportPlateBottomGeometry = new LCDDSurveyVolume(_builder.getSurveyVolume(SupportPlateBottom.class), lcdd, baseSurveyVolume);
+ add(supportPlateBottomGeometry);
+ LCDDSurveyVolume supportTopGeometry = new LCDDGhostSurveyVolume(_builder.getSurveyVolume(SupportTop.class), baseSurveyVolume);
+ add(supportTopGeometry);
+ LCDDSurveyVolume supportPlateTopGeometry = new LCDDSurveyVolume(_builder.getSurveyVolume(SupportPlateTop.class), lcdd, baseSurveyVolume);
+ add(supportPlateTopGeometry);
- // build modules
+ // build modules
- if(isDebug()) System.out.printf("%s: build modules\n", getClass().getSimpleName());
+ if(isDebug()) System.out.printf("%s: build modules\n", getClass().getSimpleName());
- // Loop over all modules created
- for(BaseModuleBundle mod : _builder.modules) {
- TestRunModuleBundle m = (TestRunModuleBundle) mod;
- if(isDebug()) {
- System.out.printf("%s: module layer %d half %s\n", getClass().getSimpleName(),m.getLayer(),m.getHalf());
- m.print();
- }
+ // Loop over all modules created
+ for(BaseModuleBundle mod : _builder.modules) {
+ TestRunModuleBundle m = (TestRunModuleBundle) mod;
+ if(isDebug()) {
+ System.out.printf("%s: module layer %d half %s\n", getClass().getSimpleName(),m.getLayer(),m.getHalf());
+ m.print();
+ }
- // Find the mother among the LCDD objects using its name, should probably have a better way...
- String name_mother = m.getMother().getName();
- LCDDSurveyVolume mother = null;
- for(LCDDSurveyVolume g : lcddSurveyVolumes) {
- if(g.getName().equals(name_mother)) {
- mother = g;
- break;
- }
- }
- // Check that it had a mother
- if(mother==null) throw new RuntimeException("Cound't find mother to module layer " + m.getLayer() + " half "+ m.getHalf());
+ // Find the mother among the LCDD objects using its name, should probably have a better way...
+ String name_mother = m.getMother().getName();
+ LCDDSurveyVolume mother = null;
+ for(LCDDSurveyVolume g : lcddSurveyVolumes) {
+ if(g.getName().equals(name_mother)) {
+ mother = g;
+ break;
+ }
+ }
+ // Check that it had a mother
+ if(mother==null) throw new RuntimeException("Cound't find mother to module layer " + m.getLayer() + " half "+ m.getHalf());
- if(isDebug()) System.out.printf("%s: found mother %s for module layer %d half %s\n", getClass().getSimpleName(),mother.getName(),m.getLayer(),m.getHalf());
+ if(isDebug()) System.out.printf("%s: found mother %s for module layer %d half %s\n", getClass().getSimpleName(),mother.getName(),m.getLayer(),m.getHalf());
- // add the module to the list of objects that will be added to LCDD
- addTestRunModule(m, mother);
+ // add the module to the list of objects that will be added to LCDD
+ addTestRunModule(m, mother);
- }
+ }
- if(isDebug()) {
- System.out.printf("%s: DONE building the LCDD geometry objects\n", getClass().getSimpleName());
- System.out.printf("%s: List of all %d LCDD geometry objects built\n", this.getClass().getSimpleName(), lcddSurveyVolumes.size());
- for(SurveyVolumeImpl bg : lcddSurveyVolumes) {
- System.out.printf("-------\n%s\n", bg.toString());
- }
- }
+ if(isDebug()) {
+ System.out.printf("%s: DONE building the LCDD geometry objects\n", getClass().getSimpleName());
+ System.out.printf("%s: List of all %d LCDD geometry objects built\n", this.getClass().getSimpleName(), lcddSurveyVolumes.size());
+ for(SurveyVolumeImpl bg : lcddSurveyVolumes) {
+ System.out.printf("-------\n%s\n", bg.toString());
+ }
+ }
- // Set visualization features
- setVisualization();
+ // Set visualization features
+ setVisualization();
- }
+ }
- /**
- * Rules for adding the LCDD module geometry.
- * @param bundle - module to be added
- * @param mother - mother LCDD geometry object
- */
- protected void addTestRunModule(TestRunModuleBundle bundle, LCDDSurveyVolume mother) {
- // This could perhaps be fixed if there is a relation with daughters in geometry definition?
- // create the module
- LCDDSurveyVolume lcddM = new LCDDSurveyVolume(bundle.module, lcdd, mother);
- add(lcddM);
- if(bundle.halfModuleAxial!=null) addTestRunHalfModule(bundle.halfModuleAxial,lcddM);
- if(bundle.coldBlock!=null) add(new LCDDSurveyVolume(bundle.coldBlock, lcdd, lcddM));
- if(bundle.halfModuleStereo!=null) addTestRunHalfModule((TestRunHalfModuleBundle)bundle.halfModuleStereo,lcddM);
+ /**
+ * Rules for adding the LCDD module geometry.
+ * @param bundle - module to be added
+ * @param mother - mother LCDD geometry object
+ */
+ protected void addTestRunModule(TestRunModuleBundle bundle, LCDDSurveyVolume mother) {
+ // This could perhaps be fixed if there is a relation with daughters in geometry definition?
+ // create the module
+ LCDDSurveyVolume lcddM = new LCDDSurveyVolume(bundle.module, lcdd, mother);
+ add(lcddM);
+ if(bundle.halfModuleAxial!=null) addTestRunHalfModule(bundle.halfModuleAxial,lcddM);
+ if(bundle.coldBlock!=null) add(new LCDDSurveyVolume(bundle.coldBlock, lcdd, lcddM));
+ if(bundle.halfModuleStereo!=null) addTestRunHalfModule((TestRunHalfModuleBundle)bundle.halfModuleStereo,lcddM);
// if(bundle.halfModuleAxial!=null) addHalfModule((TestRunHalfModuleBundle)bundle.halfModuleAxial,lcddM);
// if(bundle.coldBlock!=null) add(new LCDDSurveyVolume(bundle.coldBlock, lcdd, lcddM));
// if(bundle.halfModuleStereo!=null) addHalfModule((TestRunHalfModuleBundle)bundle.halfModuleStereo,lcddM);
- }
+ }
- /**
- * Rules for adding the LCDD half module geometry.
- * @param bundle - module to be added
- * @param mother - mother LCDD geometry object
- */
- protected void addTestRunHalfModule(HalfModuleBundle bundle2, LCDDSurveyVolume mother) {
- // This could perhaps be fixed if there is a relation with daughters in geometry definition?
- TestRunHalfModuleBundle bundle = (TestRunHalfModuleBundle) bundle2;
-
- // create the half-module
- LCDDSurveyVolume lcddHM = new LCDDSurveyVolume(bundle.halfModule, lcdd, mother);
- add(lcddHM);
- // create the sensor
- LCDDSurveyVolume lcddS = new LCDDSurveyVolume(bundle.sensor, lcdd, lcddHM);
- add(lcddS);
- // create the active sensor
- LCDDSurveyVolume lcddAS = new LCDDSurveyVolume(bundle.activeSensor, lcdd, lcddS);
- add(lcddAS);
- // create the lamination
- LCDDSurveyVolume lcddL = new LCDDSurveyVolume(bundle.lamination, lcdd, lcddHM);
- add(lcddL);
- // create the carbon fiber frame
- LCDDSurveyVolume lcddCF = new LCDDSurveyVolume(bundle.carbonFiber, lcdd, lcddHM);
- add(lcddCF);
- // create the hybrid frame
- LCDDSurveyVolume lcddH = new LCDDSurveyVolume(bundle.hybrid, lcdd, lcddHM);
- add(lcddH);
+ /**
+ * Rules for adding the LCDD half module geometry.
+ * @param bundle2 - module to be added
+ * @param mother - mother LCDD geometry object
+ */
+ protected void addTestRunHalfModule(HalfModuleBundle bundle2, LCDDSurveyVolume mother) {
+ // This could perhaps be fixed if there is a relation with daughters in geometry definition?
+ TestRunHalfModuleBundle bundle = (TestRunHalfModuleBundle) bundle2;
+
+ // create the half-module
+ LCDDSurveyVolume lcddHM = new LCDDSurveyVolume(bundle.halfModule, lcdd, mother);
+ add(lcddHM);
+ // create the sensor
+ LCDDSurveyVolume lcddS = new LCDDSurveyVolume(bundle.sensor, lcdd, lcddHM);
+ add(lcddS);
+ // create the active sensor
+ LCDDSurveyVolume lcddAS = new LCDDSurveyVolume(bundle.activeSensor, lcdd, lcddS);
+ add(lcddAS);
+ // create the lamination
+ LCDDSurveyVolume lcddL = new LCDDSurveyVolume(bundle.lamination, lcdd, lcddHM);
+ add(lcddL);
+ // create the carbon fiber frame
+ LCDDSurveyVolume lcddCF = new LCDDSurveyVolume(bundle.carbonFiber, lcdd, lcddHM);
+ add(lcddCF);
+ // create the hybrid frame
+ LCDDSurveyVolume lcddH = new LCDDSurveyVolume(bundle.hybrid, lcdd, lcddHM);
+ add(lcddH);
- }
+ }
@Override
public HPSTrackerGeometryDefinition createGeometryDefinition(boolean debug,
Element node) {
return new HPSTestRunTracker2014GeometryDefinition(_debug, node);
- }
+ }
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition.java Wed Apr 27 11:11:32 2016
@@ -27,8 +27,8 @@
*
*/
public class HPSTracker2014GeometryDefinition extends HPSTrackerGeometryDefinition {
-
- private static final Logger LOGGER = Logger.getLogger(HPSTracker2014GeometryDefinition.class.getPackage().getName());
+
+ private static final Logger LOGGER = Logger.getLogger(HPSTracker2014GeometryDefinition.class.getPackage().getName());
public HPSTracker2014GeometryDefinition(boolean debug, Element node) {
super(debug, node);
@@ -38,8 +38,7 @@
doBottom = true;
doTop = true;
layerBitMask = 0x3F;
- }
-
+ }
/* (non-Javadoc)
* @see org.lcsim.geometry.compact.converter.HPSTrackerBuilder#build()
@@ -47,12 +46,11 @@
public void build() {
if(isDebug()) System.out.printf("%s: constructing the geometry objects\n", this.getClass().getSimpleName());
-
// Create alignment correction objects
// THis is really a ugly approach with MP corrections initialized before and
// the survey corrections based on the XML node
- // FIX THIS! //TODO
+ // TODO: FIX THIS!
AlignmentCorrection alignmentCorrections = new AlignmentCorrection();
alignmentCorrections.setNode(node);
AlignmentCorrection supBotCorr = getL13UChannelAlignmentCorrection(false);
@@ -74,25 +72,19 @@
SvtBoxBasePlate svtBoxBasePlate = new SvtBoxBasePlate("base_plate",svtBox,null);
surveyVolumes.add(svtBoxBasePlate);
-
-
-
+
SupportRingL13BottomKinMount supportRingKinL13Bottom = new SupportRingL13BottomKinMount("c_support_kin_L13b", svtBox, supBotCorr);
- surveyVolumes.add(supportRingKinL13Bottom);
-
+ surveyVolumes.add(supportRingKinL13Bottom);
UChannelL13 uChannelL13Bottom = new UChannelL13Bottom("support_bottom_L13", svtBox, alignmentCorrections, supportRingKinL13Bottom);
surveyVolumes.add(uChannelL13Bottom);
UChannelL13Plate uChannelL13BottomPlate = new UChannelL13BottomPlate("support_plate_bottom_L13", svtBox, null, uChannelL13Bottom);
surveyVolumes.add(uChannelL13BottomPlate);
-
SupportRingL13TopKinMount supportRingKinL13Top = new SupportRingL13TopKinMount("c_support_kin_L13t", svtBox, supTopCorr);
surveyVolumes.add(supportRingKinL13Top);
-
-
-
+
UChannelL13Top uChannelL13Top = new UChannelL13Top("support_top_L13", svtBox, alignmentCorrections, supportRingKinL13Top);
surveyVolumes.add(uChannelL13Top);
@@ -110,7 +102,6 @@
UChannelL46Plate uChannelL46TopPlate = new UChannelL46TopPlate("support_plate_top_L46", svtBox, null, uChannelL46Top);
surveyVolumes.add(uChannelL46TopPlate);
-
for(int l=1; l<=6;++l) {
if(doLayer(l)) {
@@ -135,30 +126,20 @@
bundle.print();
}
}
-
-
-
-
-
- }
-
+ }
/**
* {@link SurveyVolume} volume defining the pair spectrometer (PS) vacuum chamber
* Reference: tracking volume coordinate system
* Origin: same as reference
* Orientation: u - points in x direction (towards positron side), v - points upstream
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public static class PSVacuumChamber extends SurveyVolume {
+
public static final double height = PS_vac_box_inner_height;
public static final double width = PS_vac_box_inner_width;
public static final double length = PS_vac_box_inner_length;
-
-
-
+
public PSVacuumChamber(String name, SurveyVolume mother, AlignmentCorrection alignmentCorrection) {
super(name, mother, alignmentCorrection);
init();
@@ -184,9 +165,6 @@
* Reference: PS vacuum chamber coordinate system. Note that the PS vacuum chamber box is placed w.r.t. this box and the target positions.
* Origin: intersection of midplanes vertically and horizontally
* Orientation: same as reference
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public static class SvtBox extends SurveyVolume {
public static final double height = 6.740*inch;
@@ -198,10 +176,7 @@
public static final double center_to_target_z = 13.777*inch;
public static final double center_to_target_x = 0.84*inch;
public static final double center_to_target_y = 0.0;
-
-
-
-
+
public SvtBox(String name, SurveyVolume mother, AlignmentCorrection alignmentCorrection) {
super(name, mother, alignmentCorrection);
init();
@@ -217,8 +192,7 @@
ballPos = new BasicHep3Vector(0, 0, 0);
veePos = new BasicHep3Vector(ballPos.x()+1, ballPos.y(), ballPos.z());
- flatPos = new BasicHep3Vector(ballPos.x(), ballPos.y()+1, ballPos.z());
-
+ flatPos = new BasicHep3Vector(ballPos.x(), ballPos.y()+1, ballPos.z());
}
}
@@ -228,9 +202,6 @@
* Reference: {@link SvtBox} coordinate system.
* Origin: surface of base plate intersection with center of hole for adjustment screw on positron side
* Orientation: same as reference
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public static class SvtBoxBasePlate extends SurveyVolume {
public static final double length = 50.5*inch;
@@ -268,16 +239,11 @@
}
-
-
-
/**
* {@link SurveyVolume} volume defining the coordinate system of the support ring
* Reference: @SvtBoxBasePlate
* Origin: pin position of support ring (electron side)
* Orientation: slot position is vee position (positron side) i.e u points towards the positron side and v in the upstream beam direction
- *
- * @author Per Hansson Adrian <[log in to unmask]>
*
*/
public static class SupportRing extends SurveyVolume {
@@ -315,14 +281,9 @@
flatPos = VecOp.add(ballPos, vPrime);
}
}
-
-
-
+
/**
* Abstract {@link SurveyVolume} volume defining a coordinate system from the kinematic mount positions for support channels
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public abstract static class SupportRingL13KinMount extends SurveyVolume {
@@ -360,9 +321,6 @@
* Reference: {@link SvtBox} coordinate system
* Origin: cone mount (it's on the electron side)
* Orientation: ball is cone mount, slot mount is vee position and flat is along beam line pointing upstream
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public static class SupportRingL13BottomKinMount extends SupportRingL13KinMount {
@@ -385,9 +343,6 @@
* Reference: @SupportRing coordinate system
* Origin: cone mount (it's on the electron side)
* Orientation: ball is cone mount, slot mount is vee position and flat is along beamline pointing upstream
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public static class SupportRingL13TopKinMount extends SupportRingL13KinMount {
//public static final double mount_surface_wrt_baseplate_vertically = 5.388*inch;
@@ -420,12 +375,9 @@
// flatPos = new BasicHep3Vector(flat_pos_x,flat_pos_y,flat_pos_z);
// }
}
-
-
- /**
- * Abstract {@link SurveyVolume} volume defining the coordinate system of the L1-3 u-channels
- *
- * @author Per Hansson Adrian <[log in to unmask]>
+
+ /**
+ * Abstract {@link SurveyVolume} volume defining the coordinate system of the L1-3 u-channels
*/
public abstract static class UChannelL13 extends SurveyVolume {
public final static double length = UChannelL13Plate.length;
@@ -457,9 +409,6 @@
* Reference: {@link SupportRingL13BottomKinMount} coordinate system
* Origin: midpoint between upstream survey cones
* Orientation: u - width pointing towards electron side, v - pointing along the U-channel in the beam direction
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public static class UChannelL13Bottom extends UChannelL13 {
private final static double cone_to_edge_of_plate_y = 12.25*inch;
@@ -489,14 +438,9 @@
}
}
-
-
/**
* Position of the center of the survey balls when engaging the cones in the side plates of the U-channel.
* This is at nominal position.
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
private static class UChannelL13BottomSurveyBalls {
@@ -523,18 +467,12 @@
}
}
-
-
-
/**
* {@link SurveyVolume} volume defining the coordinate system of the top L1-3 u-channel
* Reference: SupportRingL13TopKinMount coordinate system
* Origin: midpoint between upstream survey cones
* Orientation: u - width pointing towards positron side, v - pointing along the U-channel in the beam direction
* Note that this is flipped w.r.t. bottom support.
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public static class UChannelL13Top extends UChannelL13 {
private final static Hep3Vector ball_kinMount = new BasicHep3Vector(SupportRingL13TopKinMount.kin_mount_pos_x,SupportRingL13TopKinMount.kin_mount_pos_y,SupportRingL13TopKinMount.kin_mount_pos_z);
@@ -567,15 +505,10 @@
return length;
}
}
-
-
-
+
/**
* Position of the center of the survey balls when engaging the cones in the side plates of the U-channel.
* This is at nominal position.
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
private static class UChannelL13TopSurveyBalls {
@@ -602,16 +535,8 @@
}
}
-
-
-
-
-
/**
* Abstract {@link SurveyVolume} volume defining the coordinate system of the u-channel plate
-
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public abstract static class UChannelPlate extends SurveyVolume {
public UChannelPlate(String name, SurveyVolume m,
@@ -626,9 +551,6 @@
/**
* Abstract {@link SurveyVolume} volume defining the coordinate system of the u-channel plate
-
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public abstract static class UChannelL13Plate extends UChannelPlate {
private final static double pocket_depth_L1 = 0.025;
@@ -689,9 +611,6 @@
* Reference: @UChannelL13Bottom coordinate system
* Origin: same as reference
* Orientation: same as reference
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public static class UChannelL13BottomPlate extends UChannelL13Plate {
protected final static double L1_module_pin_to_edge_of_plate = (16.0-4.126)*inch;
@@ -715,10 +634,7 @@
* {@link SurveyVolume} volume defining the coordinate system of the bottom u-channel plate
* Reference: @UChannelL13Bottom coordinate system
* Origin: same as reference
- * Orientation: same as reference
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
+ * Orientation: same as reference
*/
public static class UChannelL13TopPlate extends UChannelL13Plate {
protected final static double L1_module_pin_to_edge_of_plate = (16.0-2.75)*inch;
@@ -737,12 +653,9 @@
}
}
-
-
+
/**
* Abstract {@link SurveyVolume} volume defining the L4-6 u-channel volume
- *
- * @author Per Hansson Adrian <[log in to unmask]>
*/
public abstract static class UChannelL46 extends SurveyVolume {
@@ -760,26 +673,18 @@
protected void setBoxDim() {
setBoxDim(width,length,height);
- }
-
- }
-
-
-
-
-
+ }
+ }
+
/**
* {@link SurveyVolume} volume defining the coordinate system of the u-channel
* Reference: SVTBox coordinate system
* Origin: midpoint between upstream survey cones
* Orientation: u - width pointing towards electron side, v - pointing along the U-channel in the beam direction
- *
- * @author Per Hansson Adrian <[log in to unmask]>
*
*/
public static class UChannelL46Bottom extends UChannelL46 {
-
-
+
protected static final double cone_to_edge_of_plate_y = 2.75*inch;
public UChannelL46Bottom(String name, SurveyVolume m, AlignmentCorrection alignmentCorrection) {
@@ -803,15 +708,12 @@
/**
* Position of the center of the survey balls when engaging the cones in the side plates of the U-channel.
- *
- * @author Per Hansson Adrian <[log in to unmask]>
*
*/
private static class UChannelL46BottomSurveyBalls {
- // Shawn's calculated point at midpoint between two forward survey balls
+ // Shawn's calculated point at midpoint between two forward survey balls
protected final static Hep3Vector ball_pos = new BasicHep3Vector(-5.857, -157.776, -8.423);
-
private static final double cone_fwd_right_x = -7.019*inch;
private static final double cone_fwd_right_y = -6.419*inch;
@@ -841,22 +743,14 @@
return VecOp.sub(bwd_left, fwd_left);
}
}
-
-
-
-
/**
* {@link SurveyVolume} volume defining the coordinate system of the u-channel
- * Reference: {@link SVTBox} coordinate system
- * Origin: midpoint between upstream survey cones
- * Orientation: u - width pointing towards electron side, v - pointing along the U-channel in the beam direction
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
+ * Reference: {@link HPSTracker2014GeometryDefinition.SVTBox} coordinate system
+ * Origin: midpoint between upstream survey cones
+ * Orientation: u - width pointing towards electron side, v - pointing along the U-channel in the beam direction
*/
public static class UChannelL46Top extends UChannelL46 {
-
private static final double cone_to_side_plate_pin_y = (0.875-0.25)*inch;
private static final double side_plate_pin_to_edge_of_plate_y = 1.5*inch;
@@ -886,8 +780,7 @@
//flatPos = new BasicHep3Vector(ballPos.x(), ballPos.y()-1, ballPos.z()); // random offset
}
}
-
-
+
/**
* Position of the center of the survey balls when engaging the cones in the side plates of the U-channel.
*
@@ -899,7 +792,6 @@
// Shawn's calculated point at midpoint between two forward survey balls
protected final static Hep3Vector ball_pos = new BasicHep3Vector(-6.341, -141.909, 8.423);
-
protected static final double cone_fwd_right_x = -7.038*inch;
protected static final double cone_fwd_right_y = -5.794*inch;
protected static final double cone_fwd_right_z = 0.332*inch;
@@ -928,14 +820,9 @@
return VecOp.sub(bwd_right, fwd_right);
}
}
-
-
-
- /**
- * Abstract {@link SurveyVolume} defining the coordinate system of the u-channel plates
-
- * @author Per Hansson Adrian <[log in to unmask]>
- *
+
+ /**
+ * Abstract {@link SurveyVolume} defining the coordinate system of the u-channel plates
*/
public abstract static class UChannelL46Plate extends UChannelPlate {
public final static double pocket_depth_L4 = 0.1;
@@ -1043,16 +930,12 @@
}
}
-
-
-
+
/**
* {@link SurveyVolume} volume defining the coordinate system of module L1-3
* Reference: @UChannelL13Bottom coordinate system
* Origin: hole position on mounting surface (on electron side)
* Orientation: u - is normal to the surface pointing vertically down, v - points along module away from hybrid side (i.e. positron direction).
- *
- * @author Per Hansson Adrian <[log in to unmask]>
*
*/
public abstract static class ModuleL13 extends BaseModule {
@@ -1119,8 +1002,7 @@
protected double getHoleModuleCenterOffset() {
return UChannelL13Bottom.cone_to_edge_of_plate_y - UChannelL13BottomPlate.L1_module_pin_to_edge_of_plate;
}
- }
-
+ }
public static class ModuleL1Bot extends ModuleL13Bot {
@@ -1136,8 +1018,7 @@
return new BasicHep3Vector(x, y, z);
}
- }
-
+ }
public static class ModuleL1Top extends ModuleL13Top {
@@ -1155,9 +1036,7 @@
}
}
-
-
-
+
public static class ModuleL2Bot extends ModuleL13Bot {
public ModuleL2Bot(String name, SurveyVolume mother,
@@ -1190,10 +1069,7 @@
}
}
-
-
-
-
+
public static class ModuleL3Bot extends ModuleL13Bot {
public ModuleL3Bot(String name, SurveyVolume mother,
@@ -1226,11 +1102,7 @@
}
}
-
-
-
-
-
+
/**
* Abstract {@link SurveyVolume} volume defining the coordinate system of module L4-6
*
@@ -1628,8 +1500,6 @@
* Reference: @ModuleL13Bot coordinate system
* Origin: sensor center
* Orientation: w - is normal to the surface pointing from p-side to n-side, v - points along strips away from signal bond pads
- *
- * @author Per Hansson Adrian <[log in to unmask]>
*
*/
public static class HalfModuleAxial extends HPSTestRunTracker2014GeometryDefinition.TestRunHalfModule {
@@ -1668,8 +1538,6 @@
* Reference: @ModuleL13Bot coordinate system
* Origin: sensor center
* Orientation: same as axial - the module is rotated later.
- *
- * @author Per Hansson Adrian <[log in to unmask]>
*
*/
public static class HalfModuleStereo extends HPSTestRunTracker2014GeometryDefinition.TestRunHalfModule {
@@ -1873,23 +1741,21 @@
}
-
-
-
-
- /**
- * @author Per Hansson Adrian <[log in to unmask]>
- *
+ /**
+ *
*/
public static class LongModuleBundle extends BaseModuleBundle {
+
public HalfModuleBundle halfModuleAxialHole = null;
public HalfModuleBundle halfModuleStereoHole = null;
public HalfModuleBundle halfModuleAxialSlot = null;
public HalfModuleBundle halfModuleStereoSlot = null;
protected SurveyVolume coldBlock = null;
+
public LongModuleBundle(BaseModule m) {
super(m);
}
+
public void print() {
if(module!=null) System.out.printf("%s: %s\n", this.getClass().getSimpleName(),module.toString());
if(halfModuleAxialHole!=null) halfModuleAxialHole.print();
@@ -1897,13 +1763,11 @@
if(coldBlock!=null)System.out.printf("%s: %s\n", this.getClass().getSimpleName(),coldBlock.getName());
if(halfModuleStereoHole!=null) halfModuleStereoHole.print();
if(halfModuleStereoSlot!=null) halfModuleStereoSlot.print();
- }
- }
-
-
- /**
- * @author Per Hansson Adrian <[log in to unmask]>
- *
+ }
+ }
+
+ /**
+ *
*/
public static class LongHalfModuleBundle extends HalfModuleBundle {
public LongHalfModuleBundle() {
@@ -1913,9 +1777,7 @@
super(hm);
}
}
-
-
-
+
/**
* Create the half-module.
* @param side - stereo or axial
@@ -1955,9 +1817,6 @@
//TestRunModuleBundle bundle = (TestRunModuleBundle)getModuleBundle(mother);
//TestRunHalfModuleBundle halfModuleBundle;
LongModuleBundle bundle = (LongModuleBundle)getModuleBundle(mother);
-
-
-
// Build the half-module bundle and half-module
//TODO clean this up to a separate method
@@ -1983,9 +1842,6 @@
}
}
halfModuleBundle.halfModule = halfModule;
-
-
-
// create the half module components
makeHalfModuleComponentSensor(halfModule);
@@ -1994,13 +1850,8 @@
//makeHalfModuleComponentCF(halfModule);
- //makeHalfModuleComponentHybrid(halfModule);
-
-
-
-
- }
-
+ //makeHalfModuleComponentHybrid(halfModule);
+ }
protected void makeLongHalfModuleComponentKapton(BaseModule mother) {
@@ -2022,10 +1873,6 @@
}
-
-
-
-
protected HPSTestRunTracker2014GeometryDefinition.TestRunHalfModule createTestRunHalfModuleAxial(String volName,
BaseModule mother, AlignmentCorrection alignmentCorrection,
int layer, String half) {
@@ -2047,7 +1894,6 @@
* @param alignmentCorrection
* @param layer
* @param half
- * @return
*/
protected LongHalfModule createLongAxialSlotHalfModule(String name, SurveyVolume mother,
AlignmentCorrection alignmentCorrection, int layer,
@@ -2062,16 +1908,13 @@
* @param alignmentCorrection
* @param layer
* @param half
- * @return
*/
protected LongHalfModule createLongStereoSlotHalfModule(String name, SurveyVolume mother,
AlignmentCorrection alignmentCorrection, int layer,
String half) {
return new LongStereoSlotHalfModule(name, mother, alignmentCorrection, layer, half);
}
-
-
-
+
/* (non-Javadoc)
* @see org.lcsim.geometry.compact.converter.HPSTrackerGeometryDefinition#getHalfModuleBundle(org.lcsim.geometry.compact.converter.HPSTestRunTracker2014GeometryDefinition.BaseModule, java.lang.String)
*/
@@ -2121,9 +1964,6 @@
}
return hm;
}
-
-
-
/* (non-Javadoc)
* @see org.lcsim.geometry.compact.converter.HPSTrackerBuilder#getMillepedeLayer(java.lang.String)
@@ -2149,8 +1989,6 @@
return getMillepedeLayer(isTopLayer, layer, isAxial, isHole);
}
-
-
/**
* Definition relating the sensors and layer number used in millepede for this detector.
@@ -2158,7 +1996,6 @@
* @param layer
* @param isAxial
* @param isHole
- * @return
*/
public int getMillepedeLayer(boolean isTopLayer, int layer, boolean isAxial, boolean isHole) {
int l = -1;
@@ -2208,16 +2045,7 @@
if(l<0) throw new RuntimeException("Error getting the millepede layer.");
if(isDebug()) System.out.printf("%s: %s %d %s %s -> MP layer %d\n",getClass().getSimpleName(),isTopLayer?"top":"bottom", layer, isAxial?"axial":"stereo", isHole?"hole":"slot", l);
-
return l;
}
-
-
-
-
}
-
-
-
-
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014JavaBuilder.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014JavaBuilder.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014JavaBuilder.java Wed Apr 27 11:11:32 2016
@@ -28,122 +28,122 @@
*/
public class HPSTracker2014JavaBuilder extends HPSTestRunTracker2014JavaBuilder {
-
-
- /**
- * Default constructor
- * @param node
- */
- public HPSTracker2014JavaBuilder(boolean debugFlag, Element node) {
- super(debugFlag, node);
- }
-
-
-
-
- /**
- * Build the JAVA geometry objects from the geometry definition.
- * @param trackingVolume - the reference volume.
- */
- public void build(ILogicalVolume trackingVolume) {
-
- // build geometry
+
+
+ /**
+ * Default constructor
+ * @param node
+ */
+ public HPSTracker2014JavaBuilder(boolean debugFlag, Element node) {
+ super(debugFlag, node);
+ }
+
+
+
+
+ /**
+ * Build the JAVA geometry objects from the geometry definition.
+ * @param trackingVolume - the reference volume.
+ */
+ public void build(ILogicalVolume trackingVolume) {
+
+ // build geometry
setBuilder(createGeometryDefinition(this._debug, node));
-
- if(_builder==null) throw new RuntimeException("need to set builder class before calling build!");
-
- if(isDebug()) System.out.printf("%s: build the base geometry objects\n", getClass().getSimpleName());
-
- _builder.build();
-
- if(isDebug()) System.out.printf("%s: DONE build the base geometry objects\n", getClass().getSimpleName());
-
- if(isDebug()) System.out.printf("%s: build the JAVA geometry objects\n", getClass().getSimpleName());
-
- // initialize the list to store a reference to each object
- javaSurveyVolumes = new ArrayList<JavaSurveyVolume>();
-
- // Go through the list of volumes to build that is created in the generic builder class
- JavaSurveyVolume tracking = new JavaSurveyVolume(_builder.getSurveyVolume(TrackingVolume.class), trackingVolume);
- add(tracking);
- JavaSurveyVolume chamber = new JavaGhostSurveyVolume(_builder.getSurveyVolume(PSVacuumChamber.class), tracking);
- add(chamber);
- setBaseTrackerGeometry(new JavaSurveyVolume(_builder.getSurveyVolume(SvtBox.class), chamber,1));
- add(getBaseTrackerGeometry());
- JavaSurveyVolume svtBoxBasePlate = new JavaGhostSurveyVolume(_builder.getSurveyVolume(SvtBoxBasePlate.class), getBaseTrackerGeometry());
- add(svtBoxBasePlate);
-
-
- // build modules
-
- if(isDebug()) System.out.printf("%s: build JAVA modules\n", getClass().getSimpleName());
-
- // Loop over all modules created
- for(BaseModuleBundle mod : _builder.modules) {
- BaseModuleBundle m = mod;
- if(isDebug()) {
- System.out.printf("%s: build module %s (layer %d half %s)\n", getClass().getSimpleName(),m.module.getName(),m.getLayer(),m.getHalf());
- m.print();
- }
-
- // Find the mother among the objects using its name, should probably have a better way...
- String name_mother = m.getMother().getName();
- JavaSurveyVolume mother = null;
- for(JavaSurveyVolume g : javaSurveyVolumes) {
- if(g.getName().equals(name_mother)) {
- mother = g;
- break;
- }
- }
- // Check that it had a mother
- if(mother==null) throw new RuntimeException("Cound't find mother to module " + m.module.getName());
-
- if(isDebug()) System.out.printf("%s: found mother %s to module %s\n", getClass().getSimpleName(),mother.getName(),m.module.getName());
-
- // put the module in the list of objects that will be added to LCDD
- addModule(m, mother);
-
- if(isDebug()) System.out.printf("%s: DONE build module %s\n", getClass().getSimpleName(), m.module.getName());
-
-
- }
-
- if(isDebug()) System.out.printf("%s: DONE build JAVA modules\n", getClass().getSimpleName());
-
-
- //System.out.printf("%s: Built %d JAVA geometry objects\n", getClass().getSimpleName(),javaSurveyVolumes.size());
-
- if(isDebug()) {
- System.out.printf("%s: DONE building the JAVA geometry objects\n", getClass().getSimpleName());
- System.out.printf("%s: List of all the JAVA geometry objects built\n", this.getClass().getSimpleName());
- for(JavaSurveyVolume bg : javaSurveyVolumes) {
- System.out.printf("-------\n%s\n", bg.toString());
- }
- }
-
-
- // Set visualization features
- //setVis();
-
-
- }
-
- /**
- * Rules for adding the JAVA module geometry.
- * @param bundle - module to be added
- * @param mother - mother JAVA geometry object
- */
- private void addModule(BaseModuleBundle bundle, JavaSurveyVolume mother) {
- if(bundle instanceof TestRunModuleBundle) {
- addTestRunModule((TestRunModuleBundle) bundle, mother);
- } else if(bundle instanceof LongModuleBundle) {
- addLongModule((LongModuleBundle) bundle, mother);
- } else {
- throw new RuntimeException("The bundle is of unknown class type!");
- }
- }
-
- /**
+
+ if(_builder==null) throw new RuntimeException("need to set builder class before calling build!");
+
+ if(isDebug()) System.out.printf("%s: build the base geometry objects\n", getClass().getSimpleName());
+
+ _builder.build();
+
+ if(isDebug()) System.out.printf("%s: DONE build the base geometry objects\n", getClass().getSimpleName());
+
+ if(isDebug()) System.out.printf("%s: build the JAVA geometry objects\n", getClass().getSimpleName());
+
+ // initialize the list to store a reference to each object
+ javaSurveyVolumes = new ArrayList<JavaSurveyVolume>();
+
+ // Go through the list of volumes to build that is created in the generic builder class
+ JavaSurveyVolume tracking = new JavaSurveyVolume(_builder.getSurveyVolume(TrackingVolume.class), trackingVolume);
+ add(tracking);
+ JavaSurveyVolume chamber = new JavaGhostSurveyVolume(_builder.getSurveyVolume(PSVacuumChamber.class), tracking);
+ add(chamber);
+ setBaseTrackerGeometry(new JavaSurveyVolume(_builder.getSurveyVolume(SvtBox.class), chamber,1));
+ add(getBaseTrackerGeometry());
+ JavaSurveyVolume svtBoxBasePlate = new JavaGhostSurveyVolume(_builder.getSurveyVolume(SvtBoxBasePlate.class), getBaseTrackerGeometry());
+ add(svtBoxBasePlate);
+
+
+ // build modules
+
+ if(isDebug()) System.out.printf("%s: build JAVA modules\n", getClass().getSimpleName());
+
+ // Loop over all modules created
+ for(BaseModuleBundle mod : _builder.modules) {
+ BaseModuleBundle m = mod;
+ if(isDebug()) {
+ System.out.printf("%s: build module %s (layer %d half %s)\n", getClass().getSimpleName(),m.module.getName(),m.getLayer(),m.getHalf());
+ m.print();
+ }
+
+ // Find the mother among the objects using its name, should probably have a better way...
+ String name_mother = m.getMother().getName();
+ JavaSurveyVolume mother = null;
+ for(JavaSurveyVolume g : javaSurveyVolumes) {
+ if(g.getName().equals(name_mother)) {
+ mother = g;
+ break;
+ }
+ }
+ // Check that it had a mother
+ if(mother==null) throw new RuntimeException("Cound't find mother to module " + m.module.getName());
+
+ if(isDebug()) System.out.printf("%s: found mother %s to module %s\n", getClass().getSimpleName(),mother.getName(),m.module.getName());
+
+ // put the module in the list of objects that will be added to LCDD
+ addModule(m, mother);
+
+ if(isDebug()) System.out.printf("%s: DONE build module %s\n", getClass().getSimpleName(), m.module.getName());
+
+
+ }
+
+ if(isDebug()) System.out.printf("%s: DONE build JAVA modules\n", getClass().getSimpleName());
+
+
+ //System.out.printf("%s: Built %d JAVA geometry objects\n", getClass().getSimpleName(),javaSurveyVolumes.size());
+
+ if(isDebug()) {
+ System.out.printf("%s: DONE building the JAVA geometry objects\n", getClass().getSimpleName());
+ System.out.printf("%s: List of all the JAVA geometry objects built\n", this.getClass().getSimpleName());
+ for(JavaSurveyVolume bg : javaSurveyVolumes) {
+ System.out.printf("-------\n%s\n", bg.toString());
+ }
+ }
+
+
+ // Set visualization features
+ //setVis();
+
+
+ }
+
+ /**
+ * Rules for adding the JAVA module geometry.
+ * @param bundle - module to be added
+ * @param mother - mother JAVA geometry object
+ */
+ private void addModule(BaseModuleBundle bundle, JavaSurveyVolume mother) {
+ if(bundle instanceof TestRunModuleBundle) {
+ addTestRunModule((TestRunModuleBundle) bundle, mother);
+ } else if(bundle instanceof LongModuleBundle) {
+ addLongModule((LongModuleBundle) bundle, mother);
+ } else {
+ throw new RuntimeException("The bundle is of unknown class type!");
+ }
+ }
+
+ /**
* Rules for adding the LCDD module geometry.
* @param bundle - module to be added
* @param mother - mother LCDD geometry object
@@ -191,12 +191,12 @@
}
-
+
@Override
public HPSTrackerGeometryDefinition createGeometryDefinition(boolean debug, Element node) {
return new HPSTracker2014GeometryDefinition(debug, node);
}
-
-
+
+
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014v1GeometryDefinition.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014v1GeometryDefinition.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014v1GeometryDefinition.java Wed Apr 27 11:11:32 2016
@@ -7,7 +7,7 @@
/**
*
* Updated geometry information for the HPS tracker 2014
-
+ *
* @author Per Hansson Adrian <[log in to unmask]>
*
*/
@@ -93,8 +93,7 @@
/**
- * PI rotation around generic z-axis
- * @return
+ * PI rotation around generic z-axis
*/
private static Rotation getSlotRotation() {
return new Rotation(new Vector3D(0,0,1),Math.PI);
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerBuilder.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerBuilder.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerBuilder.java Wed Apr 27 11:11:32 2016
@@ -632,7 +632,7 @@
/**
* Get hole or slot key name from string
*
- * @param name.
+ * @param name "hole" or "slot"
* @return hole or not boolean
*/
public static boolean isHoleFromName(String name) {
@@ -649,10 +649,7 @@
/**
* Extract old definition of Test Run sensor number.
*
- * @param isTopLayer - top or bottom layer
- * @param l - layer
- * @param isAxial - axial or stereo sensor
- * @return
+ * @return the geometric layer according to Test Run definition
*/
public int getOldGeomDefLayerFromVolumeName(String name) {
@@ -672,8 +669,7 @@
/**
* Get the layer number consistent with the old geometry definition.
*
- * @param module name that contains layer and half information.
- * @return the layer.
+ * @return the older layer definition
*/
public int getOldLayerDefinition(boolean isTopLayer, int l, boolean isAxial) {
int layer = -1;
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerGeometryDefinition.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerGeometryDefinition.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerGeometryDefinition.java Wed Apr 27 11:11:32 2016
@@ -41,7 +41,7 @@
protected static final boolean use30mradRotation = true;
protected static final boolean useFakeHalfModuleAxialPos = false;
- // Global position references
+ // Global position references
protected static final double target_pos_wrt_base_plate_x = 162.3; //from Marco's 3D model
protected static final double target_pos_wrt_base_plate_y = 80.55; //from Tim's sketchup //68.75; //from Marco's 3D model
protected static final double target_pos_wrt_base_plate_z = 926.59; //from Marco's 3D model
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerJavaBuilder.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerJavaBuilder.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerJavaBuilder.java Wed Apr 27 11:11:32 2016
@@ -15,158 +15,158 @@
public abstract class HPSTrackerJavaBuilder implements IHPSTrackerJavaBuilder {
- protected boolean _debug = false;
- private JavaSurveyVolume baseSurveyVolume;
- protected List<JavaSurveyVolume> javaSurveyVolumes = new ArrayList<JavaSurveyVolume>();
- protected DetectorIdentifierHelper detectorIdentifierHelper;
- protected IIdentifierDictionary identifierDictionary;
- protected Subdetector subdet;
- protected List<IDetectorElement> layerDetectorElements = new ArrayList<IDetectorElement>();
- protected List<IDetectorElement> moduleDetectorElements = new ArrayList<IDetectorElement>();
- protected IDetectorElement baseDetectorElement = null;
- public HPSTrackerBuilder _builder = null;
+ protected boolean _debug = false;
+ private JavaSurveyVolume baseSurveyVolume;
+ protected List<JavaSurveyVolume> javaSurveyVolumes = new ArrayList<JavaSurveyVolume>();
+ protected DetectorIdentifierHelper detectorIdentifierHelper;
+ protected IIdentifierDictionary identifierDictionary;
+ protected Subdetector subdet;
+ protected List<IDetectorElement> layerDetectorElements = new ArrayList<IDetectorElement>();
+ protected List<IDetectorElement> moduleDetectorElements = new ArrayList<IDetectorElement>();
+ protected IDetectorElement baseDetectorElement = null;
+ public HPSTrackerBuilder _builder = null;
protected Element node = null;
-
- public HPSTrackerJavaBuilder(boolean debugFlag, Element node) {
- this._debug=debugFlag;
- this.node = node;
- }
-
- public abstract void build(ILogicalVolume trackingVolume);
- public abstract HPSTrackerGeometryDefinition createGeometryDefinition(boolean debug, Element node);
+
+ public HPSTrackerJavaBuilder(boolean debugFlag, Element node) {
+ this._debug=debugFlag;
+ this.node = node;
+ }
+
+ public abstract void build(ILogicalVolume trackingVolume);
+ public abstract HPSTrackerGeometryDefinition createGeometryDefinition(boolean debug, Element node);
-
- /**
- * Add to list of objects.
- * @param geom - object to add.
- */
- public void add(JavaSurveyVolume geom) {
- javaSurveyVolumes.add(geom);
- }
-
- public void setBuilder(HPSTrackerBuilder b) {
- _builder = b;
- }
-
- public void build() {
- _builder.build();
- }
-
- public void setDebug(boolean debug) {
- _debug = debug;
- }
-
- public boolean isDebug() {
- return _debug;
- }
+
+ /**
+ * Add to list of objects.
+ * @param geom - object to add.
+ */
+ public void add(JavaSurveyVolume geom) {
+ javaSurveyVolumes.add(geom);
+ }
+
+ public void setBuilder(HPSTrackerBuilder b) {
+ _builder = b;
+ }
+
+ public void build() {
+ _builder.build();
+ }
+
+ public void setDebug(boolean debug) {
+ _debug = debug;
+ }
+
+ public boolean isDebug() {
+ return _debug;
+ }
- public DetectorIdentifierHelper getDetectorIdentifierHelper() {
- return detectorIdentifierHelper;
- }
+ public DetectorIdentifierHelper getDetectorIdentifierHelper() {
+ return detectorIdentifierHelper;
+ }
- public void setDetectorIdentifierHelper(
- DetectorIdentifierHelper detectorIdentifierHelper) {
- this.detectorIdentifierHelper = detectorIdentifierHelper;
- }
+ public void setDetectorIdentifierHelper(
+ DetectorIdentifierHelper detectorIdentifierHelper) {
+ this.detectorIdentifierHelper = detectorIdentifierHelper;
+ }
- public IIdentifierDictionary getIdentifierDictionary() {
- return identifierDictionary;
- }
+ public IIdentifierDictionary getIdentifierDictionary() {
+ return identifierDictionary;
+ }
- public void setIdentifierDictionary(
- IIdentifierDictionary identifierDictionary) {
- this.identifierDictionary = identifierDictionary;
- }
+ public void setIdentifierDictionary(
+ IIdentifierDictionary identifierDictionary) {
+ this.identifierDictionary = identifierDictionary;
+ }
- public void setSubdetector(Subdetector subdet) {
- this.subdet = subdet;
- }
+ public void setSubdetector(Subdetector subdet) {
+ this.subdet = subdet;
+ }
- public Subdetector getSubdetector() {
- return this.subdet;
- }
+ public Subdetector getSubdetector() {
+ return this.subdet;
+ }
-
+
-
+
- // This finds specific type. I would like to use the ID for this but can't, I think.
- // TODO there must be a factory instance to do this
- public SiTrackerModule getModuleDetectorElement(SiTrackerModule testElement) {
- if(isDebug()) System.out.printf("%s: getModuleDetectorElement for module %s path: \"%s\"\n", this.getClass().getSimpleName(),testElement.getName(),testElement.getGeometry().getPathString());
- SiTrackerModule element = null;
- for(IDetectorElement e : moduleDetectorElements) {
- SiTrackerModule m = (SiTrackerModule) e;
- if(isDebug()) System.out.printf("%s: compare with module %s path: %s\"%s\" \n", this.getClass().getSimpleName(),m.getName(),m.getGeometry().getPathString());
- if(m.getGeometry().getPathString().equals(testElement.getGeometry().getPathString())) {
- if(element!=null) throw new RuntimeException("two DE sharing extended ID?");
- if(isDebug()) System.out.printf("%s: found it\n", this.getClass().getSimpleName());
- element = m;
- }
- }
- return element;
- }
+ // This finds specific type. I would like to use the ID for this but can't, I think.
+ // TODO there must be a factory instance to do this
+ public SiTrackerModule getModuleDetectorElement(SiTrackerModule testElement) {
+ if(isDebug()) System.out.printf("%s: getModuleDetectorElement for module %s path: \"%s\"\n", this.getClass().getSimpleName(),testElement.getName(),testElement.getGeometry().getPathString());
+ SiTrackerModule element = null;
+ for(IDetectorElement e : moduleDetectorElements) {
+ SiTrackerModule m = (SiTrackerModule) e;
+ if(isDebug()) System.out.printf("%s: compare with module %s path: %s\"%s\" \n", this.getClass().getSimpleName(),m.getName(),m.getGeometry().getPathString());
+ if(m.getGeometry().getPathString().equals(testElement.getGeometry().getPathString())) {
+ if(element!=null) throw new RuntimeException("two DE sharing extended ID?");
+ if(isDebug()) System.out.printf("%s: found it\n", this.getClass().getSimpleName());
+ element = m;
+ }
+ }
+ return element;
+ }
-
- // Find detector elements
- // TODO This should be using some global geometry code like DetectorElementStore?
- public IDetectorElement getLayerDetectorElement(IExpandedIdentifier expId) {
- IDetectorElement element = null;
- if(isDebug()) System.out.printf("%s: search among %d layer DEs\n", this.getClass().getSimpleName(), layerDetectorElements.size());
- for(IDetectorElement e : layerDetectorElements) {
- if(isDebug()) System.out.printf("%s: test %s\n", this.getClass().getSimpleName(),e.getName());
- ExpandedIdentifier eId = (ExpandedIdentifier) e.getExpandedIdentifier();
- if(eId.equals(expId)) { // TODO order matters as expId is an interface without that function!?
- //check that only one was found
- if(element!=null) throw new RuntimeException("two DE sharing extended ID?");
- if(isDebug()) System.out.printf("%s: found it\n", this.getClass().getSimpleName());
- element = e;
- }
+
+ // Find detector elements
+ // TODO This should be using some global geometry code like DetectorElementStore?
+ public IDetectorElement getLayerDetectorElement(IExpandedIdentifier expId) {
+ IDetectorElement element = null;
+ if(isDebug()) System.out.printf("%s: search among %d layer DEs\n", this.getClass().getSimpleName(), layerDetectorElements.size());
+ for(IDetectorElement e : layerDetectorElements) {
+ if(isDebug()) System.out.printf("%s: test %s\n", this.getClass().getSimpleName(),e.getName());
+ ExpandedIdentifier eId = (ExpandedIdentifier) e.getExpandedIdentifier();
+ if(eId.equals(expId)) { // TODO order matters as expId is an interface without that function!?
+ //check that only one was found
+ if(element!=null) throw new RuntimeException("two DE sharing extended ID?");
+ if(isDebug()) System.out.printf("%s: found it\n", this.getClass().getSimpleName());
+ element = e;
+ }
- }
- return element;
- }
+ }
+ return element;
+ }
- public void addLayerDetectorElement(IDetectorElement e) {
- IExpandedIdentifier expId = e.getExpandedIdentifier();
- if(getLayerDetectorElement(expId) != null)
- throw new RuntimeException("Trying to add an existing layer detector element.");
- layerDetectorElements.add(e);
- }
+ public void addLayerDetectorElement(IDetectorElement e) {
+ IExpandedIdentifier expId = e.getExpandedIdentifier();
+ if(getLayerDetectorElement(expId) != null)
+ throw new RuntimeException("Trying to add an existing layer detector element.");
+ layerDetectorElements.add(e);
+ }
- public void addBaseDetectorElement(IDetectorElement e) {
- baseDetectorElement = e;
- }
+ public void addBaseDetectorElement(IDetectorElement e) {
+ baseDetectorElement = e;
+ }
- public IDetectorElement getBaseDetectorElement() {
- return baseDetectorElement;
- }
+ public IDetectorElement getBaseDetectorElement() {
+ return baseDetectorElement;
+ }
- public void addModuleDetectorElement(IDetectorElement e) {
- if(!(e instanceof SiTrackerModule))
- throw new RuntimeException("Trying to add an existing module of wrong type.");
- if(getModuleDetectorElement((SiTrackerModule) e) != null)
- throw new RuntimeException("Trying to add an already existing module detector element.");
- layerDetectorElements.add(e);
- }
-
+ public void addModuleDetectorElement(IDetectorElement e) {
+ if(!(e instanceof SiTrackerModule))
+ throw new RuntimeException("Trying to add an existing module of wrong type.");
+ if(getModuleDetectorElement((SiTrackerModule) e) != null)
+ throw new RuntimeException("Trying to add an already existing module detector element.");
+ layerDetectorElements.add(e);
+ }
+
- /**
- * @return the baseTrackerGeometry
- */
- public JavaSurveyVolume getBaseTrackerGeometry() {
- return baseSurveyVolume;
- }
+ /**
+ * @return the baseTrackerGeometry
+ */
+ public JavaSurveyVolume getBaseTrackerGeometry() {
+ return baseSurveyVolume;
+ }
- /**
- * @param baseTrackerGeometry the baseTrackerGeometry to set
- */
- public void setBaseTrackerGeometry(JavaSurveyVolume baseTrackerGeometry) {
- this.baseSurveyVolume = baseTrackerGeometry;
- }
+ /**
+ * @param baseTrackerGeometry the baseTrackerGeometry to set
+ */
+ public void setBaseTrackerGeometry(JavaSurveyVolume baseTrackerGeometry) {
+ this.baseSurveyVolume = baseTrackerGeometry;
+ }
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerLCDDBuilder.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerLCDDBuilder.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerLCDDBuilder.java Wed Apr 27 11:11:32 2016
@@ -10,24 +10,24 @@
public abstract class HPSTrackerLCDDBuilder implements IHPSTrackerLCDDBuilder {
- public boolean _debug = false;
- protected LCDD lcdd = null;
- protected LCDDSurveyVolume baseSurveyVolume;
- protected List<LCDDSurveyVolume> lcddSurveyVolumes = new ArrayList<LCDDSurveyVolume>();
- private SensitiveDetector sensitiveDetector;
- public HPSTrackerBuilder _builder = null;
+ public boolean _debug = false;
+ protected LCDD lcdd = null;
+ protected LCDDSurveyVolume baseSurveyVolume;
+ protected List<LCDDSurveyVolume> lcddSurveyVolumes = new ArrayList<LCDDSurveyVolume>();
+ private SensitiveDetector sensitiveDetector;
+ public HPSTrackerBuilder _builder = null;
protected Element node;
-
-
- public HPSTrackerLCDDBuilder(boolean debugFlag, Element node, LCDD lcdd2, SensitiveDetector sens) {
- setDebug(debugFlag);
- setLCDD(lcdd2);
- setSensitiveDetector(sens);
- setNode(node);
- }
-
- /**
+
+
+ public HPSTrackerLCDDBuilder(boolean debugFlag, Element node, LCDD lcdd2, SensitiveDetector sens) {
+ setDebug(debugFlag);
+ setLCDD(lcdd2);
+ setSensitiveDetector(sens);
+ setNode(node);
+ }
+
+ /**
* Build the LCDD geometry objects.
* @param worldVolume - the reference volume.
*/
@@ -37,93 +37,93 @@
public abstract void setBuilder();
public abstract HPSTrackerGeometryDefinition createGeometryDefinition(boolean debug, Element node);
-
+
- public void setNode(Element node) {
+ public void setNode(Element node) {
this.node = node;
- }
-
+ }
+
public void setSensitiveDetector(SensitiveDetector sens) {
- this.sensitiveDetector = sens;
- }
+ this.sensitiveDetector = sens;
+ }
- public SensitiveDetector getSensitiveDetector() {
- return this.sensitiveDetector;
- }
+ public SensitiveDetector getSensitiveDetector() {
+ return this.sensitiveDetector;
+ }
- public void setBuilder(HPSTrackerBuilder b) {
- _builder = b;
- }
-
- public HPSTrackerBuilder getBuilder() {
- return _builder;
- }
-
- public void build() {
- _builder.build();
- }
-
- public void setDebug(boolean debug) {
- _debug = debug;
- }
-
- public boolean isDebug() {
- return _debug;
- }
-
- /**
- * Add to list of objects.
- * @param geom - object to add.
- */
- public void add(LCDDSurveyVolume geom) {
- lcddSurveyVolumes.add(geom);
- }
+ public void setBuilder(HPSTrackerBuilder b) {
+ _builder = b;
+ }
+
+ public HPSTrackerBuilder getBuilder() {
+ return _builder;
+ }
+
+ public void build() {
+ _builder.build();
+ }
+
+ public void setDebug(boolean debug) {
+ _debug = debug;
+ }
+
+ public boolean isDebug() {
+ return _debug;
+ }
+
+ /**
+ * Add to list of objects.
+ * @param geom - object to add.
+ */
+ public void add(LCDDSurveyVolume geom) {
+ lcddSurveyVolumes.add(geom);
+ }
-
-
+
+
- public void setLCDD(LCDD lcdd) {
- this.lcdd = lcdd;
- }
+ public void setLCDD(LCDD lcdd) {
+ this.lcdd = lcdd;
+ }
- public LCDD getLCDD() {
- return lcdd;
- }
+ public LCDD getLCDD() {
+ return lcdd;
+ }
- public LCDDSurveyVolume getBaseLCDD() {
- return baseSurveyVolume;
- }
+ public LCDDSurveyVolume getBaseLCDD() {
+ return baseSurveyVolume;
+ }
- public void setVisualization() {
-
- if(isDebug()) System.out.printf("%s: Set LCDD visualization for %d LCDD geometry objects \n", getClass().getSimpleName(), lcddSurveyVolumes.size());
- for(SurveyVolumeImpl g : lcddSurveyVolumes) {
- String name = g.getName();
- if(isDebug()) System.out.printf("%s: Set LCDD vis for %s \n", getClass().getSimpleName(), name);
- if(name.contains("base_plate")) g.setVisName("BasePlateVis");
+ public void setVisualization() {
+
+ if(isDebug()) System.out.printf("%s: Set LCDD visualization for %d LCDD geometry objects \n", getClass().getSimpleName(), lcddSurveyVolumes.size());
+ for(SurveyVolumeImpl g : lcddSurveyVolumes) {
+ String name = g.getName();
+ if(isDebug()) System.out.printf("%s: Set LCDD vis for %s \n", getClass().getSimpleName(), name);
+ if(name.contains("base_plate")) g.setVisName("BasePlateVis");
else if(name.equals("base")) g.setVisName("SvtBoxVis");
- else if(name.contains("chamber")) g.setVisName("ChamberVis");
- else if(name.contains("support_bottom") || name.contains("support_top")) g.setVisName("SupportVolumeVis");
- else if(name.contains("support_plate")) g.setVisName("SupportPlateVis");
- else if(name.startsWith("module_")) {
- if(name.endsWith("halfmodule_axial") || name.endsWith("halfmodule_stereo")) g.setVisName("HalfModuleVis");
- else if(name.endsWith("cold")) g.setVisName("ColdBlockVis");
- else if(name.endsWith("lamination")) g.setVisName("KaptonVis");
- else if(name.endsWith("sensor")) g.setVisName("SensorVis");
- else if(name.endsWith("sensor_active")) g.setVisName("SensorVis");
- else if(name.endsWith("cf")) g.setVisName("CarbonFiberVis");
- else if(name.endsWith("hybrid")) g.setVisName("HybridVis");
- else {
- //this must be a module then?
- g.setVisName("ModuleVis");
- }
- }
- else {
- if(isDebug()) System.out.printf("%s: No LCDD vis for %s \n", getClass().getSimpleName(), name);
- }
- }
- if(isDebug()) System.out.printf("%s: DONE Set LCDD vis \n", getClass().getSimpleName());
- }
-
+ else if(name.contains("chamber")) g.setVisName("ChamberVis");
+ else if(name.contains("support_bottom") || name.contains("support_top")) g.setVisName("SupportVolumeVis");
+ else if(name.contains("support_plate")) g.setVisName("SupportPlateVis");
+ else if(name.startsWith("module_")) {
+ if(name.endsWith("halfmodule_axial") || name.endsWith("halfmodule_stereo")) g.setVisName("HalfModuleVis");
+ else if(name.endsWith("cold")) g.setVisName("ColdBlockVis");
+ else if(name.endsWith("lamination")) g.setVisName("KaptonVis");
+ else if(name.endsWith("sensor")) g.setVisName("SensorVis");
+ else if(name.endsWith("sensor_active")) g.setVisName("SensorVis");
+ else if(name.endsWith("cf")) g.setVisName("CarbonFiberVis");
+ else if(name.endsWith("hybrid")) g.setVisName("HybridVis");
+ else {
+ //this must be a module then?
+ g.setVisName("ModuleVis");
+ }
+ }
+ else {
+ if(isDebug()) System.out.printf("%s: No LCDD vis for %s \n", getClass().getSimpleName(), name);
+ }
+ }
+ if(isDebug()) System.out.printf("%s: DONE Set LCDD vis \n", getClass().getSimpleName());
+ }
+
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/IHPSTrackerJavaBuilder.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/IHPSTrackerJavaBuilder.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/IHPSTrackerJavaBuilder.java Wed Apr 27 11:11:32 2016
@@ -7,25 +7,25 @@
public interface IHPSTrackerJavaBuilder {
- /**
- * Build the JAVA geometry objects from the geometry definition.
- * @param trackingVolume - the reference volume.
- */
- public void build(ILogicalVolume trackingVolume);
-
- public DetectorIdentifierHelper getDetectorIdentifierHelper();
+ /**
+ * Build the JAVA geometry objects from the geometry definition.
+ * @param trackingVolume - the reference volume.
+ */
+ public void build(ILogicalVolume trackingVolume);
+
+ public DetectorIdentifierHelper getDetectorIdentifierHelper();
- public void setDetectorIdentifierHelper(
- DetectorIdentifierHelper detectorIdentifierHelper);
+ public void setDetectorIdentifierHelper(
+ DetectorIdentifierHelper detectorIdentifierHelper);
- public IIdentifierDictionary getIdentifierDictionary();
+ public IIdentifierDictionary getIdentifierDictionary();
- public void setIdentifierDictionary(
- IIdentifierDictionary identifierDictionary);
+ public void setIdentifierDictionary(
+ IIdentifierDictionary identifierDictionary);
- public void setSubdetector(Subdetector subdet);
+ public void setSubdetector(Subdetector subdet);
- public Subdetector getSubdetector();
-
+ public Subdetector getSubdetector();
+
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/IHPSTrackerLCDDBuilder.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/IHPSTrackerLCDDBuilder.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/IHPSTrackerLCDDBuilder.java Wed Apr 27 11:11:32 2016
@@ -5,11 +5,11 @@
public interface IHPSTrackerLCDDBuilder {
- public void setSensitiveDetector(SensitiveDetector sens);
+ public void setSensitiveDetector(SensitiveDetector sens);
- public SensitiveDetector getSensitiveDetector();
-
- public void build(Volume worldVolume);
+ public SensitiveDetector getSensitiveDetector();
+
+ public void build(Volume worldVolume);
- public void setVisualization();
+ public void setVisualization();
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/JavaGhostSurveyVolume.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/JavaGhostSurveyVolume.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/JavaGhostSurveyVolume.java Wed Apr 27 11:11:32 2016
@@ -7,22 +7,22 @@
* @author Per Hansson Adrian <[log in to unmask]>
*/
public class JavaGhostSurveyVolume extends JavaSurveyVolume {
-
- /**
- * Initialize with base and mother. This is typically for a reference geometry object
- * that is used for referencing coordinate systems but that doesn't have a volume itself.
- * @param surveyVolume - object used to get geometry definitions
- * @param mother - mother object
- */
- public JavaGhostSurveyVolume(SurveyVolume surveyVolume, JavaSurveyVolume mother) {
- super(surveyVolume);
- if(isDebug()) System.out.printf("%s: constructing JAVA ghost object %s with mother %s\n", this.getClass().getSimpleName(),surveyVolume.getName(),mother==null?"null":mother.getName());
- setMother(mother);
- mother.addDaughter(this);
- setPositionAndRotation(surveyVolume);
- if(isDebug()) System.out.printf("%s: DONE constructing JAVA object %s\n", this.getClass().getSimpleName(),surveyVolume.getName());
- }
-
+
+ /**
+ * Initialize with base and mother. This is typically for a reference geometry object
+ * that is used for referencing coordinate systems but that doesn't have a volume itself.
+ * @param surveyVolume - object used to get geometry definitions
+ * @param mother - mother object
+ */
+ public JavaGhostSurveyVolume(SurveyVolume surveyVolume, JavaSurveyVolume mother) {
+ super(surveyVolume);
+ if(isDebug()) System.out.printf("%s: constructing JAVA ghost object %s with mother %s\n", this.getClass().getSimpleName(),surveyVolume.getName(),mother==null?"null":mother.getName());
+ setMother(mother);
+ mother.addDaughter(this);
+ setPositionAndRotation(surveyVolume);
+ if(isDebug()) System.out.printf("%s: DONE constructing JAVA object %s\n", this.getClass().getSimpleName(),surveyVolume.getName());
+ }
+
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/JavaSurveyVolume.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/JavaSurveyVolume.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/JavaSurveyVolume.java Wed Apr 27 11:11:32 2016
@@ -16,7 +16,6 @@
import org.lcsim.detector.RotationGeant;
import org.lcsim.detector.Transform3D;
import org.lcsim.detector.Translation3D;
-import org.lcsim.detector.material.IMaterial;
import org.lcsim.detector.material.MaterialStore;
import org.lcsim.detector.solids.Box;
import org.lcsim.geometry.util.TransformationUtils;
@@ -26,120 +25,120 @@
* @author Per Hansson Adrian <[log in to unmask]>
*/
public class JavaSurveyVolume extends SurveyVolumeImpl {
- private Box box= null;
- private ILogicalVolume volume = null;
- private ITranslation3D pos = null;
- private IRotation3D rot = null;
- private IPhysicalVolume physVolume = null;
- private JavaSurveyVolume mother = null;
- public List<JavaSurveyVolume> daughters = new ArrayList<JavaSurveyVolume>();
- private int componentId = -1;
-
- /**
- * Default constructor
- */
- public JavaSurveyVolume(SurveyVolume surveyVolume) {
- super(surveyVolume);
- }
-
- /**
- * Construct a JAVA geometry object from its geometry definition and an already built logical volume.
- * This is typically used by the tracking volume.
- * @param surveyVolume - input geometry definition
- * @param vol - logical volume
- */
- public JavaSurveyVolume(SurveyVolume surveyVolume, ILogicalVolume vol) {
- super(surveyVolume);
- if(isDebug()) System.out.printf("%s: JavaBaseGeometry %s (given logical volume %s)\n", this.getClass().getSimpleName(),surveyVolume.getName(),vol.getName());
- // this must be tracking volume. May change in the future and is probably weird to make this requirement here.
- if(!surveyVolume.getName().contains("tracking")) throw new RuntimeException("this constructor is only used with the tracking volume!?");
- setVolume(vol);
- // since it's tracking volume, set the pos and rotation trivially
- Hep3Vector lcdd_rot_angles = TransformationUtils.getCardanAngles(surveyVolume.getCoord().v(), surveyVolume.getCoord().w(), new BasicHep3Vector(0,1,0),new BasicHep3Vector(0,0,1));
- setPos(new Translation3D(0,0,0));
- setRot(new RotationGeant(lcdd_rot_angles.x(), lcdd_rot_angles.y(), lcdd_rot_angles.z()));
- if(isDebug()) System.out.printf("%s: DONE JavaBaseGeometry %s\n", this.getClass().getSimpleName(),surveyVolume.getName());
- }
-
- /**
- * Construct a JAVA geometry object from its geometry definition.
- * @param surveyVolume - input geometry definition
- * @param mother - reference to mother JAVA definition
- * @param volumeId - component id number
- */
- public JavaSurveyVolume(SurveyVolume surveyVolume, JavaSurveyVolume mother, int volumeId) {
- super(surveyVolume);
+ private Box box= null;
+ private ILogicalVolume volume = null;
+ private ITranslation3D pos = null;
+ private IRotation3D rot = null;
+ private IPhysicalVolume physVolume = null;
+ private JavaSurveyVolume mother = null;
+ public List<JavaSurveyVolume> daughters = new ArrayList<JavaSurveyVolume>();
+ private int componentId = -1;
+
+ /**
+ * Default constructor
+ */
+ public JavaSurveyVolume(SurveyVolume surveyVolume) {
+ super(surveyVolume);
+ }
+
+ /**
+ * Construct a JAVA geometry object from its geometry definition and an already built logical volume.
+ * This is typically used by the tracking volume.
+ * @param surveyVolume - input geometry definition
+ * @param vol - logical volume
+ */
+ public JavaSurveyVolume(SurveyVolume surveyVolume, ILogicalVolume vol) {
+ super(surveyVolume);
+ if(isDebug()) System.out.printf("%s: JavaBaseGeometry %s (given logical volume %s)\n", this.getClass().getSimpleName(),surveyVolume.getName(),vol.getName());
+ // this must be tracking volume. May change in the future and is probably weird to make this requirement here.
+ if(!surveyVolume.getName().contains("tracking")) throw new RuntimeException("this constructor is only used with the tracking volume!?");
+ setVolume(vol);
+ // since it's tracking volume, set the pos and rotation trivially
+ Hep3Vector lcdd_rot_angles = TransformationUtils.getCardanAngles(surveyVolume.getCoord().v(), surveyVolume.getCoord().w(), new BasicHep3Vector(0,1,0),new BasicHep3Vector(0,0,1));
+ setPos(new Translation3D(0,0,0));
+ setRot(new RotationGeant(lcdd_rot_angles.x(), lcdd_rot_angles.y(), lcdd_rot_angles.z()));
+ if(isDebug()) System.out.printf("%s: DONE JavaBaseGeometry %s\n", this.getClass().getSimpleName(),surveyVolume.getName());
+ }
+
+ /**
+ * Construct a JAVA geometry object from its geometry definition.
+ * @param surveyVolume - input geometry definition
+ * @param mother - reference to mother JAVA definition
+ * @param volumeId - component id number
+ */
+ public JavaSurveyVolume(SurveyVolume surveyVolume, JavaSurveyVolume mother, int volumeId) {
+ super(surveyVolume);
if(isDebug()) System.out.printf("%s: JavaBaseGeometry %s (volumeID %d, mother %s)\n", this.getClass().getSimpleName(),surveyVolume.getName(),volumeId,mother==null?"null":mother.getName());
- setComponentId(volumeId);
- setMother(mother);
- mother.addDaughter(this);
- buildBox();
- buildVolume();
- setPositionAndRotation(surveyVolume);
- if(isDebug()) System.out.printf("%s: DONE JavaBaseGeometry %s\n", this.getClass().getSimpleName(),surveyVolume.getName());
- }
-
- protected boolean hasCoordinateSystemInfo() {
- return pos!=null && rot!=null;
- }
-
-
- public void buildPhysVolume() {
- if(isDebug()) System.out.printf("%s: build phys volume for %s with mother %s and physical mother %s\n", this.getClass().getSimpleName(),getName(),getMother().getName(),getPhysMother().getName());
- JavaSurveyVolume physMother = getPhysMother();
- setPhysVolume(new PhysicalVolume(new Transform3D(getPos(), getRot()), getName(), volume, physMother.getVolume(),getComponentId()));
- }
-
- public void buildBox() {
- Hep3Vector b = VecOp.mult(0.5,getBoxDim());
- if(isDebug()) System.out.printf("%s: build box for %s with dimensions %s \n", this.getClass().getSimpleName(),getName(), b);
- setBox(new Box(getName() + "Box", b.x(), b.y(), b.z()));
- }
- public void buildVolume() {
- if(isDebug()) System.out.printf("%s: build volume for %s with material %s\n", this.getClass().getSimpleName(),getName(), MaterialStore.getInstance().get(getMaterial()));
- setVolume(new LogicalVolume(getName() + "_volume", box, MaterialStore.getInstance().get(getMaterial())));
-
- }
- public void setPositionAndRotation(SurveyVolume base) {
- if(isDebug()) System.out.printf("%s: set position and rotation for volume %s\n", this.getClass().getSimpleName(),getName());
-
- // no mother, this must be the world/tracking volume!?
- if(base.getMother()==null) throw new RuntimeException("trying to set coordinates w/o mother defined for "+base.getName());
-
- // Vector from origin to center of box locally
- Hep3Vector box_center_base_local = base.getCenter();
-
- // find the physical mother i.e. not a ghost volume and compound transformations to it
- JavaSurveyVolume physMother = getPhysMother();
- if(isDebug()) System.out.printf("%s: physical mother to transform to is %s; find the transform to it\n", this.getClass().getSimpleName(),physMother.getName());
- Transform3D trf = HPSTrackerBuilder.getTransform(base.getCoord().getTransformation(),base.getMother(),physMother.getName());
- if(isDebug()) System.out.printf("%s: found transform to physical mother \n%s\n\n", this.getClass().getSimpleName(),trf.toString());
-
- // find the position of the center in the physical mother coord
- Hep3Vector box_center_base = trf.transformed(box_center_base_local);
-
- // find the position of the center of the box in the mother coordinate system, make sure to use the physical mother coordinates
- if(isDebug()) System.out.printf("%s: find center of box in physical mother coord %s \n", this.getClass().getSimpleName(),physMother.getName());
- // hack since my getTransform function needs a mother TODO Fix this!
- SurveyVolume gm = base;
- if(isDebug()) System.out.printf("%s: look for physical mother %s starting from mother %s \n", this.getClass().getSimpleName(),physMother.getName(),gm.getMother()!=null?gm.getMother().getName():"-- no mother --");
- while((gm=gm.getMother()).getName()!=physMother.getName()) {
- if(isDebug()) System.out.printf("%s: gm is %s \n", this.getClass().getSimpleName(),gm.getName());
- //gm = gm.getMother();
- }
- if(isDebug()) System.out.printf("%s: found physical mother %s with center at %s \n", this.getClass().getSimpleName(),gm.getName(), gm.getCenter());
-
- Hep3Vector mother_center = gm.getCenter();
-
- // now calculate the position of this box center in the mother LCDD coordinates
- Hep3Vector box_center = VecOp.sub(box_center_base, mother_center);
-
- //Find LCDD Euler rotation angles from coordinate system unit vectors
- //Note that this has to be rotation wrt to physical mother and not just mother as normally is the case
- //Use apache lib to get angles, but in principle I should already have it from the trf above
- //Hep3Vector lcdd_rot_angles = HPSTestRunTracker2014.getEulerAngles(base.getCoord().v(), base.getCoord().w(), new BasicHep3Vector(0,1,0),new BasicHep3Vector(0,0,1));
- if(isDebug()) System.out.printf("%s: find LCDD Cardan rotation angles - need to find mother to physical mother transform \n", this.getClass().getSimpleName(),physMother.getName());
- Hep3Vector base_u = base.getCoord().u();
+ setComponentId(volumeId);
+ setMother(mother);
+ mother.addDaughter(this);
+ buildBox();
+ buildVolume();
+ setPositionAndRotation(surveyVolume);
+ if(isDebug()) System.out.printf("%s: DONE JavaBaseGeometry %s\n", this.getClass().getSimpleName(),surveyVolume.getName());
+ }
+
+ protected boolean hasCoordinateSystemInfo() {
+ return pos!=null && rot!=null;
+ }
+
+
+ public void buildPhysVolume() {
+ if(isDebug()) System.out.printf("%s: build phys volume for %s with mother %s and physical mother %s\n", this.getClass().getSimpleName(),getName(),getMother().getName(),getPhysMother().getName());
+ JavaSurveyVolume physMother = getPhysMother();
+ setPhysVolume(new PhysicalVolume(new Transform3D(getPos(), getRot()), getName(), volume, physMother.getVolume(),getComponentId()));
+ }
+
+ public void buildBox() {
+ Hep3Vector b = VecOp.mult(0.5,getBoxDim());
+ if(isDebug()) System.out.printf("%s: build box for %s with dimensions %s \n", this.getClass().getSimpleName(),getName(), b);
+ setBox(new Box(getName() + "Box", b.x(), b.y(), b.z()));
+ }
+ public void buildVolume() {
+ if(isDebug()) System.out.printf("%s: build volume for %s with material %s\n", this.getClass().getSimpleName(),getName(), MaterialStore.getInstance().get(getMaterial()));
+ setVolume(new LogicalVolume(getName() + "_volume", box, MaterialStore.getInstance().get(getMaterial())));
+
+ }
+ public void setPositionAndRotation(SurveyVolume base) {
+ if(isDebug()) System.out.printf("%s: set position and rotation for volume %s\n", this.getClass().getSimpleName(),getName());
+
+ // no mother, this must be the world/tracking volume!?
+ if(base.getMother()==null) throw new RuntimeException("trying to set coordinates w/o mother defined for "+base.getName());
+
+ // Vector from origin to center of box locally
+ Hep3Vector box_center_base_local = base.getCenter();
+
+ // find the physical mother i.e. not a ghost volume and compound transformations to it
+ JavaSurveyVolume physMother = getPhysMother();
+ if(isDebug()) System.out.printf("%s: physical mother to transform to is %s; find the transform to it\n", this.getClass().getSimpleName(),physMother.getName());
+ Transform3D trf = HPSTrackerBuilder.getTransform(base.getCoord().getTransformation(),base.getMother(),physMother.getName());
+ if(isDebug()) System.out.printf("%s: found transform to physical mother \n%s\n\n", this.getClass().getSimpleName(),trf.toString());
+
+ // find the position of the center in the physical mother coord
+ Hep3Vector box_center_base = trf.transformed(box_center_base_local);
+
+ // find the position of the center of the box in the mother coordinate system, make sure to use the physical mother coordinates
+ if(isDebug()) System.out.printf("%s: find center of box in physical mother coord %s \n", this.getClass().getSimpleName(),physMother.getName());
+ // hack since my getTransform function needs a mother TODO Fix this!
+ SurveyVolume gm = base;
+ if(isDebug()) System.out.printf("%s: look for physical mother %s starting from mother %s \n", this.getClass().getSimpleName(),physMother.getName(),gm.getMother()!=null?gm.getMother().getName():"-- no mother --");
+ while((gm=gm.getMother()).getName()!=physMother.getName()) {
+ if(isDebug()) System.out.printf("%s: gm is %s \n", this.getClass().getSimpleName(),gm.getName());
+ //gm = gm.getMother();
+ }
+ if(isDebug()) System.out.printf("%s: found physical mother %s with center at %s \n", this.getClass().getSimpleName(),gm.getName(), gm.getCenter());
+
+ Hep3Vector mother_center = gm.getCenter();
+
+ // now calculate the position of this box center in the mother LCDD coordinates
+ Hep3Vector box_center = VecOp.sub(box_center_base, mother_center);
+
+ //Find LCDD Euler rotation angles from coordinate system unit vectors
+ //Note that this has to be rotation wrt to physical mother and not just mother as normally is the case
+ //Use apache lib to get angles, but in principle I should already have it from the trf above
+ //Hep3Vector lcdd_rot_angles = HPSTestRunTracker2014.getEulerAngles(base.getCoord().v(), base.getCoord().w(), new BasicHep3Vector(0,1,0),new BasicHep3Vector(0,0,1));
+ if(isDebug()) System.out.printf("%s: find LCDD Cardan rotation angles - need to find mother to physical mother transform \n", this.getClass().getSimpleName(),physMother.getName());
+ Hep3Vector base_u = base.getCoord().u();
Hep3Vector base_v = base.getCoord().v();
Hep3Vector base_w = base.getCoord().w();
if(isDebug()) System.out.printf("%s: unit vectors in mother coord: %s, %s, %s\n", this.getClass().getSimpleName(),base_u.toString(),base_v.toString(),base_w.toString());
@@ -169,22 +168,22 @@
//System.out.printf("%s: unit vectors u %s v %s w %s\n", this.getClass().getSimpleName(),base.getCoord().u().toString(),base.getCoord().v().toString(),base.getCoord().w().toString());
}
- Hep3Vector lcdd_rot_angles = TransformationUtils.getCardanAngles(base_u, base_v, base_w, unit_u, unit_v, unit_w);
-
-
- // Create the LCDD position
- setPos(new Translation3D(box_center.x(), box_center.y(), box_center.z()));
- setRot(new RotationGeant(lcdd_rot_angles.x(), lcdd_rot_angles.y(), lcdd_rot_angles.z()));
-
- if(isDebug()) {
-
- System.out.printf("%s: SurveyVolume information for %s:\n", this.getClass().getSimpleName(), base.getName());
+ Hep3Vector lcdd_rot_angles = TransformationUtils.getCardanAngles(base_u, base_v, base_w, unit_u, unit_v, unit_w);
+
+
+ // Create the LCDD position
+ setPos(new Translation3D(box_center.x(), box_center.y(), box_center.z()));
+ setRot(new RotationGeant(lcdd_rot_angles.x(), lcdd_rot_angles.y(), lcdd_rot_angles.z()));
+
+ if(isDebug()) {
+
+ System.out.printf("%s: SurveyVolume information for %s:\n", this.getClass().getSimpleName(), base.getName());
System.out.printf("%s: box_center_base_local %s\n", this.getClass().getSimpleName(), box_center_base_local.toString());
- System.out.printf("%s: box_center_base %s\n", this.getClass().getSimpleName(), box_center_base.toString());
- System.out.printf("%s: mother center %s\n", this.getClass().getSimpleName(), base.getMother()==null?" <no mother> ":mother_center.toString());
- System.out.printf("%s: box_center %s\n", this.getClass().getSimpleName(), box_center.toString());
- System.out.printf("%s: pos %s\n", this.getClass().getSimpleName(), getPos().toString());
- Hep3Vector box_center_tracking_xcheck = HPSTrackerBuilder.transformToTracking(box_center_base_local, base);
+ System.out.printf("%s: box_center_base %s\n", this.getClass().getSimpleName(), box_center_base.toString());
+ System.out.printf("%s: mother center %s\n", this.getClass().getSimpleName(), base.getMother()==null?" <no mother> ":mother_center.toString());
+ System.out.printf("%s: box_center %s\n", this.getClass().getSimpleName(), box_center.toString());
+ System.out.printf("%s: pos %s\n", this.getClass().getSimpleName(), getPos().toString());
+ Hep3Vector box_center_tracking_xcheck = HPSTrackerBuilder.transformToTracking(box_center_base_local, base);
System.out.printf("%s: box_center_tracking_xcheck %s (for %s)\n", this.getClass().getSimpleName(), box_center_tracking_xcheck==null ? " <null> " : box_center_tracking_xcheck.toString(),base.getName());
Hep3Vector box_center_envelope_xcheck2 = HPSTrackerBuilder.transformToParent(box_center_base_local, base, "base");
System.out.printf("%s: box_center_base_xcheck2 %s (for %s)\n", this.getClass().getSimpleName(), box_center_envelope_xcheck2==null ? " <null> " : box_center_envelope_xcheck2.toString(),base.getName());
@@ -203,90 +202,90 @@
System.out.printf("%s: origin_base_in %s\n", this.getClass().getSimpleName(), origin_base_in==null ? " <null> " : origin_base_in.toString());
}
System.out.printf("%s: euler %s\n", this.getClass().getSimpleName(), lcdd_rot_angles.toString());
- System.out.printf("%s: rot %s\n", this.getClass().getSimpleName(), getRot().toString());
-
- }
-
- }
-
- /**
- * Find the first non-ghost volume among parents.
- * @return mother object
- */
- public JavaSurveyVolume getPhysMother() {
- //if(isDebug()) System.out.printf("%s: finding physical mother to %s\n", this.getClass().getSimpleName(), getName());
- if(mother==null) throw new RuntimeException("Trying to get phys mother but there is no mother!");
- if(mother instanceof JavaGhostSurveyVolume) {
- return mother.getPhysMother();
- } else {
- //if(isDebug()) System.out.printf("%s: found a non-ghost volume: %s\n", this.getClass().getSimpleName(), mother.getName());
- return mother;
- }
- }
-
-
- public ILogicalVolume getVolume() {
- return volume;
- }
- protected void setVolume(ILogicalVolume volume) {
- this.volume = volume;
- }
- protected Box getBox() {
- return box;
- }
- protected void setBox(Box b) {
- box = b;
- }
- protected ITranslation3D getPos() {
- return pos;
- }
- protected void setPos(ITranslation3D iTranslation3D) {
- this.pos = iTranslation3D;
- }
- protected IRotation3D getRot() {
- return rot;
- }
- protected void setRot(IRotation3D iRotation3D) {
- this.rot = iRotation3D;
- }
- public JavaSurveyVolume getMother() {
- return mother;
- }
- protected void setMother(JavaSurveyVolume mother) {
- this.mother = mother;
- }
- public IPhysicalVolume getPhysVolume() {
- return physVolume;
- }
- protected void setPhysVolume(PhysicalVolume physVolume) {
- this.physVolume = physVolume;
- }
-
- public List<JavaSurveyVolume> getDaughters() {
- return daughters;
- }
-
- protected void addDaughter(JavaSurveyVolume o) {
- getDaughters().add(o);
- }
-
- public int getComponentId() {
- return componentId;
- }
-
- public void setComponentId(int componentId) {
- this.componentId = componentId;
- }
-
- public String toString() {
- String s = "JavaBaseGeometry " + getName() + "\n";
- if(getPos()!=null && getRot()!=null) {
- s += "Position: " + getPos().toString() + "\n";
- s += "Rotation: " + getRot().toString() + "\n";
- } else {
- s+= " - no position/rotation info -\n";
- }
- return s;
- }
+ System.out.printf("%s: rot %s\n", this.getClass().getSimpleName(), getRot().toString());
+
+ }
+
+ }
+
+ /**
+ * Find the first non-ghost volume among parents.
+ * @return mother object
+ */
+ public JavaSurveyVolume getPhysMother() {
+ //if(isDebug()) System.out.printf("%s: finding physical mother to %s\n", this.getClass().getSimpleName(), getName());
+ if(mother==null) throw new RuntimeException("Trying to get phys mother but there is no mother!");
+ if(mother instanceof JavaGhostSurveyVolume) {
+ return mother.getPhysMother();
+ } else {
+ //if(isDebug()) System.out.printf("%s: found a non-ghost volume: %s\n", this.getClass().getSimpleName(), mother.getName());
+ return mother;
+ }
+ }
+
+
+ public ILogicalVolume getVolume() {
+ return volume;
+ }
+ protected void setVolume(ILogicalVolume volume) {
+ this.volume = volume;
+ }
+ protected Box getBox() {
+ return box;
+ }
+ protected void setBox(Box b) {
+ box = b;
+ }
+ protected ITranslation3D getPos() {
+ return pos;
+ }
+ protected void setPos(ITranslation3D iTranslation3D) {
+ this.pos = iTranslation3D;
+ }
+ protected IRotation3D getRot() {
+ return rot;
+ }
+ protected void setRot(IRotation3D iRotation3D) {
+ this.rot = iRotation3D;
+ }
+ public JavaSurveyVolume getMother() {
+ return mother;
+ }
+ protected void setMother(JavaSurveyVolume mother) {
+ this.mother = mother;
+ }
+ public IPhysicalVolume getPhysVolume() {
+ return physVolume;
+ }
+ protected void setPhysVolume(PhysicalVolume physVolume) {
+ this.physVolume = physVolume;
+ }
+
+ public List<JavaSurveyVolume> getDaughters() {
+ return daughters;
+ }
+
+ protected void addDaughter(JavaSurveyVolume o) {
+ getDaughters().add(o);
+ }
+
+ public int getComponentId() {
+ return componentId;
+ }
+
+ public void setComponentId(int componentId) {
+ this.componentId = componentId;
+ }
+
+ public String toString() {
+ String s = "JavaBaseGeometry " + getName() + "\n";
+ if(getPos()!=null && getRot()!=null) {
+ s += "Position: " + getPos().toString() + "\n";
+ s += "Rotation: " + getRot().toString() + "\n";
+ } else {
+ s+= " - no position/rotation info -\n";
+ }
+ return s;
+ }
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/LCDDGhostSurveyVolume.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/LCDDGhostSurveyVolume.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/LCDDGhostSurveyVolume.java Wed Apr 27 11:11:32 2016
@@ -10,20 +10,20 @@
*/
public class LCDDGhostSurveyVolume extends LCDDSurveyVolume {
-
-
- /**
- * Initialize with base and mother. This is typically for a reference geometry object
- * that is used for referencing coordinate systems but that doesn't have a volume itself.
- * @param base - object used to get geometry definitions
- * @param mother - mother LCDD object
- */
- public LCDDGhostSurveyVolume(SurveyVolume base, LCDDSurveyVolume mother) {
- super(base);
- if(isDebug()) System.out.printf("%s: constructing LCDD ghost object %s with mother %s\n", this.getClass().getSimpleName(),base.getName(),mother==null?"null":mother.getName());
- setMother(mother);
- mother.addDaughter(this);
- if(isDebug()) System.out.printf("%s: DONE constructing LCDD object %s\n", this.getClass().getSimpleName(),base.getName());
- }
-
+
+
+ /**
+ * Initialize with base and mother. This is typically for a reference geometry object
+ * that is used for referencing coordinate systems but that doesn't have a volume itself.
+ * @param base - object used to get geometry definitions
+ * @param mother - mother LCDD object
+ */
+ public LCDDGhostSurveyVolume(SurveyVolume base, LCDDSurveyVolume mother) {
+ super(base);
+ if(isDebug()) System.out.printf("%s: constructing LCDD ghost object %s with mother %s\n", this.getClass().getSimpleName(),base.getName(),mother==null?"null":mother.getName());
+ setMother(mother);
+ mother.addDaughter(this);
+ if(isDebug()) System.out.printf("%s: DONE constructing LCDD object %s\n", this.getClass().getSimpleName(),base.getName());
+ }
+
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/LCDDSurveyVolume.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/LCDDSurveyVolume.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/LCDDSurveyVolume.java Wed Apr 27 11:11:32 2016
@@ -25,233 +25,233 @@
* @author Per Hansson Adrian <[log in to unmask]>
*/
public class LCDDSurveyVolume extends SurveyVolumeImpl {
- Box box= null;
- Volume volume = null;
- private Position pos = null;
- private Rotation rot = null;
- private PhysVol physVolume = null;
- LCDD lcdd = null;
- private LCDDSurveyVolume mother = null;
- protected Map<String,Integer> physVolId = null;
- public List<LCDDSurveyVolume> daughters = new ArrayList<LCDDSurveyVolume>();
- /**
- * Default constructor
- * @param surveyVolume - core geometry definitions
- */
- public LCDDSurveyVolume(SurveyVolume surveyVolume) {
- super(surveyVolume);
- }
-
- /**
- * Initialize this object with a known volume and no mother. Typically the world volume would use this.
- * @param surveyVolume - core geometry definitions
- * @param vol - given volume
- */
- public LCDDSurveyVolume(SurveyVolume surveyVolume, Volume volume) {
- super(surveyVolume);
- if(isDebug()) System.out.printf("%s: constructing LCDD object %s with volume name %s\n", this.getClass().getSimpleName(),surveyVolume.getName(),volume.getName());
- setVolume(volume);
- if(isDebug()) System.out.printf("%s: DONE constructing LCDD object %s\n", this.getClass().getSimpleName(),surveyVolume.getName());
- Hep3Vector lcdd_rot_angles = TransformationUtils.getCardanAngles(surveyVolume.getCoord().v(), surveyVolume.getCoord().w(), new BasicHep3Vector(0,1,0),new BasicHep3Vector(0,0,1));
- setPos(new Position(getName() + "_position", 0, 0, 0));
- setRot(new Rotation(getName() + "_rotation",lcdd_rot_angles.x(), lcdd_rot_angles.y(), lcdd_rot_angles.z()));
- if(isDebug()) System.out.printf("%s: DONE %s\n", this.getClass().getSimpleName(),surveyVolume.getName());
- }
-
- /**
- * Interface to the LCDD converter geometry for the geometry definition.
- * @param surveyVolume - core geometry definition
- * @param lcdd - lcdd file
- * @param mother - reference to mother LCDD definition
- */
- public LCDDSurveyVolume(SurveyVolume surveyVolume, LCDD lcdd, LCDDSurveyVolume mother) {
- super(surveyVolume);
- if(isDebug()) System.out.printf("%s: constructing LCDD object %s with mother %s\n", this.getClass().getSimpleName(),surveyVolume.getName(),mother==null?"null":mother.getName());
- this.lcdd = lcdd;
- setMother(mother);
- mother.addDaughter(this);
- buildBox();
- buildVolume();
- setPositionAndRotation(surveyVolume);
- //buildPhysVolume(mother);
- if(isDebug()) System.out.printf("%s: DONE constructing LCDD object %s\n", this.getClass().getSimpleName(),surveyVolume.getName());
- }
-
-
- public void buildPhysVolume() {
-
- if(isDebug()) System.out.printf("%s: build phys volume for %s with mother %s and physical mother %s\n", this.getClass().getSimpleName(),getName(),getMother().getName(),getPhysMother().getName());
- LCDDSurveyVolume physMother = getPhysMother();
- setPhysVolume(new PhysVol(volume, physMother.getVolume(), getPos(), getRot()));
- //if(isDebug()) System.out.printf("%s: build phys volume for %s\n", this.getClass().getSimpleName(),getName());
- //setPhysVolume(new PhysVol(volume, getMother().getVolume(), getPos(), getRot()));
- }
- public void buildBox() {
- if(isDebug()) System.out.printf("%s: build box for %s\n", getClass().getSimpleName(),getName());
- setBox(new Box(getName() + "Box", getBoxDim().x(), getBoxDim().y(), getBoxDim().z()));
- }
- public void buildVolume() {
- if(isDebug()) System.out.printf("%s: build volume for %s with material %s\n", this.getClass().getSimpleName(),getName(),getMaterial());
- try {
- Material mat = lcdd.getMaterial(getMaterial());
- setVolume(new Volume(getName() + "_volume", box, mat));
- } catch (JDOMException e) {
- e.printStackTrace();
- }
- }
-
-
- public void setPositionAndRotation(SurveyVolume base) {
- if(isDebug()) System.out.printf("%s: set position and rotation for volume %s\n", this.getClass().getSimpleName(),getName());
-
- // NOTE:
- // This sets position and reference w.r.t. mother coordinate system.
- // If I'm not building that volume this will be wrong.
- // TODO Similar to in the JAVA converter this should be something like the physical mother.
-
- if(base.getMother()==null) throw new RuntimeException("trying to set coordinates w/o mother defined for "+base.getName());
-
- // Vector from origin to center of box locally
- Hep3Vector box_center_base_local = base.getCenter();
-
- //translate to the mother coordinate system
- LCDDSurveyVolume physMother = getPhysMother();
- if(isDebug()) System.out.printf("%s: physical mother to transform to is %s; find the transform to it\n", this.getClass().getSimpleName(),physMother.getName());
- Transform3D trf = HPSTrackerBuilder.getTransform(base.getCoord().getTransformation(),base.getMother(),physMother.getName());
- if(isDebug()) System.out.printf("%s: found transform to physical mother \n%s\n\n", this.getClass().getSimpleName(),trf.toString());
-
- // find the position of the center in the physical mother coord
- Hep3Vector box_center_base = trf.transformed(box_center_base_local);
-
- // find the position of the center of the box in the mother coordinate system, make sure to use the physical mother coordinates
- if(isDebug()) System.out.printf("%s: find center of box in physical mother coord %s \n", this.getClass().getSimpleName(),physMother.getName());
- // hack since my getTransform function needs a mother TODO Fix this!
- SurveyVolume gm = base;
- if(isDebug()) System.out.printf("%s: look for physical mother %s starting from mother %s \n", this.getClass().getSimpleName(),physMother.getName(),gm.getMother()!=null?gm.getMother().getName():"-- no mother --");
- while((gm=gm.getMother()).getName()!=physMother.getName()) {
- if(isDebug()) System.out.printf("%s: gm is %s \n", this.getClass().getSimpleName(),gm.getName());
- //gm = gm.getMother();
- }
- if(isDebug()) System.out.printf("%s: found physical mother %s with center at %s \n", this.getClass().getSimpleName(),gm.getName(), gm.getCenter());
-
- Hep3Vector mother_center = gm.getCenter();
-
- // find the position of the center in the mother coord
- Hep3Vector box_center = VecOp.sub(box_center_base, mother_center);
-
- //Find LCDD Euler rotation angles from coordinate system unit vectors
- //Note that this has to be rotation wrt to physical mother and not just mother as normally is the case
- if(isDebug()) System.out.printf("%s: find LCDD Cardan rotation angles - need to find mother to physical mother transform \n", this.getClass().getSimpleName(),physMother.getName());
- Hep3Vector base_u = base.getCoord().u();
- Hep3Vector base_v = base.getCoord().v();
- Hep3Vector base_w = base.getCoord().w();
+ Box box= null;
+ Volume volume = null;
+ private Position pos = null;
+ private Rotation rot = null;
+ private PhysVol physVolume = null;
+ LCDD lcdd = null;
+ private LCDDSurveyVolume mother = null;
+ protected Map<String,Integer> physVolId = null;
+ public List<LCDDSurveyVolume> daughters = new ArrayList<LCDDSurveyVolume>();
+ /**
+ * Default constructor
+ * @param surveyVolume - core geometry definitions
+ */
+ public LCDDSurveyVolume(SurveyVolume surveyVolume) {
+ super(surveyVolume);
+ }
+
+ /**
+ * Initialize this object with a known volume and no mother. Typically the world volume would use this.
+ * @param surveyVolume - core geometry definitions
+ * @param volume - given volume
+ */
+ public LCDDSurveyVolume(SurveyVolume surveyVolume, Volume volume) {
+ super(surveyVolume);
+ if(isDebug()) System.out.printf("%s: constructing LCDD object %s with volume name %s\n", this.getClass().getSimpleName(),surveyVolume.getName(),volume.getName());
+ setVolume(volume);
+ if(isDebug()) System.out.printf("%s: DONE constructing LCDD object %s\n", this.getClass().getSimpleName(),surveyVolume.getName());
+ Hep3Vector lcdd_rot_angles = TransformationUtils.getCardanAngles(surveyVolume.getCoord().v(), surveyVolume.getCoord().w(), new BasicHep3Vector(0,1,0),new BasicHep3Vector(0,0,1));
+ setPos(new Position(getName() + "_position", 0, 0, 0));
+ setRot(new Rotation(getName() + "_rotation",lcdd_rot_angles.x(), lcdd_rot_angles.y(), lcdd_rot_angles.z()));
+ if(isDebug()) System.out.printf("%s: DONE %s\n", this.getClass().getSimpleName(),surveyVolume.getName());
+ }
+
+ /**
+ * Interface to the LCDD converter geometry for the geometry definition.
+ * @param surveyVolume - core geometry definition
+ * @param lcdd - lcdd file
+ * @param mother - reference to mother LCDD definition
+ */
+ public LCDDSurveyVolume(SurveyVolume surveyVolume, LCDD lcdd, LCDDSurveyVolume mother) {
+ super(surveyVolume);
+ if(isDebug()) System.out.printf("%s: constructing LCDD object %s with mother %s\n", this.getClass().getSimpleName(),surveyVolume.getName(),mother==null?"null":mother.getName());
+ this.lcdd = lcdd;
+ setMother(mother);
+ mother.addDaughter(this);
+ buildBox();
+ buildVolume();
+ setPositionAndRotation(surveyVolume);
+ //buildPhysVolume(mother);
+ if(isDebug()) System.out.printf("%s: DONE constructing LCDD object %s\n", this.getClass().getSimpleName(),surveyVolume.getName());
+ }
+
+
+ public void buildPhysVolume() {
+
+ if(isDebug()) System.out.printf("%s: build phys volume for %s with mother %s and physical mother %s\n", this.getClass().getSimpleName(),getName(),getMother().getName(),getPhysMother().getName());
+ LCDDSurveyVolume physMother = getPhysMother();
+ setPhysVolume(new PhysVol(volume, physMother.getVolume(), getPos(), getRot()));
+ //if(isDebug()) System.out.printf("%s: build phys volume for %s\n", this.getClass().getSimpleName(),getName());
+ //setPhysVolume(new PhysVol(volume, getMother().getVolume(), getPos(), getRot()));
+ }
+ public void buildBox() {
+ if(isDebug()) System.out.printf("%s: build box for %s\n", getClass().getSimpleName(),getName());
+ setBox(new Box(getName() + "Box", getBoxDim().x(), getBoxDim().y(), getBoxDim().z()));
+ }
+ public void buildVolume() {
+ if(isDebug()) System.out.printf("%s: build volume for %s with material %s\n", this.getClass().getSimpleName(),getName(),getMaterial());
+ try {
+ Material mat = lcdd.getMaterial(getMaterial());
+ setVolume(new Volume(getName() + "_volume", box, mat));
+ } catch (JDOMException e) {
+ e.printStackTrace();
+ }
+ }
+
+
+ public void setPositionAndRotation(SurveyVolume base) {
+ if(isDebug()) System.out.printf("%s: set position and rotation for volume %s\n", this.getClass().getSimpleName(),getName());
+
+ // NOTE:
+ // This sets position and reference w.r.t. mother coordinate system.
+ // If I'm not building that volume this will be wrong.
+ // TODO Similar to in the JAVA converter this should be something like the physical mother.
+
+ if(base.getMother()==null) throw new RuntimeException("trying to set coordinates w/o mother defined for "+base.getName());
+
+ // Vector from origin to center of box locally
+ Hep3Vector box_center_base_local = base.getCenter();
+
+ //translate to the mother coordinate system
+ LCDDSurveyVolume physMother = getPhysMother();
+ if(isDebug()) System.out.printf("%s: physical mother to transform to is %s; find the transform to it\n", this.getClass().getSimpleName(),physMother.getName());
+ Transform3D trf = HPSTrackerBuilder.getTransform(base.getCoord().getTransformation(),base.getMother(),physMother.getName());
+ if(isDebug()) System.out.printf("%s: found transform to physical mother \n%s\n\n", this.getClass().getSimpleName(),trf.toString());
+
+ // find the position of the center in the physical mother coord
+ Hep3Vector box_center_base = trf.transformed(box_center_base_local);
+
+ // find the position of the center of the box in the mother coordinate system, make sure to use the physical mother coordinates
+ if(isDebug()) System.out.printf("%s: find center of box in physical mother coord %s \n", this.getClass().getSimpleName(),physMother.getName());
+ // hack since my getTransform function needs a mother TODO Fix this!
+ SurveyVolume gm = base;
+ if(isDebug()) System.out.printf("%s: look for physical mother %s starting from mother %s \n", this.getClass().getSimpleName(),physMother.getName(),gm.getMother()!=null?gm.getMother().getName():"-- no mother --");
+ while((gm=gm.getMother()).getName()!=physMother.getName()) {
+ if(isDebug()) System.out.printf("%s: gm is %s \n", this.getClass().getSimpleName(),gm.getName());
+ //gm = gm.getMother();
+ }
+ if(isDebug()) System.out.printf("%s: found physical mother %s with center at %s \n", this.getClass().getSimpleName(),gm.getName(), gm.getCenter());
+
+ Hep3Vector mother_center = gm.getCenter();
+
+ // find the position of the center in the mother coord
+ Hep3Vector box_center = VecOp.sub(box_center_base, mother_center);
+
+ //Find LCDD Euler rotation angles from coordinate system unit vectors
+ //Note that this has to be rotation wrt to physical mother and not just mother as normally is the case
+ if(isDebug()) System.out.printf("%s: find LCDD Cardan rotation angles - need to find mother to physical mother transform \n", this.getClass().getSimpleName(),physMother.getName());
+ Hep3Vector base_u = base.getCoord().u();
+ Hep3Vector base_v = base.getCoord().v();
+ Hep3Vector base_w = base.getCoord().w();
if(isDebug()) System.out.printf("%s: unit vectors in mother coord: %s, %s, %s\n", this.getClass().getSimpleName(),base_u.toString(),base_v.toString(),base_w.toString());
- Hep3Vector unit_u = new BasicHep3Vector(1,0,0);
- Hep3Vector unit_v = new BasicHep3Vector(0,1,0);
- Hep3Vector unit_w = new BasicHep3Vector(0,0,1);
- if(!base.getMother().getName().equals(physMother.getName())) {
- if(isDebug()) System.out.printf("%s: Need to get unit vectors in physical mother %s coord system\n", this.getClass().getSimpleName(),physMother.getName());
+ Hep3Vector unit_u = new BasicHep3Vector(1,0,0);
+ Hep3Vector unit_v = new BasicHep3Vector(0,1,0);
+ Hep3Vector unit_w = new BasicHep3Vector(0,0,1);
+ if(!base.getMother().getName().equals(physMother.getName())) {
+ if(isDebug()) System.out.printf("%s: Need to get unit vectors in physical mother %s coord system\n", this.getClass().getSimpleName(),physMother.getName());
Transform3D trf_mother = HPSTrackerBuilder.getTransform(base.getMother().getCoord().getTransformation(),base.getMother().getMother(),physMother.getName());
if(isDebug()) System.out.printf("%s: found transform from mother to physical mother \n%s\n", this.getClass().getSimpleName(),trf_mother.toString());
- //unit_u = VecOp.unit(trf_mother.rotated(unit_u));
- //unit_v = VecOp.unit(trf_mother.rotated(unit_v));
- //unit_w = VecOp.unit(trf_mother.rotated(unit_w));
- base_u = VecOp.unit(trf_mother.rotated(base_u));
+ //unit_u = VecOp.unit(trf_mother.rotated(unit_u));
+ //unit_v = VecOp.unit(trf_mother.rotated(unit_v));
+ //unit_w = VecOp.unit(trf_mother.rotated(unit_w));
+ base_u = VecOp.unit(trf_mother.rotated(base_u));
base_v = VecOp.unit(trf_mother.rotated(base_v));
base_w = VecOp.unit(trf_mother.rotated(base_w));
- } else {
- if(isDebug()) System.out.printf("%s: mother and physical mother is the same so unit vectors didn't change\n",getClass().getSimpleName());
- }
-
- if(isDebug()) {
- if(isDebug()) System.out.printf("%s: final unit vectors to get Cardan angles from : \n%s, %s, %s -> %s, %s, %s \n",
- this.getClass().getSimpleName(),
- base_u.toString(),base_v.toString(),base_w.toString(),
- unit_u.toString(),unit_v.toString(),unit_w.toString());
- //System.out.printf("%s: unit vectors u %s v %s w %s\n", this.getClass().getSimpleName(),base.getCoord().u().toString(),base.getCoord().v().toString(),base.getCoord().w().toString());
- }
+ } else {
+ if(isDebug()) System.out.printf("%s: mother and physical mother is the same so unit vectors didn't change\n",getClass().getSimpleName());
+ }
+
+ if(isDebug()) {
+ if(isDebug()) System.out.printf("%s: final unit vectors to get Cardan angles from : \n%s, %s, %s -> %s, %s, %s \n",
+ this.getClass().getSimpleName(),
+ base_u.toString(),base_v.toString(),base_w.toString(),
+ unit_u.toString(),unit_v.toString(),unit_w.toString());
+ //System.out.printf("%s: unit vectors u %s v %s w %s\n", this.getClass().getSimpleName(),base.getCoord().u().toString(),base.getCoord().v().toString(),base.getCoord().w().toString());
+ }
Hep3Vector lcdd_rot_angles = TransformationUtils.getCardanAngles(base_u, base_v, base_w, unit_u, unit_v, unit_w);
-
- // Create the LCDD position and rotation
- setPos(new Position(getName() + "_position",box_center.x(), box_center.y(), box_center.z()));
- setRot(new Rotation(getName() + "_rotation",lcdd_rot_angles.x(), lcdd_rot_angles.y(), lcdd_rot_angles.z()));
-
- if(isDebug()) {
- System.out.printf("%s: SurveyVolume information for %s:\n", this.getClass().getSimpleName(), base.getName());
+
+ // Create the LCDD position and rotation
+ setPos(new Position(getName() + "_position",box_center.x(), box_center.y(), box_center.z()));
+ setRot(new Rotation(getName() + "_rotation",lcdd_rot_angles.x(), lcdd_rot_angles.y(), lcdd_rot_angles.z()));
+
+ if(isDebug()) {
+ System.out.printf("%s: SurveyVolume information for %s:\n", this.getClass().getSimpleName(), base.getName());
System.out.printf("%s: box_center_base_local %s\n", this.getClass().getSimpleName(), box_center_base_local.toString());
- System.out.printf("%s: box_center_base %s\n", this.getClass().getSimpleName(), box_center_base.toString());
- System.out.printf("%s: mother center %s\n", this.getClass().getSimpleName(), mother_center.toString());
- System.out.printf("%s: box_center %s\n", this.getClass().getSimpleName(), box_center.toString());
- System.out.printf("%s: pos %s\n", this.getClass().getSimpleName(), getPos().toString());
- System.out.printf("%s: euler %s\n", this.getClass().getSimpleName(), lcdd_rot_angles.toString());
+ System.out.printf("%s: box_center_base %s\n", this.getClass().getSimpleName(), box_center_base.toString());
+ System.out.printf("%s: mother center %s\n", this.getClass().getSimpleName(), mother_center.toString());
+ System.out.printf("%s: box_center %s\n", this.getClass().getSimpleName(), box_center.toString());
+ System.out.printf("%s: pos %s\n", this.getClass().getSimpleName(), getPos().toString());
+ System.out.printf("%s: euler %s\n", this.getClass().getSimpleName(), lcdd_rot_angles.toString());
System.out.printf("%s: rot %s\n", this.getClass().getSimpleName(), getRot().toString());
-
- //calculate the position in tracking volume separately as a xcheck
- Hep3Vector box_center_tracking_xcheck = HPSTrackerBuilder.transformToTracking(box_center_base_local, base);
- System.out.printf("%s: box_center_tracking_xcheck %s (for %s)\n", this.getClass().getSimpleName(), box_center_tracking_xcheck.toString(), base.getName());
- }
-
- }
- /**
- * Find the first non-ghost volume among parents.
- * @return mother object
- */
- public LCDDSurveyVolume getPhysMother() {
- //if(isDebug()) System.out.printf("%s: finding physical mother to %s\n", this.getClass().getSimpleName(), getName());
- if(mother==null) throw new RuntimeException("Trying to get phys mother but there is no mother!");
- if(mother instanceof LCDDGhostSurveyVolume) {
- return mother.getPhysMother();
- } else {
- //if(isDebug()) System.out.printf("%s: found a non-ghost volume: %s\n", this.getClass().getSimpleName(), mother.getName());
- return mother;
- }
- }
-
- public Volume getVolume() {
- return volume;
- }
- public void setVolume(Volume volume) {
- this.volume = volume;
- }
- public Box getBox() {
- return box;
- }
- public void setBox(Box b) {
- box = b;
- }
- public Position getPos() {
- return pos;
- }
- public void setPos(Position pos) {
- this.pos = pos;
- }
- public Rotation getRot() {
- return rot;
- }
- public void setRot(Rotation rot) {
- this.rot = rot;
- }
- public LCDDSurveyVolume getMother() {
- return mother;
- }
- public void setMother(LCDDSurveyVolume mother) {
- this.mother = mother;
- }
- public PhysVol getPhysVolume() {
- return physVolume;
- }
- public void setPhysVolume(PhysVol physVolume) {
- this.physVolume = physVolume;
- }
- public List<LCDDSurveyVolume> getDaughters() {
- return daughters;
- }
- public void addDaughter(LCDDSurveyVolume o) {
- getDaughters().add(o);
- }
- public String toString() {
+
+ //calculate the position in tracking volume separately as a xcheck
+ Hep3Vector box_center_tracking_xcheck = HPSTrackerBuilder.transformToTracking(box_center_base_local, base);
+ System.out.printf("%s: box_center_tracking_xcheck %s (for %s)\n", this.getClass().getSimpleName(), box_center_tracking_xcheck.toString(), base.getName());
+ }
+
+ }
+ /**
+ * Find the first non-ghost volume among parents.
+ * @return mother object
+ */
+ public LCDDSurveyVolume getPhysMother() {
+ //if(isDebug()) System.out.printf("%s: finding physical mother to %s\n", this.getClass().getSimpleName(), getName());
+ if(mother==null) throw new RuntimeException("Trying to get phys mother but there is no mother!");
+ if(mother instanceof LCDDGhostSurveyVolume) {
+ return mother.getPhysMother();
+ } else {
+ //if(isDebug()) System.out.printf("%s: found a non-ghost volume: %s\n", this.getClass().getSimpleName(), mother.getName());
+ return mother;
+ }
+ }
+
+ public Volume getVolume() {
+ return volume;
+ }
+ public void setVolume(Volume volume) {
+ this.volume = volume;
+ }
+ public Box getBox() {
+ return box;
+ }
+ public void setBox(Box b) {
+ box = b;
+ }
+ public Position getPos() {
+ return pos;
+ }
+ public void setPos(Position pos) {
+ this.pos = pos;
+ }
+ public Rotation getRot() {
+ return rot;
+ }
+ public void setRot(Rotation rot) {
+ this.rot = rot;
+ }
+ public LCDDSurveyVolume getMother() {
+ return mother;
+ }
+ public void setMother(LCDDSurveyVolume mother) {
+ this.mother = mother;
+ }
+ public PhysVol getPhysVolume() {
+ return physVolume;
+ }
+ public void setPhysVolume(PhysVol physVolume) {
+ this.physVolume = physVolume;
+ }
+ public List<LCDDSurveyVolume> getDaughters() {
+ return daughters;
+ }
+ public void addDaughter(LCDDSurveyVolume o) {
+ getDaughters().add(o);
+ }
+ public String toString() {
String s = getClass().getSimpleName() +": " + getName() + "\n";
if(getPos()!=null && getRot()!=null) {
double x = Double.valueOf(getPos().getAttributeValue("x"));
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/MilleParameter.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/MilleParameter.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/MilleParameter.java Wed Apr 27 11:11:32 2016
@@ -9,113 +9,113 @@
public class MilleParameter {
private static double corrScaleFactor = -1.;
- private int id;
- private double value;
- private double presigma;
- private static final Map<Integer,String> dMap;
- private static final Map<Integer,String> tMap;
- private static final Map<Integer,String> hMap;
- static {
- dMap = new HashMap<Integer,String>();
- dMap.put(1, "x");dMap.put(2, "y"); dMap.put(3, "z");
- tMap = new HashMap<Integer,String>();
- tMap.put(1, "");tMap.put(2, "r");
- hMap = new HashMap<Integer,String>();
- hMap.put(1, "t");hMap.put(2, "b");
- }
- public static final int half_offset = 10000;
- public static final int type_offset = 1000;
- public static final int dimension_offset = 100;
- public static enum Type {
- TRANSLATION(1), ROTATION(2);
- private int value;
- private Type(int value) {this.value = value;}
- public int getType() {return this.value;}
- };
-
- public MilleParameter(String line) {
- String[] vals = StringUtils.split(line);// line.split("\\s+");
- if(vals.length <3) {
- System.out.println("this line is ill-formatted (" + vals.length + ")");
- System.out.println(line);
- System.exit(1);
- }
- try {
- //for(String v : vals) System.out.println("\"" + v + "\"");
- setId(Integer.parseInt(vals[0]));
- setValue( corrScaleFactor * Double.parseDouble(vals[1]) );
- setPresigma(Double.parseDouble(vals[2]));
-
- } catch (NumberFormatException e) {
- System.out.println(vals[0] + " " + vals[1] + " " + vals[2]);
- throw new RuntimeException("problem parsing string ", e);
- }
- }
-
- public MilleParameter(int id, double value, double presigma) {
- setId(id);
- setValue(value);
- setPresigma(presigma);
- }
-
- public String getXMLName() {
- String d = dMap.get(getDim());
- String t = tMap.get(getType());
- String h = hMap.get(getHalf());
- int s = getSensor();
- return String.format("%s%s%d%s_align", t,d,s,h);
-
- }
+ private int id;
+ private double value;
+ private double presigma;
+ private static final Map<Integer,String> dMap;
+ private static final Map<Integer,String> tMap;
+ private static final Map<Integer,String> hMap;
+ static {
+ dMap = new HashMap<Integer,String>();
+ dMap.put(1, "x");dMap.put(2, "y"); dMap.put(3, "z");
+ tMap = new HashMap<Integer,String>();
+ tMap.put(1, "");tMap.put(2, "r");
+ hMap = new HashMap<Integer,String>();
+ hMap.put(1, "t");hMap.put(2, "b");
+ }
+ public static final int half_offset = 10000;
+ public static final int type_offset = 1000;
+ public static final int dimension_offset = 100;
+ public static enum Type {
+ TRANSLATION(1), ROTATION(2);
+ private int value;
+ private Type(int value) {this.value = value;}
+ public int getType() {return this.value;}
+ };
+
+ public MilleParameter(String line) {
+ String[] vals = StringUtils.split(line);// line.split("\\s+");
+ if(vals.length <3) {
+ System.out.println("this line is ill-formatted (" + vals.length + ")");
+ System.out.println(line);
+ System.exit(1);
+ }
+ try {
+ //for(String v : vals) System.out.println("\"" + v + "\"");
+ setId(Integer.parseInt(vals[0]));
+ setValue( corrScaleFactor * Double.parseDouble(vals[1]) );
+ setPresigma(Double.parseDouble(vals[2]));
+
+ } catch (NumberFormatException e) {
+ System.out.println(vals[0] + " " + vals[1] + " " + vals[2]);
+ throw new RuntimeException("problem parsing string ", e);
+ }
+ }
+
+ public MilleParameter(int id, double value, double presigma) {
+ setId(id);
+ setValue(value);
+ setPresigma(presigma);
+ }
+
+ public String getXMLName() {
+ String d = dMap.get(getDim());
+ String t = tMap.get(getType());
+ String h = hMap.get(getHalf());
+ int s = getSensor();
+ return String.format("%s%s%d%s_align", t,d,s,h);
+
+ }
- public int getDim() {
- int h = (int) (getHalf() * half_offset);
- int t = (int) (getType() * type_offset);
- return (int) Math.floor((id- h -t)/(double)dimension_offset);
- }
-
- public int getSensor() {
- int h = (int) (getHalf() * half_offset);
- int t = (int) (getType() * type_offset);
- int d = (int) (getDim() * dimension_offset);
- return (id - h - t -d);
- }
+ public int getDim() {
+ int h = (int) (getHalf() * half_offset);
+ int t = (int) (getType() * type_offset);
+ return (int) Math.floor((id- h -t)/(double)dimension_offset);
+ }
+
+ public int getSensor() {
+ int h = (int) (getHalf() * half_offset);
+ int t = (int) (getType() * type_offset);
+ int d = (int) (getDim() * dimension_offset);
+ return (id - h - t -d);
+ }
- public int getType() {
- int h = (int) (getHalf() * half_offset);
- return (int) Math.floor((id -h)/(double)type_offset);
- }
+ public int getType() {
+ int h = (int) (getHalf() * half_offset);
+ return (int) Math.floor((id -h)/(double)type_offset);
+ }
- public int getHalf() {
- return (int)Math.floor(id/(double)half_offset);
- }
+ public int getHalf() {
+ return (int)Math.floor(id/(double)half_offset);
+ }
- public int getId() {
- return id;
- }
+ public int getId() {
+ return id;
+ }
- public void setId(int id) {
- this.id = id;
- }
+ public void setId(int id) {
+ this.id = id;
+ }
- public double getValue() {
- return value;
- }
+ public double getValue() {
+ return value;
+ }
- public void setValue(double value) {
- this.value = value;
- }
+ public void setValue(double value) {
+ this.value = value;
+ }
- public double getPresigma() {
- return presigma;
- }
+ public double getPresigma() {
+ return presigma;
+ }
- public void setPresigma(double presigma) {
- this.presigma = presigma;
- }
-
- public String toString() {
- return String.format("Milleparameter id=%d half=%d type=%d dim=%d sensor=%d value=%f", this.getId(), this.getHalf(), this.getType(), this.getDim(), this.getSensor(), this.getValue());
- }
+ public void setPresigma(double presigma) {
+ this.presigma = presigma;
+ }
+
+ public String toString() {
+ return String.format("Milleparameter id=%d half=%d type=%d dim=%d sensor=%d value=%f", this.getId(), this.getHalf(), this.getType(), this.getDim(), this.getSensor(), this.getValue());
+ }
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyCoordinateSystem.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyCoordinateSystem.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyCoordinateSystem.java Wed Apr 27 11:11:32 2016
@@ -13,26 +13,26 @@
import org.lcsim.detector.Translation3D;
/**
- * Class describing a simple coordinate system used to define the {@link SurveyVolume}.
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- */
- public class SurveyCoordinateSystem {
- private final boolean debug = false;
- private Hep3Vector origin;
- private Hep3Vector u;
- private Hep3Vector v;
- private Hep3Vector w;
+ * Class describing a simple coordinate system used to define the {@link SurveyVolume}.
+ *
+ * @author Per Hansson Adrian <[log in to unmask]>
+ */
+ public class SurveyCoordinateSystem {
+ private final boolean debug = false;
+ private Hep3Vector origin;
+ private Hep3Vector u;
+ private Hep3Vector v;
+ private Hep3Vector w;
-// public SurveyCoordinateSystem(Hep3Vector org, Hep3Vector unit_x, Hep3Vector unit_y, Hep3Vector unit_z) {
-// origin = org;
-// u = unit_x;
-// v = unit_y;
-// w = unit_z;
-// }
+// public SurveyCoordinateSystem(Hep3Vector org, Hep3Vector unit_x, Hep3Vector unit_y, Hep3Vector unit_z) {
+// origin = org;
+// u = unit_x;
+// v = unit_y;
+// w = unit_z;
+// }
-
- public SurveyCoordinateSystem(Hep3Vector ball, Hep3Vector vee, Hep3Vector flat) {
+
+ public SurveyCoordinateSystem(Hep3Vector ball, Hep3Vector vee, Hep3Vector flat) {
origin = ball;
Hep3Vector ball_to_vee = VecOp.sub(vee, ball);
u = VecOp.unit(ball_to_vee);
@@ -41,61 +41,61 @@
v = VecOp.cross(w, u);
check();
}
-
- private void check() {
- checkUnitLength();
- checkAngles();
- }
+
+ private void check() {
+ checkUnitLength();
+ checkAngles();
+ }
- private void checkUnitLength() {
- if(u.magnitude()-1>0.00001 || v.magnitude()-1>0.00001 || v.magnitude()-1>0.00001) {
- throw new RuntimeException("Error: the unit vectors of the coordinate system is ill-defined " + toString());
- }
- }
+ private void checkUnitLength() {
+ if(u.magnitude()-1>0.00001 || v.magnitude()-1>0.00001 || v.magnitude()-1>0.00001) {
+ throw new RuntimeException("Error: the unit vectors of the coordinate system is ill-defined " + toString());
+ }
+ }
- private void checkAngles() {
- if( (VecOp.dot(u, v)-1)>0.00001 || (VecOp.dot(u, w)-1)>0.00001 || (VecOp.dot(v, w)-1)>0.00001 ) {
- throw new RuntimeException("Error: the angles in coordinate system is ill-defined " + toString());
- }
- }
-
-
- /**
- * Transform this coordinate system to another one.
- * @param t
- */
- public void transform(Transform3D t) {
- Transform3D t_this = getTransformation();
- Hep3Vector v = t_this.getTranslation().getTranslationVector();
- Hep3Vector vrot = t.rotated(v);
- Hep3Vector vrottrans = t.translated(vrot);
- origin = vrottrans;
+ private void checkAngles() {
+ if( (VecOp.dot(u, v)-1)>0.00001 || (VecOp.dot(u, w)-1)>0.00001 || (VecOp.dot(v, w)-1)>0.00001 ) {
+ throw new RuntimeException("Error: the angles in coordinate system is ill-defined " + toString());
+ }
+ }
+
+
+ /**
+ * Transform this coordinate system to another one.
+ * @param t
+ */
+ public void transform(Transform3D t) {
+ Transform3D t_this = getTransformation();
+ Hep3Vector v = t_this.getTranslation().getTranslationVector();
+ Hep3Vector vrot = t.rotated(v);
+ Hep3Vector vrottrans = t.translated(vrot);
+ origin = vrottrans;
rotate(t.getRotation());
//System.out.printf("monkey transform\n");
//System.out.printf("v %s\n",v.toString());
//System.out.printf("vrot %s\n",vrot.toString());
//System.out.printf("vrottrans %s\n",vrottrans.toString());
- check();
- }
-
- public void rotate(IRotation3D r) {
- r.rotate(u);
- r.rotate(v);
- r.rotate(w);
- }
+ check();
+ }
+
+ public void rotate(IRotation3D r) {
+ r.rotate(u);
+ r.rotate(v);
+ r.rotate(w);
+ }
- public void translate(Hep3Vector translation) {
- // update origin with local translation in u,v,w
- //origin = VecOp.add(origin, translation);
- translate(new Translation3D(translation));
- }
+ public void translate(Hep3Vector translation) {
+ // update origin with local translation in u,v,w
+ //origin = VecOp.add(origin, translation);
+ translate(new Translation3D(translation));
+ }
- public void translate(Translation3D t) {
- origin = t.translated(getTransformation().getTranslation().getTranslationVector());
- }
+ public void translate(Translation3D t) {
+ origin = t.translated(getTransformation().getTranslation().getTranslationVector());
+ }
-
- public void rotateApache(Rotation r) {
+
+ public void rotateApache(Rotation r) {
if(debug) System.out.printf("%s: apply apache rotation to this coord system\n%s\n", getClass().getSimpleName(),toString());
this.u = new BasicHep3Vector(r.applyTo(new Vector3D(u.v())).toArray());
this.v = new BasicHep3Vector(r.applyTo(new Vector3D(v.v())).toArray());
@@ -104,52 +104,52 @@
}
public Hep3Vector origin() {
- return origin;
- }
- public Hep3Vector u() {
- return u;
- }
- public Hep3Vector v() {
- return v;
- }
- public Hep3Vector w() {
- return w;
- }
- public void u(Hep3Vector vec) {
+ return origin;
+ }
+ public Hep3Vector u() {
+ return u;
+ }
+ public Hep3Vector v() {
+ return v;
+ }
+ public Hep3Vector w() {
+ return w;
+ }
+ public void u(Hep3Vector vec) {
u = vec;
- }
+ }
public void v(Hep3Vector vec) {
v = vec;
}
public void w(Hep3Vector vec) {
w = vec;
}
-
+
public String toString() {
- String str = "origin " + origin.toString() + "\nu " + u.toString() + "\nv " + v.toString() + "\nw " + w.toString();
- return str;
+ String str = "origin " + origin.toString() + "\nu " + u.toString() + "\nv " + v.toString() + "\nw " + w.toString();
+ return str;
}
-
-
- /**
- * Find @ITransform3D to the coordinate system defined by the input.
- * @return resulting 3D transform
- */
- public Transform3D getTransformation() {
- // Find the transform between the two frames - use transform classes here (not really needed)
- Translation3D translation = new Translation3D(origin.x(), origin.y(), origin.z());
- //RotationGeant trackingToEnvelopeRotation = new RotationGeant(0, 0, 0);
- Rotation3D rotation = new Rotation3D(
- new BasicHep3Matrix(
- u.x(),v.x(),w.x(),
- u.y(),v.y(),w.y(),
- u.z(),v.z(),w.z()
- ));
- Transform3D envelopeToSupportTransform = new Transform3D(translation, rotation);
- return envelopeToSupportTransform;
- }
-
-
-
-
- }
+
+
+ /**
+ * Find the {@link ITransform3D} to the coordinate system defined by the input.
+ * @return resulting 3D transform
+ */
+ public Transform3D getTransformation() {
+ // Find the transform between the two frames - use transform classes here (not really needed)
+ Translation3D translation = new Translation3D(origin.x(), origin.y(), origin.z());
+ //RotationGeant trackingToEnvelopeRotation = new RotationGeant(0, 0, 0);
+ Rotation3D rotation = new Rotation3D(
+ new BasicHep3Matrix(
+ u.x(),v.x(),w.x(),
+ u.y(),v.y(),w.y(),
+ u.z(),v.z(),w.z()
+ ));
+ Transform3D envelopeToSupportTransform = new Transform3D(translation, rotation);
+ return envelopeToSupportTransform;
+ }
+
+
+
+
+ }
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolume.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolume.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolume.java Wed Apr 27 11:11:32 2016
@@ -20,66 +20,66 @@
*
*/
public abstract class SurveyVolume {
- protected boolean debug = false;
- private String name;
- private String material = "Vacuum";
- private SurveyVolume mother = null;
- protected List<SurveyVolume> referenceGeom = null;
- private SurveyCoordinateSystem coord;
- protected Hep3Vector ballPos;
- protected Hep3Vector veePos;
- protected Hep3Vector flatPos;
- private Hep3Vector center;
- private Hep3Vector boxDim;
- private AlignmentCorrection alignmentCorrections;
-
- public SurveyVolume(String name, SurveyVolume m, AlignmentCorrection alignmentCorrection) {
- setName(name);
- setMother(m);
- setAlignmentCorrection(alignmentCorrection);
- }
-
- public SurveyVolume(String name, SurveyVolume m, AlignmentCorrection alignmentCorrection, SurveyVolume ref) {
- setName(name);
- setMother(m);
- setAlignmentCorrection(alignmentCorrection);
- addReferenceGeom(ref);
- }
-
- public SurveyVolume(String name, SurveyVolume m, AlignmentCorrection alignmentCorrection, List<SurveyVolume> ref) {
- setName(name);
- setMother(m);
- setAlignmentCorrection(alignmentCorrection);
- addReferenceGeom(ref);
- }
-
- protected abstract void setPos();
- protected abstract void setCenter();
- protected abstract void setBoxDim();
-
- /**
- *
- * Initialize the volume.
- * This needs to be called at the top level implementation of the {@link SurveyVolume} to properly setup
- * the coordinate systems. It takes care of applying user supplied custom transformations and alignment corrections
- * in the order given in the function below. That order must be preserved to get a uniform behavior.
- *
- */
- protected void init() {
- if(debug) System.out.printf("%s: init SurveyVolume %s\n",this.getClass().getSimpleName(),getName());
+ protected boolean debug = false;
+ private String name;
+ private String material = "Vacuum";
+ private SurveyVolume mother = null;
+ protected List<SurveyVolume> referenceGeom = null;
+ private SurveyCoordinateSystem coord;
+ protected Hep3Vector ballPos;
+ protected Hep3Vector veePos;
+ protected Hep3Vector flatPos;
+ private Hep3Vector center;
+ private Hep3Vector boxDim;
+ private AlignmentCorrection alignmentCorrections;
+
+ public SurveyVolume(String name, SurveyVolume m, AlignmentCorrection alignmentCorrection) {
+ setName(name);
+ setMother(m);
+ setAlignmentCorrection(alignmentCorrection);
+ }
+
+ public SurveyVolume(String name, SurveyVolume m, AlignmentCorrection alignmentCorrection, SurveyVolume ref) {
+ setName(name);
+ setMother(m);
+ setAlignmentCorrection(alignmentCorrection);
+ addReferenceGeom(ref);
+ }
+
+ public SurveyVolume(String name, SurveyVolume m, AlignmentCorrection alignmentCorrection, List<SurveyVolume> ref) {
+ setName(name);
+ setMother(m);
+ setAlignmentCorrection(alignmentCorrection);
+ addReferenceGeom(ref);
+ }
+
+ protected abstract void setPos();
+ protected abstract void setCenter();
+ protected abstract void setBoxDim();
+
+ /**
+ *
+ * Initialize the volume.
+ * This needs to be called at the top level implementation of the {@link SurveyVolume} to properly setup
+ * the coordinate systems. It takes care of applying user supplied custom transformations and alignment corrections
+ * in the order given in the function below. That order must be preserved to get a uniform behavior.
+ *
+ */
+ protected void init() {
+ if(debug) System.out.printf("%s: init SurveyVolume %s\n",this.getClass().getSimpleName(),getName());
setPos();
- setCoord();
- applyReferenceTransformation();
- setCenter();
- setBoxDim();
- applyGenericCoordinateSystemCorrections();
- applyLocalAlignmentCorrections();
- if(debug) {
- //printCoordInfo();
- System.out.printf("%s: init of SurveyVolume %s DONE\n",this.getClass().getSimpleName(),getName());
- }
- }
-
+ setCoord();
+ applyReferenceTransformation();
+ setCenter();
+ setBoxDim();
+ applyGenericCoordinateSystemCorrections();
+ applyLocalAlignmentCorrections();
+ if(debug) {
+ //printCoordInfo();
+ System.out.printf("%s: init of SurveyVolume %s DONE\n",this.getClass().getSimpleName(),getName());
+ }
+ }
+
private void applySurvey(Element node) {
@@ -378,254 +378,254 @@
* Apply a generic correction to the coordinate system of this volume.
*/
protected void applyGenericCoordinateSystemCorrections() {
- //do nothing here unless overridden
-
- }
-
- /**
- * Applies a user supplied reference transformation to the module.
- * This is convenient as it allows for intermediary "virtual" mother volumes to be used
- * in referencing a volume to it's physcial mother volume.
- */
- protected void applyReferenceTransformation() {
-
-
- if(referenceGeom!=null) {
-
- if(debug) System.out.printf("%s: apply reference transformation for %s\n",this.getClass().getSimpleName(),getName());
-
-
- if(debug) System.out.printf("%s: coord system before %d ref transformations:\n%s\n",this.getClass().getSimpleName(),referenceGeom.size(),getCoord().toString());
-
- for(SurveyVolume ref : referenceGeom) {
-
- if(debug) {
- System.out.printf("%s: coord system before ref %s transform:\n%s\n",this.getClass().getSimpleName(),ref.getName(),getCoord().toString());
- System.out.printf("%s: Ref %s coord\n%s\n",this.getClass().getSimpleName(), ref.getName(),ref.getCoord().toString());
- }
-
- getCoord().transform(ref.getCoord().getTransformation());
-
- if(debug) System.out.printf("%s: coord system after ref %s transform:\n%s\n",this.getClass().getSimpleName(),ref.getName(),getCoord().toString());
-
- }
-
- if(debug) System.out.printf("%s: coord system after ref transformations:\n%s\n",this.getClass().getSimpleName(),getCoord().toString());
-
- } else {
-
- if(debug) System.out.printf("%s: no reference transformation exists for %s\n",this.getClass().getSimpleName(),getName());
-
- }
-
- }
-
- /**
- * Apply @link AlignmentCorrection to the volume if they are supplied.
- *
- */
- private void applyLocalAlignmentCorrections() {
-
- // Apply alignment corrections to local coordinate system that is already built
- boolean debug_local = false;
- if(this.coord==null)
- throw new RuntimeException("no coordinate system was set before trying to apply alignment corrections.");
-
- if(alignmentCorrections!=null) {
-
-
- if(alignmentCorrections.getNode()!=null) {
-
- if(debug_local || debug) System.out.printf("%s: Apply survey results to %s\n",this.getClass().getSimpleName(),this.getName());
-
- applySurvey(alignmentCorrections.getNode());
-
- if(debug_local || debug) System.out.printf("%s: DONE Apply survey results to %s\n",this.getClass().getSimpleName(),this.getName());
-
- }
-
-
-
-
-
- if(debug_local || debug) System.out.printf("%s: Apply alignment corrections to %s\n",this.getClass().getSimpleName(),this.getName());
-
- // translate
- if(alignmentCorrections.getTranslation()!=null) {
-
- if(debug_local || debug) System.out.printf("%s: Apply local translation %s\n", this.getClass().getSimpleName(),alignmentCorrections.getTranslation().toString());
-
- // rotate into mother coordinate system
- Hep3Vector translation_mother = getCoord().getTransformation().rotated(alignmentCorrections.getTranslation());
-
- if(debug_local || debug) System.out.printf("%s: after rotation apply translation %s to coordinate system\n", this.getClass().getSimpleName(),translation_mother.toString());
-
- //apply translation
- getCoord().translate(translation_mother);
-
- } else {
- if(debug_local || debug) System.out.printf("%s: No translation to coordinate system\n", this.getClass().getSimpleName());
- }
-
- // rotate
- if(alignmentCorrections.getRotation()!=null) {
-
+ //do nothing here unless overridden
+
+ }
+
+ /**
+ * Applies a user supplied reference transformation to the module.
+ * This is convenient as it allows for intermediary "virtual" mother volumes to be used
+ * in referencing a volume to its physical mother volume.
+ */
+ protected void applyReferenceTransformation() {
+
+
+ if(referenceGeom!=null) {
+
+ if(debug) System.out.printf("%s: apply reference transformation for %s\n",this.getClass().getSimpleName(),getName());
+
+
+ if(debug) System.out.printf("%s: coord system before %d ref transformations:\n%s\n",this.getClass().getSimpleName(),referenceGeom.size(),getCoord().toString());
+
+ for(SurveyVolume ref : referenceGeom) {
+
+ if(debug) {
+ System.out.printf("%s: coord system before ref %s transform:\n%s\n",this.getClass().getSimpleName(),ref.getName(),getCoord().toString());
+ System.out.printf("%s: Ref %s coord\n%s\n",this.getClass().getSimpleName(), ref.getName(),ref.getCoord().toString());
+ }
+
+ getCoord().transform(ref.getCoord().getTransformation());
+
+ if(debug) System.out.printf("%s: coord system after ref %s transform:\n%s\n",this.getClass().getSimpleName(),ref.getName(),getCoord().toString());
+
+ }
+
+ if(debug) System.out.printf("%s: coord system after ref transformations:\n%s\n",this.getClass().getSimpleName(),getCoord().toString());
+
+ } else {
+
+ if(debug) System.out.printf("%s: no reference transformation exists for %s\n",this.getClass().getSimpleName(),getName());
+
+ }
+
+ }
+
+ /**
+ * Apply {@link AlignmentCorrection} to the volume if they are supplied.
+ *
+ */
+ private void applyLocalAlignmentCorrections() {
+
+ // Apply alignment corrections to local coordinate system that is already built
+ boolean debug_local = false;
+ if(this.coord==null)
+ throw new RuntimeException("no coordinate system was set before trying to apply alignment corrections.");
+
+ if(alignmentCorrections!=null) {
+
+
+ if(alignmentCorrections.getNode()!=null) {
+
+ if(debug_local || debug) System.out.printf("%s: Apply survey results to %s\n",this.getClass().getSimpleName(),this.getName());
+
+ applySurvey(alignmentCorrections.getNode());
+
+ if(debug_local || debug) System.out.printf("%s: DONE Apply survey results to %s\n",this.getClass().getSimpleName(),this.getName());
+
+ }
+
+
+
+
+
+ if(debug_local || debug) System.out.printf("%s: Apply alignment corrections to %s\n",this.getClass().getSimpleName(),this.getName());
+
+ // translate
+ if(alignmentCorrections.getTranslation()!=null) {
+
+ if(debug_local || debug) System.out.printf("%s: Apply local translation %s\n", this.getClass().getSimpleName(),alignmentCorrections.getTranslation().toString());
+
+ // rotate into mother coordinate system
+ Hep3Vector translation_mother = getCoord().getTransformation().rotated(alignmentCorrections.getTranslation());
+
+ if(debug_local || debug) System.out.printf("%s: after rotation apply translation %s to coordinate system\n", this.getClass().getSimpleName(),translation_mother.toString());
+
+ //apply translation
+ getCoord().translate(translation_mother);
+
+ } else {
+ if(debug_local || debug) System.out.printf("%s: No translation to coordinate system\n", this.getClass().getSimpleName());
+ }
+
+ // rotate
+ if(alignmentCorrections.getRotation()!=null) {
+
if(debug_local || debug) {
System.out.printf("%s: Apply rotation matrix:\n", this.getClass().getSimpleName());
TransformationUtils.printMatrix(alignmentCorrections.getRotation().getMatrix());
System.out.printf("%s: coord system before:\n%s\n", this.getClass().getSimpleName(),getCoord().toString());
}
-
- // correct rotation of the local unit vectors
- Vector3D u_rot_local = alignmentCorrections.getRotation().applyTo(new Vector3D(1,0,0));
- Vector3D v_rot_local = alignmentCorrections.getRotation().applyTo(new Vector3D(0,1,0));
- Vector3D w_rot_local = alignmentCorrections.getRotation().applyTo(new Vector3D(0,0,1));
-
- // rotate the local unit vectors to the mother coordinates
-
- Hep3Vector u_rot = getCoord().getTransformation().getRotation().rotated(new BasicHep3Vector(u_rot_local.toArray()));
- Hep3Vector v_rot = getCoord().getTransformation().getRotation().rotated(new BasicHep3Vector(v_rot_local.toArray()));
- Hep3Vector w_rot = getCoord().getTransformation().getRotation().rotated(new BasicHep3Vector(w_rot_local.toArray()));
+
+ // correct rotation of the local unit vectors
+ Vector3D u_rot_local = alignmentCorrections.getRotation().applyTo(new Vector3D(1,0,0));
+ Vector3D v_rot_local = alignmentCorrections.getRotation().applyTo(new Vector3D(0,1,0));
+ Vector3D w_rot_local = alignmentCorrections.getRotation().applyTo(new Vector3D(0,0,1));
+
+ // rotate the local unit vectors to the mother coordinates
+
+ Hep3Vector u_rot = getCoord().getTransformation().getRotation().rotated(new BasicHep3Vector(u_rot_local.toArray()));
+ Hep3Vector v_rot = getCoord().getTransformation().getRotation().rotated(new BasicHep3Vector(v_rot_local.toArray()));
+ Hep3Vector w_rot = getCoord().getTransformation().getRotation().rotated(new BasicHep3Vector(w_rot_local.toArray()));
- getCoord().u(u_rot);
- getCoord().v(v_rot);
- getCoord().w(w_rot);
+ getCoord().u(u_rot);
+ getCoord().v(v_rot);
+ getCoord().w(w_rot);
if(debug_local || debug) {
System.out.printf("%s: coord system after:\n%s\n", this.getClass().getSimpleName(),getCoord().toString());
}
-
-
-
- } else {
- if(debug_local || debug) System.out.printf("%s: No rotation to coordinate system\n", this.getClass().getSimpleName());
- }
-
- if(debug_local || debug) System.out.printf("%s: coordinate system after alignment corrections:\n%s\n",this.getClass().getSimpleName(),getCoord().toString());
-
- } else {
+
+
+
+ } else {
+ if(debug_local || debug) System.out.printf("%s: No rotation to coordinate system\n", this.getClass().getSimpleName());
+ }
+
+ if(debug_local || debug) System.out.printf("%s: coordinate system after alignment corrections:\n%s\n",this.getClass().getSimpleName(),getCoord().toString());
+
+ } else {
if(debug_local || debug) System.out.printf("%s: no alignment corrections exist for %s\n",this.getClass().getSimpleName(),this.getName());
- }
-
- }
-
- private void setAlignmentCorrection(AlignmentCorrection alignmentCorrection) {
+ }
+
+ }
+
+ private void setAlignmentCorrection(AlignmentCorrection alignmentCorrection) {
this.alignmentCorrections = alignmentCorrection;
}
public void setBallPos(double x, double y, double z) {
- ballPos = new BasicHep3Vector(x,y,z);
- }
- public void setVeePos(double x, double y, double z) {
- veePos = new BasicHep3Vector(x,y,z);
- }
- public void setFlatPos(double x, double y, double z) {
- flatPos = new BasicHep3Vector(x,y,z);
- }
- public Hep3Vector getBallPos() {
- return ballPos;
- }
- public Hep3Vector getVeePos() {
- return veePos;
- }
- public Hep3Vector getFlatPos() {
- return flatPos;
- }
- public void setCoord() {
- if(ballPos==null || veePos==null || flatPos==null) {
- throw new RuntimeException("Need to set ball, vee and flat before building coord system!");
- }
-
- coord = new SurveyCoordinateSystem(ballPos, veePos, flatPos);
-
- if(this.debug) {
- System.out.printf("%s: setCoord \n%s\n", this.getClass().getSimpleName(), coord.toString());
- }
- }
- public SurveyCoordinateSystem getCoord() {
- if(coord == null) {
- throw new RuntimeException("Need to setCoord!");
- }
- return coord;
- }
- public String getName() {
- return name;
- }
- public void setName(String name) {
- this.name = name;
- }
- public Hep3Vector getCenter() {
- return center;
- }
- public void setCenter(Hep3Vector center) {
- this.center = center;
- }
- public void setCenter(double x, double y, double z) {
- this.center = new BasicHep3Vector(x,y,z);
- }
- public Hep3Vector getBoxDim() {
- return boxDim;
- }
- public void setBoxDim(double x, double y, double z) {
- this.boxDim = new BasicHep3Vector(x,y,z);
- }
- public SurveyVolume getMother() {
- return mother;
- }
- public void setMother(SurveyVolume mother) {
- this.mother = mother;
- }
- public void addReferenceGeom(SurveyVolume refGeom) {
- if(refGeom!=null) { // check that it's not a dummy call
- if(referenceGeom == null) {
- referenceGeom = new ArrayList<SurveyVolume>();
- }
- referenceGeom.add(refGeom);
- }
- }
- public void addReferenceGeom(List<SurveyVolume> refGeomList) {
- if(referenceGeom == null) {
- referenceGeom = new ArrayList<SurveyVolume>();
- }
- referenceGeom.addAll(refGeomList);
- }
- public void printSurveyPos() {
- if(debug) {
- System.out.printf("%s: Survey pos for %s:\n",getClass().getSimpleName(),getName());
- System.out.printf("%s: ballPos %s\n",getClass().getSimpleName(), ballPos.toString());
- System.out.printf("%s: veePos %s\n",getClass().getSimpleName(), veePos.toString());
- System.out.printf("%s: flatPos %s\n",getClass().getSimpleName(), flatPos.toString());
- }
- }
- public String getMaterial() {
- return material;
- }
- public void setMaterial(String material) {
- this.material = material;
- }
- public String toString() {
- String s = "==\n" + getName() + " with mother " + (getMother()==null?"<no mother>":getMother().getName()) + ":\n";
- if( getCenter()!=null) s += "Center of box: " + getCenter().toString() + "\n";
+ ballPos = new BasicHep3Vector(x,y,z);
+ }
+ public void setVeePos(double x, double y, double z) {
+ veePos = new BasicHep3Vector(x,y,z);
+ }
+ public void setFlatPos(double x, double y, double z) {
+ flatPos = new BasicHep3Vector(x,y,z);
+ }
+ public Hep3Vector getBallPos() {
+ return ballPos;
+ }
+ public Hep3Vector getVeePos() {
+ return veePos;
+ }
+ public Hep3Vector getFlatPos() {
+ return flatPos;
+ }
+ public void setCoord() {
+ if(ballPos==null || veePos==null || flatPos==null) {
+ throw new RuntimeException("Need to set ball, vee and flat before building coord system!");
+ }
+
+ coord = new SurveyCoordinateSystem(ballPos, veePos, flatPos);
+
+ if(this.debug) {
+ System.out.printf("%s: setCoord \n%s\n", this.getClass().getSimpleName(), coord.toString());
+ }
+ }
+ public SurveyCoordinateSystem getCoord() {
+ if(coord == null) {
+ throw new RuntimeException("Need to setCoord!");
+ }
+ return coord;
+ }
+ public String getName() {
+ return name;
+ }
+ public void setName(String name) {
+ this.name = name;
+ }
+ public Hep3Vector getCenter() {
+ return center;
+ }
+ public void setCenter(Hep3Vector center) {
+ this.center = center;
+ }
+ public void setCenter(double x, double y, double z) {
+ this.center = new BasicHep3Vector(x,y,z);
+ }
+ public Hep3Vector getBoxDim() {
+ return boxDim;
+ }
+ public void setBoxDim(double x, double y, double z) {
+ this.boxDim = new BasicHep3Vector(x,y,z);
+ }
+ public SurveyVolume getMother() {
+ return mother;
+ }
+ public void setMother(SurveyVolume mother) {
+ this.mother = mother;
+ }
+ public void addReferenceGeom(SurveyVolume refGeom) {
+ if(refGeom!=null) { // check that it's not a dummy call
+ if(referenceGeom == null) {
+ referenceGeom = new ArrayList<SurveyVolume>();
+ }
+ referenceGeom.add(refGeom);
+ }
+ }
+ public void addReferenceGeom(List<SurveyVolume> refGeomList) {
+ if(referenceGeom == null) {
+ referenceGeom = new ArrayList<SurveyVolume>();
+ }
+ referenceGeom.addAll(refGeomList);
+ }
+ public void printSurveyPos() {
+ if(debug) {
+ System.out.printf("%s: Survey pos for %s:\n",getClass().getSimpleName(),getName());
+ System.out.printf("%s: ballPos %s\n",getClass().getSimpleName(), ballPos.toString());
+ System.out.printf("%s: veePos %s\n",getClass().getSimpleName(), veePos.toString());
+ System.out.printf("%s: flatPos %s\n",getClass().getSimpleName(), flatPos.toString());
+ }
+ }
+ public String getMaterial() {
+ return material;
+ }
+ public void setMaterial(String material) {
+ this.material = material;
+ }
+ public String toString() {
+ String s = "==\n" + getName() + " with mother " + (getMother()==null?"<no mother>":getMother().getName()) + ":\n";
+ if( getCenter()!=null) s += "Center of box: " + getCenter().toString() + "\n";
if( getBoxDim()!=null) s += "Box dimensions: " + getBoxDim().toString() + "\n";
- if(this.coord==null) s += " No coord system \n";
- else {
- s += getName() + " origin " + getCoord().origin() + " u " + getCoord().u()+ " v " + getCoord().v()+ " w " + getCoord().w();
- }
+ if(this.coord==null) s += " No coord system \n";
+ else {
+ s += getName() + " origin " + getCoord().origin() + " u " + getCoord().u()+ " v " + getCoord().v()+ " w " + getCoord().w();
+ }
s += "AlignmentCorrections: \n";
- if(this.alignmentCorrections!=null) {
- s += "Milleparameters: ";
- if(this.alignmentCorrections.getMilleParameters()!=null) {
- for(MilleParameter mp : this.alignmentCorrections.getMilleParameters()) s += mp.getId() + " ";
- } else {
- s += "no MP params associated.";
- }
- s += "(" + this.getName() + ")" + " \n";
- } else {
- s+= " no alignment corrections associated.\n";
- }
- SurveyVolume m = getMother();
- while(m!=null) {
+ if(this.alignmentCorrections!=null) {
+ s += "Milleparameters: ";
+ if(this.alignmentCorrections.getMilleParameters()!=null) {
+ for(MilleParameter mp : this.alignmentCorrections.getMilleParameters()) s += mp.getId() + " ";
+ } else {
+ s += "no MP params associated.";
+ }
+ s += "(" + this.getName() + ")" + " \n";
+ } else {
+ s+= " no alignment corrections associated.\n";
+ }
+ SurveyVolume m = getMother();
+ while(m!=null) {
Hep3Vector origin_m = HPSTrackerBuilder.transformToParent(new BasicHep3Vector(0, 0, 0), this, m.getName());
String unitVecStr = "";
if(getCoord()!=null) {
@@ -638,12 +638,12 @@
//origin_m = VecOp.mult(0.0393701, origin_m);
//s += String.format("%s origin in %s : (%.4f %.4f %.4f) (inch)\n",getName(), m.getName(), origin_m.x(),origin_m.y(),origin_m.z());
m = m.getMother();
- }
-
-
- return s;
- }
-
-
-
+ }
+
+
+ return s;
+ }
+
+
+
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolumeImpl.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolumeImpl.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolumeImpl.java Wed Apr 27 11:11:32 2016
@@ -21,7 +21,7 @@
public abstract void setPositionAndRotation(SurveyVolume base);
public String getName() {
- return surveyVolume.getName();
+ return surveyVolume.getName();
}
protected Hep3Vector getBoxDim() {
@@ -33,7 +33,7 @@
}
public boolean isDebug() {
- return debug;
+ return debug;
}
public abstract String toString();
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolumeVisualization.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolumeVisualization.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolumeVisualization.java Wed Apr 27 11:11:32 2016
@@ -7,14 +7,14 @@
* @author Per Hansson Adrian <[log in to unmask]>
*/
public class SurveyVolumeVisualization {
- protected String visName = "";
- public SurveyVolumeVisualization() {}
- public String getVisName() {
- return visName;
- }
- protected void setVisName(String visName) {
- this.visName = visName;
- }
-
-
+ protected String visName = "";
+ public SurveyVolumeVisualization() {}
+ public String getVisName() {
+ return visName;
+ }
+ protected void setVisName(String visName) {
+ this.visName = visName;
+ }
+
+
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SvtAlignmentConstantsReader.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SvtAlignmentConstantsReader.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SvtAlignmentConstantsReader.java Wed Apr 27 11:11:32 2016
@@ -14,16 +14,16 @@
* @author <a href="mailto:[log in to unmask]">Jeremy McCormick</a>
*/
public class SvtAlignmentConstantsReader {
-
- private SvtAlignmentConstantsReader() {
- }
+
+ private SvtAlignmentConstantsReader() {
+ }
- /**
- * Read SVT alignment constants from the conditions database table <i>svt_alignments</i> and create a list of
- * <code>MilleParameter</code> objects from it.
- *
- * @return the Millepede parameter list
- */
+ /**
+ * Read SVT alignment constants from the conditions database table <i>svt_alignments</i> and create a list of
+ * <code>MilleParameter</code> objects from it.
+ *
+ * @return the Millepede parameter list
+ */
static List<MilleParameter> readMilleParameters() {
final DatabaseConditionsManager manager = DatabaseConditionsManager.getInstance();
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSMuonCalorimeter2.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSMuonCalorimeter2.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSMuonCalorimeter2.java Wed Apr 27 11:11:32 2016
@@ -8,13 +8,13 @@
public class HPSMuonCalorimeter2 extends LCDDSubdetector
{
- HPSMuonCalorimeter2(Element e) throws JDOMException
- {
- super(e);
- }
+ HPSMuonCalorimeter2(Element e) throws JDOMException
+ {
+ super(e);
+ }
- void addToLCDD(LCDD lcdd, SensitiveDetector sens) throws JDOMException
- {
+ void addToLCDD(LCDD lcdd, SensitiveDetector sens) throws JDOMException
+ {
String name = node.getAttributeValue("name");
System.out.println("HPSMuonCalorimeter2.addToLCDD - " + name);
int id = node.getAttribute("id").getIntValue();
@@ -22,7 +22,7 @@
Element parameters = node.getChild("parameters");
if (parameters == null) {
- throw new RuntimeException("parameters element missing");
+ throw new RuntimeException("parameters element missing");
}
double frontFaceToTarget = parameters.getAttribute("front_face_to_target").getDoubleValue();
@@ -40,16 +40,16 @@
System.out.println("stripSpacingZ = " + stripSpacingZ);
for (Object layerObject : node.getChildren("layer")) {
- Element layerElement = (Element)layerObject;
- int layerId = layerElement.getAttribute("id").getIntValue();
- System.out.println("layer = " + layerId);
- for (Object sliceObject : layerElement.getChildren("slice")) {
- Element sliceElement = (Element)sliceObject;
- if (sliceElement.getAttribute("thickness") != null) {
- double thickness = sliceElement.getAttribute("thickness").getDoubleValue();
- System.out.println("slice thickness = " + thickness);
- }
- }
+ Element layerElement = (Element)layerObject;
+ int layerId = layerElement.getAttribute("id").getIntValue();
+ System.out.println("layer = " + layerId);
+ for (Object sliceObject : layerElement.getChildren("slice")) {
+ Element sliceElement = (Element)sliceObject;
+ if (sliceElement.getAttribute("thickness") != null) {
+ double thickness = sliceElement.getAttribute("thickness").getDoubleValue();
+ System.out.println("slice thickness = " + thickness);
+ }
+ }
}
- }
+ }
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTestRunTracker2014.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTestRunTracker2014.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTestRunTracker2014.java Wed Apr 27 11:11:32 2016
@@ -9,7 +9,6 @@
import org.lcsim.geometry.compact.converter.HPSTestRunTracker2014GeometryDefinition;
import org.lcsim.geometry.compact.converter.HPSTestRunTracker2014LCDDBuilder;
import org.lcsim.geometry.compact.converter.HPSTrackerBuilder;
-import org.lcsim.geometry.compact.converter.HPSTrackerGeometryDefinition;
import org.lcsim.geometry.compact.converter.HPSTrackerLCDDBuilder;
import org.lcsim.geometry.compact.converter.lcdd.util.Box;
import org.lcsim.geometry.compact.converter.lcdd.util.LCDD;
@@ -30,58 +29,58 @@
*/
public class HPSTestRunTracker2014 extends HPSTracker2014Base
{
- public HPSTestRunTracker2014(Element node) throws JDOMException
- {
- super(node);
- }
-
- /* (non-Javadoc)
- * @see org.lcsim.geometry.compact.converter.lcdd.HPSTracker2014Base#initializeBuilder(org.lcsim.geometry.compact.converter.lcdd.util.LCDD, org.lcsim.geometry.compact.converter.lcdd.util.SensitiveDetector)
- */
- protected HPSTrackerLCDDBuilder initializeBuilder(LCDD lcdd, SensitiveDetector sens) {
- HPSTrackerLCDDBuilder b = new HPSTestRunTracker2014LCDDBuilder(_debug,node,lcdd,sens);
- return b;
- }
-
-
-
- /* (non-Javadoc)
+ public HPSTestRunTracker2014(Element node) throws JDOMException
+ {
+ super(node);
+ }
+
+ /* (non-Javadoc)
+ * @see org.lcsim.geometry.compact.converter.lcdd.HPSTracker2014Base#initializeBuilder(org.lcsim.geometry.compact.converter.lcdd.util.LCDD, org.lcsim.geometry.compact.converter.lcdd.util.SensitiveDetector)
+ */
+ protected HPSTrackerLCDDBuilder initializeBuilder(LCDD lcdd, SensitiveDetector sens) {
+ HPSTrackerLCDDBuilder b = new HPSTestRunTracker2014LCDDBuilder(_debug,node,lcdd,sens);
+ return b;
+ }
+
+
+
+ /* (non-Javadoc)
* @see org.lcsim.detector.converter.compact.HPSTracker2014ConverterBase#getModuleNumber(org.lcsim.geometry.compact.converter.JavaSurveyVolume)
*/
protected int getModuleNumber(String surveyVolume) {
return HPSTrackerBuilder.getHalfFromName(surveyVolume).equals("top") ? 0 : 1;
}
-
-
-
-
-
-
-
-
-
-
-
-
-
- private void makeExample(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
-
-
- if(_debug) {
- System.out.println("--- makeExample ----");
-
- }
-
-
-
- String volName = "example";
- Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
- lcdd.add(box);
- Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
-
-
-
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D axisX = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(1., 0., 0.);
+
+
+
+
+
+
+
+
+
+
+
+
+
+ private void makeExample(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
+
+
+ if(_debug) {
+ System.out.println("--- makeExample ----");
+
+ }
+
+
+
+ String volName = "example";
+ Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
+ lcdd.add(box);
+ Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
+
+
+
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D axisX = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(1., 0., 0.);
org.apache.commons.math3.geometry.euclidean.threed.Vector3D axisY = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(0., 1., 0.);
org.apache.commons.math3.geometry.euclidean.threed.Vector3D axisZ = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(0., 0., 1.);
@@ -131,11 +130,11 @@
Rotation rot = new Rotation(volName + "_rotation",0,0,0);
lcdd.add(pos);
lcdd.add(rot);
-
-
+
+
PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
if(_debug) {
- System.out.println("Created physical vomume " + basePV.getName());
+ System.out.println("Created physical vomume " + basePV.getName());
}
@@ -151,153 +150,153 @@
lcdd.add(subRot);
PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
if(_debug) {
- System.out.println("Created physical vomume " + subBasePV.getName());
+ System.out.println("Created physical vomume " + subBasePV.getName());
}
-
+
lcdd.add(volumeSub);
volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
-
+
lcdd.add(volume);
-
-
-
-
- }
-
-
- private void makeExample2(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
-
-
- if(_debug) {
- System.out.println("--- makeExample2 ----");
-
- }
-
- String volName = "example2";
- Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
- lcdd.add(box);
- Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
-
-
-
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D axisX = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(1., 0., 0.);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D axisY = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(0., 1., 0.);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D axisZ = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(0., 0., 1.);
-
- double alpha1 = PI / 4.;
- double alpha2 = PI / 4.;
- double alpha3 = -PI / 4.;
-
- org.apache.commons.math3.geometry.euclidean.threed.Rotation r123 =
- new org.apache.commons.math3.geometry.euclidean.threed.Rotation(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ,
- alpha1,
- alpha2,
- alpha3);
-
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D axisXPrime = r123.applyTo(axisX);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D axisYPrime = r123.applyTo(axisY);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D axisZPrime = r123.applyTo(axisZ);
-
- //if(_debug) System.out.println("axisYPrime: " + axisYPrime);
- //if(_debug) System.out.println("axisZPrime: " + axisZPrime);
-
-
-
- //double [] rotations = r123.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
-
-
- //double [] rotations = r12.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
- double [] rotations = r123.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
-
- Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5,0,0);
- Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
- lcdd.add(pos);
- lcdd.add(rot);
-
-
- PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
- if(_debug) {
- System.out.println("Created physical vomume " + basePV.getName());
- }
-
-
-
- volName = volName + "_sub";
- Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
- lcdd.add(boxSub);
- Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
- Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
- Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
- lcdd.add(subPos);
- lcdd.add(subRot);
- PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
- if(_debug) {
- System.out.println("Created physical vomume " + subBasePV.getName());
- }
-
- lcdd.add(volumeSub);
- volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
-
- lcdd.add(volume);
-
-
-
-
- }
-
-
-
- private void makeExample3(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
-
-
- if(_debug) {
- System.out.println("--- makeExample3 ----");
-
- }
-
- String volName = "example3";
- Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
- lcdd.add(box);
- Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
-
- /*
-
- TestRunModuleL13: survey positions before ref support_plate_top transform
- Survey pos for module_L1t:
- ballPos [ 25.000, 676.10, -4.3500]
- veePos [ 95.000, 676.10, -4.3500]
- flatPos [ 60.000, 670.10, -4.3500]
- TestRunModuleL13: Ref support_plate_top coord
- Coordinate system:
- origin [ 40.314, 142.71, 138.40]
- u [ 0.99955, 0.030000, 0.0000]
- v [ -0.030000, 0.99955, 0.0000]
- w [ 0.0000, -0.0000, 1.0000]
- TestRunModuleL13: survey positions after ref support_plate_top transform
- Survey pos for module_L1t:
- ballPos [ 45.020, 819.25, 134.05]
- veePos [ 114.99, 821.35, 134.05]
- flatPos [ 80.184, 814.31, 134.05]
- TestRunModuleL13: coordinate system:
- Coordinate system:
- origin [ 45.020, 819.25, 134.05]
- u [ 0.99955, 0.030000, 0.0000]
- v [ 0.030000, -0.99955, 0.0000]
- w [ 0.0000, 0.0000, -1.0000]
- TestRunModuleL13: translation:
- [ 45.020, 819.25, 134.05]
- TestRunModuleL13: rotation:
- [
- 0.999549894704642 0.030000133265350216 0.0
- 0.030000133265350216 -0.999549894704642 0.0
- 0.0 0.0 -1.0
-
- ]
-
-
-
- LCDDBaseGeom: set position and rotation for volume module_L1t
+
+
+
+
+ }
+
+
+ private void makeExample2(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
+
+
+ if(_debug) {
+ System.out.println("--- makeExample2 ----");
+
+ }
+
+ String volName = "example2";
+ Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
+ lcdd.add(box);
+ Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
+
+
+
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D axisX = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(1., 0., 0.);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D axisY = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(0., 1., 0.);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D axisZ = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(0., 0., 1.);
+
+ double alpha1 = PI / 4.;
+ double alpha2 = PI / 4.;
+ double alpha3 = -PI / 4.;
+
+ org.apache.commons.math3.geometry.euclidean.threed.Rotation r123 =
+ new org.apache.commons.math3.geometry.euclidean.threed.Rotation(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ,
+ alpha1,
+ alpha2,
+ alpha3);
+
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D axisXPrime = r123.applyTo(axisX);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D axisYPrime = r123.applyTo(axisY);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D axisZPrime = r123.applyTo(axisZ);
+
+ //if(_debug) System.out.println("axisYPrime: " + axisYPrime);
+ //if(_debug) System.out.println("axisZPrime: " + axisZPrime);
+
+
+
+ //double [] rotations = r123.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
+
+
+ //double [] rotations = r12.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
+ double [] rotations = r123.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
+
+ Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5,0,0);
+ Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
+ lcdd.add(pos);
+ lcdd.add(rot);
+
+
+ PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
+ if(_debug) {
+ System.out.println("Created physical vomume " + basePV.getName());
+ }
+
+
+
+ volName = volName + "_sub";
+ Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
+ lcdd.add(boxSub);
+ Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
+ Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
+ Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
+ lcdd.add(subPos);
+ lcdd.add(subRot);
+ PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
+ if(_debug) {
+ System.out.println("Created physical vomume " + subBasePV.getName());
+ }
+
+ lcdd.add(volumeSub);
+ volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
+
+ lcdd.add(volume);
+
+
+
+
+ }
+
+
+
+ private void makeExample3(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
+
+
+ if(_debug) {
+ System.out.println("--- makeExample3 ----");
+
+ }
+
+ String volName = "example3";
+ Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
+ lcdd.add(box);
+ Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
+
+ /*
+
+ TestRunModuleL13: survey positions before ref support_plate_top transform
+ Survey pos for module_L1t:
+ ballPos [ 25.000, 676.10, -4.3500]
+ veePos [ 95.000, 676.10, -4.3500]
+ flatPos [ 60.000, 670.10, -4.3500]
+ TestRunModuleL13: Ref support_plate_top coord
+ Coordinate system:
+ origin [ 40.314, 142.71, 138.40]
+ u [ 0.99955, 0.030000, 0.0000]
+ v [ -0.030000, 0.99955, 0.0000]
+ w [ 0.0000, -0.0000, 1.0000]
+ TestRunModuleL13: survey positions after ref support_plate_top transform
+ Survey pos for module_L1t:
+ ballPos [ 45.020, 819.25, 134.05]
+ veePos [ 114.99, 821.35, 134.05]
+ flatPos [ 80.184, 814.31, 134.05]
+ TestRunModuleL13: coordinate system:
+ Coordinate system:
+ origin [ 45.020, 819.25, 134.05]
+ u [ 0.99955, 0.030000, 0.0000]
+ v [ 0.030000, -0.99955, 0.0000]
+ w [ 0.0000, 0.0000, -1.0000]
+ TestRunModuleL13: translation:
+ [ 45.020, 819.25, 134.05]
+ TestRunModuleL13: rotation:
+ [
+ 0.999549894704642 0.030000133265350216 0.0
+ 0.030000133265350216 -0.999549894704642 0.0
+ 0.0 0.0 -1.0
+
+ ]
+
+
+
+ LCDDBaseGeom: set position and rotation for volume module_L1t
getEulerAngles: u [ 0.030000, -0.99955, 0.0000] v[ 0.0000, 0.0000, -1.0000] -> [ 0.0000, 1.0000, 0.0000] [ 0.0000, 0.0000, 1.0000]
Input: u {0.03; -1; 0} v {0; 0; -1} u' {0; 1; 0} v' {0; 0; 1}
rot matrix:
@@ -314,388 +313,388 @@
LCDDBaseGeom: rot [Element: <rotation/>]
LCDDBaseGeom: DONE constructing LCDD object module_L1t
-
-
- */
-
- Hep3Vector u = new BasicHep3Vector(1,0,0);
- Hep3Vector v = new BasicHep3Vector(0,1,0);
- Hep3Vector w = new BasicHep3Vector(0,0,1);
-
- Hep3Vector u_L1 = new BasicHep3Vector(0.99955, 0.030000, 0.0000);
- Hep3Vector v_L1 = new BasicHep3Vector(0.030000, -0.99955, 0.0000);
- Hep3Vector w_L1 = new BasicHep3Vector(0.0000, 0.0000, -1.0000);
-
-
-
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_L1 = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u_L1.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_L1 = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v_L1.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_L1 = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w_L1.v());
-
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
-
- Hep3Vector euler_angles = TransformationUtils.getCardanAngles(v_L1, w_L1, v, w);
-
- //Get the generic rotation
- org.apache.commons.math3.geometry.euclidean.threed.Rotation r = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(v_3D_L1,w_3D_L1,v_3D, w_3D);
- //Get the angles
- double rotations[] = r.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
-
- if(_debug) {
- System.out.println("getEulerAngles gives euler_angles: " + euler_angles.toString());
- System.out.println("manual gives euler_angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
- }
-
- if((rotations[0]-euler_angles.x())>0.00001 || (rotations[1]-euler_angles.y())>0.00001 || (rotations[2]-euler_angles.z())>0.00001) {
- //System.("closing the loop in apache rotation didn't work!");
- //throw new RuntimeException("closing the loop in apache rotation didn't work!");
- }
-
-
-
-
- Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*4,0,0);
- Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
- lcdd.add(pos);
- lcdd.add(rot);
-
-
- PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
- if(_debug) {
- System.out.println("Created physical vomume " + basePV.getName());
- }
-
-
-
- volName = volName + "_sub";
- Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
- lcdd.add(boxSub);
- Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
- Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
- Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
- lcdd.add(subPos);
- lcdd.add(subRot);
- PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
- if(_debug) {
- System.out.println("Created physical vomume " + subBasePV.getName());
- }
-
- lcdd.add(volumeSub);
- volumeSub.setVisAttributes(lcdd.getVisAttributes("HybridVis"));
-
- lcdd.add(volume);
-
-
-
-
- }
-
-
- private void makeExample4(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
-
-
- if(_debug) {
- System.out.println("--- makeExample4 ----");
-
- }
-
- String volName = "example4";
- Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
- lcdd.add(box);
- Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
-
-
-
- Hep3Vector u = new BasicHep3Vector(1,0,0);
- Hep3Vector v = new BasicHep3Vector(0,1,0);
- Hep3Vector w = new BasicHep3Vector(0,0,1);
-
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
-
-
-
- //set up a rotation about the X axis
- org.apache.commons.math3.geometry.euclidean.threed.Rotation r1 = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(u_3D, -1.0*Math.PI);
-
- // find y' and z'
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_p = r1.applyTo(u_3D);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_p = r1.applyTo(v_3D);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_p = r1.applyTo(w_3D);
-
-
- double [] rotations = r1.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
-
-
- if(_debug) {
- System.out.println("u_3D: " + u_3D.toString());
- System.out.println("v_3D: " + v_3D.toString());
- System.out.println("w_3D: " + w_3D.toString());
- r1.toString();
- System.out.println("u_3D_p: " + u_3D_p.toString());
- System.out.println("v_3D_p: " + v_3D_p.toString());
- System.out.println("w_3D_p: " + w_3D_p.toString());
-
- System.out.println("gives euler_angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
-
- }
-
-
-
- //apply to unit vector
-
- Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*2,0,0);
- Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
- lcdd.add(pos);
- lcdd.add(rot);
-
-
- PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
- if(_debug) {
- System.out.println("Created physical vomume " + basePV.getName());
- }
-
-
-
-
- volName = volName + "_sub";
- Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
- lcdd.add(boxSub);
- Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
- Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
- Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
- lcdd.add(subPos);
- lcdd.add(subRot);
- PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
- if(_debug) {
- System.out.println("Created physical vomume " + subBasePV.getName());
- }
-
- lcdd.add(volumeSub);
- volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
-
- lcdd.add(volume);
-
-
-
-
- }
-
-
- private void makeExample5(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
-
-
- if(_debug) {
- System.out.println("--- makeExample5 ----");
-
- }
-
- String volName = "example5";
- Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
- lcdd.add(box);
- Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
-
-
-
- Hep3Vector u = new BasicHep3Vector(1,0,0);
- Hep3Vector v = new BasicHep3Vector(0,1,0);
- Hep3Vector w = new BasicHep3Vector(0,0,1);
-
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
-
-
-
- //set up a rotation about the X axis
- org.apache.commons.math3.geometry.euclidean.threed.Rotation r1 = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(u_3D, -1.0*Math.PI);
-
- // find y' and z'
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_p = r1.applyTo(u_3D);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_p = r1.applyTo(v_3D);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_p = r1.applyTo(w_3D);
-
- // set up a rotation about the Z axis
- org.apache.commons.math3.geometry.euclidean.threed.Rotation r3 = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(w_3D_p, -0.03);
- org.apache.commons.math3.geometry.euclidean.threed.Rotation r13 = r3.applyTo(r1);
-
-
- // find y' and z'
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_pp = r13.applyTo(u_3D);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_pp = r13.applyTo(v_3D);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_pp = r13.applyTo(w_3D);
- //org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_pp = r13.applyTo(u_3D_p);
- //org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_pp = r13.applyTo(v_3D_p);
- //org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_pp = r13.applyTo(w_3D_p);
-
- double [] rotations = r13.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
-
-
- if(_debug) {
- System.out.println("u_3D: " + u_3D.toString());
- System.out.println("v_3D: " + v_3D.toString());
- System.out.println("w_3D: " + w_3D.toString());
- r1.toString();
- System.out.println("u_3D_p: " + u_3D_p.toString());
- System.out.println("v_3D_p: " + v_3D_p.toString());
- System.out.println("w_3D_p: " + w_3D_p.toString());
- r13.toString();
- System.out.println("u_3D_pp: " + u_3D_pp.toString());
- System.out.println("v_3D_pp: " + v_3D_pp.toString());
- System.out.println("w_3D_pp: " + w_3D_pp.toString());
- System.out.println("gives euler_angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
-
- }
-
-
-
- //apply to unit vector
-
- Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*3,0,0);
- Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
- lcdd.add(pos);
- lcdd.add(rot);
-
-
- PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
- if(_debug) {
- System.out.println("Created physical vomume " + basePV.getName());
- }
-
-
- volName = volName + "_sub";
- Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
- lcdd.add(boxSub);
- Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
- Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
- Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
- lcdd.add(subPos);
- lcdd.add(subRot);
- PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
- if(_debug) {
- System.out.println("Created physical vomume " + subBasePV.getName());
- }
-
- lcdd.add(volumeSub);
- volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
-
- lcdd.add(volume);
-
-
-
-
- }
-
-
-
-
- private void makeExample5b(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
-
-
- if(_debug) {
- System.out.println("--- makeExample5b ----");
-
- }
-
- String volName = "example5b";
- Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
- lcdd.add(box);
- Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
-
-
-
- Hep3Vector u = new BasicHep3Vector(1,0,0);
- Hep3Vector v = new BasicHep3Vector(0,1,0);
- Hep3Vector w = new BasicHep3Vector(0,0,1);
-
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
-
-
- // set up a rotation about the Z axis
- org.apache.commons.math3.geometry.euclidean.threed.Rotation r3 = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(w_3D, -0.03);
-
- // find y' and z'
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_p = r3.applyTo(u_3D);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_p = r3.applyTo(v_3D);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_p = r3.applyTo(w_3D);
-
-
-
- double [] rotations = r3.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
-
-
- if(_debug) {
- System.out.println("u_3D: " + u_3D.toString());
- System.out.println("v_3D: " + v_3D.toString());
- System.out.println("w_3D: " + w_3D.toString());
- r3.toString();
- System.out.println("u_3D_p: " + u_3D_p.toString());
- System.out.println("v_3D_p: " + v_3D_p.toString());
- System.out.println("w_3D_p: " + w_3D_p.toString());
- System.out.println("gives euler_angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
-
- }
-
-
-
- //apply to unit vector
-
- Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*3,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length*-1.5,0);
- Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
- lcdd.add(pos);
- lcdd.add(rot);
-
-
- PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
- if(_debug) {
- System.out.println("Created physical vomume " + basePV.getName());
- }
-
-
- volName = volName + "_sub";
- Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
- lcdd.add(boxSub);
- Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
- Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
- Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
- lcdd.add(subPos);
- lcdd.add(subRot);
- PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
- if(_debug) {
- System.out.println("Created physical vomume " + subBasePV.getName());
- }
-
- lcdd.add(volumeSub);
- volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
-
- lcdd.add(volume);
-
-
-
-
- }
-
- private void makeExample3b(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
-
-
- if(_debug) {
- System.out.println("--- makeExample3b ----");
-
- }
-
- String volName = "example3b";
- Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
- lcdd.add(box);
- Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
-
- /*
-
- TestRunModuleL13: survey positions
+
+
+ */
+
+ Hep3Vector u = new BasicHep3Vector(1,0,0);
+ Hep3Vector v = new BasicHep3Vector(0,1,0);
+ Hep3Vector w = new BasicHep3Vector(0,0,1);
+
+ Hep3Vector u_L1 = new BasicHep3Vector(0.99955, 0.030000, 0.0000);
+ Hep3Vector v_L1 = new BasicHep3Vector(0.030000, -0.99955, 0.0000);
+ Hep3Vector w_L1 = new BasicHep3Vector(0.0000, 0.0000, -1.0000);
+
+
+
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_L1 = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u_L1.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_L1 = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v_L1.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_L1 = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w_L1.v());
+
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
+
+ Hep3Vector euler_angles = TransformationUtils.getCardanAngles(v_L1, w_L1, v, w);
+
+ //Get the generic rotation
+ org.apache.commons.math3.geometry.euclidean.threed.Rotation r = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(v_3D_L1,w_3D_L1,v_3D, w_3D);
+ //Get the angles
+ double rotations[] = r.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
+
+ if(_debug) {
+ System.out.println("getEulerAngles gives euler_angles: " + euler_angles.toString());
+ System.out.println("manual gives euler_angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
+ }
+
+ if((rotations[0]-euler_angles.x())>0.00001 || (rotations[1]-euler_angles.y())>0.00001 || (rotations[2]-euler_angles.z())>0.00001) {
+ //System.("closing the loop in apache rotation didn't work!");
+ //throw new RuntimeException("closing the loop in apache rotation didn't work!");
+ }
+
+
+
+
+ Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*4,0,0);
+ Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
+ lcdd.add(pos);
+ lcdd.add(rot);
+
+
+ PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
+ if(_debug) {
+ System.out.println("Created physical volume " + basePV.getName());
+ }
+
+
+
+ volName = volName + "_sub";
+ Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
+ lcdd.add(boxSub);
+ Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
+ Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
+ Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
+ lcdd.add(subPos);
+ lcdd.add(subRot);
+ PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
+ if(_debug) {
+ System.out.println("Created physical volume " + subBasePV.getName());
+ }
+
+ lcdd.add(volumeSub);
+ volumeSub.setVisAttributes(lcdd.getVisAttributes("HybridVis"));
+
+ lcdd.add(volume);
+
+
+
+
+ }
+
+
+ private void makeExample4(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
+
+
+ if(_debug) {
+ System.out.println("--- makeExample4 ----");
+
+ }
+
+ String volName = "example4";
+ Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
+ lcdd.add(box);
+ Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
+
+
+
+ Hep3Vector u = new BasicHep3Vector(1,0,0);
+ Hep3Vector v = new BasicHep3Vector(0,1,0);
+ Hep3Vector w = new BasicHep3Vector(0,0,1);
+
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
+
+
+
+ //set up a rotation about the X axis
+ org.apache.commons.math3.geometry.euclidean.threed.Rotation r1 = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(u_3D, -1.0*Math.PI);
+
+ // find y' and z'
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_p = r1.applyTo(u_3D);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_p = r1.applyTo(v_3D);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_p = r1.applyTo(w_3D);
+
+
+ double [] rotations = r1.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
+
+
+ if(_debug) {
+ System.out.println("u_3D: " + u_3D.toString());
+ System.out.println("v_3D: " + v_3D.toString());
+ System.out.println("w_3D: " + w_3D.toString());
+ r1.toString();
+ System.out.println("u_3D_p: " + u_3D_p.toString());
+ System.out.println("v_3D_p: " + v_3D_p.toString());
+ System.out.println("w_3D_p: " + w_3D_p.toString());
+
+ System.out.println("gives euler_angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
+
+ }
+
+
+
+ //apply to unit vector
+
+ Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*2,0,0);
+ Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
+ lcdd.add(pos);
+ lcdd.add(rot);
+
+
+ PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
+ if(_debug) {
+ System.out.println("Created physical volume " + basePV.getName());
+ }
+
+
+
+
+ volName = volName + "_sub";
+ Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
+ lcdd.add(boxSub);
+ Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
+ Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
+ Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
+ lcdd.add(subPos);
+ lcdd.add(subRot);
+ PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
+ if(_debug) {
+ System.out.println("Created physical volume " + subBasePV.getName());
+ }
+
+ lcdd.add(volumeSub);
+ volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
+
+ lcdd.add(volume);
+
+
+
+
+ }
+
+
+ private void makeExample5(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
+
+
+ if(_debug) {
+ System.out.println("--- makeExample5 ----");
+
+ }
+
+ String volName = "example5";
+ Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
+ lcdd.add(box);
+ Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
+
+
+
+ Hep3Vector u = new BasicHep3Vector(1,0,0);
+ Hep3Vector v = new BasicHep3Vector(0,1,0);
+ Hep3Vector w = new BasicHep3Vector(0,0,1);
+
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
+
+
+
+ //set up a rotation about the X axis
+ org.apache.commons.math3.geometry.euclidean.threed.Rotation r1 = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(u_3D, -1.0*Math.PI);
+
+ // find y' and z'
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_p = r1.applyTo(u_3D);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_p = r1.applyTo(v_3D);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_p = r1.applyTo(w_3D);
+
+ // set up a rotation about the Z axis
+ org.apache.commons.math3.geometry.euclidean.threed.Rotation r3 = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(w_3D_p, -0.03);
+ org.apache.commons.math3.geometry.euclidean.threed.Rotation r13 = r3.applyTo(r1);
+
+
+ // find y' and z'
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_pp = r13.applyTo(u_3D);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_pp = r13.applyTo(v_3D);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_pp = r13.applyTo(w_3D);
+ //org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_pp = r13.applyTo(u_3D_p);
+ //org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_pp = r13.applyTo(v_3D_p);
+ //org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_pp = r13.applyTo(w_3D_p);
+
+ double [] rotations = r13.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
+
+
+ if(_debug) {
+ System.out.println("u_3D: " + u_3D.toString());
+ System.out.println("v_3D: " + v_3D.toString());
+ System.out.println("w_3D: " + w_3D.toString());
+ r1.toString();
+ System.out.println("u_3D_p: " + u_3D_p.toString());
+ System.out.println("v_3D_p: " + v_3D_p.toString());
+ System.out.println("w_3D_p: " + w_3D_p.toString());
+ r13.toString();
+ System.out.println("u_3D_pp: " + u_3D_pp.toString());
+ System.out.println("v_3D_pp: " + v_3D_pp.toString());
+ System.out.println("w_3D_pp: " + w_3D_pp.toString());
+ System.out.println("gives euler_angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
+
+ }
+
+
+
+ //apply to unit vector
+
+ Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*3,0,0);
+ Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
+ lcdd.add(pos);
+ lcdd.add(rot);
+
+
+ PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
+ if(_debug) {
+ System.out.println("Created physical volume " + basePV.getName());
+ }
+
+
+ volName = volName + "_sub";
+ Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
+ lcdd.add(boxSub);
+ Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
+ Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
+ Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
+ lcdd.add(subPos);
+ lcdd.add(subRot);
+ PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
+ if(_debug) {
+ System.out.println("Created physical volume " + subBasePV.getName());
+ }
+
+ lcdd.add(volumeSub);
+ volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
+
+ lcdd.add(volume);
+
+
+
+
+ }
+
+
+
+
+ private void makeExample5b(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
+
+
+ if(_debug) {
+ System.out.println("--- makeExample5b ----");
+
+ }
+
+ String volName = "example5b";
+ Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
+ lcdd.add(box);
+ Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
+
+
+
+ Hep3Vector u = new BasicHep3Vector(1,0,0);
+ Hep3Vector v = new BasicHep3Vector(0,1,0);
+ Hep3Vector w = new BasicHep3Vector(0,0,1);
+
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
+
+
+ // set up a rotation about the Z axis
+ org.apache.commons.math3.geometry.euclidean.threed.Rotation r3 = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(w_3D, -0.03);
+
+ // find y' and z'
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_p = r3.applyTo(u_3D);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_p = r3.applyTo(v_3D);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_p = r3.applyTo(w_3D);
+
+
+
+ double [] rotations = r3.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
+
+
+ if(_debug) {
+ System.out.println("u_3D: " + u_3D.toString());
+ System.out.println("v_3D: " + v_3D.toString());
+ System.out.println("w_3D: " + w_3D.toString());
+ r3.toString();
+ System.out.println("u_3D_p: " + u_3D_p.toString());
+ System.out.println("v_3D_p: " + v_3D_p.toString());
+ System.out.println("w_3D_p: " + w_3D_p.toString());
+ System.out.println("gives euler_angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
+
+ }
+
+
+
+ //apply to unit vector
+
+ Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*3,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length*-1.5,0);
+ Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
+ lcdd.add(pos);
+ lcdd.add(rot);
+
+
+ PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
+ if(_debug) {
+ System.out.println("Created physical volume " + basePV.getName());
+ }
+
+
+ volName = volName + "_sub";
+ Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
+ lcdd.add(boxSub);
+ Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
+ Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
+ Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
+ lcdd.add(subPos);
+ lcdd.add(subRot);
+ PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
+ if(_debug) {
+ System.out.println("Created physical volume " + subBasePV.getName());
+ }
+
+ lcdd.add(volumeSub);
+ volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
+
+ lcdd.add(volume);
+
+
+
+
+ }
+
+ private void makeExample3b(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
+
+
+ if(_debug) {
+ System.out.println("--- makeExample3b ----");
+
+ }
+
+ String volName = "example3b";
+ Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
+ lcdd.add(box);
+ Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
+
+ /*
+
+ TestRunModuleL13: survey positions
Survey pos for module_L1b:
ballPos [ 25.000, 661.10, 4.3500]
veePos [ 95.000, 661.10, 4.3500]
@@ -732,11 +731,11 @@
]
-
-
-
-
- LCDDBaseGeom: set position and rotation for volume module_L1b
+
+
+
+
+ LCDDBaseGeom: set position and rotation for volume module_L1b
getEulerAngles: u [ -0.030000, 0.99955, 0.0000] v[ 0.0000, 0.0000, 1.0000] -> [ 0.0000, 1.0000, 0.0000] [ 0.0000, 0.0000, 1.0000]
Input: u {-0.03; 1; 0} v {0; 0; 1} u' {0; 1; 0} v' {0; 0; 1}
rot matrix:
@@ -753,484 +752,484 @@
LCDDBaseGeom: rot [Element: <rotation/>]
LCDDBaseGeom: DONE constructing LCDD object module_L1b
-
-
-
-
- */
-
- Hep3Vector u = new BasicHep3Vector(1,0,0);
- Hep3Vector v = new BasicHep3Vector(0,1,0);
- Hep3Vector w = new BasicHep3Vector(0,0,1);
-
- Hep3Vector u_L1 = new BasicHep3Vector(0.99955, 0.030000, 0.0000);
- Hep3Vector v_L1 = new BasicHep3Vector(-0.030000, 0.99955, 0.0000);
- Hep3Vector w_L1 = new BasicHep3Vector(0.0000, 0.0000, 1.0000);
-
-
-
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_L1 = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u_L1.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_L1 = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v_L1.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_L1 = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w_L1.v());
-
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
-
- Hep3Vector euler_angles = TransformationUtils.getCardanAngles(v_L1, w_L1, v, w);
-
- //Get the generic rotation
- org.apache.commons.math3.geometry.euclidean.threed.Rotation r = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(v_3D_L1,w_3D_L1,v_3D, w_3D);
- //Get the angles
- double rotations[] = r.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
-
- if(_debug) {
- System.out.println("getEulerAngles gives euler_angles: " + euler_angles.toString());
- System.out.println("manual gives euler_angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
- }
-
- if((rotations[0]-euler_angles.x())>0.00001 || (rotations[1]-euler_angles.y())>0.00001 || (rotations[2]-euler_angles.z())>0.00001) {
- //throw new RuntimeException("closing the loop in apache rotation didn't work!");
- }
-
-
-
-
- Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*4,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length*-1.5,0);
- Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
- lcdd.add(pos);
- lcdd.add(rot);
-
-
- PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
- if(_debug) {
- System.out.println("Created physical vomume " + basePV.getName());
- }
-
-
-
- volName = volName + "_sub";
- Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
- lcdd.add(boxSub);
- Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
- Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
- Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
- lcdd.add(subPos);
- lcdd.add(subRot);
- PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
- if(_debug) {
- System.out.println("Created physical vomume " + subBasePV.getName());
- }
-
- lcdd.add(volumeSub);
- volumeSub.setVisAttributes(lcdd.getVisAttributes("HybridVis"));
-
- lcdd.add(volume);
-
-
-
-
- }
-
-
- private void makeExample6(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
-
-
- if(_debug) {
- System.out.println("--- makeExample6 ----");
-
- }
-
- String volName = "example6";
- Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
- lcdd.add(box);
- Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
-
- double [] rotations = {-0.5*Math.PI,0,0};
-
-
- if(_debug) {
-
-
- System.out.println("manual set lcdd angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
-
- }
-
-
-
- //apply to unit vector
-
- Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*-2,0,0);
- Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
- lcdd.add(pos);
- lcdd.add(rot);
-
-
- PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
- if(_debug) {
- System.out.println("Created physical vomume " + basePV.getName());
- }
-
-
-
-
- volName = volName + "_sub";
- Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
- lcdd.add(boxSub);
- Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
- Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
- Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
- lcdd.add(subPos);
- lcdd.add(subRot);
- PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
- if(_debug) {
- System.out.println("Created physical vomume " + subBasePV.getName());
- }
-
- lcdd.add(volumeSub);
- volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
-
- lcdd.add(volume);
-
-
-
-
- }
-
-
-
- private void makeExample66(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
-
-
- if(_debug) {
- System.out.println("--- makeExample66 ----");
-
- }
-
- String volName = "example66";
- Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
- lcdd.add(box);
- Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
-
-
-
- Hep3Vector u = new BasicHep3Vector(1,0,0);
- Hep3Vector v = new BasicHep3Vector(0,1,0);
- Hep3Vector w = new BasicHep3Vector(0,0,1);
-
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
-
-
-
- //set up a rotation about the X axis
- org.apache.commons.math3.geometry.euclidean.threed.Rotation r1 = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(u_3D, -0.5*Math.PI);
-
- // find y' and z'
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_p = r1.applyTo(u_3D);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_p = r1.applyTo(v_3D);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_p = r1.applyTo(w_3D);
-
-
- double [] rotations = r1.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
-
-
- if(_debug) {
- System.out.println("u_3D: " + u_3D.toString());
- System.out.println("v_3D: " + v_3D.toString());
- System.out.println("w_3D: " + w_3D.toString());
- r1.toString();
- System.out.println("u_3D_p: " + u_3D_p.toString());
- System.out.println("v_3D_p: " + v_3D_p.toString());
- System.out.println("w_3D_p: " + w_3D_p.toString());
-
- System.out.println("gives euler_angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
-
- }
-
-
-
- //apply to unit vector
-
- Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*-4,0,0);
- Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
- lcdd.add(pos);
- lcdd.add(rot);
-
-
- PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
- if(_debug) {
- System.out.println("Created physical vomume " + basePV.getName());
- }
-
-
-
-
- volName = volName + "_sub";
- Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
- lcdd.add(boxSub);
- Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
- Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
- Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
- lcdd.add(subPos);
- lcdd.add(subRot);
- PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
- if(_debug) {
- System.out.println("Created physical vomume " + subBasePV.getName());
- }
-
- lcdd.add(volumeSub);
- volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
-
- lcdd.add(volume);
-
-
-
-
- }
-
-
-
-
-
-
- private void makeExample7(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
-
-
- if(_debug) {
- System.out.println("--- makeExample7 ----");
-
- }
-
- String volName = "example7";
- Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
- lcdd.add(box);
- Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
-
- double [] rotations = {-0.5*Math.PI,0,-0.25*Math.PI};
-
-
- if(_debug) {
-
-
- System.out.println("manual set lcdd angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
-
- }
-
-
-
- //apply to unit vector
-
- Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*-2,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length*-1,0);
- Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
- lcdd.add(pos);
- lcdd.add(rot);
-
-
- PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
- if(_debug) {
- System.out.println("Created physical vomume " + basePV.getName());
- }
-
-
-
-
- volName = volName + "_sub";
- Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
- lcdd.add(boxSub);
- Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
- Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
- Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
- lcdd.add(subPos);
- lcdd.add(subRot);
- PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
- if(_debug) {
- System.out.println("Created physical vomume " + subBasePV.getName());
- }
-
- lcdd.add(volumeSub);
- volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
-
- lcdd.add(volume);
-
-
-
-
- }
-
-
-
- private void makeExample77(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
-
-
- if(_debug) {
- System.out.println("--- makeExample77 ----");
-
- }
-
- String volName = "example77";
- Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
- lcdd.add(box);
- Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
-
-
-
- Hep3Vector u = new BasicHep3Vector(1,0,0);
- Hep3Vector v = new BasicHep3Vector(0,1,0);
- Hep3Vector w = new BasicHep3Vector(0,0,1);
-
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
-
-
-
- //set up a rotation about the X axis
- org.apache.commons.math3.geometry.euclidean.threed.Rotation r1 = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(u_3D, -0.5*Math.PI);
-
- // find y' and z'
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_p = r1.applyTo(u_3D);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_p = r1.applyTo(v_3D);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_p = r1.applyTo(w_3D);
-
-
- //set up a rotation about the Z xis
- org.apache.commons.math3.geometry.euclidean.threed.Rotation r3 = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(w_3D_p, -0.25*Math.PI);
-
- org.apache.commons.math3.geometry.euclidean.threed.Rotation r13 = r3.applyTo(r1);
-
- // find y'' and z''
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_pp = r13.applyTo(u_3D);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_pp = r13.applyTo(v_3D);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_pp = r13.applyTo(w_3D);
-
- // find y'' and z'' (cross-check)
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_pp_2 = r3.applyTo(u_3D_p);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_pp_2 = r3.applyTo(v_3D_p);
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_pp_2 = r3.applyTo(w_3D_p);
-
-
- double [] rotations = r13.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
-
-
- if(_debug) {
- System.out.println("u_3D: " + u_3D.toString());
- System.out.println("v_3D: " + v_3D.toString());
- System.out.println("w_3D: " + w_3D.toString());
- r1.toString();
- System.out.println("u_3D_p: " + u_3D_p.toString());
- System.out.println("v_3D_p: " + v_3D_p.toString());
- System.out.println("w_3D_p: " + w_3D_p.toString());
- r13.toString();
- System.out.println("u_3D_pp: " + u_3D_pp.toString());
- System.out.println("v_3D_pp: " + v_3D_pp.toString());
- System.out.println("w_3D_pp: " + w_3D_pp.toString());
-
- System.out.println("gives euler_angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
-
- System.out.println("u_3D_pp_2: " + u_3D_pp_2.toString());
- System.out.println("v_3D_pp_2: " + v_3D_pp_2.toString());
- System.out.println("w_3D_pp_2: " + w_3D_pp_2.toString());
- }
-
-
-
- //apply to unit vector
-
- Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*-4,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length*-1,0);
- Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
- lcdd.add(pos);
- lcdd.add(rot);
-
-
- PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
- if(_debug) {
- System.out.println("Created physical vomume " + basePV.getName());
- }
-
-
-
-
- volName = volName + "_sub";
- Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
- lcdd.add(boxSub);
- Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
- Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
- Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
- lcdd.add(subPos);
- lcdd.add(subRot);
- PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
- if(_debug) {
- System.out.println("Created physical vomume " + subBasePV.getName());
- }
-
- lcdd.add(volumeSub);
- volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
-
- lcdd.add(volume);
-
-
-
-
- }
-
-
-
-
- private void makeExample8(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
-
-
- if(_debug) {
- System.out.println("--- makeExample8 ----");
-
- }
-
- String volName = "example8";
- Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
- lcdd.add(box);
- Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
-
-
- Hep3Vector u = new BasicHep3Vector(1,0,0);
- Hep3Vector v = new BasicHep3Vector(0,1,0);
- Hep3Vector w = new BasicHep3Vector(0,0,1);
-
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
-
- Hep3Vector u_L1 = new BasicHep3Vector(1/Math.sqrt(2),0,1/Math.sqrt(2));
- Hep3Vector v_L1 = new BasicHep3Vector(-1/Math.sqrt(2),0,1/Math.sqrt(2));
- Hep3Vector w_L1 = new BasicHep3Vector(0,-1,0);
-
- Hep3Vector euler_angles = TransformationUtils.getCardanAngles(u_L1, v_L1, u, v);
-
-
+
+
+
+
+ */
+
+ Hep3Vector u = new BasicHep3Vector(1,0,0);
+ Hep3Vector v = new BasicHep3Vector(0,1,0);
+ Hep3Vector w = new BasicHep3Vector(0,0,1);
+
+ Hep3Vector u_L1 = new BasicHep3Vector(0.99955, 0.030000, 0.0000);
+ Hep3Vector v_L1 = new BasicHep3Vector(-0.030000, 0.99955, 0.0000);
+ Hep3Vector w_L1 = new BasicHep3Vector(0.0000, 0.0000, 1.0000);
+
+
+
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_L1 = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u_L1.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_L1 = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v_L1.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_L1 = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w_L1.v());
+
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
+
+ Hep3Vector euler_angles = TransformationUtils.getCardanAngles(v_L1, w_L1, v, w);
+
+ //Get the generic rotation
+ org.apache.commons.math3.geometry.euclidean.threed.Rotation r = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(v_3D_L1,w_3D_L1,v_3D, w_3D);
+ //Get the angles
+ double rotations[] = r.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
+
+ if(_debug) {
+ System.out.println("getEulerAngles gives euler_angles: " + euler_angles.toString());
+ System.out.println("manual gives euler_angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
+ }
+
+ if((rotations[0]-euler_angles.x())>0.00001 || (rotations[1]-euler_angles.y())>0.00001 || (rotations[2]-euler_angles.z())>0.00001) {
+ //throw new RuntimeException("closing the loop in apache rotation didn't work!");
+ }
+
+
+
+
+ Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*4,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length*-1.5,0);
+ Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
+ lcdd.add(pos);
+ lcdd.add(rot);
+
+
+ PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
+ if(_debug) {
+ System.out.println("Created physical vomume " + basePV.getName());
+ }
+
+
+
+ volName = volName + "_sub";
+ Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
+ lcdd.add(boxSub);
+ Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
+ Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
+ Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
+ lcdd.add(subPos);
+ lcdd.add(subRot);
+ PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
+ if(_debug) {
+ System.out.println("Created physical vomume " + subBasePV.getName());
+ }
+
+ lcdd.add(volumeSub);
+ volumeSub.setVisAttributes(lcdd.getVisAttributes("HybridVis"));
+
+ lcdd.add(volume);
+
+
+
+
+ }
+
+
+ private void makeExample6(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
+
+
+ if(_debug) {
+ System.out.println("--- makeExample6 ----");
+
+ }
+
+ String volName = "example6";
+ Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
+ lcdd.add(box);
+ Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
+
+ double [] rotations = {-0.5*Math.PI,0,0};
+
+
+ if(_debug) {
+
+
+ System.out.println("manual set lcdd angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
+
+ }
+
+
+
+ //apply to unit vector
+
+ Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*-2,0,0);
+ Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
+ lcdd.add(pos);
+ lcdd.add(rot);
+
+
+ PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
+ if(_debug) {
+ System.out.println("Created physical vomume " + basePV.getName());
+ }
+
+
+
+
+ volName = volName + "_sub";
+ Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
+ lcdd.add(boxSub);
+ Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
+ Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
+ Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
+ lcdd.add(subPos);
+ lcdd.add(subRot);
+ PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
+ if(_debug) {
+ System.out.println("Created physical vomume " + subBasePV.getName());
+ }
+
+ lcdd.add(volumeSub);
+ volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
+
+ lcdd.add(volume);
+
+
+
+
+ }
+
+
+
+ private void makeExample66(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
+
+
+ if(_debug) {
+ System.out.println("--- makeExample66 ----");
+
+ }
+
+ String volName = "example66";
+ Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
+ lcdd.add(box);
+ Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
+
+
+
+ Hep3Vector u = new BasicHep3Vector(1,0,0);
+ Hep3Vector v = new BasicHep3Vector(0,1,0);
+ Hep3Vector w = new BasicHep3Vector(0,0,1);
+
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
+
+
+
+ //set up a rotation about the X axis
+ org.apache.commons.math3.geometry.euclidean.threed.Rotation r1 = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(u_3D, -0.5*Math.PI);
+
+ // find y' and z'
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_p = r1.applyTo(u_3D);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_p = r1.applyTo(v_3D);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_p = r1.applyTo(w_3D);
+
+
+ double [] rotations = r1.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
+
+
+ if(_debug) {
+ System.out.println("u_3D: " + u_3D.toString());
+ System.out.println("v_3D: " + v_3D.toString());
+ System.out.println("w_3D: " + w_3D.toString());
+ r1.toString();
+ System.out.println("u_3D_p: " + u_3D_p.toString());
+ System.out.println("v_3D_p: " + v_3D_p.toString());
+ System.out.println("w_3D_p: " + w_3D_p.toString());
+
+ System.out.println("gives euler_angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
+
+ }
+
+
+
+ //apply to unit vector
+
+ Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*-4,0,0);
+ Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
+ lcdd.add(pos);
+ lcdd.add(rot);
+
+
+ PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
+ if(_debug) {
+ System.out.println("Created physical vomume " + basePV.getName());
+ }
+
+
+
+
+ volName = volName + "_sub";
+ Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
+ lcdd.add(boxSub);
+ Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
+ Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
+ Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
+ lcdd.add(subPos);
+ lcdd.add(subRot);
+ PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
+ if(_debug) {
+ System.out.println("Created physical vomume " + subBasePV.getName());
+ }
+
+ lcdd.add(volumeSub);
+ volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
+
+ lcdd.add(volume);
+
+
+
+
+ }
+
+
+
+
+
+
+ private void makeExample7(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
+
+
+ if(_debug) {
+ System.out.println("--- makeExample7 ----");
+
+ }
+
+ String volName = "example7";
+ Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
+ lcdd.add(box);
+ Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
+
+ double [] rotations = {-0.5*Math.PI,0,-0.25*Math.PI};
+
+
+ if(_debug) {
+
+
+ System.out.println("manual set lcdd angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
+
+ }
+
+
+
+ //apply to unit vector
+
+ Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*-2,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length*-1,0);
+ Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
+ lcdd.add(pos);
+ lcdd.add(rot);
+
+
+ PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
+ if(_debug) {
+ System.out.println("Created physical vomume " + basePV.getName());
+ }
+
+
+
+
+ volName = volName + "_sub";
+ Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
+ lcdd.add(boxSub);
+ Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
+ Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
+ Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
+ lcdd.add(subPos);
+ lcdd.add(subRot);
+ PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
+ if(_debug) {
+ System.out.println("Created physical vomume " + subBasePV.getName());
+ }
+
+ lcdd.add(volumeSub);
+ volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
+
+ lcdd.add(volume);
+
+
+
+
+ }
+
+
+
+ private void makeExample77(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
+
+
+ if(_debug) {
+ System.out.println("--- makeExample77 ----");
+
+ }
+
+ String volName = "example77";
+ Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
+ lcdd.add(box);
+ Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
+
+
+
+ Hep3Vector u = new BasicHep3Vector(1,0,0);
+ Hep3Vector v = new BasicHep3Vector(0,1,0);
+ Hep3Vector w = new BasicHep3Vector(0,0,1);
+
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
+
+
+
+ //set up a rotation about the X axis
+ org.apache.commons.math3.geometry.euclidean.threed.Rotation r1 = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(u_3D, -0.5*Math.PI);
+
+ // find y' and z'
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_p = r1.applyTo(u_3D);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_p = r1.applyTo(v_3D);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_p = r1.applyTo(w_3D);
+
+
+ //set up a rotation about the Z xis
+ org.apache.commons.math3.geometry.euclidean.threed.Rotation r3 = new org.apache.commons.math3.geometry.euclidean.threed.Rotation(w_3D_p, -0.25*Math.PI);
+
+ org.apache.commons.math3.geometry.euclidean.threed.Rotation r13 = r3.applyTo(r1);
+
+ // find y'' and z''
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_pp = r13.applyTo(u_3D);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_pp = r13.applyTo(v_3D);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_pp = r13.applyTo(w_3D);
+
+ // find y'' and z'' (cross-check)
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D_pp_2 = r3.applyTo(u_3D_p);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D_pp_2 = r3.applyTo(v_3D_p);
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D_pp_2 = r3.applyTo(w_3D_p);
+
+
+ double [] rotations = r13.getAngles(org.apache.commons.math3.geometry.euclidean.threed.RotationOrder.XYZ);
+
+
+ if(_debug) {
+ System.out.println("u_3D: " + u_3D.toString());
+ System.out.println("v_3D: " + v_3D.toString());
+ System.out.println("w_3D: " + w_3D.toString());
+ r1.toString();
+ System.out.println("u_3D_p: " + u_3D_p.toString());
+ System.out.println("v_3D_p: " + v_3D_p.toString());
+ System.out.println("w_3D_p: " + w_3D_p.toString());
+ r13.toString();
+ System.out.println("u_3D_pp: " + u_3D_pp.toString());
+ System.out.println("v_3D_pp: " + v_3D_pp.toString());
+ System.out.println("w_3D_pp: " + w_3D_pp.toString());
+
+ System.out.println("gives euler_angles: (" + rotations[0] + "," + rotations[1] + "," + rotations[2] + ")");
+
+ System.out.println("u_3D_pp_2: " + u_3D_pp_2.toString());
+ System.out.println("v_3D_pp_2: " + v_3D_pp_2.toString());
+ System.out.println("w_3D_pp_2: " + w_3D_pp_2.toString());
+ }
+
+
+
+ //apply to unit vector
+
+ Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*-4,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length*-1,0);
+ Rotation rot = new Rotation(volName + "_rotation",rotations[0],rotations[1],rotations[2]);
+ lcdd.add(pos);
+ lcdd.add(rot);
+
+
+ PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
+ if(_debug) {
+ System.out.println("Created physical vomume " + basePV.getName());
+ }
+
+
+
+
+ volName = volName + "_sub";
+ Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
+ lcdd.add(boxSub);
+ Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
+ Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
+ Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
+ lcdd.add(subPos);
+ lcdd.add(subRot);
+ PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
+ if(_debug) {
+ System.out.println("Created physical vomume " + subBasePV.getName());
+ }
+
+ lcdd.add(volumeSub);
+ volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
+
+ lcdd.add(volume);
+
+
+
+
+ }
+
+
+
+
+ private void makeExample8(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
if(_debug) {
-
-
- System.out.println("euler angles " + euler_angles.toString());
-
+ System.out.println("--- makeExample8 ----");
+
}
-
-
- //apply to unit vector
-
+ String volName = "example8";
+ Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
+ lcdd.add(box);
+ Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
+
+
+ Hep3Vector u = new BasicHep3Vector(1,0,0);
+ Hep3Vector v = new BasicHep3Vector(0,1,0);
+ Hep3Vector w = new BasicHep3Vector(0,0,1);
+
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
+
+ Hep3Vector u_L1 = new BasicHep3Vector(1/Math.sqrt(2),0,1/Math.sqrt(2));
+ Hep3Vector v_L1 = new BasicHep3Vector(-1/Math.sqrt(2),0,1/Math.sqrt(2));
+ Hep3Vector w_L1 = new BasicHep3Vector(0,-1,0);
+
+ Hep3Vector euler_angles = TransformationUtils.getCardanAngles(u_L1, v_L1, u, v);
+
+
+
+
+ if(_debug) {
+
+
+ System.out.println("euler angles " + euler_angles.toString());
+
+ }
+
+
+
+ //apply to unit vector
+
Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*-1,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length*-3,0);
Rotation rot = new Rotation(volName + "_rotation",euler_angles.x(),euler_angles.y(),euler_angles.z());
lcdd.add(pos);
lcdd.add(rot);
-
-
+
+
PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
if(_debug) {
- System.out.println("Created physical vomume " + basePV.getName());
+ System.out.println("Created physical vomume " + basePV.getName());
}
-
+
@@ -1244,197 +1243,197 @@
lcdd.add(subRot);
PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
if(_debug) {
- System.out.println("Created physical vomume " + subBasePV.getName());
+ System.out.println("Created physical vomume " + subBasePV.getName());
}
-
+
lcdd.add(volumeSub);
volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
lcdd.add(volume);
-
-
-
-
- }
-
-
-
- private void makeExample9(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
-
-
- if(_debug) {
- System.out.println("--- makeExample9 ----");
-
- }
-
- String volName = "example9";
- Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
- lcdd.add(box);
- Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
-
-
- Hep3Vector u = new BasicHep3Vector(1,0,0);
- Hep3Vector v = new BasicHep3Vector(0,1,0);
- Hep3Vector w = new BasicHep3Vector(0,0,1);
-
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
-
- Hep3Vector u_L1 = new BasicHep3Vector(1,0,0);
- Hep3Vector v_L1 = new BasicHep3Vector(0,0,1);
- Hep3Vector w_L1 = new BasicHep3Vector(0,-1,0);
-
- Hep3Vector euler_angles = TransformationUtils.getCardanAngles(u_L1, v_L1, u, v);
-
-
-
-
- if(_debug) {
-
-
- System.out.println("euler angles " + euler_angles.toString());
-
- }
-
-
-
- //apply to unit vector
-
- Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*-1,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length*-2,0);
- Rotation rot = new Rotation(volName + "_rotation",euler_angles.x(),euler_angles.y(),euler_angles.z());
- lcdd.add(pos);
- lcdd.add(rot);
-
-
- PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
- if(_debug) {
- System.out.println("Created physical vomume " + basePV.getName());
- }
-
-
-
-
- volName = volName + "_sub";
- Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
- lcdd.add(boxSub);
- Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
- Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
- Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
- lcdd.add(subPos);
- lcdd.add(subRot);
- PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
- if(_debug) {
- System.out.println("Created physical vomume " + subBasePV.getName());
- }
-
- lcdd.add(volumeSub);
- volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
-
- lcdd.add(volume);
-
-
-
-
- }
-
-
-
-
-
- private void makeExample10(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
-
-
- if(_debug) {
- System.out.println("--- makeExample10 ----");
-
- }
-
- String volName = "example10";
- Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
- lcdd.add(box);
- Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
-
-
- Hep3Vector u = new BasicHep3Vector(1,0,0);
- Hep3Vector v = new BasicHep3Vector(0,1,0);
- Hep3Vector w = new BasicHep3Vector(0,0,1);
-
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
- org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
-
- Hep3Vector u_L1 = new BasicHep3Vector(1/Math.sqrt(2),1/Math.sqrt(2),0);
- Hep3Vector v_L1 = new BasicHep3Vector(0,0,1);
- Hep3Vector w_L1 = new BasicHep3Vector(1/Math.sqrt(2),-1/Math.sqrt(2),0);
-
- Hep3Vector euler_angles = TransformationUtils.getCardanAngles(u_L1, v_L1, u, v);
-
-
-
-
- if(_debug) {
-
-
- System.out.println("euler angles " + euler_angles.toString());
-
- }
-
-
-
- //apply to unit vector
-
- Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*-2,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length*-2,0);
- Rotation rot = new Rotation(volName + "_rotation",euler_angles.x(),euler_angles.y(),euler_angles.z());
- lcdd.add(pos);
- lcdd.add(rot);
-
-
- PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
- if(_debug) {
- System.out.println("Created physical vomume " + basePV.getName());
- }
-
-
-
-
- volName = volName + "_sub";
- Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
- lcdd.add(boxSub);
- Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
- Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
- Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
- lcdd.add(subPos);
- lcdd.add(subRot);
- PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
- if(_debug) {
- System.out.println("Created physical vomume " + subBasePV.getName());
- }
-
- lcdd.add(volumeSub);
- volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
-
- lcdd.add(volume);
-
-
-
-
- }
+
+
+
+
+ }
+
+
+
+ private void makeExample9(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
+
+
+ if(_debug) {
+ System.out.println("--- makeExample9 ----");
+
+ }
+
+ String volName = "example9";
+ Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
+ lcdd.add(box);
+ Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
+
+
+ Hep3Vector u = new BasicHep3Vector(1,0,0);
+ Hep3Vector v = new BasicHep3Vector(0,1,0);
+ Hep3Vector w = new BasicHep3Vector(0,0,1);
+
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
+
+ Hep3Vector u_L1 = new BasicHep3Vector(1,0,0);
+ Hep3Vector v_L1 = new BasicHep3Vector(0,0,1);
+ Hep3Vector w_L1 = new BasicHep3Vector(0,-1,0);
+
+ Hep3Vector euler_angles = TransformationUtils.getCardanAngles(u_L1, v_L1, u, v);
+
+
+
+
+ if(_debug) {
+
+
+ System.out.println("euler angles " + euler_angles.toString());
+
+ }
+
+
+
+ //apply to unit vector
+
+ Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*-1,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length*-2,0);
+ Rotation rot = new Rotation(volName + "_rotation",euler_angles.x(),euler_angles.y(),euler_angles.z());
+ lcdd.add(pos);
+ lcdd.add(rot);
+
+
+ PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
+ if(_debug) {
+ System.out.println("Created physical vomume " + basePV.getName());
+ }
+
+
+
+
+ volName = volName + "_sub";
+ Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
+ lcdd.add(boxSub);
+ Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
+ Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
+ Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
+ lcdd.add(subPos);
+ lcdd.add(subRot);
+ PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
+ if(_debug) {
+ System.out.println("Created physical vomume " + subBasePV.getName());
+ }
+
+ lcdd.add(volumeSub);
+ volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
+
+ lcdd.add(volume);
+
+
+
+
+ }
+
+
+
+
+
+ private void makeExample10(LCDD lcdd, SensitiveDetector sens) throws JDOMException {
+
+
+ if(_debug) {
+ System.out.println("--- makeExample10 ----");
+
+ }
+
+ String volName = "example10";
+ Box box = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/4.0);
+ lcdd.add(box);
+ Volume volume = new Volume(volName + "_volume", box, lcdd.getMaterial("Vacuum"));
+
+
+ Hep3Vector u = new BasicHep3Vector(1,0,0);
+ Hep3Vector v = new BasicHep3Vector(0,1,0);
+ Hep3Vector w = new BasicHep3Vector(0,0,1);
+
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D u_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(u.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D v_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(v.v());
+ org.apache.commons.math3.geometry.euclidean.threed.Vector3D w_3D = new org.apache.commons.math3.geometry.euclidean.threed.Vector3D(w.v());
+
+ Hep3Vector u_L1 = new BasicHep3Vector(1/Math.sqrt(2),1/Math.sqrt(2),0);
+ Hep3Vector v_L1 = new BasicHep3Vector(0,0,1);
+ Hep3Vector w_L1 = new BasicHep3Vector(1/Math.sqrt(2),-1/Math.sqrt(2),0);
+
+ Hep3Vector euler_angles = TransformationUtils.getCardanAngles(u_L1, v_L1, u, v);
+
+
+
+
+ if(_debug) {
+
+
+ System.out.println("euler angles " + euler_angles.toString());
+
+ }
+
+
+
+ //apply to unit vector
+
+ Position pos = new Position(volName + "_position",HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width*1.5*-2,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length*-2,0);
+ Rotation rot = new Rotation(volName + "_rotation",euler_angles.x(),euler_angles.y(),euler_angles.z());
+ lcdd.add(pos);
+ lcdd.add(rot);
+
+
+ PhysVol basePV = new PhysVol(volume, lcdd.pickMotherVolume(this), pos, rot);
+ if(_debug) {
+ System.out.println("Created physical vomume " + basePV.getName());
+ }
+
+
+
+
+ volName = volName + "_sub";
+ Box boxSub = new Box(volName + "Box", HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_width, HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0 , HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/8.0);
+ lcdd.add(boxSub);
+ Volume volumeSub = new Volume(volName + "_volume", boxSub, lcdd.getMaterial("Vacuum"));
+ Position subPos = new Position(volName + "_position",0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/4.0*2-HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_length/8.0,HPSTestRunTracker2014GeometryDefinition.TrackerEnvelope.base_height/16.0);
+ Rotation subRot = new Rotation(volName + "_rotation",0,0,0);
+ lcdd.add(subPos);
+ lcdd.add(subRot);
+ PhysVol subBasePV = new PhysVol(volumeSub, volume, subPos, subRot);
+ if(_debug) {
+ System.out.println("Created physical vomume " + subBasePV.getName());
+ }
+
+ lcdd.add(volumeSub);
+ volumeSub.setVisAttributes(lcdd.getVisAttributes("SensorVis"));
+
+ lcdd.add(volume);
+
+
+
+
+ }
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker.java Wed Apr 27 11:11:32 2016
@@ -69,7 +69,7 @@
// layer
for (Iterator i = node.getChildren("layer").iterator(); i.hasNext();)
{
- // Modules are numbered from 0 starting in each layer.
+ // Modules are numbered from 0 starting in each layer.
int moduleNumber = 0;
Element layerElement = (Element) i.next();
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2.java Wed Apr 27 11:11:32 2016
@@ -53,79 +53,79 @@
}
// Place modules within the tracking volume.
- private void createModulePlacements(LCDD lcdd, int sysId, String subdetName) throws DataConversionException {
- //Volume trackingVolume = lcdd.getTrackingVolume();
- Volume momVolume = lcdd.pickMotherVolume(this);
- // Loop over layers.
+ private void createModulePlacements(LCDD lcdd, int sysId, String subdetName) throws DataConversionException {
+ //Volume trackingVolume = lcdd.getTrackingVolume();
+ Volume momVolume = lcdd.pickMotherVolume(this);
+ // Loop over layers.
for (Iterator i = node.getChildren("layer").iterator(); i.hasNext();) {
- Element layerElement = (Element)i.next();
- int layerNumber = layerElement.getAttribute("id").getIntValue();
- // Loop over modules within layer.
- for (Iterator j = layerElement.getChildren("module_placement").iterator(); j.hasNext();) {
-
- Element modulePlacementElement = (Element)j.next();
- String moduleName = modulePlacementElement.getAttributeValue("name");
- int moduleNumber = modulePlacementElement.getAttribute("id").getIntValue();
-
- // Get the position and rotation parameters. All must be explicitly specified.
- double x, y, z;
- double rx, ry, rz;
- x = modulePlacementElement.getAttribute("x").getDoubleValue();
- y = modulePlacementElement.getAttribute("y").getDoubleValue();
- z = modulePlacementElement.getAttribute("z").getDoubleValue();
- rx = modulePlacementElement.getAttribute("rx").getDoubleValue();
- ry = modulePlacementElement.getAttribute("ry").getDoubleValue();
- rz = modulePlacementElement.getAttribute("rz").getDoubleValue();
-
- // Place the module with position and rotation from above.
- String modulePlacementName = subdetName + "_" + moduleName + "_layer" + layerNumber + "_module" + moduleNumber;
- Position p = new Position(modulePlacementName + "_position", x, y, z);
- Rotation r = new Rotation(modulePlacementName + "_rotation", rx, ry, rz);
- lcdd.add(p);
- lcdd.add(r);
- //PhysVol modulePhysVol = new PhysVol(modules.get(moduleName), trackingVolume, p, r);
- PhysVol modulePhysVol = new PhysVol(modules.get(moduleName), momVolume, p, r);
-
- // Add identifier values to the placement volume.
- modulePhysVol.addPhysVolID("system", sysId);
- modulePhysVol.addPhysVolID("barrel", 0);
- modulePhysVol.addPhysVolID("layer", layerNumber);
- modulePhysVol.addPhysVolID("module", moduleNumber);
- }
- }
- }
+ Element layerElement = (Element)i.next();
+ int layerNumber = layerElement.getAttribute("id").getIntValue();
+ // Loop over modules within layer.
+ for (Iterator j = layerElement.getChildren("module_placement").iterator(); j.hasNext();) {
+
+ Element modulePlacementElement = (Element)j.next();
+ String moduleName = modulePlacementElement.getAttributeValue("name");
+ int moduleNumber = modulePlacementElement.getAttribute("id").getIntValue();
+
+ // Get the position and rotation parameters. All must be explicitly specified.
+ double x, y, z;
+ double rx, ry, rz;
+ x = modulePlacementElement.getAttribute("x").getDoubleValue();
+ y = modulePlacementElement.getAttribute("y").getDoubleValue();
+ z = modulePlacementElement.getAttribute("z").getDoubleValue();
+ rx = modulePlacementElement.getAttribute("rx").getDoubleValue();
+ ry = modulePlacementElement.getAttribute("ry").getDoubleValue();
+ rz = modulePlacementElement.getAttribute("rz").getDoubleValue();
+
+ // Place the module with position and rotation from above.
+ String modulePlacementName = subdetName + "_" + moduleName + "_layer" + layerNumber + "_module" + moduleNumber;
+ Position p = new Position(modulePlacementName + "_position", x, y, z);
+ Rotation r = new Rotation(modulePlacementName + "_rotation", rx, ry, rz);
+ lcdd.add(p);
+ lcdd.add(r);
+ //PhysVol modulePhysVol = new PhysVol(modules.get(moduleName), trackingVolume, p, r);
+ PhysVol modulePhysVol = new PhysVol(modules.get(moduleName), momVolume, p, r);
+
+ // Add identifier values to the placement volume.
+ modulePhysVol.addPhysVolID("system", sysId);
+ modulePhysVol.addPhysVolID("barrel", 0);
+ modulePhysVol.addPhysVolID("layer", layerNumber);
+ modulePhysVol.addPhysVolID("module", moduleNumber);
+ }
+ }
+ }
// Create the module logical volumes.
- private void createModules(LCDD lcdd, SensitiveDetector sd) {
+ private void createModules(LCDD lcdd, SensitiveDetector sd) {
for (Iterator i = node.getChildren("module").iterator(); i.hasNext();) {
Element module = (Element) i.next();
String moduleName = module.getAttributeValue("name");
moduleParameters.put(moduleName, new ModuleParameters(module));
modules.put(moduleName, makeModule(moduleParameters.get(moduleName), sd, lcdd));
}
- }
-
- private Volume makeModule(ModuleParameters params, SensitiveDetector sd, LCDD lcdd) {
- double thickness = params.getThickness();
- double x, y;
- // x = params.getDimension(0);
- // y = params.getDimension(1);
- y = params.getDimension(0); // Y is in X plane in world coordinates.
- x = params.getDimension(1); // X is in Y plane in world coordinates.
- // System.out.println("making module with x = " + x + " and y = " + y);
- Box box = new Box(params.getName() + "Box", x, y, thickness);
- lcdd.add(box);
-
- Volume moduleVolume = new Volume(params.getName() + "Volume", box, vacuum);
- makeModuleComponents(moduleVolume, params, sd, lcdd);
- lcdd.add(moduleVolume);
-
- if (params.getVis() != null) {
- moduleVolume.setVisAttributes(lcdd.getVisAttributes(params.getVis()));
- }
-
- return moduleVolume;
- }
+ }
+
+ private Volume makeModule(ModuleParameters params, SensitiveDetector sd, LCDD lcdd) {
+ double thickness = params.getThickness();
+ double x, y;
+ // x = params.getDimension(0);
+ // y = params.getDimension(1);
+ y = params.getDimension(0); // Y is in X plane in world coordinates.
+ x = params.getDimension(1); // X is in Y plane in world coordinates.
+ // System.out.println("making module with x = " + x + " and y = " + y);
+ Box box = new Box(params.getName() + "Box", x, y, thickness);
+ lcdd.add(box);
+
+ Volume moduleVolume = new Volume(params.getName() + "Volume", box, vacuum);
+ makeModuleComponents(moduleVolume, params, sd, lcdd);
+ lcdd.add(moduleVolume);
+
+ if (params.getVis() != null) {
+ moduleVolume.setVisAttributes(lcdd.getVisAttributes(params.getVis()));
+ }
+
+ return moduleVolume;
+ }
private void makeModuleComponents(Volume moduleVolume, ModuleParameters moduleParameters, SensitiveDetector sd, LCDD lcdd) {
Box envelope = (Box) lcdd.getSolid(moduleVolume.getSolidRef());
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014.java Wed Apr 27 11:11:32 2016
@@ -61,7 +61,7 @@
return moduleNumber;
}
-
+
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal.java Wed Apr 27 11:11:32 2016
@@ -15,39 +15,33 @@
/**
* Reconstruction version of HPS ECal with crystal array.
*
- * @author Jeremy McCormick <[log in to unmask]>
- * @author Timothy Nelson <[log in to unmask]>
- * @version $Id: HPSEcal.java,v 1.6 2011/07/28 20:20:18 jeremy Exp $
+ * @author Jeremy McCormick, SLAC
+ * @author Timothy Nelson, SLAC
*/
-public class HPSEcal extends AbstractSubdetector
-{
+public class HPSEcal extends AbstractSubdetector {
+
private int nx;
private int ny;
private double beamgap;
private double dface;
private boolean oddX;
- public static class NeighborMap extends HashMap<Long,Set<Long>>
- {
+ public static class NeighborMap extends HashMap<Long, Set<Long>> {
+
IIdentifierHelper helper;
- public NeighborMap(IIdentifierHelper helper)
- {
+
+ public NeighborMap(IIdentifierHelper helper) {
this.helper = helper;
}
-
- public String toString()
- {
+
+ public String toString() {
System.out.println("NeighborMap has " + this.size() + " entries.");
StringBuffer buff = new StringBuffer();
- for (long id : this.keySet())
- {
- buff.append(helper.unpack(new Identifier(id)))
- .append("\n");
- Set<Long> nei = this.get(id);
- for (long nid : nei)
- {
- buff.append(" " + helper.unpack(new Identifier(nid)))
- .append("\n");
+ for (long id : this.keySet()) {
+ buff.append(helper.unpack(new Identifier(id))).append("\n");
+ Set<Long> nei = this.get(id);
+ for (long nid : nei) {
+ buff.append(" " + helper.unpack(new Identifier(nid))).append("\n");
}
}
return buff.toString();
@@ -56,308 +50,270 @@
private NeighborMap neighborMap = null;
- HPSEcal(Element node) throws JDOMException
- {
+ HPSEcal(Element node) throws JDOMException {
super(node);
-
+
Element layout = node.getChild("layout");
-
+
nx = layout.getAttribute("nx").getIntValue();
ny = layout.getAttribute("ny").getIntValue();
beamgap = layout.getAttribute("beamgap").getDoubleValue();
dface = layout.getAttribute("dface").getDoubleValue();
-
- if (nx % 2 != 0)
+
+ if (nx % 2 != 0) {
oddX = true;
- }
-
- public double distanceToFace()
- {
+ }
+ }
+
+ public double distanceToFace() {
return dface;
}
-
- public double beamGap()
- {
+
+ public double beamGap() {
return beamgap;
}
-
+
/**
* The number of crystals in X in one section.
- * @return
+ *
+ * @return the number of crystals in X in one section
*/
- public double nx()
- {
+ public double nx() {
return nx;
}
-
+
/**
* The number of crystals in y in one section.
- * @return
+ *
+ * @return the number of crystals in Y in one section
*/
- public double ny()
- {
+ public double ny() {
return ny;
- }
-
+ }
+
// Class for storing neighbor incides in XY and side.
- static class XYSide implements Comparator<XYSide>
- {
+ static class XYSide implements Comparator<XYSide> {
+
int x;
int y;
int side;
-
- public XYSide(int x, int y, int side)
- {
+
+ public XYSide(int x, int y, int side) {
this.x = x;
this.y = y;
this.side = side;
}
-
- public int x()
- {
+
+ public int x() {
return x;
}
-
- public int y()
- {
+
+ public int y() {
return y;
}
-
- public int side()
- {
+
+ public int side() {
return side;
}
-
- public boolean equals(Object o)
- {
- XYSide xy = (XYSide)o;
- return xy.x() == x && xy.y() == y && xy.side() == side;
- }
-
- public int compare(XYSide o1, XYSide o2)
- {
- if (o1.equals(o2))
- {
+
+ public boolean equals(Object o) {
+ XYSide xy = (XYSide) o;
+ return xy.x() == x && xy.y() == y && xy.side() == side;
+ }
+
+ public int compare(XYSide o1, XYSide o2) {
+ if (o1.equals(o2)) {
return 0;
- }
- else
- {
+ } else {
return -1;
}
}
}
-
+
/**
- * Get the neighbors for a given cell ID. Each crystal not on an edge
- * has 8 neighbors. Edge crystals have fewer.
+ * Get the neighbors for a given cell ID. Each crystal not on an edge has 8 neighbors. Edge crystals have fewer.
+ *
* @param id The cell ID.
* @return A <code>Set</code> containing the cell's neighbors.
*/
- Set<Long> getNeighbors(Long id)
- {
+ Set<Long> getNeighbors(Long id) {
// Get the IDDecoder.
- IDDecoder dec = getIDDecoder();
-
+ IDDecoder dec = getIDDecoder();
+
// Set the ID.
dec.setID(id);
-
+
// Get ID field values.
int x = dec.getValue("ix");
int y = dec.getValue("iy");
int side = dec.getValue("side");
-
+
// Get field indices.
int ix = dec.getFieldIndex("ix");
int iy = dec.getFieldIndex("iy");
int iside = dec.getFieldIndex("side");
-
+
// Get X, Y, & side neighbor data for this crystal.
Set<XYSide> neighbors = getNeighbors(x, y, side);
// Get buffer with values from current ID.
int[] buffer = new int[dec.getFieldCount()];
- dec.getValues(buffer);
-
+ dec.getValues(buffer);
+
// Create an encoder to make neighbor IDs.
IDEncoder enc = new IDEncoder(dec.getIDDescription());
-
+
// Set to hold neighbor IDs.
Set<Long> ids = new HashSet<Long>();
-
+
// Loop over neighbor objects to make IDs.
- for (XYSide xyside : neighbors)
- {
+ for (XYSide xyside : neighbors) {
buffer[ix] = xyside.x;
buffer[iy] = xyside.y;
buffer[iside] = xyside.side;
long nId = enc.setValues(buffer);
ids.add(nId);
}
-
+
return ids;
}
-
- Set<XYSide> getNeighbors(int ix, int iy, int side)
- {
+
+ Set<XYSide> getNeighbors(int ix, int iy, int side) {
Set<Integer> xneighbors = getXNeighbors(ix);
Set<Integer> yneighbors = getYNeighbors(iy);
-
+
Set<XYSide> neighbors = new HashSet<XYSide>();
-
- for (Integer jx : xneighbors)
- {
- for (Integer jy : yneighbors)
- {
+
+ for (Integer jx : xneighbors) {
+ for (Integer jy : yneighbors) {
// Filter out self.
- if (jx == ix && jy == iy)
- {
+ if (jx == ix && jy == iy) {
continue;
}
-
- neighbors.add(new XYSide(jx,jy,side));
+
+ neighbors.add(new XYSide(jx, jy, side));
}
}
-
+
return neighbors;
}
-
- Set<Integer> getXNeighbors(int ix)
- {
+
+ Set<Integer> getXNeighbors(int ix) {
Set<Integer> neighbors = new HashSet<Integer>();
-
+
// Add self.
neighbors.add(ix);
-
+
// Left neighbor.
- if (isValidX(ix - 1))
- {
+ if (isValidX(ix - 1)) {
neighbors.add(ix - 1);
- }
- else if (isValidX(ix - 2))
- {
+ } else if (isValidX(ix - 2)) {
neighbors.add(ix - 2);
}
-
+
// Right neighbor.
- if (isValidX(ix + 1))
- {
+ if (isValidX(ix + 1)) {
neighbors.add(ix + 1);
- }
- else if (isValidX(ix + 2))
- {
+ } else if (isValidX(ix + 2)) {
neighbors.add(ix + 2);
- }
-
+ }
+
return neighbors;
}
-
- Set<Integer> getYNeighbors(int iy)
- {
+
+ Set<Integer> getYNeighbors(int iy) {
Set<Integer> neighbors = new HashSet<Integer>();
-
+
// Add self.
neighbors.add(iy);
-
+
// Lower neighbor.
- if (isValidY(iy - 1))
- {
+ if (isValidY(iy - 1)) {
neighbors.add(iy - 1);
}
// Upper neighbor.
- if (isValidY(iy + 1))
- {
+ if (isValidY(iy + 1)) {
neighbors.add(iy + 1);
}
-
+
return neighbors;
}
-
- boolean isValidY(int iy)
- {
+
+ boolean isValidY(int iy) {
// Zero is not valid because ID scheme goes from 1.
return iy > 0 && iy <= ny;
}
-
- boolean isValidX(int ix)
- {
+
+ boolean isValidX(int ix) {
// Even case.
- if (!oddX)
- {
- return ix >= -nx/2 && ix <= nx/2 && ix != 0;
+ if (!oddX) {
+ return ix >= -nx / 2 && ix <= nx / 2 && ix != 0;
}
// Odd case.
- else
- {
- return ix >= (-nx-1)/2 && ix <= (nx+1)/2;
- }
- }
-
+ else {
+ return ix >= (-nx - 1) / 2 && ix <= (nx + 1) / 2;
+ }
+ }
+
/**
* Create a map of crystal IDs to the <code>Set</code> of neighbor crystal IDs.
+ *
* @return A map of neighbors for each crystal ID.
*/
- public NeighborMap getNeighborMap()
- {
- if (neighborMap != null)
- {
+ public NeighborMap getNeighborMap() {
+ if (neighborMap != null) {
return neighborMap;
}
-
- // Setup the private instance of the map.
+
+ // Setup the private instance of the map.
neighborMap = new NeighborMap(this.getDetectorElement().getIdentifierHelper());
-
+
IDDecoder dec = getIDDecoder();
IDEncoder enc = new IDEncoder(dec.getIDDescription());
-
+
int nfields = dec.getFieldCount();
int[] vals = new int[nfields];
vals[dec.getFieldIndex("system")] = getSystemID();
-
+
int idxx = dec.getFieldIndex("ix");
int idxy = dec.getFieldIndex("iy");
-
- int hnx = nx;
-
- // Calculate number of X for loop. (from LCDD conv)
- if (oddX)
- {
+
+ int hnx = nx;
+
+ // Calculate number of X for loop. (from LCDD conv)
+ if (oddX) {
hnx -= 1;
hnx /= 2;
- }
- else
- {
+ } else {
hnx /= 2;
}
-
- for (int side=-1; side <=1; side++)
- {
- if (side == 0) continue;
+
+ for (int side = -1; side <= 1; side++) {
+ if (side == 0)
+ continue;
vals[dec.getFieldIndex("side")] = side;
// Loop over y.
- for (int iy=1; iy<=ny; iy++)
- {
+ for (int iy = 1; iy <= ny; iy++) {
// Loop over x.
- for (int ix=0; ix<=hnx; ix++)
- {
+ for (int ix = 0; ix <= hnx; ix++) {
// Loop for positive and negative x.
- for (int j=-1; j<=1; j++)
- {
+ for (int j = -1; j <= 1; j++) {
if (j == 0)
continue;
-
- vals[idxx] = ix*j;
+
+ vals[idxx] = ix * j;
vals[idxy] = iy;
-
+
Long id = enc.setValues(vals);
Set<Long> neighbors = getNeighbors(id);
-
+
neighborMap.put(id, neighbors);
}
}
}
}
-
+
return neighborMap;
- }
+ }
}
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal3.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal3.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal3.java Wed Apr 27 11:11:32 2016
@@ -150,7 +150,7 @@
/**
* The number of crystals in X in one section.
*
- * @return
+ * @return the number of crystals in X in one section
*/
public double nx() {
return nx;
@@ -159,7 +159,7 @@
/**
* The number of crystals in y in one section.
*
- * @return
+ * @return the number of crystals in Y in one section
*/
public double ny() {
return ny;
Modified: java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSTestRunTracker2014.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSTestRunTracker2014.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSTestRunTracker2014.java Wed Apr 27 11:11:32 2016
@@ -9,21 +9,21 @@
public class HPSTestRunTracker2014 extends AbstractTracker {
- public HPSTestRunTracker2014(Element node) throws JDOMException
- {
- super(node);
- }
+ public HPSTestRunTracker2014(Element node) throws JDOMException
+ {
+ super(node);
+ }
- public void appendHepRep(HepRepFactory factory, HepRep heprep)
- {
+ public void appendHepRep(HepRepFactory factory, HepRep heprep)
+ {
DetectorElementToHepRepConverter.convert(getDetectorElement(), factory, heprep, -1, false, getVisAttributes().getColor());
- }
-
- public boolean isEndcap() {
- return false;
- }
-
- public boolean isBarrel() {
- return true;
- }
+ }
+
+ public boolean isEndcap() {
+ return false;
+ }
+
+ public boolean isBarrel() {
+ return true;
+ }
}
Modified: java/branches/HPSJAVA-409/detector-model/src/test/java/org/hps/detector/svt/SvtDetectorSetupTest.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/test/java/org/hps/detector/svt/SvtDetectorSetupTest.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/test/java/org/hps/detector/svt/SvtDetectorSetupTest.java Wed Apr 27 11:11:32 2016
@@ -63,7 +63,8 @@
final DatabaseConditionsManager conditionsManager = DatabaseConditionsManager.getInstance();
conditionsManager.addConditionsListener(new SvtDetectorSetup());
- conditionsManager.setDetector("HPS-Proposal2014-v7-2pt2", 0);
+ //conditionsManager.setDetector("HPS-Proposal2014-v7-2pt2", 0);
+ conditionsManager.setDetector("HPS-EngRun2015-Nominal-v3", 5772);
// Get the detector.
final Detector detector = conditionsManager.getCachedConditions(Detector.class, "compact.xml").getCachedData();
Modified: java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/detector/converter/compact/HPSTracker2ConverterTest.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/detector/converter/compact/HPSTracker2ConverterTest.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/detector/converter/compact/HPSTracker2ConverterTest.java Wed Apr 27 11:11:32 2016
@@ -36,8 +36,8 @@
private static final int TOTAL_NUMBER_OF_STEREO_LAYERS = 10;
private static final String SUBDETECTOR_NAME = "Tracker";
- public static final int NUMBER_OF_READOUT_STRIPS = 639;
- public static final int NUMBER_OF_SENSE_STRIPS = 1277;
+ public static final int NUMBER_OF_READOUT_STRIPS = 639;
+ public static final int NUMBER_OF_SENSE_STRIPS = 1277;
//-----------------//
//-----------------//
@@ -74,12 +74,12 @@
for(HpsSiSensor sensor : sensors) {
assertTrue("[ " + this.getClass().getSimpleName() + " ]: Sensor is of wrong type: " + sensor.getClass().getSimpleName(),
sensor instanceof HpsSiSensor);
- assertTrue("[ " + this.getClass().getSimpleName() + " ]: Wrong number of readout electrodes found.",
- sensor.getReadoutElectrodes(ChargeCarrier.HOLE).getNCells() == NUMBER_OF_READOUT_STRIPS);
-
- assertTrue("[ " + this.getClass().getSimpleName() + " ]: Wrong number of sense electrodes found.",
- sensor.getSenseElectrodes(ChargeCarrier.HOLE).getNCells() == NUMBER_OF_SENSE_STRIPS);
- LOGGER.info(sensor.toString());
+ assertTrue("[ " + this.getClass().getSimpleName() + " ]: Wrong number of readout electrodes found.",
+ sensor.getReadoutElectrodes(ChargeCarrier.HOLE).getNCells() == NUMBER_OF_READOUT_STRIPS);
+
+ assertTrue("[ " + this.getClass().getSimpleName() + " ]: Wrong number of sense electrodes found.",
+ sensor.getSenseElectrodes(ChargeCarrier.HOLE).getNCells() == NUMBER_OF_SENSE_STRIPS);
+ LOGGER.info(sensor.toString());
}
LOGGER.info("Sensors were all initialized correctly.");
Modified: java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTestRunTracker2014LCDDTest.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTestRunTracker2014LCDDTest.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTestRunTracker2014LCDDTest.java Wed Apr 27 11:11:32 2016
@@ -18,7 +18,7 @@
{
public HPSTestRunTracker2014LCDDTest(String name)
{
- super(name);
+ super(name);
}
public static TestSuite suite()
Modified: java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014LCDDTest.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014LCDDTest.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014LCDDTest.java Wed Apr 27 11:11:32 2016
@@ -18,7 +18,7 @@
{
public HPSTracker2014LCDDTest(String name)
{
- super(name);
+ super(name);
}
public static TestSuite suite()
Modified: java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1LCDDTest.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1LCDDTest.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1LCDDTest.java Wed Apr 27 11:11:32 2016
@@ -18,7 +18,7 @@
{
public HPSTracker2014v1LCDDTest(String name)
{
- super(name);
+ super(name);
}
public static TestSuite suite()
Modified: java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1SurveyLCDDTest.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1SurveyLCDDTest.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1SurveyLCDDTest.java Wed Apr 27 11:11:32 2016
@@ -18,7 +18,7 @@
{
public HPSTracker2014v1SurveyLCDDTest(String name)
{
- super(name);
+ super(name);
}
public static TestSuite suite()
Modified: java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/subdetector/HPSTestRunTracker2014Test.java
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/subdetector/HPSTestRunTracker2014Test.java (original)
+++ java/branches/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/subdetector/HPSTestRunTracker2014Test.java Wed Apr 27 11:11:32 2016
@@ -19,36 +19,36 @@
*/
public class HPSTestRunTracker2014Test extends TestCase {
-
- Detector det;
- public HPSTestRunTracker2014Test(String name) {
- super(name);
- }
-
- protected void setUp() throws Exception {
- GeometryReader geometryReader = new GeometryReader();
- geometryReader.setBuildDetailed(true);
- String pathToCompactFile = "/org/lcsim/geometry/subdetector/HPSTestRunTracker2014.xml";
+
+ Detector det;
+ public HPSTestRunTracker2014Test(String name) {
+ super(name);
+ }
+
+ protected void setUp() throws Exception {
+ GeometryReader geometryReader = new GeometryReader();
+ geometryReader.setBuildDetailed(true);
+ String pathToCompactFile = "/org/lcsim/geometry/subdetector/HPSTestRunTracker2014.xml";
- InputStream in = HPSTestRunTracker2014Test.class.getResourceAsStream(pathToCompactFile);
- det = geometryReader.read(in);
-
- System.out.printf("%s: detector name converted: %s\n",this.getClass().getSimpleName(), det.getName());
-
-
- }
-
- public void test() {
-
-
-// IDetectorElementStore store = DetectorElementStore.getInstance();
-// System.out.printf("%s: Printing %d DE:\n",this.getClass().getSimpleName(), store.size());
-// System.out.printf("%s: %50s %40s %50s %50s\n",this.getClass().getSimpleName(), "name", "pos", "path","mother");
-// for(IDetectorElement e : store) {
-// System.out.printf("%s: %50s %40s %50s %50s \n",this.getClass().getSimpleName(), e.getName(),e.hasGeometryInfo()?e.getGeometry().getPosition().toString():" - ",e.hasGeometryInfo()?((PhysicalVolumePath)e.getGeometry().getPath()).toString():" - ",e.getParent()==null?" - ":e.getParent().getName());
-// }
-
- IDetectorElementStore store = DetectorElementStore.getInstance();
+ InputStream in = HPSTestRunTracker2014Test.class.getResourceAsStream(pathToCompactFile);
+ det = geometryReader.read(in);
+
+ System.out.printf("%s: detector name converted: %s\n",this.getClass().getSimpleName(), det.getName());
+
+
+ }
+
+ public void test() {
+
+
+// IDetectorElementStore store = DetectorElementStore.getInstance();
+// System.out.printf("%s: Printing %d DE:\n",this.getClass().getSimpleName(), store.size());
+// System.out.printf("%s: %50s %40s %50s %50s\n",this.getClass().getSimpleName(), "name", "pos", "path","mother");
+// for(IDetectorElement e : store) {
+// System.out.printf("%s: %50s %40s %50s %50s \n",this.getClass().getSimpleName(), e.getName(),e.hasGeometryInfo()?e.getGeometry().getPosition().toString():" - ",e.hasGeometryInfo()?((PhysicalVolumePath)e.getGeometry().getPath()).toString():" - ",e.getParent()==null?" - ":e.getParent().getName());
+// }
+
+ IDetectorElementStore store = DetectorElementStore.getInstance();
System.out.printf("%s: Printing %d DE:\n",this.getClass().getSimpleName(), store.size());
System.out.printf("%s: %50s %40s %50s %50s %s\n",this.getClass().getSimpleName(), "name", "pos", "path","mother", "expId");
for(IDetectorElement e : store) {
@@ -58,8 +58,8 @@
expId = e.getExpandedIdentifier();
System.out.printf("%s: %50s %40s %50s %50s %s\n",this.getClass().getSimpleName(), e.getName(),e.hasGeometryInfo()?e.getGeometry().getPosition().toString():" - ",e.hasGeometryInfo()?((PhysicalVolumePath)e.getGeometry().getPath()).toString():" - ",e.getParent()==null?" - ":e.getParent().getName(),expId==null?" no expId ":expId.toString());
}
-
- }
+
+ }
}
Modified: java/branches/HPSJAVA-409/detector-model/src/test/resources/org/lcsim/geometry/subdetector/HPSTest.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/test/resources/org/lcsim/geometry/subdetector/HPSTest.xml (original)
+++ java/branches/HPSJAVA-409/detector-model/src/test/resources/org/lcsim/geometry/subdetector/HPSTest.xml Wed Apr 27 11:11:32 2016
@@ -50,7 +50,7 @@
<!--
<constant name="tracking_region_radius" value="200.0*cm"/>
<constant name="tracking_region_min" value="5.0*cm"/>
- <constant name="tracking_region_zmax" value="100.0*cm"/>
+ <constant name="tracking_region_zmax" value="100.0*cm"/>
<constant name="xCent1" value="10*cm" />
<constant name="xCent2" value="20*cm" />
Modified: java/branches/HPSJAVA-409/detector-model/src/test/resources/org/lcsim/geometry/subdetector/HPSTestRunTracker2014.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/src/test/resources/org/lcsim/geometry/subdetector/HPSTestRunTracker2014.xml (original)
+++ java/branches/HPSJAVA-409/detector-model/src/test/resources/org/lcsim/geometry/subdetector/HPSTestRunTracker2014.xml Wed Apr 27 11:11:32 2016
@@ -7,9 +7,9 @@
</info>
<define>
- <!-- units -->
- <constant name="mm" value="0.1*cm"/>
- <constant name="inch" value="25.4*mm"/>
+ <!-- units -->
+ <constant name="mm" value="0.1*cm"/>
+ <constant name="inch" value="25.4*mm"/>
<!-- world -->
<constant name="world_side" value="500.0*cm" />
@@ -17,8 +17,8 @@
<constant name="world_y" value="world_side" />
<constant name="world_z" value="world_side" />
- <!-- tracking region -->
- <constant name="tracking_region_radius" value="200.0*cm"/>
+ <!-- tracking region -->
+ <constant name="tracking_region_radius" value="200.0*cm"/>
<constant name="tracking_region_min" value="5.0*cm"/>
<constant name="tracking_region_zmax" value="131.8*cm"/>
@@ -34,7 +34,7 @@
</materials>
<display>
-
+
<vis name="SensorVis" alpha="1.0" r="1.0" g="0.0" b="0.0" drawingStyle="wireframe" lineStyle="unbroken" showDaughters="true" visible="true"/>
<vis name="ActiveSensorVis" alpha="1.0" r="1.0" g="0.0" b="0.0" drawingStyle="solid" lineStyle="unbroken" showDaughters="true" visible="true"/>
<vis name="CarbonFiberVis" alpha="1.0" r="0.88" g="0.88" b="0.88" drawingStyle="solid" lineStyle="unbroken" showDaughters="true" visible="true"/>
@@ -48,7 +48,7 @@
<vis name="BasePlateVis" alpha="1.0" r="0.35" g="0.35" b="0.35" drawingStyle="solid" lineStyle="dashed" showDaughters="true" visible="true"/>
<vis name="LayerVis" alpha="0.0" r="0.0" g="0.0" b="1.0" drawingStyle="wireframe" showDaughters="true" visible="false"/>
<vis name="ComponentVis" alpha="0.0" r="0.0" g="0.2" b="0.4" drawingStyle="solid" showDaughters="false" visible="false"/>
- <vis name="BeamPlaneVis" alpha="1.0" r="1.0" g="1.0" b="1.0" drawingStyle="solid" lineStyle="unbroken" showDaughters="false" visible="true"/>
+ <vis name="BeamPlaneVis" alpha="1.0" r="1.0" g="1.0" b="1.0" drawingStyle="solid" lineStyle="unbroken" showDaughters="false" visible="true"/>
</display>
Modified: java/branches/HPSJAVA-409/detector-model/target/antrun/build-main.xml
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/target/antrun/build-main.xml (original)
+++ java/branches/HPSJAVA-409/detector-model/target/antrun/build-main.xml Wed Apr 27 11:11:32 2016
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8" ?>
<project name="maven-antrun-" default="main" >
<target name="main">
- <mkdir dir="/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/target/test-output"/>
+ <mkdir dir="/media/work/svn/hps-java/HPSJAVA-409/detector-model/target/test-output"/>
</target>
</project>
Modified: java/branches/HPSJAVA-409/detector-model/target/hps-detector-model-3.4.2-SNAPSHOT-bin.jar
=============================================================================
Binary files - no diff available.
Modified: java/branches/HPSJAVA-409/detector-model/target/hps-detector-model-3.4.2-SNAPSHOT.jar
=============================================================================
Binary files - no diff available.
Modified: java/branches/HPSJAVA-409/detector-model/target/maven-archiver/pom.properties
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/target/maven-archiver/pom.properties (original)
+++ java/branches/HPSJAVA-409/detector-model/target/maven-archiver/pom.properties Wed Apr 27 11:11:32 2016
@@ -1,5 +1,5 @@
#Generated by Maven
-#Fri Dec 04 17:58:10 CET 2015
+#Fri Apr 08 16:49:44 PDT 2016
version=3.4.2-SNAPSHOT
groupId=org.hps
artifactId=hps-detector-model
Modified: java/branches/HPSJAVA-409/detector-model/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst (original)
+++ java/branches/HPSJAVA-409/detector-model/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst Wed Apr 27 11:11:32 2016
@@ -1 +1,188 @@
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelL13Top.class
+org/lcsim/geometry/compact/converter/HPSTracker2014LCDDBuilder.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL46Top.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelPlate.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$HalfModuleStereo.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014JavaBuilder.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelL46TopSurveyBalls.class
+org/lcsim/detector/converter/compact/HPSTracker2014ConverterBase.class
+org/lcsim/detector/converter/compact/HPSTracker2Converter$ModuleParameters.class
+org/lcsim/geometry/compact/converter/lcdd/HPSEcal3$CrystalRange.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelL46BottomPlate.class
+org/lcsim/geometry/compact/converter/SurveyVolumeImpl.class
+org/lcsim/detector/converter/compact/HPSTrackerConverter.class
+org/lcsim/detector/converter/compact/HPSTrackerConverter$ModuleParameters.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$Hybrid.class
+org/hps/detector/ecal/EcalCrystalChannelMap.class
+org/hps/detector/svt/SvtDetectorSetup.class
+org/lcsim/geometry/subdetector/HPSTracker2014.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL5Bot.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelL13BottomPlate.class
+org/lcsim/detector/converter/compact/subdetector/SvtStereoLayer.class
+org/lcsim/geometry/compact/converter/AlignmentCorrection.class
+org/lcsim/geometry/subdetector/HPSEcal3$NeighborMap.class
+org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014Base.class
+org/lcsim/geometry/compact/converter/MilleParameter.class
+org/lcsim/geometry/compact/converter/LCDDSurveyVolume.class
+org/hps/detector/ecal/HPSEcalDetectorElement$NeighborDirection.class
+org/lcsim/detector/converter/compact/HPSMuonCalorimeterConverter.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$TestRunColdBlockL45.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$SupportTop.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$HalfModuleComponent.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$LongHalfModuleBundle.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL4Bot.class
+org/hps/detector/ecal/geo2015/crystal/Geant4Position.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$LongAxialSlotHalfModule.class
+org/lcsim/geometry/subdetector/HPSTestRunTracker2014.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$TestRunHalfModuleStereo.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$TestRunModule.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL13.class
+org/lcsim/geometry/compact/converter/ReadSurveyOutput.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$SupportPlateBottom.class
+org/lcsim/geometry/compact/converter/lcdd/HPSEcal4.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$Sensor.class
+org/lcsim/geometry/subdetector/HPSEcal4.class
+org/lcsim/geometry/compact/converter/lcdd/HPSEcal.class
+org/hps/detector/ecal/CrystalRange.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$LongAxialHoleHalfModule.class
+org/lcsim/detector/tracker/silicon/HpsTestRunSiSensor.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL6Top.class
+org/lcsim/geometry/subdetector/HPSEcal$XYSide.class
+org/lcsim/geometry/subdetector/HPSEcal$NeighborMap.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014LCDDBuilder.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition.class
+org/hps/detector/ecal/geo2015/geoutils/CenterMass.class
+org/lcsim/geometry/compact/converter/lcdd/HPSEcal3.class
+org/lcsim/geometry/compact/converter/HPSTrackerLCDDBuilder.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$SupportRingL13KinMount.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelL46.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$SupportPlateTop.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$HalfModuleLamination.class
+org/lcsim/geometry/compact/converter/SvtAlignmentConstantsReader.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$TestRunColdBlockL13.class
+org/lcsim/geometry/compact/converter/SurveyVolume.class
+org/lcsim/geometry/compact/converter/HPSTracker2014JavaBuilder.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL3Top.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$SupportBottom.class
+org/lcsim/geometry/compact/converter/lcdd/HPSTracker2$ModuleComponentParameters.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$TestRunModuleL13.class
+org/lcsim/detector/converter/compact/subdetector/HpsTracker2.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL13Top.class
+org/lcsim/geometry/compact/converter/CompactSurveyVolume.class
+org/lcsim/geometry/compact/converter/SurveyVolumeVisualization.class
+org/lcsim/geometry/subdetector/HPSEcal3$XY.class
+org/lcsim/geometry/compact/converter/lcdd/HPSEcal2.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$LongStereoHalfModule.class
+org/hps/detector/ecal/geo2015/ecal/EcalSurveyData.class
+org/lcsim/geometry/subdetector/HPSTracker.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelL13BottomSurveyBalls.class
+org/lcsim/geometry/compact/converter/HPSTrackerGeometryDefinition.class
+org/lcsim/geometry/compact/converter/HPSTracker2014v1LCDDBuilder.class
+org/hps/detector/ecal/HPSEcalAPI.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelL13Plate.class
+org/lcsim/geometry/compact/converter/JavaGhostSurveyVolume.class
+org/lcsim/geometry/compact/converter/HPSTrackerGeometryDefinition$TestRunModuleBundle.class
+org/lcsim/geometry/compact/converter/SurveyResult.class
+org/hps/detector/ecal/geo2015/ecal/EcalNominal.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$LongAxialSlotHalfModuleBase.class
+org/lcsim/geometry/compact/converter/SurveyCoordinateSystem.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL1Bot.class
+org/hps/detector/ecal/geo2015/base/StatFunUtils.class
+org/hps/detector/ecal/geo2015/ecal/ECalRotationCalculator.class
+org/hps/detector/ecal/geo2015/ecal/Transformations.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$TrackerEnvelope.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$LongStereoHoleHalfModule.class
+org/lcsim/geometry/subdetector/HPSEcal3$CrystalRange.class
+org/lcsim/geometry/subdetector/HPSTracker2014v1.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$PSVacuumChamber.class
+org/lcsim/detector/tracker/silicon/HpsSiSensor.class
+org/lcsim/detector/converter/compact/HPSTestRunTracker2014Converter.class
+org/lcsim/geometry/compact/converter/lcdd/HPSMuonCalorimeter2.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL6Bot.class
+org/lcsim/geometry/compact/converter/LCDDGhostSurveyVolume.class
+org/lcsim/geometry/compact/converter/lcdd/HPSTracker$ModuleComponentParameters.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelL13TopSurveyBalls.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$TestRunHalfModuleBundle.class
+org/hps/detector/ecal/geo2015/crystal/CrystalTaitBryanAngleCalculator.class
+org/lcsim/geometry/compact/converter/lcdd/HPSTracker.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelL13TopPlate.class
+org/lcsim/geometry/compact/converter/lcdd/HPSMuonCalorimeter.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelL13.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelL46Bottom.class
+org/lcsim/detector/converter/compact/HPSTracker2014Converter.class
+org/hps/detector/ecal/EcalCrystal.class
+org/lcsim/geometry/subdetector/HPSTracker2.class
+org/lcsim/geometry/compact/converter/MilleParameter$Type.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$TestRunHalfModuleAxial.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$SupportPlate.class
org/hps/detector/ecal/HPSEcalDetectorElement$1.class
+org/lcsim/geometry/compact/converter/IHPSTrackerLCDDBuilder.class
+org/lcsim/detector/converter/compact/HPSEcal2Converter.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelL13Bottom.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL2Top.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$SvtBoxBasePlate.class
+org/lcsim/geometry/compact/converter/HPSTrackerGeometryDefinition$TrackingVolume.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$TestRunColdBlock.class
+org/lcsim/geometry/compact/converter/JavaSurveyVolume.class
+org/lcsim/geometry/compact/converter/HPSTrackerBuilder$BaseModuleBundle.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL13Bot.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL46.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$SupportRingL13TopKinMount.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL3Bot.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition.class
+org/lcsim/detector/converter/compact/HPSTrackerConverter$ModuleComponentParameters.class
+org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014.class
+org/hps/detector/ecal/geo2015/geoutils/Plane.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL5Top.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$LongStereoSlotHalfModuleBase.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$BasePlate.class
+org/lcsim/geometry/compact/converter/HPSTrackerBuilder$HalfModuleBundle.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$ActiveSensor.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelL46BottomSurveyBalls.class
+org/lcsim/geometry/compact/converter/HPSTracker2014v1GeometryDefinition$LongStereoSlotHalfModule.class
+org/lcsim/geometry/compact/converter/HPSTrackerJavaBuilder.class
+org/lcsim/geometry/subdetector/HPSEcal3.class
+org/lcsim/geometry/compact/converter/lcdd/HPSTestRunTracker2014.class
+org/hps/detector/ecal/geo2015/base/DataLoader.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$LongHalfModule.class
+org/lcsim/detector/converter/compact/HPSEcalConverter.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$SupportRingL13BottomKinMount.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$BaseModule.class
+org/lcsim/geometry/compact/converter/lcdd/HPSTracker2.class
+org/lcsim/geometry/subdetector/HPSEcal.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$HalfModuleAxial.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$LongModuleBundle.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelL46Plate.class
+org/lcsim/detector/converter/compact/HPSTracker2014v1Converter.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelL46TopPlate.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL2Bot.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$TestRunModuleL45.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$SvtBox.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$CSupport.class
+org/hps/detector/ecal/HPSEcalDetectorElement.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$UChannelL46Top.class
+org/lcsim/detector/converter/compact/HPSEcal4Converter.class
+org/lcsim/geometry/compact/converter/HPSTracker2014v1GeometryDefinition$LongAxialSlotHalfModule.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$HalfLongModuleLamination.class
+org/lcsim/geometry/compact/converter/lcdd/HPSTracker2$ModuleParameters.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$SupportRing.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$TestRunHalfModule.class
+org/hps/detector/ecal/geo2015/geoutils/Vector.class
+org/hps/detector/ecal/geo2015/geoutils/Line.class
+org/hps/detector/ecal/geo2015/crystal/Crystal.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL46Bot.class
+org/lcsim/detector/converter/compact/HPSTracker2Converter.class
+org/lcsim/geometry/compact/converter/HPSTracker2014v1GeometryDefinition.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$LongStereoSlotHalfModule.class
+org/lcsim/detector/converter/compact/HPSEcal3Converter.class
+org/lcsim/geometry/compact/converter/HPSTrackerBuilder.class
+org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition$CarbonFiber.class
+org/lcsim/geometry/compact/converter/IHPSTrackerJavaBuilder.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL4Top.class
+org/lcsim/geometry/compact/converter/HPSTracker2014v1JavaBuilder.class
+org/lcsim/geometry/compact/converter/lcdd/HPSTracker$ModuleParameters.class
+org/lcsim/geometry/subdetector/HPSEcal2.class
+org/lcsim/detector/converter/compact/HPSTracker2Converter$ModuleComponentParameters.class
+org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition$ModuleL1Top.class
+org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1.class
+org/lcsim/geometry/subdetector/HPSMuonCalorimeter.class
Modified: java/branches/HPSJAVA-409/detector-model/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst (original)
+++ java/branches/HPSJAVA-409/detector-model/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst Wed Apr 27 11:11:32 2016
@@ -1,92 +1,92 @@
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/ecal/EcalNominal.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/package-info.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTracker2014ConverterBase.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/geoutils/Plane.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/package-info.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSTracker2014.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/svt/SvtDetectorSetup.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014LCDDBuilder.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/LCDDSurveyVolume.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/IHPSTrackerLCDDBuilder.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/CompactSurveyVolume.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014JavaBuilder.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/crystal/Crystal.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSEcal3.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTracker2Converter.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SvtAlignmentConstantsReader.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/SvtStereoLayer.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerLCDDBuilder.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTestRunTracker2014.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014Base.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/crystal/Geant4Position.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyCoordinateSystem.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014v1GeometryDefinition.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSTracker2.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/MilleParameter.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/HPSEcalDetectorElement.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTrackerConverter.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal2.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/crystal/CrystalEulerAngleCalculator.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/package-info.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSEcalConverter.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSMuonCalorimeter.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSMuonCalorimeter2.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/base/FunCheatJava.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/EcalCrystal.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/ecal/Transformations.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSEcal3Converter.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/base/DataLoader.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/geoutils/CenterMass.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSEcal4.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/geoutils/Line.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSMuonCalorimeter.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/package-info.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSEcal4Converter.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/CrystalRange.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/HpsTracker2.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/crystal/package-info.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerGeometryDefinition.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/AlignmentCorrection.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsTestRunSiSensor.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/EcalCrystalChannelMap.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/base/package-info.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal4.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTracker2014v1Converter.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/ecal/package-info.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTracker2014Converter.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/LCDDGhostSurveyVolume.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSTestRunTracker2014.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014v1LCDDBuilder.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/ecal/EcalSurveyData.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerJavaBuilder.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014JavaBuilder.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/ecal/ECalRotationCalculator.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSMuonCalorimeterConverter.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSEcal2Converter.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSEcal.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerBuilder.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolume.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014LCDDBuilder.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014v1JavaBuilder.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsSiSensor.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/JavaGhostSurveyVolume.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/IHPSTrackerJavaBuilder.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/ReadSurveyOutput.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSTracker.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyResult.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/geoutils/Vector.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolumeVisualization.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/HPSEcalAPI.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal3.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTestRunTracker2014Converter.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSEcal2.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolumeImpl.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSTracker2014v1.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/JavaSurveyVolume.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerJavaBuilder.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsTestRunSiSensor.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal4.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014GeometryDefinition.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTracker2014Converter.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTracker2014v1Converter.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSEcalConverter.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSEcal3Converter.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSEcal2.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/EcalCrystal.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSTestRunTracker2014.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSEcal2Converter.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsSiSensor.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSEcal3.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTestRunTracker2014Converter.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014LCDDBuilder.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSEcal4.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/ecal/package-info.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTracker2Converter.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014v1JavaBuilder.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerGeometryDefinition.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/package-info.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolume.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal2.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/crystal/Geant4Position.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014v1GeometryDefinition.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/HpsTracker2.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014JavaBuilder.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyCoordinateSystem.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014LCDDBuilder.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/ReadSurveyOutput.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014JavaBuilder.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/package-info.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/base/DataLoader.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014v1LCDDBuilder.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/JavaGhostSurveyVolume.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/ecal/Transformations.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/geoutils/CenterMass.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTracker2014ConverterBase.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/package-info.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSEcal4Converter.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSTracker2014.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSMuonCalorimeter.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSEcal.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/crystal/Crystal.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/HPSEcalDetectorElement.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SvtAlignmentConstantsReader.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerBuilder.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSTracker2.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSMuonCalorimeter.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/crystal/CrystalTaitBryanAngleCalculator.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/EcalCrystalChannelMap.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/IHPSTrackerJavaBuilder.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSTracker2014v1.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/CrystalRange.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/geoutils/Line.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/package-info.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/geoutils/Vector.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSMuonCalorimeterConverter.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerLCDDBuilder.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/base/StatFunUtils.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/MilleParameter.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/crystal/package-info.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyResult.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSMuonCalorimeter2.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/ecal/EcalNominal.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/AlignmentCorrection.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/SvtStereoLayer.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/LCDDGhostSurveyVolume.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/HPSEcalAPI.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/detector/converter/compact/HPSTrackerConverter.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/geoutils/Plane.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/ecal/EcalSurveyData.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/CompactSurveyVolume.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolumeImpl.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/svt/SvtDetectorSetup.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/ecal/ECalRotationCalculator.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/JavaSurveyVolume.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014Base.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal3.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/hps/detector/ecal/geo2015/base/package-info.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSTracker.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/lcdd/HPSTestRunTracker2014.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/SurveyVolumeVisualization.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/IHPSTrackerLCDDBuilder.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/main/java/org/lcsim/geometry/compact/converter/LCDDSurveyVolume.java
Modified: java/branches/HPSJAVA-409/detector-model/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst (original)
+++ java/branches/HPSJAVA-409/detector-model/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst Wed Apr 27 11:11:32 2016
@@ -1 +1,16 @@
+org/hps/detector/svt/SvtDetectorSetupTest.class
org/lcsim/detector/converter/compact/HPSEcal4ConverterTest.class
+org/lcsim/detector/converter/compact/HPSMuonCalorimeterTest.class
+org/lcsim/detector/converter/compact/HPSTracker2ConverterTest.class
+org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014LCDDTest.class
+org/lcsim/geometry/subdetector/HPSTestRunTracker2014Test.class
+org/hps/detector/svt/TestRunSvtDetectorSetupTest.class
+org/hps/detector/svt/SvtDaqMappingTest.class
+org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1SurveyLCDDTest.class
+org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1LCDDTest.class
+org/lcsim/geometry/subdetector/HPSTracker2014Test.class
+org/lcsim/detector/converter/compact/HpsTestRunSiSensorConverterTest.class
+org/lcsim/geometry/compact/converter/lcdd/HPSTestRunTracker2014LCDDTest.class
+org/hps/detector/ecal/HPSEcalAPITest.class
+org/hps/detector/SvtAlignmentTest.class
+org/lcsim/geometry/compact/converter/lcdd/HPSEcal4LCDDTest.class
Modified: java/branches/HPSJAVA-409/detector-model/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst
=============================================================================
--- java/branches/HPSJAVA-409/detector-model/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst (original)
+++ java/branches/HPSJAVA-409/detector-model/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst Wed Apr 27 11:11:32 2016
@@ -1,16 +1,16 @@
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTestRunTracker2014LCDDTest.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/test/java/org/hps/detector/ecal/HPSEcalAPITest.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/test/java/org/hps/detector/svt/SvtDetectorSetupTest.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/test/java/org/hps/detector/SvtAlignmentTest.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/test/java/org/lcsim/detector/converter/compact/HPSEcal4ConverterTest.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/subdetector/HPSTestRunTracker2014Test.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/test/java/org/hps/detector/svt/SvtDaqMappingTest.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1LCDDTest.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSEcal4LCDDTest.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/test/java/org/lcsim/detector/converter/compact/HpsTestRunSiSensorConverterTest.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/test/java/org/hps/detector/svt/TestRunSvtDetectorSetupTest.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1SurveyLCDDTest.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/subdetector/HPSTracker2014Test.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014LCDDTest.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/test/java/org/lcsim/detector/converter/compact/HPSMuonCalorimeterTest.java
-/projet/nucleon2/annie/HPS/withHPS_Java/SENA/hps_trunk_dev/HPSJAVA-409/detector-model/src/test/java/org/lcsim/detector/converter/compact/HPSTracker2ConverterTest.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1LCDDTest.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/test/java/org/hps/detector/SvtAlignmentTest.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/test/java/org/hps/detector/svt/SvtDaqMappingTest.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/test/java/org/lcsim/detector/converter/compact/HpsTestRunSiSensorConverterTest.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014v1SurveyLCDDTest.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTracker2014LCDDTest.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/test/java/org/lcsim/detector/converter/compact/HPSEcal4ConverterTest.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/test/java/org/hps/detector/svt/SvtDetectorSetupTest.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/test/java/org/hps/detector/svt/TestRunSvtDetectorSetupTest.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSTestRunTracker2014LCDDTest.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/compact/converter/lcdd/HPSEcal4LCDDTest.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/subdetector/HPSTestRunTracker2014Test.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/test/java/org/lcsim/geometry/subdetector/HPSTracker2014Test.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/test/java/org/hps/detector/ecal/HPSEcalAPITest.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/test/java/org/lcsim/detector/converter/compact/HPSMuonCalorimeterTest.java
+/media/work/svn/hps-java/HPSJAVA-409/detector-model/src/test/java/org/lcsim/detector/converter/compact/HPSTracker2ConverterTest.java
Modified: java/branches/HPSJAVA-409/distribution/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/distribution/pom.xml (original)
+++ java/branches/HPSJAVA-409/distribution/pom.xml Wed Apr 27 11:11:32 2016
@@ -13,7 +13,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/distribution/</url>
@@ -80,7 +80,7 @@
<binFileExtensions>
<unix>.sh</unix>
</binFileExtensions>
- <extraJvmArguments>-Djava.util.logging.config.class=org.hps.logging.config.DefaultLoggingConfig</extraJvmArguments>
+ <extraJvmArguments>-Xmx2g -Djava.util.logging.config.class=org.hps.logging.config.DefaultLoggingConfig</extraJvmArguments>
<programs>
<program>
<mainClass>org.hps.evio.EvioToLcio</mainClass>
@@ -88,35 +88,31 @@
</program>
<program>
<mainClass>org.hps.job.JobManager</mainClass>
- <id>job</id>
+ <id>job-manager</id>
</program>
<program>
<mainClass>org.hps.conditions.cli.CommandLineTool</mainClass>
- <id>conddb</id>
- </program>
- <program>
- <mainClass>org.hps.crawler.DatacatCrawler</mainClass>
- <id>crawler</id>
+ <id>conditions-cli</id>
</program>
<program>
<mainClass>org.hps.run.database.RunDatabaseCommandLine</mainClass>
- <id>rundb</id>
+ <id>run-database-cli</id>
</program>
<program>
<mainClass>org.hps.monitoring.application.Main</mainClass>
- <id>monapp</id>
+ <id>monitoring-app</id>
</program>
<program>
<mainClass>org.lcsim.geometry.compact.converter.Main</mainClass>
- <id>detcnv</id>
+ <id>detector-converter</id>
</program>
<program>
<mainClass>org.hps.record.evio.EvioFileProducer</mainClass>
- <id>evio_file_producer</id>
+ <id>evio-file-producer</id>
</program>
<program>
<mainClass>org.jlab.coda.et.apps.StartEt</mainClass>
- <id>et_server</id>
+ <id>et-server</id>
<commandLineArguments>
<commandLineArgument>-f</commandLineArgument>
<commandLineArgument>ETBuffer</commandLineArgument>
@@ -124,6 +120,18 @@
<commandLineArgument>20000</commandLineArgument>
<commandLineArgument>-v</commandLineArgument>
</commandLineArguments>
+ </program>
+ <program>
+ <mainClass>org.hps.crawler.MetadataWriter</mainClass>
+ <id>dc-create-metadata</id>
+ </program>
+ <program>
+ <mainClass>org.hps.crawler.DatacatAddFile</mainClass>
+ <id>dc-add-file</id>
+ </program>
+ <program>
+ <mainClass>org.hps.crawler.DatacatCrawler</mainClass>
+ <id>dc-crawler</id>
</program>
</programs>
</configuration>
Modified: java/branches/HPSJAVA-409/distribution/src/main/java/org/hps/HPSJavaProperties.java
=============================================================================
--- java/branches/HPSJAVA-409/distribution/src/main/java/org/hps/HPSJavaProperties.java (original)
+++ java/branches/HPSJAVA-409/distribution/src/main/java/org/hps/HPSJavaProperties.java Wed Apr 27 11:11:32 2016
@@ -82,6 +82,11 @@
return this.properties.getProperty("timestamp");
}
+ /**
+ * Convert this object to a string.
+ *
+ * @return this object converted to a string
+ */
@Override
public String toString() {
final StringBuffer sb = new StringBuffer();
Modified: java/branches/HPSJAVA-409/ecal-event-display/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/pom.xml (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/pom.xml Wed Apr 27 11:11:32 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/ecal-event-display/</url>
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/event/Association.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/event/Association.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/event/Association.java Wed Apr 27 11:11:32 2016
@@ -10,52 +10,52 @@
* @author Kyle McCarty
*/
public final class Association {
- private final Point parent;
- private final Point child;
- private final Color highlight;
-
- /**
- * <b>Association</b><br/><br/>
- * <code>public <b>Association</b>(Point parentCrystal, Point childCrystal, Color highlightColor)</code><br/><br/>
- * Creates an association between a child crystal and a parent
- * crystal.
- * @param parentCrystal - The crystal with which the child crystal
- * is connected.
- * @param childCrystal - The connected crystal.
- * @param highlightColor - The color in which the child crystal
- * should be highlighted.
- */
- public Association(Point parentCrystal, Point childCrystal, Color highlightColor) {
- parent = parentCrystal;
- child = childCrystal;
- highlight = highlightColor;
- }
-
- /**
- * <b>getChildCrystal</b><br/><br/>
- * <code>public Point <b>getChildCrystal</b>()</code><br/><br/>
- * Indicates the indices for the child crystal.
- * @return Returns the child crystal's indices in a <code>Point
- * </code> object.
- */
- public Point getChildCrystal() { return child; }
-
- /**
- * <b>getHighlight</b><br/><br/>
- * <code>public Color <b>getHighlight</b>()</code><br/><br/>
- * Gets the color with which the child crystal should be highlighted
- * whenever the parent crystal is selected.
- * @return Returns the highlight color as a <code>Color</code> object.
- */
- public Color getHighlight() { return highlight; }
-
- /**
- * <b>getParentCrystal</b><br/><br/>
- * <code>public Point <b>getParentCrystal</b>()</code><br/><br/>
- * Indicates the indices for the parent crystal with which the
- * child crystal is connected.
- * @return Returns the parent crystal's indices in a <code>Point
- * </code> object.
- */
- public Point getParentCrystal() { return parent; }
+ private final Point parent;
+ private final Point child;
+ private final Color highlight;
+
+ /**
+ * <b>Association</b><br/><br/>
+ * <code>public <b>Association</b>(Point parentCrystal, Point childCrystal, Color highlightColor)</code><br/><br/>
+ * Creates an association between a child crystal and a parent
+ * crystal.
+ * @param parentCrystal - The crystal with which the child crystal
+ * is connected.
+ * @param childCrystal - The connected crystal.
+ * @param highlightColor - The color in which the child crystal
+ * should be highlighted.
+ */
+ public Association(Point parentCrystal, Point childCrystal, Color highlightColor) {
+ parent = parentCrystal;
+ child = childCrystal;
+ highlight = highlightColor;
+ }
+
+ /**
+ * <b>getChildCrystal</b><br/><br/>
+ * <code>public Point <b>getChildCrystal</b>()</code><br/><br/>
+ * Indicates the indices for the child crystal.
+ * @return Returns the child crystal's indices in a <code>Point
+ * </code> object.
+ */
+ public Point getChildCrystal() { return child; }
+
+ /**
+ * <b>getHighlight</b><br/><br/>
+ * <code>public Color <b>getHighlight</b>()</code><br/><br/>
+ * Gets the color with which the child crystal should be highlighted
+ * whenever the parent crystal is selected.
+ * @return Returns the highlight color as a <code>Color</code> object.
+ */
+ public Color getHighlight() { return highlight; }
+
+ /**
+ * <b>getParentCrystal</b><br/><br/>
+ * <code>public Point <b>getParentCrystal</b>()</code><br/><br/>
+ * Indicates the indices for the parent crystal with which the
+ * child crystal is connected.
+ * @return Returns the parent crystal's indices in a <code>Point
+ * </code> object.
+ */
+ public Point getParentCrystal() { return parent; }
}
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/event/Cluster.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/event/Cluster.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/event/Cluster.java Wed Apr 27 11:11:32 2016
@@ -11,161 +11,161 @@
* @author Kyle McCarty
*/
public final class Cluster {
- private final Point center;
- private final double energy;
- private final double time;
- private ArrayList<Point> hitList = new ArrayList<Point>();
- private ArrayList<Point> shareList = new ArrayList<Point>();
-
- /**
- * Creates a new cluster. All clusters are required to have a
- * cluster center.
- * @param ix - The cluster center's x-index.
- * @param iy - The cluster center's y-index.
- */
- public Cluster(int ix, int iy) { this(new Point(ix, iy), Double.NaN, Double.NaN); }
-
- /**
- * Creates a new cluster. All clusters are required to have a seed
- * hit.
- * @param clusterCenter - The <code>Point</code> object indicating in
- * which crystal the seed hit occurred.
- */
- public Cluster(Point clusterCenter) { this(clusterCenter, Double.NaN, Double.NaN); }
-
- /**
- * Creates a new cluster. All clusters are required to have a
- * cluster center.
- * @param ix - The cluster center's x-index.
- * @param iy - The cluster center's y-index.
- * @param energy - The cluster's energy.
- */
- public Cluster(int ix, int iy, double energy) { this(new Point(ix, iy), energy, Double.NaN); }
-
- /**
- * Creates a new cluster. All clusters are required to have a
- * cluster center.
- * @param ix - The cluster center's x-index.
- * @param iy - The cluster center's y-index.
- * @param energy - The cluster's energy.
- * @param time - The cluster's time-stamp.
- */
- public Cluster(int ix, int iy, double energy, double time) { this(new Point(ix, iy), energy, time); }
-
- /**
- * Creates a new cluster. All clusters are required to have a seed
- * hit.
- * @param clusterCenter - The <code>Point</code> object indicating in
- * which crystal the seed hit occurred.
- * @param energy - The cluster's energy.
- */
- public Cluster(Point clusterCenter, double energy) {
- this(clusterCenter, energy, Double.NaN);
- }
-
- /**
- * Creates a new cluster. All clusters are required to have a seed
- * hit.
- * @param clusterCenter - The <code>Point</code> object indicating in
- * which crystal the seed hit occurred.
- * @param energy - The cluster's energy.
- * @param time - The cluster's time-stamp.
- */
- public Cluster(Point clusterCenter, double energy, double time) {
- center = clusterCenter;
- this.energy = energy;
- this.time = time;
- }
-
- /**
- * Adds an <code>Point</code> to the list of this cluster's
- * component hits.
- * @param ix - The component hit's x-coordinate.
- * @param iy - The component hit's y-coordinate.
- */
- public void addComponentHit(int ix, int iy) { hitList.add(new Point(ix, iy)); }
-
- /**
- * Adds an <code>Point</code> to the list of this cluster's
- * component hits.
- * @param eHit - The <code>Point</code> object indicating in which
- * crystal the hit occurred.
- */
- public void addComponentHit(Point eHit) { hitList.add(eHit); }
-
- /**
- * Adds an <code>Point</code> to the list of this cluster's shared
- * hits.
- * @param ix - The shared hit's x-coordinate.
- * @param iy - The shared hit's y-coordinate.
- */
- public void addSharedHit(int ix, int iy) { shareList.add(new Point(ix, iy)); }
-
- /**
- * Adds an <code>Point</code> to the list of this cluster's shared
- * hits.
- * @param eHit - The <code>Point</code> object indicating in which
- * crystal the hit occurred.
- */
- public void addSharedHit(Point eHit) { shareList.add(eHit); }
-
- /**
- * Gets the hit representing the cluster center.
- * @return Returns the cluster center hit as an <code>Point</code>.
- */
- public Point getClusterCenter() { return center; }
-
- /**
- * Gets the cluster's energy, if it was defined when the cluster
- * was constructed.
- * @return Returns the energy of the cluster if it was defined,
- * and <code>NaN</code> otherwise.
- */
- public double getClusterEnergy() { return energy; }
-
- /**
- * Gets the time stamp for the cluster in nanoseconds.
- * @return Returns the cluster's time stamp.
- */
- public double getClusterTime() { return time; }
-
- /**
- * Indicates how many component hits compose this cluster. Note
- * that this does not include the seed hit or shared hits.
- * @return Returns the number of component hits in the cluster
- * as an <code>int</code>.
- */
- public int getComponentHitCount() { return hitList.size(); }
-
- /**
- * Gets the list of hits that make up the cluster, exempting the
- * seed hit and shared hits.
- * @return Returns the cluster hits as a <code>List</code> object
- * composed of <code>Point</code> objects.
- */
- public List<Point> getComponentHits() { return hitList; }
-
- /**
- * Indicates how many total hits compose this cluster. This includes
- * component hits, shared hits, and the seed hit.
- * @return Returns the number of component hits in the cluster
- * as an <code>int</code>.
- */
- public int getHitCount() { return hitList.size() + shareList.size() + 1; }
-
- /**
- * Indicates how many shared hits compose this cluster. Note that
- * this does not include the seed hit or component hits.
- * @return Returns the number of shared hits in the cluster as an
- * <code>int</code>.
- */
- public int getSharedHitCount() { return shareList.size(); }
-
- /**
- * Gets the list of hits that make up the cluster, exempting the
- * seed hit and component hits.
- * @return Returns the shared hits as a <code>List</code> object
- * composed of <code>Point</code> objects.
- */
- public List<Point> getSharedHits() { return shareList; }
+ private final Point center;
+ private final double energy;
+ private final double time;
+ private ArrayList<Point> hitList = new ArrayList<Point>();
+ private ArrayList<Point> shareList = new ArrayList<Point>();
+
+ /**
+ * Creates a new cluster. All clusters are required to have a
+ * cluster center.
+ * @param ix - The cluster center's x-index.
+ * @param iy - The cluster center's y-index.
+ */
+ public Cluster(int ix, int iy) { this(new Point(ix, iy), Double.NaN, Double.NaN); }
+
+ /**
+ * Creates a new cluster. All clusters are required to have a seed
+ * hit.
+ * @param clusterCenter - The <code>Point</code> object indicating in
+ * which crystal the seed hit occurred.
+ */
+ public Cluster(Point clusterCenter) { this(clusterCenter, Double.NaN, Double.NaN); }
+
+ /**
+ * Creates a new cluster. All clusters are required to have a
+ * cluster center.
+ * @param ix - The cluster center's x-index.
+ * @param iy - The cluster center's y-index.
+ * @param energy - The cluster's energy.
+ */
+ public Cluster(int ix, int iy, double energy) { this(new Point(ix, iy), energy, Double.NaN); }
+
+ /**
+ * Creates a new cluster. All clusters are required to have a
+ * cluster center.
+ * @param ix - The cluster center's x-index.
+ * @param iy - The cluster center's y-index.
+ * @param energy - The cluster's energy.
+ * @param time - The cluster's time-stamp.
+ */
+ public Cluster(int ix, int iy, double energy, double time) { this(new Point(ix, iy), energy, time); }
+
+ /**
+ * Creates a new cluster. All clusters are required to have a seed
+ * hit.
+ * @param clusterCenter - The <code>Point</code> object indicating in
+ * which crystal the seed hit occurred.
+ * @param energy - The cluster's energy.
+ */
+ public Cluster(Point clusterCenter, double energy) {
+ this(clusterCenter, energy, Double.NaN);
+ }
+
+ /**
+ * Creates a new cluster. All clusters are required to have a seed
+ * hit.
+ * @param clusterCenter - The <code>Point</code> object indicating in
+ * which crystal the seed hit occurred.
+ * @param energy - The cluster's energy.
+ * @param time - The cluster's time-stamp.
+ */
+ public Cluster(Point clusterCenter, double energy, double time) {
+ center = clusterCenter;
+ this.energy = energy;
+ this.time = time;
+ }
+
+ /**
+ * Adds an <code>Point</code> to the list of this cluster's
+ * component hits.
+ * @param ix - The component hit's x-coordinate.
+ * @param iy - The component hit's y-coordinate.
+ */
+ public void addComponentHit(int ix, int iy) { hitList.add(new Point(ix, iy)); }
+
+ /**
+ * Adds an <code>Point</code> to the list of this cluster's
+ * component hits.
+ * @param eHit - The <code>Point</code> object indicating in which
+ * crystal the hit occurred.
+ */
+ public void addComponentHit(Point eHit) { hitList.add(eHit); }
+
+ /**
+ * Adds an <code>Point</code> to the list of this cluster's shared
+ * hits.
+ * @param ix - The shared hit's x-coordinate.
+ * @param iy - The shared hit's y-coordinate.
+ */
+ public void addSharedHit(int ix, int iy) { shareList.add(new Point(ix, iy)); }
+
+ /**
+ * Adds an <code>Point</code> to the list of this cluster's shared
+ * hits.
+ * @param eHit - The <code>Point</code> object indicating in which
+ * crystal the hit occurred.
+ */
+ public void addSharedHit(Point eHit) { shareList.add(eHit); }
+
+ /**
+ * Gets the hit representing the cluster center.
+ * @return Returns the cluster center hit as an <code>Point</code>.
+ */
+ public Point getClusterCenter() { return center; }
+
+ /**
+ * Gets the cluster's energy, if it was defined when the cluster
+ * was constructed.
+ * @return Returns the energy of the cluster if it was defined,
+ * and <code>NaN</code> otherwise.
+ */
+ public double getClusterEnergy() { return energy; }
+
+ /**
+ * Gets the time stamp for the cluster in nanoseconds.
+ * @return Returns the cluster's time stamp.
+ */
+ public double getClusterTime() { return time; }
+
+ /**
+ * Indicates how many component hits compose this cluster. Note
+ * that this does not include the seed hit or shared hits.
+ * @return Returns the number of component hits in the cluster
+ * as an <code>int</code>.
+ */
+ public int getComponentHitCount() { return hitList.size(); }
+
+ /**
+ * Gets the list of hits that make up the cluster, exempting the
+ * seed hit and shared hits.
+ * @return Returns the cluster hits as a <code>List</code> object
+ * composed of <code>Point</code> objects.
+ */
+ public List<Point> getComponentHits() { return hitList; }
+
+ /**
+ * Indicates how many total hits compose this cluster. This includes
+ * component hits, shared hits, and the seed hit.
+ * @return Returns the number of component hits in the cluster
+ * as an <code>int</code>.
+ */
+ public int getHitCount() { return hitList.size() + shareList.size() + 1; }
+
+ /**
+ * Indicates how many shared hits compose this cluster. Note that
+ * this does not include the seed hit or component hits.
+ * @return Returns the number of shared hits in the cluster as an
+ * <code>int</code>.
+ */
+ public int getSharedHitCount() { return shareList.size(); }
+
+ /**
+ * Gets the list of hits that make up the cluster, exempting the
+ * seed hit and component hits.
+ * @return Returns the shared hits as a <code>List</code> object
+ * composed of <code>Point</code> objects.
+ */
+ public List<Point> getSharedHits() { return shareList; }
}
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/event/EcalHit.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/event/EcalHit.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/event/EcalHit.java Wed Apr 27 11:11:32 2016
@@ -23,7 +23,7 @@
* @param energy - The raw energy of the hit.
**/
public EcalHit(int ix, int iy, double energy) {
- this(new Point(ix, iy), energy);
+ this(new Point(ix, iy), energy);
}
/**
@@ -32,7 +32,7 @@
* @param energy - The raw energy of the hit.
**/
public EcalHit(Point ixy, double energy) {
- loc = ixy;
+ loc = ixy;
this.energy = energy;
}
@@ -44,7 +44,7 @@
* @param time - The time-stamp for the hit.
**/
public EcalHit(int ix, int iy, double energy, double time) {
- this(new Point(ix, iy), energy, time);
+ this(new Point(ix, iy), energy, time);
}
/**
@@ -54,7 +54,7 @@
* @param time - The time-stamp for the hit.
**/
public EcalHit(Point ixy, double energy, double time) {
- loc = ixy;
+ loc = ixy;
this.energy = energy;
this.time = time;
}
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/event/Event.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/event/Event.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/event/Event.java Wed Apr 27 11:11:32 2016
@@ -10,119 +10,119 @@
* @author Kyle McCarty
*/
public final class Event {
- private List<EcalHit> hitList;
- private List<Point> clusterList;
- private List<Association> connectList;
-
- /**
- * <b>Event</b><br/><br/>
- * <code>public <b>Event</b>()</code><br/><br/>
- * Creates a new <code>Event</code>.
- */
- public Event() { this(10, 10, 10); }
-
- /**
- * <b>Event</b><br/><br/>
- * <code>public <b>Event</b>(int hits, int clusters)</code><br/><br/>
- * Creates a new <code>Event</code> and reserves spaces for the
- * given number of hits and cluster centers.
- * @param hits - The number of hits for which to reserve space.
- * @param clusters - The number of cluster centers for which to
- * reserve space.
- */
- public Event(int hits, int clusters) { this(hits, clusters, 10); }
-
- /**
- * <b>Event</b><br/><br/>
- * <code>public <b>Event</b>(int hits, int clusters, int associations)</code><br/><br/>
- * Creates a new <code>Event</code> and reserves spaces for the
- * given number of hits, cluster centers, and crystal associations.
- * @param hits - The number of hits for which to reserve space.
- * @param clusters - The number of cluster centers for which to
- * reserve space.
- * @param associations - The number of crystal associations for
- * which to reserve space.
- */
- public Event(int hits, int clusters, int associations) {
- hitList = new ArrayList<EcalHit>(hits);
- clusterList = new ArrayList<Point>(clusters);
- connectList = new ArrayList<Association>(associations);
- }
-
- /**
- * <b>Event</b><br/><br/>
- * <code>public <b>Event</b>(List<EcalHit> hits, List<Point> clusters, List<Association> associations)</code><br/><br/>
- * Creates a new <code>Event</code> and sets its contents to those
- * of the given lists. The crystal association list will be empty.
- * @param hits - The list of calorimeter hits.
- * @param clusters - The list of cluster centers.
- */
- public Event(List<EcalHit> hits, List<Point> clusters) {
- this(hits, clusters, new ArrayList<Association>());
- }
-
- /**
- * <b>Event</b><br/><br/>
- * <code>public <b>Event</b>(List<EcalHit> hits, List<Point> clusters, List<Association> associations)</code><br/><br/>
- * Creates a new <code>Event</code> and sets its contents to those
- * of the given lists.
- * @param hits - The list of calorimeter hits.
- * @param clusters - The list of cluster centers.
- * @param associations - The list of crystal associations.
- */
- public Event(List<EcalHit> hits, List<Point> clusters, List<Association> associations) {
- hitList = hits;
- clusterList = clusters;
- connectList = associations;
- }
-
- /**
- * <b>addAssociation</b><br/><br/>
- * <code>public void <b>addAssociation</b>(Association connectedCrystal)</code><br/><br/>
- * Adds a crystal association to the event.
- * @param connectedCrystal - The crystal association to add.
- */
- public void addAssociation(Association connectedCrystal) {
- connectList.add(connectedCrystal);
- }
-
- /**
- * <b>addCluster</b><br/><br/>
- * <code>public void <b>addCluster</b>(Point cluster)</code><br/><br/>
- * Adds a cluster center to the event.
- * @param cluster - The cluster center to add.
- */
- public void addCluster(Point cluster) { clusterList.add(cluster); }
-
- /**
- * <b>addHit</b><br/><br/>
- * <code>public void <b>addHit</b>(EcalHit hit)</code><br/><br/>
- * Adds a calorimeter hit to the event.
- * @param hit - The calorimeter hit to add.
- */
- public void addHit(EcalHit hit) { hitList.add(hit); }
-
- /**
- * <b>getAssociations</b><br/><br/>
- * <code>public List<Association> <b>getAssociations</b>()</code><br/><br/>
- * Gets the list of associated crystals for this event.
- * @return Returns the associations in a <code>List</code>.
- */
- public List<Association> getAssociations() { return connectList; }
-
- /**
- * <b>getClusterCenters</b><br/><br/>
- * <code>List<Cluster><b>getClusterCenters</b>()</code><br/><br/>
- * Gets the list of cluster centers for this event.
- * @return Returns the cluster centers in a <code>List</code>.
- */
- public List<Point> getClusterCenters() { return clusterList; }
-
- /**
- * <b>getHits</b><br/><br/>
- * <code>public List<EcalHit> <b>getHits</b>()</code><br/><br/>
- * Gets the list of calorimeter hits for this event.
- * @return Returns the hits in a <code>List</code>.
- */
- public List<EcalHit> getHits() { return hitList; }
+ private List<EcalHit> hitList;
+ private List<Point> clusterList;
+ private List<Association> connectList;
+
+ /**
+ * <b>Event</b><br/><br/>
+ * <code>public <b>Event</b>()</code><br/><br/>
+ * Creates a new <code>Event</code>.
+ */
+ public Event() { this(10, 10, 10); }
+
+ /**
+ * <b>Event</b><br/><br/>
+ * <code>public <b>Event</b>(int hits, int clusters)</code><br/><br/>
+ * Creates a new <code>Event</code> and reserves spaces for the
+ * given number of hits and cluster centers.
+ * @param hits - The number of hits for which to reserve space.
+ * @param clusters - The number of cluster centers for which to
+ * reserve space.
+ */
+ public Event(int hits, int clusters) { this(hits, clusters, 10); }
+
+ /**
+ * <b>Event</b><br/><br/>
+ * <code>public <b>Event</b>(int hits, int clusters, int associations)</code><br/><br/>
+ * Creates a new <code>Event</code> and reserves spaces for the
+ * given number of hits, cluster centers, and crystal associations.
+ * @param hits - The number of hits for which to reserve space.
+ * @param clusters - The number of cluster centers for which to
+ * reserve space.
+ * @param associations - The number of crystal associations for
+ * which to reserve space.
+ */
+ public Event(int hits, int clusters, int associations) {
+ hitList = new ArrayList<EcalHit>(hits);
+ clusterList = new ArrayList<Point>(clusters);
+ connectList = new ArrayList<Association>(associations);
+ }
+
+ /**
+ * <b>Event</b><br/><br/>
+ * <code>public <b>Event</b>(List<EcalHit> hits, List<Point> clusters, List<Association> associations)</code><br/><br/>
+ * Creates a new <code>Event</code> and sets its contents to those
+ * of the given lists. The crystal association list will be empty.
+ * @param hits - The list of calorimeter hits.
+ * @param clusters - The list of cluster centers.
+ */
+ public Event(List<EcalHit> hits, List<Point> clusters) {
+ this(hits, clusters, new ArrayList<Association>());
+ }
+
+ /**
+ * <b>Event</b><br/><br/>
+ * <code>public <b>Event</b>(List<EcalHit> hits, List<Point> clusters, List<Association> associations)</code><br/><br/>
+ * Creates a new <code>Event</code> and sets its contents to those
+ * of the given lists.
+ * @param hits - The list of calorimeter hits.
+ * @param clusters - The list of cluster centers.
+ * @param associations - The list of crystal associations.
+ */
+ public Event(List<EcalHit> hits, List<Point> clusters, List<Association> associations) {
+ hitList = hits;
+ clusterList = clusters;
+ connectList = associations;
+ }
+
+ /**
+ * <b>addAssociation</b><br/><br/>
+ * <code>public void <b>addAssociation</b>(Association connectedCrystal)</code><br/><br/>
+ * Adds a crystal association to the event.
+ * @param connectedCrystal - The crystal association to add.
+ */
+ public void addAssociation(Association connectedCrystal) {
+ connectList.add(connectedCrystal);
+ }
+
+ /**
+ * <b>addCluster</b><br/><br/>
+ * <code>public void <b>addCluster</b>(Point cluster)</code><br/><br/>
+ * Adds a cluster center to the event.
+ * @param cluster - The cluster center to add.
+ */
+ public void addCluster(Point cluster) { clusterList.add(cluster); }
+
+ /**
+ * <b>addHit</b><br/><br/>
+ * <code>public void <b>addHit</b>(EcalHit hit)</code><br/><br/>
+ * Adds a calorimeter hit to the event.
+ * @param hit - The calorimeter hit to add.
+ */
+ public void addHit(EcalHit hit) { hitList.add(hit); }
+
+ /**
+ * <b>getAssociations</b><br/><br/>
+ * <code>public List<Association> <b>getAssociations</b>()</code><br/><br/>
+ * Gets the list of associated crystals for this event.
+ * @return Returns the associations in a <code>List</code>.
+ */
+ public List<Association> getAssociations() { return connectList; }
+
+ /**
+ * <b>getClusterCenters</b><br/><br/>
+ * <code>List<Cluster><b>getClusterCenters</b>()</code><br/><br/>
+ * Gets the list of cluster centers for this event.
+ * @return Returns the cluster centers in a <code>List</code>.
+ */
+ public List<Point> getClusterCenters() { return clusterList; }
+
+ /**
+ * <b>getHits</b><br/><br/>
+ * <code>public List<EcalHit> <b>getHits</b>()</code><br/><br/>
+ * Gets the list of calorimeter hits for this event.
+ * @return Returns the hits in a <code>List</code>.
+ */
+ public List<EcalHit> getHits() { return hitList; }
}
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/io/EventManager.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/io/EventManager.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/io/EventManager.java Wed Apr 27 11:11:32 2016
@@ -21,16 +21,16 @@
* may be read.
* @throws IOException Occurs if there is an error closing the file stream.
**/
- public void close() throws IOException;
-
- /**
- * <b>getEventNumber</b><br/><br/>
+ public void close() throws IOException;
+
+ /**
+ * <b>getEventNumber</b><br/><br/>
* <code>public int <b>getEventNumber</b>()</code><br/><br/>
* Gets the ordinal number for the currently displayed event.
- * @return Returns the current event's ordinal number.
- */
- public int getEventNumber();
-
+ * @return Returns the current event's ordinal number.
+ */
+ public int getEventNumber();
+
/**
* <b>getClusters</b><br/><br/>
* <code>public ArrayList<Cluster> <b>getClusters</b>()</code><br/><br/>
@@ -38,17 +38,17 @@
* @return Returns the current clusters as an <code>ArrayList
* </code> object.
**/
- public List<Cluster> getClusters();
-
- /**
+ public List<Cluster> getClusters();
+
+ /**
* <b>getHits</b><br/><br/>
* <code>public ArrayList<EcalHit> <b>getHits</b>()</code><br/><br/>
* Allows access to the current event's list of hits.
* @return Returns the current hits as an <code>ArrayList</code> object.
**/
- public List<EcalHit> getHits();
-
- /**
+ public List<EcalHit> getHits();
+
+ /**
* <b>nextEvent</b><br/><br/>
* <code>public boolean <b>nextEvent</b>()</code><br/><br/>
* Populates the event manager with hits and clusters from the next event.
@@ -56,8 +56,8 @@
* </code> if it was not.
* @throws IOException Occurs if there was a file read error.
**/
- public boolean nextEvent() throws IOException;
-
+ public boolean nextEvent() throws IOException;
+
/**
* <b>previousEvent</b><br/><br/>
* <code>public boolean <b>previousEvent</b>()</code><br/><br/>
@@ -66,5 +66,5 @@
* </code> if it was not.
* @throws IOException Occurs if there was a file read error.
**/
- public boolean previousEvent() throws IOException;
+ public boolean previousEvent() throws IOException;
}
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/io/TextManager.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/io/TextManager.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/io/TextManager.java Wed Apr 27 11:11:32 2016
@@ -53,7 +53,7 @@
/**
* Initializes an event manager that will read from the indicated file.
- * @param filename - The path to the file containing hit information.
+ * @param file - The path to the file containing hit information.
*/
public TextManager(File file) throws IOException {
reader = new AdvancedReader(file);
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/lcsim/EventDisplayOutputDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/lcsim/EventDisplayOutputDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/lcsim/EventDisplayOutputDriver.java Wed Apr 27 11:11:32 2016
@@ -97,30 +97,30 @@
// Only write clusters if the option is selected.
if(outputClusters) {
- // Process the clusters.
- for (org.lcsim.event.Cluster cluster : clusters) {
- // Get the seed hit for the cluster.
- CalorimeterHit seedHit = (CalorimeterHit)cluster.getCalorimeterHits().get(0);
- int ix = seedHit.getIdentifierFieldValue("ix");
- int iy = seedHit.getIdentifierFieldValue("iy");
- double time = seedHit.getTime();
-
- // Get the cluster's total energy.
- double energy = cluster.getEnergy();
-
- // Write the seed hit to start a cluster.
- writer.append(String.format("Cluster\t%d\t%d\t%f\t%f%n", ix, iy, energy, time));
-
- // Write the component hits to the cluster.
- for (CalorimeterHit hit : cluster.getCalorimeterHits()) {
- // Get each component hit's x/y coordinates.
- ix = hit.getIdentifierFieldValue("ix");
- iy = hit.getIdentifierFieldValue("iy");
-
- // Write them as component hits.
- writer.append(String.format("CompHit\t%d\t%d%n", ix, iy));
- }
- }
+ // Process the clusters.
+ for (org.lcsim.event.Cluster cluster : clusters) {
+ // Get the seed hit for the cluster.
+ CalorimeterHit seedHit = (CalorimeterHit)cluster.getCalorimeterHits().get(0);
+ int ix = seedHit.getIdentifierFieldValue("ix");
+ int iy = seedHit.getIdentifierFieldValue("iy");
+ double time = seedHit.getTime();
+
+ // Get the cluster's total energy.
+ double energy = cluster.getEnergy();
+
+ // Write the seed hit to start a cluster.
+ writer.append(String.format("Cluster\t%d\t%d\t%f\t%f%n", ix, iy, energy, time));
+
+ // Write the component hits to the cluster.
+ for (CalorimeterHit hit : cluster.getCalorimeterHits()) {
+ // Get each component hit's x/y coordinates.
+ ix = hit.getIdentifierFieldValue("ix");
+ iy = hit.getIdentifierFieldValue("iy");
+
+ // Write them as component hits.
+ writer.append(String.format("CompHit\t%d\t%d%n", ix, iy));
+ }
+ }
}
// Append the end of event indicator.
@@ -169,7 +169,7 @@
* indicates that they will be output.
*/
public void setIgnoreEmptyEvents(boolean ignoreEmptyEvents) {
- this.ignoreEmptyEvents = ignoreEmptyEvents;
+ this.ignoreEmptyEvents = ignoreEmptyEvents;
}
/**
@@ -179,7 +179,7 @@
* indicates that they will be output.
*/
public void setIgnoreNoClusterEvents(boolean ignoreNoClusterEvents) {
- this.ignoreNoClusterEvents = ignoreNoClusterEvents;
+ this.ignoreNoClusterEvents = ignoreNoClusterEvents;
}
/**
@@ -196,6 +196,6 @@
* will be written and <code>false</code> that they will not.
*/
public void setOutputClusters(boolean outputClusters) {
- this.outputClusters = outputClusters;
+ this.outputClusters = outputClusters;
}
}
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/lcsim/LCIOBridgeDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/lcsim/LCIOBridgeDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/lcsim/LCIOBridgeDriver.java Wed Apr 27 11:11:32 2016
@@ -39,9 +39,9 @@
* @param event - The LCIO event.
*/
public void process(EventHeader event) {
- // If we are still updating the display, skip this event.
- if(updating) { return; }
-
+ // If we are still updating the display, skip this event.
+ if(updating) { return; }
+
// Make sure that this event has calorimeter hits.
if (event.hasCollection(CalorimeterHit.class, ecalCollectionName)) {
// Get the list of calorimeter hits from the event.
@@ -55,44 +55,44 @@
// If this is the correct place to update, do so.
if(eventsProcessed >= displayInterval) {
- // Lock the update method for the duration of the update.
- updating = true;
-
- // Clear the event display.
- eventDisplay.resetDisplay();
-
- // Add all of the hits.
- for(CalorimeterHit hit : hits) {
- // Get the hit's location and energy.
- int ix = hit.getIdentifierFieldValue("ix");
- int iy = hit.getIdentifierFieldValue("iy");
- double energy = hit.getRawEnergy();
-
- // Add the hit energy to the event display.
- eventDisplay.addHit(new EcalHit(ix, iy, energy));
- }
-
- // Add all the clusters.
- for(org.lcsim.event.Cluster cluster : clusters) {
- // Get the seed hit.
- CalorimeterHit seed = cluster.getCalorimeterHits().get(0);
- int ix = seed.getIdentifierFieldValue("ix");
- int iy = seed.getIdentifierFieldValue("iy");
- double energy = seed.getRawEnergy(); // FIXME: Should this be getCorrectedEnergy() instead? --JM
-
- // Add the cluster center to the event display.
- Cluster cc = new Cluster(ix, iy, energy);
- eventDisplay.addCluster(cc);
- }
-
- // Update the display.
- eventDisplay.updateDisplay();
-
- // Reset the number of events we've seen since the last update.
- eventsProcessed = 0;
-
- // Unlock the update method so that more events can be processed.
- updating = false;
+ // Lock the update method for the duration of the update.
+ updating = true;
+
+ // Clear the event display.
+ eventDisplay.resetDisplay();
+
+ // Add all of the hits.
+ for(CalorimeterHit hit : hits) {
+ // Get the hit's location and energy.
+ int ix = hit.getIdentifierFieldValue("ix");
+ int iy = hit.getIdentifierFieldValue("iy");
+ double energy = hit.getRawEnergy();
+
+ // Add the hit energy to the event display.
+ eventDisplay.addHit(new EcalHit(ix, iy, energy));
+ }
+
+ // Add all the clusters.
+ for(org.lcsim.event.Cluster cluster : clusters) {
+ // Get the seed hit.
+ CalorimeterHit seed = cluster.getCalorimeterHits().get(0);
+ int ix = seed.getIdentifierFieldValue("ix");
+ int iy = seed.getIdentifierFieldValue("iy");
+ double energy = seed.getRawEnergy(); // FIXME: Should this be getCorrectedEnergy() instead? --JM
+
+ // Add the cluster center to the event display.
+ Cluster cc = new Cluster(ix, iy, energy);
+ eventDisplay.addCluster(cc);
+ }
+
+ // Update the display.
+ eventDisplay.updateDisplay();
+
+ // Reset the number of events we've seen since the last update.
+ eventsProcessed = 0;
+
+ // Unlock the update method so that more events can be processed.
+ updating = false;
}
}
}
@@ -119,14 +119,14 @@
* a new event is displayed.
*/
public void setDisplayInterval(String displayInterval) {
- // Convert the argument to an integer.
- int disp = Integer.parseInt(displayInterval);
-
- // If it is negative, make it zero.
- if(disp < 0) { disp = 0; }
-
- // Set the display interval.
- this.displayInterval = disp;
+ // Convert the argument to an integer.
+ int disp = Integer.parseInt(displayInterval);
+
+ // If it is negative, make it zero.
+ if(disp < 0) { disp = 0; }
+
+ // Set the display interval.
+ this.displayInterval = disp;
}
/**
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/ActiveViewer.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/ActiveViewer.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/ActiveViewer.java Wed Apr 27 11:11:32 2016
@@ -34,8 +34,6 @@
* events from the indicated data source with additional status
* fields defined by the <code>fieldNames</code> argument.
* @param em - The data source event manager.
- * @param fieldNames - An array of additional status fields
- * that should be displayed.
*/
public ActiveViewer(EventManager em) {
// Pass any additional field values to the super class.
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/CalorimeterPanel.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/CalorimeterPanel.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/CalorimeterPanel.java Wed Apr 27 11:11:32 2016
@@ -503,8 +503,8 @@
/**
* Determines if the crystal at the given coordinates is a active
* or not.
- * @param xCoor - The x-index of the crystal.
- * @param yCoor - The y-index of the crystal.
+ * @param ix - The x-index of the crystal.
+ * @param iy - The y-index of the crystal.
* @return Returns <code>true</code> if the crystal is active
* and <code>false</code> if it is not.
* @throws IndexOutOfBoundsException Occurs when either of the given
@@ -740,28 +740,28 @@
/**
* Sets whether to mirror the x-axis on the calorimeter display.
- * @param state - <code>true</code> indicates that the axis should
+ * @param mirrorX - <code>true</code> indicates that the axis should
* be mirrored and <code>false</code> that it should not.
*/
public void setMirrorX(boolean mirrorX) {
- // Process the change.
- setMirror(mirrorX, mirrorY);
-
- // Throw an event.
- throwSettingsEvent(SettingsEvent.PROPERTY_X_ORIENTATION);
+ // Process the change.
+ setMirror(mirrorX, mirrorY);
+
+ // Throw an event.
+ throwSettingsEvent(SettingsEvent.PROPERTY_X_ORIENTATION);
}
/**
* Sets whether to mirror the y-axis on the calorimeter display.
- * @param state - <code>true</code> indicates that the axis should
+ * @param mirrorY - <code>true</code> indicates that the axis should
* be mirrored and <code>false</code> that it should not.
*/
public void setMirrorY(boolean mirrorY) {
- // Process the change.
- setMirror(mirrorX, mirrorY);
-
- // Throw an event.
- throwSettingsEvent(SettingsEvent.PROPERTY_Y_ORIENTATION);
+ // Process the change.
+ setMirror(mirrorX, mirrorY);
+
+ // Throw an event.
+ throwSettingsEvent(SettingsEvent.PROPERTY_Y_ORIENTATION);
}
/**
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/DataFileViewer.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/DataFileViewer.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/DataFileViewer.java Wed Apr 27 11:11:32 2016
@@ -97,9 +97,9 @@
filterPanel.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
- // Suppress panel redrawing until the highlights are set.
- ecalPanel.setSuppressRedraw(true);
-
+ // Suppress panel redrawing until the highlights are set.
+ ecalPanel.setSuppressRedraw(true);
+
// Clear the panel highlighting.
ecalPanel.clearHighlight();
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/FileViewer.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/FileViewer.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/FileViewer.java Wed Apr 27 11:11:32 2016
@@ -178,9 +178,9 @@
// be displayed.
Double hitTime = crystalTimeMap.get(crystal);
if(hitTime != null) {
- setStatusField(fieldNames[HIT_TIME], Double.toString(hitTime));
+ setStatusField(fieldNames[HIT_TIME], Double.toString(hitTime));
} else {
- setStatusField(fieldNames[HIT_TIME], ResizableFieldPanel.NULL_VALUE);
+ setStatusField(fieldNames[HIT_TIME], ResizableFieldPanel.NULL_VALUE);
}
}
// Otherwise, clear the field values.
@@ -257,7 +257,7 @@
// Load hit time map.
crystalTimeMap.clear();
for(EcalHit hit : em.getHits()) {
- crystalTimeMap.put(new Point(toPanelX(hit.getX()), toPanelY(hit.getY())), hit.getTime());
+ crystalTimeMap.put(new Point(toPanelX(hit.getX()), toPanelY(hit.getY())), hit.getTime());
}
// Display it.
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/PDataEventViewer.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/PDataEventViewer.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/PDataEventViewer.java Wed Apr 27 11:11:32 2016
@@ -57,7 +57,6 @@
* Initializes a new <code>DataFileViewer</code> that reads from
* the given event manager for event data and the given hardware
* data file for crystal hardware data readout.
- * @param dataSource - The manager for event data.
* @param crystalDataFilePath - The data file for crystal hardware
* information.
* @throws IOException Occurs if there is an error reading from
@@ -98,9 +97,9 @@
filterPanel.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
- // Suppress panel redrawing until the highlights are set.
- ecalPanel.setSuppressRedraw(true);
-
+ // Suppress panel redrawing until the highlights are set.
+ ecalPanel.setSuppressRedraw(true);
+
// Clear the panel highlighting.
ecalPanel.clearHighlight();
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/PassiveViewer.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/PassiveViewer.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/PassiveViewer.java Wed Apr 27 11:11:32 2016
@@ -16,7 +16,7 @@
/**
* Adds a new hit to the display.
- * @param hit - The hit to be added.
+ * @param lcioHit - The hit to be added.
*/
public abstract void addHit(CalorimeterHit lcioHit);
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/Viewer.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/Viewer.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/Viewer.java Wed Apr 27 11:11:32 2016
@@ -122,8 +122,6 @@
/**
* Initializes the viewer window and calorimeter panel.
- * @param statusFields - Additional fields to display in the status
- * panel. This can not be <code>null</code>.
* @throws NullPointerException Occurs if any of the additional field
* arguments are <code>null</code>.
**/
@@ -638,7 +636,7 @@
JFileChooser chooser = new JFileChooser();
if(chooser.showSaveDialog(this) == JFileChooser.CANCEL_OPTION) {
- return;
+ return;
}
// Parse the file name and make sure that it ends in .PNG.
@@ -646,9 +644,9 @@
int index = filepath.lastIndexOf('.');
if(index == -1) { filepath = filepath + ".png"; }
else {
- if(filepath.substring(index + 1).compareTo("png") != 0) {
- filepath = filepath.substring(0, index) + ".png";
- }
+ if(filepath.substring(index + 1).compareTo("png") != 0) {
+ filepath = filepath.substring(0, index) + ".png";
+ }
}
// Get the lowest available file name.
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/BooleanMap.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/BooleanMap.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/BooleanMap.java Wed Apr 27 11:11:32 2016
@@ -10,208 +10,208 @@
* @author Kyle McCarty
*/
public final class BooleanMap implements ColorMap<Double> {
- // The color to display for values which pass the boolean check.
- private Color activeColor = new Color(255, 50, 50);
- // The color to display for values that fail the boolean check.
- private Color inactiveColor = Color.WHITE;
- // The critical value against which the boolean check is performed.
- private double value = 0.0;
- // The type of this boolean scale.
- private final BooleanType boolType;
-
- /**
- * <b>BooleanMap</b><br/><br/>
- * <code>public <b>BooleanMap</b>(BooleanType type, double comparisonValue)</code><br/><br/>
- * Defines a <code>ColorScale</code> which maps values to colors
- * based on a boolean comparison.
- * @param type - The type of boolean comparison to perform.
- * @param comparisonValue - The value against which the comparison
- * should be made.
- */
- public BooleanMap(BooleanType type, double comparisonValue) {
- // Make sure the comparison type is not null.
- if(type == null) { throw new IllegalArgumentException("Boolean comparison type can not be null."); }
-
- // Define the critical value and the boolean type.
- value = comparisonValue;
- boolType = type;
- }
-
- /**
- * <b>BooleanMap</b><br/><br/>
- * <code>public <b>BooleanMap</b>(BooleanType type, double comparisonValue,
- * Color activeColor)</code><br/><br/>
- * Defines a <code>ColorScale</code> which maps values to colors
- * based on a boolean comparison.
- * @param type - The type of boolean comparison to perform.
- * @param comparisonValue - The value against which the comparison
- * should be made.
- * @param activeColor - The color in which values that pass the
- * comparison should be displayed.
- */
- public BooleanMap(BooleanType type, double comparisonValue, Color activeColor) {
- // Set the critical value and the boolean type.
- this(type, comparisonValue);
-
- // Set the active color.
- this.activeColor = activeColor;
- }
-
- /**
- * <b>BooleanMap</b><br/><br/>
- * <code>public <b>BooleanMap</b>(BooleanType type, double comparisonValue,
- * Color activeColor, Color inactiveColor)</code><br/><br/>
- * Defines a <code>ColorScale</code> which maps values to colors
- * based on a boolean comparison.
- * @param type - The type of boolean comparison to perform.
- * @param comparisonValue - The value against which the comparison
- * should be made.
- * @param activeColor - The color in which values that pass the
- * comparison should be displayed.
- * @param inactiveColor - The color in which values that fail the
- * comparison should be displayed.
- */
- public BooleanMap(BooleanType type, double comparisonValue, Color activeColor, Color inactiveColor) {
- // Set the critical value and the boolean type.
- this(type, comparisonValue);
-
- // Set the active and inactive colors.
- this.activeColor = activeColor;
- this.inactiveColor = inactiveColor;
- }
-
- public Color getColor(Double value) {
- // If the argument is null, treat it is zero.
- if(value == null) { value = 0.0; }
-
- // If it passes the boolean comparison, return the active color.
- if(passes(value)) { return activeColor; }
-
- // Otherwise, return the inactive color.
- else { return inactiveColor; }
- }
-
- /**
- * <b>getActiveColor</b><br/><br/>
- * <code>public Color <b>getActiveColor</b>()</code><br/><br/>
- * Gets the color used by the scale for values which pass the
- * boolean comparison.
- * @return Returns the color as a <code>Color</code> object.
- */
- public Color getActiveColor() { return activeColor; }
-
- /**
- * <b>getBooleanType</b><br/><br/>
- * <code>public BooleanType <b>getBooleanType</b>()</code><br/><br/>
- * Indicates what type of boolean comparison is performed by this
- * scale.
- * @return Returns the type of comparison as a <code>BooleanType
- * </code> enumerable.
- */
- public BooleanType getBooleanType() { return boolType; }
-
- /**
- * <b>getComparisonValue</b><br/><br/>
- * <code>public double <b>getComparisonValue</b>()</code><br/><br/>
- * Gets the value against which the boolean comparisons are
- * performed.
- * @return Returns the value which is compared against.
- */
- public double getComparisonValue() { return value; }
-
- /**
- * <b>getInactiveColor</b><br/><br/>
- * <code>public Color <b>getInactiveColor</b>()</code><br/><br/>
- * Gets the color used by the scale for values which fail the
- * boolean comparison.
- * @return Returns the color as a <code>Color</code> object.
- */
- public Color getInactiveColor() { return inactiveColor; }
-
- /**
- * <b>setComparisonValue</b><br/><br/>
- * <code>public void <b>setComparisonValue</b>(double value)</code><br/><br/>
- * Sets the value against which the boolean comparison is performed.
- * @param value - The value to compare against.
- */
- public void setComparisonValue(double value) { this.value = value; }
-
- /**
- * <b>passes</b><br/><br/>
- * <code>private boolean <b>passes</b>(double d)</code><br/><br/>
- * Determines whether a given external value passes the boolean
- * check or not.
- * @param d - The external value to compare.
- * @return Returns <code>true</code> if the value passes the boolean
- * check and <code>false</code> if it does not.
- */
- private boolean passes(double d) {
- // Perform the appropriate comparison. Note that the default
- // case is included to satisfy the compiler -- it should not
- // ever actually be used.
- switch(boolType) {
- case EQUAL_TO:
- return d == value;
- case NOT_EQUAL_TO:
- return d != value;
- case GREATER_THAN:
- return d > value;
- case LESS_THAN:
- return d < value;
- case GREATER_THAN_OR_EQUAL_TO:
- return d >= value;
- case LESS_THAN_OR_EQUAL_TO:
- return d<= value;
- default:
- return false;
- }
- }
-
- /**
- * Enumerable <code>BooleanType</code> defines the type of boolean
- * comparison that is to be performed by the scale.
- */
- public enum BooleanType {
- /**
- * <b>EQUAL_TO</b><br/><br/>
- * Performs the boolean check:<br/><br/>
- * <code>[External Value] == [Comparison Value]</code>
- */
- EQUAL_TO,
-
- /**
- * <b>NOT_EQUAL_TO</b><br/><br/>
- * Performs the boolean check:<br/><br/>
- * <code>[External Value] != [Comparison Value]</code>
- */
- NOT_EQUAL_TO,
-
- /**
- * <b>GREATER_THAN</b><br/><br/>
- * Performs the boolean check:<br/><br/>
- * <code>[External Value] > [Comparison Value]</code>
- */
- GREATER_THAN,
-
- /**
- * <b>LESS_THAN</b><br/><br/>
- * Performs the boolean check:<br/><br/>
- * <code>[External Value] < [Comparison Value]</code>
- */
- LESS_THAN,
-
- /**
- * <b>GREATER_THAN_OR_EQUAL_TO</b><br/><br/>
- * Performs the boolean check:<br/><br/>
- * <code>[External Value] >= [Comparison Value]</code>
- */
- GREATER_THAN_OR_EQUAL_TO,
-
- /**
- * <b>LESS_THAN_OR_EQUAL_TO</b><br/><br/>
- * Performs the boolean check:<br/><br/>
- * <code>[External Value] <= [Comparison Value]</code>
- */
- LESS_THAN_OR_EQUAL_TO
- };
+ // The color to display for values which pass the boolean check.
+ private Color activeColor = new Color(255, 50, 50);
+ // The color to display for values that fail the boolean check.
+ private Color inactiveColor = Color.WHITE;
+ // The critical value against which the boolean check is performed.
+ private double value = 0.0;
+ // The type of this boolean scale.
+ private final BooleanType boolType;
+
+ /**
+ * <b>BooleanMap</b><br/><br/>
+ * <code>public <b>BooleanMap</b>(BooleanType type, double comparisonValue)</code><br/><br/>
+ * Defines a <code>ColorScale</code> which maps values to colors
+ * based on a boolean comparison.
+ * @param type - The type of boolean comparison to perform.
+ * @param comparisonValue - The value against which the comparison
+ * should be made.
+ */
+ public BooleanMap(BooleanType type, double comparisonValue) {
+ // Make sure the comparison type is not null.
+ if(type == null) { throw new IllegalArgumentException("Boolean comparison type can not be null."); }
+
+ // Define the critical value and the boolean type.
+ value = comparisonValue;
+ boolType = type;
+ }
+
+ /**
+ * <b>BooleanMap</b><br/><br/>
+ * <code>public <b>BooleanMap</b>(BooleanType type, double comparisonValue,
+ * Color activeColor)</code><br/><br/>
+ * Defines a <code>ColorScale</code> which maps values to colors
+ * based on a boolean comparison.
+ * @param type - The type of boolean comparison to perform.
+ * @param comparisonValue - The value against which the comparison
+ * should be made.
+ * @param activeColor - The color in which values that pass the
+ * comparison should be displayed.
+ */
+ public BooleanMap(BooleanType type, double comparisonValue, Color activeColor) {
+ // Set the critical value and the boolean type.
+ this(type, comparisonValue);
+
+ // Set the active color.
+ this.activeColor = activeColor;
+ }
+
+ /**
+ * <b>BooleanMap</b><br/><br/>
+ * <code>public <b>BooleanMap</b>(BooleanType type, double comparisonValue,
+ * Color activeColor, Color inactiveColor)</code><br/><br/>
+ * Defines a <code>ColorScale</code> which maps values to colors
+ * based on a boolean comparison.
+ * @param type - The type of boolean comparison to perform.
+ * @param comparisonValue - The value against which the comparison
+ * should be made.
+ * @param activeColor - The color in which values that pass the
+ * comparison should be displayed.
+ * @param inactiveColor - The color in which values that fail the
+ * comparison should be displayed.
+ */
+ public BooleanMap(BooleanType type, double comparisonValue, Color activeColor, Color inactiveColor) {
+ // Set the critical value and the boolean type.
+ this(type, comparisonValue);
+
+ // Set the active and inactive colors.
+ this.activeColor = activeColor;
+ this.inactiveColor = inactiveColor;
+ }
+
+ public Color getColor(Double value) {
+ // If the argument is null, treat it is zero.
+ if(value == null) { value = 0.0; }
+
+ // If it passes the boolean comparison, return the active color.
+ if(passes(value)) { return activeColor; }
+
+ // Otherwise, return the inactive color.
+ else { return inactiveColor; }
+ }
+
+ /**
+ * <b>getActiveColor</b><br/><br/>
+ * <code>public Color <b>getActiveColor</b>()</code><br/><br/>
+ * Gets the color used by the scale for values which pass the
+ * boolean comparison.
+ * @return Returns the color as a <code>Color</code> object.
+ */
+ public Color getActiveColor() { return activeColor; }
+
+ /**
+ * <b>getBooleanType</b><br/><br/>
+ * <code>public BooleanType <b>getBooleanType</b>()</code><br/><br/>
+ * Indicates what type of boolean comparison is performed by this
+ * scale.
+ * @return Returns the type of comparison as a <code>BooleanType
+ * </code> enumerable.
+ */
+ public BooleanType getBooleanType() { return boolType; }
+
+ /**
+ * <b>getComparisonValue</b><br/><br/>
+ * <code>public double <b>getComparisonValue</b>()</code><br/><br/>
+ * Gets the value against which the boolean comparisons are
+ * performed.
+ * @return Returns the value which is compared against.
+ */
+ public double getComparisonValue() { return value; }
+
+ /**
+ * <b>getInactiveColor</b><br/><br/>
+ * <code>public Color <b>getInactiveColor</b>()</code><br/><br/>
+ * Gets the color used by the scale for values which fail the
+ * boolean comparison.
+ * @return Returns the color as a <code>Color</code> object.
+ */
+ public Color getInactiveColor() { return inactiveColor; }
+
+ /**
+ * <b>setComparisonValue</b><br/><br/>
+ * <code>public void <b>setComparisonValue</b>(double value)</code><br/><br/>
+ * Sets the value against which the boolean comparison is performed.
+ * @param value - The value to compare against.
+ */
+ public void setComparisonValue(double value) { this.value = value; }
+
+ /**
+ * <b>passes</b><br/><br/>
+ * <code>private boolean <b>passes</b>(double d)</code><br/><br/>
+ * Determines whether a given external value passes the boolean
+ * check or not.
+ * @param d - The external value to compare.
+ * @return Returns <code>true</code> if the value passes the boolean
+ * check and <code>false</code> if it does not.
+ */
+ private boolean passes(double d) {
+ // Perform the appropriate comparison. Note that the default
+ // case is included to satisfy the compiler -- it should not
+ // ever actually be used.
+ switch(boolType) {
+ case EQUAL_TO:
+ return d == value;
+ case NOT_EQUAL_TO:
+ return d != value;
+ case GREATER_THAN:
+ return d > value;
+ case LESS_THAN:
+ return d < value;
+ case GREATER_THAN_OR_EQUAL_TO:
+ return d >= value;
+ case LESS_THAN_OR_EQUAL_TO:
+ return d<= value;
+ default:
+ return false;
+ }
+ }
+
+ /**
+ * Enumerable <code>BooleanType</code> defines the type of boolean
+ * comparison that is to be performed by the scale.
+ */
+ public enum BooleanType {
+ /**
+ * <b>EQUAL_TO</b><br/><br/>
+ * Performs the boolean check:<br/><br/>
+ * <code>[External Value] == [Comparison Value]</code>
+ */
+ EQUAL_TO,
+
+ /**
+ * <b>NOT_EQUAL_TO</b><br/><br/>
+ * Performs the boolean check:<br/><br/>
+ * <code>[External Value] != [Comparison Value]</code>
+ */
+ NOT_EQUAL_TO,
+
+ /**
+ * <b>GREATER_THAN</b><br/><br/>
+ * Performs the boolean check:<br/><br/>
+ * <code>[External Value] > [Comparison Value]</code>
+ */
+ GREATER_THAN,
+
+ /**
+ * <b>LESS_THAN</b><br/><br/>
+ * Performs the boolean check:<br/><br/>
+ * <code>[External Value] < [Comparison Value]</code>
+ */
+ LESS_THAN,
+
+ /**
+ * <b>GREATER_THAN_OR_EQUAL_TO</b><br/><br/>
+ * Performs the boolean check:<br/><br/>
+ * <code>[External Value] >= [Comparison Value]</code>
+ */
+ GREATER_THAN_OR_EQUAL_TO,
+
+ /**
+ * <b>LESS_THAN_OR_EQUAL_TO</b><br/><br/>
+ * Performs the boolean check:<br/><br/>
+ * <code>[External Value] <= [Comparison Value]</code>
+ */
+ LESS_THAN_OR_EQUAL_TO
+ };
}
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/ColorScale.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/ColorScale.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/ColorScale.java Wed Apr 27 11:11:32 2016
@@ -47,8 +47,8 @@
* value, if scaling is logarithmic.
*/
public double getScaledMaximum() {
- if(linear) { return max; }
- else { return lMax; }
+ if(linear) { return max; }
+ else { return lMax; }
}
/**
@@ -61,8 +61,8 @@
* value, if scaling is logarithmic.
*/
public double getScaledMinimum() {
- if(linear) { return min; }
- else { return lMin; }
+ if(linear) { return min; }
+ else { return lMin; }
}
/**
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/CrystalEvent.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/CrystalEvent.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/CrystalEvent.java Wed Apr 27 11:11:32 2016
@@ -14,41 +14,41 @@
* @author Kyle McCarty
*/
public class CrystalEvent extends AWTEvent {
- private static final long serialVersionUID = 77198267255387212L;
- // Stores the location of the triggering crystal.
- private final Point crystal;
- // The AWTEvent id for this event.
- private static final int AWT_ID = AWTEvent.RESERVED_ID_MAX + 10;
-
- /**
- * <b>CrystalEvent</b><br/><br/>
- * <code>public <b>CrystalEvent</b>(Viewer parent, Point triggerCrystal)</code><br/><br/>
- * Creates a crystal event for the indicated crystal and triggering
- * component.
- * @param source - The triggering component.
- * @param triggerCrystal - The crystal associated with the event.
- * @throws IllegalArgumentException Occurs if the associated crystal
- * is <code>null</code>.
- */
- public CrystalEvent(Viewer source, Point triggerCrystal) throws IllegalArgumentException {
- // Run the superclass constructor.
- super(source, AWT_ID);
-
- // Make sure that the trigger crystal is not null.
- if(triggerCrystal == null) {
- throw new IllegalArgumentException("Crystal events can not occur with respect to non-exstant crystals.");
- }
-
- // Define the event parameters.
- crystal = triggerCrystal;
- }
-
- /**
- * <b>getCrystalID</b><br/><br/>
- * <code>public Point <b>getCrystalID</b>()</code><br/><br/>
- * Indicates the panel indices at which the crystal is located.
- * @return Returns the crystal's panel indices as a <code>Point
- * </code> object.
- */
- public Point getCrystalID() { return crystal; }
+ private static final long serialVersionUID = 77198267255387212L;
+ // Stores the location of the triggering crystal.
+ private final Point crystal;
+ // The AWTEvent id for this event.
+ private static final int AWT_ID = AWTEvent.RESERVED_ID_MAX + 10;
+
+ /**
+ * <b>CrystalEvent</b><br/><br/>
+ * <code>public <b>CrystalEvent</b>(Viewer parent, Point triggerCrystal)</code><br/><br/>
+ * Creates a crystal event for the indicated crystal and triggering
+ * component.
+ * @param source - The triggering component.
+ * @param triggerCrystal - The crystal associated with the event.
+ * @throws IllegalArgumentException Occurs if the associated crystal
+ * is <code>null</code>.
+ */
+ public CrystalEvent(Viewer source, Point triggerCrystal) throws IllegalArgumentException {
+ // Run the superclass constructor.
+ super(source, AWT_ID);
+
+ // Make sure that the trigger crystal is not null.
+ if(triggerCrystal == null) {
+            throw new IllegalArgumentException("Crystal events can not occur with respect to non-existent crystals.");
+ }
+
+ // Define the event parameters.
+ crystal = triggerCrystal;
+ }
+
+ /**
+ * <b>getCrystalID</b><br/><br/>
+ * <code>public Point <b>getCrystalID</b>()</code><br/><br/>
+ * Indicates the panel indices at which the crystal is located.
+ * @return Returns the crystal's panel indices as a <code>Point
+ * </code> object.
+ */
+ public Point getCrystalID() { return crystal; }
}
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/CrystalListener.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/CrystalListener.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/CrystalListener.java Wed Apr 27 11:11:32 2016
@@ -11,27 +11,27 @@
* @author Kyle McCarty
*/
public interface CrystalListener extends EventListener {
- /**
- * <b>crystalActivated</b><br/><br/>
- * <code>public void <b>crystalActivated</b>(CrystalEvent e)</code><br/><br/>
- * Invoked when a crystal becomes highlighted.
- * @param e - An object describing the event.
- */
- public void crystalActivated(CrystalEvent e);
-
- /**
- * <b>crystalDeactivated</b><br/><br/>
- * <code>public void <b>crystalDeactivated</b>(CrystalEvent e)</code><br/><br/>
- * Invoked when a crystal ceases to be highlighted.
- * @param e - An object describing the event.
- */
- public void crystalDeactivated(CrystalEvent e);
-
- /**
- * <b>crystalClicked</b><br/><br/>
- * <code>public void <b>crystalClicked</b>(CrystalEvent e)</code><br/><br/>
- * Invoked when a crystal is clicked
- * @param e - An object describing the event.
- */
- public void crystalClicked(CrystalEvent e);
+ /**
+ * <b>crystalActivated</b><br/><br/>
+ * <code>public void <b>crystalActivated</b>(CrystalEvent e)</code><br/><br/>
+ * Invoked when a crystal becomes highlighted.
+ * @param e - An object describing the event.
+ */
+ public void crystalActivated(CrystalEvent e);
+
+ /**
+ * <b>crystalDeactivated</b><br/><br/>
+ * <code>public void <b>crystalDeactivated</b>(CrystalEvent e)</code><br/><br/>
+ * Invoked when a crystal ceases to be highlighted.
+ * @param e - An object describing the event.
+ */
+ public void crystalDeactivated(CrystalEvent e);
+
+ /**
+ * <b>crystalClicked</b><br/><br/>
+ * <code>public void <b>crystalClicked</b>(CrystalEvent e)</code><br/><br/>
+ * Invoked when a crystal is clicked
+ * @param e - An object describing the event.
+ */
+ public void crystalClicked(CrystalEvent e);
}
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/DatabaseCheck.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/DatabaseCheck.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/DatabaseCheck.java Wed Apr 27 11:11:32 2016
@@ -17,122 +17,122 @@
import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
public class DatabaseCheck {
- private static final Set<Integer> idFailSet = new HashSet<Integer>();
- private static final Set<Point> pointFailSet = new HashSet<Point>();
-
- public static void main(String[] args) throws ConditionsNotFoundException, IOException {
- // Check that an appropriate file has been given.
- String filepath = null;
- if(args.length == 1) {
- filepath = args[0];
- }
-
- // If no file path was defined, throw an error.
- if(filepath == null) {
- throw new FileNotFoundException("No CSV mapping file defined.");
- }
-
- // Initialize the local database.
- EcalWiringManager manager = new EcalWiringManager(filepath);
-
- // Initialize the database.
- int runNumber = 2000;
- String detectorName = "HPS-Proposal2014-v7-2pt2";
- DatabaseConditionsManager conditionsManager = DatabaseConditionsManager.getInstance();
- conditionsManager.setDetector(detectorName, runNumber);
-
- // Get ECAL conditions.
- EcalConditions ecalConditions = DatabaseConditionsManager.getInstance().getEcalConditions();
-
- // Get the list of EcalChannel objects.
- EcalChannelCollection channels = ecalConditions.getChannelCollection();
- EcalLedCollection leds = conditionsManager.getCachedConditions(EcalLedCollection.class, "ecal_leds").getCachedData();
-
- // Map the LED objects to their channels.
- Map<Integer, EcalLed> ledMap = new HashMap<Integer, EcalLed>();
+ private static final Set<Integer> idFailSet = new HashSet<Integer>();
+ private static final Set<Point> pointFailSet = new HashSet<Point>();
+
+ public static void main(String[] args) throws ConditionsNotFoundException, IOException {
+ // Check that an appropriate file has been given.
+ String filepath = null;
+ if(args.length == 1) {
+ filepath = args[0];
+ }
+
+ // If no file path was defined, throw an error.
+ if(filepath == null) {
+ throw new FileNotFoundException("No CSV mapping file defined.");
+ }
+
+ // Initialize the local database.
+ EcalWiringManager manager = new EcalWiringManager(filepath);
+
+ // Initialize the database.
+ int runNumber = 2000;
+ String detectorName = "HPS-Proposal2014-v7-2pt2";
+ DatabaseConditionsManager conditionsManager = DatabaseConditionsManager.getInstance();
+ conditionsManager.setDetector(detectorName, runNumber);
+
+ // Get ECAL conditions.
+ EcalConditions ecalConditions = DatabaseConditionsManager.getInstance().getEcalConditions();
+
+ // Get the list of EcalChannel objects.
+ EcalChannelCollection channels = ecalConditions.getChannelCollection();
+ EcalLedCollection leds = conditionsManager.getCachedConditions(EcalLedCollection.class, "ecal_leds").getCachedData();
+
+ // Map the LED objects to their channels.
+ Map<Integer, EcalLed> ledMap = new HashMap<Integer, EcalLed>();
for (EcalLed led : leds) {
- ledMap.put(led.getEcalChannelId(), led);
+ ledMap.put(led.getEcalChannelId(), led);
}
-
- // Perform the comparison test.
- for(EcalChannel channel : channels) {
- // Get the crystal point information.
- Point crystal = new Point(channel.getX(), channel.getY());
-
- // Get the data from manager.
- CrystalDataSet data = manager.getCrystalData(crystal);
-
- // Get the appropriate LED collection.
- EcalLed led = ledMap.get(channel.getChannelId());
-
- // Perform the comparison.
- System.out.printf("Checking Mappings for Crystal (%3d, %3d):%n", crystal.x, crystal.y);
- System.out.printf("\tChannel ID :: %d%n", channel.getChannelId());
-
- System.out.printf("\tChannel [ %3d ] vs [ %3d ] ... ", channel.getChannel(), data.getFADCChannel());
- if(channel.getChannel() == data.getFADCChannel()) {
- System.out.printf("[ Success ]%n");
- } else {
- System.out.printf("[ Failure ]%n");
- idFailSet.add(channel.getChannelId());
- pointFailSet.add(crystal);
- }
-
- int crate = data.getMotherboard().isTop() ? 1 : 2;
- System.out.printf("\tCrate [ %3d ] vs [ %3d ] ... ", channel.getCrate(), crate);
- if(channel.getCrate() == crate) {
- System.out.printf("[ Success ]%n");
- } else {
- System.out.printf("[ Failure ]%n");
- idFailSet.add(channel.getChannelId());
- pointFailSet.add(crystal);
- }
-
- System.out.printf("\tSlot [ %3d ] vs [ %3d ] ... ", channel.getSlot(), data.getFADCSlot());
- if(channel.getSlot() == data.getFADCSlot()) {
- System.out.printf("[ Success ]%n");
- } else {
- System.out.printf("[ Failure ]%n");
- idFailSet.add(channel.getChannelId());
- pointFailSet.add(crystal);
- }
-
- System.out.printf("\tLED Channel [ %3d ] vs [ %3d ] ... ", led.getLedNumber(), data.getLEDChannel());
- if(led.getLedNumber() == data.getLEDChannel()) {
- System.out.printf("[ Success ]%n");
- } else {
- System.out.printf("[ Failure ]%n");
- idFailSet.add(channel.getChannelId());
- pointFailSet.add(crystal);
- }
-
- System.out.printf("\tLED Crate [ %3d ] vs [ %3d ] ... ", led.getCrateNumber(), crate);
- if(led.getCrateNumber() == crate) {
- System.out.printf("[ Success ]%n");
- } else {
- System.out.printf("[ Failure ]%n");
- idFailSet.add(channel.getChannelId());
- pointFailSet.add(crystal);
- }
-
- System.out.println();
- System.out.println();
- }
-
- // Print out the failing crystals.
- System.out.println("Crystals that Failed:");
- for(Point fail : pointFailSet) {
- System.out.printf("\tCrystal (%3d, %3d)%n", fail.x, fail.y);
- }
- if(pointFailSet.isEmpty()) {
- System.out.println("\tNone!");
- }
-
- // Indicate the database connection settings.
- System.out.println("\n");
- System.out.printf("Detector :: %s%n", detectorName);
- System.out.printf("Run Number :: %d%n", runNumber);
- System.out.printf("Channel Collection :: %d%n", channels.getCollectionId());
- System.out.printf("LED Collection :: %d%n", leds.getCollectionId());
- }
+
+ // Perform the comparison test.
+ for(EcalChannel channel : channels) {
+ // Get the crystal point information.
+ Point crystal = new Point(channel.getX(), channel.getY());
+
+ // Get the data from manager.
+ CrystalDataSet data = manager.getCrystalData(crystal);
+
+ // Get the appropriate LED collection.
+ EcalLed led = ledMap.get(channel.getChannelId());
+
+ // Perform the comparison.
+ System.out.printf("Checking Mappings for Crystal (%3d, %3d):%n", crystal.x, crystal.y);
+ System.out.printf("\tChannel ID :: %d%n", channel.getChannelId());
+
+ System.out.printf("\tChannel [ %3d ] vs [ %3d ] ... ", channel.getChannel(), data.getFADCChannel());
+ if(channel.getChannel() == data.getFADCChannel()) {
+ System.out.printf("[ Success ]%n");
+ } else {
+ System.out.printf("[ Failure ]%n");
+ idFailSet.add(channel.getChannelId());
+ pointFailSet.add(crystal);
+ }
+
+ int crate = data.getMotherboard().isTop() ? 1 : 2;
+ System.out.printf("\tCrate [ %3d ] vs [ %3d ] ... ", channel.getCrate(), crate);
+ if(channel.getCrate() == crate) {
+ System.out.printf("[ Success ]%n");
+ } else {
+ System.out.printf("[ Failure ]%n");
+ idFailSet.add(channel.getChannelId());
+ pointFailSet.add(crystal);
+ }
+
+ System.out.printf("\tSlot [ %3d ] vs [ %3d ] ... ", channel.getSlot(), data.getFADCSlot());
+ if(channel.getSlot() == data.getFADCSlot()) {
+ System.out.printf("[ Success ]%n");
+ } else {
+ System.out.printf("[ Failure ]%n");
+ idFailSet.add(channel.getChannelId());
+ pointFailSet.add(crystal);
+ }
+
+ System.out.printf("\tLED Channel [ %3d ] vs [ %3d ] ... ", led.getLedNumber(), data.getLEDChannel());
+ if(led.getLedNumber() == data.getLEDChannel()) {
+ System.out.printf("[ Success ]%n");
+ } else {
+ System.out.printf("[ Failure ]%n");
+ idFailSet.add(channel.getChannelId());
+ pointFailSet.add(crystal);
+ }
+
+ System.out.printf("\tLED Crate [ %3d ] vs [ %3d ] ... ", led.getCrateNumber(), crate);
+ if(led.getCrateNumber() == crate) {
+ System.out.printf("[ Success ]%n");
+ } else {
+ System.out.printf("[ Failure ]%n");
+ idFailSet.add(channel.getChannelId());
+ pointFailSet.add(crystal);
+ }
+
+ System.out.println();
+ System.out.println();
+ }
+
+ // Print out the failing crystals.
+ System.out.println("Crystals that Failed:");
+ for(Point fail : pointFailSet) {
+ System.out.printf("\tCrystal (%3d, %3d)%n", fail.x, fail.y);
+ }
+ if(pointFailSet.isEmpty()) {
+ System.out.println("\tNone!");
+ }
+
+ // Indicate the database connection settings.
+ System.out.println("\n");
+ System.out.printf("Detector :: %s%n", detectorName);
+ System.out.printf("Run Number :: %d%n", runNumber);
+ System.out.printf("Channel Collection :: %d%n", channels.getCollectionId());
+ System.out.printf("LED Collection :: %d%n", leds.getCollectionId());
+ }
}
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/GradientScale.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/GradientScale.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/GradientScale.java Wed Apr 27 11:11:32 2016
@@ -21,9 +21,9 @@
private int[] drgb = { 255, 255, 255 };
public Color getColor(Double value) {
- // If the argument is null, treat it as zero.
- if(value == null) { value = 0.0; }
-
+ // If the argument is null, treat it as zero.
+ if(value == null) { value = 0.0; }
+
// If the value is less than the minimum, return the cold color.
if (value < min) { return coldColor; }
Modified: java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/MultiGradientScale.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/MultiGradientScale.java (original)
+++ java/branches/HPSJAVA-409/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/util/MultiGradientScale.java Wed Apr 27 11:11:32 2016
@@ -29,9 +29,9 @@
}
public Color getColor(Double value) {
- // If the value is null, treat it as zero.
- if(value == null) { value = 0.0; }
-
+ // If the value is null, treat it as zero.
+ if(value == null) { value = 0.0; }
+
// Get the number of colors and scales.
int colors = colorList.size();
int scales = scaleList.size();
@@ -48,7 +48,7 @@
else { sValue = Math.log10(scale * value); }
if(value < 1 && (Double.isNaN(sValue) || Double.isInfinite(sValue))) {
- return scaleList.get(0).getColor(0.0);
+ return scaleList.get(0).getColor(0.0);
}
// Otherwise, determine which scale should get the value.
Modified: java/branches/HPSJAVA-409/ecal-readout-sim/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/ecal-readout-sim/pom.xml (original)
+++ java/branches/HPSJAVA-409/ecal-readout-sim/pom.xml Wed Apr 27 11:11:32 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/ecal-readout-sim/</url>
Modified: java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/ClockSingleton.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/ClockSingleton.java (original)
+++ java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/ClockSingleton.java Wed Apr 27 11:11:32 2016
@@ -13,35 +13,35 @@
*/
public class ClockSingleton {
- public static final ClockSingleton _instance = new ClockSingleton();
- private int clock;
- //time between events (bunch spacing)
- private double dt = 2.0;
+ public static final ClockSingleton _instance = new ClockSingleton();
+ private int clock;
+ //time between events (bunch spacing)
+ private double dt = 2.0;
- private ClockSingleton() {
- }
+ private ClockSingleton() {
+ }
- public static void init() {
- _instance.clock = 0;
- }
+ public static void init() {
+ _instance.clock = 0;
+ }
- public static int getClock() {
- return _instance.clock;
- }
+ public static int getClock() {
+ return _instance.clock;
+ }
- public static double getTime() {
- return _instance.dt * _instance.clock;
- }
+ public static double getTime() {
+ return _instance.dt * _instance.clock;
+ }
- public static double getDt() {
- return _instance.dt;
- }
+ public static double getDt() {
+ return _instance.dt;
+ }
- public static void setDt(double dt) {
- _instance.dt = dt;
- }
+ public static void setDt(double dt) {
+ _instance.dt = dt;
+ }
- public static void step() {
- _instance.clock++;
- }
+ public static void step() {
+ _instance.clock++;
+ }
}
Modified: java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCEcalReadoutDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCEcalReadoutDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCEcalReadoutDriver.java Wed Apr 27 11:11:32 2016
@@ -310,7 +310,7 @@
/**
* Return the map of preamp signal buffers. For debug only.
*
- * @return
+ * @return the map of preamp signal buffers
*/
public Map<Long, RingBuffer> getSignalMap() {
return analogPipelines;
@@ -319,7 +319,7 @@
/**
* Return the map of FADC pipelines. For debug only.
*
- * @return
+ * @return the map of FADC pipelines
*/
public Map<Long, FADCPipeline> getPipelineMap() {
return digitalPipelines;
@@ -383,7 +383,7 @@
}
} else {
if (pedestalSubtractedValue < triggerThreshold || triggerPathHitTimes.get(cellID) + delay0 == readoutCounter) {
-// System.out.printf("sum = %f\n",sum);
+// System.out.printf("sum = %f\n",sum);
triggerPathDelayQueue.add(new BaseRawCalorimeterHit(cellID,
(int) Math.round((sum + pedestalSubtractedValue) / scaleFactor),
64 * triggerPathHitTimes.get(cellID)));
@@ -460,15 +460,15 @@
short[] adcValues = new short[readoutWindow];
for (int i = 0; i < readoutWindow; i++) {
adcValues[i] = (short) pipeline.getValue(readoutLatency - i - 1);
-// if (adcValues[i] != 0) {
-// System.out.println("getWindow: " + adcValues[i] + " at i = " + i);
-// }
+// if (adcValues[i] != 0) {
+// System.out.println("getWindow: " + adcValues[i] + " at i = " + i);
+// }
}
return adcValues;
}
protected List<RawTrackerHit> readWindow() {
-// System.out.println("Reading FADC data");
+// System.out.println("Reading FADC data");
List<RawTrackerHit> hits = new ArrayList<RawTrackerHit>();
for (Long cellID : digitalPipelines.keySet()) {
short[] adcValues = getWindow(cellID);
@@ -488,7 +488,7 @@
}
protected List<RawTrackerHit> readPulses() {
-// System.out.println("Reading FADC data");
+// System.out.println("Reading FADC data");
List<RawTrackerHit> hits = new ArrayList<RawTrackerHit>();
for (Long cellID : digitalPipelines.keySet()) {
short[] window = getWindow(cellID);
@@ -522,7 +522,7 @@
}
protected List<RawCalorimeterHit> readIntegrals() {
-// System.out.println("Reading FADC data");
+// System.out.println("Reading FADC data");
List<RawCalorimeterHit> hits = new ArrayList<RawCalorimeterHit>();
for (Long cellID : digitalPipelines.keySet()) {
short[] window = getWindow(cellID);
Modified: java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCPrimaryTriggerDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCPrimaryTriggerDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCPrimaryTriggerDriver.java Wed Apr 27 11:11:32 2016
@@ -34,7 +34,7 @@
private int pairCoincidence = 2; // Maximum allowed time difference between clusters. (4 ns clock-cycles)
private int backgroundLevel = -1; // Automatically sets the cuts to achieve a predetermined background rate.
private TriggerModule triggerModule = new TriggerModule(1.0, 0.050,
- 6.600, 0.010, 6.600, 0.000, 13.200, 6.600, 0.0, 360, 0.0055);
+ 6.600, 0.010, 6.600, 0.000, 13.200, 6.600, 0.0, 360, 0.0055);
// ==================================================================
// ==== Driver Internal Variables ===================================
@@ -77,55 +77,55 @@
*/
@Override
public void startOfData() {
- // Define plot type names.
- String[] plotType = new String[PLOT_COUNT];
- plotType[NO_CUTS] = "";
- plotType[ALL_CUTS] = " (Passed All Cuts)";
- plotType[OVER_1HIT] = " (More than 1 Hit)";
- plotType[OVER_2HIT] = " (More than 2 Hits)";
- plotType[SINGLES_CUTS] = " (Passed Single Cuts)";
-
- // Define plot type directories.
- String[] plotDir = new String[PLOT_COUNT];
- plotDir[NO_CUTS] = "NoCuts/";
- plotDir[ALL_CUTS] = "PassedAll/";
- plotDir[OVER_1HIT] = "2PlusHits/";
- plotDir[OVER_2HIT] = "3PlusHits/";
- plotDir[SINGLES_CUTS] = "PassedSingles/";
-
- // Instantiate the singles plot arrays.
- clusterSeedEnergy = new IHistogram1D[PLOT_COUNT];
- clusterHitCount = new IHistogram1D[PLOT_COUNT];
- clusterTotalEnergy = new IHistogram1D[PLOT_COUNT];
- clusterDistribution = new IHistogram2D[PLOT_COUNT];
-
- // Instantiate the pair plot arrays. Note that the pair cuts
- // only ever see clusters that pass the singles cuts, so the
- // "passed singles cuts" plots are meaningless. Thusly, the
- // pair plots have one fewer plot than the singles.
- pairEnergySum = new IHistogram1D[PLOT_COUNT - 1];
- pairEnergyDifference = new IHistogram1D[PLOT_COUNT - 1];
- pairCoplanarity = new IHistogram1D[PLOT_COUNT - 1];
- pairEnergySlope = new IHistogram1D[PLOT_COUNT - 1];
- pairEnergySum2DDistribution = new IHistogram2D[PLOT_COUNT - 1];
-
- // Instantiate the plots.
- for(int i = 0; i < PLOT_COUNT; i++) {
- System.out.println(plotDir[i] + "Cluster Seed Energy" + plotType[i]);
- clusterSeedEnergy[i] = aida.histogram1D(plotDir[i] + "Cluster Seed Energy" + plotType[i], 176, 0.0, 2.2);
- clusterHitCount[i] = aida.histogram1D(plotDir[i] + "Cluster Hit Count" + plotType[i], 9, 0.5, 9.5);
- clusterTotalEnergy[i] = aida.histogram1D(plotDir[i] + "Cluster Total Energy" + plotType[i], 176, 0.0, 2.2);
- clusterDistribution[i] = aida.histogram2D(plotDir[i] + "Cluster Seed" + plotType[i], 46, -23, 23, 11, -5.5, 5.5);
-
- if(i != PLOT_COUNT - 1) {
- pairEnergySum[i] = aida.histogram1D(plotDir[i] + "Pair Energy Sum" + plotType[i], 176, 0.0, 4.4);
- pairEnergyDifference[i] = aida.histogram1D(plotDir[i] + "Pair Energy Difference" + plotType[i], 176, 0.0, 2.2);
- pairCoplanarity[i] = aida.histogram1D(plotDir[i] + "Pair Coplanarity" + plotType[i], 180, 0.0, 180.0);
- pairEnergySlope[i] = aida.histogram1D(plotDir[i] + "Pair Energy Slope" + plotType[i], 200, 0.0, 4.0);
- pairEnergySum2DDistribution[i] = aida.histogram2D(plotDir[i] + "Pair Energy Sum 2D" + plotType[i], 176, 0.0, 4.4, 176, 0.0, 4.4);
- }
- }
-
+ // Define plot type names.
+ String[] plotType = new String[PLOT_COUNT];
+ plotType[NO_CUTS] = "";
+ plotType[ALL_CUTS] = " (Passed All Cuts)";
+ plotType[OVER_1HIT] = " (More than 1 Hit)";
+ plotType[OVER_2HIT] = " (More than 2 Hits)";
+ plotType[SINGLES_CUTS] = " (Passed Single Cuts)";
+
+ // Define plot type directories.
+ String[] plotDir = new String[PLOT_COUNT];
+ plotDir[NO_CUTS] = "NoCuts/";
+ plotDir[ALL_CUTS] = "PassedAll/";
+ plotDir[OVER_1HIT] = "2PlusHits/";
+ plotDir[OVER_2HIT] = "3PlusHits/";
+ plotDir[SINGLES_CUTS] = "PassedSingles/";
+
+ // Instantiate the singles plot arrays.
+ clusterSeedEnergy = new IHistogram1D[PLOT_COUNT];
+ clusterHitCount = new IHistogram1D[PLOT_COUNT];
+ clusterTotalEnergy = new IHistogram1D[PLOT_COUNT];
+ clusterDistribution = new IHistogram2D[PLOT_COUNT];
+
+ // Instantiate the pair plot arrays. Note that the pair cuts
+ // only ever see clusters that pass the singles cuts, so the
+ // "passed singles cuts" plots are meaningless. Thusly, the
+ // pair plots have one fewer plot than the singles.
+ pairEnergySum = new IHistogram1D[PLOT_COUNT - 1];
+ pairEnergyDifference = new IHistogram1D[PLOT_COUNT - 1];
+ pairCoplanarity = new IHistogram1D[PLOT_COUNT - 1];
+ pairEnergySlope = new IHistogram1D[PLOT_COUNT - 1];
+ pairEnergySum2DDistribution = new IHistogram2D[PLOT_COUNT - 1];
+
+ // Instantiate the plots.
+ for(int i = 0; i < PLOT_COUNT; i++) {
+ System.out.println(plotDir[i] + "Cluster Seed Energy" + plotType[i]);
+ clusterSeedEnergy[i] = aida.histogram1D(plotDir[i] + "Cluster Seed Energy" + plotType[i], 176, 0.0, 2.2);
+ clusterHitCount[i] = aida.histogram1D(plotDir[i] + "Cluster Hit Count" + plotType[i], 9, 0.5, 9.5);
+ clusterTotalEnergy[i] = aida.histogram1D(plotDir[i] + "Cluster Total Energy" + plotType[i], 176, 0.0, 2.2);
+ clusterDistribution[i] = aida.histogram2D(plotDir[i] + "Cluster Seed" + plotType[i], 46, -23, 23, 11, -5.5, 5.5);
+
+ if(i != PLOT_COUNT - 1) {
+ pairEnergySum[i] = aida.histogram1D(plotDir[i] + "Pair Energy Sum" + plotType[i], 176, 0.0, 4.4);
+ pairEnergyDifference[i] = aida.histogram1D(plotDir[i] + "Pair Energy Difference" + plotType[i], 176, 0.0, 2.2);
+ pairCoplanarity[i] = aida.histogram1D(plotDir[i] + "Pair Coplanarity" + plotType[i], 180, 0.0, 180.0);
+ pairEnergySlope[i] = aida.histogram1D(plotDir[i] + "Pair Energy Slope" + plotType[i], 200, 0.0, 4.0);
+ pairEnergySum2DDistribution[i] = aida.histogram2D(plotDir[i] + "Pair Energy Sum 2D" + plotType[i], 176, 0.0, 4.4, 176, 0.0, 4.4);
+ }
+ }
+
// Make sure that a valid cluster collection name has been
// defined. If it has not, throw an exception.
if (clusterCollectionName == null) {
@@ -239,15 +239,15 @@
// Fill the hit count plots for N > 1.
if(hitCount > 1) {
- // Populate the plots.
+ // Populate the plots.
clusterSeedEnergy[OVER_1HIT].fill(seedEnergy);
clusterTotalEnergy[OVER_1HIT].fill(clusterEnergy);
clusterHitCount[OVER_1HIT].fill(hitCount);
clusterDistribution[OVER_1HIT].fill(ix, iy);
-
+
// Fill the hit count plots for N > 2.
if(hitCount > 2) {
- // Populate the plots.
+ // Populate the plots.
clusterSeedEnergy[OVER_2HIT].fill(seedEnergy);
clusterTotalEnergy[OVER_2HIT].fill(clusterEnergy);
clusterHitCount[OVER_2HIT].fill(hitCount);
@@ -260,14 +260,14 @@
// VERBOSE :: Print the seed energy comparison check.
if(verbose) {
System.out.printf("\tSeed Energy Cut :: %.3f < %.3f < %.3f --> %b%n",
- triggerModule.getCutValue(TriggerModule.CLUSTER_SEED_ENERGY_LOW), seedEnergy,
- triggerModule.getCutValue(TriggerModule.CLUSTER_SEED_ENERGY_HIGH),
- triggerModule.clusterSeedEnergyCut(cluster));
+ triggerModule.getCutValue(TriggerModule.CLUSTER_SEED_ENERGY_LOW), seedEnergy,
+ triggerModule.getCutValue(TriggerModule.CLUSTER_SEED_ENERGY_HIGH),
+ triggerModule.clusterSeedEnergyCut(cluster));
}
// If the cluster fails the cut, skip to the next cluster.
if(!triggerModule.clusterSeedEnergyCut(cluster)) {
- continue clusterLoop;
+ continue clusterLoop;
}
// Otherwise, note that it passed the cut.
@@ -278,13 +278,13 @@
// VERBOSE :: Print the hit count comparison check.
if(verbose) {
System.out.printf("\tHit Count Cut :: %d >= %.0f --> %b%n",
- hitCount, triggerModule.getCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW),
- triggerModule.clusterHitCountCut(cluster));
+ hitCount, triggerModule.getCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW),
+ triggerModule.clusterHitCountCut(cluster));
}
// If the cluster fails the cut, skip to the next cluster.
if(!triggerModule.clusterHitCountCut(cluster)) {
- continue clusterLoop;
+ continue clusterLoop;
}
// Otherwise, note that it passed the cut.
@@ -295,14 +295,14 @@
// VERBOSE :: Print the cluster energy comparison check.
if(verbose) {
System.out.printf("\tCluster Energy Cut :: %.3f < %.3f < %.3f --> %b%n",
- triggerModule.getCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW), clusterEnergy,
- triggerModule.getCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH),
- triggerModule.clusterTotalEnergyCut(cluster));
+ triggerModule.getCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW), clusterEnergy,
+ triggerModule.getCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH),
+ triggerModule.clusterTotalEnergyCut(cluster));
}
// If the cluster fails the cut, skip to the next cluster.
if(!triggerModule.clusterTotalEnergyCut(cluster)) {
- continue clusterLoop;
+ continue clusterLoop;
}
// Otherwise, note that it passed the cut.
@@ -368,7 +368,7 @@
* Sets the maximum deviation from coplanarity that a cluster pair
* may possess and still pass the coplanarity pair cut. Value uses
* units of degrees.
- * @param maxCoplanarityAngle - The parameter value.
+ * @param coplanarityHigh - The parameter value.
*/
public void setCoplanarityHigh(double coplanarityHigh) {
triggerModule.setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, coplanarityHigh);
@@ -407,7 +407,7 @@
* Sets the lowest allowed energy a cluster pair may have and
* still pass the cluster pair energy sum cluster cut. Value uses
* units of GeV.
- * @param energySumHigh - The parameter value.
+ * @param energySumLow - The parameter value.
*/
public void setEnergySumLow(double energySumLow) {
triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, energySumLow * EcalUtils.GeV);
@@ -544,63 +544,63 @@
// Some cut values are almost always the same thing. Set those
// here and only overwrite if necessary.
- triggerModule.setCutValue(TriggerModule.CLUSTER_SEED_ENERGY_LOW, 0.125);
- triggerModule.setCutValue(TriggerModule.CLUSTER_SEED_ENERGY_HIGH, 1.300);
- triggerModule.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.200);
- triggerModule.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 1.700);
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.500);
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 2.000);
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 1.200);
- triggerModule.setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 30);
- triggerModule.setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 2);
+ triggerModule.setCutValue(TriggerModule.CLUSTER_SEED_ENERGY_LOW, 0.125);
+ triggerModule.setCutValue(TriggerModule.CLUSTER_SEED_ENERGY_HIGH, 1.300);
+ triggerModule.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.200);
+ triggerModule.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 1.700);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.500);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 2.000);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 1.200);
+ triggerModule.setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 30);
+ triggerModule.setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 2);
// Set the variable values.
if(backgroundLevel == 1) {
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 1.000);
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 1.2);
- triggerModule.setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 20);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 1.000);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 1.2);
+ triggerModule.setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 20);
} else if(backgroundLevel == 2) {
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 1.0);
- triggerModule.setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 20);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 1.0);
+ triggerModule.setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 20);
} else if(backgroundLevel == 3) {
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 1.0);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 1.0);
} else if(backgroundLevel == 4) {
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.8);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.8);
} else if(backgroundLevel == 5) {
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.8);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.8);
} else if(backgroundLevel == 6) {
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.6);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.6);
} else if(backgroundLevel == 7) {
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.6);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.6);
} else if(backgroundLevel == 8) {
- triggerModule.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 1.500);
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.4);
+ triggerModule.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 1.500);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.4);
} else if(backgroundLevel == 9) {
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.4);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.4);
} else if(backgroundLevel == 10) {
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.4);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.4);
} else if(backgroundLevel == 0) {
- triggerModule.setCutValue(TriggerModule.CLUSTER_SEED_ENERGY_LOW, 0.100);
- triggerModule.setCutValue(TriggerModule.CLUSTER_SEED_ENERGY_HIGH, 6.600);
- triggerModule.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.100);
- triggerModule.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 1.500);
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.000);
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 1.900);
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 2.200);
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 1.1);
- triggerModule.setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 35);
- triggerModule.setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 1);
+ triggerModule.setCutValue(TriggerModule.CLUSTER_SEED_ENERGY_LOW, 0.100);
+ triggerModule.setCutValue(TriggerModule.CLUSTER_SEED_ENERGY_HIGH, 6.600);
+ triggerModule.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.100);
+ triggerModule.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 1.500);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.000);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 1.900);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 2.200);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 1.1);
+ triggerModule.setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 35);
+ triggerModule.setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 1);
} else if(backgroundLevel == -1) {
- triggerModule.setCutValue(TriggerModule.CLUSTER_SEED_ENERGY_LOW, 0.050);
- triggerModule.setCutValue(TriggerModule.CLUSTER_SEED_ENERGY_HIGH, 6.600);
- triggerModule.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.010);
- triggerModule.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 6.600);
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.000);
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 13.200);
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 6.600);
- triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.0);
- triggerModule.setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 360);
- triggerModule.setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 1);
+ triggerModule.setCutValue(TriggerModule.CLUSTER_SEED_ENERGY_LOW, 0.050);
+ triggerModule.setCutValue(TriggerModule.CLUSTER_SEED_ENERGY_HIGH, 6.600);
+ triggerModule.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.010);
+ triggerModule.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 6.600);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.000);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 13.200);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 6.600);
+ triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.0);
+ triggerModule.setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 360);
+ triggerModule.setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 1);
}
}
@@ -636,16 +636,16 @@
// Fill the hit count plots for N > 1.
if(clusterPair[0].getCalorimeterHits().size() > 1 && clusterPair[1].getCalorimeterHits().size() > 1) {
- // Populate the plots.
+ // Populate the plots.
pairEnergySum[OVER_1HIT].fill(energySum);
pairEnergyDifference[OVER_1HIT].fill(energyDifference);
pairEnergySlope[OVER_1HIT].fill(energySlope);
pairCoplanarity[OVER_1HIT].fill(coplanarity);
pairEnergySum2DDistribution[OVER_1HIT].fill(clusterPair[0].getEnergy(), clusterPair[1].getEnergy());
-
+
// Fill the hit count plots for N > 2.
if(clusterPair[0].getCalorimeterHits().size() > 2 && clusterPair[1].getCalorimeterHits().size() > 2) {
- // Populate the plots.
+ // Populate the plots.
pairEnergySum[OVER_2HIT].fill(energySum);
pairEnergyDifference[OVER_2HIT].fill(energyDifference);
pairEnergySlope[OVER_2HIT].fill(energySlope);
@@ -658,7 +658,7 @@
// =============================================================
// If the cluster fails the cut, skip to the next pair.
if(!triggerModule.pairEnergySumCut(clusterPair)) {
- continue pairLoop;
+ continue pairLoop;
}
// Otherwise, note that it passed the cut.
@@ -668,7 +668,7 @@
// =============================================================
// If the cluster fails the cut, skip to the next pair.
if(!triggerModule.pairEnergyDifferenceCut(clusterPair)) {
- continue pairLoop;
+ continue pairLoop;
}
// Otherwise, note that it passed the cut.
@@ -678,7 +678,7 @@
// =============================================================
// If the cluster fails the cut, skip to the next pair.
if(!triggerModule.pairEnergySlopeCut(clusterPair)) {
- continue pairLoop;
+ continue pairLoop;
}
// Otherwise, note that it passed the cut.
@@ -688,7 +688,7 @@
// =============================================================
// If the cluster fails the cut, skip to the next pair.
if(!triggerModule.pairCoplanarityCut(clusterPair)) {
- continue pairLoop;
+ continue pairLoop;
}
// Otherwise, note that it passed the cut.
@@ -774,6 +774,6 @@
* @param cuts - The cut string.
*/
public void setCuts(String cuts) {
- triggerModule.setCutValues(false, cuts);
+ triggerModule.setCutValues(false, cuts);
}
}
Modified: java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCTriggerDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCTriggerDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCTriggerDriver.java Wed Apr 27 11:11:32 2016
@@ -532,7 +532,6 @@
/**
* Get a list of all unique cluster pairs in the event
*
- * @param ecalClusters : List of ECal clusters
* @return list of cluster pairs
*/
protected List<Cluster[]> getClusterPairsTopBot() {
@@ -584,7 +583,7 @@
* Checks if the ECal clusters making up a cluster pair both have at least
* the minimum number of hits.
*
- * @param clusterPair: pair of clusters
+ * @param clusterPair the pair of clusters
* @return true if pair passes cut, false if fail
*/
protected boolean clusterHitCount(Cluster[] clusterPair) {
Modified: java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCTriggerVariableDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCTriggerVariableDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCTriggerVariableDriver.java Wed Apr 27 11:11:32 2016
@@ -63,14 +63,14 @@
//System.out.printf("%d ecal clusters in event\n", clusters.size());
//System.out.printf("%s: %d clusters\n",this.getClass().getSimpleName(),clusters.size());
- //for(Cluster cl : clusters) {
- // System.out.printf("%s: cl E %f x %f y %f \n",this.getClass().getSimpleName(),cl.getEnergy(),cl.getPosition()[0],cl.getPosition()[1]);
- //}
- List<Cluster> unique_clusters = this.getUniqueClusters(clusters);
- //System.out.printf("%s: %d unique clusters\n",this.getClass().getSimpleName(),unique_clusters.size());
- //for(Cluster cl : unique_clusters) {
- // System.out.printf("%s: cl E %f x %f y %f \n",this.getClass().getSimpleName(),cl.getEnergy(),cl.getPosition()[0],cl.getPosition()[1]);
- //}
+ //for(Cluster cl : clusters) {
+ // System.out.printf("%s: cl E %f x %f y %f \n",this.getClass().getSimpleName(),cl.getEnergy(),cl.getPosition()[0],cl.getPosition()[1]);
+ //}
+ List<Cluster> unique_clusters = this.getUniqueClusters(clusters);
+ //System.out.printf("%s: %d unique clusters\n",this.getClass().getSimpleName(),unique_clusters.size());
+ //for(Cluster cl : unique_clusters) {
+ // System.out.printf("%s: cl E %f x %f y %f \n",this.getClass().getSimpleName(),cl.getEnergy(),cl.getPosition()[0],cl.getPosition()[1]);
+ //}
updateClusterQueues(unique_clusters);
List<Cluster[]> clusterPairs = getClusterPairsTopBot();
@@ -122,47 +122,47 @@
private List<Cluster> getUniqueClusters(List<Cluster> clusters) {
- List<Cluster> unique = new ArrayList<Cluster>();
- for(Cluster loop_cl : clusters) {
- ClusterCmp loop_clCmp = new ClusterCmp(loop_cl);
- boolean found = false;
- for(Cluster cl : unique) {
- if( loop_clCmp.compareTo(cl) == 0 ) {
- found = true;
- }
- }
- if( !found ) {
- unique.add(loop_cl);
- }
- }
- return unique;
+ List<Cluster> unique = new ArrayList<Cluster>();
+ for(Cluster loop_cl : clusters) {
+ ClusterCmp loop_clCmp = new ClusterCmp(loop_cl);
+ boolean found = false;
+ for(Cluster cl : unique) {
+ if( loop_clCmp.compareTo(cl) == 0 ) {
+ found = true;
+ }
+ }
+ if( !found ) {
+ unique.add(loop_cl);
+ }
+ }
+ return unique;
}
private static class ClusterCmp implements Comparable<Cluster> {
- private Cluster _cluster;
- public ClusterCmp(Cluster cl) {
- set_cluster(cl);
- }
- @Override
- public int compareTo(Cluster cl) {
- if(cl.getEnergy()==get_cluster().getEnergy() && cl.getPosition()[0]==get_cluster().getPosition()[0] && cl.getPosition()[1]==get_cluster().getPosition()[1] ) {
- return 0;
- } else {
- if( cl.getEnergy() > get_cluster().getEnergy()) {
- return 1;
- } else {
- return -1;
- }
- }
- }
- public Cluster get_cluster() {
- return _cluster;
- }
- public void set_cluster(Cluster _cluster) {
- this._cluster = _cluster;
- }
-
+ private Cluster _cluster;
+ public ClusterCmp(Cluster cl) {
+ set_cluster(cl);
+ }
+ @Override
+ public int compareTo(Cluster cl) {
+ if(cl.getEnergy()==get_cluster().getEnergy() && cl.getPosition()[0]==get_cluster().getPosition()[0] && cl.getPosition()[1]==get_cluster().getPosition()[1] ) {
+ return 0;
+ } else {
+ if( cl.getEnergy() > get_cluster().getEnergy()) {
+ return 1;
+ } else {
+ return -1;
+ }
+ }
+ }
+ public Cluster get_cluster() {
+ return _cluster;
+ }
+ public void set_cluster(Cluster _cluster) {
+ this._cluster = _cluster;
+ }
+
}
}
Modified: java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/MollerTriggerDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/MollerTriggerDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/MollerTriggerDriver.java Wed Apr 27 11:11:32 2016
@@ -56,8 +56,8 @@
@Override
public void process(EventHeader event) {
- // Run the superclass process event.
- super.process(event);
+ // Run the superclass process event.
+ super.process(event);
}
@Override
@@ -87,29 +87,29 @@
aMomentumAngle = aida.histogram2D("Trigger Plots :: Particle Momentum Distribution (t = 0, Passed All Cuts)", 500, -0.01, 0.06, 500, -0.04, 0.04);
// Add the allowed seed crystal positions to the seed set.
- if(useVersionOne) {
- // Add the allowed seed crystal positions to the seed set.
- // y = +/- 1, x = -11 -> -15
- for(int ix = -15; ix <= -11; ix++) {
- allowedSeedSet.add(new Point(ix, 1));
- allowedSeedSet.add(new Point(ix, -1));
- } // y = +/- 2, x = -9 -> -15
- for(int ix = -15; ix <= -9; ix++) {
- allowedSeedSet.add(new Point(ix, 2));
- allowedSeedSet.add(new Point(ix, -2));
- }
- }
- else {
- // y = +/- 1, x = -11 -> -13
- for(int ix = -13; ix <= -11; ix++) {
- allowedSeedSet.add(new Point(ix, 1));
- allowedSeedSet.add(new Point(ix, -1));
- } // y = +/- 2, x = -10 -> -14
- for(int ix = -14; ix <= -10; ix++) {
- allowedSeedSet.add(new Point(ix, 2));
- allowedSeedSet.add(new Point(ix, -2));
- }
- }
+ if(useVersionOne) {
+ // Add the allowed seed crystal positions to the seed set.
+ // y = +/- 1, x = -11 -> -15
+ for(int ix = -15; ix <= -11; ix++) {
+ allowedSeedSet.add(new Point(ix, 1));
+ allowedSeedSet.add(new Point(ix, -1));
+ } // y = +/- 2, x = -9 -> -15
+ for(int ix = -15; ix <= -9; ix++) {
+ allowedSeedSet.add(new Point(ix, 2));
+ allowedSeedSet.add(new Point(ix, -2));
+ }
+ }
+ else {
+ // y = +/- 1, x = -11 -> -13
+ for(int ix = -13; ix <= -11; ix++) {
+ allowedSeedSet.add(new Point(ix, 1));
+ allowedSeedSet.add(new Point(ix, -1));
+ } // y = +/- 2, x = -10 -> -14
+ for(int ix = -14; ix <= -10; ix++) {
+ allowedSeedSet.add(new Point(ix, 2));
+ allowedSeedSet.add(new Point(ix, -2));
+ }
+ }
}
@Override
@@ -201,31 +201,31 @@
// Require that the cluster pass each of the cuts in
// order to qualify for a trigger.
if(totalEnergyCut && seedEnergyCut && hitCountCut && positionCut) {
- // Increment the number of events that have passed
- // the cuts.
- passedEvents++;
-
- // If the number of passed events exceeds the prescaling
- // threshold, throw a trigger.
- if(passedEvents >= prescale) {
- // Reset the number of passed events.
- passedEvents = 0;
-
- // Add the clusters to the cut histograms.
- aClusterHitCount.fill(cluster.getCalorimeterHits().size());
- aClusterTotalEnergy.fill(cluster.getEnergy());
- aClusterSeedEnergy.fill(cluster.getCalorimeterHits().get(0).getCorrectedEnergy());
- aClusterDistribution.fill(ix > 0 ? ix - 1 : ix, iy, 1);
-
- // Increment the trigger count.
- triggers++;
-
- // VERBOSE :: Indicate that a trigger occurred.
- if(verbose) { System.out.printf("\tTriggered!%n%n"); }
-
- // Return a trigger.
- return true;
- }
+ // Increment the number of events that have passed
+ // the cuts.
+ passedEvents++;
+
+ // If the number of passed events exceeds the prescaling
+ // threshold, throw a trigger.
+ if(passedEvents >= prescale) {
+ // Reset the number of passed events.
+ passedEvents = 0;
+
+ // Add the clusters to the cut histograms.
+ aClusterHitCount.fill(cluster.getCalorimeterHits().size());
+ aClusterTotalEnergy.fill(cluster.getEnergy());
+ aClusterSeedEnergy.fill(cluster.getCalorimeterHits().get(0).getCorrectedEnergy());
+ aClusterDistribution.fill(ix > 0 ? ix - 1 : ix, iy, 1);
+
+ // Increment the trigger count.
+ triggers++;
+
+ // VERBOSE :: Indicate that a trigger occurred.
+ if(verbose) { System.out.printf("\tTriggered!%n%n"); }
+
+ // Return a trigger.
+ return true;
+ }
}
}
@@ -371,7 +371,7 @@
* will be thrown.
*/
public void setPrescale(int prescale) {
- this.prescale = prescale;
+ this.prescale = prescale;
}
/**
@@ -383,7 +383,7 @@
public void setVerbose(boolean verbose) {
this.verbose = verbose;
}
-
+
/**
* Toggles whether the more inclusive acceptance region version 1
* is used, or the slightly smaller and more exclusive acceptance
@@ -392,9 +392,9 @@
* 1 of the acceptance region should be used and <code>false</code>
* that version 2 should be used.
*/
- public void setUseVersionOne(boolean useVersionOne) {
- this.useVersionOne = useVersionOne;
- }
+ public void setUseVersionOne(boolean useVersionOne) {
+ this.useVersionOne = useVersionOne;
+ }
// ==================================================================
// ==== AIDA Plots ==================================================
Modified: java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/OccupancyAnalysisDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/OccupancyAnalysisDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/OccupancyAnalysisDriver.java Wed Apr 27 11:11:32 2016
@@ -12,19 +12,19 @@
import org.lcsim.util.aida.AIDA;
public class OccupancyAnalysisDriver extends Driver {
- // Internal variables.
- private double scalingFactor = 0.05;
- private double seedThreshold = 0.050;
- private double beamRatio = 1.92 / 2.2;
- private double clusterThreshold = 0.200;
+ // Internal variables.
+ private double scalingFactor = 0.05;
+ private double seedThreshold = 0.050;
+ private double beamRatio = 1.92 / 2.2;
+ private double clusterThreshold = 0.200;
private AIDA aida = AIDA.defaultInstance();
- private boolean ignoreBeamGapRows = false;
-
+ private boolean ignoreBeamGapRows = false;
+
// LCIO Collection Names
private String clusterCollectionName = "EcalClusters";
private String hitCollectionName = "EcalCorrectedHits";
- // Trigger plots.
+ // Trigger plots.
IHistogram2D occupancyDistribution;
IHistogram2D[] clusterDistribution = new IHistogram2D[2];
IHistogram1D[] clusterHitDistribution = new IHistogram1D[2];
@@ -32,129 +32,129 @@
IHistogram1D[] clusterEnergyDistribution = new IHistogram1D[2];
public void setIgnoreBeamGapRows(boolean ignoreBeamGapRows) {
- this.ignoreBeamGapRows = ignoreBeamGapRows;
+ this.ignoreBeamGapRows = ignoreBeamGapRows;
}
public void setBeamRatio(double beamRatio) {
- this.beamRatio = beamRatio;
+ this.beamRatio = beamRatio;
}
public void setScalingFactor(double scalingFactor) {
- this.scalingFactor = scalingFactor;
+ this.scalingFactor = scalingFactor;
}
public void setSeedThreshold(double seedThreshold) {
- this.seedThreshold = seedThreshold;
+ this.seedThreshold = seedThreshold;
}
public void setClusterThreshold(double clusterThreshold) {
- this.clusterThreshold = clusterThreshold;
+ this.clusterThreshold = clusterThreshold;
}
public void setClusterCollectionName(String clusterCollectionName) {
- this.clusterCollectionName = clusterCollectionName;
+ this.clusterCollectionName = clusterCollectionName;
}
public void setHitCollectionName(String hitCollectionName) {
- this.hitCollectionName = hitCollectionName;
+ this.hitCollectionName = hitCollectionName;
}
@Override
public void process(EventHeader event) {
- // If clusters are present, process them.
- if(event.hasCollection(Cluster.class, clusterCollectionName)) {
- // Get the list of clusters.
- List<Cluster> clusterList = event.get(Cluster.class, clusterCollectionName);
-
- // Use the clusters to populate the cluster plots.
- for(Cluster cluster : clusterList) {
- // Get the ix and iy values for the cluster.
- int ix = cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix");
- int iy = cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy");
-
- // If we want to ignore the beam gap rows, make sure
- // that iy exceeds two.
- if(!ignoreBeamGapRows || (Math.abs(iy) > 2)) {
- // If the cluster passes the seed threshold, place it in
- // the level 1 plots.
- if(cluster.getCalorimeterHits().get(0).getCorrectedEnergy() >= seedThreshold) {
- clusterDistribution[0].fill(ix, iy, scalingFactor);
- clusterHitDistribution[0].fill(cluster.getCalorimeterHits().size(), scalingFactor);
- clusterEnergyDistribution[0].fill(cluster.getEnergy() * beamRatio, scalingFactor);
- }
-
- // If the cluster energy passes the cluster threshold,
- // populate the level 2 plots.
- if(cluster.getEnergy() >= clusterThreshold) {
- clusterDistribution[1].fill(ix, iy, scalingFactor);
- clusterHitDistribution[1].fill(cluster.getCalorimeterHits().size(), scalingFactor);
- clusterEnergyDistribution[1].fill(cluster.getEnergy() * beamRatio, scalingFactor);
- }
- }
- }
- }
-
- // If the event has hits, process them.
- if(event.hasCollection(CalorimeterHit.class, hitCollectionName)) {
- // Get the list of hits.
- List<CalorimeterHit> hitList = event.get(CalorimeterHit.class, hitCollectionName);
-
- // Track the energy in the top and bottom of the calorimeter.
- double[] energy = { 0.0, 0.0 };
-
- // Iterate over the hits.
- for(CalorimeterHit hit : hitList) {
- // Get the ix and iy values.
- int ix = hit.getIdentifierFieldValue("ix");
- int iy = hit.getIdentifierFieldValue("iy");
-
- // If we want to ignore beam gap rows, ensure that iy
- // is greater than 2.
- if(!ignoreBeamGapRows || Math.abs(iy) > 2) {
- // Add the energy to the appropriate energy tracking
- // variable for the calorimeter halves.
- if(iy > 0) { energy[0] += hit.getCorrectedEnergy() * beamRatio; }
- else { energy[1] += hit.getCorrectedEnergy() * beamRatio; }
-
- // Populate the occupancy distribution.
- occupancyDistribution.fill(ix, iy, scalingFactor);
- }
- }
-
- // Populate the total calorimeter energy plot.
- totalEnergyDistribution[0].fill(energy[0], scalingFactor);
- totalEnergyDistribution[1].fill(energy[1], scalingFactor);
- }
+ // If clusters are present, process them.
+ if(event.hasCollection(Cluster.class, clusterCollectionName)) {
+ // Get the list of clusters.
+ List<Cluster> clusterList = event.get(Cluster.class, clusterCollectionName);
+
+ // Use the clusters to populate the cluster plots.
+ for(Cluster cluster : clusterList) {
+ // Get the ix and iy values for the cluster.
+ int ix = cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix");
+ int iy = cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy");
+
+ // If we want to ignore the beam gap rows, make sure
+ // that iy exceeds two.
+ if(!ignoreBeamGapRows || (Math.abs(iy) > 2)) {
+ // If the cluster passes the seed threshold, place it in
+ // the level 1 plots.
+ if(cluster.getCalorimeterHits().get(0).getCorrectedEnergy() >= seedThreshold) {
+ clusterDistribution[0].fill(ix, iy, scalingFactor);
+ clusterHitDistribution[0].fill(cluster.getCalorimeterHits().size(), scalingFactor);
+ clusterEnergyDistribution[0].fill(cluster.getEnergy() * beamRatio, scalingFactor);
+ }
+
+ // If the cluster energy passes the cluster threshold,
+ // populate the level 2 plots.
+ if(cluster.getEnergy() >= clusterThreshold) {
+ clusterDistribution[1].fill(ix, iy, scalingFactor);
+ clusterHitDistribution[1].fill(cluster.getCalorimeterHits().size(), scalingFactor);
+ clusterEnergyDistribution[1].fill(cluster.getEnergy() * beamRatio, scalingFactor);
+ }
+ }
+ }
+ }
+
+ // If the event has hits, process them.
+ if(event.hasCollection(CalorimeterHit.class, hitCollectionName)) {
+ // Get the list of hits.
+ List<CalorimeterHit> hitList = event.get(CalorimeterHit.class, hitCollectionName);
+
+ // Track the energy in the top and bottom of the calorimeter.
+ double[] energy = { 0.0, 0.0 };
+
+ // Iterate over the hits.
+ for(CalorimeterHit hit : hitList) {
+ // Get the ix and iy values.
+ int ix = hit.getIdentifierFieldValue("ix");
+ int iy = hit.getIdentifierFieldValue("iy");
+
+ // If we want to ignore beam gap rows, ensure that iy
+ // is greater than 2.
+ if(!ignoreBeamGapRows || Math.abs(iy) > 2) {
+ // Add the energy to the appropriate energy tracking
+ // variable for the calorimeter halves.
+ if(iy > 0) { energy[0] += hit.getCorrectedEnergy() * beamRatio; }
+ else { energy[1] += hit.getCorrectedEnergy() * beamRatio; }
+
+ // Populate the occupancy distribution.
+ occupancyDistribution.fill(ix, iy, scalingFactor);
+ }
+ }
+
+ // Populate the total calorimeter energy plot.
+ totalEnergyDistribution[0].fill(energy[0], scalingFactor);
+ totalEnergyDistribution[1].fill(energy[1], scalingFactor);
+ }
}
@Override
public void startOfData() {
- // Define the cluster distribution plots.
- String[] clusterDistName = { String.format("Comp Plots :: Cluster Seed Distribution [Seed Threshold %.3f GeV]", seedThreshold),
- String.format("Comp Plots :: Cluster Seed Distribution [Cluster Threshold %.3f GeV]", clusterThreshold) };
+ // Define the cluster distribution plots.
+ String[] clusterDistName = { String.format("Comp Plots :: Cluster Seed Distribution [Seed Threshold %.3f GeV]", seedThreshold),
+ String.format("Comp Plots :: Cluster Seed Distribution [Cluster Threshold %.3f GeV]", clusterThreshold) };
clusterDistribution[0] = aida.histogram2D(clusterDistName[0], 46, -23, 23, 11, -5.5, 5.5);
clusterDistribution[1] = aida.histogram2D(clusterDistName[1], 46, -23, 23, 11, -5.5, 5.5);
- // Define the occupancy distribution plots.
- String occupancyDistName = String.format("Comp Plots :: Crystal Occupancy");
- occupancyDistribution = aida.histogram2D(occupancyDistName, 46, -23, 23, 11, -5.5, 5.5);
-
+ // Define the occupancy distribution plots.
+ String occupancyDistName = String.format("Comp Plots :: Crystal Occupancy");
+ occupancyDistribution = aida.histogram2D(occupancyDistName, 46, -23, 23, 11, -5.5, 5.5);
+
// Define the cluster hit count distribution.
- String[] clusterHitDistName = { String.format("Comp Plots :: Cluster Hit Count Distribution [Seed Threshold %.3f GeV]", seedThreshold),
- String.format("Comp Plots :: Cluster Hit Count Distribution [Cluster Threshold %.3f GeV]", clusterThreshold) };
- clusterHitDistribution[0] = aida.histogram1D(clusterHitDistName[0], 9, 1, 10);
- clusterHitDistribution[1] = aida.histogram1D(clusterHitDistName[1], 9, 1, 10);
-
+ String[] clusterHitDistName = { String.format("Comp Plots :: Cluster Hit Count Distribution [Seed Threshold %.3f GeV]", seedThreshold),
+ String.format("Comp Plots :: Cluster Hit Count Distribution [Cluster Threshold %.3f GeV]", clusterThreshold) };
+ clusterHitDistribution[0] = aida.histogram1D(clusterHitDistName[0], 9, 1, 10);
+ clusterHitDistribution[1] = aida.histogram1D(clusterHitDistName[1], 9, 1, 10);
+
// Define the cluster total energy distribution.
- String[] clusterEnergyDistName = { String.format("Comp Plots :: Cluster Total Energy Distribution [Seed Threshold %.3f GeV]", seedThreshold),
- String.format("Comp Plots :: Cluster Total Energy Distribution [Cluster Threshold %.3f GeV]", clusterThreshold) };
- clusterEnergyDistribution[0] = aida.histogram1D(clusterEnergyDistName[0], 176, 0.0, 2.2);
- clusterEnergyDistribution[1] = aida.histogram1D(clusterEnergyDistName[1], 176, 0.0, 2.2);
-
+ String[] clusterEnergyDistName = { String.format("Comp Plots :: Cluster Total Energy Distribution [Seed Threshold %.3f GeV]", seedThreshold),
+ String.format("Comp Plots :: Cluster Total Energy Distribution [Cluster Threshold %.3f GeV]", clusterThreshold) };
+ clusterEnergyDistribution[0] = aida.histogram1D(clusterEnergyDistName[0], 176, 0.0, 2.2);
+ clusterEnergyDistribution[1] = aida.histogram1D(clusterEnergyDistName[1], 176, 0.0, 2.2);
+
// Define the calorimeter total energy distribution.
- String[] totalEnergyDistName = { String.format("Comp Plots :: Calorimeter Event Energy Distribution [Top]"),
- String.format("Comp Plots :: Calorimeter Event Energy Distribution [Bottom]") };
- totalEnergyDistribution[0] = aida.histogram1D(totalEnergyDistName[0], 500, 0.0, 10.0);
- totalEnergyDistribution[1] = aida.histogram1D(totalEnergyDistName[1], 500, 0.0, 10.0);
+ String[] totalEnergyDistName = { String.format("Comp Plots :: Calorimeter Event Energy Distribution [Top]"),
+ String.format("Comp Plots :: Calorimeter Event Energy Distribution [Bottom]") };
+ totalEnergyDistribution[0] = aida.histogram1D(totalEnergyDistName[0], 500, 0.0, 10.0);
+ totalEnergyDistribution[1] = aida.histogram1D(totalEnergyDistName[1], 500, 0.0, 10.0);
}
}
Modified: java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/ReadoutTrigger.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/ReadoutTrigger.java (original)
+++ java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/ReadoutTrigger.java Wed Apr 27 11:11:32 2016
@@ -22,16 +22,16 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public class ReadoutTrigger extends Driver {
- // Define settable parameters.
- private double energySlopeParamF = 0.0055;
- private String clusterCollectionName = "EcalClusters";
-
- // Define internal variables.
- private TriggerModule trigger = new TriggerModule();
-
- // Define output plots.
- private static final int NO_CUTS = 0;
- private static final int ALL_CUTS = 1;
+ // Define settable parameters.
+ private double energySlopeParamF = 0.0055;
+ private String clusterCollectionName = "EcalClusters";
+
+ // Define internal variables.
+ private TriggerModule trigger = new TriggerModule();
+
+ // Define output plots.
+ private static final int NO_CUTS = 0;
+ private static final int ALL_CUTS = 1;
private AIDA aida = AIDA.defaultInstance();
private IHistogram1D[] clusterSeedEnergy;
private IHistogram1D[] clusterHitCount;
@@ -46,267 +46,267 @@
private IHistogram2D[] clusterDistribution;
private IHistogram2D[] pairEnergySum2D;
private IHistogram2D[] pairEnergySlope2D;
-
+
/**
* Instantiates cluster plots.
*/
@Override
public void startOfData() {
- // Define plot type names.
- String[] plotType = new String[2];
- plotType[NO_CUTS] = "";
- plotType[ALL_CUTS] = " (Passed All Cuts)";
-
- // Define plot type directories.
- String[] plotDir = new String[2];
- plotDir[NO_CUTS] = "NoCuts/";
- plotDir[ALL_CUTS] = "PassedAll/";
-
- // Instantiate the plots.
- for(int i = 0; i < 2; i++) {
- System.out.println(plotDir[i] + "Cluster Seed Energy" + plotType[i]);
- clusterSeedEnergy[i] = aida.histogram1D(plotDir[i] + "Cluster Seed Energy" + plotType[i], 88, 0.0, 1.1);
- clusterSeedEnergy[i].annotation().addItem("xAxisLabel", "Seed Energy (GeV)");
- clusterSeedEnergy[i].annotation().addItem("yAxisLabel", "Count");
-
- clusterHitCount[i] = aida.histogram1D(plotDir[i] + "Cluster Hit Count" + plotType[i], 9, 0.5, 9.5);
- clusterHitCount[i].annotation().addItem("xAxisLabel", "Hit Count");
- clusterHitCount[i].annotation().addItem("yAxisLabel", "Count");
-
- clusterTotalEnergy[i] = aida.histogram1D(plotDir[i] + "Cluster Total Energy" + plotType[i], 88, 0.0, 1.1);
- clusterTotalEnergy[i].annotation().addItem("xAxisLabel", "Cluster Energy (GeV)");
- clusterTotalEnergy[i].annotation().addItem("yAxisLabel", "Count");
-
- clusterTime[i] = aida.histogram1D(plotDir[i] + "Cluster Time" + plotType[i], 100, 0.0, 400);
- clusterTime[i].annotation().addItem("xAxisLabel", "Cluster Time (ns)");
- clusterTime[i].annotation().addItem("yAxisLabel", "Count");
-
- pairEnergySum[i] = aida.histogram1D(plotDir[i] + "Pair Energy Sum" + plotType[i], 88, 0.0, 2.2);
- pairEnergySum[i].annotation().addItem("xAxisLabel", "Energy Sum (GeV)");
- pairEnergySum[i].annotation().addItem("yAxisLabel", "Count");
-
- pairEnergyDifference[i] = aida.histogram1D(plotDir[i] + "Pair Energy Difference" + plotType[i], 88, 0.0, 1.1);
- pairEnergyDifference[i].annotation().addItem("xAxisLabel", "Energy Difference (GeV)");
- pairEnergyDifference[i].annotation().addItem("yAxisLabel", "Count");
-
- pairCoplanarity[i] = aida.histogram1D(plotDir[i] + "Pair Coplanarity" + plotType[i], 180, 0.0, 180.0);
- pairCoplanarity[i].annotation().addItem("xAxisLabel", "Coplanarity Angle (Degrees)");
- pairCoplanarity[i].annotation().addItem("yAxisLabel", "Count");
-
- pairEnergySlope[i] = aida.histogram1D(plotDir[i] + "Pair Energy Slope" + plotType[i], 200, 0.0, 4.0);
- pairEnergySlope[i].annotation().addItem("xAxisLabel", "Energy Slope (GeV)");
- pairEnergySlope[i].annotation().addItem("yAxisLabel", "Count");
-
- pairTime[i] = aida.histogram1D(plotDir[i] + "Pair Time" + plotType[i], 100, 0.0, 400);
- pairTime[i].annotation().addItem("xAxisLabel", "Cluster Time (ns)");
- pairTime[i].annotation().addItem("yAxisLabel", "Count");
-
- pairCoincidence[i] = aida.histogram1D(plotDir[i] + "Pair Coincidence" + plotType[i], 8, 0.0, 32);
- pairCoincidence[i].annotation().addItem("xAxisLabel", "Coincidence Time (ns)");
- pairCoincidence[i].annotation().addItem("yAxisLabel", "Count");
-
- clusterDistribution[i] = aida.histogram2D(plotDir[i] + "Cluster Seed Distribution" + plotType[i], 46, -23, 23, 11, -5.5, 5.5);
- clusterDistribution[i].annotation().addItem("xAxisLabel", "x-Index");
- clusterDistribution[i].annotation().addItem("yAxisLabel", "y-Index");
-
- pairEnergySum2D[i] = aida.histogram2D(plotDir[i] + "Pair Energy Sum 2D" + plotType[i], 88, 0.0, 2.2, 88, 0.0, 2.2);
- pairEnergySum2D[i].annotation().addItem("xAxisLabel", "E1");
- pairEnergySum2D[i].annotation().addItem("yAxisLabel", "E2");
-
- pairEnergySlope2D[i] = aida.histogram2D(plotDir[i] + "Pair Energy Slope 2D" + plotType[i], 88, 0.0, 1.1, 200, 0.0, 400);
- pairEnergySlope2D[i].annotation().addItem("xAxisLabel", "E1");
- pairEnergySlope2D[i].annotation().addItem("yAxisLabel", "E2");
- }
+ // Define plot type names.
+ String[] plotType = new String[2];
+ plotType[NO_CUTS] = "";
+ plotType[ALL_CUTS] = " (Passed All Cuts)";
+
+ // Define plot type directories.
+ String[] plotDir = new String[2];
+ plotDir[NO_CUTS] = "NoCuts/";
+ plotDir[ALL_CUTS] = "PassedAll/";
+
+ // Instantiate the plots.
+ for(int i = 0; i < 2; i++) {
+ System.out.println(plotDir[i] + "Cluster Seed Energy" + plotType[i]);
+ clusterSeedEnergy[i] = aida.histogram1D(plotDir[i] + "Cluster Seed Energy" + plotType[i], 88, 0.0, 1.1);
+ clusterSeedEnergy[i].annotation().addItem("xAxisLabel", "Seed Energy (GeV)");
+ clusterSeedEnergy[i].annotation().addItem("yAxisLabel", "Count");
+
+ clusterHitCount[i] = aida.histogram1D(plotDir[i] + "Cluster Hit Count" + plotType[i], 9, 0.5, 9.5);
+ clusterHitCount[i].annotation().addItem("xAxisLabel", "Hit Count");
+ clusterHitCount[i].annotation().addItem("yAxisLabel", "Count");
+
+ clusterTotalEnergy[i] = aida.histogram1D(plotDir[i] + "Cluster Total Energy" + plotType[i], 88, 0.0, 1.1);
+ clusterTotalEnergy[i].annotation().addItem("xAxisLabel", "Cluster Energy (GeV)");
+ clusterTotalEnergy[i].annotation().addItem("yAxisLabel", "Count");
+
+ clusterTime[i] = aida.histogram1D(plotDir[i] + "Cluster Time" + plotType[i], 100, 0.0, 400);
+ clusterTime[i].annotation().addItem("xAxisLabel", "Cluster Time (ns)");
+ clusterTime[i].annotation().addItem("yAxisLabel", "Count");
+
+ pairEnergySum[i] = aida.histogram1D(plotDir[i] + "Pair Energy Sum" + plotType[i], 88, 0.0, 2.2);
+ pairEnergySum[i].annotation().addItem("xAxisLabel", "Energy Sum (GeV)");
+ pairEnergySum[i].annotation().addItem("yAxisLabel", "Count");
+
+ pairEnergyDifference[i] = aida.histogram1D(plotDir[i] + "Pair Energy Difference" + plotType[i], 88, 0.0, 1.1);
+ pairEnergyDifference[i].annotation().addItem("xAxisLabel", "Energy Difference (GeV)");
+ pairEnergyDifference[i].annotation().addItem("yAxisLabel", "Count");
+
+ pairCoplanarity[i] = aida.histogram1D(plotDir[i] + "Pair Coplanarity" + plotType[i], 180, 0.0, 180.0);
+ pairCoplanarity[i].annotation().addItem("xAxisLabel", "Coplanarity Angle (Degrees)");
+ pairCoplanarity[i].annotation().addItem("yAxisLabel", "Count");
+
+ pairEnergySlope[i] = aida.histogram1D(plotDir[i] + "Pair Energy Slope" + plotType[i], 200, 0.0, 4.0);
+ pairEnergySlope[i].annotation().addItem("xAxisLabel", "Energy Slope (GeV)");
+ pairEnergySlope[i].annotation().addItem("yAxisLabel", "Count");
+
+ pairTime[i] = aida.histogram1D(plotDir[i] + "Pair Time" + plotType[i], 100, 0.0, 400);
+ pairTime[i].annotation().addItem("xAxisLabel", "Cluster Time (ns)");
+ pairTime[i].annotation().addItem("yAxisLabel", "Count");
+
+ pairCoincidence[i] = aida.histogram1D(plotDir[i] + "Pair Coincidence" + plotType[i], 8, 0.0, 32);
+ pairCoincidence[i].annotation().addItem("xAxisLabel", "Coincidence Time (ns)");
+ pairCoincidence[i].annotation().addItem("yAxisLabel", "Count");
+
+ clusterDistribution[i] = aida.histogram2D(plotDir[i] + "Cluster Seed Distribution" + plotType[i], 46, -23, 23, 11, -5.5, 5.5);
+ clusterDistribution[i].annotation().addItem("xAxisLabel", "x-Index");
+ clusterDistribution[i].annotation().addItem("yAxisLabel", "y-Index");
+
+ pairEnergySum2D[i] = aida.histogram2D(plotDir[i] + "Pair Energy Sum 2D" + plotType[i], 88, 0.0, 2.2, 88, 0.0, 2.2);
+ pairEnergySum2D[i].annotation().addItem("xAxisLabel", "E1");
+ pairEnergySum2D[i].annotation().addItem("yAxisLabel", "E2");
+
+ pairEnergySlope2D[i] = aida.histogram2D(plotDir[i] + "Pair Energy Slope 2D" + plotType[i], 88, 0.0, 1.1, 200, 0.0, 400);
+ pairEnergySlope2D[i].annotation().addItem("xAxisLabel", "E1");
+ pairEnergySlope2D[i].annotation().addItem("yAxisLabel", "E2");
+ }
}
/**
* Produces both uncut and cut distributions from clusters.
*/
- @Override
- public void process(EventHeader event) {
- // Check for a collection of clusters.
- if(event.hasCollection(Cluster.class, clusterCollectionName)) {
- // Get the list of clusters.
- List<Cluster> clusters = event.get(Cluster.class, clusterCollectionName);
-
- // Track which clusters have already been plotted.
- Set<Cluster> plottedClustersUncut = new HashSet<Cluster>(clusters.size());
- Set<Cluster> plottedClustersCut = new HashSet<Cluster>(clusters.size());
-
- // Populate a list of cluster pairs.
- List<Cluster[]> pairs = getClusterPairs(clusters);
-
- // Process all cluster pairs.
- pairLoop:
- for(Cluster[] pair : pairs) {
- // Get the x and y indices for each cluster in the pair.
- int[] ix = { pair[0].getCalorimeterHits().get(0).getIdentifierFieldValue("ix"),
- pair[1].getCalorimeterHits().get(0).getIdentifierFieldValue("ix") };
- int[] iy = { pair[0].getCalorimeterHits().get(0).getIdentifierFieldValue("iy"),
- pair[1].getCalorimeterHits().get(0).getIdentifierFieldValue("iy") };
-
- // Iterate over the clusters in the pair and plot the
- // cluster singles distributions.
- for(int clusterIndex = 0; clusterIndex < 2; clusterIndex++) {
- // Only plot cluster singles distributions for
- // clusters if they have not already been plotted.
- // Note that this is needed because the same cluster
- // can appear across multiple pairs.
- if(!plottedClustersUncut.contains(pair[clusterIndex])) {
- clusterSeedEnergy[NO_CUTS].fill(TriggerModule.getValueClusterSeedEnergy(pair[clusterIndex]));
- clusterTotalEnergy[NO_CUTS].fill(TriggerModule.getValueClusterTotalEnergy(pair[clusterIndex]));
- clusterHitCount[NO_CUTS].fill(TriggerModule.getValueClusterHitCount(pair[clusterIndex]));
- clusterDistribution[NO_CUTS].fill(ix[clusterIndex], iy[clusterIndex]);
- clusterTime[NO_CUTS].fill(pair[clusterIndex].getCalorimeterHits().get(0).getTime());
- plottedClustersUncut.add(pair[clusterIndex]);
- }
- }
-
- // Plot the cluster pair distributions.
- pairEnergySum[NO_CUTS].fill(TriggerModule.getValueEnergySum(pair));
- pairEnergyDifference[NO_CUTS].fill(TriggerModule.getValueEnergyDifference(pair));
- pairEnergySlope[NO_CUTS].fill(TriggerModule.getValueEnergySlope(pair, energySlopeParamF));
- pairCoplanarity[NO_CUTS].fill(TriggerModule.getValueCoplanarity(pair));
- pairTime[NO_CUTS].fill(pair[1].getCalorimeterHits().get(0).getTime());
- pairCoincidence[NO_CUTS].fill(TriggerModule.getValueTimeCoincidence(pair));
- pairEnergySum2D[NO_CUTS].fill(pair[0].getEnergy(), pair[1].getEnergy());
- if(pair[0].getEnergy() < pair[1].getEnergy()) {
- pairEnergySlope2D[NO_CUTS].fill(pair[0].getEnergy(), TriggerModule.getClusterDistance(pair[0]));
- } else {
- pairEnergySlope2D[NO_CUTS].fill(pair[1].getEnergy(), TriggerModule.getClusterDistance(pair[1]));
- }
-
- // Perform the cluster singles cuts.
- if(!(trigger.clusterHitCountCut(pair[0]) && trigger.clusterHitCountCut(pair[1]))) {
- continue pairLoop;
- } if(!(trigger.clusterTotalEnergyCut(pair[0]) && trigger.clusterTotalEnergyCut(pair[1]))) {
- continue pairLoop;
- } if(!(trigger.clusterSeedEnergyCut(pair[0]) && trigger.clusterSeedEnergyCut(pair[1]))) {
- continue pairLoop;
- }
-
- // Perform the cluster pair cuts.
- if(!trigger.pairCoplanarityCut(pair)) {
- continue pairLoop;
- } if(!trigger.pairEnergyDifferenceCut(pair)) {
- continue pairLoop;
- } if(!trigger.pairEnergySlopeCut(pair)) {
- continue pairLoop;
- } if(!trigger.pairEnergySumCut(pair)) {
- continue pairLoop;
- }
-
- // Iterate over the clusters in the pair and plot the
- // cluster singles distributions.
- for(int clusterIndex = 0; clusterIndex < 2; clusterIndex++) {
- // Only plot cluster singles distributions for
- // clusters if they have not already been plotted.
- // Note that this is needed because the same cluster
- // can appear across multiple pairs.
- if(!plottedClustersCut.contains(pair[clusterIndex])) {
- clusterSeedEnergy[ALL_CUTS].fill(TriggerModule.getValueClusterSeedEnergy(pair[clusterIndex]));
- clusterTotalEnergy[ALL_CUTS].fill(TriggerModule.getValueClusterTotalEnergy(pair[clusterIndex]));
- clusterHitCount[ALL_CUTS].fill(TriggerModule.getValueClusterHitCount(pair[clusterIndex]));
- clusterDistribution[ALL_CUTS].fill(ix[clusterIndex], iy[clusterIndex]);
- clusterTime[ALL_CUTS].fill(pair[clusterIndex].getCalorimeterHits().get(0).getTime());
- plottedClustersCut.add(pair[clusterIndex]);
- }
- }
-
- // Plot the cluster pair distributions.
- pairEnergySum[ALL_CUTS].fill(TriggerModule.getValueEnergySum(pair));
- pairEnergyDifference[ALL_CUTS].fill(TriggerModule.getValueEnergyDifference(pair));
- pairEnergySlope[ALL_CUTS].fill(TriggerModule.getValueEnergySlope(pair, energySlopeParamF));
- pairCoplanarity[ALL_CUTS].fill(TriggerModule.getValueCoplanarity(pair));
- pairTime[ALL_CUTS].fill(pair[1].getCalorimeterHits().get(0).getTime());
- pairCoincidence[ALL_CUTS].fill(TriggerModule.getValueTimeCoincidence(pair));
- pairEnergySum2D[ALL_CUTS].fill(pair[0].getEnergy(), pair[1].getEnergy());
- if(pair[0].getEnergy() < pair[1].getEnergy()) {
- pairEnergySlope2D[ALL_CUTS].fill(pair[0].getEnergy(), TriggerModule.getClusterDistance(pair[0]));
- } else {
- pairEnergySlope2D[ALL_CUTS].fill(pair[1].getEnergy(), TriggerModule.getClusterDistance(pair[1]));
- }
-
- }
- }
- }
-
- public void setClusterCollectionName(String clusterCollectionName) {
- this.clusterCollectionName = clusterCollectionName;
- }
-
- public void setEnergySlopeParamF(double energySlopeParamF) {
- this.energySlopeParamF = energySlopeParamF;
- trigger.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F, energySlopeParamF);
- }
-
- public void setSeedEnergyLow(double value) {
- trigger.setCutValue(TriggerModule.CLUSTER_SEED_ENERGY_LOW, value);
- }
-
- public void setClusterEnergyLow(double value) {
- trigger.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, value);
- }
-
- public void setClusterEnergyHigh(double value) {
- trigger.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, value);
- }
-
- public void setHitCountLow(double value) {
- trigger.setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, value);
- }
-
- public void setEnergySumLow(double value) {
- trigger.setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, value);
- }
-
- public void setEnergySumHigh(double value) {
- trigger.setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, value);
- }
-
- public void setEnergyDifferenceHigh(double value) {
- trigger.setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, value);
- }
-
- public void setEnergySlopeLow(double value) {
- trigger.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, value);
- }
-
- public void setCoplanarityHigh(double value) {
- trigger.setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, value);
- }
-
- public void setTimeCoincidence(double value) {
- trigger.setCutValue(TriggerModule.PAIR_TIME_COINCIDENCE, value);
- }
-
- /**
- * Creates all top/bottom pairs from the event data.
- * @param clusters - A list of clusters from which to form pairs.
- * @return Returns a <code>List</code> collection that contains
- * <code>Cluster</code> arrays of size two.
- */
- private List<Cluster[]> getClusterPairs(List<Cluster> clusters) {
- // Separate the clusters into top nad bottom clusters.
- List<Cluster> topList = new ArrayList<Cluster>();
- List<Cluster> botList = new ArrayList<Cluster>();
- for(Cluster cluster : clusters) {
- if(cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy") > 0) {
- topList.add(cluster);
- } else {
- botList.add(cluster);
- }
- }
-
- // Create all possible top/bottom cluster pairs.
- List<Cluster[]> pairList = new ArrayList<Cluster[]>();
- for(Cluster topCluster : topList) {
- for(Cluster botCluster : botList) {
- pairList.add(new Cluster[] { topCluster, botCluster });
- }
- }
-
- // Return the pairs.
- return pairList;
- }
+ @Override
+ public void process(EventHeader event) {
+ // Check for a collection of clusters.
+ if(event.hasCollection(Cluster.class, clusterCollectionName)) {
+ // Get the list of clusters.
+ List<Cluster> clusters = event.get(Cluster.class, clusterCollectionName);
+
+ // Track which clusters have already been plotted.
+ Set<Cluster> plottedClustersUncut = new HashSet<Cluster>(clusters.size());
+ Set<Cluster> plottedClustersCut = new HashSet<Cluster>(clusters.size());
+
+ // Populate a list of cluster pairs.
+ List<Cluster[]> pairs = getClusterPairs(clusters);
+
+ // Process all cluster pairs.
+ pairLoop:
+ for(Cluster[] pair : pairs) {
+ // Get the x and y indices for each cluster in the pair.
+ int[] ix = { pair[0].getCalorimeterHits().get(0).getIdentifierFieldValue("ix"),
+ pair[1].getCalorimeterHits().get(0).getIdentifierFieldValue("ix") };
+ int[] iy = { pair[0].getCalorimeterHits().get(0).getIdentifierFieldValue("iy"),
+ pair[1].getCalorimeterHits().get(0).getIdentifierFieldValue("iy") };
+
+ // Iterate over the clusters in the pair and plot the
+ // cluster singles distributions.
+ for(int clusterIndex = 0; clusterIndex < 2; clusterIndex++) {
+ // Only plot cluster singles distributions for
+ // clusters if they have not already been plotted.
+ // Note that this is needed because the same cluster
+ // can appear across multiple pairs.
+ if(!plottedClustersUncut.contains(pair[clusterIndex])) {
+ clusterSeedEnergy[NO_CUTS].fill(TriggerModule.getValueClusterSeedEnergy(pair[clusterIndex]));
+ clusterTotalEnergy[NO_CUTS].fill(TriggerModule.getValueClusterTotalEnergy(pair[clusterIndex]));
+ clusterHitCount[NO_CUTS].fill(TriggerModule.getValueClusterHitCount(pair[clusterIndex]));
+ clusterDistribution[NO_CUTS].fill(ix[clusterIndex], iy[clusterIndex]);
+ clusterTime[NO_CUTS].fill(pair[clusterIndex].getCalorimeterHits().get(0).getTime());
+ plottedClustersUncut.add(pair[clusterIndex]);
+ }
+ }
+
+ // Plot the cluster pair distributions.
+ pairEnergySum[NO_CUTS].fill(TriggerModule.getValueEnergySum(pair));
+ pairEnergyDifference[NO_CUTS].fill(TriggerModule.getValueEnergyDifference(pair));
+ pairEnergySlope[NO_CUTS].fill(TriggerModule.getValueEnergySlope(pair, energySlopeParamF));
+ pairCoplanarity[NO_CUTS].fill(TriggerModule.getValueCoplanarity(pair));
+ pairTime[NO_CUTS].fill(pair[1].getCalorimeterHits().get(0).getTime());
+ pairCoincidence[NO_CUTS].fill(TriggerModule.getValueTimeCoincidence(pair));
+ pairEnergySum2D[NO_CUTS].fill(pair[0].getEnergy(), pair[1].getEnergy());
+ if(pair[0].getEnergy() < pair[1].getEnergy()) {
+ pairEnergySlope2D[NO_CUTS].fill(pair[0].getEnergy(), TriggerModule.getClusterDistance(pair[0]));
+ } else {
+ pairEnergySlope2D[NO_CUTS].fill(pair[1].getEnergy(), TriggerModule.getClusterDistance(pair[1]));
+ }
+
+ // Perform the cluster singles cuts.
+ if(!(trigger.clusterHitCountCut(pair[0]) && trigger.clusterHitCountCut(pair[1]))) {
+ continue pairLoop;
+ } if(!(trigger.clusterTotalEnergyCut(pair[0]) && trigger.clusterTotalEnergyCut(pair[1]))) {
+ continue pairLoop;
+ } if(!(trigger.clusterSeedEnergyCut(pair[0]) && trigger.clusterSeedEnergyCut(pair[1]))) {
+ continue pairLoop;
+ }
+
+ // Perform the cluster pair cuts.
+ if(!trigger.pairCoplanarityCut(pair)) {
+ continue pairLoop;
+ } if(!trigger.pairEnergyDifferenceCut(pair)) {
+ continue pairLoop;
+ } if(!trigger.pairEnergySlopeCut(pair)) {
+ continue pairLoop;
+ } if(!trigger.pairEnergySumCut(pair)) {
+ continue pairLoop;
+ }
+
+ // Iterate over the clusters in the pair and plot the
+ // cluster singles distributions.
+ for(int clusterIndex = 0; clusterIndex < 2; clusterIndex++) {
+ // Only plot cluster singles distributions for
+ // clusters if they have not already been plotted.
+ // Note that this is needed because the same cluster
+ // can appear across multiple pairs.
+ if(!plottedClustersCut.contains(pair[clusterIndex])) {
+ clusterSeedEnergy[ALL_CUTS].fill(TriggerModule.getValueClusterSeedEnergy(pair[clusterIndex]));
+ clusterTotalEnergy[ALL_CUTS].fill(TriggerModule.getValueClusterTotalEnergy(pair[clusterIndex]));
+ clusterHitCount[ALL_CUTS].fill(TriggerModule.getValueClusterHitCount(pair[clusterIndex]));
+ clusterDistribution[ALL_CUTS].fill(ix[clusterIndex], iy[clusterIndex]);
+ clusterTime[ALL_CUTS].fill(pair[clusterIndex].getCalorimeterHits().get(0).getTime());
+ plottedClustersCut.add(pair[clusterIndex]);
+ }
+ }
+
+ // Plot the cluster pair distributions.
+ pairEnergySum[ALL_CUTS].fill(TriggerModule.getValueEnergySum(pair));
+ pairEnergyDifference[ALL_CUTS].fill(TriggerModule.getValueEnergyDifference(pair));
+ pairEnergySlope[ALL_CUTS].fill(TriggerModule.getValueEnergySlope(pair, energySlopeParamF));
+ pairCoplanarity[ALL_CUTS].fill(TriggerModule.getValueCoplanarity(pair));
+ pairTime[ALL_CUTS].fill(pair[1].getCalorimeterHits().get(0).getTime());
+ pairCoincidence[ALL_CUTS].fill(TriggerModule.getValueTimeCoincidence(pair));
+ pairEnergySum2D[ALL_CUTS].fill(pair[0].getEnergy(), pair[1].getEnergy());
+ if(pair[0].getEnergy() < pair[1].getEnergy()) {
+ pairEnergySlope2D[ALL_CUTS].fill(pair[0].getEnergy(), TriggerModule.getClusterDistance(pair[0]));
+ } else {
+ pairEnergySlope2D[ALL_CUTS].fill(pair[1].getEnergy(), TriggerModule.getClusterDistance(pair[1]));
+ }
+
+ }
+ }
+ }
+
+ public void setClusterCollectionName(String clusterCollectionName) {
+ this.clusterCollectionName = clusterCollectionName;
+ }
+
+ public void setEnergySlopeParamF(double energySlopeParamF) {
+ this.energySlopeParamF = energySlopeParamF;
+ trigger.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F, energySlopeParamF);
+ }
+
+ public void setSeedEnergyLow(double value) {
+ trigger.setCutValue(TriggerModule.CLUSTER_SEED_ENERGY_LOW, value);
+ }
+
+ public void setClusterEnergyLow(double value) {
+ trigger.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, value);
+ }
+
+ public void setClusterEnergyHigh(double value) {
+ trigger.setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, value);
+ }
+
+ public void setHitCountLow(double value) {
+ trigger.setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, value);
+ }
+
+ public void setEnergySumLow(double value) {
+ trigger.setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, value);
+ }
+
+ public void setEnergySumHigh(double value) {
+ trigger.setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, value);
+ }
+
+ public void setEnergyDifferenceHigh(double value) {
+ trigger.setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, value);
+ }
+
+ public void setEnergySlopeLow(double value) {
+ trigger.setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, value);
+ }
+
+ public void setCoplanarityHigh(double value) {
+ trigger.setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, value);
+ }
+
+ public void setTimeCoincidence(double value) {
+ trigger.setCutValue(TriggerModule.PAIR_TIME_COINCIDENCE, value);
+ }
+
+ /**
+ * Creates all top/bottom pairs from the event data.
+ * @param clusters - A list of clusters from which to form pairs.
+ * @return Returns a <code>List</code> collection that contains
+ * <code>Cluster</code> arrays of size two.
+ */
+ private List<Cluster[]> getClusterPairs(List<Cluster> clusters) {
+        // Separate the clusters into top and bottom clusters.
+ List<Cluster> topList = new ArrayList<Cluster>();
+ List<Cluster> botList = new ArrayList<Cluster>();
+ for(Cluster cluster : clusters) {
+ if(cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy") > 0) {
+ topList.add(cluster);
+ } else {
+ botList.add(cluster);
+ }
+ }
+
+ // Create all possible top/bottom cluster pairs.
+ List<Cluster[]> pairList = new ArrayList<Cluster[]>();
+ for(Cluster topCluster : topList) {
+ for(Cluster botCluster : botList) {
+ pairList.add(new Cluster[] { topCluster, botCluster });
+ }
+ }
+
+ // Return the pairs.
+ return pairList;
+ }
}
Modified: java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/RingBuffer.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/RingBuffer.java (original)
+++ java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/RingBuffer.java Wed Apr 27 11:11:32 2016
@@ -8,46 +8,46 @@
*/
public class RingBuffer {
- protected double[] array;
- protected int ptr;
+ protected double[] array;
+ protected int ptr;
- public RingBuffer(int size) {
- array = new double[size]; //initialized to 0
- ptr = 0;
- }
+ public RingBuffer(int size) {
+ array = new double[size]; //initialized to 0
+ ptr = 0;
+ }
- /**
- *
- * @return value stored at current cell
- */
- public double currentValue() {
- return array[ptr];
- }
+ /**
+ *
+ * @return value stored at current cell
+ */
+ public double currentValue() {
+ return array[ptr];
+ }
- //return content of specified cell (pos=0 for current cell)
- public double getValue(int pos) {
- return array[((ptr + pos) % array.length + array.length) % array.length];
- }
+ //return content of specified cell (pos=0 for current cell)
+ public double getValue(int pos) {
+ return array[((ptr + pos) % array.length + array.length) % array.length];
+ }
- /**
- * Clear value at current cell and step to the next one
- */
- public void step() {
- array[ptr] = 0;
- ptr++;
- if (ptr == array.length) {
- ptr = 0;
- }
- }
+ /**
+ * Clear value at current cell and step to the next one
+ */
+ public void step() {
+ array[ptr] = 0;
+ ptr++;
+ if (ptr == array.length) {
+ ptr = 0;
+ }
+ }
- /**
- * Add given value to specified cell
- * @param pos Target position relative to current cell (pos=0 for current cell)
- * @param val
- */
- public void addToCell(int pos, double val) {
- array[(ptr + pos) % array.length] += val;
- }
+ /**
+ * Add given value to specified cell
+ * @param pos Target position relative to current cell (pos=0 for current cell)
+ * @param val
+ */
+ public void addToCell(int pos, double val) {
+ array[(ptr + pos) % array.length] += val;
+ }
public int getLength() {
return array.length;
Modified: java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/TimeEvolutionEcalReadoutDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/TimeEvolutionEcalReadoutDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-readout-sim/src/main/java/org/hps/readout/ecal/TimeEvolutionEcalReadoutDriver.java Wed Apr 27 11:11:32 2016
@@ -23,7 +23,7 @@
double t0 = 18.0;
public TimeEvolutionEcalReadoutDriver() {
- hitClass = CalorimeterHit.class;
+ hitClass = CalorimeterHit.class;
}
public void setT0(double t0) {
@@ -39,8 +39,8 @@
protected void readHits(List<CalorimeterHit> hits) {
for (Long cellID : eDepMap.keySet()) {
RingBuffer eDepBuffer = eDepMap.get(cellID);
- if (eDepBuffer.currentValue() > threshold) {
- hits.add(CalorimeterHitUtilities.create(eDepBuffer.currentValue(), readoutTime(), cellID, hitType));
+ if (eDepBuffer.currentValue() > threshold) {
+ hits.add(CalorimeterHitUtilities.create(eDepBuffer.currentValue(), readoutTime(), cellID, hitType));
}
eDepBuffer.step();
}
Modified: java/branches/HPSJAVA-409/ecal-recon/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/pom.xml (original)
+++ java/branches/HPSJAVA-409/ecal-recon/pom.xml Wed Apr 27 11:11:32 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/ecal-recon/</url>
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalCalibrationsDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalCalibrationsDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalCalibrationsDriver.java Wed Apr 27 11:11:32 2016
@@ -35,7 +35,7 @@
import org.lcsim.util.aida.AIDA;
/**
- * This Driver will generate a {@link org.hps.conditions.EcalCalibration} collection
+ * This Driver will generate a {@link org.hps.conditions.ecal.EcalCalibration} collection
* from the ADC value distributions of raw ECAL data. It may optionally insert this
* information into the conditions database using the file's run number.
*
@@ -46,20 +46,20 @@
*/
public class EcalCalibrationsDriver extends Driver {
- EcalConditions ecalConditions = null;
- DatabaseConditionsManager conditionsManager = null;
- AIDA aida = AIDA.defaultInstance();
- IFunctionFactory functionFactory = aida.analysisFactory().createFunctionFactory(null);
- IFitFactory fitFactory = aida.analysisFactory().createFitFactory();
- boolean loadCalibrations = false;
- boolean performFit = true;
- Integer runStart = null;
- Integer runEnd = null;
- File outputFile = null;
- Set<Integer> runs = new HashSet<Integer>();
- static DecimalFormat decimalFormat = new DecimalFormat("#.####");
- String inputHitsCollectionName = "EcalReadoutHits";
- static String ECAL_CALIBRATIONS = "ecal_calibrations";
+ private EcalConditions ecalConditions = null;
+ private DatabaseConditionsManager conditionsManager = null;
+ private AIDA aida = AIDA.defaultInstance();
+ private IFunctionFactory functionFactory = aida.analysisFactory().createFunctionFactory(null);
+ private IFitFactory fitFactory = aida.analysisFactory().createFitFactory();
+ private boolean loadCalibrations = false;
+ private boolean performFit = true;
+ private Integer runStart = null;
+ private Integer runEnd = null;
+ private File outputFile = null;
+ private Set<Integer> runs = new HashSet<Integer>();
+ private static DecimalFormat DECIMAL_FORMAT = new DecimalFormat("#.####");
+ private String inputHitsCollectionName = "EcalReadoutHits";
+ private static String ECAL_CALIBRATIONS = "ecal_calibrations";
/**
* Set the RawTrackerHit collection of hits to be used for the calibration.
@@ -91,7 +91,7 @@
/**
* Set the end run number for the conditions record.
* It must be >= the runEnd.
- * @param runStart The run start number.
+ * @param runEnd The run end number.
*/
public void setRunEnd(int runEnd) {
if (runEnd < 0) {
@@ -216,15 +216,15 @@
}
// Truncate to 4 decimal places.
- mean = Double.valueOf(decimalFormat.format(mean));
- sigma = Double.valueOf(decimalFormat.format(sigma));
+ mean = Double.valueOf(DECIMAL_FORMAT.format(mean));
+ sigma = Double.valueOf(DECIMAL_FORMAT.format(sigma));
// Create a new calibration object and add it to the collection, using mean for pedestal
// and sigma for noise.
try {
- calibrations.add(new EcalCalibration(channelId, mean, sigma));
+ calibrations.add(new EcalCalibration(channelId, mean, sigma));
} catch (ConditionsObjectException e) {
- throw new RuntimeException("Error adding new calibration object.", e);
+ throw new RuntimeException("Error adding new calibration object.", e);
}
}
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalConverterDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalConverterDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalConverterDriver.java Wed Apr 27 11:11:32 2016
@@ -6,7 +6,6 @@
import org.lcsim.event.CalorimeterHit;
import org.lcsim.event.EventHeader;
import org.lcsim.event.RawCalorimeterHit;
-import org.lcsim.event.base.BaseCalorimeterHit;
import org.lcsim.geometry.Detector;
import org.lcsim.lcio.LCIOConstants;
import org.lcsim.util.Driver;
@@ -17,8 +16,8 @@
* @version $Id: EcalConverterDriver.java,v 1.1 2013/02/25 22:39:24 meeg Exp $
*/
public class EcalConverterDriver extends Driver {
-
- Detector detector = null;
+
+ Detector detector = null;
String rawCollectionName;
String ecalReadoutName = "EcalHits";
@@ -59,7 +58,7 @@
@Override
public void detectorChanged(Detector detector) {
- this.detector = detector;
+ this.detector = detector;
}
@Override
@@ -87,8 +86,8 @@
}
private CalorimeterHit HitDtoA(RawCalorimeterHit hit) {
- double energy = DtoA(hit.getAmplitude(), hit.getCellID());
- return CalorimeterHitUtilities.create(energy, period * hit.getTimeStamp() + dt, hit.getCellID());
+ double energy = DtoA(hit.getAmplitude(), hit.getCellID());
+ return CalorimeterHitUtilities.create(energy, period * hit.getTimeStamp() + dt, hit.getCellID());
}
// private RawCalorimeterHit HitAtoD(CalorimeterHit hit) {
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalEdepToTriggerConverterDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalEdepToTriggerConverterDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalEdepToTriggerConverterDriver.java Wed Apr 27 11:11:32 2016
@@ -29,11 +29,11 @@
* @author Holly Szumila <[log in to unmask]>
*/
public class EcalEdepToTriggerConverterDriver extends Driver {
-
+
private EcalConditions ecalConditions = null;
private static final boolean isBadChannelLoaded = true;
-
+
private final String ecalReadoutName = "EcalHits";
private String inputCollection = "EcalHits";
private String readoutCollection = "EcalCalHits";
@@ -94,7 +94,7 @@
@Override
public void detectorChanged(Detector detector) {
-
+
// ECAL combined conditions object.
ecalConditions = DatabaseConditionsManager.getInstance().getEcalConditions();
@@ -104,7 +104,7 @@
public boolean isBadCrystal(CalorimeterHit hit) {
// Get the channel data.
EcalChannelConstants channelData = findChannel(hit.getCellID());
-
+
return isBadChannelLoaded ? channelData.isBadChannel() : false;
}
@@ -165,8 +165,8 @@
// System.out.format("trigger: %f %f\n", amplitude, triggerIntegral);
int truncatedIntegral = (int) Math.floor(triggerIntegral / truncateScale);
- if (truncatedIntegral > 0) {
- return CalorimeterHitUtilities.create(truncatedIntegral, hit.getTime(), hit.getCellID());
+ if (truncatedIntegral > 0) {
+ return CalorimeterHitUtilities.create(truncatedIntegral, hit.getTime(), hit.getCellID());
}
return null;
}
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalOnlineRawConverter.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalOnlineRawConverter.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalOnlineRawConverter.java Wed Apr 27 11:11:32 2016
@@ -27,28 +27,28 @@
private int nPeak = 3;
public EcalOnlineRawConverter() {
- // Track changes in the DAQ configuration.
- ConfigurationManager.addActionListener(new ActionListener() {
- @Override
- public void actionPerformed(ActionEvent e) {
- // Get the FADC configuration.
- config = ConfigurationManager.getInstance().getFADCConfig();
- // Get the number of peaks.
- if(config.getMode() == 1) nPeak = Integer.MAX_VALUE;
- else nPeak = config.getMaxPulses();
- // Print the FADC configuration.
- System.out.println();
- System.out.println();
- System.out.printf("NSA :: %d ns%n", config.getNSA());
- System.out.printf("NSB :: %d ns%n", config.getNSB());
- System.out.printf("Window Samples :: %d clock-cycles%n", config.getWindowWidth());
- System.out.printf("Max Peaks :: %d peaks%n", nPeak);
- System.out.println("======================================================================");
- System.out.println("=== FADC Pulse-Processing Settings ===================================");
- System.out.println("======================================================================");
- config.printConfig();
- }
- });
+ // Track changes in the DAQ configuration.
+ ConfigurationManager.addActionListener(new ActionListener() {
+ @Override
+ public void actionPerformed(ActionEvent e) {
+ // Get the FADC configuration.
+ config = ConfigurationManager.getInstance().getFADCConfig();
+ // Get the number of peaks.
+ if(config.getMode() == 1) nPeak = Integer.MAX_VALUE;
+ else nPeak = config.getMaxPulses();
+ // Print the FADC configuration.
+ System.out.println();
+ System.out.println();
+ System.out.printf("NSA :: %d ns%n", config.getNSA());
+ System.out.printf("NSB :: %d ns%n", config.getNSB());
+ System.out.printf("Window Samples :: %d clock-cycles%n", config.getWindowWidth());
+ System.out.printf("Max Peaks :: %d peaks%n", nPeak);
+ System.out.println("======================================================================");
+ System.out.println("=== FADC Pulse-Processing Settings ===================================");
+ System.out.println("======================================================================");
+ config.printConfig(System.out);
+ }
+ });
}
/**
@@ -137,10 +137,10 @@
// search for next threshold crossing begins at end of this pulse:
if (ConfigurationManager.getInstance().getFADCConfig().getMode() == 1) {
// special case, emulating SSP:
- ii += 8;
+ ii += 8;
} else {
// "normal" case, emulating FADC250:
- ii += config.getNSA()/nsPerSample - 1;
+ ii += config.getNSA()/nsPerSample - 1;
}
// firmware limit on # of peaks:
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalOnlineRawConverterDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalOnlineRawConverterDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalOnlineRawConverterDriver.java Wed Apr 27 11:11:32 2016
@@ -85,12 +85,12 @@
@Override
public void process(EventHeader event) {
- // Do not process the event if the DAQ configuration should be
- // used for value, but is not initialized.
- if(!ConfigurationManager.isInitialized()) {
- return;
- }
-
+ // Do not process the event if the DAQ configuration should be
+            // used for values, but is not initialized.
+ if(!ConfigurationManager.isInitialized()) {
+ return;
+ }
+
double timeOffset = 0.0;
int flags = 0;
flags += 1 << LCIOConstants.RCHBIT_TIME; //store hit time
@@ -102,12 +102,12 @@
* This is for FADC Mode-1 data:
*/
if (event.hasCollection(RawTrackerHit.class, rawCollectionName)) {
- List<RawTrackerHit> hits = event.get(RawTrackerHit.class, rawCollectionName);
+ List<RawTrackerHit> hits = event.get(RawTrackerHit.class, rawCollectionName);
- for (RawTrackerHit hit : hits) {
- newHits.addAll(converter.HitDtoA(event,hit));
- }
- event.put(ecalCollectionName, newHits, CalorimeterHit.class, flags, ecalReadoutName);
+ for (RawTrackerHit hit : hits) {
+ newHits.addAll(converter.HitDtoA(event,hit));
+ }
+ event.put(ecalCollectionName, newHits, CalorimeterHit.class, flags, ecalReadoutName);
}
/*
@@ -115,26 +115,26 @@
*/
if (event.hasCollection(RawCalorimeterHit.class, rawCollectionName)) {
- /*
- * This is for FADC Mode-7 data:
- */
- if (event.hasCollection(LCRelation.class, extraDataRelationsName)) { // extra information available from mode 7 readout
- List<LCRelation> extraDataRelations = event.get(LCRelation.class, extraDataRelationsName);
- for (LCRelation rel : extraDataRelations) {
- RawCalorimeterHit hit = (RawCalorimeterHit) rel.getFrom();
- GenericObject extraData = (GenericObject) rel.getTo();
- newHits.add(converter.HitDtoA(event,hit, extraData, timeOffset));
- }
- } else {
- /*
- * This is for FADC Mode-3 data:
- */
- List<RawCalorimeterHit> hits = event.get(RawCalorimeterHit.class, rawCollectionName);
- for (RawCalorimeterHit hit : hits) {
- newHits.add(converter.HitDtoA(event, hit, timeOffset));
- }
- }
- event.put(ecalCollectionName, newHits, CalorimeterHit.class, flags, ecalReadoutName);
+ /*
+ * This is for FADC Mode-7 data:
+ */
+ if (event.hasCollection(LCRelation.class, extraDataRelationsName)) { // extra information available from mode 7 readout
+ List<LCRelation> extraDataRelations = event.get(LCRelation.class, extraDataRelationsName);
+ for (LCRelation rel : extraDataRelations) {
+ RawCalorimeterHit hit = (RawCalorimeterHit) rel.getFrom();
+ GenericObject extraData = (GenericObject) rel.getTo();
+ newHits.add(converter.HitDtoA(event,hit, extraData, timeOffset));
+ }
+ } else {
+ /*
+ * This is for FADC Mode-3 data:
+ */
+ List<RawCalorimeterHit> hits = event.get(RawCalorimeterHit.class, rawCollectionName);
+ for (RawCalorimeterHit hit : hits) {
+ newHits.add(converter.HitDtoA(event, hit, timeOffset));
+ }
+ }
+ event.put(ecalCollectionName, newHits, CalorimeterHit.class, flags, ecalReadoutName);
}
}
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalPedestalCalculator.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalPedestalCalculator.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalPedestalCalculator.java Wed Apr 27 11:11:32 2016
@@ -114,11 +114,11 @@
}
}
if (uploadToDB) {
- try {
- uploadToDB();
- } catch (DatabaseObjectException | ConditionsObjectException | SQLException e) {
- throw new RuntimeException("Error uploading to database.", e);
- }
+ try {
+ uploadToDB();
+ } catch (DatabaseObjectException | ConditionsObjectException | SQLException e) {
+ throw new RuntimeException("Error uploading to database.", e);
+ }
} else {
System.out.println("!!!!!!!!!!!!!!!!!!!!!!! Not Writing Database !!!!!!!!!!!!!!!!!!!!!!!!!!");
}
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRawConverter.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRawConverter.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRawConverter.java Wed Apr 27 11:11:32 2016
@@ -41,9 +41,9 @@
*/
public class EcalRawConverter {
- /**
- * If true, time walk correction is performed.
- */
+ /**
+ * If true, time walk correction is performed.
+ */
private boolean useTimeWalkCorrection = true;
/**
@@ -149,42 +149,42 @@
* for trigger emulation.
*/
public EcalRawConverter() {
- // Track changes in the DAQ configuration.
- ConfigurationManager.addActionListener(new ActionListener() {
- @Override
- public void actionPerformed(ActionEvent e) {
- // If the DAQ configuration should be used, load the
- // relevant settings into the driver.
- if(useDAQConfig) {
- // Get the FADC configuration.
- config = ConfigurationManager.getInstance().getFADCConfig();
-
- // Load the settings.
- NSB = config.getNSB();
- NSA = config.getNSA();
- windowSamples = config.getWindowWidth() / 4;
-
- // Get the number of peaks.
- if(config.getMode() == 1) {
- nPeak = Integer.MAX_VALUE;
- } else {
- nPeak = config.getMaxPulses();
- }
-
- // Print the FADC configuration.
- System.out.println();
- System.out.println();
- System.out.printf("NSA :: %d ns%n", NSA);
- System.out.printf("NSB :: %d ns%n", NSB);
- System.out.printf("Window Samples :: %d clock-cycles%n", windowSamples);
- System.out.printf("Max Peaks :: %d peaks%n", nPeak);
- System.out.println("======================================================================");
- System.out.println("=== FADC Pulse-Processing Settings ===================================");
- System.out.println("======================================================================");
- config.printConfig();
- }
- }
- });
+ // Track changes in the DAQ configuration.
+ ConfigurationManager.addActionListener(new ActionListener() {
+ @Override
+ public void actionPerformed(ActionEvent e) {
+ // If the DAQ configuration should be used, load the
+ // relevant settings into the driver.
+ if(useDAQConfig) {
+ // Get the FADC configuration.
+ config = ConfigurationManager.getInstance().getFADCConfig();
+
+ // Load the settings.
+ NSB = config.getNSB();
+ NSA = config.getNSA();
+ windowSamples = config.getWindowWidth() / 4;
+
+ // Get the number of peaks.
+ if(config.getMode() == 1) {
+ nPeak = Integer.MAX_VALUE;
+ } else {
+ nPeak = config.getMaxPulses();
+ }
+
+ // Print the FADC configuration.
+ System.out.println();
+ System.out.println();
+ System.out.printf("NSA :: %d ns%n", NSA);
+ System.out.printf("NSB :: %d ns%n", NSB);
+ System.out.printf("Window Samples :: %d clock-cycles%n", windowSamples);
+ System.out.printf("Max Peaks :: %d peaks%n", nPeak);
+ System.out.println("======================================================================");
+ System.out.println("=== FADC Pulse-Processing Settings ===================================");
+ System.out.println("======================================================================");
+ config.printConfig(System.out);
+ }
+ }
+ });
}
@@ -313,7 +313,7 @@
* for trigger emulation.
*/
public void setUseDAQConfig(boolean state) {
- useDAQConfig = state;
+ useDAQConfig = state;
}
@@ -325,10 +325,10 @@
EcalChannelConstants channelData = findChannel(hit.getCellID());
double pedestal;
if(useDAQConfig) {
- //EcalChannel channel = ecalConditions.getChannelCollection().findGeometric(hit.getCellID());
- pedestal = config.getPedestal(hit.getCellID());
+ //EcalChannel channel = ecalConditions.getChannelCollection().findGeometric(hit.getCellID());
+ pedestal = config.getPedestal(hit.getCellID());
} else {
- pedestal = channelData.getCalibration().getPedestal();
+ pedestal = channelData.getCalibration().getPedestal();
}
int sum = 0;
@@ -356,10 +356,10 @@
* Choose whether to use static pedestal from database or running pedestal from mode-7.
*/
public double getSingleSamplePedestal(EventHeader event,long cellID) {
- if(useDAQConfig) {
- //EcalChannel channel = ecalConditions.getChannelCollection().findGeometric(cellID);
- return config.getPedestal(cellID);
- }
+ if(useDAQConfig) {
+ //EcalChannel channel = ecalConditions.getChannelCollection().findGeometric(cellID);
+ return config.getPedestal(cellID);
+ }
if (useRunningPedestal && event!=null) {
if (event.hasItem("EcalRunningPedestals")) {
Map<EcalChannel, Double> runningPedMap = (Map<EcalChannel, Double>) event.get("EcalRunningPedestals");
@@ -536,12 +536,12 @@
// threshold is pedestal plus threshold configuration parameter:
final int absoluteThreshold;
if(useDAQConfig) {
- //EcalChannel channel = ecalConditions.getChannelCollection().findGeometric(hit.getCellID());
- //int leadingEdgeThreshold = ConfigurationManager.getInstance().getFADCConfig().getThreshold(channel.getChannelId());
- int leadingEdgeThreshold = config.getThreshold(cellID);
- absoluteThreshold = (int) (getSingleSamplePedestal(event, cellID) + leadingEdgeThreshold);
+ //EcalChannel channel = ecalConditions.getChannelCollection().findGeometric(hit.getCellID());
+ //int leadingEdgeThreshold = ConfigurationManager.getInstance().getFADCConfig().getThreshold(channel.getChannelId());
+ int leadingEdgeThreshold = config.getThreshold(cellID);
+ absoluteThreshold = (int) (getSingleSamplePedestal(event, cellID) + leadingEdgeThreshold);
} else {
- absoluteThreshold = (int) (getSingleSamplePedestal(event, cellID) + leadingEdgeThreshold);
+ absoluteThreshold = (int) (getSingleSamplePedestal(event, cellID) + leadingEdgeThreshold);
}
ArrayList <Integer> thresholdCrossings = new ArrayList<Integer>();
@@ -562,10 +562,10 @@
// search for next threshold crossing begins at end of this pulse:
if(useDAQConfig && ConfigurationManager.getInstance().getFADCConfig().getMode() == 1) {
// special case, emulating SSP:
- ii += 8;
+ ii += 8;
} else {
// "normal" case, emulating FADC250:
- ii += NSA/nsPerSample - 1;
+ ii += NSA/nsPerSample - 1;
}
// firmware limit on # of peaks:
@@ -674,8 +674,8 @@
EcalChannelConstants channelData = findChannel(cellID);
if(useDAQConfig) {
- //float gain = ConfigurationManager.getInstance().getFADCConfig().getGain(ecalConditions.getChannelCollection().findGeometric(cellID));
- return config.getGain(cellID) * adcSum * EcalUtils.MeV;
+ //float gain = ConfigurationManager.getInstance().getFADCConfig().getGain(ecalConditions.getChannelCollection().findGeometric(cellID));
+ return config.getGain(cellID) * adcSum * EcalUtils.MeV;
} else if(use2014Gain) {
if (constantGain) {
return adcSum * EcalUtils.gainFactor * EcalUtils.ecalReadoutPeriod;
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRawConverterDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRawConverterDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRawConverterDriver.java Wed Apr 27 11:11:32 2016
@@ -18,14 +18,18 @@
import org.lcsim.util.Driver;
/**
- * This class is used to convert between collections of {@link org.lcsim.event.RawCalorimeterHit}
- * and {@link org.lcsim.event.RawTrackerHit}, objects with ADC/sample information, and
- * collections of {@link org.lcsim.event.CalorimeterHit}, objects with energy/time information.
- *
- * org.hps.recon.ecal.EcalRawConverter is called to do most of the lower level work.
- *
- *
-*/
+ * This <code>Driver</code> converts raw ECal data collections to {@link org.lcsim.event.CalorimeterHit} collections
+ * with energy and time information. The {@link EcalRawConverter} does most of the low-level work.
+ * <p>
+ * The following input collections are used:
+ * <ul>
 * <li>EcalReadoutHits</li>
+ * <li>EcalReadoutExtraDataRelations</li>
+ * <li>EcalRunningPedestals</li>
+ * </ul>
+ * <p>
+ * The results are by default written to the <b>EcalCalHits</b> output collection.
+ */
public class EcalRawConverterDriver extends Driver {
// To import database conditions
@@ -376,8 +380,8 @@
* conditions database.
*/
public void setUseDAQConfig(boolean state) {
- useDAQConfig = state;
- converter.setUseDAQConfig(state);
+ useDAQConfig = state;
+ converter.setUseDAQConfig(state);
}
@Override
@@ -400,18 +404,17 @@
/**
* @return false if the channel is a good one, true if it is a bad one
- * @param CalorimeterHit
+ * @param hit the <code>CalorimeterHit</code> pointing to the channel
*/
public boolean isBadCrystal(CalorimeterHit hit) {
// Get the channel data.
EcalChannelConstants channelData = findChannel(hit.getCellID());
-
return channelData.isBadChannel();
}
/**
* @return false if the ADC is a good one, true if it is a bad one
- * @param CalorimeterHit
+ * @param hit the <code>CalorimeterHit</code> pointing to the FADC
*/
public boolean isBadFADC(CalorimeterHit hit) {
return (getCrate(hit.getCellID()) == 1 && getSlot(hit.getCellID()) == 3);
@@ -433,12 +436,12 @@
@Override
public void process(EventHeader event) {
- // Do not process the event if the DAQ configuration should be
- // used for value, but is not initialized.
- if(useDAQConfig && !ConfigurationManager.isInitialized()) {
- return;
- }
-
+ // Do not process the event if the DAQ configuration should be
+        // used for values, but is not initialized.
+ if(useDAQConfig && !ConfigurationManager.isInitialized()) {
+ return;
+ }
+
final int SYSTEM_TRIGGER = 0;
// final int SYSTEM_TRACKER = 1;
final int SYSTEM_ECAL = 2;
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRunningPedestalDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRunningPedestalDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRunningPedestalDriver.java Wed Apr 27 11:11:32 2016
@@ -18,18 +18,21 @@
import org.lcsim.util.Driver;
/**
- * Calculate a running pedestal average for every channel from Mode7 FADCs. Uses
- * pedestals from the database if not available from the data.
- *
- * May 2015: Updated to also work on Mode1 data.
- *
- * TODO: Use Logger.
+ * This <code>Driver</code> takes Mode-1 or Mode-7 ECal data and computes a running pedestal
+ * average for every channel. Pedestals from the database will be used if this is not available.
+ * <p>
+ * The following input collections are used:
+ * <ul>
+ * <li>EcalReadoutHits</li>
+ * <li>EcalReadoutExtraDataRelations</li>
+ * </ul>
+ * <p>
+ * Results are by default written to the <b>EcalRunningPedestals</b> output collection.
*
* TODO: Timestamps from EVIO for some runs appear to not be monotonically increasing.
* This interferes with minLookbackTime, so it defaults to disabled and its setter
- * is left private for now.
+ * is left private for now. (Should be a JIRA item??? --JM)
*
- * @version $Id: ECalRunningPedestalDriver.java,v 1.0 2015/02/10 00:00:00
* @author <[log in to unmask]>
*/
public class EcalRunningPedestalDriver extends Driver {
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalTimeWalk.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalTimeWalk.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/EcalTimeWalk.java Wed Apr 27 11:11:32 2016
@@ -39,11 +39,11 @@
* Time walk parameters for pulse fitting
*/
private static final double[] par = {
- 0.9509,
- -33.21,
- 0.2614,
- -0.9128,
- 0.6251
+ 0.9509,
+ -33.21,
+ 0.2614,
+ -0.9128,
+ 0.6251
};
/**
@@ -53,9 +53,9 @@
* @return corrected time (ns)
*/
public static final double correctTimeWalkPulseFitting(double time, double energy) {
- final double polyA = par[0] + par[1]*energy;
- final double polyB = par[2] + par[3] * energy + par[4] * Math.pow(energy, 2);
- return time - (Math.exp(polyA) + polyB);
+ final double polyA = par[0] + par[1]*energy;
+ final double polyB = par[2] + par[3] * energy + par[4] * Math.pow(energy, 2);
+ return time - (Math.exp(polyA) + polyB);
}
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/FADCGenericHit.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/FADCGenericHit.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/FADCGenericHit.java Wed Apr 27 11:11:32 2016
@@ -24,6 +24,14 @@
this.slot = slot;
this.channel = channel;
this.data = data;
+ }
+
+ public FADCGenericHit(GenericObject object) {
+ this.readoutMode = getReadoutMode(object);
+ this.crate = getCrate(object);
+ this.slot = getSlot(object);
+ this.channel = getChannel(object);
+ this.data = getData(object);
}
@Override
@@ -114,5 +122,5 @@
data[i] = object.getIntVal(i+4);
}
return data;
- }
+ }
}
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/IterateGainFactorDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/IterateGainFactorDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/IterateGainFactorDriver.java Wed Apr 27 11:11:32 2016
@@ -26,15 +26,15 @@
*/
public class IterateGainFactorDriver extends Driver {
-
- private EcalConditions ecalConditions = null;
-
- /**
+
+ private EcalConditions ecalConditions = null;
+
+ /**
* Set the input collection name (source).
*
* @param inputCollectionName the input collection name
*/
- private String inputCollectionName = "EcalCalHits";
+ private String inputCollectionName = "EcalCalHits";
public void setInputCollectionName(final String inputCollectionName) {
this.inputCollectionName = inputCollectionName;
}
@@ -116,14 +116,14 @@
* @return the output hit collection with gain corrected energies
*/
public List<CalorimeterHit> iterateHits(final List<CalorimeterHit> hits) {
- ArrayList<CalorimeterHit> newHits = new ArrayList<CalorimeterHit>();
+ ArrayList<CalorimeterHit> newHits = new ArrayList<CalorimeterHit>();
for (final CalorimeterHit hit : hits) {
- double time = hit.getTime();
- long cellID = hit.getCellID();
- double energy = hit.getCorrectedEnergy()*gainFileGains.get(findChannelId(cellID));
- CalorimeterHit newHit = CalorimeterHitUtilities.create(energy, time, cellID);
- newHits.add(newHit);
-
+ double time = hit.getTime();
+ long cellID = hit.getCellID();
+ double energy = hit.getCorrectedEnergy()*gainFileGains.get(findChannelId(cellID));
+ CalorimeterHit newHit = CalorimeterHitUtilities.create(energy, time, cellID);
+ newHits.add(newHit);
+
}
return newHits;
}
@@ -134,26 +134,26 @@
*/
@Override
public void process(final EventHeader event) {
- readGainFile();
-
- // Check if output collection already exists in event which is an error.
+ readGainFile();
+
+ // Check if output collection already exists in event which is an error.
if (event.hasItem(outputCollectionName)) {
throw new RuntimeException("collection " + outputCollectionName + " already exists in event");
}
// Get the input collection.
if (event.hasCollection(CalorimeterHit.class,inputCollectionName)){
- final List<CalorimeterHit> inputHitCollection = event.get(CalorimeterHit.class, inputCollectionName);
+ final List<CalorimeterHit> inputHitCollection = event.get(CalorimeterHit.class, inputCollectionName);
- // Iterate the gain correction coefficient on each hit.
- final List<CalorimeterHit> outputHitCollection = this.iterateHits(inputHitCollection);
-
+ // Iterate the gain correction coefficient on each hit.
+ final List<CalorimeterHit> outputHitCollection = this.iterateHits(inputHitCollection);
+
int flags = 0;
flags += 1 << LCIOConstants.RCHBIT_TIME; //store hit time
flags += 1 << LCIOConstants.RCHBIT_LONG; //store hit position; this flag has no effect for RawCalorimeterHits
-
- // Put the collection into the event.
- event.put(outputCollectionName, outputHitCollection, CalorimeterHit.class, flags, ecalReadoutName);
+
+ // Put the collection into the event.
+ event.put(outputCollectionName, outputHitCollection, CalorimeterHit.class, flags, ecalReadoutName);
}
}
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/AbstractClusterer.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/AbstractClusterer.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/AbstractClusterer.java Wed Apr 27 11:11:32 2016
@@ -16,7 +16,7 @@
/**
* This is an abstract class that {@link Clusterer} classes should implement
* to perform a clustering algorithm on a <code>CalorimeterHit</code> collection.
- * The sub-class should implement {@link #createClusters(List)} which is
+ * The sub-class should implement {@link #createClusters(EventHeader, List)} which is
* the method that should perform the clustering algorithm.
*
* @see Clusterer
@@ -69,8 +69,8 @@
/**
* This is the primary method for sub-classes to implement their clustering algorithm.
- * @param hits
- * @return
+ * @param hits the list of hits
+ * @return the list of created clusters
*/
public abstract List<Cluster> createClusters(EventHeader event, List<CalorimeterHit> hits);
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterDriver.java Wed Apr 27 11:11:32 2016
@@ -73,7 +73,7 @@
/**
* Set the name of the input CalorimeterHit collection to use for clustering.
- * @param inputHitcollectionName The name of the input hit collection.
+ * @param inputHitCollectionName The name of the input hit collection.
*/
public void setInputHitCollectionName(String inputHitCollectionName) {
this.inputHitCollectionName = inputHitCollectionName;
@@ -128,7 +128,7 @@
* This will use a factory method which first tries to use some hard-coded names from
* the cluster package. As a last resort, it will interpret the name as a canonical
* class name and try to instantiate it using the Class API.
- * @param The name or canonical class name of the Clusterer.
+ * @param name The name or canonical class name of the Clusterer.
*/
public void setClustererName(String name) {
clusterer = ClustererFactory.create(name);
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterEnergyCorrection.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterEnergyCorrection.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterEnergyCorrection.java Wed Apr 27 11:11:32 2016
@@ -1,89 +1,208 @@
package org.hps.recon.ecal.cluster;
+import hep.physics.vec.Hep3Vector;
+
+import org.hps.detector.ecal.EcalCrystal;
+import org.hps.detector.ecal.HPSEcalDetectorElement;
+import org.jdom.DataConversionException;
+// import org.hps.recon.tracking.TrackUtils;
import org.lcsim.event.Cluster;
import org.lcsim.event.base.BaseCluster;
+import org.lcsim.geometry.subdetector.HPSEcal3;
/**
- * This is the cluster energy correction requiring the particle id
- * uncorrected cluster energy. This is not accurate for edge crystals
- * and should be used after cluster-track matching and after position
- * corrections.
+ * This is the cluster energy correction requiring the particle id uncorrected
+ * cluster energy. This is now updated to include edge corrections and sampling
+ * fractions derived from data.
*
* @author Holly Vance <[log in to unmask]>
* @author Jeremy McCormick <[log in to unmask]>
*/
public final class ClusterEnergyCorrection {
+
+ // Variables for electron energy corrections.
+ static final double par0_em = -0.017;
+ static final double par1_em[] = { 35, -0.06738, -0.0005613, 16.42, 0.3431,
+ -2.021, 74.85, -0.3626 };
+ static final double par2_em[] = { 35, 0.933, 0.003234, 18.06, 0.24, 8.586,
+ 75.08, -0.39 };
+
+ // Variables for positron energy corrections.
+ static final double par0_ep = -0.0131;
+ static final double par1_ep[] = { 35, -0.076, -0.0008183, 17.88, 0.2886,
+ -1.192, 73.12, -0.3747 };
+ static final double par2_ep[] = { 35, 0.94, 0.003713, 18.19, 0.24, 8.342,
+ 72.44, -0.39 };
+
+ // Variables for photon energy corrections.
+ static final double par0_p = -0.0113;
+ static final double par1_p[] = { 35, -0.0585, -0.0008572, 16.76, 0.2784,
+ -0.07232, 72.88, -0.1685 };
+ static final double par2_p[] = { 35, 0.9307, 0.004, 18.05, 0.23, 3.027,
+ 74.93, -0.34 };
+
+ /**
+ * Calculate the corrected energy for the cluster.
+ *
+ * @param cluster
+ * The input cluster.
+ * @return The corrected energy.
+ */
+ public static double calculateCorrectedEnergy(HPSEcal3 ecal, Cluster cluster) {
+ double rawE = cluster.getEnergy();
+ return computeCorrectedEnergy(ecal, cluster.getParticleId(), rawE,
+ cluster.getPosition()[0], cluster.getPosition()[1]);
+ }
+
+ /**
+ * Calculate the corrected energy for the cluster using track position at
+ * ecal.
+ *
+ * @param cluster
+ * The input cluster.
+ * @return The corrected energy.
+ */
+ public static double calculateCorrectedEnergy(HPSEcal3 ecal,
+ Cluster cluster, double ypos) {
+ double rawE = cluster.getEnergy();
+ return computeCorrectedEnergy(ecal, cluster.getParticleId(), rawE,
+ cluster.getPosition()[0], ypos);
+ }
+
+ /**
+ * Calculate the corrected energy and set on the cluster.
+ *
+ * @param cluster
+ * The input cluster.
+ */
+ public static void setCorrectedEnergy(HPSEcal3 ecal, BaseCluster cluster) {
+ double correctedEnergy = calculateCorrectedEnergy(ecal, cluster);
+ cluster.setEnergy(correctedEnergy);
+ }
+
+ /**
+ * Calculate the corrected energy and set on the cluster.
+ *
+ * @param cluster
+ * The input cluster.
+ */
+
+ public static void setCorrectedEnergy(HPSEcal3 ecal, BaseCluster cluster,
+ double ypos) {
+ double correctedEnergy = calculateCorrectedEnergy(ecal, cluster, ypos);
+ cluster.setEnergy(correctedEnergy);
+ }
+
+ /**
+ * Calculates energy correction based on cluster raw energy and particle
+ * type as per <a href=
+ * "https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014"
+ * >HPS Note 2014-001</a>
+ *
+ * @param pdg
+ * Particle id as per PDG
+ * @param rawEnergy
+ * Raw Energy of the cluster (sum of hits with shared hit
+ * distribution)
+ * @return Corrected Energy
+ */
+
+ private static double computeCorrectedEnergy(HPSEcal3 ecal, int pdg,
+ double rawEnergy, double xpos, double ypos) {
+ // distance to beam gap edge
+ double r;
+ // Get these values from the Ecal geometry:
+ HPSEcalDetectorElement detElement = (HPSEcalDetectorElement) ecal
+ .getDetectorElement();
+ // double BEAMGAPTOP =
+ // 22.3;//ecal.getNode().getChild("layout").getAttribute("beamgapTop").getDoubleValue();//mm
+ double BEAMGAPTOP = 20.0;
+ try {
+ BEAMGAPTOP = ecal.getNode().getChild("layout")
+ .getAttribute("beamgapTop").getDoubleValue();
+ } catch (DataConversionException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }// mm
+ double BEAMGAPBOT = -20.0;
+ try {
+ BEAMGAPBOT = -ecal.getNode().getChild("layout")
+ .getAttribute("beamgapBottom").getDoubleValue();
+ } catch (DataConversionException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }// mm
+ double BEAMGAPTOPC = BEAMGAPTOP + 13.0;// mm
+ double BEAMGAPBOTC = BEAMGAPBOT - 13.0;// mm
+ // x-coordinates of crystals on either side of row 1 cut out
+ EcalCrystal crystalM = detElement.getCrystal(-11, 1);
+ Hep3Vector posM = crystalM.getPositionFront();
+ EcalCrystal crystalP = detElement.getCrystal(-1, 1);
+ Hep3Vector posP = crystalP.getPositionFront();
+
+ if ((xpos < posM.x()) || (xpos > posP.x())) {
+ if (ypos > 0) {
+ r = Math.abs(ypos - BEAMGAPTOP);
+ } else {
+ r = Math.abs(ypos - BEAMGAPBOT);
+ }
+ }
+ // crystals above row 1 cut out
+ else {
+ if (ypos > 0) {
+ if (ypos > (par1_em[0] + BEAMGAPTOP)) {
+ r = Math.abs(ypos - BEAMGAPTOP);
+ } else {
+ r = Math.abs(ypos - BEAMGAPTOPC);
+ }
+ } else {
+ if (ypos > (-par1_em[0] + BEAMGAPBOT)) {
+ r = Math.abs(ypos - BEAMGAPBOTC);
+ } else {
+ r = Math.abs(ypos - BEAMGAPBOT);
+ }
+ }
+ }
- // Variables for electron energy corrections.
- //Updated with recent monte carlo --7AUG15 HS.
- //Old values from 2014-2015 commented out to right.
- static final double ELECTRON_ENERGY_A = 0.01004;//-0.0027;
- static final double ELECTRON_ENERGY_B = -0.122;//-0.06;
- static final double ELECTRON_ENERGY_C = 0.9646;//0.95;
-
- // Variables for positron energy corrections.
- static final double POSITRON_ENERGY_A = 0.00711;//-0.0096;
- static final double POSITRON_ENERGY_B = -0.1154;//-0.042;
- static final double POSITRON_ENERGY_C = 0.9614;//0.94;
-
- // Variables for photon energy corrections.
- static final double PHOTON_ENERGY_A = 0.007595;//0.0015;
- static final double PHOTON_ENERGY_B = -0.09766;//-0.047;
- static final double PHOTON_ENERGY_C = 0.9512;//0.94;
-
- /**
- * Calculate the corrected energy for the cluster.
- * @param cluster The input cluster.
- * @return The corrected energy.
- */
- public static double calculateCorrectedEnergy(Cluster cluster) {
- double rawE = cluster.getEnergy();
- return computeCorrectedEnergy(cluster.getParticleId(), rawE);
- }
-
- /**
- * Calculate the corrected energy and set on the cluster.
- * @param cluster The input cluster.
- */
- public static void setCorrectedEnergy(BaseCluster cluster) {
- double correctedEnergy = calculateCorrectedEnergy(cluster);
- cluster.setEnergy(correctedEnergy);
- }
-
- /**
- * Calculates energy correction based on cluster raw energy and particle type as per
- * <a href="https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014">HPS Note 2014-001</a>
- * @param pdg Particle id as per PDG
- * @param rawEnergy Raw Energy of the cluster (sum of hits with shared hit distribution)
- * @return Corrected Energy
- */
- private static double computeCorrectedEnergy(int pdg, double rawEnergy) {
- switch(pdg) {
- case 11:
- // electron
- return computeCorrectedEnergy(rawEnergy, ELECTRON_ENERGY_A, ELECTRON_ENERGY_B, ELECTRON_ENERGY_C);
- case -11:
- // positron
- return computeCorrectedEnergy(rawEnergy, POSITRON_ENERGY_A, POSITRON_ENERGY_B, POSITRON_ENERGY_C);
- case 22:
- // photon
- return computeCorrectedEnergy(rawEnergy, PHOTON_ENERGY_A, PHOTON_ENERGY_B, PHOTON_ENERGY_C);
- default:
- // unknown
- return rawEnergy;
+ //Eliminates corrections at outermost edges to negative cluster energies
+ //66 for positrons, 69 is safe for electrons and photons
+ if (r > 66) {r = 66;}
+
+ switch (pdg) {
+ case 11:
+ // electron
+ return computeCorrectedEnergy(r, rawEnergy, par0_em, par1_em,
+ par2_em);
+ case -11:
+ // positron
+ return computeCorrectedEnergy(r, rawEnergy, par0_ep, par1_ep,
+ par2_ep);
+ case 22:
+ // photon
+ return computeCorrectedEnergy(r, rawEnergy, par0_p, par1_p, par2_p);
+ default:
+ // unknown
+ return rawEnergy;
}
}
-
- /**
- * Calculates the energy correction to a cluster given the variables from the fit as per
- * <a href="https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014">HPS Note 2014-001</a>
- * Note that this is correct as there is a typo in the formula print in the note.
- * @param rawEnergy Raw energy of the cluster
+
+ /**
+ * Calculates the energy correction to a cluster given the variables from
+ * the fit as per <a href=
+ * "https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014"
+ * >HPS Note 2014-001</a> Note that this is correct as there is a typo in
+ * the formula print in the note.
+ *
+ * @param rawEnergy
+ * Raw energy of the cluster
* @param A,B,C from fitting in note
* @return Corrected Energy
- */
- private static double computeCorrectedEnergy(double rawEnergy, double varA, double varB, double varC){
- double corrEnergy = rawEnergy / (varA / rawEnergy + varB / (Math.sqrt(rawEnergy)) + varC);
+ */
+ private static double computeCorrectedEnergy(double y, double rawEnergy,
+ double varA, double varB[], double varC[]) {
+ int ii = y < varB[0] ? 2 : 5;
+ double corrEnergy = rawEnergy/ (varA / rawEnergy+ (varB[1] - varB[ii]* Math.exp(-(y - varB[ii + 1]) * varB[ii + 2]))/ (Math.sqrt(rawEnergy)) +
+ (varC[1] - varC[ii]* Math.exp(-(y - varC[ii + 1]) * varC[ii + 2])));
return corrEnergy;
- }
+ }
}
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterPositionCorrection.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterPositionCorrection.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterPositionCorrection.java Wed Apr 27 11:11:32 2016
@@ -12,30 +12,29 @@
* @author Jeremy McCormick <[log in to unmask]>
*/
public final class ClusterPositionCorrection {
-
+ //Parameterizations tested in MC using v3-fieldmap
+ //Nov 2015
+
// Variables for electron position corrections.
- static final double ELECTRON_POS_A1 = -0.0005813;//0.0066;
- static final double ELECTRON_POS_A2 = 0.005738;//-0.03;
- static final double ELECTRON_POS_A3 = -0.0309;
- static final double ELECTRON_POS_B1 = 0.02963;//0.028;
- static final double ELECTRON_POS_B2 = -4.289;//-0.451;
- static final double ELECTRON_POS_B3 = 4.596;//0.465;
+ static final double ELECTRON_POS_A1 = 0.004483;
+ static final double ELECTRON_POS_A2 = -0.02884;
+ static final double ELECTRON_POS_B1 = 0.6197;
+ static final double ELECTRON_POS_B2 = -2.279;
+ static final double ELECTRON_POS_B3 = 3.66;
// Variables for positron position corrections.
- static final double POSITRON_POS_A1 = -0.0006243;//0.0072;
- static final double POSITRON_POS_A2 = 0.006799;//-0.031;
- static final double POSITRON_POS_A3 = -0.03141;
- static final double POSITRON_POS_B1 = 0.0869;//0.007;
- static final double POSITRON_POS_B2 = 2.965;//0.342;
- static final double POSITRON_POS_B3 = 1.653;//0.108;
+ static final double POSITRON_POS_A1 = 0.006887;
+ static final double POSITRON_POS_A2 = -0.03207;
+ static final double POSITRON_POS_B1 = -0.8048;
+ static final double POSITRON_POS_B2 = 0.9366;
+ static final double POSITRON_POS_B3 = 2.628;
// Variables for photon position corrections.
- static final double PHOTON_POS_A1 = -0.0006329;//0.005;
- static final double PHOTON_POS_A2 = 0.00595;//-0.032;
- static final double PHOTON_POS_A3 = -0.03563;
- static final double PHOTON_POS_B1 = 0.06444;//0.011;
- static final double PHOTON_POS_B2 = -0.5836;//-0.037;
- static final double PHOTON_POS_B3 = 3.508;//0.294;
+ static final double PHOTON_POS_A1 = 0.005385;
+ static final double PHOTON_POS_A2 = -0.03562;
+ static final double PHOTON_POS_B1 = -0.1948;
+ static final double PHOTON_POS_B2 = -0.7991;
+ static final double PHOTON_POS_B3 = 3.797;
public static double[] calculateCorrectedPosition(Cluster cluster) {
@@ -59,20 +58,20 @@
* @param pdg Particle id as per PDG
* @param xCl Calculated x centroid position of the cluster, uncorrected, at face
* @param rawEnergy Raw energy of the cluster (sum of hits with shared hit distribution)
- * @return Corrected x position
+ * @return the corrected x position
*/
private static double computeCorrectedPosition(int pdg, double xPos, double rawEnergy) {
//double xCl = xPos / 10.0;//convert to cm
double xCorr;
switch(pdg) {
case 11: //Particle is electron
- xCorr = positionCorrection(xPos, rawEnergy, ELECTRON_POS_A1, ELECTRON_POS_A2, ELECTRON_POS_A3, ELECTRON_POS_B1, ELECTRON_POS_B2, ELECTRON_POS_B3);
+ xCorr = positionCorrection(xPos, rawEnergy, ELECTRON_POS_A1, ELECTRON_POS_A2, ELECTRON_POS_B1, ELECTRON_POS_B2, ELECTRON_POS_B3);
return xCorr;
case -11:// Particle is positron
- xCorr = positionCorrection(xPos, rawEnergy, POSITRON_POS_A1, POSITRON_POS_A2, POSITRON_POS_A3, POSITRON_POS_B1, POSITRON_POS_B2, POSITRON_POS_B3);
+ xCorr = positionCorrection(xPos, rawEnergy, POSITRON_POS_A1, POSITRON_POS_A2, POSITRON_POS_B1, POSITRON_POS_B2, POSITRON_POS_B3);
return xCorr;
case 22: // Particle is photon
- xCorr = positionCorrection(xPos, rawEnergy, PHOTON_POS_A1, PHOTON_POS_A2, PHOTON_POS_A3, PHOTON_POS_B1, PHOTON_POS_B2, PHOTON_POS_B3);
+ xCorr = positionCorrection(xPos, rawEnergy, PHOTON_POS_A1, PHOTON_POS_A2, PHOTON_POS_B1, PHOTON_POS_B2, PHOTON_POS_B3);
return xCorr;
default: //Unknown
xCorr = xPos;
@@ -92,10 +91,10 @@
* @param varB1
* @param varB2
* @param varB3
- * @return
+ * @return the cluster position correction
*/
- private static double positionCorrection(double xCl, double rawEnergy, double varA1, double varA2, double varA3, double varB1, double varB2, double varB3) {
+ private static double positionCorrection(double xCl, double rawEnergy, double varA1, double varA2, double varB1, double varB2, double varB3) {
//return ((xCl - (varB1 * rawEnergy + varB2 / Math.sqrt(rawEnergy) + varB3))/(varA1 / Math.sqrt(rawEnergy) + varA2 + 1));
- return ((xCl - (varB1 * rawEnergy + varB2 / Math.sqrt(rawEnergy) + varB3))/(varA1 * rawEnergy + varA2 / Math.sqrt(rawEnergy) + varA3 + 1));
+ return ((xCl - (varB1 * rawEnergy + varB2 / Math.sqrt(rawEnergy) + varB3))/(varA1 / Math.sqrt(rawEnergy) + varA2 + 1));
}
-}
+}
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterUtilities.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterUtilities.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterUtilities.java Wed Apr 27 11:11:32 2016
@@ -199,7 +199,7 @@
/**
* Find the unique set of MCParticles that are referenced by the hits of the Cluster.
- * @param clusters The input Cluster.
+ * @param cluster The input Cluster.
* @return The set of unique MCParticles.
*/
public static Set<MCParticle> findMCParticles(Cluster cluster) {
@@ -334,7 +334,7 @@
* Apply HPS-specific energy and position corrections to a list of clusters in place.
* @param clusters The list of clusters.
*/
- public static void applyCorrections(List<Cluster> clusters) {
+ public static void applyCorrections(HPSEcal3 ecal, List<Cluster> clusters) {
// Loop over the clusters.
for (Cluster cluster : clusters) {
@@ -347,16 +347,16 @@
ClusterPositionCorrection.setCorrectedPosition(baseCluster);
// Apply PID based energy correction.
- ClusterEnergyCorrection.setCorrectedEnergy(baseCluster);
- }
- }
- }
-
- /**
- * Apply HPS-specific energy and position corrections to a cluster.
- * @param cluster The input cluster.
- */
- public static void applyCorrections(Cluster cluster) {
+ ClusterEnergyCorrection.setCorrectedEnergy(ecal, baseCluster);
+ }
+ }
+ }
+
+ /**
+ * Apply HPS-specific energy and position corrections to a cluster without track information.
+ * @param cluster The input cluster.
+ */
+ public static void applyCorrections(HPSEcal3 ecal, Cluster cluster) {
if (cluster instanceof BaseCluster) {
@@ -366,7 +366,25 @@
ClusterPositionCorrection.setCorrectedPosition(baseCluster);
// Apply PID based energy correction.
- ClusterEnergyCorrection.setCorrectedEnergy(baseCluster);
+ ClusterEnergyCorrection.setCorrectedEnergy(ecal, baseCluster);
+ }
+ }
+
+ /**
+ * Apply HPS-specific energy and position corrections to a cluster with track information.
+ * @param cluster The input cluster.
+ */
+ public static void applyCorrections(HPSEcal3 ecal, Cluster cluster, double ypos) {
+
+ if (cluster instanceof BaseCluster) {
+
+ BaseCluster baseCluster = (BaseCluster)cluster;
+
+ // Apply PID based position correction, which should happen before final energy correction.
+ ClusterPositionCorrection.setCorrectedPosition(baseCluster);
+
+ // Apply PID based energy correction.
+ ClusterEnergyCorrection.setCorrectedEnergy(ecal, baseCluster, ypos);
}
}
@@ -410,7 +428,7 @@
/**
* Get the set of hits from a list of clusters.
- * @param The input cluster list.
+ * @param clusters The input cluster list.
* @return The list of hits from all the clusters.
*/
public static Set<CalorimeterHit> getHits(List<Cluster> clusters) {
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClustererFactory.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClustererFactory.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClustererFactory.java Wed Apr 27 11:11:32 2016
@@ -38,7 +38,7 @@
* @param name The name of the clustering algorithm.
* @param cuts The set of cuts (can be null).
* @return The clustering algorithm.
- * @throw IllegalArgumentException if there is no Clusterer found with name.
+ * @throws IllegalArgumentException if there is no Clusterer found with name.
*/
public static Clusterer create(String name, double[] cuts) {
Clusterer clusterer;
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/CopyClusterCollectionDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/CopyClusterCollectionDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/CopyClusterCollectionDriver.java Wed Apr 27 11:11:32 2016
@@ -111,8 +111,8 @@
/**
* Set to <code>true</code> to store hits in the output clusters.
- *
- * @return <code>true</code> to store hits in the output clusters
+ *
+ * @param storeHits <code>true</code> to store hits; <code>false</code> to not store hits
*/
public void setStoreHits(final boolean storeHits) {
this.storeHits = storeHits;
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPClusterDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPClusterDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPClusterDriver.java Wed Apr 27 11:11:32 2016
@@ -22,8 +22,8 @@
* @see GTPClusterer
*/
public class GTPClusterDriver extends ClusterDriver {
- /** An instance of the clustering algorithm object for producing
- * cluster objects. */
+ /** An instance of the clustering algorithm object for producing
+ * cluster objects. */
private final GTPClusterer gtp;
/**
@@ -112,11 +112,11 @@
*/
@Override
public void setWriteClusterCollection(boolean state) {
- // Set the flag as appropriate with the superclass.
- super.setWriteClusterCollection(state);
-
- // Also tell the clusterer whether it should persist its hit
- // collection or not.
- gtp.setWriteHitCollection(state);
+ // Set the flag as appropriate with the superclass.
+ super.setWriteClusterCollection(state);
+
+ // Also tell the clusterer whether it should persist its hit
+ // collection or not.
+ gtp.setWriteHitCollection(state);
}
}
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPClusterer.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPClusterer.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPClusterer.java Wed Apr 27 11:11:32 2016
@@ -124,7 +124,7 @@
* forms a collection of <code>Cluster</code> objects according to
* the GTP clustering algorithm.
* @param event - The object containing event data.
- * @param hitList - A list of <code>CalorimeterHit</code> objects
+ * @param hits - A list of <code>CalorimeterHit</code> objects
* from which clusters should be formed.
*/
public List<Cluster> createClusters(EventHeader event, List<CalorimeterHit> hits) {
@@ -146,24 +146,24 @@
// stored in LCIO format properly, it needs to separately store
// its clusters' hits in a collection.
if(writeHitCollection) {
- // Create a set to store the hits so that each one may be
- // stored only once.
- Set<CalorimeterHit> hitSet = new HashSet<CalorimeterHit>();
-
- // Loop over all clusters and add their hits to the set.
- for(Cluster cluster : clusterList) {
- for(CalorimeterHit hit : cluster.getCalorimeterHits()) {
- hitSet.add(hit);
- }
- }
-
- // Convert the set into a List object so that it can be stored
- // in LCIO.
- List<CalorimeterHit> clusterHits = new ArrayList<CalorimeterHit>(hitSet.size());
- clusterHits.addAll(hitSet);
-
- // Place the list of hits into the event stream.
- event.put("GTPHits", hits, CalorimeterHit.class, 0);
+ // Create a set to store the hits so that each one may be
+ // stored only once.
+ Set<CalorimeterHit> hitSet = new HashSet<CalorimeterHit>();
+
+ // Loop over all clusters and add their hits to the set.
+ for(Cluster cluster : clusterList) {
+ for(CalorimeterHit hit : cluster.getCalorimeterHits()) {
+ hitSet.add(hit);
+ }
+ }
+
+ // Convert the set into a List object so that it can be stored
+ // in LCIO.
+ List<CalorimeterHit> clusterHits = new ArrayList<CalorimeterHit>(hitSet.size());
+ clusterHits.addAll(hitSet);
+
+ // Place the list of hits into the event stream.
+ event.put("GTPHits", hits, CalorimeterHit.class, 0);
}
// Return the clusters.
@@ -260,7 +260,7 @@
* should not.
*/
void setVerbose(boolean verbose) {
- this.verbose = verbose;
+ this.verbose = verbose;
}
/**
@@ -271,7 +271,7 @@
* persisted and <code>false</code> that they will not.
*/
void setWriteHitCollection(boolean state) {
- writeHitCollection = state;
+ writeHitCollection = state;
}
/**
@@ -291,7 +291,7 @@
// VERBOSE :: Print the cluster window.
if(verbose) {
- // Print the event header.
+ // Print the event header.
System.out.printf("%n%nEvent:%n");
// Calculate some constants.
@@ -304,9 +304,9 @@
CalorimeterHit hit = null;
for(Entry<Long, CalorimeterHit> entry : bufferMap.entrySet()) {
- hit = entry.getValue();
- System.out.printf("\t(%3d, %3d) --> %.4f (%.4f)%n", hit.getIdentifierFieldValue("ix"),
- hit.getIdentifierFieldValue("iy"), hit.getCorrectedEnergy(), hit.getRawEnergy());
+ hit = entry.getValue();
+ System.out.printf("\t(%3d, %3d) --> %.4f (%.4f)%n", hit.getIdentifierFieldValue("ix"),
+ hit.getIdentifierFieldValue("iy"), hit.getCorrectedEnergy(), hit.getRawEnergy());
}
bufferNum++;
@@ -327,8 +327,8 @@
// VERBOSE :: Print the current cluster.
if(verbose) {
System.out.printf("Cluster Check:%n");
- System.out.printf("\t(%3d, %3d) --> %.4f%n", currentHit.getIdentifierFieldValue("ix"),
- currentHit.getIdentifierFieldValue("iy"), currentHit.getCorrectedEnergy());
+ System.out.printf("\t(%3d, %3d) --> %.4f%n", currentHit.getIdentifierFieldValue("ix"),
+ currentHit.getIdentifierFieldValue("iy"), currentHit.getCorrectedEnergy());
}
// Store the energy of the current hit.
@@ -337,11 +337,11 @@
// If the hit energy is lower than the minimum threshold,
// then we immediately reject this hit as a possible cluster.
if (currentEnergy < seedEnergyThreshold) {
- // VERBOSE :: Note the reason the potential seed was
- // rejected.
- if(verbose) { System.out.printf("\tREJECT :: Does not exceed seed threshold %.4f.%n", seedEnergyThreshold); }
-
- // Skip to the next potential seed.
+ // VERBOSE :: Note the reason the potential seed was
+ // rejected.
+ if(verbose) { System.out.printf("\tREJECT :: Does not exceed seed threshold %.4f.%n", seedEnergyThreshold); }
+
+ // Skip to the next potential seed.
continue seedLoop;
}
@@ -368,16 +368,16 @@
// is larger than then original hit. If it is, we may
// stop the comparison because this is not a cluster.
if (bufferHitEnergy > currentEnergy) {
- // VERBOSE :: Output the reason the potential
- // seed was rejected along with the
- // hit that caused it.
- if(verbose) {
- System.out.printf("\tREJECT :: Buffer hit surpasses hit energy.");
- System.out.printf("\tBUFFER HIT :: (%3d, %3d) --> %.4f%n", bufferHit.getIdentifierFieldValue("ix"),
- bufferHit.getIdentifierFieldValue("iy"), bufferHit.getCorrectedEnergy(), bufferHit.getRawEnergy());
- }
-
- // Skip to the next potential seed.
+ // VERBOSE :: Output the reason the potential
+ // seed was rejected along with the
+ // hit that caused it.
+ if(verbose) {
+ System.out.printf("\tREJECT :: Buffer hit surpasses hit energy.");
+ System.out.printf("\tBUFFER HIT :: (%3d, %3d) --> %.4f%n", bufferHit.getIdentifierFieldValue("ix"),
+ bufferHit.getIdentifierFieldValue("iy"), bufferHit.getCorrectedEnergy(), bufferHit.getRawEnergy());
+ }
+
+ // Skip to the next potential seed.
continue seedLoop;
}
@@ -403,16 +403,16 @@
// If it is, we may stop the comparison because this
// is not a cluster.
if (neighborHitEnergy > currentEnergy) {
- // VERBOSE :: Output the reason the potential
- // seed was rejected along with the
- // hit that caused it.
- if(verbose) {
- System.out.printf("\tREJECT :: Buffer hit surpasses hit energy.%n");
- System.out.printf("\tBUFFER HIT :: (%3d, %3d) --> %.4f%n", neighborHit.getIdentifierFieldValue("ix"),
- neighborHit.getIdentifierFieldValue("iy"), neighborHit.getCorrectedEnergy(), neighborHit.getRawEnergy());
- }
-
- // Skip to the next potential seed.
+ // VERBOSE :: Output the reason the potential
+ // seed was rejected along with the
+ // hit that caused it.
+ if(verbose) {
+ System.out.printf("\tREJECT :: Buffer hit surpasses hit energy.%n");
+ System.out.printf("\tBUFFER HIT :: (%3d, %3d) --> %.4f%n", neighborHit.getIdentifierFieldValue("ix"),
+ neighborHit.getIdentifierFieldValue("iy"), neighborHit.getCorrectedEnergy(), neighborHit.getRawEnergy());
+ }
+
+ // Skip to the next potential seed.
continue seedLoop;
}
@@ -436,10 +436,10 @@
if(verbose) {
System.out.printf("Cluster added.%n");
System.out.printf("\t(%3d, %3d) --> %.4f GeV --> %d hits%n", cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix"),
- cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy"), cluster.getEnergy(), cluster.getCalorimeterHits().size());
+ cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy"), cluster.getEnergy(), cluster.getCalorimeterHits().size());
for(CalorimeterHit hit : cluster.getCalorimeterHits()) {
- System.out.printf("\t\tCLUSTER HIT :: (%3d, %3d) --> %.4f%n", hit.getIdentifierFieldValue("ix"),
- hit.getIdentifierFieldValue("iy"), hit.getCorrectedEnergy(), hit.getRawEnergy());
+ System.out.printf("\t\tCLUSTER HIT :: (%3d, %3d) --> %.4f%n", hit.getIdentifierFieldValue("ix"),
+ hit.getIdentifierFieldValue("iy"), hit.getCorrectedEnergy(), hit.getRawEnergy());
}
}
}
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPOnlineClusterDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPOnlineClusterDriver.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPOnlineClusterDriver.java Wed Apr 27 11:11:32 2016
@@ -39,8 +39,8 @@
* @see org.hps.record.daqconfig.DAQConfigDriver
*/
public class GTPOnlineClusterDriver extends ClusterDriver {
- /** An instance of the clustering algorithm object for producing
- * cluster objects. */
+ /** An instance of the clustering algorithm object for producing
+ * cluster objects. */
private final GTPOnlineClusterer gtp;
/** Indicates whether the <code>ConfigurationManager</code> object
* should be used for clustering settings or not. */
@@ -53,29 +53,29 @@
* @see GTPOnlineClusterer
*/
public GTPOnlineClusterDriver() {
- // Instantiate the clusterer.
+ // Instantiate the clusterer.
clusterer = ClustererFactory.create("GTPOnlineClusterer");
gtp = (GTPOnlineClusterer) clusterer;
// Track the DAQ configuration status.
ConfigurationManager.addActionListener(new ActionListener() {
- @Override
- public void actionPerformed(ActionEvent e) {
- // If DAQ configuration settings should be used, then
- // update the clusterer.
- if(useDAQConfig) {
- // Get the GTP settings.
- GTPConfig config = ConfigurationManager.getInstance().getGTPConfig();
-
- // Send the DAQ configuration settings to the clusterer.
- gtp.setSeedLowThreshold(config.getSeedEnergyCutConfig().getLowerBound());
- gtp.setWindowAfter(config.getTimeWindowAfter());
- gtp.setWindowBefore(config.getTimeWindowBefore());
-
- // Print the updated settings.
- logSettings();
- }
- }
+ @Override
+ public void actionPerformed(ActionEvent e) {
+ // If DAQ configuration settings should be used, then
+ // update the clusterer.
+ if(useDAQConfig) {
+ // Get the GTP settings.
+ GTPConfig config = ConfigurationManager.getInstance().getGTPConfig();
+
+ // Send the DAQ configuration settings to the clusterer.
+ gtp.setSeedLowThreshold(config.getSeedEnergyCutConfig().getLowerBound());
+ gtp.setWindowAfter(config.getTimeWindowAfter());
+ gtp.setWindowBefore(config.getTimeWindowBefore());
+
+ // Print the updated settings.
+ logSettings();
+ }
+ }
});
}
@@ -90,11 +90,11 @@
*/
@Override
public void process(EventHeader event) {
- // Only process an event if either the DAQ configuration is not
- // in use or if it has been initialized.
- if((useDAQConfig && ConfigurationManager.isInitialized()) || !useDAQConfig) {
- super.process(event);
- }
+ // Only process an event if either the DAQ configuration is not
+ // in use or if it has been initialized.
+ if((useDAQConfig && ConfigurationManager.isInitialized()) || !useDAQConfig) {
+ super.process(event);
+ }
}
/**
@@ -103,8 +103,8 @@
*/
@Override
public void startOfData() {
- // VERBOSE :: Output the driver settings.
- if(gtp.isVerbose()) { logSettings(); }
+ // VERBOSE :: Output the driver settings.
+ if(gtp.isVerbose()) { logSettings(); }
}
/**
@@ -168,23 +168,23 @@
* @see org.hps.record.daqconfig.DAQConfigDriver
*/
public void setUseDAQConfig(boolean state) {
- useDAQConfig = state;
+ useDAQConfig = state;
}
/**
* Outputs the current GTP settings to the terminal.
*/
private void logSettings() {
- // Print the cluster driver header.
- System.out.println();
- System.out.println();
- System.out.println("======================================================================");
- System.out.println("=== GTP Readout Clusterer Settings ===================================");
- System.out.println("======================================================================");
-
- // Output the driver settings.
- System.out.printf("Seed Energy Threshold :: %.3f GeV%n", gtp.getSeedLowThreshold());
- System.out.printf("Time Window (Before) :: %.0f ns%n", gtp.getWindowBefore());
- System.out.printf("Time Window (After) :: %.0f ns%n", gtp.getWindowAfter());
+ // Print the cluster driver header.
+ System.out.println();
+ System.out.println();
+ System.out.println("======================================================================");
+ System.out.println("=== GTP Readout Clusterer Settings ===================================");
+ System.out.println("======================================================================");
+
+ // Output the driver settings.
+ System.out.printf("Seed Energy Threshold :: %.3f GeV%n", gtp.getSeedLowThreshold());
+ System.out.printf("Time Window (Before) :: %.0f ns%n", gtp.getWindowBefore());
+ System.out.printf("Time Window (After) :: %.0f ns%n", gtp.getWindowAfter());
}
}
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPOnlineClusterer.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPOnlineClusterer.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPOnlineClusterer.java Wed Apr 27 11:11:32 2016
@@ -69,10 +69,10 @@
* @see GTPClusterer
*/
public class GTPOnlineClusterer extends AbstractClusterer {
- /**
- * The length of the temporal window for inclusing clusters that
- * occur before the seed hit.
- */
+ /**
+ * The length of the temporal window for inclusing clusters that
+ * occur before the seed hit.
+ */
private double timeBefore = 4;
/**
@@ -102,7 +102,7 @@
// Diagnostic plots.
private AIDA aida = AIDA.defaultInstance();
- private IHistogram1D hitEnergy = aida.histogram1D("GTP(O) Cluster Plot/Hit Energy Distribution", 256, -1.0, 2.2);
+ private IHistogram1D hitEnergy = aida.histogram1D("GTP(O) Cluster Plots/Hit Energy Distribution", 256, -1.0, 2.2);
private IHistogram1D clusterSeedEnergy = aida.histogram1D("GTP(O) Cluster Plots/Cluster Seed Energy Distribution", 176, 0.0, 2.2);
private IHistogram1D clusterHitCount = aida.histogram1D("GTP(O) Cluster Plots/Cluster Hit Count Distribution", 9, 1, 10);
private IHistogram1D clusterTotalEnergy = aida.histogram1D("GTP(O) Cluster Plots/Cluster Total Energy Distribution", 176, 0.0, 2.2);
@@ -128,29 +128,29 @@
*/
@Override
public List<Cluster> createClusters(EventHeader event, List<CalorimeterHit> hitList) {
- // VERBOSE :: Print the driver header.
- if(verbose) {
- System.out.println();
- System.out.println();
- System.out.println("======================================================================");
- System.out.println("=== GTP Readout Clusterer ============================================");
- System.out.println("======================================================================");
-
- // Sort the hits by x-index and then by y-index.
- Collections.sort(hitList, new Comparator<CalorimeterHit>() {
- @Override
- public int compare(CalorimeterHit firstHit, CalorimeterHit secondHit) {
- int[] ix = { firstHit.getIdentifierFieldValue("ix"), secondHit.getIdentifierFieldValue("ix") };
- if(ix[0] != ix[1]) { return Integer.compare(ix[0], ix[1]); }
- else {
- int iy[] = { firstHit.getIdentifierFieldValue("iy"), secondHit.getIdentifierFieldValue("iy") };
- return Integer.compare(iy[0], iy[1]);
- }
- }
- });
-
- // Print the hit collection.
- System.out.println("Event Hit Collection:");
+ // VERBOSE :: Print the driver header.
+ if(verbose) {
+ System.out.println();
+ System.out.println();
+ System.out.println("======================================================================");
+ System.out.println("=== GTP Readout Clusterer ============================================");
+ System.out.println("======================================================================");
+
+ // Sort the hits by x-index and then by y-index.
+ Collections.sort(hitList, new Comparator<CalorimeterHit>() {
+ @Override
+ public int compare(CalorimeterHit firstHit, CalorimeterHit secondHit) {
+ int[] ix = { firstHit.getIdentifierFieldValue("ix"), secondHit.getIdentifierFieldValue("ix") };
+ if(ix[0] != ix[1]) { return Integer.compare(ix[0], ix[1]); }
+ else {
+ int iy[] = { firstHit.getIdentifierFieldValue("iy"), secondHit.getIdentifierFieldValue("iy") };
+ return Integer.compare(iy[0], iy[1]);
+ }
+ }
+ });
+
+ // Print the hit collection.
+ System.out.println("Event Hit Collection:");
for(CalorimeterHit hit : hitList) {
int ix = hit.getIdentifierFieldValue("ix");
int iy = hit.getIdentifierFieldValue("iy");
@@ -161,7 +161,7 @@
}
System.out.println();
}
-
+
// Track the valid clusters.
List<Cluster> clusterList = new ArrayList<Cluster>();
@@ -183,10 +183,10 @@
// Iterate over each hit and see if it qualifies as a seed hit.
seedLoop:
for(CalorimeterHit seed : hitList) {
- // Put the hit energy into the hit energy distribution.
- hitEnergy.fill(seed.getCorrectedEnergy());
- hitDistribution.fill(seed.getIdentifierFieldValue("ix"), seed.getIdentifierFieldValue("iy"));
-
+ // Put the hit energy into the hit energy distribution.
+ hitEnergy.fill(seed.getCorrectedEnergy());
+ hitDistribution.fill(seed.getIdentifierFieldValue("ix"), seed.getIdentifierFieldValue("iy"));
+
// Check whether the potential seed passes the seed
// energy cut.
if(seed.getCorrectedEnergy() < seedThreshold) {
@@ -204,16 +204,16 @@
// energies.
hitLoop:
for(CalorimeterHit hit : hitList) {
- // Negative energy hits are never valid. Skip them.
- if(hit.getCorrectedEnergy() < 0) {
- continue hitLoop;
- }
-
- // Do not compare the potential seed hit to itself.
- if(hit == seed) {
- continue hitLoop;
- }
-
+ // Negative energy hits are never valid. Skip them.
+ if(hit.getCorrectedEnergy() < 0) {
+ continue hitLoop;
+ }
+
+ // Do not compare the potential seed hit to itself.
+ if(hit == seed) {
+ continue hitLoop;
+ }
+
// Check if the hit is within the spatiotemporal
// clustering window.
if(withinTimeVerificationWindow(seed, hit) && withinSpatialWindow(seed, hit)) {
@@ -246,25 +246,25 @@
clusterTotalEnergy.fill(protoCluster.getEnergy());
clusterHitCount.fill(protoCluster.getCalorimeterHits().size());
clusterDistribution.fill(protoCluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix"),
- protoCluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy"));
+ protoCluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy"));
// Determine how much energy in the cluster is negative
// and how is positive.
double nenergy = 0.0;
double penergy = 0.0;
for(CalorimeterHit hit : protoCluster.getCalorimeterHits()) {
- if(hit.getCorrectedEnergy() > 0) { penergy += hit.getCorrectedEnergy(); }
- else { nenergy += hit.getCorrectedEnergy(); }
+ if(hit.getCorrectedEnergy() > 0) { penergy += hit.getCorrectedEnergy(); }
+ else { nenergy += hit.getCorrectedEnergy(); }
}
energyDistribution.fill(Math.abs(nenergy) / (penergy + Math.abs(nenergy)));
}
// VERBOSE :: Print out all the clusters in the event.
if(verbose) {
- // Print the clusters.
- System.out.println("Event Cluster Collection:");
+ // Print the clusters.
+ System.out.println("Event Cluster Collection:");
for(Cluster cluster : clusterList) {
- // Output basic cluster positional and energy data.
+ // Output basic cluster positional and energy data.
CalorimeterHit seedHit = cluster.getCalorimeterHits().get(0);
int ix = seedHit.getIdentifierFieldValue("ix");
int iy = seedHit.getIdentifierFieldValue("iy");
@@ -281,7 +281,7 @@
System.out.printf("\t\tCompHit --> %.3f GeV at (%3d, %3d) and at t = %.2f%n", henergy, hix, hiy, htime);
}
}
- System.out.println();
+ System.out.println();
}
// VERBOSE :: Print a new line.
@@ -366,11 +366,11 @@
* the seed hit in clock cycles.
*/
void setWindowBefore(int cyclesBefore) {
- // The cluster window can not be negative.
- if(cyclesBefore < 0) { cyclesBefore = 0; }
-
- // Convert the window to nanoseconds and set the two time
- // windows appropriately.
+ // The cluster window can not be negative.
+ if(cyclesBefore < 0) { cyclesBefore = 0; }
+
+ // Convert the window to nanoseconds and set the two time
+ // windows appropriately.
timeBefore = cyclesBefore * 4;
timeWindow = Math.max(timeBefore, timeAfter);
}
@@ -384,11 +384,11 @@
* the seed hit in clock cycles.
*/
void setWindowAfter(int cyclesAfter) {
- // The cluster window can not be negative.
- if(cyclesAfter < 0) { cyclesAfter = 0; }
-
- // Convert the window to nanoseconds and set the two time
- // windows appropriately.
+ // The cluster window can not be negative.
+ if(cyclesAfter < 0) { cyclesAfter = 0; }
+
+ // Convert the window to nanoseconds and set the two time
+ // windows appropriately.
timeAfter = cyclesAfter * 4;
timeWindow = Math.max(timeBefore, timeAfter);
}
@@ -494,9 +494,9 @@
// considered to be adjacent to ix = -1 rather than the
// expected ix = 0. (ix = 0 does not exist.)
else {
- // ix = -1 is adjacent to ix = 1 and vice versa.
+ // ix = -1 is adjacent to ix = 1 and vice versa.
if((six == -1 && hix == 1) || (six == 1 && hix == -1)) {
- return true;
+ return true;
}
// Any other combination that reaches this point is not
@@ -561,4 +561,4 @@
// treated as within time.
else { return false; }
}
-}
+}
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ReconClusterer.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ReconClusterer.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ReconClusterer.java Wed Apr 27 11:11:32 2016
@@ -289,7 +289,7 @@
// If the neighboring crystal exists and is not already
// in a cluster, add it to the list of neighboring hits.
if (secondaryNeighborHit != null && !hitToSeed.containsKey(secondaryNeighborHit)
- && hitList.contains(secondaryNeighborHit)) {
+ && hitList.contains(secondaryNeighborHit)) {
secondaryNeighborHits.add(secondaryNeighborHit);
}
}
@@ -325,7 +325,7 @@
// If it exists, add it to the neighboring hit list.
if (clusteredNeighborHit != null && hitToSeed.get(clusteredNeighborHit) != null
- && hitList.contains(clusteredNeighborHit)) {
+ && hitList.contains(clusteredNeighborHit)) {
clusteredNeighborHits.add(clusteredNeighborHit);
}
}
@@ -383,32 +383,32 @@
// Consider time cut-is this hit in same time window as seed?
if (useTimeCut){
- if(Math.abs(ihit.getTime() - iseed.getTime()) < timeWindow)
- {
- icluster.addHit(ihit);
- }
+ if(Math.abs(ihit.getTime() - iseed.getTime()) < timeWindow)
+ {
+ icluster.addHit(ihit);
+ }
} // end of using time cut
else {icluster.addHit(ihit);}
}
// Add common hits
for (Map.Entry<CalorimeterHit, List<CalorimeterHit>> commHit : commonHits.entrySet()) {
- // Check that the common hit is in both time windows to their clusters
- CalorimeterHit seedA = commHit.getValue().get(0);
+ // Check that the common hit is in both time windows to their clusters
+ CalorimeterHit seedA = commHit.getValue().get(0);
CalorimeterHit seedB = commHit.getValue().get(1);
-
+
boolean inTimeWithA = false;
boolean inTimeWithB = false;
- // In time window with seedA?
+ // In time window with seedA?
if (Math.abs(commHit.getKey().getTime() - seedA.getTime()) < timeWindow){
- inTimeWithA = true;
+ inTimeWithA = true;
}
// In time window with seedB?
if (Math.abs(commHit.getKey().getTime() - seedB.getTime()) < timeWindow){
- inTimeWithB = true;
- }
-
+ inTimeWithB = true;
+ }
+
double eclusterA = seedToCluster.get(seedA).getEnergy();
double eclusterB = seedToCluster.get(seedB).getEnergy();
double fractionA = eclusterA / (eclusterA + eclusterB);
@@ -420,25 +420,25 @@
BaseCluster clusterB = seedToCluster.get(seedB);
if (useTimeCut){
- // Do this if the hit is in both cluster's windows
- if (inTimeWithA && inTimeWithB){
- clusterA.addHit(commHit.getKey(), hitcontributionA);
- clusterB.addHit(commHit.getKey(), hitcontributionB);
- }
+ // Do this if the hit is in both cluster's windows
+ if (inTimeWithA && inTimeWithB){
+ clusterA.addHit(commHit.getKey(), hitcontributionA);
+ clusterB.addHit(commHit.getKey(), hitcontributionB);
+ }
- //If the hit is only in 1 cluster's window, add the full contribution
- else if(inTimeWithA ^ inTimeWithB){
- if(inTimeWithA){
- clusterA.addHit(commHit.getKey());
- }
- else{
- clusterB.addHit(commHit.getKey());
- }
- }
+ //If the hit is only in 1 cluster's window, add the full contribution
+ else if(inTimeWithA ^ inTimeWithB){
+ if(inTimeWithA){
+ clusterA.addHit(commHit.getKey());
+ }
+ else{
+ clusterB.addHit(commHit.getKey());
+ }
+ }
} // end of using time cut
else{
- clusterA.addHit(commHit.getKey(), hitcontributionA);
- clusterB.addHit(commHit.getKey(), hitcontributionB);
+ clusterA.addHit(commHit.getKey(), hitcontributionA);
+ clusterB.addHit(commHit.getKey(), hitcontributionB);
}
}
Modified: java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/SimpleCosmicClusterer.java
=============================================================================
--- java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/SimpleCosmicClusterer.java (original)
+++ java/branches/HPSJAVA-409/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/SimpleCosmicClusterer.java Wed Apr 27 11:11:32 2016
@@ -125,9 +125,8 @@
}
/**
- * This method takes a list of potential cluster hits and applies selection cuts,
- * returning a new list that has the hit lists which did not pass the cuts removed.
- * @param clusteredHitLists The input hit lists.
+ * Apply selection cuts to cluster list and return filtered list.
+ * @param clusterList The input hit lists.
* @return The hit lists that passed the cuts.
*/
protected List<Cluster> applyCuts(List<Cluster> clusterList) {
Modified: java/branches/HPSJAVA-409/evio/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/evio/pom.xml (original)
+++ java/branches/HPSJAVA-409/evio/pom.xml Wed Apr 27 11:11:32 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/evio/</url>
Modified: java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/AbstractSvtEvioReader.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/AbstractSvtEvioReader.java (original)
+++ java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/AbstractSvtEvioReader.java Wed Apr 27 11:11:32 2016
@@ -12,12 +12,7 @@
import org.hps.record.svt.SvtEvioExceptions.SvtEvioReaderException;
import org.hps.util.Pair;
import org.jlab.coda.jevio.BaseStructure;
-import org.jlab.coda.jevio.DataType;
import org.jlab.coda.jevio.EvioEvent;
-import org.jlab.coda.jevio.IEvioFilter;
-import org.jlab.coda.jevio.IEvioStructure;
-import org.jlab.coda.jevio.StructureFinder;
-import org.jlab.coda.jevio.StructureType;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.event.EventHeader;
import org.lcsim.event.RawTrackerHit;
@@ -31,7 +26,6 @@
*
* @author Omar Moreno <[log in to unmask]>
* @author Per Hansson Adrian <[log in to unmask]>
- * @date November 20, 2014
*
*/
public abstract class AbstractSvtEvioReader extends EvioReader {
@@ -39,7 +33,7 @@
public static final String SVT_HEADER_COLLECTION_NAME = "SvtHeaders";
// Initialize the logger
- public static Logger LOGGER = Logger.getLogger(AbstractSvtEvioReader.class.getPackage().getName());
+ public static final Logger LOGGER = Logger.getLogger(AbstractSvtEvioReader.class.getPackage().getName());
// A Map from DAQ pair (FPGA/Hybrid or FEB ID/FEB Hybrid ID) to the
// corresponding sensor
@@ -47,16 +41,11 @@
HpsSiSensor /* Sensor */> daqPairToSensor
= new HashMap<Pair<Integer, Integer>, HpsSiSensor>();
- // A collection of banks that should be processed after all hits have been made
- protected List<BaseStructure> eventBanks = new ArrayList<BaseStructure>();
-
// Flag indicating whether the DAQ map has been setup
protected boolean isDaqMapSetup = false;
// Collections and names
private static final String SVT_HIT_COLLECTION_NAME = "SVTRawTrackerHits";
- List<RawTrackerHit> rawHits = new ArrayList<RawTrackerHit>();
- List<SvtHeaderDataInfo> headers = new ArrayList<SvtHeaderDataInfo>();
// Constants
private static final String SUBDETECTOR_NAME = "Tracker";
@@ -76,6 +65,19 @@
*/
abstract protected int getMaxRocBankTag();
+ /**
+ * Get the minimum SVT ROC bank tag in the event.
+ *
+ * @return Minimum SVT ROC bank tag
+ */
+ abstract protected int getMinDataBankTag();
+
+ /**
+ * Get the maximum SVT ROC bank tag in the event.
+ *
+ * @return Maximum SVT ROC bank tag
+ */
+ abstract protected int getMaxDataBankTag();
/**
* Get the SVT ROC bank number of the bank encapsulating the SVT samples.
@@ -116,14 +118,6 @@
*/
abstract protected HpsSiSensor getSensor(int[] data);
- /**
- * Check whether a data bank is valid i.e. contains SVT samples only.
- *
- * @param dataBank - An EVIO bank containing integer data
- * @return true if the bank is valid, false otherwise
- */
- abstract protected boolean isValidDataBank(BaseStructure dataBank);
-
/**
* Check whether the samples are valid
*
@@ -154,125 +148,88 @@
* @return true if the raw hits were created successfully, false otherwise
* @throws SvtEvioReaderException
*/
+ @Override
public boolean makeHits(EvioEvent event, EventHeader lcsimEvent) throws SvtEvioReaderException {
LOGGER.finest("Physics Event: " + event.toString());
- // Retrieve the ROC banks encapsulated by the physics bank. The ROC
+ // Retrieve the data banks encapsulated by the physics bank. The ROC
// bank range is set in the subclass.
- List<BaseStructure> rocBanks = new ArrayList<BaseStructure>();
- for (int rocBankTag = this.getMinRocBankTag();
- rocBankTag <= this.getMaxRocBankTag(); rocBankTag++) {
-
- LOGGER.finest("Retrieving ROC bank: " + rocBankTag);
- List<BaseStructure> matchingRocBanks = this.getMatchingBanks(event, rocBankTag);
- if (matchingRocBanks == null) {
- LOGGER.finest("ROC bank " + rocBankTag + " was not found!");
- continue;
- }
- rocBanks.addAll(matchingRocBanks);
- }
- LOGGER.finest("Total ROC banks found: " + rocBanks.size());
-
- // Return false if ROC banks weren't found
- if (rocBanks.isEmpty()) return false;
+ List<BaseStructure> dataBanks = SvtEvioUtils.getDataBanks(event, this.getMinRocBankTag(), this.getMaxRocBankTag(), this.getMinDataBankTag(), this.getMaxDataBankTag());
+
+ // Return false if data banks weren't found
+ if (dataBanks.isEmpty()) return false;
// Setup the DAQ map if it's not setup
if (!this.isDaqMapSetup)
this.setupDaqMap(lcsimEvent.getDetector().getSubdetector(
SUBDETECTOR_NAME));
- // Clear the list of raw tracker hits
- rawHits.clear();
-
- // Clear the list of headers
- headers.clear();
-
- // Loop over the SVT ROC banks and process all samples
- for (BaseStructure rocBank : rocBanks) {
-
- LOGGER.finest("ROC bank: " + rocBank.toString());
-
- LOGGER.finest("Processing ROC bank " + rocBank.getHeader().getTag());
-
- // If the ROC bank doesn't contain any data, raise an exception
- if (rocBank.getChildCount() == 0) {
- throw new SvtEvioReaderException("[ " + this.getClass().getSimpleName()
- + " ]: SVT bank doesn't contain any data banks.");
+ List<RawTrackerHit> rawHits = new ArrayList<RawTrackerHit>();
+ List<SvtHeaderDataInfo> headers = new ArrayList<SvtHeaderDataInfo>();
+
+ LOGGER.finest("Total data banks found: " + dataBanks.size());
+
+ // Loop over all of the data banks contained by the ROC banks and
+ // processed them
+ for (BaseStructure dataBank : dataBanks) {
+
+ LOGGER.finest("Processing data bank: " + dataBank.toString());
+
+ // Get the int data encapsulated by the data bank
+ int[] data = dataBank.getIntData();
+ LOGGER.finest("Total number of integers contained by the data bank: " + data.length);
+
+ // Check that a complete set of samples exist
+ int sampleCount = data.length - this.getDataHeaderLength()
+ - this.getDataTailLength();
+ LOGGER.finest("Total number of samples: " + sampleCount);
+ if (sampleCount % 4 != 0) {
+ throw new SvtEvioReaderException("[ "
+ + this.getClass().getSimpleName()
+ + " ]: Size of samples array is not divisible by 4");
}
-
- // Get the data banks containing the SVT samples.
- List<BaseStructure> dataBanks = rocBank.getChildren();
- LOGGER.finest("Total data banks found: " + dataBanks.size());
-
- // Loop over all of the data banks contained by the ROC banks and
- // processed them
- for (BaseStructure dataBank : dataBanks) {
-
- LOGGER.finest("Processing data bank: " + dataBank.toString());
-
- // Check that the bank is valid
- if (!this.isValidDataBank(dataBank)) continue;
-
- // Get the int data encapsulated by the data bank
- int[] data = dataBank.getIntData();
- LOGGER.finest("Total number of integers contained by the data bank: " + data.length);
-
- // Check that a complete set of samples exist
- int sampleCount = data.length - this.getDataHeaderLength()
- - this.getDataTailLength();
- LOGGER.finest("Total number of samples: " + sampleCount);
- if (sampleCount % 4 != 0) {
- throw new SvtEvioReaderException("[ "
- + this.getClass().getSimpleName()
- + " ]: Size of samples array is not divisible by 4");
- }
-
- // extract header and tail information
- SvtHeaderDataInfo headerData = this.extractSvtHeader(dataBank.getHeader().getNumber(), data);
-
- // Check that the multisample count is consistent
- this.checkSvtSampleCount(sampleCount, headerData);
-
- // Add header to list
- headers.add(headerData);
-
-
- // Store the multisample headers
- // Note that the length is not known but can't be longer than the multisample count
- // in other words the data can be only header multisamples for example.
- int multisampleHeaderData[] = new int[sampleCount];
- int multisampleHeaderIndex = 0;
-
- LOGGER.finest("sampleCount " + sampleCount);
-
- // Loop through all of the samples and make hits
- for (int samplesN = 0; samplesN < sampleCount; samplesN += 4) {
-
- int[] samples = new int[4];
- System.arraycopy(data, this.getDataHeaderLength() + samplesN, samples, 0, samples.length);
-
- LOGGER.finest("samplesN " + samplesN + " multisampleHeaderCount " + multisampleHeaderIndex);
- if(SvtEvioUtils.isMultisampleHeader(samples))
- LOGGER.finest("this is a header multisample for apv " + SvtEvioUtils.getApvFromMultiSample(samples) + " ch " + SvtEvioUtils.getChannelNumber(samples));
- else
- LOGGER.finest("this is a data multisample for apv " + SvtEvioUtils.getApvFromMultiSample(samples) + " ch " + SvtEvioUtils.getChannelNumber(samples));
-
-
+
+ // extract header and tail information
+ SvtHeaderDataInfo headerData = this.extractSvtHeader(dataBank.getHeader().getNumber(), data);
+
+ // Check that the multisample count is consistent
+ this.checkSvtSampleCount(sampleCount, headerData);
+
+ // Add header to list
+ headers.add(headerData);
+
+ // Store the multisample headers
+ // Note that the length is not known but can't be longer than the multisample count
+ // in other words the data can be only header multisamples for example.
+ int multisampleHeaderData[] = new int[sampleCount];
+ int multisampleHeaderIndex = 0;
+
+ LOGGER.finest("sampleCount " + sampleCount);
+
+ List<int[]> multisampleList = SvtEvioUtils.getMultisamples(data, sampleCount, this.getDataHeaderLength());
+ // Loop through all of the samples and make hits
+ for (int[] samples:multisampleList) {
+ if (SvtEvioUtils.isMultisampleHeader(samples)) {
+ LOGGER.finest("this is a header multisample for apv " + SvtEvioUtils.getApvFromMultiSample(samples) + " ch " + SvtEvioUtils.getChannelNumber(samples));
// Extract data words from multisample header and update index
multisampleHeaderIndex += this.extractMultisampleHeaderData(samples, multisampleHeaderIndex, multisampleHeaderData);
-
- // If a set of samples is associated with an APV header or tail, skip it
- if (!this.isValidSampleSet(samples)) continue;
- rawHits.add(this.makeHit(samples));
+ } else {
+ LOGGER.finest("this is a data multisample for apv " + SvtEvioUtils.getApvFromMultiSample(samples) + " ch " + SvtEvioUtils.getChannelNumber(samples));
}
-
- LOGGER.finest("got " + multisampleHeaderIndex + " multisampleHeaderIndex for " + sampleCount + " sampleCount");
-
- // add multisample header tails to header data object
- this.setMultiSampleHeaders(headerData, multisampleHeaderIndex, multisampleHeaderData);
-
+
+ // If a set of samples is associated with an APV header or tail, skip it
+ if (!this.isValidSampleSet(samples)) {
+ continue;
+ }
+ rawHits.add(this.makeHit(samples));
}
+
+ LOGGER.finest("got " + multisampleHeaderIndex + " multisampleHeaderIndex for " + sampleCount + " sampleCount");
+
+ // add multisample header tails to header data object
+ this.setMultiSampleHeaders(headerData, multisampleHeaderIndex, multisampleHeaderData);
+
}
LOGGER.finest("Total number of RawTrackerHits created: " + rawHits.size());
@@ -282,13 +239,9 @@
// Add the collection of raw hits to the LCSim event
lcsimEvent.put(SVT_HIT_COLLECTION_NAME, rawHits, RawTrackerHit.class, flag, READOUT_NAME);
-
// Process SVT headers
this.processSvtHeaders(headers, lcsimEvent);
-
-
-
return true;
}
@@ -322,7 +275,7 @@
* @param samples
* @param index
* @param multisampleHeaderData
- * @return
+ * @return the length of the extracted samples or 0 if not a multisample header
*/
protected int extractMultisampleHeaderData(int[] samples, int index, int[] multisampleHeaderData) {
LOGGER.finest("extractMultisampleHeaderData: index " + index);
@@ -344,7 +297,7 @@
*/
protected void checkSvtSampleCount(int sampleCount, SvtHeaderDataInfo headerData) throws SvtEvioHeaderException {
if( sampleCount != SvtEvioUtils.getSvtTailMultisampleCount(headerData.getTail())*4)
- throw new SvtEvioHeaderException("multisample count is not consistent with bank size.");
+ throw new SvtEvioHeaderException("ROC " + headerData.getNum() + " multisample count " + sampleCount + " is not consistent with bank size " + SvtEvioUtils.getSvtTailMultisampleCount(headerData.getTail()));
}
/**
@@ -394,26 +347,4 @@
// Create and return a RawTrackerHit
return new BaseRawTrackerHit(hitTime, cellID, SvtEvioUtils.getSamples(data), null, sensor);
}
-
- /**
- * Retrieve all the banks in an event that match the given tag in their
- * header and are not data banks.
- *
- * @param structure : The event/bank being queried
- * @param tag : The tag to match
- * @return A collection of all bank structures that pass the filter
- * provided by the event
- */
- protected List<BaseStructure> getMatchingBanks(BaseStructure structure, final int tag) {
- IEvioFilter filter = new IEvioFilter() {
- public boolean accept(StructureType type, IEvioStructure struc) {
- return (type == StructureType.BANK)
- && (tag == struc.getHeader().getTag())
- && (struc.getHeader().getDataType() == DataType.ALSOBANK);
- }
- };
- return StructureFinder.getMatchingStructures(structure, filter);
- }
-
-
}
Modified: java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/AugmentedSvtEvioReader.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/AugmentedSvtEvioReader.java (original)
+++ java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/AugmentedSvtEvioReader.java Wed Apr 27 11:11:32 2016
@@ -32,7 +32,10 @@
@Override
protected void processSvtHeaders(List<SvtHeaderDataInfo> headers, EventHeader lcsimEvent) throws SvtEvioHeaderException {
-
+
+
+ LOGGER.finest("Process " + headers.size() + " SVT headers for run " + lcsimEvent.getRunNumber() + " and event " + lcsimEvent.getEventNumber());
+
// Check that the SVT header data is valid
// Catch the exceptions locally, add stuff to the event, then throw it again
// and handle it outside
@@ -43,6 +46,8 @@
if( !exceptions.isEmpty() ) {
+ LOGGER.finest("Found " + exceptions.size() + " " + SvtEvioHeaderException.class.getSimpleName() + " exceptions");
+
// print some debug info
List<String> exceptionNames = SvtEventHeaderChecker.getSvtEvioHeaderExceptionNames(exceptions);
@@ -68,8 +73,14 @@
throw new SvtEvioHeaderException(exceptions.get(0));
} else {
+
+ LOGGER.finest("No " + SvtEvioHeaderException.class.getSimpleName() + " exceptions found for this event");
+
// add skimming flag - the header is OK since I would never get here otherwise
SvtEventFlagger.voidAddHeaderCheckResultToMetaData(true, lcsimEvent);
+
+
+
}
// Add SVT header data to the event
Modified: java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/BasicEvioFileReader.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/BasicEvioFileReader.java (original)
+++ java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/BasicEvioFileReader.java Wed Apr 27 11:11:32 2016
@@ -8,7 +8,7 @@
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.hps.record.evio.EvioEventUtilities;
import org.hps.record.triggerbank.AbstractIntData.IntBankDefinition;
import org.hps.record.triggerbank.HeadBankData;
@@ -33,7 +33,7 @@
if (args.length == 0) {
printUsage(options);
}
- final CommandLineParser parser = new DefaultParser();
+ final CommandLineParser parser = new PosixParser();
CommandLine cl = null;
try {
cl = parser.parse(options, args);
Modified: java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/DummyEventBuilder.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/DummyEventBuilder.java (original)
+++ java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/DummyEventBuilder.java Wed Apr 27 11:11:32 2016
@@ -41,7 +41,7 @@
public void readEvioEvent(EvioEvent evioEvent) {
}
- @Override
- public void conditionsChanged(ConditionsEvent conditionsEvent) {
- }
+ @Override
+ public void conditionsChanged(ConditionsEvent conditionsEvent) {
+ }
}
Modified: java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/EcalHitWriter.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/EcalHitWriter.java (original)
+++ java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/EcalHitWriter.java Wed Apr 27 11:11:32 2016
@@ -212,10 +212,10 @@
Map<Integer, List<Long>> slotMap = new HashMap<Integer, List<Long>>();
for (Long id : hitMap.keySet()) {
dec.setID(id);
-// System.out.println(dec.getIDDescription());
-// System.out.printf("ix = %d, iy = %d\n", dec.getValue("ix"), dec.getValue("iy"));
+// System.out.println(dec.getIDDescription());
+// System.out.printf("ix = %d, iy = %d\n", dec.getValue("ix"), dec.getValue("iy"));
// Long daqID = EcalConditions.physicalToDaqID(id);
-// System.out.printf("physicalID %d, daqID %d\n", id, daqID);
+// System.out.printf("physicalID %d, daqID %d\n", id, daqID);
int slot = getSlot(id);
if (slotMap.get(slot) == null) {
slotMap.put(slot, new ArrayList<Long>());
@@ -231,7 +231,7 @@
for (int slot : slotMap.keySet()) {
data.addUchar((byte) slot); // slot #
data.addUint(0); // trigger #
- data.addUlong(0); // timestamp
+ data.addUlong(0); // timestamp
List<Long> hitIDs = slotMap.get(slot);
int nhits = hitIDs.size();
data.addN(nhits); // number of channels
@@ -285,10 +285,10 @@
Map<Integer, List<Long>> slotMap = new HashMap<Integer, List<Long>>();
for (Long id : hitMap.keySet()) {
dec.setID(id);
-// System.out.println(dec.getIDDescription());
-// System.out.printf("ix = %d, iy = %d\n", dec.getValue("ix"), dec.getValue("iy"));
+// System.out.println(dec.getIDDescription());
+// System.out.printf("ix = %d, iy = %d\n", dec.getValue("ix"), dec.getValue("iy"));
// Long daqID = EcalConditions.physicalToDaqID(id);
-// System.out.printf("physicalID %d, daqID %d\n", id, daqID);
+// System.out.printf("physicalID %d, daqID %d\n", id, daqID);
int slot = getSlot(id);
if (slotMap.get(slot) == null) {
slotMap.put(slot, new ArrayList<Long>());
@@ -304,7 +304,7 @@
for (int slot : slotMap.keySet()) {
data.addUchar((byte) slot); // slot #
data.addUint(0); // trigger #
- data.addUlong(0); // timestamp
+ data.addUlong(0); // timestamp
List<Long> hitIDs = slotMap.get(slot);
int nhits = hitIDs.size();
data.addN(nhits); // number of channels
@@ -358,10 +358,10 @@
Map<Integer, List<Long>> slotMap = new HashMap<Integer, List<Long>>();
for (Long id : hitMap.keySet()) {
dec.setID(id);
-// System.out.println(dec.getIDDescription());
-// System.out.printf("ix = %d, iy = %d\n", dec.getValue("ix"), dec.getValue("iy"));
+// System.out.println(dec.getIDDescription());
+// System.out.printf("ix = %d, iy = %d\n", dec.getValue("ix"), dec.getValue("iy"));
// Long daqID = EcalConditions.physicalToDaqID(id);
-// System.out.printf("physicalID %d, daqID %d\n", id, daqID);
+// System.out.printf("physicalID %d, daqID %d\n", id, daqID);
int slot = getSlot(id);
if (slotMap.get(slot) == null) {
slotMap.put(slot, new ArrayList<Long>());
@@ -380,7 +380,7 @@
// EvioBank slotBank = new EvioBank(EventConstants.ECAL_WINDOW_BANK_TAG, DataType.COMPOSITE, slot);
data.addUchar((byte) slot); // slot #
data.addUint(0); // trigger #
- data.addUlong(0); // timestamp
+ data.addUlong(0); // timestamp
List<Long> hitIDs = slotMap.get(slot);
int nhits = hitIDs.size();
data.addN(nhits); // number of channels
Modified: java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/EvioReader.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/EvioReader.java (original)
+++ java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/EvioReader.java Wed Apr 27 11:11:32 2016
@@ -5,45 +5,45 @@
import org.lcsim.event.EventHeader;
/**
- * Abstract class containing shared methods used by EVIO readers.
+ * Abstract class containing shared methods used by EVIO readers.
*
- * @author Sho Uemura <[log in to unmask]>
+ * @author Sho Uemura <[log in to unmask]>
*/
public abstract class EvioReader {
- // Debug flag
- protected boolean debug = false;
-
- // Name of the hit collection that will be created
- protected String hitCollectionName = null;
+ // Debug flag
+ protected boolean debug = false;
+
+ // Name of the hit collection that will be created
+ protected String hitCollectionName = null;
- /**
- * Make a LCIO hit collection (e.g. {@link RawTrackerHit},
- * {@link CalorimeterHit} from raw EVIO data.
- *
- * @param event : The EVIO event to read the raw data from
- * @param lcsimEvent : The LCSim event to write the collections to
- * @return True if the appropriate EVIO bank is found, false otherwise
- * @throws Exception
- *
- */
- abstract boolean makeHits(EvioEvent event, EventHeader lcsimEvent) throws Exception;
+ /**
+ * Make a LCIO hit collection (e.g. {@link org.lcsim.event.RawTrackerHit},
+ * {@link org.lcsim.event.CalorimeterHit} from raw EVIO data.
+ *
+ * @param event : The EVIO event to read the raw data from
+ * @param lcsimEvent : The LCSim event to write the collections to
+ * @return True if the appropriate EVIO bank is found, false otherwise
+ * @throws Exception
+ *
+ */
+ abstract boolean makeHits(EvioEvent event, EventHeader lcsimEvent) throws Exception;
- /**
- * Set the hit collection name.
- *
- * @param hitCollectionName : Name of the hit collection
- */
- public void setHitCollectionName(String hitCollectionName) {
- this.hitCollectionName = hitCollectionName;
- }
+ /**
+ * Set the hit collection name.
+ *
+ * @param hitCollectionName : Name of the hit collection
+ */
+ public void setHitCollectionName(String hitCollectionName) {
+ this.hitCollectionName = hitCollectionName;
+ }
- /**
- * Enable/disable debug output.
- *
- * @param debug : Set to true to enable, false to disable.
- */
- public void setDebug(boolean debug) {
- this.debug = debug;
- }
+ /**
+ * Enable/disable debug output.
+ *
+ * @param debug : Set to true to enable, false to disable.
+ */
+ public void setDebug(boolean debug) {
+ this.debug = debug;
+ }
}
Modified: java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/EvioToLcio.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/EvioToLcio.java (original)
+++ java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/EvioToLcio.java Wed Apr 27 11:11:32 2016
@@ -71,7 +71,7 @@
* @author Jeremy McCormick <[log in to unmask]>
* @author Sho Uemura <[log in to unmask]>
*/
-public class EvioToLcio {
+public final class EvioToLcio {
/**
* The default steering resource, which basically does nothing except print event numbers.
@@ -113,6 +113,7 @@
*/
private static Options OPTIONS = new Options();
static {
+ OPTIONS.addOption(new Option("h", false, "print help and exit"));
OPTIONS.addOption(new Option("d", true, "detector name (required)"));
OPTIONS.getOption("d").setRequired(true);
OPTIONS.addOption(new Option("f", true, "text file containing a list of EVIO files"));
@@ -251,10 +252,11 @@
}
public void parse(String[] args) {
- // Parse the command line options.
+
if (args.length == 0) {
this.printUsage();
}
+
final CommandLineParser parser = new PosixParser();
CommandLine cl = null;
try {
@@ -262,8 +264,13 @@
} catch (final ParseException e) {
throw new RuntimeException("Problem parsing command line options.", e);
}
+
+ if (cl.hasOption("h")) {
+ this.printUsage();
+ }
// Set the log level.
+ // TODO: Remove this argument; use java logging prop instead.
if (cl.hasOption("L")) {
final Level level = Level.parse(cl.getOptionValue("L").toUpperCase());
@@ -366,6 +373,10 @@
// Process the LCSim job variable definitions, if any.
jobManager = new JobManager();
+
+ // Initialize run manager and add as listener on conditions system.
+ RunManager runManager = RunManager.getRunManager();
+ DatabaseConditionsManager.getInstance().addConditionsListener(runManager);
// Enable dry run because events will be processed individually.
jobManager.setDryRun(true);
@@ -452,8 +463,6 @@
* This method will execute the EVIO to LCIO conversion and optionally process the events with LCSim Drivers from a
* steering file. Then the resultant LCIO events will be written to disk if this option is enabled in the command
* line arguments.
- *
- * @param args The command line arguments.
*/
public void run() {
Modified: java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/LCSimEngRunEventBuilder.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/LCSimEngRunEventBuilder.java (original)
+++ java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/LCSimEngRunEventBuilder.java Wed Apr 27 11:11:32 2016
@@ -67,6 +67,11 @@
* Modulus of TI timestamp offset (units of nanoseconds).
*/
private final long timestampCycle = 24 * 6 * 35;
+
+ /**
+ * The current TI time offset in nanoseconds from the run manager.
+ */
+ private Long currentTiTimeOffset = null;
/**
* Class constructor.
@@ -83,31 +88,59 @@
intBanks.add(new IntBankDefinition(TIData.class, new int[]{sspCrateBankTag, 0xe10a}));
intBanks.add(new IntBankDefinition(HeadBankData.class, new int[]{sspCrateBankTag, 0xe10f}));
intBanks.add(new IntBankDefinition(TDCData.class, new int[]{0x3a, 0xe107}));
- // ecalReader = new ECalEvioReader(0x25, 0x27);
triggerConfigReader = new TriggerConfigEvioReader();
svtEventFlagger = new SvtEventFlagger();
}
@Override
public void conditionsChanged(final ConditionsEvent conditionsEvent) {
+
super.conditionsChanged(conditionsEvent);
svtEventFlagger.initialize();
- }
-
- /**
- * Get the time from the TI data.
+
+ // Set TI time offset from run database.
+ setTiTimeOffsetForRun(conditionsEvent.getConditionsManager().getRun());
+ }
+
+ /**
+ * Get TI time offset from the run database, if available.
+ * @param run the run number
+ */
+ private void setTiTimeOffsetForRun(int run) {
+ currentTiTimeOffset = null;
+ RunManager runManager = RunManager.getRunManager();
+ if (runManager.getRun() != null) {
+ if (runManager.runExists()) {
+ currentTiTimeOffset = runManager.getRunSummary().getTiTimeOffset();
+ LOGGER.info("TI time offset set to " + currentTiTimeOffset + " for run "
+ + run + " from database");
+ } else {
+ LOGGER.warning("Run " + run
+ + " does not exist in the run database.");
+ }
+ } else {
+ LOGGER.info("Run manager is not initialized; TI time offset not available.");
+ }
+ /* Make sure connection is closed immediately. --JM */
+ try {
+ LOGGER.info("Closing run manager db connection ...");
+ RunManager.getRunManager().closeConnection();
+ LOGGER.info("Run manager db connection was closed.");
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Get the time from the TI data with time offset applied from run database.
*
* @param triggerList the TI data list
*/
@Override
protected long getTime(final List<AbstractIntData> triggerList) {
long tiTimeOffset = 0;
- try {
- if (RunManager.getRunManager().runExists() && RunManager.getRunManager().getTriggerConfig().getTiTimeOffset() != null) {
- tiTimeOffset = (RunManager.getRunManager().getTriggerConfig().getTiTimeOffset() / timestampCycle) * timestampCycle;
- }
- } catch (IllegalStateException e) {
- // May happen if RunManager is not initialized; just ignore.
+ if (currentTiTimeOffset != null) {
+ tiTimeOffset = (currentTiTimeOffset / timestampCycle) * timestampCycle;
}
for (final AbstractIntData data : triggerList) {
if (data instanceof TIData) {
@@ -137,7 +170,11 @@
LOGGER.finest("created new LCSim event " + lcsimEvent.getEventNumber());
// Put DAQ Configuration info into lcsimEvent.
- triggerConfigReader.getDAQConfig(evioEvent, lcsimEvent);
+ try {
+ triggerConfigReader.getDAQConfig(evioEvent, lcsimEvent);
+ } catch (final Exception e) {
+ LOGGER.log(Level.SEVERE,"DAQ CONFIG BROKEN.",e);
+ }
// Make RawCalorimeterHit collection, combining top and bottom section
// of ECal into one list.
Modified: java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/RfFitFunction.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/RfFitFunction.java (original)
+++ java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/RfFitFunction.java Wed Apr 27 11:11:32 2016
@@ -7,29 +7,29 @@
* Straight line fit
*/
public class RfFitFunction extends AbstractIFunction {
- protected double intercept=0;
- protected double slope=0;
- public RfFitFunction() {
- this("");
- }
- public RfFitFunction(String title) {
- super();
- this.variableNames=new String[]{"time"};
- this.parameterNames=new String[]{"intercept","slope"};
+ protected double intercept=0;
+ protected double slope=0;
+ public RfFitFunction() {
+ this("");
+ }
+ public RfFitFunction(String title) {
+ super();
+ this.variableNames=new String[]{"time"};
+ this.parameterNames=new String[]{"intercept","slope"};
- init(title);
- }
- public double value(double [] v) {
- return intercept + (v[0])*slope;
- }
- public void setParameters(double[] pars) throws IllegalArgumentException {
- super.setParameters(pars);
- intercept=pars[0];
- slope=pars[1];
- }
- public void setParameter(String key,double value) throws IllegalArgumentException{
- super.setParameter(key,value);
- if (key.equals("intercept")) intercept=value;
- else if (key.equals("slope")) slope=value;
- }
+ init(title);
+ }
+ public double value(double [] v) {
+ return intercept + (v[0])*slope;
+ }
+ public void setParameters(double[] pars) throws IllegalArgumentException {
+ super.setParameters(pars);
+ intercept=pars[0];
+ slope=pars[1];
+ }
+ public void setParameter(String key,double value) throws IllegalArgumentException{
+ super.setParameter(key,value);
+ if (key.equals("intercept")) intercept=value;
+ else if (key.equals("slope")) slope=value;
+ }
}
Modified: java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/RfFitterDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/RfFitterDriver.java (original)
+++ java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/RfFitterDriver.java Wed Apr 27 11:11:32 2016
@@ -1,9 +1,4 @@
package org.hps.evio;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import hep.aida.IAnalysisFactory;
import hep.aida.IDataPointSet;
@@ -11,7 +6,9 @@
import hep.aida.IFitResult;
import hep.aida.IFitter;
import hep.aida.IFunction;
-import hep.aida.IFunctionFactory;
+
+import java.util.ArrayList;
+import java.util.List;
import org.hps.recon.ecal.FADCGenericHit;
import org.lcsim.event.EventHeader;
@@ -19,146 +16,156 @@
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
-/*
+/**
* Extract RF time from waveform and put into lcsim event.
*/
public class RfFitterDriver extends Driver {
- static final double NOISE=2.0; // units = FADC
- static final int CRATE=46;
- static final int SLOT=13;
- static final int CHANNELS[]={0,1};
- static final double NSPERSAMPLE=4;
-
+ private static final double NOISE = 2.0; // units = FADC
+ private static final int CRATE = 46;
+ private static final int SLOT = 13;
+ private static final int CHANNELS[] = {0, 1};
+ private static final double NSPERSAMPLE = 4;
- // boilerplate:
- AIDA aida = AIDA.defaultInstance();
- IAnalysisFactory analysisFactory = aida.analysisFactory();
- IFunctionFactory functionFactory = analysisFactory.createFunctionFactory(null);
- IFitFactory fitFactory = analysisFactory.createFitFactory();
- IFitter fitter=fitFactory.createFitter();
- IDataPointSet fitData=aida.analysisFactory().createDataPointSetFactory(null).create("RF ADC DataPointSet", 2);
-
+ // boilerplate:
+ private AIDA aida = AIDA.defaultInstance();
+ private IAnalysisFactory analysisFactory = aida.analysisFactory();
+ //private IFunctionFactory functionFactory = analysisFactory.createFunctionFactory(null);
+ private IFitFactory fitFactory = analysisFactory.createFitFactory();
+ private IFitter fitter = fitFactory.createFitter();
+ private IDataPointSet fitData = aida.analysisFactory().createDataPointSetFactory(null).create("RF ADC DataPointSet", 2);
+
// the function used to fit the RF pulse:
- IFunction fitFunction=new RfFitFunction();
+ private IFunction fitFunction = new RfFitFunction();
- /*
- * Check the event for an RF pulse, and, if found, fit it to get
- * RF time and then dump it in the lcsim event.
+ /**
+ * Check the event for an RF pulse, and, if found, fit it to get RF time.
*/
- public void process(EventHeader event) {
- if (!event.hasCollection(GenericObject.class,"FADCGenericHits")) return;
-
- boolean foundRf=false;
- double times[]={-9999,-9999};
-
- for (GenericObject gob : event.get(GenericObject.class,"FADCGenericHits")) {
- FADCGenericHit hit=(FADCGenericHit)gob;
-
- // ignore hits not from proper RF signals based on crate/slot/channel:
- if (hit.getCrate()!=CRATE || hit.getSlot()!=SLOT) continue;
- for (int ii=0; ii<CHANNELS.length; ii++) {
- if (hit.getChannel()==CHANNELS[ii]) {
-
- // we found a RF readout, fit it:
- foundRf=true;
- times[ii] = fitPulse(hit);
-
- break;
- }
- }
- }
-
- // if we found an RF readout, dump the fit result in the event:
- if (foundRf) {
- List <RfHit> rfHits=new ArrayList<RfHit>();
- rfHits.add(new RfHit(times));
- event.put("RFHits", rfHits, RfHit.class, 1);
- }
- }
+ public void process(EventHeader event) {
- /*
- * Perform the fit to the RF pulse:
- */
- public double fitPulse(FADCGenericHit hit) {
- fitData.clear();
- final int adcSamples[]=hit.getData();
- //stores the number of peaks
- int iz=0;
- int peakBin[]={-999,-999};
- final int threshold = 300;
- double fitThresh[]={-999,-999};
- double pedVal[]={-999,-999};
-
- // Look for bins containing the peaks (2-3 peaks)
- for (int ii=4; ii<adcSamples.length; ii++) {
- // After 2 peaks, stop looking for more
- if (iz==2){break;}
- if ((adcSamples[ii+1]>0) && (adcSamples[ii-1]>0) && (adcSamples[ii]>threshold) && ii>8){
- if ((adcSamples[ii]>adcSamples[ii+1]) && (adcSamples[ii]>=adcSamples[ii-1]) ){
-
- peakBin[iz]=ii;
- iz++;
- }
- }
- }
-
-
- int jj=0;
- // Choose peak closest to center of window (second peak, ik=1)
- final int ik=1;
- pedVal[ik] = (adcSamples[peakBin[ik]-6]+adcSamples[peakBin[ik]-7]+adcSamples[peakBin[ik]-8]+adcSamples[peakBin[ik]-9])/4.0;
- fitThresh[ik]= (adcSamples[peakBin[ik]]+pedVal[ik])/3.0;
-
- // Initial values: we find/fit 3 points:
- double itime[] = {-999,-999,-999};
- double ifadc[] = {-999,-999,-999};
-
- // Find the points of the peak bin to peak bin-5
- for (int ll=0; ll<5; ll++){
- if ((adcSamples[peakBin[ik]-5+ll]) > fitThresh[ik]){
- // One point is below fit threshold and two points are above
- if(jj==0 && (adcSamples[peakBin[ik]-6+ll] > pedVal[ik])){
- final int zz=fitData.size();
- fitData.addPoint();
- itime[zz] = peakBin[ik]-6+ll;
- ifadc[zz] = adcSamples[peakBin[ik]-6+ll];
- fitData.point(zz).coordinate(0).setValue(peakBin[ik]-6+ll);
- fitData.point(zz).coordinate(1).setValue(adcSamples[peakBin[ik]-6+ll]);
- fitData.point(zz).coordinate(1).setErrorMinus(NOISE);
- fitData.point(zz).coordinate(1).setErrorPlus(NOISE);
- jj++;
- }
- final int zz=fitData.size();
- fitData.addPoint();
- itime[zz] = peakBin[ik]-5+ll;
- ifadc[zz] = adcSamples[peakBin[ik]-5+ll];
- fitData.point(zz).coordinate(0).setValue(peakBin[ik]-5+ll);
- fitData.point(zz).coordinate(1).setValue(adcSamples[peakBin[ik]-5+ll]);
- fitData.point(zz).coordinate(1).setErrorMinus(NOISE);
- fitData.point(zz).coordinate(1).setErrorPlus(NOISE);
-
- jj++;
- if (jj==3) {break;}
- }
- }
-
- double islope = ((double)(ifadc[2]-ifadc[0]))/(itime[2]-itime[0]);
- double icept = ifadc[1] - islope*itime[1];
- // Initialize fit parameters:
- fitFunction.setParameter("intercept",icept);
- fitFunction.setParameter("slope",islope);
+ List<RfHit> rfHits = new ArrayList<RfHit>();
- // this used to be turned on somewhere else on every event, dunno if it still is:
- //Logger.getLogger("org.freehep.math.minuit").setLevel(Level.OFF);
-
- IFitResult fitResults = fitter.fit(fitData,fitFunction);
-
- // Read the time value at this location on the fit:
- double halfVal = (adcSamples[peakBin[1]]+pedVal[1])/2.0;
-
- return NSPERSAMPLE*(halfVal-fitResults.fittedParameter("intercept"))/fitResults.fittedParameter("slope");
-
- }
-
+ boolean foundRf = false;
+ double times[] = {-9999, -9999};
+
+ if (event.hasCollection(GenericObject.class, "FADCGenericHits")) {
+
+ for (GenericObject gob : event.get(GenericObject.class, "FADCGenericHits")) {
+
+ FADCGenericHit hit = null;
+
+ /* Added conversion from GenericObject in case loading back from an LCIO file. --JM */
+ if (gob instanceof FADCGenericHit) {
+ hit = (FADCGenericHit) gob;
+ } else {
+ hit = new FADCGenericHit(gob);
+ }
+
+ // ignore hits not from proper RF signals based on crate/slot/channel:
+ if (hit.getCrate() != CRATE || hit.getSlot() != SLOT)
+ continue;
+
+ for (int ii = 0; ii < CHANNELS.length; ii++) {
+ if (hit.getChannel() == CHANNELS[ii]) {
+
+ // we found a RF readout, fit it:
+ foundRf = true;
+ times[ii] = fitPulse(hit);
+
+ break;
+ }
+ }
+ }
+ }
+ if (foundRf) {
+ rfHits.add(new RfHit(times));
+ }
+ event.put("RFHits", rfHits, RfHit.class, 1);
+ }
+
+ /**
+ * Perform the fit to the RF pulse:
+ */
+ private double fitPulse(FADCGenericHit hit) {
+ fitData.clear();
+ final int adcSamples[] = hit.getData();
+ // stores the number of peaks
+ int iz = 0;
+ int peakBin[] = {-999, -999};
+ final int threshold = 300;
+ double fitThresh[] = {-999, -999};
+ double pedVal[] = {-999, -999};
+
+ // Look for bins containing the peaks (2-3 peaks)
+ for (int ii = 4; ii < adcSamples.length; ii++) {
+ // After 2 peaks, stop looking for more
+ if (iz == 2) {
+ break;
+ }
+ if ((adcSamples[ii + 1] > 0) && (adcSamples[ii - 1] > 0) && (adcSamples[ii] > threshold) && ii > 8) {
+ if ((adcSamples[ii] > adcSamples[ii + 1]) && (adcSamples[ii] >= adcSamples[ii - 1])) {
+
+ peakBin[iz] = ii;
+ iz++;
+ }
+ }
+ }
+
+ int jj = 0;
+ // Choose peak closest to center of window (second peak, ik=1)
+ final int ik = 1;
+ pedVal[ik] = (adcSamples[peakBin[ik] - 6] + adcSamples[peakBin[ik] - 7] + adcSamples[peakBin[ik] - 8] + adcSamples[peakBin[ik] - 9]) / 4.0;
+ fitThresh[ik] = (adcSamples[peakBin[ik]] + pedVal[ik]) / 3.0;
+
+ // Initial values: we find/fit 3 points:
+ double itime[] = {-999, -999, -999};
+ double ifadc[] = {-999, -999, -999};
+
+ // Find the points of the peak bin to peak bin-5
+ for (int ll = 0; ll < 5; ll++) {
+ if ((adcSamples[peakBin[ik] - 5 + ll]) > fitThresh[ik]) {
+ // One point is below fit threshold and two points are above
+ if (jj == 0 && (adcSamples[peakBin[ik] - 6 + ll] > pedVal[ik])) {
+ final int zz = fitData.size();
+ fitData.addPoint();
+ itime[zz] = peakBin[ik] - 6 + ll;
+ ifadc[zz] = adcSamples[peakBin[ik] - 6 + ll];
+ fitData.point(zz).coordinate(0).setValue(peakBin[ik] - 6 + ll);
+ fitData.point(zz).coordinate(1).setValue(adcSamples[peakBin[ik] - 6 + ll]);
+ fitData.point(zz).coordinate(1).setErrorMinus(NOISE);
+ fitData.point(zz).coordinate(1).setErrorPlus(NOISE);
+ jj++;
+ }
+ final int zz = fitData.size();
+ fitData.addPoint();
+ itime[zz] = peakBin[ik] - 5 + ll;
+ ifadc[zz] = adcSamples[peakBin[ik] - 5 + ll];
+ fitData.point(zz).coordinate(0).setValue(peakBin[ik] - 5 + ll);
+ fitData.point(zz).coordinate(1).setValue(adcSamples[peakBin[ik] - 5 + ll]);
+ fitData.point(zz).coordinate(1).setErrorMinus(NOISE);
+ fitData.point(zz).coordinate(1).setErrorPlus(NOISE);
+
+ jj++;
+ if (jj == 3) {
+ break;
+ }
+ }
+ }
+
+ double islope = ((double) (ifadc[2] - ifadc[0])) / (itime[2] - itime[0]);
+ double icept = ifadc[1] - islope * itime[1];
+ // Initialize fit parameters:
+ fitFunction.setParameter("intercept", icept);
+ fitFunction.setParameter("slope", islope);
+
+ // this used to be turned on somewhere else on every event, dunno if it still is:
+ // Logger.getLogger("org.freehep.math.minuit").setLevel(Level.OFF);
+
+ IFitResult fitResults = fitter.fit(fitData, fitFunction);
+
+ // Read the time value at this location on the fit:
+ double halfVal = (adcSamples[peakBin[1]] + pedVal[1]) / 2.0;
+
+ return NSPERSAMPLE * (halfVal - fitResults.fittedParameter("intercept")) / fitResults.fittedParameter("slope");
+ }
}
Modified: java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/RfHit.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/RfHit.java (original)
+++ java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/RfHit.java Wed Apr 27 11:11:32 2016
@@ -6,13 +6,13 @@
* class to store RF times after extracting from waveform.
*/
public class RfHit implements GenericObject {
- private double[] times;
- public RfHit(double[] times) { this.times=times; }
- public int getNInt() { return 0; }
- public int getNFloat() { return 0; }
- public int getNDouble() { return times.length; }
- public double getDoubleVal(int ii) { return times[ii]; }
- public float getFloatVal (int ii) { return 0; }
- public int getIntVal (int ii) { return 0; }
- public boolean isFixedSize() { return false; }
+ private double[] times;
+ public RfHit(double[] times) { this.times=times; }
+ public int getNInt() { return 0; }
+ public int getNFloat() { return 0; }
+ public int getNDouble() { return times.length; }
+ public double getDoubleVal(int ii) { return times[ii]; }
+ public float getFloatVal (int ii) { return 0; }
+ public int getIntVal (int ii) { return 0; }
+ public boolean isFixedSize() { return false; }
}
Modified: java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/SvtEvioReader.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/SvtEvioReader.java (original)
+++ java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/SvtEvioReader.java Wed Apr 27 11:11:32 2016
@@ -7,7 +7,6 @@
import org.hps.record.svt.SvtEvioExceptions.SvtEvioHeaderException;
import org.hps.record.svt.SvtEvioExceptions.SvtEvioReaderException;
import org.hps.util.Pair;
-import org.jlab.coda.jevio.BaseStructure;
import org.jlab.coda.jevio.EvioEvent;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.event.EventHeader;
@@ -18,8 +17,6 @@
* SVT EVIO reader used to convert SVT bank integer data to LCIO objects.
*
* @author Omar Moreno <[log in to unmask]>
- * @data February 03, 2015
- *
*/
public class SvtEvioReader extends AbstractSvtEvioReader {
@@ -30,6 +27,7 @@
private static final int DATA_TAIL_LENGTH = 1;
public static final int MIN_ROC_BANK_TAG = 51;
public static final int MAX_ROC_BANK_TAG = 66;
+ public static final int DATA_BANK_TAG = 3;
private static final int ROC_BANK_NUMBER = 0;
/**
@@ -52,6 +50,16 @@
return MAX_ROC_BANK_TAG;
}
+ @Override
+ protected int getMinDataBankTag() {
+ return DATA_BANK_TAG;
+ }
+
+ @Override
+ protected int getMaxDataBankTag() {
+ return DATA_BANK_TAG;
+ }
+
/**
* Get the SVT ROC bank number of the bank encapsulating the SVT samples.
*
@@ -127,36 +135,13 @@
}
/**
- * Check whether a data bank is valid i.e. contains SVT samples only. For
- * the engineering run, a valid data bank has a tag of 3.
- *
- * @param dataBank - An EVIO bank containing integer data
- * @return true if the bank is valid, false otherwise
- *
- */
- @Override
- protected boolean isValidDataBank(BaseStructure dataBank) {
-
- // The SVT configuration is stored in a bank with tag equal to 57614.
- // All other event banks are invalid
- if (dataBank.getHeader().getTag() == 57614) {
-
- // Store the event bank for processing later.
- eventBanks.add(dataBank);
-
- return false;
- } else if (dataBank.getHeader().getTag() != 3) return false;
-
- return true;
- }
-
- /**
* Check whether the samples are valid. Specifically, check if the samples
* are APV header or tails.
*
* @param data : sample block of data
* @return true if the samples are valid, false otherwise
*/
+ @Override
protected boolean isValidSampleSet(int[] data) {
return !(SvtEvioUtils.isMultisampleHeader(data) || SvtEvioUtils.isMultisampleTail(data));
}
@@ -191,9 +176,6 @@
}
}*/
- // Clear out the event banks after they have been processed
- eventBanks.clear();
-
return success;
}
Modified: java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/TestRunReconToEvio.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/TestRunReconToEvio.java (original)
+++ java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/TestRunReconToEvio.java Wed Apr 27 11:11:32 2016
@@ -21,96 +21,96 @@
*/
public class TestRunReconToEvio extends Driver {
- EventWriter writer;
- String rawCalorimeterHitCollectionName = "EcalDigitizedHits";
- String evioOutputFile = "TestRunData.evio";
- EventBuilder builder = null;
- private int eventsWritten = 0;
- EcalHitWriter ecalWriter = null;
- SVTHitWriter svtWriter = null;
-
- Detector detector = null;
+ EventWriter writer;
+ String rawCalorimeterHitCollectionName = "EcalDigitizedHits";
+ String evioOutputFile = "TestRunData.evio";
+ EventBuilder builder = null;
+ private int eventsWritten = 0;
+ EcalHitWriter ecalWriter = null;
+ SVTHitWriter svtWriter = null;
+
+ Detector detector = null;
- public TestRunReconToEvio() {
- }
-
+ public TestRunReconToEvio() {
+ }
+
@Override
- public void detectorChanged(Detector detector) {
- // set the detector
+ public void detectorChanged(Detector detector) {
+ // set the detector
this.detector = detector;
}
- public void setEvioOutputFile(String evioOutputFile) {
- this.evioOutputFile = evioOutputFile;
- }
+ public void setEvioOutputFile(String evioOutputFile) {
+ this.evioOutputFile = evioOutputFile;
+ }
- public void setRawCalorimeterHitCollectionName(String rawCalorimeterHitCollectionName) {
- this.rawCalorimeterHitCollectionName = rawCalorimeterHitCollectionName;
- if (ecalWriter != null) {
- ecalWriter.setHitCollectionName(rawCalorimeterHitCollectionName);
- }
- }
+ public void setRawCalorimeterHitCollectionName(String rawCalorimeterHitCollectionName) {
+ this.rawCalorimeterHitCollectionName = rawCalorimeterHitCollectionName;
+ if (ecalWriter != null) {
+ ecalWriter.setHitCollectionName(rawCalorimeterHitCollectionName);
+ }
+ }
- protected void startOfData() {
- try {
- writer = new EventWriter(evioOutputFile);
- } catch (EvioException e) {
- throw new RuntimeException(e);
- }
+ protected void startOfData() {
+ try {
+ writer = new EventWriter(evioOutputFile);
+ } catch (EvioException e) {
+ throw new RuntimeException(e);
+ }
- ecalWriter = new EcalHitWriter();
- ecalWriter.setDetector(detector);
- ecalWriter.setHitCollectionName(rawCalorimeterHitCollectionName);
+ ecalWriter = new EcalHitWriter();
+ ecalWriter.setDetector(detector);
+ ecalWriter.setHitCollectionName(rawCalorimeterHitCollectionName);
- svtWriter = new SVTHitWriter();
- }
+ svtWriter = new SVTHitWriter();
+ }
- protected void endOfData() {
- System.out.println(this.getClass().getSimpleName() + " - wrote " + eventsWritten + " EVIO events in job.");
- writer.close();
- }
+ protected void endOfData() {
+ System.out.println(this.getClass().getSimpleName() + " - wrote " + eventsWritten + " EVIO events in job.");
+ writer.close();
+ }
- protected void process(EventHeader event) {
+ protected void process(EventHeader event) {
- if (!svtWriter.hasData(event)) {
- return;
- }
+ if (!svtWriter.hasData(event)) {
+ return;
+ }
- // Make a new EVIO event.
- builder = new EventBuilder(0, DataType.BANK, event.getEventNumber());
+ // Make a new EVIO event.
+ builder = new EventBuilder(0, DataType.BANK, event.getEventNumber());
- // Write SVTData.
- svtWriter.writeData(event, builder);
+ // Write SVTData.
+ svtWriter.writeData(event, builder);
- // Write RawCalorimeterHit collection.
- ecalWriter.writeData(event, builder);
-// writeRawCalorimeterHits(event);
+ // Write RawCalorimeterHit collection.
+ ecalWriter.writeData(event, builder);
+// writeRawCalorimeterHits(event);
- // Write this EVIO event.
- writeEvioEvent();
- }
+ // Write this EVIO event.
+ writeEvioEvent();
+ }
- private void writeEvioEvent() {
- EvioBank eventIDBank = new EvioBank(EvioEventConstants.EVENTID_BANK_TAG, DataType.UINT32, 0);
- int[] eventID = new int[3];
- eventID[0] = eventsWritten;
- eventID[1] = 0; //trigger type
- eventID[2] = 0; //status
+ private void writeEvioEvent() {
+ EvioBank eventIDBank = new EvioBank(EvioEventConstants.EVENTID_BANK_TAG, DataType.UINT32, 0);
+ int[] eventID = new int[3];
+ eventID[0] = eventsWritten;
+ eventID[1] = 0; //trigger type
+ eventID[2] = 0; //status
- try {
- eventIDBank.appendIntData(eventID);
- builder.addChild(builder.getEvent(), eventIDBank);
- } catch (EvioException e) {
- throw new RuntimeException(e);
- }
- builder.setAllHeaderLengths();
- try {
- writer.writeEvent(builder.getEvent());
- ++eventsWritten;
- } catch (EvioException e) {
- throw new RuntimeException(e);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- }
+ try {
+ eventIDBank.appendIntData(eventID);
+ builder.addChild(builder.getEvent(), eventIDBank);
+ } catch (EvioException e) {
+ throw new RuntimeException(e);
+ }
+ builder.setAllHeaderLengths();
+ try {
+ writer.writeEvent(builder.getEvent());
+ ++eventsWritten;
+ } catch (EvioException e) {
+ throw new RuntimeException(e);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/TestRunSvtEvioReader.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/TestRunSvtEvioReader.java (original)
+++ java/branches/HPSJAVA-409/evio/src/main/java/org/hps/evio/TestRunSvtEvioReader.java Wed Apr 27 11:11:32 2016
@@ -6,7 +6,6 @@
import org.hps.record.svt.SvtEvioUtils;
import org.hps.record.svt.SvtHeaderDataInfo;
import org.hps.util.Pair;
-import org.jlab.coda.jevio.BaseStructure;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.detector.tracker.silicon.HpsTestRunSiSensor;
import org.lcsim.event.EventHeader;
@@ -18,8 +17,6 @@
* objects.
*
* @author Omar Moreno <[log in to unmask]>
- * @date November 20, 2014
- *
*/
public class TestRunSvtEvioReader extends AbstractSvtEvioReader {
@@ -30,6 +27,7 @@
private static final int DATA_HEADER_LENGTH = 7;
private static final int DATA_TAIL_LENGTH = 1;
private static final int MAX_FPGA_ID = 6;
+ public static final int MIN_DATA_BANK_TAG = 0;
private static final int ROC_BANK_TAG = 3;
private static final int ROC_BANK_NUMBER = -1;
@@ -56,6 +54,16 @@
@Override
protected int getMaxRocBankTag() {
return ROC_BANK_TAG;
+ }
+
+ @Override
+ protected int getMinDataBankTag() {
+ return MIN_DATA_BANK_TAG;
+ }
+
+ @Override
+ protected int getMaxDataBankTag() {
+ return MAX_FPGA_ID;
}
/**
@@ -131,26 +139,12 @@
}
/**
- * Check whether a data bank is valid i.e. contains SVT samples only. For
- * the test run, a valid data bank has a tag in the range 0-6.
- *
- * @param dataBank - An EVIO bank containing integer data
- * @return true if the bank is valid, false otherwise
- *
- */
- @Override
- protected boolean isValidDataBank(BaseStructure dataBank) {
- if (dataBank.getHeader().getTag() < 0
- || dataBank.getHeader().getTag() >= MAX_FPGA_ID) return false;
- return true;
- }
-
- /**
* Check whether the samples are valid.
*
* @param data : sample block of data
* @return true if the samples are valid, false otherwise
*/
+ @Override
protected boolean isValidSampleSet(int[] data) {
return true;
}
Modified: java/branches/HPSJAVA-409/evio/src/test/java/org/hps/evio/LCSimEngRunEventBuilderTest.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/test/java/org/hps/evio/LCSimEngRunEventBuilderTest.java (original)
+++ java/branches/HPSJAVA-409/evio/src/test/java/org/hps/evio/LCSimEngRunEventBuilderTest.java Wed Apr 27 11:11:32 2016
@@ -24,60 +24,60 @@
*/
public class LCSimEngRunEventBuilderTest extends TestCase {
- public void testLCSimEngRunEventBuilder() throws Exception {
-
- // Setup database conditions.
- DatabaseConditionsManager conditionsManager = DatabaseConditionsManager.getInstance();
- conditionsManager.setXmlConfig("/org/hps/conditions/config/conditions_dev.xml");
- conditionsManager.setDetector("HPS-Proposal2014-v8-6pt6", 2000);
+ public void testLCSimEngRunEventBuilder() throws Exception {
+
+ // Setup database conditions.
+ DatabaseConditionsManager conditionsManager = DatabaseConditionsManager.getInstance();
+ conditionsManager.setXmlConfig("/org/hps/conditions/config/conditions_dev.xml");
+ conditionsManager.setDetector("HPS-Proposal2014-v8-6pt6", 2000);
- // Configure LCIO writer.
- new TestOutputFile(getClass().getSimpleName()).mkdirs();
- File lcioFile = new TestOutputFile(getClass().getSimpleName() + File.separator + getClass().getSimpleName() + "_output.slcio");
- LCIOWriter writer;
- try {
- writer = new LCIOWriter(lcioFile);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
+ // Configure LCIO writer.
+ new TestOutputFile(getClass().getSimpleName()).mkdirs();
+ File lcioFile = new TestOutputFile(getClass().getSimpleName() + File.separator + getClass().getSimpleName() + "_output.slcio");
+ LCIOWriter writer;
+ try {
+ writer = new LCIOWriter(lcioFile);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
- // Create event builder.
- LCSimEventBuilder builder = new LCSimEngRunEventBuilder();
- conditionsManager.addConditionsListener(builder);
- //builder.setDetectorName("HPS-Proposal2014-v8-6pt6");
- conditionsManager.setDetector("HPS-Proposal2014-v8-6pt6", 2744);
+ // Create event builder.
+ LCSimEventBuilder builder = new LCSimEngRunEventBuilder();
+ conditionsManager.addConditionsListener(builder);
+ //builder.setDetectorName("HPS-Proposal2014-v8-6pt6");
+ conditionsManager.setDetector("HPS-Proposal2014-v8-6pt6", 2744);
- // Get remote test file.
- FileCache cache = new FileCache();
- File evioFile = cache.getCachedFile(new URL("http://www.lcsim.org/test/hps-java/LCSimEngRunEventBuilderTest/hps_002744.evio.0"));
+ // Get remote test file.
+ FileCache cache = new FileCache();
+ File evioFile = cache.getCachedFile(new URL("http://www.lcsim.org/test/hps-java/LCSimEngRunEventBuilderTest/hps_002744.evio.0"));
- // Open the EVIO reader.
- System.out.println("Opening file " + evioFile);
- EvioReader reader = null;
- try {
- reader = new EvioReader(evioFile);
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
+ // Open the EVIO reader.
+ System.out.println("Opening file " + evioFile);
+ EvioReader reader = null;
+ try {
+ reader = new EvioReader(evioFile);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
- // Run the event builder on the EVIO.
- EvioEvent evioEvent = null;
- while ((evioEvent = reader.nextEvent()) != null) {
- reader.parseEvent(evioEvent);
- builder.readEvioEvent(evioEvent);
- if (EvioEventUtilities.isPhysicsEvent(evioEvent)) {
- try {
- EventHeader lcsimEvent = builder.makeLCSimEvent(evioEvent);
- System.out.println("created LCSim event #" + lcsimEvent.getEventNumber());
- writer.write(lcsimEvent);
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
- }
+ // Run the event builder on the EVIO.
+ EvioEvent evioEvent = null;
+ while ((evioEvent = reader.nextEvent()) != null) {
+ reader.parseEvent(evioEvent);
+ builder.readEvioEvent(evioEvent);
+ if (EvioEventUtilities.isPhysicsEvent(evioEvent)) {
+ try {
+ EventHeader lcsimEvent = builder.makeLCSimEvent(evioEvent);
+ System.out.println("created LCSim event #" + lcsimEvent.getEventNumber());
+ writer.write(lcsimEvent);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
- // Close the LCIO writer.
- writer.flush();
- writer.close();
- }
+ // Close the LCIO writer.
+ writer.flush();
+ writer.close();
+ }
}
Modified: java/branches/HPSJAVA-409/evio/src/test/java/org/hps/evio/LCSimTestRunEventBuilderTest.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/test/java/org/hps/evio/LCSimTestRunEventBuilderTest.java (original)
+++ java/branches/HPSJAVA-409/evio/src/test/java/org/hps/evio/LCSimTestRunEventBuilderTest.java Wed Apr 27 11:11:32 2016
@@ -15,60 +15,60 @@
import org.hps.record.evio.EvioEventUtilities;
/**
- * Integration test to check the conversion of test run EVIO to LCIO
+ * Integration test to check the conversion of test run EVIO to LCIO
*
- * @author Omar Moreno <[log in to unmask]>
- * @date November 20, 2014
+ * @author Omar Moreno <[log in to unmask]>
+ * @date November 20, 2014
*/
public class LCSimTestRunEventBuilderTest extends TestCase {
- //-----------------//
- //--- Constants ---//
- //-----------------//
- private static final String DB_CONFIGURATION
- = "/org/hps/conditions/config/conditions_database_testrun_2012.xml";
-
- public void testLCSimTestRunEventBuilder() throws Exception {
-
- // Configure the conditions system to retrieve test run conditions fo run 1351.
- DatabaseConditionsManager conditionsManager = DatabaseConditionsManager.getInstance();
- conditionsManager.setXmlConfig(DB_CONFIGURATION);
-
- // Create the test run event builder
- LCSimTestRunEventBuilder builder = new LCSimTestRunEventBuilder();
- conditionsManager.addConditionsListener(builder);
+ //-----------------//
+ //--- Constants ---//
+ //-----------------//
+ private static final String DB_CONFIGURATION
+ = "/org/hps/conditions/config/conditions_database_testrun_2012.xml";
+
+ public void testLCSimTestRunEventBuilder() throws Exception {
+
+ // Configure the conditions system to retrieve test run conditions fo run 1351.
+ DatabaseConditionsManager conditionsManager = DatabaseConditionsManager.getInstance();
+ conditionsManager.setXmlConfig(DB_CONFIGURATION);
+
+ // Create the test run event builder
+ LCSimTestRunEventBuilder builder = new LCSimTestRunEventBuilder();
+ conditionsManager.addConditionsListener(builder);
- conditionsManager.setDetector("HPS-TestRun-v5", 1351);
+ conditionsManager.setDetector("HPS-TestRun-v5", 1351);
- // Retrieve the remote test file. The file currently being contains a
- // subset of events from run 1351
- FileCache cache = new FileCache();
- File evioFile = cache.getCachedFile(new URL("http://www.lcsim.org/test/hps-java/hps1351_test.evio"));
-
- // Instantiate the EVIO reader and open the test file. If the file
- // can't be found, throw a runtime exception
- EvioReader reader = null;
- try {
- reader = new EvioReader(evioFile);
- } catch (Exception e) {
- throw new RuntimeException(
- "[ " + this.getClass().getSimpleName() + " ]: EVIO file couldn't be opened.");
- }
-
- // Loop through all EVIO events in the file and process them using the
- // event builder. If the event is a physics event, process the event
- // using the subdetector readers.
- EvioEvent evioEvent = null;
- while ((evioEvent = reader.nextEvent()) != null) {
- reader.parseEvent(evioEvent);
- builder.readEvioEvent(evioEvent);
- if (EvioEventUtilities.isPhysicsEvent(evioEvent)) {
- EventHeader lcsimEvent = builder.makeLCSimEvent(evioEvent);
- System.out.println("[ " + this.getClass().getSimpleName() + " ]: Created event number " + lcsimEvent.getEventNumber());
- }
- }
-
- // Close the EVIO reader
- reader.close();
- }
+ // Retrieve the remote test file. The file currently being contains a
+ // subset of events from run 1351
+ FileCache cache = new FileCache();
+ File evioFile = cache.getCachedFile(new URL("http://www.lcsim.org/test/hps-java/hps1351_test.evio"));
+
+ // Instantiate the EVIO reader and open the test file. If the file
+ // can't be found, throw a runtime exception
+ EvioReader reader = null;
+ try {
+ reader = new EvioReader(evioFile);
+ } catch (Exception e) {
+ throw new RuntimeException(
+ "[ " + this.getClass().getSimpleName() + " ]: EVIO file couldn't be opened.");
+ }
+
+ // Loop through all EVIO events in the file and process them using the
+ // event builder. If the event is a physics event, process the event
+ // using the subdetector readers.
+ EvioEvent evioEvent = null;
+ while ((evioEvent = reader.nextEvent()) != null) {
+ reader.parseEvent(evioEvent);
+ builder.readEvioEvent(evioEvent);
+ if (EvioEventUtilities.isPhysicsEvent(evioEvent)) {
+ EventHeader lcsimEvent = builder.makeLCSimEvent(evioEvent);
+ System.out.println("[ " + this.getClass().getSimpleName() + " ]: Created event number " + lcsimEvent.getEventNumber());
+ }
+ }
+
+ // Close the EVIO reader
+ reader.close();
+ }
}
Modified: java/branches/HPSJAVA-409/evio/src/test/java/org/hps/evio/SvtEvioReaderTest.java
=============================================================================
--- java/branches/HPSJAVA-409/evio/src/test/java/org/hps/evio/SvtEvioReaderTest.java (original)
+++ java/branches/HPSJAVA-409/evio/src/test/java/org/hps/evio/SvtEvioReaderTest.java Wed Apr 27 11:11:32 2016
@@ -25,47 +25,47 @@
// Initialize the logger
protected static Logger LOGGER = Logger.getLogger(SvtEvioReaderTest.class.getPackage().getName());
- public void testSvtEvioReaderTest() throws Exception {
+ public void testSvtEvioReaderTest() throws Exception {
- // Get the EVIO file that will be used to test the reader
- FileCache fileCache = new FileCache();
- File evioFile = fileCache.getCachedFile(
- new URL("http://www.lcsim.org/test/hps-java/svt_evio_reader_test.evio"));
+ // Get the EVIO file that will be used to test the reader
+ FileCache fileCache = new FileCache();
+ File evioFile = fileCache.getCachedFile(
+ new URL("http://www.lcsim.org/test/hps-java/svt_evio_reader_test.evio"));
- LOGGER.info("Opening file " + evioFile);
+ LOGGER.info("Opening file " + evioFile);
- // Instantiate the EVIO reader and open the file
- EvioReader evioReader = new EvioReader(evioFile);
-
- // Instantiate the SVT EVIO reader
- SvtEvioReader svtReader = new SvtEvioReader();
+ // Instantiate the EVIO reader and open the file
+ EvioReader evioReader = new EvioReader(evioFile);
+
+ // Instantiate the SVT EVIO reader
+ SvtEvioReader svtReader = new SvtEvioReader();
- // Setup the database conditions
- DatabaseConditionsManager conditionsManager = DatabaseConditionsManager.getInstance();
- conditionsManager.setDetector("HPS-Proposal2014-v9-2pt2", 2000);
+ // Setup the database conditions
+ DatabaseConditionsManager conditionsManager = DatabaseConditionsManager.getInstance();
+ conditionsManager.setDetector("HPS-Proposal2014-v9-2pt2", 2000);
- // Instantiate the event builder
- LCSimEventBuilder eventBuilder = new LCSimEngRunEventBuilder();
+ // Instantiate the event builder
+ LCSimEventBuilder eventBuilder = new LCSimEngRunEventBuilder();
- // Check that the file contains the expected number of events
- int eventCount = evioReader.getEventCount();
- LOGGER.info("File " + evioFile + " contains " + eventCount + " events.");
+ // Check that the file contains the expected number of events
+ int eventCount = evioReader.getEventCount();
+ LOGGER.info("File " + evioFile + " contains " + eventCount + " events.");
- // Loop through the EVIO events and process them.
- EvioEvent evioEvent = null;
- while ((evioEvent = evioReader.nextEvent()) != null) {
- evioReader.parseEvent(evioEvent);
+ // Loop through the EVIO events and process them.
+ EvioEvent evioEvent = null;
+ while ((evioEvent = evioReader.nextEvent()) != null) {
+ evioReader.parseEvent(evioEvent);
- // Only process physics events
- if (!EvioEventUtilities.isPhysicsEvent(evioEvent)) continue;
- LOGGER.info("Found physics event.");
-
- EventHeader lcsimEvent = eventBuilder.makeLCSimEvent(evioEvent);
- LOGGER.info("Created LCSim event # " + lcsimEvent.getEventNumber());
+ // Only process physics events
+ if (!EvioEventUtilities.isPhysicsEvent(evioEvent)) continue;
+ LOGGER.info("Found physics event.");
+
+ EventHeader lcsimEvent = eventBuilder.makeLCSimEvent(evioEvent);
+ LOGGER.info("Created LCSim event # " + lcsimEvent.getEventNumber());
- // Process the event using the SVT evio reader
- svtReader.processEvent(evioEvent, lcsimEvent);
- }
- }
+ // Process the event using the SVT evio reader
+ svtReader.processEvent(evioEvent, lcsimEvent);
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/integration-tests/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/integration-tests/pom.xml (original)
+++ java/branches/HPSJAVA-409/integration-tests/pom.xml Wed Apr 27 11:11:32 2016
@@ -23,7 +23,7 @@
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-test-data</artifactId>
- <version>1.0.0-SNAPSHOT</version>
+ <version>1.0-SNAPSHOT</version>
<scope>test</scope>
<classifier>archive</classifier>
<type>jar</type>
@@ -31,7 +31,7 @@
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-test-data</artifactId>
- <version>1.0.0-SNAPSHOT</version>
+ <version>1.0-SNAPSHOT</version>
<scope>test</scope>
<type>jar</type>
</dependency>
@@ -51,7 +51,8 @@
<version>2.19</version>
<configuration>
<argLine>-server -Xmx2g -XX:MaxPermSize=512m -Djava.util.logging.config.class=org.hps.logging.config.TestLoggingConfig</argLine>
- <forkMode>always</forkMode>
+ <forkCount>1</forkCount>
+ <reuseForks>false</reuseForks>
<includes>
<include>org/hps/test/it/*Test.java</include>
</includes>
@@ -86,28 +87,5 @@
</plugins>
</build>
</profile>
- <!-- This profile activates automatically when not running tests on a SLAC Unix system with NFS access. -->
- <profile>
- <id>no-slac-nfs</id>
- <activation>
- <activeByDefault>false</activeByDefault>
- <file>
- <missing>/nfs/slac/g/hps/</missing>
- </file>
- </activation>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <configuration>
- <excludes>
- <exclude>org/hps/test/it/EvioToLcioTest.java</exclude>
- </excludes>
- </configuration>
- </plugin>
- </plugins>
- </build>
- </profile>
</profiles>
</project>
Modified: java/branches/HPSJAVA-409/integration-tests/src/test/java/org/hps/test/it/DataQualityMonitorTest.java
=============================================================================
--- java/branches/HPSJAVA-409/integration-tests/src/test/java/org/hps/test/it/DataQualityMonitorTest.java (original)
+++ java/branches/HPSJAVA-409/integration-tests/src/test/java/org/hps/test/it/DataQualityMonitorTest.java Wed Apr 27 11:11:32 2016
@@ -17,16 +17,14 @@
private static final String CLASS_NAME = DataQualityMonitorTest.class.getSimpleName();
private static final File OUTPUT_DIR = new File("./target/test-output/" + CLASS_NAME);
- private static final File OUTPUT_FILE = new File(OUTPUT_DIR.getAbsolutePath() + File.separator + CLASS_NAME);
- private static final File AIDA_FILE = new File(OUTPUT_FILE.getAbsolutePath() + ".aida");
+ private static final File OUTPUT_FILE = new File(OUTPUT_DIR.getAbsolutePath() + File.separator + CLASS_NAME + ".aida");
private static final String STEERING_RESOURCE = "/org/hps/steering/test/DataQualityTest.lcsim";
public void setUp() {
- System.out.println("Setting up DQM Test");
// Delete files if they already exist.
- if (AIDA_FILE.exists())
- AIDA_FILE.delete();
+ if (OUTPUT_FILE.exists())
+ OUTPUT_FILE.delete();
// Create output dir.
OUTPUT_DIR.mkdirs();
@@ -36,11 +34,12 @@
public void testQualityMonitor() {
File dataFile = new TestDataUtility().getTestData("DataQualityMonitorTest.slcio");
- System.out.println("running data quality job with steering resource " + STEERING_RESOURCE);
+ System.out.println("running data quality job with steering resource " + STEERING_RESOURCE + " ...");
JobManager jobManager = new JobManager();
jobManager.addVariableDefinition("outputFile", OUTPUT_FILE.getPath());
jobManager.addInputFile(dataFile);
jobManager.setup(STEERING_RESOURCE);
jobManager.run();
+ System.out.println("Done!");
}
}
Modified: java/branches/HPSJAVA-409/integration-tests/src/test/java/org/hps/test/it/EvioToLcioTest.java
=============================================================================
--- java/branches/HPSJAVA-409/integration-tests/src/test/java/org/hps/test/it/EvioToLcioTest.java (original)
+++ java/branches/HPSJAVA-409/integration-tests/src/test/java/org/hps/test/it/EvioToLcioTest.java Wed Apr 27 11:11:32 2016
@@ -1,5 +1,6 @@
package org.hps.test.it;
+import java.io.File;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
@@ -7,8 +8,8 @@
import junit.framework.TestCase;
+import org.hps.data.test.TestDataUtility;
import org.hps.evio.EvioToLcio;
-import org.hps.record.epics.EpicsData;
import org.hps.record.scalers.ScalerData;
import org.hps.test.util.TestOutputFile;
import org.lcsim.event.EventHeader;
@@ -20,16 +21,6 @@
/**
* Basic test of converting EVIO to LCIO using the {@link org.hps.evio.EvioToLcio} command line utility on Engineering
* Run 2015 data.
- * <p>
- * This test checks the LCIO output for the:</br>
- * <ul>
- * <li>correct number of EPICS data collections</li>
- * <li>correct number of scaler data collections</li>
- * <li>all expected event collections</li>
- * <li>scaler parameters in event header</li>
- * </ul>
- * <p>
- * The test input is the first file of run 5772 in which scaler data appears around every 100k events.
*
* @author Jeremy McCormick, SLAC
*/
@@ -45,12 +36,7 @@
* Map to keep track of number of events with empty collections.
*/
Map<String, Integer> emptyCollections = new HashMap<String, Integer>();
-
- /**
- * Number of EPICS data collections found.
- */
- int epicsDataCount = 0;
-
+
/**
* Number of events processed.
*/
@@ -60,6 +46,12 @@
* Number of scaler data collections found.
*/
int scalerDataCount = 0;
+
+ CheckDriver() {
+ for (String collectionName : COLLECTION_NAMES) {
+ emptyCollections.put(collectionName, new Integer(0));
+ }
+ }
/**
* Check a collection by making sure it is present in the event and incrementing a counter if it is empty.
@@ -72,13 +64,9 @@
if (!event.hasCollection(type, name)) {
throw new RuntimeException("Missing " + name + " collection.");
}
+ Integer nEmpty = emptyCollections.get(name);
if (event.get(type, name).isEmpty()) {
- Integer nEmpty = emptyCollections.get(name);
- if (nEmpty == null) {
- nEmpty = 0;
- }
++nEmpty;
- // System.out.println(name + " is empty in event " + event.getEventNumber());
emptyCollections.put(name, nEmpty);
}
}
@@ -88,17 +76,7 @@
this.checkCollection(event, COLLECTION_TYPES[i], COLLECTION_NAMES[i]);
}
}
-
- private void checkEpicsData(final EventHeader event) {
- final EpicsData epicsData = EpicsData.read(event);
- if (epicsData != null) {
- if (epicsData.getEpicsHeader() == null) {
- throw new RuntimeException("The EpicsData header is null.");
- }
- ++epicsDataCount;
- }
- }
-
+
private void checkScalarData(final EventHeader event) {
final ScalerData scalerData = ScalerData.read(event);
if (scalerData != null) {
@@ -113,10 +91,7 @@
*/
@Override
public void process(final EventHeader event) {
-
- // Find and check EPICS data.
- this.checkEpicsData(event);
-
+
// Find scaler data.
this.checkScalarData(event);
@@ -128,41 +103,34 @@
}
/**
- * The number of empty collections that are allowed.
- */
- private static int[] ALLOWED_EMPTY = new int[] {45, 0, 0, 0};
-
- /**
* Names of collections to check.
*/
- private static String[] COLLECTION_NAMES = new String[] {"EcalReadoutHits", "FADCGenericHits", "SVTRawTrackerHits",
- "TriggerBank"};
+ private static String[] COLLECTION_NAMES = new String[] {
+ "EcalReadoutHits",
+ "FADCGenericHits",
+ "SVTRawTrackerHits",
+ "TriggerBank"
+ };
/**
* Classes of collections.
*/
- private static Class<?>[] COLLECTION_TYPES = new Class<?>[] {RawTrackerHit.class, GenericObject.class,
- RawTrackerHit.class, GenericObject.class};
-
- /**
- * The number of EPICS collections that should be found.
- */
- private static int EPICS_DATA_COUNT = 7;
-
- /**
- * The default input file (large file at SLAC so the pom.xml file excludes this test on non-SLAC hosts).
- */
- private static final String INPUT_FILE = "/nfs/slac/g/hps3/data/engrun2015/evio/hps_005772.evio.0";
+ private static Class<?>[] COLLECTION_TYPES = new Class<?>[] {
+ RawTrackerHit.class,
+ GenericObject.class,
+ RawTrackerHit.class,
+ GenericObject.class
+ };
/**
* The number of events that should be processed.
*/
- private static int PROCESSED_COUNT = 251823;
+ private static int PROCESSED_COUNT = 1000;
/**
* The number of scaler data collections that should be found.
*/
- private static int SCALER_DATA_COUNT = 3;
+ private static int SCALER_DATA_COUNT = 1;
/**
* Run the test.
@@ -171,13 +139,15 @@
*/
public void testEvioToLcio() throws Exception {
+ final File inputFile = new TestDataUtility().getTestData("run5772_integrationTest.evio");
+
// LCIO output file.
final TestOutputFile outputFile = new TestOutputFile(EvioToLcioTest.class, "hps_005772.slcio");
// Run the command line utility.
final String[] args = new String[] {"-l", outputFile.getPath(), "-d", "HPS-EngRun2015-Nominal-v1", "-r", "-x",
- "/org/hps/steering/EventMarker.lcsim", INPUT_FILE};
- System.out.println("Running EvioToLcio on " + INPUT_FILE);
+ "/org/hps/steering/EventMarker.lcsim", inputFile.getAbsolutePath()};
+ System.out.println("Running EvioToLcio on " + inputFile.getPath());
Logger.getLogger("org.hps.evio").setLevel(Level.WARNING);
System.out.println("org.hps.evio logging level is " + Logger.getLogger("org.hps.evio").getLevel());
EvioToLcio cnv = new EvioToLcio();
@@ -188,9 +158,6 @@
System.out.println("Done running EvioToLcio!");
System.out.println("conversion to LCIO took " + elapsed + " ms");
- // Check that the conversion did not take too long.
- //assertTrue("Conversion from EVIO to LCIO took too long.", elapsed < 2000000);
-
// Read in the LCIO file and run the CheckDriver on it.
System.out.println("Checking LCIO output ...");
final LCSimLoop loop = new LCSimLoop();
@@ -209,22 +176,15 @@
System.out.println("CheckDriver processed " + checkDriver.processedCount + " events.");
assertEquals("Wrong number of events processed by the check Driver.", PROCESSED_COUNT, checkDriver.processedCount);
- // Check that the correct number of EPICS data collections were written out.
- System.out.println("Found " + checkDriver.epicsDataCount + " events with EPICS data.");
- assertTrue("EPICS data count is wrong.", checkDriver.epicsDataCount == EPICS_DATA_COUNT);
-
// Check that the correct number of scaler data collections were written out.
System.out.println("Found " + checkDriver.scalerDataCount + " events with scaler data.");
assertTrue("Scaler data count is wrong.", checkDriver.scalerDataCount == SCALER_DATA_COUNT);
-
- // Check that there were not too many empty collections.
+
+ // Check that there were no empty output collections.
for (int i = 0; i < COLLECTION_NAMES.length; i++) {
final String collection = COLLECTION_NAMES[i];
final Integer nEmpty = checkDriver.emptyCollections.get(collection);
- if (nEmpty != null) {
- System.out.println(collection + " had " + nEmpty + " empty collections.");
- assertTrue(collection + " had too many empty collections.", nEmpty <= ALLOWED_EMPTY[i]);
- }
+ assertTrue("Collection " + collection + " was empty.", nEmpty == 0);
}
}
}
Modified: java/branches/HPSJAVA-409/integration-tests/src/test/java/org/hps/test/it/ReconSteeringTest.java
=============================================================================
--- java/branches/HPSJAVA-409/integration-tests/src/test/java/org/hps/test/it/ReconSteeringTest.java (original)
+++ java/branches/HPSJAVA-409/integration-tests/src/test/java/org/hps/test/it/ReconSteeringTest.java Wed Apr 27 11:11:32 2016
@@ -1,100 +1,42 @@
package org.hps.test.it;
import java.io.File;
-import java.io.IOException;
-import java.net.URL;
import junit.framework.TestCase;
import org.hps.data.test.TestDataUtility;
import org.hps.job.JobManager;
-import org.lcsim.util.cache.FileCache;
import org.lcsim.util.test.TestUtil.TestOutputFile;
/**
- * Test that production MC recon steering files are not broken by running an LCSim job on them
- * using an LCIO file.
+ * Run a test job on Eng Run 2015 data.
*
* @author Jeremy McCormick, SLAC
*/
public class ReconSteeringTest extends TestCase {
- /**
- * List of steering files to run.
- */
- final static String[] STEERING_FILES = {
- "EngineeringRun2014EcalRecon_Pass1.lcsim",
- "EngineeringRun2014EcalRecon.lcsim",
- "EngineeringRun2015EcalRecon.lcsim",
- "EngineeringRun2015FullRecon.lcsim",
- "EngineeringRun2015FullRecon_Pass2.lcsim",
- "EngineeringRun2015HitRecon.lcsim",
- "HPSTrackingDefaultsRecon.lcsim"
- };
-
- /**
- * Test recon steering files.
- * @throws Exception if any error occurs running the recon job
- */
- public void testSteeringFiles() {
+ final static String STEERING_RESOURCE = "/org/hps/steering/recon/EngineeringRun2015FullRecon.lcsim";
+
+ public void testReconSteering() throws Exception {
- File inputFile = new TestDataUtility().getTestData("tritrigv1-egsv3-triv2-g4v1_s2d6_HPS-EngRun2015-Nominal-v3_3.4.0_pairs1_1.slcio");
-
- for (String steeringFile : STEERING_FILES) {
-
- // Run the reconstruction steering file.
- File outputFile = null;
- try {
- outputFile = new TestOutputFile(new File(steeringFile).getName().replace(".lcsim", ""));
- runSteering("/org/hps/steering/recon/" + steeringFile, inputFile, outputFile);
- } catch (Throwable e) {
- System.err.println("Job with steering " + steeringFile + " failed!");
- throw new RuntimeException("Recon job failed.", e);
- }
-
- Runtime runtime = Runtime.getRuntime();
-
- int mb = 1024 * 1024;
-
- System.out.println("total memory: " + runtime.totalMemory() / mb);
- System.out.println("free memory: " + runtime.freeMemory() / mb);
- System.out.println("max memory: " + runtime.maxMemory() / mb);
- System.out.println("used memory: " + (runtime.totalMemory() - runtime.freeMemory()) / mb);
-
- System.gc();
-
- // Create DQM output for QA.
- try {
- runDQM(outputFile);
- } catch (Throwable e) {
- throw new RuntimeException("The DQM job failed.", e);
- }
- }
- }
-
- private void runSteering(String steeringFile, File inputFile, File outputFile) {
- System.out.println("Testing steering file " + steeringFile + " ...");
+ File inputFile = new TestDataUtility().getTestData("run_5772_data_only.slcio");
+
+ File outputFile = null;
+ outputFile = new TestOutputFile(new File(STEERING_RESOURCE).getName().replace(".lcsim", ""));
+ System.out.println("Testing steering " + STEERING_RESOURCE + " ...");
JobManager job = new JobManager();
job.addVariableDefinition("outputFile", outputFile.getPath());
- job.addVariableDefinition("detector", "HPS-EngRun2015-Nominal-v3");
- job.addVariableDefinition("run", "5772");
- job.addVariableDefinition("isMC", "true");
job.addInputFile(inputFile);
- job.setup(steeringFile);
+ job.setup(STEERING_RESOURCE);
+ job.setNumberOfEvents(1000);
job.run();
- System.out.println("Job with steering " + steeringFile + " successfully processed " + job.getLCSimLoop().getTotalCountableConsumed() + " events.");
- }
-
- private void runDQM(File outputFile) {
- System.out.println("Running DQM on " + outputFile.getPath() + " ...");
- JobManager job = new JobManager();
- File inputFile = new File(outputFile.getPath() + ".slcio");
- job.addInputFile(inputFile);
- job.addVariableDefinition("outputFile", outputFile.getPath().replace(".slcio", ""));
- job.setup("/org/hps/steering/production/DataQualityRecon.lcsim");
- job.run();
- System.out.println("DQM processed " + job.getLCSimLoop().getTotalCountableConsumed() + " events from " + outputFile + ".");
+ System.out.println("Done processing " + job.getLCSimLoop().getTotalCountableConsumed() + " events.");
+
+ Runtime runtime = Runtime.getRuntime();
+ int mb = 1024 * 1024;
+ System.out.printf("total memory: %d mb\n", runtime.totalMemory() / mb);
+ System.out.printf("free memory: %d mb\n", runtime.freeMemory() / mb);
+ System.out.printf("max memory: %d mb\n", runtime.maxMemory() / mb);
+ System.out.printf("used memory: %d mb\n", (runtime.totalMemory() - runtime.freeMemory()) / mb);
}
}
-
-
Modified: java/branches/HPSJAVA-409/integration-tests/src/test/java/org/hps/test/it/SimpleSvtReadoutTest.java
=============================================================================
--- java/branches/HPSJAVA-409/integration-tests/src/test/java/org/hps/test/it/SimpleSvtReadoutTest.java (original)
+++ java/branches/HPSJAVA-409/integration-tests/src/test/java/org/hps/test/it/SimpleSvtReadoutTest.java Wed Apr 27 11:11:32 2016
@@ -26,14 +26,14 @@
// Collection Names
static final String rawTrackerHitCollectionName = "SVTRawTrackerHits";
- public void testSimpleSvtReadout() throws Exception {
-
- File inputFile = new TestDataUtility().getTestData("ReadoutToLcioTest.slcio");
-
+ public void testSimpleSvtReadout() throws Exception {
+
+ File inputFile = new TestDataUtility().getTestData("ReadoutToLcioTest.slcio");
+
outputDir.mkdirs();
if(!outputDir.exists()){
- this.printDebug("Failed to create directory " + outputDir.getPath());
- throw new RuntimeException("Failed to create output directory.");
+ this.printDebug("Failed to create directory " + outputDir.getPath());
+ throw new RuntimeException("Failed to create output directory.");
}
FinalCheckDriver checker = new FinalCheckDriver();
@@ -54,25 +54,25 @@
this.printDebug("");
this.printDebug("===============================");
- }
-
- class FinalCheckDriver extends Driver {
-
- private int totalRawTrackerHits = 0;
-
- public void process(EventHeader event){
- if(!event.hasCollection(RawTrackerHit.class, rawTrackerHitCollectionName)) return;
- List<RawTrackerHit> rawHits = event.get(RawTrackerHit.class, rawTrackerHitCollectionName);
-
- totalRawTrackerHits += rawHits.size();
- }
-
- public int getTotalNumberOfRawTrackerHits(){
- return totalRawTrackerHits;
- }
- }
-
- private void printDebug(String message){
- System.out.println("[ SimpleSvtReadoutTest ]: " + message);
- }
+ }
+
+ class FinalCheckDriver extends Driver {
+
+ private int totalRawTrackerHits = 0;
+
+ public void process(EventHeader event){
+ if(!event.hasCollection(RawTrackerHit.class, rawTrackerHitCollectionName)) return;
+ List<RawTrackerHit> rawHits = event.get(RawTrackerHit.class, rawTrackerHitCollectionName);
+
+ totalRawTrackerHits += rawHits.size();
+ }
+
+ public int getTotalNumberOfRawTrackerHits(){
+ return totalRawTrackerHits;
+ }
+ }
+
+ private void printDebug(String message){
+ System.out.println("[ SimpleSvtReadoutTest ]: " + message);
+ }
}
Modified: java/branches/HPSJAVA-409/integration-tests/src/test/resources/org/hps/ecalreadoutsim/EcalReadoutSimTest.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/integration-tests/src/test/resources/org/hps/ecalreadoutsim/EcalReadoutSimTest.lcsim (original)
+++ java/branches/HPSJAVA-409/integration-tests/src/test/resources/org/hps/ecalreadoutsim/EcalReadoutSimTest.lcsim Wed Apr 27 11:11:32 2016
@@ -32,7 +32,7 @@
<driver name="TestRunReconToLcio" type="org.hps.evio.TestRunTriggeredReconToLcio">
<outputFile>${outputFile}.slcio</outputFile>
</driver>
-
+
<driver name="EcalReadout" type="org.hps.readout.ecal.FADCEcalReadoutDriver">
<coincidenceWindow>1</coincidenceWindow>
<ecalName>Ecal</ecalName>
@@ -50,7 +50,7 @@
<applyBadCrystalMap>false</applyBadCrystalMap>
<use2014Gain>true</use2014Gain>
<!-- <debug>true</debug>-->
- </driver>
+ </driver>
<driver name="EcalClusterer" type="org.hps.recon.ecal.GTPEcalClusterer">
<ecalName>Ecal</ecalName>
@@ -63,7 +63,7 @@
<deadTime>10</deadTime>
<pairCoincidence>2</pairCoincidence>
<outputFileName>${outputFile}.triggers</outputFileName>
- </driver>
+ </driver>
<driver name="SimpleSVTReadout" type="org.hps.readout.svt.SimpleSvtReadout">
<addNoise>false</addNoise>
</driver>
@@ -74,4 +74,4 @@
</driver>
</drivers>
</lcsim>
-
+
Modified: java/branches/HPSJAVA-409/integration-tests/src/test/resources/org/hps/steering/test/DataQualityTest.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/integration-tests/src/test/resources/org/hps/steering/test/DataQualityTest.lcsim (original)
+++ java/branches/HPSJAVA-409/integration-tests/src/test/resources/org/hps/steering/test/DataQualityTest.lcsim Wed Apr 27 11:11:32 2016
@@ -18,13 +18,12 @@
<driver name="CleanupDriver"/>
</execute>
<drivers>
- <!-- <driver name="DQMDatabaseDriver" type="org.hps.analysis.dataquality.DQMDatabaseDriver"/> -->
<driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
- <eventInterval>1</eventInterval>
+ <eventInterval>100</eventInterval>
</driver>
<driver name="RawTrackerHitSensorSetup" type="org.lcsim.recon.tracking.digitization.sisim.config.RawTrackerHitSensorSetup"/>
<driver name="AidaSaveDriver" type="org.lcsim.job.AidaSaveDriver">
- <outputFileName>${outputFile}.root</outputFileName>
+ <outputFileName>${outputFile}</outputFileName>
</driver>
<driver name="SVTMonitoring" type="org.hps.analysis.dataquality.SvtMonitoring">
<overwriteDB>false</overwriteDB>
@@ -48,6 +47,5 @@
<overwriteDB>false</overwriteDB>
</driver>
<driver name="CleanupDriver" type="org.lcsim.recon.tracking.digitization.sisim.config.ReadoutCleanupDriver"/>
-
</drivers>
</lcsim>
Modified: java/branches/HPSJAVA-409/integration-tests/src/test/resources/org/hps/steering/test/Dummy.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/integration-tests/src/test/resources/org/hps/steering/test/Dummy.lcsim (original)
+++ java/branches/HPSJAVA-409/integration-tests/src/test/resources/org/hps/steering/test/Dummy.lcsim Wed Apr 27 11:11:32 2016
@@ -10,4 +10,4 @@
<driver name="DummyDriver" type="org.hps.test.util.DummyDriver"/>
</drivers>
</lcsim>
-
+
Modified: java/branches/HPSJAVA-409/integration-tests/src/test/resources/org/hps/steering/test/EcalReadoutSimTest.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/integration-tests/src/test/resources/org/hps/steering/test/EcalReadoutSimTest.lcsim (original)
+++ java/branches/HPSJAVA-409/integration-tests/src/test/resources/org/hps/steering/test/EcalReadoutSimTest.lcsim Wed Apr 27 11:11:32 2016
@@ -28,7 +28,7 @@
<driver name="TestRunReconToLcio" type="org.hps.evio.TestRunTriggeredReconToLcio">
<outputFile>${outputFile}.slcio</outputFile>
</driver>
-
+
<driver name="EcalReadout" type="org.hps.readout.ecal.FADCEcalReadoutDriver">
<coincidenceWindow>1</coincidenceWindow>
<ecalName>Ecal</ecalName>
@@ -46,7 +46,7 @@
<applyBadCrystalMap>false</applyBadCrystalMap>
<use2014Gain>true</use2014Gain>
<!-- <debug>true</debug>-->
- </driver>
+ </driver>
<driver name="EcalClusterer" type="org.hps.recon.ecal.GTPEcalClusterer">
<ecalName>Ecal</ecalName>
@@ -59,7 +59,7 @@
<deadTime>10</deadTime>
<pairCoincidence>2</pairCoincidence>
<outputFileName>${outputFile}.triggers</outputFileName>
- </driver>
+ </driver>
<driver name="SimpleSVTReadout" type="org.hps.readout.svt.SimpleSvtReadout">
<addNoise>false</addNoise>
</driver>
@@ -70,4 +70,4 @@
</driver>
</drivers>
</lcsim>
-
+
Modified: java/branches/HPSJAVA-409/job/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/job/pom.xml (original)
+++ java/branches/HPSJAVA-409/job/pom.xml Wed Apr 27 11:11:32 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/job/</url>
@@ -19,5 +19,9 @@
<groupId>org.hps</groupId>
<artifactId>hps-detector-model</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.hps</groupId>
+ <artifactId>hps-run-database</artifactId>
+ </dependency>
</dependencies>
</project>
Modified: java/branches/HPSJAVA-409/job/src/main/java/org/hps/job/JobManager.java
=============================================================================
--- java/branches/HPSJAVA-409/job/src/main/java/org/hps/job/JobManager.java (original)
+++ java/branches/HPSJAVA-409/job/src/main/java/org/hps/job/JobManager.java Wed Apr 27 11:11:32 2016
@@ -1,20 +1,25 @@
package org.hps.job;
-import java.io.InputStream;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.lcsim.util.Driver;
import org.hps.conditions.ConditionsDriver;
-import org.hps.conditions.database.DatabaseConditionsManager;
-import org.hps.detector.svt.SvtDetectorSetup;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Options;
import org.lcsim.job.JobControlManager;
-import org.lcsim.util.Driver;
/**
- * Extension of standard LCSim job manager which does some HPS-specific management of the conditions system.
+ * Extension of standard LCSim job manager.
+ * <p>
+ * Provides setup of database conditions system and adds option to provide conditions system tags.
*
* @author Jeremy McCormick, SLAC
*/
-public class JobManager extends JobControlManager {
-
+public final class JobManager extends JobControlManager {
+
/**
* Run the job manager from the command line.
*
@@ -31,58 +36,58 @@
* Class constructor.
*/
public JobManager() {
+ conditionsSetup = new DatabaseConditionsManagerSetup();
+ }
+
+ /**
+ * Get the conditions setup.
+ * @return the conditions setup
+ */
+ public DatabaseConditionsManagerSetup getDatabaseConditionsManagerSetup() {
+ return (DatabaseConditionsManagerSetup) this.conditionsSetup;
+ }
+
+ /**
+ * Override creation of command line options.
+ * @return the overridden command line options
+ */
+ @Override
+ protected Options createCommandLineOptions() {
+ Options options = super.createCommandLineOptions();
+ options.addOption("t", "tag", true, "conditions system tag (can be used multiple times)");
+ return options;
+ }
+
+ /**
+ * Override command line parsing.
+ * @return the overridden, parsed command line
+ */
+ @Override
+ public CommandLine parse(final String args[]) {
+ CommandLine commandLine = super.parse(args);
+ if (commandLine.hasOption("t")) {
+ Set<String> tags = new HashSet<String>();
+ for (String tag : commandLine.getOptionValues("t")) {
+ tags.add(tag);
+ }
+ getDatabaseConditionsManagerSetup().setTags(tags);
+ }
+ return commandLine;
}
/**
- * Override setup so the conditions system can be reset.
- *
- * @param is the input stream containing config information
- */
- public void setup(InputStream is) {
-
- // Add class that will setup SVT detector with conditions data (this is awkward but has to be done someplace).
- DatabaseConditionsManager.getInstance().addConditionsListener(new SvtDetectorSetup());
-
- super.setup(is);
-
- // Setup the conditions system if there is a ConditionsDriver present.
- this.setupConditionsDriver();
- }
-
- /**
- * Override the parent classes method that runs the job in order to perform conditions system initialization.
- *
- * @return <code>true</code> if job was successful
- */
- @Override
- public final boolean run() {
-
- // Run the job.
- final boolean result = super.run();
-
- // Close the conditions database connection if it is open.
- DatabaseConditionsManager.getInstance().closeConnection();
-
- return result;
- }
-
- /**
- * This method will find the {@link org.hps.conditions.ConditionsDriver} in the list of Drivers registered with the
- * manager and then execute its initialization method, which may override the default behavior of the conditions
- * system.
- */
- private void setupConditionsDriver() {
- ConditionsDriver conditionsDriver = null;
- for (final Driver driver : this.getDriverAdapter().getDriver().drivers()) {
+ * Initialize <code>ConditionsDriver</code> if necessary.
+ **/
+ protected void setupDrivers() {
+ super.setupDrivers();
+ for (Driver driver : this.getDriverExecList()) {
if (driver instanceof ConditionsDriver) {
- conditionsDriver = (ConditionsDriver) driver;
+ ConditionsDriver conditions = (ConditionsDriver) driver;
+ getConditionsSetup().setRun(conditions.getRunNumber());
+ getConditionsSetup().setDetectorName(conditions.getDetectorName());
break;
}
}
- if (conditionsDriver != null) {
- LOGGER.config("initializing conditions Driver");
- conditionsDriver.initialize();
- LOGGER.warning("Conditions driver will be removed soon!");
- }
}
+
}
Modified: java/branches/HPSJAVA-409/logging/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/logging/pom.xml (original)
+++ java/branches/HPSJAVA-409/logging/pom.xml Wed Apr 27 11:11:32 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/logging/</url>
Modified: java/branches/HPSJAVA-409/logging/src/main/resources/org/hps/logging/config/logging.properties
=============================================================================
--- java/branches/HPSJAVA-409/logging/src/main/resources/org/hps/logging/config/logging.properties (original)
+++ java/branches/HPSJAVA-409/logging/src/main/resources/org/hps/logging/config/logging.properties Wed Apr 27 11:11:32 2016
@@ -16,6 +16,9 @@
# configure the console handler
java.util.logging.ConsoleHandler.level = ALL
java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter
+
+# turn minuit logging off
+org.freehep.math.minuit = OFF
# lcsim job
org.lcsim.job.level = CONFIG
@@ -49,7 +52,6 @@
# ecal-recon
org.hps.recon.ecal.level = CONFIG
org.hps.recon.ecal.cluster.level = WARNING
-org.hps.recon.ecal.cluster.ClusterDriver.level = WARNING
# recon
org.hps.recon.filtering.level = WARNING
@@ -75,7 +77,7 @@
# detector-model
org.lcsim.detector.converter.compact.level = INFO
org.lcsim.geometry.compact.converter.level = INFO
-org.hps.detector.svt.level = ALL
+org.hps.detector.svt.level = INFO
# test data
org.hps.data.test = INFO
Modified: java/branches/HPSJAVA-409/logging/src/main/resources/org/hps/logging/config/test_logging.properties
=============================================================================
--- java/branches/HPSJAVA-409/logging/src/main/resources/org/hps/logging/config/test_logging.properties (original)
+++ java/branches/HPSJAVA-409/logging/src/main/resources/org/hps/logging/config/test_logging.properties Wed Apr 27 11:11:32 2016
@@ -16,15 +16,18 @@
# configure the console handler
java.util.logging.ConsoleHandler.level = ALL
java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter
-f
+
+# turn minuit off
+org.freehep.math.minuit = OFF
+
# lcsim job
-org.lcsim.job.level = WARNING
+org.lcsim.job.level = CONFIG
org.lcsim.job.EventMarkerDriver.level = OFF
org.lcsim.job.EventPrintLoopAdapter = ALL
# conditions
org.hps.conditions.api.level = WARNING
-org.hps.conditions.database.level = CONFIG
+org.hps.conditions.database.level = WARNING
org.hps.conditions.cli.level = WARNING
org.hps.conditions.ecal.level = WARNING
org.hps.conditions.svt.level = WARNING
@@ -44,12 +47,11 @@
org.hps.crawler.level = WARNING
# datacat
-org.hps.datacat.client.level = ALL
+org.hps.datacat.client.level = OFF
# ecal-recon
org.hps.recon.ecal.level = WARNING
org.hps.recon.ecal.cluster.level = WARNING
-org.hps.recon.ecal.cluster.ClusterDriver.level = WARNING
# recon
org.hps.recon.filtering.level = WARNING
@@ -79,3 +81,6 @@
# test data
org.hps.data.test = INFO
+
+# HPS job manager
+org.hps.job.JobManager = WARNING
Modified: java/branches/HPSJAVA-409/monitoring-app/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-app/pom.xml (original)
+++ java/branches/HPSJAVA-409/monitoring-app/pom.xml Wed Apr 27 11:11:32 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/monitoring-app/</url>
Modified: java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/EventProcessing.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/EventProcessing.java (original)
+++ java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/EventProcessing.java Wed Apr 27 11:11:32 2016
@@ -6,11 +6,14 @@
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.HashSet;
import java.util.List;
+import java.util.Set;
import java.util.logging.Logger;
import org.freehep.record.loop.RecordLoop.Command;
import org.hps.conditions.database.DatabaseConditionsManager;
+import org.hps.job.DatabaseConditionsManagerSetup;
import org.hps.job.JobManager;
import org.hps.monitoring.application.model.ConfigurationModel;
import org.hps.monitoring.application.model.ConnectionStatus;
@@ -34,7 +37,6 @@
import org.jlab.coda.et.exception.EtClosedException;
import org.jlab.coda.et.exception.EtException;
import org.lcsim.conditions.ConditionsListener;
-import org.lcsim.conditions.ConditionsReader;
import org.lcsim.util.Driver;
/**
@@ -194,11 +196,6 @@
private SessionState sessionState;
/**
- * The current conditions manager.
- */
- private DatabaseConditionsManager conditionsManager;
-
- /**
* Class constructor, which will initialize with reference to the current monitoring application and lists of extra
* processors to add to the loop, as well as supplemental conditions listeners that activate when the conditions
* change.
@@ -306,7 +303,7 @@
*
* @param configurationModel the current global {@link org.hps.monitoring.application.ConfigurationModel} object
*/
- private void createEventBuilder(final ConfigurationModel configurationModel) {
+ private LCSimEventBuilder createEventBuilder(final ConfigurationModel configurationModel) {
// Get the class for the event builder.
final String eventBuilderClassName = configurationModel.getEventBuilderClassName();
@@ -318,9 +315,8 @@
} catch (final Exception e) {
throw new RuntimeException("Failed to create LCSimEventBuilder.", e);
}
-
- // Add the builder as a listener so it is notified when conditions change.
- this.conditionsManager.addConditionsListener(this.sessionState.eventBuilder);
+
+ return this.sessionState.eventBuilder;
}
/**
@@ -464,35 +460,70 @@
steering = configurationModel.getSteeringFile();
} else {
steering = configurationModel.getSteeringResource();
- }
-
- this.logger.config("set steering " + steering + " with type "
+ if (!steering.startsWith("/")) {
+ steering = "/" + steering;
+ }
+ }
+
+ this.logger.config("set steering " + steering + " with type "
+ (steeringType == SteeringType.RESOURCE ? "RESOURCE" : "FILE"));
try {
// Create the job manager. A new conditions manager is instantiated from this call but not configured.
this.sessionState.jobManager = new JobManager();
-
- // Set ref to current conditions manager.
- this.conditionsManager = DatabaseConditionsManager.getInstance();
- // Add conditions listeners after new database conditions manager is initialized from the job manager.
+ // Setup class for conditions system.
+ DatabaseConditionsManagerSetup conditions = new DatabaseConditionsManagerSetup();
+
+ // Disable run manager.
+ conditions.setEnableRunManager(false);
+
+ // Setup the event builder to translate from EVIO to LCIO.
+ LCSimEventBuilder eventBuilder = this.createEventBuilder(configurationModel);
+ conditions.addConditionsListener(eventBuilder);
+
+ // Add extra conditions listeners.
for (final ConditionsListener conditionsListener : this.sessionState.conditionsListeners) {
- this.logger.config("adding conditions listener " + conditionsListener.getClass().getName());
- this.conditionsManager.addConditionsListener(conditionsListener);
- }
-
+ this.logger.config("Adding conditions listener " + conditionsListener.getClass().getName());
+ conditions.addConditionsListener(conditionsListener);
+ }
+
+ // Add detector alias.
if (configurationModel.hasValidProperty(ConfigurationModel.DETECTOR_ALIAS_PROPERTY)) {
- // Set a detector alias.
- ConditionsReader.addAlias(configurationModel.getDetectorName(),
+ conditions.addAlias(configurationModel.getDetectorName(),
"file://" + configurationModel.getDetectorAlias());
- this.logger.config("using detector alias " + configurationModel.getDetectorAlias());
- }
-
- // Setup the event builder to translate from EVIO to LCIO.
- // This must happen before Driver setup so the builder's listeners are activated first!
- this.createEventBuilder(configurationModel);
-
+ this.logger.config("Added detector alias " + configurationModel.getDetectorAlias()
+ + " for " + configurationModel.getDetectorName());
+ }
+
+ // Add conditions tag.
+ if (configurationModel.hasValidProperty(ConfigurationModel.CONDITIONS_TAG_PROPERTY)
+ && !configurationModel.getConditionsTag().equals("")) {
+ Set<String> tags = new HashSet<String>();
+ tags.add(configurationModel.getConditionsTag());
+ this.logger.config("Added conditions tag " + configurationModel.getConditionsTag());
+ conditions.setTags(tags);
+ }
+
+ // Set user specified job number.
+ if (configurationModel.hasValidProperty(ConfigurationModel.USER_RUN_NUMBER_PROPERTY)) {
+ final int userRun = configurationModel.getUserRunNumber();
+ this.logger.config("User run number set to " + userRun);
+ conditions.setRun(userRun);
+ }
+
+ // Set detector name.
+ conditions.setDetectorName(configurationModel.getDetectorName());
+
+ // Freeze the conditions system to ignore run numbers from event data.
+ if (configurationModel.hasPropertyKey(ConfigurationModel.FREEZE_CONDITIONS_PROPERTY)) {
+ this.logger.config("user configured to freeze conditions system");
+ conditions.setFreeze(configurationModel.getFreezeConditions());
+ }
+
+ // Register the configured conditions settings with the job manager.
+ this.sessionState.jobManager.setConditionsSetup(conditions);
+
// Configure the job manager for the XML steering.
this.sessionState.jobManager.setDryRun(true);
if (steeringType == SteeringType.RESOURCE) {
@@ -500,32 +531,10 @@
} else if (steeringType.equals(SteeringType.FILE)) {
this.setupSteeringFile(steering);
}
-
- // Set conditions tag if applicable.
- if (configurationModel.hasValidProperty(ConfigurationModel.CONDITIONS_TAG_PROPERTY)
- && !configurationModel.getConditionsTag().equals("")) {
- this.logger.config("conditions tag is set to " + configurationModel.getConditionsTag());
- } else {
- this.logger.config("conditions NOT using a tag");
- }
-
- // Is there a user specified run number from the JobPanel?
- if (configurationModel.hasValidProperty(ConfigurationModel.USER_RUN_NUMBER_PROPERTY)) {
- final int userRunNumber = configurationModel.getUserRunNumber();
- final String detectorName = configurationModel.getDetectorName();
- this.logger.config("setting user run number " + userRunNumber + " with detector " + detectorName);
- conditionsManager.setDetector(detectorName, userRunNumber);
- if (configurationModel.hasPropertyKey(ConfigurationModel.FREEZE_CONDITIONS_PROPERTY)) {
- // Freeze the conditions system to ignore run numbers from the events.
- this.logger.config("user configured to freeze conditions system");
- this.conditionsManager.freeze();
- } else {
- // Allow run numbers to be picked up from the events.
- this.logger.config("user run number provided but conditions system is NOT frozen");
- this.conditionsManager.unfreeze();
- }
- }
-
+
+ // Post-init conditions system which may freeze if run and name were provided.
+ this.sessionState.jobManager.getConditionsSetup().postInitialize();
+
this.logger.info("lcsim setup was successful");
} catch (final Throwable t) {
@@ -592,15 +601,19 @@
this.logger.config("added extra Driver " + driver.getName());
}
- // Enable conditions system activation from EVIO event data in case the PRESTART is missed.
- loopConfig.add(new EvioDetectorConditionsProcessor(configurationModel.getDetectorName()));
- this.logger.config("added EvioDetectorConditionsProcessor to job with detector "
- + configurationModel.getDetectorName());
+ // Enable conditions activation from EVIO; not needed if conditions are frozen for the job.
+ if (!DatabaseConditionsManager.getInstance().isFrozen()) {
+ loopConfig.add(new EvioDetectorConditionsProcessor(configurationModel.getDetectorName()));
+ this.logger.config("added EvioDetectorConditionsProcessor to job with detector "
+ + configurationModel.getDetectorName());
+ } else {
+ this.logger.config("Conditions activation from EVIO is disabled.");
+ }
// Create the CompositeLoop with the configuration.
this.sessionState.loop = new CompositeLoop(loopConfig);
- this.logger.config("record loop is setup");
+ this.logger.config("Record loop is setup.");
}
/**
Modified: java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/Main.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/Main.java (original)
+++ java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/Main.java Wed Apr 27 11:11:32 2016
@@ -8,7 +8,7 @@
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.hps.monitoring.application.model.Configuration;
/**
@@ -29,7 +29,7 @@
final Options options = new Options();
options.addOption(new Option("h", false, "Print help."));
options.addOption(new Option("c", true, "Load a properties file with configuration parameters."));
- final CommandLineParser parser = new DefaultParser();
+ final CommandLineParser parser = new PosixParser();
// Parse command line arguments.
final CommandLine cl;
Modified: java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/MonitoringApplication.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/MonitoringApplication.java (original)
+++ java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/MonitoringApplication.java Wed Apr 27 11:11:32 2016
@@ -78,7 +78,6 @@
*/
@Override
public void close() throws SecurityException {
- // Does nothing.
}
/**
@@ -86,7 +85,6 @@
*/
@Override
public void flush() {
- // Does nothing.
}
/**
@@ -121,8 +119,6 @@
@Override
public void publish(final LogRecord record) {
super.publish(record);
-
- // FIXME: Is this efficient? Should this always happen here?
flush();
}
@@ -295,18 +291,18 @@
loadConfiguration(new Configuration(DEFAULT_CONFIGURATION), false);
if (userConfiguration != null) {
- // Load user configuration.
+ // Load user configuration to supplement default settings.
loadConfiguration(userConfiguration, true);
}
- // Enable the GUI now that initialization is complete.
+ // Enable the GUI after initialization.
this.frame.setEnabled(true);
- LOGGER.info("application initialized successfully");
+ LOGGER.info("Monitoring app initialized successfully.");
} catch (final Exception e) {
- // Don't use the ErrorHandler here because we don't know that it initialized successfully.
- System.err.println("MonitoringApplication failed to initialize without errors!");
+ // Initialization failed so print info and die.
+ System.err.println("ERROR: MonitoringApplication failed to initialize!");
DialogUtil.showErrorDialog(null, "Error Starting Monitoring Application",
"Monitoring application failed to initialize.");
e.printStackTrace();
@@ -321,9 +317,6 @@
*/
@Override
public void actionPerformed(final ActionEvent e) {
-
- // logger.finest("actionPerformed - " + e.getActionCommand());
-
final String command = e.getActionCommand();
if (Commands.CONNECT.equals(command)) {
startSession();
@@ -374,7 +367,7 @@
}
/**
- * Redirect <code>System.out</code> and <code>System.err</code> to a file chosen by a file chooser.
+ * Redirect <code>System.out</code> and <code>System.err</code> to a chosen file.
*/
private void chooseLogFile() {
final JFileChooser fc = new JFileChooser();
@@ -420,7 +413,7 @@
}
/**
- * Exit from the application from exit menu item or hitting close window button.
+ * Exit from the application.
*/
private void exit() {
if (this.connectionModel.isConnected()) {
@@ -954,7 +947,6 @@
// Add listener to push conditions changes to conditions panel.
final List<ConditionsListener> conditionsListeners = new ArrayList<ConditionsListener>();
- conditionsListeners.add(this.frame.getConditionsPanel().new ConditionsPanelListener());
// Instantiate the event processing wrapper.
this.processing = new EventProcessing(this, processors, drivers, conditionsListeners);
@@ -973,7 +965,7 @@
// Start the event processing thread.
this.processing.start();
- LOGGER.info("new session successfully initialized");
+ LOGGER.info("Event processing session started.");
} catch (final Exception e) {
@@ -989,7 +981,7 @@
"There was an error while starting the session." + '\n' + "See the log for details.",
"Session Error");
- LOGGER.severe("failed to start new session");
+ LOGGER.severe("Failed to start event processing.");
}
}
Modified: java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/MonitoringApplicationFrame.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/MonitoringApplicationFrame.java (original)
+++ java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/MonitoringApplicationFrame.java Wed Apr 27 11:11:32 2016
@@ -37,7 +37,7 @@
/**
* The conditions panel.
*/
- private final ConditionsPanel conditionsPanel;
+ //private final ConditionsPanel conditionsPanel;
/**
* The dashboard panel.
@@ -150,8 +150,8 @@
tableTabbedPane.addTab("Trigger Diagnostics", this.triggerPanel);
// Add the conditions panel.
- this.conditionsPanel = new ConditionsPanel();
- tableTabbedPane.addTab("Detector Conditions", this.conditionsPanel);
+ //this.conditionsPanel = new ConditionsPanel();
+ //tableTabbedPane.addTab("Detector Conditions", this.conditionsPanel);
// Vertical split pane in left panel.
this.leftSplitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT, this.dashboardPanel, tableTabbedPane);
@@ -209,9 +209,9 @@
*
* @return the conditions panel
*/
- ConditionsPanel getConditionsPanel() {
- return this.conditionsPanel;
- }
+ //ConditionsPanel getConditionsPanel() {
+ // return this.conditionsPanel;
+ //}
/**
* Get the panel for the dashboard.
Modified: java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/SystemStatusPanel.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/SystemStatusPanel.java (original)
+++ java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/SystemStatusPanel.java Wed Apr 27 11:11:32 2016
@@ -1,6 +1,3 @@
-/**
- *
- */
package org.hps.monitoring.application;
import java.awt.BorderLayout;
@@ -81,7 +78,7 @@
this.statuses.clear();
}
-
+
private class SystemStatusBeeper extends TimerTask {
@Override
@@ -93,7 +90,7 @@
}
}
if (isAlarming) {
- System.out.println("beep\007");
+ Toolkit.getDefaultToolkit().beep();
}
}
}
Modified: java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/util/TableExporter.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/util/TableExporter.java (original)
+++ java/branches/HPSJAVA-409/monitoring-app/src/main/java/org/hps/monitoring/application/util/TableExporter.java Wed Apr 27 11:11:32 2016
@@ -33,7 +33,7 @@
// Column headers.
for (int columnIndex = 0; columnIndex < columnCount; columnIndex++) {
- buffer.append("\"" + model.getColumnName(columnIndex) + "\"" + fieldDelimiter);
+ buffer.append("\"" + model.getColumnName(columnIndex) + "\"" + fieldDelimiter + ",");
}
buffer.setLength(buffer.length() - 1);
buffer.append('\n');
@@ -47,6 +47,7 @@
} else {
buffer.append("\"" + value + "\"" + fieldDelimiter);
}
+ buffer.append(",");
}
buffer.setLength(buffer.length() - 1);
buffer.append('\n');
Modified: java/branches/HPSJAVA-409/monitoring-drivers/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/pom.xml (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/pom.xml Wed Apr 27 11:11:32 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/monitoring-drivers/</url>
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/scalers/DeadtimePlots.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/scalers/DeadtimePlots.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/scalers/DeadtimePlots.java Wed Apr 27 11:11:32 2016
@@ -9,9 +9,7 @@
import org.hps.record.scalers.ScalerUtilities;
import org.hps.record.scalers.ScalerUtilities.LiveTimeIndex;
import org.jfree.chart.JFreeChart;
-import org.jfree.chart.axis.Axis;
import org.jfree.chart.axis.DateAxis;
-import org.jfree.chart.axis.ValueAxis;
import org.jfree.data.time.Second;
import org.jfree.data.time.TimeSeriesCollection;
import org.lcsim.event.EventHeader;
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/PedestalPlots.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/PedestalPlots.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/PedestalPlots.java Wed Apr 27 11:11:32 2016
@@ -23,6 +23,7 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.hps.recon.tracking.SvtPlotUtils;
//===> import org.hps.conditions.deprecated.HPSSVTCalibrationConstants;
//===> import org.hps.conditions.deprecated.SvtUtils;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SVTCellIDPrintDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SVTCellIDPrintDriver.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SVTCellIDPrintDriver.java Wed Apr 27 11:11:32 2016
@@ -17,52 +17,52 @@
*/
public class SVTCellIDPrintDriver extends Driver {
- String rawTrackerHitCollectionName = "SVTData";
- String outputFileName;
- PrintWriter outputStream = null;
+ String rawTrackerHitCollectionName = "SVTData";
+ String outputFileName;
+ PrintWriter outputStream = null;
- public SVTCellIDPrintDriver() {
- }
+ public SVTCellIDPrintDriver() {
+ }
- public void setRawTrackerHitCollectionName(String rawTrackerHitCollectionName) {
- this.rawTrackerHitCollectionName = rawTrackerHitCollectionName;
- }
+ public void setRawTrackerHitCollectionName(String rawTrackerHitCollectionName) {
+ this.rawTrackerHitCollectionName = rawTrackerHitCollectionName;
+ }
- public void setOutputFileName(String outputFileName) {
- this.outputFileName = outputFileName;
- }
+ public void setOutputFileName(String outputFileName) {
+ this.outputFileName = outputFileName;
+ }
- public void startOfData() {
- if (rawTrackerHitCollectionName == null) {
- throw new RuntimeException("The parameter ecalCollectionName was not set!");
- }
+ public void startOfData() {
+ if (rawTrackerHitCollectionName == null) {
+ throw new RuntimeException("The parameter ecalCollectionName was not set!");
+ }
- if (outputFileName != null) {
- try {
- outputStream = new PrintWriter(outputFileName);
- } catch (IOException ex) {
- throw new RuntimeException("Invalid outputFilePath!");
- }
- } else {
- outputStream = new PrintWriter(System.out, true);
- }
- }
+ if (outputFileName != null) {
+ try {
+ outputStream = new PrintWriter(outputFileName);
+ } catch (IOException ex) {
+ throw new RuntimeException("Invalid outputFilePath!");
+ }
+ } else {
+ outputStream = new PrintWriter(System.out, true);
+ }
+ }
- public void process(EventHeader event) {
- // Get the list of ECal hits.
- if (event.hasCollection(SVTData.class, rawTrackerHitCollectionName)) {
- List<SVTData> hits = event.get(SVTData.class, rawTrackerHitCollectionName);
- //outputStream.println("Reading RawCalorimeterHit from event " + event.getEventNumber());
- for (SVTData hit : hits) {
- outputStream.printf("FPGA=%d\thybrid=%d\tchannel=%d\n", hit.getFPGAAddress(), hit.getHybridNumber(), hit.getChannelNumber());
- }
- }
- if (event.hasCollection(RawTrackerHit.class, rawTrackerHitCollectionName)) {
- List<RawTrackerHit> hits = event.get(RawTrackerHit.class, rawTrackerHitCollectionName);
- //outputStream.println("Reading RawCalorimeterHit from event " + event.getEventNumber());
- for (RawTrackerHit hit : hits) {
- outputStream.printf("name=%s\tside=%d\tstrip=%d\n", hit.getDetectorElement().getName(), hit.getIdentifierFieldValue("side"), hit.getIdentifierFieldValue("strip"));
- }
- }
- }
+ public void process(EventHeader event) {
+ // Get the list of ECal hits.
+ if (event.hasCollection(SVTData.class, rawTrackerHitCollectionName)) {
+ List<SVTData> hits = event.get(SVTData.class, rawTrackerHitCollectionName);
+ //outputStream.println("Reading RawCalorimeterHit from event " + event.getEventNumber());
+ for (SVTData hit : hits) {
+ outputStream.printf("FPGA=%d\thybrid=%d\tchannel=%d\n", hit.getFPGAAddress(), hit.getHybridNumber(), hit.getChannelNumber());
+ }
+ }
+ if (event.hasCollection(RawTrackerHit.class, rawTrackerHitCollectionName)) {
+ List<RawTrackerHit> hits = event.get(RawTrackerHit.class, rawTrackerHitCollectionName);
+ //outputStream.println("Reading RawCalorimeterHit from event " + event.getEventNumber());
+ for (RawTrackerHit hit : hits) {
+ outputStream.printf("name=%s\tside=%d\tstrip=%d\n", hit.getDetectorElement().getName(), hit.getIdentifierFieldValue("side"), hit.getIdentifierFieldValue("strip"));
+ }
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SVTHitRecoCorrelations.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SVTHitRecoCorrelations.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SVTHitRecoCorrelations.java Wed Apr 27 11:11:32 2016
@@ -32,7 +32,7 @@
*/
public class SVTHitRecoCorrelations extends Driver {
- //private List<AIDAFrame> plotterFrame = new ArrayList<AIDAFrame>();
+ //private List<AIDAFrame> plotterFrame = new ArrayList<AIDAFrame>();
private List<IPlotter> plotters = new ArrayList<IPlotter>();
private AIDA aida = AIDA.defaultInstance();
private String rawTrackerHitCollectionName = "SVTRawTrackerHits";
@@ -225,7 +225,7 @@
*/
//for(int i=0;i<2;++i) {
- //plotterFrame.get(i).pack();
+ //plotterFrame.get(i).pack();
// plotterFrame.get(i).setVisible(true);
//}
}
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SVTHitReconstructionPlots.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SVTHitReconstructionPlots.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SVTHitReconstructionPlots.java Wed Apr 27 11:11:32 2016
@@ -1,13 +1,9 @@
package org.hps.monitoring.drivers.svt;
-import org.hps.monitoring.drivers.trackrecon.TrackingReconPlots;
import hep.aida.IAnalysisFactory;
-import hep.aida.IHistogram1D;
-import hep.aida.IHistogram2D;
import hep.aida.IPlotter;
import hep.aida.IPlotterStyle;
import hep.aida.IProfile1D;
-import hep.aida.ref.plotter.PlotterRegion;
import java.io.IOException;
import java.util.HashMap;
@@ -16,14 +12,14 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-
+import org.hps.monitoring.drivers.trackrecon.TrackingReconPlots;
//===> import org.hps.conditions.deprecated.SvtUtils;
import org.hps.recon.tracking.FittedRawTrackerHit;
import org.lcsim.detector.identifier.IIdentifier;
import org.lcsim.detector.identifier.IIdentifierHelper;
import org.lcsim.detector.tracker.silicon.DopedSilicon;
+import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.detector.tracker.silicon.SiSensor;
-import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.detector.tracker.silicon.SiTrackerIdentifierHelper;
import org.lcsim.event.EventHeader;
import org.lcsim.event.RawTrackerHit;
@@ -38,7 +34,7 @@
*/
public class SVTHitReconstructionPlots extends Driver {
- //private AIDAFrame plotterFrame;
+ //private AIDAFrame plotterFrame;
private AIDA aida = AIDA.defaultInstance();
private String fittedTrackerHitCollectionName = "SVTFittedRawTrackerHits";
private String trackerHitCollectionName = "StripClusterer_SiTrackerHitStrip1D";
@@ -138,18 +134,18 @@
// TODO: Check if this block of code is equivalent to the block commented out below
for(SiSensor sensor : sensors){
-
- int region = computePlotterRegion(sensor);
- if(region >= plotter1.numberOfRegions()) {
- throw new RuntimeException("not enough regions! (" + region + "/"+ plotter1.numberOfRegions()+ ")");
- }
- plotter1.region(region).plot(aida.histogram1D(sensor.getName() + "_raw_hits", 10, -0.5, 9.5));
- plotter3.region(region).plot(aida.histogram1D(sensor.getName() + "_reco_hits", 10, -0.5, 9.5));
- plotter2.region(region).plot(aida.histogram1D(sensor.getName() + "_cluster_hits", 10, -0.5, 9.5));
- plotter4.region(region).plot(aida.histogram1D(sensor.getName() + "_cluster_size", 9, 0.5, 9.5));
- plotter5.region(region).plot(aida.histogram1D(sensor.getName() + "_cluster_amp", 50, 0, 4000.0));
- ((PlotterRegion) plotter5.region(region)).getPlot().getXAxis().setLabel("Cluster amplitude [ADC counts]");
- plotter6.region(region).plot(aida.histogram2D(sensor.getName() + "_cluster_vs_strip", 128, 0, 640, 100, -50, 50));
+ int region = computePlotterRegion(sensor);
+ if (region >= plotter1.numberOfRegions()) {
+ throw new RuntimeException("not enough regions! (" + region + "/" + plotter1.numberOfRegions() + ")");
+ }
+ plotter1.region(region).plot(aida.histogram1D(sensor.getName() + "_raw_hits", 10, -0.5, 9.5));
+ plotter3.region(region).plot(aida.histogram1D(sensor.getName() + "_reco_hits", 10, -0.5, 9.5));
+ plotter2.region(region).plot(aida.histogram1D(sensor.getName() + "_cluster_hits", 10, -0.5, 9.5));
+ plotter4.region(region).plot(aida.histogram1D(sensor.getName() + "_cluster_size", 9, 0.5, 9.5));
+ plotter5.region(region).plot(aida.histogram1D(sensor.getName() + "_cluster_amp", 50, 0, 4000.0));
+ plotter5.style().xAxisStyle().setLabel("Cluster amplitude [ADC counts]");
+ plotter6.region(region).plot(
+ aida.histogram2D(sensor.getName() + "_cluster_vs_strip", 128, 0, 640, 100, -50, 50));
}
/* ===>
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SampleZeroHVBiasChecker.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SampleZeroHVBiasChecker.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SampleZeroHVBiasChecker.java Wed Apr 27 11:11:32 2016
@@ -28,6 +28,7 @@
import org.hps.conditions.svt.SvtBiasMyaDataReader;
import org.hps.conditions.svt.SvtBiasMyaDataReader.SvtBiasMyaRange;
import org.hps.conditions.svt.SvtBiasMyaDataReader.SvtBiasRunRange;
+import org.hps.recon.tracking.SvtPlotUtils;
import org.hps.record.epics.EpicsData;
import org.hps.record.triggerbank.AbstractIntData;
import org.hps.record.triggerbank.HeadBankData;
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SensorOccupancyPlotsDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SensorOccupancyPlotsDriver.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SensorOccupancyPlotsDriver.java Wed Apr 27 11:11:32 2016
@@ -1,9 +1,4 @@
package org.hps.monitoring.drivers.svt;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
import hep.aida.IAnalysisFactory;
import hep.aida.IHistogram1D;
@@ -13,22 +8,27 @@
import hep.aida.IPlotterRegion;
import hep.aida.IPlotterStyle;
import hep.aida.ITree;
-import hep.aida.ref.plotter.Plotter;
-import hep.aida.ref.plotter.PlotterRegion;
+import hep.aida.jfree.plotter.Plotter;
+import hep.aida.jfree.plotter.PlotterRegion;
import hep.aida.ref.rootwriter.RootFileStore;
import hep.physics.vec.Hep3Vector;
-import java.util.HashSet;
-import java.util.Set;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import org.hps.monitoring.subsys.StatusCode;
import org.hps.monitoring.subsys.Subsystem;
import org.hps.monitoring.subsys.SystemStatus;
import org.hps.monitoring.subsys.SystemStatusImpl;
+import org.hps.recon.tracking.SvtPlotUtils;
+import org.hps.record.triggerbank.AbstractIntData;
+import org.hps.record.triggerbank.TIData;
+import org.lcsim.detector.ITransform3D;
+import org.lcsim.detector.tracker.silicon.ChargeCarrier;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
-import org.lcsim.detector.tracker.silicon.ChargeCarrier;
import org.lcsim.detector.tracker.silicon.SiStrips;
-import org.lcsim.detector.ITransform3D;
import org.lcsim.event.EventHeader;
import org.lcsim.event.GenericObject;
import org.lcsim.event.RawTrackerHit;
@@ -36,8 +36,6 @@
import org.lcsim.recon.tracking.digitization.sisim.SiTrackerHitStrip1D;
import org.lcsim.recon.tracking.digitization.sisim.TrackerHitType;
import org.lcsim.util.Driver;
-import org.hps.record.triggerbank.AbstractIntData;
-import org.hps.record.triggerbank.TIData;
import org.lcsim.util.aida.AIDA;
/**
@@ -51,7 +49,6 @@
//static {
// hep.aida.jfree.AnalysisFactory.register();
//}
-
// Plotting
private static ITree tree = null;
private IAnalysisFactory analysisFactory = AIDA.defaultInstance().analysisFactory();
@@ -75,8 +72,6 @@
private String triggerBankCollectionName = "TriggerBank";
private String stripClusterCollectionName = "StripClusterer_SiTrackerHitStrip1D";
- String rootFile = null;
-
private int maxSamplePosition = -1;
private int timeWindowWeight = 1;
private int eventCount = 0;
@@ -108,7 +103,9 @@
private boolean enableClusterTimeCuts = true;
private double clusterTimeCutMax = 4.0;
private double clusterTimeCutMin = -4.0;
-
+
+ private boolean saveRootFile = true;
+
public SensorOccupancyPlotsDriver() {
maxSampleStatus = new SystemStatusImpl(Subsystem.SVT, "Checks that SVT is timed in (max sample plot)", true);
maxSampleStatus.setStatus(StatusCode.UNKNOWN, "Status is unknown.");
@@ -194,6 +191,10 @@
public void setMaxPeakOccupancy(double maxPeakOccupancy) {
this.maxPeakOccupancy = maxPeakOccupancy;
+ }
+
+ public void setSaveRootFile(boolean saveRootFile) {
+ this.saveRootFile = saveRootFile;
}
/**
@@ -404,12 +405,12 @@
plotters.get("Occupancy vs Position").region(SvtPlotUtils.computePlotterRegion(sensor))
.plot(positionPlots.get(sensor.getName()), this.createOccupancyPlotStyle("Distance from Beam [mm]", sensor, false));
plotters.get("Cluster occupancy vs Position").region(SvtPlotUtils.computePlotterRegion(sensor))
- .plot(clusterPositionPlots.get(sensor.getName()), this.createOccupancyPlotStyle("Distance from Beam [mm]", sensor, false));
+ .plot(clusterPositionPlots.get(sensor.getName()), this.createOccupancyPlotStyle("Distance from Beam [mm]", sensor, false));
}
occupancyMap.put(sensor.getName(), new int[640]);
if (enableMaxSamplePlots) {
- maxSamplePositionPlots.put(sensor.getName(), histogramFactory.createHistogram1D(sensor.getName() + " - Max Sample Number", 6, 0, 6));
+ maxSamplePositionPlots.put(sensor.getName(), histogramFactory.createHistogram1D(sensor.getName() + " - Max Sample Number", 6, -0.5, 5.5));
plotters.get("Max Sample Number").region(SvtPlotUtils.computePlotterRegion(sensor))
.plot(maxSamplePositionPlots.get(sensor.getName()),
this.createOccupancyPlotStyle("Max Sample Number", sensor, false));
@@ -418,12 +419,11 @@
for (IPlotter plotter : plotters.values()) {
for (int regionN = 0; regionN < plotter.numberOfRegions(); regionN++) {
- //Plotter l;
- //PlotterRegion region = ((PlotterRegion) ((Plotter) plotter).region(regionN));
- //if (region..getPlottedObjects().isEmpty()) {
- // continue;
- //}
- //region.getPanel().addMouseListener(new PopupPlotterListener(region));
+ PlotterRegion region = ((PlotterRegion) ((Plotter) plotter).region(regionN));
+ if (region.getPlottedObjects().isEmpty()) {
+ continue;
+ }
+ region.getPanel().addMouseListener(new PopupPlotterListener(region));
}
plotter.show();
}
@@ -520,22 +520,22 @@
maxSamplePositionPlots.get(((HpsSiSensor) rawHit.getDetectorElement()).getName()).fill(maxSamplePositionFound);
}
}
-
+
// Fill the strip cluster counts if available
- if(event.hasCollection(SiTrackerHitStrip1D.class, stripClusterCollectionName)) {
+ if (event.hasCollection(SiTrackerHitStrip1D.class, stripClusterCollectionName)) {
List<SiTrackerHitStrip1D> stripHits1D = event.get(SiTrackerHitStrip1D.class, stripClusterCollectionName);
- for(SiTrackerHitStrip1D h : stripHits1D) {
+ for (SiTrackerHitStrip1D h : stripHits1D) {
SiTrackerHitStrip1D global = h.getTransformedHit(TrackerHitType.CoordinateSystem.GLOBAL);
Hep3Vector pos_global = global.getPositionAsVector();
- if(enableClusterTimeCuts) {
- if( h.getTime() < clusterTimeCutMax && h.getTime() > clusterTimeCutMin)
+ if (enableClusterTimeCuts) {
+ if (h.getTime() < clusterTimeCutMax && h.getTime() > clusterTimeCutMin) {
clusterPositionPlotCounts.get(((HpsSiSensor) h.getRawHits().get(0).getDetectorElement()).getName()).fill(pos_global.y());
- } else
+ }
+ } else {
clusterPositionPlotCounts.get(((HpsSiSensor) h.getRawHits().get(0).getDetectorElement()).getName()).fill(pos_global.y());
- }
- }
-
-
+ }
+ }
+ }
if (enableMaxSamplePlots && eventCount > maxSampleMonitorStart && eventCount % maxSampleMonitorPeriod == 0) {
checkMaxSample();
@@ -563,17 +563,17 @@
positionPlots.get(sensor.getName()).fill(stripPosition, stripOccupancy);
}
}
- if(enablePositionPlots) {
+ if (enablePositionPlots) {
clusterPositionPlots.get(sensor.getName()).reset();
IHistogram1D h = clusterPositionPlotCounts.get(sensor.getName());
- for(int bin=0; bin<h.axis().bins(); ++bin) {
+ for (int bin = 0; bin < h.axis().bins(); ++bin) {
int y = h.binEntries(bin);
double stripClusterOccupancy = (double) y / (double) eventCount;
double x = h.axis().binCenter(bin);
- clusterPositionPlots.get(sensor.getName()).fill(x,stripClusterOccupancy);
+ clusterPositionPlots.get(sensor.getName()).fill(x, stripClusterOccupancy);
}
}
-
+
}
}
@@ -704,14 +704,16 @@
@Override
public void endOfData() {
- rootFile = "run" + runNumber + "_occupancy.root";
- RootFileStore store = new RootFileStore(rootFile);
- try {
- store.open();
- store.add(tree);
- store.close();
- } catch (IOException e) {
- e.printStackTrace();
+ if (saveRootFile) {
+ String rootFile = "run" + runNumber + "_occupancy.root";
+ RootFileStore store = new RootFileStore(rootFile);
+ try {
+ store.open();
+ store.add(tree);
+ store.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
}
System.out.println("%===============================================================================%");
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SvtClusterPlots.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SvtClusterPlots.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SvtClusterPlots.java Wed Apr 27 11:11:32 2016
@@ -1,9 +1,4 @@
package org.hps.monitoring.drivers.svt;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.List;
import hep.aida.IAnalysisFactory;
import hep.aida.IHistogram1D;
@@ -16,18 +11,20 @@
import hep.aida.jfree.plotter.Plotter;
import hep.aida.jfree.plotter.PlotterRegion;
import hep.aida.ref.rootwriter.RootFileStore;
-import java.util.HashSet;
-import java.util.Set;
-
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.hps.recon.tracking.SvtPlotUtils;
+import org.lcsim.detector.tracker.silicon.DopedSilicon;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.event.EventHeader;
import org.lcsim.event.RawTrackerHit;
import org.lcsim.geometry.Detector;
import org.lcsim.recon.tracking.digitization.sisim.SiTrackerHitStrip1D;
import org.lcsim.util.Driver;
-
-import org.hps.recon.tracking.FittedRawTrackerHit;
-import org.lcsim.detector.tracker.silicon.DopedSilicon;
import org.lcsim.util.aida.AIDA;
/**
@@ -55,11 +52,15 @@
private static Map<String, IHistogram1D> singleHitClusterChargePlots = new HashMap<String, IHistogram1D>();
private static Map<String, IHistogram1D> clusterTimePlots = new HashMap<String, IHistogram1D>();
private static Map<String, IHistogram2D> hitTimeTrigTimePlots = new HashMap<String, IHistogram2D>();
- private static IHistogram1D[] hitTimeTrigTimePlots1D = new IHistogram1D[6];
+ private static IHistogram1D[][] hitTimeTrigTimePlots1D = new IHistogram1D[6][2];
+ private static IHistogram2D[][] hitTimeTrigTimePlots2D = new IHistogram2D[6][2];
+
+ private static final int TOP = 0;
+ private static final int BOTTOM = 1;
private List<HpsSiSensor> sensors;
- private Map<RawTrackerHit, FittedRawTrackerHit> fittedRawTrackerHitMap
- = new HashMap<RawTrackerHit, FittedRawTrackerHit>();
+// private Map<RawTrackerHit, FittedRawTrackerHit> fittedRawTrackerHitMap
+// = new HashMap<RawTrackerHit, FittedRawTrackerHit>();
// Detector name
private static final String SUBDETECTOR_NAME = "Tracker";
@@ -70,10 +71,16 @@
private int runNumber = -1;
+ private boolean saveRootFile = true;
+
private boolean dropSmallHitEvents = true;
public void setDropSmallHitEvents(boolean dropSmallHitEvents) {
this.dropSmallHitEvents = dropSmallHitEvents;
+ }
+
+ public void setSaveRootFile(boolean saveRootFile) {
+ this.saveRootFile = saveRootFile;
}
private int computePlotterRegion(HpsSiSensor sensor) {
@@ -84,20 +91,17 @@
} else {
return 6 * (sensor.getLayerNumber() - 1) + 1;
}
- } else {
-
- if (sensor.isTopLayer()) {
- if (sensor.getSide() == HpsSiSensor.POSITRON_SIDE) {
- return 6 * (sensor.getLayerNumber() - 7) + 2;
- } else {
- return 6 * (sensor.getLayerNumber() - 7) + 3;
- }
- } else if (sensor.isBottomLayer()) {
- if (sensor.getSide() == HpsSiSensor.POSITRON_SIDE) {
- return 6 * (sensor.getLayerNumber() - 7) + 4;
- } else {
- return 6 * (sensor.getLayerNumber() - 7) + 5;
- }
+ } else if (sensor.isTopLayer()) {
+ if (sensor.getSide() == HpsSiSensor.POSITRON_SIDE) {
+ return 6 * (sensor.getLayerNumber() - 7) + 2;
+ } else {
+ return 6 * (sensor.getLayerNumber() - 7) + 3;
+ }
+ } else if (sensor.isBottomLayer()) {
+ if (sensor.getSide() == HpsSiSensor.POSITRON_SIDE) {
+ return 6 * (sensor.getLayerNumber() - 7) + 4;
+ } else {
+ return 6 * (sensor.getLayerNumber() - 7) + 5;
}
}
return -1;
@@ -158,8 +162,7 @@
private void resetPlots() {
// Clear the fitted raw hit map of old values
- fittedRawTrackerHitMap.clear();
-
+// fittedRawTrackerHitMap.clear();
// Since all plots are mapped to the name of a sensor, loop
// through the sensors, get the corresponding plots and clear them.
for (HpsSiSensor sensor : sensors) {
@@ -171,34 +174,40 @@
for (IHistogram2D histogram : hitTimeTrigTimePlots.values()) {
histogram.reset();
}
- }
-
- /**
- * Method that creates a map between a fitted raw hit and it's corresponding
- * raw fit
- *
- * @param fittedHits : List of fitted hits to map
- */
- private void mapFittedRawHits(List<FittedRawTrackerHit> fittedHits) {
-
- // Clear the fitted raw hit map of old values
- fittedRawTrackerHitMap.clear();
-
- // Loop through all fitted hits and map them to their corresponding raw hits
- for (FittedRawTrackerHit fittedHit : fittedHits) {
- fittedRawTrackerHitMap.put(fittedHit.getRawTrackerHit(), fittedHit);
- }
- }
-
- /**
- *
- * @param rawHit
- * @return
- */
- private FittedRawTrackerHit getFittedHit(RawTrackerHit rawHit) {
- return fittedRawTrackerHitMap.get(rawHit);
- }
-
+
+ for (int i = 0; i < 6; i++) {
+ for (int j = 0; j < 2; j++) {
+ hitTimeTrigTimePlots1D[i][j].reset();
+ hitTimeTrigTimePlots2D[i][j].reset();
+ }
+ }
+ }
+
+// /**
+// * Method that creates a map between a fitted raw hit and it's corresponding
+// * raw fit
+// *
+// * @param fittedHits : List of fitted hits to map
+// */
+// private void mapFittedRawHits(List<FittedRawTrackerHit> fittedHits) {
+//
+// // Clear the fitted raw hit map of old values
+// fittedRawTrackerHitMap.clear();
+//
+// // Loop through all fitted hits and map them to their corresponding raw hits
+// for (FittedRawTrackerHit fittedHit : fittedHits) {
+// fittedRawTrackerHitMap.put(fittedHit.getRawTrackerHit(), fittedHit);
+// }
+// }
+//
+// /**
+// *
+// * @param rawHit
+// * @return
+// */
+// private FittedRawTrackerHit getFittedHit(RawTrackerHit rawHit) {
+// return fittedRawTrackerHitMap.get(rawHit);
+// }
protected void detectorChanged(Detector detector) {
// Get the HpsSiSensor objects from the geometry
@@ -236,7 +245,7 @@
.plot(singleHitClusterChargePlots.get(sensor.getName()), this.createStyle(null, "Cluster Amplitude [ADC Counts]", ""));
clusterTimePlots.put(sensor.getName(),
- histogramFactory.createHistogram1D(sensor.getName() + " - Cluster Time", 100, -50, 50));
+ histogramFactory.createHistogram1D(sensor.getName() + " - Cluster Time", 100, -75, 50));
plotters.get("Cluster Time").region(this.computePlotterRegion(sensor))
.plot(clusterTimePlots.get(sensor.getName()), this.createStyle(null, "Cluster Time [ns]", ""));
}
@@ -245,18 +254,25 @@
plotters.get("SVT-trigger timing top-bottom").createRegions(1, 2);
hitTimeTrigTimePlots.put("Top",
- histogramFactory.createHistogram2D("Top Cluster Time vs. Trigger Phase", 100, -50, 50, 6, 0, 24));
+ histogramFactory.createHistogram2D("Top Cluster Time vs. Trigger Phase", 100, -75, 50, 6, 0, 24));
plotters.get("SVT-trigger timing top-bottom").region(0).plot(hitTimeTrigTimePlots.get("Top"), this.createStyle(null, "Cluster Time [ns]", "Trigger Phase[ns]"));
hitTimeTrigTimePlots.put("Bottom",
- histogramFactory.createHistogram2D("Bottom Cluster Time vs. Trigger Phase", 100, -50, 50, 6, 0, 24));
+ histogramFactory.createHistogram2D("Bottom Cluster Time vs. Trigger Phase", 100, -75, 50, 6, 0, 24));
plotters.get("SVT-trigger timing top-bottom").region(1).plot(hitTimeTrigTimePlots.get("Bottom"), this.createStyle(null, "Cluster Time [ns]", "Trigger Phase[ns]"));
plotters.put("SVT-trigger timing by phase", plotterFactory.create("SVT-trigger timing by phase"));
- plotters.get("SVT-trigger timing by phase").createRegions(1, 6);
+ plotters.get("SVT-trigger timing by phase").createRegions(2, 6);
+
+ plotters.put("SVT-trigger timing and amplitude by phase", plotterFactory.create("SVT-trigger timing and amplitude by phase"));
+ plotters.get("SVT-trigger timing and amplitude by phase").createRegions(2, 6);
for (int i = 0; i < 6; i++) {
- hitTimeTrigTimePlots1D[i] = histogramFactory.createHistogram1D("Cluster Time for Phase " + i, 100, -50, 50);
- plotters.get("SVT-trigger timing by phase").region(i).plot(hitTimeTrigTimePlots1D[i], this.createStyle(null, "Cluster Time [ns]", ""));
+ for (int j = 0; j < 2; j++) {
+ hitTimeTrigTimePlots1D[i][j] = histogramFactory.createHistogram1D(String.format("Cluster Time for Phase %d, %s", i, j == TOP ? "Top" : "Bottom"), 100, -75, 50);
+ plotters.get("SVT-trigger timing by phase").region(i + 6 * j).plot(hitTimeTrigTimePlots1D[i][j], this.createStyle(null, "Cluster Time [ns]", ""));
+ hitTimeTrigTimePlots2D[i][j] = histogramFactory.createHistogram2D(String.format("Cluster Amplitude vs. Time for Phase %d, %s", i, j == TOP ? "Top" : "Bottom"), 100, -75, 50, 100, 0, 5000.0);
+ plotters.get("SVT-trigger timing and amplitude by phase").region(i + 6 * j).plot(hitTimeTrigTimePlots2D[i][j], this.createStyle(null, "Cluster Time [ns]", "Cluster Amplitude [ADC Counts]"));
+ }
}
for (IPlotter plotter : plotters.values()) {
@@ -277,17 +293,15 @@
runNumber = event.getRunNumber();
}
- // If the event doesn't contain fitted raw hits, skip it
- if (!event.hasCollection(FittedRawTrackerHit.class, fittedHitsCollectionName)) {
- return;
- }
-
- // Get the list of fitted hits from the event
- List<FittedRawTrackerHit> fittedHits = event.get(FittedRawTrackerHit.class, fittedHitsCollectionName);
-
- // Map the fitted hits to their corresponding raw hits
- this.mapFittedRawHits(fittedHits);
-
+// // If the event doesn't contain fitted raw hits, skip it
+// if (!event.hasCollection(FittedRawTrackerHit.class, fittedHitsCollectionName)) {
+// return;
+// }
+// Get the list of fitted hits from the event
+// List<FittedRawTrackerHit> fittedHits = event.get(FittedRawTrackerHit.class, fittedHitsCollectionName);
+//
+// // Map the fitted hits to their corresponding raw hits
+// this.mapFittedRawHits(fittedHits);
// If the event doesn't contain any clusters, skip it
if (!event.hasCollection(SiTrackerHitStrip1D.class, clusterCollectionName)) {
return;
@@ -318,26 +332,29 @@
}
clusterTimePlots.get(sensor.getName()).fill(cluster.getTime());
- hitTimeTrigTimePlots1D[(int) ((event.getTimeStamp() / 4) % 6)].fill(cluster.getTime());
if (sensor.isTopLayer()) {
+ hitTimeTrigTimePlots1D[(int) ((event.getTimeStamp() / 4) % 6)][TOP].fill(cluster.getTime());
+ hitTimeTrigTimePlots2D[(int) ((event.getTimeStamp() / 4) % 6)][TOP].fill(cluster.getTime(), cluster.getdEdx() / DopedSilicon.ENERGY_EHPAIR);
hitTimeTrigTimePlots.get("Top").fill(cluster.getTime(), event.getTimeStamp() % 24);
} else {
+ hitTimeTrigTimePlots1D[(int) ((event.getTimeStamp() / 4) % 6)][BOTTOM].fill(cluster.getTime());
+ hitTimeTrigTimePlots2D[(int) ((event.getTimeStamp() / 4) % 6)][BOTTOM].fill(cluster.getTime(), cluster.getdEdx() / DopedSilicon.ENERGY_EHPAIR);
hitTimeTrigTimePlots.get("Bottom").fill(cluster.getTime(), event.getTimeStamp() % 24);
}
}
}
public void endOfData() {
-
- String rootFile = "run" + runNumber + "_cluster_analysis.root";
- RootFileStore store = new RootFileStore(rootFile);
- try {
- store.open();
- store.add(tree);
- store.close();
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
-
+ if (saveRootFile) {
+ String rootFile = "run" + runNumber + "_cluster_analysis.root";
+ RootFileStore store = new RootFileStore(rootFile);
+ try {
+ store.open();
+ store.add(tree);
+ store.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SvtHitPlots.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SvtHitPlots.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SvtHitPlots.java Wed Apr 27 11:11:32 2016
@@ -2,6 +2,7 @@
import hep.aida.IAnalysisFactory;
import hep.aida.IHistogram1D;
+import hep.aida.IHistogram2D;
import hep.aida.IHistogramFactory;
import hep.aida.IPlotter;
import hep.aida.IPlotterFactory;
@@ -9,18 +10,19 @@
import hep.aida.ITree;
import hep.aida.jfree.plotter.Plotter;
import hep.aida.jfree.plotter.PlotterRegion;
-
+import hep.aida.ref.rootwriter.RootFileStore;
+
+import java.io.IOException;
import java.util.HashMap;
-import java.util.HashSet;
import java.util.List;
import java.util.Map;
-import java.util.Set;
-
+
+import org.hps.recon.tracking.SvtPlotUtils;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
-import org.lcsim.util.Driver;
-import org.lcsim.geometry.Detector;
import org.lcsim.event.EventHeader;
import org.lcsim.event.RawTrackerHit;
+import org.lcsim.geometry.Detector;
+import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
/**
@@ -28,6 +30,8 @@
* event.
*
* @author Omar Moreno <[log in to unmask]>
+ * @author Per Hansson Adrian <[log in to unmask]>
+ *
*/
public class SvtHitPlots extends Driver {
@@ -38,23 +42,25 @@
// Plotting
private static ITree tree = null;
- private IAnalysisFactory analysisFactory = AIDA.defaultInstance().analysisFactory();
- private IPlotterFactory plotterFactory = analysisFactory.createPlotterFactory("SVT Hits");
+ private final IAnalysisFactory analysisFactory = AIDA.defaultInstance().analysisFactory();
+ private final IPlotterFactory plotterFactory = analysisFactory.createPlotterFactory("SVT Hits");
private IHistogramFactory histogramFactory = null;
protected Map<String, IPlotter> plotters = new HashMap<String, IPlotter>();
// Histogram Maps
- private static Map<String, IHistogram1D> hitsPerSensorPlots = new HashMap<String, IHistogram1D>();
- private static Map<String, int[]> hitsPerSensor = new HashMap<String, int[]>();
- private static Map<String, IHistogram1D> layersHitPlots = new HashMap<String, IHistogram1D>();
- private static Map<String, IHistogram1D> hitCountPlots = new HashMap<String, IHistogram1D>();
- private static Map<String, IHistogram1D> firstSamplePlots = new HashMap<String, IHistogram1D>();
+ private static final Map<String, IHistogram1D> hitsPerSensorPlots = new HashMap<String, IHistogram1D>();
+ private static final Map<String, int[]> hitsPerSensor = new HashMap<String, int[]>();
+ private static final Map<String, IHistogram1D> layersHitPlots = new HashMap<String, IHistogram1D>();
+ private static final Map<String, IHistogram1D> hitCountPlots = new HashMap<String, IHistogram1D>();
+ private static final Map<String, IHistogram1D> firstSamplePlots = new HashMap<String, IHistogram1D>();
+// private static Map<String, IHistogram1D> firstSamplePlotsNoise = new HashMap<String, IHistogram1D>();
+ private static final Map<String, IHistogram2D> firstSamplePlotsNoisePerChannel = new HashMap<String, IHistogram2D>();
private List<HpsSiSensor> sensors;
private static final String SUBDETECTOR_NAME = "Tracker";
- private String rawTrackerHitCollectionName = "SVTRawTrackerHits";
-
+ private final String rawTrackerHitCollectionName = "SVTRawTrackerHits";
+
// Counters
double eventCount = 0;
double totalHitCount = 0;
@@ -62,36 +68,23 @@
double totalBotHitCount = 0;
private boolean dropSmallHitEvents = true;
+ private static final boolean debug = false;
+ private boolean doPerChannelSamplePlots = false;
+ private int maxSampleCutForNoise = -1;
+ private boolean saveRootFile = false;
+ private String outputRootFilename = "";
+ private boolean showPlots = true;
public void setDropSmallHitEvents(boolean dropSmallHitEvents) {
this.dropSmallHitEvents = dropSmallHitEvents;
}
- private int computePlotterRegion(HpsSiSensor sensor) {
-
- if (sensor.getLayerNumber() < 7) {
- if (sensor.isTopLayer()) {
- return 6 * (sensor.getLayerNumber() - 1);
- } else {
- return 6 * (sensor.getLayerNumber() - 1) + 1;
- }
- } else {
-
- if (sensor.isTopLayer()) {
- if (sensor.getSide() == HpsSiSensor.POSITRON_SIDE) {
- return 6 * (sensor.getLayerNumber() - 7) + 2;
- } else {
- return 6 * (sensor.getLayerNumber() - 7) + 3;
- }
- } else if (sensor.isBottomLayer()) {
- if (sensor.getSide() == HpsSiSensor.POSITRON_SIDE) {
- return 6 * (sensor.getLayerNumber() - 7) + 4;
- } else {
- return 6 * (sensor.getLayerNumber() - 7) + 5;
- }
- }
- }
- return -1;
+ public void setDoPerChannelsSampleplots(boolean val) {
+ doPerChannelSamplePlots = val;
+ }
+
+ public void setSaveRootFile(boolean save) {
+ saveRootFile = save;
}
/**
@@ -137,6 +130,10 @@
for (HpsSiSensor sensor : sensors) {
hitsPerSensorPlots.get(sensor.getName()).reset();
firstSamplePlots.get(sensor.getName()).reset();
+// firstSamplePlotsNoise.get(sensor.getName()).reset();
+ if (doPerChannelSamplePlots) {
+ firstSamplePlotsNoisePerChannel.get(sensor.getName()).reset();
+ }
}
for (IHistogram1D histogram : layersHitPlots.values()) {
@@ -149,12 +146,13 @@
}
+ @Override
protected void detectorChanged(Detector detector) {
// Get the HpsSiSensor objects from the geometry
sensors = detector.getSubdetector(SUBDETECTOR_NAME).getDetectorElement().findDescendants(HpsSiSensor.class);
- if (sensors.size() == 0) {
+ if (sensors.isEmpty()) {
throw new RuntimeException("No sensors were found in this detector.");
}
@@ -192,43 +190,74 @@
plotters.get("Raw hit counts/Event").createRegions(2, 2);
hitCountPlots.put("Raw hit counts/Event",
- histogramFactory.createHistogram1D("Raw hit counts", 100, 0, 100));
+ histogramFactory.createHistogram1D("Raw hit counts", 100, 0, 500));
plotters.get("Raw hit counts/Event").region(0).plot(hitCountPlots.get("Raw hit counts/Event"), SvtPlotUtils.createStyle(plotterFactory, "Number of Raw Hits", ""));
hitCountPlots.put("SVT top raw hit counts/Event",
- histogramFactory.createHistogram1D("SVT top raw hit counts", 100, 0, 100));
+ histogramFactory.createHistogram1D("SVT top raw hit counts", 100, 0, 300));
plotters.get("Raw hit counts/Event").region(2).plot(hitCountPlots.get("SVT top raw hit counts/Event"), SvtPlotUtils.createStyle(plotterFactory, "Number of Raw Hits in Top Volume", ""));
hitCountPlots.put("SVT bottom raw hit counts/Event",
- histogramFactory.createHistogram1D("SVT bottom raw hit counts", 100, 0, 100));
+ histogramFactory.createHistogram1D("SVT bottom raw hit counts", 100, 0, 300));
plotters.get("Raw hit counts/Event").region(3).plot(hitCountPlots.get("SVT bottom raw hit counts/Event"), SvtPlotUtils.createStyle(plotterFactory, "Number of Raw Bits in the Bottom Volume", ""));
plotters.put("First sample distributions (pedestal shifts)", plotterFactory.create("First sample distributions (pedestal shifts)"));
plotters.get("First sample distributions (pedestal shifts)").createRegions(6, 6);
+
+// plotters.put("First sample distributions (pedestal shifts, MAX_SAMPLE>=4)", plotterFactory.create("First sample distributions (pedestal shifts, MAX_SAMPLE>=4)"));
+// plotters.get("First sample distributions (pedestal shifts, MAX_SAMPLE>=4)").createRegions(6, 6);
+ if (doPerChannelSamplePlots) {
+ plotters.put("First sample channel distributions (pedestal shifts)", plotterFactory.create("First sample channel distributions (pedestal shifts)"));
+ plotters.get("First sample channel distributions (pedestal shifts)").createRegions(6, 6);
+ }
+
for (HpsSiSensor sensor : sensors) {
firstSamplePlots.put(sensor.getName(),
histogramFactory.createHistogram1D(sensor.getName() + " - first sample", 100, -500.0, 2000.0));
- plotters.get("First sample distributions (pedestal shifts)").region(this.computePlotterRegion(sensor))
+ plotters.get("First sample distributions (pedestal shifts)").region(SvtPlotUtils.computePlotterRegion(sensor))
.plot(firstSamplePlots.get(sensor.getName()), this.createStyle(sensor, "First sample - pedestal [ADC counts]", ""));
+// firstSamplePlotsNoise.put(sensor.getName(),
+// histogramFactory.createHistogram1D(sensor.getName() + " - first sample (MAX_SAMPLE>=4)", 100, -500.0, 2000.0));
+// plotters.get("First sample distributions (pedestal shifts, MAX_SAMPLE>=4)").region(SvtPlotUtils.computePlotterRegion(sensor))
+// .plot(firstSamplePlotsNoise.get(sensor.getName()), this.createStyle(sensor, "First sample - pedestal (MAX_SAMPLE>=4) [ADC counts]", ""));
+
+ if (doPerChannelSamplePlots) {
+ firstSamplePlotsNoisePerChannel.put(sensor.getName(),
+ histogramFactory.createHistogram2D(sensor.getName() + " channels - first sample", 640, -0.5, 639.5, 20, -500.0, 500.0));
+ plotters.get("First sample channel distributions (pedestal shifts)").region(SvtPlotUtils.computePlotterRegion(sensor))
+ .plot(firstSamplePlotsNoisePerChannel.get(sensor.getName()), this.createStyle(sensor, "First sample channels - pedestal [ADC counts]", ""));
+ }
+
}
for (IPlotter plotter : plotters.values()) {
for (int regionN = 0; regionN < plotter.numberOfRegions(); regionN++) {
PlotterRegion region = ((PlotterRegion) ((Plotter) plotter).region(regionN));
- if (region.getPlottedObjects().size() == 0) {
+ if (region.getPlottedObjects().isEmpty()) {
continue;
}
region.getPanel().addMouseListener(new PopupPlotterListener(region));
}
- plotter.show();
- }
- }
-
+ if (showPlots) {
+ plotter.show();
+ }
+ }
+ }
+
+ @Override
public void process(EventHeader event) {
if (!event.hasCollection(RawTrackerHit.class, rawTrackerHitCollectionName)) {
return;
}
+ if (debug && ((int) eventCount % 100 == 0)) {
+ System.out.println(this.getClass().getSimpleName() + ": processed " + String.valueOf(eventCount) + " events");
+ }
+
eventCount++;
+
+ if (outputRootFilename.isEmpty()) {
+ outputRootFilename = "run" + String.valueOf(event.getRunNumber());
+ }
// Get RawTrackerHit collection from event.
List<RawTrackerHit> rawHits = event.get(RawTrackerHit.class, rawTrackerHitCollectionName);
@@ -240,8 +269,31 @@
this.clearHitMaps();
for (RawTrackerHit rawHit : rawHits) {
HpsSiSensor sensor = (HpsSiSensor) rawHit.getDetectorElement();
+ int channel = rawHit.getIdentifierFieldValue("strip");
+ double pedestal = sensor.getPedestal(channel, 0);
+ // Find the sample with maximum ADC count
+ int maxSample = 0;
+ double maxSampleValue = 0;
+ for (int s = 0; s < 6; ++s) {
+ if (((double) rawHit.getADCValues()[s] - pedestal) > maxSampleValue) {
+ maxSample = s;
+ maxSampleValue = ((double) rawHit.getADCValues()[s]) - pedestal;
+ }
+ }
+
hitsPerSensor.get(sensor.getName())[0]++;
- firstSamplePlots.get(sensor.getName()).fill(rawHit.getADCValues()[0] - sensor.getPedestal(rawHit.getIdentifierFieldValue("strip"), 0));
+ firstSamplePlots.get(sensor.getName()).fill(rawHit.getADCValues()[0] - pedestal);
+// if (maxSampleCutForNoise >= 0 && maxSample >= maxSampleCutForNoise) {
+// firstSamplePlotsNoise.get(sensor.getName()).fill(rawHit.getADCValues()[0] - pedestal);
+ if (doPerChannelSamplePlots) {
+ firstSamplePlotsNoisePerChannel.get(sensor.getName()).fill(channel, rawHit.getADCValues()[0] - pedestal);
+ }
+// } else {
+// firstSamplePlotsNoise.get(sensor.getName()).fill(rawHit.getADCValues()[0] - pedestal);
+// if (doPerChannelSamplePlots) {
+// firstSamplePlotsNoisePerChannel.get(sensor.getName()).fill(channel, rawHit.getADCValues()[0] - pedestal);
+// }
+// }
}
int[] topLayersHit = new int[12];
@@ -300,6 +352,23 @@
System.out.println("% Total Top SVT Hits/Event: " + totalTopHitCount / eventCount);
System.out.println("% Total Bottom SVT Hits/Event: " + totalBotHitCount / eventCount);
System.out.println("\n%================================================%");
+
+ if (saveRootFile) {
+ String rootFileName = outputRootFilename.isEmpty() ? "svthitplots.root" : outputRootFilename + "_svthitplots.root";
+ RootFileStore rootFileStore = new RootFileStore(rootFileName);
+ try {
+ rootFileStore.open();
+ rootFileStore.add(tree);
+ rootFileStore.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ }
+
+ public void setShowPlots(boolean showPlots) {
+ this.showPlots = showPlots;
}
}
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SvtTimingInPlots.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SvtTimingInPlots.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SvtTimingInPlots.java Wed Apr 27 11:11:32 2016
@@ -1,35 +1,31 @@
package org.hps.monitoring.drivers.svt;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
import hep.aida.IAnalysisFactory;
-import hep.aida.IHistogramFactory;
import hep.aida.IHistogram1D;
import hep.aida.IHistogram2D;
+import hep.aida.IHistogramFactory;
import hep.aida.IPlotter;
import hep.aida.IPlotterFactory;
import hep.aida.IPlotterStyle;
import hep.aida.ITree;
import hep.aida.ref.rootwriter.RootFileStore;
-import org.lcsim.util.Driver;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.hps.recon.tracking.FittedRawTrackerHit;
+import org.hps.recon.tracking.ShapeFitParameters;
+import org.hps.record.triggerbank.SSPCluster;
+import org.hps.record.triggerbank.SSPData;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.detector.tracker.silicon.SiSensor;
import org.lcsim.event.EventHeader;
-import org.lcsim.event.GenericObject;
import org.lcsim.event.LCRelation;
import org.lcsim.event.RawTrackerHit;
import org.lcsim.geometry.Detector;
-import org.hps.recon.tracking.FittedRawTrackerHit;
-import org.hps.recon.tracking.ShapeFitParameters;
-import org.hps.record.triggerbank.AbstractIntData;
-import org.hps.record.triggerbank.SSPCluster;
-import org.hps.record.triggerbank.SSPData;
-import org.hps.record.triggerbank.SSPSinglesTrigger;
-import org.hps.record.triggerbank.TIData;
+import org.lcsim.util.Driver;
/**
* Monitoring driver that will be used when 'timing in' the SVT.
@@ -244,51 +240,51 @@
isEcalTopCluster = false;
List<SSPCluster> clusters = null;
SSPData sspData = null;
- /*if(event.hasCollection(GenericObject.class, triggerBankCollectionName)) {
-
- // Get the list of trigger banks from the event
- List<GenericObject> triggerBanks = event.get(GenericObject.class, triggerBankCollectionName);
-
- System.out.println("Total trigger banks: " + triggerBanks.size());
-
- // Loop through the collection of banks and get the SSP and TI banks.
- for (GenericObject triggerBank : triggerBanks) {
-
- // If the bank contains TI data, process it
- if (AbstractIntData.getTag(triggerBank) == TIData.BANK_TAG) {
-
- TIData tiData = new TIData(triggerBank);
-
- // Check if the trigger is singles
- if (tiData.isSingle0Trigger() || tiData.isSingle1Trigger()) {
- isSingleClusterTrigger = true;
- }
-
- } else if (AbstractIntData.getTag(triggerBank) == SSPData.BANK_TAG) {
-
- sspData = new SSPData(triggerBank);
-
- clusters = sspData.getClusters();
-
- for (SSPCluster cluster : clusters) {
- if (cluster.getYIndex() > 0) {
- isEcalTopCluster = true;
- }
- }
- }
- }
-
- if (isSingleClusterTrigger) {
- System.out.println("Total number of singles triggers: " + sspData.getSinglesTriggers().size());
- for (SSPSinglesTrigger trigger : sspData.getSinglesTriggers()) {
- System.out.println("Trigger: " + trigger.toString());
- }
- System.out.println("Total number of SSP clusters: " + clusters.size());
- for (SSPCluster cluster : clusters) {
- System.out.println("X: " + cluster.getXIndex() + " Y: " + cluster.getYIndex() + " time: " + cluster.getTime());
- }
- }
- }*/
+ /*if(event.hasCollection(GenericObject.class, triggerBankCollectionName)) {
+
+ // Get the list of trigger banks from the event
+ List<GenericObject> triggerBanks = event.get(GenericObject.class, triggerBankCollectionName);
+
+ System.out.println("Total trigger banks: " + triggerBanks.size());
+
+ // Loop through the collection of banks and get the SSP and TI banks.
+ for (GenericObject triggerBank : triggerBanks) {
+
+ // If the bank contains TI data, process it
+ if (AbstractIntData.getTag(triggerBank) == TIData.BANK_TAG) {
+
+ TIData tiData = new TIData(triggerBank);
+
+ // Check if the trigger is singles
+ if (tiData.isSingle0Trigger() || tiData.isSingle1Trigger()) {
+ isSingleClusterTrigger = true;
+ }
+
+ } else if (AbstractIntData.getTag(triggerBank) == SSPData.BANK_TAG) {
+
+ sspData = new SSPData(triggerBank);
+
+ clusters = sspData.getClusters();
+
+ for (SSPCluster cluster : clusters) {
+ if (cluster.getYIndex() > 0) {
+ isEcalTopCluster = true;
+ }
+ }
+ }
+ }
+
+ if (isSingleClusterTrigger) {
+ System.out.println("Total number of singles triggers: " + sspData.getSinglesTriggers().size());
+ for (SSPSinglesTrigger trigger : sspData.getSinglesTriggers()) {
+ System.out.println("Trigger: " + trigger.toString());
+ }
+ System.out.println("Total number of SSP clusters: " + clusters.size());
+ for (SSPCluster cluster : clusters) {
+ System.out.println("X: " + cluster.getXIndex() + " Y: " + cluster.getYIndex() + " time: " + cluster.getTime());
+ }
+ }
+ }*/
// Obtain all relations between an SVT raw hit and its corresponding
// fit parameters. The fit parameters are obtained from the fit to
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/PlotAndFitUtilities.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/PlotAndFitUtilities.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/PlotAndFitUtilities.java Wed Apr 27 11:11:32 2016
@@ -50,7 +50,7 @@
static void plot(IPlotter plotter, IBaseHistogram histogram, IPlotterStyle style, int region) {
if (style == null)
style = getPlotterStyle(histogram);
- System.out.println("Putting plot in region " + region);
+ //System.out.println("Putting plot in region " + region);
plotter.region(region).plot(histogram, style);
}
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/SVTOpeningAlignment.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/SVTOpeningAlignment.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/SVTOpeningAlignment.java Wed Apr 27 11:11:32 2016
@@ -1,8 +1,8 @@
package org.hps.monitoring.drivers.trackrecon;
+import static org.hps.monitoring.drivers.trackrecon.PlotAndFitUtilities.fitAndPutParameters;
import hep.aida.IAnalysisFactory;
import hep.aida.IFitFactory;
-import hep.aida.IFitResult;
import hep.aida.IFitter;
import hep.aida.IFunction;
import hep.aida.IFunctionFactory;
@@ -10,12 +10,13 @@
import hep.aida.IPlotter;
import hep.aida.IPlotterFactory;
import hep.aida.IPlotterStyle;
+
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
-import static org.hps.monitoring.drivers.trackrecon.PlotAndFitUtilities.fitAndPutParameters;
+
import org.lcsim.event.EventHeader;
import org.lcsim.event.Track;
import org.lcsim.event.TrackState;
@@ -24,7 +25,6 @@
import org.lcsim.geometry.Detector;
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
-import static org.hps.monitoring.drivers.trackrecon.PlotAndFitUtilities.performGaussianFit;
/**
*
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/TrackResiduals.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/TrackResiduals.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/TrackResiduals.java Wed Apr 27 11:11:32 2016
@@ -1,20 +1,20 @@
package org.hps.monitoring.drivers.trackrecon;
+import static org.hps.monitoring.drivers.trackrecon.PlotAndFitUtilities.fitAndPutParameters;
+import static org.hps.monitoring.drivers.trackrecon.PlotAndFitUtilities.plot;
import hep.aida.IAnalysisFactory;
import hep.aida.IFitFactory;
-import hep.aida.IFitResult;
-import hep.aida.IFitter;
import hep.aida.IFunction;
import hep.aida.IFunctionFactory;
import hep.aida.IHistogram1D;
import hep.aida.IPlotter;
import hep.aida.IPlotterFactory;
+
import java.io.IOException;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
-import static org.hps.monitoring.drivers.trackrecon.PlotAndFitUtilities.fitAndPutParameters;
-import static org.hps.monitoring.drivers.trackrecon.PlotAndFitUtilities.plot;
+
import org.lcsim.event.EventHeader;
import org.lcsim.event.GenericObject;
import org.lcsim.geometry.Detector;
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/TrackTimePlots.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/TrackTimePlots.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/TrackTimePlots.java Wed Apr 27 11:11:32 2016
@@ -7,17 +7,14 @@
import hep.aida.IPlotterStyle;
import hep.aida.ref.plotter.style.registry.IStyleStore;
import hep.aida.ref.plotter.style.registry.StyleRegistry;
-
import java.util.List;
import static org.hps.monitoring.drivers.trackrecon.PlotAndFitUtilities.plot;
-import org.hps.record.triggerbank.AbstractIntData;
-import org.hps.record.triggerbank.TestRunTriggerData;
import org.lcsim.detector.tracker.silicon.DopedSilicon;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.event.EventHeader;
-import org.lcsim.event.GenericObject;
+import org.lcsim.event.RawTrackerHit;
import org.lcsim.event.Track;
import org.lcsim.event.TrackerHit;
import org.lcsim.fit.helicaltrack.HelicalTrackCross;
@@ -116,7 +113,7 @@
for (int i = 0; i < nlayers; i++) {
int region = computePlotterRegion(i);
- trackHit2D[i] = aida.histogram2D("Layer " + i + " trackHit vs dt", 75, -50, 100.0, 50, -20, 20.0);
+ trackHit2D[i] = aida.histogram2D("Layer " + i + " trigger phase vs dt", 80, -20, 20.0, 6, 0, 24.0);
plot(plotter5, trackHit2D[i], style2d, region);
trackHitDtChan[i] = aida.histogram2D("Layer " + i + " dt vs position", 200, -20, 20, 50, -20, 20.0);
plot(plotter6, trackHitDtChan[i], style2d, region);
@@ -124,23 +121,23 @@
plotter.show();
plotter3.show();
plotter4.show();
-// plotter5.show();"Track Time vs. dt"
-// plotter6.show(); "Track dt vs. Channel"
+ plotter5.show();//"Track Time vs. dt"
+ plotter6.show();// "Track dt vs. Channel"
for (int module = 0; module < 2; module++) {
- trackT0[module] = aida.histogram1D((module == 0 ? "Top" : "Bottom") + " Track Time", 75, -50, 100.0);
+ trackT0[module] = aida.histogram1D((module == 0 ? "Top" : "Bottom") + " Track Time", 80, -20, 20.0);
plot(plotter2, trackT0[module], null, module);
- trackTrigTime[module] = aida.histogram2D((module == 0 ? "Top" : "Bottom") + " Track Time vs. Trig Time", 75, -50, 100.0, 6, -2, 22);
+ trackTrigTime[module] = aida.histogram2D((module == 0 ? "Top" : "Bottom") + " Track Time vs. Trig Time", 80, -20, 20.0, 6, 0, 24);
plot(plotter2, trackTrigTime[module], style2d, module + 2);
trackTimeRange[module] = aida.histogram1D((module == 0 ? "Top" : "Bottom") + " Track Hit Time Range", 75, 0, 30.0);
plot(plotter7, trackTimeRange[module], null, module);
- trackTimeMinMax[module] = aida.histogram2D((module == 0 ? "Top" : "Bottom") + " First and Last Track Hit Times", 75, -50, 100.0, 75, -50, 100.0);
+ trackTimeMinMax[module] = aida.histogram2D((module == 0 ? "Top" : "Bottom") + " First and Last Track Hit Times", 80, -20, 20.0, 80, -20, 20.0);
plot(plotter7, trackTimeMinMax[module], style2d, module + 2);
}
plotter2.show();
-// plotter7.show(); //"Track Hit Time Range"
+ plotter7.show(); //"Track Hit Time Range"
}
public void setHitCollection(String hitCollection) {
@@ -166,7 +163,7 @@
List<Track> tracks = event.get(Track.class, trackCollectionName);
for (Track track : tracks) {
- int trackModule = -1;
+ int trackModule;
if (track.getTrackerHits().get(0).getPosition()[2] > 0) {
trackModule = 0;
} else {
@@ -179,7 +176,8 @@
for (TrackerHit hitCross : track.getTrackerHits()) {
for (HelicalTrackStrip hit : ((HelicalTrackCross) hitCross).getStrips()) {
int layer = hit.layer();
- trackHitT0[trackModule][layer - 1].fill(hit.dEdx() / DopedSilicon.ENERGY_EHPAIR);
+ int module = ((RawTrackerHit) hit.rawhits().get(0)).getIdentifierFieldValue("module");
+ trackHitT0[module][layer - 1].fill(hit.dEdx() / DopedSilicon.ENERGY_EHPAIR);
trackTime += hit.time();
hitCount++;
if (hit.time() > maxTime) {
@@ -202,8 +200,9 @@
for (TrackerHit hitCross : track.getTrackerHits()) {
for (HelicalTrackStrip hit : ((HelicalTrackCross) hitCross).getStrips()) {
int layer = hit.layer();
- trackHitDt[trackModule][layer - 1].fill(hit.time() - trackTime);
- trackHit2D[layer - 1].fill(trackTime, hit.time() - trackTime);
+ int module = ((RawTrackerHit) hit.rawhits().get(0)).getIdentifierFieldValue("module");
+ trackHitDt[module][layer - 1].fill(hit.time() - trackTime);
+ trackHit2D[layer - 1].fill(hit.time() - trackTime, event.getTimeStamp() % 24);
trackHitDtChan[layer - 1].fill(hit.umeas(), hit.time() - trackTime);
}
}
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/TrackingReconPlots.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/TrackingReconPlots.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/TrackingReconPlots.java Wed Apr 27 11:11:32 2016
@@ -12,14 +12,17 @@
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
+
import static org.hps.monitoring.drivers.trackrecon.PlotAndFitUtilities.plot;
-import org.hps.recon.tracking.HPSTrack;
+
+import org.hps.recon.tracking.HpsHelicalTrackFit;
+import org.hps.recon.tracking.TrackUtils;
import org.lcsim.event.Cluster;
-
import org.lcsim.event.EventHeader;
import org.lcsim.event.LCIOParameters.ParameterName;
import org.lcsim.event.RawTrackerHit;
import org.lcsim.event.Track;
+import org.lcsim.event.TrackState;
import org.lcsim.event.TrackerHit;
import org.lcsim.fit.helicaltrack.HelicalTrackFit;
import org.lcsim.geometry.Detector;
@@ -267,10 +270,12 @@
SeedTrack stEle = (SeedTrack) trk;
SeedCandidate seedEle = stEle.getSeedCandidate();
HelicalTrackFit ht = seedEle.getHelix();
- HPSTrack hpstrk = new HPSTrack(ht);
+ HpsHelicalTrackFit hpstrk = new HpsHelicalTrackFit(ht);
double svt_l12 = 900.00;//mm ~approximately...this doesn't matter much
double ecal_face = 1393.00;//mm ~approximately ... this matters! Should use typical shower depth...or, once have cluster match, use that value of Z
- Hep3Vector posAtEcal = hpstrk.getPositionAtZMap(svt_l12, ecal_face, 5.0, event.getDetector().getFieldMap())[0];
+ TrackState stateAtEcal = TrackUtils.getTrackStateAtECal(trk);
+ Hep3Vector posAtEcal = new BasicHep3Vector(stateAtEcal.getReferencePoint());
+ //Hep3Vector posAtEcal = hpstrk.getPositionAtZMap(svt_l12, ecal_face, 5.0, event.getDetector().getFieldMap())[0];
List<Cluster> clusters = event.get(Cluster.class, ecalCollectionName);
if (clusters != null) {
if (debug)
@@ -282,10 +287,11 @@
Hep3Vector clusterPos = new BasicHep3Vector(clust.getPosition());
double zCluster = clusterPos.z();
//improve the extrapolation...use the reconstructed cluster z-position
- posAtEcal = hpstrk.getPositionAtZMap(svt_l12, zCluster, 5.0, event.getDetector().getFieldMap())[0];
+// stateAtEcal = TrackUtils.extrapolateTrackUsingFieldMap(trk, svt_l12, zCluster, 5.0, event.getDetector().getFieldMap());
+// posAtEcal = new BasicHep3Vector(stateAtEcal.getReferencePoint());
double eOverP = clust.getEnergy() / pmag;
- double dx = posAtEcal.x() - clusterPos.x();
- double dy = posAtEcal.y() - clusterPos.y();
+ double dx = posAtEcal.y() - clusterPos.x();
+ double dy = posAtEcal.z() - clusterPos.y();
heOverP.fill(eOverP);
hdelXECal.fill(dx);
hdelYECal.fill(dy);
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/V0ReconPlots.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/V0ReconPlots.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/trackrecon/V0ReconPlots.java Wed Apr 27 11:11:32 2016
@@ -1,5 +1,6 @@
package org.hps.monitoring.drivers.trackrecon;
+import static org.hps.monitoring.drivers.trackrecon.PlotAndFitUtilities.plot;
import hep.aida.IAnalysisFactory;
import hep.aida.IFitFactory;
import hep.aida.IFunctionFactory;
@@ -7,12 +8,12 @@
import hep.aida.IHistogram2D;
import hep.aida.IPlotter;
import hep.aida.IPlotterFactory;
-import hep.aida.IPlotterStyle;
+
import java.io.IOException;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
-import static org.hps.monitoring.drivers.trackrecon.PlotAndFitUtilities.plot;
+
import org.lcsim.event.EventHeader;
import org.lcsim.event.ReconstructedParticle;
import org.lcsim.event.Track;
@@ -58,7 +59,7 @@
@Override
protected void detectorChanged(Detector detector) {
- System.out.println("V0Monitoring::detectorChanged Setting up the plotter");
+ //System.out.println("V0Monitoring::detectorChanged Setting up the plotter");
IAnalysisFactory fac = aida.analysisFactory();
IPlotterFactory pfac = fac.createPlotterFactory("V0 Recon");
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/BasicMonitoringPlotsDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/BasicMonitoringPlotsDriver.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/BasicMonitoringPlotsDriver.java Wed Apr 27 11:11:32 2016
@@ -18,83 +18,83 @@
* @author Jeremy McCormick <[log in to unmask]>
*/
public class BasicMonitoringPlotsDriver extends Driver {
-
- private String calHitsCollectionName = "EcalCalHits";
- private String rawHitsCollectionName = "EcalReadoutHits";
- private String clustersCollectionName = "EcalClusters";
-
- private IHistogram1D calHitEnergyH1D;
- private IHistogram1D clusterEnergyH1D;
- private IHistogram1D rawHitAmplitudeH1D;
- private IHistogram2D calHitEnergyMapH2D;
-
- public BasicMonitoringPlotsDriver() {
- }
-
- public void startOfData() {
-
- IAnalysisFactory.create().createHistogramFactory(null);
- IPlotterFactory plotFactory = IAnalysisFactory.create().createPlotterFactory("ECAL Monitoring");
- IHistogramFactory histogramFactory = IAnalysisFactory.create().createHistogramFactory(null);
-
- calHitEnergyH1D = histogramFactory.createHistogram1D(calHitsCollectionName + ": Energy", calHitsCollectionName + ": Energy", 200, 0.0, 2.0);
- calHitEnergyH1D.annotation().addItem("xAxisLabel", "GeV");
- calHitEnergyH1D.annotation().addItem("yAxisLabel", "Count");
- IPlotter plotter = plotFactory.create("CalorimeterHits");
- plotter.createRegion();
- plotter.style().gridStyle().setVisible(false);
- plotter.style().dataStyle().errorBarStyle().setVisible(false);
- plotter.region(0).plot(calHitEnergyH1D);
- plotter.show();
-
- rawHitAmplitudeH1D = histogramFactory.createHistogram1D(rawHitsCollectionName + ": Amplitude", rawHitsCollectionName + ": Amplitude", 150, 0.0, 15000.0);
- rawHitAmplitudeH1D.annotation().addItem("xAxisLabel", "ADC Value");
- rawHitAmplitudeH1D.annotation().addItem("yAxisLabel", "Count");
- plotter = plotFactory.create("RawCalorimeterHits");
- plotter.createRegion();
- plotter.style().gridStyle().setVisible(false);
- plotter.style().dataStyle().errorBarStyle().setVisible(false);
- plotter.region(0).plot(rawHitAmplitudeH1D);
- plotter.show();
-
- clusterEnergyH1D = histogramFactory.createHistogram1D(clustersCollectionName + ": Energy", clustersCollectionName + ": Energy", 100, 0.0, 3.0);
- clusterEnergyH1D.annotation().addItem("xAxisLabel", "GeV");
- clusterEnergyH1D.annotation().addItem("yAxisLabel", "Count");
- plotter = plotFactory.create("Clusters");
- plotter.createRegion();
- plotter.style().gridStyle().setVisible(false);
- plotter.style().dataStyle().errorBarStyle().setVisible(false);
- plotter.region(0).plot(clusterEnergyH1D);
- plotter.show();
-
- calHitEnergyMapH2D = histogramFactory.createHistogram2D(calHitsCollectionName + ": Energy Map", calHitsCollectionName + ": Energy Map", 47, -23.5, 23.5, 11, -5.5, 5.5);
- plotter = plotFactory.create("CalorimeterHit Energy Map");
- plotter.createRegion();
- plotter.style().setParameter("hist2DStyle", "colorMap");
- plotter.style().gridStyle().setVisible(false);
- plotter.region(0).plot(calHitEnergyMapH2D);
- plotter.show();
- }
-
- public void process(EventHeader event) {
-
+
+ private String calHitsCollectionName = "EcalCalHits";
+ private String rawHitsCollectionName = "EcalReadoutHits";
+ private String clustersCollectionName = "EcalClusters";
+
+ private IHistogram1D calHitEnergyH1D;
+ private IHistogram1D clusterEnergyH1D;
+ private IHistogram1D rawHitAmplitudeH1D;
+ private IHistogram2D calHitEnergyMapH2D;
+
+ public BasicMonitoringPlotsDriver() {
+ }
+
+ public void startOfData() {
+
+ IAnalysisFactory.create().createHistogramFactory(null);
+ IPlotterFactory plotFactory = IAnalysisFactory.create().createPlotterFactory("ECAL Monitoring");
+ IHistogramFactory histogramFactory = IAnalysisFactory.create().createHistogramFactory(null);
+
+ calHitEnergyH1D = histogramFactory.createHistogram1D(calHitsCollectionName + ": Energy", calHitsCollectionName + ": Energy", 200, 0.0, 2.0);
+ calHitEnergyH1D.annotation().addItem("xAxisLabel", "GeV");
+ calHitEnergyH1D.annotation().addItem("yAxisLabel", "Count");
+ IPlotter plotter = plotFactory.create("CalorimeterHits");
+ plotter.createRegion();
+ plotter.style().gridStyle().setVisible(false);
+ plotter.style().dataStyle().errorBarStyle().setVisible(false);
+ plotter.region(0).plot(calHitEnergyH1D);
+ plotter.show();
+
+ rawHitAmplitudeH1D = histogramFactory.createHistogram1D(rawHitsCollectionName + ": Amplitude", rawHitsCollectionName + ": Amplitude", 150, 0.0, 15000.0);
+ rawHitAmplitudeH1D.annotation().addItem("xAxisLabel", "ADC Value");
+ rawHitAmplitudeH1D.annotation().addItem("yAxisLabel", "Count");
+ plotter = plotFactory.create("RawCalorimeterHits");
+ plotter.createRegion();
+ plotter.style().gridStyle().setVisible(false);
+ plotter.style().dataStyle().errorBarStyle().setVisible(false);
+ plotter.region(0).plot(rawHitAmplitudeH1D);
+ plotter.show();
+
+ clusterEnergyH1D = histogramFactory.createHistogram1D(clustersCollectionName + ": Energy", clustersCollectionName + ": Energy", 100, 0.0, 3.0);
+ clusterEnergyH1D.annotation().addItem("xAxisLabel", "GeV");
+ clusterEnergyH1D.annotation().addItem("yAxisLabel", "Count");
+ plotter = plotFactory.create("Clusters");
+ plotter.createRegion();
+ plotter.style().gridStyle().setVisible(false);
+ plotter.style().dataStyle().errorBarStyle().setVisible(false);
+ plotter.region(0).plot(clusterEnergyH1D);
+ plotter.show();
+
+ calHitEnergyMapH2D = histogramFactory.createHistogram2D(calHitsCollectionName + ": Energy Map", calHitsCollectionName + ": Energy Map", 47, -23.5, 23.5, 11, -5.5, 5.5);
+ plotter = plotFactory.create("CalorimeterHit Energy Map");
+ plotter.createRegion();
+ plotter.style().setParameter("hist2DStyle", "colorMap");
+ plotter.style().gridStyle().setVisible(false);
+ plotter.region(0).plot(calHitEnergyMapH2D);
+ plotter.show();
+ }
+
+ public void process(EventHeader event) {
+
if (event.hasCollection(CalorimeterHit.class, calHitsCollectionName)) {
for (CalorimeterHit hit : event.get(CalorimeterHit.class, calHitsCollectionName)) {
calHitEnergyH1D.fill(hit.getCorrectedEnergy());
calHitEnergyMapH2D.fill(hit.getIdentifierFieldValue("ix"), hit.getIdentifierFieldValue("iy"), hit.getCorrectedEnergy());
}
}
-
+
if (event.hasCollection(Cluster.class, clustersCollectionName)) {
for (Cluster cluster : event.get(Cluster.class, clustersCollectionName)) {
clusterEnergyH1D.fill(cluster.getEnergy());
}
}
-
- if (event.hasCollection(RawCalorimeterHit.class, rawHitsCollectionName)) {
- for (RawCalorimeterHit hit : event.get(RawCalorimeterHit.class, rawHitsCollectionName)) {
- rawHitAmplitudeH1D.fill(hit.getAmplitude());
- }
- }
- }
+
+ if (event.hasCollection(RawCalorimeterHit.class, rawHitsCollectionName)) {
+ for (RawCalorimeterHit hit : event.get(RawCalorimeterHit.class, rawHitsCollectionName)) {
+ rawHitAmplitudeH1D.fill(hit.getAmplitude());
+ }
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalClusterPlots.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalClusterPlots.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalClusterPlots.java Wed Apr 27 11:11:32 2016
@@ -49,15 +49,15 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public class EcalClusterPlots extends Driver {
- // Internal variables.
+ // Internal variables.
private boolean hide = false;
private boolean logScale = false;
- private AIDA aida = AIDA.defaultInstance();
+ private AIDA aida = AIDA.defaultInstance();
private double maxE = 5000 * EcalUtils.MeV;
- private IPlotter[] plotter = new IPlotter[4];
- private String clusterCollectionName = "EcalClusters";
-
- // Monitoring plot variables.
+ private IPlotter[] plotter = new IPlotter[4];
+ private String clusterCollectionName = "EcalClusters";
+
+ // Monitoring plot variables.
private IHistogram1D clusterCountPlot;
private IHistogram1D clusterSizePlot;
private IHistogram1D clusterEnergyPlot;
@@ -79,7 +79,7 @@
private static final int TAB_CLUSTER_TIME = 2;
private static final int TAB_CLUSTER_PAIR = 3;
private static final String[] TAB_NAMES = { "Cluster Count Plots", "Cluster Energy Plots",
- "Cluster Time Plots", "Cluster Pair Plots" };
+ "Cluster Time Plots", "Cluster Pair Plots" };
/**
* Resets all of the plots for the new detector.
@@ -110,10 +110,10 @@
// Apply formatting that is constant across all tabs.
for(int tabIndex = 0; tabIndex < plotter.length; tabIndex++) {
- plotter[tabIndex] = plotterFactory.create(TAB_NAMES[tabIndex]);
- plotter[tabIndex].setTitle(TAB_NAMES[tabIndex]);
- plotter[tabIndex].style().dataStyle().errorBarStyle().setVisible(false);
- plotter[tabIndex].style().dataStyle().fillStyle().setParameter("showZeroHeightBins", Boolean.FALSE.toString());
+ plotter[tabIndex] = plotterFactory.create(TAB_NAMES[tabIndex]);
+ plotter[tabIndex].setTitle(TAB_NAMES[tabIndex]);
+ plotter[tabIndex].style().dataStyle().errorBarStyle().setVisible(false);
+ plotter[tabIndex].style().dataStyle().fillStyle().setParameter("showZeroHeightBins", Boolean.FALSE.toString());
if(logScale) { plotter[tabIndex].style().yAxisStyle().setParameter("scale", "log"); }
}
@@ -145,9 +145,9 @@
// If they should not be hidden, display the tabs.
if(!hide) {
- for(IPlotter tab : plotter) {
- tab.show();
- }
+ for(IPlotter tab : plotter) {
+ tab.show();
+ }
}
}
@@ -157,121 +157,121 @@
*/
@Override
public void process(EventHeader event) {
- // Check whether the event has clusters or not.
- if(event.hasCollection(Cluster.class, clusterCollectionName)) {
- // Get the list of clusters.
- List<Cluster> clusterList = event.get(Cluster.class, clusterCollectionName);
-
- // Create lists to store the clusters from the top of the
- // calorimeter and the bottom.
- List<Cluster> topList = new ArrayList<Cluster>();
- List<Cluster> bottomList = new ArrayList<Cluster>();
-
- // Track the highest energy cluster in the event.
- double maxEnergy = 0.0;
-
- // Process each of the clusters.
- for(Cluster cluster : clusterList) {
- // If this cluster has a higher energy then was seen
- // previously, it is now the highest energy cluster.
- if (cluster.getEnergy() > maxEnergy) {
- maxEnergy = cluster.getEnergy();
- }
-
-
- // Get the list of calorimeter hits and its size.
- List<CalorimeterHit> hitList = cluster.getCalorimeterHits();
- int hitCount = hitList.size();
-
- // Track cluster statistics.
- double xEnergyWeight = 0.0;
- double yEnergyWeight = 0.0;
- double[] hitTimes = new double[hitCount];
- double totalHitEnergy = 0.0;
-
- // Iterate over the hits and extract statistics from them.
- for(int hitIndex = 0; hitIndex < hitCount; hitIndex++) {
- hitTimes[hitIndex] = hitList.get(hitIndex).getTime();
- totalHitEnergy += hitList.get(hitIndex).getRawEnergy();
- xEnergyWeight += (hitList.get(hitIndex).getRawEnergy() * hitList.get(hitIndex).getIdentifierFieldValue("ix"));
- yEnergyWeight += (hitList.get(hitIndex).getRawEnergy() * hitList.get(hitIndex).getIdentifierFieldValue("iy"));
- }
-
- // If the cluster energy exceeds zero, plot the cluster
- // statistics.
- if(cluster.getEnergy() > 0) {
- clusterSizePlot.fill(hitCount);
- clusterTimes.fill(StatUtils.mean(hitTimes, 0, hitCount));
- clusterTimeSigma.fill(Math.sqrt(StatUtils.variance(hitTimes, 0, hitCount)));
- edgePlot.fill(xEnergyWeight / totalHitEnergy, yEnergyWeight / totalHitEnergy);
- }
-
- // Fill the single cluster plots.
- clusterEnergyPlot.fill(cluster.getEnergy());
-
- // Cluster pairs are formed from all top/bottom cluster
- // combinations. To create these pairs, separate the
- // clusters into two lists based on their y-indices.
- if(cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix") > 0) {
- topList.add(cluster);
- } else {
- bottomList.add(cluster);
- }
- }
-
- // Populate the event plots.
- clusterCountPlot.fill(clusterList.size());
- if(maxEnergy > 0) { clusterMaxEnergyPlot.fill(maxEnergy); }
-
- // Create a list to store cluster pairs.
- List<Cluster[]> pairList = new ArrayList<Cluster[]>(topList.size() * bottomList.size());
-
- // Form pairs from all possible combinations of clusters
- // from the top and bottom lists.
- for(Cluster topCluster : topList) {
- for(Cluster bottomCluster : bottomList) {
- // Make a cluster pair array.
- Cluster[] pair = new Cluster[2];
-
- // The lower energy cluster goes in the second slot.
- if(topCluster.getEnergy() > bottomCluster.getEnergy()) {
- pair[0] = topCluster;
- pair[1] = bottomCluster;
- } else {
- pair[0] = bottomCluster;
- pair[1] = topCluster;
- }
-
- // Add the pair to the pair list.
- pairList.add(pair);
- }
- }
-
- // Iterate over each pair and calculate the pair cut values.
- for(Cluster[] pair : pairList) {
- // Get the energy slope value.
- double energySumValue = TriggerModule.getValueEnergySum(pair);
- double energyDifferenceValue = TriggerModule.getValueEnergyDifference(pair);
- double energySlopeValue = TriggerModule.getValueEnergySlope(pair, 0.005500);
- double coplanarityValue = TriggerModule.getValueCoplanarity(pair);
- double xMean = ((pair[0].getEnergy() * pair[0].getPosition()[0]) +
- (pair[1].getEnergy() * pair[1].getPosition()[0])) / energySumValue;
- double yMean = ((pair[0].getEnergy() * pair[0].getPosition()[1]) +
- (pair[1].getEnergy() * pair[1].getPosition()[1])) / energySumValue;
-
- // Populate the cluster pair plots.
- pairEnergySum.fill(energySumValue, 1);;
- pairEnergyDifference.fill(energyDifferenceValue, 1);
- pairEnergySlope.fill(energySlopeValue, 1);
- pairCoplanarity.fill(coplanarityValue, 1);
- pairEnergyPositionMeanX.fill(xMean);
- pairEnergyPositionMeanY.fill(yMean);
- }
- }
-
- // If the event does not contain clusters, update the "Event
- // Clusters" plot accordingly.
- else { clusterCountPlot.fill(0); }
+ // Check whether the event has clusters or not.
+ if(event.hasCollection(Cluster.class, clusterCollectionName)) {
+ // Get the list of clusters.
+ List<Cluster> clusterList = event.get(Cluster.class, clusterCollectionName);
+
+ // Create lists to store the clusters from the top of the
+ // calorimeter and the bottom.
+ List<Cluster> topList = new ArrayList<Cluster>();
+ List<Cluster> bottomList = new ArrayList<Cluster>();
+
+ // Track the highest energy cluster in the event.
+ double maxEnergy = 0.0;
+
+ // Process each of the clusters.
+ for(Cluster cluster : clusterList) {
+ // If this cluster has a higher energy than was seen
+ // previously, it is now the highest energy cluster.
+ if (cluster.getEnergy() > maxEnergy) {
+ maxEnergy = cluster.getEnergy();
+ }
+
+
+ // Get the list of calorimeter hits and its size.
+ List<CalorimeterHit> hitList = cluster.getCalorimeterHits();
+ int hitCount = hitList.size();
+
+ // Track cluster statistics.
+ double xEnergyWeight = 0.0;
+ double yEnergyWeight = 0.0;
+ double[] hitTimes = new double[hitCount];
+ double totalHitEnergy = 0.0;
+
+ // Iterate over the hits and extract statistics from them.
+ for(int hitIndex = 0; hitIndex < hitCount; hitIndex++) {
+ hitTimes[hitIndex] = hitList.get(hitIndex).getTime();
+ totalHitEnergy += hitList.get(hitIndex).getRawEnergy();
+ xEnergyWeight += (hitList.get(hitIndex).getRawEnergy() * hitList.get(hitIndex).getIdentifierFieldValue("ix"));
+ yEnergyWeight += (hitList.get(hitIndex).getRawEnergy() * hitList.get(hitIndex).getIdentifierFieldValue("iy"));
+ }
+
+ // If the cluster energy exceeds zero, plot the cluster
+ // statistics.
+ if(cluster.getEnergy() > 0) {
+ clusterSizePlot.fill(hitCount);
+ clusterTimes.fill(StatUtils.mean(hitTimes, 0, hitCount));
+ clusterTimeSigma.fill(Math.sqrt(StatUtils.variance(hitTimes, 0, hitCount)));
+ edgePlot.fill(xEnergyWeight / totalHitEnergy, yEnergyWeight / totalHitEnergy);
+ }
+
+ // Fill the single cluster plots.
+ clusterEnergyPlot.fill(cluster.getEnergy());
+
+ // Cluster pairs are formed from all top/bottom cluster
+ // combinations. To create these pairs, separate the
+ // clusters into two lists based on their y-indices.
+ if(cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix") > 0) {
+ topList.add(cluster);
+ } else {
+ bottomList.add(cluster);
+ }
+ }
+
+ // Populate the event plots.
+ clusterCountPlot.fill(clusterList.size());
+ if(maxEnergy > 0) { clusterMaxEnergyPlot.fill(maxEnergy); }
+
+ // Create a list to store cluster pairs.
+ List<Cluster[]> pairList = new ArrayList<Cluster[]>(topList.size() * bottomList.size());
+
+ // Form pairs from all possible combinations of clusters
+ // from the top and bottom lists.
+ for(Cluster topCluster : topList) {
+ for(Cluster bottomCluster : bottomList) {
+ // Make a cluster pair array.
+ Cluster[] pair = new Cluster[2];
+
+ // The lower energy cluster goes in the second slot.
+ if(topCluster.getEnergy() > bottomCluster.getEnergy()) {
+ pair[0] = topCluster;
+ pair[1] = bottomCluster;
+ } else {
+ pair[0] = bottomCluster;
+ pair[1] = topCluster;
+ }
+
+ // Add the pair to the pair list.
+ pairList.add(pair);
+ }
+ }
+
+ // Iterate over each pair and calculate the pair cut values.
+ for(Cluster[] pair : pairList) {
+ // Get the energy slope value.
+ double energySumValue = TriggerModule.getValueEnergySum(pair);
+ double energyDifferenceValue = TriggerModule.getValueEnergyDifference(pair);
+ double energySlopeValue = TriggerModule.getValueEnergySlope(pair, 0.005500);
+ double coplanarityValue = TriggerModule.getValueCoplanarity(pair);
+ double xMean = ((pair[0].getEnergy() * pair[0].getPosition()[0]) +
+ (pair[1].getEnergy() * pair[1].getPosition()[0])) / energySumValue;
+ double yMean = ((pair[0].getEnergy() * pair[0].getPosition()[1]) +
+ (pair[1].getEnergy() * pair[1].getPosition()[1])) / energySumValue;
+
+ // Populate the cluster pair plots.
+ pairEnergySum.fill(energySumValue, 1);;
+ pairEnergyDifference.fill(energyDifferenceValue, 1);
+ pairEnergySlope.fill(energySlopeValue, 1);
+ pairCoplanarity.fill(coplanarityValue, 1);
+ pairEnergyPositionMeanX.fill(xMean);
+ pairEnergyPositionMeanY.fill(yMean);
+ }
+ }
+
+ // If the event does not contain clusters, update the "Event
+ // Clusters" plot accordingly.
+ else { clusterCountPlot.fill(0); }
}
/**
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalDaqPlots.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalDaqPlots.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalDaqPlots.java Wed Apr 27 11:11:32 2016
@@ -11,23 +11,19 @@
import org.hps.conditions.database.DatabaseConditionsManager;
import org.hps.conditions.ecal.EcalChannel;
-import org.hps.conditions.ecal.EcalConditions;
-import org.hps.detector.ecal.EcalCrystal;
import org.lcsim.event.CalorimeterHit;
import org.lcsim.event.EventHeader;
import org.lcsim.geometry.Detector;
-import org.lcsim.geometry.compact.Subdetector;
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
-/*Conditions system imports*/
-
/**
- * The driver <code>EcalDaqPlots</code> implements the histogram shown to the user in the fourth tab of the Monitoring Application, when using the Ecal monitoring lcsim file. It contains only a
- * sub-tab, showing the number of hits recorded by the different FADC channels. It is a very preliminary driver to monitor the DAQ status. These plots are updated continuosly.
+ * The driver <code>EcalDaqPlots</code> implements the histogram shown to the user in the fourth tab of the
+ * Monitoring Application, when using the Ecal monitoring lcsim file. It contains only a sub-tab, showing
+ * the number of hits recorded by the different FADC channels. It is a very preliminary driver to monitor
+ * the DAQ status. These plots are updated continuously.
+ *
* @author Andrea Celentano
- * @TODO: integrate with the new conditions system.
- *
*/
public class EcalDaqPlots extends Driver {
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalEventDisplay.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalEventDisplay.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalEventDisplay.java Wed Apr 27 11:11:32 2016
@@ -13,7 +13,6 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import java.lang.IllegalArgumentException;
import javax.swing.SwingUtilities;
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalEventDisplayWithRawWaveform.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalEventDisplayWithRawWaveform.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalEventDisplayWithRawWaveform.java Wed Apr 27 11:11:32 2016
@@ -13,14 +13,12 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import java.lang.IllegalArgumentException;
import org.hps.monitoring.ecal.eventdisplay.ui.PDataEventViewer;
import org.hps.monitoring.ecal.eventdisplay.ui.PEventViewer;
import org.hps.monitoring.ecal.eventdisplay.ui.Viewer;
import org.hps.monitoring.ecal.eventdisplay.util.CrystalEvent;
import org.hps.monitoring.ecal.eventdisplay.util.CrystalListener;
-import org.hps.monitoring.ecal.plots.EcalMonitoringUtilities;
import org.hps.recon.ecal.EcalUtils;
import org.lcsim.event.CalorimeterHit;
import org.lcsim.event.RawTrackerHit;
@@ -54,7 +52,7 @@
// Plotter objects and variables.
private IPlotter plotter;
private IPlotterFactory plotterFactory;
- private AIDA aida = AIDA.defaultInstance();
+ private AIDA aida = AIDA.defaultInstance();
// LCIO Collection names.
private String inputCollection = "EcalCalHits";
@@ -69,16 +67,16 @@
private ArrayList<IHistogram2D> channelTimeVsEnergyPlot;
// Internal variables.
- private PEventViewer viewer; // Single event display.
- private int pedSamples = 10; //
- private IPlotterStyle pstyle; // The plotter style for all plots.
- private long lastEventTime = 0; // Tracks the time at which the last event occurred.
- private int eventRefreshRate = 1; // The number of seconds before an update occurs.
- private boolean resetOnUpdate = true; // Clears the event display on each update.
- private double minEch = 10 * EcalUtils.MeV; // The energy scale minimum.
- private double maxEch = 3500 * EcalUtils.MeV; // The energy scale maximum.
- private int[] windowRaw = new int[NUM_CHANNELS]; // The number of samples in a waveform for each channel.
- private boolean[] isFirstRaw = new boolean[NUM_CHANNELS]; // Whether a waveform plot was initiated for each channel.
+ private PEventViewer viewer; // Single event display.
+ private int pedSamples = 10; //
+ private IPlotterStyle pstyle; // The plotter style for all plots.
+ private long lastEventTime = 0; // Tracks the time at which the last event occurred.
+ private int eventRefreshRate = 1; // The number of seconds before an update occurs.
+ private boolean resetOnUpdate = true; // Clears the event display on each update.
+ private double minEch = 10 * EcalUtils.MeV; // The energy scale minimum.
+ private double maxEch = 3500 * EcalUtils.MeV; // The energy scale maximum.
+ private int[] windowRaw = new int[NUM_CHANNELS]; // The number of samples in a waveform for each channel.
+ private boolean[] isFirstRaw = new boolean[NUM_CHANNELS]; // Whether a waveform plot was initiated for each channel.
// Plot style and title variables.
private static final String NO_TITLE = "";
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalHitPlots.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalHitPlots.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalHitPlots.java Wed Apr 27 11:11:32 2016
@@ -190,17 +190,17 @@
GenericObject triggerData = triggerList.get(0);
if (triggerData instanceof SSPData){
- // TODO: TOP, BOTTOM, OR, and AND triggers are test
- // run specific parameters and are not supported
- // by SSPData.
- orTrigTime = 0; //((SSPData)triggerData).getOrTrig();
- topTrigTime = 0; //((SSPData)triggerData).getTopTrig();
- botTrigTime = 0; //((SSPData)triggerData).getBotTrig();
-
-
- orTrigTimePlot.fill(orTrigTime);
+ // TODO: TOP, BOTTOM, OR, and AND triggers are test
+ // run specific parameters and are not supported
+ // by SSPData.
+ orTrigTime = 0; //((SSPData)triggerData).getOrTrig();
+ topTrigTime = 0; //((SSPData)triggerData).getTopTrig();
+ botTrigTime = 0; //((SSPData)triggerData).getBotTrig();
+
+
+ orTrigTimePlot.fill(orTrigTime);
topTrigTimePlot.fill(topTrigTime);
- botTrigTimePlot.fill(botTrigTime);
+ botTrigTimePlot.fill(botTrigTime);
}
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalLedSequenceMonitor.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalLedSequenceMonitor.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalLedSequenceMonitor.java Wed Apr 27 11:11:32 2016
@@ -1,4 +1,5 @@
package org.hps.monitoring.ecal.plots;
+
import hep.aida.IEvaluator;
import hep.aida.IFitResult;
@@ -21,7 +22,6 @@
import java.io.PrintWriter;
import java.sql.SQLException;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -53,6 +53,8 @@
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
+
+
/* This is the driver used to determine the response of each calorimeter channel after a LED run
* @author Andrea Celentano <[log in to unmask]>
*/
@@ -64,16 +66,16 @@
private static final int runNumberMax = 9999;
private static final int nDrivers = 8;
private static final int nSteps = 100; //should be 56 but here is to avoid seg fault
-
-
+
+
String inputCollectionRaw = "EcalReadoutHits";
- String inputCollection = "EcalCalHits";
+ String inputCollection = "EcalCalHits";
AIDA aida;
DatabaseConditionsManager conditionsManager;
- private EcalChannelCollection ChannelCollection;
+ private EcalChannelCollection ChannelCollection;
private EcalLedCollection LedCollection;
private EcalConditions ecalConditions;
@@ -88,7 +90,7 @@
String outFileName;
- private int runNumber = 0;
+ private int runNumber = 0;
private int eventN = 0;
private int id,row,column,chid,ledId,driverId;
private int[][] expectedSequence = new int[][]{ /*A.C. it is a terrible thing to have this hard-coded here!*/
@@ -98,9 +100,9 @@
{112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,-1}, //missing 135 is ok
{168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223},
//second 4 are the flasher2 sequence, BOTTOM controller
- {2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,-1,-1},
+ {2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,-1,-1},
{56,57,58,59,60,61,62,63,64,65,66,67,68,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,-1}, //missing 69 is OK
- {112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167},
+ {112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167},
{168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223},
};
private int[][] actualSequence=new int[nDrivers][nSteps];
@@ -126,7 +128,8 @@
private IFunction fFunction,fFunction1;
private IProfile1D cProfile;
- private IHistogram2D hMeanCharge2D;
+ private IHistogram2D hMeanCharge2D;
+ private IHistogram2D hMeanCharge2DReferenceRatio;
private ArrayList<IHistogram1D> hCharge;
private ArrayList<IHistogram2D> hChargeVsEvn;
private IPlotterFactory factory;
@@ -149,6 +152,11 @@
private double fEvnMaxDraw=80000.;
private double fChargeMinDraw=0.;
private double fChargeMaxDraw=100.;
+
+ /*The reference run numbers*/
+ private int fRedReferenceID;
+ private int fBlueReferenceID;
+
/*Components for user interaction*/
private JDialog dialog;
@@ -161,6 +169,14 @@
private LedColor m_ret=LedColor.UNKNOWN; //use UNKNONW as CANCEL button
static Object modalMonitor = new Object();
+ public void setRedReferenceID(int redReference){
+ this.fRedReferenceID=redReference;
+ }
+
+ public void setBlueReferenceID(int blueReference){
+ this.fBlueReferenceID=blueReference;
+ }
+
public void setUseRawEnergy(boolean useRawEnergy) {
this.useRawEnergy=useRawEnergy;
}
@@ -218,29 +234,29 @@
conditionsManager = DatabaseConditionsManager.getInstance();
LedTopMap = new HashMap< Integer , Integer >(); //key: ecal channel ID. Value: led id
- LedBotMap = new HashMap< Integer , Integer >();
+ LedBotMap = new HashMap< Integer , Integer >();
LedTopMapInverted = new HashMap< Integer , Integer >(); //key: led id. Value: ecal channel id
LedBotMapInverted = new HashMap< Integer , Integer >();
- ChannelCollection = conditionsManager.getCachedConditions(EcalChannelCollection.class, "ecal_channels").getCachedData();
+ ChannelCollection = conditionsManager.getCachedConditions(EcalChannelCollection.class, "ecal_channels").getCachedData();
LedCollection = conditionsManager.getCachedConditions(EcalLedCollection.class, "ecal_leds").getCachedData();
- ecalConditions = conditionsManager.getEcalConditions();
+ ecalConditions = conditionsManager.getEcalConditions();
for (EcalChannel channel : ChannelCollection){
chid = channel.getChannelId();
- for (EcalLed Led : LedCollection) {
- if (Led.getEcalChannelId()==chid){
- if (channel.getY()>0){
- LedTopMap.put( chid , Led.getLedNumber() );
- LedTopMapInverted.put( Led.getLedNumber(), chid );
- }
- else if (channel.getY()<0){
- LedBotMap.put( chid , Led.getLedNumber() );
- LedBotMapInverted.put( Led.getLedNumber(), chid );
- }
- }
+ for (EcalLed Led : LedCollection) {
+ if (Led.getEcalChannelId()==chid){
+ if (channel.getY()>0){
+ LedTopMap.put( chid , Led.getLedNumber() );
+ LedTopMapInverted.put( Led.getLedNumber(), chid );
+ }
+ else if (channel.getY()<0){
+ LedBotMap.put( chid , Led.getLedNumber() );
+ LedBotMapInverted.put( Led.getLedNumber(), chid );
+ }
+ }
}
}
@@ -250,14 +266,16 @@
aida = AIDA.defaultInstance();
aida.tree().cd("/");
hMeanCharge2D = aida.histogram2D("Average LED response", 47, -23.5, 23.5, 11, -5.5, 5.5);
-
+ hMeanCharge2DReferenceRatio = aida.histogram2D("Ratio this run VS reference run", 47, -23.5, 23.5, 11, -5.5, 5.5);
+
factory= aida.analysisFactory().createPlotterFactory("Ecal Led Sequence");
pPlotter= factory.create("Drivers");
pPlotter.createRegions(4,2);
if (isMonitoringApp){
pPlotter2=factory.create("Sequence Map");
- pPlotter2.createRegions(1,1);
+ pPlotter2.createRegions(1,2);
pPlotter2.region(0).plot(hMeanCharge2D);
+ pPlotter2.region(1).plot(hMeanCharge2DReferenceRatio);
}
iTuple = new ArrayList<ITuple>(NUM_CHANNELS);
hCharge = new ArrayList<IHistogram1D>(NUM_CHANNELS);
@@ -269,7 +287,7 @@
for (int ii=0;ii<NUM_CHANNELS;ii++){
int row = EcalMonitoringUtilities.getRowFromHistoID(ii);
- int column = EcalMonitoringUtilities.getColumnFromHistoID(ii);
+ int column = EcalMonitoringUtilities.getColumnFromHistoID(ii);
iTuple.add(aida.analysisFactory().createTupleFactory(aida.tree()).create("nTuple"+ii,"nTuple"+ii,"int fEvn=0 , double fCharge=0.,double fTime=0.",""));
}
@@ -281,7 +299,7 @@
pPlotter.show();
if (isMonitoringApp) pPlotter2.show();
- }
+ }
@Override
public void process(EventHeader event) {
@@ -292,81 +310,83 @@
List<CalorimeterHit> hits = event.get(CalorimeterHit.class, inputCollection);
for (CalorimeterHit hit : hits) {
- column = hit.getIdentifierFieldValue("ix");
- row = hit.getIdentifierFieldValue("iy");
- id = EcalMonitoringUtilities.getHistoIDFromRowColumn(row, column);
- cellID=hit.getCellID();
- chid = ChannelCollection.findGeometric(cellID).getChannelId();
-
- energy = hit.getCorrectedEnergy();
-
- if (useRawEnergy){
- fillEnergy = getRawADCSum(energy,cellID);
- }
- else {
- fillEnergy = energy;
- }
- fillTime = hit.getTime();
-
-
- //find the LED
- if (row>0){
- ledId=LedTopMap.get(chid);
- }
- else if (row<0){
- ledId=LedBotMap.get(chid);
- }
- driverId=getDriver(ledId);
- if (row<0) driverId+=4;
-
-
-
- /*Skip the events under thr*/
- if (energy<energyCut) continue;
-
- /*First, check if this led is the one in the NEXT step. Therefore, increment by 1 the step*/
- if (iStep[driverId]==0){
+ column = hit.getIdentifierFieldValue("ix");
+ row = hit.getIdentifierFieldValue("iy");
+ id = EcalMonitoringUtilities.getHistoIDFromRowColumn(row, column);
+ cellID=hit.getCellID();
+ chid = ChannelCollection.findGeometric(cellID).getChannelId();
+
+ energy = hit.getCorrectedEnergy();
+
+ if (useRawEnergy){
+ fillEnergy = getRawADCSum(energy,cellID);
+ }
+ else {
+ fillEnergy = energy;
+ }
+ fillTime = hit.getTime();
+
+
+ //find the LED
+ if (row>0){
+ ledId=LedTopMap.get(chid);
+ }
+ else if (row<0){
+ ledId=LedBotMap.get(chid);
+ }
+ driverId=getDriver(ledId);
+ if (row<0) driverId+=4;
+
+
+
+ /*Skip the events under thr*/
+ if (energy<energyCut) continue;
+
+ /*First, check if this led is the one in the NEXT step. Therefore, increment by 1 the step*/
+ /*
+ * if (iStep[driverId]==0){
+
actualSequence[driverId][iStep[driverId]]=ledId;
- iStep[driverId]=1;
+ iStep[driverId]=1;
}
else if ((iStep[driverId]==1)&&(ledId!=actualSequence[driverId][0])){
- System.out.println("LedAnalysis:: increment step ("+iStep[driverId]+") for driver "+driverId+" . Led ID: "+ledId+" Column: "+column+" Row: "+row);
+ System.out.println("LedAnalysis:: increment step ("+iStep[driverId]+") for driver "+driverId+" . Led ID: "+ledId+" Column: "+column+" Row: "+row);
if (iStep[driverId]>0) drawProfiles(actualSequence[driverId][iStep[driverId]-1],driverId);
actualSequence[driverId][iStep[driverId]]=ledId;
iStep[driverId]++;
}
else if ((iStep[driverId]>1)&&(ledId!=actualSequence[driverId][iStep[driverId]-1])&&(ledId!=actualSequence[driverId][iStep[driverId]-2])){
- System.out.println("LedAnalysis:: increment step ("+iStep[driverId]+") for driver "+driverId+" . Led ID: "+ledId+" Column: "+column+" Row: "+row);
+ System.out.println("LedAnalysis:: increment step ("+iStep[driverId]+") for driver "+driverId+" . Led ID: "+ledId+" Column: "+column+" Row: "+row);
if (iStep[driverId]>0) drawProfiles(actualSequence[driverId][iStep[driverId]-1],driverId);
actualSequence[driverId][iStep[driverId]]=ledId;
iStep[driverId]++;
}
-
-
-
-
- if (iStep[driverId]==-1) continue; /*Not yet data*/
-
- /*Put this code here, since we want to always fill the ntuple*/
- iTuple.get(id).fill(0,nEvents[id]);
- iTuple.get(id).fill(1,fillEnergy);
- iTuple.get(id).fill(2,fillTime);
- iTuple.get(id).addRow();
- nEvents[id]++;
-
-
-
- /*Add a debug print */
- if (eventN % 10000==0){
- System.out.println("Debug. Event "+eventN+" LED ID: "+ledId+" DRIVER ID: "+driverId+" ECAL ID: "+id+" ROW: "+row+" COLUMN: "+column+ "HISTO ID: "+id);
- }
+ // if (iStep[driverId]==-1) continue;
+
+ */
+
+ if (iStep[driverId]==-1) continue; /*Not yet data*/
+
+ /*Put this code here, since we want to always fill the ntuple*/
+ iTuple.get(id).fill(0,nEvents[id]);
+ iTuple.get(id).fill(1,fillEnergy);
+ iTuple.get(id).fill(2,fillTime);
+ iTuple.get(id).addRow();
+ nEvents[id]++;
+
+
+
+ /*Add a debug print */
+ if (eventN % 10000==0){
+ System.out.println("Debug. Event "+eventN+" LED ID: "+ledId+" DRIVER ID: "+driverId+" ECAL ID: "+id+" ROW: "+row+" COLUMN: "+column+ "HISTO ID: "+id);
+ }
}
if (eventN % 10000==0){
- System.out.println("\n");
- }
- }
+ System.out.println("\n");
+ }
+ }
}
/*
@@ -392,7 +412,7 @@
double e,eMin,eMax;
double t;
- int n,nBins,nFits,nSkip;
+ int n,nBins,nSkip;
int row, column;
@@ -400,7 +420,7 @@
IFunctionFactory fFactory=aida.analysisFactory().createFunctionFactory(aida.tree());
IFitResult fResult;
- IFitter fFitter;
+ IFitter fFitter;
for (int id = 0; id < 11 * 47; id++) {
@@ -409,7 +429,7 @@
row = EcalMonitoringUtilities.getRowFromHistoID(id);
column = EcalMonitoringUtilities.getColumnFromHistoID(id);
System.out.println("");
- System.out.println("Doing channel: X= "+column+" Y= "+row);
+ System.out.println("Doing channel: X= "+column+" Y= "+row+ "id= "+id);
System.out.println("Number of entries in analysis ntuple: "+iTuple.get(id).rows());
System.out.println("Number of recognized events: "+nEvents[id]);
/*Create the profile. Create it for all the channels, to keep sync.*/
@@ -419,9 +439,9 @@
/*Clear previous*/
if (id>0){
- aida.tree().rm("strip");
- aida.tree().rm("fun0");
- aida.tree().rm("fun1");
+ aida.tree().rm("strip");
+ aida.tree().rm("fun0");
+ aida.tree().rm("fun1");
}
/*Create the profile.*/
cProfile=aida.profile1D("strip",nBins,-0.5,nEvents[id]*(1-skipInitial)+0.5);
@@ -431,98 +451,79 @@
fFunction1=fFactory.createFunctionByName("fun1","G");
if (EcalMonitoringUtilities.isInHole(row,column)==true){
- System.out.println("Channel X= "+column+" Y= "+row+" is in hole. Skip");
- hCharge.add(aida.histogram1D("charge_"+id,200,0.,1.)); //create here the histogram to keep sync
- System.out.println("In hole, skip");
- continue;
+ System.out.println("Channel X= "+column+" Y= "+row+" is in hole. Skip");
+ hCharge.add(aida.histogram1D("charge_"+id,200,0.,1.)); //create here the histogram to keep sync
+ System.out.println("In hole, skip");
+ continue;
}
else if (nEvents[id]<nEventsMin) {
- hCharge.add(aida.histogram1D("charge_"+id,200,0.,1.)); //create here the histogram to keep sync
- System.err.println("LedAnalysis:: the channel X= "+column+" Y= "+row+" has not enough events "+nEvents[id]+" "+nEventsMin);
-
- continue;
- }
+ hCharge.add(aida.histogram1D("charge_"+id,200,0.,1.)); //create here the histogram to keep sync
+ System.err.println("LedAnalysis:: the channel X= "+column+" Y= "+row+" has not enough events "+nEvents[id]+" "+nEventsMin);
+
+ continue;
+ }
//Fill the profile*/
nSkip=(int)(nEvents[id]*skipInitial);
if (nSkip>iTuple.get(id).rows()){
- System.out.println("Can't skip initial events?");
- nSkip=0;
+ System.out.println("Can't skip initial events?");
+ nSkip=0;
}
iTuple.get(id).start();
iTuple.get(id).skip(nSkip); //This is the work-around for those channels with charge starting from 0 and rapidly growing//
n=0;
iTuple.get(id).next();
while ( iTuple.get(id).next() ){
- e=iTuple.get(id).getDouble(1);
- if (e<eMin) eMin=e;
- if (e>eMax) eMax=e;
- cProfile.fill(1.*n,e);
- n++;
- }
+ e=iTuple.get(id).getDouble(1);
+ if (e<eMin) eMin=e;
+ if (e>eMax) eMax=e;
+ cProfile.fill(1.*n,e);
+ n++;
+ }
fFitter=aida.analysisFactory().createFitFactory().createFitter("chi2","","v");
if (doFullAnalysis){
- //Init function parameters
- double[] initialPars={eMax-eMin,nEvents[id]/10.,eMin};
- if (initialPars[0]<0) initialPars[0]=0;
- fFunction.setParameters(initialPars);
-
- //Do the fit
- System.out.println("LedAnalysis:: do profile fit "+id+" "+fFitter.engineName()+" "+fFitter.fitMethodName());
- System.out.println("LedAnalysis:: initial parameters "+initialPars[0]+" "+initialPars[1]+" "+initialPars[2]);
- fResult=fFitter.fit(cProfile,fFunction);
- fPars = fResult.fittedParameters();
- fParErrs = fResult.errors();
- fParNames = fResult.fittedParameterNames();
- System.out.println("LedAnalysis:: Status= "+fResult.fitStatus()+" "+fResult.isValid()+" Chi2 = "+fResult.quality()+" NDF: "+fResult.ndf());
- for(int i=0; i< fResult.fittedFunction().numberOfParameters(); i++ ){
- System.out.println(fParNames[i]+" : "+fPars[i]+" +- "+fParErrs[i]);
- }
- fFunction.setParameters(fPars);
-
-
- //Do again the fit: it is a terrible work-around
- nFits=0;
- if (Double.isNaN(fParErrs[1])){
- fPars=fPrevPars;
- }
- while (Double.isNaN(fParErrs[1])){
- System.out.println("LedAnalysis:: redo fit");
- fFunction.setParameters(fPars);
- fResult=fFitter.fit(cProfile,fFunction);
- fPars = fResult.fittedParameters();
- fParErrs = fResult.errors();
- System.out.println("LedAnalysis:: Status= "+fResult.fitStatus()+" "+fResult.isValid()+" Chi2 = "+fResult.quality()+" NDF: "+fResult.ndf());
- for(int i=0; i< fResult.fittedFunction().numberOfParameters(); i++ ){
- System.out.println(fParNames[i]+" : "+fPars[i]+" +- "+fParErrs[i]);
- }
- fFunction.setParameters(fPars);
- nFits++;
- if (nFits>=10){
- System.out.println("LedAnalysis:: Error, too many fits without convergence");
- break;
- }
- }
- fPrevPars=Arrays.copyOf(fPars,fPars.length);
- System.out.println("LedAnalysis:: fit "+id+" done");
-
- //Now we have the tau parameter. Take ONLY the events that are with N>5*tau/
- //As a cross-check, also verify that tau > Nevents/10, otherwise skip the first Nevents/2
- //and emit warning
- nSkip=(int)( fPars[1]*5);
- if (nSkip < (nEvents[id]*skipMin)){
- System.out.println("LedAnalysis:: Skip number too low: "+nSkip+" Increment it to "+nEvents[id]/2);
- nSkip=(int)(nEvents[id]*skipMin);
- }
- if (nSkip > nEvents[id]){
- System.out.println("LedAnalysis:: Skip number too high, reduce it");
- nSkip=(int)(nEvents[id]*skipMin);
- }
-
+ //Init function parameters
+ double[] initialPars={eMax-eMin,nEvents[id]/10.,eMin};
+ if (initialPars[0]<0) initialPars[0]=0;
+ fFunction.setParameters(initialPars);
+
+ //Do the fit
+ System.out.println("LedAnalysis:: do profile fit "+id+" "+fFitter.engineName()+" "+fFitter.fitMethodName());
+ System.out.println("LedAnalysis:: initial parameters "+initialPars[0]+" "+initialPars[1]+" "+initialPars[2]);
+ fResult=fFitter.fit(cProfile,fFunction);
+ fPars = fResult.fittedParameters();
+ fParErrs = fResult.errors();
+ fParNames = fResult.fittedParameterNames();
+ System.out.println("LedAnalysis:: Status= "+fResult.fitStatus()+" "+fResult.isValid()+" Chi2 = "+fResult.quality()+" NDF: "+fResult.ndf());
+ for(int i=0; i< fResult.fittedFunction().numberOfParameters(); i++ ){
+ System.out.println(fParNames[i]+" : "+fPars[i]+" +- "+fParErrs[i]);
+ }
+ fFunction.setParameters(fPars);
+
+
+ //if fit failed, revert to simpler case
+ if ((fResult.isValid()==false)||Double.isNaN(fParErrs[0])||Double.isNaN(fParErrs[1])||Double.isNaN(fParErrs[2])){
+ System.out.println("LedAnalysis:: fit failed. Reverting to simpler case");
+ nSkip=(int)(nEvents[id]*(skipMin+skipInitial));
+ }
+ else{
+ //Now we have the tau parameter. Take ONLY the events that are with N>5*tau/
+ //As a cross-check, also verify that tau > Nevents/10, otherwise skip the first Nevents/2
+ //and emit warning
+ nSkip=(int)( fPars[1]*5);
+ if (nSkip < (nEvents[id]*skipMin)){
+ System.out.println("LedAnalysis:: Skip number too low: "+nSkip+" Increment it to "+nEvents[id]/2);
+ nSkip=(int)(nEvents[id]*(skipMin+skipInitial));
+ }
+ if (nSkip > nEvents[id]){
+ System.out.println("LedAnalysis:: Skip number too high, reduce it");
+ nSkip=(int)(nEvents[id]*(skipMin+skipInitial));
+ }
+ }
}
else{
- nSkip=(int)(nEvents[id]*(skipMin+skipInitial));
+ nSkip=(int)(nEvents[id]*(skipMin+skipInitial));
}
System.out.println("LedAnalysis:: gaus fit :: Going to skip "+nSkip+" out of "+nEvents[id]);
@@ -534,11 +535,11 @@
iTuple.get(id).skip(nSkip);
n=0;
while (iTuple.get(id).next()){
- e=iTuple.get(id).getDouble(1);
- t=iTuple.get(id).getDouble(2);
- hCharge.get(id).fill(e);
- n++;
- }
+ e=iTuple.get(id).getDouble(1);
+ t=iTuple.get(id).getDouble(2);
+ hCharge.get(id).fill(e);
+ n++;
+ }
/*Finally do the fit with the gaussian*/
double[] initialPars1={hCharge.get(id).maxBinHeight(),hCharge.get(id).mean(),hCharge.get(id).rms()};
@@ -550,10 +551,10 @@
fResult=fFitter.fit(hCharge.get(id),fFunction1);
fPars = fResult.fittedParameters();
fParErrs = fResult.errors();
- fParNames = fResult.fittedParameterNames();
+ fParNames = fResult.fittedParameterNames();
System.out.println("Status= "+fResult.fitStatus()+" "+fResult.isValid()+" Chi2 = "+fResult.quality()+" NDF: "+fResult.ndf());
for(int i=0; i< fResult.fittedFunction().numberOfParameters(); i++ ){
- System.out.println(fParNames[i]+" : "+fPars[i]+" +- "+fParErrs[i]);
+ System.out.println(fParNames[i]+" : "+fPars[i]+" +- "+fParErrs[i]);
}
fFunction1.setParameters(fPars);
mMean[id]=fPars[1];
@@ -572,6 +573,15 @@
style.dataStyle().fillStyle().setParameter("showZeroHeightBins", Boolean.FALSE.toString());
pPlotter2.region(0).plot(hMeanCharge2D);
pPlotter2.region(0).refresh();
+
+
+ style = pPlotter2.region(1).style();
+ style.setParameter("hist2DStyle", "colorMap");
+ style.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ style.dataStyle().fillStyle().setParameter("showZeroHeightBins", Boolean.FALSE.toString());
+ pPlotter2.region(1).plot(hMeanCharge2DReferenceRatio);
+ pPlotter2.region(1).refresh();
+
}
else{
IPlotterStyle pstyle = aida.analysisFactory().createPlotterFactory().createPlotterStyle();
@@ -581,58 +591,72 @@
pstyle.dataStyle().fillStyle().setParameter("showZeroHeightBins", Boolean.FALSE.toString());
pstyle.setParameter("hist2DStyle", "colorMap");
if (pPlotter2!=null){
- pPlotter2.createRegion().plot(hMeanCharge2D,pstyle);
- pPlotter2.show();
+ pPlotter2.createRegion().plot(hMeanCharge2D,pstyle);
+ pPlotter2.show();
}
}
if (isMonitoringApp){
askUploadToDBDialog();
synchronized (modalMonitor) {
- try{
- modalMonitor.wait(60000); //wait 1 minute for user interaction.
- }
- catch(InterruptedException excp){
- System.out.println("Got exception: "+excp);
- }
+ try{
+ modalMonitor.wait(60000); //wait 1 minute for user interaction.
+ }
+ catch(InterruptedException excp){
+ System.out.println("Got exception: "+excp);
+ }
}
if ((m_ret!=LedColor.UNKNOWN)){
- if (m_ret==LedColor.BLUE) System.out.println("OK, upload to DB BLUE");
- else System.out.println("OK, upload to DB RED");
- try {
- uploadToDB(m_ret);
- } catch (SQLException | DatabaseObjectException | ConditionsObjectException error) {
- throw new RuntimeException("Error uploading to the database ", error);
- }
-
- System.out.println("Save an Elog too");
- uploadToElog();
+ if (m_ret==LedColor.BLUE) System.out.println("OK, upload to DB BLUE");
+ else System.out.println("OK, upload to DB RED");
+ try {
+ uploadToDB(m_ret);
+ } catch (SQLException | DatabaseObjectException | ConditionsObjectException error) {
+ throw new RuntimeException("Error uploading to the database ", error);
+ }
+
+ System.out.println("Get reference data, produce reference ratio map");
+ compareWithReference(m_ret);
+
+ style = pPlotter2.region(1).style();
+ style.setParameter("hist2DStyle", "colorMap");
+ style.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ style.dataStyle().fillStyle().setParameter("showZeroHeightBins", Boolean.FALSE.toString());
+ pPlotter2.region(1).plot(hMeanCharge2DReferenceRatio);
+ pPlotter2.region(1).refresh();
+
+ System.out.println("Save an Elog too");
+ uploadToElog();
}
else{
- System.out.println("Cancel pressed. Nothing to do");
- }
- }
+ System.out.println("Cancel pressed. Nothing to do");
+ }
+ }
+
+
/*Write a file with the LED values*/
try {
if (useRawEnergy){
- outFileName=runNumber+".raw.txt";
+ outFileName=runNumber+".raw.txt";
}
else{
- outFileName=runNumber+".energy.txt";
+ outFileName=runNumber+".energy.txt";
}
PrintWriter writer = new PrintWriter(outFileName, "UTF-8");
-
- for (int id = 0; id < 11 * 47; id++) {
-
- row = EcalMonitoringUtilities.getRowFromHistoID(id);
- column = EcalMonitoringUtilities.getColumnFromHistoID(id);
- if (EcalMonitoringUtilities.isInHole(row, column)) continue;
- if ((row == 0) || (column == 0)) continue;
-
- writer.print(column+" "+row+" "+" "+ mMean[id]+" "+mRMS[id]+"\r\n");
-
+ for (int cid = 1; cid <= 442; cid++) {/*This is a loop over the channel ID, as in the conditions system*/
+ EcalChannel cc = findChannel(cid);
+ column = cc.getX(); //This is the column
+ row = cc.getY(); //This is the row
+ id=EcalMonitoringUtilities.getHistoIDFromRowColumn(row,column);
+ row = EcalMonitoringUtilities.getRowFromHistoID(id);
+ column = EcalMonitoringUtilities.getColumnFromHistoID(id);
+ if (EcalMonitoringUtilities.isInHole(row, column)) continue;
+ if ((row == 0) || (column == 0)) continue;
+
+ writer.print(cid+" "+column+" "+row+" "+" "+ mMean[id]+" "+mRMS[id]+"\r\n");
+
}
writer.close();
@@ -648,30 +672,31 @@
System.out.println(ioe.getMessage());
}
-
-
+
+
+
+
System.out.println("EcalLedSequenceMonitor endOfData clear histograms");
- for(int ii = 0; ii < NUM_CHANNELS; ii++) {
+ for(int ii = 0; ii < NUM_CHANNELS; ii++) {
row=EcalMonitoringUtilities.getRowFromHistoID(ii);
- column = EcalMonitoringUtilities.getColumnFromHistoID(ii);
- hName="charge_"+ii;
+ column = EcalMonitoringUtilities.getColumnFromHistoID(ii);
+ hName="charge_"+ii;
try{
- aida.tree().rm(hName);
+ aida.tree().rm(hName);
}
catch(IllegalArgumentException ee){
- System.out.println("Got exception "+ee);
+ System.out.println("Got exception "+ee);
}
if (!saveTuple||(isMonitoringApp)){
- hName="nTuple"+ii;
- try{
- aida.tree().rm(hName);
- }
- catch(IllegalArgumentException ee){
- System.out.println("Got exception "+ee);
- }
- }
-
+ hName="nTuple"+ii;
+ try{
+ aida.tree().rm(hName);
+ }
+ catch(IllegalArgumentException ee){
+ System.out.println("Got exception "+ee);
+ }
+ }
}
System.out.println("EcalLedSequenceMonitor endOfData clear histograms done");
System.out.println("endOfData end");
@@ -682,10 +707,10 @@
/**
* This function returns the driver number (from 0 to 3) given the LED id.
* @param led
- * @return
+ * @return the driver number from the LED id
*/
public int getDriver(int led){
- int ret=-1;
+ int ret=-1;
if ((led>=2)&&(led<56)) ret=0;
else if ((led>=56)&&(led<112)) ret=1;
else if ((led>=112)&&(led<168)) ret=2;
@@ -698,7 +723,7 @@
* If the gain changes (because we do a re-calibration), I do not want to include this in the LED analysis
* @param energy
* @param cellID
- * @return
+ * @return the pedestal-subtracted raw energy
*/
public double getRawADCSum(double energy,long cellID){
EcalChannelConstants channelData = ecalConditions.getChannelConstants(ecalConditions.getChannelCollection().findGeometric(cellID));
@@ -713,8 +738,8 @@
int x,y,id;
double mean,rms;
System.out.println(String.format("Uploading new led data to the database, runMin=%d, runMax=%d, tag=%s ....",
- runNumber,runNumberMax,dbTag));
-
+ runNumber,runNumberMax,dbTag));
+
conditionsManager = DatabaseConditionsManager.getInstance();
EcalLedCalibrationCollection led_calibrations = new EcalLedCalibrationCollection();
led_calibrations.setConnection(conditionsManager.getConnection());
@@ -743,8 +768,8 @@
System.err.println("CollectionID: "+collectionId);
led_calibrations.insert();
ConditionsRecord conditionsRecord = new ConditionsRecord(
- led_calibrations.getCollectionId(), runNumber, runNumberMax, dbTableName, dbTableName,
- "Generated by LedAnalysis from Run #"+runNumber, dbTag);
+ led_calibrations.getCollectionId(), runNumber, runNumberMax, dbTableName, dbTableName,
+ "Generated by LedAnalysis from Run #"+runNumber, dbTag);
conditionsRecord.setConnection(conditionsManager.getConnection());
tableMetaData = conditionsManager.findTableMetaData("conditions");
conditionsRecord.setTableMetaData(tableMetaData);
@@ -789,6 +814,79 @@
}
}
+ private void compareWithReference(LedColor color){
+ int ID=0;
+ int x,y,chid;
+ double mean,rms,fillData=1;
+ if (color==LedColor.UNKNOWN){
+ System.out.println("LedMonitoringSequence::compare with reference, doing nothing");
+ return;
+ }
+ else if (color==LedColor.RED) ID=fRedReferenceID;
+ else if (color==LedColor.BLUE) ID=fBlueReferenceID;
+
+ conditionsManager = DatabaseConditionsManager.getInstance();
+
+
+ EcalLedCalibrationCollection referenceDataCollection = new EcalLedCalibrationCollection();
+ referenceDataCollection.setConnection(conditionsManager.getConnection());
+
+ TableMetaData tableMetaData = conditionsManager.findTableMetaData(dbTableName);
+ referenceDataCollection.setTableMetaData(tableMetaData);
+ System.out.println("Try to get reference data from DB. Collection ID is "+ID);
+ try {
+ referenceDataCollection.select(ID);
+ } catch (SQLException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ } catch (DatabaseObjectException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+
+ /*Now data from the reference should be there*/
+ for (EcalLedCalibration referenceData : referenceDataCollection){
+
+ chid=referenceData.getFieldValue("ecal_channel_id");
+ mean=referenceData.getFieldValue("led_response");
+ rms=referenceData.getFieldValue("rms");
+
+ EcalChannel cc = findChannel(chid);
+ column = cc.getX(); //This is the column
+ row = cc.getY(); //This is the row
+ chid=EcalMonitoringUtilities.getHistoIDFromRowColumn(row,column);
+ row = EcalMonitoringUtilities.getRowFromHistoID(id);
+ column = EcalMonitoringUtilities.getColumnFromHistoID(id);
+
+
+ if (mean!=0) fillData=mMean[id]/mean;
+ else fillData=1;
+ System.out.println("row= "+row+" column= "+column+" data= "+mMean[id]+" ref= "+mean+" ratio= "+fillData);
+ hMeanCharge2DReferenceRatio.fill(column,row,fillData);
+
+
+
+ }
+
+
+
+ style = pPlotter2.region(0).style();
+ style.setParameter("hist2DStyle", "colorMap");
+ style.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ style.dataStyle().fillStyle().setParameter("showZeroHeightBins", Boolean.FALSE.toString());
+ pPlotter2.region(0).plot(hMeanCharge2D);
+ pPlotter2.region(0).refresh();
+
+
+ style = pPlotter2.region(1).style();
+ style.setParameter("hist2DStyle", "colorMap");
+ style.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ style.dataStyle().fillStyle().setParameter("showZeroHeightBins", Boolean.FALSE.toString());
+ pPlotter2.region(1).plot(hMeanCharge2DReferenceRatio);
+ pPlotter2.region(1).refresh();
+
+
+ }
private void drawProfiles(int ledID,int driverID){
@@ -836,9 +934,9 @@
okButtonBlue = new JButton("Yes, blue");
cancelButton = new JButton("Cancel");
labelString = "<html> Update conditions to DB <br> for run: <br> "+runNumber+" - "+runNumberMax+" <br> ???? <br> "
- + "Use the monitoring app to look at the map<br>"
- + "(Tab LED sequence)<br>"
- +"Reply in 60 seconds<br>"+"</html>";
+ + "Use the monitoring app to look at the map<br>"
+ + "(Tab LED sequence)<br>"
+ +"Reply in 60 seconds<br>"+"</html>";
label = new JLabel( labelString);
frame = new JFrame("Upload to DB?");
@@ -859,49 +957,49 @@
panel.add(cancelButton);
panel.add(okButtonBlue);
panel.add(okButtonRed);
-
+
frame.setVisible(true);
okButtonBlue.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent event)
{
- m_ret=LedColor.BLUE;
- frame.dispose();
- synchronized(modalMonitor)
- {
- System.out.println("Blue pressed");
- modalMonitor.notify();
- }
- }
- }
- );
+ m_ret=LedColor.BLUE;
+ frame.dispose();
+ synchronized(modalMonitor)
+ {
+ System.out.println("Blue pressed");
+ modalMonitor.notify();
+ }
+ }
+ }
+ );
okButtonRed.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent event)
{
- m_ret=LedColor.RED;
- frame.dispose();
- synchronized(modalMonitor)
- {
- System.out.println("Red pressed");
- modalMonitor.notify();
- }
- }
- }
- );
+ m_ret=LedColor.RED;
+ frame.dispose();
+ synchronized(modalMonitor)
+ {
+ System.out.println("Red pressed");
+ modalMonitor.notify();
+ }
+ }
+ }
+ );
cancelButton.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent event)
{
- m_ret=LedColor.UNKNOWN;
- frame.dispose();
- synchronized(modalMonitor)
- {
- System.out.println("Cancel pressed");
- modalMonitor.notify();
- }
- }
- }
- );
+ m_ret=LedColor.UNKNOWN;
+ frame.dispose();
+ synchronized(modalMonitor)
+ {
+ System.out.println("Cancel pressed");
+ modalMonitor.notify();
+ }
+ }
+ }
+ );
System.out.println("askUploadDB done");
}
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalMonitoringPlots.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalMonitoringPlots.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalMonitoringPlots.java Wed Apr 27 11:11:32 2016
@@ -59,7 +59,7 @@
/**
* Set the refresh rate for histograms in this driver
- * @param eventRefreshRate: the refresh rate, defined as number of events to accumulate before
+ * @param eventRefreshRate the refresh rate, defined as number of events to accumulate before
* refreshing the plot
*/
public void setEventRefreshRate(int eventRefreshRate) {
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalPedestalViewer.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalPedestalViewer.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalPedestalViewer.java Wed Apr 27 11:11:32 2016
@@ -33,80 +33,80 @@
// this has to match the one in EcalPedstalCalculator:
private String histoNameFormat = "Ecal/Pedestals/Mode7/ped%03d";
- private AIDA aida = AIDA.defaultInstance();
- private IPlotter plotter;
- private IPlotterFactory plotterFactory;
- private IPlotterStyle pstyle;
- private PEventViewer viewer;
+ private AIDA aida = AIDA.defaultInstance();
+ private IPlotter plotter;
+ private IPlotterFactory plotterFactory;
+ private IPlotterStyle pstyle;
+ private PEventViewer viewer;
- static final String[] colors={"red","black","blue","green","yellow","pink","cyan","magenta","brown"};
- static final int nRows=3;
- static final int nColumns=3;
- private int theRegion=0;
-
- @Override
- public void detectorChanged(Detector detector) {
- plotterFactory = aida.analysisFactory().createPlotterFactory("ECal Peds");
- plotter = plotterFactory.create("ECal Peds");
- plotter.createRegions(nColumns,nRows);
- // Plot dummmy histos, else null plotter regions later:
- for (int ii=0; ii<nColumns*nRows; ii++) {
- plotter.region(ii).plot(aida.histogram1D("ASDF"+ii,100,11e9,11e11));
- }
- plotter.show();
-
- pstyle=plotterFactory.createPlotterStyle();
- pstyle.xAxisStyle().labelStyle().setBold(true);
- pstyle.yAxisStyle().labelStyle().setBold(true);
- pstyle.xAxisStyle().tickLabelStyle().setBold(true);
- pstyle.yAxisStyle().tickLabelStyle().setBold(true);
- pstyle.xAxisStyle().lineStyle().setColor("black");
- pstyle.yAxisStyle().lineStyle().setColor("black");
- pstyle.xAxisStyle().lineStyle().setThickness(2);
- pstyle.yAxisStyle().lineStyle().setThickness(2);
- pstyle.dataStyle().errorBarStyle().setThickness(0);
- pstyle.legendBoxStyle().setVisible(false);
- }
-
- @Override
- public void startOfData() {
- File config = new File("ecal-mapping-config.csv");
- if(config.exists() && config.canRead()) {
- try { viewer = new PDataEventViewer(config.getAbsolutePath()); }
- catch (IOException e) { viewer = new PEventViewer(); }
- } else { viewer = new PEventViewer(); }
- viewer.addCrystalListener(this);
- viewer.setVisible(true);
- }
-
- @Override
- public void actionPerformed(ActionEvent ae) { }
-
- @Override
- public void crystalActivated(CrystalEvent e) { }
-
- @Override
- public void crystalDeactivated(CrystalEvent e) { }
+ static final String[] colors={"red","black","blue","green","yellow","pink","cyan","magenta","brown"};
+ static final int nRows=3;
+ static final int nColumns=3;
+ private int theRegion=0;
+
+ @Override
+ public void detectorChanged(Detector detector) {
+ plotterFactory = aida.analysisFactory().createPlotterFactory("ECal Peds");
+ plotter = plotterFactory.create("ECal Peds");
+ plotter.createRegions(nColumns,nRows);
+ // Plot dummmy histos, else null plotter regions later:
+ for (int ii=0; ii<nColumns*nRows; ii++) {
+ plotter.region(ii).plot(aida.histogram1D("ASDF"+ii,100,11e9,11e11));
+ }
+ plotter.show();
+
+ pstyle=plotterFactory.createPlotterStyle();
+ pstyle.xAxisStyle().labelStyle().setBold(true);
+ pstyle.yAxisStyle().labelStyle().setBold(true);
+ pstyle.xAxisStyle().tickLabelStyle().setBold(true);
+ pstyle.yAxisStyle().tickLabelStyle().setBold(true);
+ pstyle.xAxisStyle().lineStyle().setColor("black");
+ pstyle.yAxisStyle().lineStyle().setColor("black");
+ pstyle.xAxisStyle().lineStyle().setThickness(2);
+ pstyle.yAxisStyle().lineStyle().setThickness(2);
+ pstyle.dataStyle().errorBarStyle().setThickness(0);
+ pstyle.legendBoxStyle().setVisible(false);
+ }
+
+ @Override
+ public void startOfData() {
+ File config = new File("ecal-mapping-config.csv");
+ if(config.exists() && config.canRead()) {
+ try { viewer = new PDataEventViewer(config.getAbsolutePath()); }
+ catch (IOException e) { viewer = new PEventViewer(); }
+ } else { viewer = new PEventViewer(); }
+ viewer.addCrystalListener(this);
+ viewer.setVisible(true);
+ }
+
+ @Override
+ public void actionPerformed(ActionEvent ae) { }
+
+ @Override
+ public void crystalActivated(CrystalEvent e) { }
+
+ @Override
+ public void crystalDeactivated(CrystalEvent e) { }
- @Override
- public void crystalClicked(CrystalEvent e) {
- aida.tree().cd("/");
- Point ecalPoint = Viewer.toEcalPoint(e.getCrystalID());
- if (ecalPoint.x == 0 || ecalPoint.y == 0) return;
- if (EcalMonitoringUtilities.isInHole(ecalPoint.y,ecalPoint.x)) return;
- final int cid=EcalMonitoringUtilities.getChannelIdFromRowColumn(ecalPoint.y,ecalPoint.x);
- IHistogram1D hist=aida.histogram1D(String.format(histoNameFormat,cid));
- if (hist==null) {
- System.err.println("Running the Driver?");
- } else {
- hist.setTitle(String.format("(%d,%d)",ecalPoint.x,ecalPoint.y));
+ @Override
+ public void crystalClicked(CrystalEvent e) {
+ aida.tree().cd("/");
+ Point ecalPoint = Viewer.toEcalPoint(e.getCrystalID());
+ if (ecalPoint.x == 0 || ecalPoint.y == 0) return;
+ if (EcalMonitoringUtilities.isInHole(ecalPoint.y,ecalPoint.x)) return;
+ final int cid=EcalMonitoringUtilities.getChannelIdFromRowColumn(ecalPoint.y,ecalPoint.x);
+ IHistogram1D hist=aida.histogram1D(String.format(histoNameFormat,cid));
+ if (hist==null) {
+ System.err.println("Running the Driver?");
+ } else {
+ hist.setTitle(String.format("(%d,%d)",ecalPoint.x,ecalPoint.y));
pstyle.dataStyle().lineStyle().setParameter("color", colors[theRegion%colors.length]);
- plotter.region(theRegion).clear();
- plotter.region(theRegion).plot(hist,pstyle);
- plotter.region(theRegion).refresh();
- theRegion=(theRegion+1)%(nColumns*nRows);
- }
- }
-
-
+ plotter.region(theRegion).clear();
+ plotter.region(theRegion).plot(hist,pstyle);
+ plotter.region(theRegion).refresh();
+ theRegion=(theRegion+1)%(nColumns*nRows);
+ }
+ }
+
+
}
Modified: java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalWindowPlotsXY.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalWindowPlotsXY.java (original)
+++ java/branches/HPSJAVA-409/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalWindowPlotsXY.java Wed Apr 27 11:11:32 2016
@@ -93,7 +93,7 @@
}
private void setupPlots() {
- System.out.println("ECAL WINDOW PLOTS START");
+ System.out.println("ECAL WINDOW PLOTS START");
//if (plotterFrame != null) {
// plotterFrame.dispose();
//}
@@ -108,8 +108,8 @@
IPlotterStyle pstyle = plotter.style();
pstyle.dataStyle().errorBarStyle().setVisible(false);
plotter.createRegions(1,1);
- windowPlot1 = aida.histogram1D(detector.getDetectorName() + " : " + inputCollection + " : dummy", 1, -0.5, 1 - 0.5);
- plotter.region(0).plot(windowPlot1);
+ windowPlot1 = aida.histogram1D(detector.getDetectorName() + " : " + inputCollection + " : dummy", 1, -0.5, 1 - 0.5);
+ plotter.region(0).plot(windowPlot1);
plotter.show();
}
@@ -126,15 +126,15 @@
dec.setID(hit.getCellID());
int x = dec.getValue("ix");
int y = dec.getValue("iy");
-// System.out.println("got hit: x= " + x + ", y= " + y);
+// System.out.println("got hit: x= " + x + ", y= " + y);
if (isFirst) {
- System.out.println("FIRST!!!");
+ System.out.println("FIRST!!!");
isFirst=false;
- window=hit.getADCValues().length;
- windowPlot = aida.histogram1D(detector.getDetectorName() + " : " + inputCollection + " : Window Mode Data", window, -0.5, window - 0.5);
- plotter.region(0).clear();
- plotter.region(0).plot(windowPlot);
- plotter.region(0).refresh();
+ window=hit.getADCValues().length;
+ windowPlot = aida.histogram1D(detector.getDetectorName() + " : " + inputCollection + " : Window Mode Data", window, -0.5, window - 0.5);
+ plotter.region(0).clear();
+ plotter.region(0).plot(windowPlot);
+ plotter.region(0).refresh();
}
if (testX && x != plotX) {
Modified: java/branches/HPSJAVA-409/monitoring-util/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-util/pom.xml (original)
+++ java/branches/HPSJAVA-409/monitoring-util/pom.xml Wed Apr 27 11:11:32 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/monitoring-util/</url>
Modified: java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/plotting/ExportPdf.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/plotting/ExportPdf.java (original)
+++ java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/plotting/ExportPdf.java Wed Apr 27 11:11:32 2016
@@ -8,6 +8,9 @@
import java.awt.image.BufferedImage;
import java.io.FileOutputStream;
import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
import java.util.List;
import java.util.logging.Logger;
@@ -42,12 +45,10 @@
/**
* Save a set of tabs containing plots to a file.
*
- * @param plotTabs the top level tab component (plots are actually in a set
- * of tabs without these tabs)
+ * @param plotters the list of plotters to save (from plots in the regions)
* @param fileName the file name
* @param runData the list of run data to save on the cover page
- * @throws IOException if there is a problem with the IO (e.g. writing to
- * PDF file)
+ * @throws IOException if there is a problem with the IO (e.g. writing to PDF file)
*/
public static void write(List<IPlotter> plotters, String fileName, List<String> runData)
throws IOException {
@@ -70,9 +71,23 @@
} catch (DocumentException e) {
throw new IOException(e);
}
+
+ // Sort plotters so output appears the same every time.
+ ArrayList<IPlotter> sortedPlotters = new ArrayList<IPlotter>(plotters);
+ Collections.sort(sortedPlotters, new Comparator<IPlotter>() {
+ public int compare(IPlotter object1, IPlotter object2) {
+ if (object1.title() == null) {
+ return -1;
+ }
+ if (object2.title() == null) {
+ return 1;
+ }
+ return object1.title().compareTo(object2.title());
+ }
+ });
// Write the graphics from each plotter on a new page.
- for (IPlotter plotter : plotters) {
+ for (IPlotter plotter : sortedPlotters) {
plotter.refresh();
document.newPage();
writePage(document, writer, plotter);
Modified: java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/subsys/et/package-info.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/subsys/et/package-info.java (original)
+++ java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/subsys/et/package-info.java Wed Apr 27 11:11:32 2016
@@ -1,7 +1,5 @@
/**
* ET subsystem monitoring
- * <p>
- * {@link EtSystemMonitor} implements basic status checks of the ET system.
*
* @author Jeremy McCormick, SLAC
*/
Modified: java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/AbstractTablePanel.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/AbstractTablePanel.java (original)
+++ java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/AbstractTablePanel.java Wed Apr 27 11:11:32 2016
@@ -26,224 +26,224 @@
* @see DiagnosticUpdatable
*/
public abstract class AbstractTablePanel extends JPanel implements DiagnosticUpdatable {
- // Static variables.
- private static final long serialVersionUID = 0L;
- public static final int ORIENTATION_HORIZONTAL = 0;
- public static final int ORIENTATION_VERTICAL = 1;
-
- // Components.
- private JLabel localHeader;
- private JLabel globalHeader;
- protected final JTable localTable;
- protected final JTable globalTable;
-
- // Component parameters.
- private boolean horizontal = true;
- private Dimension userPrefSize = null;
- private Dimension defaultPrefSize = new Dimension(0, 0);
-
- /**
- * Instantiates an <code>AbstractTablePanel</code>.
- * @param args Arguments to be used when generating the panel tables.
- */
- public AbstractTablePanel(Object... args) {
- // Initialize the tables.
- JTable[] tables = initializeTables(args);
- localTable = tables[0];
- globalTable = tables[1];
- add(globalTable);
- add(localTable);
-
- // Set the panels to their null starting values.
- updatePanel(null, null);
-
- // Define the panel layout.
- setLayout(null);
-
- // Create header labels for the tables.
- localHeader = new JLabel("Instantaneous Statistics");
- localHeader.setHorizontalAlignment(JLabel.CENTER);
- add(localHeader);
-
- globalHeader = new JLabel("Run Statistics");
- globalHeader.setHorizontalAlignment(JLabel.CENTER);
- add(globalHeader);
-
- // Track when the component changes size and reposition the
- // components accordingly.
- addComponentListener(new ComponentAdapter() {
- @Override
- public void componentResized(ComponentEvent e) { positionComponents(); }
- });
-
- // Define the component preferred size.
- defaultPrefSize.width = localTable.getPreferredSize().width +
- ComponentUtils.hinternal + globalTable.getPreferredSize().width;
- defaultPrefSize.height = localTable.getPreferredSize().height +
- ComponentUtils.vinternal + globalTable.getPreferredSize().height;
- }
-
- @Override
- public Dimension getPreferredSize() {
- // If there is a user-specified preferred size, return that.
- if(userPrefSize == null) { return defaultPrefSize; }
-
- // Otherwise, return the default calculated preferred size.
- else { return userPrefSize; }
- }
-
- @Override
- public void setBackground(Color bg) {
- // Set the base component background.
- super.setBackground(bg);
-
- // If the components have been initialized, pass the background
- // color change to them as appropriate. Note that the tables
- // will always retain the same background color.
- if(localTable != null) {
- // Set the header backgrounds.
- localHeader.setBackground(bg);
- globalHeader.setBackground(bg);
- }
- }
-
- @Override
- public void setFont(Font font) {
- // Set the base component font.
- super.setFont(font);
-
- // If the components have been initialized, pass the font change
- // to them as appropriate.
- if(localTable != null) {
- // Set the table fonts.
- localTable.setFont(font);
- globalTable.setFont(font);
-
- // Set the header fonts.
- Font headerFont = font.deriveFont(Font.BOLD, (float) Math.ceil(font.getSize2D() * 1.3));
- localHeader.setFont(headerFont);
- globalHeader.setFont(headerFont);
- }
- }
-
- @Override
- public void setForeground(Color fg) {
- // Set the base component foreground.
- super.setForeground(fg);
-
- // If the components have been initialized, pass the foreground
- // color change to them as appropriate. Note that the tables
- // will always retain the same foreground color.
- if(localTable != null) {
- // Set the header foregrounds.
- localHeader.setForeground(fg);
- globalHeader.setForeground(fg);
- }
- }
-
- /**
- * Sets the orientation of components on the panel.
- * @param orientation - The orientation identifier. Identifiers can
- * be obtained as static variables from the within the root object
- * <code>AbstractTable</code>.
- */
- public void setOrientation(int orientation) {
- if(orientation == ORIENTATION_HORIZONTAL) {
- if(!horizontal) {
- horizontal = true;
- positionComponents();
- }
- } else if(orientation == ORIENTATION_VERTICAL) {
- if(horizontal) {
- horizontal = false;
- positionComponents();
- }
- } else {
- throw new IllegalArgumentException("Invalid orienation identifier.");
- }
- }
-
- @Override
- public void setPreferredSize(Dimension preferredSize) {
- userPrefSize = preferredSize;
- }
-
- /**
- * Generates the two tables that are used by the component. This
- * must return an array of size two.
- * @param args - Any arguments that should be passed to the method
- * for generating tables.
- * @return Returns an array of size two, where the first index must
- * contain the local table and the second index the global table.
- */
- protected abstract JTable[] initializeTables(Object... args);
-
- /**
- * Repositions the components to the correct places on the parent
- * <code>JPanel</code>. This should be run whenever the panel
- * changes size.
- */
- private void positionComponents() {
- // Do not update if the components have not been initialized.
- if(localHeader == null) { return; }
-
- // If the components should be position horizontally...
- if(horizontal) {
- // The local components get the left half of the panel and the
- // global components the right. Find half of the panel width,
- // accounting for the internal spacing. This is an internal
- // component, so it does not employ additional spacing between
- // itself and the parent component's edges.
- int compWidth = (getWidth() - 10) / 2;
-
- // If there is any width remaining, it goes to the spacing.
- int horizontal = ComponentUtils.hinternal + (getWidth() - 10) % 2;
-
- // Place the header labels. These are given their preferred
- // height. Note that this means a very small panel may cut off
- // some of the components. First, get the preferred height of
- // the label with the larger preferred height. These should be
- // the same thing, but just in case...
- int labelHeight = localHeader.getPreferredSize().height;
- if(labelHeight < globalHeader.getPreferredSize().height) {
- labelHeight = globalHeader.getPreferredSize().height;
- }
-
- // Set the label sizes and positions.
- localHeader.setBounds(0, 0, compWidth, labelHeight);
- globalHeader.setLocation(ComponentUtils.getNextX(localHeader, horizontal), 0);
- globalHeader.setSize(compWidth, labelHeight);
-
- // The tables go under their respective labels and should fill
- // the remainder of the label height.
- int tableY = ComponentUtils.getNextY(localHeader, ComponentUtils.vinternal);
- localTable.setBounds(0, tableY, compWidth, localTable.getPreferredSize().height);
- globalTable.setBounds(globalHeader.getX(), tableY, compWidth, globalTable.getPreferredSize().height);
- }
-
- // Otherwise, position them vertically.
- else {
- // Place the header labels. These are given their preferred
- // height. Note that this means a very small panel may cut off
- // some of the components. First, get the preferred height of
- // the label with the larger preferred height. These should be
- // the same thing, but just in case...
- int labelHeight = localHeader.getPreferredSize().height;
- if(labelHeight < globalHeader.getPreferredSize().height) {
- labelHeight = globalHeader.getPreferredSize().height;
- }
-
- // The local components go first, taking up the entire upper
- // width of the panel.
- localHeader.setBounds(0, 0, getWidth(), labelHeight);
- localTable.setBounds(0, ComponentUtils.getNextY(localHeader, ComponentUtils.vinternal),
- getWidth(), localTable.getPreferredSize().height);
-
- // The global components go immediately below.
- globalHeader.setBounds(0, ComponentUtils.getNextY(localTable, ComponentUtils.vinternal),
- getWidth(), labelHeight);
- globalTable.setBounds(0, ComponentUtils.getNextY(globalHeader, ComponentUtils.vinternal),
- getWidth(), globalTable.getPreferredSize().height);
- }
- }
+ // Static variables.
+ private static final long serialVersionUID = 0L;
+ public static final int ORIENTATION_HORIZONTAL = 0;
+ public static final int ORIENTATION_VERTICAL = 1;
+
+ // Components.
+ private JLabel localHeader;
+ private JLabel globalHeader;
+ protected final JTable localTable;
+ protected final JTable globalTable;
+
+ // Component parameters.
+ private boolean horizontal = true;
+ private Dimension userPrefSize = null;
+ private Dimension defaultPrefSize = new Dimension(0, 0);
+
+ /**
+ * Instantiates an <code>AbstractTablePanel</code>.
+ * @param args Arguments to be used when generating the panel tables.
+ */
+ public AbstractTablePanel(Object... args) {
+ // Initialize the tables.
+ JTable[] tables = initializeTables(args);
+ localTable = tables[0];
+ globalTable = tables[1];
+ add(globalTable);
+ add(localTable);
+
+ // Set the panels to their null starting values.
+ updatePanel(null, null);
+
+ // Define the panel layout.
+ setLayout(null);
+
+ // Create header labels for the tables.
+ localHeader = new JLabel("Instantaneous Statistics");
+ localHeader.setHorizontalAlignment(JLabel.CENTER);
+ add(localHeader);
+
+ globalHeader = new JLabel("Run Statistics");
+ globalHeader.setHorizontalAlignment(JLabel.CENTER);
+ add(globalHeader);
+
+ // Track when the component changes size and reposition the
+ // components accordingly.
+ addComponentListener(new ComponentAdapter() {
+ @Override
+ public void componentResized(ComponentEvent e) { positionComponents(); }
+ });
+
+ // Define the component preferred size.
+ defaultPrefSize.width = localTable.getPreferredSize().width +
+ ComponentUtils.hinternal + globalTable.getPreferredSize().width;
+ defaultPrefSize.height = localTable.getPreferredSize().height +
+ ComponentUtils.vinternal + globalTable.getPreferredSize().height;
+ }
+
+ @Override
+ public Dimension getPreferredSize() {
+ // If there is a user-specified preferred size, return that.
+ if(userPrefSize == null) { return defaultPrefSize; }
+
+ // Otherwise, return the default calculated preferred size.
+ else { return userPrefSize; }
+ }
+
+ @Override
+ public void setBackground(Color bg) {
+ // Set the base component background.
+ super.setBackground(bg);
+
+ // If the components have been initialized, pass the background
+ // color change to them as appropriate. Note that the tables
+ // will always retain the same background color.
+ if(localTable != null) {
+ // Set the header backgrounds.
+ localHeader.setBackground(bg);
+ globalHeader.setBackground(bg);
+ }
+ }
+
+ @Override
+ public void setFont(Font font) {
+ // Set the base component font.
+ super.setFont(font);
+
+ // If the components have been initialized, pass the font change
+ // to them as appropriate.
+ if(localTable != null) {
+ // Set the table fonts.
+ localTable.setFont(font);
+ globalTable.setFont(font);
+
+ // Set the header fonts.
+ Font headerFont = font.deriveFont(Font.BOLD, (float) Math.ceil(font.getSize2D() * 1.3));
+ localHeader.setFont(headerFont);
+ globalHeader.setFont(headerFont);
+ }
+ }
+
+ @Override
+ public void setForeground(Color fg) {
+ // Set the base component foreground.
+ super.setForeground(fg);
+
+ // If the components have been initialized, pass the foreground
+ // color change to them as appropriate. Note that the tables
+ // will always retain the same foreground color.
+ if(localTable != null) {
+ // Set the header foregrounds.
+ localHeader.setForeground(fg);
+ globalHeader.setForeground(fg);
+ }
+ }
+
+ /**
+ * Sets the orientation of components on the panel.
+ * @param orientation - The orientation identifier. Identifiers can
+ * be obtained as static variables from the within the root object
+ * <code>AbstractTable</code>.
+ */
+ public void setOrientation(int orientation) {
+ if(orientation == ORIENTATION_HORIZONTAL) {
+ if(!horizontal) {
+ horizontal = true;
+ positionComponents();
+ }
+ } else if(orientation == ORIENTATION_VERTICAL) {
+ if(horizontal) {
+ horizontal = false;
+ positionComponents();
+ }
+ } else {
+ throw new IllegalArgumentException("Invalid orienation identifier.");
+ }
+ }
+
+ @Override
+ public void setPreferredSize(Dimension preferredSize) {
+ userPrefSize = preferredSize;
+ }
+
+ /**
+ * Generates the two tables that are used by the component. This
+ * must return an array of size two.
+ * @param args - Any arguments that should be passed to the method
+ * for generating tables.
+ * @return Returns an array of size two, where the first index must
+ * contain the local table and the second index the global table.
+ */
+ protected abstract JTable[] initializeTables(Object... args);
+
+ /**
+ * Repositions the components to the correct places on the parent
+ * <code>JPanel</code>. This should be run whenever the panel
+ * changes size.
+ */
+ private void positionComponents() {
+ // Do not update if the components have not been initialized.
+ if(localHeader == null) { return; }
+
+ // If the components should be positioned horizontally...
+ if(horizontal) {
+ // The local components get the left half of the panel and the
+ // global components the right. Find half of the panel width,
+ // accounting for the internal spacing. This is an internal
+ // component, so it does not employ additional spacing between
+ // itself and the parent component's edges.
+ int compWidth = (getWidth() - 10) / 2;
+
+ // If there is any width remaining, it goes to the spacing.
+ int horizontal = ComponentUtils.hinternal + (getWidth() - 10) % 2;
+
+ // Place the header labels. These are given their preferred
+ // height. Note that this means a very small panel may cut off
+ // some of the components. First, get the preferred height of
+ // the label with the larger preferred height. These should be
+ // the same thing, but just in case...
+ int labelHeight = localHeader.getPreferredSize().height;
+ if(labelHeight < globalHeader.getPreferredSize().height) {
+ labelHeight = globalHeader.getPreferredSize().height;
+ }
+
+ // Set the label sizes and positions.
+ localHeader.setBounds(0, 0, compWidth, labelHeight);
+ globalHeader.setLocation(ComponentUtils.getNextX(localHeader, horizontal), 0);
+ globalHeader.setSize(compWidth, labelHeight);
+
+ // The tables go under their respective labels and should fill
+ // the remainder of the label height.
+ int tableY = ComponentUtils.getNextY(localHeader, ComponentUtils.vinternal);
+ localTable.setBounds(0, tableY, compWidth, localTable.getPreferredSize().height);
+ globalTable.setBounds(globalHeader.getX(), tableY, compWidth, globalTable.getPreferredSize().height);
+ }
+
+ // Otherwise, position them vertically.
+ else {
+ // Place the header labels. These are given their preferred
+ // height. Note that this means a very small panel may cut off
+ // some of the components. First, get the preferred height of
+ // the label with the larger preferred height. These should be
+ // the same thing, but just in case...
+ int labelHeight = localHeader.getPreferredSize().height;
+ if(labelHeight < globalHeader.getPreferredSize().height) {
+ labelHeight = globalHeader.getPreferredSize().height;
+ }
+
+ // The local components go first, taking up the entire upper
+ // width of the panel.
+ localHeader.setBounds(0, 0, getWidth(), labelHeight);
+ localTable.setBounds(0, ComponentUtils.getNextY(localHeader, ComponentUtils.vinternal),
+ getWidth(), localTable.getPreferredSize().height);
+
+ // The global components go immediately below.
+ globalHeader.setBounds(0, ComponentUtils.getNextY(localTable, ComponentUtils.vinternal),
+ getWidth(), labelHeight);
+ globalTable.setBounds(0, ComponentUtils.getNextY(globalHeader, ComponentUtils.vinternal),
+ getWidth(), globalTable.getPreferredSize().height);
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/AbstractTriggerTablePanel.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/AbstractTriggerTablePanel.java (original)
+++ java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/AbstractTriggerTablePanel.java Wed Apr 27 11:11:32 2016
@@ -14,189 +14,189 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public abstract class AbstractTriggerTablePanel extends AbstractTwoColumnTablePanel {
- // Static variables.
- private static final long serialVersionUID = 0L;
-
- // Internal variables.
- private final int numCuts;
- private final boolean singles;
-
- // Store reference index variables for local and run values.
- private static final int GLOBAL = 0;
- private static final int LOCAL = 1;
-
- // Reference variables to the default table rows.
- protected static final int ROW_RECON_COUNT = 0;
- protected static final int ROW_SSP_SIM_COUNT = 1;
- protected static final int ROW_SSP_BANK_COUNT = 2;
- protected static final int ROW_SSP_EFFICIENCY = 3;
- protected static final int ROW_TRIGGER_EFFICIENCY = 4;
- protected static final int ROW_EMPTY_SPACE = 5;
- protected static final int ROW_CUT_FAILS_TITLE = 6;
- protected static final int ROW_FIRST_TRIGGER_CUT = 7;
-
- /**
- * Instantiates an <code>AbstractTriggerTablePanel</code> with the
- * indicated cut names.
- * @param cutNames
- */
- public AbstractTriggerTablePanel(String[] cutNames, boolean isSingles) {
- // Instantiate the superclass.
- super(makeTitle(cutNames));
-
- // Store the number of cuts.
- numCuts = cutNames.length;
- updatePanel(null, null);
-
- // Store whether this is a singles or pair trigger panel.
- singles = isSingles;
- }
-
- @Override
- public void updatePanel(DiagnosticSnapshot runSnapshot, DiagnosticSnapshot localSnapshot) {
- // If the snapshot is null, all values should be "N/A."
- if(runSnapshot == null || localSnapshot == null) {
- // Output cluster count data.
- String scalerNullValue = "---";
- setLocalRowValue(ROW_RECON_COUNT, scalerNullValue);
- setLocalRowValue(ROW_SSP_SIM_COUNT, scalerNullValue);
- setLocalRowValue(ROW_SSP_BANK_COUNT, scalerNullValue);
- setGlobalRowValue(ROW_RECON_COUNT, scalerNullValue);
- setGlobalRowValue(ROW_SSP_SIM_COUNT, scalerNullValue);
- setGlobalRowValue(ROW_SSP_BANK_COUNT, scalerNullValue);
-
- // Output the tracked statistical data.
- String percentNullValue = "--- / --- (---%)";
- setLocalRowValue(ROW_SSP_EFFICIENCY, percentNullValue);
- setLocalRowValue(ROW_TRIGGER_EFFICIENCY, percentNullValue);
- setGlobalRowValue(ROW_SSP_EFFICIENCY, percentNullValue);
- setGlobalRowValue(ROW_TRIGGER_EFFICIENCY, percentNullValue);
-
- int ROW_SECOND_TRIGGER_CUT = ROW_FIRST_TRIGGER_CUT + numCuts + 2;
- for(int cutRow = 0; cutRow < numCuts; cutRow++) {
- setLocalRowValue(cutRow + ROW_FIRST_TRIGGER_CUT, percentNullValue);
- setLocalRowValue(cutRow + ROW_SECOND_TRIGGER_CUT, percentNullValue);
- setGlobalRowValue(cutRow + ROW_FIRST_TRIGGER_CUT, percentNullValue);
- setGlobalRowValue(cutRow + ROW_SECOND_TRIGGER_CUT, percentNullValue);
- }
- } else {
- // Get the local and run trigger statistics from the snapshot.
- DiagnosticSnapshot[] stat = new DiagnosticSnapshot[2];
- stat[GLOBAL] = runSnapshot;
- stat[LOCAL] = localSnapshot;
-
- // Get the appropriate trigger statistical modules.
- TriggerStatModule[][] triggerStats = new TriggerStatModule[2][2];
- if(singles) {
- triggerStats[LOCAL][0] = stat[LOCAL].getSingles0Stats();
- triggerStats[LOCAL][1] = stat[LOCAL].getSingles1Stats();
- triggerStats[GLOBAL][0] = stat[GLOBAL].getSingles0Stats();
- triggerStats[GLOBAL][1] = stat[GLOBAL].getSingles1Stats();
- } else {
- triggerStats[LOCAL][0] = stat[LOCAL].getPair0Stats();
- triggerStats[LOCAL][1] = stat[LOCAL].getPair1Stats();
- triggerStats[GLOBAL][0] = stat[GLOBAL].getPair0Stats();
- triggerStats[GLOBAL][1] = stat[GLOBAL].getPair1Stats();
- }
-
- // Get the total number of triggers of each type.
- int[] sspSimTriggers = new int[2];
- int[] sspBankTriggers = new int[2];
- int[] reconSimTriggers = new int[2];
- int[] sspMatchedTriggers = new int[2];
- int[] reconMatchedTriggers = new int[2];
-
- for(int i = 0; i < 2; i++) {
- sspSimTriggers[i] = triggerStats[i][0].getSSPSimulatedTriggers() + triggerStats[i][1].getSSPSimulatedTriggers();
- sspBankTriggers[i] = triggerStats[i][0].getReportedTriggers() + triggerStats[i][1].getReportedTriggers();
- reconSimTriggers[i] = triggerStats[i][0].getReconSimulatedTriggers() + triggerStats[i][1].getReconSimulatedTriggers();
- sspMatchedTriggers[i] = triggerStats[i][0].getMatchedSSPSimulatedTriggers() + triggerStats[i][1].getMatchedSSPSimulatedTriggers();
- reconMatchedTriggers[i] = triggerStats[i][0].getMatchedReconSimulatedTriggers() + triggerStats[i][1].getMatchedReconSimulatedTriggers();
- }
-
- // Determine the most spaces needed to display the values.
- // Get the largest number of digits in any of the values.
- int mostDigits = ComponentUtils.max(reconSimTriggers[LOCAL], sspBankTriggers[LOCAL],
- sspSimTriggers[LOCAL], reconSimTriggers[GLOBAL], sspBankTriggers[GLOBAL],
- sspSimTriggers[GLOBAL]);
- int spaces = ComponentUtils.getDigits(mostDigits);
-
- // Update the single-value counters.
- String countFormat = "%" + spaces + "d";
- setLocalRowValue(ROW_RECON_COUNT, String.format(countFormat, reconSimTriggers[LOCAL]));
- setLocalRowValue(ROW_SSP_SIM_COUNT, String.format(countFormat, sspSimTriggers[LOCAL]));
- setLocalRowValue(ROW_SSP_BANK_COUNT, String.format(countFormat, sspBankTriggers[LOCAL]));
- setGlobalRowValue(ROW_RECON_COUNT, String.format(countFormat, reconSimTriggers[GLOBAL]));
- setGlobalRowValue(ROW_SSP_SIM_COUNT, String.format(countFormat, sspSimTriggers[GLOBAL]));
- setGlobalRowValue(ROW_SSP_BANK_COUNT, String.format(countFormat, sspBankTriggers[GLOBAL]));
-
- // Update the percentage counters.
- String percentFormat = "%" + spaces + "d / %" + spaces + "d (%7.3f)";
-
- setLocalRowValue(ROW_SSP_EFFICIENCY, String.format(percentFormat, sspMatchedTriggers[LOCAL],
- sspSimTriggers[LOCAL], (100.0 * sspMatchedTriggers[LOCAL] / sspSimTriggers[LOCAL])));
- setLocalRowValue(ROW_TRIGGER_EFFICIENCY, String.format(percentFormat, reconMatchedTriggers[LOCAL],
- reconSimTriggers[LOCAL], (100.0 * reconMatchedTriggers[LOCAL] / reconSimTriggers[LOCAL])));
- setGlobalRowValue(ROW_SSP_EFFICIENCY, String.format(percentFormat, sspMatchedTriggers[GLOBAL],
- sspSimTriggers[GLOBAL], (100.0 * sspMatchedTriggers[GLOBAL] / sspSimTriggers[GLOBAL])));
- setGlobalRowValue(ROW_TRIGGER_EFFICIENCY, String.format(percentFormat, reconMatchedTriggers[GLOBAL],
- reconSimTriggers[GLOBAL], (100.0 * reconMatchedTriggers[GLOBAL] / reconSimTriggers[GLOBAL])));
-
- int ROW_SECOND_TRIGGER_CUT = ROW_FIRST_TRIGGER_CUT + numCuts + 2;
- for(int cutRow = 0; cutRow < numCuts; cutRow++) {
- setLocalRowValue(cutRow + ROW_FIRST_TRIGGER_CUT, String.format(percentFormat,
- triggerStats[LOCAL][0].getSSPCutFailures(cutRow), triggerStats[LOCAL][0].getSSPSimulatedTriggers(),
- (100.0 * triggerStats[LOCAL][0].getSSPCutFailures(cutRow) / triggerStats[LOCAL][0].getSSPSimulatedTriggers())));
- setLocalRowValue(cutRow + ROW_SECOND_TRIGGER_CUT, String.format(percentFormat,
- triggerStats[LOCAL][1].getSSPCutFailures(cutRow), triggerStats[LOCAL][1].getSSPSimulatedTriggers(),
- (100.0 * triggerStats[LOCAL][1].getSSPCutFailures(cutRow) / triggerStats[LOCAL][1].getSSPSimulatedTriggers())));
- setGlobalRowValue(cutRow + ROW_FIRST_TRIGGER_CUT, String.format(percentFormat,
- triggerStats[GLOBAL][0].getSSPCutFailures(cutRow), triggerStats[GLOBAL][0].getSSPSimulatedTriggers(),
- (100.0 * triggerStats[GLOBAL][0].getSSPCutFailures(cutRow) / triggerStats[GLOBAL][0].getSSPSimulatedTriggers())));
- setGlobalRowValue(cutRow + ROW_SECOND_TRIGGER_CUT, String.format(percentFormat,
- triggerStats[GLOBAL][1].getSSPCutFailures(cutRow), triggerStats[GLOBAL][1].getSSPSimulatedTriggers(),
- (100.0 * triggerStats[GLOBAL][1].getSSPCutFailures(cutRow) / triggerStats[GLOBAL][1].getSSPSimulatedTriggers())));
- }
- }
- }
-
- /**
- * Creates the table appropriate table rows from the argument cut
- * names.
- * @param cutNames - An array containing the names of the cuts to
- * display.
- * @return Returns an array with the default table rows merged in
- * with the provided cut names.
- */
- private static final String[] makeTitle(String[] cutNames) {
- // Make a new array to hold all the text.
- String[] mergedArray = new String[cutNames.length + cutNames.length + 9];
-
- // Define the default trigger headers.
- mergedArray[0] = "Recon Triggers:";
- mergedArray[1] = "SSP Sim Triggers:";
- mergedArray[2] = "SSP Bank Triggers:";
- mergedArray[3] = "SSP Efficiency:";
- mergedArray[4] = "Trigger Efficiency:";
- mergedArray[5] = "";
- mergedArray[6] = "First Trigger Cut Failures";
-
- // Insert the cut names for the first trigger.
- for(int cutIndex = 0; cutIndex < cutNames.length; cutIndex++) {
- mergedArray[7 + cutIndex] = cutNames[cutIndex];
- }
-
- // Insert the header for the second trigger cut names.
- int startIndex = 7 + cutNames.length;
- mergedArray[startIndex] = "";
- mergedArray[startIndex + 1] = "Second Trigger Cut Failures";
-
- // Insert the next set of cut names.
- for(int cutIndex = 0; cutIndex < cutNames.length; cutIndex++) {
- mergedArray[startIndex + 2 + cutIndex] = cutNames[cutIndex];
- }
-
- // Return the resultant array.
- return mergedArray;
- }
+ // Static variables.
+ private static final long serialVersionUID = 0L;
+
+ // Internal variables.
+ private final int numCuts;
+ private final boolean singles;
+
+ // Store reference index variables for local and run values.
+ private static final int GLOBAL = 0;
+ private static final int LOCAL = 1;
+
+ // Reference variables to the default table rows.
+ protected static final int ROW_RECON_COUNT = 0;
+ protected static final int ROW_SSP_SIM_COUNT = 1;
+ protected static final int ROW_SSP_BANK_COUNT = 2;
+ protected static final int ROW_SSP_EFFICIENCY = 3;
+ protected static final int ROW_TRIGGER_EFFICIENCY = 4;
+ protected static final int ROW_EMPTY_SPACE = 5;
+ protected static final int ROW_CUT_FAILS_TITLE = 6;
+ protected static final int ROW_FIRST_TRIGGER_CUT = 7;
+
+ /**
+ * Instantiates an <code>AbstractTriggerTablePanel</code> with the
+ * indicated cut names.
+ * @param cutNames
+ */
+ public AbstractTriggerTablePanel(String[] cutNames, boolean isSingles) {
+ // Instantiate the superclass.
+ super(makeTitle(cutNames));
+
+ // Store the number of cuts.
+ numCuts = cutNames.length;
+ updatePanel(null, null);
+
+ // Store whether this is a singles or pair trigger panel.
+ singles = isSingles;
+ }
+
+ @Override
+ public void updatePanel(DiagnosticSnapshot runSnapshot, DiagnosticSnapshot localSnapshot) {
+ // If the snapshot is null, all values should be "N/A."
+ if(runSnapshot == null || localSnapshot == null) {
+ // Output cluster count data.
+ String scalerNullValue = "---";
+ setLocalRowValue(ROW_RECON_COUNT, scalerNullValue);
+ setLocalRowValue(ROW_SSP_SIM_COUNT, scalerNullValue);
+ setLocalRowValue(ROW_SSP_BANK_COUNT, scalerNullValue);
+ setGlobalRowValue(ROW_RECON_COUNT, scalerNullValue);
+ setGlobalRowValue(ROW_SSP_SIM_COUNT, scalerNullValue);
+ setGlobalRowValue(ROW_SSP_BANK_COUNT, scalerNullValue);
+
+ // Output the tracked statistical data.
+ String percentNullValue = "--- / --- (---%)";
+ setLocalRowValue(ROW_SSP_EFFICIENCY, percentNullValue);
+ setLocalRowValue(ROW_TRIGGER_EFFICIENCY, percentNullValue);
+ setGlobalRowValue(ROW_SSP_EFFICIENCY, percentNullValue);
+ setGlobalRowValue(ROW_TRIGGER_EFFICIENCY, percentNullValue);
+
+ int ROW_SECOND_TRIGGER_CUT = ROW_FIRST_TRIGGER_CUT + numCuts + 2;
+ for(int cutRow = 0; cutRow < numCuts; cutRow++) {
+ setLocalRowValue(cutRow + ROW_FIRST_TRIGGER_CUT, percentNullValue);
+ setLocalRowValue(cutRow + ROW_SECOND_TRIGGER_CUT, percentNullValue);
+ setGlobalRowValue(cutRow + ROW_FIRST_TRIGGER_CUT, percentNullValue);
+ setGlobalRowValue(cutRow + ROW_SECOND_TRIGGER_CUT, percentNullValue);
+ }
+ } else {
+ // Get the local and run trigger statistics from the snapshot.
+ DiagnosticSnapshot[] stat = new DiagnosticSnapshot[2];
+ stat[GLOBAL] = runSnapshot;
+ stat[LOCAL] = localSnapshot;
+
+ // Get the appropriate trigger statistical modules.
+ TriggerStatModule[][] triggerStats = new TriggerStatModule[2][2];
+ if(singles) {
+ triggerStats[LOCAL][0] = stat[LOCAL].getSingles0Stats();
+ triggerStats[LOCAL][1] = stat[LOCAL].getSingles1Stats();
+ triggerStats[GLOBAL][0] = stat[GLOBAL].getSingles0Stats();
+ triggerStats[GLOBAL][1] = stat[GLOBAL].getSingles1Stats();
+ } else {
+ triggerStats[LOCAL][0] = stat[LOCAL].getPair0Stats();
+ triggerStats[LOCAL][1] = stat[LOCAL].getPair1Stats();
+ triggerStats[GLOBAL][0] = stat[GLOBAL].getPair0Stats();
+ triggerStats[GLOBAL][1] = stat[GLOBAL].getPair1Stats();
+ }
+
+ // Get the total number of triggers of each type.
+ int[] sspSimTriggers = new int[2];
+ int[] sspBankTriggers = new int[2];
+ int[] reconSimTriggers = new int[2];
+ int[] sspMatchedTriggers = new int[2];
+ int[] reconMatchedTriggers = new int[2];
+
+ for(int i = 0; i < 2; i++) {
+ sspSimTriggers[i] = triggerStats[i][0].getSSPSimulatedTriggers() + triggerStats[i][1].getSSPSimulatedTriggers();
+ sspBankTriggers[i] = triggerStats[i][0].getReportedTriggers() + triggerStats[i][1].getReportedTriggers();
+ reconSimTriggers[i] = triggerStats[i][0].getReconSimulatedTriggers() + triggerStats[i][1].getReconSimulatedTriggers();
+ sspMatchedTriggers[i] = triggerStats[i][0].getMatchedSSPSimulatedTriggers() + triggerStats[i][1].getMatchedSSPSimulatedTriggers();
+ reconMatchedTriggers[i] = triggerStats[i][0].getMatchedReconSimulatedTriggers() + triggerStats[i][1].getMatchedReconSimulatedTriggers();
+ }
+
+ // Determine the most spaces needed to display the values.
+ // Get the largest number of digits in any of the values.
+ int mostDigits = ComponentUtils.max(reconSimTriggers[LOCAL], sspBankTriggers[LOCAL],
+ sspSimTriggers[LOCAL], reconSimTriggers[GLOBAL], sspBankTriggers[GLOBAL],
+ sspSimTriggers[GLOBAL]);
+ int spaces = ComponentUtils.getDigits(mostDigits);
+
+ // Update the single-value counters.
+ String countFormat = "%" + spaces + "d";
+ setLocalRowValue(ROW_RECON_COUNT, String.format(countFormat, reconSimTriggers[LOCAL]));
+ setLocalRowValue(ROW_SSP_SIM_COUNT, String.format(countFormat, sspSimTriggers[LOCAL]));
+ setLocalRowValue(ROW_SSP_BANK_COUNT, String.format(countFormat, sspBankTriggers[LOCAL]));
+ setGlobalRowValue(ROW_RECON_COUNT, String.format(countFormat, reconSimTriggers[GLOBAL]));
+ setGlobalRowValue(ROW_SSP_SIM_COUNT, String.format(countFormat, sspSimTriggers[GLOBAL]));
+ setGlobalRowValue(ROW_SSP_BANK_COUNT, String.format(countFormat, sspBankTriggers[GLOBAL]));
+
+ // Update the percentage counters.
+ String percentFormat = "%" + spaces + "d / %" + spaces + "d (%7.3f)";
+
+ setLocalRowValue(ROW_SSP_EFFICIENCY, String.format(percentFormat, sspMatchedTriggers[LOCAL],
+ sspSimTriggers[LOCAL], (100.0 * sspMatchedTriggers[LOCAL] / sspSimTriggers[LOCAL])));
+ setLocalRowValue(ROW_TRIGGER_EFFICIENCY, String.format(percentFormat, reconMatchedTriggers[LOCAL],
+ reconSimTriggers[LOCAL], (100.0 * reconMatchedTriggers[LOCAL] / reconSimTriggers[LOCAL])));
+ setGlobalRowValue(ROW_SSP_EFFICIENCY, String.format(percentFormat, sspMatchedTriggers[GLOBAL],
+ sspSimTriggers[GLOBAL], (100.0 * sspMatchedTriggers[GLOBAL] / sspSimTriggers[GLOBAL])));
+ setGlobalRowValue(ROW_TRIGGER_EFFICIENCY, String.format(percentFormat, reconMatchedTriggers[GLOBAL],
+ reconSimTriggers[GLOBAL], (100.0 * reconMatchedTriggers[GLOBAL] / reconSimTriggers[GLOBAL])));
+
+ int ROW_SECOND_TRIGGER_CUT = ROW_FIRST_TRIGGER_CUT + numCuts + 2;
+ for(int cutRow = 0; cutRow < numCuts; cutRow++) {
+ setLocalRowValue(cutRow + ROW_FIRST_TRIGGER_CUT, String.format(percentFormat,
+ triggerStats[LOCAL][0].getSSPCutFailures(cutRow), triggerStats[LOCAL][0].getSSPSimulatedTriggers(),
+ (100.0 * triggerStats[LOCAL][0].getSSPCutFailures(cutRow) / triggerStats[LOCAL][0].getSSPSimulatedTriggers())));
+ setLocalRowValue(cutRow + ROW_SECOND_TRIGGER_CUT, String.format(percentFormat,
+ triggerStats[LOCAL][1].getSSPCutFailures(cutRow), triggerStats[LOCAL][1].getSSPSimulatedTriggers(),
+ (100.0 * triggerStats[LOCAL][1].getSSPCutFailures(cutRow) / triggerStats[LOCAL][1].getSSPSimulatedTriggers())));
+ setGlobalRowValue(cutRow + ROW_FIRST_TRIGGER_CUT, String.format(percentFormat,
+ triggerStats[GLOBAL][0].getSSPCutFailures(cutRow), triggerStats[GLOBAL][0].getSSPSimulatedTriggers(),
+ (100.0 * triggerStats[GLOBAL][0].getSSPCutFailures(cutRow) / triggerStats[GLOBAL][0].getSSPSimulatedTriggers())));
+ setGlobalRowValue(cutRow + ROW_SECOND_TRIGGER_CUT, String.format(percentFormat,
+ triggerStats[GLOBAL][1].getSSPCutFailures(cutRow), triggerStats[GLOBAL][1].getSSPSimulatedTriggers(),
+ (100.0 * triggerStats[GLOBAL][1].getSSPCutFailures(cutRow) / triggerStats[GLOBAL][1].getSSPSimulatedTriggers())));
+ }
+ }
+ }
+
+ /**
+ * Creates the appropriate table rows from the argument cut
+ * names.
+ * @param cutNames - An array containing the names of the cuts to
+ * display.
+ * @return Returns an array with the default table rows merged in
+ * with the provided cut names.
+ */
+ private static final String[] makeTitle(String[] cutNames) {
+ // Make a new array to hold all the text.
+ String[] mergedArray = new String[cutNames.length + cutNames.length + 9];
+
+ // Define the default trigger headers.
+ mergedArray[0] = "Recon Triggers:";
+ mergedArray[1] = "SSP Sim Triggers:";
+ mergedArray[2] = "SSP Bank Triggers:";
+ mergedArray[3] = "SSP Efficiency:";
+ mergedArray[4] = "Trigger Efficiency:";
+ mergedArray[5] = "";
+ mergedArray[6] = "First Trigger Cut Failures";
+
+ // Insert the cut names for the first trigger.
+ for(int cutIndex = 0; cutIndex < cutNames.length; cutIndex++) {
+ mergedArray[7 + cutIndex] = cutNames[cutIndex];
+ }
+
+ // Insert the header for the second trigger cut names.
+ int startIndex = 7 + cutNames.length;
+ mergedArray[startIndex] = "";
+ mergedArray[startIndex + 1] = "Second Trigger Cut Failures";
+
+ // Insert the next set of cut names.
+ for(int cutIndex = 0; cutIndex < cutNames.length; cutIndex++) {
+ mergedArray[startIndex + 2 + cutIndex] = cutNames[cutIndex];
+ }
+
+ // Return the resultant array.
+ return mergedArray;
+ }
}
Modified: java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/AbstractTwoColumnTablePanel.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/AbstractTwoColumnTablePanel.java (original)
+++ java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/AbstractTwoColumnTablePanel.java Wed Apr 27 11:11:32 2016
@@ -16,94 +16,94 @@
* @see AbstractTablePanel
*/
public abstract class AbstractTwoColumnTablePanel extends AbstractTablePanel {
- // Static variables.
- private static final long serialVersionUID = 0L;
-
- // Table models.
- private TableTextModel localModel;
- private TableTextModel globalModel;
-
- // Table model mappings.
- private static final int COL_TITLE = 0;
- private static final int COL_VALUE = 1;
-
- /**
- * Instantiates an <code>AbstractTwoColumnTablePanel</code> object
- * with the indicated row names.
- * @param rowNames - The names of the rows.
- */
- public AbstractTwoColumnTablePanel(String[] rowNames) {
- super((Object[]) rowNames);
- }
-
- @Override
- protected JTable[] initializeTables(Object... args) {
- // The arguments should be a string array.
- if(!(args instanceof String[])) {
- throw new IllegalArgumentException("Row names must be strings!");
- }
- String[] rowNames = (String[]) args;
-
- // Initialize the table models. They should have two columns
- // (one for values and one for headers) and a number of rows
- // equal to the number of row names.
- localModel = new TableTextModel(rowNames.length, 2);
- globalModel = new TableTextModel(rowNames.length, 2);
-
- // Initialize the titles.
- for(int i = 0; i < rowNames.length; i++) {
- localModel.setValueAt(rowNames[i], i, COL_TITLE);
- globalModel.setValueAt(rowNames[i], i, COL_TITLE);
- }
- updatePanel(null, null);
-
- // Make a cell renderer.
- DefaultTableCellRenderer centerRenderer = new DefaultTableCellRenderer();
- centerRenderer.setHorizontalAlignment(JLabel.CENTER);
-
- // Create JTable objects to display the data.
- JTable localTable = new JTable(localModel);
- localTable.setRowSelectionAllowed(false);
- localTable.setColumnSelectionAllowed(false);
- localTable.setCellSelectionEnabled(false);
- localTable.setShowVerticalLines(false);
- localTable.getColumnModel().getColumn(0).setMinWidth(200);
- localTable.getColumnModel().getColumn(0).setMaxWidth(200);
- localTable.getColumnModel().getColumn(1).setCellRenderer(centerRenderer);
- localTable.setFont(new Font("monospaced", localTable.getFont().getStyle(), localTable.getFont().getSize()));
-
- JTable globalTable = new JTable(globalModel);
- globalTable.setRowSelectionAllowed(false);
- globalTable.setColumnSelectionAllowed(false);
- globalTable.setCellSelectionEnabled(false);
- globalTable.setShowVerticalLines(false);
- globalTable.getColumnModel().getColumn(0).setMinWidth(200);
- globalTable.getColumnModel().getColumn(0).setMaxWidth(200);
- globalTable.getColumnModel().getColumn(1).setCellRenderer(centerRenderer);
- globalTable.setFont(new Font("monospaced", globalTable.getFont().getStyle(), globalTable.getFont().getSize()));
-
- // Return the two tables.
- return new JTable[] { localTable, globalTable };
- }
-
- /**
- * Sets the value of the indicated row for the global statistical
- * table.
- * @param rowIndex - The row.
- * @param value - The new value.
- */
- protected void setGlobalRowValue(int rowIndex, String value) {
- globalModel.setValueAt(value, rowIndex, COL_VALUE);
- }
-
- /**
- * Sets the value of the indicated row for the local statistical
- * table.
- * @param rowIndex - The row.
- * @param value - The new value.
- */
- protected void setLocalRowValue(int rowIndex, String value) {
- localModel.setValueAt(value, rowIndex, COL_VALUE);
- }
+ // Static variables.
+ private static final long serialVersionUID = 0L;
+
+ // Table models.
+ private TableTextModel localModel;
+ private TableTextModel globalModel;
+
+ // Table model mappings.
+ private static final int COL_TITLE = 0;
+ private static final int COL_VALUE = 1;
+
+ /**
+ * Instantiates an <code>AbstractTwoColumnTablePanel</code> object
+ * with the indicated row names.
+ * @param rowNames - The names of the rows.
+ */
+ public AbstractTwoColumnTablePanel(String[] rowNames) {
+ super((Object[]) rowNames);
+ }
+
+ @Override
+ protected JTable[] initializeTables(Object... args) {
+ // The arguments should be a string array.
+ if(!(args instanceof String[])) {
+ throw new IllegalArgumentException("Row names must be strings!");
+ }
+ String[] rowNames = (String[]) args;
+
+ // Initialize the table models. They should have two columns
+ // (one for values and one for headers) and a number of rows
+ // equal to the number of row names.
+ localModel = new TableTextModel(rowNames.length, 2);
+ globalModel = new TableTextModel(rowNames.length, 2);
+
+ // Initialize the titles.
+ for(int i = 0; i < rowNames.length; i++) {
+ localModel.setValueAt(rowNames[i], i, COL_TITLE);
+ globalModel.setValueAt(rowNames[i], i, COL_TITLE);
+ }
+ updatePanel(null, null);
+
+ // Make a cell renderer.
+ DefaultTableCellRenderer centerRenderer = new DefaultTableCellRenderer();
+ centerRenderer.setHorizontalAlignment(JLabel.CENTER);
+
+ // Create JTable objects to display the data.
+ JTable localTable = new JTable(localModel);
+ localTable.setRowSelectionAllowed(false);
+ localTable.setColumnSelectionAllowed(false);
+ localTable.setCellSelectionEnabled(false);
+ localTable.setShowVerticalLines(false);
+ localTable.getColumnModel().getColumn(0).setMinWidth(200);
+ localTable.getColumnModel().getColumn(0).setMaxWidth(200);
+ localTable.getColumnModel().getColumn(1).setCellRenderer(centerRenderer);
+ localTable.setFont(new Font("monospaced", localTable.getFont().getStyle(), localTable.getFont().getSize()));
+
+ JTable globalTable = new JTable(globalModel);
+ globalTable.setRowSelectionAllowed(false);
+ globalTable.setColumnSelectionAllowed(false);
+ globalTable.setCellSelectionEnabled(false);
+ globalTable.setShowVerticalLines(false);
+ globalTable.getColumnModel().getColumn(0).setMinWidth(200);
+ globalTable.getColumnModel().getColumn(0).setMaxWidth(200);
+ globalTable.getColumnModel().getColumn(1).setCellRenderer(centerRenderer);
+ globalTable.setFont(new Font("monospaced", globalTable.getFont().getStyle(), globalTable.getFont().getSize()));
+
+ // Return the two tables.
+ return new JTable[] { localTable, globalTable };
+ }
+
+ /**
+ * Sets the value of the indicated row for the global statistical
+ * table.
+ * @param rowIndex - The row.
+ * @param value - The new value.
+ */
+ protected void setGlobalRowValue(int rowIndex, String value) {
+ globalModel.setValueAt(value, rowIndex, COL_VALUE);
+ }
+
+ /**
+ * Sets the value of the indicated row for the local statistical
+ * table.
+ * @param rowIndex - The row.
+ * @param value - The new value.
+ */
+ protected void setLocalRowValue(int rowIndex, String value) {
+ localModel.setValueAt(value, rowIndex, COL_VALUE);
+ }
}
Modified: java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/ClusterTablePanel.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/ClusterTablePanel.java (original)
+++ java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/ClusterTablePanel.java Wed Apr 27 11:11:32 2016
@@ -14,99 +14,99 @@
* @see AbstractTablePanel
*/
public class ClusterTablePanel extends AbstractTwoColumnTablePanel {
- // Static variables.
- private static final long serialVersionUID = 0L;
- private static final String[] TABLE_TITLES = { "Recon Clusters", "SSP Clusters", "Matched Clusters",
- "Failed (Position)", "Failed (Energy)", "Failed (Hit Count)" };
-
- // Table model mappings.
- private static final int ROW_RECON_COUNT = 0;
- private static final int ROW_SSP_COUNT = 1;
- private static final int ROW_MATCHED = 2;
- private static final int ROW_FAILED_POSITION = 3;
- private static final int ROW_FAILED_ENERGY = 4;
- private static final int ROW_FAILED_HIT_COUNT = 5;
-
- /**
- * Instantiate a new <code>ClusterTablePanel</code>.
- */
- public ClusterTablePanel() { super(TABLE_TITLES); }
-
- @Override
- public void updatePanel(DiagnosticSnapshot runSnapshot, DiagnosticSnapshot localSnapshot) {
- // If the snapshot is null, all values should be "N/A."
- if(localSnapshot == null || runSnapshot == null) {
- // Output cluster count data.
- String scalerNullValue = "---";
- setLocalRowValue(ROW_RECON_COUNT, scalerNullValue);
- setLocalRowValue(ROW_SSP_COUNT, scalerNullValue);
- setGlobalRowValue(ROW_RECON_COUNT, scalerNullValue);
- setGlobalRowValue(ROW_SSP_COUNT, scalerNullValue);
-
- // Output the tracked statistical data.
- String percentNullValue = "--- / --- (---%)";
- setLocalRowValue(ROW_MATCHED, percentNullValue);
- setLocalRowValue(ROW_FAILED_POSITION, percentNullValue);
- setLocalRowValue(ROW_FAILED_ENERGY, percentNullValue);
- setLocalRowValue(ROW_FAILED_HIT_COUNT, percentNullValue);
- setGlobalRowValue(ROW_MATCHED, percentNullValue);
- setGlobalRowValue(ROW_FAILED_POSITION, percentNullValue);
- setGlobalRowValue(ROW_FAILED_ENERGY, percentNullValue);
- setGlobalRowValue(ROW_FAILED_HIT_COUNT, percentNullValue);
- }
-
- // Otherwise, populate the table with the diagnostic data.
- else {
- // Get the cluster statistical banks.
- ClusterStatModule lstat = localSnapshot.getClusterStats();
- ClusterStatModule rstat = runSnapshot.getClusterStats();
-
- // Get the largest number of digits in any of the values.
- int mostDigits = ComponentUtils.max(lstat.getReconClusterCount(), lstat.getSSPClusterCount(), lstat.getMatches(),
- lstat.getPositionFailures(), lstat.getEnergyFailures(), lstat.getHitCountFailures(),
- rstat.getReconClusterCount(), rstat.getSSPClusterCount(), rstat.getMatches(),
- rstat.getPositionFailures(), rstat.getEnergyFailures(), rstat.getHitCountFailures());
- int spaces = ComponentUtils.getDigits(mostDigits);
-
- // Put the number of reconstructed and SSP clusters into
- // the tables.
- int[] clusterValue = {
- lstat.getReconClusterCount(),
- lstat.getSSPClusterCount(),
- rstat.getReconClusterCount(),
- rstat.getSSPClusterCount()
- };
- String countFormat = "%" + spaces + "d";
- setLocalRowValue(ROW_RECON_COUNT, String.format(countFormat, clusterValue[0]));
- setLocalRowValue(ROW_SSP_COUNT, String.format(countFormat, clusterValue[1]));
- setGlobalRowValue(ROW_RECON_COUNT, String.format(countFormat, clusterValue[2]));
- setGlobalRowValue(ROW_SSP_COUNT, String.format(countFormat, clusterValue[3]));
-
- // Output the tracked statistical data.
- int total;
- String percentFormat = "%" + spaces + "d / %" + spaces + "d (%7.3f)";
- int[] statValue = {
- lstat.getMatches(),
- lstat.getPositionFailures(),
- lstat.getEnergyFailures(),
- lstat.getHitCountFailures(),
- rstat.getMatches(),
- rstat.getPositionFailures(),
- rstat.getEnergyFailures(),
- rstat.getHitCountFailures()
- };
-
- total = lstat.getReconClusterCount();
- setLocalRowValue(ROW_MATCHED, String.format(percentFormat, statValue[0], total, 100.0 * statValue[0] / total));
- setLocalRowValue(ROW_FAILED_POSITION, String.format(percentFormat, statValue[1], total, 100.0 * statValue[1] / total));
- setLocalRowValue(ROW_FAILED_ENERGY, String.format(percentFormat, statValue[2], total, 100.0 * statValue[2] / total));
- setLocalRowValue(ROW_FAILED_HIT_COUNT, String.format(percentFormat, statValue[3], total, 100.0 * statValue[3] / total));
-
- total = rstat.getReconClusterCount();
- setGlobalRowValue(ROW_MATCHED, String.format(percentFormat, statValue[4], total, 100.0 * statValue[4] / total));
- setGlobalRowValue(ROW_FAILED_POSITION, String.format(percentFormat, statValue[5], total, 100.0 * statValue[5] / total));
- setGlobalRowValue(ROW_FAILED_ENERGY, String.format(percentFormat, statValue[6], total, 100.0 * statValue[6] / total));
- setGlobalRowValue(ROW_FAILED_HIT_COUNT, String.format(percentFormat, statValue[7], total, 100.0 * statValue[7] / total));
- }
- }
+ // Static variables.
+ private static final long serialVersionUID = 0L;
+ private static final String[] TABLE_TITLES = { "Recon Clusters", "SSP Clusters", "Matched Clusters",
+ "Failed (Position)", "Failed (Energy)", "Failed (Hit Count)" };
+
+ // Table model mappings.
+ private static final int ROW_RECON_COUNT = 0;
+ private static final int ROW_SSP_COUNT = 1;
+ private static final int ROW_MATCHED = 2;
+ private static final int ROW_FAILED_POSITION = 3;
+ private static final int ROW_FAILED_ENERGY = 4;
+ private static final int ROW_FAILED_HIT_COUNT = 5;
+
+ /**
+ * Instantiate a new <code>ClusterTablePanel</code>.
+ */
+ public ClusterTablePanel() { super(TABLE_TITLES); }
+
+ @Override
+ public void updatePanel(DiagnosticSnapshot runSnapshot, DiagnosticSnapshot localSnapshot) {
+ // If either snapshot is null, all values should display the "---" placeholders.
+ if(localSnapshot == null || runSnapshot == null) {
+ // Output cluster count data.
+ String scalerNullValue = "---";
+ setLocalRowValue(ROW_RECON_COUNT, scalerNullValue);
+ setLocalRowValue(ROW_SSP_COUNT, scalerNullValue);
+ setGlobalRowValue(ROW_RECON_COUNT, scalerNullValue);
+ setGlobalRowValue(ROW_SSP_COUNT, scalerNullValue);
+
+ // Output the tracked statistical data.
+ String percentNullValue = "--- / --- (---%)";
+ setLocalRowValue(ROW_MATCHED, percentNullValue);
+ setLocalRowValue(ROW_FAILED_POSITION, percentNullValue);
+ setLocalRowValue(ROW_FAILED_ENERGY, percentNullValue);
+ setLocalRowValue(ROW_FAILED_HIT_COUNT, percentNullValue);
+ setGlobalRowValue(ROW_MATCHED, percentNullValue);
+ setGlobalRowValue(ROW_FAILED_POSITION, percentNullValue);
+ setGlobalRowValue(ROW_FAILED_ENERGY, percentNullValue);
+ setGlobalRowValue(ROW_FAILED_HIT_COUNT, percentNullValue);
+ }
+
+ // Otherwise, populate the table with the diagnostic data.
+ else {
+ // Get the cluster statistical banks.
+ ClusterStatModule lstat = localSnapshot.getClusterStats();
+ ClusterStatModule rstat = runSnapshot.getClusterStats();
+
+ // Get the largest number of digits in any of the values.
+ int mostDigits = ComponentUtils.max(lstat.getReconClusterCount(), lstat.getSSPClusterCount(), lstat.getMatches(),
+ lstat.getPositionFailures(), lstat.getEnergyFailures(), lstat.getHitCountFailures(),
+ rstat.getReconClusterCount(), rstat.getSSPClusterCount(), rstat.getMatches(),
+ rstat.getPositionFailures(), rstat.getEnergyFailures(), rstat.getHitCountFailures());
+ int spaces = ComponentUtils.getDigits(mostDigits);
+
+ // Put the number of reconstructed and SSP clusters into
+ // the tables.
+ int[] clusterValue = {
+ lstat.getReconClusterCount(),
+ lstat.getSSPClusterCount(),
+ rstat.getReconClusterCount(),
+ rstat.getSSPClusterCount()
+ };
+ String countFormat = "%" + spaces + "d";
+ setLocalRowValue(ROW_RECON_COUNT, String.format(countFormat, clusterValue[0]));
+ setLocalRowValue(ROW_SSP_COUNT, String.format(countFormat, clusterValue[1]));
+ setGlobalRowValue(ROW_RECON_COUNT, String.format(countFormat, clusterValue[2]));
+ setGlobalRowValue(ROW_SSP_COUNT, String.format(countFormat, clusterValue[3]));
+
+ // Output the tracked statistical data.
+ int total;
+ String percentFormat = "%" + spaces + "d / %" + spaces + "d (%7.3f)";
+ int[] statValue = {
+ lstat.getMatches(),
+ lstat.getPositionFailures(),
+ lstat.getEnergyFailures(),
+ lstat.getHitCountFailures(),
+ rstat.getMatches(),
+ rstat.getPositionFailures(),
+ rstat.getEnergyFailures(),
+ rstat.getHitCountFailures()
+ };
+
+ total = lstat.getReconClusterCount();
+ setLocalRowValue(ROW_MATCHED, String.format(percentFormat, statValue[0], total, 100.0 * statValue[0] / total));
+ setLocalRowValue(ROW_FAILED_POSITION, String.format(percentFormat, statValue[1], total, 100.0 * statValue[1] / total));
+ setLocalRowValue(ROW_FAILED_ENERGY, String.format(percentFormat, statValue[2], total, 100.0 * statValue[2] / total));
+ setLocalRowValue(ROW_FAILED_HIT_COUNT, String.format(percentFormat, statValue[3], total, 100.0 * statValue[3] / total));
+
+ total = rstat.getReconClusterCount();
+ setGlobalRowValue(ROW_MATCHED, String.format(percentFormat, statValue[4], total, 100.0 * statValue[4] / total));
+ setGlobalRowValue(ROW_FAILED_POSITION, String.format(percentFormat, statValue[5], total, 100.0 * statValue[5] / total));
+ setGlobalRowValue(ROW_FAILED_ENERGY, String.format(percentFormat, statValue[6], total, 100.0 * statValue[6] / total));
+ setGlobalRowValue(ROW_FAILED_HIT_COUNT, String.format(percentFormat, statValue[7], total, 100.0 * statValue[7] / total));
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/ComponentUtils.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/ComponentUtils.java (original)
+++ java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/ComponentUtils.java Wed Apr 27 11:11:32 2016
@@ -11,116 +11,116 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public class ComponentUtils {
- /** The default spacing used between a horizontal edge of one
- * component and the horizontal edge of another. */
- static final int hinternal = 10;
- /** The default spacing used between a vertical edge of one
- * component and the vertical edge of another. */
- static final int vinternal = 10;
- /** The default spacing used between a horizontal edge of one
- * component and the edge of its parent component. */
- static final int hexternal = 0;
- /** The default spacing used between a vertical edge of one
- * component and the edge of its parent component. */
- static final int vexternal = 0;
-
- /**
- * Gets a <code>String</code> composed of a number of instances of
- * character <code>c</code> equal to <code>number</code>.
- * @param c - The character to repeat.
- * @param number - The number of repetitions.
- * @return Returns the repeated character as a <code>String</code>.
- */
- public static final String getChars(char c, int number) {
- // Create a buffer to store the characters in.
- StringBuffer s = new StringBuffer();
-
- // Add the indicated number of instances.
- for(int i = 0; i < number; i++) {
- s.append(c);
- }
-
- // Return the string.
- return s.toString();
- }
-
- /**
- * Gets the number of digits in the base-10 String representation
- * of an integer primitive. Negative signs are not included in the
- * digit count.
- * @param value - The value of which to obtain the length.
- * @return Returns the number of digits in the String representation
- * of the argument value.
- */
- public static final int getDigits(int value) {
- return TriggerDiagnosticUtil.getDigits(value);
- }
-
- /**
- * Gets the maximum value from a list of values.
- * @param values - The values to compare.
- * @return Returns the largest of the argument values.
- * @throws IllegalArgumentException Occurs if no values are given.
- */
- public static final int max(int... values) throws IllegalArgumentException {
- // Throw an error if no arguments are provided.
- if(values == null || values.length == 0) {
- throw new IllegalArgumentException("Can not determine maximum value from a list of 0 values.");
- }
-
- // If there is only one value, return it.
- if(values.length == 1) { return values[0]; }
-
- // Otherwise, get the largest value.
- int largest = Integer.MIN_VALUE;
- for(int value : values) {
- if(value > largest) { largest = value; }
- }
-
- // Return the result.
- return largest;
- }
-
- /**
- * Gets the x-coordinate immediately to the right of the given
- * component.
- * @param c - The component of which to find the edge.
- * @return Returns the x-coordinate as an <code>int</code> value.
- */
- static final int getNextX(Component c) {
- return getNextX(c, 0);
- }
-
- /**
- * Gets the x-coordinate a given distance to the right edge of the
- * argument component.
- * @param c - The component of which to find the edge.
- * @param spacing - The additional spacing past the edge of the
- * component to add.
- * @return Returns the x-coordinate as an <code>int</code> value.
- */
- static final int getNextX(Component c, int spacing) {
- return c.getX() + c.getWidth() + spacing;
- }
-
- /**
- * Gets the y-coordinate immediately below the given component.
- * @param c - The component of which to find the edge.
- * @return Returns the y-coordinate as an <code>int</code> value.
- */
- static final int getNextY(Component c) {
- return getNextY(c, 0);
- }
-
- /**
- * Gets the y-coordinate a given distance below the bottom edge
- * of the argument component.
- * @param c - The component of which to find the edge.
- * @param spacing - The additional spacing past the edge of the
- * component to add.
- * @return Returns the y-coordinate as an <code>int</code> value.
- */
- static final int getNextY(Component c, int spacing) {
- return c.getY() + c.getHeight() + spacing;
- }
+ /** The default spacing used between a horizontal edge of one
+ * component and the horizontal edge of another. */
+ static final int hinternal = 10;
+ /** The default spacing used between a vertical edge of one
+ * component and the vertical edge of another. */
+ static final int vinternal = 10;
+ /** The default spacing used between a horizontal edge of one
+ * component and the edge of its parent component. */
+ static final int hexternal = 0;
+ /** The default spacing used between a vertical edge of one
+ * component and the edge of its parent component. */
+ static final int vexternal = 0;
+
+ /**
+ * Gets a <code>String</code> composed of a number of instances of
+ * character <code>c</code> equal to <code>number</code>.
+ * @param c - The character to repeat.
+ * @param number - The number of repetitions.
+ * @return Returns the repeated character as a <code>String</code>.
+ */
+ public static final String getChars(char c, int number) {
+ // Create a buffer to store the characters in.
+ StringBuffer s = new StringBuffer();
+
+ // Add the indicated number of instances.
+ for(int i = 0; i < number; i++) {
+ s.append(c);
+ }
+
+ // Return the string.
+ return s.toString();
+ }
+
+ /**
+ * Gets the number of digits in the base-10 String representation
+ * of an integer primitive. Negative signs are not included in the
+ * digit count.
+ * @param value - The value of which to obtain the length.
+ * @return Returns the number of digits in the String representation
+ * of the argument value.
+ */
+ public static final int getDigits(int value) {
+ return TriggerDiagnosticUtil.getDigits(value);
+ }
+
+ /**
+ * Gets the maximum value from a list of values.
+ * @param values - The values to compare.
+ * @return Returns the largest of the argument values.
+ * @throws IllegalArgumentException Occurs if no values are given.
+ */
+ public static final int max(int... values) throws IllegalArgumentException {
+ // Throw an error if no arguments are provided.
+ if(values == null || values.length == 0) {
+ throw new IllegalArgumentException("Can not determine maximum value from a list of 0 values.");
+ }
+
+ // If there is only one value, return it.
+ if(values.length == 1) { return values[0]; }
+
+ // Otherwise, get the largest value.
+ int largest = Integer.MIN_VALUE;
+ for(int value : values) {
+ if(value > largest) { largest = value; }
+ }
+
+ // Return the result.
+ return largest;
+ }
+
+ /**
+ * Gets the x-coordinate immediately to the right of the given
+ * component.
+ * @param c - The component of which to find the edge.
+ * @return Returns the x-coordinate as an <code>int</code> value.
+ */
+ static final int getNextX(Component c) {
+ return getNextX(c, 0);
+ }
+
+ /**
+ * Gets the x-coordinate a given distance past the right edge of the
+ * argument component.
+ * @param c - The component of which to find the edge.
+ * @param spacing - The additional spacing past the edge of the
+ * component to add.
+ * @return Returns the x-coordinate as an <code>int</code> value.
+ */
+ static final int getNextX(Component c, int spacing) {
+ return c.getX() + c.getWidth() + spacing;
+ }
+
+ /**
+ * Gets the y-coordinate immediately below the given component.
+ * @param c - The component of which to find the edge.
+ * @return Returns the y-coordinate as an <code>int</code> value.
+ */
+ static final int getNextY(Component c) {
+ return getNextY(c, 0);
+ }
+
+ /**
+ * Gets the y-coordinate a given distance below the bottom edge
+ * of the argument component.
+ * @param c - The component of which to find the edge.
+ * @param spacing - The additional spacing past the edge of the
+ * component to add.
+ * @return Returns the y-coordinate as an <code>int</code> value.
+ */
+ static final int getNextY(Component c, int spacing) {
+ return c.getY() + c.getHeight() + spacing;
+ }
}
Modified: java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/DiagnosticUpdatable.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/DiagnosticUpdatable.java (original)
+++ java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/DiagnosticUpdatable.java Wed Apr 27 11:11:32 2016
@@ -9,14 +9,15 @@
* alter their displayed or constituent values.
*
* @author Kyle McCarty <[log in to unmask]>
- * @see DiagSnapshot
+ * @see org.hps.analysis.trigger.data.DiagnosticSnapshot
*/
public interface DiagnosticUpdatable {
- /**
- * Updates the object with information from the trigger diagnostic
- * snapshot in the argument.
- * @param snapshot - The snapshot containing information with which
- * to update the object.
- */
- public void updatePanel(DiagnosticSnapshot runSnapshot, DiagnosticSnapshot localSnapshot);
+ /**
+ * Updates the object with information from the trigger diagnostic
+ * snapshot in the argument.
+ * @param runSnapshot the accumulated snapshot
+ * @param localSnapshot The snapshot containing information with which
+ * to update the object.
+ */
+ public void updatePanel(DiagnosticSnapshot runSnapshot, DiagnosticSnapshot localSnapshot);
}
Modified: java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/EfficiencyTablePanel.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/EfficiencyTablePanel.java (original)
+++ java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/EfficiencyTablePanel.java Wed Apr 27 11:11:32 2016
@@ -11,200 +11,200 @@
import org.hps.analysis.trigger.util.ComponentUtils;
public class EfficiencyTablePanel extends AbstractTablePanel implements DiagnosticUpdatable {
- // Static variables.
- private static final long serialVersionUID = 0L;
-
- // Table models.
- private TableTextModel localModel;
- private TableTextModel globalModel;
-
- // Column/row reference variables.
- private static final int ROWS = 7;
- private static final int COLUMNS = 6;
- /* private static final int COL_HEADER = 0;
- private static final int COL_SINGLES_0 = 1;
- private static final int COL_SINGLES_1 = 2;
- private static final int COL_PAIR_0 = 3;
- private static final int COL_PAIR_1 = 4; */
- private static final int COL_COUNT = 5;
- /* private static final int ROW_HEADER = 0;
- private static final int ROW_PULSER = 1;
- private static final int ROW_COSMIC = 2;
- private static final int ROW_SINGLES_0 = 3;
- private static final int ROW_SINGLES_1 = 4;
- private static final int ROW_PAIR_0 = 5;
- private static final int ROW_PAIR_1 = 6; */
-
- // Global/local reference variables.
- private static final int GLOBAL = 0;
- private static final int LOCAL = 1;
-
- // Trigger type reference variables.
- private static final int TYPE_SINGLES_0 = TriggerStatModule.SINGLES_0;
- private static final int TYPE_SINGLES_1 = TriggerStatModule.SINGLES_1;
- private static final int TYPE_PAIR_0 = TriggerStatModule.PAIR_0;
- private static final int TYPE_PAIR_1 = TriggerStatModule.PAIR_1;
-
- // Column/row header names.
- private static final String[] COL_NAMES = {
- "", "Singles 0", "Singles 1", "Pair 0", "Pair 1", "Count"
- };
- private static final String[] ROW_NAMES = {
- "", "Singles 0", "Singles 1", "Pair 0", "Pair 1", "Pulser", "Cosmic"
- //"", "Random", "Cosmic", "Singles 0", "Singles 1", "Pair 0", "Pair 1"
- };
-
- /**
- * Instantiates a new <code>EfficiencyTablePanel</code>.
- */
- public EfficiencyTablePanel() {
- // Instantiate the superclass.
- super();
-
- // Set the orientation to vertical.
- setOrientation(ORIENTATION_VERTICAL);
- }
-
- @Override
- public void updatePanel(DiagnosticSnapshot runSnapshot, DiagnosticSnapshot localSnapshot) {
- // If there is no snapshot, the tables should all display an
- // empty value.
- if(runSnapshot == null || localSnapshot == null) {
- for(int row = 1; row < ROWS; row++) {
- for(int col = 1; col < COLUMNS; col++) {
- localModel.setValueAt("---", row, col);
- globalModel.setValueAt("---", row, col);
- }
- }
- }
-
- // Otherwise, update the table cells from the snapshot data.
- else {
- // Get the efficiency modules.
- DiagnosticSnapshot[] stat = new DiagnosticSnapshot[2];
- stat[GLOBAL] = runSnapshot;
- stat[LOCAL] = localSnapshot;
-
- // Get the trigger count for each trigger type for both the
- // local and global snapshots.
- int[][][] matched = new int[2][4][6];
- int[][][] triggers = new int[2][4][6];
- for(int i = 0; i < 2; i++) {
- for(int triggerType = 0; triggerType < 6; triggerType++) {
- // Get the total triggers seen for each type.
- triggers[i][TYPE_SINGLES_0][triggerType] = stat[i].getSingles0Stats().getSSPSimulatedTriggers(triggerType);
- triggers[i][TYPE_SINGLES_1][triggerType] = stat[i].getSingles1Stats().getSSPSimulatedTriggers(triggerType);
- triggers[i][TYPE_PAIR_0][triggerType] = stat[i].getPair0Stats().getSSPSimulatedTriggers(triggerType);
- triggers[i][TYPE_PAIR_1][triggerType] = stat[i].getPair1Stats().getSSPSimulatedTriggers(triggerType);
-
- // Get the total triggers matched for each type.
- matched[i][TYPE_SINGLES_0][triggerType] = stat[i].getSingles0Stats().getMatchedSSPSimulatedTriggers(triggerType);
- matched[i][TYPE_SINGLES_1][triggerType] = stat[i].getSingles1Stats().getMatchedSSPSimulatedTriggers(triggerType);
- matched[i][TYPE_PAIR_0][triggerType] = stat[i].getPair0Stats().getMatchedSSPSimulatedTriggers(triggerType);
- matched[i][TYPE_PAIR_1][triggerType] = stat[i].getPair1Stats().getMatchedSSPSimulatedTriggers(triggerType);
- }
- }
-
- // Determine the spacing needed to display the largest numerical
- // cell value.
- int numWidth = -1;
- for(int tiTriggerType = 0; tiTriggerType < 6; tiTriggerType++) {
- for(int seenTriggerType = 0; seenTriggerType < 4; seenTriggerType++) {
- int rSize = ComponentUtils.getDigits(triggers[GLOBAL][seenTriggerType][tiTriggerType]);
- int lSize = ComponentUtils.getDigits(triggers[LOCAL][seenTriggerType][tiTriggerType]);
- numWidth = ComponentUtils.max(numWidth, rSize, lSize);
- }
- }
-
- // Generate the format string for the cells.
- String format = "%" + numWidth + "d / %" + numWidth + "d";
-
- // Update the table.
- for(int tiTriggerType = 0; tiTriggerType < 6; tiTriggerType++) {
- // Fill the row/column combinations that hold trigger
- // statistical information.
- for(int seenTriggerType = 0; seenTriggerType < 4; seenTriggerType++) {
- // Fill the local table cell.
- String localText = String.format(format, matched[LOCAL][seenTriggerType][tiTriggerType],
- triggers[LOCAL][seenTriggerType][tiTriggerType]);
- if(triggers[LOCAL][seenTriggerType][tiTriggerType] == 0) {
- localText = localText + " ( N/A %)";
- } else {
- localText = String.format("%s (%7.3f%%)", localText,
- (100.0 * matched[LOCAL][seenTriggerType][tiTriggerType] / triggers[LOCAL][seenTriggerType][tiTriggerType]));
- }
- localModel.setValueAt(localText, tiTriggerType + 1, seenTriggerType + 1);
-
- // Fill the global table cell.
- String globalText = String.format(format, matched[GLOBAL][seenTriggerType][tiTriggerType],
- triggers[GLOBAL][seenTriggerType][tiTriggerType]);
- if(triggers[GLOBAL][seenTriggerType][tiTriggerType] == 0) {
- globalText = globalText + " ( N/A %)";
- } else {
- globalText = String.format("%s (%7.3f%%)", globalText,
- (100.0 * matched[GLOBAL][seenTriggerType][tiTriggerType] / triggers[GLOBAL][seenTriggerType][tiTriggerType]));
- }
- globalModel.setValueAt(globalText, tiTriggerType + 1, seenTriggerType + 1);
- }
-
- // Populate the count column.
- localModel.setValueAt("" + stat[LOCAL].getTITriggers(tiTriggerType, true), tiTriggerType + 1, COL_COUNT);
- globalModel.setValueAt("" + stat[GLOBAL].getTITriggers(tiTriggerType, true), tiTriggerType + 1, COL_COUNT);
- }
- }
- }
-
- @Override
- protected JTable[] initializeTables(Object... args) {
- // Initialize the table models. There should be one row and
- // one column for each type of trigger plus an additional one
- // of each for headers.
- localModel = new TableTextModel(ROWS, COLUMNS);
- globalModel = new TableTextModel(ROWS, COLUMNS);
-
- // Set the column headers.
- for(int col = 0; col < COLUMNS; col++) {
- localModel.setValueAt(COL_NAMES[col], 0, col);
- globalModel.setValueAt(COL_NAMES[col], 0, col);
- }
-
- // Set the row headers.
- for(int row = 0; row < ROWS; row++) {
- localModel.setValueAt(ROW_NAMES[row], row, 0);
- globalModel.setValueAt(ROW_NAMES[row], row, 0);
- }
-
- // Make a cell renderer.
- DefaultTableCellRenderer centerRenderer = new DefaultTableCellRenderer();
- centerRenderer.setHorizontalAlignment(JLabel.CENTER);
-
- // Create JTable objects to display the data.
- JTable localTable = new JTable(localModel);
- localTable.setRowSelectionAllowed(false);
- localTable.setColumnSelectionAllowed(false);
- localTable.setCellSelectionEnabled(false);
- localTable.setShowVerticalLines(false);
- localTable.getColumnModel().getColumn(0).setMaxWidth(150);
- localTable.getColumnModel().getColumn(COL_COUNT).setMaxWidth(150);
- for(int col = 1; col < COLUMNS; col++) {
- localTable.getColumnModel().getColumn(col).setCellRenderer(centerRenderer);
- }
- localTable.setFont(new Font("monospaced", localTable.getFont().getStyle(), localTable.getFont().getSize()));
-
- JTable globalTable = new JTable(globalModel);
- globalTable.setRowSelectionAllowed(false);
- globalTable.setColumnSelectionAllowed(false);
- globalTable.setCellSelectionEnabled(false);
- globalTable.setShowVerticalLines(false);
- globalTable.getColumnModel().getColumn(0).setMaxWidth(150);
- globalTable.getColumnModel().getColumn(COL_COUNT).setMaxWidth(150);
- for(int col = 1; col < COLUMNS; col++) {
- globalTable.getColumnModel().getColumn(col).setCellRenderer(centerRenderer);
- }
- globalTable.setFont(new Font("monospaced", globalTable.getFont().getStyle(), globalTable.getFont().getSize()));
-
- // Return the tables.
- return new JTable[] { localTable, globalTable };
- }
-
+ // Static variables.
+ private static final long serialVersionUID = 0L;
+
+ // Table models.
+ private TableTextModel localModel;
+ private TableTextModel globalModel;
+
+ // Column/row reference variables.
+ private static final int ROWS = 7;
+ private static final int COLUMNS = 6;
+ /* private static final int COL_HEADER = 0;
+ private static final int COL_SINGLES_0 = 1;
+ private static final int COL_SINGLES_1 = 2;
+ private static final int COL_PAIR_0 = 3;
+ private static final int COL_PAIR_1 = 4; */
+ private static final int COL_COUNT = 5;
+ /* private static final int ROW_HEADER = 0;
+ private static final int ROW_PULSER = 1;
+ private static final int ROW_COSMIC = 2;
+ private static final int ROW_SINGLES_0 = 3;
+ private static final int ROW_SINGLES_1 = 4;
+ private static final int ROW_PAIR_0 = 5;
+ private static final int ROW_PAIR_1 = 6; */
+
+ // Global/local reference variables.
+ private static final int GLOBAL = 0;
+ private static final int LOCAL = 1;
+
+ // Trigger type reference variables.
+ private static final int TYPE_SINGLES_0 = TriggerStatModule.SINGLES_0;
+ private static final int TYPE_SINGLES_1 = TriggerStatModule.SINGLES_1;
+ private static final int TYPE_PAIR_0 = TriggerStatModule.PAIR_0;
+ private static final int TYPE_PAIR_1 = TriggerStatModule.PAIR_1;
+
+ // Column/row header names.
+ private static final String[] COL_NAMES = {
+ "", "Singles 0", "Singles 1", "Pair 0", "Pair 1", "Count"
+ };
+ private static final String[] ROW_NAMES = {
+ "", "Singles 0", "Singles 1", "Pair 0", "Pair 1", "Pulser", "Cosmic"
+ //"", "Random", "Cosmic", "Singles 0", "Singles 1", "Pair 0", "Pair 1"
+ };
+
+ /**
+ * Instantiates a new <code>EfficiencyTablePanel</code>.
+ */
+ public EfficiencyTablePanel() {
+ // Instantiate the superclass.
+ super();
+
+ // Set the orientation to vertical.
+ setOrientation(ORIENTATION_VERTICAL);
+ }
+
+ @Override
+ public void updatePanel(DiagnosticSnapshot runSnapshot, DiagnosticSnapshot localSnapshot) {
+ // If there is no snapshot, the tables should all display an
+ // empty value.
+ if(runSnapshot == null || localSnapshot == null) {
+ for(int row = 1; row < ROWS; row++) {
+ for(int col = 1; col < COLUMNS; col++) {
+ localModel.setValueAt("---", row, col);
+ globalModel.setValueAt("---", row, col);
+ }
+ }
+ }
+
+ // Otherwise, update the table cells from the snapshot data.
+ else {
+ // Get the efficiency modules.
+ DiagnosticSnapshot[] stat = new DiagnosticSnapshot[2];
+ stat[GLOBAL] = runSnapshot;
+ stat[LOCAL] = localSnapshot;
+
+ // Get the trigger count for each trigger type for both the
+ // local and global snapshots.
+ int[][][] matched = new int[2][4][6];
+ int[][][] triggers = new int[2][4][6];
+ for(int i = 0; i < 2; i++) {
+ for(int triggerType = 0; triggerType < 6; triggerType++) {
+ // Get the total triggers seen for each type.
+ triggers[i][TYPE_SINGLES_0][triggerType] = stat[i].getSingles0Stats().getSSPSimulatedTriggers(triggerType);
+ triggers[i][TYPE_SINGLES_1][triggerType] = stat[i].getSingles1Stats().getSSPSimulatedTriggers(triggerType);
+ triggers[i][TYPE_PAIR_0][triggerType] = stat[i].getPair0Stats().getSSPSimulatedTriggers(triggerType);
+ triggers[i][TYPE_PAIR_1][triggerType] = stat[i].getPair1Stats().getSSPSimulatedTriggers(triggerType);
+
+ // Get the total triggers matched for each type.
+ matched[i][TYPE_SINGLES_0][triggerType] = stat[i].getSingles0Stats().getMatchedSSPSimulatedTriggers(triggerType);
+ matched[i][TYPE_SINGLES_1][triggerType] = stat[i].getSingles1Stats().getMatchedSSPSimulatedTriggers(triggerType);
+ matched[i][TYPE_PAIR_0][triggerType] = stat[i].getPair0Stats().getMatchedSSPSimulatedTriggers(triggerType);
+ matched[i][TYPE_PAIR_1][triggerType] = stat[i].getPair1Stats().getMatchedSSPSimulatedTriggers(triggerType);
+ }
+ }
+
+ // Determine the spacing needed to display the largest numerical
+ // cell value.
+ int numWidth = -1;
+ for(int tiTriggerType = 0; tiTriggerType < 6; tiTriggerType++) {
+ for(int seenTriggerType = 0; seenTriggerType < 4; seenTriggerType++) {
+ int rSize = ComponentUtils.getDigits(triggers[GLOBAL][seenTriggerType][tiTriggerType]);
+ int lSize = ComponentUtils.getDigits(triggers[LOCAL][seenTriggerType][tiTriggerType]);
+ numWidth = ComponentUtils.max(numWidth, rSize, lSize);
+ }
+ }
+
+ // Generate the format string for the cells.
+ String format = "%" + numWidth + "d / %" + numWidth + "d";
+
+ // Update the table.
+ for(int tiTriggerType = 0; tiTriggerType < 6; tiTriggerType++) {
+ // Fill the row/column combinations that hold trigger
+ // statistical information.
+ for(int seenTriggerType = 0; seenTriggerType < 4; seenTriggerType++) {
+ // Fill the local table cell.
+ String localText = String.format(format, matched[LOCAL][seenTriggerType][tiTriggerType],
+ triggers[LOCAL][seenTriggerType][tiTriggerType]);
+ if(triggers[LOCAL][seenTriggerType][tiTriggerType] == 0) {
+ localText = localText + " ( N/A %)";
+ } else {
+ localText = String.format("%s (%7.3f%%)", localText,
+ (100.0 * matched[LOCAL][seenTriggerType][tiTriggerType] / triggers[LOCAL][seenTriggerType][tiTriggerType]));
+ }
+ localModel.setValueAt(localText, tiTriggerType + 1, seenTriggerType + 1);
+
+ // Fill the global table cell.
+ String globalText = String.format(format, matched[GLOBAL][seenTriggerType][tiTriggerType],
+ triggers[GLOBAL][seenTriggerType][tiTriggerType]);
+ if(triggers[GLOBAL][seenTriggerType][tiTriggerType] == 0) {
+ globalText = globalText + " ( N/A %)";
+ } else {
+ globalText = String.format("%s (%7.3f%%)", globalText,
+ (100.0 * matched[GLOBAL][seenTriggerType][tiTriggerType] / triggers[GLOBAL][seenTriggerType][tiTriggerType]));
+ }
+ globalModel.setValueAt(globalText, tiTriggerType + 1, seenTriggerType + 1);
+ }
+
+ // Populate the count column.
+ localModel.setValueAt("" + stat[LOCAL].getTITriggers(tiTriggerType, true), tiTriggerType + 1, COL_COUNT);
+ globalModel.setValueAt("" + stat[GLOBAL].getTITriggers(tiTriggerType, true), tiTriggerType + 1, COL_COUNT);
+ }
+ }
+ }
+
+ @Override
+ protected JTable[] initializeTables(Object... args) {
+ // Initialize the table models. There should be one row and
+ // one column for each type of trigger plus an additional one
+ // of each for headers.
+ localModel = new TableTextModel(ROWS, COLUMNS);
+ globalModel = new TableTextModel(ROWS, COLUMNS);
+
+ // Set the column headers.
+ for(int col = 0; col < COLUMNS; col++) {
+ localModel.setValueAt(COL_NAMES[col], 0, col);
+ globalModel.setValueAt(COL_NAMES[col], 0, col);
+ }
+
+ // Set the row headers.
+ for(int row = 0; row < ROWS; row++) {
+ localModel.setValueAt(ROW_NAMES[row], row, 0);
+ globalModel.setValueAt(ROW_NAMES[row], row, 0);
+ }
+
+ // Make a cell renderer.
+ DefaultTableCellRenderer centerRenderer = new DefaultTableCellRenderer();
+ centerRenderer.setHorizontalAlignment(JLabel.CENTER);
+
+ // Create JTable objects to display the data.
+ JTable localTable = new JTable(localModel);
+ localTable.setRowSelectionAllowed(false);
+ localTable.setColumnSelectionAllowed(false);
+ localTable.setCellSelectionEnabled(false);
+ localTable.setShowVerticalLines(false);
+ localTable.getColumnModel().getColumn(0).setMaxWidth(150);
+ localTable.getColumnModel().getColumn(COL_COUNT).setMaxWidth(150);
+ for(int col = 1; col < COLUMNS; col++) {
+ localTable.getColumnModel().getColumn(col).setCellRenderer(centerRenderer);
+ }
+ localTable.setFont(new Font("monospaced", localTable.getFont().getStyle(), localTable.getFont().getSize()));
+
+ JTable globalTable = new JTable(globalModel);
+ globalTable.setRowSelectionAllowed(false);
+ globalTable.setColumnSelectionAllowed(false);
+ globalTable.setCellSelectionEnabled(false);
+ globalTable.setShowVerticalLines(false);
+ globalTable.getColumnModel().getColumn(0).setMaxWidth(150);
+ globalTable.getColumnModel().getColumn(COL_COUNT).setMaxWidth(150);
+ for(int col = 1; col < COLUMNS; col++) {
+ globalTable.getColumnModel().getColumn(col).setCellRenderer(centerRenderer);
+ }
+ globalTable.setFont(new Font("monospaced", globalTable.getFont().getStyle(), globalTable.getFont().getSize()));
+
+ // Return the tables.
+ return new JTable[] { localTable, globalTable };
+ }
+
}
Modified: java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/PairTablePanel.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/PairTablePanel.java (original)
+++ java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/PairTablePanel.java Wed Apr 27 11:11:32 2016
@@ -7,14 +7,14 @@
* @author Kyle McCarty
*/
public class PairTablePanel extends AbstractTriggerTablePanel {
- // Static variables.
- private static final long serialVersionUID = 0L;
- private static final String[] CUT_NAMES = { " Energy Sum:",
- " Energy Difference:", " Energy Slope:", " Coplanarity:" };
-
- /**
- * Instantiates a <code>PairTablePanel</code>.
- */
- public PairTablePanel() { super(CUT_NAMES, false); }
-
+ // Static variables.
+ private static final long serialVersionUID = 0L;
+ private static final String[] CUT_NAMES = { " Energy Sum:",
+ " Energy Difference:", " Energy Slope:", " Coplanarity:" };
+
+ /**
+ * Instantiates a <code>PairTablePanel</code>.
+ */
+ public PairTablePanel() { super(CUT_NAMES, false); }
+
}
Modified: java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/ShifterTrigPanel.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/ShifterTrigPanel.java (original)
+++ java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/ShifterTrigPanel.java Wed Apr 27 11:11:32 2016
@@ -12,291 +12,291 @@
import org.hps.analysis.trigger.util.ComponentUtils;
public class ShifterTrigPanel extends JPanel {
- private static final Color BG_WARNING = new Color(255, 235, 20);
- private static final Color BG_CRITICAL = new Color(230, 0, 0);
- private static final Color FONT_WARNING = new Color(255, 157, 0);
- private static final Color FONT_CRITICAL = new Color(117, 0, 0);
- private static final long serialVersionUID = 1L;
-
- private JLabel panelTitle;
- private JLabel[] fieldTitle;
- private JLabel[] fieldValue;
-
- /**
- * Instantiates a new <code>ShifterTrigPanel</code> with the
- * indicated name.
- */
- public ShifterTrigPanel(String name) {
- // Instantiate a layout for the fields.
- SpringLayout layout = new SpringLayout();
- setLayout(layout);
-
- // Instantiate the header title.
- panelTitle = new JLabel(name);
- panelTitle.setVerticalAlignment(JLabel.CENTER);
- panelTitle.setHorizontalAlignment(JLabel.CENTER);
- add(panelTitle);
-
- // Instantiate the field title labels.
- String[] titleName = { "Cluster Efficiency", "Singles 0 Logic Efficiency",
- "Singles 0 Trigger Efficiency", "Singles 1 Logic Efficiency",
- "Singles 1 Trigger Efficiency", "Pair 0 Logic Efficiency",
- "Pair 0 Trigger Efficiency", "Pair 1 Logic Efficiency", "Pair 1 Trigger Efficiency" };
- fieldTitle = new JLabel[titleName.length];
- for(int index = 0; index < titleName.length; index++) {
- fieldTitle[index] = new JLabel(titleName[index]);
- fieldTitle[index].setVerticalAlignment(JLabel.CENTER);
- fieldTitle[index].setHorizontalAlignment(JLabel.RIGHT);
- fieldTitle[index].setOpaque(true);
- add(fieldTitle[index]);
- }
-
- // Instantiate the field value labels.
- fieldValue = new JLabel[titleName.length];
- for(int index = 0; index < titleName.length; index++) {
- fieldValue[index] = new JLabel("");
- fieldValue[index].setVerticalAlignment(JLabel.CENTER);
- fieldValue[index].setHorizontalAlignment(JLabel.LEFT);
- fieldValue[index].setOpaque(true);
- add(fieldValue[index]);
- }
-
- // Get the longest title.
- int maxWidth = -1;
- int maxIndex = -1;
- for(int index = 0; index < titleName.length; index++) {
- int width = fieldTitle[index].getFontMetrics(fieldTitle[index].getFont()).stringWidth(titleName[index]);
- if(width > maxWidth) {
- maxWidth = width;
- maxIndex = index;
- }
- }
-
- // Define border edge and spacing variables.
- String EAST = SpringLayout.EAST;
- String WEST = SpringLayout.WEST;
- String NORTH = SpringLayout.NORTH;
- String SOUTH = SpringLayout.SOUTH;
- int hinternal = 5;
- int vinternal = 10;
- int hexternal = 5;
- int vexternal = 5;
-
- // Position the panel header.
- layout.putConstraint(EAST, panelTitle, hexternal, EAST, this);
- layout.putConstraint(WEST, panelTitle, hexternal, WEST, this);
- layout.putConstraint(NORTH, panelTitle, vexternal, NORTH, this);
-
- // Position the field entries.
- Component lastComp = panelTitle;
- for(int index = 0; index < titleName.length; index++) {
- // For all field titles except the largest, lock the right
- // edge of the title to match the position of the largest
- // title's right edge. The largest title is allowed to size
- // itself to its preferred width.
- if(index == maxIndex) {
- layout.putConstraint(NORTH, fieldTitle[index], vinternal, SOUTH, lastComp);
- layout.putConstraint(WEST, fieldTitle[index], hexternal, WEST, this);
- } else {
- layout.putConstraint(NORTH, fieldTitle[index], vinternal, SOUTH, lastComp);
- layout.putConstraint(WEST, fieldTitle[index], hexternal, WEST, this);
- layout.putConstraint(EAST, fieldTitle[index], 0, EAST, fieldTitle[maxIndex]);
- }
-
- // Position the field value label to the right of the field
- // title label. It should use up the remainder of the width
- // allowed by the component.
- layout.putConstraint(WEST, fieldValue[index], hinternal, EAST, fieldTitle[index]);
- layout.putConstraint(EAST, fieldValue[index], hexternal, EAST, this);
- layout.putConstraint(NORTH, fieldValue[index], vinternal, SOUTH, lastComp);
-
- // Update the "last component" to the current field title
- // label.
- lastComp = fieldTitle[index];
- }
-
- // Update the fonts.
- setFont(getFont());
- }
-
- @Override
- public void setBackground(Color bg) {
- // Set the superclass background.
- super.setBackground(bg);
-
- // Set the component backgrounds.
- if(panelTitle != null) {
- panelTitle.setBackground(bg);
- for(int index = 0; index < fieldTitle.length; index++) {
- fieldTitle[index].setBackground(bg);
-
- // If the field value label has a special alert color,
- // then do not overwrite it.
- if(!fieldValue[index].getBackground().equals(BG_WARNING)
- && !fieldValue[index].getBackground().equals(BG_CRITICAL)) {
- fieldValue[index].setBackground(bg);
- }
- }
- }
- }
-
- @Override
- public void setFont(Font font) {
- // Set the superclass font.
- super.setFont(font);
-
- // Set the component fonts.
- if(panelTitle != null) {
- panelTitle.setFont(font.deriveFont(Font.BOLD, (float) (font.getSize2D() * 1.5)));
- for(int index = 0; index < fieldTitle.length; index++) {
- fieldTitle[index].setFont(font.deriveFont(Font.BOLD));
- fieldValue[index].setFont(font);
- }
- }
- }
-
- @Override
- public void setForeground(Color fg) {
- // Set the superclass foreground.
- super.setForeground(fg);
-
- // Set the component backgrounds.
- if(panelTitle != null) {
- panelTitle.setForeground(fg);
- for(int index = 0; index < fieldTitle.length; index++) {
- fieldTitle[index].setForeground(fg);
-
- // If the field value label has a special alert color,
- // then do not overwrite it.
- if(!fieldValue[index].getForeground().equals(FONT_WARNING)
- && !fieldValue[index].getForeground().equals(FONT_CRITICAL)) {
- fieldValue[index].setBackground(fg);
- }
- }
- }
- }
-
- /**
- * Updates the panel statistical display with data from the
- * argument snapshot.
- * @param stat - The snapshot from which to derive statistical
- * data.
- */
- public void updatePanel(DiagnosticSnapshot stat) {
- // If the snapshot is null, insert "null" values in the
- // field panels,
- if(stat == null) {
- // Populate the fields with a "null" entry.
- for(int index = 0; index < fieldValue.length; index++) {
- fieldValue[index].setText("--- / --- ( N/A %)");
- }
-
- // No data exists, so no further processing is needed.
- return;
- }
-
- // Define index constants.
- int RECON = 0;
- int SSP = 1;
- int TRIGGER_0 = 0;
- int TRIGGER_1 = 1;
-
- // Get the tracked values from the snapshot.
- int seenClusters = stat.getClusterStats().getReconClusterCount();
- int[][] seenSinglesTriggers = {
- { stat.getSingles0Stats().getReconSimulatedTriggers(), stat.getSingles1Stats().getReconSimulatedTriggers() },
- { stat.getSingles0Stats().getSSPSimulatedTriggers(), stat.getSingles1Stats().getSSPSimulatedTriggers() }
- };
- int[][] seenPairTriggers = {
+ private static final Color BG_WARNING = new Color(255, 235, 20);
+ private static final Color BG_CRITICAL = new Color(230, 0, 0);
+ private static final Color FONT_WARNING = new Color(255, 157, 0);
+ private static final Color FONT_CRITICAL = new Color(117, 0, 0);
+ private static final long serialVersionUID = 1L;
+
+ private JLabel panelTitle;
+ private JLabel[] fieldTitle;
+ private JLabel[] fieldValue;
+
+ /**
+ * Instantiates a new <code>ShifterTrigPanel</code> with the
+ * indicated name.
+ */
+ public ShifterTrigPanel(String name) {
+ // Instantiate a layout for the fields.
+ SpringLayout layout = new SpringLayout();
+ setLayout(layout);
+
+ // Instantiate the header title.
+ panelTitle = new JLabel(name);
+ panelTitle.setVerticalAlignment(JLabel.CENTER);
+ panelTitle.setHorizontalAlignment(JLabel.CENTER);
+ add(panelTitle);
+
+ // Instantiate the field title labels.
+ String[] titleName = { "Cluster Efficiency", "Singles 0 Logic Efficiency",
+ "Singles 0 Trigger Efficiency", "Singles 1 Logic Efficiency",
+ "Singles 1 Trigger Efficiency", "Pair 0 Logic Efficiency",
+ "Pair 0 Trigger Efficiency", "Pair 1 Logic Efficiency", "Pair 1 Trigger Efficiency" };
+ fieldTitle = new JLabel[titleName.length];
+ for(int index = 0; index < titleName.length; index++) {
+ fieldTitle[index] = new JLabel(titleName[index]);
+ fieldTitle[index].setVerticalAlignment(JLabel.CENTER);
+ fieldTitle[index].setHorizontalAlignment(JLabel.RIGHT);
+ fieldTitle[index].setOpaque(true);
+ add(fieldTitle[index]);
+ }
+
+ // Instantiate the field value labels.
+ fieldValue = new JLabel[titleName.length];
+ for(int index = 0; index < titleName.length; index++) {
+ fieldValue[index] = new JLabel("");
+ fieldValue[index].setVerticalAlignment(JLabel.CENTER);
+ fieldValue[index].setHorizontalAlignment(JLabel.LEFT);
+ fieldValue[index].setOpaque(true);
+ add(fieldValue[index]);
+ }
+
+ // Get the longest title.
+ int maxWidth = -1;
+ int maxIndex = -1;
+ for(int index = 0; index < titleName.length; index++) {
+ int width = fieldTitle[index].getFontMetrics(fieldTitle[index].getFont()).stringWidth(titleName[index]);
+ if(width > maxWidth) {
+ maxWidth = width;
+ maxIndex = index;
+ }
+ }
+
+ // Define border edge and spacing variables.
+ String EAST = SpringLayout.EAST;
+ String WEST = SpringLayout.WEST;
+ String NORTH = SpringLayout.NORTH;
+ String SOUTH = SpringLayout.SOUTH;
+ int hinternal = 5;
+ int vinternal = 10;
+ int hexternal = 5;
+ int vexternal = 5;
+
+ // Position the panel header.
+ layout.putConstraint(EAST, panelTitle, hexternal, EAST, this);
+ layout.putConstraint(WEST, panelTitle, hexternal, WEST, this);
+ layout.putConstraint(NORTH, panelTitle, vexternal, NORTH, this);
+
+ // Position the field entries.
+ Component lastComp = panelTitle;
+ for(int index = 0; index < titleName.length; index++) {
+ // For all field titles except the largest, lock the right
+ // edge of the title to match the position of the largest
+ // title's right edge. The largest title is allowed to size
+ // itself to its preferred width.
+ if(index == maxIndex) {
+ layout.putConstraint(NORTH, fieldTitle[index], vinternal, SOUTH, lastComp);
+ layout.putConstraint(WEST, fieldTitle[index], hexternal, WEST, this);
+ } else {
+ layout.putConstraint(NORTH, fieldTitle[index], vinternal, SOUTH, lastComp);
+ layout.putConstraint(WEST, fieldTitle[index], hexternal, WEST, this);
+ layout.putConstraint(EAST, fieldTitle[index], 0, EAST, fieldTitle[maxIndex]);
+ }
+
+ // Position the field value label to the right of the field
+ // title label. It should use up the remainder of the width
+ // allowed by the component.
+ layout.putConstraint(WEST, fieldValue[index], hinternal, EAST, fieldTitle[index]);
+ layout.putConstraint(EAST, fieldValue[index], hexternal, EAST, this);
+ layout.putConstraint(NORTH, fieldValue[index], vinternal, SOUTH, lastComp);
+
+ // Update the "last component" to the current field title
+ // label.
+ lastComp = fieldTitle[index];
+ }
+
+ // Update the fonts.
+ setFont(getFont());
+ }
+
+ @Override
+ public void setBackground(Color bg) {
+ // Set the superclass background.
+ super.setBackground(bg);
+
+ // Set the component backgrounds.
+ if(panelTitle != null) {
+ panelTitle.setBackground(bg);
+ for(int index = 0; index < fieldTitle.length; index++) {
+ fieldTitle[index].setBackground(bg);
+
+ // If the field value label has a special alert color,
+ // then do not overwrite it.
+ if(!fieldValue[index].getBackground().equals(BG_WARNING)
+ && !fieldValue[index].getBackground().equals(BG_CRITICAL)) {
+ fieldValue[index].setBackground(bg);
+ }
+ }
+ }
+ }
+
+ @Override
+ public void setFont(Font font) {
+ // Set the superclass font.
+ super.setFont(font);
+
+ // Set the component fonts.
+ if(panelTitle != null) {
+ panelTitle.setFont(font.deriveFont(Font.BOLD, (float) (font.getSize2D() * 1.5)));
+ for(int index = 0; index < fieldTitle.length; index++) {
+ fieldTitle[index].setFont(font.deriveFont(Font.BOLD));
+ fieldValue[index].setFont(font);
+ }
+ }
+ }
+
+ @Override
+ public void setForeground(Color fg) {
+ // Set the superclass foreground.
+ super.setForeground(fg);
+
+ // Set the component backgrounds.
+ if(panelTitle != null) {
+ panelTitle.setForeground(fg);
+ for(int index = 0; index < fieldTitle.length; index++) {
+ fieldTitle[index].setForeground(fg);
+
+ // If the field value label has a special alert color,
+ // then do not overwrite it.
+ if(!fieldValue[index].getForeground().equals(FONT_WARNING)
+ && !fieldValue[index].getForeground().equals(FONT_CRITICAL)) {
+ fieldValue[index].setBackground(fg);
+ }
+ }
+ }
+ }
+
+ /**
+ * Updates the panel statistical display with data from the
+ * argument snapshot.
+ * @param stat - The snapshot from which to derive statistical
+ * data.
+ */
+ public void updatePanel(DiagnosticSnapshot stat) {
+ // If the snapshot is null, insert "null" values in the
+ // field panels.
+ if(stat == null) {
+ // Populate the fields with a "null" entry.
+ for(int index = 0; index < fieldValue.length; index++) {
+ fieldValue[index].setText("--- / --- ( N/A %)");
+ }
+
+ // No data exists, so no further processing is needed.
+ return;
+ }
+
+ // Define index constants.
+ int RECON = 0;
+ int SSP = 1;
+ int TRIGGER_0 = 0;
+ int TRIGGER_1 = 1;
+
+ // Get the tracked values from the snapshot.
+ int seenClusters = stat.getClusterStats().getReconClusterCount();
+ int[][] seenSinglesTriggers = {
+ { stat.getSingles0Stats().getReconSimulatedTriggers(), stat.getSingles1Stats().getReconSimulatedTriggers() },
+ { stat.getSingles0Stats().getSSPSimulatedTriggers(), stat.getSingles1Stats().getSSPSimulatedTriggers() }
+ };
+ int[][] seenPairTriggers = {
{ stat.getPair0Stats().getReconSimulatedTriggers(), stat.getPair1Stats().getReconSimulatedTriggers() },
{ stat.getPair0Stats().getSSPSimulatedTriggers(), stat.getPair1Stats().getSSPSimulatedTriggers() }
- };
- int matchedClusters = stat.getClusterStats().getMatches();
- int[][] matchedSinglesTriggers = {
+ };
+ int matchedClusters = stat.getClusterStats().getMatches();
+ int[][] matchedSinglesTriggers = {
{ stat.getSingles0Stats().getMatchedReconSimulatedTriggers(), stat.getSingles1Stats().getMatchedReconSimulatedTriggers() },
{ stat.getSingles0Stats().getMatchedSSPSimulatedTriggers(), stat.getSingles1Stats().getMatchedSSPSimulatedTriggers() }
- };
- int[][] matchedPairTriggers = {
+ };
+ int[][] matchedPairTriggers = {
{ stat.getPair0Stats().getMatchedReconSimulatedTriggers(), stat.getPair1Stats().getMatchedReconSimulatedTriggers() },
{ stat.getPair0Stats().getMatchedSSPSimulatedTriggers(), stat.getPair1Stats().getMatchedSSPSimulatedTriggers() }
- };
-
- // Get the largest digit of the tracked values. This should
- // always be one of the "seen" values.
- int mostDigits = ComponentUtils.max(seenClusters, seenSinglesTriggers[0][0], seenSinglesTriggers[0][1],
- seenSinglesTriggers[1][0], seenSinglesTriggers[1][1], seenPairTriggers[0][0], seenPairTriggers[0][1],
- seenPairTriggers[1][0], seenPairTriggers[1][1]);
- int spaces = ComponentUtils.getDigits(mostDigits);
-
- // Populate the cluster field panel.
- processEfficiency(seenClusters, matchedClusters, 0, spaces, 0.98, 0.94);
-
- // Populate the singles trigger field panels.
- processEfficiency(seenSinglesTriggers[RECON][TRIGGER_0], matchedSinglesTriggers[RECON][TRIGGER_0], 1, spaces, 0.99, 0.95);
+ };
+
+ // Get the largest digit of the tracked values. This should
+ // always be one of the "seen" values.
+ int mostDigits = ComponentUtils.max(seenClusters, seenSinglesTriggers[0][0], seenSinglesTriggers[0][1],
+ seenSinglesTriggers[1][0], seenSinglesTriggers[1][1], seenPairTriggers[0][0], seenPairTriggers[0][1],
+ seenPairTriggers[1][0], seenPairTriggers[1][1]);
+ int spaces = ComponentUtils.getDigits(mostDigits);
+
+ // Populate the cluster field panel.
+ processEfficiency(seenClusters, matchedClusters, 0, spaces, 0.98, 0.94);
+
+ // Populate the singles trigger field panels.
+ processEfficiency(seenSinglesTriggers[RECON][TRIGGER_0], matchedSinglesTriggers[RECON][TRIGGER_0], 1, spaces, 0.99, 0.95);
processEfficiency(seenSinglesTriggers[SSP][TRIGGER_0], matchedSinglesTriggers[SSP][TRIGGER_0], 2, spaces, 0.99, 0.95);
processEfficiency(seenSinglesTriggers[RECON][TRIGGER_1], matchedSinglesTriggers[RECON][TRIGGER_1], 3, spaces, 0.99, 0.95);
processEfficiency(seenSinglesTriggers[SSP][TRIGGER_1], matchedSinglesTriggers[SSP][TRIGGER_1], 4, spaces, 0.99, 0.95);
-
- // Populate the pair trigger field panels.
- processEfficiency(seenPairTriggers[RECON][TRIGGER_0], matchedPairTriggers[RECON][TRIGGER_0], 5, spaces, 0.99, 0.95);
+
+ // Populate the pair trigger field panels.
+ processEfficiency(seenPairTriggers[RECON][TRIGGER_0], matchedPairTriggers[RECON][TRIGGER_0], 5, spaces, 0.99, 0.95);
processEfficiency(seenPairTriggers[SSP][TRIGGER_0], matchedPairTriggers[SSP][TRIGGER_0], 6, spaces, 0.99, 0.95);
processEfficiency(seenPairTriggers[RECON][TRIGGER_1], matchedPairTriggers[RECON][TRIGGER_1], 7, spaces, 0.99, 0.95);
processEfficiency(seenPairTriggers[SSP][TRIGGER_1], matchedPairTriggers[SSP][TRIGGER_1], 8, spaces, 0.99, 0.95);
- }
-
- /**
- * Updates the indicated field value using the indicated number
- * seen and matched elements. Automatically handles the special
- * case of zero seen elements and also updates the colors of the
- * field labels to the appropriate color based on the efficiency
- * and the thresholds for warnings.
- * @param seen - The number of elements seen.
- * @param matched - The number of elements matched.
- * @param fieldIndex - The index for the field that should display
- * the statistical data.
- * @param spaces - The number of spaces to giveto each displayed
- * value.
- * @param threshWarning - The threshold at which the "warning
- * color should be used.
- * @param threshCritical - The threshold at which the "critical"
- * color should be used.
- */
- private void processEfficiency(int seen, int matched, int fieldIndex, int spaces, double threshWarning, double threshCritical) {
- // Calculate the efficiency.
- double efficiency = 100.0 * matched / seen;
-
- // Create the format string.
- String format = "%" + spaces + "d / %";
-
- // If the number of values seen is zero, there is no
- // percentage that can be calculated.
- if(seen == 0) {
- fieldValue[fieldIndex].setText(String.format(format + " ( N/A %%)", seen, matched));
- }
-
- // Otherwise, include the percentage.
- else {
- fieldValue[fieldIndex].setText(String.format(format + " (7.3f%%)", seen, matched, efficiency));
- }
-
- // If the efficiency is below the critical threshold,
- // change the field background to the critical color.
- if(efficiency < threshCritical) {
- fieldValue[fieldIndex].setBackground(BG_CRITICAL);
- fieldValue[fieldIndex].setForeground(FONT_CRITICAL);
- }
-
- // Otherwise, if the efficiency is below the warning
- // level, set the field background to the warning color.
- else if(efficiency < threshWarning) {
- fieldValue[fieldIndex].setBackground(BG_WARNING);
- fieldValue[fieldIndex].setForeground(FONT_WARNING);
- }
-
- // Otherwise, use the default component background.
- else {
- fieldValue[fieldIndex].setBackground(getBackground());
- fieldValue[fieldIndex].setForeground(getForeground());
- }
- }
+ }
+
+ /**
+ * Updates the indicated field value using the indicated number
+ * seen and matched elements. Automatically handles the special
+ * case of zero seen elements and also updates the colors of the
+ * field labels to the appropriate color based on the efficiency
+ * and the thresholds for warnings.
+ * @param seen - The number of elements seen.
+ * @param matched - The number of elements matched.
+ * @param fieldIndex - The index for the field that should display
+ * the statistical data.
+ * @param spaces - The number of spaces to giveto each displayed
+ * value.
+ * @param threshWarning - The threshold at which the "warning
+ * color should be used.
+ * @param threshCritical - The threshold at which the "critical"
+ * color should be used.
+ */
+ private void processEfficiency(int seen, int matched, int fieldIndex, int spaces, double threshWarning, double threshCritical) {
+ // Calculate the efficiency.
+ double efficiency = 100.0 * matched / seen;
+
+ // Create the format string.
+ String format = "%" + spaces + "d / %";
+
+ // If the number of values seen is zero, there is no
+ // percentage that can be calculated.
+ if(seen == 0) {
+ fieldValue[fieldIndex].setText(String.format(format + " ( N/A %%)", seen, matched));
+ }
+
+ // Otherwise, include the percentage.
+ else {
+ fieldValue[fieldIndex].setText(String.format(format + " (7.3f%%)", seen, matched, efficiency));
+ }
+
+ // If the efficiency is below the critical threshold,
+ // change the field background to the critical color.
+ if(efficiency < threshCritical) {
+ fieldValue[fieldIndex].setBackground(BG_CRITICAL);
+ fieldValue[fieldIndex].setForeground(FONT_CRITICAL);
+ }
+
+ // Otherwise, if the efficiency is below the warning
+ // level, set the field background to the warning color.
+ else if(efficiency < threshWarning) {
+ fieldValue[fieldIndex].setBackground(BG_WARNING);
+ fieldValue[fieldIndex].setForeground(FONT_WARNING);
+ }
+
+ // Otherwise, use the default component background.
+ else {
+ fieldValue[fieldIndex].setBackground(getBackground());
+ fieldValue[fieldIndex].setForeground(getForeground());
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/ShifterTrigWindow.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/ShifterTrigWindow.java (original)
+++ java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/ShifterTrigWindow.java Wed Apr 27 11:11:32 2016
@@ -12,25 +12,25 @@
* to either yellow or red if the efficiencies drop too low.
*/
public class ShifterTrigWindow extends JPanel implements DiagnosticUpdatable {
- private static final long serialVersionUID = 1L;
- private ShifterTrigPanel localPanel = new ShifterTrigPanel("Instantaneous");
- private ShifterTrigPanel globalPanel = new ShifterTrigPanel("Run-Integrated");
-
- /**
- * Instantiates a new panel for displaying basic information
- * pertaining to trigger diagnostics.
- */
- public ShifterTrigWindow() {
- setLayout(new GridLayout(1, 2));
- add(localPanel);
- add(globalPanel);
- updatePanel(null, null);
- }
-
- @Override
- public void updatePanel(DiagnosticSnapshot runSnapshot, DiagnosticSnapshot localSnapshot) {
- // Update each panel with the appropriate snapshot.
- localPanel.updatePanel(localSnapshot);
- globalPanel.updatePanel(runSnapshot);
- }
+ private static final long serialVersionUID = 1L;
+ private ShifterTrigPanel localPanel = new ShifterTrigPanel("Instantaneous");
+ private ShifterTrigPanel globalPanel = new ShifterTrigPanel("Run-Integrated");
+
+ /**
+ * Instantiates a new panel for displaying basic information
+ * pertaining to trigger diagnostics.
+ */
+ public ShifterTrigWindow() {
+ setLayout(new GridLayout(1, 2));
+ add(localPanel);
+ add(globalPanel);
+ updatePanel(null, null);
+ }
+
+ @Override
+ public void updatePanel(DiagnosticSnapshot runSnapshot, DiagnosticSnapshot localSnapshot) {
+ // Update each panel with the appropriate snapshot.
+ localPanel.updatePanel(localSnapshot);
+ globalPanel.updatePanel(runSnapshot);
+ }
}
Modified: java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/SinglesTablePanel.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/SinglesTablePanel.java (original)
+++ java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/SinglesTablePanel.java Wed Apr 27 11:11:32 2016
@@ -7,14 +7,14 @@
* @author Kyle McCarty
*/
public class SinglesTablePanel extends AbstractTriggerTablePanel {
- // Static variables.
- private static final long serialVersionUID = 0L;
- private static final String[] CUT_NAMES = { " Cluster Energy (Low):",
- " Cluster Energy (High):", " Hit Count:" };
-
- /**
- * Instantiates a <code>SinglesTablePanel</code>.
- */
- public SinglesTablePanel() { super(CUT_NAMES, true); }
-
+ // Static variables.
+ private static final long serialVersionUID = 0L;
+ private static final String[] CUT_NAMES = { " Cluster Energy (Low):",
+ " Cluster Energy (High):", " Hit Count:" };
+
+ /**
+ * Instantiates a <code>SinglesTablePanel</code>.
+ */
+ public SinglesTablePanel() { super(CUT_NAMES, true); }
+
}
Modified: java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/TableTextModel.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/TableTextModel.java (original)
+++ java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/TableTextModel.java Wed Apr 27 11:11:32 2016
@@ -11,96 +11,96 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public class TableTextModel extends AbstractTableModel {
- // Serial UID.
- private static final long serialVersionUID = 0L;
-
- // Stored values.
- private final int rows, columns;
- private final String[][] values;
-
- /**
- * Instantiates a new <code>TableTextModel</code> with the indicated
- * number of rows and columns.
- * @param rows - The number of rows.
- * @param columns - The number of columns.
- */
- public TableTextModel(int rows, int columns) {
- // Make sure that the arguments for rows and columns are valid.
- if(rows < 1) {
- throw new IllegalArgumentException("TableTextModel must have at least one row.");
- } else if(columns < 1) {
- throw new IllegalArgumentException("TableTextModel must have at least one column.");
- }
-
- // Define the number of rows and columns.
- this.rows = rows;
- this.columns = columns;
-
- // Instantiate the data storage array.
- values = new String[rows][columns];
- }
-
- @Override
- public int getRowCount() { return rows; }
-
- @Override
- public int getColumnCount() { return columns; }
-
- @Override
- public Object getValueAt(int rowIndex, int columnIndex) {
- // Ensure that the value is within the allowed range.
- validateIndex(rowIndex, columnIndex);
-
- // Return the value.
- return values[rowIndex][columnIndex];
- }
-
- @Override
- public void setValueAt(Object value, int rowIndex, int columnIndex) {
- // If the object is a string, pass it to the preferred handler.
- // This can also be performed if the value is null.
- if(value == null || value instanceof String) {
- setValueAt((String) value, rowIndex, columnIndex);
- }
-
- // Otherwise, cast the object to a string and use that instead.
- else { setValueAt(value.toString(), rowIndex, columnIndex); }
- }
-
- /**
- * Sets the text for the indicated column and row of the table.
- * @param value - The new text.
- * @param rowIndex - The row.
- * @param columnIndex - The column.
- * @throws IndexOutOfBoundsException Occurs if the row and column
- * are not a valid member of table model.
- */
- public void setValueAt(String value, int rowIndex, int columnIndex) throws IndexOutOfBoundsException {
- // Ensure that the value is within the allowed range.
- validateIndex(rowIndex, columnIndex);
-
- // Set the value.
- values[rowIndex][columnIndex] = value;
-
- // Update the table.
- this.fireTableCellUpdated(rowIndex, columnIndex);
- }
-
- /**
- * Checks to make sure that a given row/column pointer refers to
- * an extant position in the data array. In the event that the row
- * and column values are not valid, an <code>IndexOutOfBounds</code>
- * exception is thrown.
- * @param rowIndex - The row index.
- * @param columnIndex - The column index.
- * @throws IndexOutOfBoundsException Occurs if the row and column
- * are not a valid member of the data array.
- */
- private void validateIndex(int rowIndex, int columnIndex) throws IndexOutOfBoundsException {
- if(rowIndex < 0 || rowIndex >= getRowCount()) {
- throw new IndexOutOfBoundsException(String.format("Row index %d is out of bounds.", rowIndex));
- } else if(columnIndex < 0 || columnIndex >= getColumnCount()) {
- throw new IndexOutOfBoundsException(String.format("Column index %d is out of bounds.", columnIndex));
- }
- }
+ // Serial UID.
+ private static final long serialVersionUID = 0L;
+
+ // Stored values.
+ private final int rows, columns;
+ private final String[][] values;
+
+ /**
+ * Instantiates a new <code>TableTextModel</code> with the indicated
+ * number of rows and columns.
+ * @param rows - The number of rows.
+ * @param columns - The number of columns.
+ */
+ public TableTextModel(int rows, int columns) {
+ // Make sure that the arguments for rows and columns are valid.
+ if(rows < 1) {
+ throw new IllegalArgumentException("TableTextModel must have at least one row.");
+ } else if(columns < 1) {
+ throw new IllegalArgumentException("TableTextModel must have at least one column.");
+ }
+
+ // Define the number of rows and columns.
+ this.rows = rows;
+ this.columns = columns;
+
+ // Instantiate the data storage array.
+ values = new String[rows][columns];
+ }
+
+ @Override
+ public int getRowCount() { return rows; }
+
+ @Override
+ public int getColumnCount() { return columns; }
+
+ @Override
+ public Object getValueAt(int rowIndex, int columnIndex) {
+ // Ensure that the value is within the allowed range.
+ validateIndex(rowIndex, columnIndex);
+
+ // Return the value.
+ return values[rowIndex][columnIndex];
+ }
+
+ @Override
+ public void setValueAt(Object value, int rowIndex, int columnIndex) {
+ // If the object is a string, pass it to the preferred handler.
+ // This can also be performed if the value is null.
+ if(value == null || value instanceof String) {
+ setValueAt((String) value, rowIndex, columnIndex);
+ }
+
+ // Otherwise, cast the object to a string and use that instead.
+ else { setValueAt(value.toString(), rowIndex, columnIndex); }
+ }
+
+ /**
+ * Sets the text for the indicated column and row of the table.
+ * @param value - The new text.
+ * @param rowIndex - The row.
+ * @param columnIndex - The column.
+ * @throws IndexOutOfBoundsException Occurs if the row and column
+ * are not a valid member of table model.
+ */
+ public void setValueAt(String value, int rowIndex, int columnIndex) throws IndexOutOfBoundsException {
+ // Ensure that the value is within the allowed range.
+ validateIndex(rowIndex, columnIndex);
+
+ // Set the value.
+ values[rowIndex][columnIndex] = value;
+
+ // Update the table.
+ this.fireTableCellUpdated(rowIndex, columnIndex);
+ }
+
+ /**
+ * Checks to make sure that a given row/column pointer refers to
+ * an extant position in the data array. In the event that the row
+ * and column values are not valid, an <code>IndexOutOfBounds</code>
+ * exception is thrown.
+ * @param rowIndex - The row index.
+ * @param columnIndex - The column index.
+ * @throws IndexOutOfBoundsException Occurs if the row and column
+ * are not a valid member of the data array.
+ */
+ private void validateIndex(int rowIndex, int columnIndex) throws IndexOutOfBoundsException {
+ if(rowIndex < 0 || rowIndex >= getRowCount()) {
+ throw new IndexOutOfBoundsException(String.format("Row index %d is out of bounds.", rowIndex));
+ } else if(columnIndex < 0 || columnIndex >= getColumnCount()) {
+ throw new IndexOutOfBoundsException(String.format("Column index %d is out of bounds.", columnIndex));
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/TriggerDiagnosticGUIDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/TriggerDiagnosticGUIDriver.java (original)
+++ java/branches/HPSJAVA-409/monitoring-util/src/main/java/org/hps/monitoring/trigger/TriggerDiagnosticGUIDriver.java Wed Apr 27 11:11:32 2016
@@ -9,45 +9,45 @@
import org.lcsim.util.Driver;
public class TriggerDiagnosticGUIDriver extends Driver {
- private JFrame window = new JFrame();
- private ClusterTablePanel clusterTable = new ClusterTablePanel();
- private SinglesTablePanel singlesTable = new SinglesTablePanel();
- private PairTablePanel pairTable = new PairTablePanel();
- private EfficiencyTablePanel efficiencyTable = new EfficiencyTablePanel();
- private String diagnosticCollectionName = "DiagnosticSnapshot";
-
- @Override
- public void startOfData() {
- window.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
- window.setSize(500, 400);
- //window.add(clusterTable);
- //window.add(singlesTable);
- //window.add(pairTable);
- window.add(efficiencyTable);
- window.setVisible(true);
- }
-
- @Override
- public void process(EventHeader event) {
- // Updates are only performed if a diagnostic snapshot object
- // exists. Otherwise, do nothing.
- if(event.hasCollection(DiagnosticSnapshot.class, diagnosticCollectionName)) {
- // Get the snapshot collection.
- List<DiagnosticSnapshot> snapshotList = event.get(DiagnosticSnapshot.class, diagnosticCollectionName);
-
- // Get the snapshot. There will only ever be one.
- DiagnosticSnapshot runSnapshot = snapshotList.get(1);
- DiagnosticSnapshot localSnapshot = snapshotList.get(0);
-
- // Feed it to the table.
- clusterTable.updatePanel(runSnapshot, localSnapshot);
- singlesTable.updatePanel(runSnapshot, localSnapshot);
- pairTable.updatePanel(runSnapshot, localSnapshot);
- efficiencyTable.updatePanel(runSnapshot, localSnapshot);
- }
- }
-
- public void setDiagnosticCollectionName(String name) {
- diagnosticCollectionName = name;
- }
+ private JFrame window = new JFrame();
+ private ClusterTablePanel clusterTable = new ClusterTablePanel();
+ private SinglesTablePanel singlesTable = new SinglesTablePanel();
+ private PairTablePanel pairTable = new PairTablePanel();
+ private EfficiencyTablePanel efficiencyTable = new EfficiencyTablePanel();
+ private String diagnosticCollectionName = "DiagnosticSnapshot";
+
+ @Override
+ public void startOfData() {
+ window.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
+ window.setSize(500, 400);
+ //window.add(clusterTable);
+ //window.add(singlesTable);
+ //window.add(pairTable);
+ window.add(efficiencyTable);
+ window.setVisible(true);
+ }
+
+ @Override
+ public void process(EventHeader event) {
+ // Updates are only performed if a diagnostic snapshot object
+ // exists. Otherwise, do nothing.
+ if(event.hasCollection(DiagnosticSnapshot.class, diagnosticCollectionName)) {
+ // Get the snapshot collection.
+ List<DiagnosticSnapshot> snapshotList = event.get(DiagnosticSnapshot.class, diagnosticCollectionName);
+
+ // Get the snapshot. There will only ever be one.
+ DiagnosticSnapshot runSnapshot = snapshotList.get(1);
+ DiagnosticSnapshot localSnapshot = snapshotList.get(0);
+
+ // Feed it to the table.
+ clusterTable.updatePanel(runSnapshot, localSnapshot);
+ singlesTable.updatePanel(runSnapshot, localSnapshot);
+ pairTable.updatePanel(runSnapshot, localSnapshot);
+ efficiencyTable.updatePanel(runSnapshot, localSnapshot);
+ }
+ }
+
+ public void setDiagnosticCollectionName(String name) {
+ diagnosticCollectionName = name;
+ }
}
Modified: java/branches/HPSJAVA-409/parent/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/parent/pom.xml (original)
+++ java/branches/HPSJAVA-409/parent/pom.xml Wed Apr 27 11:11:32 2016
@@ -6,13 +6,14 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<packaging>pom</packaging>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
<name>parent</name>
<description>HPS Java parent POM</description>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<org.lcsim.cacheDir>${user.home}</org.lcsim.cacheDir>
- <lcsimVersion>3.1.6-SNAPSHOT</lcsimVersion>
+ <lcsimVersion>3.6</lcsimVersion>
<skipSite>false</skipSite>
<skipPlugin>false</skipPlugin>
</properties>
@@ -35,17 +36,22 @@
</repository>
<repository>
<id>freehep-repo-public</id>
- <name>FreeHEP Maven Public</name>
+ <name>FreeHEP</name>
<url>http://srs.slac.stanford.edu/nexus/content/groups/freehep-maven2-public/</url>
</repository>
<repository>
+ <id>srs-repo-public</id>
+ <name>SRS</name>
+ <url>http://srs.slac.stanford.edu/nexus/content/groups/srs-maven2-public/</url>
+ </repository>
+ <repository>
<id>lcsim-repo-public</id>
- <name>LCSIM Public Maven Repository</name>
+ <name>LCSim</name>
<url>http://srs.slac.stanford.edu/nexus/content/groups/lcsim-maven2-public/</url>
</repository>
<repository>
<id>jlab-coda-repo-public</id>
- <name>JLAB CODA Maven Repository</name>
+ <name>CODA</name>
<url>https://coda.jlab.org/maven/</url>
</repository>
</repositories>
@@ -59,14 +65,14 @@
<distributionManagement>
<repository>
<id>lcsim-repo-releases</id>
- <name>LCSIM Releases maven repository</name>
- <!--<url>http://srs.slac.stanford.edu/nexus/content/repositories/lcsim-maven2-releases/</url>-->
+ <name>LCSim Releases</name>
+ <!--<url>http://srs.slac.stanford.edu/nexus/content/repositories/lcsim-maven2-releases/</url> -->
<url>http://scalnx-v01.slac.stanford.edu:8180/nexus/content/repositories/lcsim-maven2-releases/</url>
</repository>
<snapshotRepository>
<id>lcsim-repo-snapshots</id>
- <name>LCSIM Snapshots maven repository</name>
- <!--<url>http://srs.slac.stanford.edu/nexus/content/repositories/lcsim-maven2-snapshot/</url>-->
+ <name>LCSim Snapshots</name>
+ <!--<url>http://srs.slac.stanford.edu/nexus/content/repositories/lcsim-maven2-snapshot/</url> -->
<url>http://scalnx-v01.slac.stanford.edu:8180/nexus/content/repositories/lcsim-maven2-snapshot/</url>
</snapshotRepository>
<site>
@@ -137,117 +143,112 @@
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-util</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-detector-data</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-detector-model</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-conditions</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-ecal-recon</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-ecal-readout-sim</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-tracking</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-evio</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-recon</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-analysis</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-monitoring-drivers</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-monitoring-app</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-users</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-steering-files</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-distribution</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-ecal-event-display</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-record-util</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-monitoring-util</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-run-database</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-crawler</artifactId>
- <version>${project.version}</version>
- </dependency>
- <dependency>
- <groupId>org.hps</groupId>
- <artifactId>hps-datacat-client</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-job</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-logging</artifactId>
- <version>${project.version}</version>
+ <version>3.9-SNAPSHOT</version>
</dependency>
<!-- Next are external dependencies used in multiple modules. -->
<dependency>
@@ -278,7 +279,7 @@
<dependency>
<groupId>jfreechart-aida-experimental</groupId>
<artifactId>jfreechart-aida-experimental</artifactId>
- <version>1.8-SNAPSHOT</version>
+ <version>1.8</version>
<exclusions>
<exclusion>
<groupId>jdom</groupId>
@@ -289,6 +290,11 @@
<artifactId>commons-math</artifactId>
</exclusion>
</exclusions>
+ </dependency>
+ <dependency>
+ <groupId>srs</groupId>
+ <artifactId>org-srs-datacat-client</artifactId>
+ <version>0.5-TEST3</version>
</dependency>
</dependencies>
</dependencyManagement>
@@ -336,10 +342,8 @@
<artifactId>maven-surefire-report-plugin</artifactId>
<version>2.17</version>
</plugin>
- <plugin>
- <groupId>org.kuali.maven.plugins</groupId>
- <artifactId>graph-maven-plugin</artifactId>
- </plugin>
+ <!-- <plugin> <groupId>org.kuali.maven.plugins</groupId> <artifactId>graph-maven-plugin</artifactId>
+ </plugin> -->
</plugins>
</reporting>
<build>
@@ -372,7 +376,8 @@
<artifactId>maven-antrun-plugin</artifactId>
<version>1.7</version>
</plugin>
- <!-- This fixes the lifecycle not covered by plugin execution error in Eclipse. -->
+ <!-- This fixes the lifecycle not covered by plugin execution
+ error in Eclipse. -->
<plugin>
<groupId>org.eclipse.m2e</groupId>
<artifactId>lifecycle-mapping</artifactId>
@@ -399,11 +404,8 @@
</lifecycleMappingMetadata>
</configuration>
</plugin>
- <plugin>
- <groupId>org.kuali.maven.plugins</groupId>
- <artifactId>graph-maven-plugin</artifactId>
- <version>1.2.3</version>
- </plugin>
+ <!-- <plugin> <groupId>org.kuali.maven.plugins</groupId>
+ <artifactId>graph-maven-plugin</artifactId> <version>1.2.3</version> </plugin> -->
</plugins>
</pluginManagement>
<plugins>
@@ -423,10 +425,7 @@
<target>1.7</target>
<showWarnings>true</showWarnings>
<showDeprecation>true</showDeprecation>
- <!--
- <staleMillis>1</staleMillis>
- <useIncrementalCompilation>false</useIncrementalCompilation>
- -->
+ <!-- <staleMillis>1</staleMillis> <useIncrementalCompilation>false</useIncrementalCompilation> -->
</configuration>
</plugin>
<plugin>
@@ -436,7 +435,7 @@
<configuration>
<argLine>-Xmx1024m</argLine>
<forkMode>pertest</forkMode>
- <systemPropertyVariables>
+ <systemPropertyVariables>
<org.lcsim.cacheDir>${org.lcsim.cacheDir}</org.lcsim.cacheDir>
<java.util.logging.config.class>org.hps.logging.config.TestLoggingConfig</java.util.logging.config.class>
</systemPropertyVariables>
@@ -476,7 +475,44 @@
<plugin>
<artifactId>maven-resources-plugin</artifactId>
<version>2.7</version>
- </plugin>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-checkstyle-plugin</artifactId>
+ <version>2.17</version>
+ <configuration>
+ <excludes>org/hps/users/**/*</excludes>
+ <logViolationsToConsole>true</logViolationsToConsole>
+ <consoleOutput>true</consoleOutput>
+ <includeResources>true</includeResources>
+ <includeTestResources>true</includeTestResources>
+ <checkstyleRules>
+ <module name="Checker">
+ <module name="FileTabCharacter">
+ <property name="fileExtensions" value="java,xml,lcsim,prop,properties" />
+ </module>
+ <module name="TreeWalker">
+ <module name="UnusedImports">
+ <property name="processJavadoc" value="false" />
+ </module>
+ <module name="RedundantImport" />
+ <!--
+ <module name="EmptyBlock" />
+ <module name="EmptyStatement" />
+ -->
+ </module>
+ </module>
+ </checkstyleRules>
+ </configuration>
+ <executions>
+ <execution>
+ <phase>compile</phase>
+ <goals>
+ <goal>check</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
</plugins>
</build>
<developers>
@@ -490,9 +526,6 @@
<role>release manager</role>
</roles>
<timezone>-8</timezone>
- <properties>
- <aim>jeremyslac</aim>
- </properties>
</developer>
<developer>
<name>Norman Graf</name>
@@ -503,9 +536,6 @@
<role>developer</role>
</roles>
<timezone>-8</timezone>
- <properties>
- <aim>ngraf137</aim>
- </properties>
</developer>
<developer>
<name>Maurik Holtrop</name>
@@ -556,7 +586,7 @@
</roles>
<timezone>-8</timezone>
</developer>
- <developer>
+ <developer>
<name>Holly Szumila-Vance</name>
<email>[log in to unmask]</email>
<organization>Old Dominion University</organization>
Modified: java/branches/HPSJAVA-409/plugin/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/plugin/pom.xml (original)
+++ java/branches/HPSJAVA-409/plugin/pom.xml Wed Apr 27 11:11:32 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/plugin/</url>
Modified: java/branches/HPSJAVA-409/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/pom.xml (original)
+++ java/branches/HPSJAVA-409/pom.xml Wed Apr 27 11:11:32 2016
@@ -1,5 +1,4 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>hps-modules</artifactId>
<packaging>pom</packaging>
@@ -10,7 +9,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>svn://svn.freehep.org/hps/java/trunk/</url>
@@ -108,7 +107,6 @@
<module>analysis</module>
<module>conditions</module>
<module>crawler</module>
- <module>datacat-client</module>
<module>detector-data</module>
<module>detector-model</module>
<module>distribution</module>
Modified: java/branches/HPSJAVA-409/recon/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/recon/pom.xml (original)
+++ java/branches/HPSJAVA-409/recon/pom.xml Wed Apr 27 11:11:32 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/recon/</url>
Modified: java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/EcalGainCalibFilter.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/EcalGainCalibFilter.java (original)
+++ java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/EcalGainCalibFilter.java Wed Apr 27 11:11:32 2016
@@ -7,11 +7,11 @@
public class EcalGainCalibFilter extends EventReconFilter {
- private double feeCut = 0.6;
- private double molCut = 0.6;
- private double tMin = 16.0;
- private double tMax = 80.0;
- private double dtMax = 12.0;
+ private double feeCut = 0.6;
+ private double molCut = 0.6;
+ private double tMin = 16.0;
+ private double tMax = 80.0;
+ private double dtMax = 12.0;
private String clusterCollectionName = "EcalClusters";
public void setFeeCut(double feeCut) { this.feeCut = feeCut; }
@@ -27,32 +27,32 @@
@Override
public void process(EventHeader event)
{
- incrementEventProcessed();
+ incrementEventProcessed();
if (!event.hasCollection(Cluster.class, clusterCollectionName)) skipEvent();
List<Cluster> cc = event.get(Cluster.class, clusterCollectionName);
if (cc.size() < 1) skipEvent();
boolean keepEvent = false;
for (Cluster c1 : cc)
{
- final double t1 = ClusterUtilities.getSeedHitTime(c1);
- if (t1<tMin || t1>tMax) continue;
- if (c1.getEnergy() > feeCut)
- {
- keepEvent = true;
- break;
- }
- for (Cluster c2 : cc)
- {
- final double t2 = ClusterUtilities.getSeedHitTime(c2);
- if (c1 == c2) continue;
- if (t2<tMin || t2>tMax) continue;
- if (Math.abs(t1-t2) > dtMax) continue;
- if (c1.getEnergy() + c2.getEnergy() > molCut)
- {
- keepEvent = true;
- break;
- }
- }
+ final double t1 = ClusterUtilities.getSeedHitTime(c1);
+ if (t1<tMin || t1>tMax) continue;
+ if (c1.getEnergy() > feeCut)
+ {
+ keepEvent = true;
+ break;
+ }
+ for (Cluster c2 : cc)
+ {
+ final double t2 = ClusterUtilities.getSeedHitTime(c2);
+ if (c1 == c2) continue;
+ if (t2<tMin || t2>tMax) continue;
+ if (Math.abs(t1-t2) > dtMax) continue;
+ if (c1.getEnergy() + c2.getEnergy() > molCut)
+ {
+ keepEvent = true;
+ break;
+ }
+ }
}
if (!keepEvent) skipEvent();
incrementEventPassed();
Modified: java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/EcalPairsFilter.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/EcalPairsFilter.java (original)
+++ java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/EcalPairsFilter.java Wed Apr 27 11:11:32 2016
@@ -18,6 +18,7 @@
private String clusterCollectionName = "EcalClusters";
private double maxDt = 2.5;
+ private boolean strictPairs = false;
public void setClusterCollectionName(String clusterCollectionName) {
this.clusterCollectionName = clusterCollectionName;
@@ -27,12 +28,19 @@
this.maxDt = maxDt;
}
+ public void setStrictPairs(boolean strictPairs) {
+ this.strictPairs = strictPairs;
+ }
+
@Override
public void process(EventHeader event) {
incrementEventProcessed();
if (event.hasCollection(Cluster.class, clusterCollectionName)) {
List<Cluster> clusters = event.get(Cluster.class, clusterCollectionName);
if (clusters.size() < 2) {
+ skipEvent();
+ }
+ if (strictPairs && clusters.size() > 2) {
skipEvent();
}
List<Double> clusterTimes = new ArrayList<Double>();
@@ -52,7 +60,7 @@
skipEvent();
}
} else {
- skipEvent();
+ skipEvent();
}
incrementEventPassed();
}
Modified: java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/FEEFilterDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/FEEFilterDriver.java (original)
+++ java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/FEEFilterDriver.java Wed Apr 27 11:11:32 2016
@@ -1,16 +1,38 @@
package org.hps.recon.filtering;
+import org.lcsim.event.Cluster;
+import org.hps.recon.ecal.cluster.ClusterUtilities;
+import org.hps.record.epics.EpicsData;
import org.hps.record.triggerbank.AbstractIntData;
import org.hps.record.triggerbank.TIData;
+import org.lcsim.event.EventHeader;
import org.lcsim.event.GenericObject;
-
-import org.lcsim.event.Cluster;
-import org.lcsim.event.EventHeader;
import org.lcsim.util.Driver;
-import org.hps.recon.ecal.cluster.ClusterUtilities;
-import org.hps.record.epics.EpicsData;
public class FEEFilterDriver extends Driver
{
+ //Set min seed energy value, default to 2015 run
+ private double seedCut = 0.4;
+
+ //set min cluster energy value, default to 2015 run
+ private double clusterCut = 0.6;
+
+ /**
+ * Set the cut value for seed energy in GeV
+ * @param seedCut
+ */
+ public void setSeedCut(double seedCut) {
+ this.seedCut = seedCut;
+ }
+
+ /**
+ * Set the cut value for cluster energy in GeV
+ * @param clusterCut
+ */
+ public void setClusterCut(double clusterCut) {
+ this.clusterCut = clusterCut;
+ }
+
+
public void process(EventHeader event) {
// don't drop any events with EPICS data:
@@ -44,9 +66,10 @@
// ClusterUtilities.findSeedHit(cc).getRawEnergy() < 0.4)
// cc.Delete();
- // keep events with a cluster over 600 MeV with seed over 400 MeV:
- if (cc.getEnergy() > 0.6 &&
- ClusterUtilities.findSeedHit(cc).getCorrectedEnergy() > 0.4)
+ // keep events with a cluster over 600 MeV with seed over 400 MeV (for 2015 running).
+ // keep events with cluster over 1.2 GeV and seed over 650 MeV for 2016 running.
+ if (cc.getEnergy() > clusterCut &&
+ ClusterUtilities.findSeedHit(cc).getCorrectedEnergy() > seedCut )
return;
}
Modified: java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/MinimumHitsFilter.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/MinimumHitsFilter.java (original)
+++ java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/MinimumHitsFilter.java Wed Apr 27 11:11:32 2016
@@ -61,7 +61,7 @@
private boolean isHitOnTop(RawTrackerHit hit){
- HpsSiSensor sensor=(HpsSiSensor) hit.getDetectorElement();
+ HpsSiSensor sensor=(HpsSiSensor) hit.getDetectorElement();
IIdentifier id=hit.getIdentifier();
SiTrackerIdentifierHelper _sid_helper=(SiTrackerIdentifierHelper) sensor.getIdentifierHelper();
Modified: java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/PulserScalerAndEpicsFilter.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/PulserScalerAndEpicsFilter.java (original)
+++ java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/PulserScalerAndEpicsFilter.java Wed Apr 27 11:11:32 2016
@@ -7,101 +7,97 @@
import java.io.File;
import java.io.IOException;
-import org.lcsim.event.Cluster;
+import org.hps.conditions.ConditionsDriver;
+import org.hps.record.epics.EpicsData;
+import org.hps.record.triggerbank.AbstractIntData;
+import org.hps.record.triggerbank.TIData;
import org.lcsim.event.EventHeader;
import org.lcsim.event.GenericObject;
import org.lcsim.lcio.LCIOReader;
import org.lcsim.lcio.LCIOWriter;
import org.lcsim.util.Driver;
-import org.hps.conditions.ConditionsDriver;
-import org.hps.recon.ecal.cluster.ClusterUtilities;
//import org.hps.recon.ecal.triggerbank.AbstractIntData;
//import org.hps.recon.ecal.triggerbank.TIData;
-import org.hps.record.triggerbank.AbstractIntData;
-import org.hps.record.triggerbank.TIData;
-import org.hps.record.epics.EpicsData;
-import org.hps.record.scalers.ScalerData;
-
public class PulserScalerAndEpicsFilter extends Driver{
- public void process(EventHeader event) {
+ public void process(EventHeader event) {
- // only keep pulser triggers:
- if (!event.hasCollection(GenericObject.class,"TriggerBank"))
- throw new Driver.NextEventException();
- boolean isPulser=false;
- for (GenericObject gob : event.get(GenericObject.class,"TriggerBank"))
- {
- if (!(AbstractIntData.getTag(gob) == TIData.BANK_TAG)) continue;
- TIData tid = new TIData(gob);
- if (tid.isPulserTrigger())
- {
- isPulser=true;
- break;
- }
- }
+ // only keep pulser triggers:
+ if (!event.hasCollection(GenericObject.class,"TriggerBank"))
+ throw new Driver.NextEventException();
+ boolean isPulser=false;
+ for (GenericObject gob : event.get(GenericObject.class,"TriggerBank"))
+ {
+ if (!(AbstractIntData.getTag(gob) == TIData.BANK_TAG)) continue;
+ TIData tid = new TIData(gob);
+ if (tid.isPulserTrigger())
+ {
+ isPulser=true;
+ break;
+ }
+ }
- // don't drop any events with EPICS data or scalers data
- // (could also do this via event tag=31)
- final EpicsData edata = EpicsData.read(event);
- if (edata != null) return;
-
- if( event.hasCollection(GenericObject.class, "ScalerData"))
- return;
+ // don't drop any events with EPICS data or scalers data
+ // (could also do this via event tag=31)
+ final EpicsData edata = EpicsData.read(event);
+ if (edata != null) return;
+
+ if( event.hasCollection(GenericObject.class, "ScalerData"))
+ return;
- if (!isPulser) throw new Driver.NextEventException();
+ if (!isPulser) throw new Driver.NextEventException();
- }
- /**
- * standalone way to run this:
- *
- * @param arg [0] inputFile [1] outputFile [2] run number [3] detectorName (optional, default = "HPS-EngRun2015-Nominal-v1")
- * @throws IOException
- */
- public static void main(String arg[]) throws IOException{
- ConditionsDriver hack = new ConditionsDriver();
-
- String detectorName = "HPS-EngRun2015-Nominal-v1";
- if(arg.length >3)
- hack.setDetectorName(arg[3]);
- hack.setFreeze(true);
- hack.setRunNumber(Integer.parseInt(arg[2]));
- hack.initialize();
- PulserScalerAndEpicsFilter pf = new PulserScalerAndEpicsFilter();
- LCIOWriter writer = new LCIOWriter(arg[1]);
- File file = new File(arg[0]);
- LCIOReader reader = new LCIOReader(file);
- System.out.println(file.getPath());
- int nEventsKept = 0;
- int nEvents = 0;
- try{
- while(true){
- try{
-
- EventHeader eh = reader.read();
- if(eh.getEventNumber() %1000 == 0){
- //Driver.this.
- System.out.println("PulserFitter:");
- System.out.println(" " + nEventsKept + " events kept");
- System.out.println(" " + nEvents + "events read");
- }
- nEvents ++;
- pf.process(eh); //might throw NextEventException
-
- nEventsKept++;
- writer.write(eh);
- }catch(Driver.NextEventException e){
+ }
+ /**
+ * standalone way to run this:
+ *
+ * @param arg [0] inputFile [1] outputFile [2] run number [3] detectorName (optional, default = "HPS-EngRun2015-Nominal-v1")
+ * @throws IOException
+ */
+ public static void main(String arg[]) throws IOException{
+ ConditionsDriver hack = new ConditionsDriver();
+
+ String detectorName = "HPS-EngRun2015-Nominal-v1";
+ if(arg.length >3)
+ hack.setDetectorName(arg[3]);
+ hack.setFreeze(true);
+ hack.setRunNumber(Integer.parseInt(arg[2]));
+ hack.initialize();
+ PulserScalerAndEpicsFilter pf = new PulserScalerAndEpicsFilter();
+ LCIOWriter writer = new LCIOWriter(arg[1]);
+ File file = new File(arg[0]);
+ LCIOReader reader = new LCIOReader(file);
+ System.out.println(file.getPath());
+ int nEventsKept = 0;
+ int nEvents = 0;
+ try{
+ while(true){
+ try{
+
+ EventHeader eh = reader.read();
+ if(eh.getEventNumber() %1000 == 0){
+ //Driver.this.
+ System.out.println("PulserFitter:");
+ System.out.println(" " + nEventsKept + " events kept");
+ System.out.println(" " + nEvents + "events read");
+ }
+ nEvents ++;
+ pf.process(eh); //might throw NextEventException
+
+ nEventsKept++;
+ writer.write(eh);
+ }catch(Driver.NextEventException e){
- }
- }
- }catch(IOException e){
- e.printStackTrace();
- reader.close();
- }
+ }
+ }
+ }catch(IOException e){
+ e.printStackTrace();
+ reader.close();
+ }
- writer.close();
- }
+ writer.close();
+ }
}
Modified: java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/V0CandidateFilter.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/V0CandidateFilter.java (original)
+++ java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/filtering/V0CandidateFilter.java Wed Apr 27 11:11:32 2016
@@ -4,15 +4,22 @@
import java.util.List;
import org.hps.recon.ecal.cluster.ClusterUtilities;
import org.hps.recon.particle.ReconParticleDriver;
+import org.hps.recon.tracking.TrackData;
+import org.hps.recon.tracking.TrackType;
import org.hps.record.epics.EpicsData;
+import org.hps.record.scalers.ScalerData;
import org.lcsim.event.EventHeader;
import org.lcsim.event.ReconstructedParticle;
/**
* Class to strip off trident candidates. Currently defined as: e+ e- events
- * with tracks. If the tight constraint is enabled, tracks must be matched to
- * clusters and the Ecal cluster times must be within _timingCut [2.5ns] of each
- * other.
+ * with tracks; track and vertex chi2 must be better than values defined by
+ * cuts, and track times must be within trackDtCut of each other. If the tight
+ * constraint is enabled, tracks must be matched to clusters, the Ecal cluster
+ * times must be within _timingCut [2.5ns] of each other, and there must be
+ * exactly one V0 passing all cuts.
+ *
+ * Only GBL vertices are considered.
*
* @author Norman A Graf
*
@@ -20,8 +27,13 @@
*/
public class V0CandidateFilter extends EventReconFilter {
- private String _V0CandidateCollectionName = "TargetConstrainedV0Candidates";
+ private String _V0CandidateCollectionName = "UnconstrainedV0Candidates";
private double _clusterTimingCut = 2.5;
+ private double v0Chi2Cut = 10.0;
+ private double trackChi2Cut = 20.0;
+ private double trackDtCut = 5.0;
+ private double trackPMax = 0.9;
+ private double v0PMax = 1.4;
private boolean _tight = false;
private boolean _keepEpicsDataEvents = false;
@@ -30,9 +42,15 @@
protected void process(EventHeader event) {
incrementEventProcessed();
if (_keepEpicsDataEvents) {
- // don't drop any events with EPICS data:
- final EpicsData data = EpicsData.read(event);
- if (data != null) {
+ // don't drop any events with EPICS or scaler data:
+ final EpicsData epicsData = EpicsData.read(event);
+ if (epicsData != null) {
+ incrementEventPassed();
+ return;
+ }
+
+ final ScalerData scalerData = ScalerData.read(event);
+ if (scalerData != null) {
incrementEventPassed();
return;
}
@@ -41,45 +59,96 @@
skipEvent();
}
List<ReconstructedParticle> V0Candidates = event.get(ReconstructedParticle.class, _V0CandidateCollectionName);
- if (V0Candidates.isEmpty()) {
- skipEvent();
- }
+ int nV0 = 0; //number of good V0
+ for (ReconstructedParticle v0 : V0Candidates) {
+ ReconstructedParticle electron = v0.getParticles().get(ReconParticleDriver.ELECTRON);
+ ReconstructedParticle positron = v0.getParticles().get(ReconParticleDriver.POSITRON);
- // tight requires ONLY ONE real vertex fit
- if (_tight) {
- if (V0Candidates.size() != 2) {
- skipEvent();
+ if (!TrackType.isGBL(v0.getType())) { //we only care about GBL vertices
+ continue;
}
- for (ReconstructedParticle rp : V0Candidates) {
-
- ReconstructedParticle electron;
- ReconstructedParticle positron;
-
- List<ReconstructedParticle> fsParticles = rp.getParticles();
- if (fsParticles.size() != 2) {
- skipEvent();
+ if (v0.getStartVertex().getChi2() > v0Chi2Cut) {
+ continue;
+ }
+ if (electron.getTracks().get(0).getChi2() > trackChi2Cut || positron.getTracks().get(0).getChi2() > trackChi2Cut) {
+ continue;
+ }
+ if (electron.getMomentum().magnitude() > trackPMax || positron.getMomentum().magnitude() > trackPMax) {
+ continue;
+ }
+ if (v0.getMomentum().magnitude() > v0PMax) {
+ continue;
+ }
+ double eleTime = TrackData.getTrackTime(TrackData.getTrackData(event, electron.getTracks().get(0)));
+ double posTime = TrackData.getTrackTime(TrackData.getTrackData(event, positron.getTracks().get(0)));
+ if (Math.abs(eleTime - posTime) > trackDtCut) {
+ continue;
+ }
+ if (_tight) { // tight requires cluster matches and cluster time cut
+ if (electron.getClusters().isEmpty() || positron.getClusters().isEmpty()) {
+ continue;
}
- // require both electrons to be associated with an ECal cluster
- electron = fsParticles.get(ReconParticleDriver.ELECTRON);
- if (electron.getClusters().isEmpty()) {
- skipEvent();
- }
- positron = fsParticles.get(ReconParticleDriver.POSITRON);
- if (positron.getClusters().isEmpty()) {
- skipEvent();
- }
-
// calorimeter cluster timing cut
// first CalorimeterHit in the list is the seed crystal
double t1 = ClusterUtilities.getSeedHitTime(electron.getClusters().get(0));
double t2 = ClusterUtilities.getSeedHitTime(positron.getClusters().get(0));
if (abs(t1 - t2) > _clusterTimingCut) {
- skipEvent();
+ continue;
}
}
+ nV0++;
+ }
+ if (nV0 == 0) {
+ skipEvent();
+ }
+ // tight requires ONLY ONE candidate vertex
+ if (_tight && nV0 != 1) {
+ skipEvent();
}
incrementEventPassed();
+ }
+
+ /**
+ * Maximum vertex chi2 for a V0 to be counted.
+ *
+ * @param v0Chi2Cut default of 10.0.
+ */
+ public void setV0Chi2Cut(double v0Chi2Cut) {
+ this.v0Chi2Cut = v0Chi2Cut;
+ }
+
+ /**
+ * Maximum track chi2 for a V0 to be counted. A V0 is rejected if either of
+ * the final state tracks has a chi2 exceeding the cut.
+ *
+ * @param trackChi2Cut default of 20.0.
+ */
+ public void setTrackChi2Cut(double trackChi2Cut) {
+ this.trackChi2Cut = trackChi2Cut;
+ }
+
+ /**
+ * Maximum track time different for a V0 to be counted.
+ *
+ * @param trackDtCut units of ns, default of 5.0
+ */
+ public void setTrackDtCut(double trackDtCut) {
+ this.trackDtCut = trackDtCut;
+ }
+
+ /**
+ * Maximum track momentum for a V0 to be counted. A V0 is rejected if either
+ * of the final state tracks has momentum exceeding this cut.
+ *
+ * @param trackPMax units of GeV, default of 0.9
+ */
+ public void setTrackPMax(double trackPMax) {
+ this.trackPMax = trackPMax;
+ }
+
+ public void setV0PMax(double v0PMax) {
+ this.v0PMax = v0PMax;
}
/**
Modified: java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/particle/HpsReconParticleDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/particle/HpsReconParticleDriver.java (original)
+++ java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/particle/HpsReconParticleDriver.java Wed Apr 27 11:11:32 2016
@@ -242,6 +242,7 @@
BilliorVertexer vtxFitter = new BilliorVertexer(bField);
// TODO: The beam size should come from the conditions database.
vtxFitter.setBeamSize(beamSize);
+ vtxFitter.setDebug(debug);
// Perform the vertexing based on the specified constraint.
switch (constraint) {
@@ -319,7 +320,7 @@
// Generate a candidate vertex and particle.
BilliorVertex vtxFit = fitVertex(constraint, firstElectronBTrack, secondElectronBTrack);
- ReconstructedParticle candidate = this.makeReconstructedParticle(topElectron, botElectron, vtxFit);
+ ReconstructedParticle candidate = makeReconstructedParticle(topElectron, botElectron, vtxFit);
// Add the candidate vertex and particle to the
// appropriate LCIO collection.
@@ -354,7 +355,7 @@
* @return Returns a reconstructed particle with properties generated from
* the child particles and vertex given as an argument.
*/
- private ReconstructedParticle makeReconstructedParticle(ReconstructedParticle electron, ReconstructedParticle positron, BilliorVertex vtxFit) {
+ public static ReconstructedParticle makeReconstructedParticle(ReconstructedParticle electron, ReconstructedParticle positron, BilliorVertex vtxFit) {
// Create a new reconstructed particle to represent the V0
// candidate and populate it with the electron and positron.
@@ -396,9 +397,8 @@
((BaseReconstructedParticle) candidate).setCharge(particleCharge);
// VERBOSE :: Output the fitted momentum data.
- printDebug("Fitted momentum in tracking frame: " + fittedMomentum.toString());
- printDebug("Fitted momentum in detector frame: " + fittedMomentum.toString());
-
+// printDebug("Fitted momentum in tracking frame: " + fittedMomentum.toString());
+// printDebug("Fitted momentum in detector frame: " + fittedMomentum.toString());
// Add the ReconstructedParticle to the vertex.
vtxFit.setAssociatedParticle(candidate);
Modified: java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/particle/ReconParticleDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/particle/ReconParticleDriver.java (original)
+++ java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/particle/ReconParticleDriver.java Wed Apr 27 11:11:32 2016
@@ -7,12 +7,14 @@
import hep.physics.vec.VecOp;
import java.util.ArrayList;
+import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.hps.recon.ecal.cluster.ClusterUtilities;
import org.hps.recon.tracking.CoordinateTransformations;
+import org.hps.recon.tracking.TrackUtils;
import org.hps.recon.utils.TrackClusterMatcher;
import org.lcsim.event.Cluster;
import org.lcsim.event.EventHeader;
@@ -22,7 +24,9 @@
import org.lcsim.event.base.BaseCluster;
import org.lcsim.event.base.BaseReconstructedParticle;
import org.lcsim.geometry.Detector;
+import org.lcsim.geometry.subdetector.HPSEcal3;
import org.lcsim.util.Driver;
+
/**
* Driver framework for generating reconstructed particles and matching clusters
@@ -44,6 +48,11 @@
public static final int POSITRON = 1;
public static final int MOLLER_TOP = 0;
public static final int MOLLER_BOT = 1;
+
+ // normalized cluster-track distance required for qualifying as a match:
+ private double MAXNSIGMAPOSITIONMATCH=30.0;
+
+ HPSEcal3 ecal;
/**
* Sets the name of the LCIO collection for beam spot constrained V0
@@ -173,6 +182,17 @@
this.trackCollectionNames = trackCollectionNames;
}
+ /**
+ * Set the requirement on cluster-track position matching in terms of N-sigma.
+ *
+ * @param nsigma
+ */
+ public void setNSigmaPositionMatch(double nsigma) {
+ MAXNSIGMAPOSITIONMATCH=nsigma;
+ }
+
+
+
/**
* Updates the magnetic field parameters to match the appropriate values for
* the current detector settings.
@@ -188,6 +208,7 @@
flipSign = -1;
}
+ ecal = (HPSEcal3) detector.getSubdetector("Ecal");
matcher.setBFieldMap(detector.getFieldMap());
}
@@ -209,7 +230,7 @@
* clusters. Clusters will be matched with tracks when this is possible.
*
* @param clusters - The list of event clusters.
- * @param tracks - The list of event tracks.
+ * @param trackCollections - The list of event tracks.
* @return Returns a <code>List</code> collection containing all of the
* <code>ReconstructedParticle</code> objects generated from the argument
* data.
@@ -222,15 +243,19 @@
// Create a list of unmatched clusters. A cluster should be
// removed from the list if a matching track is found.
Set<Cluster> unmatchedClusters = new HashSet<Cluster>(clusters);
-
+
+ // Create a mapping of matched clusters to corresponding tracks.
+ HashMap<Cluster, Track> clusterToTrack = new HashMap<Cluster,Track>();
+
// Loop through all of the track collections and try to match every
// track to a cluster. Allow a cluster to be matched to multiple
// tracks and use a probability (to be coded later) to determine what
// the best match is.
// TODO: At some point, pull this out to it's own method
for (List<Track> tracks : trackCollections) {
+
for (Track track : tracks) {
-
+
// Create a reconstructed particle to represent the track.
ReconstructedParticle particle = new BaseReconstructedParticle();
@@ -244,6 +269,9 @@
// Derive the charge of the particle from the track.
((BaseReconstructedParticle) particle).setCharge(track.getCharge() * flipSign);
+
+ // initialize PID quality to a junk value:
+ ((BaseReconstructedParticle)particle).setGoodnessOfPid(9999);
// Extrapolate the particle ID from the track. Positively
// charged particles are assumed to be positrons and those
@@ -254,33 +282,48 @@
((BaseReconstructedParticle) particle).setParticleIdUsed(new SimpleParticleID(11, 0, 0, 0));
}
+ // normalized distance of the closest match:
+ double smallestNSigma=Double.MAX_VALUE;
+
+ // try to find a matching cluster:
Cluster matchedCluster = null;
-
- // Track the best matching cluster for the track.
- // TODO: This should find the best match not just the first match.
- clusterLoop:
for (Cluster cluster : clusters) {
- // Check if the cluster and track are a valid match.
- if (matcher.isMatch(cluster, track)) {
-
- // Store the matched cluster.
- matchedCluster = cluster;
-
- // Since a match has been found, the loop can be
- // terminated.
- break clusterLoop;
+
+ // normalized distance between this cluster and track:
+ final double thisNSigma=matcher.getNSigmaPosition(cluster, particle);
+
+ // ignore if matching quality doesn't make the cut:
+ if (thisNSigma > MAXNSIGMAPOSITIONMATCH) continue;
+
+ // ignore if we already found a cluster that's a better match:
+ if (thisNSigma > smallestNSigma) continue;
+
+ // we found a new best cluster candidate for this track:
+ smallestNSigma = thisNSigma;
+ matchedCluster = cluster;
+
+ // prefer using GBL tracks to correct (later) the clusters, for some consistency:
+ if (track.getType() >= 32 || !clusterToTrack.containsKey(matchedCluster)) {
+ clusterToTrack.put(matchedCluster,track);
}
}
// If a cluster was found that matches the track...
if (matchedCluster != null) {
+
+ // add cluster to the particle:
particle.addCluster(matchedCluster);
- int pid = particle.getParticleIDUsed().getPDG();
+ // use pid quality to store track-cluster matching quality:
+ ((BaseReconstructedParticle)particle).setGoodnessOfPid(smallestNSigma);
+
+ // propogate pid to the cluster:
+ final int pid = particle.getParticleIDUsed().getPDG();
if (Math.abs(pid) == 11) {
((BaseCluster) matchedCluster).setParticleId(pid);
}
+ // unmatched clusters will (later) be used to create photon particles:
unmatchedClusters.remove(matchedCluster);
}
@@ -290,35 +333,40 @@
}
// Iterate over the remaining unmatched clusters.
- if (!unmatchedClusters.isEmpty()) {
- for (Cluster unmatchedCluster : unmatchedClusters) {
-
- // Create a reconstructed particle to represent the unmatched cluster.
- ReconstructedParticle particle = new BaseReconstructedParticle();
-
- // The particle is assumed to be a photon, since it did not leave a track.
- ((BaseReconstructedParticle) particle).setParticleIdUsed(new SimpleParticleID(22, 0, 0, 0));
-
- int pid = particle.getParticleIDUsed().getPDG();
- if (Math.abs(pid) != 11) {
- ((BaseCluster) unmatchedCluster).setParticleId(pid);
- }
-
- // Add the cluster to the particle.
- particle.addCluster(unmatchedCluster);
-
- // Set the reconstructed particle properties based on the cluster properties.
- ((BaseReconstructedParticle) particle).setCharge(0);
-
- // Add the particle to the reconstructed particle list.
- particles.add(particle);
- }
- }
-
- // Apply the corrections to the Ecal clusters
+ for (Cluster unmatchedCluster : unmatchedClusters) {
+
+ // Create a reconstructed particle to represent the unmatched cluster.
+ ReconstructedParticle particle = new BaseReconstructedParticle();
+
+ // The particle is assumed to be a photon, since it did not leave a track.
+ ((BaseReconstructedParticle) particle).setParticleIdUsed(new SimpleParticleID(22, 0, 0, 0));
+
+ int pid = particle.getParticleIDUsed().getPDG();
+ if (Math.abs(pid) != 11) {
+ ((BaseCluster) unmatchedCluster).setParticleId(pid);
+ }
+
+ // Add the cluster to the particle.
+ particle.addCluster(unmatchedCluster);
+
+ // Set the reconstructed particle properties based on the cluster properties.
+ ((BaseReconstructedParticle) particle).setCharge(0);
+
+ // Add the particle to the reconstructed particle list.
+ particles.add(particle);
+ }
+
+ // Apply the corrections to the Ecal clusters using track information, if available
for (Cluster cluster : clusters) {
if (cluster.getParticleId() != 0) {
- ClusterUtilities.applyCorrections(cluster);
+ if (clusterToTrack.containsKey(cluster)){
+ Track matchedT = clusterToTrack.get(cluster);
+ double ypos = TrackUtils.getTrackStateAtECal(matchedT).getReferencePoint()[2];
+ ClusterUtilities.applyCorrections(ecal, cluster, ypos);
+ }
+ else {
+ ClusterUtilities.applyCorrections(ecal, cluster);
+ }
}
}
@@ -525,7 +573,7 @@
/**
* Indicates whether debug text should be output or not.
*/
- private boolean debug = false;
+ protected boolean debug = false;
/**
* The simple name of the class used for debug print statements.
@@ -622,7 +670,7 @@
// Beam size variables.
// The beamsize array is in the tracking frame
/* TODO mg-May 14, 2014: the the beam size from the conditions db...also beam position! */
- protected double[] beamSize = {0.001, 0.2, 0.02};
+ protected double[] beamSize = {0.001, 0.130, 0.050}; //rough estimate from harp scans during engineering run production running
protected double bField;
// flipSign is a kludge...
Modified: java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/particle/SimpleParticleID.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/particle/SimpleParticleID.java (original)
+++ java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/particle/SimpleParticleID.java Wed Apr 27 11:11:32 2016
@@ -9,67 +9,67 @@
*
*/
public class SimpleParticleID implements ParticleID {
-
- int algorithmType = 0;
- int pdgID = UnknownPDG;
- int type = 0;
-
- double likelihood = 0.;
- // TODO: Need to define what other parameters are needed.
- double[] parameters = new double[1];
+
+ int algorithmType = 0;
+ int pdgID = UnknownPDG;
+ int type = 0;
+
+ double likelihood = 0.;
+ // TODO: Need to define what other parameters are needed.
+ double[] parameters = new double[1];
- public SimpleParticleID(){}
-
- public SimpleParticleID(int pdgID, int algorithmType, int type, double likelihood){
- this.pdgID = pdgID;
- this.algorithmType = algorithmType;
- this.type = type;
- this.likelihood = likelihood;
- }
-
- @Override
- public int getAlgorithmType() {
- return algorithmType;
- }
+ public SimpleParticleID(){}
+
+ public SimpleParticleID(int pdgID, int algorithmType, int type, double likelihood){
+ this.pdgID = pdgID;
+ this.algorithmType = algorithmType;
+ this.type = type;
+ this.likelihood = likelihood;
+ }
+
+ @Override
+ public int getAlgorithmType() {
+ return algorithmType;
+ }
- @Override
- public double getLikelihood() {
- return likelihood;
- }
+ @Override
+ public double getLikelihood() {
+ return likelihood;
+ }
- @Override
- public int getPDG() {
- return pdgID;
- }
+ @Override
+ public int getPDG() {
+ return pdgID;
+ }
- @Override
- public double[] getParameters() {
- return parameters;
- }
+ @Override
+ public double[] getParameters() {
+ return parameters;
+ }
- @Override
- public int getType() {
- return type;
- }
-
- public void setAlgorithmType(int algorithmType){
- this.algorithmType = algorithmType;
- }
-
- public void setLikelihood(int likelihood){
- this.likelihood = likelihood;
- }
-
- public void setPDG(int pdgID){
- this.pdgID = pdgID;
- }
-
- public void setType(int type){
- this.type = type;
- }
-
- public void setParameters(double[] parameters){
- this.parameters = parameters;
- }
+ @Override
+ public int getType() {
+ return type;
+ }
+
+ public void setAlgorithmType(int algorithmType){
+ this.algorithmType = algorithmType;
+ }
+
+ public void setLikelihood(int likelihood){
+ this.likelihood = likelihood;
+ }
+
+ public void setPDG(int pdgID){
+ this.pdgID = pdgID;
+ }
+
+ public void setType(int type){
+ this.type = type;
+ }
+
+ public void setParameters(double[] parameters){
+ this.parameters = parameters;
+ }
}
Modified: java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/utils/TrackClusterMatcher.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/utils/TrackClusterMatcher.java (original)
+++ java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/utils/TrackClusterMatcher.java Wed Apr 27 11:11:32 2016
@@ -1,8 +1,4 @@
package org.hps.recon.utils;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
import hep.aida.IAnalysisFactory;
import hep.aida.IHistogram1D;
@@ -13,12 +9,17 @@
import hep.physics.vec.BasicHep3Vector;
import hep.physics.vec.Hep3Vector;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.hps.recon.tracking.CoordinateTransformations;
+import org.hps.recon.tracking.TrackUtils;
import org.lcsim.event.Cluster;
+import org.lcsim.event.ReconstructedParticle;
import org.lcsim.event.Track;
import org.lcsim.event.TrackState;
import org.lcsim.geometry.FieldMap;
-import org.hps.recon.tracking.CoordinateTransformations;
-import org.hps.recon.tracking.TrackUtils;
/**
* Utility used to determine if a track and cluster are matched.
@@ -65,6 +66,53 @@
private double bottomClusterTrackMatchDeltaYHigh = 24; // mm
/**
+ * Rafo's parameterization of cluster-seed x/y position residuals as function of energy.
+ *
+ * Derived using GBL/seed tracks, non-analytic extrapolation, uncorrected cluster positions,
+ * and EngRun2015-Nominal-v4-4-fieldmap detector.
+ *
+ * f = p0+e*(p1+e*(p2+e*(p3+e*(p4+e*p5))))
+ */
+ private static final double dxMeanTopPosiGBL[] = { 6.67414,-9.57296, 5.70647, 27.4523,-28.1103,-9.11424 };
+ private static final double dxSigmTopPosiGBL[] = { 52.6437,-478.805, 1896.73,-3761.48, 3676.77,-1408.31 };
+ private static final double dxMeanBotPosiGBL[] = { 4.13802, 15.8887,-74.2844,-9.78944, 308.541,-287.668 };
+ private static final double dxSigmBotPosiGBL[] = { 37.6513,-294.851, 1002.15,-1639.08, 1228.02,-308.754 };
+
+ private static final double dxMeanTopElecGBL[] = {-1.6473, 5.58701, 25.3977,-17.1523,-121.025, 145.969 };
+ private static final double dxSigmTopElecGBL[] = { 48.7018,-423.599, 1592.66,-2959.99, 2668.97,-919.876 };
+ private static final double dxMeanBotElecGBL[] = {-6.63558, 83.7763,-460.451, 1275.63,-1702.83, 873.913 };
+ private static final double dxSigmBotElecGBL[] = { 47.0029,-411.784, 1586.52,-3083.37, 2985.58,-1145.53 };
+
+ private static final double dyMeanTopPosiGBL[] = { 0.31245, 5.57585,-6.50267,-8.21688, 39.8607,-43.9661 };
+ private static final double dySigmTopPosiGBL[] = { 33.0213,-275.174, 1168.77,-2642.34, 3045.52,-1406.21 };
+ private static final double dyMeanBotPosiGBL[] = {-7.032, 74.9738,-383.972, 977.849,-1250.28, 637.75 };
+ private static final double dySigmBotPosiGBL[] = { 19.019, -83.9253, 133.813, 119.883,-546.951, 405.207 };
+
+ private static final double dyMeanTopElecGBL[] = { 2.48498,-20.4101, 62.9689, 25.6386,-259.957, 207.145 };
+ private static final double dySigmTopElecGBL[] = { 8.65583, 120.676,-1166.43, 3811.72,-5383.19, 2787.42 };
+ private static final double dyMeanBotElecGBL[] = {-10.5228, 112.329,-489.761, 953.037,-829.96, 260.772 };
+ private static final double dySigmBotElecGBL[] = { 23.4856,-108.19, 158.7, 189.261,-682.034, 459.15 };
+
+ private static final double dxMeanTopPosiSeed[] ={ 11.6245,-28.5061, 13.0332, 59.9465,-21.1014,-63.6126 };
+ private static final double dxSigmTopPosiSeed[] ={ 61.5911,-540.596, 2077.22,-3973.22, 3704.45,-1332.07 };
+ private static final double dxMeanBotPosiSeed[] ={ 4.53394, 11.3773,-63.7127,-2.81629, 273.868,-264.709 };
+ private static final double dxSigmBotPosiSeed[] ={ 48.3163,-409.249, 1590.36,-3212.85, 3326.04,-1402.3 };
+
+ private static final double dxMeanTopElecSeed[] ={ 2.14163,-20.8713, 76.3054, 34.894,-340.272, 295.24 };
+ private static final double dxSigmTopElecSeed[] ={ 48.585, -385.166, 1320.26,-2157.45, 1581.06,-366.012 };
+ private static final double dxMeanBotElecSeed[] ={-3.44302, 12.4687, 4.09878,-30.0057,-13.3151, 40.2707 };
+ private static final double dxSigmBotElecSeed[] ={ 48.4089,-385.494, 1341.37,-2271.52, 1814.02,-526.555 };
+
+ private static final double dyMeanTopPosiSeed[] ={-0.527741,10.4944, -18.242,-12.9155, 81.0116,-73.9773 };
+ private static final double dySigmTopPosiSeed[] ={ 37.3097, -357.55, 1607.03,-3709.55, 4282.36,-1957.91 };
+ private static final double dyMeanBotPosiSeed[] ={ 0.74392,-55.2003, 405.04,-1250.64, 1731.47,-887.262 };
+ private static final double dySigmBotPosiSeed[] ={ 25.5776,-199.731, 754.59,-1408.72, 1240.36,-400.912 };
+
+ private static final double dyMeanTopElecSeed[] ={ 2.85429,-24.0858, 69.0145, 34.1213,-297.752, 239.939 };
+ private static final double dySigmTopElecSeed[] ={ 19.9111,-53.2699,-261.915, 1593.2,-2774.01, 1605.54 };
+ private static final double dyMeanBotElecSeed[] ={-9.22963, 98.1346, -427.91, 840.225,-751.188, 250.792 };
+ private static final double dySigmBotElecSeed[] ={ 21.7909,-85.4757,-56.9423, 977.522,-1902.05, 1137.92 };
+ /**
* Z position to start extrapolation from
*/
double extStartPos = 700; // mm
@@ -172,6 +220,122 @@
this.topClusterTrackMatchDeltaYLow = yLow;
this.topClusterTrackMatchDeltaYHigh = yHigh;
}
+
+ /**
+ * Get distance between track and cluster.
+ *
+ * @param cluster
+ * @param track
+ * @return distance between cluster and track
+ */
+ public double getDistance(Cluster cluster,Track track) {
+
+ // Get the cluster position
+ Hep3Vector cPos = new BasicHep3Vector(cluster.getPosition());
+
+ // Extrapolate the track to the Ecal cluster position
+ Hep3Vector tPos = null;
+ if (this.useAnalyticExtrapolator) {
+ tPos = TrackUtils.extrapolateTrack(track, cPos.z());
+ } else {
+ TrackState trackStateAtEcal = TrackUtils.getTrackStateAtECal(track);
+ tPos = new BasicHep3Vector(trackStateAtEcal.getReferencePoint());
+ tPos = CoordinateTransformations.transformVectorToDetector(tPos);
+ }
+
+ return Math.sqrt(Math.pow(cPos.x()-tPos.x(),2)+Math.pow(cPos.y()-tPos.y(),2));
+ }
+
+ /**
+ * Calculate #sigma between cluster-track x/y position at calorimeter.
+ *
+ * Based on Rafo's parameterizations. Requires non-analytic extrapolation
+ * and uncorrected cluster positions.
+ *
+ * @param cluster = position-uncorrected cluster
+ * @param particle recon particle with tracks
+ *
+ * @return #sigma between cluster and track positions
+ */
+ public double getNSigmaPosition(Cluster cluster, ReconstructedParticle particle) {
+
+ if (particle.getTracks().size()<1) return Double.MAX_VALUE;
+ Track track=particle.getTracks().get(0);
+
+ if (this.useAnalyticExtrapolator)
+ throw new RuntimeException("This is to be used with non-analytic extrapolator only.");
+
+ // Get the cluster position:
+ Hep3Vector cPos = new BasicHep3Vector(cluster.getPosition());
+
+ // whether track is in top half of detector:
+ final boolean isTopTrack = track.getTrackStates().get(0).getTanLambda() > 0;
+
+ // ignore if track and cluster in different halves:
+ if (isTopTrack != cPos.y()>0) return Double.MAX_VALUE;
+
+ // Get the extrapolated track position at the calorimeter:
+ TrackState trackStateAtEcal = TrackUtils.getTrackStateAtECal(track);
+ Hep3Vector tPos = new BasicHep3Vector(trackStateAtEcal.getReferencePoint());
+ tPos = CoordinateTransformations.transformVectorToDetector(tPos);
+
+ // whether it's a GBL track:
+ final boolean isGBL = track.getType() >= 32;
+
+ // choose which parameterization of mean and sigma to use:
+ double dxMean[],dyMean[],dxSigm[],dySigm[];
+ if (particle.getCharge()>0) {
+ if (isTopTrack) {
+ dxMean = isGBL ? dxMeanTopPosiGBL : dxMeanTopPosiSeed;
+ dxSigm = isGBL ? dxSigmTopPosiGBL : dxSigmTopPosiSeed;
+ dyMean = isGBL ? dyMeanTopPosiGBL : dyMeanTopPosiSeed;
+ dySigm = isGBL ? dySigmTopPosiGBL : dySigmTopPosiSeed;
+ }
+ else {
+ dxMean = isGBL ? dxMeanBotPosiGBL : dxMeanBotPosiSeed;
+ dxSigm = isGBL ? dxSigmBotPosiGBL : dxSigmBotPosiSeed;
+ dyMean = isGBL ? dyMeanBotPosiGBL : dyMeanBotPosiSeed;
+ dySigm = isGBL ? dySigmBotPosiGBL : dySigmBotPosiSeed;
+ }
+ }
+ else if (particle.getCharge()<0) {
+ if (isTopTrack) {
+ dxMean = isGBL ? dxMeanTopElecGBL : dxMeanTopElecSeed;
+ dxSigm = isGBL ? dxSigmTopElecGBL : dxSigmTopElecSeed;
+ dyMean = isGBL ? dyMeanTopElecGBL : dyMeanTopElecSeed;
+ dySigm = isGBL ? dySigmTopElecGBL : dySigmTopElecSeed;
+ }
+ else {
+ dxMean = isGBL ? dxMeanBotElecGBL : dxMeanBotElecSeed;
+ dxSigm = isGBL ? dxSigmBotElecGBL : dxSigmBotElecSeed;
+ dyMean = isGBL ? dyMeanBotElecGBL : dyMeanBotElecSeed;
+ dySigm = isGBL ? dySigmBotElecGBL : dySigmBotElecSeed;
+ }
+ }
+ else return Double.MAX_VALUE;
+
+ // get particle energy:
+ Hep3Vector p3 = new BasicHep3Vector(track.getTrackStates().get(0).getMomentum());
+ p3 = CoordinateTransformations.transformVectorToDetector(p3);
+ double ee = p3.magnitude();
+
+ // Rafo's parameterization isn't measured above 650 MeV/c but expected to be constant:
+ if (ee > 0.65) ee=0.65;
+
+ // calculate measured mean and sigma of deltaX and deltaY for this energy:
+ double aDxMean=0,aDxSigm=0,aDyMean=0,aDySigm=0;
+ for (int ii=dxMean.length-1; ii>=0; ii--) aDxMean = dxMean[ii] + ee*aDxMean;
+ for (int ii=dxSigm.length-1; ii>=0; ii--) aDxSigm = dxSigm[ii] + ee*aDxSigm;
+ for (int ii=dyMean.length-1; ii>=0; ii--) aDyMean = dyMean[ii] + ee*aDyMean;
+ for (int ii=dySigm.length-1; ii>=0; ii--) aDySigm = dySigm[ii] + ee*aDySigm;
+
+ // calculate nSigma between track and cluster:
+ final double nSigmaX = (cPos.x() - tPos.x() - aDxMean) / aDxSigm;
+ final double nSigmaY = (cPos.y() - tPos.y() - aDyMean) / aDySigm;
+ return Math.sqrt(nSigmaX*nSigmaX + nSigmaY*nSigmaY);
+ //return Math.sqrt( 1 / ( 1/nSigmaX/nSigmaX + 1/nSigmaY/nSigmaY ) );
+ }
+
/**
* Determine if a track is matched to a cluster. Currently, this is
@@ -363,4 +527,16 @@
e.printStackTrace();
}
}
+
+ /**
+ * Class to store track-cluster matching qualities.
+ */
+ public class TrackClusterMatch {
+ private double nSigmaPositionMatch=Double.MAX_VALUE;
+ public TrackClusterMatch(ReconstructedParticle pp, Cluster cc) {
+ nSigmaPositionMatch = getNSigmaPosition(cc,pp);
+ }
+ public double getNSigmaPositionMatch() { return nSigmaPositionMatch; }
+ }
+
}
Modified: java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/BaseSimpleVertexer.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/BaseSimpleVertexer.java (original)
+++ java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/BaseSimpleVertexer.java Wed Apr 27 11:11:32 2016
@@ -12,25 +12,25 @@
public abstract class BaseSimpleVertexer implements SimpleVertexer {
- protected boolean _debug = false;
- protected Vertex _fitted_vertex = null;
+ protected boolean _debug = false;
+ protected Vertex _fitted_vertex = null;
- public BaseSimpleVertexer() {
- }
+ public BaseSimpleVertexer() {
+ }
- @Override
- public abstract void fitVertex();
+ @Override
+ public abstract void fitVertex();
- @Override
- public Vertex getFittedVertex() {
- return _fitted_vertex;
- }
-
- public void clear() {
- _fitted_vertex = null;
- }
-
- public abstract boolean isValid();
-
+ @Override
+ public Vertex getFittedVertex() {
+ return _fitted_vertex;
+ }
+
+ public void clear() {
+ _fitted_vertex = null;
+ }
+
+ public abstract boolean isValid();
+
}
Modified: java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/BilliorVertex.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/BilliorVertex.java (original)
+++ java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/BilliorVertex.java Wed Apr 27 11:11:32 2016
@@ -14,8 +14,8 @@
/**
*
- * @author Mathew Thomas Graham <[log in to unmask]>
- * @version $Id:$
+ * @author Mathew Thomas Graham <[log in to unmask]>
+ * @version $Id:$
*
*/
public class BilliorVertex implements Vertex {
@@ -74,12 +74,12 @@
* @param particle : The ReconstructedParticle Associated with this Vertex
*/
public void setAssociatedParticle(ReconstructedParticle particle){
- this._particle = particle;
+ this._particle = particle;
}
@Override
public boolean isPrimary() {
- return _isPrimary;
+ return _isPrimary;
}
@Override
@@ -94,7 +94,7 @@
@Override
public double getProbability() {
- return _probability;
+ return _probability;
}
@Override
@@ -108,7 +108,7 @@
}
// TODO: These should be pulled out and accessed by their own
- // getter methods.
+ // getter methods.
@Override
public Map<String, Double> getParameters() {
Map<String, Double> pars = new HashMap<String, Double>();
@@ -128,6 +128,6 @@
@Override
public ReconstructedParticle getAssociatedParticle() {
- return _particle;
+ return _particle;
}
}
Modified: java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/BilliorVertexer.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/BilliorVertexer.java (original)
+++ java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/BilliorVertexer.java Wed Apr 27 11:11:32 2016
@@ -16,394 +16,97 @@
/**
* @version $Id: BilliorVertexer.java,v 1.3 2013/03/13 19:24:20 mgraham Exp $
- * @version Vertex tracks using least-squares method laid out by billior etal used in the HPS Java package.
+ * @version Vertex tracks using least-squares method laid out by billior etal
+ * used in the HPS Java package.
*/
public class BilliorVertexer {
// the value of the magnetic field in the vicinity of the vertex
// default is a constant field along the z axis
private boolean _debug = false;
- private double _bField;
- private boolean _beamspotConstraint = true;
- private boolean _targetConstraint = false;
- private double[] _beamSize = {0.001, 0.01, 0.01}; //10um in y and z
+ private final double _bField;
+ private boolean _beamspotConstraint;
+ private boolean _targetConstraint;
+ private String _constraintType;
+ private final double[] _beamSize = {0.001, 0.01, 0.01}; //10um in y and z
+ private final double[] _beamPosition = {0.0, 0.0, 0.0}; //origin
private int _ntracks;
- private List<Matrix> paramList = new ArrayList<Matrix>();
- private List<Matrix> WList = new ArrayList<Matrix>();
- private List<Matrix> DList = new ArrayList<Matrix>();
- private List<Matrix> EList = new ArrayList<Matrix>();
- private Matrix A;
- private Matrix T;
- private List<Matrix> BList = new ArrayList<Matrix>();
- private List<Matrix> CinvList = new ArrayList<Matrix>();
- private List<Matrix> CList = new ArrayList<Matrix>();
- private List<Matrix> UList = new ArrayList<Matrix>();
- private List<Matrix> dqList = new ArrayList<Matrix>();
- private double[] _v0 = {0.0, 0.0, 0.0};
+ private double[] _v0 = {0.0, 0.0, 0.0}; //initial guess for unconstrained vertex fit
// private double[] _vertexPosition = {0., 0.0, 0.0};
- private Matrix _vertexPosition = new BasicMatrix(3, 1);
- private Matrix _covVtx = new BasicMatrix(3, 3);
- private List<Matrix> _pFit = new ArrayList<Matrix>();
+ private Matrix _vertexPosition;
+ private Matrix _covVtx;
+ private List<Matrix> _pFit;
;//theta,phi_v,rho
- private List<Matrix> covVtxMomList = new ArrayList<Matrix>();
- private Matrix[][] covMomList = new Matrix[2][2];//max 2 tracks...just make this bigger for more
+ private List<Matrix> covVtxMomList;
+ private Matrix[][] covMomList;//max 2 tracks...just make this bigger for more
private Matrix _constrainedFit;
private Matrix _constrainedCov;
private double _chiSq;
- private String _constraintType="Unspecified";
- // constructor
- public BilliorVertexer() {
- }
public BilliorVertexer(double bField) {
_bField = bField;
- _constraintType="Unconstrained";
- _beamspotConstraint =false;
+ _constraintType = "Unconstrained";
+ _beamspotConstraint = false;
_targetConstraint = false;
}
-
- public BilliorVertexer(double bField,boolean bsConst, boolean constToBS) {
+
+ public BilliorVertexer(double bField, boolean bsConst, boolean constToBS) {
_bField = bField;
- _beamspotConstraint =bsConst;
+ _beamspotConstraint = bsConst;
_targetConstraint = constToBS;
- if(_beamspotConstraint&&_targetConstraint)
+ if (_beamspotConstraint && _targetConstraint) {
System.out.println("BilliorVertexer::Warning!!! Setting both _beamspotConstraint and _targetConstraint to true!");
- if(_beamspotConstraint)
- _constraintType="BeamspotConstrained";
- if(_targetConstraint)
- _constraintType="TargetConstrained";
+ }
+ if (_beamspotConstraint) {
+ _constraintType = "BeamspotConstrained";
+ }
+ if (_targetConstraint) {
+ _constraintType = "TargetConstrained";
+ }
+ }
+
+ public void setDebug(boolean debug) {
+ _debug = debug;
}
public BilliorVertex fitVertex(List<BilliorTrack> tracks) {
_ntracks = tracks.size();
follow1985Paper(tracks);
- if (_beamspotConstraint)
- addV0fromBSConstraint();
- else if (_targetConstraint)
- constrainV0toBS();
- Map<Integer,Hep3Vector> pFitMap=new HashMap<Integer,Hep3Vector>();
- for(int i=0;i<tracks.size();i++){
- Hep3Vector pFit=new BasicHep3Vector(this.getFittedMomentum(i));
- pFitMap.put(i, pFit);
- }
- Hep3Vector vert=new BasicHep3Vector(_vertexPosition.e(0, 0),_vertexPosition.e(1, 0),_vertexPosition.e(2, 0));
- Hep3Vector vertDet=CoordinateTransformations.transformVectorToDetector(vert);
- SymmetricMatrix covVtxDet=CoordinateTransformations.transformCovarianceToDetector(new SymmetricMatrix( _covVtx));
- return new BilliorVertex(vertDet,covVtxDet,_chiSq,getInvMass(),pFitMap,_constraintType);
- }
-
- public BilliorVertex fitFastVertex(List<BilliorTrack> tracks) {
- _ntracks = tracks.size();
- fastVertex(tracks);
- Hep3Vector vert=new BasicHep3Vector(_vertexPosition.e(0, 0),_vertexPosition.e(1, 0),_vertexPosition.e(2, 0));
- return new BilliorVertex((Hep3Vector)_vertexPosition,_covVtx,_chiSq,getInvMass());
- }
-
- private void calculateCovariance() {
- for (int i = 0; i < _ntracks; i++) {
- BasicMatrix b = (BasicMatrix) BList.get(i);
- BasicMatrix cinv = (BasicMatrix) CinvList.get(i);
- BasicMatrix bt = (BasicMatrix) MatrixOp.transposed(b);
- covVtxMomList.add((MatrixOp.mult(-1, MatrixOp.mult(_covVtx, MatrixOp.mult(b, cinv)))));
- for (int j = 0; j < _ntracks; j++) {
- BasicMatrix bj = (BasicMatrix) BList.get(j);
- BasicMatrix cjinv = (BasicMatrix) CinvList.get(j);
- BasicMatrix tmp = (BasicMatrix) MatrixOp.mult(cinv, MatrixOp.mult(bt, MatrixOp.mult(_covVtx, MatrixOp.mult(bj, cjinv))));
- if (i == j)
- tmp = (BasicMatrix) MatrixOp.add(tmp, cinv);
- covMomList[i][j] = tmp;
- }
- }
- }
-
- private void calculateMomenta() {
-
- for (int i = 0; i < _ntracks; i++) {
- BasicMatrix params = (BasicMatrix) paramList.get(i);
- BasicMatrix b = (BasicMatrix) BList.get(i);
- BasicMatrix cinv = (BasicMatrix) CinvList.get(i);
- BasicMatrix u = (BasicMatrix) UList.get(i);
- //not sure following line is correct...mg 10/21/10
- BasicMatrix CinvU = (BasicMatrix) MatrixOp.mult(cinv, u);
- BasicMatrix CinvBTdV = (BasicMatrix) MatrixOp.mult(-1, MatrixOp.mult(cinv, MatrixOp.mult(MatrixOp.transposed(b), _vertexPosition)));
-// if(_debug)System.out.println(" B = "+b.toString());
-// if(_debug)System.out.println(" cinv = "+cinv.toString());
-// if(_debug)System.out.println(" u = "+u.toString());
-// if(_debug)System.out.println(" CinvU = "+CinvU.toString());
-// if(_debug)System.out.println(" CinvBTdV = "+CinvBTdV.toString());
- BasicMatrix tmpP = (BasicMatrix) MatrixOp.add(CinvBTdV, CinvU);
- tmpP.setElement(0, 0, tmpP.e(0, 0) + params.e(2, 0));
- tmpP.setElement(1, 0, tmpP.e(1, 0) + params.e(3, 0));
- tmpP.setElement(2, 0, tmpP.e(2, 0) + params.e(4, 0));
- _pFit.add(tmpP);
-// if(_debug)System.out.println("Track "+i+" orig parameters = "+params);
-// if(_debug)System.out.println("Track "+i+" deltaP = "+MatrixOp.add(CinvBTdV, CinvU));
-// if(_debug)System.out.println("Track " + i + " _pFit = " + tmpP);
- }
- }
-
- private void calculateVertexPosition() {
- BasicMatrix tmpcov = new BasicMatrix(3, 3);
- BasicMatrix tmp = new BasicMatrix(3, 1);
- for (int i = 0; i < _ntracks; i++) {
- BasicMatrix b = (BasicMatrix) BList.get(i);
- BasicMatrix cinv = (BasicMatrix) CinvList.get(i);
- BasicMatrix u = (BasicMatrix) UList.get(i);
-// if(_debug)System.out.println("Cinv matrix " + cinv.toString());
-// if(_debug)System.out.println("B matrix " + b.toString());
-// if(_debug)System.out.println("U matrix " + u.toString());
- BasicMatrix bt = (BasicMatrix) MatrixOp.transposed(b);
- // if(_debug)System.out.println("Adding this to tmpcov : " + MatrixOp.mult(-1, MatrixOp.mult(b, MatrixOp.mult(cinv, bt))));
- if (i == 0) {
- tmpcov = (BasicMatrix) MatrixOp.mult(-1, MatrixOp.mult(b, MatrixOp.mult(cinv, bt)));
- tmp = (BasicMatrix) MatrixOp.mult(-1, MatrixOp.mult(b, MatrixOp.mult(cinv, u)));
- } else {
- tmpcov = (BasicMatrix) MatrixOp.add(tmpcov, MatrixOp.mult(-1, MatrixOp.mult(b, MatrixOp.mult(cinv, bt))));
- tmp = (BasicMatrix) MatrixOp.add(tmp, MatrixOp.mult(-1, MatrixOp.mult(b, MatrixOp.mult(cinv, u))));
- }
-// if(_debug)System.out.println("tmpCov matrix " + tmpcov.toString());
-// if(_debug)System.out.println("tmp matrix " + tmp.toString());
- }
-//
-// if(_debug)System.out.println("A matrix " + A.toString());
-// if(_debug)System.out.println("tmpCov matrix " + tmpcov.toString());
-// if(_debug)System.out.println("sum of A and tmpCov = " + MatrixOp.add(A, tmpcov).toString());
- _covVtx = MatrixOp.inverse(MatrixOp.add(A, tmpcov));
-// if(_debug)System.out.println("_covVtx matrix " + _covVtx.toString());
-// if(_debug)System.out.println("T matrix " + T.toString());
- _vertexPosition = (BasicMatrix) MatrixOp.mult(_covVtx, MatrixOp.add(T, tmp));
-
- }
-
- private void makeOtherMatrices() {
- BasicMatrix tmpA = new BasicMatrix(3, 3);
- BasicMatrix tmpT = new BasicMatrix(3, 1);
-
- for (int i = 0; i < _ntracks; i++) {
- BasicMatrix tmpD = (BasicMatrix) DList.get(i);
- BasicMatrix tmpE = (BasicMatrix) EList.get(i);
- BasicMatrix dq = (BasicMatrix) dqList.get(i);
- BasicMatrix tmpW = (BasicMatrix) WList.get(i);
-
- if (i == 0) {
- tmpA = (BasicMatrix) MatrixOp.mult(MatrixOp.transposed(tmpD), MatrixOp.mult(tmpW, tmpD));
- tmpT = (BasicMatrix) MatrixOp.mult(MatrixOp.transposed(tmpD), MatrixOp.mult(tmpW, dq));
- } else {
- tmpT = (BasicMatrix) MatrixOp.add(tmpT, MatrixOp.mult(MatrixOp.transposed(tmpD), MatrixOp.mult(tmpW, dq)));
- tmpA = (BasicMatrix) MatrixOp.add(tmpA, MatrixOp.mult(MatrixOp.transposed(tmpD), MatrixOp.mult(tmpW, tmpD)));
- }
- BList.add(MatrixOp.mult(MatrixOp.transposed(tmpD), MatrixOp.mult(tmpW, tmpE)));
- BasicMatrix tmpC = (BasicMatrix) MatrixOp.mult(MatrixOp.transposed(tmpE), MatrixOp.mult(tmpW, tmpE));
- CList.add(tmpC);
- CinvList.add(MatrixOp.inverse(tmpC));
- UList.add(MatrixOp.mult(MatrixOp.transposed(tmpE), MatrixOp.mult(tmpW, dq)));
-
- }
- A = tmpA;
- T = tmpT;
- }
-
- private void calculateChisq() {
- _chiSq = 0;
- for (int i = 0; i < _ntracks; i++) {
- BasicMatrix params = (BasicMatrix) paramList.get(i);
- BasicMatrix d = (BasicMatrix) DList.get(i);
- BasicMatrix e = (BasicMatrix) EList.get(i);
- BasicMatrix w = (BasicMatrix) WList.get(i);
- BasicMatrix pi = (BasicMatrix) _pFit.get(i);
- BasicMatrix Vtilde = (BasicMatrix) MatrixOp.mult(d, _vertexPosition);
- BasicMatrix Trtilde = (BasicMatrix) MatrixOp.mult(e, pi);
- BasicMatrix ptilde = (BasicMatrix) MatrixOp.add(Vtilde, Trtilde);
- // if(_debug)System.out.println("Vtilde = "+Vtilde);
- // if(_debug)System.out.println("Trtilde = "+Trtilde);
- BasicMatrix resid = (BasicMatrix) MatrixOp.add(params, MatrixOp.mult(-1, ptilde));
- BasicMatrix residT = (BasicMatrix) MatrixOp.transposed(resid);
-// if(_debug)System.out.println("ptilde = "+ptilde);
-// if(_debug)System.out.println("params = "+params);
-// if(_debug)System.out.println("resid = "+resid);
-// if(_debug)System.out.println("Covariance = "+MatrixOp.inverse(w));
-// if(_debug)System.out.println("Weight = "+w);
- _chiSq = _chiSq + (MatrixOp.mult(residT, MatrixOp.mult(w, resid))).e(0, 0);
-// if(_debug)System.out.println("_chiSq = "+_chiSq);
- }
- }
-
- private void fastVertex(List<BilliorTrack> tracks) {
- boolean firstTrack = true;
- BasicMatrix sumwi = new BasicMatrix(3, 3);
- BasicMatrix sumwiXi = new BasicMatrix(3, 1);
- BasicMatrix dX = new BasicMatrix(3, 1);
-
- for (BilliorTrack bt : tracks) {
- double[] par = bt.parameters();
-// if(_debug)System.out.println("Track parameters = (" + par[0] + ", " + par[1] + ", " + par[2] + ", " + par[3] + ", " + par[4] + ")");
- double cotth = 1. / tan(par[2]);
- double phiv = par[3];
- double cosf = cos(phiv);
- double sinf = sin(phiv);
-
- double xi = par[0] * sin(par[3]);
- double yi = -par[0] * cos(par[3]);
- double zi = par[1];
-
- dX.setElement(0, 0, xi);
- dX.setElement(1, 0, yi);
- dX.setElement(2, 0, zi);
-
- BasicMatrix tmpD = new BasicMatrix(2, 3);
- tmpD.setElement(0, 0, sinf);
- tmpD.setElement(0, 1, -cosf);
- tmpD.setElement(1, 0, -cotth * cosf);
- tmpD.setElement(1, 1, -cotth * sinf);
- tmpD.setElement(1, 2, 1);
- BasicMatrix trkCov = new BasicMatrix(2, 2);
- trkCov.setElement(0, 0, bt.covariance().e(0, 0));
- trkCov.setElement(0, 1, bt.covariance().e(0, 1));
- trkCov.setElement(1, 0, bt.covariance().e(1, 0));
- trkCov.setElement(1, 1, bt.covariance().e(1, 1));
- BasicMatrix tmpW = (BasicMatrix) MatrixOp.inverse(trkCov);
- BasicMatrix wi = (BasicMatrix) MatrixOp.mult(MatrixOp.transposed(tmpD), MatrixOp.mult(tmpW, tmpD));
- if (firstTrack) {
- sumwi = wi;
- sumwiXi = (BasicMatrix) MatrixOp.mult(wi, dX);
- } else {
- sumwi = (BasicMatrix) MatrixOp.add(sumwi, wi);
- sumwiXi = (BasicMatrix) MatrixOp.add(sumwiXi, MatrixOp.mult(wi, dX));
- }
- firstTrack = false;
- }
- _covVtx = MatrixOp.inverse(sumwi);
- if (_debug)
- System.out.println("fastVertex::_covVtx matrix " + _covVtx.toString());
- _vertexPosition = (BasicMatrix) MatrixOp.mult(_covVtx, sumwiXi);
- _chiSq = 0;
- //get the chisq
- for (BilliorTrack bt : tracks) {
- double[] par = bt.parameters();
-// if(_debug)System.out.println("Track parameters = (" + par[0] + ", " + par[1] + ", " + par[2] + ", " + par[3] + ", " + par[4] + ")");
- double cotth = 1. / tan(par[2]);
- double phiv = par[3];
- double cosf = cos(phiv);
- double sinf = sin(phiv);
-
- double xi = par[0] * sin(par[3]);
- double yi = -par[0] * cos(par[3]);
- double zi = par[1];
- //this is xi - fitted vertex now
- dX.setElement(0, 0, xi - _vertexPosition.e(0, 0));
- dX.setElement(1, 0, yi - _vertexPosition.e(1, 0));
- dX.setElement(2, 0, zi - _vertexPosition.e(2, 0));
-
- BasicMatrix tmpD = new BasicMatrix(2, 3);
- tmpD.setElement(0, 0, sinf);
- tmpD.setElement(0, 1, -cosf);
- tmpD.setElement(1, 0, -cotth * cosf);
- tmpD.setElement(1, 1, -cotth * sinf);
- tmpD.setElement(1, 2, 1);
- BasicMatrix trkCov = new BasicMatrix(2, 2);
- trkCov.setElement(0, 0, bt.covariance().e(0, 0));
- trkCov.setElement(0, 1, bt.covariance().e(0, 1));
- trkCov.setElement(1, 0, bt.covariance().e(1, 0));
- trkCov.setElement(1, 1, bt.covariance().e(1, 1));
- BasicMatrix tmpW = (BasicMatrix) MatrixOp.inverse(trkCov);
- BasicMatrix wi = (BasicMatrix) MatrixOp.mult(MatrixOp.transposed(tmpD), MatrixOp.mult(tmpW, tmpD));
- _chiSq += MatrixOp.mult(MatrixOp.transposed(dX), MatrixOp.mult(wi, dX)).e(0, 0);
- }
- }
-
- private void makeDerivativeMatrices(List<BilliorTrack> tracks) {
-
- //DList.clear();
- //EList.clear();
- //paramList.clear();
- //dqList.clear();
- //WList.clear();
- BasicMatrix dq = new BasicMatrix(5, 1);
- BasicMatrix tmpW = new BasicMatrix(5, 5);
- for (BilliorTrack bt : tracks) {
- double[] par = bt.parameters();
- BasicMatrix tmpPar = new BasicMatrix(5, 1);
- tmpPar.setElement(0, 0, par[0]);
- tmpPar.setElement(1, 0, par[1]);
- tmpPar.setElement(2, 0, par[2]);
- tmpPar.setElement(3, 0, par[3]);
- tmpPar.setElement(4, 0, par[4]);
- paramList.add(tmpPar);
- double cotth = 1. / tan(par[2]);
- double uu = _v0[0] * cos(par[3]) + _v0[1] * sin(par[3]);//Q
- double vv = _v0[1] * cos(par[3]) - _v0[0] * sin(par[3]);//R
- double eps = -vv - .5 * uu * uu * par[4];
- double zp = _v0[2] - uu * (1 - vv * par[4]) * cotth;
- // * phi at vertex with these parameters
- double phiv = par[3] + uu * par[4];
- double cosf = cos(phiv);
- double sinf = sin(phiv);
-
- BasicMatrix tmpD = new BasicMatrix(5, 3);
- tmpD.setElement(0, 0, sinf);
- tmpD.setElement(0, 1, -cosf);
- tmpD.setElement(1, 0, -cotth * cosf);
- tmpD.setElement(1, 1, -cotth * sinf);
- tmpD.setElement(1, 2, 1);
- tmpD.setElement(3, 0, -par[4] * cosf);
- tmpD.setElement(3, 1, -par[4] * sinf);
-
- BasicMatrix tmpE = new BasicMatrix(5, 3);
- tmpE.setElement(0, 1, uu);
- tmpE.setElement(0, 2, -uu * uu / 2);
- tmpE.setElement(1, 0, uu * (1 + cotth * cotth));
- tmpE.setElement(1, 1, -vv * cotth);
- tmpE.setElement(1, 2, uu * vv * cotth);
- tmpE.setElement(3, 1, 1);
- tmpE.setElement(3, 2, -uu);
- tmpE.setElement(2, 0, 1); //partial(theta)/dtheta
- tmpE.setElement(4, 2, 1); //partial (rho)/drho
- DList.add(tmpD);
- EList.add(tmpE);
-
- double deps = par[0] - eps;
- double dzp = par[1] - zp;
- double dphi = par[3] - phiv;
-
- dq.setElement(0, 0, deps);
- dq.setElement(1, 0, dzp);
- dq.setElement(3, 0, dphi);
- dqList.add(dq);
- tmpW = (BasicMatrix) MatrixOp.inverse(bt.covariance());
- WList.add(tmpW);
-
- if (_debug)
- System.out.println("makeDerivativeMatrices::Params = \n" + tmpPar);
- if (_debug)
- System.out.println("D = \n" + tmpD);
- if (_debug)
- System.out.println("E = \n" + tmpE);
- if (_debug)
- System.out.println("dq = \n" + dq);
- if (_debug)
- System.out.println("W = \n" + tmpW);
- }
-
- }
-
- /* Add the constraint that V0 points back to beamspot
+ if (_beamspotConstraint) {
+ applyBSconstraint(true);
+ } else if (_targetConstraint) {
+ applyBSconstraint(false);
+ }
+ Map<Integer, Hep3Vector> pFitMap = new HashMap<Integer, Hep3Vector>();
+ for (int i = 0; i < tracks.size(); i++) {
+ Hep3Vector pFit = new BasicHep3Vector(this.getFittedMomentum(i));
+ pFitMap.put(i, pFit);
+ }
+ Hep3Vector vert = new BasicHep3Vector(_vertexPosition.e(0, 0), _vertexPosition.e(1, 0), _vertexPosition.e(2, 0));
+ Hep3Vector vertDet = CoordinateTransformations.transformVectorToDetector(vert);
+ SymmetricMatrix covVtxDet = CoordinateTransformations.transformCovarianceToDetector(new SymmetricMatrix(_covVtx));
+ return new BilliorVertex(vertDet, covVtxDet, _chiSq, getInvMass(), pFitMap, _constraintType);
+ }
+
+ /* Add the constraint that V0 is at/points back to beamspot
* this method is based on progressive least squares fit
* using the unconstrained fit result as the (k-1) fit
*
* all notation is taken from:
* W. Hulsbergen, NIM 552 (2005) 566-575
*/
- private void addV0fromBSConstraint() {
- BasicMatrix Hk = new BasicMatrix(3 * (_ntracks + 1), 3);
+ private void applyBSconstraint(boolean pointback) {
+ String methodName = pointback ? "constrainV0toBS" : "constrainV0toTarget";
BasicMatrix Ckm1 = new BasicMatrix(3 * (_ntracks + 1), 3 * (_ntracks + 1));
BasicMatrix Xkm1 = new BasicMatrix(3 * (_ntracks + 1), 1);
MatrixOp.setSubMatrix(Ckm1, _covVtx, 0, 0);
MatrixOp.setSubMatrix(Xkm1, _vertexPosition, 0, 0);
int n = 1;
for (Matrix covVtxMom : covVtxMomList) {
- if (_debug)
- System.out.println("addV0fromBSConstraint::Track " + n + " covVtxMom : " + covVtxMom.toString());
+ if (_debug) {
+ System.out.println(methodName + "::Track " + n + " covVtxMom : " + covVtxMom.toString());
+ }
MatrixOp.setSubMatrix(Ckm1, covVtxMom, 0, 3 * n);
MatrixOp.setSubMatrix(Ckm1, MatrixOp.transposed(covVtxMom), 3 * n, 0);
n++;
@@ -411,10 +114,12 @@
for (int i = 0; i < _ntracks; i++) {
BasicMatrix pi = (BasicMatrix) _pFit.get(i);
MatrixOp.setSubMatrix(Xkm1, pi, 3 * (i + 1), 0);
- if (_debug)
- System.out.println("addV0fromBSConstraint::Track " + i + " p : " + pi.toString());
- for (int j = 0; j < _ntracks; j++)
+ if (_debug) {
+ System.out.println(methodName + "::Track " + i + " p : " + pi.toString());
+ }
+ for (int j = 0; j < _ntracks; j++) {
MatrixOp.setSubMatrix(Ckm1, covMomList[i][j], 3 * (i + 1), 3 * (j + 1));
+ }
}
// now calculate the derivative matrix for the beam constraint.
@@ -443,17 +148,96 @@
pztot += pz;
}
//calculate the position of the A' at X=0
- BasicMatrix rk = new BasicMatrix(3, 1);
- if (_debug)
- System.out.println("addV0fromBSConstraint::Vx = " + Vx + "; Vy = " + Vy + "; Vz = " + Vz + "; pxtot = " + pxtot + "; pytot = " + pytot + "; pztot = " + pztot);
- rk.setElement(0, 0, 0);
- rk.setElement(1, 0, 0 - (Vy - pytot / pxtot * Vx));
- rk.setElement(2, 0, 0 - (Vz - pztot / pxtot * Vx));
-
+ BasicMatrix rk = makeRk(Vx, Vy, Vz, pxtot, pytot, pztot, pointback);
+ if (_debug) {
+ System.out.println(methodName + "::rk = " + rk);
+ }
+
+ BasicMatrix Hk = makeHk(Vx, pxtot, pytot, pztot, pointback);
+ if (_debug) {
+ System.out.println(methodName + "::Hk = " + Hk);
+ }
+
+ // the beam covariance
+ BasicMatrix Vk = new BasicMatrix(3, 3);
+ Vk.setElement(0, 0, _beamSize[0] * _beamSize[0]);
+ Vk.setElement(1, 1, _beamSize[1] * _beamSize[1]);
+ Vk.setElement(2, 2, _beamSize[2] * _beamSize[2]);
+
+ //now do the matrix operations to get the constrained parameters
+ BasicMatrix Hkt = (BasicMatrix) MatrixOp.transposed(Hk);
+ if (_debug) {
+ System.out.println(methodName + "::Ckm1Hk = " + MatrixOp.mult(Ckm1, Hk));
+ }
+
+ BasicMatrix Rk = (BasicMatrix) MatrixOp.mult(Hkt, MatrixOp.mult(Ckm1, Hk));
+ if (_debug) {
+ System.out.println("Pre Vk: Rk = " + Rk.toString());
+ }
+ Rk = (BasicMatrix) MatrixOp.add(Rk, Vk);
+ if (_debug) {
+ System.out.println("Post Vk: Rk = " + Rk.toString());
+ }
+ BasicMatrix Rkinv = (BasicMatrix) MatrixOp.inverse(Rk);
+ BasicMatrix Kk = (BasicMatrix) MatrixOp.mult(Ckm1, MatrixOp.mult(Hk, Rkinv));
+
+// if(_debug)System.out.println("Ckm1 = " + Ckm1.toString());
+// if(_debug)System.out.println("Hk = " + Hk.toString());
+// if(_debug)System.out.println("Rk = " + Rk.toString());
+// if(_debug)System.out.println("Vk = " + Vk.toString());
+// if(_debug)System.out.println("rk = " + rk.toString());
+// if(_debug)System.out.println("Kk = " + Kk.toString());
+ _constrainedFit = MatrixOp.mult(Kk, rk);
+ _constrainedFit = MatrixOp.add(_constrainedFit, Xkm1);//Xk
+
+ //ok, get the new covariance
+ BasicMatrix RkKkt = (BasicMatrix) MatrixOp.mult(Rk, MatrixOp.transposed(Kk));
+ BasicMatrix HkCkm1 = (BasicMatrix) MatrixOp.mult(Hkt, Ckm1);
+ RkKkt = (BasicMatrix) MatrixOp.mult(1, RkKkt);
+ HkCkm1 = (BasicMatrix) MatrixOp.mult(-2, HkCkm1);
+ BasicMatrix sumMatrix = (BasicMatrix) MatrixOp.mult(Kk, MatrixOp.add(HkCkm1, RkKkt));
+ _constrainedCov = (BasicMatrix) MatrixOp.add(Ckm1, sumMatrix);
+
+ //update the regular parameter names to the constrained result
+// if(_debug)System.out.println("Without Constraint : " + _vertexPosition.toString());
+// if(_debug)System.out.println("Without Constraint: x= "+_vertexPosition.e(0,0));
+ // if(_debug)System.out.println(_constrainedFit.toString());
+// if(_debug)System.out.println("Without Constraint : " + _covVtx.toString());
+ _vertexPosition = (BasicMatrix) MatrixOp.getSubMatrix(_constrainedFit, 0, 0, 3, 1);
+ _covVtx = (BasicMatrix) MatrixOp.getSubMatrix(_constrainedCov, 0, 0, 3, 3);
+// if(_debug)System.out.println("With Constraint : " + _vertexPosition.toString());
+// if(_debug)System.out.println("With Constraint : " + _covVtx.toString());
+
+ if (_debug) {
+ System.out.println("Constrained vertex: " + _vertexPosition);
+ }
+
+ for (int i = 0; i < _ntracks; i++) {
+ BasicMatrix ptmp = (BasicMatrix) MatrixOp.getSubMatrix(_constrainedFit, 3 * (i + 1), 0, 3, 1);
+ _pFit.set(i, ptmp);
+ }
+
+// if(_debug)System.out.println("Unconstrained chi^2 = "+_chiSq);
+ //ok...add to the chi^2
+ if (_debug) {
+ System.out.println("Chisq contribution: " + MatrixOp.mult(MatrixOp.transposed(rk), MatrixOp.mult(Rkinv, rk)));
+ }
+ _chiSq += MatrixOp.mult(MatrixOp.transposed(rk), MatrixOp.mult(Rkinv, rk)).e(0, 0);
+// if(_debug)System.out.println("Constrained chi^2 = "+_chiSq);
+ }
+
+ private BasicMatrix makeHk(double Vx, double pxtot, double pytot, double pztot, boolean bscon) {
+ BasicMatrix Hk = new BasicMatrix(3 * (_ntracks + 1), 3);
// ok, can set the derivitives wrt to V
- Hk.setElement(0, 0, 0);
- Hk.setElement(0, 1, pytot / pxtot);
- Hk.setElement(0, 2, pztot / pxtot);
+ if (bscon) {
+ Hk.setElement(0, 0, 0);
+ Hk.setElement(0, 1, pytot / pxtot);
+ Hk.setElement(0, 2, pztot / pxtot);
+ } else {
+ Hk.setElement(0, 0, 1);
+ Hk.setElement(0, 1, 0);
+ Hk.setElement(0, 2, 0);
+ }
Hk.setElement(1, 0, 0);
Hk.setElement(1, 1, 1);
Hk.setElement(1, 2, 0);
@@ -467,243 +251,67 @@
double phiv = pi.e(1, 0);
double rho = pi.e(2, 0);
double Pt = Math.abs((1. / rho) * _bField * Constants.fieldConversion);
- double px = Pt * Math.cos(phiv);
- double py = Pt * Math.sin(phiv);
- double pz = Pt * 1 / Math.tan(theta);
+// double px = Pt * Math.cos(phiv);
+// double py = Pt * Math.sin(phiv);
+// double pz = Pt * 1 / Math.tan(theta);
//derivities wrt theta
Hk.setElement(3 * (i + 1), 0, 0);
Hk.setElement(3 * (i + 1), 1, 0);
- Hk.setElement(3 * (i + 1), 2, -Pt / Math.pow(sin(theta), 2) * Vx);
+ if (bscon) {
+ Hk.setElement(3 * (i + 1), 2,
+ -Pt / Math.pow(sin(theta), 2) * (Vx - _beamPosition[0]));
+ } else {
+ Hk.setElement(3 * (i + 1), 2, 0);
+ }
//derivities wrt phi
Hk.setElement(3 * (i + 1) + 1, 0, 0);
- Hk.setElement(3 * (i + 1) + 1, 1,
- (Pt * Pt * cos(phiv) * sin(phiv) / (pxtot * pxtot)) * Vx);
- Hk.setElement(3 * (i + 1) + 1, 2, (Pt * sin(phiv) / (pxtot * pxtot)) * Vx * pztot);
+ if (bscon) {
+ Hk.setElement(3 * (i + 1) + 1, 1,
+ (Pt * Pt * cos(phiv) * sin(phiv) / (pxtot * pxtot)) * (Vx - _beamPosition[0]));
+ Hk.setElement(3 * (i + 1) + 1, 2,
+ (Pt * sin(phiv) / (pxtot * pxtot)) * (Vx - _beamPosition[0]) * pztot);
+ } else {
+ Hk.setElement(3 * (i + 1) + 1, 1, 0);
+ Hk.setElement(3 * (i + 1) + 1, 2, 0);
+ }
//derivities wrt rho
Hk.setElement(3 * (i + 1) + 2, 0, 0);
// Hk.setElement(3 * (i + 1) + 2, 1,
// (pytot / pxtot - 1) * (Pt / rho) * (1 / pxtot) * Vx);
// Hk.setElement(3 * (i + 1) + 2, 2,
// (pztot / pxtot - 1) * (Pt / rho) * (1 / pxtot) * Vx);
- Hk.setElement(3 * (i + 1) + 2, 1,
- (cos(phiv) * pytot / pxtot - sin(phiv)) * (Pt / rho) * (1 / pxtot) * Vx);
- Hk.setElement(3 * (i + 1) + 2, 2,
- (cos(phiv) * pztot / pxtot - sin(phiv)) * (Pt / rho) * (1 / pxtot) * Vx);
+ if (bscon) {
+ Hk.setElement(3 * (i + 1) + 2, 1,
+ (cos(phiv) * pytot / pxtot - sin(phiv)) * (Pt / rho) * (1 / pxtot) * (Vx - _beamPosition[0]));
+ Hk.setElement(3 * (i + 1) + 2, 2,
+ (cos(phiv) * pztot / pxtot - sin(phiv)) * (Pt / rho) * (1 / pxtot) * (Vx - _beamPosition[0]));
+ } else {
+ Hk.setElement(3 * (i + 1) + 2, 1, 0);
+ Hk.setElement(3 * (i + 1) + 2, 2, 0);
+ }
// if(_debug)System.out.println("pxtot = "+pxtot+"; rho = "+rho+"; Pt = "+Pt);
// if(_debug)System.out.println("cos(phiv)*pytot / pxtot - sin(phiv) = "+(cos(phiv)*pytot / pxtot - sin(phiv)));
// if(_debug)System.out.println("Pt/(rho*pxtot) = "+(Pt / rho) * (1 / pxtot));
}
- // the beam covariance
- BasicMatrix Vk = new BasicMatrix(3, 3);
- Vk.setElement(0, 0, _beamSize[0] * _beamSize[0]);
- Vk.setElement(1, 1, _beamSize[1] * _beamSize[1]);
- Vk.setElement(2, 2, _beamSize[2] * _beamSize[2]);
-
- //now do the matrix operations to get the constrained parameters
- BasicMatrix Hkt = (BasicMatrix) MatrixOp.transposed(Hk);
- if (_debug)
- System.out.println("addV0fromBSConstraint::Ckm1Hk = " + MatrixOp.mult(Ckm1, Hk));
-
- BasicMatrix Rk = (BasicMatrix) MatrixOp.mult(Hkt, MatrixOp.mult(Ckm1, Hk));
- if (_debug)
- System.out.println("Pre Vk: Rk = " + Rk.toString());
- Rk = (BasicMatrix) MatrixOp.add(Rk, Vk);
- if (_debug)
- System.out.println("Post Vk: Rk = " + Rk.toString());
- BasicMatrix Rkinv = (BasicMatrix) MatrixOp.inverse(Rk);
- BasicMatrix Kk = (BasicMatrix) MatrixOp.mult(Ckm1, MatrixOp.mult(Hk, Rkinv));
-
-// if(_debug)System.out.println("Ckm1 = " + Ckm1.toString());
-// if(_debug)System.out.println("Hk = " + Hk.toString());
-// if(_debug)System.out.println("Rk = " + Rk.toString());
-// if(_debug)System.out.println("Vk = " + Vk.toString());
-// if(_debug)System.out.println("rk = " + rk.toString());
-// if(_debug)System.out.println("Kk = " + Kk.toString());
- _constrainedFit = MatrixOp.mult(Kk, rk);
- _constrainedFit = MatrixOp.add(_constrainedFit, Xkm1);//Xk
-
- //ok, get the new covariance
- BasicMatrix RkKkt = (BasicMatrix) MatrixOp.mult(Rk, MatrixOp.transposed(Kk));
- BasicMatrix HkCkm1 = (BasicMatrix) MatrixOp.mult(Hkt, Ckm1);
- RkKkt = (BasicMatrix) MatrixOp.mult(1, RkKkt);
- HkCkm1 = (BasicMatrix) MatrixOp.mult(-2, HkCkm1);
- BasicMatrix sumMatrix = (BasicMatrix) MatrixOp.mult(Kk, MatrixOp.add(HkCkm1, RkKkt));
- _constrainedCov = (BasicMatrix) MatrixOp.add(Ckm1, sumMatrix);
-
- //update the regular parameter names to the constrained result
-// if(_debug)System.out.println("Without Constraint : " + _vertexPosition.toString());
-// if(_debug)System.out.println("Without Constraint: x= "+_vertexPosition.e(0,0));
- // if(_debug)System.out.println(_constrainedFit.toString());
-// if(_debug)System.out.println("Without Constraint : " + _covVtx.toString());
- _vertexPosition = (BasicMatrix) MatrixOp.getSubMatrix(_constrainedFit, 0, 0, 3, 1);
- _covVtx = (BasicMatrix) MatrixOp.getSubMatrix(_constrainedCov, 0, 0, 3, 3);
-// if(_debug)System.out.println("With Constraint : " + _vertexPosition.toString());
-// if(_debug)System.out.println("With Constraint : " + _covVtx.toString());
-
- for (int i = 0; i < _ntracks; i++) {
- BasicMatrix ptmp = (BasicMatrix) MatrixOp.getSubMatrix(_constrainedFit, 3 * (i + 1), 0, 3, 1);
- _pFit.set(i, ptmp);
- }
-
-// if(_debug)System.out.println("Unconstrained chi^2 = "+_chiSq);
- //ok...add to the chi^2
- if (_debug)
- System.out.println(MatrixOp.mult(MatrixOp.transposed(rk), MatrixOp.mult(Rkinv, rk)));
- _chiSq += MatrixOp.mult(MatrixOp.transposed(rk), MatrixOp.mult(Rkinv, rk)).e(0, 0);
-// if(_debug)System.out.println("Constrained chi^2 = "+_chiSq);
- }
-
- private void constrainV0toBS() {
- BasicMatrix Hk = new BasicMatrix(3 * (_ntracks + 1), 3);
- BasicMatrix Ckm1 = new BasicMatrix(3 * (_ntracks + 1), 3 * (_ntracks + 1));
- BasicMatrix Xkm1 = new BasicMatrix(3 * (_ntracks + 1), 1);
- MatrixOp.setSubMatrix(Ckm1, _covVtx, 0, 0);
- MatrixOp.setSubMatrix(Xkm1, _vertexPosition, 0, 0);
-
- int n = 1;
- for (Matrix covVtxMom : covVtxMomList) {
- if (_debug)
- System.out.println("constrainV0toBS::Track " + n + " covVtxMom : " + covVtxMom.toString());
- MatrixOp.setSubMatrix(Ckm1, covVtxMom, 0, 3 * n);
- MatrixOp.setSubMatrix(Ckm1, MatrixOp.transposed(covVtxMom), 3 * n, 0);
- n++;
- }
- for (int i = 0; i < _ntracks; i++) {
- BasicMatrix pi = (BasicMatrix) _pFit.get(i);
- MatrixOp.setSubMatrix(Xkm1, pi, 3 * (i + 1), 0);
- // if(_debug)System.out.println("Track "+i+" p : " + pi.toString());
- for (int j = 0; j < _ntracks; j++)
- MatrixOp.setSubMatrix(Ckm1, covMomList[i][j], 3 * (i + 1), 3 * (j + 1));
- }
- // now calculate the derivative matrix for the beam constraint.
- // the beamspot is assumed to be at bvec=(0,0,0)
- // the V0 production position is Vbvec=(0,0,0)
- // where ptot=sum_i (pi)
- // need derivites wrt to the vertex position and momentum (theta,phi_v,rho)
- double Vx = _vertexPosition.e(0, 0);
- double Vy = _vertexPosition.e(1, 0);
- double Vz = _vertexPosition.e(2, 0);
- //first, get the sum of momenta...
- double pxtot = 0;
- double pytot = 0;
- double pztot = 0;
- for (int i = 0; i < _ntracks; i++) {
- BasicMatrix pi = (BasicMatrix) _pFit.get(i);
- double theta = pi.e(0, 0);
- double phiv = pi.e(1, 0);
- double rho = pi.e(2, 0);
- double Pt = Math.abs((1. / rho) * _bField * Constants.fieldConversion);
- double px = Pt * Math.cos(phiv);
- double py = Pt * Math.sin(phiv);
- double pz = Pt * 1 / Math.tan(theta);
- pxtot += px;
- pytot += py;
- pztot += pz;
- }
+ return Hk;
+ }
+
+ private BasicMatrix makeRk(double Vx, double Vy, double Vz, double pxtot, double pytot, double pztot, boolean bscon) {
//calculate the position of the A' at X=0
BasicMatrix rk = new BasicMatrix(3, 1);
- // if(_debug)System.out.println("Vx = " + Vx + "; Vy = " + Vy + "; Vz = " + Vz + "; pxtot = " + pxtot + "; pytot = " + pytot + "; pztot = " + pztot);
- rk.setElement(0, 0, -Vx);
- rk.setElement(1, 0, -Vy);
- rk.setElement(2, 0, -Vz);
-
-// ok, can set the derivitives wrt to V
- Hk.setElement(0, 0, 1);
- Hk.setElement(0, 1, 0);
- Hk.setElement(0, 2, 0);
- Hk.setElement(1, 0, 0);
- Hk.setElement(1, 1, 1);
- Hk.setElement(1, 2, 0);
- Hk.setElement(2, 0, 0);
- Hk.setElement(2, 1, 0);
- Hk.setElement(2, 2, 1);
-//ok, loop over tracks again to set the derivitives wrt track momenta (theta,phi,rho)
- for (int i = 0; i < _ntracks; i++) {
- BasicMatrix pi = (BasicMatrix) _pFit.get(i);
- double theta = pi.e(0, 0);
- double phiv = pi.e(1, 0);
- double rho = pi.e(2, 0);
- double Pt = Math.abs((1. / rho) * _bField * Constants.fieldConversion);
- double px = Pt * Math.cos(phiv);
- double py = Pt * Math.sin(phiv);
- double pz = Pt * 1 / Math.tan(theta);
- //derivities wrt theta
- Hk.setElement(3 * (i + 1), 0, 0);
- Hk.setElement(3 * (i + 1), 1, 0);
- Hk.setElement(3 * (i + 1), 2, 0);
- //derivities wrt phi
- Hk.setElement(3 * (i + 1) + 1, 0, 0);
- Hk.setElement(3 * (i + 1) + 1, 1,
- 0);
- Hk.setElement(3 * (i + 1) + 1, 2, 0);
- //derivities wrt rho
- Hk.setElement(3 * (i + 1) + 2, 0, 0);
-// Hk.setElement(3 * (i + 1) + 2, 1,
-// (pytot / pxtot - 1) * (Pt / rho) * (1 / pxtot) * Vx);
-// Hk.setElement(3 * (i + 1) + 2, 2,
-// (pztot / pxtot - 1) * (Pt / rho) * (1 / pxtot) * Vx);
- Hk.setElement(3 * (i + 1) + 2, 1,
- 0);
- Hk.setElement(3 * (i + 1) + 2, 2,
- 0);
- // if(_debug)System.out.println("pxtot = "+pxtot+"; rho = "+rho+"; Pt = "+Pt);
- // if(_debug)System.out.println("cos(phiv)*pytot / pxtot - sin(phiv) = "+(cos(phiv)*pytot / pxtot - sin(phiv)));
- // if(_debug)System.out.println("Pt/(rho*pxtot) = "+(Pt / rho) * (1 / pxtot));
- }
- // the beam covariance
- BasicMatrix Vk = new BasicMatrix(3, 3);
- Vk.setElement(0, 0, _beamSize[0] * _beamSize[0]);
- Vk.setElement(1, 1, _beamSize[1] * _beamSize[1]);
- Vk.setElement(2, 2, _beamSize[2] * _beamSize[2]);
-
- //now do the matrix operations to get the constrained parameters
- BasicMatrix Hkt = (BasicMatrix) MatrixOp.transposed(Hk);
-// if(_debug)System.out.println("Ckm1Hk = " + MatrixOp.mult(Ckm1, Hk));
-
- BasicMatrix Rk = (BasicMatrix) MatrixOp.mult(Hkt, MatrixOp.mult(Ckm1, Hk));
-// if(_debug)System.out.println("Pre Vk: Rk = " + Rk.toString());
- Rk = (BasicMatrix) MatrixOp.add(Rk, Vk);
- BasicMatrix Rkinv = (BasicMatrix) MatrixOp.inverse(Rk);
- BasicMatrix Kk = (BasicMatrix) MatrixOp.mult(Ckm1, MatrixOp.mult(Hk, Rkinv));
-
-// if(_debug)System.out.println("Ckm1 = " + Ckm1.toString());
-// if(_debug)System.out.println("Hk = " + Hk.toString());
-// if(_debug)System.out.println("Rk = " + Rk.toString());
-// if(_debug)System.out.println("Vk = " + Vk.toString());
-// if(_debug)System.out.println("rk = " + rk.toString());
-// if(_debug)System.out.println("Kk = " + Kk.toString());
- _constrainedFit = MatrixOp.mult(Kk, rk);
- _constrainedFit = MatrixOp.add(_constrainedFit, Xkm1);//Xk
-
- //ok, get the new covariance
- BasicMatrix RkKkt = (BasicMatrix) MatrixOp.mult(Rk, MatrixOp.transposed(Kk));
- BasicMatrix HkCkm1 = (BasicMatrix) MatrixOp.mult(Hkt, Ckm1);
- RkKkt = (BasicMatrix) MatrixOp.mult(1, RkKkt);
- HkCkm1 = (BasicMatrix) MatrixOp.mult(-2, HkCkm1);
- BasicMatrix sumMatrix = (BasicMatrix) MatrixOp.mult(Kk, MatrixOp.add(HkCkm1, RkKkt));
- _constrainedCov = (BasicMatrix) MatrixOp.add(Ckm1, sumMatrix);
-
- //update the regular parameter names to the constrained result
-// if(_debug)System.out.println("Without Constraint : " + _vertexPosition.toString());
-// if(_debug)System.out.println("Without Constraint: x= "+_vertexPosition.e(0,0));
- // if(_debug)System.out.println(_constrainedFit.toString());
-// if(_debug)System.out.println("Without Constraint : " + _covVtx.toString());
- _vertexPosition = (BasicMatrix) MatrixOp.getSubMatrix(_constrainedFit, 0, 0, 3, 1);
- _covVtx = (BasicMatrix) MatrixOp.getSubMatrix(_constrainedCov, 0, 0, 3, 3);
-// if(_debug)System.out.println("With Constraint : " + _vertexPosition.toString());
-// if(_debug)System.out.println("With Constraint : " + _covVtx.toString());
-
- for (int i = 0; i < _ntracks; i++) {
- BasicMatrix ptmp = (BasicMatrix) MatrixOp.getSubMatrix(_constrainedFit, 3 * (i + 1), 0, 3, 1);
- _pFit.set(i, ptmp);
- }
-
-// if(_debug)System.out.println("Unconstrained chi^2 = "+_chiSq);
- //ok...add to the chi^2
- if (_debug)
- System.out.println(MatrixOp.mult(MatrixOp.transposed(rk), MatrixOp.mult(Rkinv, rk)));
- _chiSq += MatrixOp.mult(MatrixOp.transposed(rk), MatrixOp.mult(Rkinv, rk)).e(0, 0);
-// if(_debug)System.out.println("Constrained chi^2 = "+_chiSq);
+ if (_debug) {
+ System.out.println("makeRk::Vx = " + Vx + "; Vy = " + Vy + "; Vz = " + Vz + "; pxtot = " + pxtot + "; pytot = " + pytot + "; pztot = " + pztot);
+ }
+ if (bscon) {
+ rk.setElement(0, 0, 0);
+ rk.setElement(1, 0, _beamPosition[1] - (Vy - pytot / pxtot * (Vx - _beamPosition[0])));
+ rk.setElement(2, 0, _beamPosition[2] - (Vz - pztot / pxtot * (Vx - _beamPosition[0])));
+ } else {
+ rk.setElement(0, 0, _beamPosition[0] - Vx);
+ rk.setElement(1, 0, _beamPosition[1] - Vy);
+ rk.setElement(2, 0, _beamPosition[2] - Vz);
+ }
+ return rk;
}
public void setV0(double[] v0) {
@@ -716,15 +324,23 @@
_beamSize[2] = bs[2];
}
+ public void setBeamPosition(double[] bp) {
+ _beamPosition[0] = bp[0];
+ _beamPosition[1] = bp[1];
+ _beamPosition[2] = bp[2];
+ }
+
public void doBeamSpotConstraint(boolean bsconst) {
_beamspotConstraint = bsconst;
- _constraintType="BeamspotConstrained";
-
+ _targetConstraint = false;
+ _constraintType = "BeamspotConstrained";
+
}
public void doTargetConstraint(boolean bsconst) {
+ _beamspotConstraint = false;
_targetConstraint = bsconst;
- _constraintType="TargetConstrained";
+ _constraintType = "TargetConstrained";
}
public double getChiSq() {
@@ -742,12 +358,12 @@
mom[0] = Pt * Math.cos(phiv);
mom[1] = Pt * Math.sin(phiv);
mom[2] = Pt * 1 / Math.tan(theta);
- if (_debug){
- System.out.println("getFittedMomentum:: "+mom[0] + "; " + mom[1] + "; " + mom[2]);
-
- System.out.println("pT= "+Pt+"; phi = "+phiv+"; B = "+ _bField);
- }
- return mom;
+ if (_debug) {
+ System.out.println("getFittedMomentum:: " + mom[0] + "; " + mom[1] + "; " + mom[2]);
+
+ System.out.println("pT= " + Pt + "; phi = " + phiv + "; B = " + _bField);
+ }
+ return mom;
}
private double getInvMass() {
@@ -772,15 +388,17 @@
double psum = Math.sqrt(pxsum * pxsum + pysum * pysum + pzsum * pzsum);
double evtmass = esum * esum - psum * psum;
- if (evtmass > 0)
+ if (evtmass > 0) {
return Math.sqrt(evtmass);
- else
+ } else {
return -99;
- }
-
+ }
+ }
+
+ @Override
public String toString() {
- StringBuffer sb = new StringBuffer("Vertex at : \nx= " + _vertexPosition.e(0, 0) + " +/- " + Math.sqrt(_covVtx.e(0, 0)) + "\ny= " + _vertexPosition.e(1, 0) + " +/- " + Math.sqrt(_covVtx.e(1, 1)) + "\nz= " + _vertexPosition.e(2, 0) + " +/- " + Math.sqrt(_covVtx.e(2, 2)));
- return sb.toString();
+ String sb = "Vertex at : \nx= " + _vertexPosition.e(0, 0) + " +/- " + Math.sqrt(_covVtx.e(0, 0)) + "\ny= " + _vertexPosition.e(1, 0) + " +/- " + Math.sqrt(_covVtx.e(1, 1)) + "\nz= " + _vertexPosition.e(2, 0) + " +/- " + Math.sqrt(_covVtx.e(2, 2));
+ return sb;
}
private void follow1985Paper(List<BilliorTrack> tracks) {
@@ -789,8 +407,8 @@
v0.setElement(0, 0, _v0[0]);
v0.setElement(1, 0, _v0[1]);
v0.setElement(2, 0, _v0[2]);
- List<Matrix> params = new ArrayList<Matrix>();
- List<Matrix> q0s = new ArrayList<Matrix>();
+// List<Matrix> params = new ArrayList<Matrix>();
+// List<Matrix> q0s = new ArrayList<Matrix>();
List<Matrix> Gs = new ArrayList<Matrix>();
List<Matrix> Ds = new ArrayList<Matrix>();
List<Matrix> Es = new ArrayList<Matrix>();
@@ -809,13 +427,11 @@
tmpPar.setElement(2, 0, par[2]);
tmpPar.setElement(3, 0, par[3]);
tmpPar.setElement(4, 0, par[4]);
- params.add(tmpPar);
+// params.add(tmpPar);
double theta = par[2];
- double phiv = par[3];
+// double phiv = par[3];
double rho = par[4];
- double Pt = Math.abs((1. / rho) * _bField * Constants.fieldConversion);
-
-
+// double Pt = Math.abs((1. / rho) * _bField * Constants.fieldConversion);
double cotth = 1. / tan(par[2]);
double uu = v0.e(0, 0) * cos(par[3]) + v0.e(1, 0) * sin(par[3]);//Q
@@ -833,15 +449,15 @@
BasicMatrix q0 = new BasicMatrix(3, 1);
/* this looks just wrong...
- q0.setElement(0, 0, Pt * Math.cos(phiv));
- q0.setElement(1, 0, Pt * Math.sin(phiv));
- q0.setElement(2, 0, Pt * 1 / Math.tan(theta));
- q0s.add(q0);
+ q0.setElement(0, 0, Pt * Math.cos(phiv));
+ q0.setElement(1, 0, Pt * Math.sin(phiv));
+ q0.setElement(2, 0, Pt * 1 / Math.tan(theta));
+ q0s.add(q0);
*/
q0.setElement(0, 0, theta);
q0.setElement(1, 0, phiVert);
q0.setElement(2, 0, rho);
- q0s.add(q0);
+// q0s.add(q0);
double cosf = cos(phiVert);
double sinf = sin(phiVert);
@@ -875,10 +491,11 @@
BasicMatrix tmpG = (BasicMatrix) MatrixOp.inverse(bt.covariance());
Gs.add(tmpG);
- if (firstTrack)
+ if (firstTrack) {
D0 = (BasicMatrix) MatrixOp.mult(MatrixOp.transposed(tmpA), MatrixOp.mult(tmpG, tmpA));
- else
+ } else {
D0 = (BasicMatrix) MatrixOp.add(D0, MatrixOp.mult(MatrixOp.transposed(tmpA), MatrixOp.mult(tmpG, tmpA)));
+ }
BasicMatrix tmpDi = (BasicMatrix) MatrixOp.mult(MatrixOp.transposed(tmpA), MatrixOp.mult(tmpG, tmpB));
BasicMatrix tmpEi = (BasicMatrix) MatrixOp.mult(MatrixOp.transposed(tmpB), MatrixOp.mult(tmpG, tmpB));
@@ -904,19 +521,21 @@
BasicMatrix beIbTg = (BasicMatrix) MatrixOp.mult(b, MatrixOp.mult(MatrixOp.inverse(e), MatrixOp.mult(MatrixOp.transposed(b), g)));
BasicMatrix MinusaTgbeIbTg = (BasicMatrix) MatrixOp.mult(-1, MatrixOp.mult(aTg, beIbTg));
- if (firstTrack)
+ if (firstTrack) {
bigsum = (BasicMatrix) MatrixOp.mult(MatrixOp.add(aTg, MinusaTgbeIbTg), p);
- else
+ } else {
bigsum = (BasicMatrix) MatrixOp.add(bigsum, MatrixOp.mult(MatrixOp.add(aTg, MinusaTgbeIbTg), p));
+ }
}
BasicMatrix covVtx = (BasicMatrix) MatrixOp.inverse(tmpCovVtx);
BasicMatrix xtilde = (BasicMatrix) MatrixOp.mult(covVtx, bigsum);
- if (_debug)
+ if (_debug) {
System.out.println("follow1985Paper::Vertex at : \nx= " + xtilde.e(0, 0) + " +/- " + Math.sqrt(covVtx.e(0, 0)) + "\ny= " + xtilde.e(1, 0) + " +/- " + Math.sqrt(covVtx.e(1, 1)) + "\nz= " + xtilde.e(2, 0) + " +/- " + Math.sqrt(covVtx.e(2, 2)));
+ }
//ok, now the momentum
- List<Matrix> qtildes = new ArrayList<Matrix>();
- List<Matrix> ptildes = new ArrayList<Matrix>();
+// List<Matrix> qtildes = new ArrayList<Matrix>();
+// List<Matrix> ptildes = new ArrayList<Matrix>();
List<Matrix> C0j = new ArrayList<Matrix>();
List<Matrix> pfit = new ArrayList<Matrix>();
Matrix[][] Cij = new Matrix[2][2];//max 2 tracks...just make this bigger for more
@@ -934,32 +553,38 @@
BasicMatrix second = (BasicMatrix) MatrixOp.mult(MatrixOp.inverse(e), MatrixOp.mult(MatrixOp.transposed(b), g));
second = (BasicMatrix) MatrixOp.mult(second, p);
BasicMatrix qtilde = (BasicMatrix) MatrixOp.add(first, second);
- qtildes.add(qtilde);
+// qtildes.add(qtilde);
BasicMatrix ptilde = (BasicMatrix) MatrixOp.add(MatrixOp.mult(a, xtilde), MatrixOp.mult(b, qtilde));
- ptildes.add(ptilde);
+// ptildes.add(ptilde);
chisq += MatrixOp.mult(MatrixOp.transposed(MatrixOp.add(p, MatrixOp.mult(-1, ptilde))), MatrixOp.mult(g, MatrixOp.add(p, MatrixOp.mult(-1, ptilde)))).e(0, 0);
- if (_debug)
+ if (_debug) {
System.out.println("\n\nfollow1985Paper::Track #" + j);
- if (_debug)
+ }
+ if (_debug) {
System.out.println("eps(meas) = " + p.e(0, 0) + " eps(fit) =" + ptilde.e(0, 0));
- if (_debug)
+ }
+ if (_debug) {
System.out.println("zp(meas) = " + p.e(1, 0) + " zp(fit) =" + ptilde.e(1, 0));
- if (_debug)
+ }
+ if (_debug) {
System.out.println("theta(meas) = " + p.e(2, 0) + " theta(fit) =" + ptilde.e(2, 0));
- if (_debug)
+ }
+ if (_debug) {
System.out.println("phi(meas) = " + p.e(3, 0) + " phi(fit) =" + ptilde.e(3, 0));
- if (_debug)
+ }
+ if (_debug) {
System.out.println("rho(meas) = " + p.e(4, 0) + " rho(fit) =" + ptilde.e(4, 0));
+ }
BasicMatrix tmpC0j = (BasicMatrix) MatrixOp.mult(-1, MatrixOp.mult(covVtx, MatrixOp.mult(d, MatrixOp.inverse(e))));
C0j.add(tmpC0j);
for (int i = 0; i < _ntracks; i++) {
- BasicMatrix ai = (BasicMatrix) As.get(i);
- BasicMatrix bi = (BasicMatrix) Bs.get(i);
- BasicMatrix di = (BasicMatrix) Ds.get(i);
- BasicMatrix ei = (BasicMatrix) Es.get(i);
- BasicMatrix gi = (BasicMatrix) Gs.get(i);
- BasicMatrix pi = (BasicMatrix) pis.get(i);
+// BasicMatrix ai = (BasicMatrix) As.get(i);
+// BasicMatrix bi = (BasicMatrix) Bs.get(i);
+// BasicMatrix di = (BasicMatrix) Ds.get(i);
+// BasicMatrix ei = (BasicMatrix) Es.get(i);
+// BasicMatrix gi = (BasicMatrix) Gs.get(i);
+// BasicMatrix pi = (BasicMatrix) pis.get(i);
BasicMatrix tmpCij = (BasicMatrix) MatrixOp.mult(-1, MatrixOp.mult(MatrixOp.inverse(e), MatrixOp.mult(MatrixOp.transposed(d), tmpC0j)));
Cij[i][j] = tmpCij;
}
@@ -970,8 +595,9 @@
pfit.add(tmppfit);
}
- if (_debug)
+ if (_debug) {
System.out.println("follow1985Paper::chi^2 = " + chisq);
+ }
_chiSq = chisq;
_covVtx = covVtx;
Modified: java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/TwoLineVertexer.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/TwoLineVertexer.java (original)
+++ java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/TwoLineVertexer.java Wed Apr 27 11:11:32 2016
@@ -20,21 +20,21 @@
*/
public class TwoLineVertexer extends BaseSimpleVertexer {
protected Hep3Vector A1,A2,B1,B2;
-
- public TwoLineVertexer() {
-
- }
-
+
+ public TwoLineVertexer() {
+
+ }
+
public void setLines(Hep3Vector PA1, Hep3Vector PA2, Hep3Vector PB1, Hep3Vector PB2) {
- this.A1 = PA1;
- this.A2 = PA2;
- this.B1 = PB1;
- this.B2 = PB2;
+ this.A1 = PA1;
+ this.A2 = PA2;
+ this.B1 = PB1;
+ this.B2 = PB2;
}
public void clear() {
- super.clear();
- setLines(null,null,null,null);
+ super.clear();
+ setLines(null,null,null,null);
}
public boolean isValid() {
@@ -44,11 +44,11 @@
@Override
public void fitVertex() {
- assert isValid();
- Hep3Vector vtxPosition = getPOCALineToLine();
- if (vtxPosition!=null) {
- _fitted_vertex = new BaseVertex(true, "Two Line Vertexer", 0, 0, new SymmetricMatrix(0), vtxPosition, null);
- }
+ assert isValid();
+ Hep3Vector vtxPosition = getPOCALineToLine();
+ if (vtxPosition!=null) {
+ _fitted_vertex = new BaseVertex(true, "Two Line Vertexer", 0, 0, new SymmetricMatrix(0), vtxPosition, null);
+ }
}
/**
@@ -94,57 +94,57 @@
*/
private Hep3Vector getPOCALineToLine() {
- if(_debug) System.out.printf("%s: A1=%s A2=%s B1=%s B2=%s\n", this.getClass().getSimpleName(), A1.toString(), A2.toString(), B1.toString(), B2.toString());
+ if(_debug) System.out.printf("%s: A1=%s A2=%s B1=%s B2=%s\n", this.getClass().getSimpleName(), A1.toString(), A2.toString(), B1.toString(), B2.toString());
- double ya[][] = {VecOp.mult(-1,VecOp.sub(B1, A1)).v()};
- BasicMatrix y = (BasicMatrix)MatrixOp.transposed(new BasicMatrix(ya));
- Hep3Vector dB = VecOp.sub(B2, B1);
- Hep3Vector dA = VecOp.sub(A2, A1);
- BasicMatrix X = new BasicMatrix(3,2);
- for(int col=0;col<2;++col) {
- if(col==0) {
- X.setElement(0, col, dB.x());
- X.setElement(1, col, dB.y());
- X.setElement(2, col, dB.z());
- } else {
- X.setElement(0, col, -1*dA.x());
- X.setElement(1, col, -1*dA.y());
- X.setElement(2, col, -1*dA.z());
- }
- }
+ double ya[][] = {VecOp.mult(-1,VecOp.sub(B1, A1)).v()};
+ BasicMatrix y = (BasicMatrix)MatrixOp.transposed(new BasicMatrix(ya));
+ Hep3Vector dB = VecOp.sub(B2, B1);
+ Hep3Vector dA = VecOp.sub(A2, A1);
+ BasicMatrix X = new BasicMatrix(3,2);
+ for(int col=0;col<2;++col) {
+ if(col==0) {
+ X.setElement(0, col, dB.x());
+ X.setElement(1, col, dB.y());
+ X.setElement(2, col, dB.z());
+ } else {
+ X.setElement(0, col, -1*dA.x());
+ X.setElement(1, col, -1*dA.y());
+ X.setElement(2, col, -1*dA.z());
+ }
+ }
- BasicMatrix X_T = (BasicMatrix)MatrixOp.transposed(X);
- BasicMatrix XX_T = (BasicMatrix)MatrixOp.mult(X_T, X);
- BasicMatrix IXX_T = null;
- try {
- IXX_T = (BasicMatrix)MatrixOp.inverse(XX_T);
- }
- catch(MatrixOp.IndeterminateMatrixException e) {
- System.out.printf("%s: caught indeterminate exception %s\n",this.getClass().getSimpleName(),e.getMessage());
- return null;
- }
- BasicMatrix X_Ty = (BasicMatrix)MatrixOp.mult(X_T,y);
- BasicMatrix b = (BasicMatrix)MatrixOp.mult(IXX_T, X_Ty);
- double t = b.e(0, 0);
- double s = b.e(1, 0);
- Hep3Vector Bpca = VecOp.add(B1, VecOp.mult(t, dB));
- Hep3Vector Apca = VecOp.add(A1, VecOp.mult(s, dA));
+ BasicMatrix X_T = (BasicMatrix)MatrixOp.transposed(X);
+ BasicMatrix XX_T = (BasicMatrix)MatrixOp.mult(X_T, X);
+ BasicMatrix IXX_T = null;
+ try {
+ IXX_T = (BasicMatrix)MatrixOp.inverse(XX_T);
+ }
+ catch(MatrixOp.IndeterminateMatrixException e) {
+ System.out.printf("%s: caught indeterminate exception %s\n",this.getClass().getSimpleName(),e.getMessage());
+ return null;
+ }
+ BasicMatrix X_Ty = (BasicMatrix)MatrixOp.mult(X_T,y);
+ BasicMatrix b = (BasicMatrix)MatrixOp.mult(IXX_T, X_Ty);
+ double t = b.e(0, 0);
+ double s = b.e(1, 0);
+ Hep3Vector Bpca = VecOp.add(B1, VecOp.mult(t, dB));
+ Hep3Vector Apca = VecOp.add(A1, VecOp.mult(s, dA));
Hep3Vector vertex = VecOp.add(Apca, VecOp.mult(0.5, VecOp.sub(Bpca, Apca)));
- if(_debug) {
- System.out.printf("y:\n%s\n",y.toString());
- System.out.printf("X:\n%s\n",X.toString());
- System.out.printf("b:\n%s\n",b.toString());
- Hep3Vector ymin = VecOp.add(VecOp.mult(t, dB) , VecOp.mult(s, dA) );
- Hep3Vector yminprime = VecOp.add(VecOp.sub(B1, A1), ymin);
- System.out.printf("ymin:\n%s\n",ymin.toString());
- System.out.printf("yminprime:\n%s\n",yminprime.toString());
- System.out.printf("Apca:\n%s\n",Apca.toString());
- System.out.printf("Bpca:\n%s\n",Bpca.toString());
- System.out.printf("vertex:\n%s\n",vertex.toString());
- }
- return vertex;
-
-
+ if(_debug) {
+ System.out.printf("y:\n%s\n",y.toString());
+ System.out.printf("X:\n%s\n",X.toString());
+ System.out.printf("b:\n%s\n",b.toString());
+ Hep3Vector ymin = VecOp.add(VecOp.mult(t, dB) , VecOp.mult(s, dA) );
+ Hep3Vector yminprime = VecOp.add(VecOp.sub(B1, A1), ymin);
+ System.out.printf("ymin:\n%s\n",ymin.toString());
+ System.out.printf("yminprime:\n%s\n",yminprime.toString());
+ System.out.printf("Apca:\n%s\n",Apca.toString());
+ System.out.printf("Bpca:\n%s\n",Bpca.toString());
+ System.out.printf("vertex:\n%s\n",vertex.toString());
+ }
+ return vertex;
+
+
}
Modified: java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/TwoParticleVertexer.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/TwoParticleVertexer.java (original)
+++ java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/TwoParticleVertexer.java Wed Apr 27 11:11:32 2016
@@ -18,7 +18,7 @@
*/
public class TwoParticleVertexer extends TwoLineVertexer {
- public TwoParticleVertexer() {
+ public TwoParticleVertexer() {
}
public void setParticle(MCParticle track1,MCParticle track2) {
@@ -34,8 +34,8 @@
Hep3Vector PB2 = this.propAlongLine(PB1, p2, dz);
if(_debug) {
- System.out.printf("A1 %s p1 %s B1 %s p2 %s\n", PA1.toString(), p1.toString(), PB1.toString(), p2.toString());
- System.out.printf("A2 %s B2 %s\n", PA2.toString(), PB2.toString());
+ System.out.printf("A1 %s p1 %s B1 %s p2 %s\n", PA1.toString(), p1.toString(), PB1.toString(), p2.toString());
+ System.out.printf("A2 %s B2 %s\n", PA2.toString(), PB2.toString());
}
//set the member variables
Modified: java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/TwoTrackFringeVertexer.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/TwoTrackFringeVertexer.java (original)
+++ java/branches/HPSJAVA-409/recon/src/main/java/org/hps/recon/vertexing/TwoTrackFringeVertexer.java Wed Apr 27 11:11:32 2016
@@ -4,14 +4,14 @@
import hep.physics.vec.Hep3Vector;
import org.hps.recon.tracking.BeamlineConstants;
-import org.hps.recon.tracking.HPSTrack;
+import org.hps.recon.tracking.HpsHelicalTrackFit;
import org.hps.recon.tracking.HelixConverter;
-import org.hps.recon.tracking.StraightLineTrack;
+import org.hps.recon.tracking.TrackUtils;
import org.lcsim.event.Track;
import org.lcsim.fit.helicaltrack.HelicalTrackFit;
import org.lcsim.fit.helicaltrack.HelixUtils;
+import org.lcsim.geometry.FieldMap;
import org.lcsim.recon.tracking.seedtracker.SeedTrack;
-import org.lcsim.util.swim.Helix;
/**
*
@@ -20,38 +20,42 @@
* @author phansson
*
*/
+
public class TwoTrackFringeVertexer extends TwoTrackVertexer {
protected HelixConverter converter = new HelixConverter(0.);
- public void setTracks(Track track1, Track track2) {
- SeedTrack s1 = (SeedTrack) track1;
+ public void setTracks(Track track1, Track track2, FieldMap fieldMap) {
+ SeedTrack s1 = (SeedTrack) track1;
HelicalTrackFit htf1 = s1.getSeedCandidate().getHelix();
- HPSTrack hpstrk1 = new HPSTrack(htf1);
+ HpsHelicalTrackFit hpstrk1 = new HpsHelicalTrackFit(htf1);
SeedTrack s2 = (SeedTrack) track2;
HelicalTrackFit htf2 = s2.getSeedCandidate().getHelix();
- HPSTrack hpstrk2 = new HPSTrack(htf2);
+ HpsHelicalTrackFit hpstrk2 = new HpsHelicalTrackFit(htf2);
boolean debug = false;
+
+ Hep3Vector posAtConv1 = new BasicHep3Vector( TrackUtils.extrapolateTrackUsingFieldMap(track1, 100.0, BeamlineConstants.HARP_POSITION_TESTRUN, 5.0, fieldMap).getReferencePoint());
+ Hep3Vector posAtConv2 = new BasicHep3Vector( TrackUtils.extrapolateTrackUsingFieldMap(track2, 100.0, BeamlineConstants.HARP_POSITION_TESTRUN, 5.0, fieldMap).getReferencePoint());
+
+ //FIXME the straight line objects are not working
- Hep3Vector posAtConv1 = hpstrk1.getPositionAtZMap(100.0, BeamlineConstants.HARP_POSITION_TESTRUN, 5.0)[0];
- Hep3Vector posAtConv2 = hpstrk2.getPositionAtZMap(100.0, BeamlineConstants.HARP_POSITION_TESTRUN, 5.0)[0];
+ //StraightLineTrack slt1_conv = converter.Convert((Helix)hpstrk1.getTrajectory());
+ //StraightLineTrack slt2_conv = converter.Convert((Helix)hpstrk2.getTrajectory());
+
+ //A1 = new BasicHep3Vector(slt1_conv.x0(),slt1_conv.y0(),slt1_conv.z0());
+ //B1 = new BasicHep3Vector(slt2_conv.x0(),slt2_conv.y0(),slt2_conv.z0());
- StraightLineTrack slt1_conv = converter.Convert((Helix)hpstrk1.getTrajectory());
- StraightLineTrack slt2_conv = converter.Convert((Helix)hpstrk2.getTrajectory());
+ //double YZAtConv1[] = slt1_conv.getYZAtX(BeamlineConstants.HARP_POSITION_TESTRUN);
+ //double YZAtConv2[] = slt2_conv.getYZAtX(BeamlineConstants.HARP_POSITION_TESTRUN);
- A1 = new BasicHep3Vector(slt1_conv.x0(),slt1_conv.y0(),slt1_conv.z0());
- B1 = new BasicHep3Vector(slt2_conv.x0(),slt2_conv.y0(),slt2_conv.z0());
-
- double YZAtConv1[] = slt1_conv.getYZAtX(BeamlineConstants.HARP_POSITION_TESTRUN);
- double YZAtConv2[] = slt2_conv.getYZAtX(BeamlineConstants.HARP_POSITION_TESTRUN);
+ //A2 = new BasicHep3Vector(BeamlineConstants.HARP_POSITION_TESTRUN,YZAtConv1[0],YZAtConv1[1]);
+ //B2 = new BasicHep3Vector(BeamlineConstants.HARP_POSITION_TESTRUN,YZAtConv2[0],YZAtConv2[1]);
- A2 = new BasicHep3Vector(BeamlineConstants.HARP_POSITION_TESTRUN,YZAtConv1[0],YZAtConv1[1]);
- B2 = new BasicHep3Vector(BeamlineConstants.HARP_POSITION_TESTRUN,YZAtConv2[0],YZAtConv2[1]);
if(debug) {
System.out.printf("%s: original track1 direction at x=0 %s \n",this.getClass().getSimpleName(),HelixUtils.Direction(hpstrk1,0.).toString());
System.out.printf("%s: original track2 direction at x=0 %s \n",this.getClass().getSimpleName(),HelixUtils.Direction(hpstrk2,0.).toString());
- System.out.printf("%s: track1 direction at conv %s \n",this.getClass().getSimpleName(),hpstrk1.getTrajectory().getUnitTangentAtLength(0.).toString());
- System.out.printf("%s: track2 direction at conv %s \n",this.getClass().getSimpleName(),hpstrk2.getTrajectory().getUnitTangentAtLength(0.).toString());
+ //System.out.printf("%s: track1 direction at conv %s \n",this.getClass().getSimpleName(),hpstrk1.getTrajectory().getUnitTangentAtLength(0.).toString());
+ //System.out.printf("%s: track2 direction at conv %s \n",this.getClass().getSimpleName(),hpstrk2.getTrajectory().getUnitTangentAtLength(0.).toString());
System.out.printf("%s: pos at converter track1 %s \n",this.getClass().getSimpleName(),posAtConv1.toString());
@@ -67,5 +71,5 @@
}
-
+
}
Modified: java/branches/HPSJAVA-409/recon/src/test/java/org/hps/recon/particle/HpsReconParticleDriverTest.java
=============================================================================
--- java/branches/HPSJAVA-409/recon/src/test/java/org/hps/recon/particle/HpsReconParticleDriverTest.java (original)
+++ java/branches/HPSJAVA-409/recon/src/test/java/org/hps/recon/particle/HpsReconParticleDriverTest.java Wed Apr 27 11:11:32 2016
@@ -26,165 +26,165 @@
*/
public class HpsReconParticleDriverTest extends TestCase {
- private static final double B_FIELD = 0.5; // Tesla
- double[] trackParameters = new double[5];
- List<Track> tracks = new ArrayList<Track>();
- List<Cluster> clusters = new ArrayList<Cluster>();
- List<ReconstructedParticle> particleTracks;
- HpsReconParticleDriver particleDriver = null;
-
- public void setUp() throws Exception {
-
- System.out.println("\n#=== Creating Ideal Tracks ===#\n");
+ private static final double B_FIELD = 0.5; // Tesla
+ double[] trackParameters = new double[5];
+ List<Track> tracks = new ArrayList<Track>();
+ List<Cluster> clusters = new ArrayList<Cluster>();
+ List<ReconstructedParticle> particleTracks;
+ HpsReconParticleDriver particleDriver = null;
+
+ public void setUp() throws Exception {
+
+ System.out.println("\n#=== Creating Ideal Tracks ===#\n");
- // Create a pair of ideal e+e- tracks in opposite detector volumes.
- // The e+ track is created on the bottom half of the detector while
- // the e- track is created on the top half.
- Track electronTrack = new BaseTrack();
- trackParameters[BaseTrack.D0] = 0.41051;
- trackParameters[BaseTrack.OMEGA] = -2.2584e-4;
- trackParameters[BaseTrack.PHI] = 6.2626;
- trackParameters[BaseTrack.TANLAMBDA] = 0.046548;
- trackParameters[BaseTrack.Z0] = .23732;
- ((BaseTrack) electronTrack).setTrackParameters(trackParameters, B_FIELD);
-
- System.out.println("\n[ Track ] Electron: \n" + electronTrack.toString());
-
- Track positronTrack = new BaseTrack();
- trackParameters[BaseTrack.D0] = 0.19691;
- trackParameters[BaseTrack.OMEGA] = 1.005e-4;
- trackParameters[BaseTrack.PHI] = 6.2447;
- trackParameters[BaseTrack.TANLAMBDA] = -0.024134;
- trackParameters[BaseTrack.Z0] = -0.040231;
- ((BaseTrack) positronTrack).setTrackParameters(trackParameters, B_FIELD);
+ // Create a pair of ideal e+e- tracks in opposite detector volumes.
+ // The e+ track is created on the bottom half of the detector while
+ // the e- track is created on the top half.
+ Track electronTrack = new BaseTrack();
+ trackParameters[BaseTrack.D0] = 0.41051;
+ trackParameters[BaseTrack.OMEGA] = -2.2584e-4;
+ trackParameters[BaseTrack.PHI] = 6.2626;
+ trackParameters[BaseTrack.TANLAMBDA] = 0.046548;
+ trackParameters[BaseTrack.Z0] = .23732;
+ ((BaseTrack) electronTrack).setTrackParameters(trackParameters, B_FIELD);
+
+ System.out.println("\n[ Track ] Electron: \n" + electronTrack.toString());
+
+ Track positronTrack = new BaseTrack();
+ trackParameters[BaseTrack.D0] = 0.19691;
+ trackParameters[BaseTrack.OMEGA] = 1.005e-4;
+ trackParameters[BaseTrack.PHI] = 6.2447;
+ trackParameters[BaseTrack.TANLAMBDA] = -0.024134;
+ trackParameters[BaseTrack.Z0] = -0.040231;
+ ((BaseTrack) positronTrack).setTrackParameters(trackParameters, B_FIELD);
- System.out.println("\n[ Track ] Positron: \n" + positronTrack.toString());
+ System.out.println("\n[ Track ] Positron: \n" + positronTrack.toString());
- // Add the tracks to the list of tracks that will be used for test
- // purposes.
- tracks.add(electronTrack);
- tracks.add(positronTrack);
-
- System.out.println("\n#=== Creating Ideal Ecal Clusters ===#\n");
-
- // Create a pair of ideal clusters to match the e+e- pairs created
- // above. Since the properties of a cluster cannot be modified
- // directly via setter methods, first create a CalorimeterHit and
- // then use that to create a cluster.
- //Hep3Vector topHitPosition = new BasicHep3Vector(190.27, 69.729, 1422.8);
- //BaseCalorimeterHit topHit
- // = new BaseCalorimeterHit(.4600, .4600, 0, 0, 0, topHitPosition, 0);
-
- //System.out.println("\n[ Calorimeter Hit ] Top: \n" + topHit.toString());
-
- Cluster topCluster = new BaseCluster();
- //((BaseCluster) topCluster).addHit(topHit);
-
-
- System.out.print("\n[ Cluster ] Top: " + topCluster.toString());
- System.out.println(" and position= [" + topCluster.getPosition()[0] + ", "
- + topCluster.getPosition()[1] + ", "
- + topCluster.getPosition()[2] + " ]");
-
- //Hep3Vector bottomHitPosition = new BasicHep3Vector(-148.46, -39.27, 1430.5);
- //BaseCalorimeterHit bottomHit
- // = new BaseCalorimeterHit(1.1420, 1.1420, 0, 0, 0, bottomHitPosition, 0);
+ // Add the tracks to the list of tracks that will be used for test
+ // purposes.
+ tracks.add(electronTrack);
+ tracks.add(positronTrack);
+
+ System.out.println("\n#=== Creating Ideal Ecal Clusters ===#\n");
+
+ // Create a pair of ideal clusters to match the e+e- pairs created
+ // above. Since the properties of a cluster cannot be modified
+ // directly via setter methods, first create a CalorimeterHit and
+ // then use that to create a cluster.
+ //Hep3Vector topHitPosition = new BasicHep3Vector(190.27, 69.729, 1422.8);
+ //BaseCalorimeterHit topHit
+ // = new BaseCalorimeterHit(.4600, .4600, 0, 0, 0, topHitPosition, 0);
+
+ //System.out.println("\n[ Calorimeter Hit ] Top: \n" + topHit.toString());
+
+ Cluster topCluster = new BaseCluster();
+ //((BaseCluster) topCluster).addHit(topHit);
+
+
+ System.out.print("\n[ Cluster ] Top: " + topCluster.toString());
+ System.out.println(" and position= [" + topCluster.getPosition()[0] + ", "
+ + topCluster.getPosition()[1] + ", "
+ + topCluster.getPosition()[2] + " ]");
+
+ //Hep3Vector bottomHitPosition = new BasicHep3Vector(-148.46, -39.27, 1430.5);
+ //BaseCalorimeterHit bottomHit
+ // = new BaseCalorimeterHit(1.1420, 1.1420, 0, 0, 0, bottomHitPosition, 0);
- //System.out.println("\n[ Calorimeter Hit ] Bottom:\n " + bottomHit.toString());
-
- Cluster bottomCluster = new BaseCluster();
- //((BaseCluster) bottomCluster).addHit(bottomHit);
-
- System.out.print("\n[ Cluster ] bottom: " + bottomCluster.toString());
- System.out.println(" and position= [ " + topCluster.getPosition()[0] + ", "
- + topCluster.getPosition()[1] + ", "
- + topCluster.getPosition()[2] + " ]");
+ //System.out.println("\n[ Calorimeter Hit ] Bottom:\n " + bottomHit.toString());
+
+ Cluster bottomCluster = new BaseCluster();
+ //((BaseCluster) bottomCluster).addHit(bottomHit);
+
+ System.out.print("\n[ Cluster ] bottom: " + bottomCluster.toString());
+ System.out.println(" and position= [ " + topCluster.getPosition()[0] + ", "
+ + topCluster.getPosition()[1] + ", "
+ + topCluster.getPosition()[2] + " ]");
- // Add the clusters to the list of clusters that will be used for test
- // purposes.
- clusters.add(topCluster);
- clusters.add(bottomCluster);
-
- particleDriver = new HpsReconParticleDriver();
- particleDriver.setDebug(true);
- }
-
- public void testMakeReconstructedParticles(){
-
- System.out.println("\n#=== Running makeReconstructedParticles Test ===#");
-
-
- // Create two ReconstructedParticles with tracks only
- List<Cluster> emptyClusters = new ArrayList<Cluster>();
- List<List<Track>> trackCollections = new ArrayList<List<Track>>(0);
- trackCollections.add(tracks);
- particleTracks = particleDriver.makeReconstructedParticles(emptyClusters, trackCollections);
+ // Add the clusters to the list of clusters that will be used for test
+ // purposes.
+ clusters.add(topCluster);
+ clusters.add(bottomCluster);
+
+ particleDriver = new HpsReconParticleDriver();
+ particleDriver.setDebug(true);
+ }
- //
- // The list contains two Tracks which should result in two
- // ReconstructedParticles.
- //
- assertTrue("More particles than expected were created.", particleTracks.size() == 2);
- System.out.println("\nThe number of ReconstructedParticles created: " + particleTracks.size());
+ public void testMakeReconstructedParticles(){
+
+ System.out.println("\n#=== Running makeReconstructedParticles Test ===#");
+
+
+ // Create two ReconstructedParticles with tracks only
+ List<Cluster> emptyClusters = new ArrayList<Cluster>();
+ List<List<Track>> trackCollections = new ArrayList<List<Track>>(0);
+ trackCollections.add(tracks);
+ particleTracks = particleDriver.makeReconstructedParticles(emptyClusters, trackCollections);
- for(int particleN = 0; particleN < particleTracks.size(); particleN++){
-
- //
- // Check if the RecontructedParticle track is the same as the track
- // that created it
- //
- assertTrue("The particle track does not match the track that created it",
- particleTracks.get(particleN).getTracks().get(0).equals(tracks.get(particleN)));
-
-
- //
- // Check that the charge of the ReconstructedParticles was set properly
- //
- assertTrue("The charge of the ReconstructedParticle is equal to zero.",
- Math.abs(particleTracks.get(particleN).getCharge()) != 0);
- System.out.println("The charge of ReconstructedParticle number " + particleN + ": " + particleTracks.get(particleN).getCharge());
-
-
- //
- // Check that the particle ID was set correctly
- //
- assertTrue("The particle ID of the ReconstructedParticle is equal to zero.",
- particleTracks.get(particleN).getParticleIDUsed().getPDG() != 0);
- System.out.println("The particle ID of ReconstructedParticle number " + particleN + ": " + particleTracks.get(particleN).getParticleIDUsed().getPDG());
- }
-
- //
- // Check that the momentum of the ReconstructedParticles was set properly
- // and rotated to the detector frame.
- //
- Hep3Vector electronMomentum = new BasicHep3Vector(tracks.get(0).getTrackStates().get(0).getMomentum());
- electronMomentum = CoordinateTransformations.transformVectorToDetector(electronMomentum);
- assertTrue("The momentum of the track and ReconstructedParticle don't match! Top track p = "
- + electronMomentum.toString() + " Recon particle p = " + particleTracks.get(0).getMomentum().toString(),
- particleTracks.get(0).getMomentum().equals(electronMomentum));
-
- System.out.println("The momentum of the first ReconstructedParticle: " + particleTracks.get(0).getMomentum().toString());
-
- Hep3Vector positronMomentum = new BasicHep3Vector(tracks.get(1).getTrackStates().get(0).getMomentum());
- positronMomentum = CoordinateTransformations.transformVectorToDetector(positronMomentum);
- assertTrue("The momentum of track and ReconstructedParticle don't march! Bottom track p = "
- + positronMomentum.toString() + " Recon particle p = " + particleTracks.get(1).getMomentum().toString(),
- particleTracks.get(1).getMomentum().equals(positronMomentum));
-
- System.out.println("The momentum of the second ReconstructedParticle: " + particleTracks.get(1).getMomentum().toString());
-
- }
-
- public void testVertexParticles(){
-
- // Create two ReconstructedParticles with tracks only
- //List<Cluster> emptyClusters = new ArrayList<Cluster>();
- //particleTracks = particleDriver.makeReconstructedParticles(emptyClusters, tracks);
+ //
+ // The list contains two Tracks which should result in two
+ // ReconstructedParticles.
+ //
+ assertTrue("More particles than expected were created.", particleTracks.size() == 2);
+ System.out.println("\nThe number of ReconstructedParticles created: " + particleTracks.size());
+
+ for(int particleN = 0; particleN < particleTracks.size(); particleN++){
+
+ //
+ // Check if the ReconstructedParticle track is the same as the track
+ // that created it
+ //
+ assertTrue("The particle track does not match the track that created it",
+ particleTracks.get(particleN).getTracks().get(0).equals(tracks.get(particleN)));
+
+
+ //
+ // Check that the charge of the ReconstructedParticles was set properly
+ //
+ assertTrue("The charge of the ReconstructedParticle is equal to zero.",
+ Math.abs(particleTracks.get(particleN).getCharge()) != 0);
+ System.out.println("The charge of ReconstructedParticle number " + particleN + ": " + particleTracks.get(particleN).getCharge());
+
+
+ //
+ // Check that the particle ID was set correctly
+ //
+ assertTrue("The particle ID of the ReconstructedParticle is equal to zero.",
+ particleTracks.get(particleN).getParticleIDUsed().getPDG() != 0);
+ System.out.println("The particle ID of ReconstructedParticle number " + particleN + ": " + particleTracks.get(particleN).getParticleIDUsed().getPDG());
+ }
+
+ //
+ // Check that the momentum of the ReconstructedParticles was set properly
+ // and rotated to the detector frame.
+ //
+ Hep3Vector electronMomentum = new BasicHep3Vector(tracks.get(0).getTrackStates().get(0).getMomentum());
+ electronMomentum = CoordinateTransformations.transformVectorToDetector(electronMomentum);
+ assertTrue("The momentum of the track and ReconstructedParticle don't match! Top track p = "
+ + electronMomentum.toString() + " Recon particle p = " + particleTracks.get(0).getMomentum().toString(),
+ particleTracks.get(0).getMomentum().equals(electronMomentum));
+
+ System.out.println("The momentum of the first ReconstructedParticle: " + particleTracks.get(0).getMomentum().toString());
+
+ Hep3Vector positronMomentum = new BasicHep3Vector(tracks.get(1).getTrackStates().get(0).getMomentum());
+ positronMomentum = CoordinateTransformations.transformVectorToDetector(positronMomentum);
+ assertTrue("The momentum of track and ReconstructedParticle don't match! Bottom track p = "
+ + positronMomentum.toString() + " Recon particle p = " + particleTracks.get(1).getMomentum().toString(),
+ particleTracks.get(1).getMomentum().equals(positronMomentum));
+
+ System.out.println("The momentum of the second ReconstructedParticle: " + particleTracks.get(1).getMomentum().toString());
+
+ }
+
+ public void testVertexParticles(){
+
+ // Create two ReconstructedParticles with tracks only
+ //List<Cluster> emptyClusters = new ArrayList<Cluster>();
+ //particleTracks = particleDriver.makeReconstructedParticles(emptyClusters, tracks);
- //List<ReconstructedParticle> electrons = particleTracks.subList(0, 1);
- //List<ReconstructedParticle> positrons = particleTracks.subList(1, 2);
-
- //particleDriver.vertexParticles(electrons, positrons);
-
- }
+ //List<ReconstructedParticle> electrons = particleTracks.subList(0, 1);
+ //List<ReconstructedParticle> positrons = particleTracks.subList(1, 2);
+
+ //particleDriver.vertexParticles(electrons, positrons);
+
+ }
}
Modified: java/branches/HPSJAVA-409/record-util/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/record-util/pom.xml (original)
+++ java/branches/HPSJAVA-409/record-util/pom.xml Wed Apr 27 11:11:32 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/record-util/</url>
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/AbstractRecordProcessor.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/AbstractRecordProcessor.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/AbstractRecordProcessor.java Wed Apr 27 11:11:32 2016
@@ -10,6 +10,8 @@
*/
public abstract class AbstractRecordProcessor<RecordType> implements RecordProcessor<RecordType> {
+ private boolean active = true;
+
/**
* End of job action.
*/
@@ -59,4 +61,12 @@
@Override
public void suspend() {
}
+
+ protected void setActive(boolean active) {
+ this.active = active;
+ }
+
+ public boolean isActive() {
+ return this.active;
+ }
}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/AbstractRecordQueue.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/AbstractRecordQueue.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/AbstractRecordQueue.java Wed Apr 27 11:11:32 2016
@@ -45,7 +45,8 @@
/**
* Class constructor with the timeout in seconds.
*
- * @param timeoutSeconds the timeout in seconds
+ * @param timeOutMillis the timeout in milliseconds
+ * @param maxSize the maximum size of the queue
*/
public AbstractRecordQueue(final long timeOutMillis, final int maxSize) {
this.timeOutMillis = timeOutMillis;
@@ -55,7 +56,7 @@
/**
* Add a record to the queue if there is space.
*
- * @param event the LCIO event to add
+ * @param record the LCIO event to add
*/
// FIXME: Should drain queue if over capacity.
public void addRecord(final RecordType record) {
@@ -102,7 +103,7 @@
* @throws NoSuchRecordException if there are no records available from the queue
*/
@Override
- public void next() throws IOException, NoSuchRecordException {
+ public synchronized void next() throws IOException, NoSuchRecordException {
try {
if (this.timeOutMillis > 0L) {
// Poll the queue for the next record until timeout is exceeded.
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/RecordProcessor.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/RecordProcessor.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/RecordProcessor.java Wed Apr 27 11:11:32 2016
@@ -45,4 +45,11 @@
* Suspend processing action.
*/
void suspend();
+
+ /**
+ * Return <code>true</code> if processor is active.
+ *
+ * @return <code>true</code> if processor is active
+ */
+ boolean isActive();
}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/composite/RecordProcessorAdapter.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/composite/RecordProcessorAdapter.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/composite/RecordProcessorAdapter.java Wed Apr 27 11:11:32 2016
@@ -44,7 +44,7 @@
/**
* Activate the <code>endJob</code> methods of the registered processors.
*
- * @param the <code>LoopEvent</code> which activated <code>finish</code>
+ * @param loopEvent the <code>LoopEvent</code> which activated <code>finish</code>
*/
@Override
public void finish(final LoopEvent loopEvent) {
@@ -80,7 +80,7 @@
/**
* Activate the <code>startJob</code> methods of the registered processors.
*
- * @param the <code>LoopEvent</code> which activated the start
+ * @param loopEvent the <code>LoopEvent</code> which activated the start
*/
@Override
public void start(final LoopEvent loopEvent) {
@@ -103,7 +103,7 @@
/**
* Activate the <code>suspend</code> methods of the registered processors.
*
- * @param the <code>LoopEvent</code> which activated <code>suspend</code>.
+ * @param loopEvent the <code>LoopEvent</code> which activated <code>suspend</code>.
*/
@Override
public void suspend(final LoopEvent loopEvent) {
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/DAQConfig.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/DAQConfig.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/DAQConfig.java Wed Apr 27 11:11:32 2016
@@ -1,4 +1,8 @@
package org.hps.record.daqconfig;
+
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+
/**
* Class <code>DAQConfig</code> holds all of the supported parameters
@@ -47,13 +51,23 @@
}
@Override
- public void printConfig() {
+ public void printConfig(PrintStream ps) {
// Print the system-specific objects.
- fadcConfig.printConfig();
- System.out.println();
- gtpConfig.printConfig();
- System.out.println();
- sspConfig.printConfig();
+ fadcConfig.printConfig(ps);
+ ps.println();
+ gtpConfig.printConfig(ps);
+ ps.println();
+ sspConfig.printConfig(ps);
}
-
-}
+
+ public String toString() {
+ ByteArrayOutputStream os = new ByteArrayOutputStream();
+ PrintStream ps = new PrintStream(os);
+ printConfig(ps);
+ try {
+ return os.toString("UTF8");
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/DAQConfigDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/DAQConfigDriver.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/DAQConfigDriver.java Wed Apr 27 11:11:32 2016
@@ -39,57 +39,57 @@
* @see ConfigurationManager
*/
public class DAQConfigDriver extends Driver {
- private int runNumber = -1;
- private String filepath = null;
- private boolean firstEvent = true;
- private boolean readDataFiles = false;
- private File[] dataFiles = new File[3];
- private int[] crateNumber = { 46, 37, 39 };
-
- /**
- * Verifies the parameter <code>filepath</code> for the data file
- * repository and checks that appropriate data files exist for the
- * requested run number if the driver is set to read from data files.
- * Otherwise, this does nothing.
- */
- @Override
- public void startOfData() {
- // Check whether to use stored data files or the EvIO data stream
- // as the source of the DAQ settings. Nothing needs to be done
- // in the latter case.
- if(readDataFiles) {
- // The user must define a data file prefix and repository
- // location for this option to be used.
- if(filepath == null) {
- throw new NullPointerException("DAQ settings repository filepath must be defined.");
- } if(runNumber == -1) {
- throw new NullPointerException("Run number must be defined.");
- }
-
- // Verify that the repository actually exist.
- File repository = new File(filepath);
- if(!repository.exists() || !repository.isDirectory()) {
- throw new IllegalArgumentException("Repository location \"" + filepath + "\" must be an existing directory.");
- }
-
- // Define the data file objects.
- for(int i = 0; i < dataFiles.length; i++) {
- try {
- dataFiles[i] = new File(repository.getCanonicalPath() + "/" + runNumber + "_" + crateNumber[i] + ".txt");
- } catch(IOException e) {
- throw new RuntimeException("Error resolving absolute repository filepath.");
- }
- }
-
- // Verify that the data files actually exist.
- for(File dataFile : dataFiles) {
- if(!dataFile.exists() || !dataFile.canRead()) {
- throw new IllegalArgumentException("Data file \"" + dataFile.getName() + "\" does not exist or can not be read.");
- }
- }
- }
- }
-
+ private int runNumber = -1;
+ private String filepath = null;
+ private boolean firstEvent = true;
+ private boolean readDataFiles = false;
+ private File[] dataFiles = new File[3];
+ private int[] crateNumber = { 46, 37, 39 };
+
+ /**
+ * Verifies the parameter <code>filepath</code> for the data file
+ * repository and checks that appropriate data files exist for the
+ * requested run number if the driver is set to read from data files.
+ * Otherwise, this does nothing.
+ */
+ @Override
+ public void startOfData() {
+ // Check whether to use stored data files or the EvIO data stream
+ // as the source of the DAQ settings. Nothing needs to be done
+ // in the latter case.
+ if(readDataFiles) {
+ // The user must define a data file prefix and repository
+ // location for this option to be used.
+ if(filepath == null) {
+ throw new NullPointerException("DAQ settings repository filepath must be defined.");
+ } if(runNumber == -1) {
+ throw new NullPointerException("Run number must be defined.");
+ }
+
+ // Verify that the repository actually exists.
+ File repository = new File(filepath);
+ if(!repository.exists() || !repository.isDirectory()) {
+ throw new IllegalArgumentException("Repository location \"" + filepath + "\" must be an existing directory.");
+ }
+
+ // Define the data file objects.
+ for(int i = 0; i < dataFiles.length; i++) {
+ try {
+ dataFiles[i] = new File(repository.getCanonicalPath() + "/" + runNumber + "_" + crateNumber[i] + ".txt");
+ } catch(IOException e) {
+ throw new RuntimeException("Error resolving absolute repository filepath.");
+ }
+ }
+
+ // Verify that the data files actually exist.
+ for(File dataFile : dataFiles) {
+ if(!dataFile.exists() || !dataFile.canRead()) {
+ throw new IllegalArgumentException("Data file \"" + dataFile.getName() + "\" does not exist or can not be read.");
+ }
+ }
+ }
+ }
+
/**
* Checks an event for the DAQ configuration banks and passes them
* to the <code>ConfigurationManager</code> if the driver is set to
@@ -99,28 +99,28 @@
*/
@Override
public void process(EventHeader event) {
- // If this is the first event and data files are to be read,
- // import the data files and generate the DAQ information.
- if(firstEvent && readDataFiles) {
- // Get the data files in the form of a data array.
- String[][] data;
- try { data = getDataFileArrays(dataFiles); }
- catch(IOException e) {
- throw new RuntimeException("An error occurred when processing the data files.");
- }
-
- // Instantiate an EvIO DAQ parser and feed it the data.
- EvioDAQParser daqConfig = new EvioDAQParser();
- for(int i = 0; i < dataFiles.length; i++) {
- daqConfig.parse(crateNumber[i], runNumber, data[i]);
- }
-
- // Update the configuration manager.
- ConfigurationManager.updateConfiguration(daqConfig);
- }
-
+ // If this is the first event and data files are to be read,
+ // import the data files and generate the DAQ information.
+ if(firstEvent && readDataFiles) {
+ // Get the data files in the form of a data array.
+ String[][] data;
+ try { data = getDataFileArrays(dataFiles); }
+ catch(IOException e) {
+ throw new RuntimeException("An error occurred when processing the data files.");
+ }
+
+ // Instantiate an EvIO DAQ parser and feed it the data.
+ EvioDAQParser daqConfig = new EvioDAQParser();
+ for(int i = 0; i < dataFiles.length; i++) {
+ daqConfig.parse(crateNumber[i], runNumber, data[i]);
+ }
+
+ // Update the configuration manager.
+ ConfigurationManager.updateConfiguration(daqConfig);
+ }
+
// Check if a trigger configuration bank exists.
- if(!readDataFiles && event.hasCollection(EvioDAQParser.class, "TriggerConfig")) {
+ if(!readDataFiles && event.hasCollection(EvioDAQParser.class, "TriggerConfig")) {
// Get the trigger configuration bank. There should only be
// one in the list.
List<EvioDAQParser> configList = event.get(EvioDAQParser.class, "TriggerConfig");
@@ -134,7 +134,7 @@
// Note that it is no longer the first event.
firstEvent = false;
}
-
+
/**
* Converts DAQ configuration data files into an array of strings
* where each array entry represents a line in the configuration
@@ -152,44 +152,44 @@
* or reading the objects in the objects referred to by the files
* pointed to in the <code>dataFiles</code> array.
*/
- private static final String[][] getDataFileArrays(File[] dataFiles) throws IOException {
- // Create file readers to process the data files.
- FileReader[] fr = new FileReader[dataFiles.length];
- BufferedReader[] reader = new BufferedReader[dataFiles.length];
- for(int i = 0; i < dataFiles.length; i++) {
- fr[i] = new FileReader(dataFiles[i]);
- reader[i] = new BufferedReader(fr[i]);
- }
-
- // Generate String arrays where each entry in the array is
- // a line from the data file.
- String[][] data = new String[dataFiles.length][0];
- for(int i = 0; i < dataFiles.length; i++) {
- // Create a list to hold the raw strings.
- List<String> rawData = new ArrayList<String>();
-
- // Add each line from the current data file to the list
- // as a single entry.
- String curLine = null;
- while((curLine = reader[i].readLine()) != null) {
- rawData.add(curLine);
- }
-
- // Convert the list into a String array.
- data[i] = rawData.toArray(new String[rawData.size()]);
- }
-
- // Return the data array.
- return data;
- }
-
- /**
- * Sets the run number of the DAQ configuration being processed.
- * This is only used when reading from data files.
- * @param run - The run number of the data files to be used.
- */
+ private static final String[][] getDataFileArrays(File[] dataFiles) throws IOException {
+ // Create file readers to process the data files.
+ FileReader[] fr = new FileReader[dataFiles.length];
+ BufferedReader[] reader = new BufferedReader[dataFiles.length];
+ for(int i = 0; i < dataFiles.length; i++) {
+ fr[i] = new FileReader(dataFiles[i]);
+ reader[i] = new BufferedReader(fr[i]);
+ }
+
+ // Generate String arrays where each entry in the array is
+ // a line from the data file.
+ String[][] data = new String[dataFiles.length][0];
+ for(int i = 0; i < dataFiles.length; i++) {
+ // Create a list to hold the raw strings.
+ List<String> rawData = new ArrayList<String>();
+
+ // Add each line from the current data file to the list
+ // as a single entry.
+ String curLine = null;
+ while((curLine = reader[i].readLine()) != null) {
+ rawData.add(curLine);
+ }
+
+ // Convert the list into a String array.
+ data[i] = rawData.toArray(new String[rawData.size()]);
+ }
+
+ // Return the data array.
+ return data;
+ }
+
+ /**
+ * Sets the run number of the DAQ configuration being processed.
+ * This is only used when reading from data files.
+ * @param run - The run number of the data files to be used.
+ */
public void setRunNumber(int run) {
- runNumber = run;
+ runNumber = run;
}
/**
@@ -198,7 +198,7 @@
* @param filepath - The file path of the data file repository.
*/
public void setDataFileRepository(String filepath) {
- this.filepath = filepath;
+ this.filepath = filepath;
}
/**
@@ -211,6 +211,6 @@
* should be read from the EvIO stream.
*/
public void setReadDataFiles(boolean state) {
- readDataFiles = state;
+ readDataFiles = state;
}
}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/EvioDAQParser.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/EvioDAQParser.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/EvioDAQParser.java Wed Apr 27 11:11:32 2016
@@ -42,7 +42,7 @@
*
* TODO: Restructure, clean up...
*/
- /** The EvIO bank identification tag for DAQ configuration banks. */
+ /** The EvIO bank identification tag for DAQ configuration banks. */
public static final int BANK_TAG = 0xE10E;
// Stores the hardware codes for each trigger type.
@@ -204,7 +204,7 @@
* Instantiates the <code>EvioDAQParser</code>.
*/
public EvioDAQParser() {
- // Create a map to map crystals to their database channel object.
+ // Create a map to map crystals to their database channel object.
ecalConditions = DatabaseConditionsManager.getInstance().getEcalConditions();
for (int ii = 0; ii < 442; ii++) {
channels.add(findChannel(ii + 1));
@@ -220,9 +220,9 @@
* parameters.
*/
public void parse(int crate, int runNumber, String[] configurationTables) {
- // Track the number of banks that have been parsed. If the
- // parameter values have not been populated after a certain
- // number of banks, there is missing information.
+ // Track the number of banks that have been parsed. If the
+ // parameter values have not been populated after a certain
+ // number of banks, there is missing information.
nBanks++;
// Create a map that maps an identifier for each configuration
@@ -256,10 +256,10 @@
* contain the DAQ configuration parameters.
*/
private void loadConfigMap(int crate, String[] configTables) {
- // Iterate over each configuration table.
+ // Iterate over each configuration table.
for(String configTable : configTables) {
- // Split each table into rows and iterate over the rows.
- rowLoop:
+ // Split each table into rows and iterate over the rows.
+ rowLoop:
for(String line : configTable.trim().split("\n")) {
// Split the first column from the row.
String[] cols = line.trim().split(" +", 2);
@@ -290,8 +290,8 @@
// This entry indicates which triggers are enabled and
// needs to be parsed differently than normal.
else if(key.startsWith("SSP_HPS_SET_IO_SRC")) {
- // The first "parameter value" is a hardware code
- // that identifies the trigger. Obtain it.
+ // The first "parameter value" is a hardware code
+ // that identifies the trigger. Obtain it.
int trig = Integer.valueOf(vals.get(1));
// There are two trigger of each type, singles and
@@ -313,8 +313,8 @@
// This indicates a regular parameter that does not
// require any special parsing.
if(vals.size() > 1 && key.startsWith("SSP")) {
- // List the parameter by "[ROW NAME]_[KEY]" and
- // remove the key so that only the values remain.
+ // List the parameter by "[ROW NAME]_[KEY]" and
+ // remove the key so that only the values remain.
key += "_" + vals.remove(0);
}
@@ -330,7 +330,7 @@
* format of <code>[PARAMETER KEY] --> { [PARAMETER VALUES] }</code>.
*/
public void parseConfigMap() {
- // Parse simple FADC data.
+ // Parse simple FADC data.
fadcNSA = Integer.valueOf(getConfigParameter("FADC250_NSA", 0));
fadcNSB = Integer.valueOf(getConfigParameter("FADC250_NSB", 0));
fadcNPEAK = Integer.valueOf(getConfigParameter("FADC250_NPEAK", 0));
@@ -345,12 +345,12 @@
// Parse trigger data.
for(int ii = 0; ii < 2; ii++) {
- // Check singles trigger cuts enabled status.
+ // Check singles trigger cuts enabled status.
singlesNhitsEn[ii] = getBoolConfigSSP(ii, "SINGLES_NMIN", 1);
singlesEnergyMinEn[ii] = getBoolConfigSSP(ii, "SINGLES_EMIN", 1);
singlesEnergyMaxEn[ii] = getBoolConfigSSP(ii, "SINGLES_EMAX", 1);
- // Check pair trigger cuts enabled status.
+ // Check pair trigger cuts enabled status.
pairsEnergySumMaxMinEn[ii] = getBoolConfigSSP(ii, "PAIRS_SUMMAX_MIN", 2);
pairsEnergyDiffEn[ii] = getBoolConfigSSP(ii, "PAIRS_DIFFMAX", 1);
pairsCoplanarityEn[ii] = getBoolConfigSSP(ii, "PAIRS_COPLANARITY", 1);
@@ -383,7 +383,7 @@
* used to determine if the run is a "bugged" run.
*/
private void fixConfigMap2014Run(int runNumber) {
- // If this is a good run, noting should be done. Return.
+ // If this is a good run, noting should be done. Return.
if(runNumber > 3470 || runNumber < 3100) { return; }
// Populate missing GTP entries.
@@ -412,10 +412,10 @@
* the FADC channel with which they are associated.
*/
private void parseFADC(int crate, String key, List<String> vals) {
- // The FADC slot is not stored on the same line as the other
- // data and must be parsed and retained, as it is necessary
- // for handling the subsequent lines. If this line is the
- // FADC slot, store it.
+ // The FADC slot is not stored on the same line as the other
+ // data and must be parsed and retained, as it is necessary
+ // for handling the subsequent lines. If this line is the
+ // FADC slot, store it.
if(key.equals("FADC250_SLOT")) {
thisFadcSlot = Integer.valueOf(vals.get(0));
}
@@ -449,8 +449,8 @@
* to FADC channels 0 - 15.
*/
private void setChannelParsFloat(int crate, int slot, Map<EcalChannel, Float> map, List<String> vals) {
- // Iterate over each channel and map the database channel object
- // to the corresponding list value.
+ // Iterate over each channel and map the database channel object
+ // to the corresponding list value.
for(int ii = 0; ii < 16; ii++) {
map.put(findChannel(crate, slot, ii), Float.valueOf(vals.get(ii)));
}
@@ -468,8 +468,8 @@
* objects representing the channel values.
*/
private void setChannelParsInt(int crate, int slot, Map<EcalChannel, Integer> map, List<String> vals) {
- // Iterate over each channel and map the database channel object
- // to the corresponding list value.
+ // Iterate over each channel and map the database channel object
+ // to the corresponding list value.
for(int ii = 0; ii < 16; ii++) {
map.put(findChannel(crate, slot, ii), Integer.valueOf(vals.get(ii)));
}
@@ -619,9 +619,9 @@
* can not be found, an error message is passed to the logger.
*/
public String getConfigParameter(String key, int ival) {
- // Check the parameter map for the requested parameter key.
+ // Check the parameter map for the requested parameter key.
if(configMap.containsKey(key)) {
- // Get the list of values associated with this parameter key.
+ // Get the list of values associated with this parameter key.
List<String> vals = configMap.get(key);
// Check that the list of values contains a parameter for
@@ -639,7 +639,7 @@
// If the key is not present...
else {
// If more than 2 banks have been read, the absence of a
- // key represents an error. Log that this has occurred.
+ // key represents an error. Log that this has occurred.
if(nBanks > 2) {
Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, "ConfigMap MISSING KEY: " + key);
}
@@ -659,13 +659,13 @@
* if it exists, and <code>null</code> if it does not.
*/
public EcalChannel findChannel(int crate, int fadcSlot, int fadcChan) {
- // Search through the database channels for a channel that
- // matches the the argument parameters.
+ // Search through the database channels for a channel that
+ // matches the the argument parameters.
for (EcalChannel cc : channels) {
- // A channel matches the argument if the slot and channel
- // values are the same. Crate number must also match, but
- // note that EcalChannel follows a different convention
- // with respect to crate numbering.
+ // A channel matches the argument if the slot and channel
+ // values are the same. Crate number must also match, but
+ // note that EcalChannel follows a different convention
+ // with respect to crate numbering.
if( ((cc.getCrate() - 1) * 2 == crate - 37) && (cc.getSlot() == fadcSlot) && (cc.getChannel() == fadcChan) ) {
return cc;
}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/FADCConfig.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/FADCConfig.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/FADCConfig.java Wed Apr 27 11:11:32 2016
@@ -1,6 +1,7 @@
package org.hps.record.daqconfig;
import java.awt.Point;
+import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
@@ -129,13 +130,13 @@
* per ADC.
*/
public float getGain(Point ixy) {
- // Get the channel index.
- Integer index = indexChannelMap.get(ixy);
-
- // If the channel index was defined, return the pedestal.
+ // Get the channel index.
+ Integer index = indexChannelMap.get(ixy);
+
+ // If the channel index was defined, return the pedestal.
if(index != null) { return getGain(index); }
else {
- throw new IllegalArgumentException(String.format("Crystal (%3d, %3d) does not exist.", ixy.x, ixy.y));
+ throw new IllegalArgumentException(String.format("Crystal (%3d, %3d) does not exist.", ixy.x, ixy.y));
}
}
@@ -225,13 +226,13 @@
* of ADC.
*/
public float getPedestal(Point ixy) {
- // Get the channel index.
- Integer index = indexChannelMap.get(ixy);
-
- // If the channel index was defined, return the pedestal.
+ // Get the channel index.
+ Integer index = indexChannelMap.get(ixy);
+
+ // If the channel index was defined, return the pedestal.
if(index != null) { return getPedestal(index); }
else {
- throw new IllegalArgumentException(String.format("Crystal (%3d, %3d) does not exist.", ixy.x, ixy.y));
+ throw new IllegalArgumentException(String.format("Crystal (%3d, %3d) does not exist.", ixy.x, ixy.y));
}
}
@@ -305,39 +306,39 @@
}
@Override
- public void printConfig() {
- // Print the basic configuration information.
- System.out.println("FADC Configuration:");
- System.out.printf("\tMode :: %d%n", mode);
- System.out.printf("\tNSA :: %d%n", nsa);
- System.out.printf("\tNSB :: %d%n", nsb);
- System.out.printf("\tWindow Width :: %d%n", windowWidth);
- System.out.printf("\tWindow Offset :: %d%n", offset);
- System.out.printf("\tMax Peaks :: %d%n", maxPulses);
+ public void printConfig(PrintStream ps) {
+ // Print the basic configuration information.
+ ps.println("FADC Configuration:");
+ ps.printf("\tMode :: %d%n", mode);
+ ps.printf("\tNSA :: %d%n", nsa);
+ ps.printf("\tNSB :: %d%n", nsb);
+ ps.printf("\tWindow Width :: %d%n", windowWidth);
+ ps.printf("\tWindow Offset :: %d%n", offset);
+ ps.printf("\tMax Peaks :: %d%n", maxPulses);
// Output the pedestal/gain write-out header.
- System.out.println("\tix\tiy\tPedestal (ADC)\tGain (MeV/ADC)\tThreshold (ADC)");
+ ps.println("\tix\tiy\tPedestal (ADC)\tGain (MeV/ADC)\tThreshold (ADC)");
// Iterate over each crystal y-index.
yLoop:
for(int iy = -5; iy <= 5; iy++) {
- // iy = 0 does not exists; skip it!
- if(iy == 0) { continue yLoop; }
-
- // Iterate over each crystal x-index.
- xLoop:
- for(int ix = -23; ix <= 23; ix++) {
- // ix = 0 and the beam hole do not exist; skip these!
- if(ix == 0) { continue xLoop; }
- if((ix >= -10 && ix <= -2) && (iy == -1 || iy == 1)) {
- continue xLoop;
- }
-
- // Output the crystal indices, pedestal, and gain.
- int channelID = indexChannelMap.get(new Point(ix, iy));
- System.out.printf("\t%3d\t%3d\t%8.3f\t%8.3f\t%4d%n", ix, iy,
- getPedestal(channelID), getGain(channelID), getThreshold(channelID));
- }
+ // iy = 0 does not exists; skip it!
+ if(iy == 0) { continue yLoop; }
+
+ // Iterate over each crystal x-index.
+ xLoop:
+ for(int ix = -23; ix <= 23; ix++) {
+ // ix = 0 and the beam hole do not exist; skip these!
+ if(ix == 0) { continue xLoop; }
+ if((ix >= -10 && ix <= -2) && (iy == -1 || iy == 1)) {
+ continue xLoop;
+ }
+
+ // Output the crystal indices, pedestal, and gain.
+ int channelID = indexChannelMap.get(new Point(ix, iy));
+ ps.printf("\t%3d\t%3d\t%8.3f\t%8.3f\t%4d%n", ix, iy,
+ getPedestal(channelID), getGain(channelID), getThreshold(channelID));
+ }
}
}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/GTPConfig.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/GTPConfig.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/GTPConfig.java Wed Apr 27 11:11:32 2016
@@ -1,4 +1,6 @@
package org.hps.record.daqconfig;
+
+import java.io.PrintStream;
/**
* Class <code>GTPConfig</code> stores GTP configuration settings
@@ -57,14 +59,14 @@
}
@Override
- public void printConfig() {
+ public void printConfig(PrintStream ps) {
// Print the configuration header.
- System.out.println("GTP Configuration:");
+ ps.println("GTP Configuration:");
// Print the GTP settings.
- System.out.printf("\tTime Window Before :: %d clock-cycles%n", windowBefore);
- System.out.printf("\tTime Window After :: %d clock-cycles%n", windowAfter);
- System.out.printf("\tSeed Energy Min :: %5.3f GeV%n", seedCut.getLowerBound());
+ ps.printf("\tTime Window Before :: %d clock-cycles%n", windowBefore);
+ ps.printf("\tTime Window After :: %d clock-cycles%n", windowAfter);
+ ps.printf("\tSeed Energy Min :: %5.3f GeV%n", seedCut.getLowerBound());
}
}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/IDAQConfig.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/IDAQConfig.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/IDAQConfig.java Wed Apr 27 11:11:32 2016
@@ -1,4 +1,6 @@
package org.hps.record.daqconfig;
+
+import java.io.PrintStream;
/**
@@ -20,5 +22,5 @@
* Prints a textual representation of the configuration bank to the
* terminal.
*/
- public abstract void printConfig();
+ public abstract void printConfig(PrintStream ps);
}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/SSPConfig.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/SSPConfig.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/daqconfig/SSPConfig.java Wed Apr 27 11:11:32 2016
@@ -1,4 +1,6 @@
package org.hps.record.daqconfig;
+
+import java.io.PrintStream;
/**
@@ -98,64 +100,64 @@
}
@Override
- public void printConfig() {
+ public void printConfig(PrintStream ps) {
// Print the configuration header.
- System.out.println("SSP Configuration:");
+ ps.println("SSP Configuration:");
// Print the singles triggers.
for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
- System.out.printf("\tSingles Trigger %d%n", (triggerNum + 1));
- System.out.println("\t\tCluster Energy Lower Bound Cut");
- System.out.printf("\t\t\tEnabled :: %b%n", singlesTrigger[triggerNum].getEnergyMinCutConfig().isEnabled());
- System.out.printf("\t\t\tValue :: %5.3f GeV%n", singlesTrigger[triggerNum].getEnergyMinCutConfig().getLowerBound());
+ ps.printf("\tSingles Trigger %d%n", (triggerNum + 1));
+ ps.println("\t\tCluster Energy Lower Bound Cut");
+ ps.printf("\t\t\tEnabled :: %b%n", singlesTrigger[triggerNum].getEnergyMinCutConfig().isEnabled());
+ ps.printf("\t\t\tValue :: %5.3f GeV%n", singlesTrigger[triggerNum].getEnergyMinCutConfig().getLowerBound());
- System.out.println("\t\tCluster Energy Upper Bound Cut");
- System.out.printf("\t\t\tEnabled :: %b%n", singlesTrigger[triggerNum].getEnergyMaxCutConfig().isEnabled());
- System.out.printf("\t\t\tValue :: %5.3f GeV%n", singlesTrigger[triggerNum].getEnergyMaxCutConfig().getUpperBound());
+ ps.println("\t\tCluster Energy Upper Bound Cut");
+ ps.printf("\t\t\tEnabled :: %b%n", singlesTrigger[triggerNum].getEnergyMaxCutConfig().isEnabled());
+ ps.printf("\t\t\tValue :: %5.3f GeV%n", singlesTrigger[triggerNum].getEnergyMaxCutConfig().getUpperBound());
- System.out.println("\t\tCluster Hit Count Cut");
- System.out.printf("\t\t\tEnabled :: %b%n", singlesTrigger[triggerNum].getHitCountCutConfig().isEnabled());
- System.out.printf("\t\t\tValue :: %1.0f hits%n", singlesTrigger[triggerNum].getHitCountCutConfig().getLowerBound());
- System.out.println();
+ ps.println("\t\tCluster Hit Count Cut");
+ ps.printf("\t\t\tEnabled :: %b%n", singlesTrigger[triggerNum].getHitCountCutConfig().isEnabled());
+ ps.printf("\t\t\tValue :: %1.0f hits%n", singlesTrigger[triggerNum].getHitCountCutConfig().getLowerBound());
+ ps.println();
}
// Print the pair triggers.
for(int triggerNum = 0; triggerNum < 2; triggerNum++) {
- System.out.printf("\tPair Trigger %d%n", (triggerNum + 1));
- System.out.println("\t\tCluster Energy Lower Bound Cut");
- System.out.printf("\t\t\tEnabled :: %b%n", pairTrigger[triggerNum].getEnergyMinCutConfig().isEnabled());
- System.out.printf("\t\t\tValue :: %5.3f GeV%n", pairTrigger[triggerNum].getEnergyMinCutConfig().getLowerBound());
+ ps.printf("\tPair Trigger %d%n", (triggerNum + 1));
+ ps.println("\t\tCluster Energy Lower Bound Cut");
+ ps.printf("\t\t\tEnabled :: %b%n", pairTrigger[triggerNum].getEnergyMinCutConfig().isEnabled());
+ ps.printf("\t\t\tValue :: %5.3f GeV%n", pairTrigger[triggerNum].getEnergyMinCutConfig().getLowerBound());
- System.out.println("\t\tCluster Energy Upper Bound Cut");
- System.out.printf("\t\t\tEnabled :: %b%n", pairTrigger[triggerNum].getEnergyMaxCutConfig().isEnabled());
- System.out.printf("\t\t\tValue :: %5.3f GeV%n", pairTrigger[triggerNum].getEnergyMaxCutConfig().getUpperBound());
+ ps.println("\t\tCluster Energy Upper Bound Cut");
+ ps.printf("\t\t\tEnabled :: %b%n", pairTrigger[triggerNum].getEnergyMaxCutConfig().isEnabled());
+ ps.printf("\t\t\tValue :: %5.3f GeV%n", pairTrigger[triggerNum].getEnergyMaxCutConfig().getUpperBound());
- System.out.println("\t\tCluster Hit Count Cut");
- System.out.printf("\t\t\tEnabled :: %b%n", pairTrigger[triggerNum].getHitCountCutConfig().isEnabled());
- System.out.printf("\t\t\tValue :: %1.0f hits%n", pairTrigger[triggerNum].getHitCountCutConfig().getLowerBound());
+ ps.println("\t\tCluster Hit Count Cut");
+ ps.printf("\t\t\tEnabled :: %b%n", pairTrigger[triggerNum].getHitCountCutConfig().isEnabled());
+ ps.printf("\t\t\tValue :: %1.0f hits%n", pairTrigger[triggerNum].getHitCountCutConfig().getLowerBound());
- System.out.println("\t\tPair Energy Sum Cut");
- System.out.printf("\t\t\tEnabled :: %b%n", pairTrigger[triggerNum].getEnergySumCutConfig().isEnabled());
- System.out.printf("\t\t\tMin :: %5.3f GeV%n", pairTrigger[triggerNum].getEnergySumCutConfig().getLowerBound());
- System.out.printf("\t\t\tMax :: %5.3f GeV%n", pairTrigger[triggerNum].getEnergySumCutConfig().getUpperBound());
+ ps.println("\t\tPair Energy Sum Cut");
+ ps.printf("\t\t\tEnabled :: %b%n", pairTrigger[triggerNum].getEnergySumCutConfig().isEnabled());
+ ps.printf("\t\t\tMin :: %5.3f GeV%n", pairTrigger[triggerNum].getEnergySumCutConfig().getLowerBound());
+ ps.printf("\t\t\tMax :: %5.3f GeV%n", pairTrigger[triggerNum].getEnergySumCutConfig().getUpperBound());
- System.out.println("\t\tPair Energy Difference Cut");
- System.out.printf("\t\t\tEnabled :: %b%n", pairTrigger[triggerNum].getEnergyDifferenceCutConfig().isEnabled());
- System.out.printf("\t\t\tValue :: %5.3f GeV%n", pairTrigger[triggerNum].getEnergyDifferenceCutConfig().getUpperBound());
+ ps.println("\t\tPair Energy Difference Cut");
+ ps.printf("\t\t\tEnabled :: %b%n", pairTrigger[triggerNum].getEnergyDifferenceCutConfig().isEnabled());
+ ps.printf("\t\t\tValue :: %5.3f GeV%n", pairTrigger[triggerNum].getEnergyDifferenceCutConfig().getUpperBound());
- System.out.println("\t\tPair Energy Slope Cut");
- System.out.printf("\t\t\tEnabled :: %b%n", pairTrigger[triggerNum].getEnergySlopeCutConfig().isEnabled());
- System.out.printf("\t\t\tValue :: %5.3f GeV%n", pairTrigger[triggerNum].getEnergySlopeCutConfig().getLowerBound());
- System.out.printf("\t\t\tParam F :: %6.4f GeV/mm%n", pairTrigger[triggerNum].getEnergySlopeCutConfig().getParameterF());
+ ps.println("\t\tPair Energy Slope Cut");
+ ps.printf("\t\t\tEnabled :: %b%n", pairTrigger[triggerNum].getEnergySlopeCutConfig().isEnabled());
+ ps.printf("\t\t\tValue :: %5.3f GeV%n", pairTrigger[triggerNum].getEnergySlopeCutConfig().getLowerBound());
+ ps.printf("\t\t\tParam F :: %6.4f GeV/mm%n", pairTrigger[triggerNum].getEnergySlopeCutConfig().getParameterF());
- System.out.println("\t\tPair Coplanarity Cut");
- System.out.printf("\t\t\tEnabled :: %b%n", pairTrigger[triggerNum].getCoplanarityCutConfig().isEnabled());
- System.out.printf("\t\t\tValue :: %3.0f degrees%n", pairTrigger[triggerNum].getCoplanarityCutConfig().getUpperBound());
+ ps.println("\t\tPair Coplanarity Cut");
+ ps.printf("\t\t\tEnabled :: %b%n", pairTrigger[triggerNum].getCoplanarityCutConfig().isEnabled());
+ ps.printf("\t\t\tValue :: %3.0f degrees%n", pairTrigger[triggerNum].getCoplanarityCutConfig().getUpperBound());
- System.out.println("\t\tPair Time Coincidence Cut");
- System.out.printf("\t\t\tEnabled :: %b%n", pairTrigger[triggerNum].getTimeDifferenceCutConfig().isEnabled());
- System.out.printf("\t\t\tValue :: %1.0f clock-cycles%n", pairTrigger[triggerNum].getTimeDifferenceCutConfig().getUpperBound());
- System.out.println();
+ ps.println("\t\tPair Time Coincidence Cut");
+ ps.printf("\t\t\tEnabled :: %b%n", pairTrigger[triggerNum].getTimeDifferenceCutConfig().isEnabled());
+ ps.printf("\t\t\tValue :: %1.0f clock-cycles%n", pairTrigger[triggerNum].getTimeDifferenceCutConfig().getUpperBound());
+ ps.println();
}
}
}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/epics/EpicsData.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/epics/EpicsData.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/epics/EpicsData.java Wed Apr 27 11:11:32 2016
@@ -276,12 +276,13 @@
* @param evioEvent the EVIO event
* @return the EPICS data or <code>null</code> if it is not present in the event
*/
+ // FIXME: Not currently used.
public static EpicsData getEpicsData(EvioEvent evioEvent) {
EpicsData epicsData = null;
// Is this an EPICS event?
- if (EventTagConstant.EPICS.equals(evioEvent)) {
+ if (EventTagConstant.EPICS.matches(evioEvent)) {
// Find the bank with the EPICS data string.
final BaseStructure epicsBank = EvioBankTag.EPICS_STRING.findBank(evioEvent);
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/epics/EpicsEvioProcessor.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/epics/EpicsEvioProcessor.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/epics/EpicsEvioProcessor.java Wed Apr 27 11:11:32 2016
@@ -45,7 +45,7 @@
public void process(final EvioEvent evioEvent) {
// Is this an EPICS event?
- if (EventTagConstant.EPICS.equals(evioEvent)) {
+ if (EventTagConstant.EPICS.matches(evioEvent)) {
LOGGER.fine("processing EPICS event " + evioEvent.getEventNumber());
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/epics/EpicsRunProcessor.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/epics/EpicsRunProcessor.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/epics/EpicsRunProcessor.java Wed Apr 27 11:11:32 2016
@@ -39,7 +39,7 @@
private final EpicsEvioProcessor processor = new EpicsEvioProcessor();
/**
- * Create an EPICs log.
+ * Create a processor that will make a list of EPICS data.
*/
public EpicsRunProcessor() {
}
@@ -66,9 +66,9 @@
// Add EPICS data to the collection.
if (this.currentEpicsData != null) {
- LOGGER.info("adding EPICS data for run " + this.currentEpicsData.getEpicsHeader().getRun() + " and timestamp "
- + this.currentEpicsData.getEpicsHeader().getTimestamp() + " with seq "
- + this.currentEpicsData.getEpicsHeader().getSequence());
+ LOGGER.fine("Adding EPICS data with run " + this.currentEpicsData.getEpicsHeader().getRun() + "; timestamp "
+ + this.currentEpicsData.getEpicsHeader().getTimestamp() + "; seq "
+ + this.currentEpicsData.getEpicsHeader().getSequence() + ".");
this.epicsDataSet.add(this.currentEpicsData);
}
}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EventTagConstant.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EventTagConstant.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EventTagConstant.java Wed Apr 27 11:11:32 2016
@@ -63,7 +63,7 @@
return tag == this.tag;
}
- public boolean equals(final EvioEvent evioEvent) {
+ public boolean matches(final EvioEvent evioEvent) {
return evioEvent.getHeader().getTag() == this.tag;
}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioBankTag.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioBankTag.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioBankTag.java Wed Apr 27 11:11:32 2016
@@ -25,8 +25,10 @@
/** Scaler data bank. */
SCALERS(57621),
/** Trigger configuration bank. */
- TRIGGER_CONFIG(0xE10E);
-
+ TRIGGER_CONFIG(0xE10E),
+ /** TI trigger bank. */
+ TI_TRIGGER(0xe10a);
+
/**
* The bank's tag value.
*/
@@ -46,7 +48,7 @@
*
* @param startBank the starting bank
* @return the first bank matching the tag or <code>null<code> if not found
- */
+ */
public BaseStructure findBank(final BaseStructure startBank) {
BaseStructure foundBank = null;
if (this.equals(startBank)) {
@@ -60,8 +62,8 @@
}
}
return foundBank;
- }
-
+ }
+
/**
* Get the bank tag value.
*
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioDetectorConditionsProcessor.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioDetectorConditionsProcessor.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioDetectorConditionsProcessor.java Wed Apr 27 11:11:32 2016
@@ -39,21 +39,28 @@
*/
@Override
public void process(final EvioEvent evioEvent) throws Exception {
+
// Get the head head bank from event.
final BaseStructure headBank = EvioEventUtilities.getHeadBank(evioEvent);
- // Is the head bank present?
- if (headBank != null) {
+ // Initialize from head bank.
+ if (headBank != null) {
+ initializeConditions(headBank.getIntData()[1]);
+ }
+
+ // Initialize from PRESTART.
+ if (EventTagConstant.PRESTART.matches(evioEvent)) {
+ int runNumber = EvioEventUtilities.getControlEventData(evioEvent)[1];
+ initializeConditions(runNumber);
+ }
+ }
- // Get the run number from the head bank.
- final int runNumber = headBank.getIntData()[1];
-
- // Initialize the conditions system from the detector name and run number.
- try {
- ConditionsManager.defaultInstance().setDetector(this.detectorName, runNumber);
- } catch (final ConditionsNotFoundException e) {
- throw new RuntimeException("Error setting up conditions from EVIO head bank.", e);
- }
+ private void initializeConditions(final int runNumber) {
+ // Initialize the conditions system from the detector name and run number.
+ try {
+ ConditionsManager.defaultInstance().setDetector(this.detectorName, runNumber);
+ } catch (final ConditionsNotFoundException e) {
+ throw new RuntimeException("Error setting up conditions from EVIO head bank.", e);
}
}
@@ -65,6 +72,7 @@
* @param evioEvent the <code>EvioEvent</code> to process
*/
@Override
+ // FIXME: not activated by EvioLoop
public void startRun(final EvioEvent evioEvent) {
// System.out.println("EvioDetectorConditionsProcessor.startRun");
if (EvioEventUtilities.isPreStartEvent(evioEvent)) {
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioEventSkimmer.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioEventSkimmer.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioEventSkimmer.java Wed Apr 27 11:11:32 2016
@@ -14,7 +14,7 @@
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.jlab.coda.jevio.EventWriter;
import org.jlab.coda.jevio.EvioEvent;
import org.jlab.coda.jevio.EvioReader;
@@ -51,7 +51,7 @@
*/
public static void main(String[] args) {
- DefaultParser parser = new DefaultParser();
+ PosixParser parser = new PosixParser();
CommandLine commandLine = null;
try {
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioEventUtilities.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioEventUtilities.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioEventUtilities.java Wed Apr 27 11:11:32 2016
@@ -13,9 +13,6 @@
import org.hps.conditions.database.DatabaseConditionsManager;
import org.hps.record.daqconfig.EvioDAQParser;
-import org.hps.record.epics.EpicsData;
-import org.hps.record.epics.EpicsHeader;
-import org.hps.record.scalers.ScalerData;
import org.jlab.coda.jevio.BaseStructure;
import org.jlab.coda.jevio.EvioEvent;
import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
@@ -113,23 +110,18 @@
/**
* Get the run number from an EVIO event.
*
- * @return the run number
- * @throws IllegalArgumentException if event does not have a head bank
- */
- public static int getRunNumber(final EvioEvent event) {
+ * @return the run number or <code>null</code> if not present in event
+ */
+ public static Integer getRunNumber(final EvioEvent event) {
if (isControlEvent(event)) {
return getControlEventData(event)[1];
} else if (isPhysicsEvent(event)) {
final BaseStructure headBank = EvioEventUtilities.getHeadBank(event);
if (headBank != null) {
return headBank.getIntData()[1];
- } else {
- throw new IllegalArgumentException("Head bank is missing from physics event.");
- }
- } else {
- // Not sure if this would ever happen.
- throw new IllegalArgumentException("Wrong event type: " + event.getHeader().getTag());
- }
+ }
+ }
+ return null;
}
/**
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioFileProducer.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioFileProducer.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioFileProducer.java Wed Apr 27 11:11:32 2016
@@ -16,7 +16,7 @@
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.jlab.coda.et.EtAttachment;
import org.jlab.coda.et.EtConstants;
import org.jlab.coda.et.EtEvent;
@@ -195,7 +195,7 @@
public void run(final String[] args) {
// Command line parser.
- final DefaultParser parser = new DefaultParser();
+ final PosixParser parser = new PosixParser();
try {
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioFileSource.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioFileSource.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioFileSource.java Wed Apr 27 11:11:32 2016
@@ -4,6 +4,8 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import org.freehep.record.source.NoSuchRecordException;
import org.hps.record.AbstractRecordQueue;
@@ -12,15 +14,15 @@
import org.jlab.coda.jevio.EvioReader;
/**
- * A basic implementation of an <tt>AbstractRecordSource</tt> for supplying <tt>EvioEvent</tt> objects to a loop from a
- * list of EVIO files.
- * <p>
- * Unlike the LCIO record source, it has no rewind or indexing capabilities.
+ * A basic implementation of an <code>AbstractRecordSource</code> for supplying <code>EvioEvent</code> objects to a
+ * loop from a list of EVIO files.
*
* @author Jeremy McCormick, SLAC
*/
public final class EvioFileSource extends AbstractRecordQueue<EvioEvent> {
+ private static final Logger LOGGER = Logger.getLogger(EvioFileSource.class.getPackage().getName());
+
/**
* The current event.
*/
@@ -40,7 +42,12 @@
* The reader to use for reading and parsing the EVIO data.
*/
private EvioReader reader;
-
+
+ /**
+ * Whether to continue on parse errors or not.
+ */
+ private boolean continueOnErrors = false;
+
/**
* Constructor taking a single EVIO file.
*
@@ -60,7 +67,15 @@
this.files.addAll(files);
this.openReader();
}
-
+
+ /**
+ * Set whether to continue on errors or not.
+ * @param continueOnErrors <code>true</code> to continue on errors
+ */
+ public void setContinueOnErrors(boolean continueOnErrors) {
+ this.continueOnErrors = continueOnErrors;
+ }
+
/**
* Close the current reader.
*/
@@ -135,20 +150,26 @@
for (;;) {
try {
this.currentEvent = this.reader.parseNextEvent();
- } catch (final EvioException e) {
- throw new IOException(e);
+ if (this.reader.getNumEventsRemaining() == 0 && this.currentEvent == null) {
+ this.closeReader();
+ this.fileIndex++;
+ if (!this.endOfFiles()) {
+ this.openReader();
+ } else {
+ throw new NoSuchRecordException("End of data.");
+ }
+ } else {
+ LOGGER.finest("Read EVIO event " + this.currentEvent.getEventNumber() + " okay.");
+ break;
+ }
+ } catch (EvioException | NegativeArraySizeException e) {
+ LOGGER.log(Level.SEVERE, "Error parsing next EVIO event.", e);
+ if (!continueOnErrors) {
+ throw new IOException("Fatal error parsing next EVIO event.", e);
+ }
+ } catch (Exception e) {
+ throw new IOException("Error parsing EVIO event.", e);
}
- if (this.currentEvent == null) {
- this.closeReader();
- this.fileIndex++;
- if (!this.endOfFiles()) {
- this.openReader();
- continue;
- } else {
- throw new NoSuchRecordException();
- }
- }
- return;
}
}
@@ -159,10 +180,9 @@
*/
private void openReader() {
try {
- System.out.println("Opening reader for file " + this.files.get(this.fileIndex) + " ...");
- // FIXME: this should use the reader directly and cached paths should be managed externally
+ // FIXME: This should use the reader directly and MSS paths should be transformed externally.
+ LOGGER.info("opening EVIO file " + this.files.get(this.fileIndex).getPath() + " ...");
this.reader = EvioFileUtilities.open(this.files.get(this.fileIndex), true);
- System.out.println("Done opening file.");
} catch (EvioException | IOException e) {
throw new RuntimeException(e);
}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioFileUtilities.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioFileUtilities.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioFileUtilities.java Wed Apr 27 11:11:32 2016
@@ -25,24 +25,7 @@
* Milliseconds constant for conversion to/from second.
*/
private static final long MILLISECONDS = 1000L;
-
- /**
- * Get a cached file path, assuming that the input file path is on the JLAB MSS e.g. it starts with "/mss".
- *
- * @param file the MSS file path
- * @return the cached file path (prepends "/cache" to the path)
- * @throws IllegalArgumentException if the file is not on the MSS (e.g. path does not start with "/mss")
- */
- public static File getCachedFile(final File file) {
- if (!isMssFile(file)) {
- throw new IllegalArgumentException("File " + file.getPath() + " is not on the JLab MSS.");
- }
- if (isCachedFile(file)) {
- throw new IllegalArgumentException("File " + file.getPath() + " is already on the cache disk.");
- }
- return new File("/cache" + file.getPath());
- }
-
+
/**
* Get the run number from the file name.
*
@@ -70,26 +53,6 @@
}
/**
- * Return <code>true</code> if this is a file on the cache disk e.g. the path starts with "/cache".
- *
- * @param file the file
- * @return <code>true</code> if the file is a cached file
- */
- public static boolean isCachedFile(final File file) {
- return file.getPath().startsWith("/cache");
- }
-
- /**
- * Return <code>true</code> if this file is on the JLAB MSS e.g. the path starts with "/mss".
- *
- * @param file the file
- * @return <code>true</code> if the file is on the MSS
- */
- public static boolean isMssFile(final File file) {
- return file.getPath().startsWith("/mss");
- }
-
- /**
* Open an EVIO file using an <code>EvioReader</code> in memory mapping mode.
*
* @param file the EVIO file
@@ -98,7 +61,7 @@
* @throws EvioException if there is an error reading the EVIO data
*/
public static EvioReader open(final File file) throws IOException, EvioException {
- return open(file, false);
+ return open(file, true);
}
/**
@@ -111,14 +74,11 @@
* @throws EvioException if there is an error reading the EVIO data
*/
public static EvioReader open(final File file, final boolean sequential) throws IOException, EvioException {
- File openFile = file;
- if (isMssFile(file)) {
- openFile = getCachedFile(file);
- }
+ LOGGER.info("opening " + file.getPath() + " in " + (sequential ? "sequential" : "mmap" + " mode"));
final long start = System.currentTimeMillis();
- final EvioReader reader = new EvioReader(openFile, false, sequential);
+ final EvioReader reader = new EvioReader(file, false, sequential);
final long end = System.currentTimeMillis() - start;
- LOGGER.info("opened " + openFile.getPath() + " in " + (double) end / (double) MILLISECONDS + " seconds in "
+ LOGGER.info("opened " + file.getPath() + " in " + (double) end / (double) MILLISECONDS + " seconds in "
+ (sequential ? "sequential" : "mmap" + " mode"));
return reader;
}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioLoop.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioLoop.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioLoop.java Wed Apr 27 11:11:32 2016
@@ -1,52 +1,24 @@
package org.hps.record.evio;
-import org.freehep.record.loop.DefaultRecordLoop;
+import org.hps.record.AbstractRecordLoop;
+import org.jlab.coda.jevio.EvioEvent;
/**
* Implementation of a Freehep <code>RecordLoop</code> for EVIO data.
*
* @author Jeremy McCormick, SLAC
*/
-public class EvioLoop extends DefaultRecordLoop {
-
- /**
- * The record adapter.
- */
- private final EvioLoopAdapter adapter = new EvioLoopAdapter();
+public class EvioLoop extends AbstractRecordLoop<EvioEvent> {
/**
* Create a new record loop.
*/
public EvioLoop() {
+ this.adapter = new EvioLoopAdapter();
this.addLoopListener(adapter);
this.addRecordListener(adapter);
}
-
- /**
- * Add an EVIO event processor to the adapter which will be activated for every EVIO event that is processed.
- *
- * @param evioEventProcessor the EVIO processor to add
- */
- public void addEvioEventProcessor(final EvioEventProcessor evioEventProcessor) {
- adapter.addEvioEventProcessor(evioEventProcessor);
- }
-
- /**
- * Loop over events from the source.
- *
- * @param number the number of events to process or -1L for all events from the source
- * @return the number of records that were processed
- */
- public long loop(final long number) {
- if (number < 0L) {
- this.execute(Command.GO, true);
- } else {
- this.execute(Command.GO_N, number, true);
- this.execute(Command.STOP);
- }
- return this.getSupplied();
- }
-
+
/**
* Set the EVIO data source.
*
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioLoopAdapter.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioLoopAdapter.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/evio/EvioLoopAdapter.java Wed Apr 27 11:11:32 2016
@@ -1,14 +1,6 @@
package org.hps.record.evio;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.logging.Logger;
-
-import org.freehep.record.loop.AbstractLoopListener;
-import org.freehep.record.loop.LoopEvent;
-import org.freehep.record.loop.LoopListener;
-import org.freehep.record.loop.RecordEvent;
-import org.freehep.record.loop.RecordListener;
+import org.hps.record.AbstractLoopAdapter;
import org.jlab.coda.jevio.EvioEvent;
/**
@@ -16,79 +8,5 @@
*
* @author Jeremy McCormick, SLAC
*/
-public final class EvioLoopAdapter extends AbstractLoopListener implements RecordListener, LoopListener {
-
- /**
- * Initialize the logger.
- */
- private static final Logger LOGGER = Logger.getLogger(EvioLoopAdapter.class.getPackage().getName());
-
- /**
- * List of event processors to activate.
- */
- private final List<EvioEventProcessor> processors = new ArrayList<EvioEventProcessor>();
-
- /**
- * Create a new loop adapter.
- */
- EvioLoopAdapter() {
- }
-
- /**
- * Add an EVIO processor to the adapter.
- *
- * @param processor the EVIO processor to add to the adapter
- */
- void addEvioEventProcessor(final EvioEventProcessor processor) {
- LOGGER.info("adding " + processor.getClass().getName() + " to EVIO processors");
- this.processors.add(processor);
- }
-
- /**
- * Implementation of the finish hook which activates the {@link EvioEventProcessor#endJob()} method of all
- * registered processors.
- */
- @Override
- protected void finish(final LoopEvent event) {
- LOGGER.info("finish");
- for (final EvioEventProcessor processor : processors) {
- processor.endJob();
- }
- }
-
- /**
- * Primary event processing method that activates the {@link EvioEventProcessor#process(EvioEvent)} method of all
- * registered processors.
- *
- * @param recordEvent the record event to process which should have an EVIO event
- * @throws IllegalArgumentException if the record is the wrong type
- */
- @Override
- public void recordSupplied(final RecordEvent recordEvent) {
- final Object record = recordEvent.getRecord();
- if (record instanceof EvioEvent) {
- final EvioEvent evioEvent = EvioEvent.class.cast(record);
- for (final EvioEventProcessor processor : processors) {
- try {
- processor.process(evioEvent);
- } catch (final Exception e) {
- throw new RuntimeException(e);
- }
- }
- } else {
- throw new IllegalArgumentException("The supplied record has the wrong type: " + record.getClass());
- }
- }
-
- /**
- * Implementation of the start hook which activates the {@link EvioEventProcessor#startJob()} method of all
- * registered processors.
- */
- @Override
- protected void start(final LoopEvent event) {
- LOGGER.info("start");
- for (final EvioEventProcessor processor : processors) {
- processor.startJob();
- }
- }
+public final class EvioLoopAdapter extends AbstractLoopAdapter<EvioEvent> {
}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/scalers/ScalerUtilities.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/scalers/ScalerUtilities.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/scalers/ScalerUtilities.java Wed Apr 27 11:11:32 2016
@@ -74,7 +74,6 @@
// [67]/[68] = CLOCK
final double clock = (double) clockGated / (double) clockUngated;
- // Compute the live times.
final double[] liveTimes = new double[3];
liveTimes[LiveTimeIndex.FCUP_TDC.ordinal()] = fcupTdc;
liveTimes[LiveTimeIndex.FCUP_TRG.ordinal()] = fcupTrg;
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/svt/SvtEvioUtils.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/svt/SvtEvioUtils.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/svt/SvtEvioUtils.java Wed Apr 27 11:11:32 2016
@@ -1,11 +1,16 @@
package org.hps.record.svt;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.jlab.coda.jevio.BaseStructure;
+import org.jlab.coda.jevio.DataType;
+import org.jlab.coda.jevio.StructureType;
/**
* A set of static utility methods used to decode SVT data.
*
* @author Omar Moreno <[log in to unmask]>
- * @date November 20, 2014
*/
public class SvtEvioUtils {
@@ -19,9 +24,7 @@
private static final int APV_HEADER_BUFFER_ADDRESS_MASK = 0xFF; //[8:1]
private static final int APV_HEADER_DATA_FRAME_COUNT_MASK = 0xF; //[12:9]
private static final int APV_HEADER_DATA_APV_NR_MASK = 0x3; //[15:13]
-
-
-
+
// TODO: Move these to constants class
public static final int APV25_PER_HYBRID = 5;
public static final int CHANNELS_PER_APV25 = 128;
@@ -90,13 +93,12 @@
* Extract and return the front end board (FEB) ID associated with the
* multisample tail
*
- * @param data : a multisample header
+ * @param multisampleTail : a multisample tail
* @return A FEB ID in the range 0-10
*/
public static int getFebIDFromMultisampleTail(int multisampleTail) {
return (multisampleTail >>> 8) & FEB_MASK;
}
-
/**
* Extract and return the front end board (FEB) hybrid ID associated with
@@ -113,7 +115,7 @@
* Extract and return the front end board (FEB) hybrid ID associated with
* the multisample tail
*
- * @param multisample : a multisample tail
+ * @param multisampleTail : a multisample tail
* @return A FEB hybrid ID in the range 0-3
*/
public static int getFebHybridIDFromMultisampleTail(int multisampleTail) {
@@ -258,7 +260,7 @@
/**
* Extract the error bit from the multisample header.
*
- * @param data : multisample of data
+ * @param multisampleHeader : multisample of data
* @return value of the error bit. This is non-zero if there is an error.
*/
public static int getErrorBitFromMultisampleHeader(int multisampleHeader) {
@@ -441,6 +443,56 @@
return samples;
}
+ /**
+ * Retrieve all the banks in an event that match the given tag in their
+ * header and are not data banks.
+ *
+ * @param evioEvent : The event/bank being queried
+ * @param tag : The tag to match
+ * @return A collection of all bank structures that pass the filter
+ * provided by the event
+ */
+ public static List<BaseStructure> getROCBanks(BaseStructure evioEvent, int minROCTag, int maxROCTag) {
+ List<BaseStructure> matchingBanks = new ArrayList<BaseStructure>();
+ if (evioEvent.getChildCount() > 0) {
+ for (BaseStructure childBank : evioEvent.getChildrenList()) {
+ if (childBank.getStructureType() == StructureType.BANK
+ && childBank.getHeader().getDataType() == DataType.ALSOBANK
+ && childBank.getHeader().getTag() >= minROCTag
+ && childBank.getHeader().getTag() <= maxROCTag) {
+ matchingBanks.add(childBank);
+ }
+ }
+ }
+ return matchingBanks;
+ }
+
+ public static List<BaseStructure> getDataBanks(BaseStructure evioEvent, int minROCTag, int maxROCTag, int minDataTag, int maxDataTag) {
+ List<BaseStructure> rocBanks = getROCBanks(evioEvent, minROCTag, maxROCTag);
+ List<BaseStructure> matchingBanks = new ArrayList<BaseStructure>();
+ for (BaseStructure rocBank : rocBanks) {
+ if (rocBank.getChildCount() > 0) {
+ for (BaseStructure childBank : rocBank.getChildrenList()) {
+ if (childBank.getHeader().getTag() >= minDataTag
+ && childBank.getHeader().getTag() <= maxDataTag) {
+ matchingBanks.add(childBank);
+ }
+ }
+ }
+ }
+ return matchingBanks;
+ }
+
+ public static List<int[]> getMultisamples(int[] data, int sampleCount, int headerLength) {
+ List<int[]> sampleList = new ArrayList<int[]>();
+ // Loop through all of the samples and make hits
+ for (int samplesN = 0; samplesN < sampleCount; samplesN += 4) {
+ int[] samples = new int[4];
+ System.arraycopy(data, headerLength + samplesN, samples, 0, samples.length);
+ sampleList.add(samples);
+ }
+ return sampleList;
+ }
/**
* Private constructor to prevent the class from being instantiated.
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/AbstractIntData.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/AbstractIntData.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/AbstractIntData.java Wed Apr 27 11:11:32 2016
@@ -74,7 +74,6 @@
* Return the int bank of an AbstractIntData read from LCIO.
*
* @param object
- * @return
*/
public static int[] getBank(GenericObject object) {
int N = object.getNInt() - 1;
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/SSPData.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/SSPData.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/SSPData.java Wed Apr 27 11:11:32 2016
@@ -175,7 +175,7 @@
* of <code>SSPCosmicTrigger</code> objects.
*/
public List<SSPCosmicTrigger> getCosmicTriggers() {
- return cosmicList;
+ return cosmicList;
}
/**
@@ -184,7 +184,7 @@
* of <code>SSPPairTrigger</code> objects.
*/
public List<SSPPairTrigger> getPairTriggers() {
- return pairList;
+ return pairList;
}
/**
@@ -193,7 +193,7 @@
* of <code>SSPSinglesTrigger</code> objects.
*/
public List<SSPSinglesTrigger> getSinglesTriggers() {
- return singlesList;
+ return singlesList;
}
/**
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/SSPNumberedTrigger.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/SSPNumberedTrigger.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/SSPNumberedTrigger.java Wed Apr 27 11:11:32 2016
@@ -10,16 +10,41 @@
* @see SSPTrigger
*/
public abstract class SSPNumberedTrigger extends SSPTrigger {
- /**
- * Instantiates the <code>SSPNumberedTrigger</code>.
+ /**
+ * Instantiates the <code>SSPNumberedTrigger</code>.
* @param type - The type of trigger.
* @param time - The time at which the trigger occurred in ns.
* @param data - The trigger bit data.
- */
- public SSPNumberedTrigger(int type, int time, int data) {
- super(type, time, data);
- }
-
+ */
+ public SSPNumberedTrigger(int type, int time, int data) {
+ super(type, time, data);
+ }
+
+ /**
+ * Gets the number of the trigger which generated this object.
+ * @return Returns either <code>0</code> or <code>1</code>.
+ */
+ public abstract int getTriggerNumber();
+
+ /**
+ * Indicates whether the trigger was reported by the trigger number
+ * 0 trigger.
+ * @return <code>true</code> if the trigger was reported by the
+ * trigger number 0 trigger and <code>false</code> if by either
+ * the trigger number 1 or an unknown trigger.
+ */
+ public abstract boolean isTrigger0();
+
+ /**
+ * Indicates whether the trigger was reported by the trigger number
+ * 1 trigger.
+ * @return <code>true</code> if the trigger was reported by the
+ * trigger number 1 trigger and <code>false</code> if by either
+ * the trigger number 0 or an unknown trigger.
+ */
+ public abstract boolean isTrigger1();
+
+
/**
* Indicates whether the trigger was reported by the first of the
* singles triggers.
@@ -27,6 +52,7 @@
* first trigger and <code>false</code> if it was reported by the
* second trigger.
*/
+ @Deprecated
public abstract boolean isFirstTrigger();
/**
@@ -36,5 +62,6 @@
* second trigger and <code>false</code> if it was reported by
* the first trigger.
*/
+ @Deprecated
public abstract boolean isSecondTrigger();
-}
+}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/SSPPairTrigger.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/SSPPairTrigger.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/SSPPairTrigger.java Wed Apr 27 11:11:32 2016
@@ -20,13 +20,30 @@
}
@Override
+ public int getTriggerNumber() {
+ if(isFirstTrigger()) { return 0; }
+ else if(isSecondTrigger()) { return 1; }
+ else { return -1; }
+ }
+
+ @Override
+ public boolean isTrigger0() {
+ return (type == SSPData.TRIG_TYPE_PAIR0);
+ }
+
+ @Override
+ public boolean isTrigger1() {
+ return (type == SSPData.TRIG_TYPE_PAIR1);
+ }
+
+ @Override
public boolean isFirstTrigger() {
- return (type == SSPData.TRIG_TYPE_PAIR0);
+ return isTrigger0();
}
@Override
public boolean isSecondTrigger() {
- return (type == SSPData.TRIG_TYPE_PAIR1);
+ return isTrigger1();
}
/**
@@ -71,9 +88,9 @@
@Override
public String toString() {
- return String.format("Trigger %d :: %3d ns :: ESum: %d, EDiff: %d, ESlope: %d, Coplanarity: %d",
- isFirstTrigger() ? 1 : 2, getTime(), passCutEnergySum() ? 1 : 0,
- passCutEnergyDifference() ? 1 : 0, passCutEnergySlope() ? 1 : 0,
- passCutCoplanarity() ? 1 : 0);
+ return String.format("Trigger %d :: %3d ns :: ESum: %d, EDiff: %d, ESlope: %d, Coplanarity: %d",
+ isFirstTrigger() ? 1 : 2, getTime(), passCutEnergySum() ? 1 : 0,
+ passCutEnergyDifference() ? 1 : 0, passCutEnergySlope() ? 1 : 0,
+ passCutCoplanarity() ? 1 : 0);
}
}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/SSPSinglesTrigger.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/SSPSinglesTrigger.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/SSPSinglesTrigger.java Wed Apr 27 11:11:32 2016
@@ -26,13 +26,30 @@
}
@Override
+ public int getTriggerNumber() {
+ if(isFirstTrigger()) { return 0; }
+ else if(isSecondTrigger()) { return 1; }
+ else { return -1; }
+ }
+
+ @Override
+ public boolean isTrigger0() {
+ return (type == SSPData.TRIG_TYPE_SINGLES0_BOT) || (type == SSPData.TRIG_TYPE_SINGLES0_TOP);
+ }
+
+ @Override
+ public boolean isTrigger1() {
+ return (type == SSPData.TRIG_TYPE_SINGLES1_BOT) || (type == SSPData.TRIG_TYPE_SINGLES1_TOP);
+ }
+
+ @Override
public boolean isFirstTrigger() {
- return (type == SSPData.TRIG_TYPE_SINGLES0_BOT) || (type == SSPData.TRIG_TYPE_SINGLES0_TOP);
+ return isTrigger0();
}
@Override
public boolean isSecondTrigger() {
- return (type == SSPData.TRIG_TYPE_SINGLES1_BOT) || (type == SSPData.TRIG_TYPE_SINGLES1_TOP);
+ return isTrigger1();
}
/**
@@ -67,8 +84,8 @@
@Override
public String toString() {
- return String.format("Trigger %d :: %3d ns :: EClusterLow: %d; EClusterHigh %d; HitCount: %d",
- isFirstTrigger() ? 1 : 2, getTime(), passCutEnergyMin() ? 1 : 0,
- passCutEnergyMax() ? 1 : 0, passCutHitCount() ? 1 : 0);
+ return String.format("Trigger %d :: %3d ns :: EClusterLow: %d; EClusterHigh %d; HitCount: %d",
+ isFirstTrigger() ? 1 : 2, getTime(), passCutEnergyMin() ? 1 : 0,
+ passCutEnergyMax() ? 1 : 0, passCutHitCount() ? 1 : 0);
}
}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/TIData.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/TIData.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/TIData.java Wed Apr 27 11:11:32 2016
@@ -5,18 +5,25 @@
/**
* Class <code>TIData</code> is an implementation of abstract class
* <code>AbstractIntData</code> that represents a TI trigger bit bank.
- * It contains both a time window length and a set of flags that track
- * whether a trigger of a given type was registered with the event to
- * which this bank is attached.
*
* @author Nathan Baltzell <[log in to unmask]>
*/
public class TIData extends AbstractIntData {
- /** The EvIO bank header tag for TI data banks. */
+
+ /**
+ * The EvIO bank header tag for TI data banks.
+ */
public static final int BANK_TAG = 0xe10a; // EvioEventConstants.TI_TRIGGER_BANK_TAG;
- /** The expected number of entries in the data bank. */
- public static final int BANK_SIZE = 4;
-
+ /**
+ * The expected number of entries in the data bank for the 2015 data.
+ */
+ private static final int BANK_SIZE_2015 = 4;
+ /**
+ * The expected number of entries in the data bank for the 2016 data (after
+ * unprescaled trigger bits were added).
+ */
+ private static final int BANK_SIZE_2016 = 5;
+
// Store the parsed data bank parameters.
private long time = 0;
private boolean singles0 = false;
@@ -25,36 +32,59 @@
private boolean pairs1 = false;
private boolean calib = false;
private boolean pulser = false;
-
- /**
- * Creates a <code>TIData</code> bank from a raw EvIO data bank.
- * It is expected that the EvIO reader will verify that the bank
- * tag is of the appropriate type.
+ private boolean hasUnprescaledTriggerBits = false;
+ private boolean singles0Unprescaled = false;
+ private boolean singles1Unprescaled = false;
+ private boolean pairs0Unprescaled = false;
+ private boolean pairs1Unprescaled = false;
+ private boolean calibUnprescaled = false;
+ private boolean pulserUnprescaled = false;
+
+ /**
+ * Creates a <code>TIData</code> bank from a raw EvIO data bank. It is
+ * expected that the EvIO reader will verify that the bank tag is of the
+ * appropriate type.
+ *
* @param bank - The EvIO data bank.
*/
public TIData(int[] bank) {
super(bank);
decodeData();
}
-
+
/**
* Creates a <code>TIData</code> object from an existing LCIO
* <code>GenericObject</code>.
+ *
* @param tiData - The source data bank object.
*/
public TIData(GenericObject tiData) {
super(tiData, BANK_TAG);
decodeData();
}
-
+
@Override
protected final void decodeData() {
- // Check that the data bank is the expected size. If not, throw
- // and exception.
- if(this.bank.length != BANK_SIZE) {
- throw new RuntimeException("Invalid Data Length: " + bank.length);
- }
-
+ // Check that the data bank is the expected size. If not, throw
+ // an exception.
+ switch (this.bank.length) {
+ case BANK_SIZE_2015:
+// System.out.println("2015-style TI bank");
+ break;
+ case BANK_SIZE_2016:
+// System.out.format("2016-style TI bank, first word %x, last word %x\n", bank[0], bank[4]);
+ hasUnprescaledTriggerBits = true;
+ singles0Unprescaled = ((bank[0]) & 1) == 1;
+ singles1Unprescaled = ((bank[0] >> 1) & 1) == 1;
+ pairs0Unprescaled = ((bank[0] >> 2) & 1) == 1;
+ pairs1Unprescaled = ((bank[0] >> 3) & 1) == 1;
+ calibUnprescaled = ((bank[0] >> 4) & 1) == 1;
+ pulserUnprescaled = ((bank[0] >> 5) & 1) == 1;
+ break;
+ default:
+ throw new RuntimeException("Invalid Data Length: " + bank.length);
+ }
+
// Check each trigger bit to see if it is active. A value of
// 1 indicates a trigger of that type occurred, and 0 that it
// did not.
@@ -64,83 +94,175 @@
pairs1 = ((bank[0] >> 27) & 1) == 1;
calib = ((bank[0] >> 28) & 1) == 1;
pulser = ((bank[0] >> 29) & 1) == 1;
-
- // Get the unprocessed start and end times for the bank.
- long w1 = bank[2] & 0xffffffffL;
- long w2 = bank[3] & 0xffffffffL;
-
- // Process the times into units of clock-cycles.
+
+ // interpret time:
+ final long w1 = bank[2] & 0xffffffffL;
+ final long w2 = bank[3] & 0xffffffffL;
final long timelo = w1;
final long timehi = (w2 & 0xffff) << 32;
-
- // Store the time difference in nanoseconds.
time = 4 * (timelo + timehi);
}
-
+
@Override
public int getTag() {
return BANK_TAG;
}
-
- /**
- * Gets the time window for the bank.
- * @return Returns the time window length in nanoseconds.
- */
+
public long getTime() {
return time;
}
-
+
/**
* Indicates whether a singles 0 trigger was registered.
+ *
* @return Returns <code>true</code> if the trigger occurred, and
* <code>false</code> otherwise.
*/
public boolean isSingle0Trigger() {
return singles0;
}
-
+
/**
* Indicates whether a singles 1 trigger was registered.
+ *
* @return Returns <code>true</code> if the trigger occurred, and
* <code>false</code> otherwise.
*/
public boolean isSingle1Trigger() {
return singles1;
}
-
+
/**
* Indicates whether a pair 0 trigger was registered.
+ *
* @return Returns <code>true</code> if the trigger occurred, and
* <code>false</code> otherwise.
*/
public boolean isPair0Trigger() {
return pairs0;
}
-
+
/**
* Indicates whether a pair 1 trigger was registered.
+ *
* @return Returns <code>true</code> if the trigger occurred, and
* <code>false</code> otherwise.
*/
public boolean isPair1Trigger() {
return pairs1;
}
-
+
/**
* Indicates whether a cosmic trigger was registered.
+ *
* @return Returns <code>true</code> if the trigger occurred, and
* <code>false</code> otherwise.
*/
public boolean isCalibTrigger() {
return calib;
}
-
+
/**
* Indicates whether a random/pulser trigger was registered.
+ *
* @return Returns <code>true</code> if the trigger occurred, and
* <code>false</code> otherwise.
*/
public boolean isPulserTrigger() {
return pulser;
}
-}
+
+ /**
+ * Indicates whether this TI data has unprescaled trigger bits.
+ *
+ * @return Returns <code>true</code> if the TI data has a fifth int
+ * containing unprescaled trigger bits, and <code>false</code> otherwise.
+ */
+ public boolean hasUnprescaledTriggerBits() {
+ return hasUnprescaledTriggerBits;
+ }
+
+ /**
+ * Indicates whether a singles 0 (unprescaled) trigger was registered.
+ *
+ * @return Returns <code>true</code> if the trigger occurred, and
+ * <code>false</code> otherwise. Throws a RuntimeException if this data does
+ * not have unprescaled trigger bits.
+ */
+ public boolean isSingle0UnprescaledTrigger() {
+ if (!hasUnprescaledTriggerBits) {
+ throw new RuntimeException("This TI data does not have unprescaled trigger bits.");
+ }
+ return singles0Unprescaled;
+ }
+
+ /**
+ * Indicates whether a singles 1 (unprescaled) trigger was registered.
+ *
+ * @return Returns <code>true</code> if the trigger occurred, and
+ * <code>false</code> otherwise. Throws a RuntimeException if this data does
+ * not have unprescaled trigger bits.
+ */
+ public boolean isSingle1UnprescaledTrigger() {
+ if (!hasUnprescaledTriggerBits) {
+ throw new RuntimeException("This TI data does not have unprescaled trigger bits.");
+ }
+ return singles1Unprescaled;
+ }
+
+ /**
+ * Indicates whether a pair 0 (unprescaled) trigger was registered.
+ *
+ * @return Returns <code>true</code> if the trigger occurred, and
+ * <code>false</code> otherwise. Throws a RuntimeException if this data does
+ * not have unprescaled trigger bits.
+ */
+ public boolean isPair0UnprescaledTrigger() {
+ if (!hasUnprescaledTriggerBits) {
+ throw new RuntimeException("This TI data does not have unprescaled trigger bits.");
+ }
+ return pairs0Unprescaled;
+ }
+
+ /**
+ * Indicates whether a pair 1 (unprescaled) trigger was registered.
+ *
+ * @return Returns <code>true</code> if the trigger occurred, and
+ * <code>false</code> otherwise. Throws a RuntimeException if this data does
+ * not have unprescaled trigger bits.
+ */
+ public boolean isPair1UnprescaledTrigger() {
+ if (!hasUnprescaledTriggerBits) {
+ throw new RuntimeException("This TI data does not have unprescaled trigger bits.");
+ }
+ return pairs1Unprescaled;
+ }
+
+ /**
+ * Indicates whether a cosmic (unprescaled) trigger was registered.
+ *
+ * @return Returns <code>true</code> if the trigger occurred, and
+ * <code>false</code> otherwise. Throws a RuntimeException if this data does
+ * not have unprescaled trigger bits.
+ */
+ public boolean isCalibUnprescaledTrigger() {
+ if (!hasUnprescaledTriggerBits) {
+ throw new RuntimeException("This TI data does not have unprescaled trigger bits.");
+ }
+ return calibUnprescaled;
+ }
+
+ /**
+ * Indicates whether a random/pulser (unprescaled) trigger was registered.
+ *
+ * @return Returns <code>true</code> if the trigger occurred, and
+ * <code>false</code> otherwise. Throws a RuntimeException if this data does
+ * not have unprescaled trigger bits.
+ */
+ public boolean isPulserUnprescaledTrigger() {
+ if (!hasUnprescaledTriggerBits) {
+ throw new RuntimeException("This TI data does not have unprescaled trigger bits.");
+ }
+ return pulserUnprescaled;
+ }
+}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/TiTimeOffsetEvioProcessor.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/TiTimeOffsetEvioProcessor.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/TiTimeOffsetEvioProcessor.java Wed Apr 27 11:11:32 2016
@@ -55,13 +55,30 @@
}
}
}
+
+ public long getMinOffset() {
+ return this.minOffset;
+ }
+
+ public long getMaxOffset() {
+ return this.maxOffset;
+ }
+
+ public int getNumOutliers() {
+ return this.nOutliers;
+ }
+
+ public long getTiTimeOffset() {
+ final long offsetRange = maxOffset - minOffset;
+ if (offsetRange > minRange && nOutliers < maxOutliers) {
+ return minOffset;
+ } else {
+ return 0L;
+ }
+ }
public void updateTriggerConfig(final TriggerConfig triggerConfig) {
- final long offsetRange = maxOffset - minOffset;
- if (offsetRange > minRange && nOutliers < maxOutliers) {
- triggerConfig.put(TriggerConfigVariable.TI_TIME_OFFSET, minOffset);
- } else {
- triggerConfig.put(TriggerConfigVariable.TI_TIME_OFFSET, 0L);
- }
+ long tiTimeOffset = getTiTimeOffset();
+ triggerConfig.put(TriggerConfigVariable.TI_TIME_OFFSET, tiTimeOffset);
}
}
Modified: java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/TriggerModule.java
=============================================================================
--- java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/TriggerModule.java (original)
+++ java/branches/HPSJAVA-409/record-util/src/main/java/org/hps/record/triggerbank/TriggerModule.java Wed Apr 27 11:11:32 2016
@@ -44,8 +44,8 @@
* @see SSPCluster
*/
public final class TriggerModule {
- /** The calorimeter mid-plane, defined by the photon beam position
- * (30.52 mrad) at the calorimeter face (z = 1393 mm). */
+ /** The calorimeter mid-plane, defined by the photon beam position
+ * (30.52 mrad) at the calorimeter face (z = 1393 mm). */
private static final double ORIGIN_X = 1393.0 * Math.tan(0.03052);
/** The value of the parameter "F" in the energy slope equation
@@ -104,23 +104,23 @@
* above zero) will be accepted.
*/
public TriggerModule() {
- // Set the cluster singles cuts to accept all values by default.
- cuts.put(CLUSTER_HIT_COUNT_LOW, 0.0);
- cuts.put(CLUSTER_SEED_ENERGY_LOW, 0.0);
- cuts.put(CLUSTER_SEED_ENERGY_HIGH, Double.MAX_VALUE);
- cuts.put(CLUSTER_TOTAL_ENERGY_LOW, 0.0);
- cuts.put(CLUSTER_TOTAL_ENERGY_HIGH, Double.MAX_VALUE);
-
- // Set the cluster pair cuts to accept all values by default.
- cuts.put(PAIR_COPLANARITY_HIGH, 180.0);
- cuts.put(PAIR_ENERGY_DIFFERENCE_HIGH, Double.MAX_VALUE);
- cuts.put(PAIR_ENERGY_SLOPE_LOW, 0.0);
- cuts.put(PAIR_ENERGY_SUM_LOW, 0.0);
- cuts.put(PAIR_ENERGY_SUM_HIGH, Double.MAX_VALUE);
- cuts.put(PAIR_TIME_COINCIDENCE, Double.MAX_VALUE);
-
- // Set the default value of the energy slope parameter F.
- cuts.put(PAIR_ENERGY_SLOPE_F, 0.0055);
+ // Set the cluster singles cuts to accept all values by default.
+ cuts.put(CLUSTER_HIT_COUNT_LOW, 0.0);
+ cuts.put(CLUSTER_SEED_ENERGY_LOW, 0.0);
+ cuts.put(CLUSTER_SEED_ENERGY_HIGH, Double.MAX_VALUE);
+ cuts.put(CLUSTER_TOTAL_ENERGY_LOW, 0.0);
+ cuts.put(CLUSTER_TOTAL_ENERGY_HIGH, Double.MAX_VALUE);
+
+ // Set the cluster pair cuts to accept all values by default.
+ cuts.put(PAIR_COPLANARITY_HIGH, 180.0);
+ cuts.put(PAIR_ENERGY_DIFFERENCE_HIGH, Double.MAX_VALUE);
+ cuts.put(PAIR_ENERGY_SLOPE_LOW, 0.0);
+ cuts.put(PAIR_ENERGY_SUM_LOW, 0.0);
+ cuts.put(PAIR_ENERGY_SUM_HIGH, Double.MAX_VALUE);
+ cuts.put(PAIR_TIME_COINCIDENCE, Double.MAX_VALUE);
+
+ // Set the default value of the energy slope parameter F.
+ cuts.put(PAIR_ENERGY_SLOPE_F, 0.0055);
}
/**
@@ -142,25 +142,25 @@
* </ul>
*/
public TriggerModule(double... cutValues) {
- // Set the cuts to the default values.
- this();
-
- // Define the cuts in the order that they correspond to the
- // value arguments.
- String[] cutID = { CLUSTER_HIT_COUNT_LOW, CLUSTER_SEED_ENERGY_LOW, CLUSTER_SEED_ENERGY_HIGH,
- CLUSTER_TOTAL_ENERGY_LOW, CLUSTER_TOTAL_ENERGY_HIGH, PAIR_ENERGY_SUM_LOW, PAIR_ENERGY_SUM_HIGH,
- PAIR_ENERGY_DIFFERENCE_HIGH, PAIR_ENERGY_SLOPE_LOW, PAIR_COPLANARITY_HIGH, PAIR_ENERGY_SLOPE_F };
-
- // Iterate over the value arguments and assign them to the
- // appropriate cut.
- for(int i = 0; i < cutValues.length; i++) {
- // If more values were given then cuts exist, break from
- // the loop.
- if(i == 11) { break; }
-
- // Set the current cut to its corresponding value.
- cuts.put(cutID[i], cutValues[i]);
- }
+ // Set the cuts to the default values.
+ this();
+
+ // Define the cuts in the order that they correspond to the
+ // value arguments.
+ String[] cutID = { CLUSTER_HIT_COUNT_LOW, CLUSTER_SEED_ENERGY_LOW, CLUSTER_SEED_ENERGY_HIGH,
+ CLUSTER_TOTAL_ENERGY_LOW, CLUSTER_TOTAL_ENERGY_HIGH, PAIR_ENERGY_SUM_LOW, PAIR_ENERGY_SUM_HIGH,
+ PAIR_ENERGY_DIFFERENCE_HIGH, PAIR_ENERGY_SLOPE_LOW, PAIR_COPLANARITY_HIGH, PAIR_ENERGY_SLOPE_F };
+
+ // Iterate over the value arguments and assign them to the
+ // appropriate cut.
+ for(int i = 0; i < cutValues.length; i++) {
+ // If more values were given than cuts exist, break from
+ // the loop.
+ if(i == 11) { break; }
+
+ // Set the current cut to its corresponding value.
+ cuts.put(cutID[i], cutValues[i]);
+ }
}
/**
@@ -172,14 +172,14 @@
* specified in the argument is not valid.
*/
public double getCutValue(String cut) throws IllegalArgumentException {
- // Try to get the indicated cut.
- Double value = cuts.get(cut);
-
- // If the cut is valid, return it.
- if(value != null) { return value.doubleValue(); }
-
- // Otherwise, produce an exception.
- else { throw new IllegalArgumentException(String.format("Cut \"%s\" does not exist.", cut)); }
+ // Try to get the indicated cut.
+ Double value = cuts.get(cut);
+
+ // If the cut is valid, return it.
+ if(value != null) { return value.doubleValue(); }
+
+ // Otherwise, produce an exception.
+ else { throw new IllegalArgumentException(String.format("Cut \"%s\" does not exist.", cut)); }
}
/**
@@ -190,22 +190,22 @@
* @param config - The DAQ configuration settings.
*/
public void loadDAQConfiguration(SinglesTriggerConfig config) {
- // Set the trigger values.
- setCutValue(CLUSTER_TOTAL_ENERGY_LOW, config.getEnergyMinCutConfig().getLowerBound());
- setCutValue(CLUSTER_TOTAL_ENERGY_HIGH, config.getEnergyMaxCutConfig().getUpperBound());
- setCutValue(CLUSTER_HIT_COUNT_LOW, config.getHitCountCutConfig().getLowerBound());
-
- // The remaining triggers should be set to their default values.
- // These settings effectively accept all possible clusters.
- cuts.put(PAIR_COPLANARITY_HIGH, 180.0);
- cuts.put(PAIR_ENERGY_DIFFERENCE_HIGH, Double.MAX_VALUE);
- cuts.put(PAIR_ENERGY_SLOPE_LOW, 0.0);
- cuts.put(PAIR_ENERGY_SUM_LOW, 0.0);
- cuts.put(PAIR_ENERGY_SUM_HIGH, Double.MAX_VALUE);
- cuts.put(PAIR_TIME_COINCIDENCE, Double.MAX_VALUE);
-
- // Set the default value of the energy slope parameter F.
- cuts.put(PAIR_ENERGY_SLOPE_F, 0.0055);
+ // Set the trigger values.
+ setCutValue(CLUSTER_TOTAL_ENERGY_LOW, config.getEnergyMinCutConfig().getLowerBound());
+ setCutValue(CLUSTER_TOTAL_ENERGY_HIGH, config.getEnergyMaxCutConfig().getUpperBound());
+ setCutValue(CLUSTER_HIT_COUNT_LOW, config.getHitCountCutConfig().getLowerBound());
+
+ // The remaining triggers should be set to their default values.
+ // These settings effectively accept all possible clusters.
+ cuts.put(PAIR_COPLANARITY_HIGH, 180.0);
+ cuts.put(PAIR_ENERGY_DIFFERENCE_HIGH, Double.MAX_VALUE);
+ cuts.put(PAIR_ENERGY_SLOPE_LOW, 0.0);
+ cuts.put(PAIR_ENERGY_SUM_LOW, 0.0);
+ cuts.put(PAIR_ENERGY_SUM_HIGH, Double.MAX_VALUE);
+ cuts.put(PAIR_TIME_COINCIDENCE, Double.MAX_VALUE);
+
+ // Set the default value of the energy slope parameter F.
+ cuts.put(PAIR_ENERGY_SLOPE_F, 0.0055);
}
/**
@@ -215,22 +215,22 @@
* @param config - The DAQ configuration settings.
*/
public void loadDAQConfiguration(PairTriggerConfig config) {
- // Set the trigger values.
- setCutValue(CLUSTER_TOTAL_ENERGY_LOW, config.getEnergyMinCutConfig().getLowerBound());
- setCutValue(CLUSTER_TOTAL_ENERGY_HIGH, config.getEnergyMaxCutConfig().getUpperBound());
- setCutValue(CLUSTER_HIT_COUNT_LOW, config.getHitCountCutConfig().getLowerBound());
-
- // The remaining triggers should be set to their default values.
- // These settings effectively accept all possible clusters.
- cuts.put(PAIR_COPLANARITY_HIGH, config.getCoplanarityCutConfig().getUpperBound());
- cuts.put(PAIR_ENERGY_DIFFERENCE_HIGH, config.getEnergyDifferenceCutConfig().getUpperBound());
- cuts.put(PAIR_ENERGY_SLOPE_LOW, config.getEnergySlopeCutConfig().getLowerBound());
- cuts.put(PAIR_ENERGY_SUM_LOW, config.getEnergySumCutConfig().getLowerBound());
- cuts.put(PAIR_ENERGY_SUM_HIGH, config.getEnergySumCutConfig().getUpperBound());
- cuts.put(PAIR_TIME_COINCIDENCE, config.getTimeDifferenceCutConfig().getUpperBound() * 4.0);
-
- // Set the default value of the energy slope parameter F.
- cuts.put(PAIR_ENERGY_SLOPE_F, config.getEnergySlopeCutConfig().getParameterF());
+ // Set the trigger values.
+ setCutValue(CLUSTER_TOTAL_ENERGY_LOW, config.getEnergyMinCutConfig().getLowerBound());
+ setCutValue(CLUSTER_TOTAL_ENERGY_HIGH, config.getEnergyMaxCutConfig().getUpperBound());
+ setCutValue(CLUSTER_HIT_COUNT_LOW, config.getHitCountCutConfig().getLowerBound());
+
+ // The remaining triggers should be set to their default values.
+ // These settings effectively accept all possible clusters.
+ cuts.put(PAIR_COPLANARITY_HIGH, config.getCoplanarityCutConfig().getUpperBound());
+ cuts.put(PAIR_ENERGY_DIFFERENCE_HIGH, config.getEnergyDifferenceCutConfig().getUpperBound());
+ cuts.put(PAIR_ENERGY_SLOPE_LOW, config.getEnergySlopeCutConfig().getLowerBound());
+ cuts.put(PAIR_ENERGY_SUM_LOW, config.getEnergySumCutConfig().getLowerBound());
+ cuts.put(PAIR_ENERGY_SUM_HIGH, config.getEnergySumCutConfig().getUpperBound());
+ cuts.put(PAIR_TIME_COINCIDENCE, config.getTimeDifferenceCutConfig().getUpperBound() * 4.0);
+
+ // Set the default value of the energy slope parameter F.
+ cuts.put(PAIR_ENERGY_SLOPE_F, config.getEnergySlopeCutConfig().getParameterF());
}
/**
@@ -244,76 +244,76 @@
* identifier is not valid.
*/
public void setCutValue(String cut, double value) throws IllegalArgumentException {
- // Make sure that the cut exists. If it does, change it to the
- // new cut value.
- if(cuts.containsKey(cut)) {
- cuts.put(cut, value);
- }
-
- // Otherwise, throw an exception.
- else { throw new IllegalArgumentException(String.format("Cut \"%s\" does not exist.", cut)); }
- }
-
- /**
- * Sets the cluster singles cuts to the values parsed from an
- * argument string.
- * @param isSingles - Indicates whether the parser should expect
- * 10 cut values (for pairs) or 3 (for singles).
- * @param cutValues - A string representing the cuts values. This
- * must be formatted in the style of "Emin Emax Nmin ...".
- */
+ // Make sure that the cut exists. If it does, change it to the
+ // new cut value.
+ if(cuts.containsKey(cut)) {
+ cuts.put(cut, value);
+ }
+
+ // Otherwise, throw an exception.
+ else { throw new IllegalArgumentException(String.format("Cut \"%s\" does not exist.", cut)); }
+ }
+
+ /**
+ * Sets the cluster singles cuts to the values parsed from an
+ * argument string.
+ * @param isSingles - Indicates whether the parser should expect
+ * 10 cut values (for pairs) or 3 (for singles).
+ * @param cutValues - A string representing the cuts values. This
+ * must be formatted in the style of "Emin Emax Nmin ...".
+ */
// TODO: Specify in JavaDoc what the order of these arguments is.
- public void setCutValues(boolean isSingles, String cutValues) {
- // Make sure that the string is not null.
- if(cutValues == null) {
- throw new NullPointerException(String.format("Cut arguments for trigger are null!"));
- }
-
- // Tokenize the argument string.
- StringTokenizer tokens = new StringTokenizer(cutValues);
-
- // Store the cut values. Entry format is:
- // clusterEnergyMin clusterEnergyMax hitCountMin
- // clusterEnergyMin clusterEnergyMax hitCountMin pairSumMin pairSumMax pairDiffMax pairSlopeMin pairSlopeF pairCoplanarityMax pairTimeCoincidence
- double cuts[];
- if(isSingles) { cuts = new double[] { 0.0, 8.191, 0 }; }
- else { cuts = new double[] { 0.0, 8.191, 0, 0, 8.191, 8.191, 0, 0.0055, 180, Double.MAX_VALUE }; }
- String[] cutNames = { "clusterEnergyMin", "clusterEnergyMax", "hitCountMin",
- "pairSumMin", "pairSumMax", "pairDiffMax", "pairSlopeMin", "pairSlopeF",
- "pairCoplanarityMax", "pairTimeCoincidence" };
-
- // Iterate over the number of cuts and extract that many values
- // from the cut value string.
- for(int cutNum = 0; cutNum < cuts.length; cutNum++) {
- // If there are no more tokens left, the argument string
- // is missing some values. Throw an exception!
- if(tokens.hasMoreTokens()) {
- // Get the next token from the string.
- String arg = tokens.nextToken();
-
- // Try to parse the token as a double. All cut values
- // should be rendered as doubles (or integers, which
- // can be parsed as doubles). If it is not, the string
- // is improperly formatted.
- try { cuts[cutNum] = Double.parseDouble(arg); }
- catch(NumberFormatException e) {
- throw new NumberFormatException(String.format("Argument for \"%s\" improperly formatted: %s", cutNames[cutNum], arg));
- }
- }
- }
-
- // Store the cuts in the trigger.
- setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, cuts[0]);
- setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, cuts[1]);
- setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, cuts[2]);
- setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, cuts[3]);
- setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, cuts[4]);
- setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, cuts[5]);
- setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, cuts[6]);
- setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F, cuts[7]);
- setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, cuts[8]);
- setCutValue(TriggerModule.PAIR_TIME_COINCIDENCE, cuts[9]);
- }
+ public void setCutValues(boolean isSingles, String cutValues) {
+ // Make sure that the string is not null.
+ if(cutValues == null) {
+ throw new NullPointerException(String.format("Cut arguments for trigger are null!"));
+ }
+
+ // Tokenize the argument string.
+ StringTokenizer tokens = new StringTokenizer(cutValues);
+
+ // Store the cut values. Entry format is:
+ // clusterEnergyMin clusterEnergyMax hitCountMin
+ // clusterEnergyMin clusterEnergyMax hitCountMin pairSumMin pairSumMax pairDiffMax pairSlopeMin pairSlopeF pairCoplanarityMax pairTimeCoincidence
+ double cuts[];
+ if(isSingles) { cuts = new double[] { 0.0, 8.191, 0 }; }
+ else { cuts = new double[] { 0.0, 8.191, 0, 0, 8.191, 8.191, 0, 0.0055, 180, Double.MAX_VALUE }; }
+ String[] cutNames = { "clusterEnergyMin", "clusterEnergyMax", "hitCountMin",
+ "pairSumMin", "pairSumMax", "pairDiffMax", "pairSlopeMin", "pairSlopeF",
+ "pairCoplanarityMax", "pairTimeCoincidence" };
+
+ // Iterate over the number of cuts and extract that many values
+ // from the cut value string.
+ for(int cutNum = 0; cutNum < cuts.length; cutNum++) {
+ // If there are no more tokens left, the argument string
+ // is missing some values. Throw an exception!
+ if(tokens.hasMoreTokens()) {
+ // Get the next token from the string.
+ String arg = tokens.nextToken();
+
+ // Try to parse the token as a double. All cut values
+ // should be rendered as doubles (or integers, which
+ // can be parsed as doubles). If it is not, the string
+ // is improperly formatted.
+ try { cuts[cutNum] = Double.parseDouble(arg); }
+ catch(NumberFormatException e) {
+ throw new NumberFormatException(String.format("Argument for \"%s\" improperly formatted: %s", cutNames[cutNum], arg));
+ }
+ }
+ }
+
+ // Store the cuts in the trigger.
+ setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, cuts[0]);
+ setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, cuts[1]);
+ setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, cuts[2]);
+ setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, cuts[3]);
+ setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, cuts[4]);
+ setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, cuts[5]);
+ setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, cuts[6]);
+ setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F, cuts[7]);
+ setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, cuts[8]);
+ setCutValue(TriggerModule.PAIR_TIME_COINCIDENCE, cuts[9]);
+ }
/**
* Checks whether a cluster passes the cluster hit count cut. This
@@ -346,7 +346,7 @@
* and <code>false</code> if the cluster does not.
*/
public boolean clusterSeedEnergyCut(Cluster cluster) {
- return clusterSeedEnergyCut(getValueClusterSeedEnergy(cluster));
+ return clusterSeedEnergyCut(getValueClusterSeedEnergy(cluster));
}
/**
@@ -357,7 +357,7 @@
* and <code>false</code> if the cluster does not.
*/
public boolean clusterSeedEnergyCutHigh(Cluster cluster) {
- return clusterSeedEnergyCutHigh(getValueClusterSeedEnergy(cluster));
+ return clusterSeedEnergyCutHigh(getValueClusterSeedEnergy(cluster));
}
/**
@@ -368,7 +368,7 @@
* and <code>false</code> if the cluster does not.
*/
public boolean clusterSeedEnergyCutLow(Cluster cluster) {
- return clusterSeedEnergyCutLow(getValueClusterSeedEnergy(cluster));
+ return clusterSeedEnergyCutLow(getValueClusterSeedEnergy(cluster));
}
/**
@@ -380,7 +380,7 @@
* and <code>false</code> if the cluster does not.
*/
public boolean clusterTotalEnergyCut(Cluster cluster) {
- return clusterTotalEnergyCut(getValueClusterTotalEnergy(cluster));
+ return clusterTotalEnergyCut(getValueClusterTotalEnergy(cluster));
}
/**
@@ -392,7 +392,7 @@
* and <code>false</code> if the cluster does not.
*/
public boolean clusterTotalEnergyCutHigh(Cluster cluster) {
- return clusterTotalEnergyCutHigh(getValueClusterTotalEnergy(cluster));
+ return clusterTotalEnergyCutHigh(getValueClusterTotalEnergy(cluster));
}
/**
@@ -404,7 +404,7 @@
* and <code>false</code> if the cluster does not.
*/
public boolean clusterTotalEnergyCutLow(Cluster cluster) {
- return clusterTotalEnergyCutLow(getValueClusterTotalEnergy(cluster));
+ return clusterTotalEnergyCutLow(getValueClusterTotalEnergy(cluster));
}
/**
@@ -416,7 +416,7 @@
* and <code>false</code> if the cluster does not.
*/
public boolean clusterTotalEnergyCut(SSPCluster cluster) {
- return clusterTotalEnergyCut(getValueClusterTotalEnergy(cluster));
+ return clusterTotalEnergyCut(getValueClusterTotalEnergy(cluster));
}
/**
@@ -428,7 +428,7 @@
* and <code>false</code> if the cluster does not.
*/
public boolean clusterTotalEnergyCutHigh(SSPCluster cluster) {
- return clusterTotalEnergyCutHigh(getValueClusterTotalEnergy(cluster));
+ return clusterTotalEnergyCutHigh(getValueClusterTotalEnergy(cluster));
}
/**
@@ -440,7 +440,39 @@
* and <code>false</code> if the cluster does not.
*/
public boolean clusterTotalEnergyCutLow(SSPCluster cluster) {
- return clusterTotalEnergyCutLow(getValueClusterTotalEnergy(cluster));
+ return clusterTotalEnergyCutLow(getValueClusterTotalEnergy(cluster));
+ }
+
+ /**
+ * Gets the angle between the cluster and the centerpoint of the
+ * calorimeter.
+ * @param cluster - The cluster for which to calculate the angle.
+ * @return Returns the cluster angle as an <code>int</code> in units
+ * of degrees.
+ */
+ public static int getClusterAngle(Cluster cluster) {
+ // Get the cluster position.
+ double x = getClusterX(cluster);
+ double y = getClusterY(cluster);
+
+ // Return the cluster angle.
+ return getClusterAngle(x, y);
+ }
+
+ /**
+ * Gets the angle between the cluster and the centerpoint of the
+ * calorimeter.
+ * @param cluster - The cluster for which to calculate the angle.
+ * @return Returns the cluster angle as an <code>int</code> in units
+ * of degrees.
+ */
+ public static int getClusterAngle(SSPCluster cluster) {
+ // Get the cluster position.
+ double x = getClusterX(cluster);
+ double y = getClusterY(cluster);
+
+ // Return the cluster angle.
+ return getClusterAngle(x, y);
}
/**
@@ -449,14 +481,28 @@
* be calculated.
* @return Returns displacement of the cluster.
*/
- // TODO: What defines cluster distance?
public static double getClusterDistance(Cluster cluster) {
- // Get the variables from the cluster.
- double x = getClusterX(cluster);
- double y = getClusterY(cluster);
-
- // Perform the calculation.
- return getClusterDistance(x, y);
+ // Get the variables from the cluster.
+ double x = getClusterX(cluster);
+ double y = getClusterY(cluster);
+
+ // Perform the calculation.
+ return getClusterDistance(x, y);
+ }
+
+ /**
+ * Calculates the distance between the origin and a cluster.
+ * @param cluster - The cluster pair from which the value should
+ * be calculated.
+ * @return Returns displacement of the cluster.
+ */
+ public static double getClusterDistance(SSPCluster cluster) {
+ // Get the variables from the cluster.
+ double x = getClusterX(cluster);
+ double y = getClusterY(cluster);
+
+ // Perform the calculation.
+ return getClusterDistance(x, y);
}
/**
@@ -466,7 +512,7 @@
* @return Returns the size as an <code>int</code>.
*/
public static final double getClusterHitCount(Cluster cluster) {
- return cluster.getCalorimeterHits().size();
+ return cluster.getCalorimeterHits().size();
}
/**
@@ -476,7 +522,7 @@
* @return Returns the size as an <code>int</code>.
*/
public static final double getClusterHitCount(SSPCluster cluster) {
- return cluster.getHitCount();
+ return cluster.getHitCount();
}
/**
@@ -486,11 +532,11 @@
* object.
*/
public static final CalorimeterHit getClusterSeedHit(Cluster cluster) {
- if(getClusterHitCount(cluster) > 0) {
- return cluster.getCalorimeterHits().get(0);
- } else {
- throw new NullPointerException("Cluster does not define hits!");
- }
+ if(getClusterHitCount(cluster) > 0) {
+ return cluster.getCalorimeterHits().get(0);
+ } else {
+ throw new NullPointerException("Cluster does not define hits!");
+ }
}
/**
@@ -500,7 +546,7 @@
* @return Returns the time as a <code>double</code>.
*/
public static final double getClusterTime(Cluster cluster) {
- return getClusterSeedHit(cluster).getTime();
+ return getClusterSeedHit(cluster).getTime();
}
/**
@@ -510,7 +556,7 @@
* @return Returns the time as a <code>double</code>.
*/
public static final double getClusterTime(SSPCluster cluster) {
- return cluster.getTime();
+ return cluster.getTime();
}
/**
@@ -520,7 +566,7 @@
* @return Returns the cluster x-position.
*/
public static double getClusterX(Cluster cluster) {
- return getCrystalPosition(getClusterXIndex(cluster), getClusterYIndex(cluster))[0];
+ return getCrystalPosition(getClusterXIndex(cluster), getClusterYIndex(cluster))[0];
}
/**
@@ -530,7 +576,7 @@
* @return Returns the cluster x-position.
*/
public static double getClusterX(SSPCluster cluster) {
- return getCrystalPosition(cluster.getXIndex(), cluster.getYIndex())[0];
+ return getCrystalPosition(cluster.getXIndex(), cluster.getYIndex())[0];
}
/**
@@ -539,7 +585,7 @@
* @return Returns the index as an <code>int</code>.
*/
public static final int getClusterXIndex(Cluster cluster) {
- return getClusterSeedHit(cluster).getIdentifierFieldValue("ix");
+ return getClusterSeedHit(cluster).getIdentifierFieldValue("ix");
}
/**
@@ -548,7 +594,7 @@
* @return Returns the index as an <code>int</code>.
*/
public static final int getClusterXIndex(SSPCluster cluster) {
- return cluster.getXIndex();
+ return cluster.getXIndex();
}
/**
@@ -558,7 +604,7 @@
* @return Returns the cluster y-position.
*/
public static double getClusterY(Cluster cluster) {
- return getCrystalPosition(getClusterXIndex(cluster), getClusterYIndex(cluster))[1];
+ return getCrystalPosition(getClusterXIndex(cluster), getClusterYIndex(cluster))[1];
}
/**
@@ -568,7 +614,7 @@
* @return Returns the cluster y-position.
*/
public static double getClusterY(SSPCluster cluster) {
- return getCrystalPosition(cluster.getXIndex(), cluster.getYIndex())[1];
+ return getCrystalPosition(cluster.getXIndex(), cluster.getYIndex())[1];
}
/**
@@ -577,7 +623,7 @@
* @return Returns the index as an <code>int</code>.
*/
public static final int getClusterYIndex(Cluster cluster) {
- return getClusterSeedHit(cluster).getIdentifierFieldValue("iy");
+ return getClusterSeedHit(cluster).getIdentifierFieldValue("iy");
}
/**
@@ -586,7 +632,7 @@
* @return Returns the index as an <code>int</code>.
*/
public static final int getClusterYIndex(SSPCluster cluster) {
- return cluster.getYIndex();
+ return cluster.getYIndex();
}
/**
@@ -596,7 +642,7 @@
* @return Returns the cluster z-position.
*/
public static double getClusterZ(Cluster cluster) {
- return getCrystalPosition(getClusterXIndex(cluster), getClusterYIndex(cluster))[2];
+ return getCrystalPosition(getClusterXIndex(cluster), getClusterYIndex(cluster))[2];
}
/**
@@ -606,7 +652,7 @@
* @return Returns the cluster z-position.
*/
public static double getClusterZ(SSPCluster cluster) {
- return getCrystalPosition(cluster.getXIndex(), cluster.getYIndex())[2];
+ return getCrystalPosition(cluster.getXIndex(), cluster.getYIndex())[2];
}
/**
@@ -618,48 +664,48 @@
* first entry in the array is always the top cluster, with the
* bottom cluster in the next position.
*/
- public static <E> List<E[]> getTopBottomPairs(List<E> clusters, Class<E> clusterType) throws IllegalArgumentException {
- // Ensure that only valid cluster types are processed.
- if(!clusterType.equals(Cluster.class) && !clusterType.equals(SSPCluster.class)) {
- throw new IllegalArgumentException("Class \"" + clusterType.getSimpleName() + "\" is not a supported cluster type.");
- }
-
- // Create a list to store top clusters, bottom clusters, and
- // cluster pairs.
- List<E> topClusters = new ArrayList<E>();
- List<E> botClusters = new ArrayList<E>();
- List<E[]> pairClusters = new ArrayList<E[]>();
-
- // Separate the cluster list into top/bottom clusters.
- for(E cluster : clusters) {
- // Process LCIO clusters...
- if(clusterType.equals(Cluster.class)) {
- if(getClusterYIndex((Cluster) cluster) > 0) {
- topClusters.add(cluster);
- } else { botClusters.add(cluster); }
- }
-
- // Process SSP clusters...
- else if(clusterType.equals(SSPCluster.class)) {
- if(getClusterYIndex((SSPCluster) cluster) > 0) {
- topClusters.add(cluster);
- } else { botClusters.add(cluster); }
- }
- }
-
- // Form all top/bottom cluster pairs.
- for(E topCluster : topClusters) {
- for(E botCluster : botClusters) {
- @SuppressWarnings("unchecked")
- E[] pair = (E[]) Array.newInstance(clusterType, 2);
- pair[0] = topCluster;
- pair[1] = botCluster;
- pairClusters.add(pair);
- }
- }
-
- // Return the cluster pairs.
- return pairClusters;
+ public static <E> List<E[]> getTopBottomPairs(List<E> clusters, Class<E> clusterType) throws IllegalArgumentException {
+ // Ensure that only valid cluster types are processed.
+ if(!clusterType.equals(Cluster.class) && !clusterType.equals(SSPCluster.class)) {
+ throw new IllegalArgumentException("Class \"" + clusterType.getSimpleName() + "\" is not a supported cluster type.");
+ }
+
+ // Create a list to store top clusters, bottom clusters, and
+ // cluster pairs.
+ List<E> topClusters = new ArrayList<E>();
+ List<E> botClusters = new ArrayList<E>();
+ List<E[]> pairClusters = new ArrayList<E[]>();
+
+ // Separate the cluster list into top/bottom clusters.
+ for(E cluster : clusters) {
+ // Process LCIO clusters...
+ if(clusterType.equals(Cluster.class)) {
+ if(getClusterYIndex((Cluster) cluster) > 0) {
+ topClusters.add(cluster);
+ } else { botClusters.add(cluster); }
+ }
+
+ // Process SSP clusters...
+ else if(clusterType.equals(SSPCluster.class)) {
+ if(getClusterYIndex((SSPCluster) cluster) > 0) {
+ topClusters.add(cluster);
+ } else { botClusters.add(cluster); }
+ }
+ }
+
+ // Form all top/bottom cluster pairs.
+ for(E topCluster : topClusters) {
+ for(E botCluster : botClusters) {
+ @SuppressWarnings("unchecked")
+ E[] pair = (E[]) Array.newInstance(clusterType, 2);
+ pair[0] = topCluster;
+ pair[1] = botCluster;
+ pairClusters.add(pair);
+ }
+ }
+
+ // Return the cluster pairs.
+ return pairClusters;
}
/**
@@ -671,29 +717,29 @@
* first entry in the array is always the top cluster, with the
* bottom cluster in the next position.
*/
- public static List<Cluster[]> getTopBottomPairs(Cluster... clusters) {
- // Create a list to store top clusters, bottom clusters, and
- // cluster pairs.
- List<Cluster> topClusters = new ArrayList<Cluster>();
- List<Cluster> botClusters = new ArrayList<Cluster>();
- List<Cluster[]> pairClusters = new ArrayList<Cluster[]>();
-
- // Separate the cluster list into top/bottom clusters.
- for(Cluster cluster : clusters) {
- if(getClusterYIndex(cluster) > 0) {
- topClusters.add(cluster);
- } else { botClusters.add(cluster); }
- }
-
- // Form all top/bottom cluster pairs.
- for(Cluster topCluster : topClusters) {
- for(Cluster botCluster : botClusters) {
- pairClusters.add(new Cluster[] { topCluster, botCluster });
- }
- }
-
- // Return the cluster pairs.
- return pairClusters;
+ public static List<Cluster[]> getTopBottomPairs(Cluster... clusters) {
+ // Create a list to store top clusters, bottom clusters, and
+ // cluster pairs.
+ List<Cluster> topClusters = new ArrayList<Cluster>();
+ List<Cluster> botClusters = new ArrayList<Cluster>();
+ List<Cluster[]> pairClusters = new ArrayList<Cluster[]>();
+
+ // Separate the cluster list into top/bottom clusters.
+ for(Cluster cluster : clusters) {
+ if(getClusterYIndex(cluster) > 0) {
+ topClusters.add(cluster);
+ } else { botClusters.add(cluster); }
+ }
+
+ // Form all top/bottom cluster pairs.
+ for(Cluster topCluster : topClusters) {
+ for(Cluster botCluster : botClusters) {
+ pairClusters.add(new Cluster[] { topCluster, botCluster });
+ }
+ }
+
+ // Return the cluster pairs.
+ return pairClusters;
}
/**
@@ -705,29 +751,29 @@
* The first entry in the array is always the top cluster, with
* the bottom cluster in the next position.
*/
- public static List<SSPCluster[]> getTopBottomPairs(SSPCluster... clusters) {
- // Create a list to store top clusters, bottom clusters, and
- // cluster pairs.
- List<SSPCluster> topClusters = new ArrayList<SSPCluster>();
- List<SSPCluster> botClusters = new ArrayList<SSPCluster>();
- List<SSPCluster[]> pairClusters = new ArrayList<SSPCluster[]>();
-
- // Separate the cluster list into top/bottom clusters.
- for(SSPCluster cluster : clusters) {
- if(getClusterYIndex(cluster) > 0) {
- topClusters.add(cluster);
- } else { botClusters.add(cluster); }
- }
-
- // Form all top/bottom cluster pairs.
- for(SSPCluster topCluster : topClusters) {
- for(SSPCluster botCluster : botClusters) {
- pairClusters.add(new SSPCluster[] { topCluster, botCluster });
- }
- }
-
- // Return the cluster pairs.
- return pairClusters;
+ public static List<SSPCluster[]> getTopBottomPairs(SSPCluster... clusters) {
+ // Create a list to store top clusters, bottom clusters, and
+ // cluster pairs.
+ List<SSPCluster> topClusters = new ArrayList<SSPCluster>();
+ List<SSPCluster> botClusters = new ArrayList<SSPCluster>();
+ List<SSPCluster[]> pairClusters = new ArrayList<SSPCluster[]>();
+
+ // Separate the cluster list into top/bottom clusters.
+ for(SSPCluster cluster : clusters) {
+ if(getClusterYIndex(cluster) > 0) {
+ topClusters.add(cluster);
+ } else { botClusters.add(cluster); }
+ }
+
+ // Form all top/bottom cluster pairs.
+ for(SSPCluster topCluster : topClusters) {
+ for(SSPCluster botCluster : botClusters) {
+ pairClusters.add(new SSPCluster[] { topCluster, botCluster });
+ }
+ }
+
+ // Return the cluster pairs.
+ return pairClusters;
}
/**
@@ -791,12 +837,12 @@
* @return Returns the cut value.
*/
public static double getValueCoplanarity(Cluster[] clusterPair) {
- // Get the variables used by the calculation.
- double x[] = { getClusterX(clusterPair[0]), getClusterX(clusterPair[1]) };
- double y[] = { getClusterY(clusterPair[0]), getClusterY(clusterPair[1]) };
-
- // Return the calculated value.
- return getValueCoplanarity(x, y);
+ // Get the variables used by the calculation.
+ double x[] = { getClusterX(clusterPair[0]), getClusterX(clusterPair[1]) };
+ double y[] = { getClusterY(clusterPair[0]), getClusterY(clusterPair[1]) };
+
+ // Return the calculated value.
+ return getValueCoplanarity(x, y);
}
/**
@@ -807,12 +853,12 @@
* @return Returns the cut value.
*/
public static double getValueCoplanarity(SSPCluster[] clusterPair) {
- // Get the variables used by the calculation.
- double x[] = { getClusterX(clusterPair[0]), getClusterX(clusterPair[1]) };
- double y[] = { getClusterY(clusterPair[0]), getClusterY(clusterPair[1]) };
-
- // Return the calculated value.
- return getValueCoplanarity(x, y);
+ // Get the variables used by the calculation.
+ double x[] = { getClusterX(clusterPair[0]), getClusterX(clusterPair[1]) };
+ double y[] = { getClusterY(clusterPair[0]), getClusterY(clusterPair[1]) };
+
+ // Return the calculated value.
+ return getValueCoplanarity(x, y);
}
/**
@@ -824,14 +870,14 @@
*/
@Deprecated
public static double getValueCoplanarityLegacy(Cluster[] clusterPair) {
- // Get the variables used by the calculation.
- double x[] = { getClusterSeedHit(clusterPair[0]).getIdentifierFieldValue("ix"),
- getClusterSeedHit(clusterPair[1]).getIdentifierFieldValue("ix") };
- double y[] = { getClusterSeedHit(clusterPair[0]).getIdentifierFieldValue("iy"),
- getClusterSeedHit(clusterPair[1]).getIdentifierFieldValue("iy") };
-
- // Return the calculated value.
- return getValueCoplanarityLegacy(x, y);
+ // Get the variables used by the calculation.
+ double x[] = { getClusterSeedHit(clusterPair[0]).getIdentifierFieldValue("ix"),
+ getClusterSeedHit(clusterPair[1]).getIdentifierFieldValue("ix") };
+ double y[] = { getClusterSeedHit(clusterPair[0]).getIdentifierFieldValue("iy"),
+ getClusterSeedHit(clusterPair[1]).getIdentifierFieldValue("iy") };
+
+ // Return the calculated value.
+ return getValueCoplanarityLegacy(x, y);
}
/**
@@ -841,10 +887,10 @@
* @return Returns the difference between the cluster energies.
*/
public static double getValueEnergyDifference(Cluster[] clusterPair) {
- // Get the variables used by the calculation.
- double[] energy = { clusterPair[0].getEnergy(), clusterPair[1].getEnergy() };
-
- // Perform the calculation.
+ // Get the variables used by the calculation.
+ double[] energy = { clusterPair[0].getEnergy(), clusterPair[1].getEnergy() };
+
+ // Perform the calculation.
return getValueEnergyDifference(energy);
}
@@ -855,10 +901,10 @@
* @return Returns the difference between the cluster energies.
*/
public static double getValueEnergyDifference(SSPCluster[] clusterPair) {
- // Get the variables used by the calculation.
- double[] energy = { clusterPair[0].getEnergy(), clusterPair[1].getEnergy() };
-
- // Perform the calculation.
+ // Get the variables used by the calculation.
+ double[] energy = { clusterPair[0].getEnergy(), clusterPair[1].getEnergy() };
+
+ // Perform the calculation.
return getValueEnergyDifference(energy);
}
@@ -871,13 +917,13 @@
* @return Returns the energy slope value.
*/
public static double getValueEnergySlope(Cluster[] clusterPair, double energySlopeParamF) {
- // Get the variables used by the calculation.
- double[] energy = { clusterPair[0].getEnergy(), clusterPair[1].getEnergy() };
- double x[] = { getClusterX(clusterPair[0]), getClusterX(clusterPair[1]) };
- double y[] = { getClusterY(clusterPair[0]), getClusterY(clusterPair[1]) };
-
- // Perform the calculation.
- return getValueEnergySlope(energy, x, y, energySlopeParamF);
+ // Get the variables used by the calculation.
+ double[] energy = { clusterPair[0].getEnergy(), clusterPair[1].getEnergy() };
+ double x[] = { getClusterX(clusterPair[0]), getClusterX(clusterPair[1]) };
+ double y[] = { getClusterY(clusterPair[0]), getClusterY(clusterPair[1]) };
+
+ // Perform the calculation.
+ return getValueEnergySlope(energy, x, y, energySlopeParamF);
}
/**
@@ -889,13 +935,13 @@
* @return Returns the energy slope value.
*/
public static double getValueEnergySlope(SSPCluster[] clusterPair, double energySlopeParamF) {
- // Get the variables used by the calculation.
- double[] energy = { clusterPair[0].getEnergy(), clusterPair[1].getEnergy() };
- double x[] = { getClusterX(clusterPair[0]), getClusterX(clusterPair[1]) };
- double y[] = { getClusterY(clusterPair[0]), getClusterY(clusterPair[1]) };
-
- // Perform the calculation.
- return getValueEnergySlope(energy, x, y, energySlopeParamF);
+ // Get the variables used by the calculation.
+ double[] energy = { clusterPair[0].getEnergy(), clusterPair[1].getEnergy() };
+ double x[] = { getClusterX(clusterPair[0]), getClusterX(clusterPair[1]) };
+ double y[] = { getClusterY(clusterPair[0]), getClusterY(clusterPair[1]) };
+
+ // Perform the calculation.
+ return getValueEnergySlope(energy, x, y, energySlopeParamF);
}
/**
@@ -910,15 +956,15 @@
*/
@Deprecated
public static double getValueEnergySlopeLegacy(Cluster[] clusterPair, double energySlopeParamF) {
- // Get the variables used by the calculation.
- double[] energy = { clusterPair[0].getEnergy(), clusterPair[1].getEnergy() };
- double x[] = { getClusterSeedHit(clusterPair[0]).getIdentifierFieldValue("ix"),
- getClusterSeedHit(clusterPair[1]).getIdentifierFieldValue("ix") };
- double y[] = { getClusterSeedHit(clusterPair[0]).getIdentifierFieldValue("iy"),
- getClusterSeedHit(clusterPair[1]).getIdentifierFieldValue("iy") };
-
- // Perform the calculation.
- return getValueEnergySlopeLegacy(energy, x, y, energySlopeParamF);
+ // Get the variables used by the calculation.
+ double[] energy = { clusterPair[0].getEnergy(), clusterPair[1].getEnergy() };
+ double x[] = { getClusterSeedHit(clusterPair[0]).getIdentifierFieldValue("ix"),
+ getClusterSeedHit(clusterPair[1]).getIdentifierFieldValue("ix") };
+ double y[] = { getClusterSeedHit(clusterPair[0]).getIdentifierFieldValue("iy"),
+ getClusterSeedHit(clusterPair[1]).getIdentifierFieldValue("iy") };
+
+ // Perform the calculation.
+ return getValueEnergySlopeLegacy(energy, x, y, energySlopeParamF);
}
/**
@@ -928,11 +974,11 @@
* @return Returns the sum of the cluster energies.
*/
public static double getValueEnergySum(Cluster[] clusterPair) {
- // Get the variables used by the calculation.
- double[] energy = { clusterPair[0].getEnergy(), clusterPair[1].getEnergy() };
-
- // Perform the calculation.
- return getValueEnergySum(energy);
+ // Get the variables used by the calculation.
+ double[] energy = { clusterPair[0].getEnergy(), clusterPair[1].getEnergy() };
+
+ // Perform the calculation.
+ return getValueEnergySum(energy);
}
/**
@@ -942,11 +988,11 @@
* @return Returns the sum of the cluster energies.
*/
public static double getValueEnergySum(SSPCluster[] clusterPair) {
- // Get the variables used by the calculation.
- double[] energy = { clusterPair[0].getEnergy(), clusterPair[1].getEnergy() };
-
- // Perform the calculation.
- return getValueEnergySum(energy);
+ // Get the variables used by the calculation.
+ double[] energy = { clusterPair[0].getEnergy(), clusterPair[1].getEnergy() };
+
+ // Perform the calculation.
+ return getValueEnergySum(energy);
}
/**
@@ -956,12 +1002,12 @@
* @return Returns the absolute difference in the cluster times..
*/
public static double getValueTimeCoincidence(Cluster[] clusterPair) {
- // Get the variables used by the calculation.
- double[] time = { clusterPair[0].getCalorimeterHits().get(0).getTime(),
- clusterPair[1].getCalorimeterHits().get(0).getTime() };
-
- // Perform the calculation.
- return getValueTimeCoincidence(time);
+ // Get the variables used by the calculation.
+ double[] time = { clusterPair[0].getCalorimeterHits().get(0).getTime(),
+ clusterPair[1].getCalorimeterHits().get(0).getTime() };
+
+ // Perform the calculation.
+ return getValueTimeCoincidence(time);
}
/**
@@ -971,34 +1017,34 @@
* @return Returns the absolute difference in the cluster times..
*/
public static double getValueTimeCoincidence(SSPCluster[] clusterPair) {
- // Get the variables used by the calculation.
- double[] time = { clusterPair[0].getTime(), clusterPair[1].getTime() };
-
- // Perform the calculation.
- return getValueTimeCoincidence(time);
- }
-
- /**
- * Indicates whether the argument cluster is located in the fiducial
- * region or not.
- * @param cluster - The cluster to check.
- * @return Returns <code>true</code> if the cluster is located in
- * the fiducial region and <code>false</code> otherwise.
- */
- public static final boolean inFiducialRegion(Cluster cluster) {
- return inFiducialRegion(getClusterXIndex(cluster), getClusterYIndex(cluster));
- }
-
- /**
- * Indicates whether the argument cluster is located in the fiducial
- * region or not.
- * @param cluster - The cluster to check.
- * @return Returns <code>true</code> if the cluster is located in
- * the fiducial region and <code>false</code> otherwise.
- */
- public static final boolean inFiducialRegion(SSPCluster cluster) {
- return inFiducialRegion(getClusterXIndex(cluster), getClusterYIndex(cluster));
- }
+ // Get the variables used by the calculation.
+ double[] time = { clusterPair[0].getTime(), clusterPair[1].getTime() };
+
+ // Perform the calculation.
+ return getValueTimeCoincidence(time);
+ }
+
+ /**
+ * Indicates whether the argument cluster is located in the fiducial
+ * region or not.
+ * @param cluster - The cluster to check.
+ * @return Returns <code>true</code> if the cluster is located in
+ * the fiducial region and <code>false</code> otherwise.
+ */
+ public static final boolean inFiducialRegion(Cluster cluster) {
+ return inFiducialRegion(getClusterXIndex(cluster), getClusterYIndex(cluster));
+ }
+
+ /**
+ * Indicates whether the argument cluster is located in the fiducial
+ * region or not.
+ * @param cluster - The cluster to check.
+ * @return Returns <code>true</code> if the cluster is located in
+ * the fiducial region and <code>false</code> otherwise.
+ */
+ public static final boolean inFiducialRegion(SSPCluster cluster) {
+ return inFiducialRegion(getClusterXIndex(cluster), getClusterYIndex(cluster));
+ }
/**
* Checks if a cluster pair is coplanar to the beam within a given
@@ -1080,7 +1126,7 @@
* the cut and <code>false</code> if it does not.
*/
public boolean pairEnergySumCut(Cluster[] clusterPair) {
- return pairEnergySumCut(getValueEnergySum(clusterPair));
+ return pairEnergySumCut(getValueEnergySum(clusterPair));
}
/**
@@ -1092,7 +1138,7 @@
* the cut and <code>false</code> if it does not.
*/
public boolean pairEnergySumCutHigh(Cluster[] clusterPair) {
- return pairEnergySumCutHigh(getValueEnergySum(clusterPair));
+ return pairEnergySumCutHigh(getValueEnergySum(clusterPair));
}
/**
@@ -1104,7 +1150,7 @@
* the cut and <code>false</code> if it does not.
*/
public boolean pairEnergySumCutLow(Cluster[] clusterPair) {
- return pairEnergySumCutLow(getValueEnergySum(clusterPair));
+ return pairEnergySumCutLow(getValueEnergySum(clusterPair));
}
/**
@@ -1117,7 +1163,7 @@
* the cut and <code>false</code> if it does not.
*/
public boolean pairEnergySumCut(SSPCluster[] clusterPair) {
- return pairEnergySumCut(getValueEnergySum(clusterPair));
+ return pairEnergySumCut(getValueEnergySum(clusterPair));
}
/**
@@ -1129,7 +1175,7 @@
* the cut and <code>false</code> if it does not.
*/
public boolean pairEnergySumCutHigh(SSPCluster[] clusterPair) {
- return pairEnergySumCutHigh(getValueEnergySum(clusterPair));
+ return pairEnergySumCutHigh(getValueEnergySum(clusterPair));
}
/**
@@ -1141,7 +1187,7 @@
* the cut and <code>false</code> if it does not.
*/
public boolean pairEnergySumCutLow(SSPCluster[] clusterPair) {
- return pairEnergySumCutLow(getValueEnergySum(clusterPair));
+ return pairEnergySumCutLow(getValueEnergySum(clusterPair));
}
/**
@@ -1153,7 +1199,7 @@
* the cut and <code>false</code> if it does not.
*/
public boolean pairTimeCoincidenceCut(Cluster[] clusterPair) {
- return pairTimeCoincidenceCut(getValueTimeCoincidence(clusterPair));
+ return pairTimeCoincidenceCut(getValueTimeCoincidence(clusterPair));
}
/**
@@ -1165,7 +1211,7 @@
* the cut and <code>false</code> if it does not.
*/
public boolean pairTimeCoincidenceCut(SSPCluster[] clusterPair) {
- return pairTimeCoincidenceCut(getValueTimeCoincidence(clusterPair));
+ return pairTimeCoincidenceCut(getValueTimeCoincidence(clusterPair));
}
/**
@@ -1243,6 +1289,18 @@
*/
private boolean clusterTotalEnergyCutLow(double clusterEnergy) {
return (clusterEnergy >= cuts.get(CLUSTER_TOTAL_ENERGY_LOW));
+ }
+
+ /**
+ * Gets the angle between the cluster and the centerpoint of the
+ * calorimeter.
+ * @param x - The cluster seed x-position.
+ * @param y - The cluster seed y-position.
+ * @return Returns the cluster angle as an <code>int</code> in units
+ * of degrees.
+ */
+ private static int getClusterAngle(double x, double y) {
+ return (int) Math.round(Math.atan(x / y) * 180.0 / Math.PI);
}
/**
@@ -1266,22 +1324,22 @@
* the x-index and either of the cases where <code>iy == 0</code>
* or <code>|iy| > 5</code> for the y-index.
*/
- private static double[] getCrystalPosition(int ix, int iy) throws IndexOutOfBoundsException {
- // Make sure that the requested crystal is a valid crystal.
- if(ix == 0 || ix < -23 || ix > 23) {
- throw new IndexOutOfBoundsException(String.format("Value \"%d\" is invalid for field x-index.", ix));
- } if(iy == 0 || iy < -5 || iy > 5) {
- throw new IndexOutOfBoundsException(String.format("Value \"%d\" is invalid for field y-index.", iy));
- }
-
- // Get the position map.
- double posMap[];
- if(ix < 1) { posMap = position[5 - iy][22 - ix]; }
- else { posMap = position[5 - iy][23 - ix]; }
-
- // Return the corrected mapped position.
- return new double[] { posMap[0], posMap[2], posMap[1] };
- }
+ private static double[] getCrystalPosition(int ix, int iy) throws IndexOutOfBoundsException {
+ // Make sure that the requested crystal is a valid crystal.
+ if(ix == 0 || ix < -23 || ix > 23) {
+ throw new IndexOutOfBoundsException(String.format("Value \"%d\" is invalid for field x-index.", ix));
+ } if(iy == 0 || iy < -5 || iy > 5) {
+ throw new IndexOutOfBoundsException(String.format("Value \"%d\" is invalid for field y-index.", iy));
+ }
+
+ // Get the position map.
+ double posMap[];
+ if(ix < 1) { posMap = position[5 - iy][22 - ix]; }
+ else { posMap = position[5 - iy][23 - ix]; }
+
+ // Return the corrected mapped position.
+ return new double[] { posMap[0], posMap[2], posMap[1] };
+ }
/**
* Calculates the value used by the coplanarity cut.
@@ -1295,7 +1353,7 @@
// Get the cluster angles.
int[] clusterAngle = new int[2];
for(int i = 0; i < 2; i++) {
- clusterAngle[i] = (int) Math.round(Math.atan(x[i] / y[i]) * 180.0 / Math.PI);
+ clusterAngle[i] = getClusterAngle(x[i], y[i]); //(int) Math.round(Math.atan(x[i] / y[i]) * 180.0 / Math.PI);
}
// Calculate the coplanarity cut value.
@@ -1343,9 +1401,9 @@
* @return Returns the cut value.
*/
private static double getValueEnergySlope(double energy[], double x[], double y[], double energySlopeParamF) {
- // Determine which cluster is the lower-energy cluster.
- int lei = energy[0] < energy[1] ? 0 : 1;
-
+ // Determine which cluster is the lower-energy cluster.
+ int lei = energy[0] < energy[1] ? 0 : 1;
+
// E + R*F
// Get the low energy cluster energy.
double slopeParamE = energy[lei];
@@ -1369,9 +1427,9 @@
*/
@Deprecated
private static double getValueEnergySlopeLegacy(double energy[], double x[], double y[], double energySlopeParamF) {
- // Determine which cluster is the lower-energy cluster.
- int lei = energy[0] < energy[1] ? 0 : 1;
-
+ // Determine which cluster is the lower-energy cluster.
+ int lei = energy[0] < energy[1] ? 0 : 1;
+
// E + R*F
// Get the low energy cluster energy.
double slopeParamE = energy[lei];
@@ -1401,52 +1459,52 @@
* the two clusters.
*/
private static double getValueTimeCoincidence(double[] time) {
- return Math.abs(time[0] - time[1]);
- }
-
- /**
- * Indicates whether the argument cluster is located in the fiducial
- * region or not.
- * @param cluster - The cluster to check.
- * @return Returns <code>true</code> if the cluster is located in
- * the fiducial region and <code>false</code> otherwise.
- */
- private static final boolean inFiducialRegion(int ix, int iy) {
- // Get the x and y indices for the cluster.
- int absx = Math.abs(ix);
- int absy = Math.abs(iy);
-
- // Check if the cluster is on the top or the bottom of the
- // calorimeter, as defined by |y| == 5. This is an edge cluster
- // and is not in the fiducial region.
- if(absy == 5) {
- return false;
- }
-
- // Check if the cluster is on the extreme left or right side
- // of the calorimeter, as defined by |x| == 23. This is also
- // and edge cluster is not in the fiducial region.
- if(absx == 23) {
- return false;
- }
-
- // Check if the cluster is along the beam gap, as defined by
- // |y| == 1. This is an internal edge cluster and is not in the
- // fiducial region.
- if(absy == 1) {
- return false;
- }
-
- // Lastly, check if the cluster falls along the beam hole, as
- // defined by clusters with -11 <= x <= -1 and |y| == 2. This
- // is not the fiducial region.
- if(absy == 2 && ix <= -1 && ix >= -11) {
- return false;
- }
-
- // If all checks fail, the cluster is in the fiducial region.
- return true;
- }
+ return Math.abs(time[0] - time[1]);
+ }
+
+ /**
+ * Indicates whether the argument cluster is located in the fiducial
+ * region or not.
+     * @param ix - The cluster x-index.
+     * @param iy - The cluster y-index.
+ * @return Returns <code>true</code> if the cluster is located in
+ * the fiducial region and <code>false</code> otherwise.
+ */
+ private static final boolean inFiducialRegion(int ix, int iy) {
+ // Get the x and y indices for the cluster.
+ int absx = Math.abs(ix);
+ int absy = Math.abs(iy);
+
+ // Check if the cluster is on the top or the bottom of the
+ // calorimeter, as defined by |y| == 5. This is an edge cluster
+ // and is not in the fiducial region.
+ if(absy == 5) {
+ return false;
+ }
+
+ // Check if the cluster is on the extreme left or right side
+ // of the calorimeter, as defined by |x| == 23. This is also
+        // an edge cluster and is not in the fiducial region.
+ if(absx == 23) {
+ return false;
+ }
+
+ // Check if the cluster is along the beam gap, as defined by
+ // |y| == 1. This is an internal edge cluster and is not in the
+ // fiducial region.
+ if(absy == 1) {
+ return false;
+ }
+
+ // Lastly, check if the cluster falls along the beam hole, as
+ // defined by clusters with -11 <= x <= -1 and |y| == 2. This
+ // is not the fiducial region.
+ if(absy == 2 && ix <= -1 && ix >= -11) {
+ return false;
+ }
+
+ // If all checks fail, the cluster is in the fiducial region.
+ return true;
+ }
/**
* Checks if a coplanarity angle is within threshold.
@@ -1522,7 +1580,7 @@
* the cut and <code>false</code> if it does not.
*/
private boolean pairTimeCoincidenceCut(double timeDifference) {
- return (timeDifference <= cuts.get(PAIR_TIME_COINCIDENCE));
+ return (timeDifference <= cuts.get(PAIR_TIME_COINCIDENCE));
}
/**
@@ -1535,193 +1593,193 @@
* Note that in this table, position[][] = { x, z, y } by in the
* coordinate system employed by the rest of the class.
*/
- private static final double[][][] position = {
- { { -340.003, 97.065, 87.845 }, { -324.283, 97.450, 87.875 }, { -308.648, 97.810, 87.900 },
- { -293.093, 98.150, 87.920 }, { -277.618, 98.470, 87.940 }, { -262.213, 98.765, 87.965 },
- { -246.878, 99.040, 87.980 }, { -231.603, 99.290, 87.995 }, { -216.393, 99.520, 88.010 },
- { -201.228, 99.725, 88.030 }, { -186.118, 99.905, 88.040 }, { -171.058, 100.070, 88.050 },
- { -156.038, 100.205, 88.055 }, { -141.058, 100.325, 88.070 }, { -126.113, 100.415, 88.075 },
- { -111.198, 100.485, 88.075 }, { -96.313, 100.530, 88.080 }, { -81.453, 100.555, 88.085 },
- { -66.608, 100.560, 88.085 }, { -51.788, 100.540, 88.080 }, { -36.983, 100.490, 88.075 },
- { -22.183, 100.425, 88.075 }, { -7.393, 100.335, 88.070 }, { 7.393, 100.335, 88.070 },
- { 22.183, 100.425, 88.075 }, { 36.983, 100.490, 88.075 }, { 51.793, 100.540, 88.080 },
- { 66.613, 100.560, 88.085 }, { 81.453, 100.555, 88.085 }, { 96.313, 100.530, 88.080 },
- { 111.198, 100.485, 88.075 }, { 126.113, 100.415, 88.075 }, { 141.053, 100.325, 88.070 },
- { 156.038, 100.205, 88.055 }, { 171.053, 100.070, 88.050 }, { 186.118, 99.905, 88.040 },
- { 201.228, 99.725, 88.030 }, { 216.388, 99.520, 88.010 }, { 231.608, 99.290, 87.995 },
- { 246.878, 99.040, 87.980 }, { 262.218, 98.765, 87.965 }, { 277.623, 98.470, 87.940 },
- { 293.098, 98.150, 87.920 }, { 308.653, 97.810, 87.900 }, { 324.288, 97.450, 87.875 },
- { 340.008, 97.065, 87.845 }
- },
- { { -340.003, 97.040, 72.715 }, { -324.283, 97.420, 72.735 }, { -308.648, 97.785, 72.750 },
- { -293.093, 98.125, 72.765 }, { -277.618, 98.450, 72.785 }, { -262.213, 98.745, 72.800 },
- { -246.878, 99.015, 72.815 }, { -231.603, 99.265, 72.825 }, { -216.388, 99.495, 72.840 },
- { -201.228, 99.700, 72.850 }, { -186.118, 99.885, 72.860 }, { -171.058, 100.045, 72.865 },
- { -156.033, 100.185, 72.875 }, { -141.053, 100.300, 72.880 }, { -126.108, 100.395, 72.880 },
- { -111.193, 100.460, 72.890 }, { -96.308, 100.510, 72.890 }, { -81.448, 100.535, 72.895 },
- { -66.608, 100.535, 72.890 }, { -51.788, 100.510, 72.890 }, { -36.978, 100.470, 72.890 },
- { -22.183, 100.405, 72.880 }, { -7.388, 100.310, 72.880 }, { 7.393, 100.310, 72.880 },
- { 22.188, 100.405, 72.885 }, { 36.983, 100.470, 72.890 }, { 51.793, 100.510, 72.890 },
- { 66.613, 100.535, 72.890 }, { 81.453, 100.535, 72.895 }, { 96.313, 100.510, 72.890 },
- { 111.198, 100.460, 72.890 }, { 126.113, 100.395, 72.880 }, { 141.063, 100.300, 72.880 },
- { 156.043, 100.185, 72.875 }, { 171.063, 100.045, 72.865 }, { 186.123, 99.885, 72.860 },
- { 201.233, 99.700, 72.850 }, { 216.393, 99.495, 72.840 }, { 231.608, 99.265, 72.825 },
- { 246.883, 99.015, 72.815 }, { 262.218, 98.745, 72.800 }, { 277.623, 98.450, 72.785 },
- { 293.098, 98.125, 72.765 }, { 308.653, 97.785, 72.750 }, { 324.288, 97.420, 72.735 },
- { 340.008, 97.040, 72.715 }
- },
- { { -340.003, 96.990, 57.600 }, { -324.283, 97.375, 57.610 }, { -308.648, 97.740, 57.625 },
- { -293.093, 98.080, 57.630 }, { -277.618, 98.395, 57.645 }, { -262.213, 98.700, 57.655 },
- { -246.873, 98.970, 57.660 }, { -231.603, 99.220, 57.670 }, { -216.383, 99.450, 57.680 },
- { -201.228, 99.660, 57.685 }, { -186.113, 99.840, 57.695 }, { -171.053, 100.005, 57.695 },
- { -156.033, 100.140, 57.700 }, { -141.053, 100.255, 57.710 }, { -126.108, 100.345, 57.710 },
- { -111.193, 100.420, 57.710 }, { -96.308, 100.465, 57.715 }, { -81.448, 100.490, 57.715 },
- { -66.608, 100.490, 57.715 }, { -51.788, 100.470, 57.710 }, { -36.978, 100.425, 57.710 },
- { -22.178, 100.355, 57.710 }, { -7.388, 100.265, 57.705 }, { 7.398, 100.265, 57.705 },
- { 22.188, 100.355, 57.710 }, { 36.988, 100.425, 57.710 }, { 51.793, 100.470, 57.710 },
- { 66.613, 100.490, 57.715 }, { 81.458, 100.490, 57.715 }, { 96.318, 100.465, 57.715 },
- { 111.198, 100.420, 57.710 }, { 126.118, 100.345, 57.710 }, { 141.063, 100.255, 57.710 },
- { 156.043, 100.140, 57.700 }, { 171.063, 100.005, 57.695 }, { 186.123, 99.840, 57.695 },
- { 201.233, 99.660, 57.685 }, { 216.393, 99.450, 57.680 }, { 231.608, 99.220, 57.670 },
- { 246.883, 98.970, 57.660 }, { 262.218, 98.700, 57.655 }, { 277.623, 98.395, 57.645 },
- { 293.098, 98.080, 57.630 }, { 308.653, 97.740, 57.625 }, { 324.288, 97.375, 57.610 },
- { 340.008, 96.990, 57.600 }
- },
- { { -340.003, 96.925, 42.490 }, { -324.283, 97.305, 42.495 }, { -308.648, 97.675, 42.505 },
- { -293.093, 98.010, 42.510 }, { -277.618, 98.330, 42.510 }, { -262.213, 98.625, 42.515 },
- { -246.873, 98.900, 42.525 }, { -231.603, 99.155, 42.530 }, { -216.383, 99.385, 42.535 },
- { -201.223, 99.590, 42.530 }, { -186.113, 99.775, 42.535 }, { -171.048, 99.930, 42.540 },
- { -156.033, 100.070, 42.545 }, { -141.048, 100.185, 42.545 }, { -126.108, 100.280, 42.550 },
- { -111.193, 100.350, 42.545 }, { -96.308, 100.400, 42.545 }, { -81.448, 100.420, 42.550 },
- { -66.608, 100.425, 42.550 }, { -51.788, 100.405, 42.550 }, { -36.978, 100.355, 42.545 },
- { -22.178, 100.290, 42.545 }, { -7.388, 100.200, 42.545 }, { 7.398, 100.200, 42.545 },
- { 22.188, 100.290, 42.545 }, { 36.988, 100.355, 42.545 }, { 51.793, 100.405, 42.550 },
- { 66.613, 100.425, 42.550 }, { 81.458, 100.420, 42.550 }, { 96.318, 100.400, 42.545 },
- { 111.198, 100.350, 42.545 }, { 126.118, 100.280, 42.550 }, { 141.063, 100.185, 42.545 },
- { 156.043, 100.070, 42.545 }, { 171.063, 99.930, 42.540 }, { 186.123, 99.775, 42.535 },
- { 201.233, 99.590, 42.530 }, { 216.393, 99.385, 42.535 }, { 231.608, 99.155, 42.530 },
- { 246.883, 98.900, 42.525 }, { 262.218, 98.625, 42.515 }, { 277.628, 98.330, 42.510 },
- { 293.098, 98.010, 42.510 }, { 308.653, 97.675, 42.505 }, { 324.288, 97.305, 42.495 },
- { 340.008, 96.925, 42.490 }
- },
- { { -340.003, 96.830, 27.385 }, { -324.278, 97.215, 27.385 }, { -308.648, 97.575, 27.385 },
- { -293.093, 97.915, 27.385 }, { -277.613, 98.240, 27.385 }, { -262.213, 98.535, 27.385 },
- { -246.878, 98.810, 27.385 }, { -231.603, 99.060, 27.385 }, { -216.383, 99.290, 27.385 },
- { -201.223, 99.495, 27.385 }, { -186.113, 99.680, 27.385 }, { -171.048, 99.840, 27.385 },
- { -156.033, 99.980, 27.385 }, { -141.048, 100.095, 27.385 }, { -126.103, 100.185, 27.385 },
- { -111.193, 100.255, 27.385 }, { -96.303, 100.305, 27.385 }, { -81.448, 100.330, 27.385 },
- { -66.608, 100.330, 27.385 }, { -51.783, 100.310, 27.385 }, { -36.973, 100.265, 27.385 },
- { -22.178, 100.200, 27.385 }, { -7.388, 100.105, 27.385 }, { 7.403, 100.105, 27.385 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 156.078, 99.980, 27.385 }, { 171.103, 99.840, 27.385 }, { 186.168, 99.680, 27.385 },
- { 201.268, 99.495, 27.385 }, { 216.423, 99.290, 27.385 }, { 231.638, 99.060, 27.385 },
- { 246.913, 98.810, 27.385 }, { 262.248, 98.535, 27.385 }, { 277.658, 98.240, 27.385 },
- { 293.133, 97.920, 27.385 }, { 308.688, 97.575, 27.385 }, { 324.323, 97.215, 27.385 },
- { 340.043, 96.830, 27.385 }
- },
- { { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }
- },
- { { -339.998, 96.840, -27.330 }, { -324.278, 97.225, -27.340 }, { -308.643, 97.585, -27.345 },
- { -293.093, 97.925, -27.350 }, { -277.613, 98.245, -27.360 }, { -262.213, 98.545, -27.365 },
- { -246.868, 98.820, -27.365 }, { -231.598, 99.070, -27.370 }, { -216.383, 99.300, -27.375 },
- { -201.223, 99.505, -27.380 }, { -186.113, 99.690, -27.385 }, { -171.048, 99.850, -27.380 },
- { -156.028, 99.990, -27.385 }, { -141.048, 100.100, -27.390 }, { -126.103, 100.195, -27.390 },
- { -111.193, 100.265, -27.395 }, { -96.303, 100.315, -27.395 }, { -81.443, 100.340, -27.390 },
- { -66.603, 100.335, -27.390 }, { -51.783, 100.315, -27.390 }, { -36.973, 100.275, -27.395 },
- { -22.173, 100.205, -27.390 }, { -7.383, 100.115, -27.385 }, { 7.403, 100.115, -27.385 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
- { 156.088, 99.985, -27.385 }, { 171.103, 99.845, -27.380 }, { 186.168, 99.680, -27.385 },
- { 201.268, 99.495, -27.380 }, { 216.428, 99.290, -27.375 }, { 231.643, 99.060, -27.370 },
- { 246.913, 98.810, -27.365 }, { 262.258, 98.535, -27.365 }, { 277.658, 98.240, -27.360 },
- { 293.138, 97.925, -27.350 }, { 308.688, 97.580, -27.345 }, { 324.323, 97.215, -27.340 },
- { 340.043, 96.835, -27.330 }
- },
- { { -339.998, 96.930, -42.435 }, { -324.278, 97.315, -42.445 }, { -308.648, 97.680, -42.455 },
- { -293.093, 98.015, -42.470 }, { -277.613, 98.340, -42.480 }, { -262.208, 98.635, -42.490 },
- { -246.873, 98.910, -42.500 }, { -231.593, 99.160, -42.510 }, { -216.383, 99.390, -42.515 },
- { -201.223, 99.595, -42.525 }, { -186.113, 99.780, -42.525 }, { -171.048, 99.940, -42.535 },
- { -156.028, 100.080, -42.540 }, { -141.048, 100.195, -42.540 }, { -126.103, 100.290, -42.545 },
- { -111.193, 100.355, -42.550 }, { -96.303, 100.405, -42.550 }, { -81.443, 100.430, -42.550 },
- { -66.608, 100.430, -42.550 }, { -51.783, 100.405, -42.550 }, { -36.973, 100.365, -42.550 },
- { -22.178, 100.295, -42.545 }, { -7.388, 100.205, -42.545 }, { 7.403, 100.205, -42.545 },
- { 22.193, 100.295, -42.545 }, { 36.988, 100.365, -42.550 }, { 51.798, 100.405, -42.550 },
- { 66.623, 100.430, -42.550 }, { 81.458, 100.430, -42.550 }, { 96.318, 100.405, -42.550 },
- { 111.208, 100.355, -42.550 }, { 126.118, 100.290, -42.545 }, { 141.063, 100.195, -42.540 },
- { 156.043, 100.080, -42.540 }, { 171.063, 99.940, -42.535 }, { 186.128, 99.780, -42.525 },
- { 201.238, 99.595, -42.525 }, { 216.398, 99.390, -42.515 }, { 231.613, 99.160, -42.510 },
- { 246.888, 98.910, -42.500 }, { 262.223, 98.635, -42.490 }, { 277.628, 98.340, -42.480 },
- { 293.108, 98.015, -42.470 }, { 308.663, 97.680, -42.455 }, { 324.293, 97.315, -42.445 },
- { 340.013, 96.930, -42.435 }
- },
- { { -339.998, 97.000, -57.540 }, { -324.278, 97.385, -57.560 }, { -308.648, 97.745, -57.575 },
- { -293.093, 98.090, -57.595 }, { -277.613, 98.410, -57.610 }, { -262.208, 98.705, -57.625 },
- { -246.873, 98.975, -57.640 }, { -231.593, 99.225, -57.655 }, { -216.383, 99.455, -57.665 },
- { -201.223, 99.665, -57.675 }, { -186.113, 99.845, -57.685 }, { -171.048, 100.010, -57.690 },
- { -156.028, 100.145, -57.700 }, { -141.048, 100.265, -57.705 }, { -126.103, 100.355, -57.710 },
- { -111.193, 100.425, -57.710 }, { -96.303, 100.475, -57.720 }, { -81.443, 100.495, -57.715 },
- { -66.608, 100.500, -57.720 }, { -51.783, 100.480, -57.715 }, { -36.973, 100.430, -57.710 },
- { -22.178, 100.365, -57.710 }, { -7.388, 100.275, -57.705 }, { 7.403, 100.275, -57.705 },
- { 22.193, 100.365, -57.710 }, { 36.988, 100.430, -57.710 }, { 51.798, 100.480, -57.715 },
- { 66.623, 100.500, -57.720 }, { 81.458, 100.495, -57.715 }, { 96.318, 100.475, -57.720 },
- { 111.208, 100.425, -57.710 }, { 126.118, 100.355, -57.710 }, { 141.063, 100.265, -57.705 },
- { 156.043, 100.145, -57.700 }, { 171.063, 100.010, -57.690 }, { 186.128, 99.845, -57.685 },
- { 201.238, 99.665, -57.675 }, { 216.398, 99.455, -57.665 }, { 231.613, 99.225, -57.655 },
- { 246.888, 98.975, -57.640 }, { 262.223, 98.705, -57.625 }, { 277.628, 98.410, -57.610 },
- { 293.108, 98.090, -57.595 }, { 308.663, 97.745, -57.575 }, { 324.293, 97.385, -57.560 },
- { 340.013, 97.000, -57.540 }
- },
- { { -339.998, 97.045, -72.655 }, { -324.278, 97.435, -72.680 }, { -308.648, 97.795, -72.710 },
- { -293.093, 98.135, -72.730 }, { -277.613, 98.455, -72.750 }, { -262.208, 98.750, -72.775 },
- { -246.873, 99.020, -72.795 }, { -231.593, 99.280, -72.810 }, { -216.383, 99.505, -72.820 },
- { -201.223, 99.710, -72.840 }, { -186.113, 99.895, -72.850 }, { -171.048, 100.055, -72.860 },
- { -156.028, 100.190, -72.870 }, { -141.048, 100.305, -72.880 }, { -126.103, 100.400, -72.885 },
- { -111.193, 100.470, -72.890 }, { -96.303, 100.520, -72.890 }, { -81.443, 100.540, -72.895 },
- { -66.608, 100.540, -72.895 }, { -51.783, 100.520, -72.895 }, { -36.973, 100.480, -72.890 },
- { -22.178, 100.405, -72.885 }, { -7.388, 100.320, -72.880 }, { 7.403, 100.320, -72.880 },
- { 22.193, 100.405, -72.885 }, { 36.988, 100.480, -72.890 }, { 51.798, 100.520, -72.895 },
- { 66.623, 100.540, -72.895 }, { 81.458, 100.540, -72.895 }, { 96.318, 100.520, -72.890 },
- { 111.208, 100.470, -72.890 }, { 126.118, 100.400, -72.885 }, { 141.063, 100.305, -72.880 },
- { 156.043, 100.190, -72.870 }, { 171.063, 100.055, -72.860 }, { 186.128, 99.895, -72.850 },
- { 201.238, 99.710, -72.840 }, { 216.398, 99.505, -72.820 }, { 231.613, 99.280, -72.810 },
- { 246.888, 99.020, -72.795 }, { 262.223, 98.750, -72.775 }, { 277.628, 98.455, -72.750 },
- { 293.108, 98.135, -72.730 }, { 308.663, 97.795, -72.710 }, { 324.293, 97.435, -72.680 },
- { 340.013, 97.045, -72.655 }
- },
- { { -339.998, 97.070, -87.790 }, { -324.278, 97.460, -87.820 }, { -308.648, 97.820, -87.850 },
- { -293.093, 98.160, -87.885 }, { -277.613, 98.480, -87.910 }, { -262.208, 98.775, -87.935 },
- { -246.873, 99.050, -87.960 }, { -231.593, 99.300, -87.980 }, { -216.383, 99.530, -88.000 },
- { -201.223, 99.735, -88.015 }, { -186.113, 99.920, -88.030 }, { -171.048, 100.080, -88.045 },
- { -156.028, 100.215, -88.055 }, { -141.048, 100.335, -88.065 }, { -126.103, 100.420, -88.070 },
- { -111.193, 100.490, -88.075 }, { -96.303, 100.540, -88.085 }, { -81.443, 100.565, -88.085 },
- { -66.608, 100.560, -88.085 }, { -51.783, 100.540, -88.085 }, { -36.973, 100.500, -88.080 },
- { -22.178, 100.430, -88.075 }, { -7.388, 100.340, -88.065 }, { 7.403, 100.340, -88.070 },
- { 22.193, 100.430, -88.075 }, { 36.988, 100.500, -88.080 }, { 51.798, 100.540, -88.085 },
- { 66.623, 100.560, -88.085 }, { 81.458, 100.565, -88.085 }, { 96.318, 100.540, -88.085 },
- { 111.208, 100.490, -88.075 }, { 126.118, 100.420, -88.070 }, { 141.063, 100.335, -88.065 },
- { 156.043, 100.215, -88.055 }, { 171.063, 100.080, -88.045 }, { 186.128, 99.915, -88.030 },
- { 201.238, 99.735, -88.015 }, { 216.398, 99.530, -88.000 }, { 231.613, 99.300, -87.980 },
- { 246.888, 99.050, -87.960 }, { 262.223, 98.775, -87.935 }, { 277.628, 98.480, -87.910 },
- { 293.108, 98.160, -87.885 }, { 308.663, 97.820, -87.850 }, { 324.293, 97.460, -87.820 },
- { 340.013, 97.070, -87.790 }
- }
- };
-}
+ private static final double[][][] position = {
+ { { -340.003, 97.065, 87.845 }, { -324.283, 97.450, 87.875 }, { -308.648, 97.810, 87.900 },
+ { -293.093, 98.150, 87.920 }, { -277.618, 98.470, 87.940 }, { -262.213, 98.765, 87.965 },
+ { -246.878, 99.040, 87.980 }, { -231.603, 99.290, 87.995 }, { -216.393, 99.520, 88.010 },
+ { -201.228, 99.725, 88.030 }, { -186.118, 99.905, 88.040 }, { -171.058, 100.070, 88.050 },
+ { -156.038, 100.205, 88.055 }, { -141.058, 100.325, 88.070 }, { -126.113, 100.415, 88.075 },
+ { -111.198, 100.485, 88.075 }, { -96.313, 100.530, 88.080 }, { -81.453, 100.555, 88.085 },
+ { -66.608, 100.560, 88.085 }, { -51.788, 100.540, 88.080 }, { -36.983, 100.490, 88.075 },
+ { -22.183, 100.425, 88.075 }, { -7.393, 100.335, 88.070 }, { 7.393, 100.335, 88.070 },
+ { 22.183, 100.425, 88.075 }, { 36.983, 100.490, 88.075 }, { 51.793, 100.540, 88.080 },
+ { 66.613, 100.560, 88.085 }, { 81.453, 100.555, 88.085 }, { 96.313, 100.530, 88.080 },
+ { 111.198, 100.485, 88.075 }, { 126.113, 100.415, 88.075 }, { 141.053, 100.325, 88.070 },
+ { 156.038, 100.205, 88.055 }, { 171.053, 100.070, 88.050 }, { 186.118, 99.905, 88.040 },
+ { 201.228, 99.725, 88.030 }, { 216.388, 99.520, 88.010 }, { 231.608, 99.290, 87.995 },
+ { 246.878, 99.040, 87.980 }, { 262.218, 98.765, 87.965 }, { 277.623, 98.470, 87.940 },
+ { 293.098, 98.150, 87.920 }, { 308.653, 97.810, 87.900 }, { 324.288, 97.450, 87.875 },
+ { 340.008, 97.065, 87.845 }
+ },
+ { { -340.003, 97.040, 72.715 }, { -324.283, 97.420, 72.735 }, { -308.648, 97.785, 72.750 },
+ { -293.093, 98.125, 72.765 }, { -277.618, 98.450, 72.785 }, { -262.213, 98.745, 72.800 },
+ { -246.878, 99.015, 72.815 }, { -231.603, 99.265, 72.825 }, { -216.388, 99.495, 72.840 },
+ { -201.228, 99.700, 72.850 }, { -186.118, 99.885, 72.860 }, { -171.058, 100.045, 72.865 },
+ { -156.033, 100.185, 72.875 }, { -141.053, 100.300, 72.880 }, { -126.108, 100.395, 72.880 },
+ { -111.193, 100.460, 72.890 }, { -96.308, 100.510, 72.890 }, { -81.448, 100.535, 72.895 },
+ { -66.608, 100.535, 72.890 }, { -51.788, 100.510, 72.890 }, { -36.978, 100.470, 72.890 },
+ { -22.183, 100.405, 72.880 }, { -7.388, 100.310, 72.880 }, { 7.393, 100.310, 72.880 },
+ { 22.188, 100.405, 72.885 }, { 36.983, 100.470, 72.890 }, { 51.793, 100.510, 72.890 },
+ { 66.613, 100.535, 72.890 }, { 81.453, 100.535, 72.895 }, { 96.313, 100.510, 72.890 },
+ { 111.198, 100.460, 72.890 }, { 126.113, 100.395, 72.880 }, { 141.063, 100.300, 72.880 },
+ { 156.043, 100.185, 72.875 }, { 171.063, 100.045, 72.865 }, { 186.123, 99.885, 72.860 },
+ { 201.233, 99.700, 72.850 }, { 216.393, 99.495, 72.840 }, { 231.608, 99.265, 72.825 },
+ { 246.883, 99.015, 72.815 }, { 262.218, 98.745, 72.800 }, { 277.623, 98.450, 72.785 },
+ { 293.098, 98.125, 72.765 }, { 308.653, 97.785, 72.750 }, { 324.288, 97.420, 72.735 },
+ { 340.008, 97.040, 72.715 }
+ },
+ { { -340.003, 96.990, 57.600 }, { -324.283, 97.375, 57.610 }, { -308.648, 97.740, 57.625 },
+ { -293.093, 98.080, 57.630 }, { -277.618, 98.395, 57.645 }, { -262.213, 98.700, 57.655 },
+ { -246.873, 98.970, 57.660 }, { -231.603, 99.220, 57.670 }, { -216.383, 99.450, 57.680 },
+ { -201.228, 99.660, 57.685 }, { -186.113, 99.840, 57.695 }, { -171.053, 100.005, 57.695 },
+ { -156.033, 100.140, 57.700 }, { -141.053, 100.255, 57.710 }, { -126.108, 100.345, 57.710 },
+ { -111.193, 100.420, 57.710 }, { -96.308, 100.465, 57.715 }, { -81.448, 100.490, 57.715 },
+ { -66.608, 100.490, 57.715 }, { -51.788, 100.470, 57.710 }, { -36.978, 100.425, 57.710 },
+ { -22.178, 100.355, 57.710 }, { -7.388, 100.265, 57.705 }, { 7.398, 100.265, 57.705 },
+ { 22.188, 100.355, 57.710 }, { 36.988, 100.425, 57.710 }, { 51.793, 100.470, 57.710 },
+ { 66.613, 100.490, 57.715 }, { 81.458, 100.490, 57.715 }, { 96.318, 100.465, 57.715 },
+ { 111.198, 100.420, 57.710 }, { 126.118, 100.345, 57.710 }, { 141.063, 100.255, 57.710 },
+ { 156.043, 100.140, 57.700 }, { 171.063, 100.005, 57.695 }, { 186.123, 99.840, 57.695 },
+ { 201.233, 99.660, 57.685 }, { 216.393, 99.450, 57.680 }, { 231.608, 99.220, 57.670 },
+ { 246.883, 98.970, 57.660 }, { 262.218, 98.700, 57.655 }, { 277.623, 98.395, 57.645 },
+ { 293.098, 98.080, 57.630 }, { 308.653, 97.740, 57.625 }, { 324.288, 97.375, 57.610 },
+ { 340.008, 96.990, 57.600 }
+ },
+ { { -340.003, 96.925, 42.490 }, { -324.283, 97.305, 42.495 }, { -308.648, 97.675, 42.505 },
+ { -293.093, 98.010, 42.510 }, { -277.618, 98.330, 42.510 }, { -262.213, 98.625, 42.515 },
+ { -246.873, 98.900, 42.525 }, { -231.603, 99.155, 42.530 }, { -216.383, 99.385, 42.535 },
+ { -201.223, 99.590, 42.530 }, { -186.113, 99.775, 42.535 }, { -171.048, 99.930, 42.540 },
+ { -156.033, 100.070, 42.545 }, { -141.048, 100.185, 42.545 }, { -126.108, 100.280, 42.550 },
+ { -111.193, 100.350, 42.545 }, { -96.308, 100.400, 42.545 }, { -81.448, 100.420, 42.550 },
+ { -66.608, 100.425, 42.550 }, { -51.788, 100.405, 42.550 }, { -36.978, 100.355, 42.545 },
+ { -22.178, 100.290, 42.545 }, { -7.388, 100.200, 42.545 }, { 7.398, 100.200, 42.545 },
+ { 22.188, 100.290, 42.545 }, { 36.988, 100.355, 42.545 }, { 51.793, 100.405, 42.550 },
+ { 66.613, 100.425, 42.550 }, { 81.458, 100.420, 42.550 }, { 96.318, 100.400, 42.545 },
+ { 111.198, 100.350, 42.545 }, { 126.118, 100.280, 42.550 }, { 141.063, 100.185, 42.545 },
+ { 156.043, 100.070, 42.545 }, { 171.063, 99.930, 42.540 }, { 186.123, 99.775, 42.535 },
+ { 201.233, 99.590, 42.530 }, { 216.393, 99.385, 42.535 }, { 231.608, 99.155, 42.530 },
+ { 246.883, 98.900, 42.525 }, { 262.218, 98.625, 42.515 }, { 277.628, 98.330, 42.510 },
+ { 293.098, 98.010, 42.510 }, { 308.653, 97.675, 42.505 }, { 324.288, 97.305, 42.495 },
+ { 340.008, 96.925, 42.490 }
+ },
+ { { -340.003, 96.830, 27.385 }, { -324.278, 97.215, 27.385 }, { -308.648, 97.575, 27.385 },
+ { -293.093, 97.915, 27.385 }, { -277.613, 98.240, 27.385 }, { -262.213, 98.535, 27.385 },
+ { -246.878, 98.810, 27.385 }, { -231.603, 99.060, 27.385 }, { -216.383, 99.290, 27.385 },
+ { -201.223, 99.495, 27.385 }, { -186.113, 99.680, 27.385 }, { -171.048, 99.840, 27.385 },
+ { -156.033, 99.980, 27.385 }, { -141.048, 100.095, 27.385 }, { -126.103, 100.185, 27.385 },
+ { -111.193, 100.255, 27.385 }, { -96.303, 100.305, 27.385 }, { -81.448, 100.330, 27.385 },
+ { -66.608, 100.330, 27.385 }, { -51.783, 100.310, 27.385 }, { -36.973, 100.265, 27.385 },
+ { -22.178, 100.200, 27.385 }, { -7.388, 100.105, 27.385 }, { 7.403, 100.105, 27.385 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 156.078, 99.980, 27.385 }, { 171.103, 99.840, 27.385 }, { 186.168, 99.680, 27.385 },
+ { 201.268, 99.495, 27.385 }, { 216.423, 99.290, 27.385 }, { 231.638, 99.060, 27.385 },
+ { 246.913, 98.810, 27.385 }, { 262.248, 98.535, 27.385 }, { 277.658, 98.240, 27.385 },
+ { 293.133, 97.920, 27.385 }, { 308.688, 97.575, 27.385 }, { 324.323, 97.215, 27.385 },
+ { 340.043, 96.830, 27.385 }
+ },
+ { { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }
+ },
+ { { -339.998, 96.840, -27.330 }, { -324.278, 97.225, -27.340 }, { -308.643, 97.585, -27.345 },
+ { -293.093, 97.925, -27.350 }, { -277.613, 98.245, -27.360 }, { -262.213, 98.545, -27.365 },
+ { -246.868, 98.820, -27.365 }, { -231.598, 99.070, -27.370 }, { -216.383, 99.300, -27.375 },
+ { -201.223, 99.505, -27.380 }, { -186.113, 99.690, -27.385 }, { -171.048, 99.850, -27.380 },
+ { -156.028, 99.990, -27.385 }, { -141.048, 100.100, -27.390 }, { -126.103, 100.195, -27.390 },
+ { -111.193, 100.265, -27.395 }, { -96.303, 100.315, -27.395 }, { -81.443, 100.340, -27.390 },
+ { -66.603, 100.335, -27.390 }, { -51.783, 100.315, -27.390 }, { -36.973, 100.275, -27.395 },
+ { -22.173, 100.205, -27.390 }, { -7.383, 100.115, -27.385 }, { 7.403, 100.115, -27.385 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 }, { 0.000, 0.000, 0.000 },
+ { 156.088, 99.985, -27.385 }, { 171.103, 99.845, -27.380 }, { 186.168, 99.680, -27.385 },
+ { 201.268, 99.495, -27.380 }, { 216.428, 99.290, -27.375 }, { 231.643, 99.060, -27.370 },
+ { 246.913, 98.810, -27.365 }, { 262.258, 98.535, -27.365 }, { 277.658, 98.240, -27.360 },
+ { 293.138, 97.925, -27.350 }, { 308.688, 97.580, -27.345 }, { 324.323, 97.215, -27.340 },
+ { 340.043, 96.835, -27.330 }
+ },
+ { { -339.998, 96.930, -42.435 }, { -324.278, 97.315, -42.445 }, { -308.648, 97.680, -42.455 },
+ { -293.093, 98.015, -42.470 }, { -277.613, 98.340, -42.480 }, { -262.208, 98.635, -42.490 },
+ { -246.873, 98.910, -42.500 }, { -231.593, 99.160, -42.510 }, { -216.383, 99.390, -42.515 },
+ { -201.223, 99.595, -42.525 }, { -186.113, 99.780, -42.525 }, { -171.048, 99.940, -42.535 },
+ { -156.028, 100.080, -42.540 }, { -141.048, 100.195, -42.540 }, { -126.103, 100.290, -42.545 },
+ { -111.193, 100.355, -42.550 }, { -96.303, 100.405, -42.550 }, { -81.443, 100.430, -42.550 },
+ { -66.608, 100.430, -42.550 }, { -51.783, 100.405, -42.550 }, { -36.973, 100.365, -42.550 },
+ { -22.178, 100.295, -42.545 }, { -7.388, 100.205, -42.545 }, { 7.403, 100.205, -42.545 },
+ { 22.193, 100.295, -42.545 }, { 36.988, 100.365, -42.550 }, { 51.798, 100.405, -42.550 },
+ { 66.623, 100.430, -42.550 }, { 81.458, 100.430, -42.550 }, { 96.318, 100.405, -42.550 },
+ { 111.208, 100.355, -42.550 }, { 126.118, 100.290, -42.545 }, { 141.063, 100.195, -42.540 },
+ { 156.043, 100.080, -42.540 }, { 171.063, 99.940, -42.535 }, { 186.128, 99.780, -42.525 },
+ { 201.238, 99.595, -42.525 }, { 216.398, 99.390, -42.515 }, { 231.613, 99.160, -42.510 },
+ { 246.888, 98.910, -42.500 }, { 262.223, 98.635, -42.490 }, { 277.628, 98.340, -42.480 },
+ { 293.108, 98.015, -42.470 }, { 308.663, 97.680, -42.455 }, { 324.293, 97.315, -42.445 },
+ { 340.013, 96.930, -42.435 }
+ },
+ { { -339.998, 97.000, -57.540 }, { -324.278, 97.385, -57.560 }, { -308.648, 97.745, -57.575 },
+ { -293.093, 98.090, -57.595 }, { -277.613, 98.410, -57.610 }, { -262.208, 98.705, -57.625 },
+ { -246.873, 98.975, -57.640 }, { -231.593, 99.225, -57.655 }, { -216.383, 99.455, -57.665 },
+ { -201.223, 99.665, -57.675 }, { -186.113, 99.845, -57.685 }, { -171.048, 100.010, -57.690 },
+ { -156.028, 100.145, -57.700 }, { -141.048, 100.265, -57.705 }, { -126.103, 100.355, -57.710 },
+ { -111.193, 100.425, -57.710 }, { -96.303, 100.475, -57.720 }, { -81.443, 100.495, -57.715 },
+ { -66.608, 100.500, -57.720 }, { -51.783, 100.480, -57.715 }, { -36.973, 100.430, -57.710 },
+ { -22.178, 100.365, -57.710 }, { -7.388, 100.275, -57.705 }, { 7.403, 100.275, -57.705 },
+ { 22.193, 100.365, -57.710 }, { 36.988, 100.430, -57.710 }, { 51.798, 100.480, -57.715 },
+ { 66.623, 100.500, -57.720 }, { 81.458, 100.495, -57.715 }, { 96.318, 100.475, -57.720 },
+ { 111.208, 100.425, -57.710 }, { 126.118, 100.355, -57.710 }, { 141.063, 100.265, -57.705 },
+ { 156.043, 100.145, -57.700 }, { 171.063, 100.010, -57.690 }, { 186.128, 99.845, -57.685 },
+ { 201.238, 99.665, -57.675 }, { 216.398, 99.455, -57.665 }, { 231.613, 99.225, -57.655 },
+ { 246.888, 98.975, -57.640 }, { 262.223, 98.705, -57.625 }, { 277.628, 98.410, -57.610 },
+ { 293.108, 98.090, -57.595 }, { 308.663, 97.745, -57.575 }, { 324.293, 97.385, -57.560 },
+ { 340.013, 97.000, -57.540 }
+ },
+ { { -339.998, 97.045, -72.655 }, { -324.278, 97.435, -72.680 }, { -308.648, 97.795, -72.710 },
+ { -293.093, 98.135, -72.730 }, { -277.613, 98.455, -72.750 }, { -262.208, 98.750, -72.775 },
+ { -246.873, 99.020, -72.795 }, { -231.593, 99.280, -72.810 }, { -216.383, 99.505, -72.820 },
+ { -201.223, 99.710, -72.840 }, { -186.113, 99.895, -72.850 }, { -171.048, 100.055, -72.860 },
+ { -156.028, 100.190, -72.870 }, { -141.048, 100.305, -72.880 }, { -126.103, 100.400, -72.885 },
+ { -111.193, 100.470, -72.890 }, { -96.303, 100.520, -72.890 }, { -81.443, 100.540, -72.895 },
+ { -66.608, 100.540, -72.895 }, { -51.783, 100.520, -72.895 }, { -36.973, 100.480, -72.890 },
+ { -22.178, 100.405, -72.885 }, { -7.388, 100.320, -72.880 }, { 7.403, 100.320, -72.880 },
+ { 22.193, 100.405, -72.885 }, { 36.988, 100.480, -72.890 }, { 51.798, 100.520, -72.895 },
+ { 66.623, 100.540, -72.895 }, { 81.458, 100.540, -72.895 }, { 96.318, 100.520, -72.890 },
+ { 111.208, 100.470, -72.890 }, { 126.118, 100.400, -72.885 }, { 141.063, 100.305, -72.880 },
+ { 156.043, 100.190, -72.870 }, { 171.063, 100.055, -72.860 }, { 186.128, 99.895, -72.850 },
+ { 201.238, 99.710, -72.840 }, { 216.398, 99.505, -72.820 }, { 231.613, 99.280, -72.810 },
+ { 246.888, 99.020, -72.795 }, { 262.223, 98.750, -72.775 }, { 277.628, 98.455, -72.750 },
+ { 293.108, 98.135, -72.730 }, { 308.663, 97.795, -72.710 }, { 324.293, 97.435, -72.680 },
+ { 340.013, 97.045, -72.655 }
+ },
+ { { -339.998, 97.070, -87.790 }, { -324.278, 97.460, -87.820 }, { -308.648, 97.820, -87.850 },
+ { -293.093, 98.160, -87.885 }, { -277.613, 98.480, -87.910 }, { -262.208, 98.775, -87.935 },
+ { -246.873, 99.050, -87.960 }, { -231.593, 99.300, -87.980 }, { -216.383, 99.530, -88.000 },
+ { -201.223, 99.735, -88.015 }, { -186.113, 99.920, -88.030 }, { -171.048, 100.080, -88.045 },
+ { -156.028, 100.215, -88.055 }, { -141.048, 100.335, -88.065 }, { -126.103, 100.420, -88.070 },
+ { -111.193, 100.490, -88.075 }, { -96.303, 100.540, -88.085 }, { -81.443, 100.565, -88.085 },
+ { -66.608, 100.560, -88.085 }, { -51.783, 100.540, -88.085 }, { -36.973, 100.500, -88.080 },
+ { -22.178, 100.430, -88.075 }, { -7.388, 100.340, -88.065 }, { 7.403, 100.340, -88.070 },
+ { 22.193, 100.430, -88.075 }, { 36.988, 100.500, -88.080 }, { 51.798, 100.540, -88.085 },
+ { 66.623, 100.560, -88.085 }, { 81.458, 100.565, -88.085 }, { 96.318, 100.540, -88.085 },
+ { 111.208, 100.490, -88.075 }, { 126.118, 100.420, -88.070 }, { 141.063, 100.335, -88.065 },
+ { 156.043, 100.215, -88.055 }, { 171.063, 100.080, -88.045 }, { 186.128, 99.915, -88.030 },
+ { 201.238, 99.735, -88.015 }, { 216.398, 99.530, -88.000 }, { 231.613, 99.300, -87.980 },
+ { 246.888, 99.050, -87.960 }, { 262.223, 98.775, -87.935 }, { 277.628, 98.480, -87.910 },
+ { 293.108, 98.160, -87.885 }, { 308.663, 97.820, -87.850 }, { 324.293, 97.460, -87.820 },
+ { 340.013, 97.070, -87.790 }
+ }
+ };
+}
Modified: java/branches/HPSJAVA-409/run-database/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/run-database/pom.xml (original)
+++ java/branches/HPSJAVA-409/run-database/pom.xml Wed Apr 27 11:11:32 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/run-database/</url>
@@ -20,8 +20,21 @@
<artifactId>hps-record-util</artifactId>
</dependency>
<dependency>
- <groupId>org.hps</groupId>
- <artifactId>hps-datacat-client</artifactId>
+ <groupId>srs</groupId>
+ <artifactId>org-srs-datacat-client</artifactId>
</dependency>
</dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <excludes>
+ <exclude>org/hps/run/database/RunBuilderTest.java</exclude>
+ </excludes>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
</project>
Modified: java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EpicsDataDao.java
=============================================================================
--- java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EpicsDataDao.java (original)
+++ java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EpicsDataDao.java Wed Apr 27 11:11:32 2016
@@ -16,7 +16,7 @@
*
* @param run the run number
*/
- public void deleteEpicsData(EpicsType epicsType, final int run);
+ public void deleteEpicsData(EpicsType epicsType, int run);
/**
* Get EPICS data by run.
@@ -34,5 +34,5 @@
*
* @param epicsDataList the list of EPICS data
*/
- void insertEpicsData(List<EpicsData> epicsDataList);
+ void insertEpicsData(List<EpicsData> epicsDataList, int run);
}
Modified: java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java
=============================================================================
--- java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java (original)
+++ java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java Wed Apr 27 11:11:32 2016
@@ -74,6 +74,9 @@
/**
* Delete all EPICS data for a run from the database.
+ * <p>
+ * Only the <code>epics_header</code> records are deleted and the child records
+ * are deleted automatically via a <code>CASCADE</code>.
*
* @param run the run number
*/
@@ -97,12 +100,12 @@
deleteEpicsData.setInt(1, headerId);
int rowsAffected = deleteEpicsData.executeUpdate();
if (rowsAffected == 0) {
- throw new SQLException("Deletion of EPICS data failed; no rows affect.");
+ throw new SQLException("Deletion of EPICS data failed; no rows affected.");
}
deleteHeader.setInt(1, headerId);
rowsAffected = deleteHeader.executeUpdate();
if (rowsAffected == 0) {
- throw new SQLException("Deletion of EPICS header failed; no rows affect.");
+ throw new SQLException("Deletion of EPICS header failed; no rows affected.");
}
}
@@ -137,7 +140,7 @@
* Get EPICS data by run.
*
* @param run the run number
- * @param epicsType the type of EPICS data (1s or 10s)
+ * @param epicsType the type of EPICS data (2s or 20s)
* @return the EPICS data
*/
@Override
@@ -149,7 +152,7 @@
final List<EpicsVariable> variables = epicsVariableDao.getEpicsVariables(epicsType);
selectEpicsData = connection.prepareStatement("SELECT * FROM " + epicsType.getTableName()
+ " LEFT JOIN epics_headers ON " + epicsType.getTableName() + ".epics_header_id = epics_headers.id"
- + " WHERE epics_headers.run = ?");
+ + " WHERE epics_headers.run = ? ORDER BY epics_headers.sequence");
selectEpicsData.setInt(1, run);
ResultSet resultSet = selectEpicsData.executeQuery();
while (resultSet.next()) {
@@ -189,12 +192,14 @@
/**
* Insert a list of EPICS data into the database.
* <p>
- * The run number comes from the header information.
+ * By default, the run number from the header will be used, but it will be overridden
+ * if it does not match the <code>run</code> argument. (There are a few data files
+     * where the run in the EPICS header is occasionally wrong.)
*
* @param epicsDataList the list of EPICS data
*/
@Override
- public void insertEpicsData(final List<EpicsData> epicsDataList) {
+ public void insertEpicsData(final List<EpicsData> epicsDataList, int run) {
if (epicsDataList.isEmpty()) {
throw new IllegalArgumentException("The EPICS data list is empty.");
}
@@ -208,9 +213,11 @@
if (epicsHeader == null) {
throw new IllegalArgumentException("The EPICS data is missing a header.");
}
- insertHeaderStatement.setInt(1, epicsHeader.getRun());
+ insertHeaderStatement.setInt(1, run); /* Don't use run from bank as it is sometimes wrong! */
insertHeaderStatement.setInt(2, epicsHeader.getSequence());
insertHeaderStatement.setInt(3, epicsHeader.getTimestamp());
+ LOGGER.finer("creating EPICs record with run = " + run + " ; seq = "
+ + epicsHeader.getSequence() + "; ts = " + epicsHeader.getTimestamp());
final int rowsCreated = insertHeaderStatement.executeUpdate();
if (rowsCreated == 0) {
throw new SQLException("Creation of EPICS header record failed; no rows affected.");
@@ -238,11 +245,11 @@
insertStatement.setDouble(parameterIndex, value);
++parameterIndex;
}
- final int dataRowsCreated = insertStatement.executeUpdate();
+ final int dataRowsCreated = insertStatement.executeUpdate();
if (dataRowsCreated == 0) {
throw new SQLException("Creation of EPICS data failed; no rows affected.");
}
- LOGGER.info("inserted EPICS data with run " + epicsHeader.getRun() + ", seq " + epicsHeader.getSequence() + "timestamp "
+ LOGGER.finer("inserted EPICS data with run = " + run + "; seq = " + epicsHeader.getSequence() + "; ts = "
+ epicsHeader.getTimestamp());
insertStatement.close();
}
Modified: java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EpicsType.java
=============================================================================
--- java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EpicsType.java (original)
+++ java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EpicsType.java Wed Apr 27 11:11:32 2016
@@ -3,34 +3,35 @@
import org.hps.record.epics.EpicsData;
/**
- * Enum for representing different types of EPICS data in the run database, of which there are currently two (1s and
- * 10s).
+ * Enum for representing different types of EPICS data in the run database, of which there are currently two (2s and
+ * 20s).
*
* @author Jeremy McCormick, SLAC
*/
+// FIXME: move to record-util
public enum EpicsType {
/**
- * 10S EPICS data.
+ * 20S EPICS data.
*/
- EPICS_10S(10),
+ EPICS_20S(20),
/**
- * 1S EPICS data.
+ * 2S EPICS data.
*/
- EPICS_1S(1);
+ EPICS_2S(2);
/**
* Get the type from an int.
*
* @param type the type from an int
* @return the type from an int
- * @throws IllegalArgumentException if <code>type</code> is invalid (not 1 or 10)
+ * @throws IllegalArgumentException if <code>type</code> is invalid (not 2 or 20)
*/
public static EpicsType fromInt(final int type) {
- if (type == EPICS_1S.type) {
- return EPICS_1S;
- } else if (type == EPICS_10S.type) {
- return EPICS_10S;
+ if (type == EPICS_2S.type) {
+ return EPICS_2S;
+ } else if (type == EPICS_20S.type) {
+ return EPICS_20S;
} else {
throw new IllegalArgumentException("The type code is invalid (must be 1 or 10): " + type);
}
@@ -44,9 +45,9 @@
public static EpicsType getEpicsType(final EpicsData epicsData) {
// FIXME: The type argument should be set on creation which would make this key check unnecessary.
if (epicsData.getKeys().contains("MBSY2C_energy")) {
- return EpicsType.EPICS_1S;
+ return EpicsType.EPICS_2S;
} else {
- return EpicsType.EPICS_10S;
+ return EpicsType.EPICS_20S;
}
}
Modified: java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EpicsVariable.java
=============================================================================
--- java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EpicsVariable.java (original)
+++ java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/EpicsVariable.java Wed Apr 27 11:11:32 2016
@@ -2,7 +2,7 @@
/**
* Information about an EPICS variable including its name in the EPICS database, column name for the run database,
- * description of the variable, and type (either 1s or 10s).
+ * description of the variable, and type (either 2s or 20s).
* <p>
* This class is used to represent data from the <i>epics_variables</i> table in the run database.
*
@@ -29,7 +29,7 @@
private final String variableName;
/**
- * The type of the variable (1s or 10s).
+ * The type of the variable (2s or 20s).
*/
private final EpicsType variableType;
@@ -53,9 +53,9 @@
* Create an EPICs variable.
*
* @param variableName the name of the variable
- * @param columnName the column name in the run db
+ * @param columnName the column name in the run database
* @param description the variable's description
- * @param variableType the type of the variable
+ * @param type the integer encoding of the type
*/
public EpicsVariable(final String variableName, final String columnName, final String description, final int type) {
this.variableName = variableName;
Modified: java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java
=============================================================================
--- java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java (original)
+++ java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java Wed Apr 27 11:11:32 2016
@@ -1,62 +1,24 @@
package org.hps.run.database;
import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.logging.Logger;
+import java.net.URISyntaxException;
import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
import org.hps.conditions.database.ConnectionParameters;
-import org.hps.datacat.client.DatacatClient;
-import org.hps.datacat.client.DatacatClientFactory;
-import org.hps.datacat.client.Dataset;
-import org.hps.datacat.client.DatasetMetadata;
-import org.hps.record.evio.EvioFileUtilities;
+import org.srs.datacat.client.Client;
+import org.srs.datacat.client.ClientBuilder;
/**
- * Command line tool for updating the run database from EVIO files registered in the data catalog.
+ * Command line tool for inserting records into the run database.
*
* @author Jeremy McCormick, SLAC
*/
-public class RunDatabaseCommandLine {
-
- /**
- * Set of features supported by the tool.
- */
- static enum Feature {
- /**
- * Insert EPICS data.
- */
- EPICS,
- /**
- * Insert scaler data.
- */
- SCALERS,
- /**
- * Insert run summary.
- */
- SUMMARY,
- /**
- * Insert trigger config.
- */
- TRIGGER_CONFIG
- }
-
- /**
- * Initialize the logger.
- */
- private static final Logger LOGGER = Logger.getLogger(RunDatabaseCommandLine.class.getPackage().getName());
-
+public final class RunDatabaseCommandLine {
+
/**
* Command line options for the crawler.
*/
@@ -66,11 +28,18 @@
* Statically define the command options.
*/
static {
- OPTIONS.addOption("f", "feature", true, "enable a feature");
- OPTIONS.addOption("p", "connection-properties", true, "database connection properties file (required)");
OPTIONS.addOption("h", "help", false, "print help and exit (overrides all other arguments)");
OPTIONS.addOption("r", "run", true, "run to update");
- OPTIONS.addOption("u", "update", false, "allow updating existing run in the database");
+ OPTIONS.addOption("p", "connection-properties", true, "database connection properties file (required)");
+ OPTIONS.addOption("Y", "dry-run", false, "dry run which will not update the database");
+ OPTIONS.addOption("x", "replace", false, "allow deleting and replacing an existing run");
+ OPTIONS.addOption("s", "spreadsheet", true, "path to run database spreadsheet (CSV format)");
+ OPTIONS.addOption("d", "detector", true, "conditions system detector name");
+ OPTIONS.addOption("N", "no-evio-processing", false, "skip processing of all EVIO files");
+ OPTIONS.addOption("L", "load", false, "load back run information after inserting (for debugging)");
+ OPTIONS.addOption("u", "url", true, "data catalog URL");
+ OPTIONS.addOption("S", "site", true, "data catalog site (e.g. SLAC or JLAB)");
+ OPTIONS.addOption("f", "folder", true, "folder in datacat for dataset search");
}
/**
@@ -81,113 +50,76 @@
public static void main(final String args[]) {
new RunDatabaseCommandLine().parse(args).run();
}
-
- /**
- * Allow updating of the database for existing runs.
- */
- private boolean allowUpdates = false;
-
- /**
- * The set of enabled features.
- */
- private final Set<Feature> features = new HashSet<Feature>();
-
- /**
- * The run manager for interacting with the run db.
- */
- private RunManager runManager;
-
- /**
- * Create a run processor from the current configuration.
- *
- * @return the run processor
- */
- private RunProcessor createEvioRunProcessor(final RunSummaryImpl runSummary, final List<File> files) {
-
- final RunProcessor runProcessor = new RunProcessor(runSummary, files);
-
- if (features.contains(Feature.EPICS)) {
- runProcessor.addEpicsProcessor();
- }
- if (features.contains(Feature.SCALERS)) {
- runProcessor.addScalerProcessor();
- }
- if (features.contains(Feature.TRIGGER_CONFIG)) {
- runProcessor.addTriggerTimeProcessor();
- }
-
- return runProcessor;
- }
-
- /**
- * Get the list of EVIO files for the run.
- *
- * @param run the run number
- * @return the list of EVIO files from the run
- */
- private Map<File, Dataset> getEvioFiles(final int run) {
- final DatacatClient datacatClient = new DatacatClientFactory().createClient();
- final Set<String> metadata = new HashSet<String>();
- final Map<File, Dataset> files = new HashMap<File, Dataset>();
- metadata.add("runMin");
- metadata.add("eventCount");
- metadata.add("fileNumber");
- metadata.add("endTimestamp");
- metadata.add("startTimestamp");
- metadata.add("hasEnd");
- metadata.add("hasPrestart");
- final List<Dataset> datasets = datacatClient.findDatasets("data/raw",
- "fileFormat eq 'EVIO' AND dataType eq 'RAW' AND runMin eq " + run, metadata);
- if (datasets.isEmpty()) {
- throw new IllegalStateException("No EVIO datasets for run " + run + " were found in the data catalog.");
- }
- for (final Dataset dataset : datasets) {
- files.put(new File(dataset.getLocations().get(0).getResource()), dataset);
- }
- return files;
- }
-
- /**
- * Insert information for a run into the database.
- *
- * @param runManager the run manager for interacting with the run db
- * @param runSummary the run summary with information about the run
- */
- private void insertRun(final RunManager runManager, final RunSummary runSummary) {
-
- final RunDatabaseDaoFactory runFactory = new RunDatabaseDaoFactory(runManager.getConnection());
-
- // Add the run summary record.
- if (this.features.contains(Feature.SUMMARY)) {
- LOGGER.info("inserting run summary");
- runFactory.createRunSummaryDao().insertRunSummary(runSummary);
- }
-
- if (this.features.contains(Feature.EPICS)) {
- LOGGER.info("inserting EPICS data");
- runFactory.createEpicsDataDao().insertEpicsData(runSummary.getEpicsData());
- }
-
- if (this.features.contains(Feature.SCALERS)) {
- LOGGER.info("inserting scaler data");
- runFactory.createScalerDataDao().insertScalerData(runSummary.getScalerData(), runManager.getRun());
- }
-
- if (this.features.contains(Feature.TRIGGER_CONFIG)) {
- LOGGER.info("inserting trigger config");
- runFactory.createTriggerConfigDao().insertTriggerConfig(runSummary.getTriggerConfig(), runManager.getRun());
- }
- }
-
- /**
- * Parse command line options and return reference to <code>this</code>.
+
+ /**
+ * Enable dry run which will not update the run database.
+ */
+ private boolean dryRun = false;
+
+ /**
+ * Run number.
+ */
+ private int run;
+
+ /**
+ * Path to spreadsheet CSV file.
+ */
+ private File spreadsheetFile = null;
+
+ /**
+ * Name of detector for conditions system (default for Eng Run 2015 provided here).
+ */
+ private String detectorName = "HPS-EngRun2015-Nominal-v3";
+
+ /**
+ * Allow replacement of existing records.
+ */
+ private boolean replace = false;
+
+ /**
+ * Skip full EVIO file processing.
+ */
+ private boolean skipEvioProcessing = false;
+
+ /**
+ * Load back run information after insert (for debugging).
+ */
+ private boolean reload = false;
+
+ /**
+ * Database connection parameters.
+ */
+ private ConnectionParameters connectionParameters = null;
+
+ /**
+ * Data catalog client interface.
+ */
+ private Client datacatClient = null;
+
+ /**
+ * Data catalog site.
+ */
+ private String site = "JLAB";
+
+ /**
+ * Data catalog URL.
+ */
+ private String url = "http://hpsweb.jlab.org/datacat/r";
+
+ /**
+ * Default folder for file search.
+ */
+ private String folder = "/HPS/data/raw";
+
+ /**
+ * Parse command line options and return reference to <code>this</code> object.
*
* @param args the command line arguments
* @return reference to this object
*/
- RunDatabaseCommandLine parse(final String args[]) {
+ private RunDatabaseCommandLine parse(final String args[]) {
try {
- final CommandLine cl = new DefaultParser().parse(OPTIONS, args);
+ final CommandLine cl = new PosixParser().parse(OPTIONS, args);
// Print help and exit.
if (cl.hasOption("h") || args.length == 0) {
@@ -204,43 +136,74 @@
throw new IllegalArgumentException("Connection properties file " + dbPropFile.getPath()
+ " does not exist.");
}
- final ConnectionParameters connectionParameters = ConnectionParameters.fromProperties(dbPropFile);
- LOGGER.config("using " + dbPropPath + " for db connection properties");
-
- runManager = new RunManager(connectionParameters.createConnection());
-
+ connectionParameters = ConnectionParameters.fromProperties(dbPropFile);
} else {
// Database connection properties file is required.
- throw new RuntimeException("Connection properties are required.");
- }
-
- Integer run = null;
+ throw new RuntimeException("Connection properties are a required argument.");
+ }
+
+ // Run number.
if (cl.hasOption("r")) {
run = Integer.parseInt(cl.getOptionValue("r"));
} else {
throw new RuntimeException("The run number is required.");
}
- runManager.setRun(run);
-
+
+ // Dry run.
+ if (cl.hasOption("Y")) {
+ this.dryRun = true;
+ }
+
+ // Run spreadsheet.
+ if (cl.hasOption("s")) {
+ this.spreadsheetFile = new File(cl.getOptionValue("s"));
+ if (!this.spreadsheetFile.exists()) {
+ throw new RuntimeException("The run spreadsheet " + this.spreadsheetFile.getPath() + " is inaccessible or does not exist.");
+ }
+ }
+
+ // Detector name.
+ if (cl.hasOption("d")) {
+ this.detectorName = cl.getOptionValue("d");
+ }
+
+ // Replace existing run.
+ if (cl.hasOption("x")) {
+ this.replace = true;
+ }
+
+ // Skip full EVIO processing.
+ if (cl.hasOption("N")) {
+ this.skipEvioProcessing = true;
+ }
+
+ // Load back run info at end of job.
+ if (cl.hasOption("L")) {
+ this.reload = true;
+ }
+
+ // Data catalog URL.
+ if (cl.hasOption("u")) {
+ url = cl.getOptionValue("u");
+ }
+
+ // Site in the data catalog.
+ if (cl.hasOption("S")) {
+ site = cl.getOptionValue("S");
+ }
+
+ // Set folder for dataset search.
if (cl.hasOption("f")) {
- // Enable individual features.
- for (final String arg : cl.getOptionValues("f")) {
- features.add(Feature.valueOf(arg));
- }
- } else {
- // By default all features are enabled.
- features.addAll(Arrays.asList(Feature.values()));
- }
- for (final Feature feature : features) {
- LOGGER.config("feature " + feature.name() + " is enabled.");
- }
-
- // Allow updates to existing runs in the db.
- if (cl.hasOption("u")) {
- this.allowUpdates = true;
- LOGGER.config("updating or replacing existing run data is enabled");
- }
-
+ folder = cl.getOptionValue("f");
+ }
+
+ // Initialize the data catalog client.
+ try {
+ datacatClient = new ClientBuilder().setUrl(url).build();
+ } catch (URISyntaxException e) {
+ throw new RuntimeException("Bad datacat URL.", e);
+ }
+
} catch (final ParseException e) {
throw new RuntimeException(e);
}
@@ -249,90 +212,21 @@
}
/**
- * Run the job to update the information in the run database.
+ * Configure the builder from command line options and run the job to update the database.
*/
private void run() {
-
- LOGGER.info("starting");
-
- final boolean runExists = runManager.runExists();
-
- // Fail if run exists and updates are not allowed.
- if (runExists && !allowUpdates) {
- throw new IllegalStateException("The run " + runManager.getRun()
- + " already exists and updates are not allowed.");
- }
-
- // Get the run number configured from command line.
- final int run = runManager.getRun();
-
- // Get the list of EVIO files for the run using a data catalog query.
- final Map<File, Dataset> fileDatasets = this.getEvioFiles(run);
- final List<File> files = new ArrayList<File>(fileDatasets.keySet());
- EvioFileUtilities.sortBySequence(files);
-
- // Process the run's files to get information.
- final RunSummaryImpl runSummary = new RunSummaryImpl(run);
- final RunProcessor runProcessor = this.createEvioRunProcessor(runSummary, files);
- try {
- runProcessor.processRun();
- } catch (final Exception e) {
- throw new RuntimeException(e);
- }
-
- // Set number of files from datacat query.
- runSummary.setTotalFiles(files.size());
-
- // Set run start date.
- this.setStartDate(fileDatasets, files, runSummary);
-
- // Set run end date.
- this.setEndDate(fileDatasets, files, runSummary);
-
- // Delete existing run.
- if (runExists) {
- runManager.deleteRun();
- }
-
- // Insert run into database.
- this.insertRun(runManager, runSummary);
-
- // Close the database connection.
- runManager.closeConnection();
-
- LOGGER.info("done");
- }
-
- /**
- * Set the run end date.
- *
- * @param fileDatasets the run's datasets
- * @param files the run's EVIO files
- * @param runSummary the run summary
- */
- private void setEndDate(final Map<File, Dataset> fileDatasets, final List<File> files,
- final RunSummaryImpl runSummary) {
- final Dataset lastDataset = fileDatasets.get(files.get(files.size() - 1));
- final DatasetMetadata metadata = lastDataset.getMetadata();
- // System.out.println("endTimestamp: " + metadata.getLong("endTimestamp"));
- final Date endDate = new Date(metadata.getLong("endTimestamp"));
- // System.out.println("endDate: " + startDate);
- runSummary.setEndDate(endDate);
- runSummary.setEndOkay(metadata.getLong("hasEnd") == 0 ? false : true);
- }
-
- /**
- * Set the run start date.
- *
- * @param fileDatasets the run's datasets
- * @param files the run's EVIO files
- * @param runSummary the run summary
- */
- private void setStartDate(final Map<File, Dataset> fileDatasets, final List<File> files,
- final RunSummaryImpl runSummary) {
- final Dataset firstDataset = fileDatasets.get(files.get(0));
- final DatasetMetadata metadata = firstDataset.getMetadata();
- final Date startDate = new Date(metadata.getLong("startTimestamp"));
- runSummary.setStartDate(startDate);
- }
+ new RunDatabaseBuilder()
+ .createRunSummary(run)
+ .setFolder(folder)
+ .setDetectorName(detectorName)
+ .setConnectionParameters(connectionParameters)
+ .setDatacatClient(datacatClient)
+ .setSite(site)
+ .setDryRun(dryRun)
+ .setReplace(replace)
+ .skipEvioProcessing(skipEvioProcessing)
+ .setSpreadsheetFile(spreadsheetFile)
+ .setReload(reload)
+ .run();
+ }
}
Modified: java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunManager.java
=============================================================================
--- java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunManager.java (original)
+++ java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunManager.java Wed Apr 27 11:11:32 2016
@@ -6,37 +6,26 @@
import java.util.logging.Logger;
import org.hps.conditions.database.ConnectionParameters;
+import org.hps.record.daqconfig.DAQConfig;
import org.hps.record.epics.EpicsData;
import org.hps.record.scalers.ScalerData;
-import org.hps.record.triggerbank.TriggerConfig;
+import org.hps.record.svt.SvtConfigData;
+import org.hps.record.triggerbank.TriggerConfigData;
import org.lcsim.conditions.ConditionsEvent;
import org.lcsim.conditions.ConditionsListener;
/**
- * Manages read-only access to the run database and creates a {@link RunSummary} for a specific run.
+ * Manages access to the run database.
*
* @author Jeremy McCormick, SLAC
*/
public final class RunManager implements ConditionsListener {
/**
- * Simple class for caching data.
- */
- private class DataCache {
-
- List<EpicsData> epicsData;
- RunSummary fullRunSummary;
- Boolean runExists;
- RunSummary runSummary;
- List<ScalerData> scalerData;
- TriggerConfig triggerConfig;
- }
-
- /**
* The default connection parameters for read-only access to the run database.
*/
private static ConnectionParameters DEFAULT_CONNECTION_PARAMETERS = new ConnectionParameters("hpsuser",
- "darkphoton", "hps_run_db", "hpsdb.jlab.org");
+ "darkphoton", "hps_run_db_v2", "hpsdb.jlab.org");
/**
* The singleton instance of the RunManager.
@@ -49,8 +38,7 @@
private static final Logger LOGGER = Logger.getLogger(RunManager.class.getPackage().getName());
/**
- * Get the global instance of the {@link RunManager}.
- *
+ * Get the global instance of the {@link RunManager}.
* @return the global instance of the {@link RunManager}
*/
public static RunManager getRunManager() {
@@ -64,21 +52,11 @@
* The active database connection.
*/
private Connection connection;
-
- /**
- * The database connection parameters, initially set to the default parameters.
- */
- private final ConnectionParameters connectionParameters = DEFAULT_CONNECTION_PARAMETERS;
-
- /**
- * The data cache of run information.
- */
- private DataCache dataCache;
-
+
/**
* Factory for creating database API objects.
*/
- private final RunDatabaseDaoFactory factory;
+ private final DaoProvider factory;
/**
* The run number; the -1 value indicates that this has not been set externally yet.
@@ -86,34 +64,28 @@
private Integer run = null;
/**
+ * Class constructor.
+ * @param connection the database connection
+ */
+ public RunManager(final Connection connection) {
+ try {
+ if (connection.isClosed()) {
+ throw new RuntimeException("The connection is already closed and cannot be used.");
+ }
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
+ this.connection = connection;
+ factory = new DaoProvider(this.connection);
+ }
+
+ /**
* Class constructor using default connection parameters.
*/
public RunManager() {
- this.connection = DEFAULT_CONNECTION_PARAMETERS.createConnection();
- this.openConnection();
- factory = new RunDatabaseDaoFactory(this.connection);
- }
-
- /**
- * Class constructor.
- *
- * @param connection the database connection
- */
- public RunManager(final Connection connection) {
- this.connection = connection;
- this.openConnection();
- factory = new RunDatabaseDaoFactory(this.connection);
- }
-
- /**
- * Check if the run number has been set.
- */
- private void checkRunNumber() {
- if (this.run == null) {
- throw new IllegalStateException("The run number was not set.");
- }
- }
-
+ this(DEFAULT_CONNECTION_PARAMETERS.createConnection());
+ }
+
/**
* Close the database connection.
*/
@@ -129,7 +101,6 @@
/**
* Load new run information when conditions have changed.
- *
* @param conditionsEvent the event with new conditions information
*/
@Override
@@ -138,21 +109,7 @@
}
/**
- * Delete a run from the database.
- *
- * @param run the run number
- */
- public void deleteRun() {
- // Create object for updating run info in the database.
- final RunSummaryDao runSummaryDao = factory.createRunSummaryDao();
-
- // Delete run from the database.
- runSummaryDao.deleteFullRun(run);
- }
-
- /**
- * Return the database connection.
- *
+ * Return the database connection.
* @return the database connection
*/
Connection getConnection() {
@@ -161,186 +118,159 @@
/**
* Get the EPICS data for the current run.
- *
* @param epicsType the type of EPICS data
* @return the EPICS data for the current run
*/
public List<EpicsData> getEpicsData(final EpicsType epicsType) {
- this.checkRunNumber();
- if (this.dataCache.epicsData == null) {
- LOGGER.info("loading EPICS data for run " + this.run);
- this.dataCache.epicsData = factory.createEpicsDataDao().getEpicsData(epicsType, this.run);
- }
- return this.dataCache.epicsData;
- }
-
- /**
- * Get the EPICS variables.
- *
+ return factory.getEpicsDataDao().getEpicsData(epicsType, this.run);
+ }
+
+ /**
+ * Get the list of EPICS variables definitions.
* @param epicsType the type of EPICS data
- * @return the EPICS data for the current run
+ * @return the list of EPICS variable definitions
*/
public List<EpicsVariable> getEpicsVariables(final EpicsType epicsType) {
- return factory.createEpicsVariableDao().getEpicsVariables(epicsType);
- }
-
- /**
- * Get the full run summary for the current run including scaler data, etc.
- *
- * @return the full run summary for the current run
- */
- public RunSummary getFullRunSummary() {
- this.checkRunNumber();
- if (this.dataCache.fullRunSummary == null) {
- this.dataCache.fullRunSummary = factory.createRunSummaryDao().readFullRunSummary(this.run);
- }
- return this.dataCache.fullRunSummary;
- }
-
- /**
- * Get the current run number.
- *
- * @return the run number
- */
- public int getRun() {
- return run;
- }
-
- /**
- * Get the complete list of run numbers from the database.
- *
+ return factory.getEpicsVariableDao().getEpicsVariables(epicsType);
+ }
+
+ /**
+ * Get the full list of run numbers from the database.
* @return the complete list of run numbers
*/
public List<Integer> getRuns() {
- return new RunSummaryDaoImpl(this.connection).getRuns();
- }
-
- /**
- * Get the full list of summaries for all runs in the database without complex data like EPICS records.
- *
- * @return the full list of run summaries
- */
- public List<RunSummary> getRunSummaries() {
- return this.factory.createRunSummaryDao().getRunSummaries();
- }
-
- /**
- * Get the run summary for the current run not including its sub-objects like scaler data.
- *
+ return factory.getRunSummaryDao().getRuns();
+ }
+
+ /**
+ * Get the run summary for the current run.
* @return the run summary for the current run
*/
public RunSummary getRunSummary() {
- this.checkRunNumber();
- if (this.dataCache.runSummary == null) {
- this.dataCache.runSummary = factory.createRunSummaryDao().getRunSummary(this.run);
- }
- return this.dataCache.runSummary;
- }
-
- /**
- * Get the scaler data for the current run.
- *
+ return factory.getRunSummaryDao().getRunSummary(this.run);
+ }
+
+ /**
+ * Get the scaler data for the current run.
* @return the scaler data for the current run
*/
public List<ScalerData> getScalerData() {
- this.checkRunNumber();
- if (this.dataCache.scalerData == null) {
- LOGGER.info("loading scaler data for run " + this.run);
- this.dataCache.scalerData = factory.createScalerDataDao().getScalerData(run);
- }
- return this.dataCache.scalerData;
- }
-
- /**
- * Get the trigger config for the current run.
- *
- * @return the trigger config for the current run
- */
- public TriggerConfig getTriggerConfig() {
- this.checkRunNumber();
- if (this.dataCache.triggerConfig == null) {
- LOGGER.info("loading trigger config for run " + this.run);
- this.dataCache.triggerConfig = factory.createTriggerConfigDao().getTriggerConfig(run);
- }
- return this.dataCache.triggerConfig;
- }
-
- /**
- * Update the database with information found from crawling the files.
- *
- * @param runs the list of runs to update
- * @throws SQLException if there is a database query error
- */
- public void insertRun(final RunSummary runSummary) throws SQLException {
- LOGGER.info("updating run database for run " + runSummary.getRun());
-
- // Create object for updating run info in the database.
- final RunSummaryDao runSummaryDao = factory.createRunSummaryDao();
-
- // Insert run summary into database.
- runSummaryDao.insertFullRunSummary(runSummary);
-
- LOGGER.info("done updating run database");
- }
-
- /**
- * Open a new database connection from the connection parameters if the current one is closed or <code>null</code>.
- * <p>
- * This method does nothing if the connection is already open.
- */
- public void openConnection() {
- try {
- if (this.connection.isClosed()) {
- LOGGER.info("creating new database connection");
- this.connection = connectionParameters.createConnection();
- }
- } catch (final SQLException e) {
- throw new RuntimeException("Error opening database connection.", e);
- }
- }
+ return factory.getScalerDataDao().getScalerData(this.run);
+ }
+
+ /**
+ * Get SVT configuration data.
+ * @return the SVT configuration data
+ */
+ public List<SvtConfigData> getSvtConfigData() {
+ return factory.getSvtConfigDao().getSvtConfigs(this.run);
+ }
+
+ /**
+ * Get the DAQ (trigger) configuration for the run.
+ * @return the DAQ configuration for the run
+ */
+ public DAQConfig getDAQConfig() {
+ TriggerConfigData config = factory.getTriggerConfigDao().getTriggerConfig(this.run);
+ return config.loadDAQConfig(this.run);
+ }
/**
* Return <code>true</code> if the run exists in the database.
- *
* @return <code>true</code> if the run exists in the database
*/
- public boolean runExists() {
- this.checkRunNumber();
- if (this.dataCache.runExists == null) {
- this.dataCache.runExists = factory.createRunSummaryDao().runSummaryExists(this.run);
- }
- return this.dataCache.runExists;
- }
-
- /**
- * Return <code>true</code> if the run exists in the database.
- *
+ public boolean runExists() {
+ return factory.getRunSummaryDao().runSummaryExists(this.run);
+ }
+
+ /**
+ * Set the run number and then load the applicable {@link RunSummary} from the database.
* @param run the run number
- * @return <code>true</code> if the run exists in the database
- */
- boolean runExists(final int run) {
- if (this.dataCache.runExists == null) {
- this.dataCache.runExists = factory.createRunSummaryDao().runSummaryExists(run);
- }
- return this.dataCache.runExists;
- }
-
- /**
- * Set the run number and then load the applicable {@link RunSummary} from the database.
- *
- * @param run the run number
*/
public void setRun(final int run) {
-
if (this.run == null || run != this.run) {
-
- LOGGER.info("setting new run " + run);
-
+ LOGGER.info("setting run " + run);
// Set the run number.
this.run = run;
-
- // Reset the data cache.
- this.dataCache = new DataCache();
- }
- }
+ }
+ }
+
+ /**
+ * Get the currently active run number or <code>null</code>.
+ * @return the currently active run number of <code>null</code>
+ */
+ public Integer getRun() {
+ return this.run;
+ }
+
+ /**
+ * Create or replace a run summary in the database.
+ * @param runSummary the run summary to update
+ * @param replaceExisting <code>true</code> to allow an existing run summary to be replaced
+ */
+ void updateRunSummary(RunSummary runSummary, boolean replaceExisting) {
+ final RunSummaryDao runSummaryDao = factory.getRunSummaryDao();
+ RunManager runManager = new RunManager();
+ runManager.setRun(runSummary.getRun());
+ if (runManager.runExists()) {
+ if (replaceExisting) {
+ runSummaryDao.updateRunSummary(runSummary);
+ } else {
+ throw new RuntimeException("Run already exists and replacement is not allowed.");
+ }
+ } else {
+ runSummaryDao.insertRunSummary(runSummary);
+ }
+ }
+
+ /**
+ * Create or replace the trigger config for the run.
+ * @param triggerConfig the trigger config
+ * @param replaceExisting <code>true</code> to allow an existing trigger to be replaced
+ */
+ void updateTriggerConfig(TriggerConfigData triggerConfig, boolean replaceExisting) {
+ final TriggerConfigDao configDao = factory.getTriggerConfigDao();
+ if (configDao.getTriggerConfig(run) != null) {
+ if (replaceExisting) {
+ configDao.deleteTriggerConfig(run);
+ } else {
+ throw new RuntimeException("Run already exists and replacement is not allowed.");
+ }
+ }
+ configDao.insertTriggerConfig(triggerConfig, run);
+ }
+
+ /**
+ * Create or replace EPICS data for the run.
+ * @param epicsData the EPICS data
+ */
+ void updateEpicsData(List<EpicsData> epicsData) {
+ if (epicsData != null && !epicsData.isEmpty()) {
+ factory.getEpicsDataDao().insertEpicsData(epicsData, this.run);
+ }
+ }
+
+ /**
+ * Create or replace scaler data for the run.
+ * @param scalerData the scaler data
+ */
+ void updateScalerData(List<ScalerData> scalerData) {
+ if (scalerData != null) {
+ factory.getScalerDataDao().insertScalerData(scalerData, this.run);
+ }
+ }
+
+ /**
+ * Delete a run from the database.
+ * @param run the run number
+ */
+ void deleteRun() {
+ factory.getEpicsDataDao().deleteEpicsData(EpicsType.EPICS_2S, run);
+ factory.getEpicsDataDao().deleteEpicsData(EpicsType.EPICS_20S, run);
+ factory.getScalerDataDao().deleteScalerData(run);
+ factory.getSvtConfigDao().deleteSvtConfigs(run);
+ factory.getTriggerConfigDao().deleteTriggerConfig(run);
+ factory.getRunSummaryDao().deleteRunSummary(run);
+ }
+
}
Modified: java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunSummary.java
=============================================================================
--- java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunSummary.java (original)
+++ java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunSummary.java Wed Apr 27 11:11:32 2016
@@ -1,137 +1,129 @@
package org.hps.run.database;
-import java.io.File;
import java.util.Date;
-import java.util.List;
-
-import org.hps.datacat.client.DatasetFileFormat;
-import org.hps.record.epics.EpicsData;
-import org.hps.record.scalers.ScalerData;
-import org.hps.record.triggerbank.TriggerConfig;
/**
- * This is an API for accessing run summary information which is persisted as a row in the <i>runs</i> table of the run
- * database.
+ * This is an API for accessing run summary information which is persisted as a row in the <i>run_summaries</i> table.
* <p>
- * This information includes:
- * <ul>
- * <li>run number</li>
- * <li>start date</li>
- * <li>end date</li>
- * <li>number of events</li>
- * <li>number of EVIO files</li>
- * <li>whether the END event was found indicating that the DAQ did not crash</li>
- * <li>whether the run is considered good (all <code>true</code> for now)</li>
- * </ul>
- * <p>
- * It also references several complex objects including lists of {@link org.hps.record.epics.EpicsData} and
- * {@link org.hps.record.scalers.ScalerData} for the run, as well as a list of EVIO files.
+ * All timestamp fields use the Unix convention (seconds since the epoch).
*
+ * @author Jeremy McCormick, SLAC
* @see RunSummaryImpl
* @see RunSummaryDao
* @see RunSummaryDaoImpl
* @see RunManager
- *
- * @author Jeremy McCormick, SLAC
*/
public interface RunSummary {
-
+
/**
- * Get the creation date of this run record.
+ * Get the creation date of this record.
*
- * @return the creation date of this run record
+ * @return the creation date of this record
*/
Date getCreated();
/**
- * Get the end date.
- *
- * @return the end date
+ * Get the END event timestamp or the timestamp from the last head bank if END is not present.
+ *
+ * @return the last event timestamp
*/
- Date getEndDate();
+ Integer getEndTimestamp();
/**
- * Return <code>true</code> if END event was found in the data.
- *
- * @return <code>true</code> if END event was in the data
+ * Get the GO event timestamp.
+ *
+ * @return the GO event timestamp
*/
- boolean getEndOkay();
+ Integer getGoTimestamp();
/**
- * Get the EPICS data from the run.
- *
- * @return the EPICS data from the run
+ * Get the livetime computed from the clock scaler.
+ *
+ * @return the livetime computed from the clock scaler
*/
- List<EpicsData> getEpicsData();
+ Double getLivetimeClock();
/**
- * Get the event rate (effectively the trigger rate) which is the total events divided by the number of seconds in
- * the run.
- *
- * @return the event rate
+ * Get the livetime computed from the FCUP_TDC scaler.
+ *
+ * @return the livetime computed from the FCUP_TDC scaler
*/
- double getEventRate();
+ Double getLivetimeFcupTdc();
+
+ /**
+ * Get the livetime computed from the FCUP_TRG scaler.
+ *
+ * @return the livetime computed from the FCUP_TRG scaler
+ */
+ Double getLivetimeFcupTrg();
+
+ /**
+ * Get the notes for the run (from the run spreadsheet).
+ *
+ * @return the notes for the run
+ */
+ String getNotes();
+
+ /**
+ * Get the PRESTART event timestamp.
+ *
+ * @return the PRESTART event timestamp
+ */
+ Integer getPrestartTimestamp();
/**
* Get the run number.
*
* @return the run number
*/
- int getRun();
+ Integer getRun();
+
+ /**
+ * Get the target setting for the run (string from run spreadsheet).
+ *
+ * @return the target setting for the run
+ */
+ String getTarget();
/**
- * Return <code>true</code> if the run was okay (no major errors or data corruption occurred).
- *
- * @return <code>true</code> if the run was okay
+ * Get the TI time offset in ns.
+ *
+ * @return the TI time offset in ns
*/
- boolean getRunOkay();
+ Long getTiTimeOffset();
/**
- * Get the scaler data of this run.
+ * Get the total number of events in the run.
*
- * @return the scaler data of this run
+ * @return the total number of events in the run
*/
- List<ScalerData> getScalerData();
+ Long getTotalEvents();
/**
- * Get the trigger config int values.
+ * Get the total number of EVIO files in this run.
*
- * @return the trigger config int values
+ * @return the total number of files in this run
*/
- TriggerConfig getTriggerConfig();
+ Integer getTotalFiles();
/**
- * Get the start date.
- *
- * @return the start date
+ * Get the trigger config name (from the run spreadsheet).
+ *
+ * @return the trigger config name
*/
- Date getStartDate();
+ String getTriggerConfigName();
/**
- * Get the total events in the run.
- *
- * @return the total events in the run
+ * Get the trigger rate in KHz.
+ *
+ * @return the trigger rate in KHz
*/
- int getTotalEvents();
+ Double getTriggerRate();
/**
- * Get the total number of EVIO files for this run.
+ * Get the date when this record was last updated.
*
- * @return the total number of files for this run
- */
- int getTotalFiles();
-
- /**
- * Get the number of seconds in the run which is the difference between the start and end times.
- *
- * @return the total seconds in the run
- */
- long getTotalSeconds();
-
- /**
- * Get the date when this run record was last updated.
- *
- * @return the date when this run record was last updated
+ * @return the date when this record was last updated
*/
Date getUpdated();
}
Modified: java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunSummaryDao.java
=============================================================================
--- java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunSummaryDao.java (original)
+++ java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunSummaryDao.java Wed Apr 27 11:11:32 2016
@@ -8,14 +8,7 @@
* @author Jeremy McCormick, SLAC
*/
interface RunSummaryDao {
-
- /**
- * Delete a run summary from the database including its referenced objects such as EPICS data.
- *
- * @param runSummary the run summary to delete
- */
- void deleteFullRun(int run);
-
+
/**
* Delete a run summary by run number.
*
@@ -24,26 +17,12 @@
void deleteRunSummary(int run);
/**
- * Delete a run summary but not its objects.
- *
- * @param runSummary the run summary object
- */
- void deleteRunSummary(RunSummary runSummary);
-
- /**
* Get the list of run numbers.
*
* @return the list of run numbers
*/
List<Integer> getRuns();
-
- /**
- * Get a list of run summaries without loading their objects such as EPICS data.
- *
- * @return the list of run summaries
- */
- List<RunSummary> getRunSummaries();
-
+
/**
* Get a run summary by run number without loading object state.
*
@@ -51,36 +30,13 @@
* @return the run summary object
*/
RunSummary getRunSummary(int run);
-
+
/**
- * Insert a list of run summaries along with its referenced objects such as scaler and EPICS data.
- *
- * @param runSummaryList the list of run summaries
- * @param deleteExisting <code>true</code> to allow deletion and replacement of existing run summaries
- */
- void insertFullRunSummaries(List<RunSummary> runSummaryList, boolean deleteExisting);
-
- /**
- * Insert a run summary including all its objects.
- *
- * @param runSummary the run summary object
- */
- void insertFullRunSummary(RunSummary runSummary);
-
- /**
- * Insert a run summary but not its objects.
+ * Insert a run summary.
*
* @param runSummary the run summary object
*/
void insertRunSummary(RunSummary runSummary);
-
- /**
- * Read a run summary and its objects such as scaler data.
- *
- * @param run the run number
- * @return the full run summary
- */
- RunSummary readFullRunSummary(int run);
/**
* Return <code>true</code> if a run summary exists in the database.
@@ -89,10 +45,10 @@
* @return <code>true</code> if <code>run</code> exists in the database
*/
boolean runSummaryExists(int run);
-
+
/**
- * Update a run summary but not its objects.
- *
+ * Update a run summary that already exists.
+ *
* @param runSummary the run summary to update
*/
void updateRunSummary(RunSummary runSummary);
Modified: java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java
=============================================================================
--- java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java (original)
+++ java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java Wed Apr 27 11:11:32 2016
@@ -5,13 +5,8 @@
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.GregorianCalendar;
import java.util.List;
-import java.util.TimeZone;
import java.util.logging.Logger;
-
-import org.hps.record.epics.EpicsData;
/**
* Implementation of database operations for {@link RunSummary} objects in the run database.
@@ -21,37 +16,29 @@
final class RunSummaryDaoImpl implements RunSummaryDao {
/**
- * SQL query strings.
- */
- private static final class RunSummaryQuery {
-
- /**
- * Delete by run number.
- */
- private static final String DELETE_RUN = "DELETE FROM runs WHERE run = ?";
- /**
- * Insert a record for a run.
- */
- private static final String INSERT = "INSERT INTO runs (run, start_date, end_date, nevents, nfiles, end_ok, created) VALUES(?, ?, ?, ?, ?, ?, NOW())";
- /**
- * Select all records.
- */
- private static final String SELECT_ALL = "SELECT * from runs";
- /**
- * Select record by run number.
- */
- private static final String SELECT_RUN = "SELECT run, start_date, end_date, nevents, nfiles, end_ok, run_ok, updated, created FROM runs WHERE run = ?";
- /**
- * Update information for a run.
- */
- private static final String UPDATE_RUN = "UPDATE runs SET start_date, end_date, nevents, nfiles, end_ok, run_ok WHERE run = ?";
- }
-
- /**
- * Eastern time zone.
- */
- private static Calendar CALENDAR = new GregorianCalendar(TimeZone.getTimeZone("America/New_York"));
-
+ * Delete by run number.
+ */
+ private static final String DELETE = "DELETE FROM run_summaries WHERE run = ?";
+
+ /**
+ * Insert a record for a run.
+ */
+ private static final String INSERT = "INSERT INTO run_summaries (run, nevents, nfiles, prestart_timestamp,"
+ + " go_timestamp, end_timestamp, trigger_rate, trigger_config_name, ti_time_offset,"
+ + " livetime_clock, livetime_fcup_tdc, livetime_fcup_trg, target, notes, created, updated)"
+ + " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, NOW(), NOW())";
+
+ private static final String UPDATE = "UPDATE run_summaries SET nevents = ?, nfiles = ?, prestart_timestamp = ?,"
+ + " go_timestamp = ?, end_timestamp = ?, trigger_rate = ?, trigger_config_name = ?, ti_time_offset = ?,"
+ + " livetime_clock = ?, livetime_fcup_tdc = ?, livetime_fcup_trg = ?, target = ?, notes = ?, updated = NOW()"
+ + " WHERE run = ?";
+
+
+ /**
+ * Select record by run number.
+ */
+ private static final String SELECT = "SELECT * FROM run_summaries WHERE run = ?";
+
/**
* Initialize the logger.
*/
@@ -61,60 +48,17 @@
* The database connection.
*/
private final Connection connection;
-
- /**
- * The database API for EPICS data.
- */
- private EpicsDataDao epicsDataDao = null;
-
- /**
- * The database API for scaler data.
- */
- private ScalerDataDao scalerDataDao = null;
-
- /**
- * The database API for integer trigger config.
- */
- private TriggerConfigDao triggerConfigIntDao = null;
-
+
/**
* Create a new DAO object for run summary information.
*
* @param connection the database connection
*/
RunSummaryDaoImpl(final Connection connection) {
- // Set the connection.
if (connection == null) {
throw new IllegalArgumentException("The connection is null.");
}
this.connection = connection;
-
- // Setup DAO API objects for managing complex object state.
- epicsDataDao = new EpicsDataDaoImpl(this.connection);
- scalerDataDao = new ScalerDataDaoImpl(this.connection);
- triggerConfigIntDao = new TriggerConfigDaoImpl(this.connection);
- }
-
- /**
- * Delete a run from the database including its referenced objects such as EPICS data.
- *
- * @param runSummary the run summary to delete
- */
- @Override
- public void deleteFullRun(int run) {
-
- // Delete EPICS log.
- this.epicsDataDao.deleteEpicsData(EpicsType.EPICS_1S, run);
- this.epicsDataDao.deleteEpicsData(EpicsType.EPICS_10S, run);
-
- // Delete scaler data.
- this.scalerDataDao.deleteScalerData(run);
-
- // Delete trigger config.
- this.triggerConfigIntDao.deleteTriggerConfigInt(run);
-
- // Finally delete the run summary information.
- this.deleteRunSummary(run);
}
/**
@@ -126,7 +70,7 @@
public void deleteRunSummary(final int run) {
PreparedStatement preparedStatement = null;
try {
- preparedStatement = connection.prepareStatement(RunSummaryQuery.DELETE_RUN);
+ preparedStatement = connection.prepareStatement(DELETE);
preparedStatement.setInt(1, run);
preparedStatement.executeUpdate();
} catch (final SQLException e) {
@@ -141,32 +85,7 @@
}
}
}
-
- /**
- * Delete a run summary but not its objects.
- *
- * @param runSummary the run summary object
- */
- @Override
- public void deleteRunSummary(final RunSummary runSummary) {
- PreparedStatement preparedStatement = null;
- try {
- preparedStatement = connection.prepareStatement(RunSummaryQuery.DELETE_RUN);
- preparedStatement.setInt(1, runSummary.getRun());
- preparedStatement.executeUpdate();
- } catch (final SQLException e) {
- throw new RuntimeException(e);
- } finally {
- if (preparedStatement != null) {
- try {
- preparedStatement.close();
- } catch (final SQLException e) {
- e.printStackTrace();
- }
- }
- }
- }
-
+
/**
* Get the list of run numbers.
*
@@ -177,7 +96,7 @@
final List<Integer> runs = new ArrayList<Integer>();
PreparedStatement preparedStatement = null;
try {
- preparedStatement = this.connection.prepareStatement("SELECT distinct(run) FROM runs ORDER BY run");
+ preparedStatement = this.connection.prepareStatement("SELECT distinct(run) FROM run_summaries ORDER BY run");
final ResultSet resultSet = preparedStatement.executeQuery();
while (resultSet.next()) {
final Integer run = resultSet.getInt(1);
@@ -196,47 +115,9 @@
}
return runs;
}
-
- /**
- * Get a list of run summaries without loading their objects such as EPICS data.
- *
- * @return the list of run summaries
- */
- @Override
- public List<RunSummary> getRunSummaries() {
- PreparedStatement statement = null;
- final List<RunSummary> runSummaries = new ArrayList<RunSummary>();
- try {
- statement = this.connection.prepareStatement(RunSummaryQuery.SELECT_ALL);
- final ResultSet resultSet = statement.executeQuery();
- while (resultSet.next()) {
- final RunSummaryImpl runSummary = new RunSummaryImpl(resultSet.getInt("run"));
- runSummary.setStartDate(resultSet.getTimestamp("start_date"));
- runSummary.setEndDate(resultSet.getTimestamp("end_date"));
- runSummary.setTotalEvents(resultSet.getInt("nevents"));
- runSummary.setTotalFiles(resultSet.getInt("nfiles"));
- runSummary.setEndOkay(resultSet.getBoolean("end_ok"));
- runSummary.setRunOkay(resultSet.getBoolean("run_ok"));
- runSummary.setUpdated(resultSet.getTimestamp("updated"));
- runSummary.setCreated(resultSet.getTimestamp("created"));
- runSummaries.add(runSummary);
- }
- } catch (final SQLException e) {
- throw new RuntimeException(e);
- } finally {
- if (statement != null) {
- try {
- statement.close();
- } catch (final SQLException e) {
- e.printStackTrace();
- }
- }
- }
- return runSummaries;
- }
-
- /**
- * Get a run summary by run number without loading object state.
+
+ /**
+ * Get a run summary.
*
* @param run the run number
* @return the run summary object
@@ -246,22 +127,28 @@
PreparedStatement statement = null;
RunSummaryImpl runSummary = null;
try {
- statement = this.connection.prepareStatement(RunSummaryQuery.SELECT_RUN);
+ statement = this.connection.prepareStatement(SELECT);
statement.setInt(1, run);
final ResultSet resultSet = statement.executeQuery();
if (!resultSet.next()) {
- throw new IllegalArgumentException("No record exists for run " + run + " in database.");
- }
-
+ throw new IllegalArgumentException("Run " + run + " does not exist in database.");
+ }
runSummary = new RunSummaryImpl(run);
- runSummary.setStartDate(resultSet.getTimestamp("start_date"));
- runSummary.setEndDate(resultSet.getTimestamp("end_date"));
- runSummary.setTotalEvents(resultSet.getInt("nevents"));
+ runSummary.setTotalEvents(resultSet.getLong("nevents"));
runSummary.setTotalFiles(resultSet.getInt("nfiles"));
- runSummary.setEndOkay(resultSet.getBoolean("end_ok"));
- runSummary.setRunOkay(resultSet.getBoolean("run_ok"));
+ runSummary.setPrestartTimestamp(resultSet.getInt("prestart_timestamp"));
+ runSummary.setGoTimestamp(resultSet.getInt("go_timestamp"));
+ runSummary.setEndTimestamp(resultSet.getInt("end_timestamp"));
+ runSummary.setTriggerRate(resultSet.getDouble("trigger_rate"));
+ runSummary.setTriggerConfigName(resultSet.getString("trigger_config_name"));
+ runSummary.setTiTimeOffset(resultSet.getLong("ti_time_offset"));
+ runSummary.setLivetimeClock(resultSet.getDouble("livetime_clock"));
+ runSummary.setLivetimeFcupTdc(resultSet.getDouble("livetime_fcup_tdc"));
+ runSummary.setLivetimeFcupTrg(resultSet.getDouble("livetime_fcup_trg"));
+ runSummary.setTarget(resultSet.getString("target"));
+ runSummary.setNotes(resultSet.getString("notes"));
+ runSummary.setCreated(resultSet.getTimestamp("created"));
runSummary.setUpdated(resultSet.getTimestamp("updated"));
- runSummary.setCreated(resultSet.getTimestamp("created"));
} catch (final SQLException e) {
throw new RuntimeException(e);
} finally {
@@ -275,129 +162,9 @@
}
return runSummary;
}
-
- /**
- * Insert a list of run summaries along with their complex state such as referenced scaler and EPICS data.
- *
- * @param runSummaryList the list of run summaries
- * @param deleteExisting <code>true</code> to allow deletion and replacement of existing run summaries
- */
- @Override
- public void insertFullRunSummaries(final List<RunSummary> runSummaryList, final boolean deleteExisting) {
-
- if (runSummaryList == null) {
- throw new IllegalArgumentException("The run summary list is null.");
- }
- if (runSummaryList.isEmpty()) {
- throw new IllegalArgumentException("The run summary list is empty.");
- }
-
- LOGGER.info("inserting " + runSummaryList.size() + " run summaries into database");
-
- // Turn off auto commit.
- try {
- LOGGER.info("turning off auto commit");
- this.connection.setAutoCommit(false);
- } catch (final SQLException e) {
- throw new RuntimeException(e);
- }
-
- // Loop over all runs found while crawling.
- for (final RunSummary runSummary : runSummaryList) {
-
- final int run = runSummary.getRun();
-
- LOGGER.info("inserting run summary for run " + run + " into database");
-
- // Does the run exist in the database already?
- if (this.runSummaryExists(run)) {
- // Is deleting existing rows allowed?
- if (deleteExisting) {
- LOGGER.info("deleting existing run summary");
- // Delete the existing rows.
- this.deleteFullRun(runSummary.getRun());
- } else {
- // Rows exist but updating is disallowed which is a fatal error.
- throw new IllegalStateException("Run " + runSummary.getRun()
- + " already exists and updates are disallowed.");
- }
- }
-
- // Insert full run summary information including sub-objects.
- LOGGER.info("inserting run summary");
- this.insertFullRunSummary(runSummary);
- LOGGER.info("run summary for " + run + " inserted successfully");
-
- try {
- // Commit the transaction for the run.
- LOGGER.info("committing transaction");
- this.connection.commit();
- } catch (final SQLException e1) {
- try {
- LOGGER.severe("rolling back transaction");
- // Rollback the transaction if there was an error.
- this.connection.rollback();
- } catch (final SQLException e2) {
- throw new RuntimeException(e2);
- }
- }
-
- LOGGER.info("done inserting run summary " + run);
- }
-
- try {
- LOGGER.info("turning auto commit on");
- // Turn auto commit back on.
- this.connection.setAutoCommit(true);
- } catch (final SQLException e) {
- e.printStackTrace();
- }
-
- LOGGER.info("done inserting run summaries");
- }
-
- /**
- * Insert a run summary including all its objects.
- *
- * @param runSummary the run summary object to insert
- */
- @Override
- public void insertFullRunSummary(final RunSummary runSummary) {
-
- if (runSummary == null) {
- throw new IllegalArgumentException("The run summary is null.");
- }
-
- // Insert basic run log info.
- this.insertRunSummary(runSummary);
-
- // Insert EPICS data.
- if (runSummary.getEpicsData() != null && !runSummary.getEpicsData().isEmpty()) {
- LOGGER.info("inserting " + runSummary.getEpicsData().size() + " EPICS records");
- epicsDataDao.insertEpicsData(runSummary.getEpicsData());
- } else {
- LOGGER.warning("no EPICS data to insert");
- }
-
- // Insert scaler data.
- if (runSummary.getScalerData() != null && !runSummary.getScalerData().isEmpty()) {
- LOGGER.info("inserting " + runSummary.getScalerData().size() + " scaler data records");
- scalerDataDao.insertScalerData(runSummary.getScalerData(), runSummary.getRun());
- } else {
- LOGGER.warning("no scaler data to insert");
- }
-
- // Insert trigger config.
- if (runSummary.getTriggerConfig() != null && !runSummary.getTriggerConfig().isEmpty()) {
- LOGGER.info("inserting " + runSummary.getTriggerConfig().size() + " trigger config variables");
- triggerConfigIntDao.insertTriggerConfig(runSummary.getTriggerConfig(), runSummary.getRun());
- } else {
- LOGGER.warning("no trigger config to insert");
- }
- }
-
- /**
- * Insert a run summary but not its objects.
+
+ /**
+ * Insert a run summary.
*
* @param runSummary the run summary object
*/
@@ -405,13 +172,23 @@
public void insertRunSummary(final RunSummary runSummary) {
PreparedStatement preparedStatement = null;
try {
- preparedStatement = connection.prepareStatement(RunSummaryQuery.INSERT);
+ preparedStatement = connection.prepareStatement(INSERT);
preparedStatement.setInt(1, runSummary.getRun());
- preparedStatement.setTimestamp(2, new java.sql.Timestamp(runSummary.getStartDate().getTime()), CALENDAR);
- preparedStatement.setTimestamp(3, new java.sql.Timestamp(runSummary.getEndDate().getTime()), CALENDAR);
- preparedStatement.setInt(4, runSummary.getTotalEvents());
- preparedStatement.setInt(5, runSummary.getTotalFiles());
- preparedStatement.setBoolean(6, runSummary.getEndOkay());
+ preparedStatement.setLong(2, runSummary.getTotalEvents());
+ preparedStatement.setInt(3, runSummary.getTotalFiles());
+ /* Use setObject on the rest as they may be null. */
+ preparedStatement.setObject(4, runSummary.getPrestartTimestamp());
+ preparedStatement.setObject(5, runSummary.getGoTimestamp());
+ preparedStatement.setObject(6, runSummary.getEndTimestamp());
+ preparedStatement.setObject(7, runSummary.getTriggerRate());
+ preparedStatement.setObject(8, runSummary.getTriggerConfigName());
+ preparedStatement.setObject(9, runSummary.getTiTimeOffset());
+ preparedStatement.setObject(10, runSummary.getLivetimeClock());
+ preparedStatement.setObject(11, runSummary.getLivetimeFcupTdc());
+ preparedStatement.setObject(12, runSummary.getLivetimeFcupTrg());
+ preparedStatement.setObject(13, runSummary.getTarget());
+ preparedStatement.setObject(14, runSummary.getNotes());
+ LOGGER.fine(preparedStatement.toString());
preparedStatement.executeUpdate();
} catch (final SQLException e) {
throw new RuntimeException(e);
@@ -425,34 +202,42 @@
}
}
}
-
- /**
- * Read a run summary and its objects such as scaler data.
- *
- * @param run the run number
- * @return the full run summary
- */
- @Override
- public RunSummary readFullRunSummary(final int run) {
-
- // Read main run summary but not referenced objects.
- final RunSummaryImpl runSummary = (RunSummaryImpl) this.getRunSummary(run);
-
- // Read EPICS data and set on RunSummary.
- final List<EpicsData> epicsDataList = new ArrayList<EpicsData>();
- epicsDataList.addAll(epicsDataDao.getEpicsData(EpicsType.EPICS_1S, run));
- epicsDataList.addAll(epicsDataDao.getEpicsData(EpicsType.EPICS_10S, run));
- runSummary.setEpicsData(epicsDataList);
-
- // Read scaler data and set on RunSummary.
- runSummary.setScalerData(scalerDataDao.getScalerData(run));
-
- // Read trigger config.
- runSummary.setTriggerConfig(triggerConfigIntDao.getTriggerConfig(run));
-
- return runSummary;
- }
-
+
+ @Override
+ public void updateRunSummary(RunSummary runSummary) {
+ PreparedStatement preparedStatement = null;
+ try {
+ preparedStatement = connection.prepareStatement(UPDATE);
+ preparedStatement.setLong(1, runSummary.getTotalEvents());
+ preparedStatement.setInt(2, runSummary.getTotalFiles());
+ preparedStatement.setObject(3, runSummary.getPrestartTimestamp());
+ preparedStatement.setObject(4, runSummary.getGoTimestamp());
+ preparedStatement.setObject(5, runSummary.getEndTimestamp());
+ preparedStatement.setObject(6, runSummary.getTriggerRate());
+ preparedStatement.setObject(7, runSummary.getTriggerConfigName());
+ preparedStatement.setObject(8, runSummary.getTiTimeOffset());
+ preparedStatement.setObject(9, runSummary.getLivetimeClock());
+ preparedStatement.setObject(10, runSummary.getLivetimeFcupTdc());
+ preparedStatement.setObject(11, runSummary.getLivetimeFcupTrg());
+ preparedStatement.setObject(12, runSummary.getTarget());
+ preparedStatement.setObject(13, runSummary.getNotes());
+ preparedStatement.setInt(14, runSummary.getRun());
+ LOGGER.fine(preparedStatement.toString());
+ preparedStatement.executeUpdate();
+ } catch (final SQLException e) {
+ throw new RuntimeException(e);
+ } finally {
+ if (preparedStatement != null) {
+ try {
+ preparedStatement.close();
+ } catch (final SQLException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+
+
/**
* Return <code>true</code> if a run summary exists in the database for the run number.
*
@@ -463,7 +248,7 @@
public boolean runSummaryExists(final int run) {
PreparedStatement preparedStatement = null;
try {
- preparedStatement = connection.prepareStatement("SELECT run FROM runs where run = ?");
+ preparedStatement = connection.prepareStatement("SELECT run FROM run_summaries where run = ?");
preparedStatement.setInt(1, run);
final ResultSet rs = preparedStatement.executeQuery();
return rs.first();
@@ -479,35 +264,4 @@
}
}
}
-
- /**
- * Update a run summary but not its complex state.
- *
- * @param runSummary the run summary to update
- */
- @Override
- public void updateRunSummary(final RunSummary runSummary) {
- PreparedStatement preparedStatement = null;
- try {
- preparedStatement = connection.prepareStatement(RunSummaryQuery.UPDATE_RUN);
- preparedStatement.setTimestamp(1, new java.sql.Timestamp(runSummary.getStartDate().getTime()), CALENDAR);
- preparedStatement.setTimestamp(2, new java.sql.Timestamp(runSummary.getEndDate().getTime()), CALENDAR);
- preparedStatement.setInt(3, runSummary.getTotalEvents());
- preparedStatement.setInt(4, runSummary.getTotalFiles());
- preparedStatement.setBoolean(5, runSummary.getEndOkay());
- preparedStatement.setBoolean(6, runSummary.getRunOkay());
- preparedStatement.setInt(7, runSummary.getRun());
- preparedStatement.executeUpdate();
- } catch (final SQLException e) {
- throw new RuntimeException(e);
- } finally {
- if (preparedStatement != null) {
- try {
- preparedStatement.close();
- } catch (final SQLException e) {
- e.printStackTrace();
- }
- }
- }
- }
}
Modified: java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunSummaryImpl.java
=============================================================================
--- java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunSummaryImpl.java (original)
+++ java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/RunSummaryImpl.java Wed Apr 27 11:11:32 2016
@@ -1,40 +1,13 @@
package org.hps.run.database;
-import java.io.File;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
import java.util.Date;
-import java.util.GregorianCalendar;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.TimeZone;
-
-import org.hps.datacat.client.DatasetFileFormat;
-import org.hps.record.epics.EpicsData;
-import org.hps.record.scalers.ScalerData;
-import org.hps.record.triggerbank.TriggerConfig;
/**
* Implementation of {@link RunSummary} for retrieving information from the run database.
*
* @author Jeremy McCormick, SLAC
*/
-public final class RunSummaryImpl implements RunSummary {
-
- /**
- * Default date display format.
- */
- private static final DateFormat DATE_DISPLAY = new SimpleDateFormat();
-
- static {
- /**
- * Set default time zone for display to East Coast (JLAB) where data was
- * taken.
- */
- DATE_DISPLAY.setCalendar(new GregorianCalendar(TimeZone.getTimeZone("America/New_York")));
- }
+final class RunSummaryImpl implements RunSummary {
/**
* Date this record was created.
@@ -42,60 +15,80 @@
private Date created;
/**
- * End date of run.
- */
- private Date endDate;
-
- /**
- * This is <code>true</code> if the END event is found in the data.
- */
- private boolean endOkay;
-
- /**
- * The EPICS data from the run.
- */
- private List<EpicsData> epicsDataList;
+ * Timestamp of END event.
+ */
+ private Integer endTimestamp;
+
+ /**
+ * Timestamp of GO event.
+ */
+ private Integer goTimestamp;
+
+ /**
+ * Clock livetime calculation.
+ */
+ private Double livetimeClock;
+
+ /**
+ * FCup TDC livetime calculation.
+ */
+ private Double livetimeTdc;
+
+ /**
+ * FCup TRG livetime calculation.
+ */
+ private Double livetimeTrg;
+
+ /**
+ * Notes about the run (from spreadsheet).
+ */
+ private String notes;
+
+ /**
+ * Timestamp of PRESTART event.
+ */
+ private Integer prestartTimestamp;
/**
* The run number.
*/
- private final int run;
-
- /**
- * Flag to indicate run was okay.
- */
- private boolean runOkay = true;
-
- /**
- * The scaler data for the run.
- */
- private List<ScalerData> scalerDataList;
-
- /**
- * The trigger data for the run.
- */
- private TriggerConfig triggerConfig;
-
- /**
- * Start date of run.
- */
- private Date startDate;
+ private final Integer run;
+
+ /**
+ * Target setup (string from run spreadsheet).
+ */
+ private String target;
+
+ /**
+ * TI time offset in ns.
+ */
+ private Long tiTimeOffset;
/**
* The total events found in the run across all files.
*/
- private int totalEvents = -1;
+ private Long totalEvents;
/**
* The total number of files in the run.
*/
- private int totalFiles = 0;
+ private Integer totalFiles;
+
+ /**
+ * Name of the trigger config file.
+ */
+ private String triggerConfigName;
+
+ /**
+ * Trigger rate in KHz.
+ */
+ private Double triggerRate;
/**
* Date when the run record was last updated.
*/
private Date updated;
-
+
/**
* Create a run summary.
*
@@ -105,209 +98,174 @@
this.run = run;
}
- /**
- * Get the creation date of this run record.
- *
- * @return the creation date of this run record
- */
+ @Override
public Date getCreated() {
return this.created;
}
- /**
- * Get the end date.
- *
- * @return the end date
- */
- public Date getEndDate() {
- return endDate;
- }
-
- /**
- * Return <code>true</code> if END event was found in the data.
- *
- * @return <code>true</code> if END event was in the data
- */
- public boolean getEndOkay() {
- return this.endOkay;
- }
-
- /**
- * Get the EPICS data from the run.
- *
- * @return the EPICS data from the run
- */
- public List<EpicsData> getEpicsData() {
- return this.epicsDataList;
- }
-
- /**
- * Get the event rate (effectively the trigger rate) which is the total
- * events divided by the number of seconds in the run.
- *
- * @return the event rate
- */
- public double getEventRate() {
- if (this.getTotalEvents() <= 0) {
- throw new RuntimeException("Total events is zero or invalid.");
- }
- return (double) this.getTotalEvents() / (double) this.getTotalSeconds();
- }
-
- /**
- * Get the run number.
- *
- * @return the run number
- */
- public int getRun() {
+ @Override
+ public Integer getEndTimestamp() {
+ return endTimestamp;
+ }
+
+ @Override
+ public Integer getGoTimestamp() {
+ return goTimestamp;
+ }
+
+ @Override
+ public Double getLivetimeClock() {
+ return this.livetimeClock;
+ }
+
+ @Override
+ public Double getLivetimeFcupTdc() {
+ return this.livetimeTdc;
+ }
+
+ @Override
+ public Double getLivetimeFcupTrg() {
+ return this.livetimeTrg;
+ }
+
+ @Override
+ public String getNotes() {
+ return this.notes;
+ }
+
+ @Override
+ public Integer getPrestartTimestamp() {
+ return prestartTimestamp;
+ }
+
+ @Override
+ public Integer getRun() {
return this.run;
}
- /**
- * Return <code>true</code> if the run was okay (no major errors or data
- * corruption occurred).
- *
- * @return <code>true</code> if the run was okay
- */
- public boolean getRunOkay() {
- return this.runOkay;
- }
-
- /**
- * Get the scaler data of this run.
- *
- * @return the scaler data of this run
- */
- public List<ScalerData> getScalerData() {
- return this.scalerDataList;
- }
-
- /**
- * Get the trigger config of this run.
- *
- * @return the trigger config of this run
- */
- public TriggerConfig getTriggerConfig() {
- return triggerConfig;
- }
-
- /**
- * Get the start date.
- *
- * @return the start date
- */
- public Date getStartDate() {
- return startDate;
- }
-
- /**
- * Get the total events in the run.
- *
- * @return the total events in the run
- */
- public int getTotalEvents() {
+ @Override
+ public String getTarget() {
+ return this.target;
+ }
+
+ @Override
+ public Long getTiTimeOffset() {
+ return this.tiTimeOffset;
+ }
+
+ @Override
+ public Long getTotalEvents() {
return this.totalEvents;
}
- /**
- * Get the total number of files for this run.
- *
- * @return the total number of files for this run
- */
- public int getTotalFiles() {
+ @Override
+ public Integer getTotalFiles() {
return this.totalFiles;
}
-
- /**
- * Get the number of seconds in the run which is the difference between the
- * start and end times.
- *
- * @return the total seconds in the run
- */
- public long getTotalSeconds() {
- return (endDate.getTime() - startDate.getTime()) / 1000;
- }
-
- /**
- * Get the date when this run record was last updated.
- *
- * @return the date when this run record was last updated
- */
+
+ @Override
+ public String getTriggerConfigName() {
+ return this.triggerConfigName;
+ }
+
+ @Override
+ public Double getTriggerRate() {
+ return this.triggerRate;
+ }
+
+ @Override
public Date getUpdated() {
return updated;
}
-
- /**
- * Set the creation date of the run record.
- *
- * @param created the creation date of the run record
- */
- void setCreated(final Date created) {
+
+ /**
+ * Set the creation date of the run summary.
+ *
+ * @param created the creation date
+ */
+ void setCreated(Date created) {
this.created = created;
}
/**
- * Set the start date.
- *
- * @param startDate the start date
- */
- void setEndDate(final Date endDate) {
- this.endDate = endDate;
- }
-
- /**
- * Set if end is okay.
- *
- * @param endOkay <code>true</code> if end is okay
- */
- void setEndOkay(final boolean endOkay) {
- this.endOkay = endOkay;
- }
-
- /**
- * Set the EPICS data for the run.
- *
- * @param epics the EPICS data for the run
- */
- void setEpicsData(final List<EpicsData> epicsDataList) {
- this.epicsDataList = epicsDataList;
- }
-
- /**
- * Set whether the run was "okay" meaning the data is usable for physics
- * analysis.
- *
- * @param runOkay <code>true</code> if the run is okay
- */
- void setRunOkay(final boolean runOkay) {
- this.runOkay = runOkay;
- }
-
- /**
- * Set the scaler data of the run.
- *
- * @param scalerData the scaler data
- */
- void setScalerData(final List<ScalerData> scalerDataList) {
- this.scalerDataList = scalerDataList;
- }
-
- /**
- * Set the trigger config of the run.
- *
- * @param triggerConfig the trigger config
- */
- void setTriggerConfig(final TriggerConfig triggerConfig) {
- this.triggerConfig = triggerConfig;
- }
-
- /**
- * Set the start date.
- *
- * @param startDate the start date
- */
- void setStartDate(final Date startDate) {
- this.startDate = startDate;
+ * Set the end timestamp.
+ *
+ * @param endTimestamp the end timestamp
+ */
+ void setEndTimestamp(Integer endTimestamp) {
+ this.endTimestamp = endTimestamp;
+ }
+
+ /**
+ * Set the GO timestamp.
+ *
+ * @param goTimestamp the GO timestamp
+ */
+ void setGoTimestamp(Integer goTimestamp) {
+ this.goTimestamp = goTimestamp;
+ }
+
+ /**
+ * Set the clock livetime.
+ *
+ * @param livetimeClock the clock livetime
+ */
+ void setLivetimeClock(Double livetimeClock) {
+ this.livetimeClock = livetimeClock;
+ }
+
+ /**
+ * Set the FCUP TDC livetime.
+ *
+ * @param livetimeTdc the FCUP TDC livetime
+ */
+ void setLivetimeFcupTdc(Double livetimeTdc) {
+ this.livetimeTdc = livetimeTdc;
+ }
+
+ /**
+ * Set the FCUP TRG livetime.
+ *
+ * @param livetimeTrg the FCUP TRG livetime
+ */
+ void setLivetimeFcupTrg(Double livetimeTrg) {
+ this.livetimeTrg = livetimeTrg;
+ }
+
+ /**
+ * Set the notes.
+ *
+ * @param notes the notes
+ */
+ void setNotes(String notes) {
+ this.notes = notes;
+ }
+
+ /**
+ * Set the PRESTART timestamp.
+ *
+ * @param prestartTimestamp the PRESTART timestamp
+ */
+ void setPrestartTimestamp(Integer prestartTimestamp) {
+ this.prestartTimestamp = prestartTimestamp;
+ }
+
+ /**
+ * Set the target description.
+ *
+ * @param target the target description
+ */
+ void setTarget(String target) {
+ this.target = target;
+ }
+
+ /**
+ * Set the TI time offset in ns.
+ *
+ * @param tiTimeOffset the TI time offset in ns
+ */
+ void setTiTimeOffset(Long tiTimeOffset) {
+ this.tiTimeOffset = tiTimeOffset;
}
/**
@@ -315,7 +273,7 @@
*
* @param totalEvents the total number of physics events in the run
*/
- void setTotalEvents(final int totalEvents) {
+ void setTotalEvents(final Long totalEvents) {
this.totalEvents = totalEvents;
}
@@ -324,37 +282,59 @@
*
* @param totalFiles the total number of EVIO files in the run
*/
- void setTotalFiles(final int totalFiles) {
+ void setTotalFiles(final Integer totalFiles) {
this.totalFiles = totalFiles;
}
/**
- * Set the date when this run record was last updated.
- *
- * @param updated the date when the run record was last updated
- */
- void setUpdated(final Date updated) {
+ * Set the trigger config file.
+ *
+ * @param triggerConfigName the trigger config file
+ */
+ void setTriggerConfigName(String triggerConfigName) {
+ this.triggerConfigName = triggerConfigName;
+ }
+
+ /**
+ * Set the trigger rate in KHz.
+ *
+ * @param triggerRate the trigger rate in KHz
+ */
+ void setTriggerRate(Double triggerRate) {
+ this.triggerRate = triggerRate;
+ }
+
+ /**
+ * Set the updated date of the summary.
+ *
+ * @param updated the updated date
+ */
+ void setUpdated(Date updated) {
this.updated = updated;
}
-
- /**
- * Convert this object to a string.
- *
+
+ /**
+ * Convert the object to a string.
+ *
* @return this object converted to a string
*/
@Override
public String toString() {
return "RunSummary { "
+ "run: " + this.getRun()
- + ", startDate: " + (this.getStartDate() != null ? DATE_DISPLAY.format(this.getStartDate()) : null)
- + ", endDate: " + (this.getEndDate() != null ? DATE_DISPLAY.format(this.getEndDate()) : null)
- + ", totalEvents: " + this.getTotalEvents()
- + ", totalFiles: " + this.getTotalFiles()
- + ", endOkay: " + this.getEndOkay()
- + ", runOkay: "
- + this.getRunOkay()
- + ", updated: " + this.getUpdated()
+ + ", events: " + this.getTotalEvents()
+ + ", files: " + this.getTotalFiles()
+ + ", created: " + this.getCreated()
+ + ", updated: " + this.getUpdated()
+ + ", prestartTimestamp: " + this.getPrestartTimestamp()
+ + ", goTimestamp: " + this.getGoTimestamp()
+ + ", endTimestamp: " + this.getEndTimestamp()
+ + ", triggerConfigFile: " + this.getTriggerConfigName()
+ + ", triggerRate: " + this.getTriggerRate()
+ + ", livetimeClock: " + this.getLivetimeClock()
+ + ", livetimeTdc: " + this.getLivetimeFcupTdc()
+ + ", livetimeTrg: " + this.getLivetimeFcupTrg()
+ + ", tiTimeOffset: " + this.getTiTimeOffset()
+ + " }";
}
}
Modified: java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/ScalerDataDaoImpl.java
=============================================================================
--- java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/ScalerDataDaoImpl.java (original)
+++ java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/ScalerDataDaoImpl.java Wed Apr 27 11:11:32 2016
@@ -18,15 +18,9 @@
final class ScalerDataDaoImpl implements ScalerDataDao {
/**
- * SQL query strings.
+ * Insert a record.
*/
- private static final class ScalerDataQuery {
-
- /**
- * Insert a record.
- */
- private static final String INSERT = createInsertSql();
- }
+ private static final String INSERT = createInsertSql();
/**
* Create insert SQL for scaler data.
@@ -102,7 +96,8 @@
PreparedStatement selectScalers = null;
final List<ScalerData> scalerDataList = new ArrayList<ScalerData>();
try {
- selectScalers = this.connection.prepareStatement("SELECT * FROM scalers WHERE run = ? ORDER BY event");
+ selectScalers = this.connection.prepareStatement("SELECT * FROM sc"
+ + "alers WHERE run = ? ORDER BY event");
selectScalers.setInt(1, run);
final ResultSet resultSet = selectScalers.executeQuery();
while (resultSet.next()) {
@@ -139,7 +134,7 @@
public void insertScalerData(final List<ScalerData> scalerDataList, final int run) {
PreparedStatement insertScalers = null;
try {
- insertScalers = this.connection.prepareStatement(ScalerDataQuery.INSERT);
+ insertScalers = this.connection.prepareStatement(INSERT);
for (final ScalerData scalerData : scalerDataList) {
insertScalers.setInt(1, run);
insertScalers.setInt(2, scalerData.getEventId());
Modified: java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/package-info.java
=============================================================================
--- java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/package-info.java (original)
+++ java/branches/HPSJAVA-409/run-database/src/main/java/org/hps/run/database/package-info.java Wed Apr 27 11:11:32 2016
@@ -1,4 +1,4 @@
/**
- * API for accessing the HPS run database.
+ * API for accessing and updating the HPS run database.
*/
package org.hps.run.database;
Modified: java/branches/HPSJAVA-409/steering-files/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/pom.xml (original)
+++ java/branches/HPSJAVA-409/steering-files/pom.xml Wed Apr 27 11:11:32 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/steering-files/</url>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/analysis/StarterAnalysis.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/analysis/StarterAnalysis.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/analysis/StarterAnalysis.lcsim Wed Apr 27 11:11:32 2016
@@ -20,4 +20,4 @@
</driver>
</drivers>
</lcsim>
-
+
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/EcalLedSequenceMonitoringApp.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/EcalLedSequenceMonitoringApp.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/EcalLedSequenceMonitoringApp.lcsim Wed Apr 27 11:11:32 2016
@@ -1,9 +1,9 @@
<lcsim xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
xs:noNamespaceSchemaLocation="http://www.lcsim.org/schemas/lcsim/1.0/lcsim.xsd">
<execute>
- <driver name="EventMarkerDriver"/>
+ <driver name="EventMarkerDriver"/>
<driver name="EcalRunningPedestal"/>
- <driver name="EcalRawConverter" />
+ <driver name="EcalRawConverter" />
<driver name="LedAnalysisDriver"/>
<driver name="EcalEventDisplay" /> <!-- Ecal event display -->
<driver name="AidaSaveDriver"/>
@@ -48,5 +48,5 @@
<driver name="AidaSaveDriver" type="org.lcsim.job.AidaSaveDriver">
<outputFileName>LedSequenceMonitorOut.aida</outputFileName>
</driver>
- </drivers>
+ </drivers>
</lcsim>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/EcalLedSequenceStandalone.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/EcalLedSequenceStandalone.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/EcalLedSequenceStandalone.lcsim Wed Apr 27 11:11:32 2016
@@ -5,8 +5,7 @@
<driver name="EcalRunningPedestal"/>
<driver name="EcalRawConverter" />
<driver name="LedAnalysisDriver"/>
- <!-- <driver name="EcalEventDisplay" /> --> <!-- Ecal event display -->
- <driver name="AidaSaveDriver"/>
+ <driver name="AidaSaveDriver"/>
</execute>
<drivers>
<driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
@@ -18,13 +17,14 @@
</driver>
<driver name="EcalRawConverter" type="org.hps.recon.ecal.EcalRawConverterDriver">
<ecalCollectionName>EcalCalHits</ecalCollectionName>
- <use2014Gain>false</use2014Gain>
<useTimestamps>false</useTimestamps>
<useTruthTime>false</useTruthTime>
<useRunningPedestal>true</useRunningPedestal>
<useTimeWalkCorrection>true</useTimeWalkCorrection>
+ <nsa>60</nsa> <!-- these are critical since the defaults in software are 100 - 20, as in prod. runs -->
+ <nsb>16</nsb>
</driver>
- <driver name="EcalEventDisplay" type="org.hps.monitoring.ecal.plots.EcalEventDisplay">
+ <!-- <driver name="EcalEventDisplay" type="org.hps.monitoring.ecal.plots.EcalEventDisplay">
<inputCollection>EcalCalHits</inputCollection>
<inputCollectionRaw>EcalReadoutHits</inputCollectionRaw>
<inputClusterCollection>EcalClusters</inputClusterCollection>
@@ -33,19 +33,21 @@
<minEch>0.005</minEch>
<eventRefreshRate>2</eventRefreshRate>
</driver>
+ -->
<driver name="LedAnalysisDriver" type="org.hps.monitoring.ecal.plots.EcalLedSequenceMonitor">
<isMonitoringApp>false</isMonitoringApp>
<doFullAnalysis>false</doFullAnalysis>
- <skipMin>0.25</skipMin>
+ <skipMin>0.2</skipMin>
<skipInitial>0.05</skipInitial>
<useRawEnergy>true</useRawEnergy>
- <energyCut>1</energyCut>
+ <energyCut>2.0</energyCut>
<nEventsMin>300</nEventsMin>
<evnMinDraw>0.</evnMinDraw>
- <evnMaxDraw>20000.</evnMaxDraw>
+ <evnMaxDraw>80000.</evnMaxDraw>
+ <saveTuple>false</saveTuple>
</driver>
<driver name="AidaSaveDriver" type="org.lcsim.job.AidaSaveDriver">
- <outputFileName>${outputFile}</outputFileName>
- </driver>
+ <outputFileName>${outputFile}.LedAnalysis.aida</outputFileName>
+ </driver>
</drivers>
</lcsim>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/EcalMonitoringFinal.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/EcalMonitoringFinal.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/EcalMonitoringFinal.lcsim Wed Apr 27 11:11:32 2016
@@ -1,7 +1,7 @@
<lcsim xmlns:xs="http://www.w3.org/2001/XMLSchema-instance" xs:noNamespaceSchemaLocation="http://www.lcsim.org/schemas/lcsim/1.0/lcsim.xsd">
<execute>
<driver name="EcalRawConverter" />
- <driver name="EcalClusterer" />
+ <driver name="EcalClusterer" />
<driver name="EcalMonitoringPlots" /> <!-- General plots -->
<driver name="EcalHitPlots" /> <!-- Single hit distributions -->
<driver name="EcalClusterPlots" /> <!-- Clusters distributions -->
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/EcalMonitoringOnly.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/EcalMonitoringOnly.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/EcalMonitoringOnly.lcsim Wed Apr 27 11:11:32 2016
@@ -1,7 +1,7 @@
<lcsim xmlns:xs="http://www.w3.org/2001/XMLSchema-instance" xs:noNamespaceSchemaLocation="http://www.lcsim.org/schemas/lcsim/1.0/lcsim.xsd">
<execute>
<driver name="EcalRawConverter" />
- <driver name="EcalClusterer" />
+ <driver name="EcalClusterer" />
<driver name="EcalMonitoringPlots" /> <!-- General plots -->
<driver name="EcalHitPlots" /> <!-- Single hit distributions -->
<driver name="EcalClusterPlots" /> <!-- Clusters distributions -->
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/SvtOnlineMonitoring.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/SvtOnlineMonitoring.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/SvtOnlineMonitoring.lcsim Wed Apr 27 11:11:32 2016
@@ -10,22 +10,27 @@
<driver name="PedestalPlots" />-->
<driver name="SensorOccupancy" />
<driver name="RawTrackerHitFitterDriver" />
+ <!--<driver name="SVTPulseFitPlots" />-->
<driver name="TrackerHitDriver" />
<driver name="ClusterPlots" />
<driver name="CleanupDriver" />
</execute>
<drivers>
<driver name="SensorOccupancy" type="org.hps.monitoring.drivers.svt.SensorOccupancyPlotsDriver" >
- <eventRefreshRate>100</eventRefreshRate>
+ <eventRefreshRate>100</eventRefreshRate>
<enablePositionPlots>false</enablePositionPlots>
<enableMaxSamplePlots>true</enableMaxSamplePlots>
<maxSamplePosition>3</maxSamplePosition>
<timeWindowWeight>3</timeWindowWeight>
<resetPeriod>50000</resetPeriod>
+ <saveRootFile>false</saveRootFile>
</driver>
- <driver name="SvtHitPlots" type="org.hps.monitoring.drivers.svt.SvtHitPlots" />
- <driver name="SamplesPlots" type="org.hps.monitoring.drivers.svt.SamplesPlots" />
- <driver name="PedestalPlots" type="org.hps.monitoring.drivers.svt.PedestalPlots" />
+ <driver name="SvtHitPlots" type="org.hps.monitoring.drivers.svt.SvtHitPlots">
+ <doPerChannelsSampleplots>true</doPerChannelsSampleplots>
+ <!--<saveRootFile>false</saveRootFile>-->
+ </driver>
+ <!--<driver name="SamplesPlots" type="org.hps.monitoring.drivers.svt.SamplesPlots" />-->
+ <!--<driver name="PedestalPlots" type="org.hps.monitoring.drivers.svt.PedestalPlots" />-->
<driver name="RawTrackerHitFitterDriver" type="org.hps.recon.tracking.RawTrackerHitFitterDriver">
<fitAlgorithm>Linear</fitAlgorithm>
<useTimestamps>false</useTimestamps>
@@ -41,7 +46,10 @@
<neighborDeltaT>8.0</neighborDeltaT>
<debug>false</debug>
</driver>
- <driver name="ClusterPlots" type="org.hps.monitoring.drivers.svt.SvtClusterPlots" />
+ <!--<driver name="SVTPulseFitPlots" type="org.hps.monitoring.drivers.svt.SVTPulseFitPlots" />-->
+ <driver name="ClusterPlots" type="org.hps.monitoring.drivers.svt.SvtClusterPlots">
+ <saveRootFile>false</saveRootFile>
+ </driver>
<driver name="CleanupDriver" type="org.lcsim.recon.tracking.digitization.sisim.config.ReadoutCleanupDriver" />
</drivers>
</lcsim>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/TriggerDiagnosticsMonitoring.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/TriggerDiagnosticsMonitoring.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/monitoring/TriggerDiagnosticsMonitoring.lcsim Wed Apr 27 11:11:32 2016
@@ -19,7 +19,7 @@
<driver name="ConditionsDriver" type="org.hps.conditions.ConditionsDriver">
<tag>pass0</tag>
</driver>
- <driver name="DAQConfig" type="org.hps.recon.ecal.daqconfig.DAQConfigDriver"/>
+ <driver name="DAQConfig" type="org.hps.record.daqconfig.DAQConfigDriver"/>
<driver name="EcalRawConverter" type="org.hps.recon.ecal.EcalRawConverterDriver">
<ecalCollectionName>EcalCalHits</ecalCollectionName>
<use2014Gain>false</use2014Gain>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/production/V0CandidateFilter.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/production/V0CandidateFilter.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/production/V0CandidateFilter.lcsim Wed Apr 27 11:11:32 2016
@@ -22,12 +22,6 @@
<!-- Driver to strip events -->
<driver name="StripEvent"
type="org.hps.recon.filtering.V0CandidateFilter">
- <!-- Name of the V0 Candidate Collection of ReconstructedParticles -->
- <v0CandidateCollectionName>TargetConstrainedV0Candidates</v0CandidateCollectionName>
- <!-- Maximum difference in the ECal cluster times [ns]-->
- <clusterTimingCut>2.5</clusterTimingCut>
- <!-- A tight selection requires one and only one real V0 vertex -->
- <tightConstraint>false</tightConstraint>
<!-- Setting this true keeps ALL events containing EPICS data -->
<keepEpicsDataEvents>true</keepEpicsDataEvents>
</driver>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/CommRun2014TightPairs.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/CommRun2014TightPairs.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/CommRun2014TightPairs.lcsim Wed Apr 27 11:11:32 2016
@@ -44,7 +44,7 @@
<applyBadCrystalMap>false</applyBadCrystalMap>
<use2014Gain>false</use2014Gain>
<debug>true</debug>
- </driver>
+ </driver>
<driver name="EcalClusterer" type="org.hps.recon.ecal.cluster.GTPClusterDriver">
<inputHitCollectionName>EcalCorrectedHits</inputHitCollectionName>
<clusterWindow>1</clusterWindow>
@@ -62,7 +62,7 @@
<energySlopeParamF>0.005500</energySlopeParamF>
<energySlopeLow>0.4</energySlopeLow>
<outputFileName>${outputFile}.triggers</outputFileName>
- </driver>
+ </driver>
<driver name="AidaSaveDriver"
type="org.lcsim.job.AidaSaveDriver">
<outputFileName>${outputFile}_triggerPlots</outputFileName>
@@ -73,4 +73,4 @@
</driver>
</drivers>
</lcsim>
-
+
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/EngineeringRun2014PrescaledTriggers.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/EngineeringRun2014PrescaledTriggers.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/EngineeringRun2014PrescaledTriggers.lcsim Wed Apr 27 11:11:32 2016
@@ -54,7 +54,7 @@
<nsa>100</nsa>
<nsb>20</nsb>
<use2014Gain>false</use2014Gain>
- </driver>
+ </driver>
<driver name="EcalClustererGTP" type="org.hps.recon.ecal.cluster.GTPClusterDriver">
<inputHitCollectionName>EcalCorrectedHits</inputHitCollectionName>
<outputClusterCollectionName>EcalClusters</outputClusterCollectionName>
@@ -82,7 +82,7 @@
<outputFileName>${outputFile}.triggers.singles0</outputFileName>
<lcioFile>${outputFile}.singles0.slcio</lcioFile>
<!--<verbose>true</verbose>-->
- </driver>
+ </driver>
<driver name="SinglesTrigger1" type="org.hps.readout.ecal.SinglesTriggerDriver">
<clusterCollectionName>EcalClusters</clusterCollectionName>
<deadTime>32</deadTime>
@@ -94,7 +94,7 @@
<outputFileName>${outputFile}.triggers.singles1</outputFileName>
<lcioFile>${outputFile}.singles1.slcio</lcioFile>
<!--<verbose>true</verbose>-->
- </driver>
+ </driver>
<driver name="PairTrigger0" type="org.hps.readout.ecal.FADCPrimaryTriggerDriver">
<clusterCollectionName>EcalClusters</clusterCollectionName>
<deadTime>32</deadTime>
@@ -113,7 +113,7 @@
<outputFileName>${outputFile}.triggers.pairs0</outputFileName>
<lcioFile>${outputFile}.pairs0.slcio</lcioFile>
<!--<verbose>true</verbose>-->
- </driver>
+ </driver>
<driver name="PairTrigger1" type="org.hps.readout.ecal.FADCPrimaryTriggerDriver">
<clusterCollectionName>EcalClusters</clusterCollectionName>
<deadTime>32</deadTime>
@@ -132,7 +132,7 @@
<outputFileName>${outputFile}.triggers.pairs1</outputFileName>
<lcioFile>${outputFile}.pairs1.slcio</lcioFile>
<!--<verbose>true</verbose>-->
- </driver>
+ </driver>
<driver name="AidaSaveDriver"
type="org.lcsim.job.AidaSaveDriver">
<outputFileName>${outputFile}_triggerPlots</outputFileName>
@@ -143,4 +143,4 @@
</driver>
</drivers>
</lcsim>
-
+
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/HPS2014ReadoutNoPileup.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/HPS2014ReadoutNoPileup.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/HPS2014ReadoutNoPileup.lcsim Wed Apr 27 11:11:32 2016
@@ -48,7 +48,7 @@
<deadTime>0</deadTime>
<pairCoincidence>0</pairCoincidence>
<lcioFile>${outputFile}.slcio</lcioFile>
- </driver>
+ </driver>
<driver name="SimpleSVTReadout" type="org.hps.readout.svt.SimpleSvtReadout">
<noPileup>true</noPileup>
</driver>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/HPS2014ReadoutToEvio.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/HPS2014ReadoutToEvio.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/HPS2014ReadoutToEvio.lcsim Wed Apr 27 11:11:32 2016
@@ -44,7 +44,7 @@
<applyBadCrystalMap>false</applyBadCrystalMap>
<use2014Gain>true</use2014Gain>
<!-- <debug>true</debug>-->
- </driver>
+ </driver>
<driver name="EcalClusterer" type="org.hps.recon.ecal.cluster.GTPClusterDriver">
<inputHitCollectionName>EcalCorrectedHits</inputHitCollectionName>
<clusterWindow>1</clusterWindow>
@@ -54,7 +54,7 @@
<deadTime>10</deadTime>
<pairCoincidence>2</pairCoincidence>
<outputFileName>${outputFile}.triggers</outputFileName>
- </driver>
+ </driver>
<driver name="SimpleSVTReadout" type="org.hps.readout.svt.SimpleSvtReadout" />
<driver name="ClockDriver" type="org.hps.readout.ecal.ClockDriver"/>
<driver name="CleanupDriver" type="org.lcsim.recon.tracking.digitization.sisim.config.ReadoutCleanupDriver">
@@ -62,4 +62,4 @@
</driver>
</drivers>
</lcsim>
-
+
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/HPS2014TruthReadoutToLcio.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/HPS2014TruthReadoutToLcio.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/HPS2014TruthReadoutToLcio.lcsim Wed Apr 27 11:11:32 2016
@@ -47,7 +47,7 @@
<applyBadCrystalMap>false</applyBadCrystalMap>
<use2014Gain>true</use2014Gain>
<!-- <debug>true</debug>-->
- </driver>
+ </driver>
<driver name="EcalClusterer" type="org.hps.recon.ecal.cluster.GTPClusterDriver">
<inputHitCollectionName>EcalCorrectedHits</inputHitCollectionName>
<clusterWindow>1</clusterWindow>
@@ -57,7 +57,7 @@
<deadTime>10</deadTime>
<pairCoincidence>2</pairCoincidence>
<outputFileName>${outputFile}.triggers</outputFileName>
- </driver>
+ </driver>
<driver name="SimpleSVTReadout" type="org.hps.readout.svt.SimpleSvtReadout">
</driver>
<driver name="AidaSaveDriver"
@@ -70,4 +70,4 @@
</driver>
</drivers>
</lcsim>
-
+
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/LcioToEvio.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/LcioToEvio.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/LcioToEvio.lcsim Wed Apr 27 11:11:32 2016
@@ -15,7 +15,7 @@
<period>1</period>
<deadTime>0</deadTime>
<triggerDelay>0</triggerDelay>
- </driver>
+ </driver>
<driver name="TestRunReconToEvio" type="org.hps.evio.TestRunTriggeredReconToEvio">
<evioOutputFile>${outputFile}.evio</evioOutputFile>
</driver>
@@ -23,4 +23,4 @@
</driver>
</drivers>
</lcsim>
-
+
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/TestRunNoPileup.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/TestRunNoPileup.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/TestRunNoPileup.lcsim Wed Apr 27 11:11:32 2016
@@ -39,7 +39,7 @@
<clusterCollectionName>EcalTriggerClusters</clusterCollectionName>
<deadTime>0</deadTime>
<lcioFile>${outputFile}.slcio</lcioFile>
- </driver>
+ </driver>
<driver name="ClockDriver" type="org.hps.readout.ecal.ClockDriver"/>
<driver name="CleanupDriver" type="org.lcsim.recon.tracking.digitization.sisim.config.ReadoutCleanupDriver"/>
</drivers>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/TestRunReadoutToEvio.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/TestRunReadoutToEvio.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/readout/TestRunReadoutToEvio.lcsim Wed Apr 27 11:11:32 2016
@@ -20,7 +20,7 @@
<driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
<eventInterval>1000</eventInterval>
</driver>
- <driver name="BadChannelFilter" type="org.hps.recon.tracking.SVTBadChannelFilterDriver" />
+ <driver name="BadChannelFilter" type="org.hps.recon.tracking.SVTBadChannelFilterDriver" />
<driver name="EcalReadout" type="org.hps.readout.ecal.FADCEcalReadoutDriver">
<coincidenceWindow>8</coincidenceWindow>
<ecalName>Ecal</ecalName>
@@ -39,7 +39,7 @@
<rawCollectionName>EcalRawHits</rawCollectionName>
<ecalCollectionName>EcalCorrectedHits</ecalCollectionName>
<scale>1</scale>
- </driver>
+ </driver>
<driver name="EcalTriggerFilter" type="org.hps.recon.ecal.EcalTriggerFilterDriver">
<inputCollection>EcalCorrectedHits</inputCollection>
<outputCollection>EcalFilteredHits</outputCollection>
@@ -55,7 +55,7 @@
<clusterCollectionName>EcalClusters</clusterCollectionName>
<deadTime>10</deadTime>
<outputFileName>${outputFile}.triggers</outputFileName>
- </driver>
+ </driver>
<driver name="SimpleSVTReadout" type="org.hps.readout.svt.SimpleSvtReadout" />
<driver name="ClockDriver" type="org.hps.readout.ecal.ClockDriver"/>
<driver name="TestRunReconToEvio" type="org.hps.evio.TestRunTriggeredReconToEvio">
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/recon/EngineeringRun2014EcalReconMC.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/recon/EngineeringRun2014EcalReconMC.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/recon/EngineeringRun2014EcalReconMC.lcsim Wed Apr 27 11:11:32 2016
@@ -18,6 +18,7 @@
<execute>
<!--<driver name="EventMarkerDriver" />-->
<driver name="EcalRunningPedestal"/>
+ <driver name="RfFitter"/>
<driver name="EcalRawConverter" />
<driver name="ReconClusterer" />
<driver name="GTPOnlineClusterer" />
@@ -28,6 +29,8 @@
<driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
<eventInterval>100</eventInterval>
</driver>
+ <driver name="RfFitter" type="org.hps.evio.RfFitterDriver"/>
+
<driver name="ConditionsDriver" type="org.hps.conditions.ConditionsDriver">
<detectorName>HPS-ECalCommissioning-v3-fieldmap</detectorName>
<runNumber>3422</runNumber>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/recon/EngineeringRun2015FullRecon.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/recon/EngineeringRun2015FullRecon.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/recon/EngineeringRun2015FullRecon.lcsim Wed Apr 27 11:11:32 2016
@@ -7,7 +7,9 @@
@author <a href="mailto:[log in to unmask]">Omar Moreno</a>
-->
<execute>
-
+ <!--RF driver-->
+ <driver name="RfFitter"/>
+
<!-- Ecal reconstruction drivers -->
<driver name="EcalRunningPedestal"/>
<driver name="EcalRawConverter" />
@@ -51,7 +53,6 @@
ReconstructedParticle types are properly set.
-->
<driver name="MergeTrackCollections"/>
- <driver name="GBLOutputDriver" />
<driver name="GBLRefitterDriver" />
<driver name="TrackDataDriver" />
<driver name="ReconParticleDriver" />
@@ -61,7 +62,9 @@
<drivers>
<driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
<eventInterval>1000</eventInterval>
- </driver>
+ </driver>
+
+ <driver name="RfFitter" type="org.hps.evio.RfFitterDriver"/>
<!-- Ecal reconstruction drivers -->
<driver name="EcalRunningPedestal" type="org.hps.recon.ecal.EcalRunningPedestalDriver">
@@ -134,8 +137,7 @@
<ecalClusterCollectionName>EcalClustersCorr</ecalClusterCollectionName>
<trackCollectionNames>MatchedTracks GBLTracks</trackCollectionNames>
</driver>
- <driver name="GBLOutputDriver" type="org.hps.recon.tracking.gbl.GBLOutputDriver"/>
- <driver name="GBLRefitterDriver" type="org.hps.recon.tracking.gbl.HpsGblRefitter"/>
+ <driver name="GBLRefitterDriver" type="org.hps.recon.tracking.gbl.GBLRefitterDriver"/>
<driver name="LCIOWriter" type="org.lcsim.util.loop.LCIODriver">
<outputFilePath>${outputFile}.slcio</outputFilePath>
</driver>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/recon/EngineeringRun2015FullReconMC.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/recon/EngineeringRun2015FullReconMC.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/recon/EngineeringRun2015FullReconMC.lcsim Wed Apr 27 11:11:32 2016
@@ -11,6 +11,7 @@
<driver name="ConditionsDriver"/>
<driver name="EventMarkerDriver"/>
+ <driver name="RfFitter"/>
<!-- Ecal reconstruction drivers -->
<driver name="EcalRawConverter" />
<driver name="ReconClusterer" />
@@ -53,7 +54,6 @@
ReconstructedParticle types are properly set.
-->
<driver name="MergeTrackCollections"/>
- <driver name="GBLOutputDriver" />
<driver name="GBLRefitterDriver" />
<driver name="TrackDataDriver" />
<driver name="ReconParticleDriver" />
@@ -69,7 +69,9 @@
<driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
<eventInterval>1000</eventInterval>
- </driver>
+ </driver>
+ <driver name="RfFitter" type="org.hps.evio.RfFitterDriver"/>
+
<driver name="RawTrackerHitSensorSetup" type="org.lcsim.recon.tracking.digitization.sisim.config.RawTrackerHitSensorSetup">
<readoutCollections>SVTRawTrackerHits</readoutCollections>
</driver>
@@ -119,8 +121,7 @@
<rmsTimeCut>8.0</rmsTimeCut>
</driver>
<driver name="MergeTrackCollections" type="org.hps.recon.tracking.MergeTrackCollections" />
- <driver name="GBLOutputDriver" type="org.hps.recon.tracking.gbl.GBLOutputDriver"/>
- <driver name="GBLRefitterDriver" type="org.hps.recon.tracking.gbl.HpsGblRefitter"/>
+ <driver name="GBLRefitterDriver" type="org.hps.recon.tracking.gbl.GBLRefitterDriver"/>
<driver name="EcalRawConverter" type="org.hps.recon.ecal.EcalRawConverterDriver">
<ecalCollectionName>EcalCalHits</ecalCollectionName>
<fixShapeParameter>true</fixShapeParameter>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/celentan/LedAnalysisFromEvio.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/celentan/LedAnalysisFromEvio.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/celentan/LedAnalysisFromEvio.lcsim Wed Apr 27 11:11:32 2016
@@ -1,9 +1,9 @@
<lcsim xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
xs:noNamespaceSchemaLocation="http://www.lcsim.org/schemas/lcsim/1.0/lcsim.xsd">
<execute>
- <driver name="EventMarkerDriver"/>
+ <driver name="EventMarkerDriver"/>
<driver name="EcalRunningPedestal"/>
- <driver name="EcalRawConverter" />
+ <driver name="EcalRawConverter" />
<driver name="AidaSaveDriver"/>
</execute>
<drivers>
@@ -25,5 +25,5 @@
<driver name="AidaSaveDriver" type="org.lcsim.job.AidaSaveDriver">
<outputFileName>${outputFile}.aida</outputFileName>
</driver>
- </drivers>
+ </drivers>
</lcsim>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/ClusterRecon.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/ClusterRecon.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/ClusterRecon.lcsim Wed Apr 27 11:11:32 2016
@@ -5,7 +5,7 @@
@author Holly Szumila <[log in to unmask]>
-->
<execute>
- <driver name="EventMarkerDriver"/>
+ <driver name="EventMarkerDriver"/>
<driver name="EcalRunningPedestal"/>
<driver name="EcalRawConverter" />
<driver name="ReconClusterer" />
@@ -13,7 +13,7 @@
<driver name="LCIOWriter"/>
</execute>
<drivers>
- <driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
+ <driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
<eventInterval>1000</eventInterval>
</driver>
<driver name="EcalRunningPedestal" type="org.hps.recon.ecal.EcalRunningPedestalDriver">
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/EcalSimReadout.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/EcalSimReadout.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/EcalSimReadout.lcsim Wed Apr 27 11:11:32 2016
@@ -21,11 +21,11 @@
<eventInterval>1000</eventInterval>
</driver>
<driver name="ConditionsDriver" type="org.hps.conditions.ConditionsDriver">
- <runNumber>0</runNumber>
- <freeze>true</freeze>
- <detectorName>HPS-ECalCommissioning-v3-fieldmap</detectorName>
- </driver>
-<!-- <driver name="TestRunReconToLcio" type="org.hps.evio.TestRunTriggeredReconToLcio">
+ <runNumber>0</runNumber>
+ <freeze>true</freeze>
+ <detectorName>HPS-ECalCommissioning-v3-fieldmap</detectorName>
+ </driver>
+<!-- <driver name="TestRunReconToLcio" type="org.hps.evio.TestRunTriggeredReconToLcio">
<outputFile>${outputFile}.slcio</outputFile>
<rejectBackground>true</rejectBackground>
</driver>-->
@@ -45,7 +45,7 @@
<ecalCollectionName>EcalCorrectedHits</ecalCollectionName>
<applyBadCrystalMap>false</applyBadCrystalMap>
<use2014Gain>false</use2014Gain>
- </driver>
+ </driver>
<driver name="ReconClusterer" type="org.hps.recon.ecal.cluster.ReconClusterDriver">
<logLevel>WARNING</logLevel>
<inputHitCollectionName>EcalCorrectedHits</inputHitCollectionName>
@@ -65,4 +65,4 @@
</driver>
</drivers>
</lcsim>
-
+
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/EngineeringRun2015EcalOnly.lcsim
=============================================================================
Binary files - no diff available.
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/EngineeringRun2015_FEEIter_Filter.lcsim
=============================================================================
Binary files - no diff available.
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/QuickEcalReadout.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/QuickEcalReadout.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/holly/QuickEcalReadout.lcsim Wed Apr 27 11:11:32 2016
@@ -18,11 +18,11 @@
<eventInterval>1000</eventInterval>
</driver>
<driver name="ConditionsDriver" type="org.hps.conditions.ConditionsDriver">
- <runNumber>0</runNumber>
- <freeze>true</freeze>
- <detectorName>HPS-Proposal2014-v7-2pt2</detectorName>
- </driver>
- <driver name="TestRunReconToLcio" type="org.hps.evio.TestRunTriggeredReconToLcio">
+ <runNumber>0</runNumber>
+ <freeze>true</freeze>
+ <detectorName>HPS-Proposal2014-v7-2pt2</detectorName>
+ </driver>
+ <driver name="TestRunReconToLcio" type="org.hps.evio.TestRunTriggeredReconToLcio">
<outputFile>${outputFile}.slcio</outputFile>
<rejectBackground>true</rejectBackground>
</driver>
@@ -44,4 +44,4 @@
</driver>
</drivers>
</lcsim>
-
+
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/EcalScoring.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/EcalScoring.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/EcalScoring.lcsim Wed Apr 27 11:11:32 2016
@@ -25,4 +25,4 @@
</drivers>
</lcsim>
-
+
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/HitTimes.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/HitTimes.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/HitTimes.lcsim Wed Apr 27 11:11:32 2016
@@ -1,14 +1,14 @@
<lcsim xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
xs:noNamespaceSchemaLocation="http://www.lcsim.org/schemas/lcsim/1.0/lcsim.xsd">
- <execute>
- <driver name="EventMarkerDriver"/>
- <driver name="HitTimePrintDriver"/>
- </execute>
- <drivers>
- <driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
- <eventInterval>1</eventInterval>
- </driver>
- <driver name="HitTimePrintDriver" type="org.hps.users.meeg.HitTimePrintDriver"/>
- </drivers>
+ <execute>
+ <driver name="EventMarkerDriver"/>
+ <driver name="HitTimePrintDriver"/>
+ </execute>
+ <drivers>
+ <driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
+ <eventInterval>1</eventInterval>
+ </driver>
+ <driver name="HitTimePrintDriver" type="org.hps.users.meeg.HitTimePrintDriver"/>
+ </drivers>
</lcsim>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/PairsSkimmer.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/PairsSkimmer.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/PairsSkimmer.lcsim Wed Apr 27 11:11:32 2016
@@ -44,4 +44,4 @@
</driver>
</drivers>
</lcsim>
-
+
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/SmallHits.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/SmallHits.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/SmallHits.lcsim Wed Apr 27 11:11:32 2016
@@ -1,18 +1,18 @@
<lcsim xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
xs:noNamespaceSchemaLocation="http://www.lcsim.org/schemas/lcsim/1.0/lcsim.xsd">
- <execute>
- <driver name="EventMarkerDriver"/>
- <driver name="SVTSmallHitsDriver"/>
+ <execute>
+ <driver name="EventMarkerDriver"/>
+ <driver name="SVTSmallHitsDriver"/>
<driver name="AidaSaveDriver"/>
- </execute>
- <drivers>
- <driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
- <eventInterval>1000</eventInterval>
- </driver>
- <driver name="SVTSmallHitsDriver" type="org.hps.users.meeg.SVTSmallHitsDriver"/>
+ </execute>
+ <drivers>
+ <driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
+ <eventInterval>1000</eventInterval>
+ </driver>
+ <driver name="SVTSmallHitsDriver" type="org.hps.users.meeg.SVTSmallHitsDriver"/>
<driver name="AidaSaveDriver" type="org.lcsim.job.AidaSaveDriver">
<outputFileName>smallhits.root</outputFileName>
</driver>
- </drivers>
+ </drivers>
</lcsim>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/ecal_fadc_bkgd.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/ecal_fadc_bkgd.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/ecal_fadc_bkgd.lcsim Wed Apr 27 11:11:32 2016
@@ -10,11 +10,11 @@
<driver name="EcalConverter" />
<driver name="EcalClusterer" />
<driver name="EcalTrigger" />
-<!-- <driver name="EcalPlots"/>-->
+<!-- <driver name="EcalPlots"/>-->
<driver name="EcalFADCPlots" />
<driver name="EcalTriggerPlots" />
<driver name="MCParticlePlots" />
-<!-- <driver name="Writer"/>-->
+<!-- <driver name="Writer"/>-->
<driver name="AidaSaveDriver" />
<driver name="ClockDriver" />
</execute>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/raw_triggers.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/raw_triggers.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/meeg/raw_triggers.lcsim Wed Apr 27 11:11:32 2016
@@ -5,10 +5,10 @@
-->
<lcsim xmlns:xs="http://www.w3.org/2001/XMLSchema-instance" xs:noNamespaceSchemaLocation="http://www.lcsim.org/schemas/lcsim/1.0/lcsim.xsd">
<execute>
-<!-- <driver name="EventMarkerDriver"/>-->
+<!-- <driver name="EventMarkerDriver"/>-->
<driver name="EcalClusterer" />
<driver name="EcalTrigger" />
-<!-- <driver name="MCParticlePlots"/>-->
+<!-- <driver name="MCParticlePlots"/>-->
<driver name="AidaSaveDriver" />
<driver name="ClockDriver" />
</execute>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/phansson/EngineeringRun2015FullRecon_Pass2_Gbl.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/phansson/EngineeringRun2015FullRecon_Pass2_Gbl.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/phansson/EngineeringRun2015FullRecon_Pass2_Gbl.lcsim Wed Apr 27 11:11:32 2016
@@ -56,7 +56,7 @@
<driver name="GBLOutputDriver" />
<driver name="GBLRefitterDriver" />
<driver name="TrackDataDriver" />
- <!--<driver name="GblResidualEcalDriver" />-->
+ <driver name="GblResidualEcalDriver" />
<!--<driver name="TrackExtrapolationTestDriver"/>-->
<driver name="ReconParticleDriver" />
<!--<driver name="TrackingReconstructionPlots" />-->
@@ -162,13 +162,13 @@
<outputFileName>${outputFile}.root</outputFileName>
</driver>
<driver name="AlignmentFilterDriver" type="org.hps.recon.filtering.SvtAlignmentFilter"/>
- <driver name="GblResidualEcalDriver" type="org.hps.users.phansson.ECalExtrapolationDriver"/>
+ <driver name="GblResidualEcalDriver" type="org.hps.users.phansson.GblResidualDriver"/>
<driver name="TrackExtrapolationTestDriver" type="org.hps.users.phansson.TrackExtrapolationTestDriver"/>
<driver name="TrackingReconstructionPlots" type="org.hps.users.phansson.TrackingReconstructionPlots">
<showPlots>False</showPlots>
</driver>
<driver name="TimerDriver1" type="org.hps.util.TimerDriver"/>
- <driver name="GeomChecker" type="org.hps.users.phansson.TrackingGeometryChecker"/>
+ <driver name="GeomChecker" type="org.hps.users.phansson.tools.TrackingGeometryChecker"/>
</drivers>
</lcsim>
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/phansson/TestRunOfflineRecon.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/phansson/TestRunOfflineRecon.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/phansson/TestRunOfflineRecon.lcsim Wed Apr 27 11:11:32 2016
@@ -39,7 +39,7 @@
</driver>
<driver name="TrackerReconDriver" type="org.hps.recon.tracking.TrackerReconDriver">
<debug>false</debug>
- <!--<strategyResource>/org/hps/recon/tracking/strategies/HPS-Test-All.xml</strategyResource>-->
+ <!--<strategyResource>/org/hps/recon/tracking/strategies/HPS-Test-All.xml</strategyResource>-->
<strategyResource>/org/hps/recon/tracking/strategies/HPS-TestRun-357.xml</strategyResource>
</driver>
<driver name="EcalRawConverter" type="org.hps.recon.ecal.EcalRawConverterDriver">
Modified: java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/rafo/TestSteering.lcsim
=============================================================================
--- java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/rafo/TestSteering.lcsim (original)
+++ java/branches/HPSJAVA-409/steering-files/src/main/resources/org/hps/steering/users/rafo/TestSteering.lcsim Wed Apr 27 11:11:32 2016
@@ -17,4 +17,4 @@
</driver>
</drivers>
</lcsim>
-
+
Modified: java/branches/HPSJAVA-409/tracking/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/tracking/pom.xml (original)
+++ java/branches/HPSJAVA-409/tracking/pom.xml Wed Apr 27 11:11:32 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/tracking/</url>
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/readout/svt/FpgaData.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/readout/svt/FpgaData.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/readout/svt/FpgaData.java Wed Apr 27 11:11:32 2016
@@ -39,8 +39,9 @@
/**
*
- * @param temperature : array containing hybrid temperatures
- * @param tail : word present at the end of a FPGA data set
+ * @param fpgaID FPGA ID
+ * @param data : array containing hybrid temperatures
+ * @param tail : word present at the end of a FPGA data set
*/
public FpgaData(int fpgaID, int[] data, int tail) {
this.fpgaID = fpgaID;
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/DumbShaperFit.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/DumbShaperFit.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/DumbShaperFit.java Wed Apr 27 11:11:32 2016
@@ -34,7 +34,7 @@
}
public Collection<ShapeFitParameters> fitShape(int channel, short[] samples, HpsSiSensor sensor){
-
+
ShapeFitParameters fitresults = new ShapeFitParameters();
double[] pedSub = {-99.0, -99.0, -99.0, -99.0, -99.0, -99.0};
double maxADC = -99999;
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/HelicalTrackHitDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/HelicalTrackHitDriver.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/HelicalTrackHitDriver.java Wed Apr 27 11:11:32 2016
@@ -41,15 +41,39 @@
import org.lcsim.recon.tracking.digitization.sisim.TrackerHitType;
/**
- * Driver used to create stereo hits from clusters.
- *
+ * This <code>Driver</code> creates 3D hits from SVT strip clusters of stereo pairs, which by default
+ * are read from the <b>StripClusterer_SiTrackerHitStrip1D</b> input collection.
+ * <p>
+ * The following collections will be added to the output event:
+ * <ul>
+ * <li>HelicalTrackHits</li>
+ * <li>RotatedHelicalTrackHits</li>
+ * <li>HelicalTrackHitRelations</li>
+ * <li>RotatedHelicalTrackHitRelations</li>
+ * <li>HelicalTrackMCRelations</li>
+ * <li>RotatedHelicalTrackMCRelations<li>
+ * </ul>
+ * <p>
+ * Class has the following default parameters values in the code (or from <code>EngineeringRun2015FullRecon.lcsim</code>):
+ * <ul>
+ * <li>{@link #setClusterTimeCut(double)} - 12.0 (ns)</li>
+ * <li>{@link #setMaxDt(double)} - 16.0 (ns)</li>
+ * <li>{@link #setClusterAmplitudeCut(double)} - 400.0</li>
+ * <li>{@link #setRejectGhostHits(boolean)} - <code>false</code></li>
+ * <li>{@link #setDebug(boolean)} - <code>false</code></li>
+ * <li>{@link #setEpsParallel(double)} - 0.013</li>
+ * <li>{@link #setEpsStereo(double)} - 0.01</li>
+ * <li>{@link #setSaveAxialHits(boolean)} - <code>false</code></li>
+ * <li>{@link #setStripHitsCollectionName(String)} - StripClusterer_SiTrackerHitStrip1D</li>
+ * <li>{@link #setHelicalTrackHitRelationsCollectionName(String)} - HelicalTrackHitRelations</li>
+ * <li>{@link #setHelicalTrackMCRelationsCollectionName(String)} - HelicalTrackMCRelations</li>
+ * <li>{@link #setOutputHitCollectionName(String)} - HelicalTrackHits</li>
+ * </ul>
*
* @author Mathew Graham <[log in to unmask]>
* @author Per Hansson <[log in to unmask]>
* @author Omar Moreno <[log in to unmask]>
- *
*/
-// TODO: Add class documentation.
// FIXME: The option to run using the Common geometry should be removed
public class HelicalTrackHitDriver extends org.lcsim.fit.helicaltrack.HelicalTrackHitDriver {
@@ -64,8 +88,8 @@
private final List<String> _colnames = new ArrayList<String>();
private boolean _doTransformToTracking = true;
private boolean _saveAxialHits = false;
- private String _axialname = "AxialTrackHits";
- private String _axialmcrelname = "AxialTrackHitsMCRelations";
+ private final String _axialname = "AxialTrackHits";
+ private final String _axialmcrelname = "AxialTrackHitsMCRelations";
private boolean rejectGhostHits = false;
public enum LayerGeometryType {
@@ -143,7 +167,7 @@
public void setEpsStereo(double eps) {
this._crosser.setEpsStereoAngle(eps);
}
-
+
/**
*
* @param trans
@@ -199,8 +223,9 @@
// Create an LCRelation from a HelicalTrackHit to an MC particle used to
// create it
List<LCRelation> mcrelations = new ArrayList<LCRelation>();
- RelationalTable hittomc = new BaseRelationalTable(RelationalTable.Mode.ONE_TO_MANY, RelationalTable.Weighting.UNWEIGHTED);
+ RelationalTable hittomc = null;
if (event.hasCollection(LCRelation.class, "SVTTrueHitRelations")) {
+ hittomc = new BaseRelationalTable(RelationalTable.Mode.ONE_TO_MANY, RelationalTable.Weighting.UNWEIGHTED);
List<LCRelation> trueHitRelations = event.get(LCRelation.class, "SVTTrueHitRelations");
for (LCRelation relation : trueHitRelations) {
if (relation != null && relation.getFrom() != null && relation.getTo() != null) {
@@ -241,9 +266,11 @@
// Create a HelicalTrackStrip for this hit
HelicalTrackStrip strip = makeDigiStrip(h);
- for (RawTrackerHit rth : h.getRawHits()) {
- for (Object simHit : hittomc.allFrom(rth)) {
- strip.addMCParticle(((SimTrackerHit) simHit).getMCParticle());
+ if (hittomc != null) {
+ for (RawTrackerHit rth : h.getRawHits()) {
+ for (Object simHit : hittomc.allFrom(rth)) {
+ strip.addMCParticle(((SimTrackerHit) simHit).getMCParticle());
+ }
}
}
@@ -259,10 +286,12 @@
if (((HpsSiSensor) h.getSensor()).isAxial()) {
HelicalTrack2DHit haxial = makeDigiAxialHit(h);
axialhits.add(haxial);
- List<RawTrackerHit> rl = haxial.getRawHits();
- for (RawTrackerHit rth : rl) {
- for (Object simHit : hittomc.allFrom(rth)) {
- haxial.addMCParticle(((SimTrackerHit) simHit).getMCParticle());
+ if (hittomc != null) {
+ List<RawTrackerHit> rl = haxial.getRawHits();
+ for (RawTrackerHit rth : rl) {
+ for (Object simHit : hittomc.allFrom(rth)) {
+ haxial.addMCParticle(((SimTrackerHit) simHit).getMCParticle());
+ }
}
}
axialmcrelations.add(new MyLCRelation(haxial, haxial.getMCParticles()));
@@ -427,7 +456,7 @@
Collection<TrackerHit> htsList = hittostrip.allFrom(cross);
for (TrackerHit strip : htsList) {
Set<HelicalTrackHit> sharedCrosses = hittostrip.allTo(strip);
- System.out.println(sharedCrosses.size());
+// System.out.println(sharedCrosses.size());
if (sharedCrosses.size() > 1) {
// this.getLogger().warning(String.format("removing possible ghost hit"));
iter.remove();
@@ -456,14 +485,18 @@
helhits.addAll(stereoCrosses);
event.put(_outname, helhits, HelicalTrackHit.class, 0);
event.put(_hitrelname, hitrelations, LCRelation.class, 0);
- event.put(_mcrelname, mcrelations, LCRelation.class, 0);
+ if (hittomc != null) {
+ event.put(_mcrelname, mcrelations, LCRelation.class, 0);
+ }
if (_saveAxialHits) {
event.put(_axialname, axialhits, HelicalTrackHit.class, 0);
- event.put(_axialmcrelname, axialmcrelations, LCRelation.class, 0);
- System.out.println(this.getClass().getSimpleName() + " : number of " + _axialmcrelname + " found = " + axialmcrelations.size());
+ if (hittomc != null) {
+ event.put(_axialmcrelname, axialmcrelations, LCRelation.class, 0);
+ System.out.println(this.getClass().getSimpleName() + " : number of " + _axialmcrelname + " found = " + axialmcrelations.size());
+ }
}
if (_doTransformToTracking) {
- addRotatedHitsToEvent(event, stereoCrosses);
+ addRotatedHitsToEvent(event, stereoCrosses, hittomc != null);
if (_saveAxialHits) {
addRotated2DHitsToEvent(event, axialhits);
}
@@ -595,7 +628,7 @@
return strip;
}
- private void addRotatedHitsToEvent(EventHeader event, List<HelicalTrackCross> stereohits) {
+ private void addRotatedHitsToEvent(EventHeader event, List<HelicalTrackCross> stereohits, boolean isMC) {
List<HelicalTrackHit> rotatedhits = new ArrayList<HelicalTrackHit>();
List<LCRelation> hthrelations = new ArrayList<LCRelation>();
@@ -634,10 +667,10 @@
strip1.add(rotatedstriphits.get(0));
strip2.add(rotatedstriphits.get(1));
List<HelicalTrackCross> newhits = _crosser.MakeHits(strip1, strip2);
- if(newhits.size()!=1) {
+ if (newhits.size() != 1) {
throw new RuntimeException("no rotated cross was created!?");
}
- HelicalTrackCross newhit = newhits.get(0);
+ HelicalTrackCross newhit = newhits.get(0);
//HelicalTrackCross newhit = new HelicalTrackCross(rotatedstriphits.get(0), rotatedstriphits.get(1));
for (MCParticle mcp : cross.getMCParticles()) {
newhit.addMCParticle(mcp);
@@ -651,7 +684,9 @@
event.put("Rotated" + _outname, rotatedhits, HelicalTrackHit.class, 0);
event.put("Rotated" + _hitrelname, hthrelations, LCRelation.class, 0);
- event.put("Rotated" + _mcrelname, mcrelations, LCRelation.class, 0);
+ if (isMC) {
+ event.put("Rotated" + _mcrelname, mcrelations, LCRelation.class, 0);
+ }
}
/*
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/HitTimeTrackCheck.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/HitTimeTrackCheck.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/HitTimeTrackCheck.java Wed Apr 27 11:11:32 2016
@@ -28,9 +28,15 @@
this.rmsTimeCut = rmsTimeCut;
}
+ public void setDebug(boolean debug) {
+ this.debug = debug;
+ }
+
@Override
public boolean checkSeed(SeedCandidate candidate) {
-// System.out.format("seed with %d hits\n", candidate.getHits().size());
+ if (debug) {
+ System.out.format("%s: seed with %d hits\n", this.getClass().getSimpleName(), candidate.getHits().size());
+ }
int nStrips = 0;
double meanTime = 0;
for (HelicalTrackHit hth : candidate.getHits()) {
@@ -50,15 +56,16 @@
}
// if (nStrips<6) return true;
seedsChecked++;
-// rmsTime = Math.sqrt(rmsTime / nStrips);
-// System.out.format("seed RMS %f on %d hits\n",rmsTime,nStrips);
+ if (debug) {
+ System.out.format("%s: seed RMS %f on %d hits\n", this.getClass().getSimpleName(), Math.sqrt(rmsTime / nStrips), nStrips);
+ }
boolean passCheck = (rmsTime < minTrackHits * rmsTimeCut * rmsTimeCut);
// boolean passCheck = (rmsTime < minTrackHits * rmsTimeCut);
if (passCheck) {
seedsPassed++;
}
if (debug && seedsChecked % 10000 == 0) {
- System.out.format("Checked %d seeds, %d passed (%d failed)\n", seedsChecked, seedsPassed, seedsChecked - seedsPassed);
+ System.out.format("%s: Checked %d seeds, %d passed (%d failed)\n", this.getClass().getSimpleName(), seedsChecked, seedsPassed, seedsChecked - seedsPassed);
}
return passCheck;
@@ -66,7 +73,9 @@
@Override
public boolean checkTrack(SeedTrack track) {
-// System.out.format("track with %d hits\n", track.getTrackerHits().size());
+ if (debug) {
+ System.out.format("%s: track with %d hits\n", this.getClass().getSimpleName(), track.getTrackerHits().size());
+ }
tracksChecked++;
int nStrips = 0;
double meanTime = 0;
@@ -87,13 +96,15 @@
}
rmsTime = Math.sqrt(rmsTime / nStrips);
// rmsTime = rmsTime / nStrips;
-// System.out.format("track RMS %f on %d hits\n", rmsTime, nStrips);
+ if (debug) {
+ System.out.format("%s: track RMS %f on %d hits\n", this.getClass().getSimpleName(), rmsTime, nStrips);
+ }
boolean passCheck = (rmsTime < rmsTimeCut);
if (passCheck) {
tracksPassed++;
}
if (debug && tracksChecked % 100 == 0) {
- System.out.format("Checked %d tracks, %d passed (%d failed)\n", tracksChecked, tracksPassed, tracksChecked - tracksPassed);
+ System.out.format("%s: Checked %d tracks, %d passed (%d failed)\n", this.getClass().getSimpleName(), tracksChecked, tracksPassed, tracksChecked - tracksPassed);
}
return passCheck;
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/NearestNeighborRMSClusterer.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/NearestNeighborRMSClusterer.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/NearestNeighborRMSClusterer.java Wed Apr 27 11:11:32 2016
@@ -161,7 +161,7 @@
double signal = base_hit.getAmp();
double noiseRMS = 0;
for(int sampleN = 0; sampleN < HPSSVTConstants.TOTAL_NUMBER_OF_SAMPLES; sampleN++){
- noiseRMS += ((HpsSiSensor) rth.getDetectorElement()).getNoise(channel_number, sampleN);
+ noiseRMS += ((HpsSiSensor) rth.getDetectorElement()).getNoise(channel_number, sampleN);
}
noiseRMS = noiseRMS/HPSSVTConstants.TOTAL_NUMBER_OF_SAMPLES;
@@ -225,7 +225,7 @@
cluster_signal += hit.getAmp();
double strip_noise = 0;
for(int sampleN = 0; sampleN < HPSSVTConstants.TOTAL_NUMBER_OF_SAMPLES; sampleN++){
- strip_noise += ((HpsSiSensor) hit.getRawTrackerHit().getDetectorElement()).getNoise(clustered_cell, sampleN);
+ strip_noise += ((HpsSiSensor) hit.getRawTrackerHit().getDetectorElement()).getNoise(clustered_cell, sampleN);
}
strip_noise = strip_noise/HPSSVTConstants.TOTAL_NUMBER_OF_SAMPLES;
cluster_noise_squared += Math.pow(strip_noise, 2);
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/NoiselessReadoutChip.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/NoiselessReadoutChip.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/NoiselessReadoutChip.java Wed Apr 27 11:11:32 2016
@@ -164,8 +164,8 @@
// Loop over the channels contained in the SiElectrodeDataCollection
for (Integer channel : data.keySet()) {
-
- if(dropBadChannels && ((HpsSiSensor) electrodes.getDetectorElement()).isBadChannel(channel)){
+
+ if(dropBadChannels && ((HpsSiSensor) electrodes.getDetectorElement()).isBadChannel(channel)){
//===> if (dropBadChannels && HPSSVTCalibrationConstants.isBadChannel((SiSensor) electrodes.getDetectorElement(), channel)) {
// System.out.format("%d bad\n", channel);
continue;
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/SVTBadChannelFilterDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/SVTBadChannelFilterDriver.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/SVTBadChannelFilterDriver.java Wed Apr 27 11:11:32 2016
@@ -19,33 +19,33 @@
*/
public class SVTBadChannelFilterDriver extends Driver {
- // RawTrackerHit collection name
+ // RawTrackerHit collection name
private String rawTrackerHitCollection = "SVTRawTrackerHits";
@Override
public void process(EventHeader event) {
-
+
if (event.hasCollection(RawTrackerHit.class, rawTrackerHitCollection)) {
- // Get the list of raw hits from the event
- List<RawTrackerHit> hits = event.get(RawTrackerHit.class, rawTrackerHitCollection);
-
+ // Get the list of raw hits from the event
+ List<RawTrackerHit> hits = event.get(RawTrackerHit.class, rawTrackerHitCollection);
+
// Get the hits meta data from the event
- LCMetaData meta = event.getMetaData(hits);
+ LCMetaData meta = event.getMetaData(hits);
- // Iterate over all raw hits in the event. If the raw hit is
- // identified to come from a noisy/bad channel, remove it from
- // the list of raw hits.
+ // Iterate over all raw hits in the event. If the raw hit is
+ // identified to come from a noisy/bad channel, remove it from
+ // the list of raw hits.
Iterator<RawTrackerHit> hitsIterator = hits.iterator();
while (hitsIterator.hasNext()) {
- RawTrackerHit hit = hitsIterator.next();
+ RawTrackerHit hit = hitsIterator.next();
//hit.setMetaData(meta);
int strip = hit.getIdentifierFieldValue("strip");
HpsSiSensor sensor = (HpsSiSensor) hit.getDetectorElement();
if(sensor.isBadChannel(strip)){
- hitsIterator.remove();
+ hitsIterator.remove();
}
if (!sensor.getReadout().getHits(RawTrackerHit.class).isEmpty()) {
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/ShaperLinearFitAlgorithm.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/ShaperLinearFitAlgorithm.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/ShaperLinearFitAlgorithm.java Wed Apr 27 11:11:32 2016
@@ -34,6 +34,7 @@
private final int nPulses;
final double[] amplitudes;
final double[] amplitudeErrors;
+ private double pedestal;
//===> private ChannelConstants channelConstants;
private HpsSiSensor sensor;
private int channel;
@@ -44,6 +45,7 @@
private int nUsedSamples;
private int firstFittedPulse;
private int nFittedPulses;
+ private boolean fitPedestal = false;
private boolean debug = false;
private static final Logger minuitLoggger = Logger.getLogger("org.freehep.math.minuit");
@@ -61,6 +63,18 @@
} else {
minuitLoggger.setLevel(Level.OFF);
}
+ }
+
+ public void setFitPedestal(boolean fitPedestal) {
+ this.fitPedestal = fitPedestal;
+ }
+
+ public boolean fitsPedestal() {
+ return fitPedestal;
+ }
+
+ public double getPedestal() {
+ return pedestal;
}
@Override
@@ -120,7 +134,11 @@
ShapeFitParameters fit = new ShapeFitParameters();
fit.setAmp(amplitudes[i]);
fit.setAmpErr(amplitudeErrors[i]);
- fit.setChiProb(Gamma.regularizedGammaQ(samples.length - 2 * nPulses, chisq));
+ if (fitPedestal) {
+ fit.setChiProb(Gamma.regularizedGammaQ(samples.length - 2 * nPulses - 1, chisq));
+ } else {
+ fit.setChiProb(Gamma.regularizedGammaQ(samples.length - 2 * nPulses, chisq));
+ }
fit.setT0(min.userState().value(i));
@@ -177,15 +195,22 @@
nUsedSamples = split;
//fit only the first pulse
nFittedPulses = 1;
- FunctionMinimum frontFit = doRecursiveFit(fitData);
+ FunctionMinimum frontFit;
+ frontFit = doRecursiveFit(fitData);
if (debug) {
- System.out.format("front fit:\tt0=%f,\tA=%f,\tchisq=%f\n", frontFit.userState().value(0), amplitudes[firstFittedPulse], frontFit.fval());
- }
-
+ if (fitPedestal) {
+ System.out.format("front fit:\tt0=%f,\tA=%f,\tchisq=%f,\tpedestal=%f\n", frontFit.userState().value(0), amplitudes[firstFittedPulse], frontFit.fval(), pedestal);
+ } else {
+ System.out.format("front fit:\tt0=%f,\tA=%f,\tchisq=%f\n", frontFit.userState().value(0), amplitudes[firstFittedPulse], frontFit.fval());
+ }
+ }
//subtract first pulse from fit input
for (int i = 0; i < samples.length; i++) {
//===> fitData[i] -= amplitudes[firstFittedPulse] * getAmplitude(HPSSVTConstants.SAMPLING_INTERVAL * i - frontFit.userState().value(0), channelConstants);
fitData[i] -= amplitudes[firstFittedPulse] * shape.getAmplitudePeakNorm(HPSSVTConstants.SAMPLING_INTERVAL * i - frontFit.userState().value(0));
+ if (fitPedestal) {
+ fitData[i] -= pedestal;
+ }
}
if (debug) {
@@ -201,7 +226,14 @@
//fit the rest of the pulses
firstFittedPulse++;
nFittedPulses = nPulses - firstFittedPulse;
- FunctionMinimum backFit = doRecursiveFit(fitData);
+ FunctionMinimum backFit;
+ if (fitPedestal) {
+ fitPedestal = false;
+ backFit = doRecursiveFit(fitData);
+ fitPedestal = true;
+ } else {
+ backFit = doRecursiveFit(fitData);
+ }
if (debug) {
System.out.format("back fit:\tt0=%f,\tA=%f,\tchisq=%f\n", backFit.userState().value(0), amplitudes[firstFittedPulse], backFit.fval());
@@ -221,7 +253,11 @@
FunctionMinimum combinedFit = minuitFit(combinedGuess);
if (debug) {
- System.out.format("combined fit:\tt0=%f,\tA=%f,\tt0=%f,\tA=%f,\tchisq=%f\n", combinedFit.userState().value(0), amplitudes[firstFittedPulse], combinedFit.userState().value(1), amplitudes[firstFittedPulse + 1], combinedFit.fval());
+ if (fitPedestal) {
+ System.out.format("combined fit:\tt0=%f,\tA=%f,\tt0=%f,\tA=%f,\tchisq=%f,\tpedestal=%f\n", combinedFit.userState().value(0), amplitudes[firstFittedPulse], combinedFit.userState().value(1), amplitudes[firstFittedPulse + 1], combinedFit.fval(), pedestal);
+ } else {
+ System.out.format("combined fit:\tt0=%f,\tA=%f,\tt0=%f,\tA=%f,\tchisq=%f\n", combinedFit.userState().value(0), amplitudes[firstFittedPulse], combinedFit.userState().value(1), amplitudes[firstFittedPulse + 1], combinedFit.fval());
+ }
}
double newchisq = evaluateMinimum(combinedFit);
@@ -234,8 +270,11 @@
// double newchisq = evaluateMinimum(bestFit);
if (debug) {
- System.out.println("new chisq:\t" + bestChisq);
- System.out.format("best fit:\tt0=%f,\tA=%f,\tt0=%f,\tA=%f,\tchisq=%f\n", bestFit.userState().value(0), amplitudes[firstFittedPulse], bestFit.userState().value(1), amplitudes[firstFittedPulse + 1], bestFit.fval());
+ if (fitPedestal) {
+ System.out.format("best fit:\tt0=%f,\tA=%f,\tt0=%f,\tA=%f,\tchisq=%f,\tpedestal=%f\n", bestFit.userState().value(0), amplitudes[firstFittedPulse], bestFit.userState().value(1), amplitudes[firstFittedPulse + 1], bestFit.fval(), pedestal);
+ } else {
+ System.out.format("best fit:\tt0=%f,\tA=%f,\tt0=%f,\tA=%f,\tchisq=%f\n", bestFit.userState().value(0), amplitudes[firstFittedPulse], bestFit.userState().value(1), amplitudes[firstFittedPulse + 1], bestFit.fval());
+ }
}
return bestFit;
}
@@ -322,7 +361,8 @@
if (times.length != nFittedPulses) {
throw new RuntimeException("wrong number of parameters in doLinFit");
}
- RealMatrix sc_mat = new Array2DRowRealMatrix(nFittedPulses, nUsedSamples);
+ int nAmplitudes = fitPedestal ? nFittedPulses + 1 : nFittedPulses;
+ RealMatrix sc_mat = new Array2DRowRealMatrix(nAmplitudes, nUsedSamples);
RealVector y_vec = new ArrayRealVector(nUsedSamples);
RealVector var_vec = new ArrayRealVector(nUsedSamples);
@@ -330,6 +370,9 @@
for (int i = 0; i < nFittedPulses; i++) {
//===> sc_mat.setEntry(i, j, getAmplitude(HPSSVTConstants.SAMPLING_INTERVAL * (firstUsedSample + j) - times[i], channelConstants) / sigma[firstUsedSample + j]);
sc_mat.setEntry(i, j, shape.getAmplitudePeakNorm(HPSSVTConstants.SAMPLING_INTERVAL * (firstUsedSample + j) - times[i]) / sigma[firstUsedSample + j]);
+ }
+ if (fitPedestal) {
+ sc_mat.setEntry(nFittedPulses, j, 1.0 / sigma[firstUsedSample + j]);
}
y_vec.setEntry(j, y[firstUsedSample + j] / sigma[firstUsedSample + j]);
var_vec.setEntry(j, sigma[firstUsedSample + j] * sigma[firstUsedSample + j]);
@@ -344,7 +387,7 @@
a_solver = a_cholesky.getSolver();
solved_amplitudes = a_solver.solve(a_vec);
amplitude_err = a_solver.solve(sc_mat.operate(var_vec));
- if (solved_amplitudes.getMinValue() < 0) {
+ if (solved_amplitudes.getSubVector(0, nFittedPulses).getMinValue() < 0) {
goodFit = false;
}
} catch (NonPositiveDefiniteMatrixException e) {
@@ -352,8 +395,8 @@
}
if (!goodFit) {
- solved_amplitudes = new ArrayRealVector(nFittedPulses, 0.0);
- amplitude_err = new ArrayRealVector(nFittedPulses, Double.POSITIVE_INFINITY);
+ solved_amplitudes = new ArrayRealVector(nAmplitudes, 0.0);
+ amplitude_err = new ArrayRealVector(nAmplitudes, Double.POSITIVE_INFINITY);
}
double chisq = y_vec.subtract(sc_mat.preMultiply(solved_amplitudes)).getNorm();
@@ -361,6 +404,9 @@
for (int i = 0; i < nFittedPulses; i++) {
amplitudes[firstFittedPulse + i] = solved_amplitudes.getEntry(i);
amplitudeErrors[firstFittedPulse + i] = Math.sqrt(amplitude_err.getEntry(i));
+ }
+ if (fitPedestal) {
+ pedestal = solved_amplitudes.getEntry(nFittedPulses);
}
return chisq;
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/StrategyType.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/StrategyType.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/StrategyType.java Wed Apr 27 11:11:32 2016
@@ -4,7 +4,7 @@
import java.util.Map;
/**
- * Enum constants for different {@link Track}s based on what tracking
+ * Enum constants for different {@link org.lcsim.event.Track} objects based on what tracking
* strategy was used. The type is defined by comparing the tracking strategy
* name to the name of all the enum constants.
*
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackData.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackData.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackData.java Wed Apr 27 11:11:32 2016
@@ -1,6 +1,13 @@
package org.hps.recon.tracking;
+import java.util.List;
+import org.apache.commons.math3.util.Pair;
+import org.lcsim.event.EventHeader;
import org.lcsim.event.GenericObject;
+import org.lcsim.event.LCRelation;
+import org.lcsim.event.RelationalTable;
+import org.lcsim.event.Track;
+import org.lcsim.event.base.BaseRelationalTable;
/**
* Generic object used to persist track data not available through a Track
@@ -155,4 +162,24 @@
public boolean isFixedSize() {
return true;
}
+
+ private static Pair<EventHeader, RelationalTable> trackDataToTrackCache = null;
+
+ public static RelationalTable getTrackDataToTrackTable(EventHeader event) {
+ if (trackDataToTrackCache == null || trackDataToTrackCache.getFirst() != event) {
+ RelationalTable trackDataToTrack = new BaseRelationalTable(RelationalTable.Mode.MANY_TO_MANY, RelationalTable.Weighting.UNWEIGHTED);
+ List<LCRelation> hitrelations = event.get(LCRelation.class, TRACK_DATA_RELATION_COLLECTION);
+ for (LCRelation relation : hitrelations) {
+ if (relation != null && relation.getFrom() != null && relation.getTo() != null) {
+ trackDataToTrack.add(relation.getFrom(), relation.getTo());
+ }
+ }
+ trackDataToTrackCache = new Pair<EventHeader, RelationalTable>(event, trackDataToTrack);
+ }
+ return trackDataToTrackCache.getSecond();
+ }
+
+ public static GenericObject getTrackData(EventHeader event, Track track) {
+ return (GenericObject) getTrackDataToTrackTable(event).from(track);
+ }
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackDataDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackDataDriver.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackDataDriver.java Wed Apr 27 11:11:32 2016
@@ -8,7 +8,6 @@
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.event.EventHeader;
-import org.lcsim.event.GenericObject;
import org.lcsim.event.LCRelation;
import org.lcsim.event.RawTrackerHit;
import org.lcsim.event.RelationalTable;
@@ -16,7 +15,6 @@
import org.lcsim.event.TrackState;
import org.lcsim.event.TrackerHit;
import org.lcsim.event.base.BaseLCRelation;
-import org.lcsim.event.base.BaseRelationalTable;
import org.lcsim.fit.helicaltrack.HelicalTrackCross;
import org.lcsim.fit.helicaltrack.HelicalTrackHit;
import org.lcsim.fit.helicaltrack.HelicalTrackStrip;
@@ -25,8 +23,8 @@
import org.lcsim.util.Driver;
/**
- * Driver used to persist additional {@link Track} information via a
- * {@link GenericObject}.
+ * Driver used to persist additional {@link org.lcsim.event.Track} information via a
+ * {@link org.lcsim.event.GenericObject} collection.
*
* @author Omar Moreno, UCSC
* @author Sho Uemura, SLAC
@@ -34,7 +32,7 @@
public final class TrackDataDriver extends Driver {
/** logger **/
- private static Logger LOGGER = Logger.getLogger(TrackDataDriver.class.getPackage().getName());
+ private static final Logger LOGGER = Logger.getLogger(TrackDataDriver.class.getPackage().getName());
/** The B field map */
@@ -107,6 +105,7 @@
*
* @param detector LCSim {@link Detector} geometry
*/
+ @Override
protected void detectorChanged(Detector detector) {
// Get the field map from the detector object
@@ -121,6 +120,7 @@
*
* @param event : LCSim event
*/
+ @Override
protected void process(EventHeader event) {
// Check if the event contains a collection of the type Track. If it
@@ -135,15 +135,11 @@
// Get the collection of LCRelations relating RotatedHelicalTrackHits to
// HelicalTrackHits
- List<LCRelation> rotatedHthToHthRelations = event.get(LCRelation.class, ROTATED_HTH_REL_COL_NAME);
- BaseRelationalTable hthToRotatedHth = new BaseRelationalTable(RelationalTable.Mode.ONE_TO_ONE,
- RelationalTable.Weighting.UNWEIGHTED);
- hthToRotatedHth.addRelations(rotatedHthToHthRelations);
RelationalTable hitToStrips = TrackUtils.getHitToStripsTable(event);
RelationalTable hitToRotated = TrackUtils.getHitToRotatedTable(event);
- List<HelicalTrackHit> rotatedHths = event.get(HelicalTrackHit.class, ROTATED_HTH_COL_NAME);
+// List<HelicalTrackHit> rotatedHths = event.get(HelicalTrackHit.class, ROTATED_HTH_COL_NAME);
// Create a container that will be used to store all TrackData objects.
List<TrackData> trackDataCollection = new ArrayList<TrackData>();
@@ -159,21 +155,21 @@
// residuals
List<LCRelation> trackToTrackResidualsRelations = new ArrayList<LCRelation>();
- double xResidual = 0;
- double yResidual = 0;
-
- float totalT0 = 0;
- float totalHits = 0;
- float trackTime = 0;
+ double xResidual;
+ double yResidual;
+
+ float totalT0;
+ float totalHits;
+ float trackTime;
int trackerVolume = -1;
- boolean isFirstHit = true;
-
- HpsSiSensor sensor = null;
- Hep3Vector stereoHitPosition = null;
- Hep3Vector trackPosition = null;
- HelicalTrackHit helicalTrackHit = null;
+ boolean isFirstHit;
+
+ HpsSiSensor sensor;
+ Hep3Vector stereoHitPosition;
+ Hep3Vector trackPosition;
+ HelicalTrackHit helicalTrackHit;
List<Double> t0Residuals = new ArrayList<Double>();
List<Double> trackResidualsX = new ArrayList<Double>();
@@ -183,9 +179,7 @@
// Loop over each of the track collections retrieved from the event
for (List<Track> tracks : trackCollections) {
-
-
-
+
// Loop over all the tracks in the event
for (Track track : tracks) {
@@ -198,7 +192,10 @@
stereoLayers.clear();
isFirstHit = true;
- //
+// TrackState trackStateForResiduals = TrackUtils.getTrackStateAtLocation(track, TrackState.AtLastHit);
+// if (trackStateForResiduals == null ) trackStateForResiduals= TrackUtils.getTrackStateAtLocation(track, TrackState.AtIP);
+ TrackState trackStateForResiduals = TrackUtils.getTrackStateAtLocation(track, TrackState.AtIP);
+
// Change the position of a HelicalTrackHit to be the corrected
// one.
// FIXME: Now that multiple track collections are being used,
@@ -216,7 +213,7 @@
// Extrapolate the track to the stereo hit position and
// calculate track residuals
stereoHitPosition = ((HelicalTrackHit) rotatedStereoHit).getCorrectedPosition();
- trackPosition = TrackUtils.extrapolateTrack(track, stereoHitPosition.x());
+ trackPosition = TrackUtils.extrapolateTrack(trackStateForResiduals, stereoHitPosition.x());
xResidual = trackPosition.x() - stereoHitPosition.y();
yResidual = trackPosition.y() - stereoHitPosition.z();
trackResidualsX.add(xResidual);
@@ -230,7 +227,7 @@
// Get the HelicalTrackHit corresponding to the
// RotatedHelicalTrackHit associated with a track
- helicalTrackHit = (HelicalTrackHit) hthToRotatedHth.from(rotatedStereoHit);
+ helicalTrackHit = (HelicalTrackHit) hitToRotated.from(rotatedStereoHit);
((HelicalTrackHit) rotatedStereoHit).setPosition(stereoHitPosition.v());
stereoHitPosition = CoordinateTransformations.transformVectorToDetector(stereoHitPosition);
helicalTrackHit.setPosition(stereoHitPosition.v());
@@ -261,18 +258,7 @@
// Extrapolate the track to the face of the Ecal and get the TrackState
if( TrackType.isGBL(track.getType())) {
- TrackState stateLast = null;
- TrackState stateIP = null;
- for(int ist= 0; ist < track.getTrackStates().size(); ist++) {
- if( track.getTrackStates().get(ist).getLocation() == TrackState.AtLastHit )
- stateLast = track.getTrackStates().get(ist);
- if( track.getTrackStates().get(ist).getLocation() == TrackState.AtIP )
- stateIP = track.getTrackStates().get(ist);
- }
- if( stateLast == null)
- throw new RuntimeException("last hit track state for GBL track was not found");
-// TrackState stateEcal = TrackUtils.extrapolateTrackUsingFieldMap(stateLast, extStartPos, ecalPosition, stepSize, bFieldMap);
-// track.getTrackStates().add(stateEcal);
+ TrackState stateIP = TrackUtils.getTrackStateAtLocation(track, TrackState.AtIP);
if( stateIP == null)
throw new RuntimeException("IP track state for GBL track was not found");
TrackState stateEcalIP = TrackUtils.extrapolateTrackUsingFieldMap(stateIP, extStartPos, ecalPosition, stepSize, bFieldMap);
@@ -317,7 +303,7 @@
}
// Add all collections to the event
- event.put(TrackData.TRACK_DATA_COLLECTION, trackDataCollection, TrackTimeData.class, 0);
+ event.put(TrackData.TRACK_DATA_COLLECTION, trackDataCollection, TrackData.class, 0);
event.put(TrackData.TRACK_DATA_RELATION_COLLECTION, trackDataRelations, LCRelation.class, 0);
event.put(TRK_RESIDUALS_COL_NAME, trackResidualsCollection, TrackResidualsData.class, 0);
event.put(TRK_RESIDUALS_REL_COL_NAME, trackToTrackResidualsRelations, LCRelation.class, 0);
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackQualityData.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackQualityData.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackQualityData.java Wed Apr 27 11:11:32 2016
@@ -17,10 +17,7 @@
private final double[] doubles;
/**
- * Default Ctor
- *
- * @param trackerVolume : The SVT volume to which the track used to
- * calculate the residuals corresponds to.
+ * Default Ctor
*/
public TrackQualityData() {
doubles = new double[2];
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackResidualsData.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackResidualsData.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackResidualsData.java Wed Apr 27 11:11:32 2016
@@ -13,84 +13,84 @@
*/
public class TrackResidualsData implements GenericObject {
- List<Double> trackResidualsX = new ArrayList<Double>();
- List<Float> trackResidualsY = new ArrayList<Float>();
- List<Integer> layers = new ArrayList<Integer>();
-
- /**
- * Default Ctor
- *
- * @param trackerVolume : The SVT volume to which the track used to calculate
- * the residuals corresponds to.
- */
- public TrackResidualsData(int trackerVolume, List<Integer> layers, List<Double> trackResidualsX, List<Float> trackResidualsY){
- this.layers.addAll(layers);
- this.layers.add(trackerVolume);
- this.trackResidualsX.addAll(trackResidualsX);
- this.trackResidualsY.addAll(trackResidualsY);
- }
+ List<Double> trackResidualsX = new ArrayList<Double>();
+ List<Float> trackResidualsY = new ArrayList<Float>();
+ List<Integer> layers = new ArrayList<Integer>();
+
+ /**
+ * Default Ctor
+ *
+ * @param trackerVolume : The SVT volume to which the track used to calculate
+ * the residuals corresponds to.
+ */
+ public TrackResidualsData(int trackerVolume, List<Integer> layers, List<Double> trackResidualsX, List<Float> trackResidualsY){
+ this.layers.addAll(layers);
+ this.layers.add(trackerVolume);
+ this.trackResidualsX.addAll(trackResidualsX);
+ this.trackResidualsY.addAll(trackResidualsY);
+ }
- /**
- *
- * @return tracker volume : 0 if top 1 if bottom
- */
- public int getTrackerVolume(){
- return layers.get(layers.size() - 1);
- }
-
- /**
- *
- */
- @Override
- public double getDoubleVal(int index) {
- return trackResidualsX.get(index);
- }
+ /**
+ *
+ * @return tracker volume : 0 if top 1 if bottom
+ */
+ public int getTrackerVolume(){
+ return layers.get(layers.size() - 1);
+ }
+
+ /**
+ *
+ */
+ @Override
+ public double getDoubleVal(int index) {
+ return trackResidualsX.get(index);
+ }
- /**
- *
- */
- @Override
- public float getFloatVal(int index) {
- return trackResidualsY.get(index);
- }
+ /**
+ *
+ */
+ @Override
+ public float getFloatVal(int index) {
+ return trackResidualsY.get(index);
+ }
- /**
- *
- */
- @Override
- public int getIntVal(int index) {
- return layers.get(index);
- }
+ /**
+ *
+ */
+ @Override
+ public int getIntVal(int index) {
+ return layers.get(index);
+ }
- /**
- *
- */
- @Override
- public int getNDouble() {
- return trackResidualsX.size();
- }
+ /**
+ *
+ */
+ @Override
+ public int getNDouble() {
+ return trackResidualsX.size();
+ }
- /**
- *
- */
- @Override
- public int getNFloat() {
- return trackResidualsY.size();
- }
+ /**
+ *
+ */
+ @Override
+ public int getNFloat() {
+ return trackResidualsY.size();
+ }
- /**
- *
- */
- @Override
- public int getNInt() {
- return layers.size();
- }
+ /**
+ *
+ */
+ @Override
+ public int getNInt() {
+ return layers.size();
+ }
- /**
- *
- */
- @Override
- public boolean isFixedSize() {
- return false;
- }
+ /**
+ *
+ */
+ @Override
+ public boolean isFixedSize() {
+ return false;
+ }
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackTimeData.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackTimeData.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackTimeData.java Wed Apr 27 11:11:32 2016
@@ -12,124 +12,124 @@
*/
public class TrackTimeData implements GenericObject {
- List<Float> trackTimeData = new ArrayList<Float>();
- List<Double> t0Residuals = new ArrayList<Double>();
- List<Integer> layers = new ArrayList<Integer>();
-
- // Constants
- private final static int SVT_VOLUME_INDEX = 0;
- private final static int TRACK_TIME_INDEX = 1;
-
- /**
- * Default Ctor
- *
- * @param trackTime : The mean t0 time of all hits of a track
- * @param trackerVolume : The SVT volume to which the track used to calculate
- * the track time corresponds to.
- *
- */
- public TrackTimeData(float trackerVolume, double trackTime, List<Integer> layers, List<Double> t0Residuals){
- trackTimeData.add(trackerVolume);
- trackTimeData.add((float) trackTime);
- this.layers.addAll(layers);
- this.t0Residuals.addAll(t0Residuals);
- }
-
- /**
- *
- *
- * @param layer :
- * @param t0Residual :
- *
- */
- private void addResidual(int layer, double t0Residual) {
- layers.add(layer);
- t0Residuals.add(t0Residual);
- }
+ List<Float> trackTimeData = new ArrayList<Float>();
+ List<Double> t0Residuals = new ArrayList<Double>();
+ List<Integer> layers = new ArrayList<Integer>();
+
+ // Constants
+ private final static int SVT_VOLUME_INDEX = 0;
+ private final static int TRACK_TIME_INDEX = 1;
+
+ /**
+ * Default Ctor
+ *
+ * @param trackTime : The mean t0 time of all hits of a track
+ * @param trackerVolume : The SVT volume to which the track used to calculate
+ * the track time corresponds to.
+ *
+ */
+ public TrackTimeData(float trackerVolume, double trackTime, List<Integer> layers, List<Double> t0Residuals){
+ trackTimeData.add(trackerVolume);
+ trackTimeData.add((float) trackTime);
+ this.layers.addAll(layers);
+ this.t0Residuals.addAll(t0Residuals);
+ }
+
+ /**
+ *
+ *
+ * @param layer :
+ * @param t0Residual :
+ *
+ */
+ private void addResidual(int layer, double t0Residual) {
+ layers.add(layer);
+ t0Residuals.add(t0Residual);
+ }
- /**
- *
- */
- public double getTrackTime() {
- return trackTimeData.get(TRACK_TIME_INDEX);
- }
-
- /**
- *
- *
- */
- public double getT0Residual(int layer) {
- return this.getDoubleVal(layer);
- }
+ /**
+ *
+ */
+ public double getTrackTime() {
+ return trackTimeData.get(TRACK_TIME_INDEX);
+ }
+
+ /**
+ *
+ *
+ */
+ public double getT0Residual(int layer) {
+ return this.getDoubleVal(layer);
+ }
- /**
- *
- */
- public double getClusterTime(int layer) {
- return this.getTrackTime() - this.getT0Residual(layer);
- }
-
- /**
- *
- *
- */
- public boolean isTopSvtVolume() {
- return (trackTimeData.get(SVT_VOLUME_INDEX) == 0) ? true : false;
- }
-
- /**
- *
- */
- @Override
- public double getDoubleVal(int index) {
- return t0Residuals.get(index);
- }
+ /**
+ *
+ */
+ public double getClusterTime(int layer) {
+ return this.getTrackTime() - this.getT0Residual(layer);
+ }
+
+ /**
+ *
+ *
+ */
+ public boolean isTopSvtVolume() {
+ return (trackTimeData.get(SVT_VOLUME_INDEX) == 0) ? true : false;
+ }
+
+ /**
+ *
+ */
+ @Override
+ public double getDoubleVal(int index) {
+ return t0Residuals.get(index);
+ }
- /**
- *
- */
- @Override
- public float getFloatVal(int index) {
- return trackTimeData.get(index);
- }
+ /**
+ *
+ */
+ @Override
+ public float getFloatVal(int index) {
+ return trackTimeData.get(index);
+ }
- /**
- *
- */
- @Override
- public int getIntVal(int index) {
- return layers.get(index);
- }
+ /**
+ *
+ */
+ @Override
+ public int getIntVal(int index) {
+ return layers.get(index);
+ }
- /**
- *
- */
- @Override
- public int getNDouble() {
- return t0Residuals.size();
- }
+ /**
+ *
+ */
+ @Override
+ public int getNDouble() {
+ return t0Residuals.size();
+ }
- /**
- *
- */
- @Override
- public int getNFloat() {
- return trackTimeData.size();
- }
+ /**
+ *
+ */
+ @Override
+ public int getNFloat() {
+ return trackTimeData.size();
+ }
- /**
- *
- */
- @Override
- public int getNInt() {
- return layers.size();
- }
+ /**
+ *
+ */
+ @Override
+ public int getNInt() {
+ return layers.size();
+ }
- /**
- *
- */
- @Override
- public boolean isFixedSize() {
- return false;
- }
+ /**
+ *
+ */
+ @Override
+ public boolean isFixedSize() {
+ return false;
+ }
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackUtils.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackUtils.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackUtils.java Wed Apr 27 11:11:32 2016
@@ -1,18 +1,26 @@
package org.hps.recon.tracking;
+import hep.physics.matrix.SymmetricMatrix;
+import hep.physics.vec.BasicHep3Vector;
+import hep.physics.vec.Hep3Matrix;
+import hep.physics.vec.Hep3Vector;
+import hep.physics.vec.VecOp;
+
import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
-import hep.physics.matrix.SymmetricMatrix;
-import hep.physics.vec.BasicHep3Vector;
-import hep.physics.vec.Hep3Matrix;
-import hep.physics.vec.Hep3Vector;
-import hep.physics.vec.SpacePoint;
-import hep.physics.vec.VecOp;
+import org.apache.commons.math3.util.Pair;
+import org.hps.recon.tracking.EventQuality.Quality;
+import org.hps.recon.tracking.gbl.HelicalTrackStripGbl;
+
+import static org.lcsim.constants.Constants.fieldConversion;
import org.lcsim.detector.ITransform3D;
import org.lcsim.detector.solids.Box;
@@ -48,10 +56,6 @@
import org.lcsim.util.swim.Helix;
import org.lcsim.util.swim.Line;
import org.lcsim.util.swim.Trajectory;
-import org.hps.recon.tracking.EventQuality.Quality;
-import org.hps.recon.tracking.gbl.HelicalTrackStripGbl;
-
-import static org.lcsim.constants.Constants.fieldConversion;
/**
* Assorted helper functions for the track and helix objects in lcsim. Re-use as
@@ -60,7 +64,6 @@
* @author Omar Moreno <[log in to unmask]>
*/
// TODO: Switch to tracking/LCsim coordinates for the extrapolation output!
-// FIXME: This class should probably be broken up into several different sets of utilities by type. --JM
public class TrackUtils {
/**
@@ -75,7 +78,7 @@
*
* @param track
* @param x
- * @return
+ * @return the position along the x-axis
*/
public static Hep3Vector extrapolateHelixToXPlane(Track track, double x) {
return extrapolateHelixToXPlane(getHTF(track), x);
@@ -85,13 +88,74 @@
return extrapolateHelixToXPlane(getHTF(track), x);
}
+ /**
+ * Change reference point of helix (following L3 Internal Note 1666.)
+ * @param newRefPoint - The new reference point in XY
+ */
+ public static double[] getParametersAtNewRefPoint(double[] newRefPoint, HpsHelicalTrackFit helicalTrackFit) {
+ return getParametersAtNewRefPoint(newRefPoint, helicalTrackFit.getRefPoint(),helicalTrackFit.parameters());
+ }
+
+ /**
+ * Change reference point of helix (following L3 Internal Note 1666.)
+ * @param newRefPoint - The new reference point in XY
+ */
+ public static double[] getParametersAtNewRefPoint(double[] newRefPoint, double[] __refPoint,
+ double[] parameters) {
+
+ double phi0 = parameters[HelicalTrackFit.phi0Index];
+ double curvature =parameters[HelicalTrackFit.curvatureIndex];
+ double dca = parameters[HelicalTrackFit.dcaIndex];
+ double slope = parameters[HelicalTrackFit.slopeIndex];
+ double z0 = parameters[HelicalTrackFit.z0Index];
+
+ //take care of phi0 range if needed (this matters for dphi below I think)
+ // L3 defines it in the range [-pi,pi]
+ if(phi0 > Math.PI)
+ phi0 -= Math.PI*2;
+
+ double dx = newRefPoint[0] - __refPoint[0];
+ double dy = newRefPoint[1] - __refPoint[1];
+ double sinphi = Math.sin(phi0);
+ double cosphi = Math.cos(phi0);
+ double R = 1.0/curvature;
+
+ // calculate new phi
+ double phinew = Math.atan2( sinphi - dx/(R-dca) , cosphi + dy/(R-dca) );
+
+ // difference in phi
+ // watch out for ambiguity
+ double dphi = phinew - phi0;
+ if (Math.abs( dphi ) > Math.PI)
+ throw new RuntimeException("dphi is large " + dphi + " from phi0 " + phi0
+ + " and phinew " + phinew + " take care of the ambiguity!!??");
+
+ // calculate new dca
+ double dcanew = dca + dx*sinphi - dy*cosphi + (dx*cosphi + dy*sinphi)*Math.tan( dphi/2. );
+
+ // path length from old to new point
+ double s = -1.0*dphi/curvature;
+
+ // new z0
+ double z0new = z0 + s*slope;
+
+ // new array
+ double[] params = new double[5];
+ params[HelicalTrackFit.phi0Index] = phinew;
+ params[HelicalTrackFit.curvatureIndex] = curvature;
+ params[HelicalTrackFit.dcaIndex] = dcanew;
+ params[HelicalTrackFit.slopeIndex] = slope;
+ params[HelicalTrackFit.z0Index] = z0new;
+ return params;
+ }
+
/**
* Extrapolate helix to a position along the x-axis. Re-use HelixUtils.
*
- * @param track
+ * @param htf
* @param x
- * @return
+ * @return the position along the x-axis
*/
public static Hep3Vector extrapolateHelixToXPlane(HelicalTrackFit htf, double x) {
double s = HelixUtils.PathToXPlane(htf, x, 0., 0).get(0);
@@ -282,7 +346,7 @@
*
* @param track - to be extrapolated
* @param z
- * @return
+ * @return extrapolated position
*/
public static Hep3Vector extrapolateTrack(Track track, double z) {
return extrapolateTrack(track.getTrackStates().get(0),z);
@@ -293,7 +357,7 @@
*
* @param track - to be extrapolated
* @param z
- * @return
+ * @return extrapolated position
*/
public static Hep3Vector extrapolateTrack(TrackState track, double z) {
@@ -329,10 +393,8 @@
/**
* Extrapolate track to given position, using dipole position from geometry.
*
- * @param helix - to be extrapolated
- * @param track - position along the x-axis of the helix in lcsim
- * coordiantes
- * @return
+ * @param track - position along the x-axis of the helix in lcsim coordinates
+ * @return extrapolated position
*/
public static Hep3Vector extrapolateTrack(Track track, double z, Detector detector) {
@@ -375,7 +437,7 @@
*
* @param helix - to be extrapolated
* @param z - position along the x-axis of the helix in lcsim coordiantes
- * @return
+ * @return the extrapolated position
*/
public static Hep3Vector extrapolateTrack(HelicalTrackFit helix, double z) {
SeedTrack trk = new SeedTrack();
@@ -800,11 +862,11 @@
/**
- * Transform MCParticle into a Helix object. Note that it produces the helix
- * parameters at nominal x=0 and assumes that there is no field at x<0
- *
- * @param mcp MC particle to be transformed
- * @param org origin to be used for the track
+ * Transform MCParticle into a {@link HelicalTrackFit} object. Note that it produces the {@link HelicalTrackFit}
+ * parameters at nominal reference point at origin and assumes that there is no field at x<0
+ *
+ * @param mcp - MC particle to be transformed
+ * @param origin - origin to be used for the track
* @return {@link HelicalTrackFit} object based on the MC particle
*/
public static HelicalTrackFit getHTF(MCParticle mcp, Hep3Vector origin, double Bz) {
@@ -870,27 +932,7 @@
return htf;
}
- public static StraightLineTrack findSLTAtZ(Track trk1, double zVal, boolean useFringe) {
- SeedTrack s1 = (SeedTrack) trk1;
- HelicalTrackFit htf1 = s1.getSeedCandidate().getHelix();
- HPSTrack hpstrk1 = new HPSTrack(htf1);
- Hep3Vector pos1;
- if (useFringe) {
- // broken because you need ot provide the Field Map to get this...
-// pos1 = hpstrk1.getPositionAtZMap(100.0, zVal, 5.0)[0];
- } else
- pos1 = TrackUtils.extrapolateTrack(trk1, zVal);
- // System.out.printf("%s: Position1 at edge of fringe %s\n",this.getClass().getSimpleName(),pos1.toString());
- Helix traj = (Helix) hpstrk1.getTrajectory();
- if (traj == null) {
- SpacePoint r0 = new SpacePoint(HelixUtils.PointOnHelix(htf1, 0));
- traj = new Helix(r0, htf1.R(), htf1.phi0(), Math.atan(htf1.slope()));
- }
- HelixConverter converter = new HelixConverter(0.);
- StraightLineTrack slt1 = converter.Convert(traj);
- // System.out.printf("%s: straight line track: x0=%f,y0=%f,z0=%f dz/dx=%f dydx=%f targetY=%f targetZ=%f \n",this.getClass().getSimpleName(),slt1.x0(),slt1.y0(),slt1.z0(),slt1.dzdx(),slt1.dydx(),slt1.TargetYZ()[0],slt1.TargetYZ()[1]);
- return slt1;
- }
+
public static MCParticle getMatchedTruthParticle(Track track) {
boolean debug = false;
@@ -984,24 +1026,36 @@
return new HelicalTrackHit(pos, hitcov, dedx, time, type, rhits, detname, layer, beflag);
}
+ private static Pair<EventHeader, RelationalTable> hitToStripsCache = null;
+
public static RelationalTable getHitToStripsTable(EventHeader event) {
- RelationalTable hitToStrips = new BaseRelationalTable(RelationalTable.Mode.MANY_TO_MANY, RelationalTable.Weighting.UNWEIGHTED);
- List<LCRelation> hitrelations = event.get(LCRelation.class, "HelicalTrackHitRelations");
- for (LCRelation relation : hitrelations)
- if (relation != null && relation.getFrom() != null && relation.getTo() != null)
- hitToStrips.add(relation.getFrom(), relation.getTo());
-
- return hitToStrips;
- }
+ if (hitToStripsCache == null || hitToStripsCache.getFirst() != event) {
+ RelationalTable hitToStrips = new BaseRelationalTable(RelationalTable.Mode.MANY_TO_MANY, RelationalTable.Weighting.UNWEIGHTED);
+ List<LCRelation> hitrelations = event.get(LCRelation.class, "HelicalTrackHitRelations");
+ for (LCRelation relation : hitrelations) {
+ if (relation != null && relation.getFrom() != null && relation.getTo() != null) {
+ hitToStrips.add(relation.getFrom(), relation.getTo());
+ }
+ }
+ hitToStripsCache = new Pair<EventHeader, RelationalTable>(event, hitToStrips);
+ }
+ return hitToStripsCache.getSecond();
+ }
+
+ private static Pair<EventHeader, RelationalTable> hitToRotatedCache = null;
public static RelationalTable getHitToRotatedTable(EventHeader event) {
-
- RelationalTable hitToRotated = new BaseRelationalTable(RelationalTable.Mode.ONE_TO_ONE, RelationalTable.Weighting.UNWEIGHTED);
- List<LCRelation> rotaterelations = event.get(LCRelation.class, "RotatedHelicalTrackHitRelations");
- for (LCRelation relation : rotaterelations)
- if (relation != null && relation.getFrom() != null && relation.getTo() != null)
- hitToRotated.add(relation.getFrom(), relation.getTo());
- return hitToRotated;
+ if (hitToRotatedCache == null || hitToRotatedCache.getFirst() != event) {
+ RelationalTable hitToRotated = new BaseRelationalTable(RelationalTable.Mode.ONE_TO_ONE, RelationalTable.Weighting.UNWEIGHTED);
+ List<LCRelation> rotaterelations = event.get(LCRelation.class, "RotatedHelicalTrackHitRelations");
+ for (LCRelation relation : rotaterelations) {
+ if (relation != null && relation.getFrom() != null && relation.getTo() != null) {
+ hitToRotated.add(relation.getFrom(), relation.getTo());
+ }
+ }
+ hitToRotatedCache = new Pair<EventHeader, RelationalTable>(event, hitToRotated);
+ }
+ return hitToRotatedCache.getSecond();
}
public static double getTrackTime(Track track, RelationalTable hitToStrips, RelationalTable hitToRotated) {
@@ -1019,6 +1073,20 @@
for (TrackerHit hit : track.getTrackerHits())
hits.addAll(hitToStrips.allFrom(hitToRotated.from(hit)));
return hits;
+ }
+
+ public static List<TrackerHit> sortHits(Collection<TrackerHit> hits) {
+ List<TrackerHit> hitList = new ArrayList<TrackerHit>(hits);
+ Collections.sort(hitList, new LayerComparator());
+ return hitList;
+ }
+
+ private static class LayerComparator implements Comparator<TrackerHit> {
+
+ @Override
+ public int compare(TrackerHit o1, TrackerHit o2) {
+ return Integer.compare(TrackUtils.getLayer(o1), TrackUtils.getLayer(o2));
+ }
}
public static boolean hasSharedStrips(Track track1, Track track2, RelationalTable hitToStrips, RelationalTable hitToRotated) {
@@ -1091,7 +1159,7 @@
* @param trk
* @param hitToStrips
* @param hitToRotated
- * @return
+ * @return isolations for all 12 strip layers
*/
public static Double[] getIsolations(Track trk, RelationalTable hitToStrips, RelationalTable hitToRotated) {
Double[] isolations = new Double[12];
@@ -1263,7 +1331,7 @@
* @param r0
* @param q
* @param B
- * @return
+ * @return the created trajectory
*/
public static Trajectory getTrajectory(Hep3Vector p0, org.lcsim.spacegeom.SpacePoint r0, double q, double B) {
SpaceVector p = new CartesianVector(p0.v());
@@ -1279,15 +1347,25 @@
return new Line(r0, phi, lambda);
}
- public static TrackState getTrackStateAtECal(Track trk) {
+ /**
+ * Port of Track.getTrackState(int location) from the C++ LCIO API.
+ * @param trk A track.
+ * @param location A TrackState location constant
+ * @return The first matching TrackState; null if none is found.
+ */
+ public static TrackState getTrackStateAtLocation(Track trk, int location) {
for (TrackState state : trk.getTrackStates()) {
- if (state.getLocation() == TrackState.AtCalorimeter) {
+ if (state.getLocation() == location) {
return state;
}
}
return null;
}
+ public static TrackState getTrackStateAtECal(Track trk) {
+ return getTrackStateAtLocation(trk, TrackState.AtCalorimeter);
+ }
+
public static Hep3Vector getBField(Detector detector) {
return detector.getFieldMap().getField(new BasicHep3Vector(0., 0., 500.0));
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackerReconDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackerReconDriver.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/TrackerReconDriver.java Wed Apr 27 11:11:32 2016
@@ -13,11 +13,9 @@
import java.util.logging.Logger;
import org.lcsim.event.EventHeader;
-import org.lcsim.event.LCRelation;
import org.lcsim.event.RelationalTable;
import org.lcsim.event.Track;
import org.lcsim.event.TrackerHit;
-import org.lcsim.event.base.BaseRelationalTable;
import org.lcsim.event.base.BaseTrack;
import org.lcsim.fit.helicaltrack.HelicalTrackHit;
import org.lcsim.geometry.Detector;
@@ -37,7 +35,7 @@
public final class TrackerReconDriver extends Driver {
private static final Logger LOGGER = Logger.getLogger(TrackerReconDriver.class.getPackage().getName());
-
+
// Debug flag.
private boolean debug = false;
// Tracks found across all events.
@@ -65,7 +63,7 @@
private double rmsTimeCut = -1;
private boolean rejectUncorrectedHits = true;
private boolean rejectSharedHits = false;
-
+
public TrackerReconDriver() {
}
@@ -145,10 +143,8 @@
// Get B-field Y with no sign. Seed Tracker doesn't like signed B-field components.
// FIXME Is this always right?
-// this.bfield = Math.abs((detector.getFieldMap().getField(new BasicHep3Vector(0, 0, 0)).y()));
- double zvalInTracker=500.0;//50cm...about the middle
- Hep3Vector fieldInTracker=detector.getFieldMap().getField(new BasicHep3Vector(0, 0, zvalInTracker));
- LOGGER.config("fieldInTracker at "+zvalInTracker+": Bx = "+fieldInTracker.x()+"; By = "+fieldInTracker.y()+"; Bz = "+fieldInTracker.z());
+ Hep3Vector fieldInTracker = TrackUtils.getBField(detector);
+ LOGGER.config("fieldInTracker: Bx = " + fieldInTracker.x() + "; By = " + fieldInTracker.y() + "; Bz = " + fieldInTracker.z());
this.bfield = Math.abs(fieldInTracker.y());
LOGGER.config(String.format("%s: Set B-field to %.6f\n", this.getClass().getSimpleName(), this.bfield));
@@ -165,8 +161,9 @@
//
// 1) Driver to run Seed Tracker.
//
- if (!strategyResource.startsWith("/"))
+ if (!strategyResource.startsWith("/")) {
strategyResource = "/org/hps/recon/tracking/strategies/" + strategyResource;
+ }
List<SeedStrategy> sFinallist = StrategyXMLUtils.getStrategyListFromInputStream(this.getClass().getResourceAsStream(strategyResource));
SeedTracker stFinal = new SeedTracker(sFinallist, this._useHPSMaterialManager, this.includeMS);
stFinal.setApplySectorBinning(_applySectorBinning);
@@ -177,14 +174,18 @@
stFinal.setInputCollectionName(stInputCollectionName);
stFinal.setTrkCollectionName(trackCollectionName);
stFinal.setBField(bfield);
- if (debug)
+ if (debug) {
stFinal.setDiagnostics(new SeedTrackerDiagnostics());
+ }
// stFinal.setSectorParams(false); //this doesn't actually seem to do anything
stFinal.setSectorParams(1, 10000);
add(stFinal);
- if (rmsTimeCut > 0)
- stFinal.setTrackCheck(new HitTimeTrackCheck(rmsTimeCut));
+ if (rmsTimeCut > 0) {
+ HitTimeTrackCheck timeCheck = new HitTimeTrackCheck(rmsTimeCut);
+ timeCheck.setDebug(debug);
+ stFinal.setTrackCheck(timeCheck);
+ }
}
/**
@@ -198,10 +199,11 @@
// Debug printouts.
if (debug) {
- if (event.hasCollection(HelicalTrackHit.class, stInputCollectionName))
+ if (event.hasCollection(HelicalTrackHit.class, stInputCollectionName)) {
System.out.println(this.getClass().getSimpleName() + ": The HelicalTrackHit collection " + stInputCollectionName + " has " + event.get(HelicalTrackHit.class, stInputCollectionName).size() + " hits.");
- else
+ } else {
System.out.println(this.getClass().getSimpleName() + ": No HelicalTrackHit collection for this event");
+ }
// Check for Tracks.
List<Track> tracks = event.get(Track.class, trackCollectionName);
System.out.println(this.getClass().getSimpleName() + ": The Track collection " + trackCollectionName + " has " + tracks.size() + " tracks.");
@@ -236,27 +238,13 @@
}
if (rejectSharedHits) {
-
- RelationalTable hittostrip = new BaseRelationalTable(RelationalTable.Mode.MANY_TO_MANY, RelationalTable.Weighting.UNWEIGHTED);
- List<LCRelation> hitrelations = event.get(LCRelation.class, "HelicalTrackHitRelations");
- for (LCRelation relation : hitrelations) {
- if (relation != null && relation.getFrom() != null && relation.getTo() != null) {
- hittostrip.add(relation.getFrom(), relation.getTo());
- }
- }
-
- RelationalTable hittorotated = new BaseRelationalTable(RelationalTable.Mode.ONE_TO_ONE, RelationalTable.Weighting.UNWEIGHTED);
- List<LCRelation> rotaterelations = event.get(LCRelation.class, "RotatedHelicalTrackHitRelations");
- for (LCRelation relation : rotaterelations) {
- if (relation != null && relation.getFrom() != null && relation.getTo() != null) {
- hittorotated.add(relation.getFrom(), relation.getTo());
- }
- }
+ RelationalTable hitToStrips = TrackUtils.getHitToStripsTable(event);
+ RelationalTable hitToRotated = TrackUtils.getHitToRotatedTable(event);
Map<TrackerHit, List<Track>> stripsToTracks = new HashMap<TrackerHit, List<Track>>();
for (Track track : tracks) {
for (TrackerHit hit : track.getTrackerHits()) {
- Collection<TrackerHit> htsList = hittostrip.allFrom(hittorotated.from(hit));
+ Collection<TrackerHit> htsList = hitToStrips.allFrom(hitToRotated.from(hit));
for (TrackerHit strip : htsList) {
List<Track> sharedTracks = stripsToTracks.get(strip);
if (sharedTracks == null) {
@@ -272,7 +260,7 @@
while (iter.hasNext()) {
Track track = iter.next();
for (TrackerHit hit : track.getTrackerHits()) {
- Collection<TrackerHit> htsList = hittostrip.allFrom(hittorotated.from(hit));
+ Collection<TrackerHit> htsList = hitToStrips.allFrom(hitToRotated.from(hit));
for (TrackerHit strip : htsList) {
List<Track> sharedTracks = stripsToTracks.get(strip);
if (sharedTracks.size() > 1) {
@@ -304,15 +292,16 @@
* @param tracks The list of <code>Track</code> objects.
*/
private void setTrackType(List<Track> tracks) {
- for (Track track : tracks)
+ for (Track track : tracks) {
((BaseTrack) track).setTrackType(BaseTrack.TrackType.Y_FIELD.ordinal());
+ }
}
@Override
public void endOfData() {
if (debug) {
System.out.println("-------------------------------------------");
- System.out.println(this.getName() + " found " + ntracks + " tracks in " + nevents + " events which is " + ((double) ntracks / (double) nevents) + " tracks per event.");
+ System.out.println(this.getName() + " with strategy " + strategyResource + " found " + ntracks + " tracks in " + nevents + " events which is " + ((double) ntracks / (double) nevents) + " tracks per event.");
}
}
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/WTrack.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/WTrack.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/WTrack.java Wed Apr 27 11:11:32 2016
@@ -12,9 +12,12 @@
import org.lcsim.fit.helicaltrack.HelicalTrackFit;
/**
- * Track parameterization representation.
- *
- * @author phansson
+ * Track representation based on paper
+ * Paul Avery, CBX 98-39, June 9, 1998
+ *
+ * Used primarily for the algorithm to intersect a helix with a generic plane in space.
+ *
+ * @author phansson <[log in to unmask]>
*/
public class WTrack {
@@ -25,7 +28,6 @@
private boolean _debug = false;
private final int max_iterations_intercept = 10;
private final double epsilon_intercept = 1e-4;
-
/**
* Constructor. Assumes that b-field is in detector z direction.
@@ -34,8 +36,8 @@
* @param bfield value and sign of magnetic field
*/
public WTrack(HelicalTrackFit track, double bfield) {
- _htf = track;
- //_bfield = flip ? -1.0 * bfield : bfield; // flip if needed
+ _htf = track;
+ //_bfield = flip ? -1.0 * bfield : bfield; // flip if needed
_bfield = bfield;
_a = -1 * Constants.fieldConversion * _bfield * Math.signum(track.R());
double p = track.p(Math.abs(_bfield));
@@ -185,7 +187,7 @@
* Get point on helix at path length s in arbitrary oriented, constant magnetic field with unit vector h
* @param s - path length
* @param h - magnetic field unit vector
- * @return
+ * @return get a 3D point along the helix
*/
private Hep3Vector getPointOnHelix(double s, Hep3Vector h) {
WTrack track = this;
@@ -224,8 +226,8 @@
}
/*
- Calculate the exact position of the new helix parameters at path length s in an arbitrarily oriented,
- constant magnetic field point xp is the point h is a unit vector in the direction of the magnetic field.
+ Calculate the exact position of the new helix parameters at path length s in an arbitrarily oriented,
+ constant magnetic field point xp is the point h is a unit vector in the direction of the magnetic field.
* @param s - path length
* @param h - magnetic field unit vector
* @return track parameters
@@ -262,11 +264,9 @@
* @param xp point on the plane
* @param eta unit vector of the plane
* @param h unit vector of magnetic field
- * @return
+ * @return the intersection point of the helix with the plane
*/
public Hep3Vector getHelixAndPlaneIntercept(Hep3Vector xp, Hep3Vector eta, Hep3Vector h) {
-
-
int iteration = 1;
double s_total = 0.;
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/axial/HelicalTrack2DHit.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/axial/HelicalTrack2DHit.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/axial/HelicalTrack2DHit.java Wed Apr 27 11:11:32 2016
@@ -11,8 +11,8 @@
* This class is explicitly for HPS where the length of the
* sensors are (mostly) along the detector
* y-dimension ( == HelicalTrackFit x-dimension);
+ * Copied/Modified from org.lcsim.recon.tracking.helicaltrack.HelicalTrack2DHit.java
* @author Matt Graham <[log in to unmask]>
- * Copied/Modified from org.lcsim.recon.tracking.helicaltrack.HelicalTrack2DHit.java
*/
public class HelicalTrack2DHit extends HelicalTrackHit {
private double _axmin;//min value along the bend-direction..
@@ -27,8 +27,6 @@
* @param dEdx deposited energy
* @param time hit time
* @param rawhits list of raw hits
- * @param axmin minimum z for the strip
- * @param axmax maximum z for the strip
* @param detname detector name
* @param layer layer number
* @param beflag
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/FittedGblTrajectory.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/FittedGblTrajectory.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/FittedGblTrajectory.java Wed Apr 27 11:11:32 2016
@@ -1,6 +1,20 @@
package org.hps.recon.tracking.gbl;
+import hep.physics.matrix.SymmetricMatrix;
+import hep.physics.vec.Hep3Vector;
+
+import java.util.Map;
+import java.util.logging.Logger;
+
+import org.apache.commons.math3.util.Pair;
+import org.hps.recon.tracking.HpsHelicalTrackFit;
+import org.hps.recon.tracking.TrackUtils;
+import org.hps.recon.tracking.gbl.matrix.Matrix;
+import org.hps.recon.tracking.gbl.matrix.SymMatrix;
+import org.hps.recon.tracking.gbl.matrix.Vector;
import org.lcsim.event.Track;
+import org.lcsim.fit.helicaltrack.HelicalTrackFit;
+import org.lcsim.fit.helicaltrack.HelixUtils;
/**
* A class that collects information about a fitted GBL trajectory.
@@ -9,6 +23,9 @@
*
*/
public class FittedGblTrajectory {
+
+ public static Logger LOGGER = Logger.getLogger(FittedGblTrajectory.class.getName());
+
public enum GBLPOINT {
IP(0), LAST(1), VERTEX(2);
private int numVal;
@@ -34,6 +51,7 @@
}
}
+
public static enum GBLPARIDX {
QOVERP(0),YTPRIME(1),XTPRIME(2),XT(3),YT(4);
private int _value;
@@ -44,18 +62,101 @@
return _value;
}
};
+
private GblTrajectory _traj;
private double _chi2;
private double _lost;
private int _ndf;
private Track _seed = null;
- private GBLTrackData _t = null;
+ private Map<Integer, Double> pathLengthMap = null;
+
+ /**
+ * Default constructor.
+ *
+ * @param traj
+ * @param chi2
+ * @param ndf
+ * @param lost
+ */
public FittedGblTrajectory(GblTrajectory traj, double chi2, int ndf, double lost) {
_traj = traj;
_chi2 = chi2;
_ndf = ndf;
_lost = lost;
}
+
+ /**
+ * Find the index (or label) of the GBL point on the trajectory from the {@link GBLPOINT}.
+ * @param point
+ * @return the index of the GBL point on the trajectory from the enum
+ */
+ public int getPointIndex(GBLPOINT point) {
+ int gblPointIndex;
+ if (point.compareTo(GBLPOINT.IP) == 0)
+ gblPointIndex = 1;
+ else if (point.compareTo(GBLPOINT.LAST) == 0)
+ gblPointIndex = _traj.getNumPoints();
+ else
+ throw new RuntimeException("This GBL point " + point.toString() + "( " + point.name() + ") is not valid");
+ return gblPointIndex;
+ }
+
+
+ /**
+ * Find the corrections and covariance matrix for a particular {@link GBLPOINT}
+ * @param point
+ * @param locPar
+ * @param locCov
+ */
+ public void getResults(GBLPOINT point, Vector locPar, SymMatrix locCov) {
+
+ // find the GBL point index
+ int gblPointIndex = getPointIndex(point);
+
+ // get the results
+ getResults(gblPointIndex, locPar, locCov);
+
+ }
+
+
+ /**
+ * Find the corrections and covariance matrix for a particular point on the GBL trajectory
+ * @param iLabel
+ * @param locPar
+ * @param locCov
+ */
+ public void getResults(int iLabel, Vector locPar, SymMatrix locCov) {
+
+ // Get the result from the trajectory
+ int ok = _traj.getResults(iLabel, locPar, locCov);
+
+ // check that the fit was ok
+ if( ok != 0)
+ throw new RuntimeException("Trying to extract GBL corrections for fit that failed!?");
+ }
+
+
+ /**
+ * Find the path length to this point.
+ * @param point - {@link GBLPOINT} point
+ * @return path length
+ */
+ public double getPathLength(GBLPOINT point) {
+ int gblPointIndex = getPointIndex(point);
+ return getPathLength(gblPointIndex);
+ }
+
+ /**
+ * Find the path length to this point.
+ * @param iLabel - GBL point index
+ * @return path length
+ */
+ public double getPathLength(int iLabel) {
+ if( !this.pathLengthMap.containsKey(iLabel) )
+ throw new RuntimeException("This iLabel " + iLabel + " doesn't exists in the path length map.");
+ return this.pathLengthMap.get(iLabel);
+ }
+
public void set_seed(Track seed) {
_seed = seed;
}
@@ -74,5 +175,149 @@
public int get_ndf() {
return _ndf;
}
+
+ public void setPathLengthMap(Map<Integer, Double> pathLengthMap) {
+ this.pathLengthMap = pathLengthMap;
+ }
+
+ public Map<Integer, Double> getPathLengthMap() {
+ if (this.pathLengthMap == null)
+ throw new RuntimeException("No path length map has been set on this trajectory!");
+ return this.pathLengthMap;
+ }
+
+
+
+ /**
+ * Get the corrected perigee parameters and covariance matrix for a point on the {@link GblTrajectory}.
+ *
+ * FIXME the covariance matrix is not properly propagated along the trajectory right now!
+ *
+ * @param htf - helix to be corrected
+ * @param point - {@link GBLPOINT} on the trajectory
+ * @param bfield - magnitude of B-field.
+ * @return the corrected perigee parameters and covariance matrix
+ */
+ public Pair<double[], SymmetricMatrix> getCorrectedPerigeeParameters(HelicalTrackFit htf, GBLPOINT point, double bfield) {
+
+ // find the point on the trajectory from the GBLPOINT
+ int iLabel = getPointIndex(point);
+
+ return getCorrectedPerigeeParameters(htf, iLabel, bfield);
+
+ }
+
+
+ /**
+ * Get the corrected perigee parameters and covariance matrix for a point on the {@link GblTrajectory}.
+ *
+ * FIXME the covariance matrix is not properly propagated along the trajectory right now!
+ *
+ * @param htf - helix to be corrected
+ * @param iLabel - label of the point on the {@link GblTrajectory}
+ * @param bfield - magnitude of B-field.
+ * @return the corrected perigee parameters
+ */
+ public Pair<double[], SymmetricMatrix> getCorrectedPerigeeParameters(HelicalTrackFit htf, int iLabel, double bfield) {
+
+ // Get corrections from GBL fit
+ Vector locPar = new Vector(5);
+ SymMatrix locCov = new SymMatrix(5);
+
+ // Extract the corrections to the track parameters and the covariance matrix from the GBL trajectory
+ getResults(iLabel, locPar, locCov);
+
+ // Use the super class to keep track of reference point of the helix
+ HpsHelicalTrackFit helicalTrackFit = new HpsHelicalTrackFit(htf);
+ double[] refIP = helicalTrackFit.getRefPoint();
+
+ // Calculate new reference point for this point
+ // This is the intersection of the helix with the plane
+ // The trajectory has this information already in the form of a map between GBL point and path length
+ double pathLength = getPathLength(iLabel);
+ Hep3Vector refPointVec = HelixUtils.PointOnHelix(helicalTrackFit, pathLength);
+ double[] refPoint = new double[]{refPointVec.x(), refPointVec.y()};
+
+ LOGGER.finest("pathLength " + pathLength + " -> refPointVec " + refPointVec.toString());
+
+ // Propagate the helix to new reference point
+ double[] helixParametersAtPoint = TrackUtils.getParametersAtNewRefPoint(refPoint, helicalTrackFit);
+
+ // Create a new helix with the new parameters and the new reference point
+ HpsHelicalTrackFit helicalTrackFitAtPoint = new HpsHelicalTrackFit(helixParametersAtPoint, helicalTrackFit.covariance(),
+ helicalTrackFit.chisq(), helicalTrackFit.ndf(), helicalTrackFit.PathMap(),
+ helicalTrackFit.ScatterMap(), refPoint);
+
+ // find the corrected perigee track parameters at this point
+ double[] helixParametersAtPointCorrected = GblUtils.getCorrectedPerigeeParameters(locPar, helicalTrackFitAtPoint, bfield);
+
+ // create a new helix
+ HpsHelicalTrackFit helicalTrackFitAtPointCorrected = new HpsHelicalTrackFit(helixParametersAtPointCorrected, helicalTrackFit.covariance(),
+ helicalTrackFit.chisq(), helicalTrackFit.ndf(), helicalTrackFit.PathMap(),
+ helicalTrackFit.ScatterMap(), refPoint);
+
+ // change reference point back to the original one
+ double[] helixParametersAtIPCorrected = TrackUtils.getParametersAtNewRefPoint(refIP, helicalTrackFitAtPointCorrected);
+
+ // create a new helix for the new parameters at the IP reference point
+ HpsHelicalTrackFit helicalTrackFitAtIPCorrected = new HpsHelicalTrackFit(helixParametersAtIPCorrected, helicalTrackFit.covariance(),
+ helicalTrackFit.chisq(), helicalTrackFit.ndf(), helicalTrackFit.PathMap(),
+ helicalTrackFit.ScatterMap(), refIP);
+
+
+ // Calculate the updated covariance
+ Matrix jacobian = GblUtils.getCLToPerigeeJacobian(helicalTrackFit, helicalTrackFitAtIPCorrected, bfield);
+ Matrix helixCovariance = jacobian.times(locCov.times(jacobian.transpose()));
+ SymmetricMatrix cov = new SymmetricMatrix(5);
+ for (int i = 0; i < 5; i++) {
+ for (int j = 0; j < 5; j++) {
+ if (i >= j) {
+ cov.setElement(i, j, helixCovariance.get(i, j));
+ }
+ }
+ }
+ LOGGER.finest("corrected helix covariance:\n" + cov);
+
+ double parameters_gbl[] = helicalTrackFitAtIPCorrected.parameters();
+
+ return new Pair<double[], SymmetricMatrix>(parameters_gbl,cov);
+ }
+
+
+
+ /**
+ * Extract kinks across the trajectory.
+ * @return kinks in a {@link GBLKinkData} object.
+ */
+ public GBLKinkData getKinks() {
+ GblTrajectory traj = this._traj;
+ // get corrections from GBL fit
+ Vector locPar = new Vector(5);
+ SymMatrix locCov = new SymMatrix(5);
+ float[] lambdaKinks = new float[traj.getNumPoints() - 1];
+ double[] phiKinks = new double[traj.getNumPoints() - 1];
+
+ double oldPhi = 0, oldLambda = 0;
+ for (int i = 0; i < traj.getNumPoints(); i++) {
+ traj.getResults(i + 1, locPar, locCov); // vertex point
+ double newPhi = locPar.get(GBLPARIDX.XTPRIME.getValue());
+ double newLambda = locPar.get(GBLPARIDX.YTPRIME.getValue());
+ if (i > 0) {
+ lambdaKinks[i - 1] = (float) (newLambda - oldLambda);
+ phiKinks[i - 1] = newPhi - oldPhi;
+ // System.out.println("phikink: " + (newPhi - oldPhi));
+ }
+ oldPhi = newPhi;
+ oldLambda = newLambda;
+ }
+
+ return new GBLKinkData(lambdaKinks, phiKinks);
+ }
+
+
+
+
+
+
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLEventData.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLEventData.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLEventData.java Wed Apr 27 11:11:32 2016
@@ -3,87 +3,87 @@
import org.lcsim.event.GenericObject;
public class GBLEventData implements GenericObject {
-
- /*
- *
- * Interface enumerator to access the correct data
- *
- */
- private static class GBLINT {
- public static final int RUNNR = 0;
- public static final int BANK_INT_SIZE = 1;
- }
- private static class GBLDOUBLE {
- public static final int BFIELD = 0;
- public static final int BANK_DOUBLE_SIZE = 1;
- }
- // array holding the integer data
- private int bank_int[] = new int[GBLINT.BANK_INT_SIZE];
- private double bank_double[] = new double[GBLDOUBLE.BANK_DOUBLE_SIZE];
-
+
+ /*
+ *
+ * Interface enumerator to access the correct data
+ *
+ */
+ private static class GBLINT {
+ public static final int RUNNR = 0;
+ public static final int BANK_INT_SIZE = 1;
+ }
+ private static class GBLDOUBLE {
+ public static final int BFIELD = 0;
+ public static final int BANK_DOUBLE_SIZE = 1;
+ }
+ // array holding the integer data
+ private int bank_int[] = new int[GBLINT.BANK_INT_SIZE];
+ private double bank_double[] = new double[GBLDOUBLE.BANK_DOUBLE_SIZE];
+
- /**
- * Constructor with event number as parameter
- * @param eventNumber the event number
- *
- */
- public GBLEventData(int eventNumber,double Bz) {
- setRunNr(eventNumber);
- setBfield(Bz);
- }
-
- public void setRunNr(int val) {
- bank_int[GBLINT.RUNNR] = val;
- }
-
- public int getRunNr() {
- return this.getIntVal(GBLINT.RUNNR);
- }
-
- public void setBfield(double val) {
- bank_double[GBLDOUBLE.BFIELD] = val;
- }
-
- public double getBfield() {
- return this.getDoubleVal(GBLDOUBLE.BFIELD);
- }
-
-
- @Override
- public int getNInt() {
- return GBLINT.BANK_INT_SIZE;
- }
+ /**
+ * Constructor with event number as parameter
+ * @param eventNumber the event number
+ *
+ */
+ public GBLEventData(int eventNumber,double Bz) {
+ setRunNr(eventNumber);
+ setBfield(Bz);
+ }
+
+ public void setRunNr(int val) {
+ bank_int[GBLINT.RUNNR] = val;
+ }
+
+ public int getRunNr() {
+ return this.getIntVal(GBLINT.RUNNR);
+ }
+
+ public void setBfield(double val) {
+ bank_double[GBLDOUBLE.BFIELD] = val;
+ }
+
+ public double getBfield() {
+ return this.getDoubleVal(GBLDOUBLE.BFIELD);
+ }
+
+
+ @Override
+ public int getNInt() {
+ return GBLINT.BANK_INT_SIZE;
+ }
- @Override
- public int getNFloat() {
- return 0;
- }
+ @Override
+ public int getNFloat() {
+ return 0;
+ }
- @Override
- public int getNDouble() {
- return GBLDOUBLE.BANK_DOUBLE_SIZE;
- }
+ @Override
+ public int getNDouble() {
+ return GBLDOUBLE.BANK_DOUBLE_SIZE;
+ }
- @Override
- public int getIntVal(int index) {
- return bank_int[index];
- }
+ @Override
+ public int getIntVal(int index) {
+ return bank_int[index];
+ }
- @Override
- public float getFloatVal(int index) {
- // TODO Auto-generated method stub
- return 0;
- }
+ @Override
+ public float getFloatVal(int index) {
+ // TODO Auto-generated method stub
+ return 0;
+ }
- @Override
- public double getDoubleVal(int index) {
- return bank_double[index];
- }
+ @Override
+ public double getDoubleVal(int index) {
+ return bank_double[index];
+ }
- @Override
- public boolean isFixedSize() {
- // TODO Auto-generated method stub
- return false;
- }
+ @Override
+ public boolean isFixedSize() {
+ // TODO Auto-generated method stub
+ return false;
+ }
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLFileIO.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLFileIO.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLFileIO.java Wed Apr 27 11:11:32 2016
@@ -11,8 +11,6 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.hps.recon.tracking.gbl.GBLOutput.ClParams;
-import org.hps.recon.tracking.gbl.GBLOutput.PerigeeParams;
import org.hps.svt.alignment.RunAlignment;
import org.lcsim.fit.helicaltrack.HelicalTrackFit;
@@ -47,11 +45,11 @@
}
}
private void openFile(String fileName) {
- if(fileName.equalsIgnoreCase("")) {
- System.out.printf("%s: no file name specified \n", this.getClass().getSimpleName());
- System.exit(1);
- }
- try {
+ if(fileName.equalsIgnoreCase("")) {
+ System.out.printf("%s: no file name specified \n", this.getClass().getSimpleName());
+ System.exit(1);
+ }
+ try {
_fWriter = new FileWriter(fileName);
_pWriter = new PrintWriter(_fWriter);
} catch (IOException ex) {
@@ -67,34 +65,34 @@
addLine(String.format("Track perPar (R phi0 slope d0 z0) %.12f %.12f %.12f %.12f %.12f",htf.R(),htf.phi0(),htf.slope(),htf.dca(),htf.z0()));
}
- String getPerTrackParamStr(PerigeeParams perPar) {
+ String getPerTrackParamStr(GblUtils.PerigeeParams perPar) {
return String.format("Track perPar (R theta phi d0 z0) %.12f %.12f %.12f %.12f %.12f",1.0/perPar.getKappa(),perPar.getTheta(),perPar.getPhi(),perPar.getD0(),perPar.getZ0());
}
- void printPerTrackParam(PerigeeParams perPar) {
+ void printPerTrackParam(GblUtils.PerigeeParams perPar) {
addLine(this.getPerTrackParamStr(perPar));
}
- String getPerTrackParamTruthStr(PerigeeParams perPar) {
+ String getPerTrackParamTruthStr(GblUtils.PerigeeParams perPar) {
return String.format("Truth perPar (kappa theta phi d0 z0) %.12f %.12f %.12f %.12f %.12f",perPar.getKappa(),perPar.getTheta(),perPar.getPhi(),perPar.getD0(),perPar.getZ0());
}
- void printPerTrackParamTruth(PerigeeParams perPar) {
+ void printPerTrackParamTruth(GblUtils.PerigeeParams perPar) {
addLine(this.getPerTrackParamTruthStr(perPar));
}
- String getClTrackParamTruthStr(ClParams perPar) {
+ String getClTrackParamTruthStr(GblUtils.ClParams perPar) {
return String.format("Truth clPar (q/p lambda phi xT yT) %.12f %.12f %.12f %.12f %.12f",perPar.getQoverP(),perPar.getLambda(),perPar.getPhi(),perPar.getXt(),perPar.getYt());
}
- void printClTrackParamTruth(ClParams perPar) {
+ void printClTrackParamTruth(GblUtils.ClParams perPar) {
addLine(this.getClTrackParamTruthStr(perPar));
}
- String getClTrackParamStr(ClParams perPar) {
+ String getClTrackParamStr(GblUtils.ClParams perPar) {
return String.format("Track clPar (q/p lambda phi xT yT) %.12f %.12f %.12f %.12f %.12f",perPar.getQoverP(),perPar.getLambda(),perPar.getPhi(),perPar.getXt(),perPar.getYt());
}
- void printClTrackParam(ClParams perPar) {
+ void printClTrackParam(GblUtils.ClParams perPar) {
addLine(String.format("%s",this.getClTrackParamStr(perPar)));
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLKinkData.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLKinkData.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLKinkData.java Wed Apr 27 11:11:32 2016
@@ -1,6 +1,13 @@
package org.hps.recon.tracking.gbl;
+import java.util.List;
+import org.apache.commons.math3.util.Pair;
+import org.lcsim.event.EventHeader;
import org.lcsim.event.GenericObject;
+import org.lcsim.event.LCRelation;
+import org.lcsim.event.RelationalTable;
+import org.lcsim.event.Track;
+import org.lcsim.event.base.BaseRelationalTable;
/**
* Generic object used to persist GBL kink data.
@@ -94,4 +101,27 @@
public boolean isFixedSize() {
return true;
}
+
+ private static Pair<EventHeader, RelationalTable> kinkDataToTrackCache = null;
+
+ public static RelationalTable getKinkDataToTrackTable(EventHeader event) {
+ if (kinkDataToTrackCache == null || kinkDataToTrackCache.getFirst() != event) {
+ RelationalTable kinkDataToTrack = new BaseRelationalTable(RelationalTable.Mode.MANY_TO_MANY, RelationalTable.Weighting.UNWEIGHTED);
+ if (event.hasCollection(LCRelation.class, DATA_RELATION_COLLECTION)) {
+ List<LCRelation> relations = event.get(LCRelation.class, DATA_RELATION_COLLECTION);
+ for (LCRelation relation : relations) {
+ if (relation != null && relation.getFrom() != null && relation.getTo() != null) {
+ kinkDataToTrack.add(relation.getFrom(), relation.getTo());
+ }
+ }
+ }
+ kinkDataToTrackCache = new Pair<EventHeader, RelationalTable>(event, kinkDataToTrack);
+ }
+ return kinkDataToTrackCache.getSecond();
+ }
+
+ public static GenericObject getKinkData(EventHeader event, Track track) {
+ return (GenericObject) getKinkDataToTrackTable(event).from(track);
+ }
+
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLOutput.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLOutput.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLOutput.java Wed Apr 27 11:11:32 2016
@@ -40,8 +40,6 @@
import org.lcsim.geometry.subdetector.BarrelEndcapFlag;
import org.lcsim.recon.tracking.digitization.sisim.SiTrackerHitStrip1D;
import org.lcsim.recon.tracking.digitization.sisim.TrackerHitType;
-import org.lcsim.recon.tracking.seedtracker.SeedCandidate;
-import org.lcsim.recon.tracking.seedtracker.SeedTrack;
/**
* Calculate the input needed for Millepede minimization.
@@ -153,15 +151,13 @@
void printGBL(Track trk, List<SiTrackerHitStrip1D> stripHits, GBLTrackData gtd, List<GBLStripClusterData> stripClusterDataList, List<MCParticle> mcParticles, List<SimTrackerHit> simTrackerHits, boolean isMC) {
- SeedTrack st = (SeedTrack) trk;
- SeedCandidate seed = st.getSeedCandidate();
- HelicalTrackFit htf = seed.getHelix();
+ HelicalTrackFit htf = TrackUtils.getHTF(trk);
// Find scatter points along the path
ScatterPoints scatters = _scattering.FindHPSScatterPoints(htf);
// Hits on track
- List<HelicalTrackHit> hits = seed.getHits();
+ List<TrackerHit> hits = trk.getTrackerHits();
// Find the truth particle of the track
MCParticle mcp = null;
@@ -171,7 +167,7 @@
if (isMC) {
// find the truth particle for this track
- mcp = getMatchedTruthParticle(trk);
+ mcp = TrackUtils.getMatchedTruthParticle(trk);
// check if this is an A' event
for(MCParticle part : mcParticles) {
@@ -224,8 +220,8 @@
// Use the truth helix as the initial track for GBL?
//htf = htfTruth;
// Get perigee parameters to curvilinear frame
- PerigeeParams perPar = new PerigeeParams(htf, bFieldVector.z());
- PerigeeParams perParTruth = new PerigeeParams(htfTruth, bFieldVector.z());
+ GblUtils.PerigeeParams perPar = new GblUtils.PerigeeParams(htf, bFieldVector.z());
+ GblUtils.PerigeeParams perParTruth = new GblUtils.PerigeeParams(htfTruth, bFieldVector.z());
//GBLDATA
gtd.setPerigeeTrackParameters(perPar);
@@ -236,8 +232,8 @@
// Get curvilinear parameters
if (textFile != null) {
- ClParams clPar = new ClParams(htf, bFieldVector.z());
- ClParams clParTruth = new ClParams(htfTruth, bFieldVector.z());
+ GblUtils.ClParams clPar = new GblUtils.ClParams(htf, bFieldVector.z());
+ GblUtils.ClParams clParTruth = new GblUtils.ClParams(htfTruth, bFieldVector.z());
textFile.printClTrackParam(clPar);
textFile.printClTrackParamTruth(clParTruth);
@@ -248,7 +244,7 @@
}
// find the projection from the I,J,K to U,V,T curvilinear coordinates
- Hep3Matrix perToClPrj = getPerToClPrj(htf);
+ Hep3Matrix perToClPrj = GblUtils.getPerToClPrj(htf);
//GBLDATA
for (int row = 0; row < perToClPrj.getNRows(); ++row) {
@@ -326,7 +322,7 @@
continue;
}
else {
- hit = hits.get(ihit);
+ hit = (HelicalTrackHit) hits.get(ihit);
htc = (HelicalTrackCross) hit;
strips = htc.getStrips();
correctedHitPosition = hit.getCorrectedPosition();
@@ -782,171 +778,10 @@
}
}
- MCParticle getMatchedTruthParticle(Track track) {
- boolean debug = false;
-
- Map<MCParticle, Integer> particlesOnTrack = new HashMap<MCParticle, Integer>();
-
- if (debug) {
- System.out.printf("getmatched mc particle from %d tracker hits on the track \n", track.getTrackerHits().size());
- }
-
- for (TrackerHit hit : track.getTrackerHits()) {
- List<MCParticle> mcps = ((HelicalTrackHit) hit).getMCParticles();
- if (mcps == null) {
- System.out.printf("%s: warning, this hit (layer %d pos=%s) has no mc particles.\n", this.getClass().getSimpleName(), ((HelicalTrackHit) hit).Layer(), ((HelicalTrackHit) hit).getCorrectedPosition().toString());
- } else {
- if (debug) {
- System.out.printf("%s: this hit (layer %d pos=%s) has %d mc particles.\n", this.getClass().getSimpleName(), ((HelicalTrackHit) hit).Layer(), ((HelicalTrackHit) hit).getCorrectedPosition().toString(), mcps.size());
- }
- for (MCParticle mcp : mcps) {
- if (!particlesOnTrack.containsKey(mcp)) {
- particlesOnTrack.put(mcp, 0);
- }
- int c = particlesOnTrack.get(mcp);
- particlesOnTrack.put(mcp, c + 1);
- }
- }
- }
- if (debug) {
- System.out.printf("Track p=[ %f, %f, %f] \n", track.getTrackStates().get(0).getMomentum()[0], track.getTrackStates().get(0).getMomentum()[1], track.getTrackStates().get(0).getMomentum()[1]);
- System.out.printf("FOund %d particles\n", particlesOnTrack.size());
- for (Map.Entry<MCParticle, Integer> entry : particlesOnTrack.entrySet()) {
- System.out.printf("%d hits assigned to %d p=%s \n", entry.getValue(), entry.getKey().getPDGID(), entry.getKey().getMomentum().toString());
- }
- }
- Map.Entry<MCParticle, Integer> maxEntry = null;
- for (Map.Entry<MCParticle, Integer> entry : particlesOnTrack.entrySet()) {
- if (maxEntry == null || entry.getValue().compareTo(maxEntry.getValue()) > 0) {
- maxEntry = entry; //if ( maxEntry != null ) {
- } // if(entry.getValue().compareTo(maxEntry.getValue()) < 0) continue;
- } //}
- //maxEntry = entry;
- if (debug) {
- if (maxEntry != null) {
- System.out.printf("Matched particle with pdgId=%d and mom %s to track with charge %d and momentum [%f %f %f]\n",
- maxEntry.getKey().getPDGID(), maxEntry.getKey().getMomentum().toString(),
- track.getCharge(), track.getTrackStates().get(0).getMomentum()[0], track.getTrackStates().get(0).getMomentum()[1], track.getTrackStates().get(0).getMomentum()[2]);
- } else {
- System.out.printf("No truth particle found on this track\n");
- }
- }
- return maxEntry == null ? null : maxEntry.getKey();
- }
-
-// private BasicMatrix getJacPerToCl(HelicalTrackFit htf) {
-// System.out.printf("%s: getJacPerToCl\n", this.getClass().getSimpleName());
-// //use propoerly normalized B-field
-// Hep3Vector Bnorm = VecOp.mult(Constants.fieldConversion, _B);
-// //init jacobian to zero
-// BasicMatrix j = new BasicMatrix(5, 5);
-// initZero(j);
-// double lambda = Math.atan(htf.slope());
-// double q = Math.signum(htf.R());
-// double theta = Math.PI / 2.0 - lambda;
-// Hep3Vector T = HelixUtils.Direction(htf, 0.);
-// Hep3Vector p = VecOp.mult(htf.p(Math.abs(_B.z())), T);
-// double pT = htf.pT(Math.abs(_B.z()));
-// Hep3Vector H = VecOp.mult(1. / (Bnorm.magnitude()), Bnorm);
-// Hep3Vector Z = new BasicHep3Vector(0, 0, 1);
-// Hep3Vector J = VecOp.mult(1. / VecOp.cross(T, Z).magnitude(), VecOp.cross(T, Z));
-// Hep3Vector U = VecOp.mult(-1, J);
-// Hep3Vector V = VecOp.cross(T, U);
-// double alpha = VecOp.cross(H, T).magnitude();
-// Hep3Vector N = VecOp.mult(1. / alpha, VecOp.cross(H, T));
-// Hep3Vector K = Z;
-// double Q = -Bnorm.magnitude() * q / p.magnitude();
-// double kappa = -1.0 * q * Bnorm.z() / pT;
-//
-// if (this._debug != 0) {
-// System.out.printf("%s: Bnorm=%s mag(Bnorm)=%f\n", this.getClass().getSimpleName(), Bnorm.toString(), Bnorm.magnitude());
-// System.out.printf("%s: p=%s |p|=%f pT=%f\n", this.getClass().getSimpleName(), p.toString(), p.magnitude(), pT);
-// System.out.printf("%s: q=%f\n", this.getClass().getSimpleName(), q);
-// System.out.printf("%s: q/p=%f\n", this.getClass().getSimpleName(), q / p.magnitude());
-// System.out.printf("%s: T=%s\n", this.getClass().getSimpleName(), T.toString());
-// System.out.printf("%s: H=%s\n", this.getClass().getSimpleName(), H.toString());
-// System.out.printf("%s: kappa=%f\n", this.getClass().getSimpleName(), kappa);
-// System.out.printf("%s: alpha=%f Q=%f \n", this.getClass().getSimpleName(), alpha, Q);
-// System.out.printf("%s: J=%s \n", this.getClass().getSimpleName(), J.toString());
-// System.out.printf("%s: V=%s \n", this.getClass().getSimpleName(), V.toString());
-// System.out.printf("%s: N=%s \n", this.getClass().getSimpleName(), N.toString());
-// System.out.printf("%s: TdotJ=%f \n", this.getClass().getSimpleName(), VecOp.dot(T, J));
-// System.out.printf("%s: VdotN=%f \n", this.getClass().getSimpleName(), VecOp.dot(V, N));
-// System.out.printf("%s: TdotK=%f \n", this.getClass().getSimpleName(), VecOp.dot(T, K));
-// System.out.printf("%s: UdotN=%f \n", this.getClass().getSimpleName(), VecOp.dot(U, N));
-// }
-//
-// j.setElement(0, 0, -1.0 * Math.sin(theta) / Bnorm.z());
-//
-// j.setElement(0, 1, q / (p.magnitude() * Math.tan(theta)));
-//
-// j.setElement(1, 1, -1);
-//
-// j.setElement(1, 3, -alpha * Q * VecOp.dot(T, J) * VecOp.dot(V, N));
-//
-// j.setElement(1, 4, -alpha * Q * VecOp.dot(T, K) * VecOp.dot(V, N));
-//
-// j.setElement(2, 2, 1);
-//
-// j.setElement(2, 3, -alpha * Q * VecOp.dot(T, J) * VecOp.dot(U, N) / Math.cos(lambda));
-//
-// j.setElement(2, 4, -alpha * Q * VecOp.dot(T, K) * VecOp.dot(U, N) / Math.cos(lambda));
-//
-// j.setElement(3, 3, -1);
-//
-// j.setElement(4, 4, VecOp.dot(V, K));
-//
-// if (_debug > 0) {
-// System.out.printf("%s: lambda= J(1,1)=%f * theta + J(1,3)=%f * eps + J(1,4)=%f * z0 \n",
-// this.getClass().getSimpleName(),
-// j.e(1, 1), j.e(1, 3), j.e(1, 4));
-//
-// }
-//
-// return j;
-//
-// }
- /**
- * Transform MCParticle into a Helix object. Note that it produces the helix
- * parameters at nominal x=0 and assumes that there is no field at x<0
- *
- * @param mcp MC particle to be transformed
- * @return helix object based on the MC particle
- */
-// private HelicalTrackFit getHTF(MCParticle mcp) {
-// Hep3Vector org = this._hpstrans.transformVectorToTracking(mcp.getOrigin());
-// Hep3Vector p = this._hpstrans.transformVectorToTracking(mcp.getMomentum());
-// // Move to x=0 if needed
-// if(org.x() < 0.) {
-// double dydx = p.y()/p.x();
-// double dzdx = p.z()/p.x();
-// double delta_x = -1. * org.x();
-// double y = delta_x * dydx;
-// double z = delta_x * dzdx;
-// double x = org.x() + delta_x;
-// if( Math.abs(x) > 1e-8) throw new RuntimeException("Error: origin is not zero!");
-// Hep3Vector old = org;
-// org = new BasicHep3Vector(x,y,z);
-// System.out.printf("org %s p %s -> org %s\n", old.toString(),p.toString(),org.toString());
-// } else {
-// org = this._hpstrans.transformVectorToTracking(mcp.getOrigin());
-// }
-//
-//
-//
-// HelixParamCalculator helixParamCalculator = new HelixParamCalculator(p, org, -1*((int)mcp.getCharge()), -1.0*this._B.z());
-// double par[] = new double[5];
-// par[HelicalTrackFit.dcaIndex] = helixParamCalculator.getDCA();
-// par[HelicalTrackFit.slopeIndex] = helixParamCalculator.getSlopeSZPlane();
-// par[HelicalTrackFit.phi0Index] = helixParamCalculator.getPhi0();
-// par[HelicalTrackFit.curvatureIndex] = 1.0/helixParamCalculator.getRadius();
-// par[HelicalTrackFit.z0Index] = helixParamCalculator.getZ0();
-// SymmetricMatrix cov = new SymmetricMatrix(5);
-// for(int i=0;i<cov.getNRows();++i) cov.setElement(i, i, 1.);
-// HelicalTrackFit htf = new HelicalTrackFit(par, cov, new double[2], new int[2], null, null);
-// return htf;
-// }
- private double truthTrackFitChi2(PerigeeParams perPar, PerigeeParams perParTruth, SymmetricMatrix covariance) {
+
+
+
+ private double truthTrackFitChi2(GblUtils.PerigeeParams perPar, GblUtils.PerigeeParams perParTruth, SymmetricMatrix covariance) {
//re-shuffle the param vector to match the covariance order of parameters
BasicMatrix p = new BasicMatrix(1, 5);
p.setElement(0, 0, perPar.getD0());
@@ -1007,190 +842,6 @@
return Math.sqrt(Math.pow(E1 + E2, 2) - VecOp.add(p1vec, p2vec).magnitudeSquared());
}
- private static BasicMatrix getPerParVector(double kappa, double theta, double phi, double d0, double z0) {
- BasicMatrix perPar = new BasicMatrix(1, 5);
- perPar.setElement(0, 0, kappa);
- perPar.setElement(0, 1, theta);
- perPar.setElement(0, 2, phi);
- perPar.setElement(0, 3, d0);
- perPar.setElement(0, 4, z0);
- return perPar;
- }
-
- private static BasicMatrix getPerParVector(HelicalTrackFit htf, double B) {
- if (htf != null) {
- double kappa = -1.0 * Math.signum(B) / htf.R();
- double theta = Math.PI / 2.0 - Math.atan(htf.slope());
- return getPerParVector(kappa, theta, htf.phi0(), htf.dca(), htf.z0());
- }
- return new BasicMatrix(1, 5);
- }
-
- /**
- *
- * Store perigee track parameters.
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
- */
- public static class PerigeeParams {
-
- private final BasicMatrix _params;
-
- public PerigeeParams(HelicalTrackFit htf, double B) {
- _params = getPerParVector(htf, B);
- }
-
- public PerigeeParams(double kappa, double theta, double phi, double d0, double z0) {
- this._params = getPerParVector(kappa, theta, phi, d0, z0);
- }
-
- public BasicMatrix getParams() {
- return _params;
- }
-
- public double getKappa() {
- return _params.e(0, 0);
- }
-
- public double getTheta() {
- return _params.e(0, 1);
- }
-
- public double getPhi() {
- return _params.e(0, 2);
- }
-
- public double getD0() {
- return _params.e(0, 3);
- }
-
- public double getZ0() {
- return _params.e(0, 4);
- }
- }
-
- /**
- * Computes the projection matrix from the perigee XY plane variables dca
- * and z0 into the curvilinear xT,yT,zT frame (U,V,T)
- *
- * @param htf input helix to find the track direction
- * @return 3x3 projection matrix
- */
- static Hep3Matrix getPerToClPrj(HelicalTrackFit htf) {
- Hep3Vector Z = new BasicHep3Vector(0, 0, 1);
- Hep3Vector T = HelixUtils.Direction(htf, 0.);
- Hep3Vector J = VecOp.mult(1. / VecOp.cross(T, Z).magnitude(), VecOp.cross(T, Z));
- Hep3Vector K = Z;
- Hep3Vector U = VecOp.mult(-1, J);
- Hep3Vector V = VecOp.cross(T, U);
- Hep3Vector I = VecOp.cross(J, K);
-
- BasicHep3Matrix trans = new BasicHep3Matrix();
- trans.setElement(0, 0, VecOp.dot(I, U));
- trans.setElement(0, 1, VecOp.dot(J, U));
- trans.setElement(0, 2, VecOp.dot(K, U));
- trans.setElement(1, 0, VecOp.dot(I, V));
- trans.setElement(1, 1, VecOp.dot(J, V));
- trans.setElement(1, 2, VecOp.dot(K, V));
- trans.setElement(2, 0, VecOp.dot(I, T));
- trans.setElement(2, 1, VecOp.dot(J, T));
- trans.setElement(2, 2, VecOp.dot(K, T));
- return trans;
-
- /*
- Hep3Vector B = new BasicHep3Vector(0, 0, 1); // TODO sign convention?
- Hep3Vector H = VecOp.mult(1 / bfield, B);
- Hep3Vector T = HelixUtils.Direction(helix, 0.);
- Hep3Vector HcrossT = VecOp.cross(H, T);
- double alpha = HcrossT.magnitude(); // this should be Bvec cross TrackDir/|B|
- double Q = Math.abs(bfield) * q / p;
- Hep3Vector Z = new BasicHep3Vector(0, 0, 1);
- Hep3Vector J = VecOp.mult(1. / VecOp.cross(T, Z).magnitude(), VecOp.cross(T, Z));
- Hep3Vector K = Z;
- Hep3Vector U = VecOp.mult(-1, J);
- Hep3Vector V = VecOp.cross(T, U);
- Hep3Vector I = VecOp.cross(J, K);
- Hep3Vector N = VecOp.mult(1 / alpha, VecOp.cross(H, T)); //-cross(T,H)/alpha = -cross(T,Z) = -J
- double UdotI = VecOp.dot(U, I); // 0,0
- double NdotV = VecOp.dot(N, V); // 1,1?
- double NdotU = VecOp.dot(N, U); // 0,1?
- double TdotI = VecOp.dot(T, I); // 2,0
- double VdotI = VecOp.dot(V, I); // 1,0
- double VdotK = VecOp.dot(V, K); // 1,2
- */
- }
-
-
- /**
- *
- * Store curvilinear track parameters.
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
- */
- public static class ClParams {
-
- private BasicMatrix _params = new BasicMatrix(1, 5);
-
- public ClParams(HelicalTrackFit htf, double B) {
-
- if (htf == null) {
- return;
- }
-
- Hep3Matrix perToClPrj = getPerToClPrj(htf);
-
- double d0 = -1 * htf.dca(); //sign convention for curvilinear frame
- double z0 = htf.z0();
- Hep3Vector vecPer = new BasicHep3Vector(0., d0, z0);
- //System.out.printf("%s: vecPer=%s\n",this.getClass().getSimpleName(),vecPer.toString());
-
- Hep3Vector vecCl = VecOp.mult(perToClPrj, vecPer);
- //System.out.printf("%s: vecCl=%s\n",this.getClass().getSimpleName(),vecCl.toString());
- double xT = vecCl.x();
- double yT = vecCl.y();
- //double zT = vecCl.z();
-
- double lambda = Math.atan(htf.slope());
- double q = Math.signum(htf.R());
- double qOverP = q / htf.p(Math.abs(B));
- double phi = htf.phi0();
-
- _params.setElement(0, FittedGblTrajectory.GBLPARIDX.QOVERP.getValue(), qOverP);
- _params.setElement(0, FittedGblTrajectory.GBLPARIDX.YTPRIME.getValue(), lambda);
- _params.setElement(0, FittedGblTrajectory.GBLPARIDX.XTPRIME.getValue(), phi);
- _params.setElement(0, FittedGblTrajectory.GBLPARIDX.XT.getValue(), xT);
- _params.setElement(0, FittedGblTrajectory.GBLPARIDX.YT.getValue(), yT);
- }
-
- public BasicMatrix getParams() {
- return _params;
- }
-
- double getQoverP() {
- return _params.e(0, FittedGblTrajectory.GBLPARIDX.QOVERP.getValue());
- }
-
- double getLambda() {
- return _params.e(0, FittedGblTrajectory.GBLPARIDX.YTPRIME.getValue());
- }
-
- double getPhi() {
- return _params.e(0, FittedGblTrajectory.GBLPARIDX.XTPRIME.getValue());
- }
-
- double getXt() {
- return _params.e(0, FittedGblTrajectory.GBLPARIDX.XT.getValue());
- }
-
- double getYt() {
- return _params.e(0, FittedGblTrajectory.GBLPARIDX.YT.getValue());
- }
-
- }
-
-
/**
*
* {@link HelicalTrackStripGbl} that explicitly uses the given unit vectors when accessed.
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLOutputDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLOutputDriver.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLOutputDriver.java Wed Apr 27 11:11:32 2016
@@ -1,41 +1,30 @@
package org.hps.recon.tracking.gbl;
-import hep.physics.vec.BasicHep3Vector;
import hep.physics.vec.Hep3Vector;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
import java.util.List;
-import java.util.Map;
-import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.hps.recon.tracking.EventQuality;
-import org.hps.recon.tracking.StrategyType;
-import org.hps.recon.tracking.TrackType;
import org.hps.recon.tracking.TrackUtils;
import org.lcsim.event.EventHeader;
import org.lcsim.event.LCRelation;
import org.lcsim.event.MCParticle;
-import org.lcsim.event.RelationalTable;
import org.lcsim.event.SimTrackerHit;
import org.lcsim.event.Track;
-import org.lcsim.event.TrackerHit;
import org.lcsim.event.base.MyLCRelation;
import org.lcsim.geometry.Detector;
-import org.lcsim.lcio.LCIOConstants;
import org.lcsim.recon.tracking.digitization.sisim.SiTrackerHitStrip1D;
-import org.lcsim.recon.tracking.seedtracker.SeedTrack;
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
/**
- * This driver class is used to 1) write lcio collection of GBL info objects OR
- * 2) write GBL info into a unstructures text-based output
+ * This driver class is used to
+ * 1) write LCIO collection of GBL info objects, or,
+ * 2) write GBL info into a structured text-based output
*
- * It uses a helper class that does the actual work. We will port GBL to java
- * and that will replace this driver.
+ * It uses a helper class that does the actual work.
*
* @author Per Hansson Adrian <[log in to unmask]>
* @version $Id: GBLOutputDriver.java,v 1.9 2013/11/07 03:54:58 phansson Exp $
@@ -58,15 +47,12 @@
private int totalTracksProcessed = 0;
private int iTrack = 0;
private int iEvent = 0;
- private boolean addBeamspot=false;
+ private boolean addBeamspot = false;
private double beamspotScatAngle = 0.000001;
private double beamspotWidthZ = 0.05;
private double beamspotWidthY = 0.15;
- private double beamspotTiltZOverY = 15.0*180.0/Math.PI;
- private double beamspotPosition[] = {0,0,0};
-
-
-
+ private double beamspotTiltZOverY = 15.0 * 180.0 / Math.PI;
+ private double beamspotPosition[] = {0, 0, 0};
public GBLOutputDriver() {
}
@@ -95,7 +81,7 @@
@Override
public void process(EventHeader event) {
- List<Track> tracklist = null;
+ List<Track> tracklist;
if (event.hasCollection(Track.class, trackCollectionName)) {
tracklist = event.get(Track.class, trackCollectionName);
if (_debug > 0) {
@@ -107,7 +93,7 @@
List<SiTrackerHitStrip1D> stripHits = event.get(SiTrackerHitStrip1D.class, "StripClusterer_SiTrackerHitStrip1D");
if (_debug > 0) {
- System.out.printf("%s: Got %d SiTrackerHitStrip1D in this event\n",this.getClass().getSimpleName(), stripHits.size());
+ System.out.printf("%s: Got %d SiTrackerHitStrip1D in this event\n", this.getClass().getSimpleName(), stripHits.size());
}
List<MCParticle> mcParticles = new ArrayList<MCParticle>();
@@ -143,13 +129,11 @@
// Loop over each of the track collections retrieved from the event
for (Track trk : tracklist) {
totalTracks++;
-
- if (_debug > 0) System.out.printf("%s: PX %f bottom %d\n", this.getClass().getSimpleName(), trk.getPX(), TrackUtils.isBottomTrack(trk, 4)?1:0) ;
-
- //if( trk.getPX() < 0.9) continue;
-
- //if( TrackUtils.isBottomTrack(trk, 4)) continue;
-
+
+ if (_debug > 0) {
+ System.out.printf("%s: PX %f bottom %d\n", this.getClass().getSimpleName(), trk.getPX(), TrackUtils.isBottomTrack(trk, 4) ? 1 : 0);
+ }
+
if (TrackUtils.isGoodTrack(trk, tracklist, EventQuality.Quality.NONE)) {
if (_debug > 0) {
System.out.printf("%s: Print GBL output for this track\n", this.getClass().getSimpleName());
@@ -228,8 +212,8 @@
this.isMC = isMC;
}
- public void setAddBeamspot(boolean add){
- this.addBeamspot=add;
+ public void setAddBeamspot(boolean add) {
+ this.addBeamspot = add;
}
public double getBeamspotScatAngle() {
@@ -271,5 +255,8 @@
public void setBeamspotPosition(double beamspotPosition[]) {
this.beamspotPosition = beamspotPosition;
}
-
+
+ public void setTrackCollectionName(String trackCollectionName) {
+ this.trackCollectionName = trackCollectionName;
+ }
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLRefitterDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLRefitterDriver.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLRefitterDriver.java Wed Apr 27 11:11:32 2016
@@ -11,9 +11,11 @@
import org.hps.recon.tracking.MultipleScattering;
import org.hps.recon.tracking.TrackUtils;
import org.lcsim.event.EventHeader;
+import org.lcsim.event.LCRelation;
import org.lcsim.event.RelationalTable;
import org.lcsim.event.Track;
import org.lcsim.event.TrackerHit;
+import org.lcsim.event.base.BaseLCRelation;
import org.lcsim.geometry.Detector;
import org.lcsim.lcio.LCIOConstants;
import org.lcsim.util.Driver;
@@ -26,6 +28,7 @@
private String inputCollectionName = "MatchedTracks";
private String outputCollectionName = "GBLTracks";
+ private String trackRelationCollectionName = "MatchedToGBLTrackRelations";
private double bfield;
private final MultipleScattering _scattering = new MultipleScattering(new MaterialSupervisor());
@@ -66,12 +69,21 @@
RelationalTable hitToRotated = TrackUtils.getHitToRotatedTable(event);
List<Track> refittedTracks = new ArrayList<Track>();
+ List<LCRelation> trackRelations = new ArrayList<LCRelation>();
+
+ List<GBLKinkData> kinkDataCollection = new ArrayList<GBLKinkData>();
+ List<LCRelation> kinkDataRelations = new ArrayList<LCRelation>();
Map<Track, Track> inputToRefitted = new HashMap<Track, Track>();
for (Track track : tracks) {
- Pair<Track, GBLKinkData> newTrack = MakeGblTracks.refitTrack(TrackUtils.getHTF(track), TrackUtils.getStripHits(track, hitToStrips, hitToRotated), track.getTrackerHits(), 5, _scattering, bfield);
+ Pair<Track, GBLKinkData> newTrack = MakeGblTracks.refitTrack(TrackUtils.getHTF(track), TrackUtils.getStripHits(track, hitToStrips, hitToRotated), track.getTrackerHits(), 5, track.getType(), _scattering, bfield);
+// newTrack.getFirst().
refittedTracks.add(newTrack.getFirst());
+ trackRelations.add(new BaseLCRelation(track, newTrack.getFirst()));
inputToRefitted.put(track, newTrack.getFirst());
+
+ kinkDataCollection.add(newTrack.getSecond());
+ kinkDataRelations.add(new BaseLCRelation(newTrack.getSecond(), newTrack.getFirst()));
}
if (mergeTracks) {
@@ -106,7 +118,7 @@
}
}
- Pair<Track, GBLKinkData> mergedTrack = MakeGblTracks.refitTrack(TrackUtils.getHTF(track), TrackUtils.getStripHits(track, hitToStrips, hitToRotated), allHth, 5, _scattering, bfield);
+ Pair<Track, GBLKinkData> mergedTrack = MakeGblTracks.refitTrack(TrackUtils.getHTF(track), TrackUtils.getStripHits(track, hitToStrips, hitToRotated), allHth, 5, track.getType(), _scattering, bfield);
mergedTracks.add(mergedTrack.getFirst());
// System.out.format("%f %f %f\n", fit.get_chi2(), inputToRefitted.get(track).getChi2(), inputToRefitted.get(otherTrack).getChi2());
// mergedTrackToTrackList.put(mergedTrack, new ArrayList<Track>());
@@ -131,5 +143,8 @@
// Put the tracks back into the event and exit
int flag = 1 << LCIOConstants.TRBIT_HITS;
event.put(outputCollectionName, refittedTracks, Track.class, flag);
+ event.put(trackRelationCollectionName, trackRelations, LCRelation.class, 0);
+ event.put(GBLKinkData.DATA_COLLECTION, kinkDataCollection, GBLKinkData.class, 0);
+ event.put(GBLKinkData.DATA_RELATION_COLLECTION, kinkDataRelations, LCRelation.class, 0);
}
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLStripClusterData.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLStripClusterData.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLStripClusterData.java Wed Apr 27 11:11:32 2016
@@ -13,55 +13,55 @@
* @version $Id:
*/
public class GBLStripClusterData implements GenericObject {
-
- /*
- *
- * Interface enumerator to access the correct data
- *
- */
- public static class GBLINT {
- public static final int ID = 0;
- public static final int BANK_INT_SIZE = 1;
- }
- public static class GBLDOUBLE {
- public static final int PATH3D = 0;
- public static final int PATH = 1;
- public static final int UX = 2;
- public static final int UY = 3;
- public static final int UZ = 4;
- public static final int VX = 5;
- public static final int VY = 6;
- public static final int VZ = 7;
- public static final int WX = 8;
- public static final int WY = 9;
- public static final int WZ = 10;
- public static final int TDIRX = 11;
- public static final int TDIRY = 12;
- public static final int TDIRZ = 13;
- public static final int TPHI = 14;
- public static final int UMEAS = 15;
- public static final int TPOSU = 16 ;
- public static final int TPOSV = 17;
- public static final int TPOSW = 18;
- public static final int UMEASERR = 19;
- public static final int MSANGLE = 20;
- public static final int TLAMBDA = 21;
-
-
- public static final int BANK_DOUBLE_SIZE = 22;
-
- }
- // array holding the integer data
- private int bank_int[] = new int[GBLINT.BANK_INT_SIZE];
- // array holding the double data
- private double bank_double[] = new double[GBLDOUBLE.BANK_DOUBLE_SIZE];
-
- /**
- * Default constructor
- */
- public GBLStripClusterData(int id) {
- setId(id);
- }
+
+ /*
+ *
+ * Interface enumerator to access the correct data
+ *
+ */
+ public static class GBLINT {
+ public static final int ID = 0;
+ public static final int BANK_INT_SIZE = 1;
+ }
+ public static class GBLDOUBLE {
+ public static final int PATH3D = 0;
+ public static final int PATH = 1;
+ public static final int UX = 2;
+ public static final int UY = 3;
+ public static final int UZ = 4;
+ public static final int VX = 5;
+ public static final int VY = 6;
+ public static final int VZ = 7;
+ public static final int WX = 8;
+ public static final int WY = 9;
+ public static final int WZ = 10;
+ public static final int TDIRX = 11;
+ public static final int TDIRY = 12;
+ public static final int TDIRZ = 13;
+ public static final int TPHI = 14;
+ public static final int UMEAS = 15;
+ public static final int TPOSU = 16 ;
+ public static final int TPOSV = 17;
+ public static final int TPOSW = 18;
+ public static final int UMEASERR = 19;
+ public static final int MSANGLE = 20;
+ public static final int TLAMBDA = 21;
+
+
+ public static final int BANK_DOUBLE_SIZE = 22;
+
+ }
+ // array holding the integer data
+ private int bank_int[] = new int[GBLINT.BANK_INT_SIZE];
+ // array holding the double data
+ private double bank_double[] = new double[GBLDOUBLE.BANK_DOUBLE_SIZE];
+
+ /**
+ * Default constructor
+ */
+ public GBLStripClusterData(int id) {
+ setId(id);
+ }
/*
* Constructor from GenericObject
@@ -79,239 +79,239 @@
}
}
-
- /**
- * @param set track id to val
- */
- public void setId(int val) {
- bank_int[GBLINT.ID] = val;
- }
-
- /**
- * @return track id for this object
- */
- public int getId() {
- return this.getIntVal(GBLINT.ID);
- }
-
- /**
- * Set path length to this strip cluster
- * @param val
- */
- public void setPath(double val) {
- bank_double[GBLDOUBLE.PATH] = val;
- }
-
- /**
- * Get path length to this strip cluster
- */
- public double getPath() {
- return getDoubleVal(GBLDOUBLE.PATH);
- }
-
- /**
- * Set path length to this strip cluster
- * @param val
- */
- public void setPath3D(double val) {
- bank_double[GBLDOUBLE.PATH3D] = val;
- }
-
- /**
- * Get path length to this strip cluster
- */
- public double getPath3D() {
- return getDoubleVal(GBLDOUBLE.PATH3D);
- }
-
-
- /**
- * Set and get u vector for this strip sensor
- */
- public void setU(Hep3Vector u) {
- bank_double[GBLDOUBLE.UX] = u.x();
- bank_double[GBLDOUBLE.UY] = u.y();
- bank_double[GBLDOUBLE.UZ] = u.z();
- }
- public Hep3Vector getU() {
- return new BasicHep3Vector(getUx(),getUy(),getUz());
- }
- public double getUx() {
- return getDoubleVal(GBLDOUBLE.UX);
- }
- public double getUy() {
- return getDoubleVal(GBLDOUBLE.UY);
- }
- public double getUz() {
- return getDoubleVal(GBLDOUBLE.UZ);
- }
-
- /**
- * Set and get v vector for this strip sensor
- */
-
- public void setV(Hep3Vector v) {
- bank_double[GBLDOUBLE.VX] = v.x();
- bank_double[GBLDOUBLE.VY] = v.y();
- bank_double[GBLDOUBLE.VZ] = v.z();
- }
- public Hep3Vector getV() {
- return new BasicHep3Vector(getVx(),getVy(),getVz());
- }
- public double getVx() {
- return getDoubleVal(GBLDOUBLE.VX);
- }
- public double getVy() {
- return getDoubleVal(GBLDOUBLE.VY);
- }
- public double getVz() {
- return getDoubleVal(GBLDOUBLE.VZ);
- }
-
- /**
- * Set and get w vector for this strip sensor
- */
-
- public void setW(Hep3Vector v) {
- bank_double[GBLDOUBLE.WX] = v.x();
- bank_double[GBLDOUBLE.WY] = v.y();
- bank_double[GBLDOUBLE.WZ] = v.z();
- }
- public Hep3Vector getW() {
- return new BasicHep3Vector(getWx(),getWy(),getWz());
- }
- public double getWx() {
- return getDoubleVal(GBLDOUBLE.WX);
- }
- public double getWy() {
- return getDoubleVal(GBLDOUBLE.WY);
- }
- public double getWz() {
- return getDoubleVal(GBLDOUBLE.WZ);
- }
-
- /**
- * Set track direction at this cluster
- *
- * @param tDir
- */
- public void setTrackDir(Hep3Vector v) {
- bank_double[GBLDOUBLE.TDIRX] = v.x();
- bank_double[GBLDOUBLE.TDIRY] = v.y();
- bank_double[GBLDOUBLE.TDIRZ] = v.z();
- }
- public Hep3Vector getTrackDirection() {
- return new BasicHep3Vector(getTx(),getTy(),getTz());
- }
- public double getTx() {
- return getDoubleVal(GBLDOUBLE.TDIRX);
- }
- public double getTy() {
- return getDoubleVal(GBLDOUBLE.TDIRY);
- }
- public double getTz() {
- return getDoubleVal(GBLDOUBLE.TDIRZ);
- }
-
- public void setTrackPhi(double phi) {
- bank_double[GBLDOUBLE.TPHI] = phi;
- }
-
- public double getTrackPhi() {
- return getDoubleVal(GBLDOUBLE.TPHI);
- }
-
- public void setTrackLambda(double lambda) {
- bank_double[GBLDOUBLE.TLAMBDA] = lambda;
- }
-
- public double getTrackLambda() {
- return getDoubleVal(GBLDOUBLE.TLAMBDA);
- }
-
-
- public void setMeas(double umeas) {
- bank_double[GBLDOUBLE.UMEAS] = umeas;
- }
-
- public double getMeas() {
- return getDoubleVal(GBLDOUBLE.UMEAS);
- }
-
- public void setMeasErr(double x) {
- bank_double[GBLDOUBLE.UMEASERR] = x;
- }
-
- public double getMeasErr() {
- return getDoubleVal(GBLDOUBLE.UMEASERR);
- }
-
-
- /**
- * Set track position in local frame
- * @param trkpos_meas
- */
- public void setTrackPos(Hep3Vector trkpos_meas) {
- bank_double[GBLDOUBLE.TPOSU] = trkpos_meas.x();
- bank_double[GBLDOUBLE.TPOSV] = trkpos_meas.y();
- bank_double[GBLDOUBLE.TPOSW] = trkpos_meas.z();
- }
-
- public Hep3Vector getTrackPos() {
- return new BasicHep3Vector(getTrackPosU(),getTrackPosV(),getTrackPosW());
- }
-
- public double getTrackPosU() {
- return getDoubleVal(GBLDOUBLE.TPOSU);
- }
-
- public double getTrackPosV() {
- return getDoubleVal(GBLDOUBLE.TPOSV);
- }
-
- public double getTrackPosW() {
- return getDoubleVal(GBLDOUBLE.TPOSW);
- }
-
- public void setScatterAngle(double scatAngle) {
- bank_double[GBLDOUBLE.MSANGLE] = scatAngle;
- }
-
- public double getScatterAngle() {
- return getDoubleVal(GBLDOUBLE.MSANGLE);
- }
-
- /*
- * The functions below are all overide from
- * @see org.lcsim.event.GenericObject#getNInt()
- */
-
- public int getNInt() {
- return GBLINT.BANK_INT_SIZE;
- }
-
- public int getNFloat() {
- return 0;
- }
-
- public int getNDouble() {
- return GBLDOUBLE.BANK_DOUBLE_SIZE;
- }
-
- public int getIntVal(int index) {
- return bank_int[index];
- }
-
- public float getFloatVal(int index) {
- return 0;
- }
-
- public double getDoubleVal(int index) {
- return bank_double[index];
- }
-
- public boolean isFixedSize() {
- return false;
- }
+
+ /**
+ * @param val set track id to val
+ */
+ public void setId(int val) {
+ bank_int[GBLINT.ID] = val;
+ }
+
+ /**
+ * @return track id for this object
+ */
+ public int getId() {
+ return this.getIntVal(GBLINT.ID);
+ }
+
+ /**
+ * Set path length to this strip cluster
+ * @param val
+ */
+ public void setPath(double val) {
+ bank_double[GBLDOUBLE.PATH] = val;
+ }
+
+ /**
+ * Get path length to this strip cluster
+ */
+ public double getPath() {
+ return getDoubleVal(GBLDOUBLE.PATH);
+ }
+
+ /**
+ * Set path length to this strip cluster
+ * @param val
+ */
+ public void setPath3D(double val) {
+ bank_double[GBLDOUBLE.PATH3D] = val;
+ }
+
+ /**
+ * Get path length to this strip cluster
+ */
+ public double getPath3D() {
+ return getDoubleVal(GBLDOUBLE.PATH3D);
+ }
+
+
+ /**
+ * Set and get u vector for this strip sensor
+ */
+ public void setU(Hep3Vector u) {
+ bank_double[GBLDOUBLE.UX] = u.x();
+ bank_double[GBLDOUBLE.UY] = u.y();
+ bank_double[GBLDOUBLE.UZ] = u.z();
+ }
+ public Hep3Vector getU() {
+ return new BasicHep3Vector(getUx(),getUy(),getUz());
+ }
+ public double getUx() {
+ return getDoubleVal(GBLDOUBLE.UX);
+ }
+ public double getUy() {
+ return getDoubleVal(GBLDOUBLE.UY);
+ }
+ public double getUz() {
+ return getDoubleVal(GBLDOUBLE.UZ);
+ }
+
+ /**
+ * Set and get v vector for this strip sensor
+ */
+
+ public void setV(Hep3Vector v) {
+ bank_double[GBLDOUBLE.VX] = v.x();
+ bank_double[GBLDOUBLE.VY] = v.y();
+ bank_double[GBLDOUBLE.VZ] = v.z();
+ }
+ public Hep3Vector getV() {
+ return new BasicHep3Vector(getVx(),getVy(),getVz());
+ }
+ public double getVx() {
+ return getDoubleVal(GBLDOUBLE.VX);
+ }
+ public double getVy() {
+ return getDoubleVal(GBLDOUBLE.VY);
+ }
+ public double getVz() {
+ return getDoubleVal(GBLDOUBLE.VZ);
+ }
+
+ /**
+ * Set and get w vector for this strip sensor
+ */
+
+ public void setW(Hep3Vector v) {
+ bank_double[GBLDOUBLE.WX] = v.x();
+ bank_double[GBLDOUBLE.WY] = v.y();
+ bank_double[GBLDOUBLE.WZ] = v.z();
+ }
+ public Hep3Vector getW() {
+ return new BasicHep3Vector(getWx(),getWy(),getWz());
+ }
+ public double getWx() {
+ return getDoubleVal(GBLDOUBLE.WX);
+ }
+ public double getWy() {
+ return getDoubleVal(GBLDOUBLE.WY);
+ }
+ public double getWz() {
+ return getDoubleVal(GBLDOUBLE.WZ);
+ }
+
+ /**
+ * Set track direction at this cluster
+ *
+ * @param v the track direction
+ */
+ public void setTrackDir(Hep3Vector v) {
+ bank_double[GBLDOUBLE.TDIRX] = v.x();
+ bank_double[GBLDOUBLE.TDIRY] = v.y();
+ bank_double[GBLDOUBLE.TDIRZ] = v.z();
+ }
+ public Hep3Vector getTrackDirection() {
+ return new BasicHep3Vector(getTx(),getTy(),getTz());
+ }
+ public double getTx() {
+ return getDoubleVal(GBLDOUBLE.TDIRX);
+ }
+ public double getTy() {
+ return getDoubleVal(GBLDOUBLE.TDIRY);
+ }
+ public double getTz() {
+ return getDoubleVal(GBLDOUBLE.TDIRZ);
+ }
+
+ public void setTrackPhi(double phi) {
+ bank_double[GBLDOUBLE.TPHI] = phi;
+ }
+
+ public double getTrackPhi() {
+ return getDoubleVal(GBLDOUBLE.TPHI);
+ }
+
+ public void setTrackLambda(double lambda) {
+ bank_double[GBLDOUBLE.TLAMBDA] = lambda;
+ }
+
+ public double getTrackLambda() {
+ return getDoubleVal(GBLDOUBLE.TLAMBDA);
+ }
+
+
+ public void setMeas(double umeas) {
+ bank_double[GBLDOUBLE.UMEAS] = umeas;
+ }
+
+ public double getMeas() {
+ return getDoubleVal(GBLDOUBLE.UMEAS);
+ }
+
+ public void setMeasErr(double x) {
+ bank_double[GBLDOUBLE.UMEASERR] = x;
+ }
+
+ public double getMeasErr() {
+ return getDoubleVal(GBLDOUBLE.UMEASERR);
+ }
+
+
+ /**
+ * Set track position in local frame
+ * @param trkpos_meas
+ */
+ public void setTrackPos(Hep3Vector trkpos_meas) {
+ bank_double[GBLDOUBLE.TPOSU] = trkpos_meas.x();
+ bank_double[GBLDOUBLE.TPOSV] = trkpos_meas.y();
+ bank_double[GBLDOUBLE.TPOSW] = trkpos_meas.z();
+ }
+
+ public Hep3Vector getTrackPos() {
+ return new BasicHep3Vector(getTrackPosU(),getTrackPosV(),getTrackPosW());
+ }
+
+ public double getTrackPosU() {
+ return getDoubleVal(GBLDOUBLE.TPOSU);
+ }
+
+ public double getTrackPosV() {
+ return getDoubleVal(GBLDOUBLE.TPOSV);
+ }
+
+ public double getTrackPosW() {
+ return getDoubleVal(GBLDOUBLE.TPOSW);
+ }
+
+ public void setScatterAngle(double scatAngle) {
+ bank_double[GBLDOUBLE.MSANGLE] = scatAngle;
+ }
+
+ public double getScatterAngle() {
+ return getDoubleVal(GBLDOUBLE.MSANGLE);
+ }
+
+ /*
+ * The functions below are all overide from
+ * @see org.lcsim.event.GenericObject#getNInt()
+ */
+
+ public int getNInt() {
+ return GBLINT.BANK_INT_SIZE;
+ }
+
+ public int getNFloat() {
+ return 0;
+ }
+
+ public int getNDouble() {
+ return GBLDOUBLE.BANK_DOUBLE_SIZE;
+ }
+
+ public int getIntVal(int index) {
+ return bank_int[index];
+ }
+
+ public float getFloatVal(int index) {
+ return 0;
+ }
+
+ public double getDoubleVal(int index) {
+ return bank_double[index];
+ }
+
+ public boolean isFixedSize() {
+ return false;
+ }
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLTrackData.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLTrackData.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLTrackData.java Wed Apr 27 11:11:32 2016
@@ -3,7 +3,6 @@
import hep.physics.vec.BasicHep3Matrix;
import hep.physics.vec.Hep3Matrix;
-import org.hps.recon.tracking.gbl.GBLOutput.PerigeeParams;
import org.lcsim.event.GenericObject;
/**
@@ -61,7 +60,7 @@
}
/**
- * @param set track id to val
+ * @param val track ID value
*/
public void setTrackId(int val) {
bank_int[GBLINT.ID] = val;
@@ -77,7 +76,7 @@
/**
* @param perPar is the perigee parameters that is added to object
*/
- public void setPerigeeTrackParameters(PerigeeParams perPar) {
+ public void setPerigeeTrackParameters(GblUtils.PerigeeParams perPar) {
this.bank_double[GBLDOUBLE.PERKAPPA] = perPar.getKappa();
this.bank_double[GBLDOUBLE.PERTHETA] = perPar.getTheta();
this.bank_double[GBLDOUBLE.PERPHI] = perPar.getPhi();
@@ -85,8 +84,8 @@
this.bank_double[GBLDOUBLE.PERZ0] = perPar.getZ0();
}
- public PerigeeParams getPerigeeTrackParameters() {
- return new PerigeeParams(this.bank_double[GBLDOUBLE.PERKAPPA],
+ public GblUtils.PerigeeParams getPerigeeTrackParameters() {
+ return new GblUtils.PerigeeParams(this.bank_double[GBLDOUBLE.PERKAPPA],
this.bank_double[GBLDOUBLE.PERTHETA],
this.bank_double[GBLDOUBLE.PERPHI],
this.bank_double[GBLDOUBLE.PERD0],
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GblData.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GblData.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GblData.java Wed Apr 27 11:11:32 2016
@@ -69,8 +69,8 @@
nExt = extDer.getColumnDimension();
}
int nParMax = 5 + nLocal + nExt;
-// theParameters.reserve(nParMax); // have to be sorted
-// theDerivatives.reserve(nParMax);
+// theParameters.reserve(nParMax); // have to be sorted
+// theDerivatives.reserve(nParMax);
if (derLocal != null) {
for (int i = 0; i < derLocal.getColumnDimension(); ++i) // local derivatives
@@ -123,8 +123,8 @@
nExtDer = extDer.getColumnDimension();
}
int nParMax = 7 + nExtDer;
-// theParameters.reserve(nParMax); // have to be sorted
-// theDerivatives.reserve(nParMax);
+// theParameters.reserve(nParMax); // have to be sorted
+// theDerivatives.reserve(nParMax);
if (extDer != null) {
for (int i = 0; i < extDer.getColumnDimension(); ++i) // external derivatives
@@ -151,14 +151,14 @@
// * \param [in] derivatives Derivatives (vector)
// */
//void addDerivatives(const std::vector<unsigned int> &index,
-// const std::vector<double> &derivatives) {
-// for (unsigned int i = 0; i < derivatives.size(); ++i) // any derivatives
-// {
-// if (derivatives[i]) {
-// theParameters.push_back(index[i]);
-// theDerivatives.push_back(derivatives[i]);
-// }
-// }
+// const std::vector<double> &derivatives) {
+// for (unsigned int i = 0; i < derivatives.size(); ++i) // any derivatives
+// {
+// if (derivatives[i]) {
+// theParameters.push_back(index[i]);
+// theDerivatives.push_back(derivatives[i]);
+// }
+// }
//}
/// Calculate prediction for data from fit (by GblTrajectory::fit).
void setPrediction(VVector aVector)
@@ -176,27 +176,27 @@
// */
//double setDownWeighting(unsigned int aMethod) {
//
-// double aWeight = 1.;
-// double scaledResidual = fabs(theValue - thePrediction) * sqrt(thePrecision);
-// if (aMethod == 1) // Tukey
-// {
-// if (scaledResidual < 4.6851) {
-// aWeight = (1.0 - 0.045558 * scaledResidual * scaledResidual);
-// aWeight *= aWeight;
-// } else {
-// aWeight = 0.;
-// }
-// } else if (aMethod == 2) //Huber
-// {
-// if (scaledResidual >= 1.345) {
-// aWeight = 1.345 / scaledResidual;
-// }
-// } else if (aMethod == 3) //Cauchy
-// {
-// aWeight = 1.0 / (1.0 + (scaledResidual * scaledResidual / 5.6877));
-// }
-// theDownWeight = aWeight;
-// return aWeight;
+// double aWeight = 1.;
+// double scaledResidual = fabs(theValue - thePrediction) * sqrt(thePrecision);
+// if (aMethod == 1) // Tukey
+// {
+// if (scaledResidual < 4.6851) {
+// aWeight = (1.0 - 0.045558 * scaledResidual * scaledResidual);
+// aWeight *= aWeight;
+// } else {
+// aWeight = 0.;
+// }
+// } else if (aMethod == 2) //Huber
+// {
+// if (scaledResidual >= 1.345) {
+// aWeight = 1.345 / scaledResidual;
+// }
+// } else if (aMethod == 3) //Cauchy
+// {
+// aWeight = 1.0 / (1.0 + (scaledResidual * scaledResidual / 5.6877));
+// }
+// theDownWeight = aWeight;
+// return aWeight;
//}
//
/// Calculate Chi2 contribution.
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GblPoint.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GblPoint.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GblPoint.java Wed Apr 27 11:11:32 2016
@@ -486,7 +486,7 @@
{
int ifail = 0;
// to optimize: need only two last rows of inverse
-// prevJacobian = aJac.InverseFast(ifail);
+// prevJacobian = aJac.InverseFast(ifail);
// block matrix algebra
Matrix CA = aJac.sub(2, 3, 3, 0).times(aJac.sub(3, 0, 0).inverse()); // C*A^-1
Matrix DCAB = aJac.sub(2, 3, 3).minus(CA.times(aJac.sub(3, 2, 0, 3))); // D - C*A^-1 *B
@@ -531,14 +531,14 @@
}
matW.placeAt(matWt.inverse(), 0, 0);
-// if (!matW.InvertFast()) {
-// std::cout << " getDerivatives failed to invert matrix: "
-// << matW << "\n";
-// std::cout
-// << " Possible reason for singular matrix: multiple GblPoints at same arc-length"
-// << "\n";
-// throw std::overflow_error("Singular matrix inversion exception");
-// }
+// if (!matW.InvertFast()) {
+// std::cout << " getDerivatives failed to invert matrix: "
+// << matW << "\n";
+// std::cout
+// << " Possible reason for singular matrix: multiple GblPoints at same arc-length"
+// << "\n";
+// throw std::overflow_error("Singular matrix inversion exception");
+// }
matWJ.placeAt(matW.times(matJ), 0, 0);
vecWd.placeAt(matW.times(vecd), 0, 0);
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GblTrajectory.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GblTrajectory.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GblTrajectory.java Wed Apr 27 11:11:32 2016
@@ -214,7 +214,7 @@
}
/// Retrieve validity of trajectory
- boolean isValid()
+ public boolean isValid()
{
return constructOK;
}
@@ -268,7 +268,7 @@
List<GblPoint> list = thePoints.get(iTraj);
int size = list.size();
// first point is offset
- list.get(0).setOffset(numOffsets++); // intermediate scatterers are offsets
+ list.get(0).setOffset(numOffsets++); // intermediate scatterers are offsets
for (int i = 1; i < size - 1; ++i) {
GblPoint p = list.get(i);
if (p.hasScatterer()) {
@@ -434,7 +434,7 @@
aPoint.getDerivatives(0, prevW, prevWJ, prevWd); // W-, W- * J-, W- * d-
aPoint.getDerivatives(1, nextW, nextWJ, nextWd); // W-, W- * J-, W- * d-
Matrix sumWJ = prevWJ.plus(nextWJ);
-//? matN = sumWJ.inverse(ierr); // N = (W- * J- + W+ * J+)^-1
+//? matN = sumWJ.inverse(ierr); // N = (W- * J- + W+ * J+)^-1
// derivatives for u_int
Matrix prevNW = matN.times(prevW); // N * W-
Matrix nextNW = matN.times(nextW); // N * W+
@@ -841,7 +841,7 @@
/**
* \param [in] level print level (0: minimum, >0: more)
*/
- void printPoints(int level)
+ public void printPoints(int level)
{
System.out.println("GblPoints ");
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GblUtils.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GblUtils.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/GblUtils.java Wed Apr 27 11:11:32 2016
@@ -1,29 +1,240 @@
package org.hps.recon.tracking.gbl;
+import java.util.logging.Logger;
+
import hep.physics.matrix.BasicMatrix;
+import hep.physics.vec.BasicHep3Matrix;
+import hep.physics.vec.BasicHep3Vector;
+import hep.physics.vec.Hep3Matrix;
+import hep.physics.vec.Hep3Vector;
+import hep.physics.vec.VecOp;
+
+import org.hps.recon.tracking.HpsHelicalTrackFit;
import org.hps.recon.tracking.MaterialSupervisor;
import org.hps.recon.tracking.MultipleScattering;
+import org.hps.recon.tracking.gbl.matrix.Matrix;
+import org.hps.recon.tracking.gbl.matrix.Vector;
+import org.lcsim.constants.Constants;
import org.lcsim.detector.IDetectorElement;
import org.lcsim.fit.helicaltrack.HelicalTrackFit;
import org.lcsim.fit.helicaltrack.HelixUtils;
import org.lcsim.recon.tracking.seedtracker.ScatterAngle;
/**
- * A class providing various utilities related to GBL
+ * A class with only static utilities related to GBL
*
* @author Per Hansson Adrian <[log in to unmask]>
*
*/
public class GblUtils {
-
+
+ public static Logger LOGGER = Logger.getLogger(GblUtils.class.getName());
+
+
+
+
+ /**
+ * Private constructor to avoid instantiation.
+ */
private GblUtils() {
}
+
+
+ /**
+ *
+ * Store local curvilinear track parameters.
+ *
+ * @author Per Hansson Adrian <[log in to unmask]>
+ *
+ */
+ public static class ClParams {
+
+ private BasicMatrix _params = new BasicMatrix(1, 5);
+
+ public ClParams(HelicalTrackFit htf, double B) {
+
+ if (htf == null) {
+ return;
+ }
+
+ Hep3Matrix perToClPrj = getPerToClPrj(htf);
+
+ double d0 = -1 * htf.dca(); //sign convention for curvilinear frame
+ double z0 = htf.z0();
+ Hep3Vector vecPer = new BasicHep3Vector(0., d0, z0);
+ //System.out.printf("%s: vecPer=%s\n",this.getClass().getSimpleName(),vecPer.toString());
+
+ Hep3Vector vecCl = VecOp.mult(perToClPrj, vecPer);
+ //System.out.printf("%s: vecCl=%s\n",this.getClass().getSimpleName(),vecCl.toString());
+ double xT = vecCl.x();
+ double yT = vecCl.y();
+ //double zT = vecCl.z();
+
+ double lambda = Math.atan(htf.slope());
+ double q = Math.signum(htf.R());
+ double qOverP = q / htf.p(Math.abs(B));
+ double phi = htf.phi0();
+
+ _params.setElement(0, FittedGblTrajectory.GBLPARIDX.QOVERP.getValue(), qOverP);
+ _params.setElement(0, FittedGblTrajectory.GBLPARIDX.YTPRIME.getValue(), lambda);
+ _params.setElement(0, FittedGblTrajectory.GBLPARIDX.XTPRIME.getValue(), phi);
+ _params.setElement(0, FittedGblTrajectory.GBLPARIDX.XT.getValue(), xT);
+ _params.setElement(0, FittedGblTrajectory.GBLPARIDX.YT.getValue(), yT);
+ }
+
+ public BasicMatrix getParams() {
+ return _params;
+ }
+
+ double getQoverP() {
+ return _params.e(0, FittedGblTrajectory.GBLPARIDX.QOVERP.getValue());
+ }
+
+ double getLambda() {
+ return _params.e(0, FittedGblTrajectory.GBLPARIDX.YTPRIME.getValue());
+ }
+
+ double getPhi() {
+ return _params.e(0, FittedGblTrajectory.GBLPARIDX.XTPRIME.getValue());
+ }
+
+ double getXt() {
+ return _params.e(0, FittedGblTrajectory.GBLPARIDX.XT.getValue());
+ }
+
+ double getYt() {
+ return _params.e(0, FittedGblTrajectory.GBLPARIDX.YT.getValue());
+ }
+
+ }
+
+
+
+
+
+ /**
+ *
+ * Store perigee track parameters.
+ *
+ * @author Per Hansson Adrian <[log in to unmask]>
+ *
+ */
+ public static class PerigeeParams {
+
+ private final BasicMatrix _params;
+
+ public PerigeeParams(HelicalTrackFit htf, double B) {
+ _params = GblUtils.getPerParVector(htf, B);
+ }
+
+ public PerigeeParams(double kappa, double theta, double phi, double d0, double z0) {
+ this._params = GblUtils.getPerParVector(kappa, theta, phi, d0, z0);
+ }
+
+ public BasicMatrix getParams() {
+ return _params;
+ }
+
+ public double getKappa() {
+ return _params.e(0, 0);
+ }
+
+ public double getTheta() {
+ return _params.e(0, 1);
+ }
+
+ public double getPhi() {
+ return _params.e(0, 2);
+ }
+
+ public double getD0() {
+ return _params.e(0, 3);
+ }
+
+ public double getZ0() {
+ return _params.e(0, 4);
+ }
+ }
+
+
+
+
+
+
+ /**
+ * Get corrected perigee parameters.
+ * @param locPar - GBL local curvilinear corrections
+ * @param helicalTrackFit - helix
+ * @param bfield - B-field strength
+ * @return corrected parameters
+ */
+ public static double[] getCorrectedPerigeeParameters(Vector locPar, HelicalTrackFit helicalTrackFit, double bfield) {
+
+
+ // Explicitly assign corrections to local variables
+ double qOverPCorr = locPar.get(FittedGblTrajectory.GBLPARIDX.QOVERP.getValue());
+ double xTPrimeCorr = locPar.get(FittedGblTrajectory.GBLPARIDX.XTPRIME.getValue());
+ double yTPrimeCorr = locPar.get(FittedGblTrajectory.GBLPARIDX.YTPRIME.getValue());
+ double xTCorr = locPar.get(FittedGblTrajectory.GBLPARIDX.XT.getValue());
+ double yTCorr = locPar.get(FittedGblTrajectory.GBLPARIDX.YT.getValue());
+
+
+ // Get helix parameters
+ double qOverP = helicalTrackFit.curvature() / (Constants.fieldConversion * Math.abs(bfield) * Math.sqrt(1 + Math.pow(helicalTrackFit.slope(), 2)));
+ double d0 = -1.0 * helicalTrackFit.dca(); // correct for different sign convention of d0 in perigee frame
+ double z0 = helicalTrackFit.z0();
+ double phi0 = helicalTrackFit.phi0();
+ double lambda = Math.atan(helicalTrackFit.slope());
+
+ // calculate new d0 and z0
+ Hep3Matrix perToClPrj = GblUtils.getPerToClPrj(helicalTrackFit);
+
+ Hep3Matrix clToPerPrj = VecOp.inverse(perToClPrj);
+ Hep3Vector corrPer = VecOp.mult(clToPerPrj, new BasicHep3Vector(xTCorr, yTCorr, 0.0));
+
+ //d0
+ double d0_corr = corrPer.y();
+ double dca_gbl = -1.0 * (d0 + d0_corr);
+
+ //z0
+ double z0_corr = corrPer.z();
+ double z0_gbl = z0 + z0_corr;
+
+ //calculate new slope
+ double lambda_gbl = lambda + yTPrimeCorr;
+ double slope_gbl = Math.tan(lambda_gbl);
+
+ // calculate new curvature
+ double qOverP_gbl = qOverP + qOverPCorr;
+ double C_gbl = Constants.fieldConversion * Math.abs(bfield) * qOverP_gbl / Math.cos(lambda_gbl);
+
+ //calculate new phi0
+ double phi0_gbl = phi0 + xTPrimeCorr - corrPer.x() * C_gbl;
+
+ LOGGER.info("qOverP=" + qOverP + " qOverPCorr=" + qOverPCorr + " qOverP_gbl=" + qOverP_gbl + " ==> pGbl=" + 1.0 / qOverP_gbl + " C_gbl=" + C_gbl);
+
+ LOGGER.info(String.format("corrected helix: d0=%f, z0=%f, omega=%f, tanlambda=%f, phi0=%f, p=%f", dca_gbl, z0_gbl, C_gbl, slope_gbl, phi0_gbl, Math.abs(1 / qOverP_gbl)));
+
+ double parameters_gbl[] = new double[5];
+ parameters_gbl[HelicalTrackFit.dcaIndex] = dca_gbl;
+ parameters_gbl[HelicalTrackFit.phi0Index] = phi0_gbl;
+ parameters_gbl[HelicalTrackFit.curvatureIndex] = C_gbl;
+ parameters_gbl[HelicalTrackFit.z0Index] = z0_gbl;
+ parameters_gbl[HelicalTrackFit.slopeIndex] = slope_gbl;
+
+ return parameters_gbl;
+
+ }
+
+
+
+
public static BasicMatrix gblSimpleJacobianLambdaPhi(double ds, double cosl, double bfac) {
/*
Simple jacobian: quadratic in arc length difference.
using lambda phi as directions
-
+
@param ds: arc length difference
@type ds: float
@param cosl: cos(lambda)
@@ -105,4 +316,168 @@
throw new UnsupportedOperationException("Should not happen. This problem is only solved with the MaterialSupervisor.");
}
}
+
+ /**
+ * Calculate the Jacobian from Curvilinear to Perigee frame.
+ * @param helicalTrackFit - original helix
+ * @param helicalTrackFitAtIPCorrected - corrected helix at this point
+ * @param bfield - magnitude of B-field
+ * @return the Jacobian matrix from Curvilinear to Perigee frame
+ */
+ public static Matrix getCLToPerigeeJacobian(HelicalTrackFit helicalTrackFit, HpsHelicalTrackFit helicalTrackFitAtIPCorrected, double bfield) {
+
+ /*
+ * This part is taken from:
+ // Strandlie, Wittek, NIMA 566, 2006
+ Matrix covariance_gbl = new Matrix(5, 5);
+ //helpers
+     double Bz = -Constants.fieldConversion * Math.abs(bfield); // TODO sign convention and should it be scaled from Tesla?
+ double p = Math.abs(1 / qOverP_gbl);
+ double q = Math.signum(qOverP_gbl);
+ double tanLambda = Math.tan(lambda_gbl);
+ double cosLambda = Math.cos(lambda_gbl);
+ // Hep3Vector B = new BasicHep3Vector(0, 0, Bz); // TODO sign convention?
+ Hep3Vector H = new BasicHep3Vector(0, 0, 1);
+ Hep3Vector T = HelixUtils.Direction(helix, 0.);
+ Hep3Vector HcrossT = VecOp.cross(H, T);
+ double alpha = HcrossT.magnitude(); // this should be Bvec cross TrackDir/|B|
+ double Q = Bz * q / p;
+ Hep3Vector Z = new BasicHep3Vector(0, 0, 1);
+ Hep3Vector J = VecOp.mult(1. / VecOp.cross(T, Z).magnitude(), VecOp.cross(T, Z));
+ Hep3Vector K = Z;
+ Hep3Vector U = VecOp.mult(-1, J);
+ Hep3Vector V = VecOp.cross(T, U);
+ Hep3Vector I = VecOp.cross(J, K);
+ Hep3Vector N = VecOp.mult(1 / alpha, VecOp.cross(H, T));
+ double UdotI = VecOp.dot(U, I);
+ double NdotV = VecOp.dot(N, V);
+ double NdotU = VecOp.dot(N, U);
+ double TdotI = VecOp.dot(T, I);
+ double VdotI = VecOp.dot(V, I);
+ double VdotK = VecOp.dot(V, K);
+ covariance_gbl.set(HelicalTrackFit.dcaIndex, FittedGblTrajectory.GBLPARIDX.XT.getValue(), VdotK / TdotI);
+ covariance_gbl.set(HelicalTrackFit.phi0Index, FittedGblTrajectory.GBLPARIDX.XTPRIME.getValue(), 1);
+ covariance_gbl.set(HelicalTrackFit.phi0Index, FittedGblTrajectory.GBLPARIDX.XT.getValue(), -alpha * Q * UdotI * NdotU / (cosLambda * TdotI));
+ covariance_gbl.set(HelicalTrackFit.phi0Index, FittedGblTrajectory.GBLPARIDX.YT.getValue(), -alpha * Q * VdotI * NdotU / (cosLambda * TdotI));
+ covariance_gbl.set(HelicalTrackFit.curvatureIndex, FittedGblTrajectory.GBLPARIDX.QOVERP.getValue(), -1 * Bz / cosLambda);
+ // covariance_gbl.set(HelicalTrackFit.curvatureIndex, FittedGblTrajectory.GBLPARIDX.XTPRIME.getValue(), 0);
+ covariance_gbl.set(HelicalTrackFit.curvatureIndex, FittedGblTrajectory.GBLPARIDX.YTPRIME.getValue(), -1 * q * Bz * tanLambda / (p * cosLambda));
+ covariance_gbl.set(HelicalTrackFit.curvatureIndex, FittedGblTrajectory.GBLPARIDX.XT.getValue(), q * Bz * alpha * Q * tanLambda * UdotI * NdotV / (p * cosLambda * TdotI));
+ covariance_gbl.set(HelicalTrackFit.curvatureIndex, FittedGblTrajectory.GBLPARIDX.YT.getValue(), q * Bz * alpha * Q * tanLambda * VdotI * NdotV / (p * cosLambda * TdotI));
+ covariance_gbl.set(HelicalTrackFit.z0Index, FittedGblTrajectory.GBLPARIDX.YT.getValue(), -1 / TdotI);
+ covariance_gbl.set(HelicalTrackFit.slopeIndex, FittedGblTrajectory.GBLPARIDX.YTPRIME.getValue(), -1);
+ covariance_gbl.set(HelicalTrackFit.slopeIndex, FittedGblTrajectory.GBLPARIDX.XT.getValue(), alpha * Q * UdotI * NdotV / TdotI);
+ covariance_gbl.set(HelicalTrackFit.slopeIndex, FittedGblTrajectory.GBLPARIDX.YT.getValue(), alpha * Q * VdotI * NdotV / TdotI);
+
+ covariance_gbl.print(15, 13);
+ */
+
+ // Sho's magic below
+
+ // Use projection matrix
+ //TODO should this not be the corrected helix?
+ Hep3Matrix perToClPrj = getPerToClPrj(helicalTrackFit);
+ Hep3Matrix clToPerPrj = VecOp.inverse(perToClPrj);
+ double C_gbl = helicalTrackFitAtIPCorrected.curvature();
+ double lambda_gbl = Math.atan(helicalTrackFitAtIPCorrected.slope());
+ double qOverP_gbl = helicalTrackFitAtIPCorrected.curvature() / (Constants.fieldConversion * Math.abs(bfield) * Math.sqrt(1 + Math.pow(helicalTrackFitAtIPCorrected.slope(), 2)));
+
+ Matrix jacobian = new Matrix(5, 5);
+ jacobian.set(HelicalTrackFit.dcaIndex, FittedGblTrajectory.GBLPARIDX.XT.getValue(), -clToPerPrj.e(1, 0));
+ jacobian.set(HelicalTrackFit.dcaIndex, FittedGblTrajectory.GBLPARIDX.YT.getValue(), -clToPerPrj.e(1, 1));
+ jacobian.set(HelicalTrackFit.phi0Index, FittedGblTrajectory.GBLPARIDX.XTPRIME.getValue(), 1.0);
+ jacobian.set(HelicalTrackFit.phi0Index, FittedGblTrajectory.GBLPARIDX.YT.getValue(), clToPerPrj.e(0, 1) * C_gbl);
+ jacobian.set(HelicalTrackFit.curvatureIndex, FittedGblTrajectory.GBLPARIDX.QOVERP.getValue(), Constants.fieldConversion * Math.abs(bfield) / Math.cos(lambda_gbl));
+ jacobian.set(HelicalTrackFit.curvatureIndex, FittedGblTrajectory.GBLPARIDX.YTPRIME.getValue(), Constants.fieldConversion * Math.abs(bfield) * qOverP_gbl * Math.tan(lambda_gbl) / Math.cos(lambda_gbl));
+ jacobian.set(HelicalTrackFit.z0Index, FittedGblTrajectory.GBLPARIDX.XT.getValue(), clToPerPrj.e(2, 0));
+ jacobian.set(HelicalTrackFit.z0Index, FittedGblTrajectory.GBLPARIDX.YT.getValue(), clToPerPrj.e(2, 1));
+ jacobian.set(HelicalTrackFit.slopeIndex, FittedGblTrajectory.GBLPARIDX.YTPRIME.getValue(), Math.pow(Math.cos(lambda_gbl), -2.0));
+
+ return jacobian;
+ }
+
+
+
+
+
+ /**
+ * Computes the projection matrix from the perigee XY plane variables dca
+ * and z0 into the curvilinear xT,yT,zT frame (U,V,T) with reference point (0,0,0)
+ * for the perigee frame.
+ *
+ * @param htf input helix to find the track direction
+ * @return 3x3 projection matrix
+ */
+ static Hep3Matrix getPerToClPrj(HelicalTrackFit htf) {
+ Hep3Vector Z = new BasicHep3Vector(0, 0, 1);
+ Hep3Vector T = HelixUtils.Direction(htf, 0.);
+ Hep3Vector J = VecOp.mult(1. / VecOp.cross(T, Z).magnitude(), VecOp.cross(T, Z));
+ Hep3Vector K = Z;
+ Hep3Vector U = VecOp.mult(-1, J);
+ Hep3Vector V = VecOp.cross(T, U);
+ Hep3Vector I = VecOp.cross(J, K);
+
+ BasicHep3Matrix trans = new BasicHep3Matrix();
+ trans.setElement(0, 0, VecOp.dot(I, U));
+ trans.setElement(0, 1, VecOp.dot(J, U));
+ trans.setElement(0, 2, VecOp.dot(K, U));
+ trans.setElement(1, 0, VecOp.dot(I, V));
+ trans.setElement(1, 1, VecOp.dot(J, V));
+ trans.setElement(1, 2, VecOp.dot(K, V));
+ trans.setElement(2, 0, VecOp.dot(I, T));
+ trans.setElement(2, 1, VecOp.dot(J, T));
+ trans.setElement(2, 2, VecOp.dot(K, T));
+ return trans;
+
+ /*
+ Hep3Vector B = new BasicHep3Vector(0, 0, 1); // TODO sign convention?
+ Hep3Vector H = VecOp.mult(1 / bfield, B);
+ Hep3Vector T = HelixUtils.Direction(helix, 0.);
+ Hep3Vector HcrossT = VecOp.cross(H, T);
+ double alpha = HcrossT.magnitude(); // this should be Bvec cross TrackDir/|B|
+ double Q = Math.abs(bfield) * q / p;
+ Hep3Vector Z = new BasicHep3Vector(0, 0, 1);
+ Hep3Vector J = VecOp.mult(1. / VecOp.cross(T, Z).magnitude(), VecOp.cross(T, Z));
+ Hep3Vector K = Z;
+ Hep3Vector U = VecOp.mult(-1, J);
+ Hep3Vector V = VecOp.cross(T, U);
+ Hep3Vector I = VecOp.cross(J, K);
+ Hep3Vector N = VecOp.mult(1 / alpha, VecOp.cross(H, T)); //-cross(T,H)/alpha = -cross(T,Z) = -J
+ double UdotI = VecOp.dot(U, I); // 0,0
+ double NdotV = VecOp.dot(N, V); // 1,1?
+ double NdotU = VecOp.dot(N, U); // 0,1?
+ double TdotI = VecOp.dot(T, I); // 2,0
+ double VdotI = VecOp.dot(V, I); // 1,0
+ double VdotK = VecOp.dot(V, K); // 1,2
+ */
+ }
+
+
+
+
+
+ private static BasicMatrix getPerParVector(double kappa, double theta, double phi, double d0, double z0) {
+ BasicMatrix perPar = new BasicMatrix(1, 5);
+ perPar.setElement(0, 0, kappa);
+ perPar.setElement(0, 1, theta);
+ perPar.setElement(0, 2, phi);
+ perPar.setElement(0, 3, d0);
+ perPar.setElement(0, 4, z0);
+ return perPar;
+ }
+
+
+
+
+
+ private static BasicMatrix getPerParVector(HelicalTrackFit htf, double B) {
+ if (htf != null) {
+ double kappa = -1.0 * Math.signum(B) / htf.R();
+ double theta = Math.PI / 2.0 - Math.atan(htf.slope());
+ return getPerParVector(kappa, theta, htf.phi0(), htf.dca(), htf.z0());
+ }
+ return new BasicMatrix(1, 5);
+ }
+
+
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/HelicalTrackStripGbl.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/HelicalTrackStripGbl.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/HelicalTrackStripGbl.java Wed Apr 27 11:11:32 2016
@@ -13,10 +13,8 @@
import org.lcsim.fit.helicaltrack.HelicalTrackStrip;
/**
- * Encapsulates the {@HelicalTrackStrip} to make sure that the local unit vectors are
+ * Encapsulates the {@link org.lcsim.fit.helicaltrack.HelicalTrackStrip} to make sure that the local unit vectors are
* coming from the underlying geometry.
- *
- * I think the base calss should change but whatever.
*
* @author Per Hansson Adrian <[log in to unmask]>
*
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/HpsGblRefitter.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/HpsGblRefitter.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/HpsGblRefitter.java Wed Apr 27 11:11:32 2016
@@ -14,10 +14,12 @@
import java.util.logging.Formatter;
import java.util.logging.Level;
import java.util.logging.Logger;
+
import org.apache.commons.math3.util.Pair;
-
import org.hps.recon.tracking.TrackUtils;
+
import static org.hps.recon.tracking.gbl.MakeGblTracks.makeCorrectedTrack;
+
import org.hps.recon.tracking.gbl.matrix.Matrix;
import org.hps.recon.tracking.gbl.matrix.SymMatrix;
import org.hps.recon.tracking.gbl.matrix.Vector;
@@ -52,6 +54,8 @@
private final String track2GblTrackRelationName = "TrackToGBLTrack";
private final String gblTrack2StripRelationName = "GBLTrackToStripData";
private final String outputTrackCollectionName = "GBLTracks";
+ private final String trackRelationCollectionName = "MatchedToGBLTrackRelations";
+
private MilleBinary mille;
private String milleBinaryFileName = MilleBinary.DEFAULT_OUTPUT_FILE_NAME;
@@ -184,6 +188,8 @@
LOGGER.info(trackFits.size() + " fitted GBL tracks before adding to event");
List<Track> newTracks = new ArrayList<Track>();
+
+ List<LCRelation> trackRelations = new ArrayList<LCRelation>();
List<GBLKinkData> kinkDataCollection = new ArrayList<GBLKinkData>();
@@ -201,6 +207,10 @@
// Add the track to the list of tracks
newTracks.add(trk.getFirst());
+
+ // Create relation from seed to GBL track
+ trackRelations.add(new BaseLCRelation(fittedTraj.get_seed(), trk.getFirst()));
+
kinkDataCollection.add(trk.getSecond());
kinkDataRelations.add(new BaseLCRelation(trk.getSecond(), trk.getFirst()));
}
@@ -210,6 +220,7 @@
// Put the tracks back into the event and exit
int flag = 1 << LCIOConstants.TRBIT_HITS;
event.put(outputTrackCollectionName, newTracks, Track.class, flag);
+ event.put(trackRelationCollectionName, trackRelations, LCRelation.class, 0);
event.put(GBLKinkData.DATA_COLLECTION, kinkDataCollection, GBLKinkData.class, 0);
event.put(GBLKinkData.DATA_RELATION_COLLECTION, kinkDataRelations, LCRelation.class, 0);
@@ -222,12 +233,15 @@
public static FittedGblTrajectory fit(List<GBLStripClusterData> hits, double bfac, boolean debug) {
// path length along trajectory
double s = 0.;
+ int iLabel;
+
// jacobian to transport errors between points along the path
Matrix jacPointToPoint = new Matrix(5, 5);
jacPointToPoint.UnitMatrix();
// Vector of the strip clusters used for the GBL fit
List<GblPoint> listOfPoints = new ArrayList<GblPoint>();
+ Map<Integer, Double> pathLengthMap = new HashMap<Integer, Double>();
// Store the projection from local to measurement frame for each strip cluster
Map< Integer, Matrix> proL2m_list = new HashMap<Integer, Matrix>();
@@ -236,6 +250,10 @@
//start trajectory at refence point (s=0) - this point has no measurement
GblPoint ref_point = new GblPoint(jacPointToPoint);
listOfPoints.add(ref_point);
+
+ // save path length to each point
+ iLabel = listOfPoints.size();
+ pathLengthMap.put(iLabel, s);
// Loop over strips
int n_strips = hits.size();
@@ -323,11 +341,11 @@
// measurement/residual in the measurement system
// only 1D measurement in u-direction, set strip measurement direction to zero
Vector meas = new Vector(2);
-// double uRes = strip->GetUmeas() - strip->GetTrackPos().x(); // how can this be correct?
+// double uRes = strip->GetUmeas() - strip->GetTrackPos().x(); // how can this be correct?
double uRes = strip.getMeas() - strip.getTrackPos().x();
meas.set(0, uRes);
meas.set(1, 0.);
-// //meas[0][0] += deltaU[iLayer] # misalignment
+// //meas[0][0] += deltaU[iLayer] # misalignment
Vector measErr = new Vector(2);
measErr.set(0, strip.getMeasErr());
measErr.set(1, 0.);
@@ -405,7 +423,10 @@
// Add this GBL point to list that will be used in fit
listOfPoints.add(point);
- int iLabel = listOfPoints.size();
+ iLabel = listOfPoints.size();
+
+ // save path length to each point
+ pathLengthMap.put(iLabel, s);
// Update MS covariance matrix
msCov.set(1, 1, msCov.get(1, 1) + scatErr.get(0) * scatErr.get(0));
@@ -493,8 +514,10 @@
LOGGER.fine("locPar " + aCorrection.toString());
-//
- return new FittedGblTrajectory(traj, dVals[0], iVals[0], dVals[1]);
+ FittedGblTrajectory fittedTraj = new FittedGblTrajectory(traj, dVals[0], iVals[0], dVals[1]);
+ fittedTraj.setPathLengthMap(pathLengthMap);
+
+ return fittedTraj;
}
@Override
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/MakeGblTracks.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/MakeGblTracks.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/MakeGblTracks.java Wed Apr 27 11:11:32 2016
@@ -5,22 +5,18 @@
import hep.physics.vec.Hep3Matrix;
import hep.physics.vec.Hep3Vector;
import hep.physics.vec.VecOp;
+
import java.util.ArrayList;
import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
+
import org.apache.commons.math3.util.Pair;
import org.hps.recon.tracking.CoordinateTransformations;
import org.hps.recon.tracking.MultipleScattering;
import org.hps.recon.tracking.TrackType;
import org.hps.recon.tracking.TrackUtils;
-import static org.hps.recon.tracking.gbl.GBLOutput.getPerToClPrj;
-import org.hps.recon.tracking.gbl.matrix.Matrix;
-import org.hps.recon.tracking.gbl.matrix.SymMatrix;
-import org.hps.recon.tracking.gbl.matrix.Vector;
import org.lcsim.constants.Constants;
import org.lcsim.detector.ITransform3D;
import org.lcsim.detector.tracker.silicon.ChargeCarrier;
@@ -48,7 +44,11 @@
public class MakeGblTracks {
private final static Logger LOGGER = Logger.getLogger(MakeGblTracks.class.getPackage().getName());
-
+ static {
+ LOGGER.setLevel(Level.WARNING);
+ }
+
+
private MakeGblTracks() {
}
@@ -60,7 +60,16 @@
}
}
- public static Pair<Track, GBLKinkData> makeCorrectedTrack(FittedGblTrajectory fittedTraj, HelicalTrackFit helix, List<TrackerHit> trackHits, int trackType, double bfield) {
+ /**
+ * Create a new {@link BaseTrack} from a {@link FittedGblTrajectory}.
+ * @param fittedGblTrajectory
+ * @param helicalTrackFit
+ * @param hitsOnTrack
+ * @param trackType
+ * @param bfield
+ * @return the new {@link BaseTrack} and the kinks along the {@link GblTrajectory} as a {@link Pair}.
+ */
+ public static Pair<Track, GBLKinkData> makeCorrectedTrack(FittedGblTrajectory fittedGblTrajectory, HelicalTrackFit helicalTrackFit, List<TrackerHit> hitsOnTrack, int trackType, double bfield) {
// Initialize the reference point to the origin
double[] ref = new double[]{0., 0., 0.};
@@ -68,220 +77,41 @@
BaseTrack trk = new BaseTrack();
// Add the hits to the track
- for (TrackerHit hit : trackHits) {
+ for (TrackerHit hit : hitsOnTrack) {
trk.addHit(hit);
}
- // Set state at vertex
- Pair<double[], SymmetricMatrix> correctedHelixParams = getGblCorrectedHelixParameters(helix, fittedTraj.get_traj(), bfield, FittedGblTrajectory.GBLPOINT.IP);
+ // Set base track parameters
+ Pair<double[], SymmetricMatrix> correctedHelixParams = fittedGblTrajectory.getCorrectedPerigeeParameters(helicalTrackFit, FittedGblTrajectory.GBLPOINT.IP, bfield);
trk.setTrackParameters(correctedHelixParams.getFirst(), bfield);// hack to set the track charge
trk.getTrackStates().clear();
- TrackState stateVertex = new BaseTrackState(correctedHelixParams.getFirst(), ref, correctedHelixParams.getSecond().asPackedArray(true), TrackState.AtIP, bfield);
- trk.getTrackStates().add(stateVertex);
-
- // Set state at last point
- Pair<double[], SymmetricMatrix> correctedHelixParamsLast = getGblCorrectedHelixParameters(helix, fittedTraj.get_traj(), bfield, FittedGblTrajectory.GBLPOINT.LAST);
+ // Set state at IP
+ TrackState stateIP = new BaseTrackState(correctedHelixParams.getFirst(), ref, correctedHelixParams.getSecond().asPackedArray(true), TrackState.AtIP, bfield);
+ trk.getTrackStates().add(stateIP);
+
+ // Set state at last point on trajectory
+ Pair<double[], SymmetricMatrix> correctedHelixParamsLast = fittedGblTrajectory.getCorrectedPerigeeParameters(helicalTrackFit, FittedGblTrajectory.GBLPOINT.LAST, bfield);
TrackState stateLast = new BaseTrackState(correctedHelixParamsLast.getFirst(), ref, correctedHelixParamsLast.getSecond().asPackedArray(true), TrackState.AtLastHit, bfield);
trk.getTrackStates().add(stateLast);
- GBLKinkData kinkData = getKinks(fittedTraj.get_traj());
+ // Extract kinks from trajectory
+ GBLKinkData kinkData = fittedGblTrajectory.getKinks();
// Set other info needed
- trk.setChisq(fittedTraj.get_chi2());
- trk.setNDF(fittedTraj.get_ndf());
+ trk.setChisq(fittedGblTrajectory.get_chi2());
+ trk.setNDF(fittedGblTrajectory.get_ndf());
trk.setFitSuccess(true);
trk.setRefPointIsDCA(true);
trk.setTrackType(TrackType.setGBL(trackType, true));
// Add the track to the list of tracks
// tracks.add(trk);
- LOGGER.info(String.format("helix chi2 %f ndf %d gbl chi2 %f ndf %d\n", helix.chisqtot(), helix.ndf()[0] + helix.ndf()[1], trk.getChi2(), trk.getNDF()));
- if (LOGGER.getLevel().intValue() <= Level.INFO.intValue()) {
- for (int i = 0; i < 5; ++i) {
- LOGGER.info(String.format("param %d: %.10f -> %.10f helix-gbl= %f", i, helix.parameters()[i], trk.getTrackParameter(i), helix.parameters()[i] - trk.getTrackParameter(i)));
- }
- }
+ LOGGER.fine(String.format("helix chi2 %f ndf %d gbl chi2 %f ndf %d\n", helicalTrackFit.chisqtot(), helicalTrackFit.ndf()[0] + helicalTrackFit.ndf()[1], trk.getChi2(), trk.getNDF()));
return new Pair<Track, GBLKinkData>(trk, kinkData);
}
- /**
- * Compute the updated helix parameters and covariance matrix at a given
- * point along the trajectory.
- *
- * @param helix - original seed track
- * @param traj - fitted GBL trajectory
- * @param point - the point along the track where the result is computed.
- * @return corrected parameters.
- */
- public static Pair<double[], SymmetricMatrix> getGblCorrectedHelixParameters(HelicalTrackFit helix, GblTrajectory traj, double bfield, FittedGblTrajectory.GBLPOINT point) {
-
- // get seed helix parameters
- double qOverP = helix.curvature() / (Constants.fieldConversion * Math.abs(bfield) * Math.sqrt(1 + Math.pow(helix.slope(), 2)));
- double d0 = -1.0 * helix.dca(); // correct for different sign convention of d0 in perigee frame
- double z0 = helix.z0();
- double phi0 = helix.phi0();
- double lambda = Math.atan(helix.slope());
-
- LOGGER.info("GblPoint: " + point.toString() + "( " + point.name() + ")");
- LOGGER.info(String.format("original helix: d0=%f, z0=%f, omega=%f, tanlambda=%f, phi0=%f, p=%f", helix.dca(), helix.z0(), helix.curvature(), helix.slope(), helix.phi0(), helix.p(Math.abs(bfield))));
- LOGGER.info("original helix covariance:\n" + helix.covariance());
-
- // get corrections from GBL fit
- Vector locPar = new Vector(5);
- SymMatrix locCov = new SymMatrix(5);
- int pointIndex;
- if (point.compareTo(FittedGblTrajectory.GBLPOINT.IP) == 0) {
- pointIndex = 1;
- } else if (point.compareTo(FittedGblTrajectory.GBLPOINT.LAST) == 0) {
- pointIndex = traj.getNumPoints();
- } else {
- throw new RuntimeException("This GBLPOINT " + point.toString() + "( " + point.name() + ") is not valid");
- }
-
- traj.getResults(pointIndex, locPar, locCov); // vertex point
-// locCov.print(10, 8);
- double qOverPCorr = locPar.get(FittedGblTrajectory.GBLPARIDX.QOVERP.getValue());
- double xTPrimeCorr = locPar.get(FittedGblTrajectory.GBLPARIDX.XTPRIME.getValue());
- double yTPrimeCorr = locPar.get(FittedGblTrajectory.GBLPARIDX.YTPRIME.getValue());
- double xTCorr = locPar.get(FittedGblTrajectory.GBLPARIDX.XT.getValue());
- double yTCorr = locPar.get(FittedGblTrajectory.GBLPARIDX.YT.getValue());
-
- LOGGER.info((helix.slope() > 0 ? "top: " : "bot ") + "qOverPCorr " + qOverPCorr + " xtPrimeCorr " + xTPrimeCorr + " yTPrimeCorr " + yTPrimeCorr + " xTCorr " + xTCorr + " yTCorr " + yTCorr);
-
- // calculate new d0 and z0
-// Hep3Matrix perToClPrj = traj.get_track_data().getPrjPerToCl();
- Hep3Matrix perToClPrj = getPerToClPrj(helix);
-
- Hep3Matrix clToPerPrj = VecOp.inverse(perToClPrj);
- Hep3Vector corrPer = VecOp.mult(clToPerPrj, new BasicHep3Vector(xTCorr, yTCorr, 0.0));
-
- //d0
- double d0_corr = corrPer.y();
- double dca_gbl = -1.0 * (d0 + d0_corr);
-
- //z0
- double z0_corr = corrPer.z();
- double z0_gbl = z0 + z0_corr;
-
- //calculate new slope
- double lambda_gbl = lambda + yTPrimeCorr;
- double slope_gbl = Math.tan(lambda_gbl);
-
- // calculate new curvature
- double qOverP_gbl = qOverP + qOverPCorr;
-// double pt_gbl = (1.0 / qOverP_gbl) * Math.cos(lambda_gbl);
-// double C_gbl = Constants.fieldConversion * Math.abs(bfield) / pt_gbl;
- double C_gbl = Constants.fieldConversion * Math.abs(bfield) * qOverP_gbl / Math.cos(lambda_gbl);
-
- //calculate new phi0
- double phi0_gbl = phi0 + xTPrimeCorr - corrPer.x() * C_gbl;
-
- LOGGER.info("qOverP=" + qOverP + " qOverPCorr=" + qOverPCorr + " qOverP_gbl=" + qOverP_gbl + " ==> pGbl=" + 1.0 / qOverP_gbl + " C_gbl=" + C_gbl);
-
- LOGGER.info(String.format("corrected helix: d0=%f, z0=%f, omega=%f, tanlambda=%f, phi0=%f, p=%f", dca_gbl, z0_gbl, C_gbl, slope_gbl, phi0_gbl, Math.abs(1 / qOverP_gbl)));
-
- /*
- // Strandlie, Wittek, NIMA 566, 2006
- Matrix covariance_gbl = new Matrix(5, 5);
- //helpers
- double Bz = -Constants.fieldConversion * Math.abs(bfield); // TODO sign convention and should it be it scaled from Telsa?
- double p = Math.abs(1 / qOverP_gbl);
- double q = Math.signum(qOverP_gbl);
- double tanLambda = Math.tan(lambda_gbl);
- double cosLambda = Math.cos(lambda_gbl);
- // Hep3Vector B = new BasicHep3Vector(0, 0, Bz); // TODO sign convention?
- Hep3Vector H = new BasicHep3Vector(0, 0, 1);
- Hep3Vector T = HelixUtils.Direction(helix, 0.);
- Hep3Vector HcrossT = VecOp.cross(H, T);
- double alpha = HcrossT.magnitude(); // this should be Bvec cross TrackDir/|B|
- double Q = Bz * q / p;
- Hep3Vector Z = new BasicHep3Vector(0, 0, 1);
- Hep3Vector J = VecOp.mult(1. / VecOp.cross(T, Z).magnitude(), VecOp.cross(T, Z));
- Hep3Vector K = Z;
- Hep3Vector U = VecOp.mult(-1, J);
- Hep3Vector V = VecOp.cross(T, U);
- Hep3Vector I = VecOp.cross(J, K);
- Hep3Vector N = VecOp.mult(1 / alpha, VecOp.cross(H, T));
- double UdotI = VecOp.dot(U, I);
- double NdotV = VecOp.dot(N, V);
- double NdotU = VecOp.dot(N, U);
- double TdotI = VecOp.dot(T, I);
- double VdotI = VecOp.dot(V, I);
- double VdotK = VecOp.dot(V, K);
- covariance_gbl.set(HelicalTrackFit.dcaIndex, FittedGblTrajectory.GBLPARIDX.XT.getValue(), VdotK / TdotI);
- covariance_gbl.set(HelicalTrackFit.phi0Index, FittedGblTrajectory.GBLPARIDX.XTPRIME.getValue(), 1);
- covariance_gbl.set(HelicalTrackFit.phi0Index, FittedGblTrajectory.GBLPARIDX.XT.getValue(), -alpha * Q * UdotI * NdotU / (cosLambda * TdotI));
- covariance_gbl.set(HelicalTrackFit.phi0Index, FittedGblTrajectory.GBLPARIDX.YT.getValue(), -alpha * Q * VdotI * NdotU / (cosLambda * TdotI));
- covariance_gbl.set(HelicalTrackFit.curvatureIndex, FittedGblTrajectory.GBLPARIDX.QOVERP.getValue(), -1 * Bz / cosLambda);
- // covariance_gbl.set(HelicalTrackFit.curvatureIndex, FittedGblTrajectory.GBLPARIDX.XTPRIME.getValue(), 0);
- covariance_gbl.set(HelicalTrackFit.curvatureIndex, FittedGblTrajectory.GBLPARIDX.YTPRIME.getValue(), -1 * q * Bz * tanLambda / (p * cosLambda));
- covariance_gbl.set(HelicalTrackFit.curvatureIndex, FittedGblTrajectory.GBLPARIDX.XT.getValue(), q * Bz * alpha * Q * tanLambda * UdotI * NdotV / (p * cosLambda * TdotI));
- covariance_gbl.set(HelicalTrackFit.curvatureIndex, FittedGblTrajectory.GBLPARIDX.YT.getValue(), q * Bz * alpha * Q * tanLambda * VdotI * NdotV / (p * cosLambda * TdotI));
- covariance_gbl.set(HelicalTrackFit.z0Index, FittedGblTrajectory.GBLPARIDX.YT.getValue(), -1 / TdotI);
- covariance_gbl.set(HelicalTrackFit.slopeIndex, FittedGblTrajectory.GBLPARIDX.YTPRIME.getValue(), -1);
- covariance_gbl.set(HelicalTrackFit.slopeIndex, FittedGblTrajectory.GBLPARIDX.XT.getValue(), alpha * Q * UdotI * NdotV / TdotI);
- covariance_gbl.set(HelicalTrackFit.slopeIndex, FittedGblTrajectory.GBLPARIDX.YT.getValue(), alpha * Q * VdotI * NdotV / TdotI);
-
- covariance_gbl.print(15, 13);
- */
- // Sho's magic
- Matrix jacobian = new Matrix(5, 5);
- jacobian.set(HelicalTrackFit.dcaIndex, FittedGblTrajectory.GBLPARIDX.XT.getValue(), -clToPerPrj.e(1, 0));
- jacobian.set(HelicalTrackFit.dcaIndex, FittedGblTrajectory.GBLPARIDX.YT.getValue(), -clToPerPrj.e(1, 1));
- jacobian.set(HelicalTrackFit.phi0Index, FittedGblTrajectory.GBLPARIDX.XTPRIME.getValue(), 1.0);
- jacobian.set(HelicalTrackFit.phi0Index, FittedGblTrajectory.GBLPARIDX.YT.getValue(), clToPerPrj.e(0, 1) * C_gbl);
- jacobian.set(HelicalTrackFit.curvatureIndex, FittedGblTrajectory.GBLPARIDX.QOVERP.getValue(), Constants.fieldConversion * Math.abs(bfield) / Math.cos(lambda_gbl));
- jacobian.set(HelicalTrackFit.curvatureIndex, FittedGblTrajectory.GBLPARIDX.YTPRIME.getValue(), Constants.fieldConversion * Math.abs(bfield) * qOverP_gbl * Math.tan(lambda_gbl) / Math.cos(lambda_gbl));
- jacobian.set(HelicalTrackFit.z0Index, FittedGblTrajectory.GBLPARIDX.XT.getValue(), clToPerPrj.e(2, 0));
- jacobian.set(HelicalTrackFit.z0Index, FittedGblTrajectory.GBLPARIDX.YT.getValue(), clToPerPrj.e(2, 1));
- jacobian.set(HelicalTrackFit.slopeIndex, FittedGblTrajectory.GBLPARIDX.YTPRIME.getValue(), Math.pow(Math.cos(lambda_gbl), -2.0));
-
-// jacobian.print(15, 13);
- Matrix helixCovariance = jacobian.times(locCov.times(jacobian.transpose()));
- SymmetricMatrix cov = new SymmetricMatrix(5);
- for (int i = 0; i < 5; i++) {
- for (int j = 0; j < 5; j++) {
- if (i >= j) {
- cov.setElement(i, j, helixCovariance.get(i, j));
- }
- }
- }
- LOGGER.info("corrected helix covariance:\n" + cov);
-
- double parameters_gbl[] = new double[5];
- parameters_gbl[HelicalTrackFit.dcaIndex] = dca_gbl;
- parameters_gbl[HelicalTrackFit.phi0Index] = phi0_gbl;
- parameters_gbl[HelicalTrackFit.curvatureIndex] = C_gbl;
- parameters_gbl[HelicalTrackFit.z0Index] = z0_gbl;
- parameters_gbl[HelicalTrackFit.slopeIndex] = slope_gbl;
-
- return new Pair<double[], SymmetricMatrix>(parameters_gbl, cov);
- }
-
- public static GBLKinkData getKinks(GblTrajectory traj) {
-
- // get corrections from GBL fit
- Vector locPar = new Vector(5);
- SymMatrix locCov = new SymMatrix(5);
- float[] lambdaKinks = new float[traj.getNumPoints() - 1];
- double[] phiKinks = new double[traj.getNumPoints() - 1];
-
- double oldPhi = 0, oldLambda = 0;
- for (int i = 0; i < traj.getNumPoints(); i++) {
- traj.getResults(i + 1, locPar, locCov); // vertex point
- double newPhi = locPar.get(FittedGblTrajectory.GBLPARIDX.XTPRIME.getValue());
- double newLambda = locPar.get(FittedGblTrajectory.GBLPARIDX.YTPRIME.getValue());
- if (i > 0) {
- lambdaKinks[i - 1] = (float) (newLambda - oldLambda);
- phiKinks[i - 1] = newPhi - oldPhi;
- }
- oldPhi = newPhi;
- oldLambda = newLambda;
- }
-
- return new GBLKinkData(lambdaKinks, phiKinks);
- }
+
/**
* Do a GBL fit to an arbitrary set of strip hits, with a starting value of
@@ -298,19 +128,28 @@
* @param bfield B-field
* @return The refitted track.
*/
- public static Pair<Track, GBLKinkData> refitTrack(HelicalTrackFit helix, Collection<TrackerHit> stripHits, Collection<TrackerHit> hth, int nIterations, MultipleScattering scattering, double bfield) {
- List<TrackerHit> allHthList = sortHits(hth);
- List<TrackerHit> sortedStripHits = sortHits(stripHits);
- FittedGblTrajectory fit = MakeGblTracks.doGBLFit(helix, sortedStripHits, scattering, bfield, 0);
+ public static Pair<Track, GBLKinkData> refitTrack(HelicalTrackFit helix, Collection<TrackerHit> stripHits, Collection<TrackerHit> hth, int nIterations, int trackType, MultipleScattering scattering, double bfield) {
+ List<TrackerHit> allHthList = TrackUtils.sortHits(hth);
+ List<TrackerHit> sortedStripHits = TrackUtils.sortHits(stripHits);
+ FittedGblTrajectory fit = doGBLFit(helix, sortedStripHits, scattering, bfield, 0);
for (int i = 0; i < nIterations; i++) {
- Pair<Track, GBLKinkData> newTrack = MakeGblTracks.makeCorrectedTrack(fit, helix, allHthList, 0, bfield);
+ Pair<Track, GBLKinkData> newTrack = makeCorrectedTrack(fit, helix, allHthList, trackType, bfield);
helix = TrackUtils.getHTF(newTrack.getFirst());
- fit = MakeGblTracks.doGBLFit(helix, sortedStripHits, scattering, bfield, 0);
- }
- Pair<Track, GBLKinkData> mergedTrack = MakeGblTracks.makeCorrectedTrack(fit, helix, allHthList, 0, bfield);
+ fit = doGBLFit(helix, sortedStripHits, scattering, bfield, 0);
+ }
+ Pair<Track, GBLKinkData> mergedTrack = makeCorrectedTrack(fit, helix, allHthList, trackType, bfield);
return mergedTrack;
}
+ /**
+ * Do a GBL fit to a list of {@link TrackerHit}.
+ * @param htf - seed fit
+ * @param stripHits - list of {@link TrackerHit}.
+ * @param _scattering - estimation of the multiple scattering {@link MultipleScattering}.
+ * @param bfield - magnitude of B-field.
+ * @param debug - debug flag.
+ * @return the fitted GBL trajectory
+ */
public static FittedGblTrajectory doGBLFit(HelicalTrackFit htf, List<TrackerHit> stripHits, MultipleScattering _scattering, double bfield, int debug) {
List<GBLStripClusterData> stripData = makeStripData(htf, stripHits, _scattering, bfield, debug);
double bfac = Constants.fieldConversion * bfield;
@@ -319,6 +158,15 @@
return fit;
}
+ /**
+ * Create a list of {@link GBLStripClusterData} objects that can be used as input to the GBL fitter.
+ * @param htf
+ * @param stripHits
+ * @param _scattering
+ * @param _B
+ * @param _debug
+ * @return the list of GBL strip cluster data
+ */
public static List<GBLStripClusterData> makeStripData(HelicalTrackFit htf, List<TrackerHit> stripHits, MultipleScattering _scattering, double _B, int _debug) {
List<GBLStripClusterData> stripClusterDataList = new ArrayList<GBLStripClusterData>();
@@ -480,18 +328,4 @@
return strip;
}
-
- private static List<TrackerHit> sortHits(Collection<TrackerHit> hits) {
- List<TrackerHit> hitList = new ArrayList<TrackerHit>(hits);
- Collections.sort(hitList, new LayerComparator());
- return hitList;
- }
-
- private static class LayerComparator implements Comparator<TrackerHit> {
-
- @Override
- public int compare(TrackerHit o1, TrackerHit o2) {
- return Integer.compare(TrackUtils.getLayer(o1), TrackUtils.getLayer(o2));
- }
- }
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/TruthResiduals.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/TruthResiduals.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/TruthResiduals.java Wed Apr 27 11:11:32 2016
@@ -73,14 +73,14 @@
Map<Integer, List<SimTrackerHit>> simHitsLayerMap = new HashMap<Integer, List<SimTrackerHit> >();
Map<MCParticle, List<SimTrackerHit> > mcPartSimHitsMap = new HashMap<MCParticle, List<SimTrackerHit > >();
for(SimTrackerHit sh : simTrackerHits) {
- Hep3Vector shpos = CoordinateTransformations.transformVectorToTracking(sh.getPositionVec());
- if(Math.abs(shpos.x()) < 50.0) {
- System.out.printf("%s: Weird hit at %s (%s) in layer %d for MC part %d org %s p %s\n",
+ Hep3Vector shpos = CoordinateTransformations.transformVectorToTracking(sh.getPositionVec());
+ if(Math.abs(shpos.x()) < 50.0) {
+ System.out.printf("%s: Weird hit at %s (%s) in layer %d for MC part %d org %s p %s\n",
this.getClass().getSimpleName(),sh.getPositionVec().toString(),shpos.toString(),sh.getIdentifierFieldValue("layer"),
sh.getMCParticle().getPDGID(),sh.getMCParticle().getOrigin().toString(),sh.getMCParticle().getMomentum().toString());
- System.exit(1);
- }
-
+ System.exit(1);
+ }
+
int layer = sh.getIdentifierFieldValue("layer");
if(!simHitsLayerMap.containsKey(layer)) {
simHitsLayerMap.put(layer, new ArrayList<SimTrackerHit>());
@@ -96,28 +96,28 @@
for(MCParticle mcp : mcPartSimHitsMap.keySet()) {
- this.h_mcp_org.fill(mcp.getOriginX(), mcp.getOriginY());
+ this.h_mcp_org.fill(mcp.getOriginX(), mcp.getOriginY());
}
// Find the particle responsible for the hit in each layer and compute the residual
for(int layer=1;layer<13;++layer) {
- //System.out.printf("layer %d: \n",layer);
+ //System.out.printf("layer %d: \n",layer);
List<SimTrackerHit> simHitsLayer = simHitsLayerMap.get(layer);
-
+
if(simHitsLayer != null ) {
-
- if(simHitsLayer.size()==2) continue;
-
+
+ if(simHitsLayer.size()==2) continue;
+
for(SimTrackerHit simHit : simHitsLayer) {
-
- // Find the MC particle
+
+ // Find the MC particle
MCParticle mcp = simHit.getMCParticle();
if(mcp.getMomentum().magnitude()<0.5) continue;
-
+
// Position in tracking coord
Hep3Vector simHitPosTracking = CoordinateTransformations.transformVectorToTracking(simHit.getPositionVec());
@@ -171,24 +171,24 @@
}
if(layer == 1 && res.y() > 0.1 && this.firstWeirdTrack) {
- double dx = 1.0;
- double xpos = mcp.getOriginZ();
- while(xpos< 100.) {
- xpos += dx;
- trkposExtraPolator = CoordinateTransformations.transformVectorToTracking(TrackUtils.extrapolateTrack(htfTruth,xpos));
- double ypos = trkposExtraPolator.y();
- trkpos_y_vs_x.fill(xpos,ypos);
- }
-
- int idummy = 0;
- while(idummy<2) {
- trkpos_y_vs_x.fill(simHitPosTracking.x(),simHitPosTracking.y());
- idummy++;
- //System.out.printf("weird simhit res pos %s \n", simHitPosTracking.toString());
- }
-
- this.firstWeirdTrack = false;
-
+ double dx = 1.0;
+ double xpos = mcp.getOriginZ();
+ while(xpos< 100.) {
+ xpos += dx;
+ trkposExtraPolator = CoordinateTransformations.transformVectorToTracking(TrackUtils.extrapolateTrack(htfTruth,xpos));
+ double ypos = trkposExtraPolator.y();
+ trkpos_y_vs_x.fill(xpos,ypos);
+ }
+
+ int idummy = 0;
+ while(idummy<2) {
+ trkpos_y_vs_x.fill(simHitPosTracking.x(),simHitPosTracking.y());
+ idummy++;
+ //System.out.printf("weird simhit res pos %s \n", simHitPosTracking.toString());
+ }
+
+ this.firstWeirdTrack = false;
+
}
@@ -200,12 +200,12 @@
public IHistogram getResidual(int layer,String coord) {
- if( !this.res_truthsimhit.containsKey(layer) )
- throw new RuntimeException("Error the layer number is not valid");
- if( coord!="x" || coord!="y")
- throw new RuntimeException("Error the coord is not valid");
- IHistogram1D h = this.res_truthsimhit.get(layer).get(coord=="x"?0:1);
- return h;
+ if( !this.res_truthsimhit.containsKey(layer) )
+ throw new RuntimeException("Error the layer number is not valid");
+ if( coord!="x" || coord!="y")
+ throw new RuntimeException("Error the coord is not valid");
+ IHistogram1D h = this.res_truthsimhit.get(layer).get(coord=="x"?0:1);
+ return h;
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/matrix/Matrix.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/matrix/Matrix.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/matrix/Matrix.java Wed Apr 27 11:11:32 2016
@@ -1,8 +1,6 @@
package org.hps.recon.tracking.gbl.matrix;
-import java.io.BufferedReader;
import java.io.PrintWriter;
-import java.io.StreamTokenizer;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.text.NumberFormat;
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/matrix/Vector.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/matrix/Vector.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/recon/tracking/gbl/matrix/Vector.java Wed Apr 27 11:11:32 2016
@@ -1,6 +1,5 @@
package org.hps.recon.tracking.gbl.matrix;
-import java.io.PrintWriter;
/**
* Specializes the Matrix class to a vector.
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/svt/alignment/BuildCompact.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/svt/alignment/BuildCompact.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/svt/alignment/BuildCompact.java Wed Apr 27 11:11:32 2016
@@ -23,7 +23,7 @@
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.jdom.Document;
import org.jdom.Element;
@@ -49,361 +49,360 @@
* Class building a new compact.xml detector based on MillepedeII input
* corrections.
*
- * @author Per Ola Hansson Adrian <[log in to unmask]>
- * @date January 15, 2014
+ * @author Per Ola Hansson Adrian <[log in to unmask]>
*/
public class BuildCompact {
- private static int runNumber = -1; //1351;
- private static String detectorName = ""; //"HPS-TestRun-v7";
- private static ConditionsManager conditionsManager = null;
-
- private static Options createCmdLineOpts() {
- Options options = new Options();
- options.addOption(new Option("c",true,"The path to the compact xml file."));
- options.addOption(new Option("o",true,"The name of the new compact xml file."));
- options.addOption(new Option("d",true,"Detector name."));
- options.addOption(new Option("r",true,"Run number."));
-
- return options;
- }
-
- private static void printHelpAndExit(Options options) {
- HelpFormatter help = new HelpFormatter();
- help.printHelp(" ", options);
- System.exit(1);
- }
-
- //private static buildDetector()
-
-
-
-
-
-
- private static class MilleParameterSet {
- private IDetectorElement _det = null;
- List<MilleParameter> params = new ArrayList<MilleParameter>();
- public MilleParameterSet(IDetectorElement d) {
- setDetector(d);
- }
- public void setDetector(IDetectorElement d) {
- _det = d;
- }
- public IDetectorElement getDetector() {
- return _det;
- }
- public void add(MilleParameter par) {
- params.add(par);
- }
- public Hep3Vector getLocalTranslation() {
- Map<String,Double> m = new HashMap<String,Double>();
- for(MilleParameter p : params) {
- if (p.getType() == 1) {
- if (p.getDim() == 1) m.put("u", p.getValue());
- else if(p.getDim() == 2) m.put("v", p.getValue());
- else m.put("w", p.getValue());
- }
- }
- if(m.size() != 3) {
- System.out.println("bad trans!!");
- System.exit(1);
- }
- return new BasicHep3Vector(m.get("u"),m.get("v"),m.get("w"));
-
- }
- public Hep3Vector getLocalRotation() {
- Map<String,Double> m = new HashMap<String,Double>();
- for(MilleParameter p : params) {
- if (p.getType() == 2) {
- if (p.getDim() == 1) m.put("alpha",p.getValue());
- else if(p.getDim() == 2) m.put("beta", p.getValue());
- else m.put("gamma", p.getValue());
- }
- }
- if(m.size() != 3) {
- System.out.println("bad rot!!");
- System.exit(1);
- }
- return new BasicHep3Vector(m.get("alpha"),m.get("beta"),m.get("gamma"));
- }
- public Hep3Vector getGlobalTranslation() {
- ITransform3D localToGlobal = getLocalToGlobal();
- return localToGlobal.getRotation().rotated(getLocalTranslation());
- }
- public double getGlobalTranslation(int d) {
- return getGlobalTranslation().v()[d-1];
- }
- public Hep3Vector getGlobalRotation() {
- ITransform3D localToGlobal = getLocalToGlobal();
- return localToGlobal.getRotation().rotated(getLocalRotation());
- }
- public double getGlobalRotation(int d) {
- return getGlobalRotation().v()[d-1];
- }
- public ITransform3D getLocalToGlobal() {
- ITransform3D localToGlobal = ( (SiSensor) _det).getReadoutElectrodes(ChargeCarrier.HOLE).getLocalToGlobal();
- return localToGlobal;
- }
-
- }
-
- public static void main(String[] args) {
-
- // Setup command line input
- Options options = createCmdLineOpts();
- if (args.length == 0) {
- printHelpAndExit(options);
- }
-
- CommandLineParser parser = new DefaultParser();
- CommandLine cl = null;
- try {
- cl = parser.parse(options, args);
- } catch (ParseException e) {
- throw new RuntimeException("Problem parsing command line options.",e);
- }
-
- String compactFilename = null;
- if(cl.hasOption("c")) {
- compactFilename = cl.getOptionValue("c");
- } else {
- printHelpAndExit(options);
- }
-
- if(cl.hasOption("d")) {
- detectorName = cl.getOptionValue("d");
- } else {
- printHelpAndExit(options);
- }
-
- if(cl.hasOption("r")) {
- runNumber = Integer.parseInt(cl.getOptionValue("r"));
- } else {
- printHelpAndExit(options);
- }
-
- String compactFilenameNew = "compact_new.xml";
- if(cl.hasOption("o")) {
- compactFilenameNew = cl.getOptionValue("o");
- }
-
-
-
- File compactFile = new File(compactFilename);
-
- // read XML
- SAXBuilder builder = new SAXBuilder();
- Document compact_document = null;
- try {
- compact_document = (Document) builder.build(compactFile);
- } catch (JDOMException | IOException e1) {
- throw new RuntimeException("problem with JDOM ", e1);
- }
-
- // read detector geometry
- FileInputStream inCompact;
- try {
- inCompact = new FileInputStream(compactFile);
- } catch (FileNotFoundException e) {
- throw new RuntimeException("cannot open compact file",e);
- }
-
- GeometryReader reader = new GeometryReader();
- Detector det;
- try {
- det = reader.read(inCompact);
- } catch (IOException | JDOMException | ElementCreationException e) {
- throw new RuntimeException("problem reading compact file",e);
- }
-
-
- // set conditions in order to be able to determine which sensors are where in the geometry
- setConditions(detectorName,runNumber);
-
- // Loop over all millepede input files and match parameters with detectors
-
- List<MilleParameterSet> list_det = new ArrayList<MilleParameterSet>();
-
- FileInputStream inMille = null;
- BufferedReader brMille = null;
- try {
- for(String milleFilename : cl.getArgs()) {
- inMille = new FileInputStream(milleFilename);
- brMille = new BufferedReader(new InputStreamReader(inMille));
- String line;
- while((line = brMille.readLine()) != null) {
- //System.out.printf("%s\n",line);
- if(!line.contains("Parameter") && !line.contains("!")) {
-
- MilleParameter par = new MilleParameter(line);
- //System.out.println(par.getXMLName() + " " + par.getValue());
-
- SiSensor detEle = getTrackerDetElement( det, par);
- if(detEle == null) {
- System.out.println("Couldn't find detector for param " + par.getId());
- System.exit(1);
- }
- System.out.println("Found detector " + detEle.getName());
- if(detEle.getClass().isInstance(SiSensor.class)) {
- System.out.println("yeah");
- }
-
- // do we have it already?
- MilleParameterSet useSet = null;
- for(MilleParameterSet set : list_det) {
- if(set.getDetector() == detEle) {
- useSet = set;
- }
- }
-
- if (useSet == null) {
- useSet = new MilleParameterSet(detEle);
- list_det.add(useSet);
- }
-
- //add the parameter
- useSet.add(par);
-
-
- }
- }
- brMille.close();
- }
- }
- catch (IOException e) {
- throw new RuntimeException("problem reading mille file",e);
- }
-
- for(MilleParameterSet set : list_det) {
- System.out.println("Detector " + set.getDetector().getName());
- List<MilleParameter> pars = set.params;
- for(MilleParameter p : pars) {
- System.out.println(p.getXMLName() + " " + p.getValue());
- Element node = findXMLNode(compact_document,p.getXMLName());
- double value = 0.0;
- if(p.getType() == 1){
- value = set.getGlobalTranslation(p.getDim());
- } else if(p.getType() == 2){
- value = set.getGlobalRotation(p.getDim());
- } else {
- System.out.println("This type is illdefnied " + p.getType());
- System.exit(1);
- }
- node.setAttribute("value", String.format("%.6f",value));
- }
- Hep3Vector u = getMeasuredCoordinate( (SiSensor )set.getDetector());
- System.out.println("u " + u.toString());
- System.out.println("t (local) " + set.getLocalTranslation().toString());
- System.out.println("t (global) " + set.getGlobalTranslation().toString());
- System.out.println("r (local) " + set.getLocalRotation().toString());
- System.out.println("r (global) " + set.getGlobalRotation().toString());
-
-
- }
-
- // Save new XML file
-
- XMLOutputter xmlOutput = new XMLOutputter();
- // display nice
- //xmlOutput.setFormat(Format.getPrettyFormat());
- try {
- xmlOutput.output(compact_document, new FileWriter(compactFilenameNew));
- } catch (IOException e) {
- throw new RuntimeException("problem with xml output",e);
- }
-
-
-
-
-
- }
-
- private static Element findXMLNode(Document document, String name) {
- Element rootNode = document.getRootElement();
- List list = rootNode.getChildren("define");
- for(int i = 0; i < list.size(); ++i ) {
- Element node = (Element) list.get(i);
- List llist = node.getChildren("constant");
- //System.out.println("length of list " + llist.size());
- for(int ii = 0; ii < llist.size(); ++ii ) {
- Element nnode = (Element) llist.get(ii);
- //System.out.println("node name " + nnode.getAttributeValue("name") + " " + nnode.getAttributeValue("value") );
- //if(nnode.getAttributeValue("name").contains(name)) {
- if(nnode.getAttributeValue("name").compareTo(name) ==0 ) {
- return nnode;
- }
- }
- }
-
- return null;
- }
-
- private static void setConditions(String detectorName, int run) {
-
- try {
- if(conditionsManager == null) {
- conditionsManager = ConditionsManager.defaultInstance();
- }
- conditionsManager.setDetector(detectorName, run);
-
- } catch (ConditionsNotFoundException e1) {
- throw new RuntimeException("problem setting conditions",e1);
- }
-
- }
-
- private static SiSensor getTrackerDetElement(Detector det, MilleParameter par) {
- List<SiSensor> sensors = det.getSubdetector("Tracker").getDetectorElement().findDescendants(SiSensor.class);
+ private static int runNumber = -1; //1351;
+ private static String detectorName = ""; //"HPS-TestRun-v7";
+ private static ConditionsManager conditionsManager = null;
+
+ private static Options createCmdLineOpts() {
+ Options options = new Options();
+ options.addOption(new Option("c",true,"The path to the compact xml file."));
+ options.addOption(new Option("o",true,"The name of the new compact xml file."));
+ options.addOption(new Option("d",true,"Detector name."));
+ options.addOption(new Option("r",true,"Run number."));
+
+ return options;
+ }
+
+ private static void printHelpAndExit(Options options) {
+ HelpFormatter help = new HelpFormatter();
+ help.printHelp(" ", options);
+ System.exit(1);
+ }
+
+ //private static buildDetector()
+
+
+
+
+
+
+ private static class MilleParameterSet {
+ private IDetectorElement _det = null;
+ List<MilleParameter> params = new ArrayList<MilleParameter>();
+ public MilleParameterSet(IDetectorElement d) {
+ setDetector(d);
+ }
+ public void setDetector(IDetectorElement d) {
+ _det = d;
+ }
+ public IDetectorElement getDetector() {
+ return _det;
+ }
+ public void add(MilleParameter par) {
+ params.add(par);
+ }
+ public Hep3Vector getLocalTranslation() {
+ Map<String,Double> m = new HashMap<String,Double>();
+ for(MilleParameter p : params) {
+ if (p.getType() == 1) {
+ if (p.getDim() == 1) m.put("u", p.getValue());
+ else if(p.getDim() == 2) m.put("v", p.getValue());
+ else m.put("w", p.getValue());
+ }
+ }
+ if(m.size() != 3) {
+ System.out.println("bad trans!!");
+ System.exit(1);
+ }
+ return new BasicHep3Vector(m.get("u"),m.get("v"),m.get("w"));
+
+ }
+ public Hep3Vector getLocalRotation() {
+ Map<String,Double> m = new HashMap<String,Double>();
+ for(MilleParameter p : params) {
+ if (p.getType() == 2) {
+ if (p.getDim() == 1) m.put("alpha",p.getValue());
+ else if(p.getDim() == 2) m.put("beta", p.getValue());
+ else m.put("gamma", p.getValue());
+ }
+ }
+ if(m.size() != 3) {
+ System.out.println("bad rot!!");
+ System.exit(1);
+ }
+ return new BasicHep3Vector(m.get("alpha"),m.get("beta"),m.get("gamma"));
+ }
+ public Hep3Vector getGlobalTranslation() {
+ ITransform3D localToGlobal = getLocalToGlobal();
+ return localToGlobal.getRotation().rotated(getLocalTranslation());
+ }
+ public double getGlobalTranslation(int d) {
+ return getGlobalTranslation().v()[d-1];
+ }
+ public Hep3Vector getGlobalRotation() {
+ ITransform3D localToGlobal = getLocalToGlobal();
+ return localToGlobal.getRotation().rotated(getLocalRotation());
+ }
+ public double getGlobalRotation(int d) {
+ return getGlobalRotation().v()[d-1];
+ }
+ public ITransform3D getLocalToGlobal() {
+ ITransform3D localToGlobal = ( (SiSensor) _det).getReadoutElectrodes(ChargeCarrier.HOLE).getLocalToGlobal();
+ return localToGlobal;
+ }
+
+ }
+
+ public static void main(String[] args) {
+
+ // Setup command line input
+ Options options = createCmdLineOpts();
+ if (args.length == 0) {
+ printHelpAndExit(options);
+ }
+
+ CommandLineParser parser = new PosixParser();
+ CommandLine cl = null;
+ try {
+ cl = parser.parse(options, args);
+ } catch (ParseException e) {
+ throw new RuntimeException("Problem parsing command line options.",e);
+ }
+
+ String compactFilename = null;
+ if(cl.hasOption("c")) {
+ compactFilename = cl.getOptionValue("c");
+ } else {
+ printHelpAndExit(options);
+ }
+
+ if(cl.hasOption("d")) {
+ detectorName = cl.getOptionValue("d");
+ } else {
+ printHelpAndExit(options);
+ }
+
+ if(cl.hasOption("r")) {
+ runNumber = Integer.parseInt(cl.getOptionValue("r"));
+ } else {
+ printHelpAndExit(options);
+ }
+
+ String compactFilenameNew = "compact_new.xml";
+ if(cl.hasOption("o")) {
+ compactFilenameNew = cl.getOptionValue("o");
+ }
+
+
+
+ File compactFile = new File(compactFilename);
+
+ // read XML
+ SAXBuilder builder = new SAXBuilder();
+ Document compact_document = null;
+ try {
+ compact_document = (Document) builder.build(compactFile);
+ } catch (JDOMException | IOException e1) {
+ throw new RuntimeException("problem with JDOM ", e1);
+ }
+
+ // read detector geometry
+ FileInputStream inCompact;
+ try {
+ inCompact = new FileInputStream(compactFile);
+ } catch (FileNotFoundException e) {
+ throw new RuntimeException("cannot open compact file",e);
+ }
+
+ GeometryReader reader = new GeometryReader();
+ Detector det;
+ try {
+ det = reader.read(inCompact);
+ } catch (IOException | JDOMException | ElementCreationException e) {
+ throw new RuntimeException("problem reading compact file",e);
+ }
+
+
+ // set conditions in order to be able to determine which sensors are where in the geometry
+ setConditions(detectorName,runNumber);
+
+ // Loop over all millepede input files and match parameters with detectors
+
+ List<MilleParameterSet> list_det = new ArrayList<MilleParameterSet>();
+
+ FileInputStream inMille = null;
+ BufferedReader brMille = null;
+ try {
+ for(String milleFilename : cl.getArgs()) {
+ inMille = new FileInputStream(milleFilename);
+ brMille = new BufferedReader(new InputStreamReader(inMille));
+ String line;
+ while((line = brMille.readLine()) != null) {
+ //System.out.printf("%s\n",line);
+ if(!line.contains("Parameter") && !line.contains("!")) {
+
+ MilleParameter par = new MilleParameter(line);
+ //System.out.println(par.getXMLName() + " " + par.getValue());
+
+ SiSensor detEle = getTrackerDetElement( det, par);
+ if(detEle == null) {
+ System.out.println("Couldn't find detector for param " + par.getId());
+ System.exit(1);
+ }
+ System.out.println("Found detector " + detEle.getName());
+ if(detEle.getClass().isInstance(SiSensor.class)) {
+ System.out.println("yeah");
+ }
+
+ // do we have it already?
+ MilleParameterSet useSet = null;
+ for(MilleParameterSet set : list_det) {
+ if(set.getDetector() == detEle) {
+ useSet = set;
+ }
+ }
+
+ if (useSet == null) {
+ useSet = new MilleParameterSet(detEle);
+ list_det.add(useSet);
+ }
+
+ //add the parameter
+ useSet.add(par);
+
+
+ }
+ }
+ brMille.close();
+ }
+ }
+ catch (IOException e) {
+ throw new RuntimeException("problem reading mille file",e);
+ }
+
+ for(MilleParameterSet set : list_det) {
+ System.out.println("Detector " + set.getDetector().getName());
+ List<MilleParameter> pars = set.params;
+ for(MilleParameter p : pars) {
+ System.out.println(p.getXMLName() + " " + p.getValue());
+ Element node = findXMLNode(compact_document,p.getXMLName());
+ double value = 0.0;
+ if(p.getType() == 1){
+ value = set.getGlobalTranslation(p.getDim());
+ } else if(p.getType() == 2){
+ value = set.getGlobalRotation(p.getDim());
+ } else {
+ System.out.println("This type is illdefnied " + p.getType());
+ System.exit(1);
+ }
+ node.setAttribute("value", String.format("%.6f",value));
+ }
+ Hep3Vector u = getMeasuredCoordinate( (SiSensor )set.getDetector());
+ System.out.println("u " + u.toString());
+ System.out.println("t (local) " + set.getLocalTranslation().toString());
+ System.out.println("t (global) " + set.getGlobalTranslation().toString());
+ System.out.println("r (local) " + set.getLocalRotation().toString());
+ System.out.println("r (global) " + set.getGlobalRotation().toString());
+
+
+ }
+
+ // Save new XML file
+
+ XMLOutputter xmlOutput = new XMLOutputter();
+ // display nice
+ //xmlOutput.setFormat(Format.getPrettyFormat());
+ try {
+ xmlOutput.output(compact_document, new FileWriter(compactFilenameNew));
+ } catch (IOException e) {
+ throw new RuntimeException("problem with xml output",e);
+ }
+
+
+
+
+
+ }
+
+ private static Element findXMLNode(Document document, String name) {
+ Element rootNode = document.getRootElement();
+ List list = rootNode.getChildren("define");
+ for(int i = 0; i < list.size(); ++i ) {
+ Element node = (Element) list.get(i);
+ List llist = node.getChildren("constant");
+ //System.out.println("length of list " + llist.size());
+ for(int ii = 0; ii < llist.size(); ++ii ) {
+ Element nnode = (Element) llist.get(ii);
+ //System.out.println("node name " + nnode.getAttributeValue("name") + " " + nnode.getAttributeValue("value") );
+ //if(nnode.getAttributeValue("name").contains(name)) {
+ if(nnode.getAttributeValue("name").compareTo(name) ==0 ) {
+ return nnode;
+ }
+ }
+ }
+
+ return null;
+ }
+
+ private static void setConditions(String detectorName, int run) {
+
+ try {
+ if(conditionsManager == null) {
+ conditionsManager = ConditionsManager.defaultInstance();
+ }
+ conditionsManager.setDetector(detectorName, run);
+
+ } catch (ConditionsNotFoundException e1) {
+ throw new RuntimeException("problem setting conditions",e1);
+ }
+
+ }
+
+ private static SiSensor getTrackerDetElement(Detector det, MilleParameter par) {
+ List<SiSensor> sensors = det.getSubdetector("Tracker").getDetectorElement().findDescendants(SiSensor.class);
//List<SiTrackerModule> modules = det.getDetectorElement().findDescendants(SiTrackerModule.class);
//System.out.printf("%d sensors\n",sensors.size());
for (SiSensor module: sensors) {
// Create DAQ Maps
- boolean isTop = ((HpsSiSensor) module).isTopLayer();
- int h = par.getHalf();
- if ((isTop && h == 1) || (!isTop && h == 2)) {
- int layer = ((HpsSiSensor) module).getLayerNumber();
- if (layer == par.getSensor()) {
- //found match
- return module;
- }
- }
+ boolean isTop = ((HpsSiSensor) module).isTopLayer();
+ int h = par.getHalf();
+ if ((isTop && h == 1) || (!isTop && h == 2)) {
+ int layer = ((HpsSiSensor) module).getLayerNumber();
+ if (layer == par.getSensor()) {
+ //found match
+ return module;
+ }
+ }
}
return null;
-
- }
-
-
-
- private static class DetectorList<K> extends ArrayList<DetAlignConstants> {
- //List<DetAlignConstants> _detectors = new ArrayList<DetAlignConstants>();
- public DetectorList() {
- }
-
- public boolean contains(IDetectorElement detEle) {
- return this.get(detEle) == null ? false : true;
- }
-
- public DetAlignConstants get(IDetectorElement detEle) {
- for(DetAlignConstants d : this) {
- if (d == detEle) {
- return d;
- }
- }
- return null;
- }
- public void print() {
- System.out.println("==== " + this.size() + " detectors has alignment corrections ====");
- for(DetAlignConstants det : this) {
- det.print();
- }
- }
-
- }
-
-
-
- private static Hep3Vector getTrackingMeasuredCoordinate(SiSensor sensor)
+
+ }
+
+
+
+ private static class DetectorList<K> extends ArrayList<DetAlignConstants> {
+ //List<DetAlignConstants> _detectors = new ArrayList<DetAlignConstants>();
+ public DetectorList() {
+ }
+
+ public boolean contains(IDetectorElement detEle) {
+ return this.get(detEle) == null ? false : true;
+ }
+
+ public DetAlignConstants get(IDetectorElement detEle) {
+ for(DetAlignConstants d : this) {
+ if (d == detEle) {
+ return d;
+ }
+ }
+ return null;
+ }
+ public void print() {
+ System.out.println("==== " + this.size() + " detectors has alignment corrections ====");
+ for(DetAlignConstants det : this) {
+ det.print();
+ }
+ }
+
+ }
+
+
+
+ private static Hep3Vector getTrackingMeasuredCoordinate(SiSensor sensor)
{
// p-side unit vector
ITransform3D electrodes_to_global = sensor.getReadoutElectrodes(ChargeCarrier.HOLE).getLocalToGlobal();
@@ -411,8 +410,8 @@
measuredCoordinate = VecOp.mult(VecOp.mult(CoordinateTransformations.getMatrix(),electrodes_to_global.getRotation().getRotationMatrix()), measuredCoordinate);
return measuredCoordinate;
}
-
- private static Hep3Vector getMeasuredCoordinate(SiSensor sensor)
+
+ private static Hep3Vector getMeasuredCoordinate(SiSensor sensor)
{
// p-side unit vector
ITransform3D electrodes_to_global = sensor.getReadoutElectrodes(ChargeCarrier.HOLE).getLocalToGlobal();
@@ -420,127 +419,127 @@
measuredCoordinate = VecOp.mult(electrodes_to_global.getRotation().getRotationMatrix(), measuredCoordinate);
return measuredCoordinate;
}
-
-
-
- private static class SiSensorDetAlignConstants extends DetAlignConstants {
- public SiSensorDetAlignConstants(IDetectorElement det) {
- super(det);
- }
- public void transform() {
- ITransform3D localToGlobal = null;
- if(_det.getClass().isInstance(SiSensor.class)) {
- localToGlobal = ( (SiSensor) _det).getReadoutElectrodes(ChargeCarrier.HOLE).getLocalToGlobal();
- }
- //Translation
- Hep3Vector t_local = _constants.getTranslationVector();
- Hep3Vector t_global = localToGlobal.getRotation().rotated(t_local);
- _constants.addGlobalTranslation(t_global);
- //Rotation
- Hep3Vector r_local = _constants.getRotationVector();
- Hep3Vector r_global = localToGlobal.getRotation().rotated(r_local);
- _constants.addGlobalRotation(r_global);
- }
-
- }
-
- private static abstract class DetAlignConstants {
- protected IDetectorElement _det = null;
- protected AlignConstants<String,Double> _constants = new AlignConstants<String,Double>();
- public DetAlignConstants(IDetectorElement det) {
- _det = det;
- }
- public abstract void transform();
- public void add(MilleParameter par) {
- this._constants.add(par);
- }
- public void print() {
- System.out.println(_det.getName());
- for(Entry<String, Double> c : this._constants.entrySet()) {
- System.out.println(c.getKey() + " " + c.getValue());
- }
- System.out.println("Local translation " + _constants.getTranslationVector().toString());
- System.out.println("Global translation " + _constants.getTranslationVectorGlobal().toString());
- System.out.println("Local rotation " + _constants.getRotationVector().toString());
- System.out.println("Global rotation " + _constants.getRotationVectorGlobal().toString());
-
-
- }
-
-
- }
-
-
-
- private static class AlignConstants<K,V> extends HashMap<String,Double> {
- List<MilleParameter> _pars = new ArrayList<MilleParameter>();
- public AlignConstants() {
- super();
- }
- public void add(MilleParameter p) {
- _pars.add(p);
- if (p.getType() == 1) {
- if (p.getDim() == 1) this.put("u", p.getValue());
- else if(p.getDim() == 2) this.put("v", p.getValue());
- else this.put("w", p.getValue());
- }
- else {
- if (p.getDim() == 1) this.put("alpha", p.getValue());
- else if(p.getDim() == 2) this.put("beta", p.getValue());
- else this.put("gamma", p.getValue());
- }
- }
- public void print() {
- for(Entry<String,Double> e : this.entrySet()) {
- System.out.println(e.getKey() + " " + e.getValue());
- }
- }
- public Hep3Vector getTranslationVector() {
- if(!this.containsKey("u") || !this.containsKey("v") || !this.containsKey("w")) {
- System.out.println("missing pars for translation");
- print();
- System.exit(1);
- }
- return new BasicHep3Vector(this.get("u"),this.get("v"),this.get("w"));
- }
- public Hep3Vector getTranslationVectorGlobal() {
- if(!this.containsKey("x") || !this.containsKey("y") || !this.containsKey("z")) {
- System.out.println("missing pars for global translation");
- print();
- System.exit(1);
- }
- return new BasicHep3Vector(this.get("x"),this.get("y"),this.get("z"));
- }
- public Hep3Vector getRotationVector() {
- if(!this.containsKey("alpha") || !this.containsKey("beta") || !this.containsKey("gamma")) {
- System.out.println("missing pars for rotation");
- print();
- System.exit(1);
- }
- return new BasicHep3Vector(this.get("alpha"),this.get("beta"),this.get("gamma"));
- }
- public Hep3Vector getRotationVectorGlobal() {
- if(!this.containsKey("rx") || !this.containsKey("ry") || !this.containsKey("rz")) {
- System.out.println("missing pars for global rotation");
- print();
- System.exit(1);
- }
- return new BasicHep3Vector(this.get("rx"),this.get("ry"),this.get("rz"));
- }
- private void addGlobalTranslation(Hep3Vector t) {
- this.put("x", t.x());
- this.put("y", t.y());
- this.put("z", t.z());
- }
- private void addGlobalRotation(Hep3Vector t) {
- this.put("rx", t.x());
- this.put("ry", t.y());
- this.put("rz", t.z());
- }
-
-
- }
-
-
+
+
+
+ private static class SiSensorDetAlignConstants extends DetAlignConstants {
+ public SiSensorDetAlignConstants(IDetectorElement det) {
+ super(det);
+ }
+ public void transform() {
+ ITransform3D localToGlobal = null;
+ if(_det.getClass().isInstance(SiSensor.class)) {
+ localToGlobal = ( (SiSensor) _det).getReadoutElectrodes(ChargeCarrier.HOLE).getLocalToGlobal();
+ }
+ //Translation
+ Hep3Vector t_local = _constants.getTranslationVector();
+ Hep3Vector t_global = localToGlobal.getRotation().rotated(t_local);
+ _constants.addGlobalTranslation(t_global);
+ //Rotation
+ Hep3Vector r_local = _constants.getRotationVector();
+ Hep3Vector r_global = localToGlobal.getRotation().rotated(r_local);
+ _constants.addGlobalRotation(r_global);
+ }
+
+ }
+
+ private static abstract class DetAlignConstants {
+ protected IDetectorElement _det = null;
+ protected AlignConstants<String,Double> _constants = new AlignConstants<String,Double>();
+ public DetAlignConstants(IDetectorElement det) {
+ _det = det;
+ }
+ public abstract void transform();
+ public void add(MilleParameter par) {
+ this._constants.add(par);
+ }
+ public void print() {
+ System.out.println(_det.getName());
+ for(Entry<String, Double> c : this._constants.entrySet()) {
+ System.out.println(c.getKey() + " " + c.getValue());
+ }
+ System.out.println("Local translation " + _constants.getTranslationVector().toString());
+ System.out.println("Global translation " + _constants.getTranslationVectorGlobal().toString());
+ System.out.println("Local rotation " + _constants.getRotationVector().toString());
+ System.out.println("Global rotation " + _constants.getRotationVectorGlobal().toString());
+
+
+ }
+
+
+ }
+
+
+
+ private static class AlignConstants<K,V> extends HashMap<String,Double> {
+ List<MilleParameter> _pars = new ArrayList<MilleParameter>();
+ public AlignConstants() {
+ super();
+ }
+ public void add(MilleParameter p) {
+ _pars.add(p);
+ if (p.getType() == 1) {
+ if (p.getDim() == 1) this.put("u", p.getValue());
+ else if(p.getDim() == 2) this.put("v", p.getValue());
+ else this.put("w", p.getValue());
+ }
+ else {
+ if (p.getDim() == 1) this.put("alpha", p.getValue());
+ else if(p.getDim() == 2) this.put("beta", p.getValue());
+ else this.put("gamma", p.getValue());
+ }
+ }
+ public void print() {
+ for(Entry<String,Double> e : this.entrySet()) {
+ System.out.println(e.getKey() + " " + e.getValue());
+ }
+ }
+ public Hep3Vector getTranslationVector() {
+ if(!this.containsKey("u") || !this.containsKey("v") || !this.containsKey("w")) {
+ System.out.println("missing pars for translation");
+ print();
+ System.exit(1);
+ }
+ return new BasicHep3Vector(this.get("u"),this.get("v"),this.get("w"));
+ }
+ public Hep3Vector getTranslationVectorGlobal() {
+ if(!this.containsKey("x") || !this.containsKey("y") || !this.containsKey("z")) {
+ System.out.println("missing pars for global translation");
+ print();
+ System.exit(1);
+ }
+ return new BasicHep3Vector(this.get("x"),this.get("y"),this.get("z"));
+ }
+ public Hep3Vector getRotationVector() {
+ if(!this.containsKey("alpha") || !this.containsKey("beta") || !this.containsKey("gamma")) {
+ System.out.println("missing pars for rotation");
+ print();
+ System.exit(1);
+ }
+ return new BasicHep3Vector(this.get("alpha"),this.get("beta"),this.get("gamma"));
+ }
+ public Hep3Vector getRotationVectorGlobal() {
+ if(!this.containsKey("rx") || !this.containsKey("ry") || !this.containsKey("rz")) {
+ System.out.println("missing pars for global rotation");
+ print();
+ System.exit(1);
+ }
+ return new BasicHep3Vector(this.get("rx"),this.get("ry"),this.get("rz"));
+ }
+ private void addGlobalTranslation(Hep3Vector t) {
+ this.put("x", t.x());
+ this.put("y", t.y());
+ this.put("z", t.z());
+ }
+ private void addGlobalRotation(Hep3Vector t) {
+ this.put("rx", t.x());
+ this.put("ry", t.y());
+ this.put("rz", t.z());
+ }
+
+
+ }
+
+
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/svt/alignment/BuildMillepedeCompact.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/svt/alignment/BuildMillepedeCompact.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/svt/alignment/BuildMillepedeCompact.java Wed Apr 27 11:11:32 2016
@@ -6,22 +6,14 @@
* created on 1/15/2014
*/
-import hep.physics.vec.BasicHep3Vector;
-import hep.physics.vec.Hep3Vector;
-import hep.physics.vec.VecOp;
-
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
-import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
-import java.util.HashMap;
import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -29,252 +21,271 @@
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
-import org.hps.recon.tracking.CoordinateTransformations;
-import org.jdom.Attribute;
+import org.apache.commons.cli.PosixParser;
import org.jdom.DataConversionException;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;
import org.jdom.output.XMLOutputter;
-import org.lcsim.conditions.ConditionsManager;
-import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
-import org.lcsim.detector.IDetectorElement;
-import org.lcsim.detector.ITransform3D;
-import org.lcsim.detector.tracker.silicon.ChargeCarrier;
-import org.lcsim.detector.tracker.silicon.SiSensor;
-import org.lcsim.geometry.Detector;
-import org.lcsim.geometry.GeometryReader;
import org.lcsim.geometry.compact.converter.MilleParameter;
-import org.lcsim.util.xml.ElementFactory.ElementCreationException;
public class BuildMillepedeCompact {
- private static String detectorName = "Tracker";
- private static boolean replaceConstant = false;
+ private static String detectorName = "Tracker";
+ private static boolean replaceConstant = false;
private static boolean calcNewValue = true;
private static boolean ignoreBeamspot = true;
+ private static boolean flipRotationCorrection = false;
private static Options createCmdLineOpts() {
- Options options = new Options();
- options.addOption(new Option("c",true,"The path to the compact xml file."));
- options.addOption(new Option("o",true,"The name of the new compact xml file."));
- options.addOption(new Option("r", false, "Replace correction instead of adding to it."));
+ Options options = new Options();
+ options.addOption(new Option("c",true,"The path to the compact xml file."));
+ options.addOption(new Option("o",true,"The name of the new compact xml file."));
+ options.addOption(new Option("r", false, "Replace correction instead of adding to it."));
options.addOption(new Option("t", false, "Add a text string as a new value instead of adding to it."));
- return options;
- }
-
- private static void printHelpAndExit(Options options) {
- HelpFormatter help = new HelpFormatter();
- help.printHelp(" ", options);
- System.exit(1);
- }
-
-
- public static void main(String[] args) {
-
- // Setup command line input
- Options options = createCmdLineOpts();
- if (args.length == 0) {
- printHelpAndExit(options);
- }
-
- CommandLineParser parser = new DefaultParser();
- CommandLine cl = null;
- try {
- cl = parser.parse(options, args);
- } catch (ParseException e) {
- throw new RuntimeException("Problem parsing command line options.",e);
- }
-
- String compactFilename = null;
- if(cl.hasOption("c")) {
- compactFilename = cl.getOptionValue("c");
- } else {
- printHelpAndExit(options);
- }
-
- String compactFilenameNew = "compact_new.xml";
- if(cl.hasOption("o")) {
- compactFilenameNew = cl.getOptionValue("o");
- }
-
-
-
- if(cl.hasOption("r")) {
- replaceConstant = true;
- }
-
- if(cl.hasOption("t")) {
+ options.addOption(new Option("f",false, "Flip sign of rotation corrections."));
+ return options;
+ }
+
+ private static void printHelpAndExit(Options options) {
+ HelpFormatter help = new HelpFormatter();
+ help.printHelp(" ", options);
+ System.exit(1);
+ }
+
+
+ public static void main(String[] args) {
+
+ // Setup command line input
+ Options options = createCmdLineOpts();
+ if (args.length == 0) {
+ printHelpAndExit(options);
+ }
+
+ CommandLineParser parser = new PosixParser();
+ CommandLine cl = null;
+ try {
+ cl = parser.parse(options, args);
+ } catch (ParseException e) {
+ throw new RuntimeException("Problem parsing command line options.",e);
+ }
+
+ String compactFilename = null;
+ if(cl.hasOption("c")) {
+ compactFilename = cl.getOptionValue("c");
+ } else {
+ printHelpAndExit(options);
+ }
+
+ String compactFilenameNew = "compact_new.xml";
+ if(cl.hasOption("o")) {
+ compactFilenameNew = cl.getOptionValue("o");
+ }
+
+
+
+ if(cl.hasOption("r")) {
+ replaceConstant = true;
+ }
+
+ if(cl.hasOption("t")) {
calcNewValue = false;
}
-
- File compactFile = new File(compactFilename);
-
- // read XML
- SAXBuilder builder = new SAXBuilder();
- Document compact_document = null;
- try {
- compact_document = (Document) builder.build(compactFile);
- } catch (JDOMException | IOException e1) {
- throw new RuntimeException("problem with JDOM ", e1);
- }
-
-
-
- // Loop over all millepede input files and build a list of parameters
-
- List<MilleParameter> params = new ArrayList<MilleParameter>();
-
- FileInputStream inMille = null;
- BufferedReader brMille = null;
- try {
- for(String milleFilename : cl.getArgs()) {
- inMille = new FileInputStream(milleFilename);
- brMille = new BufferedReader(new InputStreamReader(inMille));
- String line;
- while((line = brMille.readLine()) != null) {
- //System.out.printf("%s\n",line);
- if(!line.contains("Parameter") && !line.contains("!")) {
-
- MilleParameter par = new MilleParameter(line);
- //System.out.println(par.getXMLName() + " " + par.getValue());
-
- if (ignoreBeamspot) {
- if(par.getSensor() == 98 || par.getSensor() == 99) {
- System.out.printf("Ignoring %s\n", par.toString());
- continue;
- }
- }
- System.out.printf("Adding %s\n", par.toString());
- //add the parameter
- params.add(par);
-
- }
- }
- brMille.close();
- }
- }
- catch (IOException e) {
- throw new RuntimeException("problem reading mille file",e);
- }
-
- System.out.printf("Found %d millepede parameters\n ", params.size());
-
-
-
- Element rootNode = compact_document.getRootElement();
- List<Element> detectors = rootNode.getChildren("detectors");
- for(Element detectorsNode : detectors) {
- List<Element> detectorNode = detectorsNode.getChildren("detector");
- if(detectorNode!=null) {
- System.out.println(detectorNode.size() + " detectors");
- for(Element detector : detectorNode) {
- if(detector.getAttribute("name")!=null) {
- if(detector.getAttributeValue("name").compareTo(detectorName)==0 ) {
- System.out.println("Found " + detectorName);
- for(MilleParameter p : params) {
- Element node = findMillepedeConstantNode(detector,Integer.toString(p.getId()));
- if(node!=null) {
+ if(cl.hasOption("f")) {
+ flipRotationCorrection = true;
+ }
+
+ if (calcNewValue)
+ System.out.println("DO CALCULATE NEW VALUE");
+ else
+ System.out.println("DO NOT CALCULATE NEW VALUE");
+
+
+
+ if (flipRotationCorrection)
+ System.out.println("DO FLIP ROTATIONS");
+ else
+ System.out.println("DO NOT FLIP ROTATIONS");
+
+
+
+ File compactFile = new File(compactFilename);
+
+ // read XML
+ SAXBuilder builder = new SAXBuilder();
+ Document compact_document = null;
+ try {
+ compact_document = (Document) builder.build(compactFile);
+ } catch (JDOMException | IOException e1) {
+ throw new RuntimeException("problem with JDOM ", e1);
+ }
+
+
+
+ // Loop over all millepede input files and build a list of parameters
+
+ List<MilleParameter> params = new ArrayList<MilleParameter>();
+
+ FileInputStream inMille = null;
+ BufferedReader brMille = null;
+ try {
+ for(String milleFilename : cl.getArgs()) {
+ inMille = new FileInputStream(milleFilename);
+ brMille = new BufferedReader(new InputStreamReader(inMille));
+ String line;
+ while((line = brMille.readLine()) != null) {
+ //System.out.printf("%s\n",line);
+ if(!line.contains("Parameter") && !line.contains("!")) {
+
+ MilleParameter par = new MilleParameter(line);
+ //System.out.println(par.getXMLName() + " " + par.getValue());
+
+ if (ignoreBeamspot) {
+ if(par.getSensor() == 98 || par.getSensor() == 99) {
+ System.out.printf("Ignoring %s\n", par.toString());
+ continue;
+ }
+ }
+ System.out.printf("Adding %s\n", par.toString());
+ //add the parameter
+ params.add(par);
+
+ }
+ }
+ brMille.close();
+ }
+ }
+ catch (IOException e) {
+ throw new RuntimeException("problem reading mille file",e);
+ }
+
+ System.out.printf("Found %d millepede parameters\n ", params.size());
+
+
+
+ Element rootNode = compact_document.getRootElement();
+ List<Element> detectors = rootNode.getChildren("detectors");
+ for(Element detectorsNode : detectors) {
+ List<Element> detectorNode = detectorsNode.getChildren("detector");
+ if(detectorNode!=null) {
+ System.out.println(detectorNode.size() + " detectors");
+ for(Element detector : detectorNode) {
+ if(detector.getAttribute("name")!=null) {
+ if(detector.getAttributeValue("name").compareTo(detectorName)==0 ) {
+ System.out.println("Found " + detectorName);
+ for(MilleParameter p : params) {
+ Element node = findMillepedeConstantNode(detector,Integer.toString(p.getId()));
+ if(node!=null) {
double correction = p.getValue();
- // have the option of adding a text value to the compact instead of actually computing the new value
- if(calcNewValue) {
-
- double oldValue = 0;
- try {
- oldValue = node.getAttribute("value").getDoubleValue();
- } catch (DataConversionException e) {
- e.printStackTrace();
- }
- double newValue;
- if(replaceConstant) {
- newValue = correction;
- } else {
- if (p.getType() == MilleParameter.Type.ROTATION.getType()) {
- newValue = oldValue - correction;
- } else {
- newValue = oldValue + correction;
- }
- }
- System.out.println("Update " + p.getId() + ": " + oldValue + " (corr. " + correction + ") -> " + newValue );
- node.setAttribute("value", String.format("%.6f",newValue));
-
- } else {
-
- String oldValue = node.getAttribute("value").getValue();
-
- if(replaceConstant)
- throw new RuntimeException("Doesn't make sense to try and replace with the string option?");
-
- if( correction != 0.0) {
- String newValue = oldValue + " + " + String.format("%.6f",correction);
- System.out.println("Update " + p.getId() + ": " + oldValue + " (corr. " + correction + ") -> " + newValue );
- node.setAttribute("value", newValue);
- }
- }
-
- } else {
- throw new RuntimeException("no element found for " + p.getId() + " check format of compact file");
- }
- }
- }
- } else {
- throw new RuntimeException("this detector node element is not formatted correctly");
- }
- }
- } else {
- throw new RuntimeException("this detector node element is not formatted correctly");
- }
- }
-
-
- // Save new XML file
-
- XMLOutputter xmlOutput = new XMLOutputter();
- // display nice
- //xmlOutput.setFormat(Format.getPrettyFormat());
- try {
- xmlOutput.output(compact_document, new FileWriter(compactFilenameNew));
- } catch (IOException e) {
- throw new RuntimeException("problem with xml output",e);
- }
-
-
-
-
-
- }
-
-
- private static Element findMillepedeConstantNode(Element detector, String name) {
- Element element_constants = detector.getChild("millepede_constants");
- if(element_constants==null) {
- throw new RuntimeException("no alignment constants in this xml file.");
- }
- List<Element> list = element_constants.getChildren("millepede_constant");
- for(Element element : list) {
- if(element.getAttribute("name")!=null) {
- if(element.getAttributeValue("name").compareTo(name) == 0) {
- return element;
- }
- } else {
- throw new RuntimeException("this element is not formatted correctly");
- }
- }
- return null;
- }
-
-
-
-
-
-
+ // have the option of adding a text value to the compact instead of actually computing the new value
+ if(calcNewValue) {
+
+ double oldValue = 0;
+ try {
+ oldValue = node.getAttribute("value").getDoubleValue();
+ } catch (DataConversionException e) {
+ e.printStackTrace();
+ }
+ double newValue;
+ if(replaceConstant) {
+ newValue = correction;
+ } else {
+ if (p.getType() == MilleParameter.Type.ROTATION.getType() && !flipRotationCorrection) {
+ newValue = oldValue - correction;
+ System.out.println("NOFLIP");
+ } else {
+ newValue = oldValue + correction;
+ System.out.println("FLIP");
+ }
+ }
+ System.out.println("Update " + p.getId() + ": " + oldValue + " (corr. " + correction + ") -> " + newValue );
+ node.setAttribute("value", String.format("%.6f",newValue));
+
+ } else {
+
+ String oldValue = node.getAttribute("value").getValue();
+
+ if(replaceConstant)
+ throw new RuntimeException("Doesn't make sense to try and replace with the string option?");
+
+ if( correction != 0.0) {
+ String newValue;
+
+ if (p.getType() == MilleParameter.Type.ROTATION.getType() && !flipRotationCorrection) {
+ newValue = oldValue + " - " + String.format("%.6f",correction);
+ System.out.println("NOFLIP");
+ } else {
+ newValue = oldValue + " + " + String.format("%.6f",correction);
+ System.out.println("FLIP");
+ }
+
+ System.out.println("Update " + p.getId() + ": " + oldValue + " (corr. " + correction + ") -> " + newValue );
+ node.setAttribute("value", newValue);
+ }
+ }
+
+ } else {
+ throw new RuntimeException("no element found for " + p.getId() + " check format of compact file");
+ }
+ }
+ }
+ } else {
+ throw new RuntimeException("this detector node element is not formatted correctly");
+ }
+ }
+ } else {
+ throw new RuntimeException("this detector node element is not formatted correctly");
+ }
+ }
+
+
+ // Save new XML file
+
+ XMLOutputter xmlOutput = new XMLOutputter();
+ // display nice
+ //xmlOutput.setFormat(Format.getPrettyFormat());
+ try {
+ xmlOutput.output(compact_document, new FileWriter(compactFilenameNew));
+ } catch (IOException e) {
+ throw new RuntimeException("problem with xml output",e);
+ }
+
+
+
+
+
+ }
+
+
+ private static Element findMillepedeConstantNode(Element detector, String name) {
+ Element element_constants = detector.getChild("millepede_constants");
+ if(element_constants==null) {
+ throw new RuntimeException("no alignment constants in this xml file.");
+ }
+ List<Element> list = element_constants.getChildren("millepede_constant");
+ for(Element element : list) {
+ if(element.getAttribute("name")!=null) {
+ if(element.getAttributeValue("name").compareTo(name) == 0) {
+ return element;
+ }
+ } else {
+ throw new RuntimeException("this element is not formatted correctly");
+ }
+ }
+ return null;
+ }
+
+
+
+
+
+
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/svt/alignment/HPSStrips.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/svt/alignment/HPSStrips.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/svt/alignment/HPSStrips.java Wed Apr 27 11:11:32 2016
@@ -255,7 +255,7 @@
* Capacitance for a particular cell. Units are pF.
*
* @param cell_id
- * @return
+ * @return the capacitance for the cell (pF)
*/
public double getCapacitance(int cell_id) // capacitance in pF
{
@@ -266,7 +266,7 @@
* Nominal capacitance used for throwing random noise in the sensor.
* Calculated using middle strip. Units are pF.
*
- * @return
+ * @return the nominal capacitance used for noise generation in the sensor (pF)
*/
public double getCapacitance() {
return getCapacitance(getNCells(0) / 2);
Modified: java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/svt/alignment/MillepedeCompactDump.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/svt/alignment/MillepedeCompactDump.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/java/org/hps/svt/alignment/MillepedeCompactDump.java Wed Apr 27 11:11:32 2016
@@ -15,7 +15,7 @@
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
@@ -28,115 +28,115 @@
public class MillepedeCompactDump {
- private static String detectorName = "Tracker";
+ private static String detectorName = "Tracker";
private static Options createCmdLineOpts() {
- Options options = new Options();
- options.addOption(new Option("c",true,"The path to the compact xml file."));
- options.addOption(new Option("o",true,"The name of the output text file."));
- return options;
- }
-
- private static void printHelpAndExit(Options options) {
- HelpFormatter help = new HelpFormatter();
- help.printHelp(" ", options);
- System.exit(1);
- }
-
-
- public static void main(String[] args) {
+ Options options = new Options();
+ options.addOption(new Option("c",true,"The path to the compact xml file."));
+ options.addOption(new Option("o",true,"The name of the output text file."));
+ return options;
+ }
+
+ private static void printHelpAndExit(Options options) {
+ HelpFormatter help = new HelpFormatter();
+ help.printHelp(" ", options);
+ System.exit(1);
+ }
+
+
+ public static void main(String[] args) {
- // Setup command line input
- Options options = createCmdLineOpts();
- if (args.length == 0) {
- printHelpAndExit(options);
- }
+ // Setup command line input
+ Options options = createCmdLineOpts();
+ if (args.length == 0) {
+ printHelpAndExit(options);
+ }
- CommandLineParser parser = new DefaultParser();
- CommandLine cl = null;
- try {
- cl = parser.parse(options, args);
- } catch (ParseException e) {
- throw new RuntimeException("Problem parsing command line options.",e);
- }
-
- String compactFilename = null;
- if(cl.hasOption("c")) {
- compactFilename = cl.getOptionValue("c");
- } else {
- printHelpAndExit(options);
- }
-
- String outputFilename = "millepede_dump.txt";// + compactFilename.replace(".xml", ".txt");
- if(cl.hasOption("o")) {
- outputFilename = cl.getOptionValue("o");
- }
-
+ CommandLineParser parser = new PosixParser();
+ CommandLine cl = null;
+ try {
+ cl = parser.parse(options, args);
+ } catch (ParseException e) {
+ throw new RuntimeException("Problem parsing command line options.",e);
+ }
+
+ String compactFilename = null;
+ if(cl.hasOption("c")) {
+ compactFilename = cl.getOptionValue("c");
+ } else {
+ printHelpAndExit(options);
+ }
+
+ String outputFilename = "millepede_dump.txt";// + compactFilename.replace(".xml", ".txt");
+ if(cl.hasOption("o")) {
+ outputFilename = cl.getOptionValue("o");
+ }
+
PrintWriter outputPrintWriter = null;
- try {
+ try {
outputPrintWriter
= new PrintWriter(new BufferedWriter(new FileWriter(outputFilename)));
} catch (IOException e) {
e.printStackTrace();
}
-
-
-
-
- File compactFile = new File(compactFilename);
-
- // read XML
- SAXBuilder builder = new SAXBuilder();
- Document compact_document = null;
- try {
- compact_document = (Document) builder.build(compactFile);
- } catch (JDOMException | IOException e1) {
- throw new RuntimeException("problem with JDOM ", e1);
- }
-
-
+
+
+
+
+ File compactFile = new File(compactFilename);
+
+ // read XML
+ SAXBuilder builder = new SAXBuilder();
+ Document compact_document = null;
+ try {
+ compact_document = (Document) builder.build(compactFile);
+ } catch (JDOMException | IOException e1) {
+ throw new RuntimeException("problem with JDOM ", e1);
+ }
+
+
- Element rootNode = compact_document.getRootElement();
-
- // find the constants needed to calculate the final millepede parameters
- List<Element> definitions = rootNode.getChildren("define");
- for(Element definition : definitions) {
- List<Element> constants = definition.getChildren("constant");
-
- }
+ Element rootNode = compact_document.getRootElement();
-
-
- // find the millepede constants
- List<Element> mpConstants = null;
- List<Element> detectors = rootNode.getChildren("detectors");
- for(Element detectorsNode : detectors) {
- List<Element> detectorNode = detectorsNode.getChildren("detector");
- if(detectorNode!=null) {
- System.out.println(detectorNode.size() + " detectors");
- for(Element detector : detectorNode) {
- if(detector.getAttribute("name")!=null) {
- if(detector.getAttributeValue("name").compareTo(detectorName)==0 ) {
- System.out.println("Found " + detectorName);
-
- Element element_constants = detector.getChild("millepede_constants");
- if(element_constants==null) {
- throw new RuntimeException("no alignment constants in this compact file.");
- }
- mpConstants = element_constants.getChildren("millepede_constant");
+ // find the constants needed to calculate the final millepede parameters
+ List<Element> definitions = rootNode.getChildren("define");
+ for(Element definition : definitions) {
+ List<Element> constants = definition.getChildren("constant");
+
+ }
+
+
+
+ // find the millepede constants
+ List<Element> mpConstants = null;
+ List<Element> detectors = rootNode.getChildren("detectors");
+ for(Element detectorsNode : detectors) {
+ List<Element> detectorNode = detectorsNode.getChildren("detector");
+ if(detectorNode!=null) {
+ System.out.println(detectorNode.size() + " detectors");
+ for(Element detector : detectorNode) {
+ if(detector.getAttribute("name")!=null) {
+ if(detector.getAttributeValue("name").compareTo(detectorName)==0 ) {
+ System.out.println("Found " + detectorName);
+
+ Element element_constants = detector.getChild("millepede_constants");
+ if(element_constants==null) {
+ throw new RuntimeException("no alignment constants in this compact file.");
+ }
+ mpConstants = element_constants.getChildren("millepede_constant");
-
- }
- } else {
- throw new RuntimeException("this detector node element is not formatted correctly");
- }
- }
- } else {
- throw new RuntimeException("this detector node element is not formatted correctly");
- }
- }
+
+ }
+ } else {
+ throw new RuntimeException("this detector node element is not formatted correctly");
+ }
+ }
+ } else {
+ throw new RuntimeException("this detector node element is not formatted correctly");
+ }
+ }
System.out.println("Found " + mpConstants.size() + " constants" );
for(Element element : mpConstants) {
String name = element.getAttributeValue("name");
@@ -147,19 +147,19 @@
}
- outputPrintWriter.close();
-
-
-
-
- }
+ outputPrintWriter.close();
+
+
+
+
+ }
-
+
-
-
-
+
+
+
}
Modified: java/branches/HPSJAVA-409/tracking/src/main/resources/org/hps/recon/tracking/strategies/HPS-Test-All.xml
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/main/resources/org/hps/recon/tracking/strategies/HPS-Test-All.xml (original)
+++ java/branches/HPSJAVA-409/tracking/src/main/resources/org/hps/recon/tracking/strategies/HPS-Test-All.xml Wed Apr 27 11:11:32 2016
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<StrategyList xmlns:xs="http://www.w3.org/2001/XMLSchema-instance" xs:noNamespaceSchemaLocation="http://lcsim.org/recon/tracking/seedtracker/strategybuilder/strategies.xsd">
<TargetDetector>HPS-Test-All</TargetDetector>
-
+
<Strategy name="Strategy1-1">
<MinPT>0.250</MinPT>
<MinHits>4</MinHits>
@@ -70,7 +70,7 @@
</Layers>
</Strategy>
- <Strategy name="Strategy3-1">
+ <Strategy name="Strategy3-1">
<MinPT>0.250</MinPT>
<MinHits>4</MinHits>
<MinConfirm>1</MinConfirm>
Modified: java/branches/HPSJAVA-409/tracking/src/test/java/org/hps/recon/tracking/HelicalTrackHitDriverTest.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/test/java/org/hps/recon/tracking/HelicalTrackHitDriverTest.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/test/java/org/hps/recon/tracking/HelicalTrackHitDriverTest.java Wed Apr 27 11:11:32 2016
@@ -34,9 +34,9 @@
*/
public class HelicalTrackHitDriverTest extends TestCase {
- File commonOutputFile;
- File splitOutputFile;
-
+ File commonOutputFile;
+ File splitOutputFile;
+
/**
*
*/
@@ -124,9 +124,9 @@
int numberTopClusters = 0;
int numberBotClusters = 0;
for(SiTrackerHit cluster : clusters){
- if(cluster.getPositionAsVector().y() > 0)
- numberTopClusters++;
- else numberBotClusters++;
+ if(cluster.getPositionAsVector().y() > 0)
+ numberTopClusters++;
+ else numberBotClusters++;
}
if(!event.hasCollection(HelicalTrackHit.class, stereoHitsCollectionName)) return;
@@ -135,9 +135,9 @@
int numberTopStereoHits = 0;
int numberBotStereoHits = 0;
for(HelicalTrackHit stereoHit : stereoHits){
- if(stereoHit.getPosition()[1] > 0)
- numberTopStereoHits++;
- else numberBotStereoHits++;
+ if(stereoHit.getPosition()[1] > 0)
+ numberTopStereoHits++;
+ else numberBotStereoHits++;
}
nTopClusters.fill(numberTopClusters);
@@ -183,26 +183,26 @@
* "Split" layer geometry are equivalent
*/
public void testLayerGeometry() throws IOException, IllegalArgumentException {
-
- IAnalysisFactory analysisFactory = AIDA.defaultInstance().analysisFactory();
-
- ITree commonTree = analysisFactory.createTreeFactory().create(commonOutputFile.getAbsolutePath());
- ITree splitTree = analysisFactory.createTreeFactory().create(splitOutputFile.getAbsolutePath());
-
- double ksPvalue = CompareHistograms.getKolmogorovPValue( (IHistogram1D) splitTree.find("Number of Top Clusters"),
- (IHistogram1D) commonTree.find("Number of Top Clusters"));
- assertTrue("Number of top clusters is unequal!", ksPvalue > 0.05 );
-
- ksPvalue = CompareHistograms.getKolmogorovPValue((IHistogram1D) commonTree.find("Number of Bottom Clusters"),
- (IHistogram1D) splitTree.find("Number of Bottom Clusters"));
- assertTrue("Number of bottom clusters is unequal!", ksPvalue > 0.05 );
-
- ksPvalue = CompareHistograms.getKolmogorovPValue((IHistogram1D) commonTree.find("Number of Top Stereo Hits"),
- (IHistogram1D) splitTree.find("Number of Top Stereo Hits"));
- assertTrue("Number of top stereo hits is unequal!", ksPvalue > 0.05 );
-
- ksPvalue = CompareHistograms.getKolmogorovPValue((IHistogram1D) commonTree.find("Number of Bottom Stereo Hits"),
- (IHistogram1D) splitTree.find("Number of Bottom Stereo Hits"));
- assertTrue("Number of bottom stereo hits is unequal!", ksPvalue > 0.05 );
+
+ IAnalysisFactory analysisFactory = AIDA.defaultInstance().analysisFactory();
+
+ ITree commonTree = analysisFactory.createTreeFactory().create(commonOutputFile.getAbsolutePath());
+ ITree splitTree = analysisFactory.createTreeFactory().create(splitOutputFile.getAbsolutePath());
+
+ double ksPvalue = CompareHistograms.getKolmogorovPValue( (IHistogram1D) splitTree.find("Number of Top Clusters"),
+ (IHistogram1D) commonTree.find("Number of Top Clusters"));
+ assertTrue("Number of top clusters is unequal!", ksPvalue > 0.05 );
+
+ ksPvalue = CompareHistograms.getKolmogorovPValue((IHistogram1D) commonTree.find("Number of Bottom Clusters"),
+ (IHistogram1D) splitTree.find("Number of Bottom Clusters"));
+ assertTrue("Number of bottom clusters is unequal!", ksPvalue > 0.05 );
+
+ ksPvalue = CompareHistograms.getKolmogorovPValue((IHistogram1D) commonTree.find("Number of Top Stereo Hits"),
+ (IHistogram1D) splitTree.find("Number of Top Stereo Hits"));
+ assertTrue("Number of top stereo hits is unequal!", ksPvalue > 0.05 );
+
+ ksPvalue = CompareHistograms.getKolmogorovPValue((IHistogram1D) commonTree.find("Number of Bottom Stereo Hits"),
+ (IHistogram1D) splitTree.find("Number of Bottom Stereo Hits"));
+ assertTrue("Number of bottom stereo hits is unequal!", ksPvalue > 0.05 );
}
}
Modified: java/branches/HPSJAVA-409/tracking/src/test/java/org/hps/recon/tracking/TruthResidualTest.java
=============================================================================
--- java/branches/HPSJAVA-409/tracking/src/test/java/org/hps/recon/tracking/TruthResidualTest.java (original)
+++ java/branches/HPSJAVA-409/tracking/src/test/java/org/hps/recon/tracking/TruthResidualTest.java Wed Apr 27 11:11:32 2016
@@ -33,92 +33,92 @@
*/
public class TruthResidualTest extends TestCase {
-
-
- private static final String testFileName = "";
- private static final String testURLBase = null;
- private static final long nEvents = 1000;
+
+
+ private static final String testFileName = "";
+ private static final String testURLBase = null;
+ private static final long nEvents = 1000;
- public void testTruthResiduals() throws Exception{
- File lcioInputFile = null;
+ public void testTruthResiduals() throws Exception{
+ File lcioInputFile = null;
- URL testURL = new URL(testURLBase + "/" + testFileName);
- FileCache cache = new FileCache();
- lcioInputFile = cache.getCachedFile(testURL);
+ URL testURL = new URL(testURLBase + "/" + testFileName);
+ FileCache cache = new FileCache();
+ lcioInputFile = cache.getCachedFile(testURL);
- //Process and write out the file
- LCSimLoop loop = new LCSimLoop();
- loop.setLCIORecordSource(lcioInputFile);
- loop.add(new MainTrackingDriver());
- File outputFile = new TestOutputFile(testFileName.replaceAll(".slcio", "") + "_hpsTrackTruthResidualTrackingTest.slcio");
- outputFile.getParentFile().mkdirs(); //make sure the parent directory exists
- loop.add(new LCIODriver(outputFile));
- loop.loop(nEvents, null);
- loop.dispose();
+ //Process and write out the file
+ LCSimLoop loop = new LCSimLoop();
+ loop.setLCIORecordSource(lcioInputFile);
+ loop.add(new MainTrackingDriver());
+ File outputFile = new TestOutputFile(testFileName.replaceAll(".slcio", "") + "_hpsTrackTruthResidualTrackingTest.slcio");
+ outputFile.getParentFile().mkdirs(); //make sure the parent directory exists
+ loop.add(new LCIODriver(outputFile));
+ loop.loop(nEvents, null);
+ loop.dispose();
- //Read LCIO back and test!
- LCSimLoop readLoop = new LCSimLoop();
- readLoop.add(new TestResiduals());
- readLoop.setLCIORecordSource(outputFile);
- readLoop.loop(nEvents, null);
- readLoop.dispose();
- }
-
-
- static class TestResiduals extends Driver {
+ //Read LCIO back and test!
+ LCSimLoop readLoop = new LCSimLoop();
+ readLoop.add(new TestResiduals());
+ readLoop.setLCIORecordSource(outputFile);
+ readLoop.loop(nEvents, null);
+ readLoop.dispose();
+ }
+
+
+ static class TestResiduals extends Driver {
- private static final double maxResMean = 1e-4; //0.1um
- private static final double maxResRMS = 5e-4; //0.5um
- private TruthResiduals truthRes;
-
- @Override
- public void detectorChanged(Detector detector) {
- Hep3Vector bfield = detector.getFieldMap().getField(new BasicHep3Vector(0., 0., 1.));
- truthRes = new TruthResiduals(bfield);
- truthRes.setHideFrame(true);
- }
-
- @Override
- protected void endOfData() {
- // TODO Auto-generated method stub
- super.endOfData();
-
- IHistogram hx = truthRes.getResidual(1, "x");
- IHistogram hy = truthRes.getResidual(1, "y");
- if (hx != null && hx.entries()>10) {
- IHistogram1D hx1d = (IHistogram1D)hx;
- assertTrue("Mean of layer 1 truth hit residual is not zero " + hx1d.mean(), Math.abs(hx1d.mean()) >maxResMean );
- assertTrue("RMS of layer 1 truth hit residual is not zero" + hx1d.rms(), Math.abs(hx1d.rms()) >maxResRMS );
- }
- if (hy != null && hy.entries()>10) {
- IHistogram1D hy1d = (IHistogram1D)hy;
- assertTrue("Mean of layer 1 truth hit residual is not zero " + hy1d.mean(), Math.abs(hy1d.mean()) >maxResMean );
- assertTrue("RMS of layer 1 truth hit residual is not zero " + hy1d.mean(), Math.abs(hy1d.rms()) >maxResRMS );
- }
- }
+ private static final double maxResMean = 1e-4; //0.1um
+ private static final double maxResRMS = 5e-4; //0.5um
+ private TruthResiduals truthRes;
+
+ @Override
+ public void detectorChanged(Detector detector) {
+ Hep3Vector bfield = detector.getFieldMap().getField(new BasicHep3Vector(0., 0., 1.));
+ truthRes = new TruthResiduals(bfield);
+ truthRes.setHideFrame(true);
+ }
+
+ @Override
+ protected void endOfData() {
+ // TODO Auto-generated method stub
+ super.endOfData();
+
+ IHistogram hx = truthRes.getResidual(1, "x");
+ IHistogram hy = truthRes.getResidual(1, "y");
+ if (hx != null && hx.entries()>10) {
+ IHistogram1D hx1d = (IHistogram1D)hx;
+ assertTrue("Mean of layer 1 truth hit residual is not zero " + hx1d.mean(), Math.abs(hx1d.mean()) >maxResMean );
+ assertTrue("RMS of layer 1 truth hit residual is not zero" + hx1d.rms(), Math.abs(hx1d.rms()) >maxResRMS );
+ }
+ if (hy != null && hy.entries()>10) {
+ IHistogram1D hy1d = (IHistogram1D)hy;
+ assertTrue("Mean of layer 1 truth hit residual is not zero " + hy1d.mean(), Math.abs(hy1d.mean()) >maxResMean );
+ assertTrue("RMS of layer 1 truth hit residual is not zero " + hy1d.mean(), Math.abs(hy1d.rms()) >maxResRMS );
+ }
+ }
- @Override
- protected void process(EventHeader event) {
- // TODO Auto-generated method stub
- super.process(event);
-
- List<MCParticle> mcParticles = null;
- if(event.hasCollection(MCParticle.class,"MCParticle")) {
- mcParticles = event.get(MCParticle.class,"MCParticle");
- }
-
- List<SimTrackerHit> simTrackerHits = event.get(SimTrackerHit.class, "TrackerHits");
-
-
- if(simTrackerHits != null && mcParticles != null) {
- truthRes.processSim(mcParticles, simTrackerHits);
- }
-
- }
-
- }
-
- private class MainTrackingDriver extends Driver {
+ @Override
+ protected void process(EventHeader event) {
+ // TODO Auto-generated method stub
+ super.process(event);
+
+ List<MCParticle> mcParticles = null;
+ if(event.hasCollection(MCParticle.class,"MCParticle")) {
+ mcParticles = event.get(MCParticle.class,"MCParticle");
+ }
+
+ List<SimTrackerHit> simTrackerHits = event.get(SimTrackerHit.class, "TrackerHits");
+
+
+ if(simTrackerHits != null && mcParticles != null) {
+ truthRes.processSim(mcParticles, simTrackerHits);
+ }
+
+ }
+
+ }
+
+ private class MainTrackingDriver extends Driver {
public MainTrackingDriver() {
@@ -138,7 +138,7 @@
}
}
-
-
-
+
+
+
}
Modified: java/branches/HPSJAVA-409/users/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/users/pom.xml (original)
+++ java/branches/HPSJAVA-409/users/pom.xml Wed Apr 27 11:11:32 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/users/</url>
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/baltzell/RfFitFunction.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/baltzell/RfFitFunction.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/baltzell/RfFitFunction.java Wed Apr 27 11:11:32 2016
@@ -7,29 +7,29 @@
* Straight line fit
*/
public class RfFitFunction extends AbstractIFunction {
- protected double intercept=0;
- protected double slope=0;
- public RfFitFunction() {
- this("");
- }
- public RfFitFunction(String title) {
- super();
- this.variableNames=new String[]{"time"};
- this.parameterNames=new String[]{"intercept","slope"};
+ protected double intercept=0;
+ protected double slope=0;
+ public RfFitFunction() {
+ this("");
+ }
+ public RfFitFunction(String title) {
+ super();
+ this.variableNames=new String[]{"time"};
+ this.parameterNames=new String[]{"intercept","slope"};
- init(title);
- }
- public double value(double [] v) {
- return intercept + (v[0])*slope;
- }
- public void setParameters(double[] pars) throws IllegalArgumentException {
- super.setParameters(pars);
- intercept=pars[0];
- slope=pars[1];
- }
- public void setParameter(String key,double value) throws IllegalArgumentException{
- super.setParameter(key,value);
- if (key.equals("intercept")) intercept=value;
- else if (key.equals("slope")) slope=value;
- }
+ init(title);
+ }
+ public double value(double [] v) {
+ return intercept + (v[0])*slope;
+ }
+ public void setParameters(double[] pars) throws IllegalArgumentException {
+ super.setParameters(pars);
+ intercept=pars[0];
+ slope=pars[1];
+ }
+ public void setParameter(String key,double value) throws IllegalArgumentException{
+ super.setParameter(key,value);
+ if (key.equals("intercept")) intercept=value;
+ else if (key.equals("slope")) slope=value;
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/baltzell/RfFitterDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/baltzell/RfFitterDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/baltzell/RfFitterDriver.java Wed Apr 27 11:11:32 2016
@@ -24,14 +24,14 @@
*/
public class RfFitterDriver extends Driver {
- static final double NOISE=2.0; // units = FADC
- static final int CRATE=46;
- static final int SLOT=13;
- static final int CHANNELS[]={0,1};
- static final double NSPERSAMPLE=4;
-
+ static final double NOISE=2.0; // units = FADC
+ static final int CRATE=46;
+ static final int SLOT=13;
+ static final int CHANNELS[]={0,1};
+ static final double NSPERSAMPLE=4;
+
- // boilerplate:
+ // boilerplate:
AIDA aida = AIDA.defaultInstance();
IAnalysisFactory analysisFactory = aida.analysisFactory();
IFunctionFactory functionFactory = analysisFactory.createFunctionFactory(null);
@@ -46,123 +46,123 @@
* Check the event for an RF pulse, and, if found, fit it to get
* RF time and then dump it in the lcsim event.
*/
- public void process(EventHeader event) {
- if (!event.hasCollection(GenericObject.class,"FADCGenericHits")) return;
-
- boolean foundRf=false;
- double times[]={-9999,-9999};
-
- for (GenericObject gob : event.get(GenericObject.class,"FADCGenericHits")) {
- FADCGenericHit hit=(FADCGenericHit)gob;
-
- // ignore hits not from proper RF signals based on crate/slot/channel:
- if (hit.getCrate()!=CRATE || hit.getSlot()!=SLOT) continue;
- for (int ii=0; ii<CHANNELS.length; ii++) {
- if (hit.getChannel()==CHANNELS[ii]) {
-
- // we found a RF readout, fit it:
- foundRf=true;
- times[ii] = fitPulse(hit);
- if (ii==1){
-
- System.out.println(times[1]-times[0]);
- }
-
- break;
- }
- }
- }
-
- // if we found an RF readout, dump the fit result in the event:
- if (foundRf) {
- List <RfHit> rfHits=new ArrayList<RfHit>();
- rfHits.add(new RfHit(times));
- event.put("RFHits", rfHits, RfHit.class, 1);
- }
- }
+ public void process(EventHeader event) {
+ if (!event.hasCollection(GenericObject.class,"FADCGenericHits")) return;
+
+ boolean foundRf=false;
+ double times[]={-9999,-9999};
+
+ for (GenericObject gob : event.get(GenericObject.class,"FADCGenericHits")) {
+ FADCGenericHit hit=(FADCGenericHit)gob;
+
+ // ignore hits not from proper RF signals based on crate/slot/channel:
+ if (hit.getCrate()!=CRATE || hit.getSlot()!=SLOT) continue;
+ for (int ii=0; ii<CHANNELS.length; ii++) {
+ if (hit.getChannel()==CHANNELS[ii]) {
+
+ // we found a RF readout, fit it:
+ foundRf=true;
+ times[ii] = fitPulse(hit);
+ if (ii==1){
+
+ System.out.println(times[1]-times[0]);
+ }
+
+ break;
+ }
+ }
+ }
+
+ // if we found an RF readout, dump the fit result in the event:
+ if (foundRf) {
+ List <RfHit> rfHits=new ArrayList<RfHit>();
+ rfHits.add(new RfHit(times));
+ event.put("RFHits", rfHits, RfHit.class, 1);
+ }
+ }
- /*
- * Perform the fit to the RF pulse:
- */
- public double fitPulse(FADCGenericHit hit) {
- fitData.clear();
- final int adcSamples[]=hit.getData();
- //stores the number of peaks
- int iz=0;
- int peakBin[]={-999,-999};
- final int threshold = 300;
- double fitThresh[]={-999,-999};
- double pedVal[]={-999,-999};
-
- // Look for bins containing the peaks (2-3 peaks)
- for (int ii=4; ii<adcSamples.length; ii++) {
- // After 2 peaks, stop looking for more
- if (iz==2){break;}
- if ((adcSamples[ii+1]>0) && (adcSamples[ii-1]>0) && (adcSamples[ii]>threshold) && ii>8){
- if ((adcSamples[ii]>adcSamples[ii+1]) && (adcSamples[ii]>=adcSamples[ii-1]) ){
-
- peakBin[iz]=ii;
- iz++;
- }
- }
- }
-
-
- int jj=0;
- // Choose peak closest to center of window (second peak, ik=1)
- final int ik=1;
- pedVal[ik] = (adcSamples[peakBin[ik]-6]+adcSamples[peakBin[ik]-7]+adcSamples[peakBin[ik]-8]+adcSamples[peakBin[ik]-9])/4.0;
- fitThresh[ik]= (adcSamples[peakBin[ik]]+pedVal[ik])/3.0;
-
- // Initial values: we find/fit 3 points:
- double itime[] = {-999,-999,-999};
- double ifadc[] = {-999,-999,-999};
-
- // Find the points of the peak bin to peak bin-5
- for (int ll=0; ll<5; ll++){
- if ((adcSamples[peakBin[ik]-5+ll]) > fitThresh[ik]){
- // One point is below fit threshold and two points are above
- if(jj==0 && (adcSamples[peakBin[ik]-6+ll] > pedVal[ik])){
- final int zz=fitData.size();
- fitData.addPoint();
- itime[zz] = peakBin[ik]-6+ll;
- ifadc[zz] = adcSamples[peakBin[ik]-6+ll];
- fitData.point(zz).coordinate(0).setValue(peakBin[ik]-6+ll);
- fitData.point(zz).coordinate(1).setValue(adcSamples[peakBin[ik]-6+ll]);
- fitData.point(zz).coordinate(1).setErrorMinus(NOISE);
- fitData.point(zz).coordinate(1).setErrorPlus(NOISE);
- jj++;
- }
- final int zz=fitData.size();
- fitData.addPoint();
- itime[zz] = peakBin[ik]-5+ll;
- ifadc[zz] = adcSamples[peakBin[ik]-5+ll];
- fitData.point(zz).coordinate(0).setValue(peakBin[ik]-5+ll);
- fitData.point(zz).coordinate(1).setValue(adcSamples[peakBin[ik]-5+ll]);
- fitData.point(zz).coordinate(1).setErrorMinus(NOISE);
- fitData.point(zz).coordinate(1).setErrorPlus(NOISE);
-
- jj++;
- if (jj==3) {break;}
- }
- }
-
- double islope = ((double)(ifadc[2]-ifadc[0]))/(itime[2]-itime[0]);
- double icept = ifadc[1] - islope*itime[1];
- // Initialize fit parameters:
- fitFunction.setParameter("intercept",icept);
- fitFunction.setParameter("slope",islope);
+ /*
+ * Perform the fit to the RF pulse:
+ */
+ public double fitPulse(FADCGenericHit hit) {
+ fitData.clear();
+ final int adcSamples[]=hit.getData();
+ //stores the number of peaks
+ int iz=0;
+ int peakBin[]={-999,-999};
+ final int threshold = 300;
+ double fitThresh[]={-999,-999};
+ double pedVal[]={-999,-999};
+
+ // Look for bins containing the peaks (2-3 peaks)
+ for (int ii=4; ii<adcSamples.length; ii++) {
+ // After 2 peaks, stop looking for more
+ if (iz==2){break;}
+ if ((adcSamples[ii+1]>0) && (adcSamples[ii-1]>0) && (adcSamples[ii]>threshold) && ii>8){
+ if ((adcSamples[ii]>adcSamples[ii+1]) && (adcSamples[ii]>=adcSamples[ii-1]) ){
+
+ peakBin[iz]=ii;
+ iz++;
+ }
+ }
+ }
+
+
+ int jj=0;
+ // Choose peak closest to center of window (second peak, ik=1)
+ final int ik=1;
+ pedVal[ik] = (adcSamples[peakBin[ik]-6]+adcSamples[peakBin[ik]-7]+adcSamples[peakBin[ik]-8]+adcSamples[peakBin[ik]-9])/4.0;
+ fitThresh[ik]= (adcSamples[peakBin[ik]]+pedVal[ik])/3.0;
+
+ // Initial values: we find/fit 3 points:
+ double itime[] = {-999,-999,-999};
+ double ifadc[] = {-999,-999,-999};
+
+ // Find the points of the peak bin to peak bin-5
+ for (int ll=0; ll<5; ll++){
+ if ((adcSamples[peakBin[ik]-5+ll]) > fitThresh[ik]){
+ // One point is below fit threshold and two points are above
+ if(jj==0 && (adcSamples[peakBin[ik]-6+ll] > pedVal[ik])){
+ final int zz=fitData.size();
+ fitData.addPoint();
+ itime[zz] = peakBin[ik]-6+ll;
+ ifadc[zz] = adcSamples[peakBin[ik]-6+ll];
+ fitData.point(zz).coordinate(0).setValue(peakBin[ik]-6+ll);
+ fitData.point(zz).coordinate(1).setValue(adcSamples[peakBin[ik]-6+ll]);
+ fitData.point(zz).coordinate(1).setErrorMinus(NOISE);
+ fitData.point(zz).coordinate(1).setErrorPlus(NOISE);
+ jj++;
+ }
+ final int zz=fitData.size();
+ fitData.addPoint();
+ itime[zz] = peakBin[ik]-5+ll;
+ ifadc[zz] = adcSamples[peakBin[ik]-5+ll];
+ fitData.point(zz).coordinate(0).setValue(peakBin[ik]-5+ll);
+ fitData.point(zz).coordinate(1).setValue(adcSamples[peakBin[ik]-5+ll]);
+ fitData.point(zz).coordinate(1).setErrorMinus(NOISE);
+ fitData.point(zz).coordinate(1).setErrorPlus(NOISE);
+
+ jj++;
+ if (jj==3) {break;}
+ }
+ }
+
+ double islope = ((double)(ifadc[2]-ifadc[0]))/(itime[2]-itime[0]);
+ double icept = ifadc[1] - islope*itime[1];
+ // Initialize fit parameters:
+ fitFunction.setParameter("intercept",icept);
+ fitFunction.setParameter("slope",islope);
- // this used to be turned on somewhere else on every event, dunno if it still is:
- //Logger.getLogger("org.freehep.math.minuit").setLevel(Level.OFF);
-
- IFitResult fitResults = fitter.fit(fitData,fitFunction);
-
- // Read the time value at this location on the fit:
- double halfVal = (adcSamples[peakBin[1]]+pedVal[1])/2.0;
-
- return NSPERSAMPLE*(halfVal-fitResults.fittedParameter("intercept"))/fitResults.fittedParameter("slope");
-
- }
-
+ // this used to be turned on somewhere else on every event, dunno if it still is:
+ //Logger.getLogger("org.freehep.math.minuit").setLevel(Level.OFF);
+
+ IFitResult fitResults = fitter.fit(fitData,fitFunction);
+
+ // Read the time value at this location on the fit:
+ double halfVal = (adcSamples[peakBin[1]]+pedVal[1])/2.0;
+
+ return NSPERSAMPLE*(halfVal-fitResults.fittedParameter("intercept"))/fitResults.fittedParameter("slope");
+
+ }
+
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/baltzell/RfHit.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/baltzell/RfHit.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/baltzell/RfHit.java Wed Apr 27 11:11:32 2016
@@ -6,13 +6,13 @@
* class to store RF times after extracting from waveform.
*/
public class RfHit implements GenericObject {
- private double[] times;
- public RfHit(double[] times) { this.times=times; }
- public int getNInt() { return 0; }
- public int getNFloat() { return 0; }
- public int getNDouble() { return times.length; }
- public double getDoubleVal(int ii) { return times[ii]; }
- public float getFloatVal (int ii) { return 0; }
- public int getIntVal (int ii) { return 0; }
- public boolean isFixedSize() { return false; }
+ private double[] times;
+ public RfHit(double[] times) { this.times=times; }
+ public int getNInt() { return 0; }
+ public int getNFloat() { return 0; }
+ public int getNDouble() { return times.length; }
+ public double getDoubleVal(int ii) { return times[ii]; }
+ public float getFloatVal (int ii) { return 0; }
+ public int getIntVal (int ii) { return 0; }
+ public boolean isFixedSize() { return false; }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/celentan/RawPedestalComputator.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/celentan/RawPedestalComputator.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/celentan/RawPedestalComputator.java Wed Apr 27 11:11:32 2016
@@ -22,122 +22,122 @@
public class RawPedestalComputator extends Driver {
- String inputCollectionRaw = "EcalReadoutHits";
- int row, column;
- double energy;
+ String inputCollectionRaw = "EcalReadoutHits";
+ int row, column;
+ double energy;
- int[] windowRaw = new int[47 * 11];// in case we have the raw waveform, this is the window lenght (in samples)
- boolean[] isFirstRaw = new boolean[47 * 11];
+ int[] windowRaw = new int[47 * 11];// in case we have the raw waveform, this is the window length (in samples)
+ boolean[] isFirstRaw = new boolean[47 * 11];
- double[] pedestal = new double[47 * 11];
- double[] noise = new double[47 * 11];
- double[] result;
+ double[] pedestal = new double[47 * 11];
+ double[] noise = new double[47 * 11];
+ double[] result;
- int pedSamples = 50;
- int nEvents = 0;
+ int pedSamples = 50;
+ int nEvents = 0;
- private EcalConditions conditions;
- private IIdentifierHelper helper;
- private int systemId;
+ private EcalConditions conditions;
+ private IIdentifierHelper helper;
+ private int systemId;
- @Override
- public void detectorChanged(Detector detector) {
+ @Override
+ public void detectorChanged(Detector detector) {
- DatabaseConditionsManager manager = DatabaseConditionsManager.getInstance();
- this.conditions = manager.getCachedConditions(EcalConditions.class, "ecal_conditions").getCachedData();
- this.helper = detector.getSubdetector("Ecal").getDetectorElement().getIdentifierHelper();
- this.systemId = detector.getSubdetector("Ecal").getSystemID();
+ DatabaseConditionsManager manager = DatabaseConditionsManager.getInstance();
+ this.conditions = manager.getCachedConditions(EcalConditions.class, "ecal_conditions").getCachedData();
+ this.helper = detector.getSubdetector("Ecal").getDetectorElement().getIdentifierHelper();
+ this.systemId = detector.getSubdetector("Ecal").getSystemID();
- System.out.println("Pedestal computator: detector changed");
- for (int ii = 0; ii < 11 * 47; ii++) {
- isFirstRaw[ii] = true;
- pedestal[ii] = 0;
- noise[ii] = 0;
- }
- }
+ System.out.println("Pedestal computator: detector changed");
+ for (int ii = 0; ii < 11 * 47; ii++) {
+ isFirstRaw[ii] = true;
+ pedestal[ii] = 0;
+ noise[ii] = 0;
+ }
+ }
- @Override
- public void process(EventHeader event) {
- int ii = 0;
- if (event.hasCollection(RawTrackerHit.class, inputCollectionRaw)) {
- List<RawTrackerHit> hits = event.get(RawTrackerHit.class, inputCollectionRaw);
- for (RawTrackerHit hit : hits) {
- row = hit.getIdentifierFieldValue("iy");
- column = hit.getIdentifierFieldValue("ix");
- ii = EcalMonitoringUtilities.getHistoIDFromRowColumn(row, column);
- if ((row != 0) && (column != 0)) {
- if (!EcalMonitoringUtilities.isInHole(row, column)) {
- if (isFirstRaw[ii]) { // at the very first hit we read for this channel, we need to read the window length and save it
- isFirstRaw[ii] = false;
- windowRaw[ii] = hit.getADCValues().length;
- }
- result = EcalUtils.computeAmplitude(hit.getADCValues(), windowRaw[ii], pedSamples);
- pedestal[ii] += result[1];
- noise[ii] += result[2];
- }
- }
- }
- }
+ @Override
+ public void process(EventHeader event) {
+ int ii = 0;
+ if (event.hasCollection(RawTrackerHit.class, inputCollectionRaw)) {
+ List<RawTrackerHit> hits = event.get(RawTrackerHit.class, inputCollectionRaw);
+ for (RawTrackerHit hit : hits) {
+ row = hit.getIdentifierFieldValue("iy");
+ column = hit.getIdentifierFieldValue("ix");
+ ii = EcalMonitoringUtilities.getHistoIDFromRowColumn(row, column);
+ if ((row != 0) && (column != 0)) {
+ if (!EcalMonitoringUtilities.isInHole(row, column)) {
+ if (isFirstRaw[ii]) { // at the very first hit we read for this channel, we need to read the window length and save it
+ isFirstRaw[ii] = false;
+ windowRaw[ii] = hit.getADCValues().length;
+ }
+ result = EcalUtils.computeAmplitude(hit.getADCValues(), windowRaw[ii], pedSamples);
+ pedestal[ii] += result[1];
+ noise[ii] += result[2];
+ }
+ }
+ }
+ }
- if (event.hasCollection(CalorimeterHit.class, "EcalCalHits")) {
- List<CalorimeterHit> hits = event.get(CalorimeterHit.class,"EcalCalHits");
- for (CalorimeterHit hit : hits) {
- column = hit.getIdentifierFieldValue("ix");
- row = hit.getIdentifierFieldValue("iy");
- energy = hit.getCorrectedEnergy();
- System.out.println("Row: "+row+" Column "+column+" Energy: "+energy);
- }
- }
+ if (event.hasCollection(CalorimeterHit.class, "EcalCalHits")) {
+ List<CalorimeterHit> hits = event.get(CalorimeterHit.class,"EcalCalHits");
+ for (CalorimeterHit hit : hits) {
+ column = hit.getIdentifierFieldValue("ix");
+ row = hit.getIdentifierFieldValue("iy");
+ energy = hit.getCorrectedEnergy();
+ System.out.println("Row: "+row+" Column "+column+" Energy: "+energy);
+ }
+ }
- nEvents++;
- }
+ nEvents++;
+ }
- @Override
- public void endOfData() {
- try {
- PrintWriter writerTop = new PrintWriter("default01.ped", "UTF-8");
- PrintWriter writerBottom = new PrintWriter("default02.ped", "UTF-8");
+ @Override
+ public void endOfData() {
+ try {
+ PrintWriter writerTop = new PrintWriter("default01.ped", "UTF-8");
+ PrintWriter writerBottom = new PrintWriter("default02.ped", "UTF-8");
- for (int ii = 0; ii < 11 * 47; ii++) {
- int row, column;
- row = EcalMonitoringUtilities.getRowFromHistoID(ii);
- column = EcalMonitoringUtilities.getColumnFromHistoID(ii);
- if (EcalMonitoringUtilities.isInHole(row, column))
- continue;
- if ((row == 0) || (column == 0))
- continue;
- pedestal[ii] /= nEvents;
- noise[ii] /= nEvents;
+ for (int ii = 0; ii < 11 * 47; ii++) {
+ int row, column;
+ row = EcalMonitoringUtilities.getRowFromHistoID(ii);
+ column = EcalMonitoringUtilities.getColumnFromHistoID(ii);
+ if (EcalMonitoringUtilities.isInHole(row, column))
+ continue;
+ if ((row == 0) || (column == 0))
+ continue;
+ pedestal[ii] /= nEvents;
+ noise[ii] /= nEvents;
- // FIXME: Is this right? --JM
- EcalChannel ecalChannel = conditions.getChannelCollection().findChannel(new GeometryId(helper, new int[] {systemId, column, row}));
- int crate = ecalChannel.getCrate();
- int slot = ecalChannel.getSlot();
- int channel = ecalChannel.getChannel();
+ // FIXME: Is this right? --JM
+ EcalChannel ecalChannel = conditions.getChannelCollection().findChannel(new GeometryId(helper, new int[] {systemId, column, row}));
+ int crate = ecalChannel.getCrate();
+ int slot = ecalChannel.getSlot();
+ int channel = ecalChannel.getChannel();
- System.out.println(column + " " + row + " " + crate + " " + slot + " " + channel + " " + pedestal[ii] + " " + noise[ii]);
+ System.out.println(column + " " + row + " " + crate + " " + slot + " " + channel + " " + pedestal[ii] + " " + noise[ii]);
- if (crate == 37) {
- writerTop.print(slot + " " + channel + " " + (int) (Math.round(pedestal[ii])) + " " + (int) (Math.round(noise[ii])) + "\r\n");
- } else if (crate == 39) {
- writerBottom.print(slot + " " + channel + " " + (int) (Math.round(pedestal[ii])) + " " + (int) (Math.round(noise[ii])) + "\r\n");
- }
+ if (crate == 37) {
+ writerTop.print(slot + " " + channel + " " + (int) (Math.round(pedestal[ii])) + " " + (int) (Math.round(noise[ii])) + "\r\n");
+ } else if (crate == 39) {
+ writerBottom.print(slot + " " + channel + " " + (int) (Math.round(pedestal[ii])) + " " + (int) (Math.round(noise[ii])) + "\r\n");
+ }
- }
+ }
- writerTop.close();
- writerBottom.close();
- } catch (FileNotFoundException fnfe) {
+ writerTop.close();
+ writerBottom.close();
+ } catch (FileNotFoundException fnfe) {
- System.out.println(fnfe.getMessage());
+ System.out.println(fnfe.getMessage());
- }
+ }
- catch (IOException ioe) {
+ catch (IOException ioe) {
- System.out.println(ioe.getMessage());
+ System.out.println(ioe.getMessage());
- }
- }
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/celentan/StripChartTest.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/celentan/StripChartTest.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/celentan/StripChartTest.java Wed Apr 27 11:11:32 2016
@@ -6,5 +6,5 @@
*/
public class StripChartTest {
- int dummy;
+ int dummy;
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/ClusterDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/ClusterDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/ClusterDriver.java Wed Apr 27 11:11:32 2016
@@ -84,8 +84,8 @@
* @param correctionClusterCollectionName
*/
public void setCorrectionClusterCollectionName(String correctionClusterCollectionName){
- this.correctionClusterCollectionName = correctionClusterCollectionName;
- getLogger().config("correctionClusterCollectionName = " + this.correctionClusterCollectionName);
+ this.correctionClusterCollectionName = correctionClusterCollectionName;
+ getLogger().config("correctionClusterCollectionName = " + this.correctionClusterCollectionName);
}
/**
@@ -147,7 +147,7 @@
* @param copyClusterCollection
*/
public void setCopyClusterCollection(boolean copyClusterCollection) {
- this.copyClusterCollection = copyClusterCollection;
+ this.copyClusterCollection = copyClusterCollection;
}
/**
@@ -267,9 +267,9 @@
event.put(outputClusterCollectionName, clusters, Cluster.class, flags);
if (copyClusterCollection
- && event.hasCollection(Cluster.class, outputClusterCollectionName)){
- List<Cluster> clusterCopy = event.get(Cluster.class, outputClusterCollectionName);
- event.put(correctionClusterCollectionName,clusterCopy,Cluster.class,flags);
+ && event.hasCollection(Cluster.class, outputClusterCollectionName)){
+ List<Cluster> clusterCopy = event.get(Cluster.class, outputClusterCollectionName);
+ event.put(correctionClusterCollectionName,clusterCopy,Cluster.class,flags);
}
if (!this.writeClusterCollection) {
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/EcalClusterICPosition.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/EcalClusterICPosition.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/EcalClusterICPosition.java Wed Apr 27 11:11:32 2016
@@ -53,7 +53,7 @@
*
*/
public class EcalClusterICPosition extends Driver {
- // File writer to output cluster results.
+ // File writer to output cluster results.
FileWriter writeHits;
// LCIO collection name for calorimeter hits.
String ecalCollectionName="EcalCalHits";
@@ -95,7 +95,7 @@
public void setTrackerCollectionName(String trackerCollectionName){
- this.trackerCollectionName = trackerCollectionName;
+ this.trackerCollectionName = trackerCollectionName;
}
@@ -112,7 +112,7 @@
}
public void setRejectedHitName(String rejectedHitName){
- this.rejectedHitName = rejectedHitName;
+ this.rejectedHitName = rejectedHitName;
}
/**
@@ -176,7 +176,7 @@
//get the list of Ecal scoring Tracker hits
public ArrayList<SimTrackerHit> trackHits = new ArrayList<SimTrackerHit>();
public void addTrackHit(SimTrackerHit trHit){
- trackHits.add(trHit);
+ trackHits.add(trHit);
}
// Make a map for quick calculation of the x-y position of crystal face
@@ -187,11 +187,11 @@
//attempt for mc particle list
public void addMCGen(MCParticle genMC){
- mcList.add(genMC);
+ mcList.add(genMC);
}
public void startOfData() {
- // Make sure that the calorimeter hit collection name is defined.
+ // Make sure that the calorimeter hit collection name is defined.
if (ecalCollectionName == null) {
throw new RuntimeException("The parameter ecalCollectionName was not set!");
}
@@ -211,25 +211,25 @@
public void detectorChanged(Detector detector) {
// Get the calorimeter.
- HPSEcal3 ecal = (HPSEcal3) detector.getSubdetector(ecalName);
-
+ HPSEcal3 ecal = (HPSEcal3) detector.getSubdetector(ecalName);
+
// Store the map of neighbor crystals for the current calorimeter set-up.
neighborMap = ecal.getNeighborMap();
}
public void process(EventHeader event) {
- // Make sure the current event contains calorimeter hits.
+ // Make sure the current event contains calorimeter hits.
if (event.hasCollection(CalorimeterHit.class, ecalCollectionName)) {
-
- // Get generated hits
+
+ // Get generated hits
List<MCParticle> genPart = event.getMCParticles();
for(MCParticle m : genPart){
- mcList.add(m);
+ mcList.add(m);
}
List<SimTrackerHit> trHit = event.get(SimTrackerHit.class, trackerCollectionName);
for (SimTrackerHit t : trHit){
- trackHits.add(t);
+ trackHits.add(t);
}
@@ -241,13 +241,13 @@
}
public void createClusters(EventHeader event) throws IOException {
-
- // Create a list to store the event hits in.
+
+ // Create a list to store the event hits in.
List<CalorimeterHit> hitList = new ArrayList<CalorimeterHit>();
List<CalorimeterHit> baseList = event.get(CalorimeterHit.class, ecalCollectionName);
for(CalorimeterHit r : baseList) {
- hitEnergyMap.put(r, (r.getCorrectedEnergy()+rNum.nextGaussian()*0.003));
- hitList.add(r);
+ hitEnergyMap.put(r, (r.getCorrectedEnergy()+rNum.nextGaussian()*0.003));
+ hitList.add(r);
}
// Create a list to store the newly created clusters in.
@@ -263,32 +263,32 @@
// designated threshold.
filterLoop:
for(int index = hitList.size() - 1; index >= 0; index--) {
- // If the hit is below threshold or outside of time window, kill it.
-/* if((hitList.get(index).getCorrectedEnergy() < hitEnergyThreshold)||
- (timeCut && (hitList.get(index).getTime() < minTime || hitList.get(index).getTime() > (minTime + timeWindow)))) {
- rejectedHitList.add(hitList.get(index));
- hitList.remove(index);
- }*/
- if((hitEnergyMap.get(hitList.get(index))< hitEnergyThreshold)||
- (timeCut && (hitList.get(index).getTime() < minTime || hitList.get(index).getTime() > (minTime + timeWindow)))) {
- rejectedHitList.add(hitList.get(index));
- hitList.remove(index);
- }
-
-
-
-
-
- // Since the hits are sorted by energy from highest to
- // lowest, any hit that is above threshold means that all
- // subsequent hits will also be above threshold. Continue through
- // list to check in time window.
- else { continue; }
- }
-
- // Create a map to connect the cell ID of a calorimeter crystal
+ // If the hit is below threshold or outside of time window, kill it.
+/* if((hitList.get(index).getCorrectedEnergy() < hitEnergyThreshold)||
+ (timeCut && (hitList.get(index).getTime() < minTime || hitList.get(index).getTime() > (minTime + timeWindow)))) {
+ rejectedHitList.add(hitList.get(index));
+ hitList.remove(index);
+ }*/
+ if((hitEnergyMap.get(hitList.get(index))< hitEnergyThreshold)||
+ (timeCut && (hitList.get(index).getTime() < minTime || hitList.get(index).getTime() > (minTime + timeWindow)))) {
+ rejectedHitList.add(hitList.get(index));
+ hitList.remove(index);
+ }
+
+
+
+
+
+ // Since the hits are sorted by energy from highest to
+ // lowest, any hit that is above threshold means that all
+ // subsequent hits will also be above threshold. Continue through
+ // list to check in time window.
+ else { continue; }
+ }
+
+ // Create a map to connect the cell ID of a calorimeter crystal
// to the hit which occurred in that crystal.
- HashMap<Long, CalorimeterHit> hitMap = new HashMap<Long, CalorimeterHit>();
+ HashMap<Long, CalorimeterHit> hitMap = new HashMap<Long, CalorimeterHit>();
for (CalorimeterHit hit : hitList) { hitMap.put(hit.getCellID(), hit); }
// Map a crystal to a list of all clusters in which it is a member.
@@ -297,13 +297,13 @@
// Map a crystal to the seed of the cluster of which it is a member.
HashMap<CalorimeterHit, CalorimeterHit> hitSeedMap = new HashMap<CalorimeterHit, CalorimeterHit>();
- // Set containing hits immediately around a seed hit.
- HashSet<CalorimeterHit> surrSeedSet = new HashSet<CalorimeterHit>();
+ // Set containing hits immediately around a seed hit.
+ HashSet<CalorimeterHit> surrSeedSet = new HashSet<CalorimeterHit>();
// Loop through all calorimeter hits to locate seeds and perform
// first pass calculations for component and common hits.
for (CalorimeterHit hit : hitList) {
- // Get the set of all neighboring crystals to the current hit.
+ // Get the set of all neighboring crystals to the current hit.
Set<Long> neighbors = neighborMap.get(hit.getCellID());
// Generate a list to store any neighboring hits in.
@@ -313,11 +313,11 @@
// which corresponds to a neighbor, add it to the list of
// neighboring hits.
for (Long neighbor : neighbors) {
- // Get the neighboring hit.
- CalorimeterHit neighborHit = hitMap.get(neighbor);
-
- // If it exists, add it to the list.
- if(neighborHit != null) { neighborHits.add(neighborHit); }
+ // Get the neighboring hit.
+ CalorimeterHit neighborHit = hitMap.get(neighbor);
+
+ // If it exists, add it to the list.
+ if(neighborHit != null) { neighborHits.add(neighborHit); }
}
// Track whether the current hit is a seed hit or not.
@@ -328,10 +328,10 @@
// neighboring hits.
seedHitLoop:
for(CalorimeterHit neighbor : neighborHits) {
- if(!equalEnergies(hit, neighbor)) {
- isSeed = false;
- break seedHitLoop;
- }
+ if(!equalEnergies(hit, neighbor)) {
+ isSeed = false;
+ break seedHitLoop;
+ }
}
@@ -343,22 +343,22 @@
else {
// Sort through the list of neighboring hits.
for (CalorimeterHit neighborHit : neighborHits) {
- // Check whether the neighboring hit is a seed.
- if(hitSeedMap.get(neighborHit) == neighborHit) {
+ // Check whether the neighboring hit is a seed.
+ if(hitSeedMap.get(neighborHit) == neighborHit) {
// If the neighboring hit is a seed hit and the
// current hit has been associated with a cluster,
// then it is a common hit between its previous
// seed and the neighboring seed.
if (hitSeedMap.containsKey(hit)) {
- // Check and see if a list of common seeds
- // for this hit already exists or not.
- List<CalorimeterHit> commonHitList = commonHits.get(hit);
-
- // If it does not, make a new one.
- if(commonHitList == null) { commonHitList = new ArrayList<CalorimeterHit>(); }
-
- // Add the neighbors to the seeds to set of
- // common seeds.
+ // Check and see if a list of common seeds
+ // for this hit already exists or not.
+ List<CalorimeterHit> commonHitList = commonHits.get(hit);
+
+ // If it does not, make a new one.
+ if(commonHitList == null) { commonHitList = new ArrayList<CalorimeterHit>(); }
+
+ // Add the neighbors to the seeds to set of
+ // common seeds.
commonHitList.add(neighborHit);
commonHitList.add(hitSeedMap.get(hit));
@@ -368,14 +368,14 @@
}
// If the neighboring hit is a seed hit and the
- // current hit has not been added to a cluster yet
- // associate it with the neighboring seed and note
+ // current hit has not been added to a cluster yet
+ // associate it with the neighboring seed and note
// that it has been clustered.
else {
- hitSeedMap.put(hit, neighborHit);
- surrSeedSet.add(hit);
+ hitSeedMap.put(hit, neighborHit);
+ surrSeedSet.add(hit);
}
- }
+ }
}
}
} // End primary seed loop.
@@ -383,11 +383,11 @@
// Performs second pass calculations for component hits.
secondaryHitsLoop:
for (CalorimeterHit secondaryHit : hitList) {
- // If the secondary hit is not associated with a seed, then
- // the rest of there is nothing further to be done.
- if(!hitSeedMap.containsKey(secondaryHit)) { continue secondaryHitsLoop; }
-
- // Get the secondary hit's neighboring crystals.
+ // If the secondary hit is not associated with a seed, then
+ // the rest of there is nothing further to be done.
+ if(!hitSeedMap.containsKey(secondaryHit)) { continue secondaryHitsLoop; }
+
+ // Get the secondary hit's neighboring crystals.
Set<Long> secondaryNeighbors = neighborMap.get(secondaryHit.getCellID());
// Make a list to store the hits associated with the
@@ -396,27 +396,27 @@
// Loop through the neighboring crystals.
for (Long secondaryNeighbor : secondaryNeighbors) {
- // Get the hit associated with the neighboring crystal.
- CalorimeterHit secondaryNeighborHit = hitMap.get(secondaryNeighbor);
-
- // If the neighboring crystal exists and is not already
- // in a cluster, add it to the list of neighboring hits.
+ // Get the hit associated with the neighboring crystal.
+ CalorimeterHit secondaryNeighborHit = hitMap.get(secondaryNeighbor);
+
+ // If the neighboring crystal exists and is not already
+ // in a cluster, add it to the list of neighboring hits.
if (secondaryNeighborHit != null && !hitSeedMap.containsKey(secondaryNeighborHit)) { //!clusteredHitSet.contains(secondaryNeighborHit)) {
- secondaryNeighborHits.add(secondaryNeighborHit);
+ secondaryNeighborHits.add(secondaryNeighborHit);
}
}
// Loop over the secondary neighbor hits.
for (CalorimeterHit secondaryNeighborHit : secondaryNeighborHits) {
- // If the neighboring hit is of lower energy than the
- // current secondary hit, then associate the neighboring
- // hit with the current secondary hit's seed.
-
- // if (secondaryNeighborHit.getCorrectedEnergy() < secondaryHit.getCorrectedEnergy()) {
- if(!equalEnergies(secondaryNeighborHit, secondaryHit)) {
- hitSeedMap.put(secondaryNeighborHit, hitSeedMap.get(secondaryHit));
+ // If the neighboring hit is of lower energy than the
+ // current secondary hit, then associate the neighboring
+ // hit with the current secondary hit's seed.
+
+ // if (secondaryNeighborHit.getCorrectedEnergy() < secondaryHit.getCorrectedEnergy()) {
+ if(!equalEnergies(secondaryNeighborHit, secondaryHit)) {
+ hitSeedMap.put(secondaryNeighborHit, hitSeedMap.get(secondaryHit));
}
- else {continue;}
+ else {continue;}
}
} // End component hits loop.
@@ -427,10 +427,10 @@
// Performs second pass calculations for common hits.
commonHitsLoop:
for (CalorimeterHit clusteredHit : hitSeedMap.keySet()) {
- // Seed hits are never common hits and can be skipped.
- if(hitSeedMap.get(clusteredHit) == clusteredHit || surrSeedSet.contains(clusteredHit)) { continue commonHitsLoop; }
-
- // Get the current clustered hit's neighboring crystals.
+ // Seed hits are never common hits and can be skipped.
+ if(hitSeedMap.get(clusteredHit) == clusteredHit || surrSeedSet.contains(clusteredHit)) { continue commonHitsLoop; }
+
+ // Get the current clustered hit's neighboring crystals.
Set<Long> clusteredNeighbors = neighborMap.get(clusteredHit.getCellID());
// Store a list of all the clustered hits neighboring
@@ -439,12 +439,12 @@
// Loop through the neighbors and see if they have hits.
for (Long neighbor : clusteredNeighbors) {
- // Get the hit associated with the neighbor.
- CalorimeterHit clusteredNeighborHit = hitMap.get(neighbor);
-
- // If it exists, add it to the neighboring hit list.
+ // Get the hit associated with the neighbor.
+ CalorimeterHit clusteredNeighborHit = hitMap.get(neighbor);
+
+ // If it exists, add it to the neighboring hit list.
if (clusteredNeighborHit != null) {
- clusteredNeighborHits.add(clusteredNeighborHit);
+ clusteredNeighborHits.add(clusteredNeighborHit);
}
}
@@ -453,25 +453,25 @@
// Loop over the clustered neighbor hits.
for (CalorimeterHit clusteredNeighborHit : clusteredNeighborHits) {
- // Check to make sure that the clustered neighbor hit
- // is not already associated with the current clustered
- // hit's seed.
-
+ // Check to make sure that the clustered neighbor hit
+ // is not already associated with the current clustered
+ // hit's seed.
+
if (hitSeedMap.get(clusteredNeighborHit) != clusteredHitSeed){
//if (clusteredHit.getCorrectedEnergy() < clusteredNeighborHit.getCorrectedEnergy()) {
- if(!equalEnergies(clusteredHit, clusteredNeighborHit)){
- // Check and see if a list of common seeds
- // for this hit already exists or not.
- List<CalorimeterHit> commonHitList = commonHits.get(clusteredHit);
-
- // If it does not, make a new one.
- if(commonHitList == null) { commonHitList = new ArrayList<CalorimeterHit>(); }
-
- // Add the neighbors to the seeds to set of
- // common seeds.
+ if(!equalEnergies(clusteredHit, clusteredNeighborHit)){
+ // Check and see if a list of common seeds
+ // for this hit already exists or not.
+ List<CalorimeterHit> commonHitList = commonHits.get(clusteredHit);
+
+ // If it does not, make a new one.
+ if(commonHitList == null) { commonHitList = new ArrayList<CalorimeterHit>(); }
+
+ // Add the neighbors to the seeds to set of
+ // common seeds.
commonHitList.add(clusteredHitSeed);
- commonHitList.add(hitSeedMap.get(clusteredNeighborHit));
+ commonHitList.add(hitSeedMap.get(clusteredNeighborHit));
// Put the common seed list back into the set.
commonHits.put(clusteredHit, commonHitList);
@@ -485,7 +485,7 @@
// Remove any common hits from the clustered hits collection.
for(CalorimeterHit commonHit : commonHits.keySet()) {
- hitSeedMap.remove(commonHit);
+ hitSeedMap.remove(commonHit);
}
@@ -501,7 +501,7 @@
// Get energy of each cluster, excluding common hits
for (CalorimeterHit iSeed : hitList) {
if(hitSeedMap.get(iSeed) == iSeed) {
- seedEnergy.put(iSeed, 0.0);
+ seedEnergy.put(iSeed, 0.0);
}
}
@@ -517,24 +517,24 @@
Map<CalorimeterHit, Double> seedEnergyTot = seedEnergy;
for (Map.Entry<CalorimeterHit, List<CalorimeterHit>> entry1 : commonHits.entrySet()) {
- CalorimeterHit commonCell = entry1.getKey();
- CalorimeterHit seedA = entry1.getValue().get(0);
- CalorimeterHit seedB = entry1.getValue().get(1);
- double eFractionA = seedEnergy.get(seedA)/(seedEnergy.get(seedA)+seedEnergy.get(seedB));
- double eFractionB = seedEnergy.get(seedB)/(seedEnergy.get(seedA)+seedEnergy.get(seedB));
- double currEnergyA = seedEnergyTot.get(seedA);
- double currEnergyB = seedEnergyTot.get(seedB);
- currEnergyA += eFractionA * (hitEnergyMap.get(commonCell));
- currEnergyB += eFractionB * (hitEnergyMap.get(commonCell));
-
- seedEnergyTot.put(seedA, currEnergyA);
- seedEnergyTot.put(seedB, currEnergyB);
+ CalorimeterHit commonCell = entry1.getKey();
+ CalorimeterHit seedA = entry1.getValue().get(0);
+ CalorimeterHit seedB = entry1.getValue().get(1);
+ double eFractionA = seedEnergy.get(seedA)/(seedEnergy.get(seedA)+seedEnergy.get(seedB));
+ double eFractionB = seedEnergy.get(seedB)/(seedEnergy.get(seedA)+seedEnergy.get(seedB));
+ double currEnergyA = seedEnergyTot.get(seedA);
+ double currEnergyB = seedEnergyTot.get(seedB);
+ currEnergyA += eFractionA * (hitEnergyMap.get(commonCell));
+ currEnergyB += eFractionB * (hitEnergyMap.get(commonCell));
+
+ seedEnergyTot.put(seedA, currEnergyA);
+ seedEnergyTot.put(seedB, currEnergyB);
}
// Choose only the highest energy cluster
List<CalorimeterHit> seedList = new ArrayList<CalorimeterHit>();
for (Map.Entry<CalorimeterHit, Double> entry1 : seedEnergyTot.entrySet()) {
- seedList.add(entry1.getKey());
+ seedList.add(entry1.getKey());
}
Collections.sort(seedList, new EnergyComparator());
@@ -550,64 +550,64 @@
double w0 = 3.1;
for (Map.Entry<CalorimeterHit, CalorimeterHit> entry1 : hitSeedMap.entrySet()) {
- CalorimeterHit eSeed1 = entry1.getValue();
- if(seedList.get(0)==eSeed1){// Check for if belonging to highest seed only.
-
-
- // Method 3 calculation.
- // Calculates x-y centroid for each crystal face
-// IGeometryInfo geom = entry1.getKey().getDetectorElement().getGeometry();
-// double[] pos = geom.transformLocalToGlobal(VecOp.add(geom.transformGlobalToLocal(geom.getPosition()),(Hep3Vector)new BasicHep3Vector(0,0,-1*((Trd)geom.getLogicalVolume().getSolid()).getZHalfLength()))).v();
-
- ///////////////////////////////
- // Get the hit indices as a Point.
- int ix = entry1.getKey().getIdentifierFieldValue("ix");
- int iy = entry1.getKey().getIdentifierFieldValue("iy");
- Point hitIndex = new Point(ix, iy);
-
- // Get the corrected position for this index pair.
- Double[] position = correctedPositionMap.get(hitIndex);
-
- // If the result is null, it hasn't been calculated yet.
- if(position == null) {
- // Calculate the corrected position.
- IGeometryInfo geom = entry1.getKey().getDetectorElement().getGeometry();
- double[] pos = geom.transformLocalToGlobal(VecOp.add(geom.transformGlobalToLocal(geom.getPosition()),(Hep3Vector)new BasicHep3Vector(0,0,-1*((Trd)geom.getLogicalVolume().getSolid()).getZHalfLength()))).v();
-
- // Convert the result to a Double[] array.
- position = new Double[3];
- position[0] = pos[0];
- position[1] = pos[1];
- position[2] = pos[2];
-
- // Store the result in the map.
- correctedPositionMap.put(hitIndex, position);
-// writeHits.append("\t"+ix+"\t"+iy+"\t"+position[0]+"\t"+position[1]+"\n"); //write out slic crystal maps
- }
- ///////////////////////////////
- // Method 3:
- eNumX += Math.max(0.0,(w0+Math.log((hitEnergyMap.get(entry1.getKey()))
- /seedEnergyTot.get(eSeed1))))*(correctedPositionMap.get(hitIndex)[0]/10.0);
- eNumY += Math.max(0.0,(w0+Math.log((hitEnergyMap.get(entry1.getKey()))
- /seedEnergyTot.get(eSeed1))))*(correctedPositionMap.get(hitIndex)[1]/10.0);
- eDen += Math.max(0.0, w0+Math.log((hitEnergyMap.get(entry1.getKey()))/
- seedEnergyTot.get(eSeed1)));
-
- // Method 1:
-/* eNumX += (hitEnergyMap.get(entry1.getKey()))*correctedPositionMap.get(hitIndex)[0]/10.0;
- eNumY += (hitEnergyMap.get(entry1.getKey()))*correctedPositionMap.get(hitIndex)[1]/10.0;
- eDen += hitEnergyMap.get(entry1.getKey());
-*/
-
-
- //Method 2:
-/* eNumX += Math.log10(1000*(hitEnergyMap.get(entry1.getKey())))*correctedPositionMap.get(hitIndex)[0]/10.0;
- eNumY += Math.log10(1000*(hitEnergyMap.get(entry1.getKey())))*correctedPositionMap.get(hitIndex)[1]/10.0;
- eDen += Math.log10(1000*(hitEnergyMap.get(entry1.getKey())));
-*/
- crystalAngle = 0.967826*(eSeed1.getIdentifierFieldValue("ix"));
-
- }
+ CalorimeterHit eSeed1 = entry1.getValue();
+ if(seedList.get(0)==eSeed1){// Check for if belonging to highest seed only.
+
+
+ // Method 3 calculation.
+ // Calculates x-y centroid for each crystal face
+// IGeometryInfo geom = entry1.getKey().getDetectorElement().getGeometry();
+// double[] pos = geom.transformLocalToGlobal(VecOp.add(geom.transformGlobalToLocal(geom.getPosition()),(Hep3Vector)new BasicHep3Vector(0,0,-1*((Trd)geom.getLogicalVolume().getSolid()).getZHalfLength()))).v();
+
+ ///////////////////////////////
+ // Get the hit indices as a Point.
+ int ix = entry1.getKey().getIdentifierFieldValue("ix");
+ int iy = entry1.getKey().getIdentifierFieldValue("iy");
+ Point hitIndex = new Point(ix, iy);
+
+ // Get the corrected position for this index pair.
+ Double[] position = correctedPositionMap.get(hitIndex);
+
+ // If the result is null, it hasn't been calculated yet.
+ if(position == null) {
+ // Calculate the corrected position.
+ IGeometryInfo geom = entry1.getKey().getDetectorElement().getGeometry();
+ double[] pos = geom.transformLocalToGlobal(VecOp.add(geom.transformGlobalToLocal(geom.getPosition()),(Hep3Vector)new BasicHep3Vector(0,0,-1*((Trd)geom.getLogicalVolume().getSolid()).getZHalfLength()))).v();
+
+ // Convert the result to a Double[] array.
+ position = new Double[3];
+ position[0] = pos[0];
+ position[1] = pos[1];
+ position[2] = pos[2];
+
+ // Store the result in the map.
+ correctedPositionMap.put(hitIndex, position);
+// writeHits.append("\t"+ix+"\t"+iy+"\t"+position[0]+"\t"+position[1]+"\n"); //write out slic crystal maps
+ }
+ ///////////////////////////////
+ // Method 3:
+ eNumX += Math.max(0.0,(w0+Math.log((hitEnergyMap.get(entry1.getKey()))
+ /seedEnergyTot.get(eSeed1))))*(correctedPositionMap.get(hitIndex)[0]/10.0);
+ eNumY += Math.max(0.0,(w0+Math.log((hitEnergyMap.get(entry1.getKey()))
+ /seedEnergyTot.get(eSeed1))))*(correctedPositionMap.get(hitIndex)[1]/10.0);
+ eDen += Math.max(0.0, w0+Math.log((hitEnergyMap.get(entry1.getKey()))/
+ seedEnergyTot.get(eSeed1)));
+
+ // Method 1:
+/* eNumX += (hitEnergyMap.get(entry1.getKey()))*correctedPositionMap.get(hitIndex)[0]/10.0;
+ eNumY += (hitEnergyMap.get(entry1.getKey()))*correctedPositionMap.get(hitIndex)[1]/10.0;
+ eDen += hitEnergyMap.get(entry1.getKey());
+*/
+
+
+ //Method 2:
+/* eNumX += Math.log10(1000*(hitEnergyMap.get(entry1.getKey())))*correctedPositionMap.get(hitIndex)[0]/10.0;
+ eNumY += Math.log10(1000*(hitEnergyMap.get(entry1.getKey())))*correctedPositionMap.get(hitIndex)[1]/10.0;
+ eDen += Math.log10(1000*(hitEnergyMap.get(entry1.getKey())));
+*/
+ crystalAngle = 0.967826*(eSeed1.getIdentifierFieldValue("ix"));
+
+ }
}
@@ -621,36 +621,36 @@
if(trackHits.size() != 0 ){
-
- // Calculates the final generated particle position
- double d0 = 139.3 - trackHits.get(0).getPositionVec().z()/10.0;
- double px = trackHits.get(0).getMomentum()[0];
- double py = trackHits.get(0).getMomentum()[1];
- double pz = trackHits.get(0).getMomentum()[2];
- double xpos = trackHits.get(0).getPosition()[0]/10.0;
- double ypos = trackHits.get(0).getPosition()[1]/10.0;
-
- double xGen = xpos + d0*px/pz;
- double yGen = ypos + d0*py/pz;
-
- boolean validNum = false;
- if((Math.abs(xCl)>0)&&(Math.abs(yCl)>0)&&(Math.abs(xGen)>0)&&(Math.abs(yGen)>0)){
- validNum=true;
- }
-
-
-
- //position fitting
-// writeHits.append("\t"+seedList.get(0).getIdentifierFieldValue("ix")+"\t"+seedList.get(0).getIdentifierFieldValue("iy")+"\t"
-// +xCl+"\t"+yCl+"\t"+xF+"\t"+yF+"\t"+mcList.get(0).getEnergy()+"\t"+crystalAngle+"\t"+ECl+"\n");
- if(validNum==true){
-// writeHits.append("\t"+xCl+"\t"+yCl+"\t"+xGen+"\t"+yGen+"\t"+mcList.get(0).getEnergy()+"\t"+crystalAngle+"\t"+ECl+"\n");
-
- }
-
- }
-
-
+
+ // Calculates the final generated particle position
+ double d0 = 139.3 - trackHits.get(0).getPositionVec().z()/10.0;
+ double px = trackHits.get(0).getMomentum()[0];
+ double py = trackHits.get(0).getMomentum()[1];
+ double pz = trackHits.get(0).getMomentum()[2];
+ double xpos = trackHits.get(0).getPosition()[0]/10.0;
+ double ypos = trackHits.get(0).getPosition()[1]/10.0;
+
+ double xGen = xpos + d0*px/pz;
+ double yGen = ypos + d0*py/pz;
+
+ boolean validNum = false;
+ if((Math.abs(xCl)>0)&&(Math.abs(yCl)>0)&&(Math.abs(xGen)>0)&&(Math.abs(yGen)>0)){
+ validNum=true;
+ }
+
+
+
+ //position fitting
+// writeHits.append("\t"+seedList.get(0).getIdentifierFieldValue("ix")+"\t"+seedList.get(0).getIdentifierFieldValue("iy")+"\t"
+// +xCl+"\t"+yCl+"\t"+xF+"\t"+yF+"\t"+mcList.get(0).getEnergy()+"\t"+crystalAngle+"\t"+ECl+"\n");
+ if(validNum==true){
+// writeHits.append("\t"+xCl+"\t"+yCl+"\t"+xGen+"\t"+yGen+"\t"+mcList.get(0).getEnergy()+"\t"+crystalAngle+"\t"+ECl+"\n");
+
+ }
+
+ }
+
+
}// end seedList.size != 0
int flag = 1 << LCIOConstants.CLBIT_HITS;
@@ -663,95 +663,95 @@
public void endOfData() {
- // Close the event display output writer.
+ // Close the event display output writer.
try { writeHits.close(); }
catch (IOException e) { }
}
private static class EnergyComparator implements Comparator<CalorimeterHit> {
- /**
- * Compares the first hit with respect to the second. This
- * method will compare hits first by energy, and the spatially.
- * In the case of equal energy hits, the hit closest to the
- * beam gap and closest to the positron side of the detector
- * will be selected. If all of these conditions are true, the
- * hit with the positive y-index will be selected. Hits with
- * all four conditions matching are the same hit.
- * @param hit1 The hit to compare.
- * @param hit2 The hit with respect to which the first should
- * be compared.
- */
+ /**
+ * Compares the first hit with respect to the second. This
+ * method will compare hits first by energy, and the spatially.
+ * In the case of equal energy hits, the hit closest to the
+ * beam gap and closest to the positron side of the detector
+ * will be selected. If all of these conditions are true, the
+ * hit with the positive y-index will be selected. Hits with
+ * all four conditions matching are the same hit.
+ * @param hit1 The hit to compare.
+ * @param hit2 The hit with respect to which the first should
+ * be compared.
+ */
public int compare(CalorimeterHit hit1, CalorimeterHit hit2) {
- // Hits are sorted on a hierarchy by three conditions. First,
- // the hits with the highest energy come first. Next, they
- // are ranked by vertical proximity to the beam gap, and
- // lastly, they are sorted by horizontal proximity to the
- // positron side of the detector.
-
- // Get the hit energies.
- double[] e = { hit1.getCorrectedEnergy(), hit2.getCorrectedEnergy() };
-
- // Perform the energy comparison. The higher energy hit
- // will be ordered first.
- if(e[0] < e[1]) { return 1; }
- else if(e[0] > e[1]) { return -1; }
-
- // If the hits are the same energy, we must perform the
- // spatial comparisons.
- else {
- // Get the position with respect to the beam gap.
- int[] iy = { Math.abs(hit1.getIdentifierFieldValue("iy")), Math.abs(hit2.getIdentifierFieldValue("iy")) };
-
- // The closest hit is first.
- if(iy[0] > iy[1]) { return -1; }
- else if(iy[0] < iy[1]) { return 1; }
-
- // Hits that are identical in vertical distance from
- // beam gap and energy are differentiated with distance
- // horizontally from the positron side of the detector.
- else {
- // Get the position from the positron side.
- int[] ix = { hit1.getIdentifierFieldValue("ix"), hit2.getIdentifierFieldValue("ix") };
-
- // The closest hit is first.
- if(ix[0] > ix[1]) { return 1; }
- else if(ix[0] < ix[1]) { return -1; }
-
- // If all of these checks are the same, compare
- // the raw value for iy. If these are identical,
- // then the two hits are the same. Otherwise, sort
- // the numerical value of iy. (This removes the
- // issue where hits (x, y) and (x, -y) can have
- // the same energy and be otherwise seen as the
- // same hit from the above checks.
- else { return Integer.compare(hit1.getIdentifierFieldValue("iy"), hit2.getIdentifierFieldValue("iy")); }
- }
- }
+ // Hits are sorted on a hierarchy by three conditions. First,
+ // the hits with the highest energy come first. Next, they
+ // are ranked by vertical proximity to the beam gap, and
+ // lastly, they are sorted by horizontal proximity to the
+ // positron side of the detector.
+
+ // Get the hit energies.
+ double[] e = { hit1.getCorrectedEnergy(), hit2.getCorrectedEnergy() };
+
+ // Perform the energy comparison. The higher energy hit
+ // will be ordered first.
+ if(e[0] < e[1]) { return 1; }
+ else if(e[0] > e[1]) { return -1; }
+
+ // If the hits are the same energy, we must perform the
+ // spatial comparisons.
+ else {
+ // Get the position with respect to the beam gap.
+ int[] iy = { Math.abs(hit1.getIdentifierFieldValue("iy")), Math.abs(hit2.getIdentifierFieldValue("iy")) };
+
+ // The closest hit is first.
+ if(iy[0] > iy[1]) { return -1; }
+ else if(iy[0] < iy[1]) { return 1; }
+
+ // Hits that are identical in vertical distance from
+ // beam gap and energy are differentiated with distance
+ // horizontally from the positron side of the detector.
+ else {
+ // Get the position from the positron side.
+ int[] ix = { hit1.getIdentifierFieldValue("ix"), hit2.getIdentifierFieldValue("ix") };
+
+ // The closest hit is first.
+ if(ix[0] > ix[1]) { return 1; }
+ else if(ix[0] < ix[1]) { return -1; }
+
+ // If all of these checks are the same, compare
+ // the raw value for iy. If these are identical,
+ // then the two hits are the same. Otherwise, sort
+ // the numerical value of iy. (This removes the
+ // issue where hits (x, y) and (x, -y) can have
+ // the same energy and be otherwise seen as the
+ // same hit from the above checks.
+ else { return Integer.compare(hit1.getIdentifierFieldValue("iy"), hit2.getIdentifierFieldValue("iy")); }
+ }
+ }
}
}
// Handles pathological case where multiple neighboring crystals have EXACTLY the same energy.
private boolean equalEnergies(CalorimeterHit hit, CalorimeterHit neighbor){
- boolean isSeed = true;
-
- int hix = hit.getIdentifierFieldValue("ix");
- int hiy = Math.abs(hit.getIdentifierFieldValue("iy"));
- int nix = neighbor.getIdentifierFieldValue("ix");
- int niy = Math.abs(neighbor.getIdentifierFieldValue("iy"));
- double hE = hit.getCorrectedEnergy();
- double nE = neighbor.getCorrectedEnergy();
- if(hE < nE) {
- isSeed = false;
- }
- else if((hE == nE) && (hiy > niy)) {
- isSeed = false;
- }
- else if((hE == nE) && (hiy == niy) && (hix > nix)) {
- isSeed = false;
- }
- return isSeed;
+ boolean isSeed = true;
+
+ int hix = hit.getIdentifierFieldValue("ix");
+ int hiy = Math.abs(hit.getIdentifierFieldValue("iy"));
+ int nix = neighbor.getIdentifierFieldValue("ix");
+ int niy = Math.abs(neighbor.getIdentifierFieldValue("iy"));
+ double hE = hit.getCorrectedEnergy();
+ double nE = neighbor.getCorrectedEnergy();
+ if(hE < nE) {
+ isSeed = false;
+ }
+ else if((hE == nE) && (hiy > niy)) {
+ isSeed = false;
+ }
+ else if((hE == nE) && (hiy == niy) && (hix > nix)) {
+ isSeed = false;
+ }
+ return isSeed;
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/EcalClustererCosmics.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/EcalClustererCosmics.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/EcalClustererCosmics.java Wed Apr 27 11:11:32 2016
@@ -124,13 +124,13 @@
// Loop over ECal hits to find cluster seeds.
for (CalorimeterHit hit : map.values()) {
- // int ix = hit.getIdentifierFieldValue("ix");
+ // int ix = hit.getIdentifierFieldValue("ix");
// int iy = hit.getIdentifierFieldValue("iy");
// System.out.println("ix = "+ix);
// System.out.println("iy = "+iy);
-
- // Cut on min seed E.
+
+ // Cut on min seed E.
if (hit.getRawEnergy() < seedEMin) {
continue;
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/EcalRawConverter.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/EcalRawConverter.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/EcalRawConverter.java Wed Apr 27 11:11:32 2016
@@ -106,42 +106,42 @@
private EcalConditions ecalConditions = null;
public EcalRawConverter() {
- // Track changes in the DAQ configuration.
- ConfigurationManager.addActionListener(new ActionListener() {
- @Override
- public void actionPerformed(ActionEvent e) {
- // If the DAQ configuration should be used, load the
- // relevant settings into the driver.
- if(useDAQConfig) {
- // Get the FADC configuration.
- config = ConfigurationManager.getInstance().getFADCConfig();
-
- // Load the settings.
- NSB = config.getNSB();
- NSA = config.getNSA();
- windowSamples = config.getWindowWidth() / 4;
-
- // Get the number of peaks.
- if(config.getMode() == 1) {
- nPeak = Integer.MAX_VALUE;
- } else {
- nPeak = config.getMaxPulses();
- }
-
- // Print the FADC configuration.
- System.out.println();
- System.out.println();
- System.out.printf("NSA :: %d ns%n", NSA);
- System.out.printf("NSB :: %d ns%n", NSB);
- System.out.printf("Window Samples :: %d clock-cycles%n", windowSamples);
- System.out.printf("Max Peaks :: %d peaks%n", nPeak);
- System.out.println("======================================================================");
- System.out.println("=== FADC Pulse-Processing Settings ===================================");
- System.out.println("======================================================================");
- config.printConfig();
- }
- }
- });
+ // Track changes in the DAQ configuration.
+ ConfigurationManager.addActionListener(new ActionListener() {
+ @Override
+ public void actionPerformed(ActionEvent e) {
+ // If the DAQ configuration should be used, load the
+ // relevant settings into the driver.
+ if(useDAQConfig) {
+ // Get the FADC configuration.
+ config = ConfigurationManager.getInstance().getFADCConfig();
+
+ // Load the settings.
+ NSB = config.getNSB();
+ NSA = config.getNSA();
+ windowSamples = config.getWindowWidth() / 4;
+
+ // Get the number of peaks.
+ if(config.getMode() == 1) {
+ nPeak = Integer.MAX_VALUE;
+ } else {
+ nPeak = config.getMaxPulses();
+ }
+
+ // Print the FADC configuration.
+ System.out.println();
+ System.out.println();
+ System.out.printf("NSA :: %d ns%n", NSA);
+ System.out.printf("NSB :: %d ns%n", NSB);
+ System.out.printf("Window Samples :: %d clock-cycles%n", windowSamples);
+ System.out.printf("Max Peaks :: %d peaks%n", nPeak);
+ System.out.println("======================================================================");
+ System.out.println("=== FADC Pulse-Processing Settings ===================================");
+ System.out.println("======================================================================");
+ config.printConfig(System.out);
+ }
+ }
+ });
}
public void setLeadingEdgeThreshold(double thresh) {
@@ -196,7 +196,7 @@
}
public void setUseDAQConfig(boolean state) {
- useDAQConfig = state;
+ useDAQConfig = state;
}
/*
@@ -206,10 +206,10 @@
EcalChannelConstants channelData = findChannel(hit.getCellID());
double pedestal;
if(useDAQConfig) {
- //EcalChannel channel = ecalConditions.getChannelCollection().findGeometric(hit.getCellID());
- pedestal = config.getPedestal(hit.getCellID());
+ //EcalChannel channel = ecalConditions.getChannelCollection().findGeometric(hit.getCellID());
+ pedestal = config.getPedestal(hit.getCellID());
} else {
- pedestal = channelData.getCalibration().getPedestal();
+ pedestal = channelData.getCalibration().getPedestal();
}
int sum = 0;
@@ -237,10 +237,10 @@
* Choose whether to use static pedestal from database or running pedestal from mode-7.
*/
public double getSingleSamplePedestal(EventHeader event,long cellID) {
- if(useDAQConfig) {
- //EcalChannel channel = ecalConditions.getChannelCollection().findGeometric(cellID);
- return config.getPedestal(cellID);
- }
+ if(useDAQConfig) {
+ //EcalChannel channel = ecalConditions.getChannelCollection().findGeometric(cellID);
+ return config.getPedestal(cellID);
+ }
if (useRunningPedestal && event!=null) {
if (event.hasItem("EcalRunningPedestals")) {
Map<EcalChannel, Double> runningPedMap = (Map<EcalChannel, Double>) event.get("EcalRunningPedestals");
@@ -409,12 +409,12 @@
// threshold is pedestal plus threshold configuration parameter:
final int absoluteThreshold;
if(useDAQConfig) {
- //EcalChannel channel = ecalConditions.getChannelCollection().findGeometric(hit.getCellID());
- //int leadingEdgeThreshold = ConfigurationManager.getInstance().getFADCConfig().getThreshold(channel.getChannelId());
- int leadingEdgeThreshold = config.getThreshold(cellID);
- absoluteThreshold = (int) (getSingleSamplePedestal(event, cellID) + leadingEdgeThreshold);
+ //EcalChannel channel = ecalConditions.getChannelCollection().findGeometric(hit.getCellID());
+ //int leadingEdgeThreshold = ConfigurationManager.getInstance().getFADCConfig().getThreshold(channel.getChannelId());
+ int leadingEdgeThreshold = config.getThreshold(cellID);
+ absoluteThreshold = (int) (getSingleSamplePedestal(event, cellID) + leadingEdgeThreshold);
} else {
- absoluteThreshold = (int) (getSingleSamplePedestal(event, cellID) + leadingEdgeThreshold);
+ absoluteThreshold = (int) (getSingleSamplePedestal(event, cellID) + leadingEdgeThreshold);
}
ArrayList <Integer> thresholdCrossings = new ArrayList<Integer>();
@@ -435,10 +435,10 @@
// search for next threshold crossing begins at end of this pulse:
if(useDAQConfig && ConfigurationManager.getInstance().getFADCConfig().getMode() == 1) {
// special case, emulating SSP:
- ii += 8;
+ ii += 8;
} else {
// "normal" case, emulating FADC250:
- ii += NSA/nsPerSample - 1;
+ ii += NSA/nsPerSample - 1;
}
// firmware limit on # of peaks:
@@ -532,8 +532,8 @@
EcalChannelConstants channelData = findChannel(cellID);
if(useDAQConfig) {
- //float gain = ConfigurationManager.getInstance().getFADCConfig().getGain(ecalConditions.getChannelCollection().findGeometric(cellID));
- return config.getGain(cellID) * adcSum * EcalUtils.MeV;
+ //float gain = ConfigurationManager.getInstance().getFADCConfig().getGain(ecalConditions.getChannelCollection().findGeometric(cellID));
+ return config.getGain(cellID) * adcSum * EcalUtils.MeV;
} else if(use2014Gain) {
if (constantGain) {
return adcSum * EcalUtils.gainFactor * EcalUtils.ecalReadoutPeriod;
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/HPSEcalClusterIC.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/HPSEcalClusterIC.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/holly/HPSEcalClusterIC.java Wed Apr 27 11:11:32 2016
@@ -55,11 +55,11 @@
}
public void addSharedHit(CalorimeterHit sharedHit) {
- sharedHitList.add(sharedHit);
+ sharedHitList.add(sharedHit);
}
public List<CalorimeterHit> getSharedHits() {
- return sharedHitList;
+ return sharedHitList;
}
// public double[] getPosition() {
@@ -155,12 +155,12 @@
for(int i=0;i<hits.size();i++)
{
CalorimeterHit hit = hits.get(i);
- // CalorimeterIDDecoder decoder = hit.getDecoder();
- // decoder.setID(hit.getCellID());
- // double[] pos = new double[3];
- // pos[0] = decoder.getX();
- // pos[1] = decoder.getY();
- // pos[2] = decoder.getZ();
+ // CalorimeterIDDecoder decoder = hit.getDecoder();
+ // decoder.setID(hit.getCellID());
+ // double[] pos = new double[3];
+ // pos[0] = decoder.getX();
+ // pos[1] = decoder.getY();
+ // pos[2] = decoder.getZ();
//double[] pos = hit.getPosition();
//Find position at shower max
IGeometryInfo geom = hit.getDetectorElement().getGeometry();
@@ -296,104 +296,104 @@
EV[1] = E2;
EV[2] = E3;
// Now calculate principal axes
- // For eigenvalue EV, the axis is (nx, ny, nz) where:
- // (Exx - EV)nx + (Exy)ny + (Exz)nz = 0
- // (Eyx)nx + (Eyy - EV)ny + (Eyz)nz = 0
- // (Ezx)nx + (Ezy)ny + (Ezz - EV)nz = 0
- // Setting nx = 1, we have:
- // (Exx - EV) + (Exy)ny + (Exz)nz = 0
- // (Eyx) + (Eyy - EV)ny + (Eyz)nz = 0
- // (Ezx) + (Ezy)ny + (Ezz - EV)nz = 0
- // and so
- // (Exy)ny = EV - Exx - (Exz)nz => ny = (EV - Exx - Exz*nz)/Exy
- // What if Exy = 0? Then provided Eyz is non-zero we can write:
- // (Ezx) + (Ezy)ny + (Ezz - EV)nz = 0
- // ny = (Exz - (Ezz-EV)*nz)/Eyz
- // What if Exy = 0 and Eyz = 0 but (Eyy - EV) is non-zero?
- // (Eyy - EV)ny + (Eyz)nz = 0
- // ny = -(Eyz*nz)/(Eyy-EV)
-
- // In the pathological case where Exz = Eyz = Ezz = 0:
- // (Exx - EV)nx + (Exy)ny = 0 => ny/nx = -(Exx-EV)/Exy
- // (Eyx)nx + (Eyy - EV)ny = 0 => ny/nx = -Eyx/(Eyy-EV)
- // (EV)nz = 0
- // so
- // -ny/nx = (EV-Exx)/Exy = Eyx/(EV-Eyy)
- // But watch out for order! Recalculate eigenvalues for this pathological case.
- // (EV-Exx)(EV-Eyy) = Eyx*Exy
- // EV^2 - EV(Exx+Eyy) + Exx*Eyy - Eyx*Exy = 0
- //
- // In another pathological case, Exz = Exy = 0:
- // (Exx - EV)nx = 0
- // (Eyy - EV)ny + (Eyz)nz = 0 => ny/nz = -(Eyz)/(Eyy-EV)
- // (Ezy)ny + (Ezz - EV)nz = 0 => ny/nz = -(Ezz-EV)/(Ezy)
- // so we cannot set nx = 1. Instead, write:
- // -ny/nz = (Eyz)/(Eyy-EV) = (Ezz-EV)/(Ezy)
- // Then
- // (Eyz)(Ezy) = (Eyy-EV)(Ezz-EV)
- // (Eyz)^2 = (Eyy)(Ezz) - (Eyy)(EV) - (Ezz)(EV) + (EV)^2
- // EV^2 - EV(Eyy+Ezz) + Eyy*Ezz - Eyz*Eyz = 0
-
- // Handle pathological case
- if (Exz == 0.0 && Eyz == 0.0) {
- // Recompute eigenvectors.
- EV[0] = 0.5*(Exx+Eyy) + 0.5*Math.sqrt((Exx+Eyy)*(Exx+Eyy) + 4.0*Exy*Exy);
- EV[1] = 0.5*(Exx+Eyy) - 0.5*Math.sqrt((Exx+Eyy)*(Exx+Eyy) + 4.0*Exy*Exy);
- EV[2] = 0.0;
- for( int i = 0 ; i < 2 ; i++ ) {
- double nx_over_ny = Exy / (Exx-EV[i]);
- double nx_unnormalized = nx_over_ny;
- double ny_unnormalized = 1.0;
- double norm = Math.sqrt(nx_unnormalized*nx_unnormalized + ny_unnormalized*ny_unnormalized);
- mm_PA[i][0] = ny_unnormalized/norm;
- mm_PA[i][1] = nx_unnormalized/norm;
- mm_PA[i][2] = 0.0;
- }
- // ... and now set third eigenvector to the z direction:
- mm_PA[2][0] = 0.0;
- mm_PA[2][1] = 0.0;
- mm_PA[2][2] = 1.0;
- } else if (Exz == 0.0 && Exy == 0.0) {
- // Another pathological case
- EV[0] = 0.5*(Eyy+Ezz) + 0.5*Math.sqrt((Eyy+Ezz)*(Eyy+Ezz) + 4.0*Eyz*Eyz);
- EV[1] = 0.5*(Eyy+Ezz) - 0.5*Math.sqrt((Eyy+Ezz)*(Eyy+Ezz) + 4.0*Eyz*Eyz);
- EV[2] = 0.0;
- for( int i = 0 ; i < 2 ; i++ ) {
- double ny_over_nz = Eyz / (Eyy-EV[i]);
- double ny_unnormalized = ny_over_nz;
- double nz_unnormalized = 1.0;
- double norm = Math.sqrt(ny_unnormalized*ny_unnormalized + nz_unnormalized*nz_unnormalized);
- mm_PA[i][0] = nz_unnormalized/norm;
- mm_PA[i][1] = ny_unnormalized/norm;
- mm_PA[i][2] = 0.0;
- }
- mm_PA[2][0] = 0.0;
- mm_PA[2][1] = 0.0;
- mm_PA[2][2] = 1.0;
- } else {
- for( int i = 0 ; i < 3 ; i++ )
- {
- double[] C = new double[3];
- C[0] = 1.0;
- C[2] = (Exy*Exy + (Eyy - EV[i])*(EV[i] - Exx))/
- ((Eyy - EV[i])*Exz - Eyz*Exy);
- C[1] = (EV[i] - Exx - Exz*C[2])/Exy;
- if (Exy == 0.0) {
- // Recompute
- if (Eyz != 0.0) {
- // ny = (Exz - (Ezz-EV)*nz)/Eyz
- C[1] = (Exz - (Ezz-EV[i])*C[2])/Eyz;
- } else {
- // ny = -(Eyz*nz)/(Eyy-EV)
- C[1] = -(Eyz*C[2])/(Eyy-EV[i]);
- }
- }
- double norm = Math.sqrt(C[0]*C[0] + C[1]*C[1] + C[2]*C[2]);
- mm_PA[i][0] = C[0]/norm;
- mm_PA[i][1] = C[1]/norm;
- mm_PA[i][2] = C[2]/norm;
- }
- }
+ // For eigenvalue EV, the axis is (nx, ny, nz) where:
+ // (Exx - EV)nx + (Exy)ny + (Exz)nz = 0
+ // (Eyx)nx + (Eyy - EV)ny + (Eyz)nz = 0
+ // (Ezx)nx + (Ezy)ny + (Ezz - EV)nz = 0
+ // Setting nx = 1, we have:
+ // (Exx - EV) + (Exy)ny + (Exz)nz = 0
+ // (Eyx) + (Eyy - EV)ny + (Eyz)nz = 0
+ // (Ezx) + (Ezy)ny + (Ezz - EV)nz = 0
+ // and so
+ // (Exy)ny = EV - Exx - (Exz)nz => ny = (EV - Exx - Exz*nz)/Exy
+ // What if Exy = 0? Then provided Eyz is non-zero we can write:
+ // (Ezx) + (Ezy)ny + (Ezz - EV)nz = 0
+ // ny = (Exz - (Ezz-EV)*nz)/Eyz
+ // What if Exy = 0 and Eyz = 0 but (Eyy - EV) is non-zero?
+ // (Eyy - EV)ny + (Eyz)nz = 0
+ // ny = -(Eyz*nz)/(Eyy-EV)
+
+ // In the pathological case where Exz = Eyz = Ezz = 0:
+ // (Exx - EV)nx + (Exy)ny = 0 => ny/nx = -(Exx-EV)/Exy
+ // (Eyx)nx + (Eyy - EV)ny = 0 => ny/nx = -Eyx/(Eyy-EV)
+ // (EV)nz = 0
+ // so
+ // -ny/nx = (EV-Exx)/Exy = Eyx/(EV-Eyy)
+ // But watch out for order! Recalculate eigenvalues for this pathological case.
+ // (EV-Exx)(EV-Eyy) = Eyx*Exy
+ // EV^2 - EV(Exx+Eyy) + Exx*Eyy - Eyx*Exy = 0
+ //
+ // In another pathological case, Exz = Exy = 0:
+ // (Exx - EV)nx = 0
+ // (Eyy - EV)ny + (Eyz)nz = 0 => ny/nz = -(Eyz)/(Eyy-EV)
+ // (Ezy)ny + (Ezz - EV)nz = 0 => ny/nz = -(Ezz-EV)/(Ezy)
+ // so we cannot set nx = 1. Instead, write:
+ // -ny/nz = (Eyz)/(Eyy-EV) = (Ezz-EV)/(Ezy)
+ // Then
+ // (Eyz)(Ezy) = (Eyy-EV)(Ezz-EV)
+ // (Eyz)^2 = (Eyy)(Ezz) - (Eyy)(EV) - (Ezz)(EV) + (EV)^2
+ // EV^2 - EV(Eyy+Ezz) + Eyy*Ezz - Eyz*Eyz = 0
+
+ // Handle pathological case
+ if (Exz == 0.0 && Eyz == 0.0) {
+ // Recompute eigenvectors.
+ EV[0] = 0.5*(Exx+Eyy) + 0.5*Math.sqrt((Exx+Eyy)*(Exx+Eyy) + 4.0*Exy*Exy);
+ EV[1] = 0.5*(Exx+Eyy) - 0.5*Math.sqrt((Exx+Eyy)*(Exx+Eyy) + 4.0*Exy*Exy);
+ EV[2] = 0.0;
+ for( int i = 0 ; i < 2 ; i++ ) {
+ double nx_over_ny = Exy / (Exx-EV[i]);
+ double nx_unnormalized = nx_over_ny;
+ double ny_unnormalized = 1.0;
+ double norm = Math.sqrt(nx_unnormalized*nx_unnormalized + ny_unnormalized*ny_unnormalized);
+ mm_PA[i][0] = ny_unnormalized/norm;
+ mm_PA[i][1] = nx_unnormalized/norm;
+ mm_PA[i][2] = 0.0;
+ }
+ // ... and now set third eigenvector to the z direction:
+ mm_PA[2][0] = 0.0;
+ mm_PA[2][1] = 0.0;
+ mm_PA[2][2] = 1.0;
+ } else if (Exz == 0.0 && Exy == 0.0) {
+ // Another pathological case
+ EV[0] = 0.5*(Eyy+Ezz) + 0.5*Math.sqrt((Eyy+Ezz)*(Eyy+Ezz) + 4.0*Eyz*Eyz);
+ EV[1] = 0.5*(Eyy+Ezz) - 0.5*Math.sqrt((Eyy+Ezz)*(Eyy+Ezz) + 4.0*Eyz*Eyz);
+ EV[2] = 0.0;
+ for( int i = 0 ; i < 2 ; i++ ) {
+ double ny_over_nz = Eyz / (Eyy-EV[i]);
+ double ny_unnormalized = ny_over_nz;
+ double nz_unnormalized = 1.0;
+ double norm = Math.sqrt(ny_unnormalized*ny_unnormalized + nz_unnormalized*nz_unnormalized);
+ mm_PA[i][0] = nz_unnormalized/norm;
+ mm_PA[i][1] = ny_unnormalized/norm;
+ mm_PA[i][2] = 0.0;
+ }
+ mm_PA[2][0] = 0.0;
+ mm_PA[2][1] = 0.0;
+ mm_PA[2][2] = 1.0;
+ } else {
+ for( int i = 0 ; i < 3 ; i++ )
+ {
+ double[] C = new double[3];
+ C[0] = 1.0;
+ C[2] = (Exy*Exy + (Eyy - EV[i])*(EV[i] - Exx))/
+ ((Eyy - EV[i])*Exz - Eyz*Exy);
+ C[1] = (EV[i] - Exx - Exz*C[2])/Exy;
+ if (Exy == 0.0) {
+ // Recompute
+ if (Eyz != 0.0) {
+ // ny = (Exz - (Ezz-EV)*nz)/Eyz
+ C[1] = (Exz - (Ezz-EV[i])*C[2])/Eyz;
+ } else {
+ // ny = -(Eyz*nz)/(Eyy-EV)
+ C[1] = -(Eyz*C[2])/(Eyy-EV[i]);
+ }
+ }
+ double norm = Math.sqrt(C[0]*C[0] + C[1]*C[1] + C[2]*C[2]);
+ mm_PA[i][0] = C[0]/norm;
+ mm_PA[i][1] = C[1]/norm;
+ mm_PA[i][2] = C[2]/norm;
+ }
+ }
}
mm_NE[0] = NE1;
mm_NE[1] = NE2;
@@ -417,7 +417,7 @@
double dr = Math.sqrt( (position[0]+mm_PA[0][0])*(position[0]+mm_PA[0][0]) +
(position[1]+mm_PA[0][1])*(position[1]+mm_PA[0][1]) +
(position[2]+mm_PA[0][2])*(position[2]+mm_PA[0][2]) ) -
- Math.sqrt( (position[0])*(position[0]) +
+ Math.sqrt( (position[0])*(position[0]) +
(position[1])*(position[1]) +
(position[2])*(position[2]) ) ;
double sign = 1.;
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/jeremym/EvioFileScanner.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/jeremym/EvioFileScanner.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/jeremym/EvioFileScanner.java Wed Apr 27 11:11:32 2016
@@ -28,7 +28,7 @@
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.hps.conditions.database.ConnectionParameters;
import org.hps.record.evio.EvioEventConstants;
import org.hps.record.evio.EvioEventUtilities;
@@ -484,7 +484,7 @@
final Set<Integer> acceptRuns = new HashSet<Integer>();
- final DefaultParser parser = new DefaultParser();
+ final PosixParser parser = new PosixParser();
boolean printSummary = false;
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/ClusterAnalysisDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/ClusterAnalysisDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/ClusterAnalysisDriver.java Wed Apr 27 11:11:32 2016
@@ -11,84 +11,84 @@
import org.lcsim.util.aida.AIDA;
public class ClusterAnalysisDriver extends Driver {
- // Analysis plots.
+ // Analysis plots.
AIDA aida = AIDA.defaultInstance();
- IHistogram1D clusterTotalEnergy;
- IHistogram1D clusterSeedEnergy;
- IHistogram1D clusterHitCount;
- IHistogram2D clusterDistribution;
-
- IHistogram1D fClusterTotalEnergy;
- IHistogram1D fClusterSeedEnergy;
- IHistogram1D fClusterHitCount;
- IHistogram2D fClusterDistribution;
-
- IHistogram1D nClusterTotalEnergy;
- IHistogram1D nClusterSeedEnergy;
- IHistogram1D nClusterHitCount;
- IHistogram2D nClusterDistribution;
-
- // Hit collection names.
- private String clusterCollectionName = "EcalClusters";
-
- public void setClusterCollectionName(String clusterCollectionName) {
- this.clusterCollectionName = clusterCollectionName;
- }
-
- @Override
- public void startOfData() {
- // Initialize the histograms.
- clusterTotalEnergy = aida.histogram1D("Cluster Plot :: Cluster Total Energy", 110, 0.00, 2.2);
- clusterSeedEnergy = aida.histogram1D("Cluster Plot :: Seed Hit Energy", 110, 0.00, 2.2);
- clusterHitCount = aida.histogram1D("Cluster Plot :: Cluster Hit Count", 8, 1, 9);
- clusterDistribution = aida.histogram2D("Cluster Plot :: Seed Hit Distribution", 46, -23, 23, 11, -5.5, 5.5);
-
- // Initialize the filtered histograms.
- fClusterTotalEnergy = aida.histogram1D("Cluster Plot :: Cluster Total Energy (Over 100 MeV)", 110, 0.00, 2.2);
- fClusterSeedEnergy = aida.histogram1D("Cluster Plot :: Seed Hit Energy (Over 100 MeV)", 110, 0.00, 2.2);
- fClusterHitCount = aida.histogram1D("Cluster Plot :: Cluster Hit Count (Over 100 MeV)", 8, 1, 9);
- fClusterDistribution = aida.histogram2D("Cluster Plot :: Seed Hit Distribution (Over 100 MeV)", 46, -23, 23, 11, -5.5, 5.5);
-
- // Initialize the more filtered histograms.
- nClusterTotalEnergy = aida.histogram1D("Cluster Plot :: Cluster Total Energy (Over 100 MeV, > 1 Hit)", 110, 0.00, 2.2);
- nClusterSeedEnergy = aida.histogram1D("Cluster Plot :: Seed Hit Energy (Over 100 MeV, > 1 Hit)", 110, 0.00, 2.2);
- nClusterHitCount = aida.histogram1D("Cluster Plot :: Cluster Hit Count (Over 100 MeV, > 1 Hit)", 8, 1, 9);
- nClusterDistribution = aida.histogram2D("Cluster Plot :: Seed Hit Distribution (Over 100 MeV, > 1 Hit)", 46, -23, 23, 11, -5.5, 5.5);
- }
-
- public void process(EventHeader event) {
- // Check if there exists a cluster collection.
- if(event.hasCollection(Cluster.class, clusterCollectionName)) {
- // Get the raw hit collection.
- List<Cluster> clusterList = event.get(Cluster.class, clusterCollectionName);
-
- // Output the information on each hit to the histograms.
- for(Cluster cluster : clusterList) {
- // Get the x and y indices for the hits.
- int ix = cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix");
- int iy = cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy");
- if(ix > 0) { ix = ix - 1; }
-
- // Write to the histograms.
- clusterTotalEnergy.fill(cluster.getEnergy());
- clusterSeedEnergy.fill(cluster.getCalorimeterHits().get(0).getCorrectedEnergy());
- clusterHitCount.fill(cluster.getCalorimeterHits().size());
- clusterDistribution.fill(ix, iy, 1.0);
-
- if(cluster.getCalorimeterHits().get(0).getCorrectedEnergy() > 0.100) {
- fClusterTotalEnergy.fill(cluster.getEnergy());
- fClusterSeedEnergy.fill(cluster.getCalorimeterHits().get(0).getCorrectedEnergy());
- fClusterHitCount.fill(cluster.getCalorimeterHits().size());
- fClusterDistribution.fill(ix, iy, 1.0);
-
- if(cluster.getCalorimeterHits().size() > 1) {
- nClusterTotalEnergy.fill(cluster.getEnergy());
- nClusterSeedEnergy.fill(cluster.getCalorimeterHits().get(0).getCorrectedEnergy());
- nClusterHitCount.fill(cluster.getCalorimeterHits().size());
- nClusterDistribution.fill(ix, iy, 1.0);
- }
- }
- }
- }
- }
+ IHistogram1D clusterTotalEnergy;
+ IHistogram1D clusterSeedEnergy;
+ IHistogram1D clusterHitCount;
+ IHistogram2D clusterDistribution;
+
+ IHistogram1D fClusterTotalEnergy;
+ IHistogram1D fClusterSeedEnergy;
+ IHistogram1D fClusterHitCount;
+ IHistogram2D fClusterDistribution;
+
+ IHistogram1D nClusterTotalEnergy;
+ IHistogram1D nClusterSeedEnergy;
+ IHistogram1D nClusterHitCount;
+ IHistogram2D nClusterDistribution;
+
+ // Hit collection names.
+ private String clusterCollectionName = "EcalClusters";
+
+ public void setClusterCollectionName(String clusterCollectionName) {
+ this.clusterCollectionName = clusterCollectionName;
+ }
+
+ @Override
+ public void startOfData() {
+ // Initialize the histograms.
+ clusterTotalEnergy = aida.histogram1D("Cluster Plot :: Cluster Total Energy", 110, 0.00, 2.2);
+ clusterSeedEnergy = aida.histogram1D("Cluster Plot :: Seed Hit Energy", 110, 0.00, 2.2);
+ clusterHitCount = aida.histogram1D("Cluster Plot :: Cluster Hit Count", 8, 1, 9);
+ clusterDistribution = aida.histogram2D("Cluster Plot :: Seed Hit Distribution", 46, -23, 23, 11, -5.5, 5.5);
+
+ // Initialize the filtered histograms.
+ fClusterTotalEnergy = aida.histogram1D("Cluster Plot :: Cluster Total Energy (Over 100 MeV)", 110, 0.00, 2.2);
+ fClusterSeedEnergy = aida.histogram1D("Cluster Plot :: Seed Hit Energy (Over 100 MeV)", 110, 0.00, 2.2);
+ fClusterHitCount = aida.histogram1D("Cluster Plot :: Cluster Hit Count (Over 100 MeV)", 8, 1, 9);
+ fClusterDistribution = aida.histogram2D("Cluster Plot :: Seed Hit Distribution (Over 100 MeV)", 46, -23, 23, 11, -5.5, 5.5);
+
+ // Initialize the more filtered histograms.
+ nClusterTotalEnergy = aida.histogram1D("Cluster Plot :: Cluster Total Energy (Over 100 MeV, > 1 Hit)", 110, 0.00, 2.2);
+ nClusterSeedEnergy = aida.histogram1D("Cluster Plot :: Seed Hit Energy (Over 100 MeV, > 1 Hit)", 110, 0.00, 2.2);
+ nClusterHitCount = aida.histogram1D("Cluster Plot :: Cluster Hit Count (Over 100 MeV, > 1 Hit)", 8, 1, 9);
+ nClusterDistribution = aida.histogram2D("Cluster Plot :: Seed Hit Distribution (Over 100 MeV, > 1 Hit)", 46, -23, 23, 11, -5.5, 5.5);
+ }
+
+ public void process(EventHeader event) {
+ // Check if there exists a cluster collection.
+ if(event.hasCollection(Cluster.class, clusterCollectionName)) {
+ // Get the raw hit collection.
+ List<Cluster> clusterList = event.get(Cluster.class, clusterCollectionName);
+
+ // Output the information on each hit to the histograms.
+ for(Cluster cluster : clusterList) {
+ // Get the x and y indices for the hits.
+ int ix = cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix");
+ int iy = cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy");
+ if(ix > 0) { ix = ix - 1; }
+
+ // Write to the histograms.
+ clusterTotalEnergy.fill(cluster.getEnergy());
+ clusterSeedEnergy.fill(cluster.getCalorimeterHits().get(0).getCorrectedEnergy());
+ clusterHitCount.fill(cluster.getCalorimeterHits().size());
+ clusterDistribution.fill(ix, iy, 1.0);
+
+ if(cluster.getCalorimeterHits().get(0).getCorrectedEnergy() > 0.100) {
+ fClusterTotalEnergy.fill(cluster.getEnergy());
+ fClusterSeedEnergy.fill(cluster.getCalorimeterHits().get(0).getCorrectedEnergy());
+ fClusterHitCount.fill(cluster.getCalorimeterHits().size());
+ fClusterDistribution.fill(ix, iy, 1.0);
+
+ if(cluster.getCalorimeterHits().size() > 1) {
+ nClusterTotalEnergy.fill(cluster.getEnergy());
+ nClusterSeedEnergy.fill(cluster.getCalorimeterHits().get(0).getCorrectedEnergy());
+ nClusterHitCount.fill(cluster.getCalorimeterHits().size());
+ nClusterDistribution.fill(ix, iy, 1.0);
+ }
+ }
+ }
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/CountTriggersDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/CountTriggersDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/CountTriggersDriver.java Wed Apr 27 11:11:32 2016
@@ -16,64 +16,64 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public class CountTriggersDriver extends Driver {
- // Store programmable parameters.
- private String bankCollectionName = "TriggerBank";
-
- // Track the number of triggers seen for each trigger type.
- private int[] triggers = new int[6];
- private static final int PULSER = 0;
- private static final int SINGLES0 = 1;
- private static final int SINGLES1 = 2;
- private static final int PAIR0 = 3;
- private static final int PAIR1 = 4;
- private static final int COSMIC = 5;
-
- /**
- * Outputs the total number of triggers seen for each trigger type.
- */
- @Override
- public void endOfData() {
- System.out.println("Trigger Counts:");
- System.out.printf("Singles 0 :: %d%n", triggers[SINGLES0]);
- System.out.printf("Singles 1 :: %d%n", triggers[SINGLES1]);
- System.out.printf("Pair 0 :: %d%n", triggers[PAIR0]);
- System.out.printf("Pair 1 :: %d%n", triggers[PAIR1]);
- System.out.printf("Pulser :: %d%n", triggers[PULSER]);
- System.out.printf("Cosmic :: %d%n", triggers[COSMIC]);
- }
-
- /**
- * Checks whether a trigger of each given type was seen by the TI
- * for each event and increments the total trigger count for that
- * type as appropriate.
- */
- @Override
- public void process(EventHeader event) {
- // Extract the TI bank from the data stream.
- TIData tiBank = null;
- if(event.hasCollection(GenericObject.class, bankCollectionName)) {
- // Get the bank list.
- List<GenericObject> bankList = event.get(GenericObject.class, bankCollectionName);
-
- // Search through the banks and get the TI bank.
- for(GenericObject obj : bankList) {
- if(AbstractIntData.getTag(obj) == TIData.BANK_TAG) {
- tiBank = new TIData(obj);
- }
- }
- }
-
- // If there is no TI bank, the event can not be processed.
- if(tiBank == null) {
- return;
- }
-
- // Otherwise, increment the relevant trigger counts.
- if(tiBank.isPulserTrigger()) { triggers[PULSER]++; }
- if(tiBank.isSingle0Trigger()) { triggers[SINGLES0]++; }
- if(tiBank.isSingle1Trigger()) { triggers[SINGLES1]++; }
- if(tiBank.isPair0Trigger()) { triggers[PAIR0]++; }
- if(tiBank.isPair1Trigger()) { triggers[PAIR1]++; }
- if(tiBank.isCalibTrigger()) { triggers[COSMIC]++; }
- }
+ // Store programmable parameters.
+ private String bankCollectionName = "TriggerBank";
+
+ // Track the number of triggers seen for each trigger type.
+ private int[] triggers = new int[6];
+ private static final int PULSER = 0;
+ private static final int SINGLES0 = 1;
+ private static final int SINGLES1 = 2;
+ private static final int PAIR0 = 3;
+ private static final int PAIR1 = 4;
+ private static final int COSMIC = 5;
+
+ /**
+ * Outputs the total number of triggers seen for each trigger type.
+ */
+ @Override
+ public void endOfData() {
+ System.out.println("Trigger Counts:");
+ System.out.printf("Singles 0 :: %d%n", triggers[SINGLES0]);
+ System.out.printf("Singles 1 :: %d%n", triggers[SINGLES1]);
+ System.out.printf("Pair 0 :: %d%n", triggers[PAIR0]);
+ System.out.printf("Pair 1 :: %d%n", triggers[PAIR1]);
+ System.out.printf("Pulser :: %d%n", triggers[PULSER]);
+ System.out.printf("Cosmic :: %d%n", triggers[COSMIC]);
+ }
+
+ /**
+ * Checks whether a trigger of each given type was seen by the TI
+ * for each event and increments the total trigger count for that
+ * type as appropriate.
+ */
+ @Override
+ public void process(EventHeader event) {
+ // Extract the TI bank from the data stream.
+ TIData tiBank = null;
+ if(event.hasCollection(GenericObject.class, bankCollectionName)) {
+ // Get the bank list.
+ List<GenericObject> bankList = event.get(GenericObject.class, bankCollectionName);
+
+ // Search through the banks and get the TI bank.
+ for(GenericObject obj : bankList) {
+ if(AbstractIntData.getTag(obj) == TIData.BANK_TAG) {
+ tiBank = new TIData(obj);
+ }
+ }
+ }
+
+ // If there is no TI bank, the event can not be processed.
+ if(tiBank == null) {
+ return;
+ }
+
+ // Otherwise, increment the relevant trigger counts.
+ if(tiBank.isPulserTrigger()) { triggers[PULSER]++; }
+ if(tiBank.isSingle0Trigger()) { triggers[SINGLES0]++; }
+ if(tiBank.isSingle1Trigger()) { triggers[SINGLES1]++; }
+ if(tiBank.isPair0Trigger()) { triggers[PAIR0]++; }
+ if(tiBank.isPair1Trigger()) { triggers[PAIR1]++; }
+ if(tiBank.isCalibTrigger()) { triggers[COSMIC]++; }
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/EvioAnalysis.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/EvioAnalysis.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/EvioAnalysis.java Wed Apr 27 11:11:32 2016
@@ -17,17 +17,17 @@
import org.lcsim.util.aida.AIDA;
public class EvioAnalysis extends Driver {
- // Store index reference variables.
+ // Store index reference variables.
private static final int RECON = 0;
private static final int SSP = 1;
-
- // Create histogram arrays for cut distributions.
+
+ // Create histogram arrays for cut distributions.
private AIDA aida = AIDA.defaultInstance();
private IHistogram1D[] clusterEnergyPlot = new IHistogram1D[2];
private IHistogram1D[] clusterHitCountPlot = new IHistogram1D[2];
private IHistogram1D[] clusterTimePlot = new IHistogram1D[2];
- private IHistogram2D[] clusterSlopePlot = new IHistogram2D[2];
-
+ private IHistogram2D[] clusterSlopePlot = new IHistogram2D[2];
+
private IHistogram1D[] pairClusterEnergyPlot = new IHistogram1D[2];
private IHistogram1D[] pairHitCountPlot = new IHistogram1D[2];
private IHistogram1D[] pairTimePlot = new IHistogram1D[2];
@@ -38,129 +38,129 @@
private IHistogram1D[] pairCoplanarityPlot = new IHistogram1D[2];
private IHistogram1D[] pairTriggerTimePlot = new IHistogram1D[2];
- // Store programmable values.
- private String clusterCollectionName = "EcalClusters";
- private String bankCollectionName = "SSPData";
- private double energySlopeParamF = 0.0055;
- private double beamEnergy = 1.1;
-
- @Override
- public void startOfData() {
- // Store the plot source type name.
- String[] plotType = new String[2];
- plotType[RECON] = " (Recon)";
- plotType[SSP] = " (SSP)";
-
- // Set the bin sizes based on the beam energy.
- int bins = (int) beamEnergy * 100;
-
- for(int i = 0; i < 2; i++) {
- // Instantiate the single cluster distribution plots.
- clusterEnergyPlot[i] = aida.histogram1D("Raw/Cluster Energy" + plotType[i], bins, 0.0, beamEnergy);
- clusterHitCountPlot[i] = aida.histogram1D("Raw/Cluster Hit Count" + plotType[i], 9, 0.5, 9.5);
- clusterTimePlot[i] = aida.histogram1D("Raw/Cluster Time" + plotType[i], 100, 0, 400);
- clusterSlopePlot[i] = aida.histogram2D("Raw/Cluster Energy Slope" + plotType[i], 300, 0.0, 3.0, 200, 0, 400);
-
- // Instantiate the cluster pair distribution plots.
- pairSumPlot[i] = aida.histogram1D("Raw/Pair Energy Sum" + plotType[i], (int) 1.5 * bins, 0.0, 1.5 * beamEnergy);
- pairSumEnergiesPlot[i] = aida.histogram2D("Raw/Pair 2D Energy Sum" + plotType[i], (int) 1.5 * bins, 0.0, 1.5 * beamEnergy, (int) 1.5 * bins, 0.0, 1.5 * beamEnergy);
- pairDiffPlot[i] = aida.histogram1D("Raw/Pair Energy Difference" + plotType[i], bins, 0.0, beamEnergy);
- pairSlopePlot[i] = aida.histogram1D("Raw/Pair Energy Slope" + plotType[i], 100, 0.0, 4.0);
- pairCoplanarityPlot[i] = aida.histogram1D("Raw/Pair Coplanarity" + plotType[i], 180, 0.0, 180);
- }
- }
-
- @Override
- public void process(EventHeader event) {
- // Skip the event if there are no clusters.
- if(!event.hasCollection(Cluster.class, clusterCollectionName) || !event.hasCollection(GenericObject.class, bankCollectionName)) {
- return;
- }
-
- // Get the list of clusters.
- List<Cluster> clusters = event.get(Cluster.class, clusterCollectionName);
-
- // Get the SSP data bank.
- List<GenericObject> bankList = event.get(GenericObject.class, bankCollectionName);
-
- // Get the SSP bank from the generic object bank list.
- SSPData sspBank = null;
- for(GenericObject obj : bankList) {
- if(AbstractIntData.getTag(obj) == SSPData.BANK_TAG) {
- sspBank = new SSPData(obj);
- }
- }
-
- // Make sure that the SSP bank was initialized.
- if(sspBank == null) {
- return;
- }
-
- // Iterate over the reconstructed clusters and populate
- // the singles plots.
- for(Cluster cluster : clusters) {
- // Get the cluster properties.
- int hitCount = cluster.getCalorimeterHits().size();
- double x = TriggerModule.getClusterX(cluster);
- double z = TriggerModule.getClusterZ(cluster);
- double slopeParamR = Math.sqrt((x * x) + (z * z));
-
- // Populate the plots.
- clusterEnergyPlot[RECON].fill(cluster.getEnergy());
- clusterHitCountPlot[RECON].fill(cluster.getCalorimeterHits().size());
- clusterTimePlot[RECON].fill(cluster.getCalorimeterHits().get(0).getTime());
- clusterSlopePlot[RECON].fill(cluster.getEnergy(), slopeParamR);
- }
-
- // Get the list of pairs.
- List<Cluster[]> pairs = makePairs(clusters);
-
- // Iterate over the pairs and populate the pair plots.
- for(Cluster[] pair : pairs) {
- pairSumPlot[RECON].fill(TriggerModule.getValueEnergySum(pair));
- pairSumEnergiesPlot[RECON].fill(pair[0].getEnergy(), pair[1].getEnergy());
- pairDiffPlot[RECON].fill(TriggerModule.getValueEnergyDifference(pair));
- pairSlopePlot[RECON].fill(TriggerModule.getValueEnergySlope(pair, energySlopeParamF));
- pairCoplanarityPlot[RECON].fill(TriggerModule.getValueCoplanarity(pair));
- }
- }
-
- private List<Cluster[]> makePairs(List<Cluster> clusters) {
- // Create seperate lists for top and bottom clusters.
- List<Cluster> topList = new ArrayList<Cluster>();
- List<Cluster> bottomList = new ArrayList<Cluster>();
- List<Cluster[]> pairList = new ArrayList<Cluster[]>();
-
- // Sort the clusters into the appropriate list.
- for(Cluster cluster : clusters) {
- if(cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy") > 0) {
- topList.add(cluster);
- } else {
- bottomList.add(cluster);
- }
- }
-
- // Create all possible cluster pairs.
- for(Cluster topCluster : topList) {
- for(Cluster bottomCluster : bottomList) {
- Cluster[] pair = { topCluster, bottomCluster };
- pairList.add(pair);
- }
- }
-
- // Return the list of cluster pairs.
- return pairList;
- }
-
- public void setClusterCollectionName(String clusterCollectionName) {
- this.clusterCollectionName = clusterCollectionName;
- }
-
- public void setBankCollectionName(String bankCollectionName) {
- this.bankCollectionName = bankCollectionName;
- }
-
- public void setEnergySlopeParamF(double energySlopeParamF) {
- this.energySlopeParamF = energySlopeParamF;
- }
+ // Store programmable values.
+ private String clusterCollectionName = "EcalClusters";
+ private String bankCollectionName = "SSPData";
+ private double energySlopeParamF = 0.0055;
+ private double beamEnergy = 1.1;
+
+ @Override
+ public void startOfData() {
+ // Store the plot source type name.
+ String[] plotType = new String[2];
+ plotType[RECON] = " (Recon)";
+ plotType[SSP] = " (SSP)";
+
+ // Set the bin sizes based on the beam energy.
+ int bins = (int) beamEnergy * 100;
+
+ for(int i = 0; i < 2; i++) {
+ // Instantiate the single cluster distribution plots.
+ clusterEnergyPlot[i] = aida.histogram1D("Raw/Cluster Energy" + plotType[i], bins, 0.0, beamEnergy);
+ clusterHitCountPlot[i] = aida.histogram1D("Raw/Cluster Hit Count" + plotType[i], 9, 0.5, 9.5);
+ clusterTimePlot[i] = aida.histogram1D("Raw/Cluster Time" + plotType[i], 100, 0, 400);
+ clusterSlopePlot[i] = aida.histogram2D("Raw/Cluster Energy Slope" + plotType[i], 300, 0.0, 3.0, 200, 0, 400);
+
+ // Instantiate the cluster pair distribution plots.
+ pairSumPlot[i] = aida.histogram1D("Raw/Pair Energy Sum" + plotType[i], (int) 1.5 * bins, 0.0, 1.5 * beamEnergy);
+ pairSumEnergiesPlot[i] = aida.histogram2D("Raw/Pair 2D Energy Sum" + plotType[i], (int) 1.5 * bins, 0.0, 1.5 * beamEnergy, (int) 1.5 * bins, 0.0, 1.5 * beamEnergy);
+ pairDiffPlot[i] = aida.histogram1D("Raw/Pair Energy Difference" + plotType[i], bins, 0.0, beamEnergy);
+ pairSlopePlot[i] = aida.histogram1D("Raw/Pair Energy Slope" + plotType[i], 100, 0.0, 4.0);
+ pairCoplanarityPlot[i] = aida.histogram1D("Raw/Pair Coplanarity" + plotType[i], 180, 0.0, 180);
+ }
+ }
+
+ @Override
+ public void process(EventHeader event) {
+ // Skip the event if there are no clusters.
+ if(!event.hasCollection(Cluster.class, clusterCollectionName) || !event.hasCollection(GenericObject.class, bankCollectionName)) {
+ return;
+ }
+
+ // Get the list of clusters.
+ List<Cluster> clusters = event.get(Cluster.class, clusterCollectionName);
+
+ // Get the SSP data bank.
+ List<GenericObject> bankList = event.get(GenericObject.class, bankCollectionName);
+
+ // Get the SSP bank from the generic object bank list.
+ SSPData sspBank = null;
+ for(GenericObject obj : bankList) {
+ if(AbstractIntData.getTag(obj) == SSPData.BANK_TAG) {
+ sspBank = new SSPData(obj);
+ }
+ }
+
+ // Make sure that the SSP bank was initialized.
+ if(sspBank == null) {
+ return;
+ }
+
+ // Iterate over the reconstructed clusters and populate
+ // the singles plots.
+ for(Cluster cluster : clusters) {
+ // Get the cluster properties.
+ int hitCount = cluster.getCalorimeterHits().size();
+ double x = TriggerModule.getClusterX(cluster);
+ double z = TriggerModule.getClusterZ(cluster);
+ double slopeParamR = Math.sqrt((x * x) + (z * z));
+
+ // Populate the plots.
+ clusterEnergyPlot[RECON].fill(cluster.getEnergy());
+ clusterHitCountPlot[RECON].fill(cluster.getCalorimeterHits().size());
+ clusterTimePlot[RECON].fill(cluster.getCalorimeterHits().get(0).getTime());
+ clusterSlopePlot[RECON].fill(cluster.getEnergy(), slopeParamR);
+ }
+
+ // Get the list of pairs.
+ List<Cluster[]> pairs = makePairs(clusters);
+
+ // Iterate over the pairs and populate the pair plots.
+ for(Cluster[] pair : pairs) {
+ pairSumPlot[RECON].fill(TriggerModule.getValueEnergySum(pair));
+ pairSumEnergiesPlot[RECON].fill(pair[0].getEnergy(), pair[1].getEnergy());
+ pairDiffPlot[RECON].fill(TriggerModule.getValueEnergyDifference(pair));
+ pairSlopePlot[RECON].fill(TriggerModule.getValueEnergySlope(pair, energySlopeParamF));
+ pairCoplanarityPlot[RECON].fill(TriggerModule.getValueCoplanarity(pair));
+ }
+ }
+
+ private List<Cluster[]> makePairs(List<Cluster> clusters) {
+ // Create seperate lists for top and bottom clusters.
+ List<Cluster> topList = new ArrayList<Cluster>();
+ List<Cluster> bottomList = new ArrayList<Cluster>();
+ List<Cluster[]> pairList = new ArrayList<Cluster[]>();
+
+ // Sort the clusters into the appropriate list.
+ for(Cluster cluster : clusters) {
+ if(cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy") > 0) {
+ topList.add(cluster);
+ } else {
+ bottomList.add(cluster);
+ }
+ }
+
+ // Create all possible cluster pairs.
+ for(Cluster topCluster : topList) {
+ for(Cluster bottomCluster : bottomList) {
+ Cluster[] pair = { topCluster, bottomCluster };
+ pairList.add(pair);
+ }
+ }
+
+ // Return the list of cluster pairs.
+ return pairList;
+ }
+
+ public void setClusterCollectionName(String clusterCollectionName) {
+ this.clusterCollectionName = clusterCollectionName;
+ }
+
+ public void setBankCollectionName(String bankCollectionName) {
+ this.bankCollectionName = bankCollectionName;
+ }
+
+ public void setEnergySlopeParamF(double energySlopeParamF) {
+ this.energySlopeParamF = energySlopeParamF;
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/FADCAnalysisDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/FADCAnalysisDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/FADCAnalysisDriver.java Wed Apr 27 11:11:32 2016
@@ -11,82 +11,82 @@
import org.lcsim.util.aida.AIDA;
public class FADCAnalysisDriver extends Driver {
- // Analysis plots.
+ // Analysis plots.
AIDA aida = AIDA.defaultInstance();
- IHistogram1D rawHitEnergy;
- IHistogram1D fadcHitEnergy;
- IHistogram2D rawHitDistribution;
- IHistogram2D fadcHitDistribution;
- IHistogram2D fadcFilteredHitDistribution;
- IHistogram1D eventRawHitCount;
- IHistogram1D eventFADCHitCount;
-
- // Hit collection names.
- private String rawHitsCollectionName = "EcalHits";
- private String fadcHitsCollectionName = "EcalCorrectedHits";
-
- public void setFadcHitsCollectionName(String fadcHitsCollectionName) {
- this.fadcHitsCollectionName = fadcHitsCollectionName;
- }
-
- public void setRawHitsCollectionName(String rawHitsCollectionName) {
- this.rawHitsCollectionName = rawHitsCollectionName;
- }
-
- @Override
- public void startOfData() {
- // Initialize the histograms.
- rawHitEnergy = aida.histogram1D("FADC Plot :: Raw Hit Energy", 110, 0.00, 2.2);
- fadcHitEnergy = aida.histogram1D("FADC Plot :: FADC Hit Energy", 80, 0.00, 1.6);
- rawHitDistribution = aida.histogram2D("FADC Plot :: Raw Hit Distribution", 46, -23, 23, 11, -5.5, 5.5);
- fadcHitDistribution = aida.histogram2D("FADC Plot :: FADC Hit Distribution", 46, -23, 23, 11, -5.5, 5.5);
- fadcFilteredHitDistribution = aida.histogram2D("FADC Plot :: FADC Hit Distribution Over 100 MeV", 46, -23, 23, 11, -5.5, 5.5);
- eventRawHitCount = aida.histogram1D("FADC Plot :: Event Raw Hit Count", 159, 1, 160);
- eventFADCHitCount = aida.histogram1D("FADC Plot :: Event FADC Hit Count", 15, 1, 16);
- }
-
- public void process(EventHeader event) {
- // Check if there exists a raw hits collection.
- if(event.hasCollection(CalorimeterHit.class, rawHitsCollectionName)) {
- // Get the raw hit collection.
- List<CalorimeterHit> hitList = event.get(CalorimeterHit.class, rawHitsCollectionName);
-
- // Output the information on each hit to the histograms.
- for(CalorimeterHit hit : hitList) {
- // Get the x and y indices for the hits.
- int ix = hit.getIdentifierFieldValue("ix");
- int iy = hit.getIdentifierFieldValue("iy");
- if(ix > 0) { ix = ix - 1; }
-
- // Write to the histograms.
- rawHitEnergy.fill(hit.getCorrectedEnergy());
- rawHitDistribution.fill(ix, iy, 1.0);
-
- // If there are hits, fill the hit count histogram.
- if(hitList.size() != 0) { eventRawHitCount.fill(hitList.size()); }
- }
- }
-
- // Check if there exists an FADC hits collection.
- if(event.hasCollection(CalorimeterHit.class, fadcHitsCollectionName)) {
- // Get the raw hit collection.
- List<CalorimeterHit> hitList = event.get(CalorimeterHit.class, fadcHitsCollectionName);
-
- // Output the information on each hit to the histograms.
- for(CalorimeterHit hit : hitList) {
- // Get the x and y indices for the hits.
- int ix = hit.getIdentifierFieldValue("ix");
- int iy = hit.getIdentifierFieldValue("iy");
- if(ix > 0) { ix = ix - 1; }
-
- // Write to the histograms.
- fadcHitEnergy.fill(hit.getCorrectedEnergy());
- fadcHitDistribution.fill(ix, iy, 1.0);
- if(hit.getCorrectedEnergy() > 0.100) { fadcFilteredHitDistribution.fill(ix, iy, 1.0); }
-
- // If there are hits, fill the hit count histogram.
- if(hitList.size() != 0) { eventFADCHitCount.fill(hitList.size()); }
- }
- }
- }
+ IHistogram1D rawHitEnergy;
+ IHistogram1D fadcHitEnergy;
+ IHistogram2D rawHitDistribution;
+ IHistogram2D fadcHitDistribution;
+ IHistogram2D fadcFilteredHitDistribution;
+ IHistogram1D eventRawHitCount;
+ IHistogram1D eventFADCHitCount;
+
+ // Hit collection names.
+ private String rawHitsCollectionName = "EcalHits";
+ private String fadcHitsCollectionName = "EcalCorrectedHits";
+
+ public void setFadcHitsCollectionName(String fadcHitsCollectionName) {
+ this.fadcHitsCollectionName = fadcHitsCollectionName;
+ }
+
+ public void setRawHitsCollectionName(String rawHitsCollectionName) {
+ this.rawHitsCollectionName = rawHitsCollectionName;
+ }
+
+ @Override
+ public void startOfData() {
+ // Initialize the histograms.
+ rawHitEnergy = aida.histogram1D("FADC Plot :: Raw Hit Energy", 110, 0.00, 2.2);
+ fadcHitEnergy = aida.histogram1D("FADC Plot :: FADC Hit Energy", 80, 0.00, 1.6);
+ rawHitDistribution = aida.histogram2D("FADC Plot :: Raw Hit Distribution", 46, -23, 23, 11, -5.5, 5.5);
+ fadcHitDistribution = aida.histogram2D("FADC Plot :: FADC Hit Distribution", 46, -23, 23, 11, -5.5, 5.5);
+ fadcFilteredHitDistribution = aida.histogram2D("FADC Plot :: FADC Hit Distribution Over 100 MeV", 46, -23, 23, 11, -5.5, 5.5);
+ eventRawHitCount = aida.histogram1D("FADC Plot :: Event Raw Hit Count", 159, 1, 160);
+ eventFADCHitCount = aida.histogram1D("FADC Plot :: Event FADC Hit Count", 15, 1, 16);
+ }
+
+ public void process(EventHeader event) {
+ // Check if there exists a raw hits collection.
+ if(event.hasCollection(CalorimeterHit.class, rawHitsCollectionName)) {
+ // Get the raw hit collection.
+ List<CalorimeterHit> hitList = event.get(CalorimeterHit.class, rawHitsCollectionName);
+
+ // Output the information on each hit to the histograms.
+ for(CalorimeterHit hit : hitList) {
+ // Get the x and y indices for the hits.
+ int ix = hit.getIdentifierFieldValue("ix");
+ int iy = hit.getIdentifierFieldValue("iy");
+ if(ix > 0) { ix = ix - 1; }
+
+ // Write to the histograms.
+ rawHitEnergy.fill(hit.getCorrectedEnergy());
+ rawHitDistribution.fill(ix, iy, 1.0);
+
+ // If there are hits, fill the hit count histogram.
+ if(hitList.size() != 0) { eventRawHitCount.fill(hitList.size()); }
+ }
+ }
+
+ // Check if there exists an FADC hits collection.
+ if(event.hasCollection(CalorimeterHit.class, fadcHitsCollectionName)) {
+ // Get the raw hit collection.
+ List<CalorimeterHit> hitList = event.get(CalorimeterHit.class, fadcHitsCollectionName);
+
+ // Output the information on each hit to the histograms.
+ for(CalorimeterHit hit : hitList) {
+ // Get the x and y indices for the hits.
+ int ix = hit.getIdentifierFieldValue("ix");
+ int iy = hit.getIdentifierFieldValue("iy");
+ if(ix > 0) { ix = ix - 1; }
+
+ // Write to the histograms.
+ fadcHitEnergy.fill(hit.getCorrectedEnergy());
+ fadcHitDistribution.fill(ix, iy, 1.0);
+ if(hit.getCorrectedEnergy() > 0.100) { fadcFilteredHitDistribution.fill(ix, iy, 1.0); }
+
+ // If there are hits, fill the hit count histogram.
+ if(hitList.size() != 0) { eventFADCHitCount.fill(hitList.size()); }
+ }
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/HPSEcalDataPlotsDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/HPSEcalDataPlotsDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/HPSEcalDataPlotsDriver.java Wed Apr 27 11:11:32 2016
@@ -27,22 +27,22 @@
* @author Kyle McCarty <[log in to unmask]>
*/
public class HPSEcalDataPlotsDriver extends Driver {
- private String plotsGroupName= "Data Plots";
- private String bankCollectionName = "TriggerBank";
- private String clusterCollectionName = "EcalClusters";
-
- private static final int PULSER = 0;
- private static final int SINGLES0 = 1;
- private static final int SINGLES1 = 2;
- private static final int PAIR0 = 3;
- private static final int PAIR1 = 4;
-
- private static final int ALL = 0;
- private static final int EDGE = 1;
- private static final int FIDUCIAL = 2;
-
- private AIDA aida = AIDA.defaultInstance();
- private IHistogram1D[][] clusterTotalEnergy = new IHistogram1D[5][3];
+ private String plotsGroupName= "Data Plots";
+ private String bankCollectionName = "TriggerBank";
+ private String clusterCollectionName = "EcalClusters";
+
+ private static final int PULSER = 0;
+ private static final int SINGLES0 = 1;
+ private static final int SINGLES1 = 2;
+ private static final int PAIR0 = 3;
+ private static final int PAIR1 = 4;
+
+ private static final int ALL = 0;
+ private static final int EDGE = 1;
+ private static final int FIDUCIAL = 2;
+
+ private AIDA aida = AIDA.defaultInstance();
+ private IHistogram1D[][] clusterTotalEnergy = new IHistogram1D[5][3];
private IHistogram1D[][] clusterTime = new IHistogram1D[5][3];
private IHistogram1D[][] clusterHitCount = new IHistogram1D[5][3];
private IHistogram1D[][] clusterSeedEnergy = new IHistogram1D[5][3];
@@ -56,249 +56,249 @@
private IHistogram2D[][] clusterSeedPosition = new IHistogram2D[5][3];
private IHistogram2D[][] pairEnergySlope2D = new IHistogram2D[5][3];
private IHistogram2D[][] pairCoplanarityEnergySum = new IHistogram2D[5][3];
-
+
/**
* Initializes the plots.
*/
- @Override
- public void startOfData() {
- // Define trigger names.
- String[] triggerNames = {
- "Pulser", "Singles 0", "Singles 1", "Pair 0", "Pair 1"
- };
-
- // Define the positional names.
- String[] positionNames = {
- "All", "Edge", "Fiducial"
- };
-
- // Instantiate the plots.
- for(int i = 0; i < 5; i++) {
- for(int j = 0; j < 3; j++) {
- clusterTotalEnergy[i][j] = aida.histogram1D(plotsGroupName + "/" + triggerNames[i] + "/"
- + positionNames[j] + "/Cluster Total Energy", 150, 0.000, 1.500);
+ @Override
+ public void startOfData() {
+ // Define trigger names.
+ String[] triggerNames = {
+ "Pulser", "Singles 0", "Singles 1", "Pair 0", "Pair 1"
+ };
+
+ // Define the positional names.
+ String[] positionNames = {
+ "All", "Edge", "Fiducial"
+ };
+
+ // Instantiate the plots.
+ for(int i = 0; i < 5; i++) {
+ for(int j = 0; j < 3; j++) {
+ clusterTotalEnergy[i][j] = aida.histogram1D(plotsGroupName + "/" + triggerNames[i] + "/"
+ + positionNames[j] + "/Cluster Total Energy", 150, 0.000, 1.500);
clusterTime[i][j] = aida.histogram1D(plotsGroupName + "/" + triggerNames[i] + "/"
- + positionNames[j] + "/Cluster Hit Time", 100, 0.0, 100.0);
+ + positionNames[j] + "/Cluster Hit Time", 100, 0.0, 100.0);
clusterHitCount[i][j] = aida.histogram1D(plotsGroupName + "/" + triggerNames[i] + "/"
- + positionNames[j] + "/Cluster Hit Count", 10, -0.5, 9.5);
+ + positionNames[j] + "/Cluster Hit Count", 10, -0.5, 9.5);
clusterSeedEnergy[i][j] = aida.histogram1D(plotsGroupName + "/" + triggerNames[i] + "/"
- + positionNames[j] + "/Cluster Seed Energy", 150, 0.000, 1.500);
+ + positionNames[j] + "/Cluster Seed Energy", 150, 0.000, 1.500);
pairEnergySum[i][j] = aida.histogram1D(plotsGroupName + "/" + triggerNames[i] + "/"
- + positionNames[j] + "/Pair Energy Sum", 150, 0.000, 1.500);
+ + positionNames[j] + "/Pair Energy Sum", 150, 0.000, 1.500);
pairEnergyDifference[i][j] = aida.histogram1D(plotsGroupName + "/" + triggerNames[i] + "/"
- + positionNames[j] + "/Pair Energy Difference", 150, 0.000, 1.500);
+ + positionNames[j] + "/Pair Energy Difference", 150, 0.000, 1.500);
pairEnergySlope[i][j] = aida.histogram1D(plotsGroupName + "/" + triggerNames[i] + "/"
- + positionNames[j] + "/Pair Energy Slope", 100, 0.000, 4.000);
+ + positionNames[j] + "/Pair Energy Slope", 100, 0.000, 4.000);
pairCoplanarity[i][j] = aida.histogram1D(plotsGroupName + "/" + triggerNames[i] + "/"
- + positionNames[j] + "/Pair Coplanarity", 180, 0.000, 180);
+ + positionNames[j] + "/Pair Coplanarity", 180, 0.000, 180);
pairTimeCoincidence[i][j] = aida.histogram1D(plotsGroupName + "/" + triggerNames[i] + "/"
- + positionNames[j] + "/Pair Time Coincidence", 80, 0, 20);
+ + positionNames[j] + "/Pair Time Coincidence", 80, 0, 20);
clusterSeedPosition[i][j] = aida.histogram2D(plotsGroupName + "/" + triggerNames[i] + "/"
- + positionNames[j] + "/Cluster Seed Position", 46, -23, 23, 11, -5.5, 5.5);
+ + positionNames[j] + "/Cluster Seed Position", 46, -23, 23, 11, -5.5, 5.5);
pairEnergySum2D[i][j] = aida.histogram2D(plotsGroupName + "/" + triggerNames[i] + "/"
- + positionNames[j] + "/Pair Energy Sum 2D", 150, 0.000, 1.500, 150, 0.000, 1.500);
+ + positionNames[j] + "/Pair Energy Sum 2D", 150, 0.000, 1.500, 150, 0.000, 1.500);
pairCoplanarityEnergySum[i][j] = aida.histogram2D(plotsGroupName + "/" + triggerNames[i] + "/"
- + positionNames[j] + "/Pair Energy Sum vs. Coplanarity", 150, 0.000, 1.500, 180, 0, 180);
+ + positionNames[j] + "/Pair Energy Sum vs. Coplanarity", 150, 0.000, 1.500, 180, 0, 180);
pairEnergySlope2D[i][j] = aida.histogram2D(plotsGroupName + "/" + triggerNames[i] + "/"
- + positionNames[j] + "/Pair Energy Slope 2D", 75, 0.000, 1.500, 100, 0.0, 400.0);
- }
- }
- }
-
- /**
- * Processes the event clusters and populates distribution charts
- * from them for each trigger. Also creates separate plots for the
- * edge and fiducial regions.
- * @param event - The event containing LCIO collections to be used
- * for plot population.
- */
- @Override
- public void process(EventHeader event) {
- // Get the TI and SSP banks.
- TIData tiBank = null;
- SSPData sspBank = null;
- if(event.hasCollection(GenericObject.class, bankCollectionName)) {
- // Get the bank list.
- List<GenericObject> bankList = event.get(GenericObject.class, bankCollectionName);
-
- // Search through the banks and get the SSP and TI banks.
- for(GenericObject obj : bankList) {
- // If this is an SSP bank, parse it.
- if(AbstractIntData.getTag(obj) == SSPData.BANK_TAG) {
- sspBank = new SSPData(obj);
- }
-
- // Otherwise, if this is a TI bank, parse it.
- else if(AbstractIntData.getTag(obj) == TIData.BANK_TAG) {
- tiBank = new TIData(obj);
- }
- }
- }
-
- // Get the list of clusters.
- List<Cluster> clusters = null;
- if(event.hasCollection(Cluster.class, clusterCollectionName)) {
- clusters = event.get(Cluster.class, clusterCollectionName);
- }
-
- // Require that all collections be initialized.
- if(sspBank == null || tiBank == null || clusters == null) {
- return;
- }
-
- // Track which triggers are active.
- boolean[] activeTrigger = new boolean[5];
- activeTrigger[PULSER] = tiBank.isPulserTrigger();
- activeTrigger[SINGLES0] = tiBank.isSingle0Trigger();
- activeTrigger[SINGLES1] = tiBank.isSingle1Trigger();
- activeTrigger[PAIR0] = tiBank.isPair0Trigger();
- activeTrigger[PAIR1] = tiBank.isPair1Trigger();
-
- // Plot all cluster properties for each trigger.
- for(Cluster cluster : clusters) {
- // Check whether the cluster is a fiducial or edge cluster.
- int positional = inFiducialRegion(cluster) ? FIDUCIAL : EDGE;
-
- // Fill the appropriate plots for each trigger with an
- // active trigger bit for single clusters.
- for(int i = 0; i < 5; i++) {
- if(activeTrigger[i]) {
- // Populate the ALL plots.
- clusterSeedEnergy[i][ALL].fill(TriggerModule.getValueClusterSeedEnergy(cluster));
- clusterTotalEnergy[i][ALL].fill(cluster.getEnergy());
- clusterHitCount[i][ALL].fill(TriggerModule.getClusterHitCount(cluster));
- clusterTime[i][ALL].fill(TriggerModule.getClusterTime(cluster));
- clusterSeedPosition[i][ALL].fill(TriggerModule.getClusterXIndex(cluster),
- TriggerModule.getClusterYIndex(cluster));
-
- // Populate the positional plots.
- clusterSeedEnergy[i][positional].fill(TriggerModule.getValueClusterSeedEnergy(cluster));
- clusterTotalEnergy[i][positional].fill(cluster.getEnergy());
- clusterHitCount[i][positional].fill(TriggerModule.getClusterHitCount(cluster));
- clusterTime[i][positional].fill(TriggerModule.getClusterTime(cluster));
- clusterSeedPosition[i][positional].fill(TriggerModule.getClusterXIndex(cluster),
- TriggerModule.getClusterYIndex(cluster));
- }
- }
- }
-
- // Plot all pair properties for each trigger.
- List<Cluster[]> pairs = TriggerModule.getTopBottomPairs(clusters, Cluster.class);
- for(Cluster[] pair : pairs) {
- // Check whether the cluster is a fiducial or edge cluster.
- boolean[] isFiducial = {
- inFiducialRegion(pair[0]),
- inFiducialRegion(pair[1])
- };
- int positional = (isFiducial[0] && isFiducial[1]) ? FIDUCIAL : EDGE;
-
- // Fill the appropriate plots for each trigger with an
- // active trigger bit for single clusters.
- for(int i = 0; i < 5; i++) {
- if(activeTrigger[i]) {
- // Calculate the values.
- double energySum = TriggerModule.getValueEnergySum(pair);
- double energyDiff = TriggerModule.getValueEnergyDifference(pair);
- double energySlope = TriggerModule.getValueEnergySlope(pair, 0.00550);
- double coplanarity = TriggerModule.getValueCoplanarity(pair);
- double timeCoincidence = TriggerModule.getValueTimeCoincidence(pair);
-
- // Get the energy slope values.
- Cluster lowCluster = pair[0].getEnergy() < pair[1].getEnergy() ? pair[0] : pair[1];
- double clusterDistance = TriggerModule.getClusterDistance(lowCluster);
-
- // Populate the ALL plots.
- pairEnergySum[i][ALL].fill(energySum);
- pairEnergyDifference[i][ALL].fill(energyDiff);
- pairEnergySlope[i][ALL].fill(energySlope);
- pairCoplanarity[i][ALL].fill(coplanarity);
- pairTimeCoincidence[i][ALL].fill(timeCoincidence);
- pairEnergySum2D[i][ALL].fill(pair[0].getEnergy(), pair[1].getEnergy());
- pairCoplanarityEnergySum[i][ALL].fill(energySum, coplanarity);
- pairEnergySlope2D[i][ALL].fill(lowCluster.getEnergy(), clusterDistance);
-
- // Populate the positional plots.
- pairEnergySum[i][positional].fill(energySum);
- pairEnergyDifference[i][positional].fill(energyDiff);
- pairEnergySlope[i][positional].fill(energySlope);
- pairCoplanarity[i][positional].fill(coplanarity);
- pairTimeCoincidence[i][positional].fill(timeCoincidence);
- pairEnergySum2D[i][positional].fill(pair[0].getEnergy(), pair[1].getEnergy());
- pairCoplanarityEnergySum[i][positional].fill(energySum, coplanarity);
- pairEnergySlope2D[i][positional].fill(lowCluster.getEnergy(), clusterDistance);
- }
- }
- }
- }
-
- /**
- * Indicates whether the argument cluster is located in the fiducial
- * region or not.
- * @param cluster - The cluster to check.
- * @return Returns <code>true</code> if the cluster is located in
- * the fiducial region and <code>false</code> otherwise.
- */
- private static final boolean inFiducialRegion(Cluster cluster) {
- // Get the x and y indices for the cluster.
- int ix = TriggerModule.getClusterXIndex(cluster);
- int absx = Math.abs(TriggerModule.getClusterXIndex(cluster));
- int absy = Math.abs(TriggerModule.getClusterYIndex(cluster));
-
- // Check if the cluster is on the top or the bottom of the
- // calorimeter, as defined by |y| == 5. This is an edge cluster
- // and is not in the fiducial region.
- if(absy == 5) {
- return false;
- }
-
- // Check if the cluster is on the extreme left or right side
- // of the calorimeter, as defined by |x| == 23. This is also
- // and edge cluster is not in the fiducial region.
- if(absx == 23) {
- return false;
- }
-
- // Check if the cluster is along the beam gap, as defined by
- // |y| == 1. This is an internal edge cluster and is not in the
- // fiducial region.
- if(absy == 1) {
- return false;
- }
-
- // Lastly, check if the cluster falls along the beam hole, as
- // defined by clusters with -11 <= x <= -1 and |y| == 2. This
- // is not the fiducial region.
- if(absy == 2 && ix <= -1 && ix >= -11) {
- return false;
- }
-
- // If all checks fail, the cluster is in the fiducial region.
- return true;
- }
-
- /**
- * Sets the name of the LCIO collection containing the clusters
- * that are to be plotted.
- * @param collection - The LCIO collection name.
- */
- public void setClusterCollectionName(String collection) {
- clusterCollectionName = collection;
- }
-
- /**
- * Defines the name of the LCIO collection containing the TI bank.
- * @param collection - The LCIO collection name.
- */
- public void setBankCollectionName(String collection) {
- bankCollectionName = collection;
- }
-
- /**
- * Sets the name of the super-group folder containing all plots.
- * @param name - The name of the plots folder.
- */
- public void setPlotsGroupName(String name) {
- plotsGroupName = name;
- }
+ + positionNames[j] + "/Pair Energy Slope 2D", 75, 0.000, 1.500, 100, 0.0, 400.0);
+ }
+ }
+ }
+
+ /**
+ * Processes the event clusters and populates distribution charts
+ * from them for each trigger. Also creates separate plots for the
+ * edge and fiducial regions.
+ * @param event - The event containing LCIO collections to be used
+ * for plot population.
+ */
+ @Override
+ public void process(EventHeader event) {
+ // Get the TI and SSP banks.
+ TIData tiBank = null;
+ SSPData sspBank = null;
+ if(event.hasCollection(GenericObject.class, bankCollectionName)) {
+ // Get the bank list.
+ List<GenericObject> bankList = event.get(GenericObject.class, bankCollectionName);
+
+ // Search through the banks and get the SSP and TI banks.
+ for(GenericObject obj : bankList) {
+ // If this is an SSP bank, parse it.
+ if(AbstractIntData.getTag(obj) == SSPData.BANK_TAG) {
+ sspBank = new SSPData(obj);
+ }
+
+ // Otherwise, if this is a TI bank, parse it.
+ else if(AbstractIntData.getTag(obj) == TIData.BANK_TAG) {
+ tiBank = new TIData(obj);
+ }
+ }
+ }
+
+ // Get the list of clusters.
+ List<Cluster> clusters = null;
+ if(event.hasCollection(Cluster.class, clusterCollectionName)) {
+ clusters = event.get(Cluster.class, clusterCollectionName);
+ }
+
+ // Require that all collections be initialized.
+ if(sspBank == null || tiBank == null || clusters == null) {
+ return;
+ }
+
+ // Track which triggers are active.
+ boolean[] activeTrigger = new boolean[5];
+ activeTrigger[PULSER] = tiBank.isPulserTrigger();
+ activeTrigger[SINGLES0] = tiBank.isSingle0Trigger();
+ activeTrigger[SINGLES1] = tiBank.isSingle1Trigger();
+ activeTrigger[PAIR0] = tiBank.isPair0Trigger();
+ activeTrigger[PAIR1] = tiBank.isPair1Trigger();
+
+ // Plot all cluster properties for each trigger.
+ for(Cluster cluster : clusters) {
+ // Check whether the cluster is a fiducial or edge cluster.
+ int positional = inFiducialRegion(cluster) ? FIDUCIAL : EDGE;
+
+ // Fill the appropriate plots for each trigger with an
+ // active trigger bit for single clusters.
+ for(int i = 0; i < 5; i++) {
+ if(activeTrigger[i]) {
+ // Populate the ALL plots.
+ clusterSeedEnergy[i][ALL].fill(TriggerModule.getValueClusterSeedEnergy(cluster));
+ clusterTotalEnergy[i][ALL].fill(cluster.getEnergy());
+ clusterHitCount[i][ALL].fill(TriggerModule.getClusterHitCount(cluster));
+ clusterTime[i][ALL].fill(TriggerModule.getClusterTime(cluster));
+ clusterSeedPosition[i][ALL].fill(TriggerModule.getClusterXIndex(cluster),
+ TriggerModule.getClusterYIndex(cluster));
+
+ // Populate the positional plots.
+ clusterSeedEnergy[i][positional].fill(TriggerModule.getValueClusterSeedEnergy(cluster));
+ clusterTotalEnergy[i][positional].fill(cluster.getEnergy());
+ clusterHitCount[i][positional].fill(TriggerModule.getClusterHitCount(cluster));
+ clusterTime[i][positional].fill(TriggerModule.getClusterTime(cluster));
+ clusterSeedPosition[i][positional].fill(TriggerModule.getClusterXIndex(cluster),
+ TriggerModule.getClusterYIndex(cluster));
+ }
+ }
+ }
+
+ // Plot all pair properties for each trigger.
+ List<Cluster[]> pairs = TriggerModule.getTopBottomPairs(clusters, Cluster.class);
+ for(Cluster[] pair : pairs) {
+ // Check whether the cluster is a fiducial or edge cluster.
+ boolean[] isFiducial = {
+ inFiducialRegion(pair[0]),
+ inFiducialRegion(pair[1])
+ };
+ int positional = (isFiducial[0] && isFiducial[1]) ? FIDUCIAL : EDGE;
+
+ // Fill the appropriate plots for each trigger with an
+ // active trigger bit for single clusters.
+ for(int i = 0; i < 5; i++) {
+ if(activeTrigger[i]) {
+ // Calculate the values.
+ double energySum = TriggerModule.getValueEnergySum(pair);
+ double energyDiff = TriggerModule.getValueEnergyDifference(pair);
+ double energySlope = TriggerModule.getValueEnergySlope(pair, 0.00550);
+ double coplanarity = TriggerModule.getValueCoplanarity(pair);
+ double timeCoincidence = TriggerModule.getValueTimeCoincidence(pair);
+
+ // Get the energy slope values.
+ Cluster lowCluster = pair[0].getEnergy() < pair[1].getEnergy() ? pair[0] : pair[1];
+ double clusterDistance = TriggerModule.getClusterDistance(lowCluster);
+
+ // Populate the ALL plots.
+ pairEnergySum[i][ALL].fill(energySum);
+ pairEnergyDifference[i][ALL].fill(energyDiff);
+ pairEnergySlope[i][ALL].fill(energySlope);
+ pairCoplanarity[i][ALL].fill(coplanarity);
+ pairTimeCoincidence[i][ALL].fill(timeCoincidence);
+ pairEnergySum2D[i][ALL].fill(pair[0].getEnergy(), pair[1].getEnergy());
+ pairCoplanarityEnergySum[i][ALL].fill(energySum, coplanarity);
+ pairEnergySlope2D[i][ALL].fill(lowCluster.getEnergy(), clusterDistance);
+
+ // Populate the positional plots.
+ pairEnergySum[i][positional].fill(energySum);
+ pairEnergyDifference[i][positional].fill(energyDiff);
+ pairEnergySlope[i][positional].fill(energySlope);
+ pairCoplanarity[i][positional].fill(coplanarity);
+ pairTimeCoincidence[i][positional].fill(timeCoincidence);
+ pairEnergySum2D[i][positional].fill(pair[0].getEnergy(), pair[1].getEnergy());
+ pairCoplanarityEnergySum[i][positional].fill(energySum, coplanarity);
+ pairEnergySlope2D[i][positional].fill(lowCluster.getEnergy(), clusterDistance);
+ }
+ }
+ }
+ }
+
+ /**
+ * Indicates whether the argument cluster is located in the fiducial
+ * region or not.
+ * @param cluster - The cluster to check.
+ * @return Returns <code>true</code> if the cluster is located in
+ * the fiducial region and <code>false</code> otherwise.
+ */
+ private static final boolean inFiducialRegion(Cluster cluster) {
+ // Get the x and y indices for the cluster.
+ int ix = TriggerModule.getClusterXIndex(cluster);
+ int absx = Math.abs(TriggerModule.getClusterXIndex(cluster));
+ int absy = Math.abs(TriggerModule.getClusterYIndex(cluster));
+
+ // Check if the cluster is on the top or the bottom of the
+ // calorimeter, as defined by |y| == 5. This is an edge cluster
+ // and is not in the fiducial region.
+ if(absy == 5) {
+ return false;
+ }
+
+ // Check if the cluster is on the extreme left or right side
+ // of the calorimeter, as defined by |x| == 23. This is also
+ // an edge cluster and is not in the fiducial region.
+ if(absx == 23) {
+ return false;
+ }
+
+ // Check if the cluster is along the beam gap, as defined by
+ // |y| == 1. This is an internal edge cluster and is not in the
+ // fiducial region.
+ if(absy == 1) {
+ return false;
+ }
+
+ // Lastly, check if the cluster falls along the beam hole, as
+ // defined by clusters with -11 <= x <= -1 and |y| == 2. This
+ // is not the fiducial region.
+ if(absy == 2 && ix <= -1 && ix >= -11) {
+ return false;
+ }
+
+ // If all checks fail, the cluster is in the fiducial region.
+ return true;
+ }
+
+ /**
+ * Sets the name of the LCIO collection containing the clusters
+ * that are to be plotted.
+ * @param collection - The LCIO collection name.
+ */
+ public void setClusterCollectionName(String collection) {
+ clusterCollectionName = collection;
+ }
+
+ /**
+ * Defines the name of the LCIO collection containing the TI bank.
+ * @param collection - The LCIO collection name.
+ */
+ public void setBankCollectionName(String collection) {
+ bankCollectionName = collection;
+ }
+
+ /**
+ * Sets the name of the super-group folder containing all plots.
+ * @param name - The name of the plots folder.
+ */
+ public void setPlotsGroupName(String name) {
+ plotsGroupName = name;
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/InvariantMassPairDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/InvariantMassPairDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/InvariantMassPairDriver.java Wed Apr 27 11:11:32 2016
@@ -15,255 +15,255 @@
import org.lcsim.util.aida.AIDA;
public class InvariantMassPairDriver extends Driver {
- private int[] events = new int[3];
- private TriggerModule[] trigger = new TriggerModule[2];
-
- private String gtpClusterCollectionName = "EcalClustersGTP";
- private String particleCollectionName = "FinalStateParticles";
- private String reconParticleCollectionName = "UnconstrainedV0Candidates";
-
- private AIDA aida = AIDA.defaultInstance();
- private IHistogram1D electronEnergyHist = aida.histogram1D("Trident Analysis/Electron Energy", 150, 0.000, 1.500);
- private IHistogram1D positronEnergyHist = aida.histogram1D("Trident Analysis/Positron Energy", 150, 0.000, 1.500);
- private IHistogram1D pairEnergyHist = aida.histogram1D("Trident Analysis/Energy Sum Distribution", 220, 0.00, 2.200);
- private IHistogram2D pair2DEnergyHist = aida.histogram2D("Trident Analysis/2D Energy Distribution", 55, 0, 1.1, 55, 0, 1.1);
- private IHistogram1D pair1MassHist = aida.histogram1D("Trident Analysis/Particle Invariant Mass (1 Hit)", 240, 0.000, 0.120);
- private IHistogram1D pair1ModMassHist = aida.histogram1D("Trident Analysis/Particle Invariant Mass (2 Hit)", 240, 0.000, 0.120);
- private IHistogram1D elasticElectronEnergyHist = aida.histogram1D("Trident Analysis/Trident Electron Energy", 150, 0.000, 1.500);
- private IHistogram1D elasticPositronEnergyHist = aida.histogram1D("Trident Analysis/Trident Positron Energy", 150, 0.000, 1.500);
-
- @Override
- public void startOfData() {
- // Instantiate the pair 1 trigger.
- trigger[0] = new TriggerModule();
- trigger[0].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 1);
- trigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.054);
- trigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 0.630);
- trigger[0].setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 30);
- trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 0.540);
- trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.180);
- trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 0.860);
- trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.600);
- trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F, 0.0055);
- trigger[0].setCutValue(TriggerModule.PAIR_TIME_COINCIDENCE, 12);
-
- // Instantiate the pair 1 trigger with a hit count cut of two.
- trigger[1] = new TriggerModule();
- trigger[1].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 2);
- trigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.054);
- trigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 0.630);
- trigger[0].setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 30);
- trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 0.540);
- trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.180);
- trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 0.860);
- trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.600);
- trigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F, 0.0055);
- trigger[0].setCutValue(TriggerModule.PAIR_TIME_COINCIDENCE, 12);
- }
-
- @Override
- public void endOfData() {
- System.out.printf("Pair 1 :: %d / %d%n", events[0], events[2]);
- System.out.printf("Pair 1 Mod :: %d / %d%n", events[1], events[2]);
- }
-
- @Override
- public void process(EventHeader event) {
- // Skip the event if there is no reconstructed particle list.
- if(!event.hasCollection(ReconstructedParticle.class, particleCollectionName)) {
- return;
- }
-
- // Get a list of all tracks in the event.
- List<ReconstructedParticle> trackList = event.get(ReconstructedParticle.class, particleCollectionName);
-
- // Plot the energies of the electrons and positrons.
- for(ReconstructedParticle track : trackList) {
- // Positive tracks are assumed to be positrons.
- if(track.getCharge() > 0) {
- positronEnergyHist.fill(track.getMomentum().magnitude());
- }
-
- // Negative tracks are assumed to be electrons.
- else if(track.getCharge() < 0) {
- electronEnergyHist.fill(track.getMomentum().magnitude());
- }
- }
-
- // Get track pairs.
- List<ReconstructedParticle[]> trackPairList = getTrackPairs(trackList);
-
- // Populate the pair plots.
- trackPairLoop:
- for(ReconstructedParticle[] trackPair : trackPairList) {
- // Note the polarity of the tracks.
- boolean[] trackIsPositive = {
- trackPair[0].getCharge() > 0,
- trackPair[1].getCharge() > 0
- };
-
- // Require that one track be positive and one be negative.
- if(!(trackIsPositive[0] ^ trackIsPositive[1])) {
- continue trackPairLoop;
- }
-
- // Populate the track pair plots.
- pairEnergyHist.fill(VecOp.add(trackPair[0].getMomentum(), trackPair[1].getMomentum()).magnitude());
- if(trackIsPositive[0]) {
- pair2DEnergyHist.fill(trackPair[0].getMomentum().magnitude(), trackPair[1].getMomentum().magnitude());
- } else {
- pair2DEnergyHist.fill(trackPair[1].getMomentum().magnitude(), trackPair[0].getMomentum().magnitude());
- }
- }
-
- // Check that the event has a collection of GTP clusters.
- if(!event.hasCollection(Cluster.class, gtpClusterCollectionName)) {
- return;
- }
-
- // Increment the total event count.
- events[2]++;
-
- // Get the GTP clusters.
- List<Cluster> clusters = event.get(Cluster.class, gtpClusterCollectionName);
-
- // Get the list of top/bottom pairs.
- List<Cluster[]> pairs = getClusterPairs(clusters);
-
- // Iterate over the pairs and determine if any cluster passes
- // pair 1 trigger or the pair 1 modified trigger.
- boolean passedPair1 = false;
- boolean passedPair1Mod = false;
- pairLoop:
- for(Cluster[] pair : pairs) {
- // Check the cluster energy cut.
- if(!trigger[0].clusterTotalEnergyCut(pair[0])) { continue pairLoop; }
- if(!trigger[0].clusterTotalEnergyCut(pair[1])) { continue pairLoop; }
-
- // Check the pair cuts.
- if(!trigger[0].pairCoplanarityCut(pair)) { continue pairLoop; }
- if(!trigger[0].pairEnergyDifferenceCut(pair)) { continue pairLoop; }
- if(!trigger[0].pairEnergySumCut(pair)) { continue pairLoop; }
- if(!trigger[0].pairEnergySlopeCut(pair)) { continue pairLoop; }
-
- // Check if the pair passes the singles 0 hit count cut.
- if(trigger[0].clusterHitCountCut(pair[0]) && trigger[0].clusterHitCountCut(pair[1])) {
- // Note that a pair passed the pair 1 trigger.
- passedPair1 = true;
-
- // Check whether the pair passed the modified pair 1
- // trigger hit count cut.
- if(trigger[1].clusterHitCountCut(pair[0]) && trigger[1].clusterHitCountCut(pair[1])) {
- passedPair1Mod = true;
- }
- } else { continue pairLoop; }
- }
-
- // If no pair passed the pair 1 cut, nothing further need be done.
- if(!passedPair1) { return; }
-
- // Otherwise, increment the "passed pair 1" count and the
- // "passed pair 1 mod" count, if appropriate.
- events[0]++;
- if(passedPair1Mod) { events[1]++; }
-
- // Get the collection of reconstructed V0 candidates.
- List<ReconstructedParticle> candidateList = event.get(ReconstructedParticle.class, reconParticleCollectionName);
-
- // Populate the invariant mass plot.
- candidateLoop:
- for(ReconstructedParticle particle : candidateList) {
- // Track the electron and positron momenta.
- double electronMomentum = 0.0;
- double positronMomentum = 0.0;
-
- // Check that it has component particles that meet the
- // trident condition.
- boolean seenPositive = false;
- boolean seenNegative = false;
- for(ReconstructedParticle track : particle.getParticles()) {
- // Exactly one track must be negative. Its energy is
- // disallowed from exceeding 900 MeV.
- if(track.getCharge() < 0) {
- // Reject a second negative particle.
- if(seenNegative) { continue candidateLoop; }
-
- // Otherwise, note that one has been seen.
- seenNegative = true;
- electronMomentum = track.getMomentum().magnitude();
-
- // Reject electrons with a momentum exceeding 900 MeV.
- if(track.getMomentum().magnitude() > 0.900) {
- continue candidateLoop;
- }
- }
-
- // Exactly one track must be positive. Its energy is
- // not constrained.
- else if(track.getCharge() > 0) {
- // Reject a second positive particle.
- if(seenPositive) { continue candidateLoop; }
-
- // Otherwise, note that one has been seen.
- seenPositive = true;
- positronMomentum = track.getMomentum().magnitude();
- }
-
- // Lastly, reject any particle that produced a photon.
- else { continue candidateLoop; }
- }
-
- // Populate the plots.
- pair1MassHist.fill(particle.getMass());
- elasticElectronEnergyHist.fill(electronMomentum);
- elasticPositronEnergyHist.fill(positronMomentum);
- if(passedPair1Mod) { pair1ModMassHist.fill(particle.getMass()); }
- }
- }
-
- /**
- * Creates a list of top/bottom cluster pairs.
- * @param clusters - A <code>List</code> collection of objects of
- * type <code>Cluster</code>.
- * @return Returns a <code>List</code> collection of 2-entry arrays
- * of <code>Cluster</code> objects representing top/bottom cluster
- * pairs. The first entry is always the top cluster.
- */
- private static final List<Cluster[]> getClusterPairs(List<Cluster> clusters) {
- // Separate the clusters into top and bottom clusters.
- List<Cluster> topList = new ArrayList<Cluster>();
- List<Cluster> botList = new ArrayList<Cluster>();
- for(Cluster cluster : clusters) {
- if(cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy") > 0) {
- topList.add(cluster);
- }
- else { botList.add(cluster); }
- }
-
- // Create a list of all top/bottom pairs.
- List<Cluster[]> pairs = new ArrayList<Cluster[]>();
- for(Cluster topCluster : topList) {
- for(Cluster botCluster : botList) {
- pairs.add(new Cluster[] { topCluster, botCluster });
- }
- }
-
- // Return the list of cluster pairs.
- return pairs;
- }
-
- private static final List<ReconstructedParticle[]> getTrackPairs(List<ReconstructedParticle> trackList) {
- // Create an empty list for the pairs.
- List<ReconstructedParticle[]> pairs = new ArrayList<ReconstructedParticle[]>();
-
- // Add all possible pairs of tracks.
- for(int i = 0; i < trackList.size(); i++) {
- for(int j = i + 1; j < trackList.size(); j++) {
- pairs.add(new ReconstructedParticle[] { trackList.get(i), trackList.get(j) });
- }
- }
-
- // Return the list of tracks.
- return pairs;
- }
+ private int[] events = new int[3];
+ private TriggerModule[] trigger = new TriggerModule[2];
+
+ private String gtpClusterCollectionName = "EcalClustersGTP";
+ private String particleCollectionName = "FinalStateParticles";
+ private String reconParticleCollectionName = "UnconstrainedV0Candidates";
+
+ private AIDA aida = AIDA.defaultInstance();
+ private IHistogram1D electronEnergyHist = aida.histogram1D("Trident Analysis/Electron Energy", 150, 0.000, 1.500);
+ private IHistogram1D positronEnergyHist = aida.histogram1D("Trident Analysis/Positron Energy", 150, 0.000, 1.500);
+ private IHistogram1D pairEnergyHist = aida.histogram1D("Trident Analysis/Energy Sum Distribution", 220, 0.00, 2.200);
+ private IHistogram2D pair2DEnergyHist = aida.histogram2D("Trident Analysis/2D Energy Distribution", 55, 0, 1.1, 55, 0, 1.1);
+ private IHistogram1D pair1MassHist = aida.histogram1D("Trident Analysis/Particle Invariant Mass (1 Hit)", 240, 0.000, 0.120);
+ private IHistogram1D pair1ModMassHist = aida.histogram1D("Trident Analysis/Particle Invariant Mass (2 Hit)", 240, 0.000, 0.120);
+ private IHistogram1D elasticElectronEnergyHist = aida.histogram1D("Trident Analysis/Trident Electron Energy", 150, 0.000, 1.500);
+ private IHistogram1D elasticPositronEnergyHist = aida.histogram1D("Trident Analysis/Trident Positron Energy", 150, 0.000, 1.500);
+
+ @Override
+ public void startOfData() {
+ // Instantiate the pair 1 trigger.
+ trigger[0] = new TriggerModule();
+ trigger[0].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 1);
+ trigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.054);
+ trigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 0.630);
+ trigger[0].setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 30);
+ trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 0.540);
+ trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.180);
+ trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 0.860);
+ trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.600);
+ trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F, 0.0055);
+ trigger[0].setCutValue(TriggerModule.PAIR_TIME_COINCIDENCE, 12);
+
+ // Instantiate the pair 1 trigger with a hit count cut of two.
+ trigger[1] = new TriggerModule();
+ trigger[1].setCutValue(TriggerModule.CLUSTER_HIT_COUNT_LOW, 2);
+ trigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_LOW, 0.054);
+ trigger[0].setCutValue(TriggerModule.CLUSTER_TOTAL_ENERGY_HIGH, 0.630);
+ trigger[0].setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, 30);
+ trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_DIFFERENCE_HIGH, 0.540);
+ trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, 0.180);
+ trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SUM_HIGH, 0.860);
+ trigger[0].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_LOW, 0.600);
+ trigger[1].setCutValue(TriggerModule.PAIR_ENERGY_SLOPE_F, 0.0055);
+ trigger[0].setCutValue(TriggerModule.PAIR_TIME_COINCIDENCE, 12);
+ }
+
+ @Override
+ public void endOfData() {
+ System.out.printf("Pair 1 :: %d / %d%n", events[0], events[2]);
+ System.out.printf("Pair 1 Mod :: %d / %d%n", events[1], events[2]);
+ }
+
+ @Override
+ public void process(EventHeader event) {
+ // Skip the event if there is no reconstructed particle list.
+ if(!event.hasCollection(ReconstructedParticle.class, particleCollectionName)) {
+ return;
+ }
+
+ // Get a list of all tracks in the event.
+ List<ReconstructedParticle> trackList = event.get(ReconstructedParticle.class, particleCollectionName);
+
+ // Plot the energies of the electrons and positrons.
+ for(ReconstructedParticle track : trackList) {
+ // Positive tracks are assumed to be positrons.
+ if(track.getCharge() > 0) {
+ positronEnergyHist.fill(track.getMomentum().magnitude());
+ }
+
+ // Negative tracks are assumed to be electrons.
+ else if(track.getCharge() < 0) {
+ electronEnergyHist.fill(track.getMomentum().magnitude());
+ }
+ }
+
+ // Get track pairs.
+ List<ReconstructedParticle[]> trackPairList = getTrackPairs(trackList);
+
+ // Populate the pair plots.
+ trackPairLoop:
+ for(ReconstructedParticle[] trackPair : trackPairList) {
+ // Note the polarity of the tracks.
+ boolean[] trackIsPositive = {
+ trackPair[0].getCharge() > 0,
+ trackPair[1].getCharge() > 0
+ };
+
+ // Require that one track be positive and one be negative.
+ if(!(trackIsPositive[0] ^ trackIsPositive[1])) {
+ continue trackPairLoop;
+ }
+
+ // Populate the track pair plots.
+ pairEnergyHist.fill(VecOp.add(trackPair[0].getMomentum(), trackPair[1].getMomentum()).magnitude());
+ if(trackIsPositive[0]) {
+ pair2DEnergyHist.fill(trackPair[0].getMomentum().magnitude(), trackPair[1].getMomentum().magnitude());
+ } else {
+ pair2DEnergyHist.fill(trackPair[1].getMomentum().magnitude(), trackPair[0].getMomentum().magnitude());
+ }
+ }
+
+ // Check that the event has a collection of GTP clusters.
+ if(!event.hasCollection(Cluster.class, gtpClusterCollectionName)) {
+ return;
+ }
+
+ // Increment the total event count.
+ events[2]++;
+
+ // Get the GTP clusters.
+ List<Cluster> clusters = event.get(Cluster.class, gtpClusterCollectionName);
+
+ // Get the list of top/bottom pairs.
+ List<Cluster[]> pairs = getClusterPairs(clusters);
+
+ // Iterate over the pairs and determine if any cluster passes
+ // pair 1 trigger or the pair 1 modified trigger.
+ boolean passedPair1 = false;
+ boolean passedPair1Mod = false;
+ pairLoop:
+ for(Cluster[] pair : pairs) {
+ // Check the cluster energy cut.
+ if(!trigger[0].clusterTotalEnergyCut(pair[0])) { continue pairLoop; }
+ if(!trigger[0].clusterTotalEnergyCut(pair[1])) { continue pairLoop; }
+
+ // Check the pair cuts.
+ if(!trigger[0].pairCoplanarityCut(pair)) { continue pairLoop; }
+ if(!trigger[0].pairEnergyDifferenceCut(pair)) { continue pairLoop; }
+ if(!trigger[0].pairEnergySumCut(pair)) { continue pairLoop; }
+ if(!trigger[0].pairEnergySlopeCut(pair)) { continue pairLoop; }
+
+ // Check if the pair passes the singles 0 hit count cut.
+ if(trigger[0].clusterHitCountCut(pair[0]) && trigger[0].clusterHitCountCut(pair[1])) {
+ // Note that a pair passed the pair 1 trigger.
+ passedPair1 = true;
+
+ // Check whether the pair passed the modified pair 1
+ // trigger hit count cut.
+ if(trigger[1].clusterHitCountCut(pair[0]) && trigger[1].clusterHitCountCut(pair[1])) {
+ passedPair1Mod = true;
+ }
+ } else { continue pairLoop; }
+ }
+
+ // If no pair passed the pair 1 cut, nothing further need be done.
+ if(!passedPair1) { return; }
+
+ // Otherwise, increment the "passed pair 1" count and the
+ // "passed pair 1 mod" count, if appropriate.
+ events[0]++;
+ if(passedPair1Mod) { events[1]++; }
+
+ // Get the collection of reconstructed V0 candidates.
+ List<ReconstructedParticle> candidateList = event.get(ReconstructedParticle.class, reconParticleCollectionName);
+
+ // Populate the invariant mass plot.
+ candidateLoop:
+ for(ReconstructedParticle particle : candidateList) {
+ // Track the electron and positron momenta.
+ double electronMomentum = 0.0;
+ double positronMomentum = 0.0;
+
+ // Check that it has component particles that meet the
+ // trident condition.
+ boolean seenPositive = false;
+ boolean seenNegative = false;
+ for(ReconstructedParticle track : particle.getParticles()) {
+ // Exactly one track must be negative. Its energy is
+ // disallowed from exceeding 900 MeV.
+ if(track.getCharge() < 0) {
+ // Reject a second negative particle.
+ if(seenNegative) { continue candidateLoop; }
+
+ // Otherwise, note that one has been seen.
+ seenNegative = true;
+ electronMomentum = track.getMomentum().magnitude();
+
+ // Reject electrons with a momentum exceeding 900 MeV.
+ if(track.getMomentum().magnitude() > 0.900) {
+ continue candidateLoop;
+ }
+ }
+
+ // Exactly one track must be positive. Its energy is
+ // not constrained.
+ else if(track.getCharge() > 0) {
+ // Reject a second positive particle.
+ if(seenPositive) { continue candidateLoop; }
+
+ // Otherwise, note that one has been seen.
+ seenPositive = true;
+ positronMomentum = track.getMomentum().magnitude();
+ }
+
+ // Lastly, reject any particle that produced a photon.
+ else { continue candidateLoop; }
+ }
+
+ // Populate the plots.
+ pair1MassHist.fill(particle.getMass());
+ elasticElectronEnergyHist.fill(electronMomentum);
+ elasticPositronEnergyHist.fill(positronMomentum);
+ if(passedPair1Mod) { pair1ModMassHist.fill(particle.getMass()); }
+ }
+ }
+
+ /**
+ * Creates a list of top/bottom cluster pairs.
+ * @param clusters - A <code>List</code> collection of objects of
+ * type <code>Cluster</code>.
+ * @return Returns a <code>List</code> collection of 2-entry arrays
+ * of <code>Cluster</code> objects representing top/bottom cluster
+ * pairs. The first entry is always the top cluster.
+ */
+ private static final List<Cluster[]> getClusterPairs(List<Cluster> clusters) {
+ // Separate the clusters into top and bottom clusters.
+ List<Cluster> topList = new ArrayList<Cluster>();
+ List<Cluster> botList = new ArrayList<Cluster>();
+ for(Cluster cluster : clusters) {
+ if(cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy") > 0) {
+ topList.add(cluster);
+ }
+ else { botList.add(cluster); }
+ }
+
+ // Create a list of all top/bottom pairs.
+ List<Cluster[]> pairs = new ArrayList<Cluster[]>();
+ for(Cluster topCluster : topList) {
+ for(Cluster botCluster : botList) {
+ pairs.add(new Cluster[] { topCluster, botCluster });
+ }
+ }
+
+ // Return the list of cluster pairs.
+ return pairs;
+ }
+
+ private static final List<ReconstructedParticle[]> getTrackPairs(List<ReconstructedParticle> trackList) {
+ // Create an empty list for the pairs.
+ List<ReconstructedParticle[]> pairs = new ArrayList<ReconstructedParticle[]>();
+
+ // Add all possible pairs of tracks.
+ for(int i = 0; i < trackList.size(); i++) {
+ for(int j = i + 1; j < trackList.size(); j++) {
+ pairs.add(new ReconstructedParticle[] { trackList.get(i), trackList.get(j) });
+ }
+ }
+
+ // Return the list of tracks.
+ return pairs;
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/MTEAnalysis.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/MTEAnalysis.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/MTEAnalysis.java Wed Apr 27 11:11:32 2016
@@ -24,145 +24,145 @@
import org.lcsim.util.aida.AIDA;
public class MTEAnalysis extends Driver {
- // Define track LCIO information.
- private boolean skipBadSVT = true;
- private String bankCollectionName = "TriggerBank";
- private String particleCollectionName = "FinalStateParticles";
- private static final AIDA aida = AIDA.defaultInstance();
- private IHistogram1D[] chargedTracksPlot = {
- aida.histogram1D("MTE Analysis/Møller Event Tracks", 10, -0.5, 9.5),
- aida.histogram1D("MTE Analysis/Trident Event Tracks", 10, -0.5, 9.5),
- aida.histogram1D("MTE Analysis/Elastic Event Tracks", 10, -0.5, 9.5)
- };
- private IHistogram1D[] clusterCountPlot = {
- aida.histogram1D("MTE Analysis/Møller Event Clusters", 10, -0.5, 9.5),
- aida.histogram1D("MTE Analysis/Trident Event Clusters", 10, -0.5, 9.5),
- aida.histogram1D("MTE Analysis/Elastic Event Clusters", 10, -0.5, 9.5)
- };
- private IHistogram1D[] energyPlot = {
- aida.histogram1D("MTE Analysis/Møller Energy Sum Distribution", 220, 0, 2.2),
- aida.histogram1D("MTE Analysis/Trident Energy Sum Distribution", 220, 0, 2.2),
- aida.histogram1D("MTE Analysis/Elastic Energy Distribution", 110, 0, 1.5)
- };
- private IHistogram1D[] electronPlot = {
- aida.histogram1D("MTE Analysis/Møller Electron Energy Distribution", 220, 0, 2.2),
- aida.histogram1D("MTE Analysis/Trident Electron Energy Distribution", 220, 0, 2.2),
- };
- private IHistogram1D positronPlot = aida.histogram1D("MTE Analysis/Trident Positron Energy Distribution", 220, 0, 2.2);
- private IHistogram2D[] energy2DPlot = {
- aida.histogram2D("MTE Analysis/Møller 2D Energy Distribution", 55, 0, 1.1, 55, 0, 1.1),
- aida.histogram2D("MTE Analysis/Trident 2D Energy Distribution", 55, 0, 1.1, 55, 0, 1.1),
- };
- private IHistogram1D timePlot = aida.histogram1D("MTE Analysis/Track Cluster Time Distribution", 4000, 0, 400);
- private IHistogram1D timeCoincidencePlot = aida.histogram1D("MTE Analysis/Møller Time Coincidence Distribution", 1000, 0, 100);
- private IHistogram1D timeCoincidenceAllCutsPlot = aida.histogram1D("MTE Analysis/Møller Time Coincidence Distribution (All Møller Cuts)", 1000, 0, 100);
- private IHistogram1D negTrackCount = aida.histogram1D("MTE Analysis/All Negative Tracks", 10, -0.5, 9.5);
- private IHistogram1D posTrackCount = aida.histogram1D("MTE Analysis/All Positive Event Tracks", 10, -0.5, 9.5);
- private IHistogram1D chargedTrackCount = aida.histogram1D("MTE Analysis/All Event Event Tracks", 10, -0.5, 9.5);
-
- private IHistogram1D trInvariantMassAll = aida.histogram1D("Trident/Invariant Mass", 1500, 0.0, 1.5);
- private IHistogram1D trInvariantMassFiducial = aida.histogram1D("Trident/Invariant Mass (Fiducial Region)", 1500, 0.0, 1.5);
- private IHistogram1D trTimeCoincidenceAll = aida.histogram1D("Trident/Time Coincidence", 300, -15.0, 15.0);
- private IHistogram1D trTimeCoincidenceFiducial = aida.histogram1D("Trident/Time Coincidence (Fiducial Region)", 300, -15.0, 15.0);
- private IHistogram1D trEnergySumAll = aida.histogram1D("Trident/Energy Sum", 300, 0.0, 1.5);
- private IHistogram1D trEnergySumFiducial = aida.histogram1D("Trident/Energy Sum (Fiducial Region)", 300, 0.0, 1.5);
- private IHistogram2D trEnergySum2DAll = aida.histogram2D("Trident/First Cluster Energy vs. Second Cluster Energy", 300, 0, 1.5, 300, 0, 1.5);
- private IHistogram2D trEnergySum2DFiducial = aida.histogram2D("Trident/First Cluster Energy vs. Second Cluster Energy (Fiducial Region)", 300, 0, 1.5, 300, 0, 1.5);
- private IHistogram2D trSumCoplanarityAll = aida.histogram2D("Trident/Hardware Coplanarity vs. Energy Sum", 300, 0, 1.5, 115, 0, 180);
- private IHistogram2D trSumCoplanarityFiducial = aida.histogram2D("Trident/Hardware Coplanarity vs. Energy Sum (Fiducial Region)", 300, 0, 1.5, 115, 0, 180);
- private IHistogram2D trSumCoplanarityCalcAll = aida.histogram2D("Trident/Calculated Coplanarity vs. Energy Sum", 300, 0, 1.5, 115, 130, 230);
- private IHistogram2D trSumCoplanarityCalcFiducial = aida.histogram2D("Trident/Calculated Coplanarity vs. Energy Sum (Fiducial Region)", 300, 0, 1.5, 115, 130, 230);
- private IHistogram2D trTimeEnergyAll = aida.histogram2D("Trident/Cluster Time vs. Cluster Energy", 300, 0, 1.5, 100, 0, 100);
- private IHistogram2D trTimeEnergyFiducial = aida.histogram2D("Trident/Cluster Time vs. Cluster Energy (Fiducial Region)", 300, 0, 1.5, 100, 0, 100);
-
- private TriggerPlotsModule allPlots = new TriggerPlotsModule("All");
- private TriggerPlotsModule møllerPlots = new TriggerPlotsModule("Møller");
- private TriggerPlotsModule tridentPlots = new TriggerPlotsModule("Trident");
- private TriggerPlotsModule elasticPlots = new TriggerPlotsModule("Elastic");
- private static final int MØLLER = 0;
- private static final int TRIDENT = 1;
- private static final int ELASTIC = 2;
- private boolean verbose = false;
- private boolean excludeNoTrackEvents = false;
- private double timeCoincidenceCut = Double.MAX_VALUE;
- private Map<String, Integer> møllerBitMap = new HashMap<String, Integer>();
- private Map<String, Integer> tridentBitMap = new HashMap<String, Integer>();
- private Map<String, Integer> elasticBitMap = new HashMap<String, Integer>();
- private int møllerEvents = 0;
- private int tridentEvents = 0;
- private int elasticEvents = 0;
- private int totalEvents = 0;
- private int pair1Events = 0;
- private int pair0Events = 0;
- private int singles1Events = 0;
- private int singles0Events = 0;
- private int pulserEvents = 0;
-
- @Override
- public void startOfData() {
- for(int s0 = 0; s0 <= 1; s0++) {
- for(int s1 = 0; s1 <= 1; s1++) {
- for(int p0 = 0; p0 <= 1; p0++) {
- for(int p1 = 0; p1 <= 1; p1++) {
- for(int pulser = 0; pulser <=1; pulser++) {
- // Set each "trigger bit."
- boolean s0bit = (s0 == 1);
- boolean s1bit = (s1 == 1);
- boolean p0bit = (p0 == 1);
- boolean p1bit = (p1 == 1);
- boolean pulserBit = (p1 == 1);
-
- // Generate the bit string.
- String bitString = getBitString(s0bit, s1bit, p0bit, p1bit, pulserBit);
-
- // Set a default value of zero for this bit combination.
- møllerBitMap.put(bitString, 1);
- tridentBitMap.put(bitString, 1);
- elasticBitMap.put(bitString, 1);
- }
- }
- }
- }
- }
- }
-
- @Override
- public void endOfData() {
- System.out.println("Møller Events :: " + møllerEvents);
- System.out.println("Trident Events :: " + tridentEvents);
- System.out.println("Elastic Events :: " + elasticEvents);
- System.out.println("Total Events :: " + totalEvents);
- System.out.println("Pair 1 Events :: " + pair1Events);
- System.out.println("Pair 0 Events :: " + pair0Events);
- System.out.println("Singles 1 Events :: " + singles1Events);
- System.out.println("Singles 0 Events :: " + singles0Events);
- System.out.println("Pulser Events :: " + pulserEvents);
-
- System.out.println("Plsr\tS0\tS1\tP0\tP1\tMøller");
- for(Entry<String, Integer> entry : møllerBitMap.entrySet()) {
- System.out.println(entry.getKey() + "\t" + entry.getValue());
- }
-
- System.out.println("Plsr\tS0\tS1\tP0\tP1\tTrident");
- for(Entry<String, Integer> entry : tridentBitMap.entrySet()) {
- System.out.println(entry.getKey() + "\t" + entry.getValue());
- }
-
- System.out.println("Plsr\tS0\tS1\tP0\tP1\tElastic");
- for(Entry<String, Integer> entry : elasticBitMap.entrySet()) {
- System.out.println(entry.getKey() + "\t" + entry.getValue());
- }
- }
-
- private static final String getBitString(boolean s0, boolean s1, boolean p0, boolean p1, boolean pulser) {
- return String.format("%d\t%d\t%d\t%d\t%d", (pulser ? 1 : 0), (s0 ? 1 : 0), (s1 ? 1 : 0), (p0 ? 1 : 0), (p1 ? 1 : 0));
- }
-
- @Override
- public void process(EventHeader event) {
- // Check whether the SVT was active in this event.
- final String[] flagNames = { "svt_bias_good", "svt_burstmode_noise_good", "svt_position_good" };
- boolean svtGood = true;
+ // Define track LCIO information.
+ private boolean skipBadSVT = true;
+ private String bankCollectionName = "TriggerBank";
+ private String particleCollectionName = "FinalStateParticles";
+ private static final AIDA aida = AIDA.defaultInstance();
+ private IHistogram1D[] chargedTracksPlot = {
+ aida.histogram1D("MTE Analysis/Møller Event Tracks", 10, -0.5, 9.5),
+ aida.histogram1D("MTE Analysis/Trident Event Tracks", 10, -0.5, 9.5),
+ aida.histogram1D("MTE Analysis/Elastic Event Tracks", 10, -0.5, 9.5)
+ };
+ private IHistogram1D[] clusterCountPlot = {
+ aida.histogram1D("MTE Analysis/Møller Event Clusters", 10, -0.5, 9.5),
+ aida.histogram1D("MTE Analysis/Trident Event Clusters", 10, -0.5, 9.5),
+ aida.histogram1D("MTE Analysis/Elastic Event Clusters", 10, -0.5, 9.5)
+ };
+ private IHistogram1D[] energyPlot = {
+ aida.histogram1D("MTE Analysis/Møller Energy Sum Distribution", 220, 0, 2.2),
+ aida.histogram1D("MTE Analysis/Trident Energy Sum Distribution", 220, 0, 2.2),
+ aida.histogram1D("MTE Analysis/Elastic Energy Distribution", 110, 0, 1.5)
+ };
+ private IHistogram1D[] electronPlot = {
+ aida.histogram1D("MTE Analysis/Møller Electron Energy Distribution", 220, 0, 2.2),
+ aida.histogram1D("MTE Analysis/Trident Electron Energy Distribution", 220, 0, 2.2),
+ };
+ private IHistogram1D positronPlot = aida.histogram1D("MTE Analysis/Trident Positron Energy Distribution", 220, 0, 2.2);
+ private IHistogram2D[] energy2DPlot = {
+ aida.histogram2D("MTE Analysis/Møller 2D Energy Distribution", 55, 0, 1.1, 55, 0, 1.1),
+ aida.histogram2D("MTE Analysis/Trident 2D Energy Distribution", 55, 0, 1.1, 55, 0, 1.1),
+ };
+ private IHistogram1D timePlot = aida.histogram1D("MTE Analysis/Track Cluster Time Distribution", 4000, 0, 400);
// --- MTE (Møller/Trident/Elastic) selection histograms; "aida" is the shared AIDA instance. ---
// Absolute time difference between the two cluster seeds of each candidate pair.
private IHistogram1D timeCoincidencePlot = aida.histogram1D("MTE Analysis/Møller Time Coincidence Distribution", 1000, 0, 100);
// NOTE(review): only filled from a commented-out line in process(); presumably kept for future use — confirm.
private IHistogram1D timeCoincidenceAllCutsPlot = aida.histogram1D("MTE Analysis/Møller Time Coincidence Distribution (All Møller Cuts)", 1000, 0, 100);
// Per-event multiplicities of negative, positive, and all charged "tracks".
private IHistogram1D negTrackCount = aida.histogram1D("MTE Analysis/All Negative Tracks", 10, -0.5, 9.5);
private IHistogram1D posTrackCount = aida.histogram1D("MTE Analysis/All Positive Event Tracks", 10, -0.5, 9.5);
// NOTE(review): title "All Event Event Tracks" looks like a typo for "All Charged Event Tracks" —
// left untouched here because it names a live histogram path.
private IHistogram1D chargedTrackCount = aida.histogram1D("MTE Analysis/All Event Event Tracks", 10, -0.5, 9.5);

// Trident-selection plots, each in an "all pairs" and a "fiducial region" variant.
private IHistogram1D trInvariantMassAll = aida.histogram1D("Trident/Invariant Mass", 1500, 0.0, 1.5);
private IHistogram1D trInvariantMassFiducial = aida.histogram1D("Trident/Invariant Mass (Fiducial Region)", 1500, 0.0, 1.5);
private IHistogram1D trTimeCoincidenceAll = aida.histogram1D("Trident/Time Coincidence", 300, -15.0, 15.0);
private IHistogram1D trTimeCoincidenceFiducial = aida.histogram1D("Trident/Time Coincidence (Fiducial Region)", 300, -15.0, 15.0);
private IHistogram1D trEnergySumAll = aida.histogram1D("Trident/Energy Sum", 300, 0.0, 1.5);
private IHistogram1D trEnergySumFiducial = aida.histogram1D("Trident/Energy Sum (Fiducial Region)", 300, 0.0, 1.5);
private IHistogram2D trEnergySum2DAll = aida.histogram2D("Trident/First Cluster Energy vs. Second Cluster Energy", 300, 0, 1.5, 300, 0, 1.5);
private IHistogram2D trEnergySum2DFiducial = aida.histogram2D("Trident/First Cluster Energy vs. Second Cluster Energy (Fiducial Region)", 300, 0, 1.5, 300, 0, 1.5);
private IHistogram2D trSumCoplanarityAll = aida.histogram2D("Trident/Hardware Coplanarity vs. Energy Sum", 300, 0, 1.5, 115, 0, 180);
private IHistogram2D trSumCoplanarityFiducial = aida.histogram2D("Trident/Hardware Coplanarity vs. Energy Sum (Fiducial Region)", 300, 0, 1.5, 115, 0, 180);
private IHistogram2D trSumCoplanarityCalcAll = aida.histogram2D("Trident/Calculated Coplanarity vs. Energy Sum", 300, 0, 1.5, 115, 130, 230);
private IHistogram2D trSumCoplanarityCalcFiducial = aida.histogram2D("Trident/Calculated Coplanarity vs. Energy Sum (Fiducial Region)", 300, 0, 1.5, 115, 130, 230);
private IHistogram2D trTimeEnergyAll = aida.histogram2D("Trident/Cluster Time vs. Cluster Energy", 300, 0, 1.5, 100, 0, 100);
private IHistogram2D trTimeEnergyFiducial = aida.histogram2D("Trident/Cluster Time vs. Cluster Energy (Fiducial Region)", 300, 0, 1.5, 100, 0, 100);

// Cluster/cluster-pair plot bundles, one per event category plus one for all events.
private TriggerPlotsModule allPlots = new TriggerPlotsModule("All");
private TriggerPlotsModule møllerPlots = new TriggerPlotsModule("Møller");
private TriggerPlotsModule tridentPlots = new TriggerPlotsModule("Trident");
private TriggerPlotsModule elasticPlots = new TriggerPlotsModule("Elastic");
// Event-category indices into the per-category plot arrays.
// NOTE(review): "MÃLLER" appears to be UTF-8 mojibake of "MØLLER"; it is referenced
// throughout process(), so renaming needs a coordinated commit — confirm with the author.
private static final int MÃLLER = 0;
private static final int TRIDENT = 1;
private static final int ELASTIC = 2;
// Steering flags and cuts (see the setters later in this class).
private boolean verbose = false;
private boolean excludeNoTrackEvents = false;
// Maximum seed-time difference (ns) for Møller/trident pairs; default accepts everything.
private double timeCoincidenceCut = Double.MAX_VALUE;
// Tallies of events per TI trigger-bit combination, keyed by getBitString(...).
private Map<String, Integer> møllerBitMap = new HashMap<String, Integer>();
private Map<String, Integer> tridentBitMap = new HashMap<String, Integer>();
private Map<String, Integer> elasticBitMap = new HashMap<String, Integer>();
// Run-level event counters reported in endOfData().
private int møllerEvents = 0;
private int tridentEvents = 0;
private int elasticEvents = 0;
private int totalEvents = 0;
private int pair1Events = 0;
private int pair0Events = 0;
private int singles1Events = 0;
private int singles0Events = 0;
private int pulserEvents = 0;

+ @Override
+ public void startOfData() {
+ for(int s0 = 0; s0 <= 1; s0++) {
+ for(int s1 = 0; s1 <= 1; s1++) {
+ for(int p0 = 0; p0 <= 1; p0++) {
+ for(int p1 = 0; p1 <= 1; p1++) {
+ for(int pulser = 0; pulser <=1; pulser++) {
+ // Set each "trigger bit."
+ boolean s0bit = (s0 == 1);
+ boolean s1bit = (s1 == 1);
+ boolean p0bit = (p0 == 1);
+ boolean p1bit = (p1 == 1);
+ boolean pulserBit = (p1 == 1);
+
+ // Generate the bit string.
+ String bitString = getBitString(s0bit, s1bit, p0bit, p1bit, pulserBit);
+
+ // Set a default value of zero for this bit combination.
+ møllerBitMap.put(bitString, 1);
+ tridentBitMap.put(bitString, 1);
+ elasticBitMap.put(bitString, 1);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ @Override
+ public void endOfData() {
+ System.out.println("Møller Events :: " + møllerEvents);
+ System.out.println("Trident Events :: " + tridentEvents);
+ System.out.println("Elastic Events :: " + elasticEvents);
+ System.out.println("Total Events :: " + totalEvents);
+ System.out.println("Pair 1 Events :: " + pair1Events);
+ System.out.println("Pair 0 Events :: " + pair0Events);
+ System.out.println("Singles 1 Events :: " + singles1Events);
+ System.out.println("Singles 0 Events :: " + singles0Events);
+ System.out.println("Pulser Events :: " + pulserEvents);
+
+ System.out.println("Plsr\tS0\tS1\tP0\tP1\tMøller");
+ for(Entry<String, Integer> entry : møllerBitMap.entrySet()) {
+ System.out.println(entry.getKey() + "\t" + entry.getValue());
+ }
+
+ System.out.println("Plsr\tS0\tS1\tP0\tP1\tTrident");
+ for(Entry<String, Integer> entry : tridentBitMap.entrySet()) {
+ System.out.println(entry.getKey() + "\t" + entry.getValue());
+ }
+
+ System.out.println("Plsr\tS0\tS1\tP0\tP1\tElastic");
+ for(Entry<String, Integer> entry : elasticBitMap.entrySet()) {
+ System.out.println(entry.getKey() + "\t" + entry.getValue());
+ }
+ }
+
+ private static final String getBitString(boolean s0, boolean s1, boolean p0, boolean p1, boolean pulser) {
+ return String.format("%d\t%d\t%d\t%d\t%d", (pulser ? 1 : 0), (s0 ? 1 : 0), (s1 ? 1 : 0), (p0 ? 1 : 0), (p1 ? 1 : 0));
+ }
+
+ @Override
+ public void process(EventHeader event) {
+ // Check whether the SVT was active in this event.
+ final String[] flagNames = { "svt_bias_good", "svt_burstmode_noise_good", "svt_position_good" };
+ boolean svtGood = true;
for(int i = 0; i < flagNames.length; i++) {
int[] flag = event.getIntegerParameters().get(flagNames[i]);
if(flag == null || flag[0] == 0) {
@@ -172,437 +172,437 @@
// If the SVT was bad, then skip the event.
if(!svtGood && skipBadSVT) {
- return;
- }
-
- if(event.hasCollection(ReconstructedParticle.class, particleCollectionName)) {
- // Get the list of tracks.
- List<ReconstructedParticle> trackList = event.get(ReconstructedParticle.class, particleCollectionName);
-
- // Plot the time stamps of all tracks.
- for(ReconstructedParticle track : trackList) {
- if(track.getClusters().size() != 0) {
- Cluster cluster = track.getClusters().get(0);
- timePlot.fill(cluster.getCalorimeterHits().get(0).getTime());
- }
- }
-
- if(verbose) {
- System.out.println(trackList.size() + " tracks found.");
- for(ReconstructedParticle track : trackList) {
- System.out.printf("Track :: Q = %4.1f; E = %6.3f%n",
- track.getCharge(), track.getEnergy());
- }
- }
-
- // Populate the all cluster plots.
- List<Cluster> topClusters = new ArrayList<Cluster>();
- List<Cluster> botClusters = new ArrayList<Cluster>();
- List<Cluster> clusters = event.get(Cluster.class, "EcalClusters");
- for(Cluster cluster : clusters) {
- allPlots.addCluster(cluster);
- if(cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy") > 0) { topClusters.add(cluster); }
- else { botClusters.add(cluster); }
- }
-
- // Make cluster pairs.
- List<Cluster[]> clusterPairs = new ArrayList<Cluster[]>();
- for(Cluster topCluster : topClusters) {
- for(Cluster botCluster : botClusters) {
- clusterPairs.add(new Cluster[] { topCluster, botCluster });
- }
- }
-
- // Populate the all cluster pair plots.
- for(Cluster[] pair : clusterPairs) {
- allPlots.addClusterPair(pair);
- }
-
- // Check each of the event-type conditions.
- boolean isMøller = false;
- boolean isTrident = false;
- boolean isElastic = false;
-
- // Produce all possible pairs of tracks.
- List<ReconstructedParticle[]> pairList = getTrackPairs(trackList);
-
- // Check the Møller condition. A Møller event is expected
- // to have two tracks, both negative, with a net energy
- // within a certain band of the beam energy.
- møllerTrackLoop:
- for(ReconstructedParticle[] pair : pairList) {
- // If trackless events are to be excluded, then require
- // that each "track" have a real track.
- if(excludeNoTrackEvents && (pair[0].getTracks().isEmpty() || pair[1].getTracks().isEmpty())) {
- continue møllerTrackLoop;
- }
-
- // Both tracks are required to be negatively charged.
- if(pair[0].getCharge() >= 0 || pair[1].getCharge() >= 0) {
- continue møllerTrackLoop;
- }
-
- // Both tracks must have clusters associated with them.
- Cluster[] trackClusters = new Cluster[2];
- for(int i = 0; i < 2; i++) {
- // Disallow tracks with no associated clusters.
- if(pair[i].getClusters().size() == 0) {
- continue møllerTrackLoop;
- }
-
- // Store the first cluster associated with the track.
- trackClusters[i] = pair[i].getClusters().get(0);
- }
-
- // Require that the track clusters be within a certain
- // time window of one another.
- CalorimeterHit[] seeds = new CalorimeterHit[2];
- seeds[0] = trackClusters[0].getCalorimeterHits().get(0);
- seeds[1] = trackClusters[1].getCalorimeterHits().get(0);
- timeCoincidencePlot.fill(Math.abs(seeds[0].getTime() - seeds[1].getTime()));
- if(Math.abs(trackClusters[0].getCalorimeterHits().get(0).getTime() - trackClusters[1].getCalorimeterHits().get(0).getTime()) > timeCoincidenceCut) {
- continue møllerTrackLoop;
- }
-
- // Require both tracks to occur within the range of
- // 36.5 and 49 ns.
- if(seeds[0].getTime() < 36.5 || seeds[0].getTime() > 49) {
- continue møllerTrackLoop;
- } if(seeds[1].getTime() < 36.5 || seeds[1].getTime() > 49) {
- continue møllerTrackLoop;
- }
-
- // No track may have an energy that exceeds 900 MeV.
- if(pair[0].getMomentum().magnitude() >= 0.900 || pair[1].getMomentum().magnitude() >= 0.900) {
- continue møllerTrackLoop;
- }
-
- // Get the energy sum.
- double sum = VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude();
-
- // "Møller-like" track pairs must have energies within
- // an allowed energy range.
- if(sum < 0.800 || sum > 1.500) {
- continue møllerTrackLoop;
- }
-
- //timeCoincidenceAllCutsPlot.fill(Math.abs(seeds[0].getTime() - seeds[1].getTime()));
-
- // Note that this is a Møller event.
- isMøller = true;
-
- // Populate the Møller plots.
- energyPlot[MÃLLER].fill(sum);
- møllerPlots.addClusterPair(trackClusters);
- electronPlot[MÃLLER].fill(pair[0].getMomentum().magnitude());
- electronPlot[MÃLLER].fill(pair[1].getMomentum().magnitude());
- energy2DPlot[MÃLLER].fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
- }
-
- // Check the elastic condition. Elastic events should be
- // negatively and have an energy approximately equal to
- // the beam energy.
- elasticTrackLoop:
- for(ReconstructedParticle track : trackList) {
- // If trackless events are to be excluded, then require
- // that the "track" has a real track.
- if(excludeNoTrackEvents && track.getTracks().isEmpty()) {
- continue elasticTrackLoop;
- }
-
- // Check the elastic condition.
- if(track.getCharge() < 0 && track.getMomentum().magnitude() >= 0.900) {
- isElastic = true;
- energyPlot[ELASTIC].fill(track.getMomentum().magnitude());
- if(!track.getClusters().isEmpty()) {
- elasticPlots.addCluster(track.getClusters().get(0));
- }
- }
- }
-
- // Check the trident condition. Tridents are events that
- // contain both one positive and one negative track.
- tridentTrackLoop:
- for(ReconstructedParticle[] pair : pairList) {
- // If trackless events are to be excluded, then require
- // that each "track" have a real track.
- if(pair[0].getTracks().isEmpty() || pair[1].getTracks().isEmpty()) {
- continue tridentTrackLoop;
- }
-
- // Require that all tridents consist of a positive and
- // negative pair.
- boolean isPosNeg = (pair[0].getCharge() < 0 && pair[1].getCharge() > 0) || (pair[0].getCharge() > 0 && pair[1].getCharge() < 0);
- if(!isPosNeg) { continue tridentTrackLoop; }
-
- // Both tracks must have clusters associated with them.
- Cluster[] trackClusters = new Cluster[pair.length];
- for(int i = 0; i < pair.length; i++) {
- // Disallow tracks with no associated clusters.
- if(pair[i].getClusters().size() == 0) {
- continue tridentTrackLoop;
- }
-
- // Store the first cluster associated with the track.
- trackClusters[i] = pair[i].getClusters().get(0);
- }
-
- // Make sure that the clusters are not the same.
- if(trackClusters[0] == trackClusters[1]) {
- continue tridentTrackLoop;
- }
-
- // Require that tridents also be a top/bottom pair.
- boolean isTopBot = (TriggerModule.getClusterYIndex(trackClusters[0]) > 0 && TriggerModule.getClusterYIndex(trackClusters[1]) < 0)
- || (TriggerModule.getClusterYIndex(trackClusters[0]) < 0 && TriggerModule.getClusterYIndex(trackClusters[1]) > 0);
- if(!isTopBot) {
- continue tridentTrackLoop;
- }
-
- // Require that the track clusters be within a certain
- // time window of one another.
- CalorimeterHit[] seeds = new CalorimeterHit[2];
- seeds[0] = trackClusters[0].getCalorimeterHits().get(0);
- seeds[1] = trackClusters[1].getCalorimeterHits().get(0);
- timeCoincidencePlot.fill(Math.abs(seeds[0].getTime() - seeds[1].getTime()));
- if(Math.abs(trackClusters[0].getCalorimeterHits().get(0).getTime() - trackClusters[1].getCalorimeterHits().get(0).getTime()) > timeCoincidenceCut) {
- continue tridentTrackLoop;
- }
-
- // Require that the energy of the electron is below
- // 900 MeV.
- boolean electronNotElastic = (pair[0].getCharge() < 0 && pair[0].getMomentum().magnitude() < 0.900)
- || (pair[1].getCharge() < 0 && pair[1].getMomentum().magnitude() < 0.900);
- if(!electronNotElastic) {
- continue tridentTrackLoop;
- }
-
- // If all tests are passed, this is a trident. Note
- // this and populate the trident plots.
- isTrident = true;
- tridentPlots.addClusterPair(trackClusters);
- if(pair[0].getCharge() > 0) {
- positronPlot.fill(pair[1].getMomentum().magnitude());
- electronPlot[TRIDENT].fill(pair[0].getMomentum().magnitude());
- } else {
- positronPlot.fill(pair[0].getMomentum().magnitude());
- electronPlot[TRIDENT].fill(pair[1].getMomentum().magnitude());
- }
- energyPlot[TRIDENT].fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude());
- energy2DPlot[TRIDENT].fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
-
- // Track which clusters have already been added to the
- // singles plot so that there are no repeats.
- Set<Cluster> plotSet = new HashSet<Cluster>();
- Set<Cluster> plotFiducial = new HashSet<Cluster>();
-
- // Fill the all pairs plots.
- double pairEnergy = trackClusters[0].getEnergy() + trackClusters[1].getEnergy();
- trEnergySumAll.fill(pairEnergy);
- trEnergySum2DAll.fill(trackClusters[1].getEnergy(), trackClusters[0].getEnergy());
- trTimeCoincidenceAll.fill(RafoAnalysis.getTimeConicidence(trackClusters));
- trSumCoplanarityCalcAll.fill(pairEnergy, getCalculatedCoplanarity(trackClusters));
- trSumCoplanarityAll.fill(pairEnergy, TriggerModule.getValueCoplanarity(trackClusters));
- trInvariantMassAll.fill(RafoAnalysis.getInvariantMass(pair));
-
- // Fill the singles plots.
- if(!plotSet.contains(trackClusters[0])) {
- plotSet.add(trackClusters[0]);
- trTimeEnergyAll.fill(trackClusters[0].getEnergy(), TriggerModule.getClusterTime(trackClusters[0]));
- } if(!plotSet.contains(trackClusters[1])) {
- plotSet.add(trackClusters[1]);
- trTimeEnergyAll.fill(trackClusters[1].getEnergy(), TriggerModule.getClusterTime(trackClusters[1]));
- }
-
- // Fill the fiducial plots if appropriate.
- if(inFiducialRegion(trackClusters[0]) && inFiducialRegion(trackClusters[1])) {
- trEnergySumFiducial.fill(pairEnergy);
- trEnergySum2DFiducial.fill(trackClusters[1].getEnergy(), trackClusters[0].getEnergy());
- trTimeCoincidenceFiducial.fill(RafoAnalysis.getTimeConicidence(trackClusters));
- trSumCoplanarityCalcFiducial.fill(pairEnergy, getCalculatedCoplanarity(trackClusters));
- trSumCoplanarityFiducial.fill(pairEnergy, TriggerModule.getValueCoplanarity(trackClusters));
- trInvariantMassFiducial.fill(RafoAnalysis.getInvariantMass(pair));
- }
-
- // Fill the singles fiducial plots if appropriate.
- if(!plotFiducial.contains(trackClusters[0]) && inFiducialRegion(trackClusters[0])) {
- plotFiducial.add(trackClusters[0]);
- trTimeEnergyFiducial.fill(trackClusters[0].getEnergy(), TriggerModule.getClusterTime(trackClusters[0]));
- } if(!plotFiducial.contains(trackClusters[1]) && inFiducialRegion(trackClusters[1])) {
- plotFiducial.add(trackClusters[1]);
- trTimeEnergyFiducial.fill(trackClusters[1].getEnergy(), TriggerModule.getClusterTime(trackClusters[1]));
- }
- }
-
- if(verbose) {
- System.out.printf("\tMøller :: %b%n", isMøller);
- System.out.printf("\tTrident :: %b%n", isTrident);
- System.out.printf("\tElastic :: %b%n", isElastic);
- System.out.println();
- }
-
- // Get the TI bits.
- String bitString = null;
- TIData tiBank = null;
- List<GenericObject> bankList = event.get(GenericObject.class, bankCollectionName);
- for(GenericObject obj : bankList) {
- if(AbstractIntData.getTag(obj) == TIData.BANK_TAG) {
- tiBank = new TIData(obj);
- bitString = getBitString(tiBank.isPulserTrigger(), tiBank.isSingle0Trigger(),
- tiBank.isSingle1Trigger(), tiBank.isPair0Trigger(), tiBank.isPair1Trigger());
-
- if(tiBank.isPair1Trigger()) {
- pair1Events++;
- } else if(tiBank.isPair0Trigger()) {
- pair0Events++;
- } else if(tiBank.isSingle1Trigger()) {
- singles1Events++;
- } else if(tiBank.isSingle0Trigger()) {
- singles0Events++;
- } else if(tiBank.isPulserTrigger()) {
- pulserEvents++;
- }
- }
- }
- if(bitString == null) {
- System.out.println("No TI data found!!");
- }
-
- // Get the number of charged tracks in the event.
- int tracks = 0;
- int posTracks = 0;
- int negTracks = 0;
- for(ReconstructedParticle track : trackList) {
- if(track.getCharge() != 0 && tiBank.isPulserTrigger()) {
- if(excludeNoTrackEvents && !track.getTracks().isEmpty()) {
- tracks++;
- if(track.getCharge() > 0) { posTracks++; }
- else { negTracks++; }
- } else {
- tracks++;
- if(track.getCharge() > 0) { posTracks++; }
- else { negTracks++; }
- }
- }
- }
-
- // Populate the "all tracks" plots.
- posTrackCount.fill(posTracks);
- negTrackCount.fill(negTracks);
- chargedTrackCount.fill(tracks);
-
- // Add the result to the appropriate plots and increment
- // the appropriate trigger bit combination.
- if(isMøller) {
- møllerEvents++;
- chargedTracksPlot[MÃLLER].fill(tracks);
- clusterCountPlot[MÃLLER].fill(clusters.size());
-
- Integer val = møllerBitMap.get(bitString);
- if(val == null) { møllerBitMap.put(bitString, 1); }
- else { møllerBitMap.put(bitString, val + 1); }
- } else if(isTrident) {
- tridentEvents++;
- chargedTracksPlot[TRIDENT].fill(tracks);
- clusterCountPlot[TRIDENT].fill(clusters.size());
-
- Integer val = tridentBitMap.get(bitString);
- if(val == null) { tridentBitMap.put(bitString, 1); }
- else { tridentBitMap.put(bitString, val + 1); }
- } else if(isElastic) {
- elasticEvents++;
- chargedTracksPlot[ELASTIC].fill(tracks);
- clusterCountPlot[ELASTIC].fill(clusters.size());
-
- Integer val = elasticBitMap.get(bitString);
- if(val == null) { elasticBitMap.put(bitString, 1); }
- else { elasticBitMap.put(bitString, val + 1); }
- }
- totalEvents++;
- }
- }
-
- private static final double getCalculatedCoplanarity(Cluster[] pair) {
- // Define the x- and y-coordinates of the clusters as well as
- // calorimeter center.
- final double ORIGIN_X = 42.52;
- double x[] = { pair[0].getPosition()[0], pair[1].getPosition()[0] };
- double y[] = { pair[0].getPosition()[1], pair[1].getPosition()[1] };
-
+ return;
+ }
+
+ if(event.hasCollection(ReconstructedParticle.class, particleCollectionName)) {
+ // Get the list of tracks.
+ List<ReconstructedParticle> trackList = event.get(ReconstructedParticle.class, particleCollectionName);
+
+ // Plot the time stamps of all tracks.
+ for(ReconstructedParticle track : trackList) {
+ if(track.getClusters().size() != 0) {
+ Cluster cluster = track.getClusters().get(0);
+ timePlot.fill(cluster.getCalorimeterHits().get(0).getTime());
+ }
+ }
+
+ if(verbose) {
+ System.out.println(trackList.size() + " tracks found.");
+ for(ReconstructedParticle track : trackList) {
+ System.out.printf("Track :: Q = %4.1f; E = %6.3f%n",
+ track.getCharge(), track.getEnergy());
+ }
+ }
+
+ // Populate the all cluster plots.
+ List<Cluster> topClusters = new ArrayList<Cluster>();
+ List<Cluster> botClusters = new ArrayList<Cluster>();
+ List<Cluster> clusters = event.get(Cluster.class, "EcalClusters");
+ for(Cluster cluster : clusters) {
+ allPlots.addCluster(cluster);
+ if(cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy") > 0) { topClusters.add(cluster); }
+ else { botClusters.add(cluster); }
+ }
+
+ // Make cluster pairs.
+ List<Cluster[]> clusterPairs = new ArrayList<Cluster[]>();
+ for(Cluster topCluster : topClusters) {
+ for(Cluster botCluster : botClusters) {
+ clusterPairs.add(new Cluster[] { topCluster, botCluster });
+ }
+ }
+
+ // Populate the all cluster pair plots.
+ for(Cluster[] pair : clusterPairs) {
+ allPlots.addClusterPair(pair);
+ }
+
+ // Check each of the event-type conditions.
+ boolean isMøller = false;
+ boolean isTrident = false;
+ boolean isElastic = false;
+
+ // Produce all possible pairs of tracks.
+ List<ReconstructedParticle[]> pairList = getTrackPairs(trackList);
+
+ // Check the Møller condition. A Møller event is expected
+ // to have two tracks, both negative, with a net energy
+ // within a certain band of the beam energy.
+ møllerTrackLoop:
+ for(ReconstructedParticle[] pair : pairList) {
+ // If trackless events are to be excluded, then require
+ // that each "track" have a real track.
+ if(excludeNoTrackEvents && (pair[0].getTracks().isEmpty() || pair[1].getTracks().isEmpty())) {
+ continue møllerTrackLoop;
+ }
+
+ // Both tracks are required to be negatively charged.
+ if(pair[0].getCharge() >= 0 || pair[1].getCharge() >= 0) {
+ continue møllerTrackLoop;
+ }
+
+ // Both tracks must have clusters associated with them.
+ Cluster[] trackClusters = new Cluster[2];
+ for(int i = 0; i < 2; i++) {
+ // Disallow tracks with no associated clusters.
+ if(pair[i].getClusters().size() == 0) {
+ continue møllerTrackLoop;
+ }
+
+ // Store the first cluster associated with the track.
+ trackClusters[i] = pair[i].getClusters().get(0);
+ }
+
+ // Require that the track clusters be within a certain
+ // time window of one another.
+ CalorimeterHit[] seeds = new CalorimeterHit[2];
+ seeds[0] = trackClusters[0].getCalorimeterHits().get(0);
+ seeds[1] = trackClusters[1].getCalorimeterHits().get(0);
+ timeCoincidencePlot.fill(Math.abs(seeds[0].getTime() - seeds[1].getTime()));
+ if(Math.abs(trackClusters[0].getCalorimeterHits().get(0).getTime() - trackClusters[1].getCalorimeterHits().get(0).getTime()) > timeCoincidenceCut) {
+ continue møllerTrackLoop;
+ }
+
+ // Require both tracks to occur within the range of
+ // 36.5 and 49 ns.
+ if(seeds[0].getTime() < 36.5 || seeds[0].getTime() > 49) {
+ continue møllerTrackLoop;
+ } if(seeds[1].getTime() < 36.5 || seeds[1].getTime() > 49) {
+ continue møllerTrackLoop;
+ }
+
+ // No track may have an energy that exceeds 900 MeV.
+ if(pair[0].getMomentum().magnitude() >= 0.900 || pair[1].getMomentum().magnitude() >= 0.900) {
+ continue møllerTrackLoop;
+ }
+
+ // Get the energy sum.
+ double sum = VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude();
+
+ // "Møller-like" track pairs must have energies within
+ // an allowed energy range.
+ if(sum < 0.800 || sum > 1.500) {
+ continue møllerTrackLoop;
+ }
+
+ //timeCoincidenceAllCutsPlot.fill(Math.abs(seeds[0].getTime() - seeds[1].getTime()));
+
+ // Note that this is a Møller event.
+ isMøller = true;
+
+ // Populate the Møller plots.
+ energyPlot[MÃLLER].fill(sum);
+ møllerPlots.addClusterPair(trackClusters);
+ electronPlot[MÃLLER].fill(pair[0].getMomentum().magnitude());
+ electronPlot[MÃLLER].fill(pair[1].getMomentum().magnitude());
+ energy2DPlot[MÃLLER].fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
+ }
+
+ // Check the elastic condition. Elastic events should be
+ // negatively and have an energy approximately equal to
+ // the beam energy.
+ elasticTrackLoop:
+ for(ReconstructedParticle track : trackList) {
+ // If trackless events are to be excluded, then require
+ // that the "track" has a real track.
+ if(excludeNoTrackEvents && track.getTracks().isEmpty()) {
+ continue elasticTrackLoop;
+ }
+
+ // Check the elastic condition.
+ if(track.getCharge() < 0 && track.getMomentum().magnitude() >= 0.900) {
+ isElastic = true;
+ energyPlot[ELASTIC].fill(track.getMomentum().magnitude());
+ if(!track.getClusters().isEmpty()) {
+ elasticPlots.addCluster(track.getClusters().get(0));
+ }
+ }
+ }
+
+ // Check the trident condition. Tridents are events that
+ // contain both one positive and one negative track.
+ tridentTrackLoop:
+ for(ReconstructedParticle[] pair : pairList) {
+ // If trackless events are to be excluded, then require
+ // that each "track" have a real track.
+ if(pair[0].getTracks().isEmpty() || pair[1].getTracks().isEmpty()) {
+ continue tridentTrackLoop;
+ }
+
+ // Require that all tridents consist of a positive and
+ // negative pair.
+ boolean isPosNeg = (pair[0].getCharge() < 0 && pair[1].getCharge() > 0) || (pair[0].getCharge() > 0 && pair[1].getCharge() < 0);
+ if(!isPosNeg) { continue tridentTrackLoop; }
+
+ // Both tracks must have clusters associated with them.
+ Cluster[] trackClusters = new Cluster[pair.length];
+ for(int i = 0; i < pair.length; i++) {
+ // Disallow tracks with no associated clusters.
+ if(pair[i].getClusters().size() == 0) {
+ continue tridentTrackLoop;
+ }
+
+ // Store the first cluster associated with the track.
+ trackClusters[i] = pair[i].getClusters().get(0);
+ }
+
+ // Make sure that the clusters are not the same.
+ if(trackClusters[0] == trackClusters[1]) {
+ continue tridentTrackLoop;
+ }
+
+ // Require that tridents also be a top/bottom pair.
+ boolean isTopBot = (TriggerModule.getClusterYIndex(trackClusters[0]) > 0 && TriggerModule.getClusterYIndex(trackClusters[1]) < 0)
+ || (TriggerModule.getClusterYIndex(trackClusters[0]) < 0 && TriggerModule.getClusterYIndex(trackClusters[1]) > 0);
+ if(!isTopBot) {
+ continue tridentTrackLoop;
+ }
+
+ // Require that the track clusters be within a certain
+ // time window of one another.
+ CalorimeterHit[] seeds = new CalorimeterHit[2];
+ seeds[0] = trackClusters[0].getCalorimeterHits().get(0);
+ seeds[1] = trackClusters[1].getCalorimeterHits().get(0);
+ timeCoincidencePlot.fill(Math.abs(seeds[0].getTime() - seeds[1].getTime()));
+ if(Math.abs(trackClusters[0].getCalorimeterHits().get(0).getTime() - trackClusters[1].getCalorimeterHits().get(0).getTime()) > timeCoincidenceCut) {
+ continue tridentTrackLoop;
+ }
+
+ // Require that the energy of the electron is below
+ // 900 MeV.
+ boolean electronNotElastic = (pair[0].getCharge() < 0 && pair[0].getMomentum().magnitude() < 0.900)
+ || (pair[1].getCharge() < 0 && pair[1].getMomentum().magnitude() < 0.900);
+ if(!electronNotElastic) {
+ continue tridentTrackLoop;
+ }
+
+ // If all tests are passed, this is a trident. Note
+ // this and populate the trident plots.
+ isTrident = true;
+ tridentPlots.addClusterPair(trackClusters);
+ if(pair[0].getCharge() > 0) {
+ positronPlot.fill(pair[1].getMomentum().magnitude());
+ electronPlot[TRIDENT].fill(pair[0].getMomentum().magnitude());
+ } else {
+ positronPlot.fill(pair[0].getMomentum().magnitude());
+ electronPlot[TRIDENT].fill(pair[1].getMomentum().magnitude());
+ }
+ energyPlot[TRIDENT].fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude());
+ energy2DPlot[TRIDENT].fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
+
+ // Track which clusters have already been added to the
+ // singles plot so that there are no repeats.
+ Set<Cluster> plotSet = new HashSet<Cluster>();
+ Set<Cluster> plotFiducial = new HashSet<Cluster>();
+
+ // Fill the all pairs plots.
+ double pairEnergy = trackClusters[0].getEnergy() + trackClusters[1].getEnergy();
+ trEnergySumAll.fill(pairEnergy);
+ trEnergySum2DAll.fill(trackClusters[1].getEnergy(), trackClusters[0].getEnergy());
+ trTimeCoincidenceAll.fill(RafoAnalysis.getTimeConicidence(trackClusters));
+ trSumCoplanarityCalcAll.fill(pairEnergy, getCalculatedCoplanarity(trackClusters));
+ trSumCoplanarityAll.fill(pairEnergy, TriggerModule.getValueCoplanarity(trackClusters));
+ trInvariantMassAll.fill(RafoAnalysis.getInvariantMass(pair));
+
+ // Fill the singles plots.
+ if(!plotSet.contains(trackClusters[0])) {
+ plotSet.add(trackClusters[0]);
+ trTimeEnergyAll.fill(trackClusters[0].getEnergy(), TriggerModule.getClusterTime(trackClusters[0]));
+ } if(!plotSet.contains(trackClusters[1])) {
+ plotSet.add(trackClusters[1]);
+ trTimeEnergyAll.fill(trackClusters[1].getEnergy(), TriggerModule.getClusterTime(trackClusters[1]));
+ }
+
+ // Fill the fiducial plots if appropriate.
+ if(inFiducialRegion(trackClusters[0]) && inFiducialRegion(trackClusters[1])) {
+ trEnergySumFiducial.fill(pairEnergy);
+ trEnergySum2DFiducial.fill(trackClusters[1].getEnergy(), trackClusters[0].getEnergy());
+ trTimeCoincidenceFiducial.fill(RafoAnalysis.getTimeConicidence(trackClusters));
+ trSumCoplanarityCalcFiducial.fill(pairEnergy, getCalculatedCoplanarity(trackClusters));
+ trSumCoplanarityFiducial.fill(pairEnergy, TriggerModule.getValueCoplanarity(trackClusters));
+ trInvariantMassFiducial.fill(RafoAnalysis.getInvariantMass(pair));
+ }
+
+ // Fill the singles fiducial plots if appropriate.
+ if(!plotFiducial.contains(trackClusters[0]) && inFiducialRegion(trackClusters[0])) {
+ plotFiducial.add(trackClusters[0]);
+ trTimeEnergyFiducial.fill(trackClusters[0].getEnergy(), TriggerModule.getClusterTime(trackClusters[0]));
+ } if(!plotFiducial.contains(trackClusters[1]) && inFiducialRegion(trackClusters[1])) {
+ plotFiducial.add(trackClusters[1]);
+ trTimeEnergyFiducial.fill(trackClusters[1].getEnergy(), TriggerModule.getClusterTime(trackClusters[1]));
+ }
+ }
+
+ if(verbose) {
+ System.out.printf("\tMøller :: %b%n", isMøller);
+ System.out.printf("\tTrident :: %b%n", isTrident);
+ System.out.printf("\tElastic :: %b%n", isElastic);
+ System.out.println();
+ }
+
+ // Get the TI bits.
+ String bitString = null;
+ TIData tiBank = null;
+ List<GenericObject> bankList = event.get(GenericObject.class, bankCollectionName);
+ for(GenericObject obj : bankList) {
+ if(AbstractIntData.getTag(obj) == TIData.BANK_TAG) {
+ tiBank = new TIData(obj);
+ bitString = getBitString(tiBank.isPulserTrigger(), tiBank.isSingle0Trigger(),
+ tiBank.isSingle1Trigger(), tiBank.isPair0Trigger(), tiBank.isPair1Trigger());
+
+ if(tiBank.isPair1Trigger()) {
+ pair1Events++;
+ } else if(tiBank.isPair0Trigger()) {
+ pair0Events++;
+ } else if(tiBank.isSingle1Trigger()) {
+ singles1Events++;
+ } else if(tiBank.isSingle0Trigger()) {
+ singles0Events++;
+ } else if(tiBank.isPulserTrigger()) {
+ pulserEvents++;
+ }
+ }
+ }
+ if(bitString == null) {
+ System.out.println("No TI data found!!");
+ }
+
+ // Get the number of charged tracks in the event.
+ int tracks = 0;
+ int posTracks = 0;
+ int negTracks = 0;
+ for(ReconstructedParticle track : trackList) {
+ if(track.getCharge() != 0 && tiBank.isPulserTrigger()) {
+ if(excludeNoTrackEvents && !track.getTracks().isEmpty()) {
+ tracks++;
+ if(track.getCharge() > 0) { posTracks++; }
+ else { negTracks++; }
+ } else {
+ tracks++;
+ if(track.getCharge() > 0) { posTracks++; }
+ else { negTracks++; }
+ }
+ }
+ }
+
+ // Populate the "all tracks" plots.
+ posTrackCount.fill(posTracks);
+ negTrackCount.fill(negTracks);
+ chargedTrackCount.fill(tracks);
+
+ // Add the result to the appropriate plots and increment
+ // the appropriate trigger bit combination.
+ if(isMøller) {
+ møllerEvents++;
+ chargedTracksPlot[MÃLLER].fill(tracks);
+ clusterCountPlot[MÃLLER].fill(clusters.size());
+
+ Integer val = møllerBitMap.get(bitString);
+ if(val == null) { møllerBitMap.put(bitString, 1); }
+ else { møllerBitMap.put(bitString, val + 1); }
+ } else if(isTrident) {
+ tridentEvents++;
+ chargedTracksPlot[TRIDENT].fill(tracks);
+ clusterCountPlot[TRIDENT].fill(clusters.size());
+
+ Integer val = tridentBitMap.get(bitString);
+ if(val == null) { tridentBitMap.put(bitString, 1); }
+ else { tridentBitMap.put(bitString, val + 1); }
+ } else if(isElastic) {
+ elasticEvents++;
+ chargedTracksPlot[ELASTIC].fill(tracks);
+ clusterCountPlot[ELASTIC].fill(clusters.size());
+
+ Integer val = elasticBitMap.get(bitString);
+ if(val == null) { elasticBitMap.put(bitString, 1); }
+ else { elasticBitMap.put(bitString, val + 1); }
+ }
+ totalEvents++;
+ }
+ }
+
+ private static final double getCalculatedCoplanarity(Cluster[] pair) {
+ // Define the x- and y-coordinates of the clusters as well as
+ // calorimeter center.
+ final double ORIGIN_X = 42.52;
+ double x[] = { pair[0].getPosition()[0], pair[1].getPosition()[0] };
+ double y[] = { pair[0].getPosition()[1], pair[1].getPosition()[1] };
+
// Get the cluster angles.
double[] clusterAngle = new double[2];
for(int i = 0; i < 2; i++) {
- clusterAngle[i] = Math.atan2(y[i], x[i] - ORIGIN_X) * 180 / Math.PI;
- if(clusterAngle[i] <= 0) { clusterAngle[i] += 360; }
+ clusterAngle[i] = Math.atan2(y[i], x[i] - ORIGIN_X) * 180 / Math.PI;
+ if(clusterAngle[i] <= 0) { clusterAngle[i] += 360; }
}
// Calculate the coplanarity cut value.
double clusterDiff = clusterAngle[0] - clusterAngle[1];
return clusterDiff > 0 ? clusterDiff : clusterDiff + 360;
- }
-
- public void setTimeCoincidenceCut(double value) {
- timeCoincidenceCut = value;
- }
-
- public void setExcludeNoTrackEvents(boolean state) {
- excludeNoTrackEvents = state;
- }
-
- public void setSkipBadSVT(boolean state) {
- skipBadSVT = state;
- }
-
- private static final boolean inFiducialRegion(Cluster cluster) {
- // Get the x and y indices for the cluster.
- int ix = TriggerModule.getClusterXIndex(cluster);
- int absx = Math.abs(TriggerModule.getClusterXIndex(cluster));
- int absy = Math.abs(TriggerModule.getClusterYIndex(cluster));
-
- // Check if the cluster is on the top or the bottom of the
- // calorimeter, as defined by |y| == 5. This is an edge cluster
- // and is not in the fiducial region.
- if(absy == 5) {
- return false;
- }
-
- // Check if the cluster is on the extreme left or right side
- // of the calorimeter, as defined by |x| == 23. This is also
- // an edge cluster and is not in the fiducial region.
- if(absx == 23) {
- return false;
- }
-
- // Check if the cluster is along the beam gap, as defined by
- // |y| == 1. This is an internal edge cluster and is not in the
- // fiducial region.
- if(absy == 1) {
- return false;
- }
-
- // Lastly, check if the cluster falls along the beam hole, as
- // defined by clusters with -11 <= x <= -1 and |y| == 2. This
- // is not the fiducial region.
- if(absy == 2 && ix <= -1 && ix >= -11) {
- return false;
- }
-
- // If all checks fail, the cluster is in the fiducial region.
- return true;
- }
-
- private static final List<ReconstructedParticle[]> getTrackPairs(List<ReconstructedParticle> trackList) {
- // Create an empty list for the pairs.
- List<ReconstructedParticle[]> pairs = new ArrayList<ReconstructedParticle[]>();
-
- // Add all possible pairs of tracks.
- for(int i = 0; i < trackList.size(); i++) {
- for(int j = i + 1; j < trackList.size(); j++) {
- pairs.add(new ReconstructedParticle[] { trackList.get(i), trackList.get(j) });
- }
- }
-
- // Return the list of tracks.
- return pairs;
- }
+ }
+
+ public void setTimeCoincidenceCut(double value) {
+ timeCoincidenceCut = value;
+ }
+
+ public void setExcludeNoTrackEvents(boolean state) {
+ excludeNoTrackEvents = state;
+ }
+
+ public void setSkipBadSVT(boolean state) {
+ skipBadSVT = state;
+ }
+
+ private static final boolean inFiducialRegion(Cluster cluster) {
+ // Get the x and y indices for the cluster.
+ int ix = TriggerModule.getClusterXIndex(cluster);
+ int absx = Math.abs(TriggerModule.getClusterXIndex(cluster));
+ int absy = Math.abs(TriggerModule.getClusterYIndex(cluster));
+
+ // Check if the cluster is on the top or the bottom of the
+ // calorimeter, as defined by |y| == 5. This is an edge cluster
+ // and is not in the fiducial region.
+ if(absy == 5) {
+ return false;
+ }
+
+ // Check if the cluster is on the extreme left or right side
+ // of the calorimeter, as defined by |x| == 23. This is also
+ // an edge cluster and is not in the fiducial region.
+ if(absx == 23) {
+ return false;
+ }
+
+ // Check if the cluster is along the beam gap, as defined by
+ // |y| == 1. This is an internal edge cluster and is not in the
+ // fiducial region.
+ if(absy == 1) {
+ return false;
+ }
+
+ // Lastly, check if the cluster falls along the beam hole, as
+ // defined by clusters with -11 <= x <= -1 and |y| == 2. This
+ // is not the fiducial region.
+ if(absy == 2 && ix <= -1 && ix >= -11) {
+ return false;
+ }
+
+ // If all checks fail, the cluster is in the fiducial region.
+ return true;
+ }
+
+ private static final List<ReconstructedParticle[]> getTrackPairs(List<ReconstructedParticle> trackList) {
+ // Create an empty list for the pairs.
+ List<ReconstructedParticle[]> pairs = new ArrayList<ReconstructedParticle[]>();
+
+ // Add all possible pairs of tracks.
+ for(int i = 0; i < trackList.size(); i++) {
+ for(int j = i + 1; j < trackList.size(); j++) {
+ pairs.add(new ReconstructedParticle[] { trackList.get(i), trackList.get(j) });
+ }
+ }
+
+ // Return the list of tracks.
+ return pairs;
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/MTETriggerPlotsFormatter.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/MTETriggerPlotsFormatter.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/MTETriggerPlotsFormatter.java Wed Apr 27 11:11:32 2016
@@ -15,164 +15,164 @@
public class MTETriggerPlotsFormatter {
- public static void main(String[] args) throws IllegalArgumentException, IOException {
- // Define the root directory for the plots.
- String rootDir = "D:\\cygwin64\\home\\Kyle\\";
-
- // Define the new name of the file containing the trigger plots.
- String plotFile = rootDir + "5772-ana.aida";
-
- // Get the plots file and open it.
- IAnalysisFactory af = IAnalysisFactory.create();
- ITree tree = af.createTreeFactory().create(plotFile);
- if(tree == null) { throw new IllegalArgumentException("Unable to load plot file."); }
-
- // Define the 1D trigger plot names for Møllers and tridents.
- String[] plotNames1D = { "Cluster Hit Count", "Cluster Seed Energy", "Cluster Total Energy",
- "Pair Coplanarity", "Pair Energy Difference", "Pair Energy Slope", "Pair Energy Sum" };
- String[] displayNames1D = { "Cluster Hit Count", "Cluster Seed Energy", "Cluster Total Energy",
- "Pair Coplanarity", "Pair Energy Difference", "Pair Energy Slope", "Pair Energy Sum" };
- String[] xAxisNames1D = { "Hit Count", "Seed Energy (GeV)", "Total Energy (GeV)",
- "Coplanarity (Degrees)", "Energy Difference (GeV)", "Energy Slope (GeV)", "Energy Sum (GeV)" };
- String yAxisName1D = "Count";
-
- // Define the 2D trigger plot names for Møllers and tridents.
- String[] plotNames2D = { "Cluster Seed", "Pair Energy Sum 2D" };
- String[] displayNames2D = { "Cluster Seed Distribution", "2D Energy Sum" };
- String[] xAxisNames2D = { "x-Index", "Second Cluster Energy (GeV)" };
- String[] yAxisNames2D = { "y-Index", "First Cluster Energy (GeV)" };
-
- // Define the 1D trigger plot names for elastics.
- String[] plotNamesElastic1D = { "Cluster Hit Count", "Cluster Seed Energy", "Cluster Total Energy" };
- String[] displayNamesElastic1D = { "Cluster Hit Count", "Cluster Seed Energy", "Cluster Total Energy" };
- String[] xAxisNamesElastic1D = { "Hit Count", "Seed Energy (GeV)", "Total Energy (GeV)" };
- String yAxisNameElastic1D = "Count";
-
- // Define the 2D trigger plot names for elastics.
- String[] plotNamesElastic2D = { "Cluster Seed" };
- String[] displayNamesElastic2D = { "Cluster Seed Distribution" };
- String[] xAxisNamesElastic2D = { "x-Index" };
- String[] yAxisNamesElastic2D = { "y-Index" };
-
- // Define the Møller, trident, and elastic prefixes.
- String allPrefix = "All Trigger Plots/Pair Plots/";
- String møllerPrefix = "Møller Trigger Plots/Pair Plots/";
- String tridentPrefix = "Trident Trigger Plots/Pair Plots/";
- String elasticPrefix = "Elastic Trigger Plots/Singles Plots/";
- String allSinglesPrefix = "All Trigger Plots/Singles Plots/";
-
- // Define the plot type prefix.
- String allTypeName = "All Pairs - ";
- String møllerTypeName = "Møller - ";
- String tridentTypeName = "Trident - ";
- String elasticTypeName = "Elastic - ";
- String allSinglesTypeName = "All Singles - ";
-
- // Define the plot type colors.
- ColorStyle allColor = PlotsFormatter.ColorStyle.GREY;
- ColorStyle møllerColor = PlotsFormatter.ColorStyle.MS_BLUE;
- ColorStyle tridentColor = PlotsFormatter.ColorStyle.MS_ORANGE;
- ColorStyle elasticColor = PlotsFormatter.ColorStyle.MS_GREEN;
-
- // Create a plot formatting module.
- PlotFormatModule module = new PlotFormatModule();
-
- // Get the histograms and add them to the module. Start with the
- // trident and Møller plots.
- for(int i = 0; i < plotNames1D.length; i++) {
- // Get the Møller and trident plots.
- IHistogram1D allPlot = (IHistogram1D) tree.find(allPrefix + plotNames1D[i]);
- IHistogram1D møllerPlot = (IHistogram1D) tree.find(møllerPrefix + plotNames1D[i]);
- IHistogram1D tridentPlot = (IHistogram1D) tree.find(tridentPrefix + plotNames1D[i]);
-
- // Make a formatted plot for each.
- FormattedPlot1D allFormattedPlot = new FormattedPlot1D(allPlot, allColor, xAxisNames1D[i], yAxisName1D, allTypeName + displayNames1D[i]);
- FormattedPlot1D møllerFormattedPlot = new FormattedPlot1D(møllerPlot, møllerColor, xAxisNames1D[i], yAxisName1D, møllerTypeName + displayNames1D[i]);
- FormattedPlot1D tridentFormattedPlot = new FormattedPlot1D(tridentPlot, tridentColor, xAxisNames1D[i], yAxisName1D, tridentTypeName + displayNames1D[i]);
-
- // Add them to the module.
- module.addPlot1D(allFormattedPlot);
- module.addPlot1D(møllerFormattedPlot);
- module.addPlot1D(tridentFormattedPlot);
- }
- for(int i = 0; i < plotNames2D.length; i++) {
- // Get the Møller and trident plots.
- IHistogram2D allPlot = (IHistogram2D) tree.find(allPrefix + plotNames2D[i]);
- IHistogram2D møllerPlot = (IHistogram2D) tree.find(møllerPrefix + plotNames2D[i]);
- IHistogram2D tridentPlot = (IHistogram2D) tree.find(tridentPrefix + plotNames2D[i]);
-
- // Make a formatted plot for each.
- FormattedPlot2D allFormattedPlot = new FormattedPlot2D(allPlot, i == 0 ? true : false, xAxisNames2D[i], yAxisNames2D[i], allTypeName + displayNames2D[i]);
- FormattedPlot2D møllerFormattedPlot = new FormattedPlot2D(møllerPlot, i == 0 ? true : false, xAxisNames2D[i], yAxisNames2D[i], møllerTypeName + displayNames2D[i]);
- FormattedPlot2D tridentFormattedPlot = new FormattedPlot2D(tridentPlot, i == 0 ? true : false, xAxisNames2D[i], yAxisNames2D[i], tridentTypeName + displayNames2D[i]);
-
- // Add them to the module.
- module.addPlot2D(allFormattedPlot);
- module.addPlot2D(møllerFormattedPlot);
- module.addPlot2D(tridentFormattedPlot);
- }
-
- // Get the histograms for the elastic plots and add them to the module.
- for(int i = 0; i < plotNamesElastic1D.length; i++) {
- // Get the Møller and trident plots.
- IHistogram1D allPlot = (IHistogram1D) tree.find(allSinglesPrefix + plotNames1D[i]);
- IHistogram1D elasticPlot = (IHistogram1D) tree.find(elasticPrefix + plotNames1D[i]);
-
- // Make a formatted plot for each.
- FormattedPlot1D allFormattedPlot = new FormattedPlot1D(allPlot, allColor, xAxisNamesElastic1D[i], yAxisNameElastic1D,
- allSinglesTypeName + displayNamesElastic1D[i]);
- FormattedPlot1D elasticFormattedPlot = new FormattedPlot1D(elasticPlot, elasticColor, xAxisNamesElastic1D[i], yAxisNameElastic1D,
- elasticTypeName + displayNamesElastic1D[i]);
-
- // Add them to the module.
- module.addPlot1D(allFormattedPlot);
- module.addPlot1D(elasticFormattedPlot);
- }
- for(int i = 0; i < plotNamesElastic2D.length; i++) {
- // Get the Møller and trident plots.
- IHistogram2D allPlot = (IHistogram2D) tree.find(allPrefix + plotNamesElastic2D[i]);
- IHistogram2D elasticPlot = (IHistogram2D) tree.find(møllerPrefix + plotNamesElastic2D[i]);
-
- // Make a formatted plot for each.
- FormattedPlot2D allFormattedPlot = new FormattedPlot2D(allPlot, i == 0 ? true : false, xAxisNamesElastic2D[i], yAxisNamesElastic2D[i],
- allSinglesTypeName + plotNames2D[i]);
- FormattedPlot2D elasticFormattedPlot = new FormattedPlot2D(elasticPlot, i == 0 ? true : false, xAxisNamesElastic2D[i], yAxisNamesElastic2D[i],
- elasticTypeName + displayNamesElastic2D[i]);
-
- // Add them to the module.
- module.addPlot2D(allFormattedPlot);
- module.addPlot2D(elasticFormattedPlot);
- }
-
- // Add the MTE plots to the module.
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Elastic Energy Distribution"), elasticColor,
- "Momentum (GeV)", "Count", "Elastic - Momentum"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Elastic Event Tracks"), elasticColor,
- "Tracks", "Count", "Elastic - Tracks in Event"));
-
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Møller Energy Sum Distribution"), møllerColor,
- "Momentum Sum (GeV)", "Count", "Møller - Momentum Sum"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Møller Electron Energy Distribution"), møllerColor,
- "Momentum (GeV)", "Count", "Møller - Momentum (Electron)"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Møller Time Coincidence Distribution (All Møller Cuts)"), møllerColor,
- "Time (ns)", "Count", "Møller - Time Coincidence"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Møller Event Tracks"), møllerColor,
- "Tracks", "Count", "Møller - Tracks in Event"));
- module.addPlot2D(new FormattedPlot2D((IHistogram2D) tree.find("MTE Analysis/Møller 2D Energy Distribution"), false,
- "First Track Momentum (GeV)", "Second Track Momentum (GeV)", "Møller - 2D Momentum Sum"));
-
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Trident Energy Sum Distribution"), tridentColor,
- "Momentum Sum (GeV)", "Count", "Trident - Momentum Sum"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Trident Electron Energy Distribution"), tridentColor,
- "Momentum (GeV)", "Count", "Trident - Momentum (Electron)"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Trident Positron Energy Distribution"), tridentColor,
- "Momentum (GeV)", "Count", "Trident - Momentum (Positron)"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Trident Event Tracks"), tridentColor,
- "Tracks", "Count", "Trident - Tracks in Event"));
- module.addPlot2D(new FormattedPlot2D((IHistogram2D) tree.find("MTE Analysis/Trident 2D Energy Distribution"), false,
- "First Track Momentum (GeV)", "Second Track Momentum (GeV)", "Trident - 2D Momentum Sum"));
-
- // Display the plots.
- module.savePlots("C:\\Users\\Kyle\\Desktop\\EnergyShift\\TestPrint\\");
- }
+ public static void main(String[] args) throws IllegalArgumentException, IOException {
+ // Define the root directory for the plots.
+ String rootDir = "D:\\cygwin64\\home\\Kyle\\";
+
+ // Define the new name of the file containing the trigger plots.
+ String plotFile = rootDir + "5772-ana.aida";
+
+ // Get the plots file and open it.
+ IAnalysisFactory af = IAnalysisFactory.create();
+ ITree tree = af.createTreeFactory().create(plotFile);
+ if(tree == null) { throw new IllegalArgumentException("Unable to load plot file."); }
+
+ // Define the 1D trigger plot names for Møllers and tridents.
+ String[] plotNames1D = { "Cluster Hit Count", "Cluster Seed Energy", "Cluster Total Energy",
+ "Pair Coplanarity", "Pair Energy Difference", "Pair Energy Slope", "Pair Energy Sum" };
+ String[] displayNames1D = { "Cluster Hit Count", "Cluster Seed Energy", "Cluster Total Energy",
+ "Pair Coplanarity", "Pair Energy Difference", "Pair Energy Slope", "Pair Energy Sum" };
+ String[] xAxisNames1D = { "Hit Count", "Seed Energy (GeV)", "Total Energy (GeV)",
+ "Coplanarity (Degrees)", "Energy Difference (GeV)", "Energy Slope (GeV)", "Energy Sum (GeV)" };
+ String yAxisName1D = "Count";
+
+ // Define the 2D trigger plot names for Møllers and tridents.
+ String[] plotNames2D = { "Cluster Seed", "Pair Energy Sum 2D" };
+ String[] displayNames2D = { "Cluster Seed Distribution", "2D Energy Sum" };
+ String[] xAxisNames2D = { "x-Index", "Second Cluster Energy (GeV)" };
+ String[] yAxisNames2D = { "y-Index", "First Cluster Energy (GeV)" };
+
+ // Define the 1D trigger plot names for elastics.
+ String[] plotNamesElastic1D = { "Cluster Hit Count", "Cluster Seed Energy", "Cluster Total Energy" };
+ String[] displayNamesElastic1D = { "Cluster Hit Count", "Cluster Seed Energy", "Cluster Total Energy" };
+ String[] xAxisNamesElastic1D = { "Hit Count", "Seed Energy (GeV)", "Total Energy (GeV)" };
+ String yAxisNameElastic1D = "Count";
+
+ // Define the 2D trigger plot names for elastics.
+ String[] plotNamesElastic2D = { "Cluster Seed" };
+ String[] displayNamesElastic2D = { "Cluster Seed Distribution" };
+ String[] xAxisNamesElastic2D = { "x-Index" };
+ String[] yAxisNamesElastic2D = { "y-Index" };
+
+ // Define the Møller, trident, and elastic prefixes.
+ String allPrefix = "All Trigger Plots/Pair Plots/";
+ String møllerPrefix = "Møller Trigger Plots/Pair Plots/";
+ String tridentPrefix = "Trident Trigger Plots/Pair Plots/";
+ String elasticPrefix = "Elastic Trigger Plots/Singles Plots/";
+ String allSinglesPrefix = "All Trigger Plots/Singles Plots/";
+
+ // Define the plot type prefix.
+ String allTypeName = "All Pairs - ";
+ String møllerTypeName = "Møller - ";
+ String tridentTypeName = "Trident - ";
+ String elasticTypeName = "Elastic - ";
+ String allSinglesTypeName = "All Singles - ";
+
+ // Define the plot type colors.
+ ColorStyle allColor = PlotsFormatter.ColorStyle.GREY;
+ ColorStyle møllerColor = PlotsFormatter.ColorStyle.MS_BLUE;
+ ColorStyle tridentColor = PlotsFormatter.ColorStyle.MS_ORANGE;
+ ColorStyle elasticColor = PlotsFormatter.ColorStyle.MS_GREEN;
+
+ // Create a plot formatting module.
+ PlotFormatModule module = new PlotFormatModule();
+
+ // Get the histograms and add them to the module. Start with the
+ // trident and Møller plots.
+ for(int i = 0; i < plotNames1D.length; i++) {
+ // Get the Møller and trident plots.
+ IHistogram1D allPlot = (IHistogram1D) tree.find(allPrefix + plotNames1D[i]);
+ IHistogram1D møllerPlot = (IHistogram1D) tree.find(møllerPrefix + plotNames1D[i]);
+ IHistogram1D tridentPlot = (IHistogram1D) tree.find(tridentPrefix + plotNames1D[i]);
+
+ // Make a formatted plot for each.
+ FormattedPlot1D allFormattedPlot = new FormattedPlot1D(allPlot, allColor, xAxisNames1D[i], yAxisName1D, allTypeName + displayNames1D[i]);
+ FormattedPlot1D møllerFormattedPlot = new FormattedPlot1D(møllerPlot, møllerColor, xAxisNames1D[i], yAxisName1D, møllerTypeName + displayNames1D[i]);
+ FormattedPlot1D tridentFormattedPlot = new FormattedPlot1D(tridentPlot, tridentColor, xAxisNames1D[i], yAxisName1D, tridentTypeName + displayNames1D[i]);
+
+ // Add them to the module.
+ module.addPlot1D(allFormattedPlot);
+ module.addPlot1D(møllerFormattedPlot);
+ module.addPlot1D(tridentFormattedPlot);
+ }
+ for(int i = 0; i < plotNames2D.length; i++) {
+ // Get the Møller and trident plots.
+ IHistogram2D allPlot = (IHistogram2D) tree.find(allPrefix + plotNames2D[i]);
+ IHistogram2D møllerPlot = (IHistogram2D) tree.find(møllerPrefix + plotNames2D[i]);
+ IHistogram2D tridentPlot = (IHistogram2D) tree.find(tridentPrefix + plotNames2D[i]);
+
+ // Make a formatted plot for each.
+ FormattedPlot2D allFormattedPlot = new FormattedPlot2D(allPlot, i == 0 ? true : false, xAxisNames2D[i], yAxisNames2D[i], allTypeName + displayNames2D[i]);
+ FormattedPlot2D møllerFormattedPlot = new FormattedPlot2D(møllerPlot, i == 0 ? true : false, xAxisNames2D[i], yAxisNames2D[i], møllerTypeName + displayNames2D[i]);
+ FormattedPlot2D tridentFormattedPlot = new FormattedPlot2D(tridentPlot, i == 0 ? true : false, xAxisNames2D[i], yAxisNames2D[i], tridentTypeName + displayNames2D[i]);
+
+ // Add them to the module.
+ module.addPlot2D(allFormattedPlot);
+ module.addPlot2D(møllerFormattedPlot);
+ module.addPlot2D(tridentFormattedPlot);
+ }
+
+ // Get the histograms for the elastic plots and add them to the module.
+ for(int i = 0; i < plotNamesElastic1D.length; i++) {
+ // Get the Møller and trident plots.
+ IHistogram1D allPlot = (IHistogram1D) tree.find(allSinglesPrefix + plotNames1D[i]);
+ IHistogram1D elasticPlot = (IHistogram1D) tree.find(elasticPrefix + plotNames1D[i]);
+
+ // Make a formatted plot for each.
+ FormattedPlot1D allFormattedPlot = new FormattedPlot1D(allPlot, allColor, xAxisNamesElastic1D[i], yAxisNameElastic1D,
+ allSinglesTypeName + displayNamesElastic1D[i]);
+ FormattedPlot1D elasticFormattedPlot = new FormattedPlot1D(elasticPlot, elasticColor, xAxisNamesElastic1D[i], yAxisNameElastic1D,
+ elasticTypeName + displayNamesElastic1D[i]);
+
+ // Add them to the module.
+ module.addPlot1D(allFormattedPlot);
+ module.addPlot1D(elasticFormattedPlot);
+ }
+ for(int i = 0; i < plotNamesElastic2D.length; i++) {
+ // Get the Møller and trident plots.
+ IHistogram2D allPlot = (IHistogram2D) tree.find(allPrefix + plotNamesElastic2D[i]);
+ IHistogram2D elasticPlot = (IHistogram2D) tree.find(møllerPrefix + plotNamesElastic2D[i]);
+
+ // Make a formatted plot for each.
+ FormattedPlot2D allFormattedPlot = new FormattedPlot2D(allPlot, i == 0 ? true : false, xAxisNamesElastic2D[i], yAxisNamesElastic2D[i],
+ allSinglesTypeName + plotNames2D[i]);
+ FormattedPlot2D elasticFormattedPlot = new FormattedPlot2D(elasticPlot, i == 0 ? true : false, xAxisNamesElastic2D[i], yAxisNamesElastic2D[i],
+ elasticTypeName + displayNamesElastic2D[i]);
+
+ // Add them to the module.
+ module.addPlot2D(allFormattedPlot);
+ module.addPlot2D(elasticFormattedPlot);
+ }
+
+ // Add the MTE plots to the module.
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Elastic Energy Distribution"), elasticColor,
+ "Momentum (GeV)", "Count", "Elastic - Momentum"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Elastic Event Tracks"), elasticColor,
+ "Tracks", "Count", "Elastic - Tracks in Event"));
+
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Møller Energy Sum Distribution"), møllerColor,
+ "Momentum Sum (GeV)", "Count", "Møller - Momentum Sum"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Møller Electron Energy Distribution"), møllerColor,
+ "Momentum (GeV)", "Count", "Møller - Momentum (Electron)"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Møller Time Coincidence Distribution (All Møller Cuts)"), møllerColor,
+ "Time (ns)", "Count", "Møller - Time Coincidence"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Møller Event Tracks"), møllerColor,
+ "Tracks", "Count", "Møller - Tracks in Event"));
+ module.addPlot2D(new FormattedPlot2D((IHistogram2D) tree.find("MTE Analysis/Møller 2D Energy Distribution"), false,
+ "First Track Momentum (GeV)", "Second Track Momentum (GeV)", "Møller - 2D Momentum Sum"));
+
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Trident Energy Sum Distribution"), tridentColor,
+ "Momentum Sum (GeV)", "Count", "Trident - Momentum Sum"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Trident Electron Energy Distribution"), tridentColor,
+ "Momentum (GeV)", "Count", "Trident - Momentum (Electron)"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Trident Positron Energy Distribution"), tridentColor,
+ "Momentum (GeV)", "Count", "Trident - Momentum (Positron)"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Trident Event Tracks"), tridentColor,
+ "Tracks", "Count", "Trident - Tracks in Event"));
+ module.addPlot2D(new FormattedPlot2D((IHistogram2D) tree.find("MTE Analysis/Trident 2D Energy Distribution"), false,
+ "First Track Momentum (GeV)", "Second Track Momentum (GeV)", "Trident - 2D Momentum Sum"));
+
+ // Display the plots.
+ module.savePlots("C:\\Users\\Kyle\\Desktop\\EnergyShift\\TestPrint\\");
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/ParticleMCAnalysisDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/ParticleMCAnalysisDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/ParticleMCAnalysisDriver.java Wed Apr 27 11:11:32 2016
@@ -13,136 +13,136 @@
import org.lcsim.util.aida.AIDA;
public class ParticleMCAnalysisDriver extends Driver {
- // Store collection names.
- private String particleCollectionName = "MCParticle";
-
- // Declare plots.
- private AIDA aida = AIDA.defaultInstance();
- private IHistogram1D chargedTracksPlot = aida.histogram1D("MC Analysis/Event Tracks", 10, -0.5, 9.5);
- private IHistogram1D allPlot = aida.histogram1D("MC Analysis/Electron Energy Distribution", 110, 0, 1.1);
- private IHistogram1D electronPlot = aida.histogram1D("MC Analysis/Electron Energy Distribution", 110, 0, 1.1);
- private IHistogram1D positronPlot = aida.histogram1D("MC Analysis/Positron Energy Distribution", 110, 0, 1.1);
- private IHistogram1D momentumXPlot = aida.histogram1D("MC Analysis/Particle x-Momentum Distribution", 110, 0.0, 1.1);
- private IHistogram1D momentumYPlot = aida.histogram1D("MC Analysis/Particle y-Momentum Distribution", 110, 0.0, 1.1);
- private IHistogram1D momentumZPlot = aida.histogram1D("MC Analysis/Particle z-Momentum Distribution", 110, 0.0, 1.1);
- private IHistogram1D epAnglePlot = aida.histogram1D("MC Analysis/Positron\\Electron Pair Angle Distribution", 90, 0, 180);
- private IHistogram1D eeAnglePlot = aida.histogram1D("MC Analysis/Electron\\Electron Pair Angle Distribution", 90, 0, 180);
- private IHistogram1D epMomentumSumPlot = aida.histogram1D("MC Analysis/Positron\\Electron Momentum Sum Distribution", 220, 0, 2.2);
- private IHistogram1D eeMomentumSumPlot = aida.histogram1D("MC Analysis/Electron\\Electron Momentum Sum Distribution", 220, 0, 2.2);
- private IHistogram2D momentumPlot = aida.histogram2D("MC Analysis/Particle Momentum Distribution", 100, 0.0, 0.40, 110, 0.0, 0.40);
- private IHistogram2D epMomentumSum2DPlot = aida.histogram2D("MC Analysis/Positron\\Electron 2D Momentum Distribution", 55, 0, 1.1, 55, 0, 1.1);
- private IHistogram2D eeMomentumSum2DPlot = aida.histogram2D("MC Analysis/Electron\\Electron 2D Momentum Distribution", 55, 0, 1.1, 55, 0, 1.1);
-
- @Override
- public void process(EventHeader event) {
- // Skip the event if there is no Monte Carlo collection.
- if(!event.hasCollection(MCParticle.class, particleCollectionName)) {
- return;
- }
-
- // Get the list of Monte Carlo particles.
- List<MCParticle> particleList = event.get(MCParticle.class, particleCollectionName);
-
- // Track the positive and negative particles.
- List<MCParticle> electronList = new ArrayList<MCParticle>();
- List<MCParticle> positronList = new ArrayList<MCParticle>();
-
- // Count the number of particles in the event.
- int chargedParticles = 0;
-
- // Iterate through the particles.
- for(MCParticle particle : particleList) {
- // Look at only t = 0 particles.
- if(particle.getProductionTime() == 0) {
- // Plot the x/y momentum of each particle.
- momentumPlot.fill(particle.getMomentum().x(), particle.getMomentum().y());
-
- // If the particle is charged, increment the charged
- // particle count.
- if(particle.getCharge() > 0) {
- chargedParticles++;
- }
-
- // Get the particle momentum in each direction.
- momentumXPlot.fill(particle.getMomentum().x());
- momentumYPlot.fill(particle.getMomentum().y());
- momentumZPlot.fill(particle.getMomentum().z());
-
- // Populate the general momentum plot.
- allPlot.fill(particle.getMomentum().magnitude());
- momentumPlot.fill(particle.getMomentum().x(), particle.getMomentum().y());
-
- // Store each particle based on its PID and populate
- // the appropriate plot.
- if(particle.getPDGID() == 11) {
- electronList.add(particle);
- electronPlot.fill(particle.getMomentum().magnitude());
- } else if(particle.getPDGID() == -11) {
- positronList.add(particle);
- positronPlot.fill(particle.getMomentum().magnitude());
- }
- }
- }
-
- // Populate the charged particles plot.
- chargedTracksPlot.fill(chargedParticles);
-
- // Form all electron/positron pairs.
- List<MCParticle[]> epPairList = new ArrayList<MCParticle[]>();
- for(MCParticle electron : electronList) {
- for(MCParticle positron : positronList) {
- epPairList.add(new MCParticle[] { electron, positron });
- }
- }
-
- // Populate the positron/electron pair plots.
- for(MCParticle[] pair : epPairList) {
- epAnglePlot.fill(getVectorAngle(pair[0].getMomentum(), pair[1].getMomentum()));
- epMomentumSumPlot.fill(getVectorSum(pair[0].getMomentum(), pair[1].getMomentum()));
- epMomentumSum2DPlot.fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
- }
-
- // Form all electron/electron pairs.
- List<MCParticle[]> eePairList = new ArrayList<MCParticle[]>();
- for(int i = 0; i < electronList.size(); i++) {
- for(int j = i + 1; j < electronList.size(); j++) {
- eePairList.add(new MCParticle[] { electronList.get(i), electronList.get(j) });
- }
- }
-
- // Populate the electron/electron pair plots.
- for(MCParticle[] pair : eePairList) {
- eeAnglePlot.fill(getVectorAngle(pair[0].getMomentum(), pair[1].getMomentum()));
- eeMomentumSumPlot.fill(getVectorSum(pair[0].getMomentum(), pair[1].getMomentum()));
- eeMomentumSum2DPlot.fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
- }
- }
-
- private static final double getVectorSum(Hep3Vector v1, Hep3Vector v2) {
- // Sum the squares of the component-wise sums, i.e. |v1 + v2|^2.
- double sum = 0;
- for(int i = 0; i < 3; i++) {
- double elementSum = v1.v()[i] + v2.v()[i];
- sum += (elementSum * elementSum);
- }
-
- // Return the square root of the sum.
- return Math.sqrt(sum);
- }
-
- private static final double getVectorAngle(Hep3Vector v1, Hep3Vector v2) {
- // The vector angle is defined as Acos[(v1 · v2) / (‖v1‖ × ‖v2‖)]
- return Math.acos(getDotProduct(v1, v2) / (v1.magnitude() * v2.magnitude())) / Math.PI * 180.0;
- }
-
- private static final double getDotProduct(Hep3Vector v1, Hep3Vector v2) {
- // Calculate the sum of the vector element products.
- int product = 0;
- for(int i = 0; i < 3; i++) {
- product += (v1.v()[i] * v2.v()[i]);
- }
-
- // Return the result.
- return product;
- }
+ // Store collection names.
+ private String particleCollectionName = "MCParticle";
+
+ // Declare plots.
+ private AIDA aida = AIDA.defaultInstance();
+ private IHistogram1D chargedTracksPlot = aida.histogram1D("MC Analysis/Event Tracks", 10, -0.5, 9.5);
+ private IHistogram1D allPlot = aida.histogram1D("MC Analysis/Electron Energy Distribution", 110, 0, 1.1);
+ private IHistogram1D electronPlot = aida.histogram1D("MC Analysis/Electron Energy Distribution", 110, 0, 1.1);
+ private IHistogram1D positronPlot = aida.histogram1D("MC Analysis/Positron Energy Distribution", 110, 0, 1.1);
+ private IHistogram1D momentumXPlot = aida.histogram1D("MC Analysis/Particle x-Momentum Distribution", 110, 0.0, 1.1);
+ private IHistogram1D momentumYPlot = aida.histogram1D("MC Analysis/Particle y-Momentum Distribution", 110, 0.0, 1.1);
+ private IHistogram1D momentumZPlot = aida.histogram1D("MC Analysis/Particle z-Momentum Distribution", 110, 0.0, 1.1);
+ private IHistogram1D epAnglePlot = aida.histogram1D("MC Analysis/Positron\\Electron Pair Angle Distribution", 90, 0, 180);
+ private IHistogram1D eeAnglePlot = aida.histogram1D("MC Analysis/Electron\\Electron Pair Angle Distribution", 90, 0, 180);
+ private IHistogram1D epMomentumSumPlot = aida.histogram1D("MC Analysis/Positron\\Electron Momentum Sum Distribution", 220, 0, 2.2);
+ private IHistogram1D eeMomentumSumPlot = aida.histogram1D("MC Analysis/Electron\\Electron Momentum Sum Distribution", 220, 0, 2.2);
+ private IHistogram2D momentumPlot = aida.histogram2D("MC Analysis/Particle Momentum Distribution", 100, 0.0, 0.40, 110, 0.0, 0.40);
+ private IHistogram2D epMomentumSum2DPlot = aida.histogram2D("MC Analysis/Positron\\Electron 2D Momentum Distribution", 55, 0, 1.1, 55, 0, 1.1);
+ private IHistogram2D eeMomentumSum2DPlot = aida.histogram2D("MC Analysis/Electron\\Electron 2D Momentum Distribution", 55, 0, 1.1, 55, 0, 1.1);
+
+ @Override
+ public void process(EventHeader event) {
+ // Skip the event if there is no Monte Carlo collection.
+ if(!event.hasCollection(MCParticle.class, particleCollectionName)) {
+ return;
+ }
+
+ // Get the list of Monte Carlo particles.
+ List<MCParticle> particleList = event.get(MCParticle.class, particleCollectionName);
+
+ // Track the positive and negative particles.
+ List<MCParticle> electronList = new ArrayList<MCParticle>();
+ List<MCParticle> positronList = new ArrayList<MCParticle>();
+
+ // Count the number of particles in the event.
+ int chargedParticles = 0;
+
+ // Iterate through the particles.
+ for(MCParticle particle : particleList) {
+ // Look at only t = 0 particles.
+ if(particle.getProductionTime() == 0) {
+ // Plot the x/y momentum of each particle.
+ momentumPlot.fill(particle.getMomentum().x(), particle.getMomentum().y());
+
+ // If the particle is charged, increment the charged
+ // particle count.
+ if(particle.getCharge() > 0) {
+ chargedParticles++;
+ }
+
+ // Get the particle momentum in each direction.
+ momentumXPlot.fill(particle.getMomentum().x());
+ momentumYPlot.fill(particle.getMomentum().y());
+ momentumZPlot.fill(particle.getMomentum().z());
+
+ // Populate the general momentum plot.
+ allPlot.fill(particle.getMomentum().magnitude());
+ momentumPlot.fill(particle.getMomentum().x(), particle.getMomentum().y());
+
+ // Store each particle based on its PID and populate
+ // the appropriate plot.
+ if(particle.getPDGID() == 11) {
+ electronList.add(particle);
+ electronPlot.fill(particle.getMomentum().magnitude());
+ } else if(particle.getPDGID() == -11) {
+ positronList.add(particle);
+ positronPlot.fill(particle.getMomentum().magnitude());
+ }
+ }
+ }
+
+ // Populate the charged particles plot.
+ chargedTracksPlot.fill(chargedParticles);
+
+ // Form all electron/positron pairs.
+ List<MCParticle[]> epPairList = new ArrayList<MCParticle[]>();
+ for(MCParticle electron : electronList) {
+ for(MCParticle positron : positronList) {
+ epPairList.add(new MCParticle[] { electron, positron });
+ }
+ }
+
+ // Populate the positron/electron pair plots.
+ for(MCParticle[] pair : epPairList) {
+ epAnglePlot.fill(getVectorAngle(pair[0].getMomentum(), pair[1].getMomentum()));
+ epMomentumSumPlot.fill(getVectorSum(pair[0].getMomentum(), pair[1].getMomentum()));
+ epMomentumSum2DPlot.fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
+ }
+
+ // Form all electron/electron pairs.
+ List<MCParticle[]> eePairList = new ArrayList<MCParticle[]>();
+ for(int i = 0; i < electronList.size(); i++) {
+ for(int j = i + 1; j < electronList.size(); j++) {
+ eePairList.add(new MCParticle[] { electronList.get(i), electronList.get(j) });
+ }
+ }
+
+ // Populate the electron/electron pair plots.
+ for(MCParticle[] pair : eePairList) {
+ eeAnglePlot.fill(getVectorAngle(pair[0].getMomentum(), pair[1].getMomentum()));
+ eeMomentumSumPlot.fill(getVectorSum(pair[0].getMomentum(), pair[1].getMomentum()));
+ eeMomentumSum2DPlot.fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
+ }
+ }
+
+ private static final double getVectorSum(Hep3Vector v1, Hep3Vector v2) {
+ // Sum the squares of the component-wise sums, i.e. |v1 + v2|^2.
+ double sum = 0;
+ for(int i = 0; i < 3; i++) {
+ double elementSum = v1.v()[i] + v2.v()[i];
+ sum += (elementSum * elementSum);
+ }
+
+ // Return the square root of the sum.
+ return Math.sqrt(sum);
+ }
+
+ private static final double getVectorAngle(Hep3Vector v1, Hep3Vector v2) {
+ // The vector angle is defined as Acos[(v1 · v2) / (‖v1‖ × ‖v2‖)]
+ return Math.acos(getDotProduct(v1, v2) / (v1.magnitude() * v2.magnitude())) / Math.PI * 180.0;
+ }
+
+ private static final double getDotProduct(Hep3Vector v1, Hep3Vector v2) {
+ // Calculate the sum of the vector element products.
+ int product = 0;
+ for(int i = 0; i < 3; i++) {
+ product += (v1.v()[i] * v2.v()[i]);
+ }
+
+ // Return the result.
+ return product;
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/PlotsFormatter.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/PlotsFormatter.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/PlotsFormatter.java Wed Apr 27 11:11:32 2016
@@ -8,120 +8,120 @@
import java.awt.Font;
public class PlotsFormatter {
- // Define plot fonts.
- public static final Font BASIC_FONT = new Font("Calibri", Font.PLAIN, 30);
- public static final Font AXIS_FONT = new Font("Calibri", Font.BOLD, 35);
- public static final Font TITLE_FONT = new Font("Calibri", Font.BOLD, 45);
-
- // Defines the color style options for plot data.
- public static enum ColorStyle {
- MS_BLUE(new Color( 79, 129, 189), new Color( 36, 64, 97)), MS_ORANGE(new Color(247, 150, 70), new Color(152, 72, 6)),
- MS_RED(new Color(192, 80, 77), new Color( 99, 36, 35)), GREY(new Color(166, 166, 166), new Color( 89, 89, 89)),
- MS_GREEN(new Color(155, 187, 89), new Color( 79, 98, 40)), CRIMSON(new Color(161, 0, 0), new Color(104, 0, 0)),
- RUST(new Color(161, 80, 0), new Color(105, 80, 0)), YELLOW(new Color(161, 161, 0), new Color(122, 109, 8)),
- FOREST(new Color( 65, 102, 0), new Color( 37, 79, 0)), GREEN(new Color( 7, 132, 70), new Color( 7, 82, 30)),
- TEAL(new Color( 0, 130, 130), new Color( 0, 90, 100)), CERULEAN(new Color( 0, 86, 130), new Color( 0, 28, 83)),
- BLUE(new Color( 0, 33, 203), new Color( 0, 0, 137)), INDIGO(new Color( 68, 10, 127), new Color( 0, 0, 61)),
- PURPLE(new Color(106, 0, 106), new Color( 63, 0, 56)), FUSCHIA(new Color(119, 0, 60), new Color( 60, 0, 60));
-
- private final Color fillColor;
- private final Color lineColor;
-
- private ColorStyle(Color fillColor, Color lineColor) {
- this.fillColor = fillColor;
- this.lineColor = lineColor;
- }
-
- public Color getFillColor() { return fillColor; }
-
- public Color getLineColor() { return lineColor; }
- };
-
- /**
- * Sets the plot display formatting for 1D plots.
- * @param region - The plotter region to format.
- * @param color - The data color settings to use.
- */
- public static final void setDefault1DStyle(PlotterRegion region, ColorStyle[] color) {
- // Get the names of each plot on in the region.
- String[] dataNames = region.getAllDataNames();
-
- // Check whether this is an overlay plot. Overlay plots contain
- // more than one data name.
- boolean overlay = (dataNames.length > 1 ? true : false);
-
- // Iterate over each plot in the region.
- for(int i = 0; i < dataNames.length; i++) {
- // Set the overlay style if needed.
- if(overlay) {
- // Get the fill style for the current data type.
- JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
-
- // Set the histogram style to display thick-lined bars
- // with no fill. The color is set by the "color" argument.
- fillStyle.setHistogramFill(false);
- fillStyle.setHistogramBarLineWidth(3);
- fillStyle.setHistogramBarLineColor(color[i].getFillColor());
-
- // Set the legend text style.
- region.getPlot().getLegend().setFont(new Font("Calibri", Font.PLAIN, 20));
- }
-
- // Otherwise, set the fill style for a single plot.
- else {
- // Get the fill style for the current data type.
- JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
-
- // Set the histogram style to display thick-lined bars
- // with a fill color. The colors are defined by the
- // "color" argument.
- fillStyle.setHistogramBarLineWidth(3);
- fillStyle.setHistogramBarColor(color[i].getFillColor());
- fillStyle.setHistogramBarLineColor(color[i].getLineColor());
- }
-
- // Set the statistics box style.
- region.getPlot().getStats().setVisible(true);
- region.getPlot().getStats().setFont(BASIC_FONT);
-
- // Set the title font.
- region.getPlot().getTitleObject().setFont(TITLE_FONT);
-
- // Set the axis tick-mark fonts.
- region.getPlot().getXAxis().setFont(BASIC_FONT);
- region.getPlot().getYAxis().setFont(BASIC_FONT);
- region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
- region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
- }
- }
-
- /**
- * Sets the plot display formatting for 1D plots.
- * @param region - The plotter region to format.
- */
- public static final void setDefault2DStyle(PlotterRegion region, boolean logarithmic) {
- // Get the fill style object. 2D plots should never be overlay
- // plots, so there should only ever be one data name.
- JASHist2DHistogramStyle fillStyle = (JASHist2DHistogramStyle) region.getDataForName(region.getAllDataNames()[0]).getStyle();
-
- // Set the fill style for a two-dimensional plot.
- if(logarithmic) { fillStyle.setLogZ(true); }
- fillStyle.setHistStyle(JASHist2DHistogramStyle.STYLE_COLORMAP);
- fillStyle.setColorMapScheme(JASHist2DHistogramStyle.COLORMAP_RAINBOW);
-
- // Make the statistics box invisible.
- region.getPlot().getStats().setVisible(false);
-
- // Set the general plot font (which is also the z-axis font).
- region.getPlot().setFont(BASIC_FONT);
-
- // Set the title font.
- region.getPlot().getTitleObject().setFont(TITLE_FONT);
-
- // Set the axis tick-mark fonts.
- region.getPlot().getXAxis().setFont(BASIC_FONT);
- region.getPlot().getYAxis().setFont(BASIC_FONT);
- region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
- region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
- }
+ // Define plot fonts.
+ public static final Font BASIC_FONT = new Font("Calibri", Font.PLAIN, 30);
+ public static final Font AXIS_FONT = new Font("Calibri", Font.BOLD, 35);
+ public static final Font TITLE_FONT = new Font("Calibri", Font.BOLD, 45);
+
    // Defines the color style options for plot data. Each style pairs a
    // fill color with a darker line (outline) color so that filled and
    // outlined renderings of the same data set match visually.
    // NOTE(review): "FUSCHIA" is a misspelling of "fuchsia"; it is kept
    // as-is because renaming an enum constant would break callers.
    public static enum ColorStyle {
        MS_BLUE(new Color( 79, 129, 189), new Color( 36, 64, 97)), MS_ORANGE(new Color(247, 150, 70), new Color(152, 72, 6)),
        MS_RED(new Color(192, 80, 77), new Color( 99, 36, 35)), GREY(new Color(166, 166, 166), new Color( 89, 89, 89)),
        MS_GREEN(new Color(155, 187, 89), new Color( 79, 98, 40)), CRIMSON(new Color(161, 0, 0), new Color(104, 0, 0)),
        RUST(new Color(161, 80, 0), new Color(105, 80, 0)), YELLOW(new Color(161, 161, 0), new Color(122, 109, 8)),
        FOREST(new Color( 65, 102, 0), new Color( 37, 79, 0)), GREEN(new Color( 7, 132, 70), new Color( 7, 82, 30)),
        TEAL(new Color( 0, 130, 130), new Color( 0, 90, 100)), CERULEAN(new Color( 0, 86, 130), new Color( 0, 28, 83)),
        BLUE(new Color( 0, 33, 203), new Color( 0, 0, 137)), INDIGO(new Color( 68, 10, 127), new Color( 0, 0, 61)),
        PURPLE(new Color(106, 0, 106), new Color( 63, 0, 56)), FUSCHIA(new Color(119, 0, 60), new Color( 60, 0, 60));

        // The color used to fill histogram bars.
        private final Color fillColor;
        // The color used for histogram bar outlines.
        private final Color lineColor;

        private ColorStyle(Color fillColor, Color lineColor) {
            this.fillColor = fillColor;
            this.lineColor = lineColor;
        }

        // Gets the bar fill color for this style.
        public Color getFillColor() { return fillColor; }

        // Gets the bar outline color for this style.
        public Color getLineColor() { return lineColor; }
    };
+
+ /**
+ * Sets the plot display formatting for 1D plots.
+ * @param region - The plotter region to format.
+ * @param color - The data color settings to use.
+ */
+ public static final void setDefault1DStyle(PlotterRegion region, ColorStyle[] color) {
+ // Get the names of each plot on in the region.
+ String[] dataNames = region.getAllDataNames();
+
+ // Check whether this is an overlay plot. Overlay plots contain
+ // more than one data name.
+ boolean overlay = (dataNames.length > 1 ? true : false);
+
+ // Iterate over each plot in the region.
+ for(int i = 0; i < dataNames.length; i++) {
+ // Set the overlay style if needed.
+ if(overlay) {
+ // Get the fill style for the current data type.
+ JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
+
+ // Set the histogram style to display thick-lined bars
+ // with no fill. The color is set by the "color" argument.
+ fillStyle.setHistogramFill(false);
+ fillStyle.setHistogramBarLineWidth(3);
+ fillStyle.setHistogramBarLineColor(color[i].getFillColor());
+
+ // Set the legend text style.
+ region.getPlot().getLegend().setFont(new Font("Calibri", Font.PLAIN, 20));
+ }
+
+ // Otherwise, set the fill style for a single plot.
+ else {
+ // Get the fill style for the current data type.
+ JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
+
+ // Set the histogram style to display thick-lined bars
+ // with a fill color. The colors are defined by the
+ // "color" argument.
+ fillStyle.setHistogramBarLineWidth(3);
+ fillStyle.setHistogramBarColor(color[i].getFillColor());
+ fillStyle.setHistogramBarLineColor(color[i].getLineColor());
+ }
+
+ // Set the statistics box style.
+ region.getPlot().getStats().setVisible(true);
+ region.getPlot().getStats().setFont(BASIC_FONT);
+
+ // Set the title font.
+ region.getPlot().getTitleObject().setFont(TITLE_FONT);
+
+ // Set the axis tick-mark fonts.
+ region.getPlot().getXAxis().setFont(BASIC_FONT);
+ region.getPlot().getYAxis().setFont(BASIC_FONT);
+ region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
+ region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
+ }
+ }
+
    /**
     * Sets the plot display formatting for 2D plots. The plot is drawn
     * as a rainbow color map with the statistics box hidden.
     * @param region - The plotter region to format.
     * @param logarithmic - Whether the z-axis (color scale) should use
     * a logarithmic scale.
     */
    public static final void setDefault2DStyle(PlotterRegion region, boolean logarithmic) {
        // Get the fill style object. 2D plots should never be overlay
        // plots, so there should only ever be one data name.
        JASHist2DHistogramStyle fillStyle = (JASHist2DHistogramStyle) region.getDataForName(region.getAllDataNames()[0]).getStyle();

        // Set the fill style for a two-dimensional plot.
        if(logarithmic) { fillStyle.setLogZ(true); }
        fillStyle.setHistStyle(JASHist2DHistogramStyle.STYLE_COLORMAP);
        fillStyle.setColorMapScheme(JASHist2DHistogramStyle.COLORMAP_RAINBOW);

        // Make the statistics box invisible.
        region.getPlot().getStats().setVisible(false);

        // Set the general plot font (which is also the z-axis font).
        region.getPlot().setFont(BASIC_FONT);

        // Set the title font.
        region.getPlot().getTitleObject().setFont(TITLE_FONT);

        // Set the axis tick-mark fonts.
        region.getPlot().getXAxis().setFont(BASIC_FONT);
        region.getPlot().getYAxis().setFont(BASIC_FONT);
        region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
        region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
    }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/RafoAnalysis.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/RafoAnalysis.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/RafoAnalysis.java Wed Apr 27 11:11:32 2016
@@ -16,431 +16,431 @@
import org.lcsim.util.aida.AIDA;
public class RafoAnalysis extends Driver {
- private boolean useGoodSVT = false;
- private String clusterCollectionName = "EcalClustersCorr";
- private String particleCollectionName = "FinalStateParticles";
-
- private AIDA aida = AIDA.defaultInstance();
- private IHistogram1D t0TimeCoincidenceAll = aida.histogram1D("Tier 0/Time Coincidence", 300, -15.0, 15.0);
- private IHistogram1D t0TimeCoincidenceFiducial = aida.histogram1D("Tier 0/Time Coincidence (Fiducial Region)", 300, -15.0, 15.0);
- private IHistogram1D t0EnergySumAll = aida.histogram1D("Tier 0/Energy Sum", 300, 0.0, 1.5);
- private IHistogram1D t0EnergySumFiducial = aida.histogram1D("Tier 0/Energy Sum (Fiducial Region)", 300, 0.0, 1.5);
- private IHistogram1D t0InvariantMassAll = aida.histogram1D("Tier 0/Invariant Mass", 2200, 0.0, 1.1);
- private IHistogram2D t0EnergySum2DAll = aida.histogram2D("Tier 0/Top Cluster Energy vs. Bottom Cluster Energy", 300, 0, 1.5, 300, 0, 1.5);
- private IHistogram2D t0EnergySum2DFiducial = aida.histogram2D("Tier 0/Top Cluster Energy vs. Bottom Cluster Energy (Fiducial Region)", 300, 0, 1.5, 300, 0, 1.5);
- private IHistogram2D t0SumCoplanarityAll = aida.histogram2D("Tier 0/Hardware Coplanarity vs. Energy Sum", 300, 0, 1.5, 115, 0, 230);
- private IHistogram2D t0SumCoplanarityFiducial = aida.histogram2D("Tier 0/Hardware Coplanarity vs. Energy Sum (Fiducial Region)", 300, 0, 1.5, 115, 0, 230);
- private IHistogram2D t0SumCoplanarityCalcAll = aida.histogram2D("Tier 0/Calculated Coplanarity vs. Energy Sum", 300, 0, 1.5, 115, 130, 230);
- private IHistogram2D t0SumCoplanarityCalcFiducial = aida.histogram2D("Tier 0/Calculated Coplanarity vs. Energy Sum (Fiducial Region)", 300, 0, 1.5, 115, 130, 230);
- private IHistogram2D t0TimeEnergyAll = aida.histogram2D("Tier 0/Cluster Time vs. Cluster Energy", 300, 0, 1.5, 100, 0, 100);
- private IHistogram2D t0TimeEnergyFiducial = aida.histogram2D("Tier 0/Cluster Time vs. Cluster Energy (Fiducial Region)", 300, 0, 1.5, 100, 0, 100);
-
- private IHistogram1D t1TimeCoincidenceAll = aida.histogram1D("Tier 1/Time Coincidence", 300, -15.0, 15.0);
- private IHistogram1D t1TimeCoincidenceFiducial = aida.histogram1D("Tier 1/Time Coincidence (Fiducial Region)", 300, -15.0, 15.0);
- private IHistogram1D t1EnergySumAll = aida.histogram1D("Tier 1/Energy Sum", 300, 0.0, 1.5);
- private IHistogram1D t1EnergySumFiducial = aida.histogram1D("Tier 1/Energy Sum (Fiducial Region)", 300, 0.0, 1.5);
- private IHistogram1D t1InvariantMassAll = aida.histogram1D("Tier 1/Invariant Mass", 2200, 0.0, 1.1);
- private IHistogram2D t1EnergySum2DAll = aida.histogram2D("Tier 1/Top Cluster Energy vs. Bottom Cluster Energy", 300, 0, 1.5, 300, 0, 1.5);
- private IHistogram2D t1EnergySum2DFiducial = aida.histogram2D("Tier 1/Top Cluster Energy vs. Bottom Cluster Energy (Fiducial Region)", 300, 0, 1.5, 300, 0, 1.5);
- private IHistogram2D t1SumCoplanarityAll = aida.histogram2D("Tier 1/Hardware Coplanarity vs. Energy Sum", 300, 0, 1.5, 115, 0, 230);
- private IHistogram2D t1SumCoplanarityFiducial = aida.histogram2D("Tier 1/Hardware Coplanarity vs. Energy Sum (Fiducial Region)", 300, 0, 1.5, 115, 0, 230);
- private IHistogram2D t1SumCoplanarityCalcAll = aida.histogram2D("Tier 1/Calculated Coplanarity vs. Energy Sum", 300, 0, 1.5, 115, 130, 230);
- private IHistogram2D t1SumCoplanarityCalcFiducial = aida.histogram2D("Tier 1/Calculated Coplanarity vs. Energy Sum (Fiducial Region)", 300, 0, 1.5, 115, 130, 230);
- private IHistogram2D t1TimeEnergyAll = aida.histogram2D("Tier 1/Cluster Time vs. Cluster Energy", 300, 0, 1.5, 100, 0, 100);
- private IHistogram2D t1TimeEnergyFiducial = aida.histogram2D("Tier 1/Cluster Time vs. Cluster Energy (Fiducial Region)", 300, 0, 1.5, 100, 0, 100);
-
- private IHistogram1D t2TimeCoincidenceAll = aida.histogram1D("Tier 2/Time Coincidence", 300, -15.0, 15.0);
- private IHistogram1D t2TimeCoincidenceFiducial = aida.histogram1D("Tier 2/Time Coincidence (Fiducial Region)", 300, -15.0, 15.0);
- private IHistogram1D t2EnergySumAll = aida.histogram1D("Tier 2/Energy Sum", 300, 0.0, 1.5);
- private IHistogram1D t2EnergySumFiducial = aida.histogram1D("Tier 2/Energy Sum (Fiducial Region)", 300, 0.0, 1.5);
- private IHistogram1D t2InvariantMassAll = aida.histogram1D("Tier 2/Invariant Mass", 2200, 0.0, 1.1);
- private IHistogram2D t2EnergySum2DAll = aida.histogram2D("Tier 2/Top Cluster Energy vs. Bottom Cluster Energy", 300, 0, 1.5, 300, 0, 1.5);
- private IHistogram2D t2EnergySum2DFiducial = aida.histogram2D("Tier 2/Top Cluster Energy vs. Bottom Cluster Energy (Fiducial Region)", 300, 0, 1.5, 300, 0, 1.5);
- private IHistogram2D t2SumCoplanarityAll = aida.histogram2D("Tier 2/Hardware Coplanarity vs. Energy Sum", 300, 0, 1.5, 115, 0, 230);
- private IHistogram2D t2SumCoplanarityFiducial = aida.histogram2D("Tier 2/Hardware Coplanarity vs. Energy Sum (Fiducial Region)", 300, 0, 1.5, 115, 0, 230);
- private IHistogram2D t2SumCoplanarityCalcAll = aida.histogram2D("Tier 2/Calculated Coplanarity vs. Energy Sum", 300, 0, 1.5, 115, 130, 230);
- private IHistogram2D t2SumCoplanarityCalcFiducial = aida.histogram2D("Tier 2/Calculated Coplanarity vs. Energy Sum (Fiducial Region)", 300, 0, 1.5, 115, 130, 230);
- private IHistogram2D t2TimeEnergyAll = aida.histogram2D("Tier 2/Cluster Time vs. Cluster Energy", 300, 0, 1.5, 100, 0, 100);
- private IHistogram2D t2TimeEnergyFiducial = aida.histogram2D("Tier 2/Cluster Time vs. Cluster Energy (Fiducial Region)", 300, 0, 1.5, 100, 0, 100);
-
- private int t0Events = 0;
- private int t1Events = 0;
- private int t2Events = 0;
-
- @Override
- public void endOfData() {
- System.out.printf("Tier 0 Events: %d%n", t0Events);
- System.out.printf("Tier 1 Events: %d%n", t1Events);
- System.out.printf("Tier 2 Events: %d%n", t2Events);
- }
-
- @Override
- public void process(EventHeader event) {
- // Check whether the SVT was active in this event.
- final String[] flagNames = { "svt_bias_good", "svt_burstmode_noise_good", "svt_position_good" };
- boolean svtGood = true;
+ private boolean useGoodSVT = false;
+ private String clusterCollectionName = "EcalClustersCorr";
+ private String particleCollectionName = "FinalStateParticles";
+
+ private AIDA aida = AIDA.defaultInstance();
+ private IHistogram1D t0TimeCoincidenceAll = aida.histogram1D("Tier 0/Time Coincidence", 300, -15.0, 15.0);
+ private IHistogram1D t0TimeCoincidenceFiducial = aida.histogram1D("Tier 0/Time Coincidence (Fiducial Region)", 300, -15.0, 15.0);
+ private IHistogram1D t0EnergySumAll = aida.histogram1D("Tier 0/Energy Sum", 300, 0.0, 1.5);
+ private IHistogram1D t0EnergySumFiducial = aida.histogram1D("Tier 0/Energy Sum (Fiducial Region)", 300, 0.0, 1.5);
+ private IHistogram1D t0InvariantMassAll = aida.histogram1D("Tier 0/Invariant Mass", 2200, 0.0, 1.1);
+ private IHistogram2D t0EnergySum2DAll = aida.histogram2D("Tier 0/Top Cluster Energy vs. Bottom Cluster Energy", 300, 0, 1.5, 300, 0, 1.5);
+ private IHistogram2D t0EnergySum2DFiducial = aida.histogram2D("Tier 0/Top Cluster Energy vs. Bottom Cluster Energy (Fiducial Region)", 300, 0, 1.5, 300, 0, 1.5);
+ private IHistogram2D t0SumCoplanarityAll = aida.histogram2D("Tier 0/Hardware Coplanarity vs. Energy Sum", 300, 0, 1.5, 115, 0, 230);
+ private IHistogram2D t0SumCoplanarityFiducial = aida.histogram2D("Tier 0/Hardware Coplanarity vs. Energy Sum (Fiducial Region)", 300, 0, 1.5, 115, 0, 230);
+ private IHistogram2D t0SumCoplanarityCalcAll = aida.histogram2D("Tier 0/Calculated Coplanarity vs. Energy Sum", 300, 0, 1.5, 115, 130, 230);
+ private IHistogram2D t0SumCoplanarityCalcFiducial = aida.histogram2D("Tier 0/Calculated Coplanarity vs. Energy Sum (Fiducial Region)", 300, 0, 1.5, 115, 130, 230);
+ private IHistogram2D t0TimeEnergyAll = aida.histogram2D("Tier 0/Cluster Time vs. Cluster Energy", 300, 0, 1.5, 100, 0, 100);
+ private IHistogram2D t0TimeEnergyFiducial = aida.histogram2D("Tier 0/Cluster Time vs. Cluster Energy (Fiducial Region)", 300, 0, 1.5, 100, 0, 100);
+
+ private IHistogram1D t1TimeCoincidenceAll = aida.histogram1D("Tier 1/Time Coincidence", 300, -15.0, 15.0);
+ private IHistogram1D t1TimeCoincidenceFiducial = aida.histogram1D("Tier 1/Time Coincidence (Fiducial Region)", 300, -15.0, 15.0);
+ private IHistogram1D t1EnergySumAll = aida.histogram1D("Tier 1/Energy Sum", 300, 0.0, 1.5);
+ private IHistogram1D t1EnergySumFiducial = aida.histogram1D("Tier 1/Energy Sum (Fiducial Region)", 300, 0.0, 1.5);
+ private IHistogram1D t1InvariantMassAll = aida.histogram1D("Tier 1/Invariant Mass", 2200, 0.0, 1.1);
+ private IHistogram2D t1EnergySum2DAll = aida.histogram2D("Tier 1/Top Cluster Energy vs. Bottom Cluster Energy", 300, 0, 1.5, 300, 0, 1.5);
+ private IHistogram2D t1EnergySum2DFiducial = aida.histogram2D("Tier 1/Top Cluster Energy vs. Bottom Cluster Energy (Fiducial Region)", 300, 0, 1.5, 300, 0, 1.5);
+ private IHistogram2D t1SumCoplanarityAll = aida.histogram2D("Tier 1/Hardware Coplanarity vs. Energy Sum", 300, 0, 1.5, 115, 0, 230);
+ private IHistogram2D t1SumCoplanarityFiducial = aida.histogram2D("Tier 1/Hardware Coplanarity vs. Energy Sum (Fiducial Region)", 300, 0, 1.5, 115, 0, 230);
+ private IHistogram2D t1SumCoplanarityCalcAll = aida.histogram2D("Tier 1/Calculated Coplanarity vs. Energy Sum", 300, 0, 1.5, 115, 130, 230);
+ private IHistogram2D t1SumCoplanarityCalcFiducial = aida.histogram2D("Tier 1/Calculated Coplanarity vs. Energy Sum (Fiducial Region)", 300, 0, 1.5, 115, 130, 230);
+ private IHistogram2D t1TimeEnergyAll = aida.histogram2D("Tier 1/Cluster Time vs. Cluster Energy", 300, 0, 1.5, 100, 0, 100);
+ private IHistogram2D t1TimeEnergyFiducial = aida.histogram2D("Tier 1/Cluster Time vs. Cluster Energy (Fiducial Region)", 300, 0, 1.5, 100, 0, 100);
+
+ private IHistogram1D t2TimeCoincidenceAll = aida.histogram1D("Tier 2/Time Coincidence", 300, -15.0, 15.0);
+ private IHistogram1D t2TimeCoincidenceFiducial = aida.histogram1D("Tier 2/Time Coincidence (Fiducial Region)", 300, -15.0, 15.0);
+ private IHistogram1D t2EnergySumAll = aida.histogram1D("Tier 2/Energy Sum", 300, 0.0, 1.5);
+ private IHistogram1D t2EnergySumFiducial = aida.histogram1D("Tier 2/Energy Sum (Fiducial Region)", 300, 0.0, 1.5);
+ private IHistogram1D t2InvariantMassAll = aida.histogram1D("Tier 2/Invariant Mass", 2200, 0.0, 1.1);
+ private IHistogram2D t2EnergySum2DAll = aida.histogram2D("Tier 2/Top Cluster Energy vs. Bottom Cluster Energy", 300, 0, 1.5, 300, 0, 1.5);
+ private IHistogram2D t2EnergySum2DFiducial = aida.histogram2D("Tier 2/Top Cluster Energy vs. Bottom Cluster Energy (Fiducial Region)", 300, 0, 1.5, 300, 0, 1.5);
+ private IHistogram2D t2SumCoplanarityAll = aida.histogram2D("Tier 2/Hardware Coplanarity vs. Energy Sum", 300, 0, 1.5, 115, 0, 230);
+ private IHistogram2D t2SumCoplanarityFiducial = aida.histogram2D("Tier 2/Hardware Coplanarity vs. Energy Sum (Fiducial Region)", 300, 0, 1.5, 115, 0, 230);
+ private IHistogram2D t2SumCoplanarityCalcAll = aida.histogram2D("Tier 2/Calculated Coplanarity vs. Energy Sum", 300, 0, 1.5, 115, 130, 230);
+ private IHistogram2D t2SumCoplanarityCalcFiducial = aida.histogram2D("Tier 2/Calculated Coplanarity vs. Energy Sum (Fiducial Region)", 300, 0, 1.5, 115, 130, 230);
+ private IHistogram2D t2TimeEnergyAll = aida.histogram2D("Tier 2/Cluster Time vs. Cluster Energy", 300, 0, 1.5, 100, 0, 100);
+ private IHistogram2D t2TimeEnergyFiducial = aida.histogram2D("Tier 2/Cluster Time vs. Cluster Energy (Fiducial Region)", 300, 0, 1.5, 100, 0, 100);
+
+ private int t0Events = 0;
+ private int t1Events = 0;
+ private int t2Events = 0;
+
+ @Override
+ public void endOfData() {
+ System.out.printf("Tier 0 Events: %d%n", t0Events);
+ System.out.printf("Tier 1 Events: %d%n", t1Events);
+ System.out.printf("Tier 2 Events: %d%n", t2Events);
+ }
+
+ @Override
+ public void process(EventHeader event) {
+ // Check whether the SVT was active in this event.
+ final String[] flagNames = { "svt_bias_good", "svt_burstmode_noise_good", "svt_position_good" };
+ boolean svtGood = true;
for(int i = 0; i < flagNames.length; i++) {
int[] flag = event.getIntegerParameters().get(flagNames[i]);
if(flag == null || flag[0] == 0) {
svtGood = false;
}
}
-
+
// If the SVT is not properly running, skip the event.
if(!svtGood && useGoodSVT) { return; }
- // Get the list of particles, if it exists.
- List<ReconstructedParticle> trackList = null;
- if(event.hasCollection(ReconstructedParticle.class, particleCollectionName)) {
- trackList = event.get(ReconstructedParticle.class, particleCollectionName);
- }
-
- // Get the list of clusters, if it exists.
- List<Cluster> clusterList = null;
- if(event.hasCollection(Cluster.class, clusterCollectionName)) {
- clusterList = event.get(Cluster.class, clusterCollectionName);
- }
-
- // Make sure that the cluster and track lists both exist.
- if(clusterList == null || trackList == null) {
- return;
- }
-
- // Perform tier 1 analysis. This requires that there be at
- // least one top/bottom cluster pair with a time difference
- // of less then 4 ns.
- double t1TimeThreshold = 1.5;
-
- // Get a list of cluster pairs.
- List<Cluster[]> pairList = getClusterPairs(clusterList);
-
- // Iterate over the cluster pairs.
- boolean t1Passed = false;
- t1ClusterLoop:
- for(Cluster[] pair : pairList) {
- // Check that the time difference for the cluster pair
- // meets the time cut.
- if(TriggerModule.getValueTimeCoincidence(pair) <= t1TimeThreshold) {
- // Note that the tier 1 analysis condition passed.
- t1Passed = true;
-
- // Break from the loop.
- break t1ClusterLoop;
- }
- }
-
- // Perform the additional checks for tier 2 analysis. This
- // requires that there be at least one top/bottom track pair
- // and that one track be positive and the other be negative.
-
- // Get a list of top and bottom track pairs.
- List<ReconstructedParticle[]> trackPairList = getTrackPairs(trackList);
-
- // Check that at least one top/bottom track has one negative and
- // one positive track.
- boolean t2Passed = false;
- t2TrackLoop:
- for(ReconstructedParticle[] pair : trackPairList) {
- if((pair[0].getCharge() > 0 && pair[1].getCharge() < 0) || (pair[0].getCharge() < 0 && pair[1].getCharge() > 0)) {
- t2Passed = true;
- break t2TrackLoop;
- }
- }
-
- // Populate the tier 0 analysis plot.
- if(true) {
- // Increment the number of tier 1 events found.
- t0Events++;
-
- // Track which clusters have already been added to the
- // singles plot so that there are no repeats.
- Set<Cluster> plotSet = new HashSet<Cluster>(clusterList.size());
- Set<Cluster> plotFiducial = new HashSet<Cluster>(clusterList.size());
-
- for(ReconstructedParticle[] pair : trackPairList) {
- t0InvariantMassAll.fill(getInvariantMass(pair));
- }
-
- for(Cluster[] pair : pairList) {
- // Fill the all pairs plots.
- double pairEnergy = pair[0].getEnergy() + pair[1].getEnergy();
- t0EnergySumAll.fill(pairEnergy);
- t0EnergySum2DAll.fill(pair[1].getEnergy(), pair[0].getEnergy());
- t0TimeCoincidenceAll.fill(getTimeConicidence(pair));
- t0SumCoplanarityCalcAll.fill(pairEnergy, getCalculatedCoplanarity(pair));
- t0SumCoplanarityAll.fill(pairEnergy, TriggerModule.getValueCoplanarity(pair));
-
- // Fill the singles plots.
- if(!plotSet.contains(pair[0])) {
- plotSet.add(pair[0]);
- t0TimeEnergyAll.fill(pair[0].getEnergy(), TriggerModule.getClusterTime(pair[0]));
- } if(!plotSet.contains(pair[1])) {
- plotSet.add(pair[1]);
- t0TimeEnergyAll.fill(pair[1].getEnergy(), TriggerModule.getClusterTime(pair[1]));
- }
-
- // Fill the fiducial plots if appropriate.
- if(inFiducialRegion(pair[0]) && inFiducialRegion(pair[1])) {
- t0EnergySumFiducial.fill(pairEnergy);
- t0EnergySum2DFiducial.fill(pair[1].getEnergy(), pair[0].getEnergy());
- t0TimeCoincidenceFiducial.fill(getTimeConicidence(pair));
- t0SumCoplanarityCalcFiducial.fill(pairEnergy, getCalculatedCoplanarity(pair));
- t0SumCoplanarityFiducial.fill(pairEnergy, TriggerModule.getValueCoplanarity(pair));
- }
-
- // Fill the singles fiducial plots if appropriate.
- if(!plotFiducial.contains(pair[0]) && inFiducialRegion(pair[0])) {
- plotFiducial.add(pair[0]);
- t0TimeEnergyFiducial.fill(pair[0].getEnergy(), TriggerModule.getClusterTime(pair[0]));
- } if(!plotFiducial.contains(pair[1]) && inFiducialRegion(pair[1])) {
- plotFiducial.add(pair[1]);
- t0TimeEnergyFiducial.fill(pair[1].getEnergy(), TriggerModule.getClusterTime(pair[1]));
- }
- }
- }
-
- // Populate the tier 1 analysis plots, if the conditions were met.
- if(t1Passed) {
- // Increment the number of tier 1 events found.
- t1Events++;
-
- // Track which clusters have already been added to the
- // singles plot so that there are no repeats.
- Set<Cluster> plotSet = new HashSet<Cluster>(clusterList.size());
- Set<Cluster> plotFiducial = new HashSet<Cluster>(clusterList.size());
-
- for(ReconstructedParticle[] pair : trackPairList) {
- t1InvariantMassAll.fill(getInvariantMass(pair));
- }
-
- for(Cluster[] pair : pairList) {
- // Only include clusters that pass the time coincidence.
- if(TriggerModule.getValueTimeCoincidence(pair) > t1TimeThreshold) {
- continue;
- }
-
- // Fill the all pairs plots.
- double pairEnergy = pair[0].getEnergy() + pair[1].getEnergy();
- t1EnergySumAll.fill(pairEnergy);
- t1EnergySum2DAll.fill(pair[1].getEnergy(), pair[0].getEnergy());
- t1TimeCoincidenceAll.fill(getTimeConicidence(pair));
- t1SumCoplanarityCalcAll.fill(pairEnergy, getCalculatedCoplanarity(pair));
- t1SumCoplanarityAll.fill(pairEnergy, TriggerModule.getValueCoplanarity(pair));
-
- // Fill the singles plots.
- if(!plotSet.contains(pair[0])) {
- plotSet.add(pair[0]);
- t1TimeEnergyAll.fill(pair[0].getEnergy(), TriggerModule.getClusterTime(pair[0]));
- } if(!plotSet.contains(pair[1])) {
- plotSet.add(pair[1]);
- t1TimeEnergyAll.fill(pair[1].getEnergy(), TriggerModule.getClusterTime(pair[1]));
- }
-
- // Fill the fiducial plots if appropriate.
- if(inFiducialRegion(pair[0]) && inFiducialRegion(pair[1])) {
- t1EnergySumFiducial.fill(pairEnergy);
- t1EnergySum2DFiducial.fill(pair[1].getEnergy(), pair[0].getEnergy());
- t1TimeCoincidenceFiducial.fill(getTimeConicidence(pair));
- t1SumCoplanarityCalcFiducial.fill(pairEnergy, getCalculatedCoplanarity(pair));
- t1SumCoplanarityFiducial.fill(pairEnergy, TriggerModule.getValueCoplanarity(pair));
- }
-
- // Fill the singles fiducial plots if appropriate.
- if(!plotFiducial.contains(pair[0]) && inFiducialRegion(pair[0])) {
- plotFiducial.add(pair[0]);
- t1TimeEnergyFiducial.fill(pair[0].getEnergy(), TriggerModule.getClusterTime(pair[0]));
- } if(!plotFiducial.contains(pair[1]) && inFiducialRegion(pair[1])) {
- plotFiducial.add(pair[1]);
- t1TimeEnergyFiducial.fill(pair[1].getEnergy(), TriggerModule.getClusterTime(pair[1]));
- }
- }
- }
-
- // Populate the tier 2 analysis plots, if the conditions were met.
- if(t1Passed && t2Passed) {
- // Increment the number of tier 2 events found.
- t2Events++;
-
- // Track which clusters have already been added to the
- // singles plot so that there are no repeats.
- Set<Cluster> plotSet = new HashSet<Cluster>(clusterList.size());
- Set<Cluster> plotFiducial = new HashSet<Cluster>(clusterList.size());
-
- for(ReconstructedParticle[] pair : trackPairList) {
- t2InvariantMassAll.fill(getInvariantMass(pair));
- }
-
- for(Cluster[] pair : pairList) {
- // Only include clusters that pass the time coincidence.
- if(TriggerModule.getValueTimeCoincidence(pair) > t1TimeThreshold) {
- continue;
- }
-
- // Fill the all pairs plots.
- double pairEnergy = pair[0].getEnergy() + pair[1].getEnergy();
- t2EnergySumAll.fill(pairEnergy);
- t2EnergySum2DAll.fill(pair[1].getEnergy(), pair[0].getEnergy());
- t2TimeCoincidenceAll.fill(getTimeConicidence(pair));
- t2SumCoplanarityCalcAll.fill(pairEnergy, getCalculatedCoplanarity(pair));
- t2SumCoplanarityAll.fill(pairEnergy, TriggerModule.getValueCoplanarity(pair));
-
- // Fill the singles plots.
- if(!plotSet.contains(pair[0])) {
- plotSet.add(pair[0]);
- t2TimeEnergyAll.fill(pair[0].getEnergy(), TriggerModule.getClusterTime(pair[0]));
- } if(!plotSet.contains(pair[1])) {
- plotSet.add(pair[1]);
- t2TimeEnergyAll.fill(pair[1].getEnergy(), TriggerModule.getClusterTime(pair[1]));
- }
-
- // Fill the fiducial plots if appropriate.
- if(inFiducialRegion(pair[0]) && inFiducialRegion(pair[1])) {
- t2EnergySumFiducial.fill(pairEnergy);
- t2EnergySum2DFiducial.fill(pair[1].getEnergy(), pair[0].getEnergy());
- t2TimeCoincidenceFiducial.fill(getTimeConicidence(pair));
- t2SumCoplanarityCalcFiducial.fill(pairEnergy, getCalculatedCoplanarity(pair));
- t2SumCoplanarityFiducial.fill(pairEnergy, TriggerModule.getValueCoplanarity(pair));
- }
-
- // Fill the singles fiducial plots if appropriate.
- if(!plotFiducial.contains(pair[0]) && inFiducialRegion(pair[0])) {
- plotFiducial.add(pair[0]);
- t2TimeEnergyFiducial.fill(pair[0].getEnergy(), TriggerModule.getClusterTime(pair[0]));
- } if(!plotFiducial.contains(pair[1]) && inFiducialRegion(pair[1])) {
- plotFiducial.add(pair[1]);
- t2TimeEnergyFiducial.fill(pair[1].getEnergy(), TriggerModule.getClusterTime(pair[1]));
- }
- }
- }
- }
-
- public static final double getInvariantMass(ReconstructedParticle[] pair) {
- // Get energy.
- double[] energy = new double[2];
- final double electronMassSquared = Math.pow(0.00051099891, 2);
- energy[0] = Math.sqrt(pair[0].getMomentum().magnitudeSquared() + electronMassSquared);
- energy[1] = Math.sqrt(pair[1].getMomentum().magnitudeSquared() + electronMassSquared);
-
- // Calculate the invariant mass.
- return Math.sqrt(Math.pow(energy[0] + energy[1], 2) - Math.pow(pair[0].getMomentum().x() + pair[1].getMomentum().x(), 2)
- + Math.pow(pair[0].getMomentum().y() + pair[1].getMomentum().y(), 2) + Math.pow(pair[0].getMomentum().z() + pair[1].getMomentum().z(), 2));
- }
-
- public static final double getCalculatedCoplanarity(Cluster[] pair) {
- // Define the x- and y-coordinates of the clusters as well as
- // calorimeter center.
- final double ORIGIN_X = 42.52;
- double x[] = { pair[0].getPosition()[0], pair[1].getPosition()[0] };
- double y[] = { pair[0].getPosition()[1], pair[1].getPosition()[1] };
-
+ // Get the list of particles, if it exists.
+ List<ReconstructedParticle> trackList = null;
+ if(event.hasCollection(ReconstructedParticle.class, particleCollectionName)) {
+ trackList = event.get(ReconstructedParticle.class, particleCollectionName);
+ }
+
+ // Get the list of clusters, if it exists.
+ List<Cluster> clusterList = null;
+ if(event.hasCollection(Cluster.class, clusterCollectionName)) {
+ clusterList = event.get(Cluster.class, clusterCollectionName);
+ }
+
+ // Make sure that the cluster and track lists both exist.
+ if(clusterList == null || trackList == null) {
+ return;
+ }
+
+ // Perform tier 1 analysis. This requires that there be at
+ // least one top/bottom cluster pair with a time difference
+ // less than the t1TimeThreshold value (currently 1.5 ns).
+ double t1TimeThreshold = 1.5;
+
+ // Get a list of cluster pairs.
+ List<Cluster[]> pairList = getClusterPairs(clusterList);
+
+ // Iterate over the cluster pairs.
+ boolean t1Passed = false;
+ t1ClusterLoop:
+ for(Cluster[] pair : pairList) {
+ // Check that the time difference for the cluster pair
+ // meets the time cut.
+ if(TriggerModule.getValueTimeCoincidence(pair) <= t1TimeThreshold) {
+ // Note that the tier 1 analysis condition passed.
+ t1Passed = true;
+
+ // Break from the loop.
+ break t1ClusterLoop;
+ }
+ }
+
+ // Perform the additional checks for tier 2 analysis. This
+ // requires that there be at least one top/bottom track pair
+ // and that one track be positive and the other be negative.
+
+ // Get a list of top and bottom track pairs.
+ List<ReconstructedParticle[]> trackPairList = getTrackPairs(trackList);
+
+ // Check that at least one top/bottom track has one negative and
+ // one positive track.
+ boolean t2Passed = false;
+ t2TrackLoop:
+ for(ReconstructedParticle[] pair : trackPairList) {
+ if((pair[0].getCharge() > 0 && pair[1].getCharge() < 0) || (pair[0].getCharge() < 0 && pair[1].getCharge() > 0)) {
+ t2Passed = true;
+ break t2TrackLoop;
+ }
+ }
+
+ // Populate the tier 0 analysis plot.
+ if(true) {
+ // Increment the number of tier 1 events found.
+ t0Events++;
+
+ // Track which clusters have already been added to the
+ // singles plot so that there are no repeats.
+ Set<Cluster> plotSet = new HashSet<Cluster>(clusterList.size());
+ Set<Cluster> plotFiducial = new HashSet<Cluster>(clusterList.size());
+
+ for(ReconstructedParticle[] pair : trackPairList) {
+ t0InvariantMassAll.fill(getInvariantMass(pair));
+ }
+
+ for(Cluster[] pair : pairList) {
+ // Fill the all pairs plots.
+ double pairEnergy = pair[0].getEnergy() + pair[1].getEnergy();
+ t0EnergySumAll.fill(pairEnergy);
+ t0EnergySum2DAll.fill(pair[1].getEnergy(), pair[0].getEnergy());
+ t0TimeCoincidenceAll.fill(getTimeConicidence(pair));
+ t0SumCoplanarityCalcAll.fill(pairEnergy, getCalculatedCoplanarity(pair));
+ t0SumCoplanarityAll.fill(pairEnergy, TriggerModule.getValueCoplanarity(pair));
+
+ // Fill the singles plots.
+ if(!plotSet.contains(pair[0])) {
+ plotSet.add(pair[0]);
+ t0TimeEnergyAll.fill(pair[0].getEnergy(), TriggerModule.getClusterTime(pair[0]));
+ } if(!plotSet.contains(pair[1])) {
+ plotSet.add(pair[1]);
+ t0TimeEnergyAll.fill(pair[1].getEnergy(), TriggerModule.getClusterTime(pair[1]));
+ }
+
+ // Fill the fiducial plots if appropriate.
+ if(inFiducialRegion(pair[0]) && inFiducialRegion(pair[1])) {
+ t0EnergySumFiducial.fill(pairEnergy);
+ t0EnergySum2DFiducial.fill(pair[1].getEnergy(), pair[0].getEnergy());
+ t0TimeCoincidenceFiducial.fill(getTimeConicidence(pair));
+ t0SumCoplanarityCalcFiducial.fill(pairEnergy, getCalculatedCoplanarity(pair));
+ t0SumCoplanarityFiducial.fill(pairEnergy, TriggerModule.getValueCoplanarity(pair));
+ }
+
+ // Fill the singles fiducial plots if appropriate.
+ if(!plotFiducial.contains(pair[0]) && inFiducialRegion(pair[0])) {
+ plotFiducial.add(pair[0]);
+ t0TimeEnergyFiducial.fill(pair[0].getEnergy(), TriggerModule.getClusterTime(pair[0]));
+ } if(!plotFiducial.contains(pair[1]) && inFiducialRegion(pair[1])) {
+ plotFiducial.add(pair[1]);
+ t0TimeEnergyFiducial.fill(pair[1].getEnergy(), TriggerModule.getClusterTime(pair[1]));
+ }
+ }
+ }
+
+ // Populate the tier 1 analysis plots, if the conditions were met.
+ if(t1Passed) {
+ // Increment the number of tier 1 events found.
+ t1Events++;
+
+ // Track which clusters have already been added to the
+ // singles plot so that there are no repeats.
+ Set<Cluster> plotSet = new HashSet<Cluster>(clusterList.size());
+ Set<Cluster> plotFiducial = new HashSet<Cluster>(clusterList.size());
+
+ for(ReconstructedParticle[] pair : trackPairList) {
+ t1InvariantMassAll.fill(getInvariantMass(pair));
+ }
+
+ for(Cluster[] pair : pairList) {
+ // Only include clusters that pass the time coincidence.
+ if(TriggerModule.getValueTimeCoincidence(pair) > t1TimeThreshold) {
+ continue;
+ }
+
+ // Fill the all pairs plots.
+ double pairEnergy = pair[0].getEnergy() + pair[1].getEnergy();
+ t1EnergySumAll.fill(pairEnergy);
+ t1EnergySum2DAll.fill(pair[1].getEnergy(), pair[0].getEnergy());
+ t1TimeCoincidenceAll.fill(getTimeConicidence(pair));
+ t1SumCoplanarityCalcAll.fill(pairEnergy, getCalculatedCoplanarity(pair));
+ t1SumCoplanarityAll.fill(pairEnergy, TriggerModule.getValueCoplanarity(pair));
+
+ // Fill the singles plots.
+ if(!plotSet.contains(pair[0])) {
+ plotSet.add(pair[0]);
+ t1TimeEnergyAll.fill(pair[0].getEnergy(), TriggerModule.getClusterTime(pair[0]));
+ } if(!plotSet.contains(pair[1])) {
+ plotSet.add(pair[1]);
+ t1TimeEnergyAll.fill(pair[1].getEnergy(), TriggerModule.getClusterTime(pair[1]));
+ }
+
+ // Fill the fiducial plots if appropriate.
+ if(inFiducialRegion(pair[0]) && inFiducialRegion(pair[1])) {
+ t1EnergySumFiducial.fill(pairEnergy);
+ t1EnergySum2DFiducial.fill(pair[1].getEnergy(), pair[0].getEnergy());
+ t1TimeCoincidenceFiducial.fill(getTimeConicidence(pair));
+ t1SumCoplanarityCalcFiducial.fill(pairEnergy, getCalculatedCoplanarity(pair));
+ t1SumCoplanarityFiducial.fill(pairEnergy, TriggerModule.getValueCoplanarity(pair));
+ }
+
+ // Fill the singles fiducial plots if appropriate.
+ if(!plotFiducial.contains(pair[0]) && inFiducialRegion(pair[0])) {
+ plotFiducial.add(pair[0]);
+ t1TimeEnergyFiducial.fill(pair[0].getEnergy(), TriggerModule.getClusterTime(pair[0]));
+ } if(!plotFiducial.contains(pair[1]) && inFiducialRegion(pair[1])) {
+ plotFiducial.add(pair[1]);
+ t1TimeEnergyFiducial.fill(pair[1].getEnergy(), TriggerModule.getClusterTime(pair[1]));
+ }
+ }
+ }
+
+ // Populate the tier 2 analysis plots, if the conditions were met.
+ if(t1Passed && t2Passed) {
+ // Increment the number of tier 2 events found.
+ t2Events++;
+
+ // Track which clusters have already been added to the
+ // singles plot so that there are no repeats.
+ Set<Cluster> plotSet = new HashSet<Cluster>(clusterList.size());
+ Set<Cluster> plotFiducial = new HashSet<Cluster>(clusterList.size());
+
+ for(ReconstructedParticle[] pair : trackPairList) {
+ t2InvariantMassAll.fill(getInvariantMass(pair));
+ }
+
+ for(Cluster[] pair : pairList) {
+ // Only include clusters that pass the time coincidence.
+ if(TriggerModule.getValueTimeCoincidence(pair) > t1TimeThreshold) {
+ continue;
+ }
+
+ // Fill the all pairs plots.
+ double pairEnergy = pair[0].getEnergy() + pair[1].getEnergy();
+ t2EnergySumAll.fill(pairEnergy);
+ t2EnergySum2DAll.fill(pair[1].getEnergy(), pair[0].getEnergy());
+ t2TimeCoincidenceAll.fill(getTimeConicidence(pair));
+ t2SumCoplanarityCalcAll.fill(pairEnergy, getCalculatedCoplanarity(pair));
+ t2SumCoplanarityAll.fill(pairEnergy, TriggerModule.getValueCoplanarity(pair));
+
+ // Fill the singles plots.
+ if(!plotSet.contains(pair[0])) {
+ plotSet.add(pair[0]);
+ t2TimeEnergyAll.fill(pair[0].getEnergy(), TriggerModule.getClusterTime(pair[0]));
+ } if(!plotSet.contains(pair[1])) {
+ plotSet.add(pair[1]);
+ t2TimeEnergyAll.fill(pair[1].getEnergy(), TriggerModule.getClusterTime(pair[1]));
+ }
+
+ // Fill the fiducial plots if appropriate.
+ if(inFiducialRegion(pair[0]) && inFiducialRegion(pair[1])) {
+ t2EnergySumFiducial.fill(pairEnergy);
+ t2EnergySum2DFiducial.fill(pair[1].getEnergy(), pair[0].getEnergy());
+ t2TimeCoincidenceFiducial.fill(getTimeConicidence(pair));
+ t2SumCoplanarityCalcFiducial.fill(pairEnergy, getCalculatedCoplanarity(pair));
+ t2SumCoplanarityFiducial.fill(pairEnergy, TriggerModule.getValueCoplanarity(pair));
+ }
+
+ // Fill the singles fiducial plots if appropriate.
+ if(!plotFiducial.contains(pair[0]) && inFiducialRegion(pair[0])) {
+ plotFiducial.add(pair[0]);
+ t2TimeEnergyFiducial.fill(pair[0].getEnergy(), TriggerModule.getClusterTime(pair[0]));
+ } if(!plotFiducial.contains(pair[1]) && inFiducialRegion(pair[1])) {
+ plotFiducial.add(pair[1]);
+ t2TimeEnergyFiducial.fill(pair[1].getEnergy(), TriggerModule.getClusterTime(pair[1]));
+ }
+ }
+ }
+ }
+
+ public static final double getInvariantMass(ReconstructedParticle[] pair) {
+ // Get energy.
+ double[] energy = new double[2];
+ final double electronMassSquared = Math.pow(0.00051099891, 2);
+ energy[0] = Math.sqrt(pair[0].getMomentum().magnitudeSquared() + electronMassSquared);
+ energy[1] = Math.sqrt(pair[1].getMomentum().magnitudeSquared() + electronMassSquared);
+
+ // Calculate the invariant mass.
+ return Math.sqrt(Math.pow(energy[0] + energy[1], 2) - Math.pow(pair[0].getMomentum().x() + pair[1].getMomentum().x(), 2)
+ + Math.pow(pair[0].getMomentum().y() + pair[1].getMomentum().y(), 2) + Math.pow(pair[0].getMomentum().z() + pair[1].getMomentum().z(), 2));
+ }
+
+ public static final double getCalculatedCoplanarity(Cluster[] pair) {
+ // Define the x- and y-coordinates of the clusters as well as
+ // calorimeter center.
+ final double ORIGIN_X = 42.52;
+ double x[] = { pair[0].getPosition()[0], pair[1].getPosition()[0] };
+ double y[] = { pair[0].getPosition()[1], pair[1].getPosition()[1] };
+
// Get the cluster angles.
double[] clusterAngle = new double[2];
for(int i = 0; i < 2; i++) {
- clusterAngle[i] = Math.atan2(y[i], x[i] - ORIGIN_X) * 180 / Math.PI;
- if(clusterAngle[i] <= 0) { clusterAngle[i] += 360; }
+ clusterAngle[i] = Math.atan2(y[i], x[i] - ORIGIN_X) * 180 / Math.PI;
+ if(clusterAngle[i] <= 0) { clusterAngle[i] += 360; }
}
// Calculate the coplanarity cut value.
double clusterDiff = clusterAngle[0] - clusterAngle[1];
return clusterDiff > 0 ? clusterDiff : clusterDiff + 360;
- }
-
- private static final boolean inFiducialRegion(Cluster cluster) {
- // Get the x and y indices for the cluster.
- int ix = TriggerModule.getClusterXIndex(cluster);
- int absx = Math.abs(TriggerModule.getClusterXIndex(cluster));
- int absy = Math.abs(TriggerModule.getClusterYIndex(cluster));
-
- // Check if the cluster is on the top or the bottom of the
- // calorimeter, as defined by |y| == 5. This is an edge cluster
- // and is not in the fiducial region.
- if(absy == 5) {
- return false;
- }
-
- // Check if the cluster is on the extreme left or right side
- // of the calorimeter, as defined by |x| == 23. This is also
- // and edge cluster is not in the fiducial region.
- if(absx == 23) {
- return false;
- }
-
- // Check if the cluster is along the beam gap, as defined by
- // |y| == 1. This is an internal edge cluster and is not in the
- // fiducial region.
- if(absy == 1) {
- return false;
- }
-
- // Lastly, check if the cluster falls along the beam hole, as
- // defined by clusters with -11 <= x <= -1 and |y| == 2. This
- // is not the fiducial region.
- if(absy == 2 && ix <= -1 && ix >= -11) {
- return false;
- }
-
- // If all checks fail, the cluster is in the fiducial region.
- return true;
- }
-
- private static final List<ReconstructedParticle[]> getTrackPairs(List<ReconstructedParticle> tracks) {
- // Separate the tracks into top and bottom tracks.
- List<ReconstructedParticle> topList = new ArrayList<ReconstructedParticle>();
- List<ReconstructedParticle> botList = new ArrayList<ReconstructedParticle>();
- for(ReconstructedParticle track : tracks) {
- // Make sure that the track actually contains tracks.
- if(track.getTracks().size() > 0) {
- // Use the tan(Î) to differentiate "top" and "bottom"
- // tracks from one another.
- if(track.getTracks().get(0).getTrackStates().get(0).getTanLambda() > 0) {
- topList.add(track);
- } else {
- botList.add(track);
- }
- }
- }
-
- // Form all permutations of top and bottom tracks.
- List<ReconstructedParticle[]> pairList = new ArrayList<ReconstructedParticle[]>();
- for(ReconstructedParticle topTrack : topList) {
- for(ReconstructedParticle botTrack : botList) {
- pairList.add(new ReconstructedParticle[] { topTrack, botTrack });
- }
- }
-
- // Return the resulting cluster pairs.
- return pairList;
- }
-
- private static final List<Cluster[]> getClusterPairs(List<Cluster> clusters) {
- // Separate the clusters into top and bottom clusters.
- List<Cluster> topList = new ArrayList<Cluster>();
- List<Cluster> botList = new ArrayList<Cluster>();
- for(Cluster cluster : clusters) {
- if(TriggerModule.getClusterYIndex(cluster) > 0) {
- topList.add(cluster);
- } else {
- botList.add(cluster);
- }
- }
-
- // Form all permutations of top and bottom clusters.
- List<Cluster[]> pairList = new ArrayList<Cluster[]>();
- for(Cluster topCluster : topList) {
- for(Cluster botCluster : botList) {
- pairList.add(new Cluster[] { topCluster, botCluster });
- }
- }
-
- // Return the resulting cluster pairs.
- return pairList;
- }
-
- public static final double getTimeConicidence(Cluster[] pair) {
- return TriggerModule.getClusterSeedHit(pair[1]).getTime() - TriggerModule.getClusterSeedHit(pair[0]).getTime();
- }
-
- public void setUseGoodSVT(boolean state) {
- useGoodSVT = state;
- }
+ }
+
+ private static final boolean inFiducialRegion(Cluster cluster) {
+ // Get the x and y indices for the cluster.
+ int ix = TriggerModule.getClusterXIndex(cluster);
+ int absx = Math.abs(TriggerModule.getClusterXIndex(cluster));
+ int absy = Math.abs(TriggerModule.getClusterYIndex(cluster));
+
+ // Check if the cluster is on the top or the bottom of the
+ // calorimeter, as defined by |y| == 5. This is an edge cluster
+ // and is not in the fiducial region.
+ if(absy == 5) {
+ return false;
+ }
+
+ // Check if the cluster is on the extreme left or right side
+ // of the calorimeter, as defined by |x| == 23. This is also
+ // an edge cluster and is not in the fiducial region.
+ if(absx == 23) {
+ return false;
+ }
+
+ // Check if the cluster is along the beam gap, as defined by
+ // |y| == 1. This is an internal edge cluster and is not in the
+ // fiducial region.
+ if(absy == 1) {
+ return false;
+ }
+
+ // Lastly, check if the cluster falls along the beam hole, as
+ // defined by clusters with -11 <= x <= -1 and |y| == 2. This
+ // is not the fiducial region.
+ if(absy == 2 && ix <= -1 && ix >= -11) {
+ return false;
+ }
+
+ // If all checks fail, the cluster is in the fiducial region.
+ return true;
+ }
+
+ private static final List<ReconstructedParticle[]> getTrackPairs(List<ReconstructedParticle> tracks) {
+ // Separate the tracks into top and bottom tracks.
+ List<ReconstructedParticle> topList = new ArrayList<ReconstructedParticle>();
+ List<ReconstructedParticle> botList = new ArrayList<ReconstructedParticle>();
+ for(ReconstructedParticle track : tracks) {
+ // Make sure that the track actually contains tracks.
+ if(track.getTracks().size() > 0) {
+ // Use tan(lambda) to differentiate "top" and "bottom"
+ // tracks from one another.
+ if(track.getTracks().get(0).getTrackStates().get(0).getTanLambda() > 0) {
+ topList.add(track);
+ } else {
+ botList.add(track);
+ }
+ }
+ }
+
+ // Form all permutations of top and bottom tracks.
+ List<ReconstructedParticle[]> pairList = new ArrayList<ReconstructedParticle[]>();
+ for(ReconstructedParticle topTrack : topList) {
+ for(ReconstructedParticle botTrack : botList) {
+ pairList.add(new ReconstructedParticle[] { topTrack, botTrack });
+ }
+ }
+
+ // Return the resulting track pairs.
+ return pairList;
+ }
+
+ private static final List<Cluster[]> getClusterPairs(List<Cluster> clusters) {
+ // Separate the clusters into top and bottom clusters.
+ List<Cluster> topList = new ArrayList<Cluster>();
+ List<Cluster> botList = new ArrayList<Cluster>();
+ for(Cluster cluster : clusters) {
+ if(TriggerModule.getClusterYIndex(cluster) > 0) {
+ topList.add(cluster);
+ } else {
+ botList.add(cluster);
+ }
+ }
+
+ // Form all permutations of top and bottom clusters.
+ List<Cluster[]> pairList = new ArrayList<Cluster[]>();
+ for(Cluster topCluster : topList) {
+ for(Cluster botCluster : botList) {
+ pairList.add(new Cluster[] { topCluster, botCluster });
+ }
+ }
+
+ // Return the resulting cluster pairs.
+ return pairList;
+ }
+
+ public static final double getTimeConicidence(Cluster[] pair) {
+ return TriggerModule.getClusterSeedHit(pair[1]).getTime() - TriggerModule.getClusterSeedHit(pair[0]).getTime();
+ }
+
+ public void setUseGoodSVT(boolean state) {
+ useGoodSVT = state;
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/TridentTrackDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/TridentTrackDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/TridentTrackDriver.java Wed Apr 27 11:11:32 2016
@@ -15,229 +15,229 @@
import org.lcsim.util.aida.AIDA;
public class TridentTrackDriver extends Driver {
- private String finalStateCollectionName = "FinalStateParticles";
- private String candidateCollectionName = "UnconstrainedV0Candidates";
-
- private int tracksCandidate = 0;
- private int tracksFinalState = 0;
- private int tracksCandidateCluster = 0;
- private int tracksFinalStateCluster = 0;
-
- private static final int ANY_CLUSTER = 0;
- private static final int HAS_CLUSTER = 1;
-
- private AIDA aida = AIDA.defaultInstance();
- private IHistogram1D[] tracks = new IHistogram1D[2];
- private IHistogram1D[] posTracks = new IHistogram1D[2];
- private IHistogram1D[] negTracks = new IHistogram1D[2];
- private IHistogram1D[] posMomentum = new IHistogram1D[2];
- private IHistogram1D[] negMomentum = new IHistogram1D[2];
- private IHistogram1D[] energySum = new IHistogram1D[2];
- private IHistogram1D[] energyMomentumDiff = new IHistogram1D[2];
- private IHistogram1D[] momentumSum = new IHistogram1D[2];
- private IHistogram1D[] invariantMass = new IHistogram1D[2];
- private IHistogram2D[] energySum2D = new IHistogram2D[2];
- private IHistogram2D[] momentumSum2D = new IHistogram2D[2];
- private IHistogram2D[] position = new IHistogram2D[2];
-
- @Override
- public void startOfData() {
- // Instantiate the "any cluster status" plots.
- tracks[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Tracks in Event (All)", 7, -0.5, 6.5);
- posTracks[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Tracks in Event (Positive)", 7, -0.5, 6.5);
- negTracks[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Tracks in Event (Negative)", 7, -0.5, 6.5);
- posMomentum[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Momentum (Positive)", 110, 0, 1.1);
- negMomentum[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Momentum (Negative)", 110, 0, 1.1);
- energySum[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Energy Sum", 55, 0, 2.2);
- momentumSum[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Momentum Sum", 55, 0, 2.2);
- energyMomentumDiff[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Energy-Momentum Difference", 55, 0, 2.2);
- invariantMass[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Invariant Mass", 240, 0.000, 0.120);
- energySum2D[ANY_CLUSTER] = aida.histogram2D("Trident Analysis/All/2D Energy Sum", 55, 0, 1.1, 55, 0, 1.1);
- momentumSum2D[ANY_CLUSTER] = aida.histogram2D("Trident Analysis/All/2D Momentum Sum", 55, 0, 1.1, 55, 0, 1.1);
- position[ANY_CLUSTER] = aida.histogram2D("Trident Analysis/All/Track Cluster Position", 46, -23, 23, 11, -5.5, 5.5);
-
- // Instantiate the "has a cluster" plots.
- tracks[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Tracks in Event (All)", 7, -0.5, 6.5);
- posTracks[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Tracks in Event (Positive)", 7, -0.5, 6.5);
- negTracks[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Tracks in Event (Negative)", 7, -0.5, 6.5);
- posMomentum[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Momentum (Positive)", 110, 0, 1.1);
- negMomentum[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Momentum (Negative)", 110, 0, 1.1);
- energySum[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Energy Sum", 55, 0, 2.2);
- momentumSum[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Momentum Sum", 55, 0, 2.2);
- energyMomentumDiff[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Energy-Momentum Difference", 55, 0, 2.2);
- invariantMass[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Invariant Mass", 240, 0.000, 0.120);
- energySum2D[HAS_CLUSTER] = aida.histogram2D("Trident Analysis/Cluster/2D Energy Sum", 55, 0, 1.1, 55, 0, 1.1);
- momentumSum2D[HAS_CLUSTER] = aida.histogram2D("Trident Analysis/Cluster/2D Momentum Sum", 55, 0, 1.1, 55, 0, 1.1);
- position[HAS_CLUSTER] = aida.histogram2D("Trident Analysis/Cluster/Track Cluster Position", 46, -23, 23, 11, -5.5, 5.5);
- }
-
- @Override
- public void endOfData() {
- System.out.printf("Tracks (Candidate) :: %d%n", tracksCandidate);
- System.out.printf("Tracks (Final State) :: %d%n", tracksFinalState);
- System.out.printf("Cluster Tracks (Candidate) :: %d%n", tracksCandidateCluster);
- System.out.printf("Cluster Tracks (Final State) :: %d%n", tracksFinalStateCluster);
- }
-
- @Override
- public void process(EventHeader event) {
- // Check for final state particles.
- if(event.hasCollection(ReconstructedParticle.class, finalStateCollectionName)) {
- // Get the final state particles.
- List<ReconstructedParticle> trackList = event.get(ReconstructedParticle.class, finalStateCollectionName);
-
- // Store the positive and negative tracks.
- List<ReconstructedParticle> allTrackList = new ArrayList<ReconstructedParticle>();
- List<ReconstructedParticle> posTrackList = new ArrayList<ReconstructedParticle>();
- List<ReconstructedParticle> negTrackList = new ArrayList<ReconstructedParticle>();
-
- // Store the same tracks, but limited to those with clusters.
- List<ReconstructedParticle> allClusterTrackList = new ArrayList<ReconstructedParticle>();
- List<ReconstructedParticle> posClusterTrackList = new ArrayList<ReconstructedParticle>();
- List<ReconstructedParticle> negClusterTrackList = new ArrayList<ReconstructedParticle>();
-
- // Iterate over the tracks and populate the lists.
- for(ReconstructedParticle track : trackList) {
- // Skip instances with no raw tracks.
- if(track.getTracks().size() == 0) { continue; }
-
- // Add the cluster to the all track list.
- allTrackList.add(track);
-
- // Track the number of cluster tracks.
- tracksFinalState++;
- if(!track.getClusters().isEmpty()) {
- tracksFinalStateCluster++;
- }
-
- // Process the track position plots.
- Hep3Vector trackPosAtEcal = TrackUtils.extrapolateTrack(track.getTracks().get(0), 1394.5);
- position[ANY_CLUSTER].fill(trackPosAtEcal.x(), trackPosAtEcal.y());
-
- // Process the tracks based on charge.
- if(track.getCharge() > 0) {
- // Increment the counters and populate the momentum plots.
- posTrackList.add(track);
- posMomentum[ANY_CLUSTER].fill(track.getMomentum().magnitude());
-
- // Repeat for the "has clusters" plots if necessary.
- if(track.getClusters().size() > 0) {
- // Increment the counters and populate the
- // momentum plot.
- posClusterTrackList.add(track);
- allClusterTrackList.add(track);
- posMomentum[HAS_CLUSTER].fill(track.getMomentum().magnitude());
-
- // Populate the cluster position plot.
- //int ix = track.getClusters().get(0).getCalorimeterHits().get(0).getIdentifierFieldValue("ix");
- //int iy = track.getClusters().get(0).getCalorimeterHits().get(0).getIdentifierFieldValue("iy");
- position[HAS_CLUSTER].fill(trackPosAtEcal.x(), trackPosAtEcal.y());
- }
- } else if(track.getCharge() < 0) {
- // Increment the counters and populate the momentum plots.
- negTrackList.add(track);
- negMomentum[ANY_CLUSTER].fill(track.getMomentum().magnitude());
-
- // Repeat for the "has clusters" plots if necessary.
- if(track.getClusters().size() > 0) {
- // Increment the counters and populate the
- // momentum plot.
- negClusterTrackList.add(track);
- allClusterTrackList.add(track);
- negMomentum[HAS_CLUSTER].fill(track.getMomentum().magnitude());
-
- // Populate the cluster position plot.
- //int ix = track.getClusters().get(0).getCalorimeterHits().get(0).getIdentifierFieldValue("ix");
- //int iy = track.getClusters().get(0).getCalorimeterHits().get(0).getIdentifierFieldValue("iy");
- position[HAS_CLUSTER].fill(trackPosAtEcal.x(), trackPosAtEcal.y());
- }
- } else {
- if(track.getClusters().size() > 0) {
- // Increment the counter.
- allClusterTrackList.add(track);
-
- // Populate the cluster position plot.
- //int ix = track.getClusters().get(0).getCalorimeterHits().get(0).getIdentifierFieldValue("ix");
- //int iy = track.getClusters().get(0).getCalorimeterHits().get(0).getIdentifierFieldValue("iy");
- position[HAS_CLUSTER].fill(trackPosAtEcal.x(), trackPosAtEcal.y());
- }
- }
- }
-
- // Populate the tracks per event plots.
- tracks[ANY_CLUSTER].fill(allTrackList.size());
- tracks[HAS_CLUSTER].fill(allClusterTrackList.size());
- posTracks[ANY_CLUSTER].fill(posTrackList.size());
- posTracks[HAS_CLUSTER].fill(posClusterTrackList.size());
- negTracks[ANY_CLUSTER].fill(negTrackList.size());
- negTracks[HAS_CLUSTER].fill(negClusterTrackList.size());
-
- /// Store track pairs.
- List<ReconstructedParticle[]> pairList = new ArrayList<ReconstructedParticle[]>();
- List<ReconstructedParticle[]> pairClusterList = new ArrayList<ReconstructedParticle[]>();
-
- // Form track pairs for all tracks.
- for(ReconstructedParticle posTrack : posTrackList) {
- for(ReconstructedParticle negTrack : negTrackList) {
- pairList.add(new ReconstructedParticle[] { posTrack, negTrack });
- }
- }
-
- // Form track pairs for cluster tracks.
- for(ReconstructedParticle posTrack : posClusterTrackList) {
- for(ReconstructedParticle negTrack : negClusterTrackList) {
- pairClusterList.add(new ReconstructedParticle[] { posTrack, negTrack });
- }
- }
-
- // Populate the track pair plots.
- for(ReconstructedParticle[] pair : pairList) {
- Hep3Vector pSum = new BasicHep3Vector(
- pair[0].getMomentum().x() + pair[1].getMomentum().x(),
- pair[0].getMomentum().y() + pair[1].getMomentum().y(),
- pair[0].getMomentum().z() + pair[1].getMomentum().z());
-
- energySum[ANY_CLUSTER].fill(pair[0].getEnergy() + pair[1].getEnergy());
- momentumSum[ANY_CLUSTER].fill(pSum.magnitude());
- energySum2D[ANY_CLUSTER].fill(pair[0].getEnergy(), pair[1].getEnergy());
- momentumSum2D[ANY_CLUSTER].fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
- energyMomentumDiff[ANY_CLUSTER].fill(Math.abs((pair[0].getEnergy() + pair[1].getEnergy()) - pSum.magnitude()));
- }
-
- // Populate the cluster track pair plots.
- for(ReconstructedParticle[] pair : pairClusterList) {
- Hep3Vector pSum = new BasicHep3Vector(
- pair[0].getMomentum().x() + pair[1].getMomentum().x(),
- pair[0].getMomentum().y() + pair[1].getMomentum().y(),
- pair[0].getMomentum().z() + pair[1].getMomentum().z());
-
- energySum[HAS_CLUSTER].fill(pair[0].getEnergy() + pair[1].getEnergy());
- momentumSum[HAS_CLUSTER].fill(pSum.magnitude());
- energySum2D[HAS_CLUSTER].fill(pair[0].getEnergy(), pair[1].getEnergy());
- momentumSum2D[HAS_CLUSTER].fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
- energyMomentumDiff[HAS_CLUSTER].fill(Math.abs((pair[0].getEnergy() + pair[1].getEnergy()) - pSum.magnitude()));
- }
- }
-
- // Check for V0 candidates.
- if(event.hasCollection(ReconstructedParticle.class, candidateCollectionName)) {
- // Get the candidate particles.
- List<ReconstructedParticle> trackList = event.get(ReconstructedParticle.class, candidateCollectionName);
-
- // Increment the counter.
- tracksCandidate += trackList.size();
-
- // Increment the counter for cluster tracks.
- for(ReconstructedParticle track : trackList) {
- // Populate the invariant mass plot.
- invariantMass[ANY_CLUSTER].fill(track.getMass());
-
- // Check for a cluster track.
- if(track.getClusters().size() > 0) {
- tracksCandidateCluster++;
- invariantMass[HAS_CLUSTER].fill(track.getMass());
- }
- }
- }
- }
+ private String finalStateCollectionName = "FinalStateParticles";
+ private String candidateCollectionName = "UnconstrainedV0Candidates";
+
+ private int tracksCandidate = 0;
+ private int tracksFinalState = 0;
+ private int tracksCandidateCluster = 0;
+ private int tracksFinalStateCluster = 0;
+
+ private static final int ANY_CLUSTER = 0;
+ private static final int HAS_CLUSTER = 1;
+
+ private AIDA aida = AIDA.defaultInstance();
+ private IHistogram1D[] tracks = new IHistogram1D[2];
+ private IHistogram1D[] posTracks = new IHistogram1D[2];
+ private IHistogram1D[] negTracks = new IHistogram1D[2];
+ private IHistogram1D[] posMomentum = new IHistogram1D[2];
+ private IHistogram1D[] negMomentum = new IHistogram1D[2];
+ private IHistogram1D[] energySum = new IHistogram1D[2];
+ private IHistogram1D[] energyMomentumDiff = new IHistogram1D[2];
+ private IHistogram1D[] momentumSum = new IHistogram1D[2];
+ private IHistogram1D[] invariantMass = new IHistogram1D[2];
+ private IHistogram2D[] energySum2D = new IHistogram2D[2];
+ private IHistogram2D[] momentumSum2D = new IHistogram2D[2];
+ private IHistogram2D[] position = new IHistogram2D[2];
+
+ @Override
+ public void startOfData() {
+ // Instantiate the "any cluster status" plots.
+ tracks[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Tracks in Event (All)", 7, -0.5, 6.5);
+ posTracks[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Tracks in Event (Positive)", 7, -0.5, 6.5);
+ negTracks[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Tracks in Event (Negative)", 7, -0.5, 6.5);
+ posMomentum[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Momentum (Positive)", 110, 0, 1.1);
+ negMomentum[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Momentum (Negative)", 110, 0, 1.1);
+ energySum[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Energy Sum", 55, 0, 2.2);
+ momentumSum[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Momentum Sum", 55, 0, 2.2);
+ energyMomentumDiff[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Energy-Momentum Difference", 55, 0, 2.2);
+ invariantMass[ANY_CLUSTER] = aida.histogram1D("Trident Analysis/All/Invariant Mass", 240, 0.000, 0.120);
+ energySum2D[ANY_CLUSTER] = aida.histogram2D("Trident Analysis/All/2D Energy Sum", 55, 0, 1.1, 55, 0, 1.1);
+ momentumSum2D[ANY_CLUSTER] = aida.histogram2D("Trident Analysis/All/2D Momentum Sum", 55, 0, 1.1, 55, 0, 1.1);
+ position[ANY_CLUSTER] = aida.histogram2D("Trident Analysis/All/Track Cluster Position", 46, -23, 23, 11, -5.5, 5.5);
+
+ // Instantiate the "has a cluster" plots.
+ tracks[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Tracks in Event (All)", 7, -0.5, 6.5);
+ posTracks[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Tracks in Event (Positive)", 7, -0.5, 6.5);
+ negTracks[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Tracks in Event (Negative)", 7, -0.5, 6.5);
+ posMomentum[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Momentum (Positive)", 110, 0, 1.1);
+ negMomentum[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Momentum (Negative)", 110, 0, 1.1);
+ energySum[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Energy Sum", 55, 0, 2.2);
+ momentumSum[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Momentum Sum", 55, 0, 2.2);
+ energyMomentumDiff[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Energy-Momentum Difference", 55, 0, 2.2);
+ invariantMass[HAS_CLUSTER] = aida.histogram1D("Trident Analysis/Cluster/Invariant Mass", 240, 0.000, 0.120);
+ energySum2D[HAS_CLUSTER] = aida.histogram2D("Trident Analysis/Cluster/2D Energy Sum", 55, 0, 1.1, 55, 0, 1.1);
+ momentumSum2D[HAS_CLUSTER] = aida.histogram2D("Trident Analysis/Cluster/2D Momentum Sum", 55, 0, 1.1, 55, 0, 1.1);
+ position[HAS_CLUSTER] = aida.histogram2D("Trident Analysis/Cluster/Track Cluster Position", 46, -23, 23, 11, -5.5, 5.5);
+ }
+
+ @Override
+ public void endOfData() {
+ System.out.printf("Tracks (Candidate) :: %d%n", tracksCandidate);
+ System.out.printf("Tracks (Final State) :: %d%n", tracksFinalState);
+ System.out.printf("Cluster Tracks (Candidate) :: %d%n", tracksCandidateCluster);
+ System.out.printf("Cluster Tracks (Final State) :: %d%n", tracksFinalStateCluster);
+ }
+
+ @Override
+ public void process(EventHeader event) {
+ // Check for final state particles.
+ if(event.hasCollection(ReconstructedParticle.class, finalStateCollectionName)) {
+ // Get the final state particles.
+ List<ReconstructedParticle> trackList = event.get(ReconstructedParticle.class, finalStateCollectionName);
+
+ // Store the positive and negative tracks.
+ List<ReconstructedParticle> allTrackList = new ArrayList<ReconstructedParticle>();
+ List<ReconstructedParticle> posTrackList = new ArrayList<ReconstructedParticle>();
+ List<ReconstructedParticle> negTrackList = new ArrayList<ReconstructedParticle>();
+
+ // Store the same tracks, but limited to those with clusters.
+ List<ReconstructedParticle> allClusterTrackList = new ArrayList<ReconstructedParticle>();
+ List<ReconstructedParticle> posClusterTrackList = new ArrayList<ReconstructedParticle>();
+ List<ReconstructedParticle> negClusterTrackList = new ArrayList<ReconstructedParticle>();
+
+ // Iterate over the tracks and populate the lists.
+ for(ReconstructedParticle track : trackList) {
+ // Skip instances with no raw tracks.
+ if(track.getTracks().size() == 0) { continue; }
+
+ // Add the cluster to the all track list.
+ allTrackList.add(track);
+
+ // Track the number of cluster tracks.
+ tracksFinalState++;
+ if(!track.getClusters().isEmpty()) {
+ tracksFinalStateCluster++;
+ }
+
+ // Process the track position plots.
+ Hep3Vector trackPosAtEcal = TrackUtils.extrapolateTrack(track.getTracks().get(0), 1394.5);
+ position[ANY_CLUSTER].fill(trackPosAtEcal.x(), trackPosAtEcal.y());
+
+ // Process the tracks based on charge.
+ if(track.getCharge() > 0) {
+ // Increment the counters and populate the momentum plots.
+ posTrackList.add(track);
+ posMomentum[ANY_CLUSTER].fill(track.getMomentum().magnitude());
+
+ // Repeat for the "has clusters" plots if necessary.
+ if(track.getClusters().size() > 0) {
+ // Increment the counters and populate the
+ // momentum plot.
+ posClusterTrackList.add(track);
+ allClusterTrackList.add(track);
+ posMomentum[HAS_CLUSTER].fill(track.getMomentum().magnitude());
+
+ // Populate the cluster position plot.
+ //int ix = track.getClusters().get(0).getCalorimeterHits().get(0).getIdentifierFieldValue("ix");
+ //int iy = track.getClusters().get(0).getCalorimeterHits().get(0).getIdentifierFieldValue("iy");
+ position[HAS_CLUSTER].fill(trackPosAtEcal.x(), trackPosAtEcal.y());
+ }
+ } else if(track.getCharge() < 0) {
+ // Increment the counters and populate the momentum plots.
+ negTrackList.add(track);
+ negMomentum[ANY_CLUSTER].fill(track.getMomentum().magnitude());
+
+ // Repeat for the "has clusters" plots if necessary.
+ if(track.getClusters().size() > 0) {
+ // Increment the counters and populate the
+ // momentum plot.
+ negClusterTrackList.add(track);
+ allClusterTrackList.add(track);
+ negMomentum[HAS_CLUSTER].fill(track.getMomentum().magnitude());
+
+ // Populate the cluster position plot.
+ //int ix = track.getClusters().get(0).getCalorimeterHits().get(0).getIdentifierFieldValue("ix");
+ //int iy = track.getClusters().get(0).getCalorimeterHits().get(0).getIdentifierFieldValue("iy");
+ position[HAS_CLUSTER].fill(trackPosAtEcal.x(), trackPosAtEcal.y());
+ }
+ } else {
+ if(track.getClusters().size() > 0) {
+ // Increment the counter.
+ allClusterTrackList.add(track);
+
+ // Populate the cluster position plot.
+ //int ix = track.getClusters().get(0).getCalorimeterHits().get(0).getIdentifierFieldValue("ix");
+ //int iy = track.getClusters().get(0).getCalorimeterHits().get(0).getIdentifierFieldValue("iy");
+ position[HAS_CLUSTER].fill(trackPosAtEcal.x(), trackPosAtEcal.y());
+ }
+ }
+ }
+
+ // Populate the tracks per event plots.
+ tracks[ANY_CLUSTER].fill(allTrackList.size());
+ tracks[HAS_CLUSTER].fill(allClusterTrackList.size());
+ posTracks[ANY_CLUSTER].fill(posTrackList.size());
+ posTracks[HAS_CLUSTER].fill(posClusterTrackList.size());
+ negTracks[ANY_CLUSTER].fill(negTrackList.size());
+ negTracks[HAS_CLUSTER].fill(negClusterTrackList.size());
+
+ /// Store track pairs.
+ List<ReconstructedParticle[]> pairList = new ArrayList<ReconstructedParticle[]>();
+ List<ReconstructedParticle[]> pairClusterList = new ArrayList<ReconstructedParticle[]>();
+
+ // Form track pairs for all tracks.
+ for(ReconstructedParticle posTrack : posTrackList) {
+ for(ReconstructedParticle negTrack : negTrackList) {
+ pairList.add(new ReconstructedParticle[] { posTrack, negTrack });
+ }
+ }
+
+ // Form track pairs for cluster tracks.
+ for(ReconstructedParticle posTrack : posClusterTrackList) {
+ for(ReconstructedParticle negTrack : negClusterTrackList) {
+ pairClusterList.add(new ReconstructedParticle[] { posTrack, negTrack });
+ }
+ }
+
+ // Populate the track pair plots.
+ for(ReconstructedParticle[] pair : pairList) {
+ Hep3Vector pSum = new BasicHep3Vector(
+ pair[0].getMomentum().x() + pair[1].getMomentum().x(),
+ pair[0].getMomentum().y() + pair[1].getMomentum().y(),
+ pair[0].getMomentum().z() + pair[1].getMomentum().z());
+
+ energySum[ANY_CLUSTER].fill(pair[0].getEnergy() + pair[1].getEnergy());
+ momentumSum[ANY_CLUSTER].fill(pSum.magnitude());
+ energySum2D[ANY_CLUSTER].fill(pair[0].getEnergy(), pair[1].getEnergy());
+ momentumSum2D[ANY_CLUSTER].fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
+ energyMomentumDiff[ANY_CLUSTER].fill(Math.abs((pair[0].getEnergy() + pair[1].getEnergy()) - pSum.magnitude()));
+ }
+
+ // Populate the cluster track pair plots.
+ for(ReconstructedParticle[] pair : pairClusterList) {
+ Hep3Vector pSum = new BasicHep3Vector(
+ pair[0].getMomentum().x() + pair[1].getMomentum().x(),
+ pair[0].getMomentum().y() + pair[1].getMomentum().y(),
+ pair[0].getMomentum().z() + pair[1].getMomentum().z());
+
+ energySum[HAS_CLUSTER].fill(pair[0].getEnergy() + pair[1].getEnergy());
+ momentumSum[HAS_CLUSTER].fill(pSum.magnitude());
+ energySum2D[HAS_CLUSTER].fill(pair[0].getEnergy(), pair[1].getEnergy());
+ momentumSum2D[HAS_CLUSTER].fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
+ energyMomentumDiff[HAS_CLUSTER].fill(Math.abs((pair[0].getEnergy() + pair[1].getEnergy()) - pSum.magnitude()));
+ }
+ }
+
+ // Check for V0 candidates.
+ if(event.hasCollection(ReconstructedParticle.class, candidateCollectionName)) {
+ // Get the candidate particles.
+ List<ReconstructedParticle> trackList = event.get(ReconstructedParticle.class, candidateCollectionName);
+
+ // Increment the counter.
+ tracksCandidate += trackList.size();
+
+ // Increment the counter for cluster tracks.
+ for(ReconstructedParticle track : trackList) {
+ // Populate the invariant mass plot.
+ invariantMass[ANY_CLUSTER].fill(track.getMass());
+
+ // Check for a cluster track.
+ if(track.getClusters().size() > 0) {
+ tracksCandidateCluster++;
+ invariantMass[HAS_CLUSTER].fill(track.getMass());
+ }
+ }
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/TriggerPlotsModule.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/TriggerPlotsModule.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/TriggerPlotsModule.java Wed Apr 27 11:11:32 2016
@@ -8,8 +8,8 @@
import org.lcsim.util.aida.AIDA;
public class TriggerPlotsModule {
- // Define plots.
- private AIDA aida = AIDA.defaultInstance();
+ // Define plots.
+ private AIDA aida = AIDA.defaultInstance();
private IHistogram1D singleSeedEnergy;
private IHistogram1D singleHitCount;
private IHistogram1D singleTotalEnergy;
@@ -25,43 +25,43 @@
private IHistogram2D pairDistribution;
private IHistogram2D pairEnergySum2D;
- public TriggerPlotsModule(String moduleName) {
- singleSeedEnergy = aida.histogram1D(moduleName + " Trigger Plots/Singles Plots/Cluster Seed Energy", 176, 0.0, 1.1);
- singleHitCount = aida.histogram1D(moduleName + " Trigger Plots/Singles Plots/Cluster Hit Count", 9, 0.5, 9.5);
- singleTotalEnergy = aida.histogram1D(moduleName + " Trigger Plots/Singles Plots/Cluster Total Energy", 176, 0.0, 1.1);
- singleDistribution = aida.histogram2D(moduleName + " Trigger Plots/Singles Plots/Cluster Seed", 46, -23.0, 23.0, 11, -5.5, 5.5);
- pairSeedEnergy = aida.histogram1D(moduleName + " Trigger Plots/Pair Plots/Cluster Seed Energy", 176, 0.0, 1.1);
- pairHitCount = aida.histogram1D(moduleName + " Trigger Plots/Pair Plots/Cluster Hit Count", 9, 0.5, 9.5);
- pairTotalEnergy = aida.histogram1D(moduleName + " Trigger Plots/Pair Plots/Cluster Total Energy", 176, 0.0, 1.1);
- pairEnergySum = aida.histogram1D(moduleName + " Trigger Plots/Pair Plots/Pair Energy Sum", 176, 0.0, 2.2);
- pairEnergyDifference = aida.histogram1D(moduleName + " Trigger Plots/Pair Plots/Pair Energy Difference", 176, 0.0, 1.1);
- pairCoplanarity = aida.histogram1D(moduleName + " Trigger Plots/Pair Plots/Pair Coplanarity", 180, 0.0, 180.0);
- pairEnergySlope = aida.histogram1D(moduleName + " Trigger Plots/Pair Plots/Pair Energy Slope", 200, 0.0, 3.0);
- pairDistribution = aida.histogram2D(moduleName + " Trigger Plots/Pair Plots/Cluster Seed", 46, -23.0, 23.0, 11, -5.5, 5.5);
- pairEnergySum2D = aida.histogram2D(moduleName + " Trigger Plots/Pair Plots/Pair Energy Sum 2D", 110, 0.0, 1.1, 110, 0.0, 1.1);
- }
-
- public void addCluster(Cluster cluster) {
- singleSeedEnergy.fill(TriggerModule.getValueClusterSeedEnergy(cluster));
- singleHitCount.fill(TriggerModule.getValueClusterHitCount(cluster));
- singleTotalEnergy.fill(TriggerModule.getValueClusterTotalEnergy(cluster));
- singleDistribution.fill(TriggerModule.getClusterXIndex(cluster), TriggerModule.getClusterYIndex(cluster));
- }
-
- public void addClusterPair(Cluster[] pair) {
- // Populate the singles plots.
- for(Cluster cluster : pair) {
- pairSeedEnergy.fill(TriggerModule.getValueClusterSeedEnergy(cluster));
- pairHitCount.fill(TriggerModule.getValueClusterHitCount(cluster));
- pairTotalEnergy.fill(TriggerModule.getValueClusterTotalEnergy(cluster));
- pairDistribution.fill(TriggerModule.getClusterXIndex(cluster), TriggerModule.getClusterYIndex(cluster));
- }
-
- // Populate the pair plots.
- pairEnergySum.fill(TriggerModule.getValueEnergySum(pair));
- pairEnergyDifference.fill(TriggerModule.getValueEnergyDifference(pair));
- pairCoplanarity.fill(TriggerModule.getValueCoplanarity(pair));
- pairEnergySlope.fill(TriggerModule.getValueEnergySlope(pair, 0.0055));
- pairEnergySum2D.fill(TriggerModule.getValueClusterTotalEnergy(pair[0]), TriggerModule.getValueClusterTotalEnergy(pair[1]));
- }
+ public TriggerPlotsModule(String moduleName) {
+ singleSeedEnergy = aida.histogram1D(moduleName + " Trigger Plots/Singles Plots/Cluster Seed Energy", 176, 0.0, 1.1);
+ singleHitCount = aida.histogram1D(moduleName + " Trigger Plots/Singles Plots/Cluster Hit Count", 9, 0.5, 9.5);
+ singleTotalEnergy = aida.histogram1D(moduleName + " Trigger Plots/Singles Plots/Cluster Total Energy", 176, 0.0, 1.1);
+ singleDistribution = aida.histogram2D(moduleName + " Trigger Plots/Singles Plots/Cluster Seed", 46, -23.0, 23.0, 11, -5.5, 5.5);
+ pairSeedEnergy = aida.histogram1D(moduleName + " Trigger Plots/Pair Plots/Cluster Seed Energy", 176, 0.0, 1.1);
+ pairHitCount = aida.histogram1D(moduleName + " Trigger Plots/Pair Plots/Cluster Hit Count", 9, 0.5, 9.5);
+ pairTotalEnergy = aida.histogram1D(moduleName + " Trigger Plots/Pair Plots/Cluster Total Energy", 176, 0.0, 1.1);
+ pairEnergySum = aida.histogram1D(moduleName + " Trigger Plots/Pair Plots/Pair Energy Sum", 176, 0.0, 2.2);
+ pairEnergyDifference = aida.histogram1D(moduleName + " Trigger Plots/Pair Plots/Pair Energy Difference", 176, 0.0, 1.1);
+ pairCoplanarity = aida.histogram1D(moduleName + " Trigger Plots/Pair Plots/Pair Coplanarity", 180, 0.0, 180.0);
+ pairEnergySlope = aida.histogram1D(moduleName + " Trigger Plots/Pair Plots/Pair Energy Slope", 200, 0.0, 3.0);
+ pairDistribution = aida.histogram2D(moduleName + " Trigger Plots/Pair Plots/Cluster Seed", 46, -23.0, 23.0, 11, -5.5, 5.5);
+ pairEnergySum2D = aida.histogram2D(moduleName + " Trigger Plots/Pair Plots/Pair Energy Sum 2D", 110, 0.0, 1.1, 110, 0.0, 1.1);
+ }
+
+ public void addCluster(Cluster cluster) {
+ singleSeedEnergy.fill(TriggerModule.getValueClusterSeedEnergy(cluster));
+ singleHitCount.fill(TriggerModule.getValueClusterHitCount(cluster));
+ singleTotalEnergy.fill(TriggerModule.getValueClusterTotalEnergy(cluster));
+ singleDistribution.fill(TriggerModule.getClusterXIndex(cluster), TriggerModule.getClusterYIndex(cluster));
+ }
+
+ public void addClusterPair(Cluster[] pair) {
+ // Populate the singles plots.
+ for(Cluster cluster : pair) {
+ pairSeedEnergy.fill(TriggerModule.getValueClusterSeedEnergy(cluster));
+ pairHitCount.fill(TriggerModule.getValueClusterHitCount(cluster));
+ pairTotalEnergy.fill(TriggerModule.getValueClusterTotalEnergy(cluster));
+ pairDistribution.fill(TriggerModule.getClusterXIndex(cluster), TriggerModule.getClusterYIndex(cluster));
+ }
+
+ // Populate the pair plots.
+ pairEnergySum.fill(TriggerModule.getValueEnergySum(pair));
+ pairEnergyDifference.fill(TriggerModule.getValueEnergyDifference(pair));
+ pairCoplanarity.fill(TriggerModule.getValueCoplanarity(pair));
+ pairEnergySlope.fill(TriggerModule.getValueEnergySlope(pair, 0.0055));
+ pairEnergySum2D.fill(TriggerModule.getValueClusterTotalEnergy(pair[0]), TriggerModule.getValueClusterTotalEnergy(pair[1]));
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/TriggerProcessAnalysisDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/TriggerProcessAnalysisDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/TriggerProcessAnalysisDriver.java Wed Apr 27 11:11:32 2016
@@ -21,768 +21,768 @@
import org.lcsim.util.aida.AIDA;
public class TriggerProcessAnalysisDriver extends Driver {
- private int eventsProcessed = 0;
- private int møllersProcessed = 0;
- private boolean checkSVT = false;
- private int tridentsProcessed = 0;
- private int gblMøllersProcessed = 0;
- private int gblTridentsProcessed = 0;
- private double timeCoincidence = 2.5;
- private double elasticThreshold = 0.800;
- private double møllerLowerRange = 0.900;
- private double møllerUpperRange = 1.200;
- private AIDA aida = AIDA.defaultInstance();
- private boolean checkTriggerTimeWindow = false;
- private String clusterCollectionName = "EcalClustersCorr";
- private String particleCollectionName = "FinalStateParticles";
-
- // Define trident cluster-track matched condition plots.
- private IHistogram1D trctmInvariantMass = aida.histogram1D("Tridents CTMatched/Invariant Mass", 140, 0.0, 0.070);
- private IHistogram1D trctmInstancesInEvent = aida.histogram1D("Tridents CTMatched/Instances in Event", 9, 0.5, 9.5);
- private IHistogram1D trctmEnergySum1D = aida.histogram1D("Tridents CTMatched/Cluster Energy Sum", 150, 0.000, 1.500);
- private IHistogram1D trctmMomentumSum1D = aida.histogram1D("Tridents CTMatched/Track Momentum Sum", 150, 0.000, 1.500);
- private IHistogram1D trctmElectronEnergy = aida.histogram1D("Tridents CTMatched/Electron Cluster Energy", 150, 0.000, 1.500);
- private IHistogram1D trctmElectronMomentum = aida.histogram1D("Tridents CTMatched/Electron Track Momentum", 150, 0.000, 1.500);
- private IHistogram1D trctmPositronEnergy = aida.histogram1D("Tridents CTMatched/Positron Cluster Energy", 150, 0.000, 1.500);
- private IHistogram1D trctmPositronMomentum = aida.histogram1D("Tridents CTMatched/Positron Track Momentum", 150, 0.000, 1.500);
- private IHistogram1D trctmTimeCoincidence = aida.histogram1D("Tridents CTMatched/Time Coincidence", 100, -4, 4);
- private IHistogram2D trctmClusterPosition = aida.histogram2D("Tridents CTMatched/Cluster Seed Position", 46, -23, 23, 11, -5.5, 5.5);
- private IHistogram2D trctmEnergySum2D = aida.histogram2D("Tridents CTMatched/Cluster Energy Sum 2D", 300, 0.000, 1.500, 300, 0.000, 1.500);
- private IHistogram2D trctmTrackPosition = aida.histogram2D("Tridents CTMatched/Extrapolated Track Position", 200, -400, 400, 55, -110, 110);
- private IHistogram2D trctmMomentumSum2D = aida.histogram2D("Tridents CTMatched/Track Momentum Sum 2D", 300, 0.000, 1.500, 300, 0.000, 1.500);
- private IHistogram2D trctmESumCoplanarity = aida.histogram2D("Tridents CTMatched/Cluster Energy Sum vs. Coplanarity", 300, 0.000, 1.500, 360, 0, 360);
- private IHistogram2D trctmPSumCoplanarity = aida.histogram2D("Tridents CTMatched/Track Momentum Sum vs. Coplanarity", 300, 0.000, 1.500, 360, 0, 360);
-
- // Define the Møller cluster-track matched condition plots.
- private IHistogram1D møctmInvariantMass = aida.histogram1D("Møller CTMatched/Invariant Mass", 140, 0.0, 0.070);
- private IHistogram1D møctmInstancesInEvent = aida.histogram1D("Møller CTMatched/Instances in Event", 9, 0.5, 9.5);
- private IHistogram1D møctmEnergySum1D = aida.histogram1D("Møller CTMatched/Cluster Energy Sum", 150, 0.000, 1.500);
- private IHistogram1D møctmMomentumSum1D = aida.histogram1D("Møller CTMatched/Track Momentum Sum", 150, 0.000, 1.500);
- private IHistogram1D møctmElectronEnergy = aida.histogram1D("Møller CTMatched/Electron Cluster Energy", 150, 0.000, 1.500);
- private IHistogram1D møctmElectronMomentum = aida.histogram1D("Møller CTMatched/Electron Track Momentum", 150, 0.000, 1.500);
- private IHistogram1D møctmTimeCoincidence = aida.histogram1D("Møller CTMatched/Time Coincidence", 100, -4, 4);
- private IHistogram2D møctmClusterPosition = aida.histogram2D("Møller CTMatched/Cluster Seed Position", 46, -23, 23, 11, -5.5, 5.5);
- private IHistogram2D møctmEnergySum2D = aida.histogram2D("Møller CTMatched/Cluster Energy Sum 2D", 300, 0.000, 1.500, 300, 0.000, 1.500);
- private IHistogram2D møctmTrackPosition = aida.histogram2D("Møller CTMatched/Extrapolated Track Position", 200, -400, 400, 55, -110, 110);
- private IHistogram2D møctmMomentumSum2D = aida.histogram2D("Møller CTMatched/Track Momentum Sum 2D", 300, 0.000, 1.500, 300, 0.000, 1.500);
- private IHistogram2D møctmESumCoplanarity = aida.histogram2D("Møller CTMatched/Cluster Energy Sum vs. Coplanarity", 300, 0.000, 1.500, 360, 0, 360);
- private IHistogram2D møctmPSumCoplanarity = aida.histogram2D("Møller CTMatched/Track Momentum Sum vs. Coplanarity", 300, 0.000, 1.500, 360, 0, 360);
-
- // Define the Møller track-only condition plots.
- private IHistogram1D møgblTimeCoincidence = aida.histogram1D("Møller Track-Only/Time Coincidence", 100, -4, 4);
- private IHistogram1D møgblInvariantMass = aida.histogram1D("Møller Track-Only/Invariant Mass", 140, 0.0, 0.070);
- private IHistogram1D møgblInstancesInEvent = aida.histogram1D("Møller Track-Only/Instances in Event", 9, 0.5, 9.5);
- private IHistogram1D møgblMomentumSum1D = aida.histogram1D("Møller Track-Only/Track Momentum Sum", 150, 0.000, 1.500);
- private IHistogram1D møgblElectronMomentum = aida.histogram1D("Møller Track-Only/Electron Track Momentum", 150, 0.000, 1.500);
- private IHistogram2D møgblTrackPosition = aida.histogram2D("Møller Track-Only/Extrapolated Track Position", 200, -400, 400, 55, -110, 110);
- private IHistogram2D møgblMomentumSum2D = aida.histogram2D("Møller Track-Only/Track Momentum Sum 2D", 300, 0.000, 1.500, 300, 0.000, 1.500);
- private IHistogram2D møgblPSumCoplanarity = aida.histogram2D("Møller Track-Only/Track Momentum Sum vs. Coplanarity", 300, 0.000, 1.500, 360, 0, 360);
-
- // Define the GBL trident condition plots.
- private IHistogram1D trgblInvariantMass = aida.histogram1D("Tridents Track-Only/Invariant Mass", 140, 0.0, 0.070);
- private IHistogram1D trgblInstancesInEvent = aida.histogram1D("Tridents Track-Only/Instances in Event", 9, 0.5, 9.5);
- private IHistogram1D trgblMomentumSum1D = aida.histogram1D("Tridents Track-Only/Track Momentum Sum", 150, 0.000, 1.500);
- private IHistogram1D trgblElectronMomentum = aida.histogram1D("Tridents Track-Only/Electron Track Momentum", 150, 0.000, 1.500);
- private IHistogram1D trgblPositronMomentum = aida.histogram1D("Tridents Track-Only/Positron Track Momentum", 150, 0.000, 1.500);
- private IHistogram1D trgblTimeCoincidence = aida.histogram1D("Tridents Track-Only/Time Coincidence", 100, -4, 4);
- private IHistogram2D trgblTrackPosition = aida.histogram2D("Tridents Track-Only/Extrapolated Track Position", 200, -400, 400, 55, -110, 110);
- private IHistogram2D trgblMomentumSum2D = aida.histogram2D("Tridents Track-Only/Track Momentum Sum 2D", 300, 0.000, 1.500, 300, 0.000, 1.500);
- private IHistogram2D trgblPSumCoplanarity = aida.histogram2D("Tridents Track-Only/Track Momentum Sum vs. Coplanarity", 300, 0.000, 1.500, 360, 0, 360);
-
- @Override
- public void endOfData() {
- // Calculate the scaling factor for Hertz.
- double scale = 19000.0 / eventsProcessed;
-
- System.out.println("Processed " + eventsProcessed + " events.");
- System.out.println("Processed " + møllersProcessed + " Møller events");
- System.out.println("\tAcceptance :: " + (100.0 * møllersProcessed / eventsProcessed) + "%");
- System.out.println("\tRate :: " + (møllersProcessed * scale) + " Hz");
-
- System.out.println("Processed " + tridentsProcessed + " trident events");
- System.out.println("\tAcceptance :: " + (100.0 * tridentsProcessed / eventsProcessed) + "%");
- System.out.println("\tRate :: " + (tridentsProcessed * scale) + " Hz");
-
- System.out.println("Processed " + gblMøllersProcessed + " track-only Møller events");
- System.out.println("\tAcceptance :: " + (100.0 * gblMøllersProcessed / eventsProcessed) + "%");
- System.out.println("\tRate :: " + (gblMøllersProcessed * scale) + " Hz");
-
- System.out.println("Processed " + gblTridentsProcessed + " Rafo trident events");
- System.out.println("\tAcceptance :: " + (100.0 * gblTridentsProcessed / eventsProcessed) + "%");
- System.out.println("\tRate :: " + (gblTridentsProcessed * scale) + " Hz");
- }
-
- @Override
- public void process(EventHeader event) {
- // Check whether the SVT was active in this event and, if so,
- // skip it. This can be disabled through the steering file for
- // Monte Carlo data, where the "SVT" is always active.
- if(checkSVT) {
- final String[] flagNames = { "svt_bias_good", "svt_burstmode_noise_good", "svt_position_good" };
- boolean svtGood = true;
- for(int i = 0; i < flagNames.length; i++) {
- int[] flag = event.getIntegerParameters().get(flagNames[i]);
- if(flag == null || flag[0] == 0) {
- svtGood = false;
- }
- }
- if(!svtGood) { return; }
- }
+ private int eventsProcessed = 0;
+ private int møllersProcessed = 0;
+ private boolean checkSVT = false;
+ private int tridentsProcessed = 0;
+ private int gblMøllersProcessed = 0;
+ private int gblTridentsProcessed = 0;
+ private double timeCoincidence = 2.5;
+ private double elasticThreshold = 0.800;
+ private double møllerLowerRange = 0.900;
+ private double møllerUpperRange = 1.200;
+ private AIDA aida = AIDA.defaultInstance();
+ private boolean checkTriggerTimeWindow = false;
+ private String clusterCollectionName = "EcalClustersCorr";
+ private String particleCollectionName = "FinalStateParticles";
+
+ // Define trident cluster-track matched condition plots.
+ private IHistogram1D trctmInvariantMass = aida.histogram1D("Tridents CTMatched/Invariant Mass", 140, 0.0, 0.070);
+ private IHistogram1D trctmInstancesInEvent = aida.histogram1D("Tridents CTMatched/Instances in Event", 9, 0.5, 9.5);
+ private IHistogram1D trctmEnergySum1D = aida.histogram1D("Tridents CTMatched/Cluster Energy Sum", 150, 0.000, 1.500);
+ private IHistogram1D trctmMomentumSum1D = aida.histogram1D("Tridents CTMatched/Track Momentum Sum", 150, 0.000, 1.500);
+ private IHistogram1D trctmElectronEnergy = aida.histogram1D("Tridents CTMatched/Electron Cluster Energy", 150, 0.000, 1.500);
+ private IHistogram1D trctmElectronMomentum = aida.histogram1D("Tridents CTMatched/Electron Track Momentum", 150, 0.000, 1.500);
+ private IHistogram1D trctmPositronEnergy = aida.histogram1D("Tridents CTMatched/Positron Cluster Energy", 150, 0.000, 1.500);
+ private IHistogram1D trctmPositronMomentum = aida.histogram1D("Tridents CTMatched/Positron Track Momentum", 150, 0.000, 1.500);
+ private IHistogram1D trctmTimeCoincidence = aida.histogram1D("Tridents CTMatched/Time Coincidence", 100, -4, 4);
+ private IHistogram2D trctmClusterPosition = aida.histogram2D("Tridents CTMatched/Cluster Seed Position", 46, -23, 23, 11, -5.5, 5.5);
+ private IHistogram2D trctmEnergySum2D = aida.histogram2D("Tridents CTMatched/Cluster Energy Sum 2D", 300, 0.000, 1.500, 300, 0.000, 1.500);
+ private IHistogram2D trctmTrackPosition = aida.histogram2D("Tridents CTMatched/Extrapolated Track Position", 200, -400, 400, 55, -110, 110);
+ private IHistogram2D trctmMomentumSum2D = aida.histogram2D("Tridents CTMatched/Track Momentum Sum 2D", 300, 0.000, 1.500, 300, 0.000, 1.500);
+ private IHistogram2D trctmESumCoplanarity = aida.histogram2D("Tridents CTMatched/Cluster Energy Sum vs. Coplanarity", 300, 0.000, 1.500, 360, 0, 360);
+ private IHistogram2D trctmPSumCoplanarity = aida.histogram2D("Tridents CTMatched/Track Momentum Sum vs. Coplanarity", 300, 0.000, 1.500, 360, 0, 360);
+
+ // Define the Møller cluster-track matched condition plots.
+ private IHistogram1D møctmInvariantMass = aida.histogram1D("Møller CTMatched/Invariant Mass", 140, 0.0, 0.070);
+ private IHistogram1D møctmInstancesInEvent = aida.histogram1D("Møller CTMatched/Instances in Event", 9, 0.5, 9.5);
+ private IHistogram1D møctmEnergySum1D = aida.histogram1D("Møller CTMatched/Cluster Energy Sum", 150, 0.000, 1.500);
+ private IHistogram1D møctmMomentumSum1D = aida.histogram1D("Møller CTMatched/Track Momentum Sum", 150, 0.000, 1.500);
+ private IHistogram1D møctmElectronEnergy = aida.histogram1D("Møller CTMatched/Electron Cluster Energy", 150, 0.000, 1.500);
+ private IHistogram1D møctmElectronMomentum = aida.histogram1D("Møller CTMatched/Electron Track Momentum", 150, 0.000, 1.500);
+ private IHistogram1D møctmTimeCoincidence = aida.histogram1D("Møller CTMatched/Time Coincidence", 100, -4, 4);
+ private IHistogram2D møctmClusterPosition = aida.histogram2D("Møller CTMatched/Cluster Seed Position", 46, -23, 23, 11, -5.5, 5.5);
+ private IHistogram2D møctmEnergySum2D = aida.histogram2D("Møller CTMatched/Cluster Energy Sum 2D", 300, 0.000, 1.500, 300, 0.000, 1.500);
+ private IHistogram2D møctmTrackPosition = aida.histogram2D("Møller CTMatched/Extrapolated Track Position", 200, -400, 400, 55, -110, 110);
+ private IHistogram2D møctmMomentumSum2D = aida.histogram2D("Møller CTMatched/Track Momentum Sum 2D", 300, 0.000, 1.500, 300, 0.000, 1.500);
+ private IHistogram2D møctmESumCoplanarity = aida.histogram2D("Møller CTMatched/Cluster Energy Sum vs. Coplanarity", 300, 0.000, 1.500, 360, 0, 360);
+ private IHistogram2D møctmPSumCoplanarity = aida.histogram2D("Møller CTMatched/Track Momentum Sum vs. Coplanarity", 300, 0.000, 1.500, 360, 0, 360);
+
+ // Define the Møller track-only condition plots.
+ private IHistogram1D møgblTimeCoincidence = aida.histogram1D("Møller Track-Only/Time Coincidence", 100, -4, 4);
+ private IHistogram1D møgblInvariantMass = aida.histogram1D("Møller Track-Only/Invariant Mass", 140, 0.0, 0.070);
+ private IHistogram1D møgblInstancesInEvent = aida.histogram1D("Møller Track-Only/Instances in Event", 9, 0.5, 9.5);
+ private IHistogram1D møgblMomentumSum1D = aida.histogram1D("Møller Track-Only/Track Momentum Sum", 150, 0.000, 1.500);
+ private IHistogram1D møgblElectronMomentum = aida.histogram1D("Møller Track-Only/Electron Track Momentum", 150, 0.000, 1.500);
+ private IHistogram2D møgblTrackPosition = aida.histogram2D("Møller Track-Only/Extrapolated Track Position", 200, -400, 400, 55, -110, 110);
+ private IHistogram2D møgblMomentumSum2D = aida.histogram2D("Møller Track-Only/Track Momentum Sum 2D", 300, 0.000, 1.500, 300, 0.000, 1.500);
+ private IHistogram2D møgblPSumCoplanarity = aida.histogram2D("Møller Track-Only/Track Momentum Sum vs. Coplanarity", 300, 0.000, 1.500, 360, 0, 360);
+
+ // Define the GBL trident condition plots.
+ private IHistogram1D trgblInvariantMass = aida.histogram1D("Tridents Track-Only/Invariant Mass", 140, 0.0, 0.070);
+ private IHistogram1D trgblInstancesInEvent = aida.histogram1D("Tridents Track-Only/Instances in Event", 9, 0.5, 9.5);
+ private IHistogram1D trgblMomentumSum1D = aida.histogram1D("Tridents Track-Only/Track Momentum Sum", 150, 0.000, 1.500);
+ private IHistogram1D trgblElectronMomentum = aida.histogram1D("Tridents Track-Only/Electron Track Momentum", 150, 0.000, 1.500);
+ private IHistogram1D trgblPositronMomentum = aida.histogram1D("Tridents Track-Only/Positron Track Momentum", 150, 0.000, 1.500);
+ private IHistogram1D trgblTimeCoincidence = aida.histogram1D("Tridents Track-Only/Time Coincidence", 100, -4, 4);
+ private IHistogram2D trgblTrackPosition = aida.histogram2D("Tridents Track-Only/Extrapolated Track Position", 200, -400, 400, 55, -110, 110);
+ private IHistogram2D trgblMomentumSum2D = aida.histogram2D("Tridents Track-Only/Track Momentum Sum 2D", 300, 0.000, 1.500, 300, 0.000, 1.500);
+ private IHistogram2D trgblPSumCoplanarity = aida.histogram2D("Tridents Track-Only/Track Momentum Sum vs. Coplanarity", 300, 0.000, 1.500, 360, 0, 360);
+
+ @Override
+ public void endOfData() {
+ // Calculate the scaling factor for Hertz.
+ double scale = 19000.0 / eventsProcessed;
+
+ System.out.println("Processed " + eventsProcessed + " events.");
+ System.out.println("Processed " + møllersProcessed + " Møller events");
+ System.out.println("\tAcceptance :: " + (100.0 * møllersProcessed / eventsProcessed) + "%");
+ System.out.println("\tRate :: " + (møllersProcessed * scale) + " Hz");
+
+ System.out.println("Processed " + tridentsProcessed + " trident events");
+ System.out.println("\tAcceptance :: " + (100.0 * tridentsProcessed / eventsProcessed) + "%");
+ System.out.println("\tRate :: " + (tridentsProcessed * scale) + " Hz");
+
+ System.out.println("Processed " + gblMøllersProcessed + " track-only Møller events");
+ System.out.println("\tAcceptance :: " + (100.0 * gblMøllersProcessed / eventsProcessed) + "%");
+ System.out.println("\tRate :: " + (gblMøllersProcessed * scale) + " Hz");
+
+ System.out.println("Processed " + gblTridentsProcessed + " Rafo trident events");
+ System.out.println("\tAcceptance :: " + (100.0 * gblTridentsProcessed / eventsProcessed) + "%");
+ System.out.println("\tRate :: " + (gblTridentsProcessed * scale) + " Hz");
+ }
+
+ @Override
+ public void process(EventHeader event) {
+ // Check whether the SVT was active in this event and, if so,
+ // skip it. This can be disabled through the steering file for
+ // Monte Carlo data, where the "SVT" is always active.
+ if(checkSVT) {
+ final String[] flagNames = { "svt_bias_good", "svt_burstmode_noise_good", "svt_position_good" };
+ boolean svtGood = true;
+ for(int i = 0; i < flagNames.length; i++) {
+ int[] flag = event.getIntegerParameters().get(flagNames[i]);
+ if(flag == null || flag[0] == 0) {
+ svtGood = false;
+ }
+ }
+ if(!svtGood) { return; }
+ }
// Track the number of events with good SVT.
eventsProcessed++;
- // Check if the event has a collection of tracks. If it exists,
+ // Check if the event has a collection of tracks. If it exists,
// extract it. Otherwise, skip the event.
- if(!event.hasCollection(ReconstructedParticle.class, particleCollectionName)) {
- return;
- }
- List<ReconstructedParticle> trackList = event.get(ReconstructedParticle.class, particleCollectionName);
-
- // Check if the event has a collection of clusters. If it
- // exists, extract it. Otherwise, skip the event.
- if(!event.hasCollection(Cluster.class, clusterCollectionName)) {
- return;
- }
- List<Cluster> clusterList = event.get(Cluster.class, clusterCollectionName);
-
- // Get cluster-track matched top/bottom pairs.
- List<ReconstructedParticle[]> gblMatchedPairs = getTopBottomTracksGBL(trackList);
- List<ReconstructedParticle[]> ctMatchedPairs = getTopBottomTracksCTMatched(trackList);
-
- System.out.println("CTM Pairs :: " + ctMatchedPairs.size());
- System.out.println("GBL Pairs :: " + gblMatchedPairs.size());
-
- // Get the trident and Møller tracks for the matched track
- // and cluster pair condition sets.
- List<ReconstructedParticle[]> møllers = getMøllerTracksCTMatched(ctMatchedPairs);
- List<ReconstructedParticle[]> møllersGBL = getMøllerTracksGBL(gblMatchedPairs, event);
- List<ReconstructedParticle[]> tridents = getTridentTracksCTMatched(ctMatchedPairs);
- List<ReconstructedParticle[]> tridentsGBL = getTridentClustersGBL(gblMatchedPairs, TriggerModule.getTopBottomPairs(clusterList, Cluster.class), event);
-
- // Track how many events had tridents and Møllers.
- if(!møllers.isEmpty()) { møllersProcessed++; }
- if(!tridents.isEmpty()) { tridentsProcessed++; }
- if(!møllersGBL.isEmpty()) { gblMøllersProcessed++; }
- if(!tridentsGBL.isEmpty()) { gblTridentsProcessed++; }
-
- // Produce Møller cluster-track matched plots.
- møctmInstancesInEvent.fill(møllers.size());
- for(ReconstructedParticle[] pair : møllers) {
- // Get the track clusters.
- Track[] tracks = { pair[0].getTracks().get(0), pair[1].getTracks().get(0) };
- Cluster[] trackClusters = { pair[0].getClusters().get(0), pair[1].getClusters().get(0) };
-
- // Populate the cluster plots.
- møctmElectronEnergy.fill(trackClusters[0].getEnergy());
- møctmElectronEnergy.fill(trackClusters[1].getEnergy());
- møctmEnergySum1D.fill(TriggerModule.getValueEnergySum(trackClusters));
- møctmEnergySum2D.fill(trackClusters[0].getEnergy(), trackClusters[1].getEnergy());
- møctmESumCoplanarity.fill(TriggerModule.getValueEnergySum(trackClusters), getCalculatedCoplanarity(trackClusters));
- møctmTimeCoincidence.fill(TriggerModule.getClusterTime(trackClusters[0]) - TriggerModule.getClusterTime(trackClusters[1]));
- møctmClusterPosition.fill(TriggerModule.getClusterXIndex(trackClusters[0]), TriggerModule.getClusterYIndex(trackClusters[0]));
- møctmClusterPosition.fill(TriggerModule.getClusterXIndex(trackClusters[1]), TriggerModule.getClusterYIndex(trackClusters[1]));
-
- // Populate the momentum plots.
- møctmInvariantMass.fill(getInvariantMass(pair));
- møctmElectronMomentum.fill(pair[0].getMomentum().magnitude());
- møctmElectronMomentum.fill(pair[1].getMomentum().magnitude());
- møctmMomentumSum1D.fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude());
- møctmMomentumSum2D.fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
- møctmTrackPosition.fill(TrackUtils.getTrackPositionAtEcal(tracks[0]).x(), TrackUtils.getTrackPositionAtEcal(tracks[0]).y());
- møctmTrackPosition.fill(TrackUtils.getTrackPositionAtEcal(tracks[1]).x(), TrackUtils.getTrackPositionAtEcal(tracks[1]).y());
- møctmPSumCoplanarity.fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude(),
- getCalculatedCoplanarity(new Track[] { pair[0].getTracks().get(0), pair[1].getTracks().get(0) }));
- }
-
- // Produce trident cluster-track matched plots.
- trctmInstancesInEvent.fill(tridents.size());
- for(ReconstructedParticle[] pair : tridents) {
- // Get the electron and positron tracks.
- ReconstructedParticle electronTrack = pair[pair[0].getCharge() < 0 ? 0 : 1];
- ReconstructedParticle positronTrack = pair[pair[0].getCharge() > 0 ? 0 : 1];
-
- // Get the track clusters.
- Cluster electronCluster = electronTrack.getClusters().get(0);
- Cluster positronCluster = positronTrack.getClusters().get(0);
- Track[] tracks = { pair[0].getTracks().get(0), pair[1].getTracks().get(0) };
- Cluster[] trackClusters = { pair[0].getClusters().get(0), pair[1].getClusters().get(0) };
-
- // Populate the cluster plots.
- trctmElectronEnergy.fill(electronCluster.getEnergy());
- trctmPositronEnergy.fill(positronCluster.getEnergy());
- trctmEnergySum2D.fill(pair[0].getEnergy(), pair[1].getEnergy());
- trctmEnergySum1D.fill(TriggerModule.getValueEnergySum(trackClusters));
- trctmESumCoplanarity.fill(TriggerModule.getValueEnergySum(trackClusters), getCalculatedCoplanarity(trackClusters));
- trctmTimeCoincidence.fill(TriggerModule.getClusterTime(trackClusters[0]) - TriggerModule.getClusterTime(trackClusters[1]));
- trctmClusterPosition.fill(TriggerModule.getClusterXIndex(trackClusters[0]), TriggerModule.getClusterYIndex(trackClusters[0]));
- trctmClusterPosition.fill(TriggerModule.getClusterXIndex(trackClusters[1]), TriggerModule.getClusterYIndex(trackClusters[1]));
-
- // Populate the momentum plots.
- trctmInvariantMass.fill(getInvariantMass(pair));
- trctmElectronMomentum.fill(electronTrack.getMomentum().magnitude());
- trctmPositronMomentum.fill(positronTrack.getMomentum().magnitude());
- trctmMomentumSum1D.fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude());
- trctmMomentumSum2D.fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
- trctmTrackPosition.fill(TrackUtils.getTrackPositionAtEcal(tracks[0]).x(), TrackUtils.getTrackPositionAtEcal(tracks[0]).y());
- trctmTrackPosition.fill(TrackUtils.getTrackPositionAtEcal(tracks[1]).x(), TrackUtils.getTrackPositionAtEcal(tracks[1]).y());
- trctmPSumCoplanarity.fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude(),
- getCalculatedCoplanarity(new Track[] { pair[0].getTracks().get(0), pair[1].getTracks().get(0) }));
- }
-
- // Produce the Møller track-only plots.
- møgblInstancesInEvent.fill(møllersGBL.size());
- RelationalTable<?, ?> hitToStrips = TrackUtils.getHitToStripsTable(event);
- RelationalTable<?, ?> hitToRotated = TrackUtils.getHitToRotatedTable(event);
- for(ReconstructedParticle pair[] : møllersGBL) {
- // Get the tracks and track times.
- Track[] tracks = { pair[0].getTracks().get(0), pair[1].getTracks().get(0) };
- double times[] = {
- TrackUtils.getTrackTime(tracks[0], hitToStrips, hitToRotated),
- TrackUtils.getTrackTime(tracks[1], hitToStrips, hitToRotated)
- };
-
- // Fill the plots.
- møgblTimeCoincidence.fill(times[0] - times[1]);
- møgblInvariantMass.fill(getInvariantMass(pair));
- møgblElectronMomentum.fill(pair[0].getMomentum().magnitude());
- møgblElectronMomentum.fill(pair[1].getMomentum().magnitude());
- møgblMomentumSum1D.fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude());
- møgblMomentumSum2D.fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
- møgblTrackPosition.fill(TrackUtils.getTrackPositionAtEcal(tracks[0]).x(), TrackUtils.getTrackPositionAtEcal(tracks[0]).y());
- møgblTrackPosition.fill(TrackUtils.getTrackPositionAtEcal(tracks[1]).x(), TrackUtils.getTrackPositionAtEcal(tracks[1]).y());
- møgblPSumCoplanarity.fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude(),
- getCalculatedCoplanarity(new Track[] { pair[0].getTracks().get(0), pair[1].getTracks().get(0) }));
- }
-
- // Produce track-only trident plots.
- trgblInstancesInEvent.fill(tridentsGBL.size());
- for(ReconstructedParticle[] pair : tridentsGBL) {
- // Get the tracks and track times.
- Track[] tracks = { pair[0].getTracks().get(0), pair[1].getTracks().get(0) };
- double times[] = {
- TrackUtils.getTrackTime(tracks[0], hitToStrips, hitToRotated),
- TrackUtils.getTrackTime(tracks[1], hitToStrips, hitToRotated)
- };
-
- // Get the positron and the electron.
- ReconstructedParticle positron = pair[0].getCharge() > 0 ? pair[0] : pair[1];
- ReconstructedParticle electron = pair[0].getCharge() < 0 ? pair[0] : pair[1];
-
- // Fill the plots.
- trgblTimeCoincidence.fill(times[0] - times[1]);
- trgblInvariantMass.fill(getInvariantMass(pair));
- trgblElectronMomentum.fill(electron.getMomentum().magnitude());
- trgblPositronMomentum.fill(positron.getMomentum().magnitude());
- trgblMomentumSum1D.fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude());
- trgblMomentumSum2D.fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
- trgblTrackPosition.fill(TrackUtils.getTrackPositionAtEcal(tracks[0]).x(), TrackUtils.getTrackPositionAtEcal(tracks[0]).y());
- trgblTrackPosition.fill(TrackUtils.getTrackPositionAtEcal(tracks[1]).x(), TrackUtils.getTrackPositionAtEcal(tracks[1]).y());
- trgblPSumCoplanarity.fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude(),
- getCalculatedCoplanarity(new Track[] { pair[0].getTracks().get(0), pair[1].getTracks().get(0) }));
- }
- }
-
- public void setCheckSVT(boolean state) {
- checkSVT = state;
- }
-
- public void setCheckTriggerTimeWindow(boolean state) {
- checkTriggerTimeWindow = state;
- }
-
- /**
- * Gets a list of all possible GBL top/bottom track pairs. These
- * tracks are not guaranteed to have a matched cluster.
- * @param trackList - A list of all possible tracks.
- * @return Returns a list of track pairs.
- */
- private static final List<ReconstructedParticle[]> getTopBottomTracksGBL(List<ReconstructedParticle> trackList) {
- // Separate the tracks into top and bottom tracks based on
- // the value of tan(λ). Use only GBL tracks to avoid track
- // duplication.
- List<ReconstructedParticle> topTracks = new ArrayList<ReconstructedParticle>();
- List<ReconstructedParticle> botTracks = new ArrayList<ReconstructedParticle>();
- trackLoop:
- for(ReconstructedParticle track : trackList) {
- // Require that the ReconstructedParticle contain an actual
- // Track object.
- if(track.getTracks().isEmpty()) {
- continue trackLoop;
- }
-
- // Ignore tracks that are not GBL tracks.
- if(!TrackType.isGBL(track.getType())) {
- continue trackLoop;
- }
-
- // If the above tests pass, the ReconstructedParticle has
- // a track and is also a GBL track. Separate it into either
- // a top or a bottom track based on its tan(λ) value.
- if(track.getTracks().get(0).getTrackStates().get(0).getTanLambda() > 0) {
- topTracks.add(track);
- } else {
- botTracks.add(track);
- }
- }
-
- // Form all top/bottom pairs with the unique tracks.
- List<ReconstructedParticle[]> pairList = new ArrayList<ReconstructedParticle[]>();
- for(ReconstructedParticle topTrack : topTracks) {
- for(ReconstructedParticle botTrack : botTracks) {
- pairList.add(new ReconstructedParticle[] { topTrack, botTrack });
- }
- }
-
- // Return the result.
- return pairList;
- }
-
- /**
- * Produces pairs of tracks. The track pairs are required to be
- * matched to a cluster and the associated clusters must form a
- * top/bottom pair. If more than one track points to the same
- * cluster, only the first track is retained.
- * @param trackList - A list of all tracks.
- * @return Returns a list of track pairs meeting the aforementioned
- * conditions.
- */
- private static final List<ReconstructedParticle[]> getTopBottomTracksCTMatched(List<ReconstructedParticle> trackList) {
- // Track clusters that have already been seen to prevent clusters
- // that have duplicate tracks from reappearing.
- Set<Cluster> clusterSet = new HashSet<Cluster>();
-
- // Separate the tracks into top and bottom tracks based on
- // the track cluster. Filter out tracks with no clusters.
- List<ReconstructedParticle> topTracks = new ArrayList<ReconstructedParticle>();
- List<ReconstructedParticle> botTracks = new ArrayList<ReconstructedParticle>();
- trackLoop:
- for(ReconstructedParticle track : trackList) {
- // Check if the track has a cluster. If not, skip it.
- if(track.getClusters().isEmpty()) {
- continue trackLoop;
- }
-
- // If the track doesn't have actual tracks, skip it.
- if(track.getTracks().isEmpty()) {
- continue trackLoop;
- }
-
- // Check if the track cluster has already been seen.
- Cluster trackCluster = track.getClusters().get(0);
- if(clusterSet.contains(trackCluster)) {
- continue trackLoop;
- }
-
- // If the track has a unique cluster, add it to the proper
- // list based on the cluster y-index.
- clusterSet.add(trackCluster);
- if(TriggerModule.getClusterYIndex(trackCluster) > 0) {
- topTracks.add(track);
- } else {
- botTracks.add(track);
- }
- }
-
- // Form all top/bottom pairs with the unique tracks.
- List<ReconstructedParticle[]> pairList = new ArrayList<ReconstructedParticle[]>();
- for(ReconstructedParticle topTrack : topTracks) {
- for(ReconstructedParticle botTrack : botTracks) {
- pairList.add(new ReconstructedParticle[] { topTrack, botTrack });
- }
- }
-
- // Return the result.
- return pairList;
- }
-
- private final List<ReconstructedParticle[]> getTridentClustersGBL(List<ReconstructedParticle[]> pairList, List<Cluster[]> clusterList, EventHeader event) {
- // Store the set of track pairs that meet the trident condition.
- List<ReconstructedParticle[]> tridentTracks = new ArrayList<ReconstructedParticle[]>();
-
- // Extract track relational tables from the event object.
- RelationalTable<?, ?> hitToStrips = TrackUtils.getHitToStripsTable(event);
- RelationalTable<?, ?> hitToRotated = TrackUtils.getHitToRotatedTable(event);
-
- // Tracks will not be considered for trident analysis unless there
- // is at least one top/bottom cluster pair within the time window.
- boolean passesClusterCondition = false;
- tridentClusterLoop:
- for(Cluster[] pair : clusterList) {
- // Ignore clusters that are too far apart temporally.
- if(TriggerModule.getValueTimeCoincidence(pair) > timeCoincidence) {
- continue tridentClusterLoop;
- }
-
- // Require that the cluster pair be top/bottom.
- boolean hasTop = TriggerModule.getClusterYIndex(pair[0]) > 0 || TriggerModule.getClusterYIndex(pair[1]) > 0;
- boolean hasBot = TriggerModule.getClusterYIndex(pair[0]) < 0 || TriggerModule.getClusterYIndex(pair[1]) < 0;
- if(!hasTop || !hasBot) {
- continue tridentClusterLoop;
- }
-
- // If the cluster passes, mark that it has done so and skip
- // the rest. Only one pair need pass.
- passesClusterCondition = true;
- break tridentClusterLoop;
- }
-
- // If no cluster pair passed the cluster condition, no tracks
- // are allowed to pass either.
- if(!passesClusterCondition) {
- return tridentTracks;
- }
-
- // Next, check the track pair list. A track pair must have a
- // positive and a negative track and must also be within the
- // time coincidence window.
- tridentTrackLoop:
- for(ReconstructedParticle[] pair : pairList) {
- // Check that there is at least one positive and one negative
- // track in the pair.
- boolean hasPositive = pair[0].getCharge() > 0 || pair[1].getCharge() > 0;
- boolean hasNegative = pair[0].getCharge() < 0 || pair[1].getCharge() < 0;
- if(!hasPositive || !hasNegative) {
- break tridentTrackLoop;
- }
-
- // Check that the track pair passes the time cut.
- double times[] = {
- TrackUtils.getTrackTime(pair[0].getTracks().get(0), hitToStrips, hitToRotated),
- TrackUtils.getTrackTime(pair[1].getTracks().get(0), hitToStrips, hitToRotated)
- };
-
- if(Math.abs(times[0] - times[1]) > timeCoincidence) {
- continue tridentTrackLoop;
- }
-
- // Require that the negative track have less than the
- // elastic threshold momentum to exclude elastic electrons.
- if(pair[0].getCharge() < 0 && pair[0].getMomentum().magnitude() > elasticThreshold
- || pair[1].getCharge() < 0 && pair[1].getMomentum().magnitude() > elasticThreshold) {
- continue tridentTrackLoop;
- }
-
- // If the track passes both, it is considered a trident pair.
- tridentTracks.add(pair);
- }
-
- // Return the resultant pairs.
- return tridentTracks;
- }
-
- /**
- * Gets a list track pairs that meet the trident condition defined
- * using tracks with matched calorimeter clusters. A pair meets the
- * cluster/track matched trident condition if it meets the following:
- * <ul><li>Both tracks have matched clusters.</li>
- * <li>Has one positive track.</li>
- * <li>Has one negative track.</li>
- * <li>Clusters have a time coincidence of 2.5 ns or less.</li>
- * <li>The electron momentum is below 900 MeV.</li></ul>
- * @param pairList - A <code>List</code> collection of parameterized
- * type <code>ReconstructedParticle[]</code> containing all valid
- * top/bottom pairs of tracks with matched clusters. These will be
- * tested to see if they meet the process criteria.
- * @return Returns a list containing pairs of tracks that meet the
- * trident condition.
- */
- private final List<ReconstructedParticle[]> getTridentTracksCTMatched(List<ReconstructedParticle[]> pairList) {
- // Store the set of track pairs that meet the trident condition.
- List<ReconstructedParticle[]> tridentTracks = new ArrayList<ReconstructedParticle[]>();
-
- // Loop over the filtered pair list and apply the trident
- // condition test.
- tridentLoop:
- for(ReconstructedParticle[] pair : pairList) {
- // There must be one positive and one negative track.
- ReconstructedParticle electron = null;
- ReconstructedParticle positron = null;
- if(pair[0].getCharge() > 0) { positron = pair[0]; }
- else if(pair[1].getCharge() > 0) { positron = pair[1]; }
- if(pair[0].getCharge() < 0) { electron = pair[0]; }
- else if(pair[1].getCharge() < 0) { electron = pair[1]; }
- if(electron == null || positron == null) {
- continue tridentLoop;
- }
-
- // Make sure that the clusters are not the same. This should
- // not actually ever be possible...
- if(pair[0].getClusters().get(0) == pair[1].getClusters().get(0)) {
- continue tridentLoop;
- }
-
- // The clusters must be within a limited time window.
- /*
- Cluster[] trackClusters = { pair[0].getClusters().get(0), pair[1].getClusters().get(0) };
- if(TriggerModule.getValueTimeCoincidence(trackClusters) > timeCoincidence) {
- continue tridentLoop;
- }
- */
-
- // The clusters must be coincidental within an energy
- // dependent coincidence window.
- Cluster[] trackClusters = { pair[0].getClusters().get(0), pair[1].getClusters().get(0) };
- if(!isCoincidental(trackClusters)) {
- continue tridentLoop;
- }
-
- // Require that the electron in the pair have an energy
- // below the elastic threshold to exclude elastic electrons.
- if(electron.getMomentum().magnitude() >= elasticThreshold) {
- continue tridentLoop;
- }
-
- // Require that all clusters occur within the trigger time
- // window to exclude accidentals.
- if(checkTriggerTimeWindow) {
- if(!inTriggerWindow(trackClusters[0]) || !inTriggerWindow(trackClusters[1])) {
- continue tridentLoop;
- }
- }
-
- // If all the above conditions are met, the pair is to be
- // considered a trident pair. Add it to the list.
- tridentTracks.add(pair);
- }
-
- // Return the list of pairs that passed the condition.
- return tridentTracks;
- }
-
- private final List<ReconstructedParticle[]> getMøllerTracksGBL(List<ReconstructedParticle[]> pairList, EventHeader event) {
- // Store the set of track pairs that meet the Møller condition.
- List<ReconstructedParticle[]> møllerTracks = new ArrayList<ReconstructedParticle[]>();
-
- // Extract track relational tables from the event object.
- RelationalTable<?, ?> hitToStrips = TrackUtils.getHitToStripsTable(event);
- RelationalTable<?, ?> hitToRotated = TrackUtils.getHitToRotatedTable(event);
-
- // Loop over the filtered pair list and apply the Møller
- // condition test.
- møllerLoop:
- for(ReconstructedParticle[] pair : pairList) {
- // Both tracks must be negatively charged.
- if(pair[0].getCharge() > 0 || pair[1].getCharge() > 0) {
- continue møllerLoop;
- }
-
- // The clusters must be within a limited time window.
- double times[] = {
- TrackUtils.getTrackTime(pair[0].getTracks().get(0), hitToStrips, hitToRotated),
- TrackUtils.getTrackTime(pair[1].getTracks().get(0), hitToStrips, hitToRotated)
- };
-
- if(Math.abs(times[0] - times[1]) > timeCoincidence) {
- continue møllerLoop;
- }
-
- // Require that the electrons in the pair have energies
- // below the elastic threshold to exclude said electrons.
- if(pair[0].getMomentum().magnitude() > elasticThreshold || pair[1].getMomentum().magnitude() > elasticThreshold) {
- continue møllerLoop;
- }
-
- // Require that the energy of the pair be within a range
- // that is sufficiently "Møller-like."
- double momentumSum = VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude();
- if(momentumSum < møllerLowerRange || momentumSum > møllerUpperRange) {
- continue møllerLoop;
- }
-
- // If all the above conditions are met, the pair is to be
- // considered a Møller pair. Add it to the list.
- møllerTracks.add(pair);
- }
-
- // Return the list of pairs that passed the condition.
- return møllerTracks;
- }
-
- /**
- * Gets a list track pairs that meet the Møller condition defined
- * using tracks with matched calorimeter clusters. A pair meets the
- * cluster/track matched Møller condition if it meets the following:
- * <ul><li>Both tracks have matched clusters.</li>
- * <li>Both tracks are negative.</li>
- * <li>Clusters have a time coincidence of 2.5 ns or less.</li>
- * <li>The electron momenta are below 900 MeV.</li>
- * <li>The momentum sum of the tracks is in the range <code>800 MeV
- * ≤ p1 + p2 ≤ 1500 MeV</code></li></ul>
- * @param pairList - A <code>List</code> collection of parameterized
- * type <code>ReconstructedParticle[]</code> containing all valid
- * top/bottom pairs of tracks with matched clusters. These will be
- * tested to see if they meet the process criteria.
- * @return Returns a list containing pairs of tracks that meet the
- * Møller condition.
- */
- private final List<ReconstructedParticle[]> getMøllerTracksCTMatched(List<ReconstructedParticle[]> pairList) {
- // Store the set of track pairs that meet the Møller condition.
- List<ReconstructedParticle[]> møllerTracks = new ArrayList<ReconstructedParticle[]>();
-
- // Loop over the filtered pair list and apply the Møller
- // condition test.
- møllerLoop:
- for(ReconstructedParticle[] pair : pairList) {
- // Both tracks must be negatively charged.
- if(pair[0].getCharge() > 0 || pair[1].getCharge() > 0) {
- continue møllerLoop;
- }
-
- // The clusters must be within a limited time window.
- /*
- Cluster[] trackClusters = { pair[0].getClusters().get(0), pair[1].getClusters().get(0) };
- if(TriggerModule.getValueTimeCoincidence(trackClusters) > timeCoincidence) {
- continue møllerLoop;
- }
- */
-
- // The clusters must be coincidental within an energy
- // dependent coincidence window.
- Cluster[] trackClusters = { pair[0].getClusters().get(0), pair[1].getClusters().get(0) };
- if(!isCoincidental(trackClusters)) {
- continue møllerLoop;
- }
-
- // Require that the electrons in the pair have energies
- // below the elastic threshold to exclude said electrons.
- if(pair[0].getMomentum().magnitude() > elasticThreshold || pair[1].getMomentum().magnitude() > elasticThreshold) {
- continue møllerLoop;
- }
-
- // Require that the energy of the pair be within a range
- // that is sufficiently "Møller-like."
- double momentumSum = VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude();
- if(momentumSum < møllerLowerRange || momentumSum > møllerUpperRange) {
- continue møllerLoop;
- }
-
- // Require that all clusters occur within the trigger time
- // window to exclude accidentals.
- if(checkTriggerTimeWindow) {
- if(!inTriggerWindow(trackClusters[0]) || !inTriggerWindow(trackClusters[1])) {
- continue møllerLoop;
- }
- }
-
- // If all the above conditions are met, the pair is to be
- // considered a Møller pair. Add it to the list.
- møllerTracks.add(pair);
- }
-
- // Return the list of pairs that passed the condition.
- return møllerTracks;
- }
-
- /**
- * Calculates the approximate invariant mass for a pair of tracks
- * from their momentum. This assumes that the particles are either
- * electrons or positrons, and thusly have a sufficiently small
- * mass term that it can be safely excluded.
- * @param pair - The track pair for which to calculate the invariant
- * mass.
- * @return Returns the approximate invariant mass in units of GeV.
- */
- private static final double getInvariantMass(ReconstructedParticle[] pair) {
- // Get the momentum squared.
- double p2 = Math.pow(pair[0].getMomentum().magnitude() + pair[1].getMomentum().magnitude(), 2);
-
- // Get the remaining terms.
- double xPro = pair[0].getMomentum().x() + pair[1].getMomentum().x();
- double yPro = pair[0].getMomentum().y() + pair[1].getMomentum().y();
- double zPro = pair[0].getMomentum().z() + pair[1].getMomentum().z();
-
- // Calculate the invariant mass.
- return Math.sqrt(p2 - Math.pow(xPro, 2) - Math.pow(yPro, 2) - Math.pow(zPro, 2));
- }
-
- /**
- * Calculates the coplanarity angle between two points, specified
- * by a double array. The array must be of the format (x, y, z).
- * @param position - The first position array.
- * @param otherPosition - The second position array.
- * @return Returns the coplanarity angle between the points in units
- * of degrees.
- */
- private static final double getCalculatedCoplanarity(double[] position, double[] otherPosition) {
- // Define the x- and y-coordinates of the clusters as well as
- // calorimeter center.
- final double ORIGIN_X = 42.52;
- double x[] = { position[0], otherPosition[0] };
- double y[] = { position[1], otherPosition[1] };
-
+ if(!event.hasCollection(ReconstructedParticle.class, particleCollectionName)) {
+ return;
+ }
+ List<ReconstructedParticle> trackList = event.get(ReconstructedParticle.class, particleCollectionName);
+
+ // Check if the event has a collection of clusters. If it
+ // exists, extract it. Otherwise, skip the event.
+ if(!event.hasCollection(Cluster.class, clusterCollectionName)) {
+ return;
+ }
+ List<Cluster> clusterList = event.get(Cluster.class, clusterCollectionName);
+
+ // Get cluster-track matched top/bottom pairs.
+ List<ReconstructedParticle[]> gblMatchedPairs = getTopBottomTracksGBL(trackList);
+ List<ReconstructedParticle[]> ctMatchedPairs = getTopBottomTracksCTMatched(trackList);
+
+ System.out.println("CTM Pairs :: " + ctMatchedPairs.size());
+ System.out.println("GBL Pairs :: " + gblMatchedPairs.size());
+
+ // Get the trident and Møller tracks for the matched track
+ // and cluster pair condition sets.
+ List<ReconstructedParticle[]> møllers = getMøllerTracksCTMatched(ctMatchedPairs);
+ List<ReconstructedParticle[]> møllersGBL = getMøllerTracksGBL(gblMatchedPairs, event);
+ List<ReconstructedParticle[]> tridents = getTridentTracksCTMatched(ctMatchedPairs);
+ List<ReconstructedParticle[]> tridentsGBL = getTridentClustersGBL(gblMatchedPairs, TriggerModule.getTopBottomPairs(clusterList, Cluster.class), event);
+
+ // Track how many events had tridents and Møllers.
+ if(!møllers.isEmpty()) { møllersProcessed++; }
+ if(!tridents.isEmpty()) { tridentsProcessed++; }
+ if(!møllersGBL.isEmpty()) { gblMøllersProcessed++; }
+ if(!tridentsGBL.isEmpty()) { gblTridentsProcessed++; }
+
+ // Produce Møller cluster-track matched plots.
+ møctmInstancesInEvent.fill(møllers.size());
+ for(ReconstructedParticle[] pair : møllers) {
+ // Get the track clusters.
+ Track[] tracks = { pair[0].getTracks().get(0), pair[1].getTracks().get(0) };
+ Cluster[] trackClusters = { pair[0].getClusters().get(0), pair[1].getClusters().get(0) };
+
+ // Populate the cluster plots.
+ møctmElectronEnergy.fill(trackClusters[0].getEnergy());
+ møctmElectronEnergy.fill(trackClusters[1].getEnergy());
+ møctmEnergySum1D.fill(TriggerModule.getValueEnergySum(trackClusters));
+ møctmEnergySum2D.fill(trackClusters[0].getEnergy(), trackClusters[1].getEnergy());
+ møctmESumCoplanarity.fill(TriggerModule.getValueEnergySum(trackClusters), getCalculatedCoplanarity(trackClusters));
+ møctmTimeCoincidence.fill(TriggerModule.getClusterTime(trackClusters[0]) - TriggerModule.getClusterTime(trackClusters[1]));
+ møctmClusterPosition.fill(TriggerModule.getClusterXIndex(trackClusters[0]), TriggerModule.getClusterYIndex(trackClusters[0]));
+ møctmClusterPosition.fill(TriggerModule.getClusterXIndex(trackClusters[1]), TriggerModule.getClusterYIndex(trackClusters[1]));
+
+ // Populate the momentum plots.
+ møctmInvariantMass.fill(getInvariantMass(pair));
+ møctmElectronMomentum.fill(pair[0].getMomentum().magnitude());
+ møctmElectronMomentum.fill(pair[1].getMomentum().magnitude());
+ møctmMomentumSum1D.fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude());
+ møctmMomentumSum2D.fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
+ møctmTrackPosition.fill(TrackUtils.getTrackPositionAtEcal(tracks[0]).x(), TrackUtils.getTrackPositionAtEcal(tracks[0]).y());
+ møctmTrackPosition.fill(TrackUtils.getTrackPositionAtEcal(tracks[1]).x(), TrackUtils.getTrackPositionAtEcal(tracks[1]).y());
+ møctmPSumCoplanarity.fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude(),
+ getCalculatedCoplanarity(new Track[] { pair[0].getTracks().get(0), pair[1].getTracks().get(0) }));
+ }
+
+ // Produce trident cluster-track matched plots.
+ trctmInstancesInEvent.fill(tridents.size());
+ for(ReconstructedParticle[] pair : tridents) {
+ // Get the electron and positron tracks.
+ ReconstructedParticle electronTrack = pair[pair[0].getCharge() < 0 ? 0 : 1];
+ ReconstructedParticle positronTrack = pair[pair[0].getCharge() > 0 ? 0 : 1];
+
+ // Get the track clusters.
+ Cluster electronCluster = electronTrack.getClusters().get(0);
+ Cluster positronCluster = positronTrack.getClusters().get(0);
+ Track[] tracks = { pair[0].getTracks().get(0), pair[1].getTracks().get(0) };
+ Cluster[] trackClusters = { pair[0].getClusters().get(0), pair[1].getClusters().get(0) };
+
+ // Populate the cluster plots.
+ trctmElectronEnergy.fill(electronCluster.getEnergy());
+ trctmPositronEnergy.fill(positronCluster.getEnergy());
+ trctmEnergySum2D.fill(pair[0].getEnergy(), pair[1].getEnergy());
+ trctmEnergySum1D.fill(TriggerModule.getValueEnergySum(trackClusters));
+ trctmESumCoplanarity.fill(TriggerModule.getValueEnergySum(trackClusters), getCalculatedCoplanarity(trackClusters));
+ trctmTimeCoincidence.fill(TriggerModule.getClusterTime(trackClusters[0]) - TriggerModule.getClusterTime(trackClusters[1]));
+ trctmClusterPosition.fill(TriggerModule.getClusterXIndex(trackClusters[0]), TriggerModule.getClusterYIndex(trackClusters[0]));
+ trctmClusterPosition.fill(TriggerModule.getClusterXIndex(trackClusters[1]), TriggerModule.getClusterYIndex(trackClusters[1]));
+
+ // Populate the momentum plots.
+ trctmInvariantMass.fill(getInvariantMass(pair));
+ trctmElectronMomentum.fill(electronTrack.getMomentum().magnitude());
+ trctmPositronMomentum.fill(positronTrack.getMomentum().magnitude());
+ trctmMomentumSum1D.fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude());
+ trctmMomentumSum2D.fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
+ trctmTrackPosition.fill(TrackUtils.getTrackPositionAtEcal(tracks[0]).x(), TrackUtils.getTrackPositionAtEcal(tracks[0]).y());
+ trctmTrackPosition.fill(TrackUtils.getTrackPositionAtEcal(tracks[1]).x(), TrackUtils.getTrackPositionAtEcal(tracks[1]).y());
+ trctmPSumCoplanarity.fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude(),
+ getCalculatedCoplanarity(new Track[] { pair[0].getTracks().get(0), pair[1].getTracks().get(0) }));
+ }
+
+ // Produce the Møller track-only plots.
+ møgblInstancesInEvent.fill(møllersGBL.size());
+ RelationalTable<?, ?> hitToStrips = TrackUtils.getHitToStripsTable(event);
+ RelationalTable<?, ?> hitToRotated = TrackUtils.getHitToRotatedTable(event);
+ for(ReconstructedParticle pair[] : møllersGBL) {
+ // Get the tracks and track times.
+ Track[] tracks = { pair[0].getTracks().get(0), pair[1].getTracks().get(0) };
+ double times[] = {
+ TrackUtils.getTrackTime(tracks[0], hitToStrips, hitToRotated),
+ TrackUtils.getTrackTime(tracks[1], hitToStrips, hitToRotated)
+ };
+
+ // Fill the plots.
+ møgblTimeCoincidence.fill(times[0] - times[1]);
+ møgblInvariantMass.fill(getInvariantMass(pair));
+ møgblElectronMomentum.fill(pair[0].getMomentum().magnitude());
+ møgblElectronMomentum.fill(pair[1].getMomentum().magnitude());
+ møgblMomentumSum1D.fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude());
+ møgblMomentumSum2D.fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
+ møgblTrackPosition.fill(TrackUtils.getTrackPositionAtEcal(tracks[0]).x(), TrackUtils.getTrackPositionAtEcal(tracks[0]).y());
+ møgblTrackPosition.fill(TrackUtils.getTrackPositionAtEcal(tracks[1]).x(), TrackUtils.getTrackPositionAtEcal(tracks[1]).y());
+ møgblPSumCoplanarity.fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude(),
+ getCalculatedCoplanarity(new Track[] { pair[0].getTracks().get(0), pair[1].getTracks().get(0) }));
+ }
+
+ // Produce track-only trident plots.
+ trgblInstancesInEvent.fill(tridentsGBL.size());
+ for(ReconstructedParticle[] pair : tridentsGBL) {
+ // Get the tracks and track times.
+ Track[] tracks = { pair[0].getTracks().get(0), pair[1].getTracks().get(0) };
+ double times[] = {
+ TrackUtils.getTrackTime(tracks[0], hitToStrips, hitToRotated),
+ TrackUtils.getTrackTime(tracks[1], hitToStrips, hitToRotated)
+ };
+
+ // Get the positron and the electron.
+ ReconstructedParticle positron = pair[0].getCharge() > 0 ? pair[0] : pair[1];
+ ReconstructedParticle electron = pair[0].getCharge() < 0 ? pair[0] : pair[1];
+
+ // Fill the plots.
+ trgblTimeCoincidence.fill(times[0] - times[1]);
+ trgblInvariantMass.fill(getInvariantMass(pair));
+ trgblElectronMomentum.fill(electron.getMomentum().magnitude());
+ trgblPositronMomentum.fill(positron.getMomentum().magnitude());
+ trgblMomentumSum1D.fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude());
+ trgblMomentumSum2D.fill(pair[0].getMomentum().magnitude(), pair[1].getMomentum().magnitude());
+ trgblTrackPosition.fill(TrackUtils.getTrackPositionAtEcal(tracks[0]).x(), TrackUtils.getTrackPositionAtEcal(tracks[0]).y());
+ trgblTrackPosition.fill(TrackUtils.getTrackPositionAtEcal(tracks[1]).x(), TrackUtils.getTrackPositionAtEcal(tracks[1]).y());
+ trgblPSumCoplanarity.fill(VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude(),
+ getCalculatedCoplanarity(new Track[] { pair[0].getTracks().get(0), pair[1].getTracks().get(0) }));
+ }
+ }
+
+ public void setCheckSVT(boolean state) {
+ checkSVT = state;
+ }
+
+ public void setCheckTriggerTimeWindow(boolean state) {
+ checkTriggerTimeWindow = state;
+ }
+
+ /**
+ * Gets a list of all possible GBL top/bottom track pairs. These
+ * tracks are not guaranteed to have a matched cluster.
+ * @param trackList - A list of all possible tracks.
+ * @return Returns a list of track pairs.
+ */
+ private static final List<ReconstructedParticle[]> getTopBottomTracksGBL(List<ReconstructedParticle> trackList) {
+ // Separate the tracks into top and bottom tracks based on
+ // the value of tan(λ). Use only GBL tracks to avoid track
+ // duplication.
+ List<ReconstructedParticle> topTracks = new ArrayList<ReconstructedParticle>();
+ List<ReconstructedParticle> botTracks = new ArrayList<ReconstructedParticle>();
+ trackLoop:
+ for(ReconstructedParticle track : trackList) {
+ // Require that the ReconstructedParticle contain an actual
+ // Track object.
+ if(track.getTracks().isEmpty()) {
+ continue trackLoop;
+ }
+
+ // Ignore tracks that are not GBL tracks.
+ if(!TrackType.isGBL(track.getType())) {
+ continue trackLoop;
+ }
+
+ // If the above tests pass, the ReconstructedParticle has
+ // a track and is also a GBL track. Separate it into either
+ // a top or a bottom track based on its tan(λ) value.
+ if(track.getTracks().get(0).getTrackStates().get(0).getTanLambda() > 0) {
+ topTracks.add(track);
+ } else {
+ botTracks.add(track);
+ }
+ }
+
+ // Form all top/bottom pairs with the unique tracks.
+ List<ReconstructedParticle[]> pairList = new ArrayList<ReconstructedParticle[]>();
+ for(ReconstructedParticle topTrack : topTracks) {
+ for(ReconstructedParticle botTrack : botTracks) {
+ pairList.add(new ReconstructedParticle[] { topTrack, botTrack });
+ }
+ }
+
+ // Return the result.
+ return pairList;
+ }
+
+ /**
+ * Produces pairs of tracks. The track pairs are required to be
+ * matched to a cluster and the associated clusters must form a
+ * top/bottom pair. If more than one track points to the same
+ * cluster, only the first track is retained.
+ * @param trackList - A list of all tracks.
+ * @return Returns a list of track pairs meeting the aforementioned
+ * conditions.
+ */
+ private static final List<ReconstructedParticle[]> getTopBottomTracksCTMatched(List<ReconstructedParticle> trackList) {
+ // Track clusters that have already been seen to prevent clusters
+ // that have duplicate tracks from reappearing.
+ Set<Cluster> clusterSet = new HashSet<Cluster>();
+
+ // Separate the tracks into top and bottom tracks based on
+ // the track cluster. Filter out tracks with no clusters.
+ List<ReconstructedParticle> topTracks = new ArrayList<ReconstructedParticle>();
+ List<ReconstructedParticle> botTracks = new ArrayList<ReconstructedParticle>();
+ trackLoop:
+ for(ReconstructedParticle track : trackList) {
+ // Check if the track has a cluster. If not, skip it.
+ if(track.getClusters().isEmpty()) {
+ continue trackLoop;
+ }
+
+ // If the track doesn't have actual tracks, skip it.
+ if(track.getTracks().isEmpty()) {
+ continue trackLoop;
+ }
+
+ // Check if the track cluster has already been seen.
+ Cluster trackCluster = track.getClusters().get(0);
+ if(clusterSet.contains(trackCluster)) {
+ continue trackLoop;
+ }
+
+ // If the track has a unique cluster, add it to the proper
+ // list based on the cluster y-index.
+ clusterSet.add(trackCluster);
+ if(TriggerModule.getClusterYIndex(trackCluster) > 0) {
+ topTracks.add(track);
+ } else {
+ botTracks.add(track);
+ }
+ }
+
+ // Form all top/bottom pairs with the unique tracks.
+ List<ReconstructedParticle[]> pairList = new ArrayList<ReconstructedParticle[]>();
+ for(ReconstructedParticle topTrack : topTracks) {
+ for(ReconstructedParticle botTrack : botTracks) {
+ pairList.add(new ReconstructedParticle[] { topTrack, botTrack });
+ }
+ }
+
+ // Return the result.
+ return pairList;
+ }
+
+ private final List<ReconstructedParticle[]> getTridentClustersGBL(List<ReconstructedParticle[]> pairList, List<Cluster[]> clusterList, EventHeader event) {
+ // Store the set of track pairs that meet the trident condition.
+ List<ReconstructedParticle[]> tridentTracks = new ArrayList<ReconstructedParticle[]>();
+
+ // Extract track relational tables from the event object.
+ RelationalTable<?, ?> hitToStrips = TrackUtils.getHitToStripsTable(event);
+ RelationalTable<?, ?> hitToRotated = TrackUtils.getHitToRotatedTable(event);
+
+ // Tracks will not be considered for trident analysis unless there
+ // is at least one top/bottom cluster pair within the time window.
+ boolean passesClusterCondition = false;
+ tridentClusterLoop:
+ for(Cluster[] pair : clusterList) {
+ // Ignore clusters that are too far apart temporally.
+ if(TriggerModule.getValueTimeCoincidence(pair) > timeCoincidence) {
+ continue tridentClusterLoop;
+ }
+
+ // Require that the cluster pair be top/bottom.
+ boolean hasTop = TriggerModule.getClusterYIndex(pair[0]) > 0 || TriggerModule.getClusterYIndex(pair[1]) > 0;
+ boolean hasBot = TriggerModule.getClusterYIndex(pair[0]) < 0 || TriggerModule.getClusterYIndex(pair[1]) < 0;
+ if(!hasTop || !hasBot) {
+ continue tridentClusterLoop;
+ }
+
+ // If the cluster passes, mark that it has done so and skip
+ // the rest. Only one pair need pass.
+ passesClusterCondition = true;
+ break tridentClusterLoop;
+ }
+
+ // If no cluster pair passed the cluster condition, no tracks
+ // are allowed to pass either.
+ if(!passesClusterCondition) {
+ return tridentTracks;
+ }
+
+ // Next, check the track pair list. A track pair must have a
+ // positive and a negative track and must also be within the
+ // time coincidence window.
+ tridentTrackLoop:
+ for(ReconstructedParticle[] pair : pairList) {
+ // Check that there is at least one positive and one negative
+ // track in the pair.
+ boolean hasPositive = pair[0].getCharge() > 0 || pair[1].getCharge() > 0;
+ boolean hasNegative = pair[0].getCharge() < 0 || pair[1].getCharge() < 0;
+ if(!hasPositive || !hasNegative) {
+ break tridentTrackLoop;
+ }
+
+ // Check that the track pair passes the time cut.
+ double times[] = {
+ TrackUtils.getTrackTime(pair[0].getTracks().get(0), hitToStrips, hitToRotated),
+ TrackUtils.getTrackTime(pair[1].getTracks().get(0), hitToStrips, hitToRotated)
+ };
+
+ if(Math.abs(times[0] - times[1]) > timeCoincidence) {
+ continue tridentTrackLoop;
+ }
+
+ // Require that the negative track have less than the
+ // elastic threshold momentum to exclude elastic electrons.
+ if(pair[0].getCharge() < 0 && pair[0].getMomentum().magnitude() > elasticThreshold
+ || pair[1].getCharge() < 0 && pair[1].getMomentum().magnitude() > elasticThreshold) {
+ continue tridentTrackLoop;
+ }
+
+ // If the track passes both, it is considered a trident pair.
+ tridentTracks.add(pair);
+ }
+
+ // Return the resultant pairs.
+ return tridentTracks;
+ }
+
+ /**
+ * Gets a list track pairs that meet the trident condition defined
+ * using tracks with matched calorimeter clusters. A pair meets the
+ * cluster/track matched trident condition if it meets the following:
+ * <ul><li>Both tracks have matched clusters.</li>
+ * <li>Has one positive track.</li>
+ * <li>Has one negative track.</li>
+ * <li>Clusters have a time coincidence of 2.5 ns or less.</li>
+ * <li>The electron momentum is below 900 MeV.</li></ul>
+ * @param pairList - A <code>List</code> collection of parameterized
+ * type <code>ReconstructedParticle[]</code> containing all valid
+ * top/bottom pairs of tracks with matched clusters. These will be
+ * tested to see if they meet the process criteria.
+ * @return Returns a list containing pairs of tracks that meet the
+ * trident condition.
+ */
+ private final List<ReconstructedParticle[]> getTridentTracksCTMatched(List<ReconstructedParticle[]> pairList) {
+ // Store the set of track pairs that meet the trident condition.
+ List<ReconstructedParticle[]> tridentTracks = new ArrayList<ReconstructedParticle[]>();
+
+ // Loop over the filtered pair list and apply the trident
+ // condition test.
+ tridentLoop:
+ for(ReconstructedParticle[] pair : pairList) {
+ // There must be one positive and one negative track.
+ ReconstructedParticle electron = null;
+ ReconstructedParticle positron = null;
+ if(pair[0].getCharge() > 0) { positron = pair[0]; }
+ else if(pair[1].getCharge() > 0) { positron = pair[1]; }
+ if(pair[0].getCharge() < 0) { electron = pair[0]; }
+ else if(pair[1].getCharge() < 0) { electron = pair[1]; }
+ if(electron == null || positron == null) {
+ continue tridentLoop;
+ }
+
+ // Make sure that the clusters are not the same. This should
+ // not actually ever be possible...
+ if(pair[0].getClusters().get(0) == pair[1].getClusters().get(0)) {
+ continue tridentLoop;
+ }
+
+ // The clusters must be within a limited time window.
+ /*
+ Cluster[] trackClusters = { pair[0].getClusters().get(0), pair[1].getClusters().get(0) };
+ if(TriggerModule.getValueTimeCoincidence(trackClusters) > timeCoincidence) {
+ continue tridentLoop;
+ }
+ */
+
+ // The clusters must be coincidental within an energy
+ // dependent coincidence window.
+ Cluster[] trackClusters = { pair[0].getClusters().get(0), pair[1].getClusters().get(0) };
+ if(!isCoincidental(trackClusters)) {
+ continue tridentLoop;
+ }
+
+ // Require that the electron in the pair have an energy
+ // below the elastic threshold to exclude elastic electrons.
+ if(electron.getMomentum().magnitude() >= elasticThreshold) {
+ continue tridentLoop;
+ }
+
+ // Require that all clusters occur within the trigger time
+ // window to exclude accidentals.
+ if(checkTriggerTimeWindow) {
+ if(!inTriggerWindow(trackClusters[0]) || !inTriggerWindow(trackClusters[1])) {
+ continue tridentLoop;
+ }
+ }
+
+ // If all the above conditions are met, the pair is to be
+ // considered a trident pair. Add it to the list.
+ tridentTracks.add(pair);
+ }
+
+ // Return the list of pairs that passed the condition.
+ return tridentTracks;
+ }
+
+ private final List<ReconstructedParticle[]> getMøllerTracksGBL(List<ReconstructedParticle[]> pairList, EventHeader event) {
+ // Store the set of track pairs that meet the Møller condition.
+ List<ReconstructedParticle[]> møllerTracks = new ArrayList<ReconstructedParticle[]>();
+
+ // Extract track relational tables from the event object.
+ RelationalTable<?, ?> hitToStrips = TrackUtils.getHitToStripsTable(event);
+ RelationalTable<?, ?> hitToRotated = TrackUtils.getHitToRotatedTable(event);
+
+ // Loop over the filtered pair list and apply the Møller
+ // condition test.
+ møllerLoop:
+ for(ReconstructedParticle[] pair : pairList) {
+ // Both tracks must be negatively charged.
+ if(pair[0].getCharge() > 0 || pair[1].getCharge() > 0) {
+ continue møllerLoop;
+ }
+
+ // The clusters must be within a limited time window.
+ double times[] = {
+ TrackUtils.getTrackTime(pair[0].getTracks().get(0), hitToStrips, hitToRotated),
+ TrackUtils.getTrackTime(pair[1].getTracks().get(0), hitToStrips, hitToRotated)
+ };
+
+ if(Math.abs(times[0] - times[1]) > timeCoincidence) {
+ continue møllerLoop;
+ }
+
+ // Require that the electrons in the pair have energies
+ // below the elastic threshold to exclude said electrons.
+ if(pair[0].getMomentum().magnitude() > elasticThreshold || pair[1].getMomentum().magnitude() > elasticThreshold) {
+ continue møllerLoop;
+ }
+
+ // Require that the energy of the pair be within a range
+ // that is sufficiently "Møller-like."
+ double momentumSum = VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude();
+ if(momentumSum < møllerLowerRange || momentumSum > møllerUpperRange) {
+ continue møllerLoop;
+ }
+
+ // If all the above conditions are met, the pair is to be
+ // considered a Møller pair. Add it to the list.
+ møllerTracks.add(pair);
+ }
+
+ // Return the list of pairs that passed the condition.
+ return møllerTracks;
+ }
+
+ /**
+ * Gets a list track pairs that meet the Møller condition defined
+ * using tracks with matched calorimeter clusters. A pair meets the
+ * cluster/track matched Møller condition if it meets the following:
+ * <ul><li>Both tracks have matched clusters.</li>
+ * <li>Both tracks are negative.</li>
+ * <li>Clusters have a time coincidence of 2.5 ns or less.</li>
+ * <li>The electron momenta are below 900 MeV.</li>
+ * <li>The momentum sum of the tracks is in the range <code>800 MeV
+ * ≤ p1 + p2 ≤ 1500 MeV</code></li></ul>
+ * @param pairList - A <code>List</code> collection of parameterized
+ * type <code>ReconstructedParticle[]</code> containing all valid
+ * top/bottom pairs of tracks with matched clusters. These will be
+ * tested to see if they meet the process criteria.
+ * @return Returns a list containing pairs of tracks that meet the
+ * Møller condition.
+ */
+ private final List<ReconstructedParticle[]> getMøllerTracksCTMatched(List<ReconstructedParticle[]> pairList) {
+ // Store the set of track pairs that meet the Møller condition.
+ List<ReconstructedParticle[]> møllerTracks = new ArrayList<ReconstructedParticle[]>();
+
+ // Loop over the filtered pair list and apply the Møller
+ // condition test.
+ møllerLoop:
+ for(ReconstructedParticle[] pair : pairList) {
+ // Both tracks must be negatively charged.
+ if(pair[0].getCharge() > 0 || pair[1].getCharge() > 0) {
+ continue møllerLoop;
+ }
+
+ // The clusters must be within a limited time window.
+ /*
+ Cluster[] trackClusters = { pair[0].getClusters().get(0), pair[1].getClusters().get(0) };
+ if(TriggerModule.getValueTimeCoincidence(trackClusters) > timeCoincidence) {
+ continue møllerLoop;
+ }
+ */
+
+ // The clusters must be coincidental within an energy
+ // dependent coincidence window.
+ Cluster[] trackClusters = { pair[0].getClusters().get(0), pair[1].getClusters().get(0) };
+ if(!isCoincidental(trackClusters)) {
+ continue møllerLoop;
+ }
+
+ // Require that the electrons in the pair have energies
+ // below the elastic threshold to exclude said electrons.
+ if(pair[0].getMomentum().magnitude() > elasticThreshold || pair[1].getMomentum().magnitude() > elasticThreshold) {
+ continue møllerLoop;
+ }
+
+ // Require that the energy of the pair be within a range
+ // that is sufficiently "Møller-like."
+ double momentumSum = VecOp.add(pair[0].getMomentum(), pair[1].getMomentum()).magnitude();
+ if(momentumSum < møllerLowerRange || momentumSum > møllerUpperRange) {
+ continue møllerLoop;
+ }
+
+ // Require that all clusters occur within the trigger time
+ // window to exclude accidentals.
+ if(checkTriggerTimeWindow) {
+ if(!inTriggerWindow(trackClusters[0]) || !inTriggerWindow(trackClusters[1])) {
+ continue møllerLoop;
+ }
+ }
+
+ // If all the above conditions are met, the pair is to be
+ // considered a Møller pair. Add it to the list.
+ møllerTracks.add(pair);
+ }
+
+ // Return the list of pairs that passed the condition.
+ return møllerTracks;
+ }
+
+ /**
+ * Calculates the approximate invariant mass for a pair of tracks
+ * from their momentum. This assumes that the particles are either
+ * electrons or positrons, and thusly have a sufficiently small
+ * mass term that it can be safely excluded.
+ * @param pair - The track pair for which to calculate the invariant
+ * mass.
+ * @return Returns the approximate invariant mass in units of GeV.
+ */
+ private static final double getInvariantMass(ReconstructedParticle[] pair) {
+ // Get the momentum squared.
+ double p2 = Math.pow(pair[0].getMomentum().magnitude() + pair[1].getMomentum().magnitude(), 2);
+
+ // Get the remaining terms.
+ double xPro = pair[0].getMomentum().x() + pair[1].getMomentum().x();
+ double yPro = pair[0].getMomentum().y() + pair[1].getMomentum().y();
+ double zPro = pair[0].getMomentum().z() + pair[1].getMomentum().z();
+
+ // Calculate the invariant mass.
+ return Math.sqrt(p2 - Math.pow(xPro, 2) - Math.pow(yPro, 2) - Math.pow(zPro, 2));
+ }
+
+ /**
+ * Calculates the coplanarity angle between two points, specified
+ * by a double array. The array must be of the format (x, y, z).
+ * @param position - The first position array.
+ * @param otherPosition - The second position array.
+ * @return Returns the coplanarity angle between the points in units
+ * of degrees.
+ */
+ private static final double getCalculatedCoplanarity(double[] position, double[] otherPosition) {
+ // Define the x- and y-coordinates of the clusters as well as
+ // calorimeter center.
+ final double ORIGIN_X = 42.52;
+ double x[] = { position[0], otherPosition[0] };
+ double y[] = { position[1], otherPosition[1] };
+
// Get the cluster angles.
double[] clusterAngle = new double[2];
for(int i = 0; i < 2; i++) {
- clusterAngle[i] = Math.atan2(y[i], x[i] - ORIGIN_X) * 180 / Math.PI;
- if(clusterAngle[i] <= 0) { clusterAngle[i] += 360; }
+ clusterAngle[i] = Math.atan2(y[i], x[i] - ORIGIN_X) * 180 / Math.PI;
+ if(clusterAngle[i] <= 0) { clusterAngle[i] += 360; }
}
// Calculate the coplanarity cut value.
double clusterDiff = clusterAngle[0] - clusterAngle[1];
return clusterDiff > 0 ? clusterDiff : clusterDiff + 360;
- }
-
- /**
- * Calculates the coplanarity angle of a pair of clusters.
- * @param pair - The pair of clusters for which to calculate the
- * coplanarity angle.
- * @return Returns the coplanarity angle between the two clusters
- * in degrees.
- */
- private static final double getCalculatedCoplanarity(Cluster[] pair) {
- return getCalculatedCoplanarity(pair[0].getPosition(), pair[1].getPosition());
- }
-
- /**
- * Calculates the coplanarity angle of a pair of tracks. The track
- * is extrapolated to the calorimeter face and its position there
- * used for the arguments in the calculation.
- * @param pair - The pair of tracks for which to calculate the
- * coplanarity angle.
- * @return Returns the coplanarity angle between the two tracks
- * in degrees.
- */
- private static final double getCalculatedCoplanarity(Track[] pair) {
- return getCalculatedCoplanarity(TrackUtils.getTrackPositionAtEcal(pair[0]).v(), TrackUtils.getTrackPositionAtEcal(pair[1]).v());
- }
-
- private static final boolean inTriggerWindow(Cluster cluster) {
- // Get the cluster time.
- double clusterTime = TriggerModule.getClusterTime(cluster);
-
- // Check that it is within the allowed bounds.
- return (35 <= clusterTime && clusterTime <= 50);
- }
-
- private static final boolean isCoincidental(Cluster[] pair) {
- // Get the energy sum and the time coincidence.
- double energySum = pair[0].getEnergy() + pair[1].getEnergy();
- double timeCoincidence = TriggerModule.getValueTimeCoincidence(pair);
-
- // Get the upper and lower bounds of the allowed range.
- double mean = getTimeDependenceMean(energySum);
- double threeSigma = 3.0 * getTimeDependenceSigma(energySum);
- double lowerBound = mean - threeSigma;
- double upperBound = mean + threeSigma;
-
- // Perform the time coincidence check.
- return (lowerBound <= timeCoincidence && timeCoincidence <= upperBound);
- }
-
- private static final double getTimeDependenceMean(double energySum) {
- // Define the fit parameters.
- double[] param = { 0.289337, -2.81998, 9.03475, -12.93, 8.71476, -2.26969 };
-
- // Calculate the mean.
- return param[0] + energySum * (param[1] + energySum * (param[2] + energySum * (param[3] + energySum * (param[4] + energySum * (param[5])))));
- }
-
- private static final double getTimeDependenceSigma(double energySum) {
- // Define the fit parameters.
- double[] param = { 4.3987, -24.2371, 68.9567, -98.2586, 67.562, -17.8987 };
-
- // Calculate the standard deviation.
- return param[0] + energySum * (param[1] + energySum * (param[2] + energySum * (param[3] + energySum * (param[4] + energySum * (param[5])))));
- }
+ }
+
+ /**
+ * Calculates the coplanarity angle of a pair of clusters.
+ * @param pair - The pair of clusters for which to calculate the
+ * coplanarity angle.
+ * @return Returns the coplanarity angle between the two clusters
+ * in degrees.
+ */
+ private static final double getCalculatedCoplanarity(Cluster[] pair) {
+ return getCalculatedCoplanarity(pair[0].getPosition(), pair[1].getPosition());
+ }
+
+ /**
+ * Calculates the coplanarity angle of a pair of tracks. The track
+ * is extrapolated to the calorimeter face and its position there
+ * used for the arguments in the calculation.
+ * @param pair - The pair of tracks for which to calculate the
+ * coplanarity angle.
+ * @return Returns the coplanarity angle between the two tracks
+ * in degrees.
+ */
+ private static final double getCalculatedCoplanarity(Track[] pair) {
+ return getCalculatedCoplanarity(TrackUtils.getTrackPositionAtEcal(pair[0]).v(), TrackUtils.getTrackPositionAtEcal(pair[1]).v());
+ }
+
+ private static final boolean inTriggerWindow(Cluster cluster) {
+ // Get the cluster time.
+ double clusterTime = TriggerModule.getClusterTime(cluster);
+
+ // Check that it is within the allowed bounds.
+ return (35 <= clusterTime && clusterTime <= 50);
+ }
+
+ private static final boolean isCoincidental(Cluster[] pair) {
+ // Get the energy sum and the time coincidence.
+ double energySum = pair[0].getEnergy() + pair[1].getEnergy();
+ double timeCoincidence = TriggerModule.getValueTimeCoincidence(pair);
+
+ // Get the upper and lower bounds of the allowed range.
+ double mean = getTimeDependenceMean(energySum);
+ double threeSigma = 3.0 * getTimeDependenceSigma(energySum);
+ double lowerBound = mean - threeSigma;
+ double upperBound = mean + threeSigma;
+
+ // Perform the time coincidence check.
+ return (lowerBound <= timeCoincidence && timeCoincidence <= upperBound);
+ }
+
+ private static final double getTimeDependenceMean(double energySum) {
+ // Define the fit parameters.
+ double[] param = { 0.289337, -2.81998, 9.03475, -12.93, 8.71476, -2.26969 };
+
+ // Calculate the mean.
+ return param[0] + energySum * (param[1] + energySum * (param[2] + energySum * (param[3] + energySum * (param[4] + energySum * (param[5])))));
+ }
+
+ private static final double getTimeDependenceSigma(double energySum) {
+ // Define the fit parameters.
+ double[] param = { 4.3987, -24.2371, 68.9567, -98.2586, 67.562, -17.8987 };
+
+ // Calculate the standard deviation.
+ return param[0] + energySum * (param[1] + energySum * (param[2] + energySum * (param[3] + energySum * (param[4] + energySum * (param[5])))));
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/AddPlots.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/AddPlots.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/AddPlots.java Wed Apr 27 11:11:32 2016
@@ -14,241 +14,241 @@
import hep.aida.ITree;
public class AddPlots {
-
- public static void main(String[] args) throws IllegalArgumentException, IOException {
- // Define the root directory for the plots.
- String rootDir = null;
-
- // Get the option identifier from the command arguments.
- boolean isHelp = false;
- boolean isFileList = false;
- boolean isDirectory = false;
- if(args.length > 0) {
- if(args[0].compareTo("-h") == 0) { isHelp = true; }
- else if(args[0].compareTo("-f") == 0) { isFileList = true; }
- else if(args[0].compareTo("-d") == 0) { isDirectory = true; }
- } else {
- System.err.println("Insufficient arguments. See \"AddPlots -h\"");
- System.exit(1);
- }
-
- // Process the command line argument.
- List<File> plotFiles = new ArrayList<File>();
- if(isHelp) {
- System.out.println("Usage:");
- System.out.println("\tAddPlots -d [PLOT_DIRECTORY]");
- System.out.println("\tAddPlots -f [PLOT_FILE] [PLOT_FILE] ...");
- System.exit(0);
- } else if(isDirectory) {
- // Make sure that a directory is specified.
- if(args.length < 2) {
- System.err.println("Insufficient arguments. Must specify at least two files.");
- System.exit(1);
- }
-
- // Get the plot directory from the second argument.
- File plotDirectory = new File(args[1]);
-
- // Verify that it exists and is a directory.
- if(!plotDirectory.exists()) {
- System.err.println("File path does not exist.");
- System.exit(1);
- } if(!plotDirectory.isDirectory()) {
- System.err.println("Indicated path must be a directory.");
- System.exit(1);
- }
-
- // Store the root directory.
- rootDir = plotDirectory.getAbsolutePath() + "/";
-
- // Extract the AIDA files from the directory.
- for(File file : plotDirectory.listFiles()) {
- System.out.println(file.getName());
- int indexOfExtension = file.getName().lastIndexOf('.');
- if(indexOfExtension == -1) { continue; }
- if(file.getName().substring(indexOfExtension).compareToIgnoreCase(".aida") == 0) {
- plotFiles.add(file);
- }
- }
-
- // Debug status print.
- System.out.println("Processing plots in directory \"" + plotDirectory.getAbsolutePath() + "\"");
- } else if(isFileList) {
- // Make sure that at least one file was specified.
- if(args.length < 3) {
- System.err.println("Insufficient arguments. Must specify at least two files.");
- System.exit(1);
- }
-
- // Get the root directory.
- rootDir = System.getProperty("user.dir") + "/";
-
- // Create and verify the specified files.
- for(int i = 1; i < args.length; i++) {
- // Create the file object and make sure that it exists.
- File file = new File(args[i]);
- if(!file.exists()) {
- System.err.println("Specified file does not exist: " + args[i]);
- System.exit(1);
- }
-
- // Add it to the file list.
- plotFiles.add(file);
- }
- } else {
- System.err.println("Option \"" + args[0] + "\" is not recognized.");
- System.exit(1);
- }
-
- // Make sure that there are actually files.
- if(plotFiles.isEmpty()) {
- System.err.println("No AIDA files found!");
- System.exit(1);
- }
-
- // Get the plots file and open it.
- IAnalysisFactory af = IAnalysisFactory.create();
- ITree tree = af.createTreeFactory().create(plotFiles.get(0).getAbsolutePath());
- if(tree == null) { throw new IllegalArgumentException("Unable to load plot file."); }
-
- // Get the histograms names.
- List<String> objectNameList = getTreeFiles(tree);
-
- // Separate the plots into 1D and 2D plots and extract their
- // bin sizes and other properties.
- List<Integer> xBins1D = new ArrayList<Integer>();
- List<Double> xBins1DMin = new ArrayList<Double>();
- List<Double> xBins1DMax = new ArrayList<Double>();
- List<Integer> xBins2D = new ArrayList<Integer>();
- List<Double> xBins2DMin = new ArrayList<Double>();
- List<Double> xBins2DMax = new ArrayList<Double>();
- List<Integer> yBins2D = new ArrayList<Integer>();
- List<Double> yBins2DMin = new ArrayList<Double>();
- List<Double> yBins2DMax = new ArrayList<Double>();
- List<String> histogramNames1D = new ArrayList<String>();
- List<String> histogramNames2D = new ArrayList<String>();
- for(String objectName : objectNameList) {
- // Get the object.
- IManagedObject object = tree.find(objectName);
-
- // If it is a 1D histogram, process it.
- if(object instanceof IHistogram1D) {
- // Add the object to the 1D histogram list.
- histogramNames1D.add(objectName);
-
- // Get the bin size.
- IHistogram1D plot = (IHistogram1D) object;
- xBins1D.add(plot.axis().bins());
- xBins1DMin.add(plot.axis().lowerEdge());
- xBins1DMax.add(plot.axis().upperEdge());
- }
-
- // If it is a 1D histogram, process it.
- else if(object instanceof IHistogram2D) {
- // Add the object to the 2D histogram list.
- histogramNames2D.add(objectName);
-
- // Get the bin size.
- IHistogram2D plot = (IHistogram2D) object;
- xBins2D.add(plot.xAxis().bins());
- xBins2DMin.add(plot.xAxis().lowerEdge());
- xBins2DMax.add(plot.xAxis().upperEdge());
- yBins2D.add(plot.yAxis().bins());
- yBins2DMin.add(plot.yAxis().lowerEdge());
- yBins2DMax.add(plot.yAxis().upperEdge());
- }
- }
-
- // Create plots corresponding to each of the plot objects.
- AIDA aida = AIDA.defaultInstance();
- List<IHistogram1D> histograms1D = new ArrayList<IHistogram1D>(histogramNames1D.size());
- List<IHistogram2D> histograms2D = new ArrayList<IHistogram2D>(histogramNames2D.size());
- for(int i = 0; i < histogramNames1D.size(); i++) {
- IHistogram1D histogram = aida.histogram1D(histogramNames1D.get(i), xBins1D.get(i), xBins1DMin.get(i), xBins1DMax.get(i));
- histograms1D.add(histogram);
- }
- for(int i = 0; i < histogramNames2D.size(); i++) {
- IHistogram2D histogram = aida.histogram2D(histogramNames2D.get(i), xBins2D.get(i), xBins2DMin.get(i), xBins2DMax.get(i), yBins2D.get(i), yBins2DMin.get(i), yBins2DMax.get(i));
- histograms2D.add(histogram);
- }
-
- // Iterate over each file and add their entries to the compiled
- // plots.
- for(File file : plotFiles) {
- // Open the file.
- ITree fileTree = af.createTreeFactory().create(file.getAbsolutePath());
-
- // For each plot, get the equivalent plot from the file
- // and add each bin entry to the compiled plot.
- for(int i = 0; i < histogramNames1D.size(); i++) {
- // Get the histogram object.
- IHistogram1D histogram = (IHistogram1D) fileTree.find(histogramNames1D.get(i));
-
- // Iterate over the bins.
- for(int x = 0; x < xBins1D.get(i); x++) {
- // Get the entries in this bin and the bin average.
- int entries = histogram.binEntries(x);
- double average = histogram.binMean(x);
-
- // Add the entries to the compiled plot.
- for(int j = 0; j < entries; j++) {
- histograms1D.get(i).fill(average);
- }
- }
- }
- for(int i = 0; i < histogramNames2D.size(); i++) {
- // Get the histogram object.
- IHistogram2D histogram = (IHistogram2D) fileTree.find(histogramNames2D.get(i));
-
- // Iterate over the bins.
- for(int x = 0; x < xBins2D.get(i); x++) {
- for(int y = 0; y < yBins2D.get(i); y++) {
- // Get the entries in this bin and the bin average.
- int entries = histogram.binEntries(x, y);
- double averageX = histogram.binMeanX(x, y);
- double averageY = histogram.binMeanY(x, y);
-
- // Add the entries to the compiled plot.
- for(int j = 0; j < entries; j++) {
- histograms2D.get(i).fill(averageX, averageY);
- }
- }
- }
- }
- }
-
- // Save the compiled plots to a new file.
- aida.saveAs(rootDir + "compiled-plots.aida");
- System.out.println("Plots written to path " + rootDir + "compiled-plots.aida");
- }
-
- private static final List<String> getTreeFiles(ITree tree) {
- return getTreeFiles(tree, "/");
- }
-
- private static final List<String> getTreeFiles(ITree tree, String rootDir) {
- // Make a list to contain the plot names.
- List<String> list = new ArrayList<String>();
-
- // Iterate over the objects at the indicated directory of the tree.
- String objectNames[] = tree.listObjectNames(rootDir);
- for(String objectName : objectNames) {
- // Convert the object name to a char array and check the
- // last character. Directories end in '/'.
- char[] plotChars = objectName.toCharArray();
-
- // If the object is a directory, process any objects inside
- // of it as well.
- if(plotChars[plotChars.length - 1] == '/') {
- List<String> dirList = getTreeFiles(tree, objectName);
- list.addAll(dirList);
- }
-
- // Otherwise, just add the object to the list.
- else { list.add(objectName); }
- }
-
- // Return the compiled list.
- return list;
- }
+
+ public static void main(String[] args) throws IllegalArgumentException, IOException {
+ // Define the root directory for the plots.
+ String rootDir = null;
+
+ // Get the option identifier from the command arguments.
+ boolean isHelp = false;
+ boolean isFileList = false;
+ boolean isDirectory = false;
+ if(args.length > 0) {
+ if(args[0].compareTo("-h") == 0) { isHelp = true; }
+ else if(args[0].compareTo("-f") == 0) { isFileList = true; }
+ else if(args[0].compareTo("-d") == 0) { isDirectory = true; }
+ } else {
+ System.err.println("Insufficient arguments. See \"AddPlots -h\"");
+ System.exit(1);
+ }
+
+ // Process the command line argument.
+ List<File> plotFiles = new ArrayList<File>();
+ if(isHelp) {
+ System.out.println("Usage:");
+ System.out.println("\tAddPlots -d [PLOT_DIRECTORY]");
+ System.out.println("\tAddPlots -f [PLOT_FILE] [PLOT_FILE] ...");
+ System.exit(0);
+ } else if(isDirectory) {
+ // Make sure that a directory is specified.
+ if(args.length < 2) {
+ System.err.println("Insufficient arguments. Must specify at least two files.");
+ System.exit(1);
+ }
+
+ // Get the plot directory from the second argument.
+ File plotDirectory = new File(args[1]);
+
+ // Verify that it exists and is a directory.
+ if(!plotDirectory.exists()) {
+ System.err.println("File path does not exist.");
+ System.exit(1);
+ } if(!plotDirectory.isDirectory()) {
+ System.err.println("Indicated path must be a directory.");
+ System.exit(1);
+ }
+
+ // Store the root directory.
+ rootDir = plotDirectory.getAbsolutePath() + "/";
+
+ // Extract the AIDA files from the directory.
+ for(File file : plotDirectory.listFiles()) {
+ System.out.println(file.getName());
+ int indexOfExtension = file.getName().lastIndexOf('.');
+ if(indexOfExtension == -1) { continue; }
+ if(file.getName().substring(indexOfExtension).compareToIgnoreCase(".aida") == 0) {
+ plotFiles.add(file);
+ }
+ }
+
+ // Debug status print.
+ System.out.println("Processing plots in directory \"" + plotDirectory.getAbsolutePath() + "\"");
+ } else if(isFileList) {
+ // Make sure that at least one file was specified.
+ if(args.length < 3) {
+ System.err.println("Insufficient arguments. Must specify at least two files.");
+ System.exit(1);
+ }
+
+ // Get the root directory.
+ rootDir = System.getProperty("user.dir") + "/";
+
+ // Create and verify the specified files.
+ for(int i = 1; i < args.length; i++) {
+ // Create the file object and make sure that it exists.
+ File file = new File(args[i]);
+ if(!file.exists()) {
+ System.err.println("Specified file does not exist: " + args[i]);
+ System.exit(1);
+ }
+
+ // Add it to the file list.
+ plotFiles.add(file);
+ }
+ } else {
+ System.err.println("Option \"" + args[0] + "\" is not recognized.");
+ System.exit(1);
+ }
+
+ // Make sure that there are actually files.
+ if(plotFiles.isEmpty()) {
+ System.err.println("No AIDA files found!");
+ System.exit(1);
+ }
+
+ // Get the plots file and open it.
+ IAnalysisFactory af = IAnalysisFactory.create();
+ ITree tree = af.createTreeFactory().create(plotFiles.get(0).getAbsolutePath());
+ if(tree == null) { throw new IllegalArgumentException("Unable to load plot file."); }
+
+ // Get the histograms names.
+ List<String> objectNameList = getTreeFiles(tree);
+
+ // Separate the plots into 1D and 2D plots and extract their
+ // bin sizes and other properties.
+ List<Integer> xBins1D = new ArrayList<Integer>();
+ List<Double> xBins1DMin = new ArrayList<Double>();
+ List<Double> xBins1DMax = new ArrayList<Double>();
+ List<Integer> xBins2D = new ArrayList<Integer>();
+ List<Double> xBins2DMin = new ArrayList<Double>();
+ List<Double> xBins2DMax = new ArrayList<Double>();
+ List<Integer> yBins2D = new ArrayList<Integer>();
+ List<Double> yBins2DMin = new ArrayList<Double>();
+ List<Double> yBins2DMax = new ArrayList<Double>();
+ List<String> histogramNames1D = new ArrayList<String>();
+ List<String> histogramNames2D = new ArrayList<String>();
+ for(String objectName : objectNameList) {
+ // Get the object.
+ IManagedObject object = tree.find(objectName);
+
+ // If it is a 1D histogram, process it.
+ if(object instanceof IHistogram1D) {
+ // Add the object to the 1D histogram list.
+ histogramNames1D.add(objectName);
+
+ // Get the bin size.
+ IHistogram1D plot = (IHistogram1D) object;
+ xBins1D.add(plot.axis().bins());
+ xBins1DMin.add(plot.axis().lowerEdge());
+ xBins1DMax.add(plot.axis().upperEdge());
+ }
+
+		// If it is a 2D histogram, process it.
+ else if(object instanceof IHistogram2D) {
+ // Add the object to the 2D histogram list.
+ histogramNames2D.add(objectName);
+
+ // Get the bin size.
+ IHistogram2D plot = (IHistogram2D) object;
+ xBins2D.add(plot.xAxis().bins());
+ xBins2DMin.add(plot.xAxis().lowerEdge());
+ xBins2DMax.add(plot.xAxis().upperEdge());
+ yBins2D.add(plot.yAxis().bins());
+ yBins2DMin.add(plot.yAxis().lowerEdge());
+ yBins2DMax.add(plot.yAxis().upperEdge());
+ }
+ }
+
+ // Create plots corresponding to each of the plot objects.
+ AIDA aida = AIDA.defaultInstance();
+ List<IHistogram1D> histograms1D = new ArrayList<IHistogram1D>(histogramNames1D.size());
+ List<IHistogram2D> histograms2D = new ArrayList<IHistogram2D>(histogramNames2D.size());
+ for(int i = 0; i < histogramNames1D.size(); i++) {
+ IHistogram1D histogram = aida.histogram1D(histogramNames1D.get(i), xBins1D.get(i), xBins1DMin.get(i), xBins1DMax.get(i));
+ histograms1D.add(histogram);
+ }
+ for(int i = 0; i < histogramNames2D.size(); i++) {
+ IHistogram2D histogram = aida.histogram2D(histogramNames2D.get(i), xBins2D.get(i), xBins2DMin.get(i), xBins2DMax.get(i), yBins2D.get(i), yBins2DMin.get(i), yBins2DMax.get(i));
+ histograms2D.add(histogram);
+ }
+
+ // Iterate over each file and add their entries to the compiled
+ // plots.
+ for(File file : plotFiles) {
+ // Open the file.
+ ITree fileTree = af.createTreeFactory().create(file.getAbsolutePath());
+
+ // For each plot, get the equivalent plot from the file
+ // and add each bin entry to the compiled plot.
+ for(int i = 0; i < histogramNames1D.size(); i++) {
+ // Get the histogram object.
+ IHistogram1D histogram = (IHistogram1D) fileTree.find(histogramNames1D.get(i));
+
+ // Iterate over the bins.
+ for(int x = 0; x < xBins1D.get(i); x++) {
+ // Get the entries in this bin and the bin average.
+ int entries = histogram.binEntries(x);
+ double average = histogram.binMean(x);
+
+ // Add the entries to the compiled plot.
+ for(int j = 0; j < entries; j++) {
+ histograms1D.get(i).fill(average);
+ }
+ }
+ }
+ for(int i = 0; i < histogramNames2D.size(); i++) {
+ // Get the histogram object.
+ IHistogram2D histogram = (IHistogram2D) fileTree.find(histogramNames2D.get(i));
+
+ // Iterate over the bins.
+ for(int x = 0; x < xBins2D.get(i); x++) {
+ for(int y = 0; y < yBins2D.get(i); y++) {
+ // Get the entries in this bin and the bin average.
+ int entries = histogram.binEntries(x, y);
+ double averageX = histogram.binMeanX(x, y);
+ double averageY = histogram.binMeanY(x, y);
+
+ // Add the entries to the compiled plot.
+ for(int j = 0; j < entries; j++) {
+ histograms2D.get(i).fill(averageX, averageY);
+ }
+ }
+ }
+ }
+ }
+
+ // Save the compiled plots to a new file.
+ aida.saveAs(rootDir + "compiled-plots.aida");
+ System.out.println("Plots written to path " + rootDir + "compiled-plots.aida");
+ }
+
+ private static final List<String> getTreeFiles(ITree tree) {
+ return getTreeFiles(tree, "/");
+ }
+
+ private static final List<String> getTreeFiles(ITree tree, String rootDir) {
+ // Make a list to contain the plot names.
+ List<String> list = new ArrayList<String>();
+
+ // Iterate over the objects at the indicated directory of the tree.
+ String objectNames[] = tree.listObjectNames(rootDir);
+ for(String objectName : objectNames) {
+ // Convert the object name to a char array and check the
+ // last character. Directories end in '/'.
+ char[] plotChars = objectName.toCharArray();
+
+ // If the object is a directory, process any objects inside
+ // of it as well.
+ if(plotChars[plotChars.length - 1] == '/') {
+ List<String> dirList = getTreeFiles(tree, objectName);
+ list.addAll(dirList);
+ }
+
+ // Otherwise, just add the object to the list.
+ else { list.add(objectName); }
+ }
+
+ // Return the compiled list.
+ return list;
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/FormattedPlot.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/FormattedPlot.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/FormattedPlot.java Wed Apr 27 11:11:32 2016
@@ -1,25 +1,25 @@
package org.hps.users.kmccarty.plots;
public abstract class FormattedPlot {
- private final String xAxis;
- private final String yAxis;
- private final String plotName;
-
- public FormattedPlot(String xAxis, String yAxis, String plotName) {
- this.xAxis = xAxis;
- this.yAxis = yAxis;
- this.plotName = plotName;
- }
-
- public String getPlotName() {
- return plotName;
- }
-
- public String getXAxisName() {
- return xAxis;
- }
-
- public String getYAxisName() {
- return yAxis;
- }
+ private final String xAxis;
+ private final String yAxis;
+ private final String plotName;
+
+ public FormattedPlot(String xAxis, String yAxis, String plotName) {
+ this.xAxis = xAxis;
+ this.yAxis = yAxis;
+ this.plotName = plotName;
+ }
+
+ public String getPlotName() {
+ return plotName;
+ }
+
+ public String getXAxisName() {
+ return xAxis;
+ }
+
+ public String getYAxisName() {
+ return yAxis;
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/FormattedPlot1D.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/FormattedPlot1D.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/FormattedPlot1D.java Wed Apr 27 11:11:32 2016
@@ -5,37 +5,37 @@
import hep.aida.IHistogram1D;
public class FormattedPlot1D extends FormattedPlot {
- private final ColorStyle style;
- private final IHistogram1D plot;
- private final double axisRange;
-
- public FormattedPlot1D(IHistogram1D plot, ColorStyle style, String xAxis, String yAxis, String plotName) {
- super(xAxis, yAxis, plotName);
- this.plot = plot;
- this.style = style;
- this.axisRange = -1;
- }
-
- public FormattedPlot1D(IHistogram1D plot, ColorStyle style, String xAxis, String yAxis, String plotName, double axisRange) {
- super(xAxis, yAxis, plotName);
- this.plot = plot;
- this.style = style;
- this.axisRange = axisRange;
- }
-
- public IHistogram1D getPlot() {
- return plot;
- }
-
- public ColorStyle getColorStyle() {
- return style;
- }
-
- public boolean definesAxisRange() {
- return axisRange != -1;
- }
-
- public double getAxisRange() {
- return axisRange;
- }
+ private final ColorStyle style;
+ private final IHistogram1D plot;
+ private final double axisRange;
+
+ public FormattedPlot1D(IHistogram1D plot, ColorStyle style, String xAxis, String yAxis, String plotName) {
+ super(xAxis, yAxis, plotName);
+ this.plot = plot;
+ this.style = style;
+ this.axisRange = -1;
+ }
+
+ public FormattedPlot1D(IHistogram1D plot, ColorStyle style, String xAxis, String yAxis, String plotName, double axisRange) {
+ super(xAxis, yAxis, plotName);
+ this.plot = plot;
+ this.style = style;
+ this.axisRange = axisRange;
+ }
+
+ public IHistogram1D getPlot() {
+ return plot;
+ }
+
+ public ColorStyle getColorStyle() {
+ return style;
+ }
+
+ public boolean definesAxisRange() {
+ return axisRange != -1;
+ }
+
+ public double getAxisRange() {
+ return axisRange;
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/FormattedPlot2D.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/FormattedPlot2D.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/FormattedPlot2D.java Wed Apr 27 11:11:32 2016
@@ -3,48 +3,48 @@
import hep.aida.IHistogram2D;
public class FormattedPlot2D extends FormattedPlot {
- private final IHistogram2D plot;
- private final boolean logarithmic;
- private final double xAxisRange;
- private final double yAxisRange;
-
- public FormattedPlot2D(IHistogram2D plot, boolean logarithmic, String xAxis, String yAxis, String plotName) {
- super(xAxis, yAxis, plotName);
- this.plot = plot;
- this.xAxisRange = -1;
- this.yAxisRange = -1;
- this.logarithmic = logarithmic;
- }
-
- public FormattedPlot2D(IHistogram2D plot, boolean logarithmic, String xAxis, String yAxis, String plotName, double xAxisRange, double yAxisRange) {
- super(xAxis, yAxis, plotName);
- this.plot = plot;
- this.xAxisRange = xAxisRange;
- this.yAxisRange = yAxisRange;
- this.logarithmic = logarithmic;
- }
-
- public IHistogram2D getPlot() {
- return plot;
- }
-
- public boolean isLogarithmic() {
- return logarithmic;
- }
-
- public boolean definesXAxisRange() {
- return xAxisRange != -1;
- }
-
- public boolean definesYAxisRange() {
- return yAxisRange != -1;
- }
-
- public double getXAxisRange() {
- return xAxisRange;
- }
-
- public double getYAxisRange() {
- return yAxisRange;
- }
+ private final IHistogram2D plot;
+ private final boolean logarithmic;
+ private final double xAxisRange;
+ private final double yAxisRange;
+
+ public FormattedPlot2D(IHistogram2D plot, boolean logarithmic, String xAxis, String yAxis, String plotName) {
+ super(xAxis, yAxis, plotName);
+ this.plot = plot;
+ this.xAxisRange = -1;
+ this.yAxisRange = -1;
+ this.logarithmic = logarithmic;
+ }
+
+ public FormattedPlot2D(IHistogram2D plot, boolean logarithmic, String xAxis, String yAxis, String plotName, double xAxisRange, double yAxisRange) {
+ super(xAxis, yAxis, plotName);
+ this.plot = plot;
+ this.xAxisRange = xAxisRange;
+ this.yAxisRange = yAxisRange;
+ this.logarithmic = logarithmic;
+ }
+
+ public IHistogram2D getPlot() {
+ return plot;
+ }
+
+ public boolean isLogarithmic() {
+ return logarithmic;
+ }
+
+ public boolean definesXAxisRange() {
+ return xAxisRange != -1;
+ }
+
+ public boolean definesYAxisRange() {
+ return yAxisRange != -1;
+ }
+
+ public double getXAxisRange() {
+ return xAxisRange;
+ }
+
+ public double getYAxisRange() {
+ return yAxisRange;
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/PlotFormatModule.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/PlotFormatModule.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/PlotFormatModule.java Wed Apr 27 11:11:32 2016
@@ -17,189 +17,189 @@
import java.util.List;
public class PlotFormatModule {
- private String width = "2000";
- private String height = "1200";
- private List<FormattedPlot1D> formattedPlots1D = new ArrayList<FormattedPlot1D>();
- private List<FormattedPlot2D> formattedPlots2D = new ArrayList<FormattedPlot2D>();
-
- public void addPlot1D(FormattedPlot1D plot) {
- formattedPlots1D.add(plot);
- }
-
- public void addPlot2D(FormattedPlot2D plot) {
- formattedPlots2D.add(plot);
- }
-
- public void setDisplayHeight(int height) {
- this.height = "" + height;
- }
-
- public void setDisplayWidth(int width) {
- this.width = "" + width;
- }
-
- public void displayPlots() {
- try { processPlots(null); }
- catch (IOException e) { e.printStackTrace(); }
- }
-
- public void savePlots(String filePath) throws IOException {
- processPlots(filePath);
- }
-
- private void processPlots(String filePath) throws IOException {
- // Create a plotter factory.
- IAnalysisFactory af = IAnalysisFactory.create();
- IPlotterFactory plotterFactory = af.createPlotterFactory();
-
- // Format and display the 1D plots.
- for(FormattedPlot1D formattedPlot : formattedPlots1D) {
- // Get the plot.
- IHistogram1D plot = formattedPlot.getPlot();
-
- // Create a plotter and plotting region for the plot.
- IPlotter plotter = plotterFactory.create(plot.title());
- plotter.createRegions(1);
- plotter.region(0).plot(plot);
-
- // Set the axis range.
- PlotterRegion region = (PlotterRegion) plotter.region(0);
- if(formattedPlot.definesAxisRange()) {
- region.getPlot().getXAxis().setMax(formattedPlot.getAxisRange());
- }
-
- // Format the axis labels.
- region.getPlot().setTitle(formattedPlot.getPlotName());
- region.getPlot().getXAxis().setLabel(formattedPlot.getXAxisName());
- region.getPlot().getYAxis().setLabel(formattedPlot.getYAxisName());
-
- // Format the fonts and general plot presentation.
- PlotsFormatter.setDefault1DStyle(region, new ColorStyle[] { formattedPlot.getColorStyle() });
-
- // Set the plotter dimensions.
- plotter.setParameter("plotterWidth", width);
- plotter.setParameter("plotterHeight", height);
-
- // If the file path is null, display the plots. Otherwise,
- // save them to the destination folder.
- if(filePath == null) { plotter.show(); }
- else {
- File plotFile = new File(filePath + formattedPlot.getPlotName() + ".png");
- if(plotFile.exists()) { plotFile.delete(); }
- plotter.writeToFile(filePath + formattedPlot.getPlotName() + ".png");
- System.out.printf("Saved plot \"%s\" to path: %s%n", formattedPlot.getPlotName(), filePath + formattedPlot.getPlotName() + ".png");
- }
- }
-
- // Format and display the 2D plots.
- for(FormattedPlot2D formattedPlot : formattedPlots2D) {
- // Get the plot.
- IHistogram2D plot = formattedPlot.getPlot();
-
- // Create a plotter and plotting region for the plot.
- IPlotter plotter = plotterFactory.create(formattedPlot.getPlotName());
- plotter.createRegions(1);
- plotter.region(0).plot(plot);
-
- // Set the axis range.
- PlotterRegion region = (PlotterRegion) plotter.region(0);
- if(formattedPlot.definesXAxisRange()) {
- region.getPlot().getXAxis().setMax(formattedPlot.getXAxisRange());
- } if(formattedPlot.definesYAxisRange()) {
- region.getPlot().getYAxis().setMax(formattedPlot.getYAxisRange());
- }
-
- // Format the axis labels.
- region.getPlot().setTitle(formattedPlot.getPlotName());
- region.getPlot().getXAxis().setLabel(formattedPlot.getXAxisName());
- region.getPlot().getYAxis().setLabel(formattedPlot.getYAxisName());
-
- // Format the fonts and general plot presentation.
- PlotsFormatter.setDefault2DStyle(region, formattedPlot.isLogarithmic());
-
- // Set the plotter dimensions.
- plotter.setParameter("plotterWidth", width);
- plotter.setParameter("plotterHeight", height);
-
- // If the file path is null, display the plots. Otherwise,
- // save them to the destination folder.
- if(filePath == null) { plotter.show(); }
- else {
- File plotFile = new File(filePath + formattedPlot.getPlotName() + ".png");
- if(plotFile.exists()) { plotFile.delete(); }
- plotter.writeToFile(filePath + formattedPlot.getPlotName() + ".png");
- System.out.printf("Saved plot \"%s\" to path: %s%n", formattedPlot.getPlotName(), filePath + formattedPlot.getPlotName() + ".png");
- }
- }
- }
-
- public void exportPlots(String filePath) throws IOException {
- // Export the 1D plots in a text format.
- for(FormattedPlot1D plot : formattedPlots1D) {
- exportPlot(filePath, plot);
- }
-
- // Export the 2D plots in a text format.
- for(FormattedPlot2D plot : formattedPlots2D) {
- exportPlot(filePath, plot);
- }
- }
-
- private static final void exportPlot(String filePath, FormattedPlot plot) throws IOException {
- // Check if this is a one or two dimensional plot.
- boolean is1D = plot instanceof FormattedPlot1D;
-
- // Create a file object for the plot.
- String plotPath = filePath + plot.getPlotName() + (is1D ? ".aida1D" : ".aida2D");
- File datFile = new File(plotPath);
-
- // If the plot file already exists, delete it.
- if(datFile.exists()) { datFile.delete(); }
-
- // Create a new file for the plot to occupy.
- datFile.createNewFile();
-
- // Get the textual form of the plot.
- String plotText = null;
- if(is1D) { plotText = toTextFormat(((FormattedPlot1D) plot).getPlot()); }
- else { plotText = toTextFormat(((FormattedPlot2D) plot).getPlot()); }
-
- // Write the plot text to the file.
- BufferedWriter writer = new BufferedWriter(new FileWriter(datFile));
- writer.write(plotText);
- writer.close();
-
- // Note that the file was written.
- System.out.printf("Plot \"%s\" was exported to path: %s%n", plot.getPlotName(), plotPath);
- }
-
- private static final String toTextFormat(IHistogram1D plot) {
- // Create a buffer to hold the converted plot.
- StringBuffer buffer = new StringBuffer();
-
- // Iterate over the bins and output the plot in the format of
- // "[BIN_MEAN] [BIN_VALUE]" with a tab delimiter.
- for(int bin = 0; bin < plot.axis().bins(); bin++) {
- buffer.append(String.format("%f\t%f%n", plot.binMean(bin), plot.binHeight(bin)));
- }
-
- // Return the converted file.
- return buffer.toString();
- }
-
- private static final String toTextFormat(IHistogram2D plot) {
- // Create a buffer to hold the converted plot.
- StringBuffer buffer = new StringBuffer();
-
- // Iterate over the bins and output the plot in the format of
- // "[X_BIN_MEAN] [Y_BIN_MEAN] [BIN_VALUE]" with a tab delimiter.
- for(int xBin = 0; xBin < plot.xAxis().bins(); xBin++) {
- for(int yBin = 0; yBin < plot.yAxis().bins(); yBin++) {
- buffer.append(String.format("%f\t%f\t%f%n", plot.binMeanX(xBin, yBin), plot.binMeanY(xBin, yBin), plot.binHeight(xBin, yBin)));
- }
- }
-
- // Return the converted file.
- return buffer.toString();
- }
+ private String width = "2000";
+ private String height = "1200";
+ private List<FormattedPlot1D> formattedPlots1D = new ArrayList<FormattedPlot1D>();
+ private List<FormattedPlot2D> formattedPlots2D = new ArrayList<FormattedPlot2D>();
+
+ public void addPlot1D(FormattedPlot1D plot) {
+ formattedPlots1D.add(plot);
+ }
+
+ public void addPlot2D(FormattedPlot2D plot) {
+ formattedPlots2D.add(plot);
+ }
+
+ public void setDisplayHeight(int height) {
+ this.height = "" + height;
+ }
+
+ public void setDisplayWidth(int width) {
+ this.width = "" + width;
+ }
+
+ public void displayPlots() {
+ try { processPlots(null); }
+ catch (IOException e) { e.printStackTrace(); }
+ }
+
+ public void savePlots(String filePath) throws IOException {
+ processPlots(filePath);
+ }
+
+ private void processPlots(String filePath) throws IOException {
+ // Create a plotter factory.
+ IAnalysisFactory af = IAnalysisFactory.create();
+ IPlotterFactory plotterFactory = af.createPlotterFactory();
+
+ // Format and display the 1D plots.
+ for(FormattedPlot1D formattedPlot : formattedPlots1D) {
+ // Get the plot.
+ IHistogram1D plot = formattedPlot.getPlot();
+
+ // Create a plotter and plotting region for the plot.
+ IPlotter plotter = plotterFactory.create(plot.title());
+ plotter.createRegions(1);
+ plotter.region(0).plot(plot);
+
+ // Set the axis range.
+ PlotterRegion region = (PlotterRegion) plotter.region(0);
+ if(formattedPlot.definesAxisRange()) {
+ region.getPlot().getXAxis().setMax(formattedPlot.getAxisRange());
+ }
+
+ // Format the axis labels.
+ region.getPlot().setTitle(formattedPlot.getPlotName());
+ region.getPlot().getXAxis().setLabel(formattedPlot.getXAxisName());
+ region.getPlot().getYAxis().setLabel(formattedPlot.getYAxisName());
+
+ // Format the fonts and general plot presentation.
+ PlotsFormatter.setDefault1DStyle(region, new ColorStyle[] { formattedPlot.getColorStyle() });
+
+ // Set the plotter dimensions.
+ plotter.setParameter("plotterWidth", width);
+ plotter.setParameter("plotterHeight", height);
+
+ // If the file path is null, display the plots. Otherwise,
+ // save them to the destination folder.
+ if(filePath == null) { plotter.show(); }
+ else {
+ File plotFile = new File(filePath + formattedPlot.getPlotName() + ".png");
+ if(plotFile.exists()) { plotFile.delete(); }
+ plotter.writeToFile(filePath + formattedPlot.getPlotName() + ".png");
+ System.out.printf("Saved plot \"%s\" to path: %s%n", formattedPlot.getPlotName(), filePath + formattedPlot.getPlotName() + ".png");
+ }
+ }
+
+ // Format and display the 2D plots.
+ for(FormattedPlot2D formattedPlot : formattedPlots2D) {
+ // Get the plot.
+ IHistogram2D plot = formattedPlot.getPlot();
+
+ // Create a plotter and plotting region for the plot.
+ IPlotter plotter = plotterFactory.create(formattedPlot.getPlotName());
+ plotter.createRegions(1);
+ plotter.region(0).plot(plot);
+
+ // Set the axis range.
+ PlotterRegion region = (PlotterRegion) plotter.region(0);
+ if(formattedPlot.definesXAxisRange()) {
+ region.getPlot().getXAxis().setMax(formattedPlot.getXAxisRange());
+ } if(formattedPlot.definesYAxisRange()) {
+ region.getPlot().getYAxis().setMax(formattedPlot.getYAxisRange());
+ }
+
+ // Format the axis labels.
+ region.getPlot().setTitle(formattedPlot.getPlotName());
+ region.getPlot().getXAxis().setLabel(formattedPlot.getXAxisName());
+ region.getPlot().getYAxis().setLabel(formattedPlot.getYAxisName());
+
+ // Format the fonts and general plot presentation.
+ PlotsFormatter.setDefault2DStyle(region, formattedPlot.isLogarithmic());
+
+ // Set the plotter dimensions.
+ plotter.setParameter("plotterWidth", width);
+ plotter.setParameter("plotterHeight", height);
+
+ // If the file path is null, display the plots. Otherwise,
+ // save them to the destination folder.
+ if(filePath == null) { plotter.show(); }
+ else {
+ File plotFile = new File(filePath + formattedPlot.getPlotName() + ".png");
+ if(plotFile.exists()) { plotFile.delete(); }
+ plotter.writeToFile(filePath + formattedPlot.getPlotName() + ".png");
+ System.out.printf("Saved plot \"%s\" to path: %s%n", formattedPlot.getPlotName(), filePath + formattedPlot.getPlotName() + ".png");
+ }
+ }
+ }
+
+ public void exportPlots(String filePath) throws IOException {
+ // Export the 1D plots in a text format.
+ for(FormattedPlot1D plot : formattedPlots1D) {
+ exportPlot(filePath, plot);
+ }
+
+ // Export the 2D plots in a text format.
+ for(FormattedPlot2D plot : formattedPlots2D) {
+ exportPlot(filePath, plot);
+ }
+ }
+
+ private static final void exportPlot(String filePath, FormattedPlot plot) throws IOException {
+ // Check if this is a one or two dimensional plot.
+ boolean is1D = plot instanceof FormattedPlot1D;
+
+ // Create a file object for the plot.
+ String plotPath = filePath + plot.getPlotName() + (is1D ? ".aida1D" : ".aida2D");
+ File datFile = new File(plotPath);
+
+ // If the plot file already exists, delete it.
+ if(datFile.exists()) { datFile.delete(); }
+
+ // Create a new file for the plot to occupy.
+ datFile.createNewFile();
+
+ // Get the textual form of the plot.
+ String plotText = null;
+ if(is1D) { plotText = toTextFormat(((FormattedPlot1D) plot).getPlot()); }
+ else { plotText = toTextFormat(((FormattedPlot2D) plot).getPlot()); }
+
+ // Write the plot text to the file.
+ BufferedWriter writer = new BufferedWriter(new FileWriter(datFile));
+ writer.write(plotText);
+ writer.close();
+
+ // Note that the file was written.
+ System.out.printf("Plot \"%s\" was exported to path: %s%n", plot.getPlotName(), plotPath);
+ }
+
+ private static final String toTextFormat(IHistogram1D plot) {
+ // Create a buffer to hold the converted plot.
+ StringBuffer buffer = new StringBuffer();
+
+ // Iterate over the bins and output the plot in the format of
+ // "[BIN_MEAN] [BIN_VALUE]" with a tab delimiter.
+ for(int bin = 0; bin < plot.axis().bins(); bin++) {
+ buffer.append(String.format("%f\t%f%n", plot.binMean(bin), plot.binHeight(bin)));
+ }
+
+ // Return the converted file.
+ return buffer.toString();
+ }
+
+ private static final String toTextFormat(IHistogram2D plot) {
+ // Create a buffer to hold the converted plot.
+ StringBuffer buffer = new StringBuffer();
+
+ // Iterate over the bins and output the plot in the format of
+ // "[X_BIN_MEAN] [Y_BIN_MEAN] [BIN_VALUE]" with a tab delimiter.
+ for(int xBin = 0; xBin < plot.xAxis().bins(); xBin++) {
+ for(int yBin = 0; yBin < plot.yAxis().bins(); yBin++) {
+ buffer.append(String.format("%f\t%f\t%f%n", plot.binMeanX(xBin, yBin), plot.binMeanY(xBin, yBin), plot.binHeight(xBin, yBin)));
+ }
+ }
+
+ // Return the converted file.
+ return buffer.toString();
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/PlotsFormatter.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/PlotsFormatter.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/PlotsFormatter.java Wed Apr 27 11:11:32 2016
@@ -8,120 +8,120 @@
import java.awt.Font;
public class PlotsFormatter {
- // Define plot fonts.
- public static final Font BASIC_FONT = new Font("Calibri", Font.PLAIN, 30);
- public static final Font AXIS_FONT = new Font("Calibri", Font.BOLD, 35);
- public static final Font TITLE_FONT = new Font("Calibri", Font.BOLD, 45);
-
- // Defines the color style options for plot data.
- public static enum ColorStyle {
- MS_BLUE(new Color( 79, 129, 189), new Color( 36, 64, 97)), MS_ORANGE(new Color(247, 150, 70), new Color(152, 72, 6)),
- MS_RED(new Color(192, 80, 77), new Color( 99, 36, 35)), GREY(new Color(166, 166, 166), new Color( 89, 89, 89)),
- MS_GREEN(new Color(155, 187, 89), new Color( 79, 98, 40)), CRIMSON(new Color(161, 0, 0), new Color(104, 0, 0)),
- RUST(new Color(161, 80, 0), new Color(105, 80, 0)), YELLOW(new Color(161, 161, 0), new Color(122, 109, 8)),
- FOREST(new Color( 65, 102, 0), new Color( 37, 79, 0)), GREEN(new Color( 7, 132, 70), new Color( 7, 82, 30)),
- TEAL(new Color( 0, 130, 130), new Color( 0, 90, 100)), CERULEAN(new Color( 0, 86, 130), new Color( 0, 28, 83)),
- BLUE(new Color( 0, 33, 203), new Color( 0, 0, 137)), INDIGO(new Color( 68, 10, 127), new Color( 0, 0, 61)),
- PURPLE(new Color(106, 0, 106), new Color( 63, 0, 56)), FUSCHIA(new Color(119, 0, 60), new Color( 60, 0, 60));
-
- private final Color fillColor;
- private final Color lineColor;
-
- private ColorStyle(Color fillColor, Color lineColor) {
- this.fillColor = fillColor;
- this.lineColor = lineColor;
- }
-
- public Color getFillColor() { return fillColor; }
-
- public Color getLineColor() { return lineColor; }
- };
-
- /**
- * Sets the plot display formatting for 1D plots.
- * @param region - The plotter region to format.
- * @param color - The data color settings to use.
- */
- public static final void setDefault1DStyle(PlotterRegion region, ColorStyle[] color) {
- // Get the names of each plot on in the region.
- String[] dataNames = region.getAllDataNames();
-
- // Check whether this is an overlay plot. Overlay plots contain
- // more than one data name.
- boolean overlay = (dataNames.length > 1 ? true : false);
-
- // Iterate over each plot in the region.
- for(int i = 0; i < dataNames.length; i++) {
- // Set the overlay style if needed.
- if(overlay) {
- // Get the fill style for the current data type.
- JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
-
- // Set the histogram style to display thick-lined bars
- // with no fill. The color is set by the "color" argument.
- fillStyle.setHistogramFill(false);
- fillStyle.setHistogramBarLineWidth(3);
- fillStyle.setHistogramBarLineColor(color[i].getFillColor());
-
- // Set the legend text style.
- region.getPlot().getLegend().setFont(new Font("Calibri", Font.PLAIN, 20));
- }
-
- // Otherwise, set the fill style for a single plot.
- else {
- // Get the fill style for the current data type.
- JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
-
- // Set the histogram style to display thick-lined bars
- // with a fill color. The colors are defined by the
- // "color" argument.
- fillStyle.setHistogramBarLineWidth(3);
- fillStyle.setHistogramBarColor(color[i].getFillColor());
- fillStyle.setHistogramBarLineColor(color[i].getLineColor());
- }
-
- // Set the statistics box style.
- region.getPlot().getStats().setVisible(true);
- region.getPlot().getStats().setFont(BASIC_FONT);
-
- // Set the title font.
- region.getPlot().getTitleObject().setFont(TITLE_FONT);
-
- // Set the axis tick-mark fonts.
- region.getPlot().getXAxis().setFont(BASIC_FONT);
- region.getPlot().getYAxis().setFont(BASIC_FONT);
- region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
- region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
- }
- }
-
- /**
- * Sets the plot display formatting for 1D plots.
- * @param region - The plotter region to format.
- */
- public static final void setDefault2DStyle(PlotterRegion region, boolean logarithmic) {
- // Get the fill style object. 2D plots should never be overlay
- // plots, so there should only ever be one data name.
- JASHist2DHistogramStyle fillStyle = (JASHist2DHistogramStyle) region.getDataForName(region.getAllDataNames()[0]).getStyle();
-
- // Set the fill style for a two-dimensional plot.
- if(logarithmic) { fillStyle.setLogZ(true); }
- fillStyle.setHistStyle(JASHist2DHistogramStyle.STYLE_COLORMAP);
- fillStyle.setColorMapScheme(JASHist2DHistogramStyle.COLORMAP_RAINBOW);
-
- // Make the statistics box invisible.
- region.getPlot().getStats().setVisible(false);
-
- // Set the general plot font (which is also the z-axis font).
- region.getPlot().setFont(BASIC_FONT);
-
- // Set the title font.
- region.getPlot().getTitleObject().setFont(TITLE_FONT);
-
- // Set the axis tick-mark fonts.
- region.getPlot().getXAxis().setFont(BASIC_FONT);
- region.getPlot().getYAxis().setFont(BASIC_FONT);
- region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
- region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
- }
+ // Define plot fonts.
+ public static final Font BASIC_FONT = new Font("Calibri", Font.PLAIN, 30);
+ public static final Font AXIS_FONT = new Font("Calibri", Font.BOLD, 35);
+ public static final Font TITLE_FONT = new Font("Calibri", Font.BOLD, 45);
+
+ // Defines the color style options for plot data.
+ public static enum ColorStyle {
+ MS_BLUE(new Color( 79, 129, 189), new Color( 36, 64, 97)), MS_ORANGE(new Color(247, 150, 70), new Color(152, 72, 6)),
+ MS_RED(new Color(192, 80, 77), new Color( 99, 36, 35)), GREY(new Color(166, 166, 166), new Color( 89, 89, 89)),
+ MS_GREEN(new Color(155, 187, 89), new Color( 79, 98, 40)), CRIMSON(new Color(161, 0, 0), new Color(104, 0, 0)),
+ RUST(new Color(161, 80, 0), new Color(105, 80, 0)), YELLOW(new Color(161, 161, 0), new Color(122, 109, 8)),
+ FOREST(new Color( 65, 102, 0), new Color( 37, 79, 0)), GREEN(new Color( 7, 132, 70), new Color( 7, 82, 30)),
+ TEAL(new Color( 0, 130, 130), new Color( 0, 90, 100)), CERULEAN(new Color( 0, 86, 130), new Color( 0, 28, 83)),
+ BLUE(new Color( 0, 33, 203), new Color( 0, 0, 137)), INDIGO(new Color( 68, 10, 127), new Color( 0, 0, 61)),
+ PURPLE(new Color(106, 0, 106), new Color( 63, 0, 56)), FUSCHIA(new Color(119, 0, 60), new Color( 60, 0, 60));
+
+ private final Color fillColor;
+ private final Color lineColor;
+
+ private ColorStyle(Color fillColor, Color lineColor) {
+ this.fillColor = fillColor;
+ this.lineColor = lineColor;
+ }
+
+ public Color getFillColor() { return fillColor; }
+
+ public Color getLineColor() { return lineColor; }
+ };
+
+ /**
+ * Sets the plot display formatting for 1D plots.
+ * @param region - The plotter region to format.
+ * @param color - The data color settings to use.
+ */
+ public static final void setDefault1DStyle(PlotterRegion region, ColorStyle[] color) {
+ // Get the names of each plot on in the region.
+ String[] dataNames = region.getAllDataNames();
+
+ // Check whether this is an overlay plot. Overlay plots contain
+ // more than one data name.
+ boolean overlay = (dataNames.length > 1 ? true : false);
+
+ // Iterate over each plot in the region.
+ for(int i = 0; i < dataNames.length; i++) {
+ // Set the overlay style if needed.
+ if(overlay) {
+ // Get the fill style for the current data type.
+ JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
+
+ // Set the histogram style to display thick-lined bars
+ // with no fill. The color is set by the "color" argument.
+ fillStyle.setHistogramFill(false);
+ fillStyle.setHistogramBarLineWidth(3);
+ fillStyle.setHistogramBarLineColor(color[i].getFillColor());
+
+ // Set the legend text style.
+ region.getPlot().getLegend().setFont(new Font("Calibri", Font.PLAIN, 20));
+ }
+
+ // Otherwise, set the fill style for a single plot.
+ else {
+ // Get the fill style for the current data type.
+ JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
+
+ // Set the histogram style to display thick-lined bars
+ // with a fill color. The colors are defined by the
+ // "color" argument.
+ fillStyle.setHistogramBarLineWidth(3);
+ fillStyle.setHistogramBarColor(color[i].getFillColor());
+ fillStyle.setHistogramBarLineColor(color[i].getLineColor());
+ }
+
+ // Set the statistics box style.
+ region.getPlot().getStats().setVisible(true);
+ region.getPlot().getStats().setFont(BASIC_FONT);
+
+ // Set the title font.
+ region.getPlot().getTitleObject().setFont(TITLE_FONT);
+
+ // Set the axis tick-mark fonts.
+ region.getPlot().getXAxis().setFont(BASIC_FONT);
+ region.getPlot().getYAxis().setFont(BASIC_FONT);
+ region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
+ region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
+ }
+ }
+
+ /**
+ * Sets the plot display formatting for 1D plots.
+ * @param region - The plotter region to format.
+ */
+ public static final void setDefault2DStyle(PlotterRegion region, boolean logarithmic) {
+ // Get the fill style object. 2D plots should never be overlay
+ // plots, so there should only ever be one data name.
+ JASHist2DHistogramStyle fillStyle = (JASHist2DHistogramStyle) region.getDataForName(region.getAllDataNames()[0]).getStyle();
+
+ // Set the fill style for a two-dimensional plot.
+ if(logarithmic) { fillStyle.setLogZ(true); }
+ fillStyle.setHistStyle(JASHist2DHistogramStyle.STYLE_COLORMAP);
+ fillStyle.setColorMapScheme(JASHist2DHistogramStyle.COLORMAP_RAINBOW);
+
+ // Make the statistics box invisible.
+ region.getPlot().getStats().setVisible(false);
+
+ // Set the general plot font (which is also the z-axis font).
+ region.getPlot().setFont(BASIC_FONT);
+
+ // Set the title font.
+ region.getPlot().getTitleObject().setFont(TITLE_FONT);
+
+ // Set the axis tick-mark fonts.
+ region.getPlot().getXAxis().setFont(BASIC_FONT);
+ region.getPlot().getYAxis().setFont(BASIC_FONT);
+ region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
+ region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/InvariantMassPlotsFormatter.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/InvariantMassPlotsFormatter.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/InvariantMassPlotsFormatter.java Wed Apr 27 11:11:32 2016
@@ -19,317 +19,317 @@
import hep.aida.ref.plotter.PlotterRegion;
public class InvariantMassPlotsFormatter {
- // Define plot fonts.
- private static final Font BASIC_FONT = new Font("Calibri", Font.PLAIN, 20);
- private static final Font AXIS_FONT = new Font("Calibri", Font.BOLD, 25);
- private static final Font TITLE_FONT = new Font("Calibri", Font.BOLD, 35);
-
- // Defines the color style options for plot data.
- private enum ColorStyle {
- MS_BLUE(new Color( 79, 129, 189), new Color( 36, 64, 97)), MS_ORANGE(new Color(247, 150, 70), new Color(152, 72, 6)),
- MS_RED(new Color(192, 80, 77), new Color( 99, 36, 35)), GREY(new Color(166, 166, 166), new Color( 89, 89, 89)),
- MS_GREEN(new Color(155, 187, 89), new Color( 79, 98, 40)), CRIMSON(new Color(161, 0, 0), new Color(104, 0, 0)),
- RUST(new Color(161, 80, 0), new Color(105, 80, 0)), YELLOW(new Color(161, 161, 0), new Color(122, 109, 8)),
- FOREST(new Color( 65, 102, 0), new Color( 37, 79, 0)), GREEN(new Color( 7, 132, 70), new Color( 7, 82, 30)),
- TEAL(new Color( 0, 130, 130), new Color( 0, 90, 100)), CERULEAN(new Color( 0, 86, 130), new Color( 0, 28, 83)),
- BLUE(new Color( 0, 33, 203), new Color( 0, 0, 137)), INDIGO(new Color( 68, 10, 127), new Color( 0, 0, 61)),
- PURPLE(new Color(106, 0, 106), new Color( 63, 0, 56)), FUSCHIA(new Color(119, 0, 60), new Color( 60, 0, 60));
-
- private final Color fillColor;
- private final Color lineColor;
-
- private ColorStyle(Color fillColor, Color lineColor) {
- this.fillColor = fillColor;
- this.lineColor = lineColor;
- }
-
- public Color getFillColor() { return fillColor; }
-
- public Color getLineColor() { return lineColor; }
- };
-
- /**
- * Loads all plots in a file and formats them according to the
- * indicated style.
- * @param args - Unused default executable parameter.
- * @throws IOException Occurs if there is an issue opening the file.
- */
- public static void main(String[] args) throws IOException {
- // Define the root directory for the plots.
- String rootDir = "D:\\cygwin64\\home\\Kyle\\";
-
- // Define the new name of the file containing the trigger plots.
- String[] plotFile = {
- rootDir + "temp.aida"
- };
-
- // Define the run numbers for each file.
- String[] runNumber = { "1 Hits", "2 Hits" };
-
- // Define the scaling factors for each plot.
- double scaleFactor = 13.254;
-
- // Get the plots file and open it.
- IAnalysisFactory af = IAnalysisFactory.create();
- ITree[] tree = new ITree[plotFile.length];
- for(int i = 0; i < plotFile.length; i++) {
- tree[i] = af.createTreeFactory().create(plotFile[i]);
- if(tree[i] == null) { throw new IllegalArgumentException("Unable to load plot file."); }
- }
-
- // Get the histograms.
- IHistogram1D[] invariantMassPlots = new IHistogram1D[3];
- invariantMassPlots[0] = (IHistogram1D) tree[0].find("Trident Analysis/Particle Invariant Mass (1 Hit)");
- invariantMassPlots[1] = (IHistogram1D) tree[0].find("Trident Analysis/Particle Invariant Mass (2 Hit)");
- IHistogram1D electronEnergyPlot = (IHistogram1D) tree[0].find("Trident Analysis/Electron Energy");
- IHistogram1D positronEnergyPlot = (IHistogram1D) tree[0].find("Trident Analysis/Positron Energy");
- IHistogram1D energySumPlot = (IHistogram1D) tree[0].find("Trident Analysis/Energy Sum Distribution");
- IHistogram2D energySum2DPlot = (IHistogram2D) tree[0].find("Trident Analysis/2D Energy Distribution");
- IHistogram1D tridentElectronEnergyPlot = (IHistogram1D) tree[0].find("Trident Analysis/Trident Electron Energy");
- IHistogram1D tridentPositronEnergyPlot = (IHistogram1D) tree[0].find("Trident Analysis/Trident Positron Energy");
-
- // Define the plot titles and arrays.
- IHistogram[] plots = { electronEnergyPlot, positronEnergyPlot, energySumPlot, tridentElectronEnergyPlot, tridentPositronEnergyPlot };
- String[] titles = { "Electron Energy", "Positron Energy", "Energy Sum", "Trident Electron Energy", "Trident Positron Energy" };
- String[] xTitles = { "Energy (GeV)", "Energy (GeV)", "Energy Sum (GeV)", "Energy (GeV)", "Energy (GeV)" };
-
- // Re-bin the histograms to have 5-times larger bins. First,
- // get the bin count and upper and lower bounds of the plot.
- int bins = invariantMassPlots[0].axis().bins();
- double low = invariantMassPlots[0].axis().binLowerEdge(0);
- double high = invariantMassPlots[0].axis().binUpperEdge(invariantMassPlots[0].axis().bins() - 1);
-
- // Create new plots with the larger bin sizes.
- AIDA aida = AIDA.defaultInstance();
- IHistogram1D[] newPlot = new IHistogram1D[2];
- newPlot[0] = aida.histogram1D("Particle Invariant Mass (1 Hit)", bins / 5, low, high);
- newPlot[1] = aida.histogram1D("Particle Invariant Mass (2 Hit)", bins / 5, low, high);
-
- // Populate the new plots with the data from the old ones.
- for(int j = 0; j < 2; j++) {
- for(int i = 0; i < bins; i++) {
- int entries = invariantMassPlots[j].binEntries(i);
- double center = invariantMassPlots[j].axis().binCenter(i);
- for(int k = 0; k < entries; k++) {
- newPlot[j].fill(center);
- }
- }
- }
-
- // Replace the old plots.
- invariantMassPlots = newPlot;
-
- // Create a plotter factory.
- IPlotterFactory plotterFactory = af.createPlotterFactory();
-
- // Format and display the basic histograms.
- for(int i = 0; i < plots.length; i++) {
- // Scale the histogram by the appropriate scaling factor.
- plots[i].scale(1.0 / scaleFactor);
-
- // Create a plotter and plotting region for the plot.
- IPlotter plotter = plotterFactory.create(titles[i]);
- plotter.createRegions(1);
- plotter.region(0).plot(plots[i]);
-
- // Format the axis labels.
- PlotterRegion region = (PlotterRegion) plotter.region(0);
- region.getPlot().setTitle(titles[i]);
- region.getPlot().getXAxis().setLabel(xTitles[i]);
- region.getPlot().getYAxis().setLabel("Rate (Hz)");
-
- // Format the fonts and general plot presentation.
- setDefault1DStyle(region, new ColorStyle[] { ColorStyle.GREY });
-
- // Show the plot.
- plotter.setParameter("plotterWidth", "2000");
- plotter.setParameter("plotterHeight", "1200");
- plotter.show();
- }
-
- // Format and display the 2D histogram.
- energySum2DPlot.scale(1.0 / scaleFactor);
- IPlotter plotter2D = plotterFactory.create("2D Energy Sum");
- plotter2D.createRegions(1);
- plotter2D.region(0).plot(energySum2DPlot);
-
- // Format the axis labels.
- PlotterRegion region2D = (PlotterRegion) plotter2D.region(0);
- region2D.getPlot().setTitle("2D Energy Sum");
- region2D.getPlot().getXAxis().setLabel("Electron Energy (GeV)");
- region2D.getPlot().getYAxis().setLabel("Positron Energy (GeV)");
-
- // Format the fonts and general plot presentation.
- setDefault2DStyle(region2D, false);
-
- // Show the plot.
- plotter2D.setParameter("plotterWidth", "2000");
- plotter2D.setParameter("plotterHeight", "1200");
- plotter2D.show();
-
- // Format and display the histograms.
- for(int i = 0; i < 2; i++) {
- // Scale the histogram by the appropriate scaling factor.
- invariantMassPlots[i].scale(1.0 / scaleFactor);
-
- // Create a plotter and plotting region for the plot.
- IPlotter plotter = plotterFactory.create("Particle Invariant Mass (" + runNumber[i] + ")");
- plotter.createRegions(1);
- plotter.region(0).plot(invariantMassPlots[i]);
-
- // Format the axis labels.
- PlotterRegion region = (PlotterRegion) plotter.region(0);
- region.getPlot().setTitle("Particle Invariant Mass (" + runNumber[i] + ")");
- region.getPlot().getXAxis().setLabel("Invariant Mass (GeV)");
- region.getPlot().getYAxis().setLabel("Rate (Hz)");
-
- // Format the fonts and general plot presentation.
- setDefault1DStyle(region, new ColorStyle[] { ColorStyle.GREY });
-
- // Show the plot.
- plotter.setParameter("plotterWidth", "2000");
- plotter.setParameter("plotterHeight", "1200");
- plotter.show();
- }
-
- // Note which plot is the numerator and which is the denominator.
- int numerator = 0;
- int denominator = 1;
-
- // Create a new histogram to display the ratios of the rates.
- IHistogram1D ratioPlot = AIDA.defaultInstance().histogram1D("Invariant Mass Ratio (" + runNumber[numerator] + " / "
- + runNumber[denominator] + ")", invariantMassPlots[0].axis().bins(),
- invariantMassPlots[0].axis().lowerEdge(), invariantMassPlots[0].axis().upperEdge());
-
- // Iterate over each bin.
- for(int bin = 0; bin < invariantMassPlots[0].axis().bins(); bin++) {
- // Calculate the ratio.
- double ratio = invariantMassPlots[numerator].binHeight(bin) / invariantMassPlots[denominator].binHeight(bin);
-
- // If the ratio is either not a number of infinite, skip
- // this bin.
- if(Double.isNaN(ratio) || Double.isInfinite(ratio)) { continue; }
-
- // Populate the ratio plot bin.
- ratioPlot.fill(invariantMassPlots[0].axis().binCenter(bin), ratio);
- }
-
- // Create a plotter and plotting region for the plot.
- IPlotter plotter = plotterFactory.create("Invariant Mass Ratio (5411 / 5554)");
- plotter.createRegions(1);
- plotter.region(0).plot(ratioPlot);
-
- // Format the axis labels.
- PlotterRegion region = (PlotterRegion) plotter.region(0);
- region.getPlot().setTitle("Invariant Mass Ratio (" + runNumber[numerator] + " / " + runNumber[denominator] + ")");
- region.getPlot().getXAxis().setLabel("Invariant Mass (GeV)");
- region.getPlot().getXAxis().setMin(0.010);
- region.getPlot().getXAxis().setMax(0.060);
- region.getPlot().getYAxis().setLabel("Ratio");
-
- // Format the fonts and general plot presentation.
- setDefault1DStyle(region, new ColorStyle[] { ColorStyle.GREY });
-
- // Disable the error bars.
- JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(region.getAllDataNames()[0]).getStyle();
- fillStyle.setShowErrorBars(false);
-
- // Show the plot.
- plotter.setParameter("plotterWidth", "2000");
- plotter.setParameter("plotterHeight", "1200");
- plotter.show();
-
- // Close the trees.
- for(int i = 0; i < plotFile.length; i++) {
- tree[i].close();
- }
- }
-
- /**
- * Sets the plot display formatting for 1D plots.
- * @param region - The plotter region to format.
- * @param color - The data color settings to use.
- */
- private static final void setDefault1DStyle(PlotterRegion region, ColorStyle[] color) {
- // Get the names of each plot on in the region.
- String[] dataNames = region.getAllDataNames();
-
- // Check whether this is an overlay plot. Overlay plots contain
- // more than one data name.
- boolean overlay = (dataNames.length > 1 ? true : false);
-
- // Iterate over each plot in the region.
- for(int i = 0; i < dataNames.length; i++) {
- // Set the overlay style if needed.
- if(overlay) {
- // Get the fill style for the current data type.
- JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
-
- // Set the histogram style to display thick-lined bars
- // with no fill. The color is set by the "color" argument.
- fillStyle.setHistogramFill(false);
- fillStyle.setHistogramBarLineWidth(3);
- fillStyle.setHistogramBarLineColor(color[i].getFillColor());
-
- // Set the legend text style.
- region.getPlot().getLegend().setFont(new Font("Calibri", Font.PLAIN, 20));
- }
-
- // Otherwise, set the fill style for a single plot.
- else {
- // Get the fill style for the current data type.
- JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
-
- // Set the histogram style to display thick-lined bars
- // with a fill color. The colors are defined by the
- // "color" argument.
- fillStyle.setHistogramBarLineWidth(3);
- fillStyle.setHistogramBarColor(color[i].getFillColor());
- fillStyle.setHistogramBarLineColor(color[i].getLineColor());
- }
-
- // Set the statistics box style.
- region.getPlot().getStats().setVisible(true);
- region.getPlot().getStats().setFont(BASIC_FONT);
-
- // Set the title font.
- region.getPlot().getTitleObject().setFont(TITLE_FONT);
-
- // Set the axis tick-mark fonts.
- region.getPlot().getXAxis().setFont(BASIC_FONT);
- region.getPlot().getYAxis().setFont(BASIC_FONT);
- region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
- region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
- }
- }
-
- /**
- * Sets the plot display formatting for 1D plots.
- * @param region - The plotter region to format.
- */
- private static final void setDefault2DStyle(PlotterRegion region, boolean logarithmic) {
- // Get the fill style object. 2D plots should never be overlay
- // plots, so there should only ever be one data name.
- JASHist2DHistogramStyle fillStyle = (JASHist2DHistogramStyle) region.getDataForName(region.getAllDataNames()[0]).getStyle();
-
- // Set the fill style for a two-dimensional plot.
- if(logarithmic) { fillStyle.setLogZ(true); }
- fillStyle.setHistStyle(JASHist2DHistogramStyle.STYLE_COLORMAP);
- fillStyle.setColorMapScheme(JASHist2DHistogramStyle.COLORMAP_RAINBOW);
-
- // Make the statistics box invisible.
- region.getPlot().getStats().setVisible(false);
-
- // Set the general plot font (which is also the z-axis font).
- region.getPlot().setFont(BASIC_FONT);
-
- // Set the title font.
- region.getPlot().getTitleObject().setFont(TITLE_FONT);
-
- // Set the axis tick-mark fonts.
- region.getPlot().getXAxis().setFont(BASIC_FONT);
- region.getPlot().getYAxis().setFont(BASIC_FONT);
- region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
- region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
- }
+ // Define plot fonts.
+ private static final Font BASIC_FONT = new Font("Calibri", Font.PLAIN, 20);
+ private static final Font AXIS_FONT = new Font("Calibri", Font.BOLD, 25);
+ private static final Font TITLE_FONT = new Font("Calibri", Font.BOLD, 35);
+
+ // Defines the color style options for plot data.
+ private enum ColorStyle {
+ MS_BLUE(new Color( 79, 129, 189), new Color( 36, 64, 97)), MS_ORANGE(new Color(247, 150, 70), new Color(152, 72, 6)),
+ MS_RED(new Color(192, 80, 77), new Color( 99, 36, 35)), GREY(new Color(166, 166, 166), new Color( 89, 89, 89)),
+ MS_GREEN(new Color(155, 187, 89), new Color( 79, 98, 40)), CRIMSON(new Color(161, 0, 0), new Color(104, 0, 0)),
+ RUST(new Color(161, 80, 0), new Color(105, 80, 0)), YELLOW(new Color(161, 161, 0), new Color(122, 109, 8)),
+ FOREST(new Color( 65, 102, 0), new Color( 37, 79, 0)), GREEN(new Color( 7, 132, 70), new Color( 7, 82, 30)),
+ TEAL(new Color( 0, 130, 130), new Color( 0, 90, 100)), CERULEAN(new Color( 0, 86, 130), new Color( 0, 28, 83)),
+ BLUE(new Color( 0, 33, 203), new Color( 0, 0, 137)), INDIGO(new Color( 68, 10, 127), new Color( 0, 0, 61)),
+ PURPLE(new Color(106, 0, 106), new Color( 63, 0, 56)), FUSCHIA(new Color(119, 0, 60), new Color( 60, 0, 60));
+
+ private final Color fillColor;
+ private final Color lineColor;
+
+ private ColorStyle(Color fillColor, Color lineColor) {
+ this.fillColor = fillColor;
+ this.lineColor = lineColor;
+ }
+
+ public Color getFillColor() { return fillColor; }
+
+ public Color getLineColor() { return lineColor; }
+ };
+
+ /**
+ * Loads all plots in a file and formats them according to the
+ * indicated style.
+ * @param args - Unused default executable parameter.
+ * @throws IOException Occurs if there is an issue opening the file.
+ */
+ public static void main(String[] args) throws IOException {
+ // Define the root directory for the plots.
+ String rootDir = "D:\\cygwin64\\home\\Kyle\\";
+
+ // Define the new name of the file containing the trigger plots.
+ String[] plotFile = {
+ rootDir + "temp.aida"
+ };
+
+ // Define the run numbers for each file.
+ String[] runNumber = { "1 Hits", "2 Hits" };
+
+ // Define the scaling factors for each plot.
+ double scaleFactor = 13.254;
+
+ // Get the plots file and open it.
+ IAnalysisFactory af = IAnalysisFactory.create();
+ ITree[] tree = new ITree[plotFile.length];
+ for(int i = 0; i < plotFile.length; i++) {
+ tree[i] = af.createTreeFactory().create(plotFile[i]);
+ if(tree[i] == null) { throw new IllegalArgumentException("Unable to load plot file."); }
+ }
+
+ // Get the histograms.
+ IHistogram1D[] invariantMassPlots = new IHistogram1D[3];
+ invariantMassPlots[0] = (IHistogram1D) tree[0].find("Trident Analysis/Particle Invariant Mass (1 Hit)");
+ invariantMassPlots[1] = (IHistogram1D) tree[0].find("Trident Analysis/Particle Invariant Mass (2 Hit)");
+ IHistogram1D electronEnergyPlot = (IHistogram1D) tree[0].find("Trident Analysis/Electron Energy");
+ IHistogram1D positronEnergyPlot = (IHistogram1D) tree[0].find("Trident Analysis/Positron Energy");
+ IHistogram1D energySumPlot = (IHistogram1D) tree[0].find("Trident Analysis/Energy Sum Distribution");
+ IHistogram2D energySum2DPlot = (IHistogram2D) tree[0].find("Trident Analysis/2D Energy Distribution");
+ IHistogram1D tridentElectronEnergyPlot = (IHistogram1D) tree[0].find("Trident Analysis/Trident Electron Energy");
+ IHistogram1D tridentPositronEnergyPlot = (IHistogram1D) tree[0].find("Trident Analysis/Trident Positron Energy");
+
+ // Define the plot titles and arrays.
+ IHistogram[] plots = { electronEnergyPlot, positronEnergyPlot, energySumPlot, tridentElectronEnergyPlot, tridentPositronEnergyPlot };
+ String[] titles = { "Electron Energy", "Positron Energy", "Energy Sum", "Trident Electron Energy", "Trident Positron Energy" };
+ String[] xTitles = { "Energy (GeV)", "Energy (GeV)", "Energy Sum (GeV)", "Energy (GeV)", "Energy (GeV)" };
+
+ // Re-bin the histograms to have 5-times larger bins. First,
+ // get the bin count and upper and lower bounds of the plot.
+ int bins = invariantMassPlots[0].axis().bins();
+ double low = invariantMassPlots[0].axis().binLowerEdge(0);
+ double high = invariantMassPlots[0].axis().binUpperEdge(invariantMassPlots[0].axis().bins() - 1);
+
+ // Create new plots with the larger bin sizes.
+ AIDA aida = AIDA.defaultInstance();
+ IHistogram1D[] newPlot = new IHistogram1D[2];
+ newPlot[0] = aida.histogram1D("Particle Invariant Mass (1 Hit)", bins / 5, low, high);
+ newPlot[1] = aida.histogram1D("Particle Invariant Mass (2 Hit)", bins / 5, low, high);
+
+ // Populate the new plots with the data from the old ones.
+ for(int j = 0; j < 2; j++) {
+ for(int i = 0; i < bins; i++) {
+ int entries = invariantMassPlots[j].binEntries(i);
+ double center = invariantMassPlots[j].axis().binCenter(i);
+ for(int k = 0; k < entries; k++) {
+ newPlot[j].fill(center);
+ }
+ }
+ }
+
+ // Replace the old plots.
+ invariantMassPlots = newPlot;
+
+ // Create a plotter factory.
+ IPlotterFactory plotterFactory = af.createPlotterFactory();
+
+ // Format and display the basic histograms.
+ for(int i = 0; i < plots.length; i++) {
+ // Scale the histogram by the appropriate scaling factor.
+ plots[i].scale(1.0 / scaleFactor);
+
+ // Create a plotter and plotting region for the plot.
+ IPlotter plotter = plotterFactory.create(titles[i]);
+ plotter.createRegions(1);
+ plotter.region(0).plot(plots[i]);
+
+ // Format the axis labels.
+ PlotterRegion region = (PlotterRegion) plotter.region(0);
+ region.getPlot().setTitle(titles[i]);
+ region.getPlot().getXAxis().setLabel(xTitles[i]);
+ region.getPlot().getYAxis().setLabel("Rate (Hz)");
+
+ // Format the fonts and general plot presentation.
+ setDefault1DStyle(region, new ColorStyle[] { ColorStyle.GREY });
+
+ // Show the plot.
+ plotter.setParameter("plotterWidth", "2000");
+ plotter.setParameter("plotterHeight", "1200");
+ plotter.show();
+ }
+
+ // Format and display the 2D histogram.
+ energySum2DPlot.scale(1.0 / scaleFactor);
+ IPlotter plotter2D = plotterFactory.create("2D Energy Sum");
+ plotter2D.createRegions(1);
+ plotter2D.region(0).plot(energySum2DPlot);
+
+ // Format the axis labels.
+ PlotterRegion region2D = (PlotterRegion) plotter2D.region(0);
+ region2D.getPlot().setTitle("2D Energy Sum");
+ region2D.getPlot().getXAxis().setLabel("Electron Energy (GeV)");
+ region2D.getPlot().getYAxis().setLabel("Positron Energy (GeV)");
+
+ // Format the fonts and general plot presentation.
+ setDefault2DStyle(region2D, false);
+
+ // Show the plot.
+ plotter2D.setParameter("plotterWidth", "2000");
+ plotter2D.setParameter("plotterHeight", "1200");
+ plotter2D.show();
+
+ // Format and display the histograms.
+ for(int i = 0; i < 2; i++) {
+ // Scale the histogram by the appropriate scaling factor.
+ invariantMassPlots[i].scale(1.0 / scaleFactor);
+
+ // Create a plotter and plotting region for the plot.
+ IPlotter plotter = plotterFactory.create("Particle Invariant Mass (" + runNumber[i] + ")");
+ plotter.createRegions(1);
+ plotter.region(0).plot(invariantMassPlots[i]);
+
+ // Format the axis labels.
+ PlotterRegion region = (PlotterRegion) plotter.region(0);
+ region.getPlot().setTitle("Particle Invariant Mass (" + runNumber[i] + ")");
+ region.getPlot().getXAxis().setLabel("Invariant Mass (GeV)");
+ region.getPlot().getYAxis().setLabel("Rate (Hz)");
+
+ // Format the fonts and general plot presentation.
+ setDefault1DStyle(region, new ColorStyle[] { ColorStyle.GREY });
+
+ // Show the plot.
+ plotter.setParameter("plotterWidth", "2000");
+ plotter.setParameter("plotterHeight", "1200");
+ plotter.show();
+ }
+
+ // Note which plot is the numerator and which is the denominator.
+ int numerator = 0;
+ int denominator = 1;
+
+ // Create a new histogram to display the ratios of the rates.
+ IHistogram1D ratioPlot = AIDA.defaultInstance().histogram1D("Invariant Mass Ratio (" + runNumber[numerator] + " / "
+ + runNumber[denominator] + ")", invariantMassPlots[0].axis().bins(),
+ invariantMassPlots[0].axis().lowerEdge(), invariantMassPlots[0].axis().upperEdge());
+
+ // Iterate over each bin.
+ for(int bin = 0; bin < invariantMassPlots[0].axis().bins(); bin++) {
+ // Calculate the ratio.
+ double ratio = invariantMassPlots[numerator].binHeight(bin) / invariantMassPlots[denominator].binHeight(bin);
+
+ // If the ratio is either not a number or infinite, skip
+ // this bin.
+ if(Double.isNaN(ratio) || Double.isInfinite(ratio)) { continue; }
+
+ // Populate the ratio plot bin.
+ ratioPlot.fill(invariantMassPlots[0].axis().binCenter(bin), ratio);
+ }
+
+ // Create a plotter and plotting region for the plot.
+ IPlotter plotter = plotterFactory.create("Invariant Mass Ratio (5411 / 5554)");
+ plotter.createRegions(1);
+ plotter.region(0).plot(ratioPlot);
+
+ // Format the axis labels.
+ PlotterRegion region = (PlotterRegion) plotter.region(0);
+ region.getPlot().setTitle("Invariant Mass Ratio (" + runNumber[numerator] + " / " + runNumber[denominator] + ")");
+ region.getPlot().getXAxis().setLabel("Invariant Mass (GeV)");
+ region.getPlot().getXAxis().setMin(0.010);
+ region.getPlot().getXAxis().setMax(0.060);
+ region.getPlot().getYAxis().setLabel("Ratio");
+
+ // Format the fonts and general plot presentation.
+ setDefault1DStyle(region, new ColorStyle[] { ColorStyle.GREY });
+
+ // Disable the error bars.
+ JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(region.getAllDataNames()[0]).getStyle();
+ fillStyle.setShowErrorBars(false);
+
+ // Show the plot.
+ plotter.setParameter("plotterWidth", "2000");
+ plotter.setParameter("plotterHeight", "1200");
+ plotter.show();
+
+ // Close the trees.
+ for(int i = 0; i < plotFile.length; i++) {
+ tree[i].close();
+ }
+ }
+
+ /**
+ * Sets the plot display formatting for 1D plots.
+ * @param region - The plotter region to format.
+ * @param color - The data color settings to use.
+ */
+ private static final void setDefault1DStyle(PlotterRegion region, ColorStyle[] color) {
+ // Get the names of each plot in the region.
+ String[] dataNames = region.getAllDataNames();
+
+ // Check whether this is an overlay plot. Overlay plots contain
+ // more than one data name.
+ boolean overlay = (dataNames.length > 1 ? true : false);
+
+ // Iterate over each plot in the region.
+ for(int i = 0; i < dataNames.length; i++) {
+ // Set the overlay style if needed.
+ if(overlay) {
+ // Get the fill style for the current data type.
+ JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
+
+ // Set the histogram style to display thick-lined bars
+ // with no fill. The color is set by the "color" argument.
+ fillStyle.setHistogramFill(false);
+ fillStyle.setHistogramBarLineWidth(3);
+ fillStyle.setHistogramBarLineColor(color[i].getFillColor());
+
+ // Set the legend text style.
+ region.getPlot().getLegend().setFont(new Font("Calibri", Font.PLAIN, 20));
+ }
+
+ // Otherwise, set the fill style for a single plot.
+ else {
+ // Get the fill style for the current data type.
+ JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
+
+ // Set the histogram style to display thick-lined bars
+ // with a fill color. The colors are defined by the
+ // "color" argument.
+ fillStyle.setHistogramBarLineWidth(3);
+ fillStyle.setHistogramBarColor(color[i].getFillColor());
+ fillStyle.setHistogramBarLineColor(color[i].getLineColor());
+ }
+
+ // Set the statistics box style.
+ region.getPlot().getStats().setVisible(true);
+ region.getPlot().getStats().setFont(BASIC_FONT);
+
+ // Set the title font.
+ region.getPlot().getTitleObject().setFont(TITLE_FONT);
+
+ // Set the axis tick-mark fonts.
+ region.getPlot().getXAxis().setFont(BASIC_FONT);
+ region.getPlot().getYAxis().setFont(BASIC_FONT);
+ region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
+ region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
+ }
+ }
+
+ /**
+ * Sets the plot display formatting for 2D plots.
+ * @param region - The plotter region to format.
+ */
+ private static final void setDefault2DStyle(PlotterRegion region, boolean logarithmic) {
+ // Get the fill style object. 2D plots should never be overlay
+ // plots, so there should only ever be one data name.
+ JASHist2DHistogramStyle fillStyle = (JASHist2DHistogramStyle) region.getDataForName(region.getAllDataNames()[0]).getStyle();
+
+ // Set the fill style for a two-dimensional plot.
+ if(logarithmic) { fillStyle.setLogZ(true); }
+ fillStyle.setHistStyle(JASHist2DHistogramStyle.STYLE_COLORMAP);
+ fillStyle.setColorMapScheme(JASHist2DHistogramStyle.COLORMAP_RAINBOW);
+
+ // Make the statistics box invisible.
+ region.getPlot().getStats().setVisible(false);
+
+ // Set the general plot font (which is also the z-axis font).
+ region.getPlot().setFont(BASIC_FONT);
+
+ // Set the title font.
+ region.getPlot().getTitleObject().setFont(TITLE_FONT);
+
+ // Set the axis tick-mark fonts.
+ region.getPlot().getXAxis().setFont(BASIC_FONT);
+ region.getPlot().getYAxis().setFont(BASIC_FONT);
+ region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
+ region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/MTEPlotFormatter.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/MTEPlotFormatter.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/MTEPlotFormatter.java Wed Apr 27 11:11:32 2016
@@ -19,308 +19,308 @@
import hep.aida.ref.plotter.PlotterRegion;
public class MTEPlotFormatter {
- // Define plot fonts.
- private static final Font BASIC_FONT = new Font("Calibri", Font.PLAIN, 20);
- private static final Font AXIS_FONT = new Font("Calibri", Font.BOLD, 25);
- private static final Font TITLE_FONT = new Font("Calibri", Font.BOLD, 35);
-
- // Defines the color style options for plot data.
- private enum ColorStyle {
- MS_BLUE(new Color( 79, 129, 189), new Color( 36, 64, 97)), MS_ORANGE(new Color(247, 150, 70), new Color(152, 72, 6)),
- MS_RED(new Color(192, 80, 77), new Color( 99, 36, 35)), GREY(new Color(166, 166, 166), new Color( 89, 89, 89)),
- MS_GREEN(new Color(155, 187, 89), new Color( 79, 98, 40)), CRIMSON(new Color(161, 0, 0), new Color(104, 0, 0)),
- RUST(new Color(161, 80, 0), new Color(105, 80, 0)), YELLOW(new Color(161, 161, 0), new Color(122, 109, 8)),
- FOREST(new Color( 65, 102, 0), new Color( 37, 79, 0)), GREEN(new Color( 7, 132, 70), new Color( 7, 82, 30)),
- TEAL(new Color( 0, 130, 130), new Color( 0, 90, 100)), CERULEAN(new Color( 0, 86, 130), new Color( 0, 28, 83)),
- BLUE(new Color( 0, 33, 203), new Color( 0, 0, 137)), INDIGO(new Color( 68, 10, 127), new Color( 0, 0, 61)),
- PURPLE(new Color(106, 0, 106), new Color( 63, 0, 56)), FUSCHIA(new Color(119, 0, 60), new Color( 60, 0, 60));
-
- private final Color fillColor;
- private final Color lineColor;
-
- private ColorStyle(Color fillColor, Color lineColor) {
- this.fillColor = fillColor;
- this.lineColor = lineColor;
- }
-
- public Color getFillColor() { return fillColor; }
-
- public Color getLineColor() { return lineColor; }
- };
-
- /**
- * Loads all plots in a file and formats them according to the
- * indicated style.
- * @param args - Unused default executable parameter.
- * @throws IOException Occurs if there is an issue opening the file.
- */
- public static void main(String[] args) throws IOException {
- // Define the root directory for the plots.
- String rootDir = "D:\\cygwin64\\home\\Kyle\\";
-
- // Define the new name of the file containing the trigger plots.
- String plotFile = rootDir + "temp.aida";
-
- // Define the scaling factors for each plot.
- double scaleFactor = 1;
-
- // Get the plots file and open it.
- IAnalysisFactory af = IAnalysisFactory.create();
- ITree tree = af.createTreeFactory().create(plotFile);
- if(tree == null) { throw new IllegalArgumentException("Unable to load plot file."); }
-
- // Define index references for each event type.
- int MOLLER = 0;
- int TRIDENT = 1;
- int ELASTIC = 2;
-
- // Get the histograms.
- IHistogram1D[] trackCountPlots = new IHistogram1D[3];
- trackCountPlots[MOLLER] = (IHistogram1D) tree.find("MTE Analysis/Møller Event Tracks");
- trackCountPlots[TRIDENT] = (IHistogram1D) tree.find("MTE Analysis/Trident Event Tracks");
- trackCountPlots[ELASTIC] = (IHistogram1D) tree.find("MTE Analysis/Elastic Event Tracks");
-
- IHistogram1D[] energyPlots = new IHistogram1D[3];
- energyPlots[MOLLER] = (IHistogram1D) tree.find("MTE Analysis/Møller Electron Energy Distribution");
- energyPlots[TRIDENT] = (IHistogram1D) tree.find("MTE Analysis/Trident Electron Energy Distribution");
- energyPlots[ELASTIC] = (IHistogram1D) tree.find("MTE Analysis/Elastic Energy Distribution");
-
- IHistogram1D[] energySumPlots = new IHistogram1D[2];
- energySumPlots[MOLLER] = (IHistogram1D) tree.find("MTE Analysis/Møller Energy Sum Distribution");
- energySumPlots[TRIDENT] = (IHistogram1D) tree.find("MTE Analysis/Trident Energy Sum Distribution");
-
- IHistogram2D[] energy2DPlots = new IHistogram2D[2];
- energy2DPlots[MOLLER] = (IHistogram2D) tree.find("MTE Analysis/Møller 2D Energy Distribution");
- energy2DPlots[TRIDENT] = (IHistogram2D) tree.find("MTE Analysis/Trident 2D Energy Distribution");
-
- // Create a plotter factory.
- IPlotterFactory plotterFactory = af.createPlotterFactory();
-
- // Format the track count plots.
- for(IHistogram1D trackCountPlot : trackCountPlots) {
- // Create a plotter and plotting region for the plot.
- IPlotter plotter = plotterFactory.create(trackCountPlot.title());
- plotter.createRegions(1);
- plotter.region(0).plot(trackCountPlot);
-
- // Format the axis labels.
- PlotterRegion region = (PlotterRegion) plotter.region(0);
- region.getPlot().setTitle(trackCountPlot.title());
- region.getPlot().getXAxis().setLabel("Number of Tracks");
- region.getPlot().getYAxis().setLabel("Count");
-
- // Format the fonts and general plot presentation.
- setDefault1DStyle(region, new ColorStyle[] { ColorStyle.GREY });
-
- // Show the plot.
- plotter.setParameter("plotterWidth", "2000");
- plotter.setParameter("plotterHeight", "1200");
- plotter.show();
- }
-
- // Format the electron energy plots.
- for(IHistogram1D energyPlot : energyPlots) {
- // Create a plotter and plotting region for the plot.
- IPlotter plotter = plotterFactory.create(energyPlot.title());
- plotter.createRegions(1);
- plotter.region(0).plot(energyPlot);
-
- // Format the axis labels.
- PlotterRegion region = (PlotterRegion) plotter.region(0);
- region.getPlot().setTitle(energyPlot.title());
- region.getPlot().getXAxis().setLabel("Track Energy (GeV)");
- region.getPlot().getYAxis().setLabel("Count");
-
- // Format the fonts and general plot presentation.
- setDefault1DStyle(region, new ColorStyle[] { ColorStyle.GREY });
-
- // Show the plot.
- plotter.setParameter("plotterWidth", "2000");
- plotter.setParameter("plotterHeight", "1200");
- plotter.show();
- }
-
- // Format the energy sum plots.
- for(IHistogram1D energySumPlot : energySumPlots) {
- // Create a plotter and plotting region for the plot.
- IPlotter plotter = plotterFactory.create(energySumPlot.title());
- plotter.createRegions(1);
- plotter.region(0).plot(energySumPlot);
-
- // Format the axis labels.
- PlotterRegion region = (PlotterRegion) plotter.region(0);
- region.getPlot().setTitle(energySumPlot.title());
- region.getPlot().getXAxis().setLabel("Track Energy (GeV)");
- region.getPlot().getYAxis().setLabel("Count");
-
- // Format the fonts and general plot presentation.
- setDefault1DStyle(region, new ColorStyle[] { ColorStyle.GREY });
-
- // Show the plot.
- plotter.setParameter("plotterWidth", "2000");
- plotter.setParameter("plotterHeight", "1200");
- plotter.show();
- }
-
- // Format the 2D energy sum plots.
- for(IHistogram2D energy2DPlot : energy2DPlots) {
- // Create a plotter and plotting region for the plot.
- IPlotter plotter = plotterFactory.create(energy2DPlot.title());
- plotter.createRegions(1);
- plotter.region(0).plot(energy2DPlot);
-
- // Format the axis labels.
- PlotterRegion region = (PlotterRegion) plotter.region(0);
- region.getPlot().setTitle(energy2DPlot.title());
- region.getPlot().getXAxis().setLabel("First Track Energy (GeV)");
- region.getPlot().getYAxis().setLabel("Second Track Energy (GeV)");
-
-
- // Format the fonts and general plot presentation.
- setDefault2DStyle(region, false);
-
- // Show the plot.
- plotter.setParameter("plotterWidth", "2000");
- plotter.setParameter("plotterHeight", "1200");
- plotter.show();
- }
-
- // Disable the error bars.
- //JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(region.getAllDataNames()[0]).getStyle();
- //fillStyle.setShowErrorBars(false);
-
- // Close the tree.
- tree.close();
- }
-
- /**
- * Sets the plot display formatting for 1D plots.
- * @param region - The plotter region to format.
- * @param color - The data color settings to use.
- */
- private static final void setDefault1DStyle(PlotterRegion region, ColorStyle[] color) {
- // Get the names of each plot on in the region.
- String[] dataNames = region.getAllDataNames();
-
- // Check whether this is an overlay plot. Overlay plots contain
- // more than one data name.
- boolean overlay = (dataNames.length > 1 ? true : false);
-
- // Iterate over each plot in the region.
- for(int i = 0; i < dataNames.length; i++) {
- // Set the overlay style if needed.
- if(overlay) {
- // Get the fill style for the current data type.
- JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
-
- // Set the histogram style to display thick-lined bars
- // with no fill. The color is set by the "color" argument.
- fillStyle.setHistogramFill(false);
- fillStyle.setHistogramBarLineWidth(3);
- fillStyle.setHistogramBarLineColor(color[i].getFillColor());
-
- // Set the legend text style.
- region.getPlot().getLegend().setFont(new Font("Calibri", Font.PLAIN, 20));
- }
-
- // Otherwise, set the fill style for a single plot.
- else {
- // Get the fill style for the current data type.
- JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
-
- // Set the histogram style to display thick-lined bars
- // with a fill color. The colors are defined by the
- // "color" argument.
- fillStyle.setHistogramBarLineWidth(3);
- fillStyle.setHistogramBarColor(color[i].getFillColor());
- fillStyle.setHistogramBarLineColor(color[i].getLineColor());
- }
-
- // Set the statistics box style.
- region.getPlot().getStats().setVisible(true);
- region.getPlot().getStats().setFont(BASIC_FONT);
-
- // Set the title font.
- region.getPlot().getTitleObject().setFont(TITLE_FONT);
-
- // Set the axis tick-mark fonts.
- region.getPlot().getXAxis().setFont(BASIC_FONT);
- region.getPlot().getYAxis().setFont(BASIC_FONT);
- region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
- region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
- }
- }
-
- /**
- * Sets the plot display formatting for 1D plots.
- * @param region - The plotter region to format.
- */
- private static final void setDefault2DStyle(PlotterRegion region, boolean logarithmic) {
- // Get the fill style object. 2D plots should never be overlay
- // plots, so there should only ever be one data name.
- JASHist2DHistogramStyle fillStyle = (JASHist2DHistogramStyle) region.getDataForName(region.getAllDataNames()[0]).getStyle();
-
- // Set the fill style for a two-dimensional plot.
- if(logarithmic) { fillStyle.setLogZ(true); }
- fillStyle.setHistStyle(JASHist2DHistogramStyle.STYLE_COLORMAP);
- fillStyle.setColorMapScheme(JASHist2DHistogramStyle.COLORMAP_RAINBOW);
-
- // Make the statistics box invisible.
- region.getPlot().getStats().setVisible(false);
-
- // Set the general plot font (which is also the z-axis font).
- region.getPlot().setFont(BASIC_FONT);
-
- // Set the title font.
- region.getPlot().getTitleObject().setFont(TITLE_FONT);
-
- // Set the axis tick-mark fonts.
- region.getPlot().getXAxis().setFont(BASIC_FONT);
- region.getPlot().getYAxis().setFont(BASIC_FONT);
- region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
- region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
- }
-
- /**
- * Recursive method that gets all object names from a tree that
- * are not directories. Method should not be called directly, but
- * rather called only through the <code>getHistograms(ITree)</code>
- * method.
- * @param tree - The tree from which to obtain the object names.
- * @param directory - The directory in which to search for objects.
- * @param list - The list in which to place the objects.
- * @return Returns the <code>List</code> collection that was given
- * as an argument.
- */
- private static final List<String> getHistograms(ITree tree, String directory, List<String> list) {
- // Get the list of objects in the directory.
- String[] treeObjects = tree.listObjectNames(directory);
-
- // Print the objects.
- for(String objectName : treeObjects) {
- // Check if the object is a directory.
- boolean isDirectory = isDirectory(objectName);
-
- // If the object is a directory, get the histograms from it.
- if(isDirectory) {
- getHistograms(tree, objectName, list);
- }
-
- // If the object is a plot, add it to the list.
- else { list.add(objectName); }
- }
-
- // Return the list.
- return list;
- }
-
- /**
- * Checks whether a tree object is a directory.
- * @param object - The object to check.
- * @return Returns <code>true</code> if the object is a directory
- * and <code>false</code> otherwise.
- */
- private static final boolean isDirectory(String object) {
- return (object.toCharArray()[object.length() - 1] == '/');
- }
+ // Define plot fonts.
+ private static final Font BASIC_FONT = new Font("Calibri", Font.PLAIN, 20);
+ private static final Font AXIS_FONT = new Font("Calibri", Font.BOLD, 25);
+ private static final Font TITLE_FONT = new Font("Calibri", Font.BOLD, 35);
+
+ // Defines the color style options for plot data.
+ private enum ColorStyle {
+ MS_BLUE(new Color( 79, 129, 189), new Color( 36, 64, 97)), MS_ORANGE(new Color(247, 150, 70), new Color(152, 72, 6)),
+ MS_RED(new Color(192, 80, 77), new Color( 99, 36, 35)), GREY(new Color(166, 166, 166), new Color( 89, 89, 89)),
+ MS_GREEN(new Color(155, 187, 89), new Color( 79, 98, 40)), CRIMSON(new Color(161, 0, 0), new Color(104, 0, 0)),
+ RUST(new Color(161, 80, 0), new Color(105, 80, 0)), YELLOW(new Color(161, 161, 0), new Color(122, 109, 8)),
+ FOREST(new Color( 65, 102, 0), new Color( 37, 79, 0)), GREEN(new Color( 7, 132, 70), new Color( 7, 82, 30)),
+ TEAL(new Color( 0, 130, 130), new Color( 0, 90, 100)), CERULEAN(new Color( 0, 86, 130), new Color( 0, 28, 83)),
+ BLUE(new Color( 0, 33, 203), new Color( 0, 0, 137)), INDIGO(new Color( 68, 10, 127), new Color( 0, 0, 61)),
+ PURPLE(new Color(106, 0, 106), new Color( 63, 0, 56)), FUSCHIA(new Color(119, 0, 60), new Color( 60, 0, 60));
+
+ private final Color fillColor;
+ private final Color lineColor;
+
+ private ColorStyle(Color fillColor, Color lineColor) {
+ this.fillColor = fillColor;
+ this.lineColor = lineColor;
+ }
+
+ public Color getFillColor() { return fillColor; }
+
+ public Color getLineColor() { return lineColor; }
+ };
+
+ /**
+ * Loads all plots in a file and formats them according to the
+ * indicated style.
+ * @param args - Unused default executable parameter.
+ * @throws IOException Occurs if there is an issue opening the file.
+ */
+ public static void main(String[] args) throws IOException {
+ // Define the root directory for the plots.
+ String rootDir = "D:\\cygwin64\\home\\Kyle\\";
+
+ // Define the new name of the file containing the trigger plots.
+ String plotFile = rootDir + "temp.aida";
+
+ // Define the scaling factors for each plot.
+ double scaleFactor = 1;
+
+ // Get the plots file and open it.
+ IAnalysisFactory af = IAnalysisFactory.create();
+ ITree tree = af.createTreeFactory().create(plotFile);
+ if(tree == null) { throw new IllegalArgumentException("Unable to load plot file."); }
+
+ // Define index references for each event type.
+ int MOLLER = 0;
+ int TRIDENT = 1;
+ int ELASTIC = 2;
+
+ // Get the histograms.
+ IHistogram1D[] trackCountPlots = new IHistogram1D[3];
+ trackCountPlots[MOLLER] = (IHistogram1D) tree.find("MTE Analysis/Møller Event Tracks");
+ trackCountPlots[TRIDENT] = (IHistogram1D) tree.find("MTE Analysis/Trident Event Tracks");
+ trackCountPlots[ELASTIC] = (IHistogram1D) tree.find("MTE Analysis/Elastic Event Tracks");
+
+ IHistogram1D[] energyPlots = new IHistogram1D[3];
+ energyPlots[MOLLER] = (IHistogram1D) tree.find("MTE Analysis/Møller Electron Energy Distribution");
+ energyPlots[TRIDENT] = (IHistogram1D) tree.find("MTE Analysis/Trident Electron Energy Distribution");
+ energyPlots[ELASTIC] = (IHistogram1D) tree.find("MTE Analysis/Elastic Energy Distribution");
+
+ IHistogram1D[] energySumPlots = new IHistogram1D[2];
+ energySumPlots[MOLLER] = (IHistogram1D) tree.find("MTE Analysis/Møller Energy Sum Distribution");
+ energySumPlots[TRIDENT] = (IHistogram1D) tree.find("MTE Analysis/Trident Energy Sum Distribution");
+
+ IHistogram2D[] energy2DPlots = new IHistogram2D[2];
+ energy2DPlots[MOLLER] = (IHistogram2D) tree.find("MTE Analysis/Møller 2D Energy Distribution");
+ energy2DPlots[TRIDENT] = (IHistogram2D) tree.find("MTE Analysis/Trident 2D Energy Distribution");
+
+ // Create a plotter factory.
+ IPlotterFactory plotterFactory = af.createPlotterFactory();
+
+ // Format the track count plots.
+ for(IHistogram1D trackCountPlot : trackCountPlots) {
+ // Create a plotter and plotting region for the plot.
+ IPlotter plotter = plotterFactory.create(trackCountPlot.title());
+ plotter.createRegions(1);
+ plotter.region(0).plot(trackCountPlot);
+
+ // Format the axis labels.
+ PlotterRegion region = (PlotterRegion) plotter.region(0);
+ region.getPlot().setTitle(trackCountPlot.title());
+ region.getPlot().getXAxis().setLabel("Number of Tracks");
+ region.getPlot().getYAxis().setLabel("Count");
+
+ // Format the fonts and general plot presentation.
+ setDefault1DStyle(region, new ColorStyle[] { ColorStyle.GREY });
+
+ // Show the plot.
+ plotter.setParameter("plotterWidth", "2000");
+ plotter.setParameter("plotterHeight", "1200");
+ plotter.show();
+ }
+
+ // Format the electron energy plots.
+ for(IHistogram1D energyPlot : energyPlots) {
+ // Create a plotter and plotting region for the plot.
+ IPlotter plotter = plotterFactory.create(energyPlot.title());
+ plotter.createRegions(1);
+ plotter.region(0).plot(energyPlot);
+
+ // Format the axis labels.
+ PlotterRegion region = (PlotterRegion) plotter.region(0);
+ region.getPlot().setTitle(energyPlot.title());
+ region.getPlot().getXAxis().setLabel("Track Energy (GeV)");
+ region.getPlot().getYAxis().setLabel("Count");
+
+ // Format the fonts and general plot presentation.
+ setDefault1DStyle(region, new ColorStyle[] { ColorStyle.GREY });
+
+ // Show the plot.
+ plotter.setParameter("plotterWidth", "2000");
+ plotter.setParameter("plotterHeight", "1200");
+ plotter.show();
+ }
+
+ // Format the energy sum plots.
+ for(IHistogram1D energySumPlot : energySumPlots) {
+ // Create a plotter and plotting region for the plot.
+ IPlotter plotter = plotterFactory.create(energySumPlot.title());
+ plotter.createRegions(1);
+ plotter.region(0).plot(energySumPlot);
+
+ // Format the axis labels.
+ PlotterRegion region = (PlotterRegion) plotter.region(0);
+ region.getPlot().setTitle(energySumPlot.title());
+ region.getPlot().getXAxis().setLabel("Track Energy (GeV)");
+ region.getPlot().getYAxis().setLabel("Count");
+
+ // Format the fonts and general plot presentation.
+ setDefault1DStyle(region, new ColorStyle[] { ColorStyle.GREY });
+
+ // Show the plot.
+ plotter.setParameter("plotterWidth", "2000");
+ plotter.setParameter("plotterHeight", "1200");
+ plotter.show();
+ }
+
+ // Format the 2D energy sum plots.
+ for(IHistogram2D energy2DPlot : energy2DPlots) {
+ // Create a plotter and plotting region for the plot.
+ IPlotter plotter = plotterFactory.create(energy2DPlot.title());
+ plotter.createRegions(1);
+ plotter.region(0).plot(energy2DPlot);
+
+ // Format the axis labels.
+ PlotterRegion region = (PlotterRegion) plotter.region(0);
+ region.getPlot().setTitle(energy2DPlot.title());
+ region.getPlot().getXAxis().setLabel("First Track Energy (GeV)");
+ region.getPlot().getYAxis().setLabel("Second Track Energy (GeV)");
+
+
+ // Format the fonts and general plot presentation.
+ setDefault2DStyle(region, false);
+
+ // Show the plot.
+ plotter.setParameter("plotterWidth", "2000");
+ plotter.setParameter("plotterHeight", "1200");
+ plotter.show();
+ }
+
+ // Disable the error bars.
+ //JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(region.getAllDataNames()[0]).getStyle();
+ //fillStyle.setShowErrorBars(false);
+
+ // Close the tree.
+ tree.close();
+ }
+
+ /**
+ * Sets the plot display formatting for 1D plots.
+ * @param region - The plotter region to format.
+ * @param color - The data color settings to use.
+ */
+ private static final void setDefault1DStyle(PlotterRegion region, ColorStyle[] color) {
+ // Get the names of each plot in the region.
+ String[] dataNames = region.getAllDataNames();
+
+ // Check whether this is an overlay plot. Overlay plots contain
+ // more than one data name.
+ boolean overlay = (dataNames.length > 1 ? true : false);
+
+ // Iterate over each plot in the region.
+ for(int i = 0; i < dataNames.length; i++) {
+ // Set the overlay style if needed.
+ if(overlay) {
+ // Get the fill style for the current data type.
+ JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
+
+ // Set the histogram style to display thick-lined bars
+ // with no fill. The color is set by the "color" argument.
+ fillStyle.setHistogramFill(false);
+ fillStyle.setHistogramBarLineWidth(3);
+ fillStyle.setHistogramBarLineColor(color[i].getFillColor());
+
+ // Set the legend text style.
+ region.getPlot().getLegend().setFont(new Font("Calibri", Font.PLAIN, 20));
+ }
+
+ // Otherwise, set the fill style for a single plot.
+ else {
+ // Get the fill style for the current data type.
+ JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
+
+ // Set the histogram style to display thick-lined bars
+ // with a fill color. The colors are defined by the
+ // "color" argument.
+ fillStyle.setHistogramBarLineWidth(3);
+ fillStyle.setHistogramBarColor(color[i].getFillColor());
+ fillStyle.setHistogramBarLineColor(color[i].getLineColor());
+ }
+
+ // Set the statistics box style.
+ region.getPlot().getStats().setVisible(true);
+ region.getPlot().getStats().setFont(BASIC_FONT);
+
+ // Set the title font.
+ region.getPlot().getTitleObject().setFont(TITLE_FONT);
+
+ // Set the axis tick-mark fonts.
+ region.getPlot().getXAxis().setFont(BASIC_FONT);
+ region.getPlot().getYAxis().setFont(BASIC_FONT);
+ region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
+ region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
+ }
+ }
+
+ /**
+ * Sets the plot display formatting for 2D plots.
+ * @param region - The plotter region to format.
+ */
+ private static final void setDefault2DStyle(PlotterRegion region, boolean logarithmic) {
+ // Get the fill style object. 2D plots should never be overlay
+ // plots, so there should only ever be one data name.
+ JASHist2DHistogramStyle fillStyle = (JASHist2DHistogramStyle) region.getDataForName(region.getAllDataNames()[0]).getStyle();
+
+ // Set the fill style for a two-dimensional plot.
+ if(logarithmic) { fillStyle.setLogZ(true); }
+ fillStyle.setHistStyle(JASHist2DHistogramStyle.STYLE_COLORMAP);
+ fillStyle.setColorMapScheme(JASHist2DHistogramStyle.COLORMAP_RAINBOW);
+
+ // Make the statistics box invisible.
+ region.getPlot().getStats().setVisible(false);
+
+ // Set the general plot font (which is also the z-axis font).
+ region.getPlot().setFont(BASIC_FONT);
+
+ // Set the title font.
+ region.getPlot().getTitleObject().setFont(TITLE_FONT);
+
+ // Set the axis tick-mark fonts.
+ region.getPlot().getXAxis().setFont(BASIC_FONT);
+ region.getPlot().getYAxis().setFont(BASIC_FONT);
+ region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
+ region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
+ }
+
+ /**
+ * Recursive method that gets all object names from a tree that
+ * are not directories. Method should not be called directly, but
+ * rather called only through the <code>getHistograms(ITree)</code>
+ * method.
+ * @param tree - The tree from which to obtain the object names.
+ * @param directory - The directory in which to search for objects.
+ * @param list - The list in which to place the objects.
+ * @return Returns the <code>List</code> collection that was given
+ * as an argument.
+ */
+ private static final List<String> getHistograms(ITree tree, String directory, List<String> list) {
+ // Get the list of objects in the directory.
+ String[] treeObjects = tree.listObjectNames(directory);
+
+ // Iterate over the objects.
+ for(String objectName : treeObjects) {
+ // Check if the object is a directory.
+ boolean isDirectory = isDirectory(objectName);
+
+ // If the object is a directory, get the histograms from it.
+ if(isDirectory) {
+ getHistograms(tree, objectName, list);
+ }
+
+ // If the object is a plot, add it to the list.
+ else { list.add(objectName); }
+ }
+
+ // Return the list.
+ return list;
+ }
+
+ /**
+ * Checks whether a tree object is a directory.
+ * @param object - The object to check.
+ * @return Returns <code>true</code> if the object is a directory
+ * and <code>false</code> otherwise.
+ */
+ private static final boolean isDirectory(String object) {
+ return (object.toCharArray()[object.length() - 1] == '/');
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/MTETriggerPlotsFormatter.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/MTETriggerPlotsFormatter.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/MTETriggerPlotsFormatter.java Wed Apr 27 11:11:32 2016
@@ -15,164 +15,164 @@
public class MTETriggerPlotsFormatter {
- public static void main(String[] args) throws IllegalArgumentException, IOException {
- // Define the root directory for the plots.
- String rootDir = "D:\\cygwin64\\home\\Kyle\\";
-
- // Define the new name of the file containing the trigger plots.
- String plotFile = rootDir + "5772-ana.aida";
-
- // Get the plots file and open it.
- IAnalysisFactory af = IAnalysisFactory.create();
- ITree tree = af.createTreeFactory().create(plotFile);
- if(tree == null) { throw new IllegalArgumentException("Unable to load plot file."); }
-
- // Define the 1D trigger plot names for Møllers and tridents.
- String[] plotNames1D = { "Cluster Hit Count", "Cluster Seed Energy", "Cluster Total Energy",
- "Pair Coplanarity", "Pair Energy Difference", "Pair Energy Slope", "Pair Energy Sum" };
- String[] displayNames1D = { "Cluster Hit Count", "Cluster Seed Energy", "Cluster Total Energy",
- "Pair Coplanarity", "Pair Energy Difference", "Pair Energy Slope", "Pair Energy Sum" };
- String[] xAxisNames1D = { "Hit Count", "Seed Energy (GeV)", "Total Energy (GeV)",
- "Coplanarity (Degrees)", "Energy Difference (GeV)", "Energy Slope (GeV)", "Energy Sum (GeV)" };
- String yAxisName1D = "Count";
-
- // Define the 2D trigger plot names for Møllers and tridents.
- String[] plotNames2D = { "Cluster Seed", "Pair Energy Sum 2D" };
- String[] displayNames2D = { "Cluster Seed Distribution", "2D Energy Sum" };
- String[] xAxisNames2D = { "x-Index", "Second Cluster Energy (GeV)" };
- String[] yAxisNames2D = { "y-Index", "First Cluster Energy (GeV)" };
-
- // Define the 1D trigger plot names for elastics.
- String[] plotNamesElastic1D = { "Cluster Hit Count", "Cluster Seed Energy", "Cluster Total Energy" };
- String[] displayNamesElastic1D = { "Cluster Hit Count", "Cluster Seed Energy", "Cluster Total Energy" };
- String[] xAxisNamesElastic1D = { "Hit Count", "Seed Energy (GeV)", "Total Energy (GeV)" };
- String yAxisNameElastic1D = "Count";
-
- // Define the 2D trigger plot names for elastics.
- String[] plotNamesElastic2D = { "Cluster Seed" };
- String[] displayNamesElastic2D = { "Cluster Seed Distribution" };
- String[] xAxisNamesElastic2D = { "x-Index" };
- String[] yAxisNamesElastic2D = { "y-Index" };
-
- // Define the Møller, trident, and elastic prefixes.
- String allPrefix = "All Trigger Plots/Pair Plots/";
- String møllerPrefix = "Møller Trigger Plots/Pair Plots/";
- String tridentPrefix = "Trident Trigger Plots/Pair Plots/";
- String elasticPrefix = "Elastic Trigger Plots/Singles Plots/";
- String allSinglesPrefix = "All Trigger Plots/Singles Plots/";
-
- // Define the plot type prefix.
- String allTypeName = "All Pairs - ";
- String møllerTypeName = "Møller - ";
- String tridentTypeName = "Trident - ";
- String elasticTypeName = "Elastic - ";
- String allSinglesTypeName = "All Singles - ";
-
- // Define the plot type colors.
- ColorStyle allColor = PlotsFormatter.ColorStyle.GREY;
- ColorStyle møllerColor = PlotsFormatter.ColorStyle.MS_BLUE;
- ColorStyle tridentColor = PlotsFormatter.ColorStyle.MS_ORANGE;
- ColorStyle elasticColor = PlotsFormatter.ColorStyle.MS_GREEN;
-
- // Create a plot formatting module.
- PlotFormatModule module = new PlotFormatModule();
-
- // Get the histograms and add them to the module. Start with the
- // trident and Møller plots.
- for(int i = 0; i < plotNames1D.length; i++) {
- // Get the Møller and trident plots.
- IHistogram1D allPlot = (IHistogram1D) tree.find(allPrefix + plotNames1D[i]);
- IHistogram1D møllerPlot = (IHistogram1D) tree.find(møllerPrefix + plotNames1D[i]);
- IHistogram1D tridentPlot = (IHistogram1D) tree.find(tridentPrefix + plotNames1D[i]);
-
- // Make a formatted plot for each.
- FormattedPlot1D allFormattedPlot = new FormattedPlot1D(allPlot, allColor, xAxisNames1D[i], yAxisName1D, allTypeName + displayNames1D[i]);
- FormattedPlot1D møllerFormattedPlot = new FormattedPlot1D(møllerPlot, møllerColor, xAxisNames1D[i], yAxisName1D, møllerTypeName + displayNames1D[i]);
- FormattedPlot1D tridentFormattedPlot = new FormattedPlot1D(tridentPlot, tridentColor, xAxisNames1D[i], yAxisName1D, tridentTypeName + displayNames1D[i]);
-
- // Add them to the module.
- module.addPlot1D(allFormattedPlot);
- module.addPlot1D(møllerFormattedPlot);
- module.addPlot1D(tridentFormattedPlot);
- }
- for(int i = 0; i < plotNames2D.length; i++) {
- // Get the Møller and trident plots.
- IHistogram2D allPlot = (IHistogram2D) tree.find(allPrefix + plotNames2D[i]);
- IHistogram2D møllerPlot = (IHistogram2D) tree.find(møllerPrefix + plotNames2D[i]);
- IHistogram2D tridentPlot = (IHistogram2D) tree.find(tridentPrefix + plotNames2D[i]);
-
- // Make a formatted plot for each.
- FormattedPlot2D allFormattedPlot = new FormattedPlot2D(allPlot, i == 0 ? true : false, xAxisNames2D[i], yAxisNames2D[i], allTypeName + displayNames2D[i]);
- FormattedPlot2D møllerFormattedPlot = new FormattedPlot2D(møllerPlot, i == 0 ? true : false, xAxisNames2D[i], yAxisNames2D[i], møllerTypeName + displayNames2D[i]);
- FormattedPlot2D tridentFormattedPlot = new FormattedPlot2D(tridentPlot, i == 0 ? true : false, xAxisNames2D[i], yAxisNames2D[i], tridentTypeName + displayNames2D[i]);
-
- // Add them to the module.
- module.addPlot2D(allFormattedPlot);
- module.addPlot2D(møllerFormattedPlot);
- module.addPlot2D(tridentFormattedPlot);
- }
-
- // Get the histograms for the elastic plots and add them to the module.
- for(int i = 0; i < plotNamesElastic1D.length; i++) {
- // Get the Møller and trident plots.
- IHistogram1D allPlot = (IHistogram1D) tree.find(allSinglesPrefix + plotNames1D[i]);
- IHistogram1D elasticPlot = (IHistogram1D) tree.find(elasticPrefix + plotNames1D[i]);
-
- // Make a formatted plot for each.
- FormattedPlot1D allFormattedPlot = new FormattedPlot1D(allPlot, allColor, xAxisNamesElastic1D[i], yAxisNameElastic1D,
- allSinglesTypeName + displayNamesElastic1D[i]);
- FormattedPlot1D elasticFormattedPlot = new FormattedPlot1D(elasticPlot, elasticColor, xAxisNamesElastic1D[i], yAxisNameElastic1D,
- elasticTypeName + displayNamesElastic1D[i]);
-
- // Add them to the module.
- module.addPlot1D(allFormattedPlot);
- module.addPlot1D(elasticFormattedPlot);
- }
- for(int i = 0; i < plotNamesElastic2D.length; i++) {
- // Get the Møller and trident plots.
- IHistogram2D allPlot = (IHistogram2D) tree.find(allPrefix + plotNamesElastic2D[i]);
- IHistogram2D elasticPlot = (IHistogram2D) tree.find(møllerPrefix + plotNamesElastic2D[i]);
-
- // Make a formatted plot for each.
- FormattedPlot2D allFormattedPlot = new FormattedPlot2D(allPlot, i == 0 ? true : false, xAxisNamesElastic2D[i], yAxisNamesElastic2D[i],
- allSinglesTypeName + plotNames2D[i]);
- FormattedPlot2D elasticFormattedPlot = new FormattedPlot2D(elasticPlot, i == 0 ? true : false, xAxisNamesElastic2D[i], yAxisNamesElastic2D[i],
- elasticTypeName + displayNamesElastic2D[i]);
-
- // Add them to the module.
- module.addPlot2D(allFormattedPlot);
- module.addPlot2D(elasticFormattedPlot);
- }
-
- // Add the MTE plots to the module.
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Elastic Energy Distribution"), elasticColor,
- "Momentum (GeV)", "Count", "Elastic - Momentum"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Elastic Event Tracks"), elasticColor,
- "Tracks", "Count", "Elastic - Tracks in Event"));
-
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Møller Energy Sum Distribution"), møllerColor,
- "Momentum Sum (GeV)", "Count", "Møller - Momentum Sum"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Møller Electron Energy Distribution"), møllerColor,
- "Momentum (GeV)", "Count", "Møller - Momentum (Electron)"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Møller Time Coincidence Distribution (All Møller Cuts)"), møllerColor,
- "Time (ns)", "Count", "Møller - Time Coincidence"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Møller Event Tracks"), møllerColor,
- "Tracks", "Count", "Møller - Tracks in Event"));
- module.addPlot2D(new FormattedPlot2D((IHistogram2D) tree.find("MTE Analysis/Møller 2D Energy Distribution"), false,
- "First Track Momentum (GeV)", "Second Track Momentum (GeV)", "Møller - 2D Momentum Sum"));
-
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Trident Energy Sum Distribution"), tridentColor,
- "Momentum Sum (GeV)", "Count", "Trident - Momentum Sum"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Trident Electron Energy Distribution"), tridentColor,
- "Momentum (GeV)", "Count", "Trident - Momentum (Electron)"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Trident Positron Energy Distribution"), tridentColor,
- "Momentum (GeV)", "Count", "Trident - Momentum (Positron)"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Trident Event Tracks"), tridentColor,
- "Tracks", "Count", "Trident - Tracks in Event"));
- module.addPlot2D(new FormattedPlot2D((IHistogram2D) tree.find("MTE Analysis/Trident 2D Energy Distribution"), false,
- "First Track Momentum (GeV)", "Second Track Momentum (GeV)", "Trident - 2D Momentum Sum"));
-
- // Display the plots.
- module.savePlots("C:\\Users\\Kyle\\Desktop\\EnergyShift\\TestPrint\\");
- }
+ public static void main(String[] args) throws IllegalArgumentException, IOException {
+ // Define the root directory for the plots.
+ String rootDir = "D:\\cygwin64\\home\\Kyle\\";
+
+ // Define the new name of the file containing the trigger plots.
+ String plotFile = rootDir + "5772-ana.aida";
+
+ // Get the plots file and open it.
+ IAnalysisFactory af = IAnalysisFactory.create();
+ ITree tree = af.createTreeFactory().create(plotFile);
+ if(tree == null) { throw new IllegalArgumentException("Unable to load plot file."); }
+
+ // Define the 1D trigger plot names for Møllers and tridents.
+ String[] plotNames1D = { "Cluster Hit Count", "Cluster Seed Energy", "Cluster Total Energy",
+ "Pair Coplanarity", "Pair Energy Difference", "Pair Energy Slope", "Pair Energy Sum" };
+ String[] displayNames1D = { "Cluster Hit Count", "Cluster Seed Energy", "Cluster Total Energy",
+ "Pair Coplanarity", "Pair Energy Difference", "Pair Energy Slope", "Pair Energy Sum" };
+ String[] xAxisNames1D = { "Hit Count", "Seed Energy (GeV)", "Total Energy (GeV)",
+ "Coplanarity (Degrees)", "Energy Difference (GeV)", "Energy Slope (GeV)", "Energy Sum (GeV)" };
+ String yAxisName1D = "Count";
+
+ // Define the 2D trigger plot names for Møllers and tridents.
+ String[] plotNames2D = { "Cluster Seed", "Pair Energy Sum 2D" };
+ String[] displayNames2D = { "Cluster Seed Distribution", "2D Energy Sum" };
+ String[] xAxisNames2D = { "x-Index", "Second Cluster Energy (GeV)" };
+ String[] yAxisNames2D = { "y-Index", "First Cluster Energy (GeV)" };
+
+ // Define the 1D trigger plot names for elastics.
+ String[] plotNamesElastic1D = { "Cluster Hit Count", "Cluster Seed Energy", "Cluster Total Energy" };
+ String[] displayNamesElastic1D = { "Cluster Hit Count", "Cluster Seed Energy", "Cluster Total Energy" };
+ String[] xAxisNamesElastic1D = { "Hit Count", "Seed Energy (GeV)", "Total Energy (GeV)" };
+ String yAxisNameElastic1D = "Count";
+
+ // Define the 2D trigger plot names for elastics.
+ String[] plotNamesElastic2D = { "Cluster Seed" };
+ String[] displayNamesElastic2D = { "Cluster Seed Distribution" };
+ String[] xAxisNamesElastic2D = { "x-Index" };
+ String[] yAxisNamesElastic2D = { "y-Index" };
+
+ // Define the Møller, trident, and elastic prefixes.
+ String allPrefix = "All Trigger Plots/Pair Plots/";
+ String møllerPrefix = "Møller Trigger Plots/Pair Plots/";
+ String tridentPrefix = "Trident Trigger Plots/Pair Plots/";
+ String elasticPrefix = "Elastic Trigger Plots/Singles Plots/";
+ String allSinglesPrefix = "All Trigger Plots/Singles Plots/";
+
+ // Define the plot type prefix.
+ String allTypeName = "All Pairs - ";
+ String møllerTypeName = "Møller - ";
+ String tridentTypeName = "Trident - ";
+ String elasticTypeName = "Elastic - ";
+ String allSinglesTypeName = "All Singles - ";
+
+ // Define the plot type colors.
+ ColorStyle allColor = PlotsFormatter.ColorStyle.GREY;
+ ColorStyle møllerColor = PlotsFormatter.ColorStyle.MS_BLUE;
+ ColorStyle tridentColor = PlotsFormatter.ColorStyle.MS_ORANGE;
+ ColorStyle elasticColor = PlotsFormatter.ColorStyle.MS_GREEN;
+
+ // Create a plot formatting module.
+ PlotFormatModule module = new PlotFormatModule();
+
+ // Get the histograms and add them to the module. Start with the
+ // trident and Møller plots.
+ for(int i = 0; i < plotNames1D.length; i++) {
+ // Get the Møller and trident plots.
+ IHistogram1D allPlot = (IHistogram1D) tree.find(allPrefix + plotNames1D[i]);
+ IHistogram1D møllerPlot = (IHistogram1D) tree.find(møllerPrefix + plotNames1D[i]);
+ IHistogram1D tridentPlot = (IHistogram1D) tree.find(tridentPrefix + plotNames1D[i]);
+
+ // Make a formatted plot for each.
+ FormattedPlot1D allFormattedPlot = new FormattedPlot1D(allPlot, allColor, xAxisNames1D[i], yAxisName1D, allTypeName + displayNames1D[i]);
+ FormattedPlot1D møllerFormattedPlot = new FormattedPlot1D(møllerPlot, møllerColor, xAxisNames1D[i], yAxisName1D, møllerTypeName + displayNames1D[i]);
+ FormattedPlot1D tridentFormattedPlot = new FormattedPlot1D(tridentPlot, tridentColor, xAxisNames1D[i], yAxisName1D, tridentTypeName + displayNames1D[i]);
+
+ // Add them to the module.
+ module.addPlot1D(allFormattedPlot);
+ module.addPlot1D(møllerFormattedPlot);
+ module.addPlot1D(tridentFormattedPlot);
+ }
+ for(int i = 0; i < plotNames2D.length; i++) {
+ // Get the Møller and trident plots.
+ IHistogram2D allPlot = (IHistogram2D) tree.find(allPrefix + plotNames2D[i]);
+ IHistogram2D møllerPlot = (IHistogram2D) tree.find(møllerPrefix + plotNames2D[i]);
+ IHistogram2D tridentPlot = (IHistogram2D) tree.find(tridentPrefix + plotNames2D[i]);
+
+ // Make a formatted plot for each.
+ FormattedPlot2D allFormattedPlot = new FormattedPlot2D(allPlot, i == 0 ? true : false, xAxisNames2D[i], yAxisNames2D[i], allTypeName + displayNames2D[i]);
+ FormattedPlot2D møllerFormattedPlot = new FormattedPlot2D(møllerPlot, i == 0 ? true : false, xAxisNames2D[i], yAxisNames2D[i], møllerTypeName + displayNames2D[i]);
+ FormattedPlot2D tridentFormattedPlot = new FormattedPlot2D(tridentPlot, i == 0 ? true : false, xAxisNames2D[i], yAxisNames2D[i], tridentTypeName + displayNames2D[i]);
+
+ // Add them to the module.
+ module.addPlot2D(allFormattedPlot);
+ module.addPlot2D(møllerFormattedPlot);
+ module.addPlot2D(tridentFormattedPlot);
+ }
+
+ // Get the histograms for the elastic plots and add them to the module.
+ for(int i = 0; i < plotNamesElastic1D.length; i++) {
+ // Get the Møller and trident plots.
+ IHistogram1D allPlot = (IHistogram1D) tree.find(allSinglesPrefix + plotNames1D[i]);
+ IHistogram1D elasticPlot = (IHistogram1D) tree.find(elasticPrefix + plotNames1D[i]);
+
+ // Make a formatted plot for each.
+ FormattedPlot1D allFormattedPlot = new FormattedPlot1D(allPlot, allColor, xAxisNamesElastic1D[i], yAxisNameElastic1D,
+ allSinglesTypeName + displayNamesElastic1D[i]);
+ FormattedPlot1D elasticFormattedPlot = new FormattedPlot1D(elasticPlot, elasticColor, xAxisNamesElastic1D[i], yAxisNameElastic1D,
+ elasticTypeName + displayNamesElastic1D[i]);
+
+ // Add them to the module.
+ module.addPlot1D(allFormattedPlot);
+ module.addPlot1D(elasticFormattedPlot);
+ }
+ for(int i = 0; i < plotNamesElastic2D.length; i++) {
+ // Get the Møller and trident plots.
+ IHistogram2D allPlot = (IHistogram2D) tree.find(allPrefix + plotNamesElastic2D[i]);
+ IHistogram2D elasticPlot = (IHistogram2D) tree.find(møllerPrefix + plotNamesElastic2D[i]);
+
+ // Make a formatted plot for each.
+ FormattedPlot2D allFormattedPlot = new FormattedPlot2D(allPlot, i == 0 ? true : false, xAxisNamesElastic2D[i], yAxisNamesElastic2D[i],
+ allSinglesTypeName + plotNames2D[i]);
+ FormattedPlot2D elasticFormattedPlot = new FormattedPlot2D(elasticPlot, i == 0 ? true : false, xAxisNamesElastic2D[i], yAxisNamesElastic2D[i],
+ elasticTypeName + displayNamesElastic2D[i]);
+
+ // Add them to the module.
+ module.addPlot2D(allFormattedPlot);
+ module.addPlot2D(elasticFormattedPlot);
+ }
+
+ // Add the MTE plots to the module.
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Elastic Energy Distribution"), elasticColor,
+ "Momentum (GeV)", "Count", "Elastic - Momentum"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Elastic Event Tracks"), elasticColor,
+ "Tracks", "Count", "Elastic - Tracks in Event"));
+
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Møller Energy Sum Distribution"), møllerColor,
+ "Momentum Sum (GeV)", "Count", "Møller - Momentum Sum"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Møller Electron Energy Distribution"), møllerColor,
+ "Momentum (GeV)", "Count", "Møller - Momentum (Electron)"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Møller Time Coincidence Distribution (All Møller Cuts)"), møllerColor,
+ "Time (ns)", "Count", "Møller - Time Coincidence"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Møller Event Tracks"), møllerColor,
+ "Tracks", "Count", "Møller - Tracks in Event"));
+ module.addPlot2D(new FormattedPlot2D((IHistogram2D) tree.find("MTE Analysis/Møller 2D Energy Distribution"), false,
+ "First Track Momentum (GeV)", "Second Track Momentum (GeV)", "Møller - 2D Momentum Sum"));
+
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Trident Energy Sum Distribution"), tridentColor,
+ "Momentum Sum (GeV)", "Count", "Trident - Momentum Sum"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Trident Electron Energy Distribution"), tridentColor,
+ "Momentum (GeV)", "Count", "Trident - Momentum (Electron)"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Trident Positron Energy Distribution"), tridentColor,
+ "Momentum (GeV)", "Count", "Trident - Momentum (Positron)"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MTE Analysis/Trident Event Tracks"), tridentColor,
+ "Tracks", "Count", "Trident - Tracks in Event"));
+ module.addPlot2D(new FormattedPlot2D((IHistogram2D) tree.find("MTE Analysis/Trident 2D Energy Distribution"), false,
+ "First Track Momentum (GeV)", "Second Track Momentum (GeV)", "Trident - 2D Momentum Sum"));
+
+ // Display the plots.
+ module.savePlots("C:\\Users\\Kyle\\Desktop\\EnergyShift\\TestPrint\\");
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/ParticleMCAnalysisPlotsFormatter.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/ParticleMCAnalysisPlotsFormatter.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/ParticleMCAnalysisPlotsFormatter.java Wed Apr 27 11:11:32 2016
@@ -13,43 +13,43 @@
import org.hps.users.kmccarty.plots.PlotsFormatter.ColorStyle;
public class ParticleMCAnalysisPlotsFormatter {
- public static void main(String[] args) throws IllegalArgumentException, IOException {
- // Define the root directory for the plots.
- String rootDir = "D:\\cygwin64\\home\\Kyle\\";
-
- // Define the new name of the file containing the trigger plots.
- String plotFile = rootDir + "moller-mc-out_triggerPlots.aida";
-
- // Get the plots file and open it.
- IAnalysisFactory af = IAnalysisFactory.create();
- ITree tree = af.createTreeFactory().create(plotFile);
- if(tree == null) { throw new IllegalArgumentException("Unable to load plot file."); }
-
- // Create a plot formatting module.
- PlotFormatModule module = new PlotFormatModule();
-
- // Define the plot color.
- ColorStyle plotColor = ColorStyle.MS_BLUE;
-
- // Define the plots to be read.
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MC Analysis/Electron Energy Distribution"),
- plotColor, "Electron Energy (GeV)", "Count", "Electron Energy Distribution"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MC Analysis/Electron\\Electron Momentum Sum Distribution"),
- plotColor, "Momentum Sum (GeV)", "Count", "Momentum Sum Distribution"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MC Analysis/Electron\\Electron Pair Angle Distribution"),
- plotColor, "Momentum Sum (GeV)", "Count", "Pair Angle Distribution"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MC Analysis/Particle x-Momentum Distribution"),
- plotColor, "Momentum (GeV)", "Count", "Particle x-Momentum Distribution"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MC Analysis/Particle y-Momentum Distribution"),
- plotColor, "Momentum (GeV)", "Count", "Particle y-Momentum Distribution"));
- module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MC Analysis/Particle z-Momentum Distribution"),
- plotColor, "Momentum (GeV)", "Count", "Particle z-Momentum Distribution"));
- module.addPlot2D(new FormattedPlot2D((IHistogram2D) tree.find("MC Analysis/Electron\\Electron 2D Momentum Distribution"),
- true, "Particle 1 Momentum (GeV)", "Particle 2 Momentum (GeV)", "2D Momentum Sum Distribution"));
- module.addPlot2D(new FormattedPlot2D((IHistogram2D) tree.find("MC Analysis/Particle Momentum Distribution"),
- true, "px (GeV)", "py (GeV)", "Particle x/y Momentum Distribution"));
-
- // Display the plots.
- module.displayPlots();
- }
+ public static void main(String[] args) throws IllegalArgumentException, IOException {
+ // Define the root directory for the plots.
+ String rootDir = "D:\\cygwin64\\home\\Kyle\\";
+
+ // Define the new name of the file containing the trigger plots.
+ String plotFile = rootDir + "moller-mc-out_triggerPlots.aida";
+
+ // Get the plots file and open it.
+ IAnalysisFactory af = IAnalysisFactory.create();
+ ITree tree = af.createTreeFactory().create(plotFile);
+ if(tree == null) { throw new IllegalArgumentException("Unable to load plot file."); }
+
+ // Create a plot formatting module.
+ PlotFormatModule module = new PlotFormatModule();
+
+ // Define the plot color.
+ ColorStyle plotColor = ColorStyle.MS_BLUE;
+
+ // Define the plots to be read.
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MC Analysis/Electron Energy Distribution"),
+ plotColor, "Electron Energy (GeV)", "Count", "Electron Energy Distribution"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MC Analysis/Electron\\Electron Momentum Sum Distribution"),
+ plotColor, "Momentum Sum (GeV)", "Count", "Momentum Sum Distribution"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MC Analysis/Electron\\Electron Pair Angle Distribution"),
+ plotColor, "Momentum Sum (GeV)", "Count", "Pair Angle Distribution"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MC Analysis/Particle x-Momentum Distribution"),
+ plotColor, "Momentum (GeV)", "Count", "Particle x-Momentum Distribution"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MC Analysis/Particle y-Momentum Distribution"),
+ plotColor, "Momentum (GeV)", "Count", "Particle y-Momentum Distribution"));
+ module.addPlot1D(new FormattedPlot1D((IHistogram1D) tree.find("MC Analysis/Particle z-Momentum Distribution"),
+ plotColor, "Momentum (GeV)", "Count", "Particle z-Momentum Distribution"));
+ module.addPlot2D(new FormattedPlot2D((IHistogram2D) tree.find("MC Analysis/Electron\\Electron 2D Momentum Distribution"),
+ true, "Particle 1 Momentum (GeV)", "Particle 2 Momentum (GeV)", "2D Momentum Sum Distribution"));
+ module.addPlot2D(new FormattedPlot2D((IHistogram2D) tree.find("MC Analysis/Particle Momentum Distribution"),
+ true, "px (GeV)", "py (GeV)", "Particle x/y Momentum Distribution"));
+
+ // Display the plots.
+ module.displayPlots();
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/RafoTridentFormatter.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/RafoTridentFormatter.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/RafoTridentFormatter.java Wed Apr 27 11:11:32 2016
@@ -13,101 +13,101 @@
import hep.aida.ITree;
public class RafoTridentFormatter {
- /**
- * Loads all plots in a file and formats them according to the
- * indicated style.
- * @param args - Unused default executable parameter.
- * @throws IOException Occurs if there is an issue opening the file.
- */
- public static void main(String[] args) throws IOException {
- // Define the root directory for the plots.
- String rootDir = "D:\\cygwin64\\home\\Kyle\\";
-
- // Define the new name of the file containing the trigger plots.
- String plotFile = rootDir + "mte-out.aida";
-
- // Get the plots file and open it.
- IAnalysisFactory af = IAnalysisFactory.create();
- ITree tree = af.createTreeFactory().create(plotFile);
- if(tree == null) { throw new IllegalArgumentException("Unable to load plot file."); }
-
- // Declare the histogram names.
- String energySumName = "Energy Sum";
- String timeCoincidenceName = "Time Coincidence";
- String timeEnergy2DName = "Cluster Time vs. Cluster Energy";
- String hCoplanaritySum2DName = "Hardware Coplanarity vs. Energy Sum";
- String coplanaritySum2DName = "Calculated Coplanarity vs. Energy Sum";
- String energySum2DName = "Top Cluster Energy vs. Bottom Cluster Energy";
- String fiducial = " (Fiducial Region)";
-
- // Get the histograms.
- IHistogram1D[] energySum = {
- (IHistogram1D) tree.find("Trident/" + energySumName),
- (IHistogram1D) tree.find("Trident/" + energySumName + fiducial)
- };
- IHistogram1D[] timeCoincidence = {
- (IHistogram1D) tree.find("Trident/" + timeCoincidenceName),
- (IHistogram1D) tree.find("Trident/" + timeCoincidenceName + fiducial)
- };
- IHistogram2D[] coplanaritySum = {
- (IHistogram2D) tree.find("Trident/" + coplanaritySum2DName),
- (IHistogram2D) tree.find("Trident/" + coplanaritySum2DName + fiducial)
- };
- IHistogram2D[] hcoplanaritySum = {
- (IHistogram2D) tree.find("Trident/" + hCoplanaritySum2DName),
- (IHistogram2D) tree.find("Trident/" + hCoplanaritySum2DName + fiducial)
- };
- IHistogram2D[] energySum2D = {
- (IHistogram2D) tree.find("Trident/" + energySum2DName),
- (IHistogram2D) tree.find("Trident/" + energySum2DName + fiducial)
- };
- IHistogram2D[] timeEnergy = {
- (IHistogram2D) tree.find("Trident/" + timeEnergy2DName),
- (IHistogram2D) tree.find("Trident/" + timeEnergy2DName + fiducial)
- };
-
- // Define the scaling factors for each plot.
- double scaleFactor = 19000.0 / 9736969.0;
-
- // Define the plot titles and arrays for 1D plots.
- IHistogram1D[][] plots = { energySum, timeCoincidence };
- String titles[] = { energySumName, timeCoincidenceName, coplanaritySum2DName, hCoplanaritySum2DName, energySum2DName, timeEnergy2DName };
- String[] xTitles = { "Energy (GeV)", "Time Difference (ns)" };
- String yTitle = "Rate (Hz)";
-
- // Define the plot titles and arrays for 2D plots.
- IHistogram2D[][] plots2D = { coplanaritySum, hcoplanaritySum, energySum2D, timeEnergy };
- String[] titles2D = { coplanaritySum2DName, hCoplanaritySum2DName, energySum2DName, timeEnergy2DName };
- String[] xTitles2D = { "Coplanarity (Degrees)", "Coplanarity (Degrees)", "Top Cluster Energy (GeV)", "Time Coincidence (ns)" };
- String[] yTitles2D = { "Energy Sum (GeV)", "Energy Sum (GeV)", "Bottom Cluster Energy (GeV)", "Energy Sum (GeV)" };
- String zTitle2D = "Rate (Hz)";
-
- // Create a plot formatting module.
- PlotFormatModule module = new PlotFormatModule();
-
- // Define the plot color.
- ColorStyle plotColor = ColorStyle.MS_BLUE;
-
- // Define the plots to be read.
- for(int i = 0; i < plots.length; i++) {
- plots[i][0].scale(scaleFactor);
- plots[i][1].scale(scaleFactor);
- module.addPlot1D(new FormattedPlot1D(plots[i][0], plotColor, xTitles[i], yTitle, titles[i]));
- module.addPlot1D(new FormattedPlot1D(plots[i][1], plotColor, xTitles[i], yTitle, titles[i] + fiducial));
- }
- for(int i = 0; i < plots2D.length; i++) {
- plots2D[i][0].scale(scaleFactor);
- plots2D[i][1].scale(scaleFactor);
- module.addPlot2D(new FormattedPlot2D(plots2D[i][0], false, xTitles2D[i], yTitles2D[i], titles2D[i]));
- module.addPlot2D(new FormattedPlot2D(plots2D[i][1], false, xTitles2D[i], yTitles2D[i], titles2D[i] + fiducial));
- }
-
- // Display the plots.
- //module.displayPlots();
- module.savePlots("C:\\Users\\Kyle\\Desktop\\EnergyShift\\run-5772\\RafoPlots\\");
- module.exportPlots("C:\\Users\\Kyle\\Desktop\\EnergyShift\\run-5772\\RafoPlots\\");
-
- // Close the tree.
- tree.close();
- }
+ /**
+ * Loads all plots in a file and formats them according to the
+ * indicated style.
+ * @param args - Unused default executable parameter.
+ * @throws IOException Occurs if there is an issue opening the file.
+ */
+ public static void main(String[] args) throws IOException {
+ // Define the root directory for the plots.
+ String rootDir = "D:\\cygwin64\\home\\Kyle\\";
+
+ // Define the new name of the file containing the trigger plots.
+ String plotFile = rootDir + "mte-out.aida";
+
+ // Get the plots file and open it.
+ IAnalysisFactory af = IAnalysisFactory.create();
+ ITree tree = af.createTreeFactory().create(plotFile);
+ if(tree == null) { throw new IllegalArgumentException("Unable to load plot file."); }
+
+ // Declare the histogram names.
+ String energySumName = "Energy Sum";
+ String timeCoincidenceName = "Time Coincidence";
+ String timeEnergy2DName = "Cluster Time vs. Cluster Energy";
+ String hCoplanaritySum2DName = "Hardware Coplanarity vs. Energy Sum";
+ String coplanaritySum2DName = "Calculated Coplanarity vs. Energy Sum";
+ String energySum2DName = "Top Cluster Energy vs. Bottom Cluster Energy";
+ String fiducial = " (Fiducial Region)";
+
+ // Get the histograms.
+ IHistogram1D[] energySum = {
+ (IHistogram1D) tree.find("Trident/" + energySumName),
+ (IHistogram1D) tree.find("Trident/" + energySumName + fiducial)
+ };
+ IHistogram1D[] timeCoincidence = {
+ (IHistogram1D) tree.find("Trident/" + timeCoincidenceName),
+ (IHistogram1D) tree.find("Trident/" + timeCoincidenceName + fiducial)
+ };
+ IHistogram2D[] coplanaritySum = {
+ (IHistogram2D) tree.find("Trident/" + coplanaritySum2DName),
+ (IHistogram2D) tree.find("Trident/" + coplanaritySum2DName + fiducial)
+ };
+ IHistogram2D[] hcoplanaritySum = {
+ (IHistogram2D) tree.find("Trident/" + hCoplanaritySum2DName),
+ (IHistogram2D) tree.find("Trident/" + hCoplanaritySum2DName + fiducial)
+ };
+ IHistogram2D[] energySum2D = {
+ (IHistogram2D) tree.find("Trident/" + energySum2DName),
+ (IHistogram2D) tree.find("Trident/" + energySum2DName + fiducial)
+ };
+ IHistogram2D[] timeEnergy = {
+ (IHistogram2D) tree.find("Trident/" + timeEnergy2DName),
+ (IHistogram2D) tree.find("Trident/" + timeEnergy2DName + fiducial)
+ };
+
+ // Define the scaling factors for each plot.
+ double scaleFactor = 19000.0 / 9736969.0;
+
+ // Define the plot titles and arrays for 1D plots.
+ IHistogram1D[][] plots = { energySum, timeCoincidence };
+ String titles[] = { energySumName, timeCoincidenceName, coplanaritySum2DName, hCoplanaritySum2DName, energySum2DName, timeEnergy2DName };
+ String[] xTitles = { "Energy (GeV)", "Time Difference (ns)" };
+ String yTitle = "Rate (Hz)";
+
+ // Define the plot titles and arrays for 2D plots.
+ IHistogram2D[][] plots2D = { coplanaritySum, hcoplanaritySum, energySum2D, timeEnergy };
+ String[] titles2D = { coplanaritySum2DName, hCoplanaritySum2DName, energySum2DName, timeEnergy2DName };
+ String[] xTitles2D = { "Coplanarity (Degrees)", "Coplanarity (Degrees)", "Top Cluster Energy (GeV)", "Time Coincidence (ns)" };
+ String[] yTitles2D = { "Energy Sum (GeV)", "Energy Sum (GeV)", "Bottom Cluster Energy (GeV)", "Energy Sum (GeV)" };
+ String zTitle2D = "Rate (Hz)";
+
+ // Create a plot formatting module.
+ PlotFormatModule module = new PlotFormatModule();
+
+ // Define the plot color.
+ ColorStyle plotColor = ColorStyle.MS_BLUE;
+
+ // Define the plots to be read.
+ for(int i = 0; i < plots.length; i++) {
+ plots[i][0].scale(scaleFactor);
+ plots[i][1].scale(scaleFactor);
+ module.addPlot1D(new FormattedPlot1D(plots[i][0], plotColor, xTitles[i], yTitle, titles[i]));
+ module.addPlot1D(new FormattedPlot1D(plots[i][1], plotColor, xTitles[i], yTitle, titles[i] + fiducial));
+ }
+ for(int i = 0; i < plots2D.length; i++) {
+ plots2D[i][0].scale(scaleFactor);
+ plots2D[i][1].scale(scaleFactor);
+ module.addPlot2D(new FormattedPlot2D(plots2D[i][0], false, xTitles2D[i], yTitles2D[i], titles2D[i]));
+ module.addPlot2D(new FormattedPlot2D(plots2D[i][1], false, xTitles2D[i], yTitles2D[i], titles2D[i] + fiducial));
+ }
+
+ // Display the plots.
+ //module.displayPlots();
+ module.savePlots("C:\\Users\\Kyle\\Desktop\\EnergyShift\\run-5772\\RafoPlots\\");
+ module.exportPlots("C:\\Users\\Kyle\\Desktop\\EnergyShift\\run-5772\\RafoPlots\\");
+
+ // Close the tree.
+ tree.close();
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/SingleTriggerPlotsFormatter.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/SingleTriggerPlotsFormatter.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/SingleTriggerPlotsFormatter.java Wed Apr 27 11:11:32 2016
@@ -13,151 +13,151 @@
import hep.aida.ITree;
public class SingleTriggerPlotsFormatter {
-
- public static void main(String[] args) throws IllegalArgumentException, IOException {
- // Define the root directory for the plots.
- String rootDir = "D:\\cygwin64\\home\\Kyle\\";
-
- // Define the new name of the file containing the trigger plots.
- String plotFile = rootDir + "trident-readout-full.aida";
-
- // Get the plots file and open it.
- IAnalysisFactory af = IAnalysisFactory.create();
- ITree tree = af.createTreeFactory().create(plotFile);
- if(tree == null) { throw new IllegalArgumentException("Unable to load plot file."); }
-
- // Define plots variables.
- int UNCUT = 0;
- int TRIGGERED = 1;
- String[] plotsDir = { "NoCuts/", "PassedAll/" };
- int PLOT_HIT_COUNT = 0;
- int PLOT_SEED_ENERGY = 1;
- int PLOT_CLUSTER_ENERGY = 2;
- int PLOT_COPLANARITY = 3;
- int PLOT_ENERGY_SUM = 4;
- int PLOT_ENERGY_DIFF = 5;
- int PLOT_ENERGY_SLOPE = 6;
- int PLOT_SEED_DIST = 0;
- int PLOT_ENERGY_SUM_2D = 1;
-
- // Define the internal plot names.
- String[] plotNameInternal1D = new String[7];
- String[] plotNameInternal2D = new String[2];
- plotNameInternal1D[PLOT_HIT_COUNT] = "Cluster Hit Count";
- plotNameInternal1D[PLOT_SEED_ENERGY] = "Cluster Seed Energy";
- plotNameInternal1D[PLOT_CLUSTER_ENERGY] = "Cluster Total Energy";
- plotNameInternal1D[PLOT_COPLANARITY] = "Pair Coplanarity";
- plotNameInternal1D[PLOT_ENERGY_SUM] = "Pair Energy Sum";
- plotNameInternal1D[PLOT_ENERGY_DIFF] = "Pair Energy Difference";
- plotNameInternal1D[PLOT_ENERGY_SLOPE] = "Pair Energy Slope";
- plotNameInternal2D[PLOT_SEED_DIST] = "Cluster Seed";
- plotNameInternal2D[PLOT_ENERGY_SUM_2D] = "Pair Energy Sum 2D";
-
- // Define the plot display names.
- String[] plotName1D = new String[7];
- String[] plotName2D = new String[2];
- for(int j = 0; j < plotNameInternal1D.length; j++) {
- plotName1D[j] = plotNameInternal1D[j];
- }
- for(int j = 0; j < plotNameInternal2D.length; j++) {
- plotName2D[j] = plotNameInternal2D[j];
- }
- plotName1D[PLOT_ENERGY_SUM] = "1D Pair Energy Sum";
- plotName2D[PLOT_SEED_DIST] = "Cluster Seed Distribution";
- plotName2D[PLOT_ENERGY_SUM_2D] = "2D Pair Energy Sum";
-
- String[] xTitles1D = new String[plotName1D.length];
- String[] xTitles2D = new String[plotName2D.length];
- xTitles1D[PLOT_HIT_COUNT] = "Hit Count";
- xTitles1D[PLOT_SEED_ENERGY] = "Seed Energy (GeV)";
- xTitles1D[PLOT_CLUSTER_ENERGY] = "Cluster Energy (GeV)";
- xTitles1D[PLOT_COPLANARITY] = "Coplanarity Angle (Degrees)";
- xTitles1D[PLOT_ENERGY_SUM] = "Energy Sum (GeV)";
- xTitles1D[PLOT_ENERGY_DIFF] = "Energy Difference (GeV)";
- xTitles1D[PLOT_ENERGY_SLOPE] = "Energy Slope (GeV)";
- xTitles2D[PLOT_SEED_DIST] = "x-Index";
- xTitles2D[PLOT_ENERGY_SUM_2D] = "First Cluster Energy (GeV)";
- String yTitle1D = "Count";
- String[] yTitles2D = new String[plotName2D.length];
- yTitles2D[PLOT_SEED_DIST] = "y-Index";
- yTitles2D[PLOT_ENERGY_SUM_2D] = "Second Cluster Energy (GeV)";
-
- // Define axis ranges.
- double[] axisRanges1D = new double[plotName1D.length];
- axisRanges1D[PLOT_HIT_COUNT] = -1;
- axisRanges1D[PLOT_SEED_ENERGY] = 1.1;
- axisRanges1D[PLOT_CLUSTER_ENERGY] = 1.1;
- axisRanges1D[PLOT_COPLANARITY] = 180;
- axisRanges1D[PLOT_ENERGY_SUM] = 2.2;
- axisRanges1D[PLOT_ENERGY_DIFF] = 1.1;
- axisRanges1D[PLOT_ENERGY_SLOPE] = 2.4;
- double[] xAxisRanges2D = new double[plotName2D.length];
- double[] yAxisRanges2D = new double[plotName2D.length];
- xAxisRanges2D[PLOT_SEED_DIST] = -1;
- xAxisRanges2D[PLOT_ENERGY_SUM_2D] = 1.1;
- yAxisRanges2D[PLOT_SEED_DIST] = -1;
- yAxisRanges2D[PLOT_ENERGY_SUM_2D] = 1.1;
-
- // Define the plot names.
- String[][] plotLocations1D = new String[plotsDir.length][plotNameInternal1D.length];
- String[][] plotLocations2D = new String[plotsDir.length][plotNameInternal2D.length];
- for(int i = 0; i < plotsDir.length; i++) {
- for(int j = 0; j < plotNameInternal1D.length; j++) {
- plotLocations1D[i][j] = plotsDir[i] + plotNameInternal1D[j];
- }
- }
- for(int i = 0; i < plotsDir.length; i++) {
- for(int j = 0; j < plotNameInternal2D.length; j++) {
- plotLocations2D[i][j] = plotsDir[i] + plotNameInternal2D[j];
- }
- }
-
- // Create a plot formatting module.
- PlotFormatModule module = new PlotFormatModule();
-
- // Load the plot objects.
- for(int i = 0; i < plotName1D.length; i++) {
- // Get the uncut and triggered plots.
- IHistogram1D uncutPlot = (IHistogram1D) tree.find(plotLocations1D[UNCUT][i]);
- IHistogram1D triggeredPlot = (IHistogram1D) tree.find(plotLocations1D[TRIGGERED][i] + " (Passed All Cuts)");
-
- // Make a formatted plot for each.
- FormattedPlot1D uncutFormattedPlot;
- FormattedPlot1D triggeredFormattedPlot;
- if(axisRanges1D[i] != -1) {
- uncutFormattedPlot = new FormattedPlot1D(uncutPlot, PlotsFormatter.ColorStyle.GREY, xTitles1D[i], yTitle1D, plotName1D[i] + " (No Cuts)", axisRanges1D[i]);
- triggeredFormattedPlot = new FormattedPlot1D(triggeredPlot, PlotsFormatter.ColorStyle.MS_GREEN, xTitles1D[i], yTitle1D, plotName1D[i] + " (Triggered)", axisRanges1D[i]);
- } else {
- uncutFormattedPlot = new FormattedPlot1D(uncutPlot, PlotsFormatter.ColorStyle.GREY, xTitles1D[i], yTitle1D, plotName1D[i] + " (No Cuts)");
- triggeredFormattedPlot = new FormattedPlot1D(triggeredPlot, PlotsFormatter.ColorStyle.MS_GREEN, xTitles1D[i], yTitle1D, plotName1D[i] + " (Triggered)");
- }
-
- // Add the plots to the module.
- module.addPlot1D(uncutFormattedPlot);
- module.addPlot1D(triggeredFormattedPlot);
- }
- for(int i = 0; i < plotName2D.length; i++) {
- // Get the uncut and triggered plots.
- IHistogram2D uncutPlot = (IHistogram2D) tree.find(plotLocations2D[UNCUT][i]);
- IHistogram2D triggeredPlot = (IHistogram2D) tree.find(plotLocations2D[TRIGGERED][i] + " (Passed All Cuts)");
-
- // Make a formatted plot for each.
- FormattedPlot2D uncutFormattedPlot;
- FormattedPlot2D triggeredFormattedPlot;
- if(xAxisRanges2D[i] != -1) {
- uncutFormattedPlot = new FormattedPlot2D(uncutPlot, true, xTitles2D[i], yTitles2D[i], plotName2D[i] + " (No Cuts)", xAxisRanges2D[i], yAxisRanges2D[i]);
- triggeredFormattedPlot = new FormattedPlot2D(triggeredPlot, true, xTitles2D[i], yTitles2D[i], plotName2D[i] + " (Triggered)", xAxisRanges2D[i], yAxisRanges2D[i]);
- } else {
- uncutFormattedPlot = new FormattedPlot2D(uncutPlot, true, xTitles2D[i], yTitles2D[i], plotName2D[i] + " (No Cuts)");
- triggeredFormattedPlot = new FormattedPlot2D(triggeredPlot, true, xTitles2D[i], yTitles2D[i], plotName2D[i] + " (Triggered)");
- }
-
- // Add the plots to the module.
- module.addPlot2D(uncutFormattedPlot);
- module.addPlot2D(triggeredFormattedPlot);
- }
-
- // Save the plots.
- module.savePlots("C:\\Users\\Kyle\\Desktop\\EnergyShift\\MonteCarlo\\Trident\\Trigger\\");
- }
+
+ public static void main(String[] args) throws IllegalArgumentException, IOException {
+ // Define the root directory for the plots.
+ String rootDir = "D:\\cygwin64\\home\\Kyle\\";
+
+ // Define the new name of the file containing the trigger plots.
+ String plotFile = rootDir + "trident-readout-full.aida";
+
+ // Get the plots file and open it.
+ IAnalysisFactory af = IAnalysisFactory.create();
+ ITree tree = af.createTreeFactory().create(plotFile);
+ if(tree == null) { throw new IllegalArgumentException("Unable to load plot file."); }
+
+ // Define plots variables.
+ int UNCUT = 0;
+ int TRIGGERED = 1;
+ String[] plotsDir = { "NoCuts/", "PassedAll/" };
+ int PLOT_HIT_COUNT = 0;
+ int PLOT_SEED_ENERGY = 1;
+ int PLOT_CLUSTER_ENERGY = 2;
+ int PLOT_COPLANARITY = 3;
+ int PLOT_ENERGY_SUM = 4;
+ int PLOT_ENERGY_DIFF = 5;
+ int PLOT_ENERGY_SLOPE = 6;
+ int PLOT_SEED_DIST = 0;
+ int PLOT_ENERGY_SUM_2D = 1;
+
+ // Define the internal plot names.
+ String[] plotNameInternal1D = new String[7];
+ String[] plotNameInternal2D = new String[2];
+ plotNameInternal1D[PLOT_HIT_COUNT] = "Cluster Hit Count";
+ plotNameInternal1D[PLOT_SEED_ENERGY] = "Cluster Seed Energy";
+ plotNameInternal1D[PLOT_CLUSTER_ENERGY] = "Cluster Total Energy";
+ plotNameInternal1D[PLOT_COPLANARITY] = "Pair Coplanarity";
+ plotNameInternal1D[PLOT_ENERGY_SUM] = "Pair Energy Sum";
+ plotNameInternal1D[PLOT_ENERGY_DIFF] = "Pair Energy Difference";
+ plotNameInternal1D[PLOT_ENERGY_SLOPE] = "Pair Energy Slope";
+ plotNameInternal2D[PLOT_SEED_DIST] = "Cluster Seed";
+ plotNameInternal2D[PLOT_ENERGY_SUM_2D] = "Pair Energy Sum 2D";
+
+ // Define the plot display names.
+ String[] plotName1D = new String[7];
+ String[] plotName2D = new String[2];
+ for(int j = 0; j < plotNameInternal1D.length; j++) {
+ plotName1D[j] = plotNameInternal1D[j];
+ }
+ for(int j = 0; j < plotNameInternal2D.length; j++) {
+ plotName2D[j] = plotNameInternal2D[j];
+ }
+ plotName1D[PLOT_ENERGY_SUM] = "1D Pair Energy Sum";
+ plotName2D[PLOT_SEED_DIST] = "Cluster Seed Distribution";
+ plotName2D[PLOT_ENERGY_SUM_2D] = "2D Pair Energy Sum";
+
+ String[] xTitles1D = new String[plotName1D.length];
+ String[] xTitles2D = new String[plotName2D.length];
+ xTitles1D[PLOT_HIT_COUNT] = "Hit Count";
+ xTitles1D[PLOT_SEED_ENERGY] = "Seed Energy (GeV)";
+ xTitles1D[PLOT_CLUSTER_ENERGY] = "Cluster Energy (GeV)";
+ xTitles1D[PLOT_COPLANARITY] = "Coplanarity Angle (Degrees)";
+ xTitles1D[PLOT_ENERGY_SUM] = "Energy Sum (GeV)";
+ xTitles1D[PLOT_ENERGY_DIFF] = "Energy Difference (GeV)";
+ xTitles1D[PLOT_ENERGY_SLOPE] = "Energy Slope (GeV)";
+ xTitles2D[PLOT_SEED_DIST] = "x-Index";
+ xTitles2D[PLOT_ENERGY_SUM_2D] = "First Cluster Energy (GeV)";
+ String yTitle1D = "Count";
+ String[] yTitles2D = new String[plotName2D.length];
+ yTitles2D[PLOT_SEED_DIST] = "y-Index";
+ yTitles2D[PLOT_ENERGY_SUM_2D] = "Second Cluster Energy (GeV)";
+
+ // Define axis ranges.
+ double[] axisRanges1D = new double[plotName1D.length];
+ axisRanges1D[PLOT_HIT_COUNT] = -1;
+ axisRanges1D[PLOT_SEED_ENERGY] = 1.1;
+ axisRanges1D[PLOT_CLUSTER_ENERGY] = 1.1;
+ axisRanges1D[PLOT_COPLANARITY] = 180;
+ axisRanges1D[PLOT_ENERGY_SUM] = 2.2;
+ axisRanges1D[PLOT_ENERGY_DIFF] = 1.1;
+ axisRanges1D[PLOT_ENERGY_SLOPE] = 2.4;
+ double[] xAxisRanges2D = new double[plotName2D.length];
+ double[] yAxisRanges2D = new double[plotName2D.length];
+ xAxisRanges2D[PLOT_SEED_DIST] = -1;
+ xAxisRanges2D[PLOT_ENERGY_SUM_2D] = 1.1;
+ yAxisRanges2D[PLOT_SEED_DIST] = -1;
+ yAxisRanges2D[PLOT_ENERGY_SUM_2D] = 1.1;
+
+ // Define the plot names.
+ String[][] plotLocations1D = new String[plotsDir.length][plotNameInternal1D.length];
+ String[][] plotLocations2D = new String[plotsDir.length][plotNameInternal2D.length];
+ for(int i = 0; i < plotsDir.length; i++) {
+ for(int j = 0; j < plotNameInternal1D.length; j++) {
+ plotLocations1D[i][j] = plotsDir[i] + plotNameInternal1D[j];
+ }
+ }
+ for(int i = 0; i < plotsDir.length; i++) {
+ for(int j = 0; j < plotNameInternal2D.length; j++) {
+ plotLocations2D[i][j] = plotsDir[i] + plotNameInternal2D[j];
+ }
+ }
+
+ // Create a plot formatting module.
+ PlotFormatModule module = new PlotFormatModule();
+
+ // Load the plot objects.
+ for(int i = 0; i < plotName1D.length; i++) {
+ // Get the uncut and triggered plots.
+ IHistogram1D uncutPlot = (IHistogram1D) tree.find(plotLocations1D[UNCUT][i]);
+ IHistogram1D triggeredPlot = (IHistogram1D) tree.find(plotLocations1D[TRIGGERED][i] + " (Passed All Cuts)");
+
+ // Make a formatted plot for each.
+ FormattedPlot1D uncutFormattedPlot;
+ FormattedPlot1D triggeredFormattedPlot;
+ if(axisRanges1D[i] != -1) {
+ uncutFormattedPlot = new FormattedPlot1D(uncutPlot, PlotsFormatter.ColorStyle.GREY, xTitles1D[i], yTitle1D, plotName1D[i] + " (No Cuts)", axisRanges1D[i]);
+ triggeredFormattedPlot = new FormattedPlot1D(triggeredPlot, PlotsFormatter.ColorStyle.MS_GREEN, xTitles1D[i], yTitle1D, plotName1D[i] + " (Triggered)", axisRanges1D[i]);
+ } else {
+ uncutFormattedPlot = new FormattedPlot1D(uncutPlot, PlotsFormatter.ColorStyle.GREY, xTitles1D[i], yTitle1D, plotName1D[i] + " (No Cuts)");
+ triggeredFormattedPlot = new FormattedPlot1D(triggeredPlot, PlotsFormatter.ColorStyle.MS_GREEN, xTitles1D[i], yTitle1D, plotName1D[i] + " (Triggered)");
+ }
+
+ // Add the plots to the module.
+ module.addPlot1D(uncutFormattedPlot);
+ module.addPlot1D(triggeredFormattedPlot);
+ }
+ for(int i = 0; i < plotName2D.length; i++) {
+ // Get the uncut and triggered plots.
+ IHistogram2D uncutPlot = (IHistogram2D) tree.find(plotLocations2D[UNCUT][i]);
+ IHistogram2D triggeredPlot = (IHistogram2D) tree.find(plotLocations2D[TRIGGERED][i] + " (Passed All Cuts)");
+
+ // Make a formatted plot for each.
+ FormattedPlot2D uncutFormattedPlot;
+ FormattedPlot2D triggeredFormattedPlot;
+ if(xAxisRanges2D[i] != -1) {
+ uncutFormattedPlot = new FormattedPlot2D(uncutPlot, true, xTitles2D[i], yTitles2D[i], plotName2D[i] + " (No Cuts)", xAxisRanges2D[i], yAxisRanges2D[i]);
+ triggeredFormattedPlot = new FormattedPlot2D(triggeredPlot, true, xTitles2D[i], yTitles2D[i], plotName2D[i] + " (Triggered)", xAxisRanges2D[i], yAxisRanges2D[i]);
+ } else {
+ uncutFormattedPlot = new FormattedPlot2D(uncutPlot, true, xTitles2D[i], yTitles2D[i], plotName2D[i] + " (No Cuts)");
+ triggeredFormattedPlot = new FormattedPlot2D(triggeredPlot, true, xTitles2D[i], yTitles2D[i], plotName2D[i] + " (Triggered)");
+ }
+
+ // Add the plots to the module.
+ module.addPlot2D(uncutFormattedPlot);
+ module.addPlot2D(triggeredFormattedPlot);
+ }
+
+ // Save the plots.
+ module.savePlots("C:\\Users\\Kyle\\Desktop\\EnergyShift\\MonteCarlo\\Trident\\Trigger\\");
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/TridentTrackFormatter.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/TridentTrackFormatter.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/TridentTrackFormatter.java Wed Apr 27 11:11:32 2016
@@ -16,187 +16,187 @@
import hep.aida.ref.plotter.PlotterRegion;
public class TridentTrackFormatter {
- /**
- * Loads all plots in a file and formats them according to the
- * indicated style.
- * @param args - Unused default executable parameter.
- * @throws IOException Occurs if there is an issue opening the file.
- */
- public static void main(String[] args) throws IOException {
- // Define the root directory for the plots.
- String rootDir = "D:\\cygwin64\\home\\Kyle\\tmp\\";
-
- // Define the new name of the file containing the trigger plots.
- String plotFile = rootDir + "trident-out.aida";
-
- // Get the plots file and open it.
- IAnalysisFactory af = IAnalysisFactory.create();
- ITree tree = af.createTreeFactory().create(plotFile);
- if(tree == null) { throw new IllegalArgumentException("Unable to load plot file."); }
-
- // Declare the histogram names.
- String trackName = "Tracks in Event (All)";
- String posTrackName = "Tracks in Event (Positive)";
- String negTrackName = "Tracks in Event (Negative)";
- String posMomentumName = "Momentum (Positive)";
- String negMomentumName = "Momentum (Negative)";
- String energySumName = "Energy Sum";
- String momentumSumName = "Momentum Sum";
- String energyMomentumDiffName = "Energy-Momentum Difference";
- String invariantMassName = "Invariant Mass";
- String energySum2DName = "2D Energy Sum";
- String momentumSum2DName = "2D Momentum Sum";
- String positionName = "Track Cluster Position";
-
- // Get the histograms.
- IHistogram1D[] tracks = {
- (IHistogram1D) tree.find("Trident Analysis/All/" + trackName),
- (IHistogram1D) tree.find("Trident Analysis/Cluster/" + trackName)
- };
- IHistogram1D[] posTracks = {
- (IHistogram1D) tree.find("Trident Analysis/All/" + posTrackName),
- (IHistogram1D) tree.find("Trident Analysis/Cluster/" + posTrackName)
- };
- IHistogram1D[] negTracks = {
- (IHistogram1D) tree.find("Trident Analysis/All/" + negTrackName),
- (IHistogram1D) tree.find("Trident Analysis/Cluster/" + negTrackName)
- };
- IHistogram1D[] posMomentum = {
- (IHistogram1D) tree.find("Trident Analysis/All/" + posMomentumName),
- (IHistogram1D) tree.find("Trident Analysis/Cluster/" + posMomentumName)
- };
- IHistogram1D[] negMomentum = {
- (IHistogram1D) tree.find("Trident Analysis/All/" + negMomentumName),
- (IHistogram1D) tree.find("Trident Analysis/Cluster/" + negMomentumName)
- };
- IHistogram1D[] energySum = {
- (IHistogram1D) tree.find("Trident Analysis/All/" + energySumName),
- (IHistogram1D) tree.find("Trident Analysis/Cluster/" + energySumName)
- };
- IHistogram1D[] momentumSum = {
- (IHistogram1D) tree.find("Trident Analysis/All/" + momentumSumName),
- (IHistogram1D) tree.find("Trident Analysis/Cluster/" + momentumSumName)
- };
- IHistogram1D[] energyMomentumDiff = {
- (IHistogram1D) tree.find("Trident Analysis/All/" + energyMomentumDiffName),
- (IHistogram1D) tree.find("Trident Analysis/Cluster/" + energyMomentumDiffName)
- };
- IHistogram1D[] invariantMass = {
- (IHistogram1D) tree.find("Trident Analysis/All/" + invariantMassName),
- (IHistogram1D) tree.find("Trident Analysis/Cluster/" + invariantMassName)
- };
- IHistogram2D[] energySum2D = {
- (IHistogram2D) tree.find("Trident Analysis/All/" + energySum2DName),
- (IHistogram2D) tree.find("Trident Analysis/Cluster/" + energySum2DName)
- };
- IHistogram2D[] momentumSum2D = {
- (IHistogram2D) tree.find("Trident Analysis/All/" + momentumSum2DName),
- (IHistogram2D) tree.find("Trident Analysis/Cluster/" + momentumSum2DName)
- };
- IHistogram2D[] position = {
- (IHistogram2D) tree.find("Trident Analysis/All/" + positionName),
- (IHistogram2D) tree.find("Trident Analysis/Cluster/" + positionName)
- };
-
- // Re-bin the histograms to have 5-times larger bins. First,
- // get the bin count and upper and lower bounds of the plot.
- int bins = invariantMass[0].axis().bins();
- double low = invariantMass[0].axis().binLowerEdge(0);
- double high = invariantMass[0].axis().binUpperEdge(invariantMass[0].axis().bins() - 1);
-
- // Create new plots with the larger bin sizes.
- AIDA aida = AIDA.defaultInstance();
- IHistogram1D[] newPlot = new IHistogram1D[2];
- newPlot[0] = aida.histogram1D(invariantMassName, bins / 5, low, high);
- newPlot[1] = aida.histogram1D("Cluster " + invariantMassName, bins / 5, low, high);
-
- // Populate the new plots with the data from the old ones.
- for(int j = 0; j < 2; j++) {
- for(int i = 0; i < bins; i++) {
- int entries = invariantMass[j].binEntries(i);
- double center = invariantMass[j].axis().binCenter(i);
- for(int k = 0; k < entries; k++) {
- newPlot[j].fill(center);
- }
- }
- }
-
- // Replace the old plots.
- invariantMass = newPlot;
-
- // Define the scaling factors for each plot.
- double scaleFactor = 1;
-
- // Define the plot titles and arrays for 1D plots.
- IHistogram[][] plots = { tracks, posTracks, negTracks, posMomentum, negMomentum, energySum, momentumSum, energyMomentumDiff, invariantMass };
- String[] titles = { trackName, posTrackName, negTrackName, posMomentumName, negMomentumName, energySumName, momentumSumName,
- energyMomentumDiffName, invariantMassName };
- String[] xTitles = { "Tracks", "Tracks", "Tracks", "Momentum (GeV)", "Momentum (GeV)", "Energy Sum (GeV)", "Momentum Sum (GeV)",
- "|E_Cluster - P_Track| (GeV)", "Invariant Mass (GeV)" };
- String yTitle = "Count";
-
- // Define the plot titles and arrays for 2D plots.
- IHistogram2D[][] plots2D = { energySum2D, momentumSum2D, position };
- String[] titles2D = { energySum2DName, momentumSum2DName, positionName };
- String[] xTitles2D = { "Positive Cluster Energy", "Positive Track Momentum", "x-Index" };
- String[] yTitles2D = { "Negative Cluster Energy", "Negative Track Momentum", "y-Index" };
- String zTitle2D = "Count";
-
- // Create a plotter factory.
- IPlotterFactory plotterFactory = af.createPlotterFactory();
-
- // Format and display the basic histograms.
- for(int i = 0; i < plots.length; i++) {
- for(int j = 0; j < 2; j++) {
- // Scale the histogram by the appropriate scaling factor.
- plots[i][j].scale(1.0 / scaleFactor);
-
- // Create a plotter and plotting region for the plot.
- IPlotter plotter = plotterFactory.create((j == 1 ? "Cluster " : "") + titles[i]);
- plotter.createRegions(1);
- plotter.region(0).plot(plots[i][j]);
-
- // Format the axis labels.
- PlotterRegion region = (PlotterRegion) plotter.region(0);
- region.getPlot().setTitle((j == 1 ? "Cluster " : "") + titles[i]);
- region.getPlot().getXAxis().setLabel(xTitles[i]);
- region.getPlot().getYAxis().setLabel(yTitle);
-
- // Format the fonts and general plot presentation.
- PlotsFormatter.setDefault1DStyle(region, new ColorStyle[] { ColorStyle.GREY });
-
- // Show the plot.
- plotter.setParameter("plotterWidth", "2000");
- plotter.setParameter("plotterHeight", "1200");
- plotter.show();
- }
- }
-
- // Format and display the 2D histogram.
- for(int i = 0; i < plots2D.length; i++) {
- for(int j = 0; j < 2; j++) {
- plots2D[i][j].scale(1.0 / scaleFactor);
- IPlotter plotter2D = plotterFactory.create((j == 1 ? "Cluster " : "") + titles2D[i]);
- plotter2D.createRegions(1);
- plotter2D.region(0).plot(plots2D[i][j]);
-
- // Format the axis labels.
- PlotterRegion region2D = (PlotterRegion) plotter2D.region(0);
- region2D.getPlot().setTitle((j == 1 ? "Cluster " : "") + titles2D[i]);
- region2D.getPlot().getXAxis().setLabel(xTitles2D[i]);
- region2D.getPlot().getYAxis().setLabel(yTitles2D[i]);
-
- // Format the fonts and general plot presentation.
- PlotsFormatter.setDefault2DStyle(region2D, true);
-
- // Show the plot.
- plotter2D.setParameter("plotterWidth", "2000");
- plotter2D.setParameter("plotterHeight", "1200");
- plotter2D.show();
- }
- }
-
- // Close the tree.
- tree.close();
- }
+ /**
+ * Loads all plots in a file and formats them according to the
+ * indicated style.
+ * @param args - Unused default executable parameter.
+ * @throws IOException Occurs if there is an issue opening the file.
+ */
+ public static void main(String[] args) throws IOException {
+ // Define the root directory for the plots.
+ String rootDir = "D:\\cygwin64\\home\\Kyle\\tmp\\";
+
+ // Define the new name of the file containing the trigger plots.
+ String plotFile = rootDir + "trident-out.aida";
+
+ // Get the plots file and open it.
+ IAnalysisFactory af = IAnalysisFactory.create();
+ ITree tree = af.createTreeFactory().create(plotFile);
+ if(tree == null) { throw new IllegalArgumentException("Unable to load plot file."); }
+
+ // Declare the histogram names.
+ String trackName = "Tracks in Event (All)";
+ String posTrackName = "Tracks in Event (Positive)";
+ String negTrackName = "Tracks in Event (Negative)";
+ String posMomentumName = "Momentum (Positive)";
+ String negMomentumName = "Momentum (Negative)";
+ String energySumName = "Energy Sum";
+ String momentumSumName = "Momentum Sum";
+ String energyMomentumDiffName = "Energy-Momentum Difference";
+ String invariantMassName = "Invariant Mass";
+ String energySum2DName = "2D Energy Sum";
+ String momentumSum2DName = "2D Momentum Sum";
+ String positionName = "Track Cluster Position";
+
+ // Get the histograms.
+ IHistogram1D[] tracks = {
+ (IHistogram1D) tree.find("Trident Analysis/All/" + trackName),
+ (IHistogram1D) tree.find("Trident Analysis/Cluster/" + trackName)
+ };
+ IHistogram1D[] posTracks = {
+ (IHistogram1D) tree.find("Trident Analysis/All/" + posTrackName),
+ (IHistogram1D) tree.find("Trident Analysis/Cluster/" + posTrackName)
+ };
+ IHistogram1D[] negTracks = {
+ (IHistogram1D) tree.find("Trident Analysis/All/" + negTrackName),
+ (IHistogram1D) tree.find("Trident Analysis/Cluster/" + negTrackName)
+ };
+ IHistogram1D[] posMomentum = {
+ (IHistogram1D) tree.find("Trident Analysis/All/" + posMomentumName),
+ (IHistogram1D) tree.find("Trident Analysis/Cluster/" + posMomentumName)
+ };
+ IHistogram1D[] negMomentum = {
+ (IHistogram1D) tree.find("Trident Analysis/All/" + negMomentumName),
+ (IHistogram1D) tree.find("Trident Analysis/Cluster/" + negMomentumName)
+ };
+ IHistogram1D[] energySum = {
+ (IHistogram1D) tree.find("Trident Analysis/All/" + energySumName),
+ (IHistogram1D) tree.find("Trident Analysis/Cluster/" + energySumName)
+ };
+ IHistogram1D[] momentumSum = {
+ (IHistogram1D) tree.find("Trident Analysis/All/" + momentumSumName),
+ (IHistogram1D) tree.find("Trident Analysis/Cluster/" + momentumSumName)
+ };
+ IHistogram1D[] energyMomentumDiff = {
+ (IHistogram1D) tree.find("Trident Analysis/All/" + energyMomentumDiffName),
+ (IHistogram1D) tree.find("Trident Analysis/Cluster/" + energyMomentumDiffName)
+ };
+ IHistogram1D[] invariantMass = {
+ (IHistogram1D) tree.find("Trident Analysis/All/" + invariantMassName),
+ (IHistogram1D) tree.find("Trident Analysis/Cluster/" + invariantMassName)
+ };
+ IHistogram2D[] energySum2D = {
+ (IHistogram2D) tree.find("Trident Analysis/All/" + energySum2DName),
+ (IHistogram2D) tree.find("Trident Analysis/Cluster/" + energySum2DName)
+ };
+ IHistogram2D[] momentumSum2D = {
+ (IHistogram2D) tree.find("Trident Analysis/All/" + momentumSum2DName),
+ (IHistogram2D) tree.find("Trident Analysis/Cluster/" + momentumSum2DName)
+ };
+ IHistogram2D[] position = {
+ (IHistogram2D) tree.find("Trident Analysis/All/" + positionName),
+ (IHistogram2D) tree.find("Trident Analysis/Cluster/" + positionName)
+ };
+
+ // Re-bin the histograms to have 5-times larger bins. First,
+ // get the bin count and upper and lower bounds of the plot.
+ int bins = invariantMass[0].axis().bins();
+ double low = invariantMass[0].axis().binLowerEdge(0);
+ double high = invariantMass[0].axis().binUpperEdge(invariantMass[0].axis().bins() - 1);
+
+ // Create new plots with the larger bin sizes.
+ AIDA aida = AIDA.defaultInstance();
+ IHistogram1D[] newPlot = new IHistogram1D[2];
+ newPlot[0] = aida.histogram1D(invariantMassName, bins / 5, low, high);
+ newPlot[1] = aida.histogram1D("Cluster " + invariantMassName, bins / 5, low, high);
+
+ // Populate the new plots with the data from the old ones.
+ for(int j = 0; j < 2; j++) {
+ for(int i = 0; i < bins; i++) {
+ int entries = invariantMass[j].binEntries(i);
+ double center = invariantMass[j].axis().binCenter(i);
+ for(int k = 0; k < entries; k++) {
+ newPlot[j].fill(center);
+ }
+ }
+ }
+
+ // Replace the old plots.
+ invariantMass = newPlot;
+
+ // Define the scaling factors for each plot.
+ double scaleFactor = 1;
+
+ // Define the plot titles and arrays for 1D plots.
+ IHistogram[][] plots = { tracks, posTracks, negTracks, posMomentum, negMomentum, energySum, momentumSum, energyMomentumDiff, invariantMass };
+ String[] titles = { trackName, posTrackName, negTrackName, posMomentumName, negMomentumName, energySumName, momentumSumName,
+ energyMomentumDiffName, invariantMassName };
+ String[] xTitles = { "Tracks", "Tracks", "Tracks", "Momentum (GeV)", "Momentum (GeV)", "Energy Sum (GeV)", "Momentum Sum (GeV)",
+ "|E_Cluster - P_Track| (GeV)", "Invariant Mass (GeV)" };
+ String yTitle = "Count";
+
+ // Define the plot titles and arrays for 2D plots.
+ IHistogram2D[][] plots2D = { energySum2D, momentumSum2D, position };
+ String[] titles2D = { energySum2DName, momentumSum2DName, positionName };
+ String[] xTitles2D = { "Positive Cluster Energy", "Positive Track Momentum", "x-Index" };
+ String[] yTitles2D = { "Negative Cluster Energy", "Negative Track Momentum", "y-Index" };
+ String zTitle2D = "Count";
+
+ // Create a plotter factory.
+ IPlotterFactory plotterFactory = af.createPlotterFactory();
+
+ // Format and display the basic histograms.
+ for(int i = 0; i < plots.length; i++) {
+ for(int j = 0; j < 2; j++) {
+ // Scale the histogram by the appropriate scaling factor.
+ plots[i][j].scale(1.0 / scaleFactor);
+
+ // Create a plotter and plotting region for the plot.
+ IPlotter plotter = plotterFactory.create((j == 1 ? "Cluster " : "") + titles[i]);
+ plotter.createRegions(1);
+ plotter.region(0).plot(plots[i][j]);
+
+ // Format the axis labels.
+ PlotterRegion region = (PlotterRegion) plotter.region(0);
+ region.getPlot().setTitle((j == 1 ? "Cluster " : "") + titles[i]);
+ region.getPlot().getXAxis().setLabel(xTitles[i]);
+ region.getPlot().getYAxis().setLabel(yTitle);
+
+ // Format the fonts and general plot presentation.
+ PlotsFormatter.setDefault1DStyle(region, new ColorStyle[] { ColorStyle.GREY });
+
+ // Show the plot.
+ plotter.setParameter("plotterWidth", "2000");
+ plotter.setParameter("plotterHeight", "1200");
+ plotter.show();
+ }
+ }
+
+ // Format and display the 2D histogram.
+ for(int i = 0; i < plots2D.length; i++) {
+ for(int j = 0; j < 2; j++) {
+ plots2D[i][j].scale(1.0 / scaleFactor);
+ IPlotter plotter2D = plotterFactory.create((j == 1 ? "Cluster " : "") + titles2D[i]);
+ plotter2D.createRegions(1);
+ plotter2D.region(0).plot(plots2D[i][j]);
+
+ // Format the axis labels.
+ PlotterRegion region2D = (PlotterRegion) plotter2D.region(0);
+ region2D.getPlot().setTitle((j == 1 ? "Cluster " : "") + titles2D[i]);
+ region2D.getPlot().getXAxis().setLabel(xTitles2D[i]);
+ region2D.getPlot().getYAxis().setLabel(yTitles2D[i]);
+
+ // Format the fonts and general plot presentation.
+ PlotsFormatter.setDefault2DStyle(region2D, true);
+
+ // Show the plot.
+ plotter2D.setParameter("plotterWidth", "2000");
+ plotter2D.setParameter("plotterHeight", "1200");
+ plotter2D.show();
+ }
+ }
+
+ // Close the tree.
+ tree.close();
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/TriggerPlotsFormat.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/TriggerPlotsFormat.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/kmccarty/plots/formatter/TriggerPlotsFormat.java Wed Apr 27 11:11:32 2016
@@ -20,331 +20,331 @@
import hep.aida.ref.plotter.PlotterRegion;
public class TriggerPlotsFormat {
- // Define plot fonts.
- private static final Font BASIC_FONT = new Font("Calibri", Font.PLAIN, 20);
- private static final Font AXIS_FONT = new Font("Calibri", Font.BOLD, 25);
- private static final Font TITLE_FONT = new Font("Calibri", Font.BOLD, 35);
-
- // Defines the color style options for plot data.
- private enum ColorStyle {
- MS_BLUE(new Color( 79, 129, 189), new Color( 36, 64, 97)), MS_ORANGE(new Color(247, 150, 70), new Color(152, 72, 6)),
- MS_RED(new Color(192, 80, 77), new Color( 99, 36, 35)), GREY(new Color(166, 166, 166), new Color( 89, 89, 89)),
- MS_GREEN(new Color(155, 187, 89), new Color( 79, 98, 40)), CRIMSON(new Color(161, 0, 0), new Color(104, 0, 0)),
- RUST(new Color(161, 80, 0), new Color(105, 80, 0)), YELLOW(new Color(161, 161, 0), new Color(122, 109, 8)),
- FOREST(new Color( 65, 102, 0), new Color( 37, 79, 0)), GREEN(new Color( 7, 132, 70), new Color( 7, 82, 30)),
- TEAL(new Color( 0, 130, 130), new Color( 0, 90, 100)), CERULEAN(new Color( 0, 86, 130), new Color( 0, 28, 83)),
- BLUE(new Color( 0, 33, 203), new Color( 0, 0, 137)), INDIGO(new Color( 68, 10, 127), new Color( 0, 0, 61)),
- PURPLE(new Color(106, 0, 106), new Color( 63, 0, 56)), FUSCHIA(new Color(119, 0, 60), new Color( 60, 0, 60));
-
- private final Color fillColor;
- private final Color lineColor;
-
- private ColorStyle(Color fillColor, Color lineColor) {
- this.fillColor = fillColor;
- this.lineColor = lineColor;
- }
-
- public Color getFillColor() { return fillColor; }
-
- public Color getLineColor() { return lineColor; }
- };
-
- /**
- * Loads all plots in a file and formats them according to the
- * indicated style.
- * @param args - Unused default executable parameter.
- * @throws IOException Occurs if there is an issue opening the file.
- */
- public static void main(String[] args) throws IOException {
- // Define the root directory for the plots.
- String rootDir = "D:\\cygwin64\\home\\Kyle\\beam-plots\\base\\";
- //String rootDir = "D:\\cygwin64\\home\\Kyle\\aprime-plots\\base\\readout-plots\\";
-
- // Define the new name of the file containing the trigger plots.
- String[] plotFile = {
- rootDir + "compiled-plots.aida"
- //rootDir + "15-MeV\\compiled-plots.aida",
- //rootDir + "20-MeV\\compiled-plots.aida",
- //rootDir + "30-MeV\\compiled-plots.aida",
- //rootDir + "40-MeV\\compiled-plots.aida",
- //rootDir + "50-MeV\\compiled-plots.aida"
- };
-
- // Define the names of each plot. This will be used for the
- // legend in the case of multiple plots.
- String[] treeName = {
- "Background",
- "15 MeV A'",
- "20 MeV A'",
- "30 MeV A'",
- "40 MeV A'",
- "50 MeV A'"
- };
-
- // Define the color style for the plots.
- ColorStyle[] dataColorStyle = {
- ColorStyle.GREY,
- ColorStyle.MS_GREEN,
- ColorStyle.MS_BLUE,
- ColorStyle.MS_ORANGE,
- ColorStyle.MS_RED,
- ColorStyle.TEAL,
- ColorStyle.CRIMSON,
- ColorStyle.FOREST
- };
-
- // Get the plots file and open it.
- IAnalysisFactory af = IAnalysisFactory.create();
- ITree[] tree = new ITree[plotFile.length];
- for(int i = 0; i < plotFile.length; i++) {
- tree[i] = af.createTreeFactory().create(plotFile[i]);
- if(tree[i] == null) { throw new IllegalArgumentException("Unable to load plot file."); }
- }
-
- // Get a list of all the histograms in the file.
- List<List<String>> treeHistograms = new ArrayList<List<String>>(plotFile.length);
- for(int i = 0; i < plotFile.length; i++) {
- treeHistograms.add(getHistograms(tree[i], "/NoCuts/"));//, "/PassedAll/"));
- }
-
- // Create a plotter factory.
- IPlotterFactory plotterFactory = af.createPlotterFactory();
-
- // Plot each histogram and format it.
- for(String histogram : treeHistograms.get(0)) {
- // Get the plot from the tree and verify that it is a 1D
- // or 2D histogram. Other types are not supported.
- IManagedObject histObject = tree[0].find(histogram);
- if(!(histObject instanceof IHistogram1D) && !(histObject instanceof IHistogram2D)) {
- continue;
- }
-
- // Obtain the histogram object.
- IBaseHistogram hist;
- if(histObject instanceof IHistogram1D) { hist = (IHistogram1D) histObject; }
- else { hist = (IHistogram2D) histObject; }
-
- // Define whether this is an overlay plot and whether
- // this is a one or two dimensional plot.
- boolean overlay = plotFile.length > 1;
- boolean twoDimensional = hist instanceof IHistogram2D;
-
- // Generate the plotter and set its title. The plotter will
- // use the title of the first tree's plot.
- String plotTitle = hist.title();
- IPlotter plotter = plotterFactory.create(plotTitle);
-
- // For single plots and one-dimensional overlay plots,
- // there should only be a single plotter region.
- if(!twoDimensional || !overlay) { plotter.createRegions(1); }
-
- // For two-dimensional overlay plots, create a region for
- // each plot individually.
- else { plotter.createRegions(2, (int) Math.ceil(plotFile.length / 2.0)); }
-
- // Find the histogram in each of the trees and plot them
- // all on the same region.
- for(int i = 0; i < plotFile.length; i++) {
- // Get the histogram from the tree.
- IManagedObject treeObject = tree[i].find(histogram);
- IBaseHistogram treeHist;
- if(treeObject instanceof IHistogram1D) { treeHist = (IHistogram1D) treeObject; }
- else { treeHist = (IHistogram2D) treeObject; }
-
- // Display the plot.
- if(treeHist != null) {
- // Set the title of plot to the name associated with
- // its tree. This ensures that the correct name will
- // appear on the legend.
- if(plotFile.length > 1) {
- treeHist.setTitle(treeName[i]);
- }
-
- // Plot the tree's data in the plotter region.
- if(!twoDimensional || !overlay) { plotter.region(0).plot(treeHist); }
- else {
- plotter.region(i).plot(treeHist);
- setDefault2DStyle(((PlotterRegion) plotter.region(i)), dataColorStyle);
- }
- }
- }
-
- // Format the plot region.
- if(!twoDimensional) { setDefault1DStyle(((PlotterRegion) plotter.region(0)), dataColorStyle); }
- else { setDefault2DStyle(((PlotterRegion) plotter.region(0)), dataColorStyle); }
-
- // Show the plotter.
- plotter.region(0).setTitle(plotTitle);
- plotter.setParameter("plotterWidth", "750");
- plotter.setParameter("plotterHeight", "600");
- //plotter.setParameter("plotterWidth", "2000");
- //plotter.setParameter("plotterHeight", "1200");
- plotter.show();
- }
-
- // Close the trees.
- for(int i = 0; i < plotFile.length; i++) {
- tree[i].close();
- }
- }
-
- /**
- * Sets the plot display formatting for 1D plots.
- * @param region - The plotter region to format.
- * @param color - The data color settings to use.
- */
- private static final void setDefault1DStyle(PlotterRegion region, ColorStyle[] color) {
- // Get the names of each plot on in the region.
- String[] dataNames = region.getAllDataNames();
-
- // Check whether this is an overlay plot. Overlay plots contain
- // more than one data name.
- boolean overlay = (dataNames.length > 1 ? true : false);
-
- // Iterate over each plot in the region.
- for(int i = 0; i < dataNames.length; i++) {
- // Set the overlay style if needed.
- if(overlay) {
- // Get the fill style for the current data type.
- JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
-
- // Set the histogram style to display thick-lined bars
- // with no fill. The color is set by the "color" argument.
- fillStyle.setHistogramFill(false);
- fillStyle.setHistogramBarLineWidth(3);
- fillStyle.setHistogramBarLineColor(color[i].getFillColor());
-
- // Set the legend text style.
- region.getPlot().getLegend().setFont(new Font("Calibri", Font.PLAIN, 20));
- }
-
- // Otherwise, set the fill style for a single plot.
- else {
- // Get the fill style for the current data type.
- JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
-
- // Set the histogram style to display thick-lined bars
- // with a fill color. The colors are defined by the
- // "color" argument.
- fillStyle.setHistogramBarLineWidth(3);
- fillStyle.setHistogramBarColor(color[i].getFillColor());
- fillStyle.setHistogramBarLineColor(color[i].getLineColor());
- }
-
- // Set the statistics box style.
- region.getPlot().getStats().setVisible(true);
- region.getPlot().getStats().setFont(BASIC_FONT);
-
- // Set the title font.
- region.getPlot().getTitleObject().setFont(TITLE_FONT);
-
- // Set generic axis titles.
- region.getPlot().getXAxis().setLabel("Data Label (Unit)");
- region.getPlot().getYAxis().setLabel("Count");
-
- // Set the axis tick-mark fonts.
- region.getPlot().getXAxis().setFont(BASIC_FONT);
- region.getPlot().getYAxis().setFont(BASIC_FONT);
- region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
- region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
- }
- }
-
- /**
- * Sets the plot display formatting for 1D plots.
- * @param region - The plotter region to format.
- * @param color - The data color settings to use.
- */
- private static final void setDefault2DStyle(PlotterRegion region, ColorStyle[] color) {
- // Get the fill style object. 2D plots should never be overlay
- // plots, so there should only ever be one data name.
- JASHist2DHistogramStyle fillStyle = (JASHist2DHistogramStyle) region.getDataForName(region.getAllDataNames()[0]).getStyle();
-
- // Set the fill style for a two-dimensional plot.
- fillStyle.setLogZ(true);
- fillStyle.setHistStyle(JASHist2DHistogramStyle.STYLE_COLORMAP);
- fillStyle.setColorMapScheme(JASHist2DHistogramStyle.COLORMAP_RAINBOW);
-
- // Make the statistics box invisible.
- region.getPlot().getStats().setVisible(false);
-
- // Set the general plot font (which is also the z-axis font).
- region.getPlot().setFont(BASIC_FONT);
-
- // Set the title font.
- region.getPlot().getTitleObject().setFont(TITLE_FONT);
-
- // Set generic axis titles.
- region.getPlot().getXAxis().setLabel("Data Label (Unit)");
- region.getPlot().getYAxis().setLabel("Data Label (Unit)");
-
- // Set the axis tick-mark fonts.
- region.getPlot().getXAxis().setFont(BASIC_FONT);
- region.getPlot().getYAxis().setFont(BASIC_FONT);
- region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
- region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
- }
-
- /**
- * Gets a list of all objects that are not directories in a tree.
- * @param tree - The tree from which to extract the object names.
- * @return Returns the object names as <code>String</code> objects
- * in a <code>List</code> collection.
- */
- private static final List<String> getHistograms(ITree tree) {
- return getHistograms(tree, "/");
- }
-
- /**
- * Gets a list of all objects that are not directories in a tree.
- * @param tree - The tree from which to extract the object names.
- * @return Returns the object names as <code>String</code> objects
- * in a <code>List</code> collection.
- */
- private static final List<String> getHistograms(ITree tree, String rootDir) {
- return getHistograms(tree, rootDir, new ArrayList<String>());
- }
-
- /**
- * Recursive method that gets all object names from a tree that
- * are not directories. Method should not be called directly, but
- * rather called only through the <code>getHistograms(ITree)</code>
- * method.
- * @param tree - The tree from which to obtain the object names.
- * @param directory - The directory in which to search for objects.
- * @param list - The list in which to place the objects.
- * @return Returns the <code>List</code> collection that was given
- * as an argument.
- */
- private static final List<String> getHistograms(ITree tree, String directory, List<String> list) {
- // Get the list of objects in the directory.
- String[] treeObjects = tree.listObjectNames(directory);
-
- // Print the objects.
- for(String objectName : treeObjects) {
- // Check if the object is a directory.
- boolean isDirectory = isDirectory(objectName);
-
- // If the object is a directory, get the histograms from it.
- if(isDirectory) {
- getHistograms(tree, objectName, list);
- }
-
- // If the object is a plot, add it to the list.
- else { list.add(objectName); }
- }
-
- // Return the list.
- return list;
- }
-
- /**
- * Checks whether a tree object is a directory.
- * @param object - The object to check.
- * @return Returns <code>true</code> if the object is a directory
- * and <code>false</code> otherwise.
- */
- private static final boolean isDirectory(String object) {
- return (object.toCharArray()[object.length() - 1] == '/');
- }
+ // Define plot fonts.
+ private static final Font BASIC_FONT = new Font("Calibri", Font.PLAIN, 20);
+ private static final Font AXIS_FONT = new Font("Calibri", Font.BOLD, 25);
+ private static final Font TITLE_FONT = new Font("Calibri", Font.BOLD, 35);
+
+ // Defines the color style options for plot data.
+ private enum ColorStyle {
+ MS_BLUE(new Color( 79, 129, 189), new Color( 36, 64, 97)), MS_ORANGE(new Color(247, 150, 70), new Color(152, 72, 6)),
+ MS_RED(new Color(192, 80, 77), new Color( 99, 36, 35)), GREY(new Color(166, 166, 166), new Color( 89, 89, 89)),
+ MS_GREEN(new Color(155, 187, 89), new Color( 79, 98, 40)), CRIMSON(new Color(161, 0, 0), new Color(104, 0, 0)),
+ RUST(new Color(161, 80, 0), new Color(105, 80, 0)), YELLOW(new Color(161, 161, 0), new Color(122, 109, 8)),
+ FOREST(new Color( 65, 102, 0), new Color( 37, 79, 0)), GREEN(new Color( 7, 132, 70), new Color( 7, 82, 30)),
+ TEAL(new Color( 0, 130, 130), new Color( 0, 90, 100)), CERULEAN(new Color( 0, 86, 130), new Color( 0, 28, 83)),
+ BLUE(new Color( 0, 33, 203), new Color( 0, 0, 137)), INDIGO(new Color( 68, 10, 127), new Color( 0, 0, 61)),
+ PURPLE(new Color(106, 0, 106), new Color( 63, 0, 56)), FUSCHIA(new Color(119, 0, 60), new Color( 60, 0, 60));
+
+ private final Color fillColor;
+ private final Color lineColor;
+
+ private ColorStyle(Color fillColor, Color lineColor) {
+ this.fillColor = fillColor;
+ this.lineColor = lineColor;
+ }
+
+ public Color getFillColor() { return fillColor; }
+
+ public Color getLineColor() { return lineColor; }
+ };
+
+ /**
+ * Loads all plots in a file and formats them according to the
+ * indicated style.
+ * @param args - Unused default executable parameter.
+ * @throws IOException Occurs if there is an issue opening the file.
+ */
+ public static void main(String[] args) throws IOException {
+ // Define the root directory for the plots.
+ String rootDir = "D:\\cygwin64\\home\\Kyle\\beam-plots\\base\\";
+ //String rootDir = "D:\\cygwin64\\home\\Kyle\\aprime-plots\\base\\readout-plots\\";
+
+ // Define the new name of the file containing the trigger plots.
+ String[] plotFile = {
+ rootDir + "compiled-plots.aida"
+ //rootDir + "15-MeV\\compiled-plots.aida",
+ //rootDir + "20-MeV\\compiled-plots.aida",
+ //rootDir + "30-MeV\\compiled-plots.aida",
+ //rootDir + "40-MeV\\compiled-plots.aida",
+ //rootDir + "50-MeV\\compiled-plots.aida"
+ };
+
+ // Define the names of each plot. This will be used for the
+ // legend in the case of multiple plots.
+ String[] treeName = {
+ "Background",
+ "15 MeV A'",
+ "20 MeV A'",
+ "30 MeV A'",
+ "40 MeV A'",
+ "50 MeV A'"
+ };
+
+ // Define the color style for the plots.
+ ColorStyle[] dataColorStyle = {
+ ColorStyle.GREY,
+ ColorStyle.MS_GREEN,
+ ColorStyle.MS_BLUE,
+ ColorStyle.MS_ORANGE,
+ ColorStyle.MS_RED,
+ ColorStyle.TEAL,
+ ColorStyle.CRIMSON,
+ ColorStyle.FOREST
+ };
+
+ // Get the plots file and open it.
+ IAnalysisFactory af = IAnalysisFactory.create();
+ ITree[] tree = new ITree[plotFile.length];
+ for(int i = 0; i < plotFile.length; i++) {
+ tree[i] = af.createTreeFactory().create(plotFile[i]);
+ if(tree[i] == null) { throw new IllegalArgumentException("Unable to load plot file."); }
+ }
+
+ // Get a list of all the histograms in the file.
+ List<List<String>> treeHistograms = new ArrayList<List<String>>(plotFile.length);
+ for(int i = 0; i < plotFile.length; i++) {
+ treeHistograms.add(getHistograms(tree[i], "/NoCuts/"));//, "/PassedAll/"));
+ }
+
+ // Create a plotter factory.
+ IPlotterFactory plotterFactory = af.createPlotterFactory();
+
+ // Plot each histogram and format it.
+ for(String histogram : treeHistograms.get(0)) {
+ // Get the plot from the tree and verify that it is a 1D
+ // or 2D histogram. Other types are not supported.
+ IManagedObject histObject = tree[0].find(histogram);
+ if(!(histObject instanceof IHistogram1D) && !(histObject instanceof IHistogram2D)) {
+ continue;
+ }
+
+ // Obtain the histogram object.
+ IBaseHistogram hist;
+ if(histObject instanceof IHistogram1D) { hist = (IHistogram1D) histObject; }
+ else { hist = (IHistogram2D) histObject; }
+
+ // Define whether this is an overlay plot and whether
+ // this is a one or two dimensional plot.
+ boolean overlay = plotFile.length > 1;
+ boolean twoDimensional = hist instanceof IHistogram2D;
+
+ // Generate the plotter and set its title. The plotter will
+ // use the title of the first tree's plot.
+ String plotTitle = hist.title();
+ IPlotter plotter = plotterFactory.create(plotTitle);
+
+ // For single plots and one-dimensional overlay plots,
+ // there should only be a single plotter region.
+ if(!twoDimensional || !overlay) { plotter.createRegions(1); }
+
+ // For two-dimensional overlay plots, create a region for
+ // each plot individually.
+ else { plotter.createRegions(2, (int) Math.ceil(plotFile.length / 2.0)); }
+
+ // Find the histogram in each of the trees and plot them
+ // all on the same region.
+ for(int i = 0; i < plotFile.length; i++) {
+ // Get the histogram from the tree.
+ IManagedObject treeObject = tree[i].find(histogram);
+ IBaseHistogram treeHist;
+ if(treeObject instanceof IHistogram1D) { treeHist = (IHistogram1D) treeObject; }
+ else { treeHist = (IHistogram2D) treeObject; }
+
+ // Display the plot.
+ if(treeHist != null) {
+ // Set the title of plot to the name associated with
+ // its tree. This ensures that the correct name will
+ // appear on the legend.
+ if(plotFile.length > 1) {
+ treeHist.setTitle(treeName[i]);
+ }
+
+ // Plot the tree's data in the plotter region.
+ if(!twoDimensional || !overlay) { plotter.region(0).plot(treeHist); }
+ else {
+ plotter.region(i).plot(treeHist);
+ setDefault2DStyle(((PlotterRegion) plotter.region(i)), dataColorStyle);
+ }
+ }
+ }
+
+ // Format the plot region.
+ if(!twoDimensional) { setDefault1DStyle(((PlotterRegion) plotter.region(0)), dataColorStyle); }
+ else { setDefault2DStyle(((PlotterRegion) plotter.region(0)), dataColorStyle); }
+
+ // Show the plotter.
+ plotter.region(0).setTitle(plotTitle);
+ plotter.setParameter("plotterWidth", "750");
+ plotter.setParameter("plotterHeight", "600");
+ //plotter.setParameter("plotterWidth", "2000");
+ //plotter.setParameter("plotterHeight", "1200");
+ plotter.show();
+ }
+
+ // Close the trees.
+ for(int i = 0; i < plotFile.length; i++) {
+ tree[i].close();
+ }
+ }
+
+ /**
+ * Sets the plot display formatting for 1D plots.
+ * @param region - The plotter region to format.
+ * @param color - The data color settings to use.
+ */
+ private static final void setDefault1DStyle(PlotterRegion region, ColorStyle[] color) {
+ // Get the names of each plot on in the region.
+ String[] dataNames = region.getAllDataNames();
+
+ // Check whether this is an overlay plot. Overlay plots contain
+ // more than one data name.
+ boolean overlay = (dataNames.length > 1 ? true : false);
+
+ // Iterate over each plot in the region.
+ for(int i = 0; i < dataNames.length; i++) {
+ // Set the overlay style if needed.
+ if(overlay) {
+ // Get the fill style for the current data type.
+ JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
+
+ // Set the histogram style to display thick-lined bars
+ // with no fill. The color is set by the "color" argument.
+ fillStyle.setHistogramFill(false);
+ fillStyle.setHistogramBarLineWidth(3);
+ fillStyle.setHistogramBarLineColor(color[i].getFillColor());
+
+ // Set the legend text style.
+ region.getPlot().getLegend().setFont(new Font("Calibri", Font.PLAIN, 20));
+ }
+
+ // Otherwise, set the fill style for a single plot.
+ else {
+ // Get the fill style for the current data type.
+ JASHist1DHistogramStyle fillStyle = (JASHist1DHistogramStyle) region.getDataForName(dataNames[i]).getStyle();
+
+ // Set the histogram style to display thick-lined bars
+ // with a fill color. The colors are defined by the
+ // "color" argument.
+ fillStyle.setHistogramBarLineWidth(3);
+ fillStyle.setHistogramBarColor(color[i].getFillColor());
+ fillStyle.setHistogramBarLineColor(color[i].getLineColor());
+ }
+
+ // Set the statistics box style.
+ region.getPlot().getStats().setVisible(true);
+ region.getPlot().getStats().setFont(BASIC_FONT);
+
+ // Set the title font.
+ region.getPlot().getTitleObject().setFont(TITLE_FONT);
+
+ // Set generic axis titles.
+ region.getPlot().getXAxis().setLabel("Data Label (Unit)");
+ region.getPlot().getYAxis().setLabel("Count");
+
+ // Set the axis tick-mark fonts.
+ region.getPlot().getXAxis().setFont(BASIC_FONT);
+ region.getPlot().getYAxis().setFont(BASIC_FONT);
+ region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
+ region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
+ }
+ }
+
+ /**
+ * Sets the plot display formatting for 1D plots.
+ * @param region - The plotter region to format.
+ * @param color - The data color settings to use.
+ */
+ private static final void setDefault2DStyle(PlotterRegion region, ColorStyle[] color) {
+ // Get the fill style object. 2D plots should never be overlay
+ // plots, so there should only ever be one data name.
+ JASHist2DHistogramStyle fillStyle = (JASHist2DHistogramStyle) region.getDataForName(region.getAllDataNames()[0]).getStyle();
+
+ // Set the fill style for a two-dimensional plot.
+ fillStyle.setLogZ(true);
+ fillStyle.setHistStyle(JASHist2DHistogramStyle.STYLE_COLORMAP);
+ fillStyle.setColorMapScheme(JASHist2DHistogramStyle.COLORMAP_RAINBOW);
+
+ // Make the statistics box invisible.
+ region.getPlot().getStats().setVisible(false);
+
+ // Set the general plot font (which is also the z-axis font).
+ region.getPlot().setFont(BASIC_FONT);
+
+ // Set the title font.
+ region.getPlot().getTitleObject().setFont(TITLE_FONT);
+
+ // Set generic axis titles.
+ region.getPlot().getXAxis().setLabel("Data Label (Unit)");
+ region.getPlot().getYAxis().setLabel("Data Label (Unit)");
+
+ // Set the axis tick-mark fonts.
+ region.getPlot().getXAxis().setFont(BASIC_FONT);
+ region.getPlot().getYAxis().setFont(BASIC_FONT);
+ region.getPlot().getXAxis().getLabelObject().setFont(AXIS_FONT);
+ region.getPlot().getYAxis().getLabelObject().setFont(AXIS_FONT);
+ }
+
+ /**
+ * Gets a list of all objects that are not directories in a tree.
+ * @param tree - The tree from which to extract the object names.
+ * @return Returns the object names as <code>String</code> objects
+ * in a <code>List</code> collection.
+ */
+ private static final List<String> getHistograms(ITree tree) {
+ return getHistograms(tree, "/");
+ }
+
+ /**
+ * Gets a list of all objects that are not directories in a tree.
+ * @param tree - The tree from which to extract the object names.
+ * @return Returns the object names as <code>String</code> objects
+ * in a <code>List</code> collection.
+ */
+ private static final List<String> getHistograms(ITree tree, String rootDir) {
+ return getHistograms(tree, rootDir, new ArrayList<String>());
+ }
+
+ /**
+ * Recursive method that gets all object names from a tree that
+ * are not directories. Method should not be called directly, but
+ * rather called only through the <code>getHistograms(ITree)</code>
+ * method.
+ * @param tree - The tree from which to obtain the object names.
+ * @param directory - The directory in which to search for objects.
+ * @param list - The list in which to place the objects.
+ * @return Returns the <code>List</code> collection that was given
+ * as an argument.
+ */
+ private static final List<String> getHistograms(ITree tree, String directory, List<String> list) {
+ // Get the list of objects in the directory.
+ String[] treeObjects = tree.listObjectNames(directory);
+
+ // Print the objects.
+ for(String objectName : treeObjects) {
+ // Check if the object is a directory.
+ boolean isDirectory = isDirectory(objectName);
+
+ // If the object is a directory, get the histograms from it.
+ if(isDirectory) {
+ getHistograms(tree, objectName, list);
+ }
+
+ // If the object is a plot, add it to the list.
+ else { list.add(objectName); }
+ }
+
+ // Return the list.
+ return list;
+ }
+
+ /**
+ * Checks whether a tree object is a directory.
+ * @param object - The object to check.
+ * @return Returns <code>true</code> if the object is a directory
+ * and <code>false</code> otherwise.
+ */
+ private static final boolean isDirectory(String object) {
+ return (object.toCharArray()[object.length() - 1] == '/');
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/FADCVariableTriggerFEEDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/FADCVariableTriggerFEEDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/FADCVariableTriggerFEEDriver.java Wed Apr 27 11:11:32 2016
@@ -25,64 +25,64 @@
// ==================================================================
// ==== Trigger Cut Default Parameters ==============================
// ==================================================================
- private int minHitCount = 1; // Minimum required cluster hit count threshold. (Hits)
- private double seedEnergyHigh = Double.MAX_VALUE; // Maximum allowed cluster seed energy. (GeV)
- private double seedEnergyLow = Double.MIN_VALUE; // Minimum required cluster seed energy. (GeV)
- private double clusterEnergyHigh = 1.5 * EcalUtils.GeV; // Maximum allowed cluster total energy. (GeV)
- private double clusterEnergyLow = .1 * EcalUtils.GeV; // Minimum required cluster total energy. (GeV)
- private double energySumHigh = 1.9 * EcalUtils.GeV; // Maximum allowed pair energy sum. (GeV)
- private double energySumLow = 0.0 * EcalUtils.GeV; // Minimum required pair energy sum. (GeV)
- private double energyDifferenceHigh = 2.2 * EcalUtils.GeV; // Maximum allowed pair energy difference. (GeV)
- private double energySlopeLow = 1.1; // Minimum required pair energy slope value.
- private double coplanarityHigh = 35; // Maximum allowed pair coplanarity deviation. (Degrees)
+ private int minHitCount = 1; // Minimum required cluster hit count threshold. (Hits)
+ private double seedEnergyHigh = Double.MAX_VALUE; // Maximum allowed cluster seed energy. (GeV)
+ private double seedEnergyLow = Double.MIN_VALUE; // Minimum required cluster seed energy. (GeV)
+ private double clusterEnergyHigh = 1.5 * EcalUtils.GeV; // Maximum allowed cluster total energy. (GeV)
+ private double clusterEnergyLow = .1 * EcalUtils.GeV; // Minimum required cluster total energy. (GeV)
+ private double energySumHigh = 1.9 * EcalUtils.GeV; // Maximum allowed pair energy sum. (GeV)
+ private double energySumLow = 0.0 * EcalUtils.GeV; // Minimum required pair energy sum. (GeV)
+ private double energyDifferenceHigh = 2.2 * EcalUtils.GeV; // Maximum allowed pair energy difference. (GeV)
+ private double energySlopeLow = 1.1; // Minimum required pair energy slope value.
+ private double coplanarityHigh = 35; // Maximum allowed pair coplanarity deviation. (Degrees)
// ==================================================================
// ==== Trigger General Default Parameters ==========================
// ==================================================================
- private String clusterCollectionName = "EcalClusters"; // Name for the LCIO cluster collection.
- private int pairCoincidence = 2; // Maximum allowed time difference between clusters. (4 ns clock-cycles)
- private double energySlopeParamF = 0.005500; // A parameter value used for the energy slope calculation.
- private double originX = 1393.0 * Math.tan(0.03052); // ECal mid-plane, defined by photon beam position (30.52 mrad) at ECal face (z=1393 mm)
- private int backgroundLevel = -1; // Automatically sets the cuts to achieve a predetermined background rate.
+ private String clusterCollectionName = "EcalClusters"; // Name for the LCIO cluster collection.
+ private int pairCoincidence = 2; // Maximum allowed time difference between clusters. (4 ns clock-cycles)
+ private double energySlopeParamF = 0.005500; // A parameter value used for the energy slope calculation.
+ private double originX = 1393.0 * Math.tan(0.03052); // ECal mid-plane, defined by photon beam position (30.52 mrad) at ECal face (z=1393 mm)
+ private int backgroundLevel = -1; // Automatically sets the cuts to achieve a predetermined background rate.
// ==================================================================
// ==== Driver Internal Variables ===================================
// ==================================================================
- private Queue<List<Cluster>> topClusterQueue = null; // Store clusters on the top half of the calorimeter.
- private Queue<List<Cluster>> botClusterQueue = null; // Store clusters on the bottom half of the calorimeter.
- private int allClusters = 0; // Track the number of clusters processed.
- private int allPairs = 0; // Track the number of cluster pairs processed.
- private int clusterTotalEnergyCount = 0; // Track the clusters which pass the total energy cut.
- private int clusterSeedEnergyCount = 0; // Track the clusters which pass the seed energy cut.
- private int clusterHitCountCount = 0; // Track the clusters which pass the hit count cut.
- private int pairEnergySumCount = 0; // Track the pairs which pass the energy sum cut.
- private int pairEnergyDifferenceCount = 0; // Track the pairs which pass the energy difference cut.
- private int pairEnergySlopeCount = 0; // Track the pairs which pass the energy slope cut.
- private int pairCoplanarityCount = 0; // Track the pairs which pass the coplanarity cut.
+ private Queue<List<Cluster>> topClusterQueue = null; // Store clusters on the top half of the calorimeter.
+ private Queue<List<Cluster>> botClusterQueue = null; // Store clusters on the bottom half of the calorimeter.
+ private int allClusters = 0; // Track the number of clusters processed.
+ private int allPairs = 0; // Track the number of cluster pairs processed.
+ private int clusterTotalEnergyCount = 0; // Track the clusters which pass the total energy cut.
+ private int clusterSeedEnergyCount = 0; // Track the clusters which pass the seed energy cut.
+ private int clusterHitCountCount = 0; // Track the clusters which pass the hit count cut.
+ private int pairEnergySumCount = 0; // Track the pairs which pass the energy sum cut.
+ private int pairEnergyDifferenceCount = 0; // Track the pairs which pass the energy difference cut.
+ private int pairEnergySlopeCount = 0; // Track the pairs which pass the energy slope cut.
+ private int pairCoplanarityCount = 0; // Track the pairs which pass the coplanarity cut.
/**
* Prints out the results of the trigger at the end of the run.
*/
@Override
public void endOfData() {
- // Print out the results of the trigger cuts.
- System.out.printf("Trigger Processing Results%n");
- System.out.printf("\tSingle-Cluster Cuts%n");
- System.out.printf("\t\tTotal Clusters Processed :: %d%n", allClusters);
- System.out.printf("\t\tPassed Seed Energy Cut :: %d%n", clusterSeedEnergyCount);
- System.out.printf("\t\tPassed Hit Count Cut :: %d%n", clusterHitCountCount);
- System.out.printf("\t\tPassed Total Energy Cut :: %d%n", clusterTotalEnergyCount);
- System.out.printf("%n");
- System.out.printf("\tCluster Pair Cuts%n");
- System.out.printf("\t\tTotal Pairs Processed :: %d%n", allPairs);
- System.out.printf("\t\tPassed Energy Sum Cut :: %d%n", pairEnergySumCount);
- System.out.printf("\t\tPassed Energy Difference Cut :: %d%n", pairEnergyDifferenceCount);
- System.out.printf("\t\tPassed Energy Slope Cut :: %d%n", pairEnergySlopeCount);
- System.out.printf("\t\tPassed Coplanarity Cut :: %d%n", pairCoplanarityCount);
- System.out.printf("%n");
- System.out.printf("\tTrigger Count :: %d%n", numTriggers);
-
- // Run the superclass method.
+ // Print out the results of the trigger cuts.
+ System.out.printf("Trigger Processing Results%n");
+ System.out.printf("\tSingle-Cluster Cuts%n");
+ System.out.printf("\t\tTotal Clusters Processed :: %d%n", allClusters);
+ System.out.printf("\t\tPassed Seed Energy Cut :: %d%n", clusterSeedEnergyCount);
+ System.out.printf("\t\tPassed Hit Count Cut :: %d%n", clusterHitCountCount);
+ System.out.printf("\t\tPassed Total Energy Cut :: %d%n", clusterTotalEnergyCount);
+ System.out.printf("%n");
+ System.out.printf("\tCluster Pair Cuts%n");
+ System.out.printf("\t\tTotal Pairs Processed :: %d%n", allPairs);
+ System.out.printf("\t\tPassed Energy Sum Cut :: %d%n", pairEnergySumCount);
+ System.out.printf("\t\tPassed Energy Difference Cut :: %d%n", pairEnergyDifferenceCount);
+ System.out.printf("\t\tPassed Energy Slope Cut :: %d%n", pairEnergySlopeCount);
+ System.out.printf("\t\tPassed Coplanarity Cut :: %d%n", pairCoplanarityCount);
+ System.out.printf("%n");
+ System.out.printf("\tTrigger Count :: %d%n", numTriggers);
+
+ // Run the superclass method.
super.endOfData();
}
@@ -92,53 +92,53 @@
*/
@Override
public void process(EventHeader event) {
- // Process the list of clusters for the event, if it exists.
+ // Process the list of clusters for the event, if it exists.
if (event.hasCollection(Cluster.class, clusterCollectionName)) {
- // Get the collection of clusters.
- List<Cluster> clusterList = event.get(Cluster.class, clusterCollectionName);
-
- // Create a list to hold clusters which pass the single
- // cluster cuts.
- List<Cluster> goodClusterList = new ArrayList<Cluster>(clusterList.size());
-
- // Sort through the cluster list and add clusters that pass
- // the single cluster cuts to the good list.
- clusterLoop:
- for(Cluster cluster : clusterList) {
- // Increment the number of processed clusters.
- allClusters++;
-
- // ==== Seed Hit Energy Cut ====================================
- // =============================================================
- // If the cluster fails the cut, skip to the next cluster.
- if(!clusterSeedEnergyCut(cluster)) { continue clusterLoop; }
-
- // Otherwise, note that it passed the cut.
- clusterSeedEnergyCount++;
-
- // ==== Cluster Hit Count Cut ==================================
- // =============================================================
- // If the cluster fails the cut, skip to the next cluster.
- if(!clusterHitCountCut(cluster)) { continue clusterLoop; }
-
- // Otherwise, note that it passed the cut.
- clusterHitCountCount++;
-
- // ==== Cluster Total Energy Cut ===============================
- // =============================================================
- // If the cluster fails the cut, skip to the next cluster.
- if(!clusterTotalEnergyCut(cluster)) { continue clusterLoop; }
-
- // Otherwise, note that it passed the cut.
- clusterTotalEnergyCount++;
-
- // A cluster that passes all of the single-cluster cuts
- // can be used in cluster pairs.
- goodClusterList.add(cluster);
- }
-
- // Put the good clusters into the cluster queue.
- updateClusterQueues(goodClusterList);
+ // Get the collection of clusters.
+ List<Cluster> clusterList = event.get(Cluster.class, clusterCollectionName);
+
+ // Create a list to hold clusters which pass the single
+ // cluster cuts.
+ List<Cluster> goodClusterList = new ArrayList<Cluster>(clusterList.size());
+
+ // Sort through the cluster list and add clusters that pass
+ // the single cluster cuts to the good list.
+ clusterLoop:
+ for(Cluster cluster : clusterList) {
+ // Increment the number of processed clusters.
+ allClusters++;
+
+ // ==== Seed Hit Energy Cut ====================================
+ // =============================================================
+ // If the cluster fails the cut, skip to the next cluster.
+ if(!clusterSeedEnergyCut(cluster)) { continue clusterLoop; }
+
+ // Otherwise, note that it passed the cut.
+ clusterSeedEnergyCount++;
+
+ // ==== Cluster Hit Count Cut ==================================
+ // =============================================================
+ // If the cluster fails the cut, skip to the next cluster.
+ if(!clusterHitCountCut(cluster)) { continue clusterLoop; }
+
+ // Otherwise, note that it passed the cut.
+ clusterHitCountCount++;
+
+ // ==== Cluster Total Energy Cut ===============================
+ // =============================================================
+ // If the cluster fails the cut, skip to the next cluster.
+ if(!clusterTotalEnergyCut(cluster)) { continue clusterLoop; }
+
+ // Otherwise, note that it passed the cut.
+ clusterTotalEnergyCount++;
+
+ // A cluster that passes all of the single-cluster cuts
+ // can be used in cluster pairs.
+ goodClusterList.add(cluster);
+ }
+
+ // Put the good clusters into the cluster queue.
+ updateClusterQueues(goodClusterList);
}
// Perform the superclass event processing.
@@ -152,7 +152,7 @@
* be set. Actual background rates equal about (5 * backgroundLevel) kHz.
*/
public void setBackgroundLevel(int backgroundLevel) {
- this.backgroundLevel = backgroundLevel;
+ this.backgroundLevel = backgroundLevel;
}
/**
@@ -215,7 +215,7 @@
* @param energySlopeLow - The parameter value.
*/
public void setEnergySlopeLow(double energySlopeLow) {
- this.energySlopeLow = energySlopeLow;
+ this.energySlopeLow = energySlopeLow;
}
/**
@@ -299,12 +299,12 @@
*/
@Override
public void startOfData() {
- // Make sure that a valid cluster collection name has been
- // defined. If it has not, throw an exception.
+ // Make sure that a valid cluster collection name has been
+ // defined. If it has not, throw an exception.
if (clusterCollectionName == null) {
throw new RuntimeException("The parameter clusterCollectionName was not set!");
}
-
+
// Initialize the top and bottom cluster queues.
topClusterQueue = new LinkedList<List<Cluster>>();
botClusterQueue = new LinkedList<List<Cluster>>();
@@ -345,9 +345,9 @@
for (Cluster botCluster : botClusterQueue.element()) {
for (List<Cluster> topClusters : topClusterQueue) {
for (Cluster topCluster : topClusters) {
- // The first cluster in a pair should always be
- // the higher energy cluster. If the top cluster
- // is higher energy, it goes first.
+ // The first cluster in a pair should always be
+ // the higher energy cluster. If the top cluster
+ // is higher energy, it goes first.
if (topCluster.getEnergy() > botCluster.getEnergy()) {
Cluster[] clusterPair = {topCluster, botCluster};
clusterPairs.add(clusterPair);
@@ -366,24 +366,24 @@
return clusterPairs;
}
- /**
- * Determines if the event produces a trigger.
- *
- * @return Returns <code>true</code> if the event produces a trigger
- * and <code>false</code> if it does not.
- */
- @Override
- protected boolean triggerDecision(EventHeader event) {
- // If there is a list of clusters present for this event,
- // check whether it passes the trigger conditions.
- if (event.hasCollection(Cluster.class, clusterCollectionName)) {
- return testTrigger();
+ /**
+ * Determines if the event produces a trigger.
+ *
+ * @return Returns <code>true</code> if the event produces a trigger
+ * and <code>false</code> if it does not.
+ */
+ @Override
+ protected boolean triggerDecision(EventHeader event) {
+ // If there is a list of clusters present for this event,
+ // check whether it passes the trigger conditions.
+ if (event.hasCollection(Cluster.class, clusterCollectionName)) {
+ return testTrigger();
}
// Otherwise, this event can not produce a trigger and should
// return false automatically.
else { return false; }
- }
+ }
/**
* Checks whether the argument cluster possesses the minimum
@@ -394,7 +394,7 @@
* and <code>false</code> if the cluster does not.
*/
private boolean clusterHitCountCut(Cluster cluster) {
- return (getValueClusterHitCount(cluster) >= minHitCount);
+ return (getValueClusterHitCount(cluster) >= minHitCount);
}
/**
@@ -406,12 +406,12 @@
* and <code>false</code> if the cluster does not.
*/
private boolean clusterSeedEnergyCut(Cluster cluster) {
- // Get the cluster seed energy.
- double energy = getValueClusterSeedEnergy(cluster);
-
- // Check that it is above the minimum threshold and below the
- // maximum threshold.
- return (energy < seedEnergyHigh) && (energy > seedEnergyLow);
+ // Get the cluster seed energy.
+ double energy = getValueClusterSeedEnergy(cluster);
+
+ // Check that it is above the minimum threshold and below the
+ // maximum threshold.
+ return (energy < seedEnergyHigh) && (energy > seedEnergyLow);
}
/**
@@ -423,12 +423,12 @@
* and <code>false</code> if the cluster does not.
*/
private boolean clusterTotalEnergyCut(Cluster cluster) {
- // Get the total cluster energy.
- double energy = getValueClusterTotalEnergy(cluster);
-
- // Check that it is above the minimum threshold and below the
- // maximum threshold.
- return (energy < clusterEnergyHigh) && (energy > clusterEnergyLow);
+ // Get the total cluster energy.
+ double energy = getValueClusterTotalEnergy(cluster);
+
+ // Check that it is above the minimum threshold and below the
+ // maximum threshold.
+ return (energy < clusterEnergyHigh) && (energy > clusterEnergyLow);
}
/**
@@ -450,7 +450,7 @@
* @return Returns the cut value.
*/
private double getValueClusterTotalEnergy(Cluster cluster) {
- return cluster.getEnergy();
+ return cluster.getEnergy();
}
/**
@@ -461,7 +461,7 @@
* @return Returns the cut value.
*/
private int getValueClusterHitCount(Cluster cluster) {
- return cluster.getCalorimeterHits().size();
+ return cluster.getCalorimeterHits().size();
}
/**
@@ -472,7 +472,7 @@
* @return Returns the cut value.
*/
private double getValueClusterSeedEnergy(Cluster cluster) {
- return cluster.getCalorimeterHits().get(0).getCorrectedEnergy();
+ return cluster.getCalorimeterHits().get(0).getCorrectedEnergy();
}
/**
@@ -483,16 +483,16 @@
* @return Returns the cut value.
*/
private double getValueCoplanarity(Cluster[] clusterPair) {
- // Get the cluster angles.
- double[] clusterAngle = new double[2];
- for(int i = 0; i < 2; i++) {
+ // Get the cluster angles.
+ double[] clusterAngle = new double[2];
+ for(int i = 0; i < 2; i++) {
double position[] = clusterPair[i].getCalorimeterHits().get(0).getPosition();
//clusterAngle[i] = Math.toDegrees(Math.atan2(position[1], position[0] - originX));
//clusterAngle[i] = (clusterAngle[i] + 180.0) % 180.0;
clusterAngle[i] = (Math.toDegrees(Math.atan2(position[1], position[0] - originX)) + 180.0) % 180.0;
- }
-
- // Calculate the coplanarity cut value.
+ }
+
+ // Calculate the coplanarity cut value.
return Math.abs(clusterAngle[1] - clusterAngle[0]);
}
@@ -504,7 +504,7 @@
* @return Returns the cut value.
*/
private double getValueEnergyDifference(Cluster[] clusterPair) {
- return clusterPair[0].getEnergy() - clusterPair[1].getEnergy();
+ return clusterPair[0].getEnergy() - clusterPair[1].getEnergy();
}
/**
@@ -515,15 +515,15 @@
* @return Returns the cut value.
*/
private double getValueEnergySlope(Cluster[] clusterPair) {
- // E + R*F
- // Get the low energy cluster energy.
- double slopeParamE = clusterPair[1].getEnergy();
-
- // Get the low energy cluster radial distance.
- double slopeParamR = getClusterDistance(clusterPair[1]);
-
- // Calculate the energy slope.
- return slopeParamE + slopeParamR * energySlopeParamF;
+ // E + R*F
+ // Get the low energy cluster energy.
+ double slopeParamE = clusterPair[1].getEnergy();
+
+ // Get the low energy cluster radial distance.
+ double slopeParamR = getClusterDistance(clusterPair[1]);
+
+ // Calculate the energy slope.
+ return slopeParamE + slopeParamR * energySlopeParamF;
}
/**
@@ -534,7 +534,7 @@
* @return Returns the cut value.
*/
private double getValueEnergySum(Cluster[] clusterPair) {
- return clusterPair[0].getEnergy() + clusterPair[1].getEnergy();
+ return clusterPair[0].getEnergy() + clusterPair[1].getEnergy();
}
/**
@@ -570,7 +570,7 @@
* @return true if pair is found, false otherwise
*/
private boolean pairEnergySlopeCut(Cluster[] clusterPair) {
- return (getValueEnergySlope(clusterPair) > energySlopeLow);
+ return (getValueEnergySlope(clusterPair) > energySlopeLow);
}
/**
@@ -582,175 +582,175 @@
* the cut and <code>false</code> if it does not.
*/
private boolean pairEnergySumCut(Cluster[] clusterPair) {
- // Get the energy sum value.
- double energySum = getValueEnergySum(clusterPair);
-
- // Check that it is within the allowed range.
+ // Get the energy sum value.
+ double energySum = getValueEnergySum(clusterPair);
+
+ // Check that it is within the allowed range.
return (energySum < energySumHigh) && (energySum > energySumLow);
}
-
+
private void setBackgroundCuts(int backgroundLevel) {
- // Make sure that the background level is valid.
- if(backgroundLevel < 1 || backgroundLevel > 10) {
- throw new RuntimeException(String.format("Trigger cuts are undefined for background level %d.", backgroundLevel));
- }
-
- // Otherwise, set the trigger cuts. Certain cuts are constant
- // across all background levels.
- clusterEnergyLow = 0.000;
- seedEnergyLow = 0.100;
-
- // Set the variable values.
- if(backgroundLevel == 1) {
- clusterEnergyHigh = 1.700;
- seedEnergyHigh = 1.300;
- energySumLow = 0.400;
- energySumHigh = 2.00;
- energyDifferenceHigh = 1.500;
- energySlopeLow = 1.0;
- coplanarityHigh = 40;
- minHitCount = 2;
- } else if(backgroundLevel == 2) {
- clusterEnergyHigh = 1.600;
- seedEnergyHigh = 1.200;
- energySumLow = 0.300;
- energySumHigh = 2.00;
- energyDifferenceHigh = 1.400;
- energySlopeLow = 0.8;
- coplanarityHigh = 40;
- minHitCount = 2;
- } else if(backgroundLevel == 3) {
- clusterEnergyHigh = 1.600;
- seedEnergyHigh = 1.200;
- energySumLow = 0.200;
- energySumHigh = 2.000;
- energyDifferenceHigh = 1.400;
- energySlopeLow = 0.7;
- coplanarityHigh = 40;
- minHitCount = 2;
- } else if(backgroundLevel == 4) {
- clusterEnergyHigh = 1.500;
- seedEnergyHigh = 1.200;
- energySumLow = 0.500;
- energySumHigh = 1.950;
- energyDifferenceHigh = 1.400;
- energySlopeLow = 0.6;
- coplanarityHigh = 40;
- minHitCount = 2;
- } else if(backgroundLevel == 5) {
- clusterEnergyHigh = 1.500;
- seedEnergyHigh = 1.200;
- energySumLow = 0.400;
- energySumHigh = 2.000;
- energyDifferenceHigh = 1.400;
- energySlopeLow = 0.6;
- coplanarityHigh = 45;
- minHitCount = 2;
- } else if(backgroundLevel == 6) {
- clusterEnergyHigh = 1.500;
- seedEnergyHigh = 1.200;
- energySumLow = 0.200;
- energySumHigh = 1.950;
- energyDifferenceHigh = 1.400;
- energySlopeLow = 0.6;
- coplanarityHigh = 55;
- minHitCount = 2;
- } else if(backgroundLevel == 7) {
- clusterEnergyHigh = 1.700;
- seedEnergyHigh = 1.200;
- energySumLow = 0.200;
- energySumHigh = 2.000;
- energyDifferenceHigh = 1.500;
- energySlopeLow = 0.6;
- coplanarityHigh = 60;
- minHitCount = 2;
- } else if(backgroundLevel == 8) {
- clusterEnergyHigh = 1.700;
- seedEnergyHigh = 1.300;
- energySumLow = 0.200;
- energySumHigh = 2.000;
- energyDifferenceHigh = 1.500;
- energySlopeLow = 0.6;
- coplanarityHigh = 65;
- minHitCount = 2;
- } else if(backgroundLevel == 9) {
- clusterEnergyHigh = 1.500;
- seedEnergyHigh = 1.200;
- energySumLow = 0.400;
- energySumHigh = 1.950;
- energyDifferenceHigh = 1.400;
- energySlopeLow = 0.5;
- coplanarityHigh = 60;
- minHitCount = 2;
- } else if(backgroundLevel == 10) {
- clusterEnergyHigh = 1.500;
- seedEnergyHigh = 1.200;
- energySumLow = 0.400;
- energySumHigh = 2.000;
- energyDifferenceHigh = 1.400;
- energySlopeLow = 0.5;
- coplanarityHigh = 65;
- minHitCount = 2;
- }
- }
-
- /**
- * Tests all of the current cluster pairs for triggers.
- *
- * @return Returns <code>true</code> if one of the cluster pairs
- * passes all of the cluster cuts and <code>false</code> otherwise.
- */
+ // Make sure that the background level is valid.
+ if(backgroundLevel < 1 || backgroundLevel > 10) {
+ throw new RuntimeException(String.format("Trigger cuts are undefined for background level %d.", backgroundLevel));
+ }
+
+ // Otherwise, set the trigger cuts. Certain cuts are constant
+ // across all background levels.
+ clusterEnergyLow = 0.000;
+ seedEnergyLow = 0.100;
+
+ // Set the variable values.
+ if(backgroundLevel == 1) {
+ clusterEnergyHigh = 1.700;
+ seedEnergyHigh = 1.300;
+ energySumLow = 0.400;
+ energySumHigh = 2.00;
+ energyDifferenceHigh = 1.500;
+ energySlopeLow = 1.0;
+ coplanarityHigh = 40;
+ minHitCount = 2;
+ } else if(backgroundLevel == 2) {
+ clusterEnergyHigh = 1.600;
+ seedEnergyHigh = 1.200;
+ energySumLow = 0.300;
+ energySumHigh = 2.00;
+ energyDifferenceHigh = 1.400;
+ energySlopeLow = 0.8;
+ coplanarityHigh = 40;
+ minHitCount = 2;
+ } else if(backgroundLevel == 3) {
+ clusterEnergyHigh = 1.600;
+ seedEnergyHigh = 1.200;
+ energySumLow = 0.200;
+ energySumHigh = 2.000;
+ energyDifferenceHigh = 1.400;
+ energySlopeLow = 0.7;
+ coplanarityHigh = 40;
+ minHitCount = 2;
+ } else if(backgroundLevel == 4) {
+ clusterEnergyHigh = 1.500;
+ seedEnergyHigh = 1.200;
+ energySumLow = 0.500;
+ energySumHigh = 1.950;
+ energyDifferenceHigh = 1.400;
+ energySlopeLow = 0.6;
+ coplanarityHigh = 40;
+ minHitCount = 2;
+ } else if(backgroundLevel == 5) {
+ clusterEnergyHigh = 1.500;
+ seedEnergyHigh = 1.200;
+ energySumLow = 0.400;
+ energySumHigh = 2.000;
+ energyDifferenceHigh = 1.400;
+ energySlopeLow = 0.6;
+ coplanarityHigh = 45;
+ minHitCount = 2;
+ } else if(backgroundLevel == 6) {
+ clusterEnergyHigh = 1.500;
+ seedEnergyHigh = 1.200;
+ energySumLow = 0.200;
+ energySumHigh = 1.950;
+ energyDifferenceHigh = 1.400;
+ energySlopeLow = 0.6;
+ coplanarityHigh = 55;
+ minHitCount = 2;
+ } else if(backgroundLevel == 7) {
+ clusterEnergyHigh = 1.700;
+ seedEnergyHigh = 1.200;
+ energySumLow = 0.200;
+ energySumHigh = 2.000;
+ energyDifferenceHigh = 1.500;
+ energySlopeLow = 0.6;
+ coplanarityHigh = 60;
+ minHitCount = 2;
+ } else if(backgroundLevel == 8) {
+ clusterEnergyHigh = 1.700;
+ seedEnergyHigh = 1.300;
+ energySumLow = 0.200;
+ energySumHigh = 2.000;
+ energyDifferenceHigh = 1.500;
+ energySlopeLow = 0.6;
+ coplanarityHigh = 65;
+ minHitCount = 2;
+ } else if(backgroundLevel == 9) {
+ clusterEnergyHigh = 1.500;
+ seedEnergyHigh = 1.200;
+ energySumLow = 0.400;
+ energySumHigh = 1.950;
+ energyDifferenceHigh = 1.400;
+ energySlopeLow = 0.5;
+ coplanarityHigh = 60;
+ minHitCount = 2;
+ } else if(backgroundLevel == 10) {
+ clusterEnergyHigh = 1.500;
+ seedEnergyHigh = 1.200;
+ energySumLow = 0.400;
+ energySumHigh = 2.000;
+ energyDifferenceHigh = 1.400;
+ energySlopeLow = 0.5;
+ coplanarityHigh = 65;
+ minHitCount = 2;
+ }
+ }
+
+ /**
+ * Tests all of the current cluster pairs for triggers.
+ *
+ * @return Returns <code>true</code> if one of the cluster pairs
+ * passes all of the cluster cuts and <code>false</code> otherwise.
+ */
private boolean testTrigger() {
- // Get the list of cluster pairs.
- List<Cluster[]> clusterPairs = getClusterPairsTopBot();
+ // Get the list of cluster pairs.
+ List<Cluster[]> clusterPairs = getClusterPairsTopBot();
// Iterate over the cluster pairs and perform each of the cluster
// pair cuts on them. A cluster pair that passes all of the
// cuts registers as a trigger.
- pairLoop:
+ pairLoop:
for (Cluster[] clusterPair : clusterPairs) {
- // Increment the number of processed cluster pairs.
- allPairs++;
-
- // ==== Pair Energy Sum Cut ====================================
- // =============================================================
- // If the cluster fails the cut, skip to the next pair.
- if(!pairEnergySumCut(clusterPair)) { continue pairLoop; }
-
- // Otherwise, note that it passed the cut.
- pairEnergySumCount++;
-
- // ==== Pair Energy Difference Cut =============================
- // =============================================================
- // If the cluster fails the cut, skip to the next pair.
- if(!pairEnergyDifferenceCut(clusterPair)) { continue pairLoop; }
-
- // Otherwise, note that it passed the cut.
- pairEnergyDifferenceCount++;
-
- // ==== Pair Energy Slope Cut ==================================
- // =============================================================
- // If the cluster fails the cut, skip to the next pair.
- //if(!energyDistanceCut(clusterPair)) { continue pairLoop; }
- if(!pairEnergySlopeCut(clusterPair)) { continue pairLoop; }
-
- // Otherwise, note that it passed the cut.
- pairEnergySlopeCount++;
-
- // ==== Pair Coplanarity Cut ===================================
- // =============================================================
- // If the cluster fails the cut, skip to the next pair.
- if(!pairCoplanarityCut(clusterPair)) { continue pairLoop; }
-
- // Otherwise, note that it passed the cut.
- pairCoplanarityCount++;
-
- // Clusters that pass all of the pair cuts produce a trigger.
- return true;
+ // Increment the number of processed cluster pairs.
+ allPairs++;
+
+ // ==== Pair Energy Sum Cut ====================================
+ // =============================================================
+ // If the cluster fails the cut, skip to the next pair.
+ if(!pairEnergySumCut(clusterPair)) { continue pairLoop; }
+
+ // Otherwise, note that it passed the cut.
+ pairEnergySumCount++;
+
+ // ==== Pair Energy Difference Cut =============================
+ // =============================================================
+ // If the cluster fails the cut, skip to the next pair.
+ if(!pairEnergyDifferenceCut(clusterPair)) { continue pairLoop; }
+
+ // Otherwise, note that it passed the cut.
+ pairEnergyDifferenceCount++;
+
+ // ==== Pair Energy Slope Cut ==================================
+ // =============================================================
+ // If the cluster fails the cut, skip to the next pair.
+ //if(!energyDistanceCut(clusterPair)) { continue pairLoop; }
+ if(!pairEnergySlopeCut(clusterPair)) { continue pairLoop; }
+
+ // Otherwise, note that it passed the cut.
+ pairEnergySlopeCount++;
+
+ // ==== Pair Coplanarity Cut ===================================
+ // =============================================================
+ // If the cluster fails the cut, skip to the next pair.
+ if(!pairCoplanarityCut(clusterPair)) { continue pairLoop; }
+
+ // Otherwise, note that it passed the cut.
+ pairCoplanarityCount++;
+
+ // Clusters that pass all of the pair cuts produce a trigger.
+ return true;
}
// If the loop terminates without producing a trigger, there
- // are no cluster pairs which meet the trigger conditions.
+ // are no cluster pairs which meet the trigger conditions.
return false;
}
@@ -761,14 +761,14 @@
* @param clusterList - The clusters to add to the queues.
*/
private void updateClusterQueues(List<Cluster> clusterList) {
- // Create lists to store the top and bottom clusters.
+ // Create lists to store the top and bottom clusters.
ArrayList<Cluster> topClusterList = new ArrayList<Cluster>();
ArrayList<Cluster> botClusterList = new ArrayList<Cluster>();
// Loop over the clusters in the cluster list.
for (Cluster cluster : clusterList) {
- // If the cluster is on the top of the calorimeter, it
- // goes into the top cluster list.
+ // If the cluster is on the top of the calorimeter, it
+ // goes into the top cluster list.
if (cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("iy") > 0) {
topClusterList.add(cluster);
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/FEETrigger.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/FEETrigger.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/FEETrigger.java Wed Apr 27 11:11:32 2016
@@ -15,209 +15,209 @@
* @author Luca Colaneri
*/
public class FEETrigger extends TriggerDriver {
- // Store the LCIO cluster collection name.
- private String clusterCollectionName = "EcalClusters";
-
- // Store the cluster total energy trigger threshold.
- private double energyThreshold = 1.5;
-
- // Track the number of over-threshold clusters in each region.
- private int zone1Count = 0;
- private int zone2Count = 0;
- private int zone3Count = 0;
-
+ // Store the LCIO cluster collection name.
+ private String clusterCollectionName = "EcalClusters";
+
+ // Store the cluster total energy trigger threshold.
+ private double energyThreshold = 1.5;
+
+ // Track the number of over-threshold clusters in each region.
+ private int zone1Count = 0;
+ private int zone2Count = 0;
+ private int zone3Count = 0;
+
// The number of cluster over threshold that must occur in a region
- // before a trigger occurs.
- private int zone1Prescaling = 50;
- private int zone2Prescaling = 10;
-
- /**
- * Sets the energy threshold required for a cluster to be counted.
- *
- * @param energyThreshold - The energy threshold in GeV.
- */
- public void setEnergyThreshold(int energyThreshold) {
- this.energyThreshold = energyThreshold;
- }
-
- /**
- * Sets the number of events over threshold which must occur in the
- * first region in order for a trigger to occur.
- *
- * @param zone1Prescaling - The number of over-threshold clusters needed
- * for a trigger.
- */
- public void setZone1Prescaling(int zone1Prescaling) {
- this.zone1Prescaling = zone1Prescaling;
- }
-
- /**
- * Sets the number of events over threshold which must occur in the
- * second region in order for a trigger to occur.
- *
- * @param zone2Prescaling - The number of over-threshold clusters needed
- * for a trigger.
- */
- public void setZone2Prescaling(int zone2Prescaling) {
- this.zone2Prescaling = zone2Prescaling;
- }
-
- /**
- * Checks if any clusters exist over the set energy threshold and,
- * if they do, increments the appropriate over-threshold count
- * variable for the zone in which the cluster resides.
- *
- * @param event - The event from which clusters should be extracted.
- */
- @Override
- public void process(EventHeader event) {
- if(event.hasCollection(Cluster.class, clusterCollectionName)) {
- // Get the list of clusters from the event.
- List<Cluster> clusterList = event.get(Cluster.class, clusterCollectionName);
-
- // Loop over the clusters and check for any that pass the threshold.
- for(Cluster cluster : clusterList) {
- // Check if the current cluster exceeds the energy
- // threshold. If it does not, continue to the next
- // cluster in the list.
- if(cluster.getEnergy() > energyThreshold) {
- // Get the x-index of the seed hit.
- int ix = cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix");
-
- // Determine in which region the cluster is located
- // and increment the counter for that region. Zones
- // are defined as:
- // Zone 1 is -13 < ix < -4 and 14 < ix < 21 MISTAKE!!! it's all reversed!! remember!!!
- // Zone 2 is -20 < ix < -14 and ix > 20
- // Zone 3 is -23 <= ix < -18
+ // before a trigger occurs.
+ private int zone1Prescaling = 50;
+ private int zone2Prescaling = 10;
+
+ /**
+ * Sets the energy threshold required for a cluster to be counted.
+ *
+ * @param energyThreshold - The energy threshold in GeV.
+ */
+ public void setEnergyThreshold(int energyThreshold) {
+ this.energyThreshold = energyThreshold;
+ }
+
+ /**
+ * Sets the number of events over threshold which must occur in the
+ * first region in order for a trigger to occur.
+ *
+ * @param zone1Prescaling - The number of over-threshold clusters needed
+ * for a trigger.
+ */
+ public void setZone1Prescaling(int zone1Prescaling) {
+ this.zone1Prescaling = zone1Prescaling;
+ }
+
+ /**
+ * Sets the number of events over threshold which must occur in the
+ * second region in order for a trigger to occur.
+ *
+ * @param zone2Prescaling - The number of over-threshold clusters needed
+ * for a trigger.
+ */
+ public void setZone2Prescaling(int zone2Prescaling) {
+ this.zone2Prescaling = zone2Prescaling;
+ }
+
+ /**
+ * Checks if any clusters exist over the set energy threshold and,
+ * if they do, increments the appropriate over-threshold count
+ * variable for the zone in which the cluster resides.
+ *
+ * @param event - The event from which clusters should be extracted.
+ */
+ @Override
+ public void process(EventHeader event) {
+ if(event.hasCollection(Cluster.class, clusterCollectionName)) {
+ // Get the list of clusters from the event.
+ List<Cluster> clusterList = event.get(Cluster.class, clusterCollectionName);
+
+ // Loop over the clusters and check for any that pass the threshold.
+ for(Cluster cluster : clusterList) {
+ // Check if the current cluster exceeds the energy
+ // threshold. If it does not, continue to the next
+ // cluster in the list.
+ if(cluster.getEnergy() > energyThreshold) {
+ // Get the x-index of the seed hit.
+ int ix = cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix");
+
+ // Determine in which region the cluster is located
+ // and increment the counter for that region. Zones
+ // are defined as:
+ // Zone 1 is -13 < ix < -4 and 14 < ix < 21 MISTAKE!!! it's all reversed!! remember!!!
+ // Zone 2 is -20 < ix < -14 and ix > 20
+ // Zone 3 is -23 <= ix < -18
//V3
/*
- if( ix > 18 || ix < -22) { zone3Count++; }
- if(ix < 19 && ix > 12 ) { zone2Count++; }
- if((ix > 4 && ix < 13) || (ix > -23 && ix < -14)) { zone1Count++; }
+ if( ix > 18 || ix < -22) { zone3Count++; }
+ if(ix < 19 && ix > 12 ) { zone2Count++; }
+ if((ix > 4 && ix < 13) || (ix > -23 && ix < -14)) { zone1Count++; }
*/
//V2
/*
- if( ix > 18 ) { zone3Count++; }
- if(ix < 19 && ix > 12 || ix <-20) { zone2Count++; }
- if((ix > 4 && ix < 13) || (ix > -21 && ix < -14)) { zone1Count++; }
+ if( ix > 18 ) { zone3Count++; }
+ if(ix < 19 && ix > 12 || ix <-20) { zone2Count++; }
+ if((ix > 4 && ix < 13) || (ix > -21 && ix < -14)) { zone1Count++; }
*/
//v4/*
/*
if( ix > 19 || ix < -22) { zone3Count++; }
- if(ix < 20 && ix > 12 || ix <-20 && ix > -23) { zone2Count++; }
- if((ix > 4 && ix < 13) || (ix > -21 && ix < -14)) { zone1Count++; }
+ if(ix < 20 && ix > 12 || ix <-20 && ix > -23) { zone2Count++; }
+ if((ix > 4 && ix < 13) || (ix > -21 && ix < -14)) { zone1Count++; }
*/
//V prova
/*
- if( ix != 50 ) { zone2Count++; }
- */
+ if( ix != 50 ) { zone2Count++; }
+ */
//V6
/*
if( ix > 19 || ix < -22) { zone3Count++; }
- if(ix < 20 && ix > 8 || ix <-16 && ix > -23) { zone2Count++; }
- if((ix > 4 && ix < 9) || (ix > -17 && ix < -14)) { zone1Count++; }
+ if(ix < 20 && ix > 8 || ix <-16 && ix > -23) { zone2Count++; }
+ if((ix > 4 && ix < 9) || (ix > -17 && ix < -14)) { zone1Count++; }
*/
//V7
/*
if( ix > 19 || ix < -22) { zone3Count++; }
- if(ix < 20 && ix > 15 ) { zone2Count++; }
- if((ix > 5 && ix < 16) || (ix > -23 && ix < -14)) { zone1Count++; }
+ if(ix < 20 && ix > 15 ) { zone2Count++; }
+ if((ix > 5 && ix < 16) || (ix > -23 && ix < -14)) { zone1Count++; }
*/
//V8
/*
if( ix > 19 || ix < -17) { zone3Count++; } //x1
- if(ix < 20 && ix > 9 ) { zone2Count++; }//x10
- if((ix > 5 && ix < 10) || (ix > -18 && ix < -14)) { zone1Count++; } //x50
+ if(ix < 20 && ix > 9 ) { zone2Count++; }//x10
+ if((ix > 5 && ix < 10) || (ix > -18 && ix < -14)) { zone1Count++; } //x50
}*/
//V9
/*
if( ix > 19 || ix < -21) { zone3Count++; } //x1
- if(ix < 20 && ix > 9 || ix > -22 && ix <-17) { zone2Count++; }//x10
- if((ix > 5 && ix < 10) || (ix > -18 && ix < -14)) { zone1Count++; } //x50
+ if(ix < 20 && ix > 9 || ix > -22 && ix <-17) { zone2Count++; }//x10
+ if((ix > 5 && ix < 10) || (ix > -18 && ix < -14)) { zone1Count++; } //x50
*/
//10
/*
if( ix > 19 || ix < -22) { zone3Count++; } //x1
- if(ix < 20 && ix > 9 || ix > -22 && ix <-17) { zone2Count++; }//x10
- if((ix > 5 && ix < 10) || (ix > -18 && ix < -14)) { zone1Count++; } //x50
+ if(ix < 20 && ix > 9 || ix > -22 && ix <-17) { zone2Count++; }//x10
+ if((ix > 5 && ix < 10) || (ix > -18 && ix < -14)) { zone1Count++; } //x50
*/
//V11
if( ix > 19 || ix < -22) { zone3Count++; } //x1
- if(ix < 20 && ix > 11 || ix > -22 && ix <-19) { zone2Count++; }//x10
- if((ix > 5 && ix < 12) || (ix > -20 && ix < -14)) { zone1Count++; } //x50
+ if(ix < 20 && ix > 11 || ix > -22 && ix <-19) { zone2Count++; }//x10
+ if((ix > 5 && ix < 12) || (ix > -20 && ix < -14)) { zone1Count++; } //x50
}
- }
- }
-
- // Run the superclass event processing.
- super.process(event);
- }
-
- /**
- * Checks whether or not a trigger occurred.
- *
- * @param event - The event on which to base the trigger decision.
- * @return Returns <code>true</code> if a trigger occurred and <code>
- * false</code> if a trigger did not.
- */
- @Override
- protected boolean triggerDecision(EventHeader event) {
- // Check if the event has clusters. An event with no clusters
- // should never result in a trigger.
- if(event.hasCollection(Cluster.class, clusterCollectionName)) {
- // Check if any of the zone counts are high enough to trigger.
- return triggerTest();
- }
-
- // Events without clusters can not trigger.
- else { return false; }
- }
-
- /**
- * Checks if any of the regional counts are sufficiently high to
- * register a trigger.
- *
- * @return Returns <code>true</code> if a region has enough clusters
- * to trigger and <code>false</code> otherwise.
- */
- private boolean triggerTest() {
- // Track whether a trigger occurred.
- boolean trigger = false;
-
- // If any clusters occur in zone 3, reset the count and note
- // that a trigger occurred.
- if(zone3Count > 0) {
- zone3Count = 0;
+ }
+ }
+
+ // Run the superclass event processing.
+ super.process(event);
+ }
+
+ /**
+ * Checks whether or not a trigger occurred.
+ *
+ * @param event - The event on which to base the trigger decision.
+ * @return Returns <code>true</code> if a trigger occurred and <code>
+ * false</code> if a trigger did not.
+ */
+ @Override
+ protected boolean triggerDecision(EventHeader event) {
+ // Check if the event has clusters. An event with no clusters
+ // should never result in a trigger.
+ if(event.hasCollection(Cluster.class, clusterCollectionName)) {
+ // Check if any of the zone counts are high enough to trigger.
+ return triggerTest();
+ }
+
+ // Events without clusters can not trigger.
+ else { return false; }
+ }
+
+ /**
+ * Checks if any of the regional counts are sufficiently high to
+ * register a trigger.
+ *
+ * @return Returns <code>true</code> if a region has enough clusters
+ * to trigger and <code>false</code> otherwise.
+ */
+ private boolean triggerTest() {
+ // Track whether a trigger occurred.
+ boolean trigger = false;
+
+ // If any clusters occur in zone 3, reset the count and note
+ // that a trigger occurred.
+ if(zone3Count > 0) {
+ zone3Count = 0;
if(zone2Count==zone2Prescaling){zone2Count=0;}
if(zone1Count==zone1Prescaling){zone1Count=0;}
- trigger = true;
- }
-
- // If zone 2 has sufficient clusters (100 by default) to
- // trigger, reset its count and note that a trigger occurred.
- else if(zone2Count == zone2Prescaling) {
- zone2Count = 0;
+ trigger = true;
+ }
+
+ // If zone 2 has sufficient clusters (100 by default) to
+ // trigger, reset its count and note that a trigger occurred.
+ else if(zone2Count == zone2Prescaling) {
+ zone2Count = 0;
if(zone3Count>0){zone3Count=0;}
if(zone1Count==zone1Prescaling){zone1Count=0;}
- trigger = true;
- }
-
- // If zone 3 has sufficient clusters (1000 by default) to
- // trigger, reset its count and note that a trigger occurred.
- else if(zone1Count == zone1Prescaling) {
- zone1Count = 0;
+ trigger = true;
+ }
+
+ // If zone 3 has sufficient clusters (1000 by default) to
+ // trigger, reset its count and note that a trigger occurred.
+ else if(zone1Count == zone1Prescaling) {
+ zone1Count = 0;
if(zone3Count>0){zone3Count=0;}
if(zone2Count==zone2Prescaling){zone2Count=0;}
- trigger = true;
- }
-
- // Return whether or not a trigger occurred.
- return trigger;
- }
+ trigger = true;
+ }
+
+ // Return whether or not a trigger occurred.
+ return trigger;
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/FEETrigger2.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/FEETrigger2.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/FEETrigger2.java Wed Apr 27 11:11:32 2016
@@ -15,159 +15,159 @@
* @author Luca Colaneri
*/
public class FEETrigger2 extends TriggerDriver {
- // Store the LCIO cluster collection name.
- private String clusterCollectionName = "EcalClusters";
-
- // Store the cluster total energy trigger threshold.
- private double energyThreshold = 1.5;
-
- // Track the number of over-threshold clusters in each region.
- private int zone1Count = 0;
- private int zone2Count = 0;
- private int zone3Count = 0;
+ // Store the LCIO cluster collection name.
+ private String clusterCollectionName = "EcalClusters";
+
+ // Store the cluster total energy trigger threshold.
+ private double energyThreshold = 1.5;
+
+ // Track the number of over-threshold clusters in each region.
+ private int zone1Count = 0;
+ private int zone2Count = 0;
+ private int zone3Count = 0;
private int zone4Count = 0;
-
+
// The number of cluster over threshold that must occur in a region
- // before a trigger occurs.
- private int zone1Prescaling = 1000;
- private int zone2Prescaling = 70;
- private int zone4Prescaling = 200;
- /**
- * Sets the energy threshold required for a cluster to be counted.
- *
- * @param energyThreshold - The energy threshold in GeV.
- */
- public void setEnergyThreshold(int energyThreshold) {
- this.energyThreshold = energyThreshold;
- }
-
- /**
- * Sets the number of events over threshold which must occur in the
- * first region in order for a trigger to occur.
- *
- * @param zone1Prescaling - The number of over-threshold clusters needed
- * for a trigger.
- */
- public void setZone1Prescaling(int zone1Prescaling) {
- this.zone1Prescaling = zone1Prescaling;
- }
-
- /**
- * Sets the number of events over threshold which must occur in the
- * second region in order for a trigger to occur.
- *
- * @param zone2Prescaling - The number of over-threshold clusters needed
- * for a trigger.
- */
- public void setZone2Prescaling(int zone2Prescaling) {
- this.zone2Prescaling = zone2Prescaling;
- }
-
- /**
- * Checks if any clusters exist over the set energy threshold and,
- * if they do, increments the appropriate over-threshold count
- * variable for the zone in which the cluster resides.
- *
- * @param event - The event from which clusters should be extracted.
- */
- @Override
- public void process(EventHeader event) {
- if(event.hasCollection(Cluster.class, clusterCollectionName)) {
- // Get the list of clusters from the event.
- List<Cluster> clusterList = event.get(Cluster.class, clusterCollectionName);
-
- // Loop over the clusters and check for any that pass the threshold.
- for(Cluster cluster : clusterList) {
- // Check if the current cluster exceeds the energy
- // threshold. If it does not, continue to the next
- // cluster in the list.
- if(cluster.getEnergy() > energyThreshold) {
- // Get the x-index of the seed hit.
- int ix = cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix");
-
- // Determine in which region the cluster is located
- // and increment the counter for that region. Zones
- // are defined as:
- // Zone 1 is -13 < ix < -4 and 14 < ix < 21
- // Zone 2 is -20 < ix < -14 and ix > 20
- // Zone 3 is -23 <= ix < -19
- if(-23 <= ix && ix < -19) { zone3Count++; }
- if((-20 < ix && ix < -14)) { zone2Count++; }
- if((-13 < ix && ix < -4) || (14 < ix && ix < 21)) { zone1Count++; }
+ // before a trigger occurs.
+ private int zone1Prescaling = 1000;
+ private int zone2Prescaling = 70;
+ private int zone4Prescaling = 200;
+ /**
+ * Sets the energy threshold required for a cluster to be counted.
+ *
+ * @param energyThreshold - The energy threshold in GeV.
+ */
+ public void setEnergyThreshold(int energyThreshold) {
+ this.energyThreshold = energyThreshold;
+ }
+
+ /**
+ * Sets the number of events over threshold which must occur in the
+ * first region in order for a trigger to occur.
+ *
+ * @param zone1Prescaling - The number of over-threshold clusters needed
+ * for a trigger.
+ */
+ public void setZone1Prescaling(int zone1Prescaling) {
+ this.zone1Prescaling = zone1Prescaling;
+ }
+
+ /**
+ * Sets the number of events over threshold which must occur in the
+ * second region in order for a trigger to occur.
+ *
+ * @param zone2Prescaling - The number of over-threshold clusters needed
+ * for a trigger.
+ */
+ public void setZone2Prescaling(int zone2Prescaling) {
+ this.zone2Prescaling = zone2Prescaling;
+ }
+
+ /**
+ * Checks if any clusters exist over the set energy threshold and,
+ * if they do, increments the appropriate over-threshold count
+ * variable for the zone in which the cluster resides.
+ *
+ * @param event - The event from which clusters should be extracted.
+ */
+ @Override
+ public void process(EventHeader event) {
+ if(event.hasCollection(Cluster.class, clusterCollectionName)) {
+ // Get the list of clusters from the event.
+ List<Cluster> clusterList = event.get(Cluster.class, clusterCollectionName);
+
+ // Loop over the clusters and check for any that pass the threshold.
+ for(Cluster cluster : clusterList) {
+ // Check if the current cluster exceeds the energy
+ // threshold. If it does not, continue to the next
+ // cluster in the list.
+ if(cluster.getEnergy() > energyThreshold) {
+ // Get the x-index of the seed hit.
+ int ix = cluster.getCalorimeterHits().get(0).getIdentifierFieldValue("ix");
+
+ // Determine in which region the cluster is located
+ // and increment the counter for that region. Zones
+ // are defined as:
+ // Zone 1 is -13 < ix < -4 and 14 < ix < 21
+ // Zone 2 is -20 < ix < -14 and ix > 20
+ // Zone 3 is -23 <= ix < -19
+ if(-23 <= ix && ix < -19) { zone3Count++; }
+ if((-20 < ix && ix < -14)) { zone2Count++; }
+ if((-13 < ix && ix < -4) || (14 < ix && ix < 21)) { zone1Count++; }
if(ix>20){zone4Count++;}
- }
- }
- }
-
- // Run the superclass event processing.
- super.process(event);
- }
-
- /**
- * Checks whether or not a trigger occurred.
- *
- * @param event - The event on which to base the trigger decision.
- * @return Returns <code>true</code> if a trigger occurred and <code>
- * false</code> if a trigger did not.
- */
- @Override
- protected boolean triggerDecision(EventHeader event) {
- // Check if the event has clusters. An event with no clusters
- // should never result in a trigger.
- if(event.hasCollection(Cluster.class, clusterCollectionName)) {
- // Check if any of the zone counts are high enough to trigger.
- return triggerTest();
- }
-
- // Events without clusters can not trigger.
- else { return false; }
- }
-
- /**
- * Checks if any of the regional counts are sufficiently high to
- * register a trigger.
- *
- * @return Returns <code>true</code> if a region has enough clusters
- * to trigger and <code>false</code> otherwise.
- */
- private boolean triggerTest() {
- // Track whether a trigger occurred.
- boolean trigger = false;
-
- // If any clusters occur in zone 3, reset the count and note
- // that a trigger occurred.
- if(zone3Count > 0) {
- zone3Count = 0;
+ }
+ }
+ }
+
+ // Run the superclass event processing.
+ super.process(event);
+ }
+
+ /**
+ * Checks whether or not a trigger occurred.
+ *
+ * @param event - The event on which to base the trigger decision.
+ * @return Returns <code>true</code> if a trigger occurred and <code>
+ * false</code> if a trigger did not.
+ */
+ @Override
+ protected boolean triggerDecision(EventHeader event) {
+ // Check if the event has clusters. An event with no clusters
+ // should never result in a trigger.
+ if(event.hasCollection(Cluster.class, clusterCollectionName)) {
+ // Check if any of the zone counts are high enough to trigger.
+ return triggerTest();
+ }
+
+ // Events without clusters can not trigger.
+ else { return false; }
+ }
+
+ /**
+ * Checks if any of the regional counts are sufficiently high to
+ * register a trigger.
+ *
+ * @return Returns <code>true</code> if a region has enough clusters
+ * to trigger and <code>false</code> otherwise.
+ */
+ private boolean triggerTest() {
+ // Track whether a trigger occurred.
+ boolean trigger = false;
+
+ // If any clusters occur in zone 3, reset the count and note
+ // that a trigger occurred.
+ if(zone3Count > 0) {
+ zone3Count = 0;
if(zone2Count==zone2Prescaling){zone2Count=0;}
if(zone1Count==zone1Prescaling){zone1Count=0;}
- trigger = true;
- }
-
- // If zone 2 has sufficient clusters (100 by default) to
- // trigger, reset its count and note that a trigger occurred.
- else if(zone2Count == zone2Prescaling) {
- zone2Count = 0;
+ trigger = true;
+ }
+
+ // If zone 2 has sufficient clusters (100 by default) to
+ // trigger, reset its count and note that a trigger occurred.
+ else if(zone2Count == zone2Prescaling) {
+ zone2Count = 0;
if(zone3Count>0){zone3Count=0;}
if(zone1Count==zone1Prescaling){zone1Count=0;}
- trigger = true;
- }
-
- // If zone 3 has sufficient clusters (1000 by default) to
- // trigger, reset its count and note that a trigger occurred.
- else if(zone1Count == zone1Prescaling) {
- zone1Count = 0;
+ trigger = true;
+ }
+
+ // If zone 3 has sufficient clusters (1000 by default) to
+ // trigger, reset its count and note that a trigger occurred.
+ else if(zone1Count == zone1Prescaling) {
+ zone1Count = 0;
if(zone3Count>0){zone3Count=0;}
if(zone2Count==zone2Prescaling){zone2Count=0;}
- trigger = true;
- }
- else if(zone4Count == zone4Prescaling) {
- zone4Count = 0;
+ trigger = true;
+ }
+ else if(zone4Count == zone4Prescaling) {
+ zone4Count = 0;
if(zone3Count>0){zone3Count=0;}
if(zone2Count==zone2Prescaling){zone2Count=0;}
if(zone1Count==zone1Prescaling){zone1Count=0;}
- trigger = true;
- }
- // Return whether or not a trigger occurred.
- return trigger;
- }
+ trigger = true;
+ }
+ // Return whether or not a trigger occurred.
+ return trigger;
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/LCIOReadScript.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/LCIOReadScript.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/LCIOReadScript.java Wed Apr 27 11:11:32 2016
@@ -10,91 +10,91 @@
import org.lcsim.lcio.LCIOWriter;
public class LCIOReadScript {
- public static void main(String[] args) {
- // Make sure there arguments are valid.
- if(args.length != 2) {
- System.err.println("Error: Arguments must be [Input_File] [Output_File]");
- System.exit(1);
- }
-
- // Set the input/output files.
- File inputFile = new File(args[0]);
- File outputFile = new File(args[1]);
-
- // Make sure that the input file exists.
- if(!inputFile.canRead()) {
- System.err.println("Error: Input file can not be found.");
- System.exit(1);
- }
-
- // Create an LCIO reader to read it in.
- LCIOReader reader = null;
- try { reader = new LCIOReader(inputFile); }
- catch(IOException e) {
- e.printStackTrace();
- System.exit(1);
- }
-
- // Create an LCIO writer to output the new file.
- LCIOWriter writer = null;
- try { writer = new LCIOWriter(outputFile); }
- catch(IOException e) {
- e.printStackTrace();
- System.exit(1);
- }
-
- // Keep looping through events until there are no more.
- while(true) {
- // Try to get an event.
- EventHeader event = null;
- try { event = reader.read(); }
- catch(IOException e) { }
-
- // If the event is still null, there either was no event
- // or an error occurred.
- if(event == null) { break; }
-
- // Get the event number to print a status update.
- int num = event.getEventNumber();
- if(num % 10000 == 0) { System.out.println("Parsing event " + num + "."); }
-
- // See if the MCParticle collection exists.
- if(event.hasCollection(MCParticle.class, "MCParticle")) {
- // Get the MCParticle collection from the event.
- ArrayList<MCParticle> particleList = (ArrayList<MCParticle>) event.get(MCParticle.class, "MCParticle");
-
- // Remove the MCParticle collection from the event.
- event.remove("MCParticle");
-
- // Make a new list for good particles which pass some test.
- ArrayList<MCParticle> goodParticles = new ArrayList<MCParticle>();
-
- // Sort through the list of MCParticle objects in the
- // full list and add good ones to the good list.
- for(MCParticle p : particleList) {
- if(p.getEnergy() >= 2.1) { goodParticles.add(p); }
- }
-
- // Write the good particles back to the event.
- event.put("MCParticle", goodParticles);
- }
-
- // Write the event back out to the new file.
- try { writer.write(event); }
- catch(IOException e) {
- e.printStackTrace();
- System.exit(1);
- }
- }
-
- // Close the reader and writer.
- try {
- reader.close();
- writer.close();
- }
- catch(IOException e) {
- e.printStackTrace();
- System.exit(1);
- }
- }
+ public static void main(String[] args) {
+ // Make sure there arguments are valid.
+ if(args.length != 2) {
+ System.err.println("Error: Arguments must be [Input_File] [Output_File]");
+ System.exit(1);
+ }
+
+ // Set the input/output files.
+ File inputFile = new File(args[0]);
+ File outputFile = new File(args[1]);
+
+ // Make sure that the input file exists.
+ if(!inputFile.canRead()) {
+ System.err.println("Error: Input file can not be found.");
+ System.exit(1);
+ }
+
+ // Create an LCIO reader to read it in.
+ LCIOReader reader = null;
+ try { reader = new LCIOReader(inputFile); }
+ catch(IOException e) {
+ e.printStackTrace();
+ System.exit(1);
+ }
+
+ // Create an LCIO writer to output the new file.
+ LCIOWriter writer = null;
+ try { writer = new LCIOWriter(outputFile); }
+ catch(IOException e) {
+ e.printStackTrace();
+ System.exit(1);
+ }
+
+ // Keep looping through events until there are no more.
+ while(true) {
+ // Try to get an event.
+ EventHeader event = null;
+ try { event = reader.read(); }
+ catch(IOException e) { }
+
+ // If the event is still null, there either was no event
+ // or an error occurred.
+ if(event == null) { break; }
+
+ // Get the event number to print a status update.
+ int num = event.getEventNumber();
+ if(num % 10000 == 0) { System.out.println("Parsing event " + num + "."); }
+
+ // See if the MCParticle collection exists.
+ if(event.hasCollection(MCParticle.class, "MCParticle")) {
+ // Get the MCParticle collection from the event.
+ ArrayList<MCParticle> particleList = (ArrayList<MCParticle>) event.get(MCParticle.class, "MCParticle");
+
+ // Remove the MCParticle collection from the event.
+ event.remove("MCParticle");
+
+ // Make a new list for good particles which pass some test.
+ ArrayList<MCParticle> goodParticles = new ArrayList<MCParticle>();
+
+ // Sort through the list of MCParticle objects in the
+ // full list and add good ones to the good list.
+ for(MCParticle p : particleList) {
+ if(p.getEnergy() >= 2.1) { goodParticles.add(p); }
+ }
+
+ // Write the good particles back to the event.
+ event.put("MCParticle", goodParticles);
+ }
+
+ // Write the event back out to the new file.
+ try { writer.write(event); }
+ catch(IOException e) {
+ e.printStackTrace();
+ System.exit(1);
+ }
+ }
+
+ // Close the reader and writer.
+ try {
+ reader.close();
+ writer.close();
+ }
+ catch(IOException e) {
+ e.printStackTrace();
+ System.exit(1);
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/rate.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/rate.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/rate.java Wed Apr 27 11:11:32 2016
@@ -435,15 +435,15 @@
nevents++;
- /* natha's code for trigger
- List <AbstractIntData> aids = event.get(AbstractIntData.class, "TriggerBank");
- for (AbstractIntData aid : aids) {
- if (aid.getTag() == TIData.BANK_TAG) {
- TIData tt=(TIData)aid;
- if (!tt.isSingle1Trigger()) return;
+ /* natha's code for trigger
+ List <AbstractIntData> aids = event.get(AbstractIntData.class, "TriggerBank");
+ for (AbstractIntData aid : aids) {
+ if (aid.getTag() == TIData.BANK_TAG) {
+ TIData tt=(TIData)aid;
+ if (!tt.isSingle1Trigger()) return;
break;
- }
- }
+ }
+ }
*/ //nathans code for trigger end
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/ratesim.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/ratesim.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/luca/ratesim.java Wed Apr 27 11:11:32 2016
@@ -238,15 +238,15 @@
nevents++;
- /* natha's code for trigger
- List <AbstractIntData> aids = event.get(AbstractIntData.class, "TriggerBank");
- for (AbstractIntData aid : aids) {
- if (aid.getTag() == TIData.BANK_TAG) {
- TIData tt=(TIData)aid;
- if (!tt.isSingle1Trigger()) return;
+ /* natha's code for trigger
+ List <AbstractIntData> aids = event.get(AbstractIntData.class, "TriggerBank");
+ for (AbstractIntData aid : aids) {
+ if (aid.getTag() == TIData.BANK_TAG) {
+ TIData tt=(TIData)aid;
+ if (!tt.isSingle1Trigger()) return;
break;
- }
- }
+ }
+ }
*/ //nathans code for trigger end
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/FilterMCBunches.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/FilterMCBunches.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/FilterMCBunches.java Wed Apr 27 11:11:32 2016
@@ -17,7 +17,7 @@
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.lcsim.event.EventHeader;
import org.lcsim.event.MCParticle;
import org.lcsim.event.RawCalorimeterHit;
@@ -70,7 +70,7 @@
public static void main(String[] args) {
// Set up command line parsing.
Options options = createCommandLineOptions();
- CommandLineParser parser = new DefaultParser();
+ CommandLineParser parser = new PosixParser();
// Parse command line arguments.
CommandLine cl = null;
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/HPSEcalAnalogPrintDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/HPSEcalAnalogPrintDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/HPSEcalAnalogPrintDriver.java Wed Apr 27 11:11:32 2016
@@ -22,67 +22,67 @@
*/
public class HPSEcalAnalogPrintDriver extends Driver {
- Subdetector ecal;
- IDDecoder dec;
- String ecalName;
- String ecalReadoutName = "EcalHits";
- String ecalCollectionName = null;
- String outputFileName;
- PrintWriter outputStream = null;
- int flags;
+ Subdetector ecal;
+ IDDecoder dec;
+ String ecalName;
+ String ecalReadoutName = "EcalHits";
+ String ecalCollectionName = null;
+ String outputFileName;
+ PrintWriter outputStream = null;
+ int flags;
- public HPSEcalAnalogPrintDriver() {
- }
+ public HPSEcalAnalogPrintDriver() {
+ }
- public void setEcalCollectionName(String ecalCollectionName) {
- this.ecalCollectionName = ecalCollectionName;
- }
+ public void setEcalCollectionName(String ecalCollectionName) {
+ this.ecalCollectionName = ecalCollectionName;
+ }
- public void setEcalName(String ecalName) {
- this.ecalName = ecalName;
- }
+ public void setEcalName(String ecalName) {
+ this.ecalName = ecalName;
+ }
- public void setOutputFileName(String outputFileName) {
- this.outputFileName = outputFileName;
- }
+ public void setOutputFileName(String outputFileName) {
+ this.outputFileName = outputFileName;
+ }
- @Override
- public void startOfData() {
- if (ecalCollectionName == null) {
- throw new RuntimeException("The parameter ecalCollectionName was not set!");
- }
+ @Override
+ public void startOfData() {
+ if (ecalCollectionName == null) {
+ throw new RuntimeException("The parameter ecalCollectionName was not set!");
+ }
- if (ecalName == null) {
- throw new RuntimeException("The parameter ecalName was not set!");
- }
+ if (ecalName == null) {
+ throw new RuntimeException("The parameter ecalName was not set!");
+ }
- if (outputFileName != null) {
- try {
- outputStream = new PrintWriter(outputFileName);
- } catch (IOException ex) {
- throw new RuntimeException("Invalid outputFilePath!");
- }
- } else {
- outputStream = new PrintWriter(System.out, true);
- }
- }
+ if (outputFileName != null) {
+ try {
+ outputStream = new PrintWriter(outputFileName);
+ } catch (IOException ex) {
+ throw new RuntimeException("Invalid outputFilePath!");
+ }
+ } else {
+ outputStream = new PrintWriter(System.out, true);
+ }
+ }
- public void detectorChanged(Detector detector) {
- // Get the Subdetector.
- ecal = (Subdetector) detector.getSubdetector(ecalName);
- dec = ecal.getIDDecoder();
- }
+ public void detectorChanged(Detector detector) {
+ // Get the Subdetector.
+ ecal = (Subdetector) detector.getSubdetector(ecalName);
+ dec = ecal.getIDDecoder();
+ }
- @Override
- public void process(EventHeader event) {
- // Get the list of ECal hits.
- if (event.hasCollection(CalorimeterHit.class, ecalCollectionName)) {
- //outputStream.println("Reading RawTrackerHits from event " + event.getEventNumber());
- List<CalorimeterHit> hits = event.get(CalorimeterHit.class, ecalCollectionName);
- for (CalorimeterHit hit : hits) {
- dec.setID(hit.getCellID());
- outputStream.printf("%d\t%d\t%f\t%f\n", dec.getValue("ix"), dec.getValue("iy"), hit.getTime(), hit.getRawEnergy());
- }
- }
- }
+ @Override
+ public void process(EventHeader event) {
+ // Get the list of ECal hits.
+ if (event.hasCollection(CalorimeterHit.class, ecalCollectionName)) {
+ //outputStream.println("Reading RawTrackerHits from event " + event.getEventNumber());
+ List<CalorimeterHit> hits = event.get(CalorimeterHit.class, ecalCollectionName);
+ for (CalorimeterHit hit : hits) {
+ dec.setID(hit.getCellID());
+ outputStream.printf("%d\t%d\t%f\t%f\n", dec.getValue("ix"), dec.getValue("iy"), hit.getTime(), hit.getRawEnergy());
+ }
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/HPSEcalDigitalPrintDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/HPSEcalDigitalPrintDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/HPSEcalDigitalPrintDriver.java Wed Apr 27 11:11:32 2016
@@ -22,70 +22,70 @@
*/
public class HPSEcalDigitalPrintDriver extends Driver {
- Subdetector ecal;
- IDDecoder dec;
- String ecalName = "Ecal";
- String ecalReadoutName = "EcalHits";
- String ecalCollectionName = "EcalRawHits";
- String outputFileName;
- PrintWriter outputStream = null;
- int timeScale = 1;
- int flags;
+ Subdetector ecal;
+ IDDecoder dec;
+ String ecalName = "Ecal";
+ String ecalReadoutName = "EcalHits";
+ String ecalCollectionName = "EcalRawHits";
+ String outputFileName;
+ PrintWriter outputStream = null;
+ int timeScale = 1;
+ int flags;
- public HPSEcalDigitalPrintDriver() {
- }
+ public HPSEcalDigitalPrintDriver() {
+ }
- public void setTimeScale(int timeScale) {
- this.timeScale = timeScale;
- }
+ public void setTimeScale(int timeScale) {
+ this.timeScale = timeScale;
+ }
- public void setEcalCollectionName(String ecalCollectionName) {
- this.ecalCollectionName = ecalCollectionName;
- }
+ public void setEcalCollectionName(String ecalCollectionName) {
+ this.ecalCollectionName = ecalCollectionName;
+ }
- public void setEcalName(String ecalName) {
- this.ecalName = ecalName;
- }
+ public void setEcalName(String ecalName) {
+ this.ecalName = ecalName;
+ }
- public void setOutputFileName(String outputFileName) {
- this.outputFileName = outputFileName;
- }
+ public void setOutputFileName(String outputFileName) {
+ this.outputFileName = outputFileName;
+ }
- public void startOfData() {
- if (ecalCollectionName == null) {
- throw new RuntimeException("The parameter ecalCollectionName was not set!");
- }
+ public void startOfData() {
+ if (ecalCollectionName == null) {
+ throw new RuntimeException("The parameter ecalCollectionName was not set!");
+ }
- if (ecalName == null) {
- throw new RuntimeException("The parameter ecalName was not set!");
- }
+ if (ecalName == null) {
+ throw new RuntimeException("The parameter ecalName was not set!");
+ }
- if (outputFileName != null) {
- try {
- outputStream = new PrintWriter(outputFileName);
- } catch (IOException ex) {
- throw new RuntimeException("Invalid outputFilePath!");
- }
- } else {
- outputStream = new PrintWriter(System.out, true);
- }
- }
+ if (outputFileName != null) {
+ try {
+ outputStream = new PrintWriter(outputFileName);
+ } catch (IOException ex) {
+ throw new RuntimeException("Invalid outputFilePath!");
+ }
+ } else {
+ outputStream = new PrintWriter(System.out, true);
+ }
+ }
- public void detectorChanged(Detector detector) {
- // Get the Subdetector.
- ecal = (Subdetector) detector.getSubdetector(ecalName);
- dec = ecal.getIDDecoder();
- }
+ public void detectorChanged(Detector detector) {
+ // Get the Subdetector.
+ ecal = (Subdetector) detector.getSubdetector(ecalName);
+ dec = ecal.getIDDecoder();
+ }
- public void process(EventHeader event) {
- // Get the list of ECal hits.
- if (event.hasCollection(RawCalorimeterHit.class, ecalCollectionName)) {
- List<RawCalorimeterHit> hits = event.get(RawCalorimeterHit.class, ecalCollectionName);
- //outputStream.println("Reading RawCalorimeterHit from event " + event.getEventNumber());
- for (RawCalorimeterHit hit : hits) {
- dec.setID(hit.getCellID());
- outputStream.printf("%d\t%d\t%d\t%d\n", dec.getValue("ix"), dec.getValue("iy"), hit.getTimeStamp() * timeScale, hit.getAmplitude());
- }
- }
- }
+ public void process(EventHeader event) {
+ // Get the list of ECal hits.
+ if (event.hasCollection(RawCalorimeterHit.class, ecalCollectionName)) {
+ List<RawCalorimeterHit> hits = event.get(RawCalorimeterHit.class, ecalCollectionName);
+ //outputStream.println("Reading RawCalorimeterHit from event " + event.getEventNumber());
+ for (RawCalorimeterHit hit : hits) {
+ dec.setID(hit.getCellID());
+ outputStream.printf("%d\t%d\t%d\t%d\n", dec.getValue("ix"), dec.getValue("iy"), hit.getTimeStamp() * timeScale, hit.getAmplitude());
+ }
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/HPSEcalRawTrackerHitPrintDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/HPSEcalRawTrackerHitPrintDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/HPSEcalRawTrackerHitPrintDriver.java Wed Apr 27 11:11:32 2016
@@ -22,68 +22,68 @@
*/
public class HPSEcalRawTrackerHitPrintDriver extends Driver {
- Subdetector ecal;
- IDDecoder dec;
- String ecalName = "Ecal";
- String ecalReadoutName = "EcalHits";
- String ecalCollectionName = "EcalRawHits";
- String outputFileName;
- PrintWriter outputStream = null;
- int flags;
+ Subdetector ecal;
+ IDDecoder dec;
+ String ecalName = "Ecal";
+ String ecalReadoutName = "EcalHits";
+ String ecalCollectionName = "EcalRawHits";
+ String outputFileName;
+ PrintWriter outputStream = null;
+ int flags;
- public HPSEcalRawTrackerHitPrintDriver() {
- }
+ public HPSEcalRawTrackerHitPrintDriver() {
+ }
- public void setEcalCollectionName(String ecalCollectionName) {
- this.ecalCollectionName = ecalCollectionName;
- }
+ public void setEcalCollectionName(String ecalCollectionName) {
+ this.ecalCollectionName = ecalCollectionName;
+ }
- public void setEcalName(String ecalName) {
- this.ecalName = ecalName;
- }
+ public void setEcalName(String ecalName) {
+ this.ecalName = ecalName;
+ }
- public void setOutputFileName(String outputFileName) {
- this.outputFileName = outputFileName;
- }
+ public void setOutputFileName(String outputFileName) {
+ this.outputFileName = outputFileName;
+ }
- public void startOfData() {
- if (ecalCollectionName == null) {
- throw new RuntimeException("The parameter ecalCollectionName was not set!");
- }
+ public void startOfData() {
+ if (ecalCollectionName == null) {
+ throw new RuntimeException("The parameter ecalCollectionName was not set!");
+ }
- if (ecalName == null) {
- throw new RuntimeException("The parameter ecalName was not set!");
- }
+ if (ecalName == null) {
+ throw new RuntimeException("The parameter ecalName was not set!");
+ }
- if (outputFileName != null) {
- try {
- outputStream = new PrintWriter(outputFileName);
- } catch (IOException ex) {
- throw new RuntimeException("Invalid outputFilePath!");
- }
- } else {
- outputStream = new PrintWriter(System.out, true);
- }
- }
+ if (outputFileName != null) {
+ try {
+ outputStream = new PrintWriter(outputFileName);
+ } catch (IOException ex) {
+ throw new RuntimeException("Invalid outputFilePath!");
+ }
+ } else {
+ outputStream = new PrintWriter(System.out, true);
+ }
+ }
- public void detectorChanged(Detector detector) {
- // Get the Subdetector.
- ecal = (Subdetector) detector.getSubdetector(ecalName);
- dec = ecal.getIDDecoder();
- }
+ public void detectorChanged(Detector detector) {
+ // Get the Subdetector.
+ ecal = (Subdetector) detector.getSubdetector(ecalName);
+ dec = ecal.getIDDecoder();
+ }
- public void process(EventHeader event) {
- // Get the list of ECal hits.
- if (event.hasCollection(RawTrackerHit.class, ecalCollectionName)) {
- //outputStream.println("Reading RawTrackerHits from event " + event.getEventNumber());
- List<RawTrackerHit> hits = event.get(RawTrackerHit.class, ecalCollectionName);
- for (RawTrackerHit hit : hits) {
- dec.setID(hit.getCellID());
- outputStream.printf("%d\t%d\t%d\t%d\n", dec.getValue("ix"), dec.getValue("iy"), hit.getTime(), hit.getADCValues().length);
- for (int i = 0; i < hit.getADCValues().length; i++) {
- outputStream.printf("%d\n", hit.getADCValues()[i]);
- }
- }
- }
- }
+ public void process(EventHeader event) {
+ // Get the list of ECal hits.
+ if (event.hasCollection(RawTrackerHit.class, ecalCollectionName)) {
+ //outputStream.println("Reading RawTrackerHits from event " + event.getEventNumber());
+ List<RawTrackerHit> hits = event.get(RawTrackerHit.class, ecalCollectionName);
+ for (RawTrackerHit hit : hits) {
+ dec.setID(hit.getCellID());
+ outputStream.printf("%d\t%d\t%d\t%d\n", dec.getValue("ix"), dec.getValue("iy"), hit.getTime(), hit.getADCValues().length);
+ for (int i = 0; i < hit.getADCValues().length; i++) {
+ outputStream.printf("%d\n", hit.getADCValues()[i]);
+ }
+ }
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/HPSGenericRawTrackerHitPrintDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/HPSGenericRawTrackerHitPrintDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/HPSGenericRawTrackerHitPrintDriver.java Wed Apr 27 11:11:32 2016
@@ -20,42 +20,42 @@
*/
public class HPSGenericRawTrackerHitPrintDriver extends Driver {
- String outputFileName;
- PrintWriter outputStream = null;
+ String outputFileName;
+ PrintWriter outputStream = null;
- public HPSGenericRawTrackerHitPrintDriver() {
- }
+ public HPSGenericRawTrackerHitPrintDriver() {
+ }
- public void setOutputFileName(String outputFileName) {
- this.outputFileName = outputFileName;
- }
+ public void setOutputFileName(String outputFileName) {
+ this.outputFileName = outputFileName;
+ }
- public void startOfData() {
- if (outputFileName != null) {
- try {
- outputStream = new PrintWriter(outputFileName);
- } catch (IOException ex) {
- throw new RuntimeException("Invalid outputFilePath!");
- }
- } else {
- outputStream = new PrintWriter(System.out, true);
- }
- }
+ public void startOfData() {
+ if (outputFileName != null) {
+ try {
+ outputStream = new PrintWriter(outputFileName);
+ } catch (IOException ex) {
+ throw new RuntimeException("Invalid outputFilePath!");
+ }
+ } else {
+ outputStream = new PrintWriter(System.out, true);
+ }
+ }
- public void process(EventHeader event) {
- // Get the list of ECal hits.
- if (event.hasCollection(RawTrackerHit.class)) {
- //outputStream.println("Reading RawTrackerHits from event " + event.getEventNumber());
- List<List<RawTrackerHit>> listOfLists = event.get(RawTrackerHit.class);
- for (List<RawTrackerHit> hits : listOfLists) {
- outputStream.printf("List with %d RawTrackerHits:\n", hits.size());
- for (RawTrackerHit hit : hits) {
- outputStream.printf("%d\t%d\n", hit.getCellID(), hit.getADCValues().length);
- for (int i = 0; i < hit.getADCValues().length; i++) {
- outputStream.printf("%d\n", hit.getADCValues()[i]);
- }
- }
- }
- }
- }
+ public void process(EventHeader event) {
+ // Get the list of ECal hits.
+ if (event.hasCollection(RawTrackerHit.class)) {
+ //outputStream.println("Reading RawTrackerHits from event " + event.getEventNumber());
+ List<List<RawTrackerHit>> listOfLists = event.get(RawTrackerHit.class);
+ for (List<RawTrackerHit> hits : listOfLists) {
+ outputStream.printf("List with %d RawTrackerHits:\n", hits.size());
+ for (RawTrackerHit hit : hits) {
+ outputStream.printf("%d\t%d\n", hit.getCellID(), hit.getADCValues().length);
+ for (int i = 0; i < hit.getADCValues().length; i++) {
+ outputStream.printf("%d\n", hit.getADCValues()[i]);
+ }
+ }
+ }
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/LCIOTrackAnalysis.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/LCIOTrackAnalysis.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/LCIOTrackAnalysis.java Wed Apr 27 11:11:32 2016
@@ -24,7 +24,7 @@
* @author Sho Uemura <[log in to unmask]>
*/
// TODO: This is an exact duplicate of the class in the analysis.example package.
-// One of them should be sandboxed
+// One of them should be sandboxed
public class LCIOTrackAnalysis {
protected Track track;
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/MergeMCBunches.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/MergeMCBunches.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/MergeMCBunches.java Wed Apr 27 11:11:32 2016
@@ -13,7 +13,7 @@
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.lcsim.event.EventHeader;
import org.lcsim.event.MCParticle;
import org.lcsim.event.SimCalorimeterHit;
@@ -49,7 +49,7 @@
public static void main(String[] args) {
// Set up command line parsing.
Options options = createCommandLineOptions();
- CommandLineParser parser = new DefaultParser();
+ CommandLineParser parser = new PosixParser();
// Parse command line arguments.
CommandLine cl = null;
@@ -191,4 +191,4 @@
throw new RuntimeException(e);
}
}
-}
+}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/SvtChargeIntegrator.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/SvtChargeIntegrator.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/SvtChargeIntegrator.java Wed Apr 27 11:11:32 2016
@@ -6,17 +6,19 @@
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import java.util.TimeZone;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
+import org.apache.commons.cli.PosixParser;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
@@ -26,6 +28,7 @@
import org.hps.conditions.svt.SvtBiasConstant.SvtBiasConstantCollection;
import org.hps.conditions.svt.SvtMotorPosition;
import org.hps.conditions.svt.SvtMotorPosition.SvtMotorPositionCollection;
+import org.hps.conditions.svt.SvtTimingConstants;
import org.hps.run.database.RunManager;
/**
@@ -35,9 +38,9 @@
public class SvtChargeIntegrator {
private static final double angleTolerance = 1e-4;
+ private static final double burstModeNoiseEfficiency = 0.965;
/**
- * Load SVT HV bias constants into the conditions database.
*
* @param args the command line arguments (requires a CSV run/file log file
* and a MYA dump file.)
@@ -48,8 +51,9 @@
options.addOption(new Option("r", false, "use per-run CSV log file (default is per-file)"));
options.addOption(new Option("t", false, "use TI timestamp instead of Unix time (higher precision, but requires TI time offset in run DB)"));
options.addOption(new Option("c", false, "get TI time offset from CSV log file instead of run DB"));
-
- final CommandLineParser parser = new DefaultParser();
+ options.addOption(new Option("e", true, "header error file"));
+
+ final CommandLineParser parser = new PosixParser();
CommandLine cl = null;
try {
cl = parser.parse(options, args);
@@ -61,6 +65,26 @@
boolean useTI = cl.hasOption("t");
boolean useCrawlerTI = cl.hasOption("c");
+ Map<Integer, Long> runErrorMap = new HashMap<Integer, Long>();
+ if (cl.hasOption("e")) {
+ try {
+ BufferedReader br = new BufferedReader(new FileReader(cl.getOptionValue("e")));
+ String line;
+ System.err.println("header error file header: " + br.readLine()); //discard the first line
+ while ((line = br.readLine()) != null) {
+ String arr[] = line.split(" +");
+ int run = Integer.parseInt(arr[1]);
+ long errorTime = Long.parseLong(arr[4]);
+ runErrorMap.put(run, errorTime);
+// System.out.format("%d %d\n", run, errorTime);
+ }
+ } catch (FileNotFoundException ex) {
+ Logger.getLogger(SvtChargeIntegrator.class.getName()).log(Level.SEVERE, null, ex);
+ } catch (IOException ex) {
+ Logger.getLogger(SvtChargeIntegrator.class.getName()).log(Level.SEVERE, null, ex);
+ }
+ }
+
if (cl.getArgs().length != 2) {
printUsage(options);
return;
@@ -88,9 +112,9 @@
String line;
System.err.println("myaData header: " + br.readLine()); //discard the first line
if (perRun) {
- System.out.println("run_num\tnominal_position\tnEvents\ttotalQ\ttotalQ_withbias\ttotalQ_atnom\tgatedQ\tgatedQ_withbias\tgatedQ_atnom");
+ System.out.println("run_num\tnominal_position\tnEvents\ttotalQ\ttotalQ_withbias\ttotalQ_atnom\ttotalQ_noerror\tgatedQ\tgatedQ_withbias\tgatedQ_atnom\tgatedQ_noerror\tgoodQ\tgoodQ_withbias\tgoodQ_atnom\tgoodQ_noerror");
} else {
- System.out.println("run_num\tfile_num\tnominal_position\tnEvents\ttotalQ\ttotalQ_withbias\ttotalQ_atnom\tgatedQ\tgatedQ_withbias\tgatedQ_atnom");
+ System.out.println("run_num\tfile_num\tnominal_position\tnEvents\ttotalQ\ttotalQ_withbias\ttotalQ_atnom\ttotalQ_noerror\tgatedQ\tgatedQ_withbias\tgatedQ_atnom\tgatedQ_noerror\tgoodQ\tgoodQ_withbias\tgoodQ_atnom\tgoodQ_noerror");
}
int currentRun = 0;
@@ -98,6 +122,7 @@
double nominalAngleBottom = -999;
String nominalPosition = null;
long tiTimeOffset = 0;
+ double efficiency = 0;
SvtBiasConstantCollection svtBiasConstants = null;
SvtMotorPositionCollection svtPositionConstants = null;
SvtAlignmentConstant.SvtAlignmentConstantCollection alignmentConstants = null;
@@ -120,10 +145,10 @@
if (runNum != currentRun) {
if (useTI && !useCrawlerTI) {
RunManager.getRunManager().setRun(runNum);
- if (!RunManager.getRunManager().runExists() || RunManager.getRunManager().getTriggerConfig().getTiTimeOffset() == null) {
+ if (!RunManager.getRunManager().runExists() || RunManager.getRunManager().getRunSummary().getTiTimeOffset() == null) {
continue;
}
- tiTimeOffset = RunManager.getRunManager().getTriggerConfig().getTiTimeOffset();
+ tiTimeOffset = RunManager.getRunManager().getRunSummary().getTiTimeOffset();
if (tiTimeOffset == 0) {
continue;
}
@@ -169,7 +194,20 @@
alignmentConstants = null;
nominalPosition = "unknown";
}
-
+ efficiency = burstModeNoiseEfficiency;
+ SvtTimingConstants svtTimingConstants;
+ try {
+ svtTimingConstants = DatabaseConditionsManager.getInstance().getCachedConditions(SvtTimingConstants.SvtTimingConstantsCollection.class, "svt_timing_constants").getCachedData().get(0);
+ } catch (Exception ex) {
+ svtTimingConstants = null;
+ }
+ if (svtTimingConstants != null) {
+ if (svtTimingConstants.getOffsetTime() > 27) {
+ efficiency *= 2.0 / 3.0; // bad latency: drop 2 out of 6 trigger phases
+ }// otherwise, we have good latency
+ } else {
+ efficiency = 0;
+ }//no latency info in conditions: give up
currentRun = runNum;
}
@@ -194,22 +232,40 @@
if (firstTI == 0 || lastTI == 0) {
continue;
}
- startDate = new Date((long) ((firstTI + tiTimeOffset) / 1e6));
- endDate = new Date((long) ((lastTI + tiTimeOffset) / 1e6));
+ startDate = new Date((firstTI + tiTimeOffset) / 1000000);
+ endDate = new Date((lastTI + tiTimeOffset) / 1000000);
} else {
if (firstTime == 0 || lastTime == 0) {
continue;
}
startDate = new Date(firstTime * 1000);
endDate = new Date(lastTime * 1000);
+ }
+
+ Long errorTime = runErrorMap.get(runNum);
+ Date errorDate = null;
+ if (errorTime != null) {
+ errorDate = new Date(errorTime / 1000000);
+ boolean isGood = Math.abs(errorDate.getTime() - startDate.getTime()) < 10 * 60 * 60 * 1000; //10 hours
+ if (!isGood && useTI) {
+ errorDate = new Date((errorTime + tiTimeOffset) / 1000000);
+// boolean isPlusOffsetGood = Math.abs(errorDatePlusOffset.getTime() - startDate.getTime()) < 10 * 60 * 60 * 1000; //10 hours
+// System.out.format("%d, %d, %d: %s (good: %b), %s (good: %b)\n", runNum, errorTime, tiTimeOffset, errorDate, isGood, errorDatePlusOffset, isPlusOffsetGood);
+ }
}
double totalCharge = 0;
double totalChargeWithBias = 0;
double totalChargeWithBiasAtNominal = 0;
+ double totalChargeWithBiasAtNominalNoError = 0;
double totalGatedCharge = 0;
double totalGatedChargeWithBias = 0;
double totalGatedChargeWithBiasAtNominal = 0;
+ double totalGatedChargeWithBiasAtNominalNoError = 0;
+ double totalGoodCharge = 0;
+ double totalGoodChargeWithBias = 0;
+ double totalGoodChargeWithBiasAtNominal = 0;
+ double totalGoodChargeWithBiasAtNominalNoError = 0;
br.mark(1000);
while ((line = br.readLine()) != null) {
@@ -268,27 +324,41 @@
long dtStart = Math.max(startDate.getTime(), lastDate.getTime());
long dtEnd = Math.min(date.getTime(), endDate.getTime());
double dt = (dtEnd - dtStart) / 1000.0;
+ double errorDt = 0;
if (biasConstant != null) {
long biasStart = Math.max(dtStart, biasConstant.getStart());
long biasEnd = Math.min(dtEnd, biasConstant.getEnd());
- biasDt = (biasEnd - biasStart) / 1000.0;
+ biasDt = Math.max(0, biasEnd - biasStart) / 1000.0;
if (positionConstant != null) {
long positionStart = Math.max(biasStart, positionConstant.getStart());
long positionEnd = Math.min(biasEnd, positionConstant.getEnd());
- positionDt = (positionEnd - positionStart) / 1000.0;
+ positionDt = Math.max(0, positionEnd - positionStart) / 1000.0;
+
+ long errorEnd = positionStart;
+ if (errorDate == null) {
+ errorEnd = positionEnd;
+ } else if (errorDate.getTime() > dtStart) {
+ errorEnd = Math.min(positionEnd, errorDate.getTime());
+ }
+ errorDt = Math.max(0, errorEnd - positionStart) / 1000.0;
}
}
- double dq = dt * current; // nC
- double dqGated = dt * current * livetime; // nC
// System.out.format("start %d end %d date %d lastDate %d current %f dt %f\n", startDate.getTime(), endDate.getTime(), date.getTime(), lastDate.getTime(), current, dt);
- totalCharge += dq;
- totalGatedCharge += dqGated;
+ totalCharge += dt * current; // nC
+ totalGatedCharge += dt * current * livetime;
+ totalGoodCharge += dt * current * livetime * efficiency;
if (biasGood) {
totalChargeWithBias += biasDt * current;
totalGatedChargeWithBias += biasDt * current * livetime;
+ totalGoodChargeWithBias += biasDt * current * livetime * efficiency;
if (positionGood) {
totalChargeWithBiasAtNominal += positionDt * current;
totalGatedChargeWithBiasAtNominal += positionDt * current * livetime;
+ totalGoodChargeWithBiasAtNominal += positionDt * current * livetime * efficiency;
+
+ totalChargeWithBiasAtNominalNoError += errorDt * current;
+ totalGatedChargeWithBiasAtNominalNoError += errorDt * current * livetime;
+ totalGoodChargeWithBiasAtNominalNoError += errorDt * current * livetime * efficiency;
}
}
}
@@ -304,11 +374,11 @@
}
if (perRun) {
int nEvents = Integer.parseInt(record.get(9));
- System.out.format("%d\t%s\t%d\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\n", runNum, nominalPosition, nEvents, totalCharge, totalChargeWithBias, totalChargeWithBiasAtNominal, totalGatedCharge, totalGatedChargeWithBias, totalGatedChargeWithBiasAtNominal);
+ System.out.format("%d\t%s\t%d\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\n", runNum, nominalPosition, nEvents, totalCharge, totalChargeWithBias, totalChargeWithBiasAtNominal, totalChargeWithBiasAtNominalNoError, totalGatedCharge, totalGatedChargeWithBias, totalGatedChargeWithBiasAtNominal, totalGatedChargeWithBiasAtNominalNoError, totalGoodCharge, totalGoodChargeWithBias, totalGoodChargeWithBiasAtNominal, totalGoodChargeWithBiasAtNominalNoError);
} else {
int fileNum = Integer.parseInt(record.get(1));
int nEvents = Integer.parseInt(record.get(2));
- System.out.format("%d\t%d\t%s\t%d\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\n", runNum, fileNum, nominalPosition, nEvents, totalCharge, totalChargeWithBias, totalChargeWithBiasAtNominal, totalGatedCharge, totalGatedChargeWithBias, totalGatedChargeWithBiasAtNominal);
+ System.out.format("%d\t%d\t%s\t%d\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\n", runNum, fileNum, nominalPosition, nEvents, totalCharge, totalChargeWithBias, totalChargeWithBiasAtNominal, totalChargeWithBiasAtNominalNoError, totalGatedCharge, totalGatedChargeWithBias, totalGatedChargeWithBiasAtNominal, totalGatedChargeWithBiasAtNominalNoError, totalGoodCharge, totalGoodChargeWithBias, totalGoodChargeWithBiasAtNominal, totalGoodChargeWithBiasAtNominalNoError);
}
}
} catch (Exception ex) {
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/TridentMCFilter.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/TridentMCFilter.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/meeg/TridentMCFilter.java Wed Apr 27 11:11:32 2016
@@ -16,8 +16,45 @@
*/
public class TridentMCFilter extends EventReconFilter {
+ private boolean requireFrontHits = false;
+ private double minL12Kink = -1;
+ private double maxL12Kink = -1;
+ private double minL1Kink = -1;
+ private double maxL1Kink = -1;
+ private double minL2Kink = -1;
+ private double maxL2Kink = -1;
+
+ public void setMinL12Kink(double minL12Kink) {
+ this.minL12Kink = minL12Kink;
+ }
+
+ public void setMaxL12Kink(double maxL12Kink) {
+ this.maxL12Kink = maxL12Kink;
+ }
+
+ public void setMinL1Kink(double minL1Kink) {
+ this.minL1Kink = minL1Kink;
+ }
+
+ public void setMaxL1Kink(double maxL1Kink) {
+ this.maxL1Kink = maxL1Kink;
+ }
+
+ public void setMinL2Kink(double minL2Kink) {
+ this.minL2Kink = minL2Kink;
+ }
+
+ public void setMaxL2Kink(double maxL2Kink) {
+ this.maxL2Kink = maxL2Kink;
+ }
+
+ public void setRequireFrontHits(boolean requireFrontHits) {
+ this.requireFrontHits = requireFrontHits;
+ }
+
@Override
public void process(EventHeader event) {
+ incrementEventProcessed();
List<MCParticle> MCParticles = event.getMCParticles();
List<MCParticle> tridentParticles = null;
@@ -39,7 +76,11 @@
int nElectronsWithTracks = 0, nPositronsWithTracks = 0;
MCParticle electron = null, positron = null;
+ particleLoop:
for (MCParticle particle : tridentParticles) {
+ if (!trackHitMap.containsKey(particle)) {
+ continue;
+ }
Set<Integer> layers = trackHitMap.get(particle).keySet();
int pairCount = 0;
for (Integer layer : layers) {
@@ -47,31 +88,74 @@
pairCount++;
}
}
- boolean hasTrack = (pairCount >= 5);
+ if (pairCount < 5) {
+ continue;
+ }
+ if (requireFrontHits) {
+ for (int i = 1; i < 5; i++) {
+ if (!layers.contains(i)) {
+ continue particleLoop;
+ }
+ }
+ }
- if (hasTrack && particle.getCharge() < 0) {
+ if (particle.getCharge() < 0) {
nElectronsWithTracks++;
electron = particle;
}
- if (hasTrack && particle.getCharge() > 0) {
+ if (particle.getCharge() > 0) {
nPositronsWithTracks++;
positron = particle;
}
}
if (electron == null || positron == null) {
- System.out.println("not enough trident daughters with tracks");
+// System.out.println("not enough trident daughters with tracks");
skipEvent();
}
if (nElectronsWithTracks > 1 || nPositronsWithTracks > 1) {
- System.out.println("too many trident daughters with tracks");
+// System.out.println("too many trident daughters with tracks");
skipEvent();
}
-// double deflection12_ele = KinkAnalysisDriver.deflection(trackHitMap.get(electron), 0, 4);
-// double deflection12_pos = KinkAnalysisDriver.deflection(trackHitMap.get(positron), 0, 4);
+ double deflection12_ele = KinkAnalysisDriver.deflection(trackHitMap.get(electron), 0, 4);
+ double deflection12_pos = KinkAnalysisDriver.deflection(trackHitMap.get(positron), 0, 4);
+ double deflection1_ele = KinkAnalysisDriver.deflection(trackHitMap.get(electron), 0, 2);
+ double deflection1_pos = KinkAnalysisDriver.deflection(trackHitMap.get(positron), 0, 2);
+ double deflection2_ele = KinkAnalysisDriver.deflection(trackHitMap.get(electron), 2, 4);
+ double deflection2_pos = KinkAnalysisDriver.deflection(trackHitMap.get(positron), 2, 4);
+ if (minL12Kink > 0) {
+ if (Math.abs(deflection12_ele) < minL12Kink && Math.abs(deflection12_pos) < minL12Kink) {
+ skipEvent();
+ }
+ }
+ if (maxL12Kink > 0) {
+ if (Math.abs(deflection12_ele) > maxL12Kink || Math.abs(deflection12_pos) > maxL12Kink) {
+ skipEvent();
+ }
+ }
+ if (minL1Kink > 0) {
+ if (Math.abs(deflection1_ele) < minL1Kink && Math.abs(deflection1_pos) < minL1Kink) {
+ skipEvent();
+ }
+ }
+ if (maxL1Kink > 0) {
+ if (Math.abs(deflection1_ele) > maxL1Kink || Math.abs(deflection1_pos) > maxL1Kink) {
+ skipEvent();
+ }
+ }
+ if (minL2Kink > 0) {
+ if (Math.abs(deflection2_ele) < minL2Kink && Math.abs(deflection2_pos) < minL2Kink) {
+ skipEvent();
+ }
+ }
+ if (maxL2Kink > 0) {
+ if (Math.abs(deflection2_ele) > maxL2Kink || Math.abs(deflection2_pos) > maxL2Kink) {
+ skipEvent();
+ }
+ }
+
incrementEventPassed();
-
}
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/ExamplePlotter.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/ExamplePlotter.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/ExamplePlotter.java Wed Apr 27 11:11:32 2016
@@ -4,6 +4,7 @@
import hep.aida.IHistogram1D;
import hep.aida.IPlotter;
import hep.aida.IPlotterStyle;
+import hep.physics.vec.BasicHep3Vector;
import hep.physics.vec.Hep3Vector;
import java.io.IOException;
@@ -12,9 +13,10 @@
import java.util.logging.Logger;
import org.hps.recon.tracking.BeamlineConstants;
-import org.hps.recon.tracking.HPSTrack;
+import org.hps.recon.tracking.HpsHelicalTrackFit;
import org.hps.recon.tracking.HelixConverter;
import org.hps.recon.tracking.StraightLineTrack;
+import org.hps.recon.tracking.TrackUtils;
import org.lcsim.event.EventHeader;
import org.lcsim.event.Track;
import org.lcsim.fit.helicaltrack.HelicalTrackFit;
@@ -76,15 +78,9 @@
aida.histogram1D("Track Momentum (Pz)").fill(trk.getTrackStates().get(0).getMomentum()[0]);
aida.histogram1D("Track Chi2").fill(trk.getChi2());
- SeedTrack stEle = (SeedTrack) trk;
- SeedCandidate seedEle = stEle.getSeedCandidate();
- HelicalTrackFit ht = seedEle.getHelix();
- HelixConverter converter = new HelixConverter(0);
- StraightLineTrack slt = converter.Convert(ht);
- HPSTrack hpstrack = new HPSTrack(ht);
- Hep3Vector[] trkatconver = hpstrack.getPositionAtZMap(100, BeamlineConstants.HARP_POSITION_TESTRUN, 1);
- aida.histogram1D("X (mm) @ Converter").fill(trkatconver[0].x()); // y tracker frame?
- aida.histogram1D("Y (mm) @ Converter").fill(trkatconver[0].y()); // z tracker frame?
+ Hep3Vector trkatconver = new BasicHep3Vector(TrackUtils.extrapolateTrackUsingFieldMap(trk, 100.0, BeamlineConstants.HARP_POSITION_TESTRUN, 5.0, event.getDetector().getFieldMap()).getReferencePoint());
+ aida.histogram1D("X (mm) @ Converter").fill(trkatconver.x()); // y tracker frame?
+ aida.histogram1D("Y (mm) @ Converter").fill(trkatconver.y()); // z tracker frame?
}
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/HPSTrackerHit.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/HPSTrackerHit.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/HPSTrackerHit.java Wed Apr 27 11:11:32 2016
@@ -16,23 +16,23 @@
double t0;
double amp;
public HPSTrackerHit(
- long id,
- int time,
- short[] adcValues, double t0, double Amp) {
- this.cellId = id;
- this.packedID = new Identifier(id);
- this.time = time;
- this.adcValues = adcValues;
+ long id,
+ int time,
+ short[] adcValues, double t0, double Amp) {
+ this.cellId = id;
+ this.packedID = new Identifier(id);
+ this.time = time;
+ this.adcValues = adcValues;
this.t0=t0;
this.amp=Amp;
}
public HPSTrackerHit(
- RawTrackerHit rth, double t0, double Amp) {
- this.cellId = rth.getCellID();
- this.packedID = new Identifier(rth.getCellID());
- this.time = rth.getTime();
- this.adcValues = rth.getADCValues();
+ RawTrackerHit rth, double t0, double Amp) {
+ this.cellId = rth.getCellID();
+ this.packedID = new Identifier(rth.getCellID());
+ this.time = rth.getTime();
+ this.adcValues = rth.getADCValues();
this.t0=t0;
this.amp=Amp;
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/HelicalTrackHitResidualsDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/HelicalTrackHitResidualsDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/HelicalTrackHitResidualsDriver.java Wed Apr 27 11:11:32 2016
@@ -16,10 +16,11 @@
import java.util.logging.Logger;
+
//===> import org.hps.conditions.deprecated.SvtUtils;
import org.hps.recon.tracking.EventQuality;
import org.hps.recon.tracking.TrackUtils;
-import org.hps.users.phansson.TrigRateDriver;
+import org.hps.users.phansson.testrun.TrigRateDriver;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.event.EventHeader;
import org.lcsim.event.RawTrackerHit;
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/SVTRawTrackerHitThresholdDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/SVTRawTrackerHitThresholdDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/SVTRawTrackerHitThresholdDriver.java Wed Apr 27 11:11:32 2016
@@ -15,7 +15,7 @@
* @author Matt Graham
*/
// TODO: Check that this Driver works as expected after it was updated to use
-// the database conditions system.
+// the database conditions system.
public class SVTRawTrackerHitThresholdDriver extends Driver {
private String rawTrackerHitCollectionName = "RawTrackerHitMaker_RawTrackerHits";
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/TrackExtrapolationAnalysis.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/TrackExtrapolationAnalysis.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/TrackExtrapolationAnalysis.java Wed Apr 27 11:11:32 2016
@@ -5,13 +5,14 @@
import hep.aida.IHistogram2D;
import hep.aida.IPlotter;
import hep.aida.IPlotterStyle;
+import hep.physics.vec.BasicHep3Vector;
import hep.physics.vec.Hep3Vector;
import java.util.List;
import java.util.Map;
import org.hps.recon.tracking.BeamlineConstants;
-import org.hps.recon.tracking.HPSTrack;
+import org.hps.recon.tracking.HpsHelicalTrackFit;
import org.hps.recon.tracking.HelixConverter;
import org.hps.recon.tracking.StraightLineTrack;
import org.hps.recon.tracking.TrackUtils;
@@ -99,10 +100,9 @@
charge = 0;//make plot look pretty
// System.out.println("Charge = " + charge + "; isTop = " + isTop);
- HPSTrack hpstrk=null;
- hpstrk = new HPSTrack(ht);
-// Hep3Vector posAtConv = hpstrk.getPositionAtZ(zAtConverter, -101, -100, 0.1);
- Hep3Vector posAtConv = hpstrk.getPositionAtZMap(100,BeamlineConstants.HARP_POSITION_TESTRUN , 5.0)[0];
+ HpsHelicalTrackFit hpstrk=null;
+ hpstrk = new HpsHelicalTrackFit(ht);
+ Hep3Vector posAtConv = new BasicHep3Vector(TrackUtils.extrapolateTrackUsingFieldMap(trk, 100, BeamlineConstants.HARP_POSITION_TESTRUN, 5.0, event.getDetector().getFieldMap()).getReferencePoint());
double useThisx=posAtConv.x();
double useThisy=posAtConv.y();
@@ -123,7 +123,7 @@
aida.histogram1D("Negative Y (mm) @ Converter").fill(useThisy);
}
// Hep3Vector posAtConvShort = hpstrk.getPositionAtZ(zAtConverter, -0.1, 0, 0.01);
- Hep3Vector posAtConvShort = hpstrk.getPositionAtZMap(0,BeamlineConstants.HARP_POSITION_TESTRUN, 5.0)[0];
+ Hep3Vector posAtConvShort = new BasicHep3Vector(TrackUtils.extrapolateTrackUsingFieldMap(trk, 0, BeamlineConstants.HARP_POSITION_TESTRUN, 5.0, event.getDetector().getFieldMap()).getReferencePoint());
aida.histogram2D("Extrapolated X: short vs long fringe").fill(posAtConvShort.x(), posAtConv.x());
aida.histogram2D("Extrapolated Y: short vs long fringe").fill(posAtConvShort.y(), posAtConv.y());
@@ -144,7 +144,8 @@
// Hep3Vector posAtEcalHPS = hpstrk.getPositionAtZMap(750,zCluster, 5.0);
double zCluster=clust.getPosition()[2];
// double zCluster=1450.0;
- Hep3Vector posAtEcalHPS = hpstrk.getPositionAtZMap(750,zCluster, 5.0)[0];
+ Hep3Vector posAtEcalHPS = new BasicHep3Vector(TrackUtils.extrapolateTrackUsingFieldMap(trk, 750, zCluster, 5.0, event.getDetector().getFieldMap()).getReferencePoint());
+
Hep3Vector posAtEcalExtend= TrackUtils.extrapolateTrack(trk,zCluster);
aida.histogram2D("ECal Extrapolation X : HPS vs Extend").fill( posAtEcalExtend.y(),posAtEcalHPS.x()-posAtEcalExtend.y());
aida.histogram2D("ECal Extrapolation Y : HPS vs Extend").fill( posAtEcalExtend.z(),posAtEcalHPS.y()-posAtEcalExtend.z());
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/TwoTrackAnalysis.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/TwoTrackAnalysis.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/mgraham/TwoTrackAnalysis.java Wed Apr 27 11:11:32 2016
@@ -17,10 +17,12 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.apache.commons.lang3.NotImplementedException;
import org.hps.recon.tracking.BeamlineConstants;
-import org.hps.recon.tracking.HPSTrack;
+import org.hps.recon.tracking.HpsHelicalTrackFit;
import org.hps.recon.tracking.HelixConverter;
import org.hps.recon.tracking.StraightLineTrack;
+import org.hps.recon.tracking.TrackUtils;
import org.lcsim.event.EventHeader;
import org.lcsim.event.Track;
import org.lcsim.event.TrackerHit;
@@ -273,10 +275,11 @@
HelicalTrackFit ht = seedEle.getHelix();
HelixConverter converter = new HelixConverter(0);
StraightLineTrack slt = converter.Convert(ht);
- HPSTrack hpstrack = new HPSTrack(ht);
- Hep3Vector[] trkatconver = hpstrack.getPositionAtZMap(100, BeamlineConstants.HARP_POSITION_TESTRUN, 1);
- aida.histogram1D("X (mm) @ Converter using Map").fill(trkatconver[0].x()); // y tracker frame?
- aida.histogram1D("Y (mm) @ Converter using Map").fill(trkatconver[0].y()); // z tracker frame?
+ HpsHelicalTrackFit hpstrack = new HpsHelicalTrackFit(ht);
+ Hep3Vector trkatconver = new BasicHep3Vector(TrackUtils.extrapolateTrackUsingFieldMap(trk, 100, BeamlineConstants.HARP_POSITION_TESTRUN, 1, event.getDetector().getFieldMap()).getReferencePoint());
+
+ aida.histogram1D("X (mm) @ Converter using Map").fill(trkatconver.x()); // y tracker frame?
+ aida.histogram1D("Y (mm) @ Converter using Map").fill(trkatconver.y()); // z tracker frame?
if (slt != null) {
aida.histogram1D("X (mm) @ Converter using SLT").fill(slt.getYZAtX(BeamlineConstants.HARP_POSITION_TESTRUN)[0]); // y tracker frame?
aida.histogram1D("Y (mm) @ Converter using SLT").fill(slt.getYZAtX(BeamlineConstants.HARP_POSITION_TESTRUN)[1]); // z tracker frame?
@@ -320,9 +323,9 @@
// HPSTrack hpstrack2 = new HPSTrack(ht2);
// Hep3Vector[] trkatconver2 = hpstrack2.getPositionAtZMap(100, BeamlineConstants.HARP_POSITION, 1);
- HPSTrack hpstrack1 = new HPSTrack(ht1);
+ HpsHelicalTrackFit hpstrack1 = new HpsHelicalTrackFit(ht1);
Hep3Vector[] trkatconver1 = {new BasicHep3Vector(), new BasicHep3Vector(0, 0, 0)};
- HPSTrack hpstrack2 = new HPSTrack(ht2);
+ HpsHelicalTrackFit hpstrack2 = new HpsHelicalTrackFit(ht2);
Hep3Vector[] trkatconver2 = {new BasicHep3Vector(), new BasicHep3Vector(0, 0, 0)};;
if (isMC) {
double[] t1 = slt1.getYZAtX(BeamlineConstants.HARP_POSITION_TESTRUN);
@@ -330,8 +333,9 @@
trkatconver1[0] = new BasicHep3Vector(t1[0], t1[1], BeamlineConstants.HARP_POSITION_TESTRUN);
trkatconver2[0] = new BasicHep3Vector(t2[0], t2[1], BeamlineConstants.HARP_POSITION_TESTRUN);
} else {
- trkatconver1 = hpstrack1.getPositionAtZMap(100, BeamlineConstants.HARP_POSITION_TESTRUN, 1);
- trkatconver2 = hpstrack2.getPositionAtZMap(100, BeamlineConstants.HARP_POSITION_TESTRUN, 1);
+ throw new NotImplementedException("Need to implement using TrackUtils to extrapolate tracks!");
+ //trkatconver1 = hpstrack1.getPositionAtZMap(100, BeamlineConstants.HARP_POSITION_TESTRUN, 1);
+ //trkatconver2 = hpstrack2.getPositionAtZMap(100, BeamlineConstants.HARP_POSITION_TESTRUN, 1);
}
List<TrackerHit> hitsOnTrack1 = trk1.getTrackerHits();
int layer1;
@@ -508,9 +512,10 @@
posvec[1] = slt1.getYZAtX(z)[1];
posvec[2] = z;
} else {
- Hep3Vector[] trk1atz = hpstrack1.getPositionAtZMap(100, z, 1);
- posvec[0] = trk1atz[0].x();
- posvec[1] = trk1atz[0].y();
+ Hep3Vector trk1atz = new BasicHep3Vector(TrackUtils.extrapolateTrackUsingFieldMap(trk1, 100, z, 1, event.getDetector().getFieldMap()).getReferencePoint());
+
+ posvec[0] = trk1atz.x();
+ posvec[1] = trk1atz.y();
posvec[2] = z;
}
Trk1.add(posvec);
@@ -592,9 +597,10 @@
posvec2[1] = slt2.getYZAtX(z)[1];
posvec2[2] = z;
} else {
- Hep3Vector[] trk2atz = hpstrack2.getPositionAtZMap(100, z, 1);
- posvec2[0] = trk2atz[0].x();
- posvec2[1] = trk2atz[0].y();
+ Hep3Vector trk2atz = new BasicHep3Vector(TrackUtils.extrapolateTrackUsingFieldMap(trk2, 100, z, 1, event.getDetector().getFieldMap()).getReferencePoint());
+
+ posvec2[0] = trk2atz.x();
+ posvec2[1] = trk2atz.y();
posvec2[2] = z;
}
Trk2.add(posvec2);
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/EcalScoringPlaneDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/EcalScoringPlaneDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/EcalScoringPlaneDriver.java Wed Apr 27 11:11:32 2016
@@ -23,136 +23,136 @@
*/
public class EcalScoringPlaneDriver extends Driver {
- boolean verbose = false;
-
- // Collection Names
- String ecalScoringPlaneHitsCollectionName = "TrackerHitsECal";
- String tracksCollectionName = "MatchedTracks";
- String trackToScoringPlaneHitRelationsName = "TrackToEcalScoringPlaneHitRelations";
- String trackToMCParticleRelationsName = "TrackToMCParticleRelations";
-
- /**
- * Enable/disable verbose mode
- *
- * @param verbose : set true to enable, false otherwise
- */
- public void setVerbose(boolean verbose){
- this.verbose = verbose;
- }
-
- @Override
- protected void process(EventHeader event){
-
- // If the event doesn't have a collection of Tracks, skip it
- if(!event.hasCollection(Track.class, tracksCollectionName)) return;
-
- // If the event doesn't have a collection of Ecal scoring plane hits,
- // skip it
- if(!event.hasCollection(SimTrackerHit.class, ecalScoringPlaneHitsCollectionName)) return;
-
- // Get the collection of tracks from the event
- List<Track> tracks = event.get(Track.class, tracksCollectionName);
-
- // Get the collection of Ecal scoring plane hits from the event
- List<SimTrackerHit> scoringPlaneHits = event.get(SimTrackerHit.class, ecalScoringPlaneHitsCollectionName);
-
- // Create a collection to hold the scoring plane hits that were found to match
- // a track
- List<SimTrackerHit> matchedScoringPlaneHits = new ArrayList<SimTrackerHit>();
-
- // Create a collection of LCRelations between a track and the scoring plane hit
- List<LCRelation> trackToScoringPlaneHitRelations = new ArrayList<LCRelation>();
-
- // Create a collection of LCRelations between a track and its corresponding MC particle
- List<LCRelation> trackToMCParticleRelations = new ArrayList<LCRelation>();
-
- MCParticle particle = null;
- for(Track track : tracks){
-
- // Get the MC particle associated with this track
- particle = this.getMCParticleAssociatedWithTrack(track);
- // If the MC particle is null, then the hits associated with the
- // track did not have an MC particle associated with them
- // TODO: Find out why some hits don't have any MC particles associated with them
- if(particle == null) continue;
-
- // Add an LCRelation between the track and the corresponding MC particle
- trackToMCParticleRelations.add(new BaseLCRelation(track, particle));
-
- // Loop over all of the scoring plane hits and check if the associated MC particle
- // matches the one from the track
- for(SimTrackerHit scoringPlaneHit : scoringPlaneHits){
-
- // If the MC particles don't match, move on to the next particle
- if(!(scoringPlaneHit.getMCParticle() == particle)) continue;
-
- this.printVerbose("Found a match between a track and a scoring plane hit.");
-
- // If a match is found, add the scoring plane hit to the list of matched hits and
- // an LCRelation between the track and the scoring plane.
- matchedScoringPlaneHits.add(scoringPlaneHit);
- trackToScoringPlaneHitRelations.add(new BaseLCRelation(track, scoringPlaneHit));
-
- // Once a match is found, there is no need to loop through the rest of the list
- break;
- }
- }
-
- // Store all of the collections in the event
- event.put(ecalScoringPlaneHitsCollectionName, matchedScoringPlaneHits, SimTrackerHit.class, 0);
- event.put(trackToScoringPlaneHitRelationsName, trackToScoringPlaneHitRelations, LCRelation.class, 0);
- event.put(trackToMCParticleRelationsName, trackToMCParticleRelations, LCRelation.class, 0);
- }
-
- /**
- * Print a message if verbose has been enabled.
- *
- * @param message : message to print.
- */
- private void printVerbose(String message){
- if(verbose)
- System.out.println(this.getClass().getSimpleName() + ": " + message);
- }
-
- /**
- * Get the MC particle associated with a track.
- *
- * @param track : Track to get the MC particle for
- * @return The MC particle associated with the track
- */
- private MCParticle getMCParticleAssociatedWithTrack(Track track){
-
- Map <MCParticle, int[]>mcParticleMultiplicity = new HashMap<MCParticle, int[]>();
- MCParticle particle;
- for(TrackerHit hit : track.getTrackerHits()){
-
- // If one of the tracker hits doesn't have any MC particles associated
- // with it, return null for now.
- if(((HelicalTrackHit) hit).getMCParticles().size() == 0){
- this.printVerbose("HelicalTrackHit is not associated with any MC particles.");
- return null;
- }
-
- particle = ((HelicalTrackHit) hit).getMCParticles().get(0);
- if(!mcParticleMultiplicity.containsKey(particle)){
- mcParticleMultiplicity.put(particle, new int[1]);
- mcParticleMultiplicity.get(particle)[0] = 0;
- }
-
- mcParticleMultiplicity.get(particle)[0]++;
-
- }
-
- // Look for the MC particle that occurs the most of the track
- int maxValue = 0;
- particle = null;
- for(Map.Entry<MCParticle, int[]> entry : mcParticleMultiplicity.entrySet()){
- if(maxValue < entry.getValue()[0]){
- particle = entry.getKey();
- maxValue = entry.getValue()[0];
- }
- }
-
- return particle;
- }
+ boolean verbose = false;
+
+ // Collection Names
+ String ecalScoringPlaneHitsCollectionName = "TrackerHitsECal";
+ String tracksCollectionName = "MatchedTracks";
+ String trackToScoringPlaneHitRelationsName = "TrackToEcalScoringPlaneHitRelations";
+ String trackToMCParticleRelationsName = "TrackToMCParticleRelations";
+
+ /**
+ * Enable/disable verbose mode
+ *
+ * @param verbose : set true to enable, false otherwise
+ */
+ public void setVerbose(boolean verbose){
+ this.verbose = verbose;
+ }
+
+ @Override
+ protected void process(EventHeader event){
+
+ // If the event doesn't have a collection of Tracks, skip it
+ if(!event.hasCollection(Track.class, tracksCollectionName)) return;
+
+ // If the event doesn't have a collection of Ecal scoring plane hits,
+ // skip it
+ if(!event.hasCollection(SimTrackerHit.class, ecalScoringPlaneHitsCollectionName)) return;
+
+ // Get the collection of tracks from the event
+ List<Track> tracks = event.get(Track.class, tracksCollectionName);
+
+ // Get the collection of Ecal scoring plane hits from the event
+ List<SimTrackerHit> scoringPlaneHits = event.get(SimTrackerHit.class, ecalScoringPlaneHitsCollectionName);
+
+ // Create a collection to hold the scoring plane hits that were found to match
+ // a track
+ List<SimTrackerHit> matchedScoringPlaneHits = new ArrayList<SimTrackerHit>();
+
+ // Create a collection of LCRelations between a track and the scoring plane hit
+ List<LCRelation> trackToScoringPlaneHitRelations = new ArrayList<LCRelation>();
+
+ // Create a collection of LCRelations between a track and its corresponding MC particle
+ List<LCRelation> trackToMCParticleRelations = new ArrayList<LCRelation>();
+
+ MCParticle particle = null;
+ for(Track track : tracks){
+
+ // Get the MC particle associated with this track
+ particle = this.getMCParticleAssociatedWithTrack(track);
+ // If the MC particle is null, then the hits associated with the
+ // track did not have an MC particle associated with them
+ // TODO: Find out why some hits don't have any MC particles associated with them
+ if(particle == null) continue;
+
+ // Add an LCRelation between the track and the corresponding MC particle
+ trackToMCParticleRelations.add(new BaseLCRelation(track, particle));
+
+ // Loop over all of the scoring plane hits and check if the associated MC particle
+ // matches the one from the track
+ for(SimTrackerHit scoringPlaneHit : scoringPlaneHits){
+
+ // If the MC particles don't match, move on to the next particle
+ if(!(scoringPlaneHit.getMCParticle() == particle)) continue;
+
+ this.printVerbose("Found a match between a track and a scoring plane hit.");
+
+ // If a match is found, add the scoring plane hit to the list of matched hits and
+ // an LCRelation between the track and the scoring plane.
+ matchedScoringPlaneHits.add(scoringPlaneHit);
+ trackToScoringPlaneHitRelations.add(new BaseLCRelation(track, scoringPlaneHit));
+
+ // Once a match is found, there is no need to loop through the rest of the list
+ break;
+ }
+ }
+
+ // Store all of the collections in the event
+ event.put(ecalScoringPlaneHitsCollectionName, matchedScoringPlaneHits, SimTrackerHit.class, 0);
+ event.put(trackToScoringPlaneHitRelationsName, trackToScoringPlaneHitRelations, LCRelation.class, 0);
+ event.put(trackToMCParticleRelationsName, trackToMCParticleRelations, LCRelation.class, 0);
+ }
+
+ /**
+ * Print a message if verbose has been enabled.
+ *
+ * @param message : message to print.
+ */
+ private void printVerbose(String message){
+ if(verbose)
+ System.out.println(this.getClass().getSimpleName() + ": " + message);
+ }
+
+ /**
+ * Get the MC particle associated with a track.
+ *
+ * @param track : Track to get the MC particle for
+ * @return The MC particle associated with the track
+ */
+ private MCParticle getMCParticleAssociatedWithTrack(Track track){
+
+ Map <MCParticle, int[]>mcParticleMultiplicity = new HashMap<MCParticle, int[]>();
+ MCParticle particle;
+ for(TrackerHit hit : track.getTrackerHits()){
+
+ // If one of the tracker hits doesn't have any MC particles associated
+ // with it, return null for now.
+ if(((HelicalTrackHit) hit).getMCParticles().size() == 0){
+ this.printVerbose("HelicalTrackHit is not associated with any MC particles.");
+ return null;
+ }
+
+ particle = ((HelicalTrackHit) hit).getMCParticles().get(0);
+ if(!mcParticleMultiplicity.containsKey(particle)){
+ mcParticleMultiplicity.put(particle, new int[1]);
+ mcParticleMultiplicity.get(particle)[0] = 0;
+ }
+
+ mcParticleMultiplicity.get(particle)[0]++;
+
+ }
+
+ // Look for the MC particle that occurs the most of the track
+ int maxValue = 0;
+ particle = null;
+ for(Map.Entry<MCParticle, int[]> entry : mcParticleMultiplicity.entrySet()){
+ if(maxValue < entry.getValue()[0]){
+ particle = entry.getKey();
+ maxValue = entry.getValue()[0];
+ }
+ }
+
+ return particle;
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/ExtrapolationAnalysis.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/ExtrapolationAnalysis.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/ExtrapolationAnalysis.java Wed Apr 27 11:11:32 2016
@@ -30,312 +30,312 @@
*/
public class ExtrapolationAnalysis extends Driver {
- AIDA aida = null;
- List<IPlotter> plotters;
-
- Hep3Vector bField = null;
-
- boolean verbose = false;
-
- // Collection Names
- String matchedEcalScoringPlaneHitsCollectionName = "MatchedTrackerHitsEcal";
- String trackToScoringPlaneHitRelationsName = "TrackToEcalScoringPlaneHitRelations";
- String trackToMCParticleRelationsName = "TrackToMCParticleRelations";
-
- /**
- * Enable/disable verbose mode
- *
- * @param verbose : true to enable, false otherwise
- */
- public void setVerbose(boolean verbose){
- this.verbose = verbose;
- }
-
- public void detectorChanged(Detector detector){
-
- // Get the magnetic field from the geometry
- bField = detector.getFieldMap().getField(new BasicHep3Vector(0,0,0));
-
- //-----------------------//
- //--- Setup all plots ---//
- //-----------------------//
-
- // Setup AIDA
- aida = AIDA.defaultInstance();
- aida.tree().cd("/");
-
- // Instantiate a list to hold the collection of plotters
- plotters = new ArrayList<IPlotter>();
- IPlotter plotter = null;
-
- //--- Plots of scoring plane positions ---//
- //----------------------------------------//
- plotter = PlotUtils.setupPlotter("Positions of Scoring plane hits matched to tracks", 2, 2);
- PlotUtils.setup1DRegion(plotter, "Scoring plane hit position - x", 0, "x (mm)",
- aida.histogram1D("Scoring plane hit position - x", 100, -400, 400));
- PlotUtils.setup1DRegion(plotter, "Scoring plane hit position - y", 1, "y (mm)",
- aida.histogram1D("Scoring plane hit position - y", 100, -200, 200));
- PlotUtils.setup1DRegion(plotter, "Scoring plane hit position - z", 2, "z (mm)",
- aida.histogram1D("Scoring plane hit position - z", 100, 1000, 1500));
- PlotUtils.setup2DRegion(plotter, "Scoring plane hit position - x-y", 3, "x (mm)", "y (mm)",
- aida.histogram2D("Scoring plane hit position - x-y", 100, -400, 400, 100, -200, 200));
- plotters.add(plotter);
-
- //--- Plots of residuals at scoring plane ---//
- //-------------------------------------------//
- plotter = PlotUtils.setupPlotter("Residuals at scoring plane", 3, 3);
- PlotUtils.setup1DRegion(plotter, "Top tracks - Bend plane residuals at scoring plane", 0, "x_{ep} - x_{sp} (mm)",
- aida.histogram1D("Top electron tracks - Bend plane residual at scoring plane", 60, -30, 30));
- PlotUtils.setup1DRegion(plotter, "Top tracks - Bend plane residuals at scoring plane", 0, "x_{ep} - x_{sp} (mm)",
- aida.histogram1D("Top positron tracks - Bend plane residual at scoring plane", 60, -30, 30));
- PlotUtils.setup1DRegion(plotter, "Bottom tracks - Bend plane residuals at scoring plane", 1, "x_{ep} - x_{sp} (mm)",
- aida.histogram1D("Bottom electron tracks - Bend plane residuals at scoring plane", 60, -30, 30));
- PlotUtils.setup1DRegion(plotter, "Bottom tracks - Bend plane residuals at scoring plane", 1, "x_{ep} - x_{sp} (mm)",
- aida.histogram1D("Bottom positron tracks - Bend plane residuals at scoring plane", 60, -30, 30));
- PlotUtils.setup1DRegion(plotter, "Bend plane residuals at scoring plane", 2, "x_{ep} - x_{sp} (mm)",
- aida.histogram1D("Bend plane residuals at scoring plane", 60, -30, 30));
- PlotUtils.setup1DRegion(plotter, "Bend plane residuals at scoring plane", 2, "x_{ep} - x_{sp} (mm)",
- aida.histogram1D("Bend plane residuals at scoring plane", 60, -30, 30));
- PlotUtils.setup1DRegion(plotter, "Top tracks - Non-bend plane residuals at scoring plane", 3,"y_{ep} - y_{sp} (mm)",
- aida.histogram1D("Top electron tracks - Non-bend plane residuals at scoring plane", 30, -15, 15));
- PlotUtils.setup1DRegion(plotter, "Top tracks - Non-bend plane residuals at scoring plane", 3,"y_{ep} - y_{sp} (mm)",
- aida.histogram1D("Top positron tracks - Non-bend plane residuals at scoring plane", 30, -15, 15));
- PlotUtils.setup1DRegion(plotter, "Bottom tracks - Non-bend plane residuals at scoring plane", 4,"y_{ep} - y_{sp} (mm)",
- aida.histogram1D("Bottom electron tracks - Non-bend plane residuals at scoring plane", 30, -15, 15));
- PlotUtils.setup1DRegion(plotter, "Bottom tracks - Non-bend plane residuals at scoring plane", 4,"y_{ep} - y_{sp} (mm)",
- aida.histogram1D("Bottom positron tracks - Non-bend plane residuals at scoring plane", 30, -15, 15));
- PlotUtils.setup1DRegion(plotter, "Non-bend plane residuals at scoring plane", 5,"y_{ep} - y_{sp} (mm)",
- aida.histogram1D("Non-bend plane residuals at scoring plane", 30, -15, 15));
- PlotUtils.setup1DRegion(plotter, "Top tracks - z residuals at scoring plane", 6, "z_{ep} - z_{sp} (mm)",
- aida.histogram1D("Top electron tracks - z residuals at scoring plane", 10, -5, 5));
- PlotUtils.setup1DRegion(plotter, "Top tracks - z residuals at scoring plane", 6, "z_{ep} - z_{sp} (mm)",
- aida.histogram1D("Top positron tracks - z residuals at scoring plane", 10, -5, 5));
- PlotUtils.setup1DRegion(plotter, "Bottom tracks - z residuals at scoring plane", 7, "z_{ep} - z_{sp} (mm)",
- aida.histogram1D("Bottom electron tracks - z residuals at scoring plane", 10, -5, 5));
- PlotUtils.setup1DRegion(plotter, "Bottom tracks - z residuals at scoring plane", 7, "z_{ep} - z_{sp} (mm)",
- aida.histogram1D("Bottom positron tracks - z residuals at scoring plane", 10, -5, 5));
- PlotUtils.setup1DRegion(plotter, "z residuals at scoring plane", 7, "z_{ep} - z_{sp} (mm)",
- aida.histogram1D("z residuals at scoring plane", 10, -5, 5));
- plotters.add(plotter);
-
- //--- Plots of residuals at target ---//
- //------------------------------------//
- plotter = PlotUtils.setupPlotter("Residuals at target", 3, 3);
- PlotUtils.setup1DRegion(plotter, "Top tracks - Bend plane residuals at target", 0, "x_{ep} - x_{t} (mm)",
- aida.histogram1D("Top electron tracks - Bend plane residual at target", 40, -4, 4));
- PlotUtils.setup1DRegion(plotter, "Top tracks - Bend plane residuals at target", 0, "x_{ep} - x_{t} (mm)",
- aida.histogram1D("Top positron tracks - Bend plane residual at target", 40, -4, 4));
- PlotUtils.setup1DRegion(plotter, "Bottom tracks - Bend plane residuals at target", 1, "x_{ep} - x_{t} (mm)",
- aida.histogram1D("Bottom electron tracks - Bend plane residuals at target", 40, -4, 4));
- PlotUtils.setup1DRegion(plotter, "Bottom tracks - Bend plane residuals at target", 1, "x_{ep} - x_{t} (mm)",
- aida.histogram1D("Bottom positron tracks - Bend plane residuals at target", 40, -4, 4));
- PlotUtils.setup1DRegion(plotter, "Bend plane residuals at target", 2, "x_{ep} - x_{t} (mm)",
- aida.histogram1D("Bend plane residuals at target", 40, -4, 4));
- PlotUtils.setup1DRegion(plotter, "Top tracks - Non-bend plane residuals at target", 3, "y_{ep} - y_{t} (mm)",
- aida.histogram1D("Top electron tracks - Non-bend plane residuals at target", 20, -2, 2));
- PlotUtils.setup1DRegion(plotter, "Top tracks - Non-bend plane residuals at target", 3, "y_{ep} - y_{t} (mm)",
- aida.histogram1D("Top positron tracks - Non-bend plane residuals at target", 20, -2, 2));
- PlotUtils.setup1DRegion(plotter, "Bottom tracks - Non-bend plane residuals at target", 4, "y_{ep} - y_{t} (mm)",
- aida.histogram1D("Bottom electron tracks - Non-bend plane residuals at target", 20, -2, 2));
- PlotUtils.setup1DRegion(plotter, "Bottom tracks - Non-bend plane residuals at target", 4, "y_{ep} - y_{t} (mm)",
- aida.histogram1D("Bottom positron tracks - Non-bend plane residuals at target", 20, -2, 2));
- PlotUtils.setup1DRegion(plotter, "Non-bend plane residuals at target", 5, "y_{ep} - y_{t} (mm)",
- aida.histogram1D("Non-bend plane residuals at target", 20, -2, 2));
- PlotUtils.setup1DRegion(plotter, "Top tracks - z residuals at target", 6, "z_{ep} - z_{t} (mm)",
- aida.histogram1D("Top electron tracks - z residuals at target", 50, -5, 5));
- PlotUtils.setup1DRegion(plotter, "Top tracks - z residuals at target", 6, "z_{ep} - z_{t} (mm)",
- aida.histogram1D("Top positron tracks - z residuals at target", 50, -5, 5));
- PlotUtils.setup1DRegion(plotter, "Bottom tracks - z residuals at target", 7, "z_{ep} - z_{t} (mm)",
- aida.histogram1D("Bottom electron tracks - z residuals at target", 50, -5, 5));
- PlotUtils.setup1DRegion(plotter, "Bottom tracks - z residuals at target", 7, "z_{ep} - z_{t} (mm)",
- aida.histogram1D("Bottom positron tracks - z residuals at target", 50, -5, 5));
- PlotUtils.setup1DRegion(plotter, "z residuals at target", 8, "z_{ep} - z_{t} (mm)",
- aida.histogram1D("z residuals at target", 50, -5, 5));
- plotters.add(plotter);
-
- //--- Plot of residuals at scoring plane vs momentum ---//
- //------------------------------------------------------//
- plotter = PlotUtils.setupPlotter("Residuals vs Momentum", 2, 2);
- PlotUtils.setup2DRegion(plotter, "Bend plane residuals vs momentum at scoring plane", 0,
- "Momentum (GeV)", "x_{ep} - x_{sp} (mm)",
- aida.histogram2D("Bend plane residuals vs momentum at scoring plane", 5, 0, 2.5, 60, -30, 30));
- PlotUtils.setup2DRegion(plotter, "Non-bend plane residuals vs momentum at scoring plane", 1,
- "Momentum (GeV)", "y_{ep} - y_{sp} (mm)",
- aida.histogram2D("Non-bend plane residuals vs momentum at scoring plane", 5, 0, 2.5, 60, -30, 30));
- PlotUtils.setup2DRegion(plotter, "Bend plane residuals vs momentum at target", 2,
- "Momentum (GeV)", "x_{ep} - x_{t} (mm)",
- aida.histogram2D("Bend plane residuals vs momentum at target", 5, 0, 2.5, 60, -3, 3));
- PlotUtils.setup2DRegion(plotter, "Non-bend plane residuals vs momentum at target", 3,
- "Momentum (GeV)", "y_{ep} - y_{t} (mm)",
- aida.histogram2D("Non-bend plane residuals vs momentum at target", 5, 0, 2.5, 50, -2.5, 2.5));
- plotters.add(plotter);
-
- for(IPlotter iPlotter : plotters){
- iPlotter.show();
- }
-
- }
-
- public void process(EventHeader event){
-
- // If the event doesn't contain an LCRelation between a track and its
- // corresponding ECal scoring plane hit, skip the event.
- if(!event.hasCollection(LCRelation.class, trackToScoringPlaneHitRelationsName)) return;
-
- List<LCRelation> trackToScoringPlaneHitRelations = event.get(LCRelation.class, trackToScoringPlaneHitRelationsName);
-
- for(LCRelation trackToScoringPlaneHitRelation : trackToScoringPlaneHitRelations){
-
- // Get the track
- Track track = (Track) trackToScoringPlaneHitRelation.getFrom();
-
- // Get the track momentum
- double[] momentum = BaseTrackState.computeMomentum(track.getTrackStates().get(0), bField.y());
- double p = Math.sqrt(momentum[0]*momentum[0] + momentum[1]*momentum[1] + momentum[2]*momentum[2]);
- this.printVerbose("Track momentum: " + p);
-
- // Get the corresponding scoring plane hit
- SimTrackerHit scoringPlaneHit = (SimTrackerHit) trackToScoringPlaneHitRelation.getTo();
- Hep3Vector scoringPlaneHitPosition = scoringPlaneHit.getPositionVec();
- this.printVerbose("Scoring plane hit position: " + scoringPlaneHitPosition.toString());
-
- // Fill the scoring plane position histograms
- aida.histogram1D("Scoring plane hit position - x").fill(scoringPlaneHitPosition.x());
- aida.histogram1D("Scoring plane hit position - y").fill(scoringPlaneHitPosition.y());
- aida.histogram1D("Scoring plane hit position - z").fill(scoringPlaneHitPosition.z());
- aida.histogram2D("Scoring plane hit position - x-y").fill(scoringPlaneHitPosition.x(), scoringPlaneHitPosition.y());
-
- // Extrapolate the track to the scoring plane position
- Hep3Vector trackPositionAtScoringPlane = TrackUtils.extrapolateTrack(track, scoringPlaneHitPosition.z());
- this.printVerbose("Extrapolated track position: " + trackPositionAtScoringPlane.toString());
-
- // Find the residual between the extrapolated track position and the scoring plane hit position
- double deltaX = trackPositionAtScoringPlane.x() - scoringPlaneHitPosition.x();
- double deltaY = trackPositionAtScoringPlane.y() - scoringPlaneHitPosition.y();
- // This should be 0 but it serves as a sanity check.
- double deltaZ = trackPositionAtScoringPlane.z() - scoringPlaneHitPosition.z();
-
- if(track.getTrackerHits().get(0).getPosition()[2] > 0){
- if(track.getTrackStates().get(0).getOmega() > 0){
- aida.histogram1D("Top positron tracks - Bend plane residual at scoring plane").fill(deltaX);
- aida.histogram1D("Top positron tracks - Non-bend plane residuals at scoring plane").fill(deltaY);
- aida.histogram1D("Top positron tracks - z residuals at scoring plane").fill(deltaZ);
- } else {
- aida.histogram1D("Top electron tracks - Bend plane residual at scoring plane").fill(deltaX);
- aida.histogram1D("Top electron tracks - Non-bend plane residuals at scoring plane").fill(deltaY);
- aida.histogram1D("Top electron tracks - z residuals at scoring plane").fill(deltaZ);
- }
- } else {
- if(track.getTrackStates().get(0).getOmega() > 0){
- aida.histogram1D("Bottom positron tracks - Bend plane residuals at scoring plane").fill(deltaX);
- aida.histogram1D("Bottom positron tracks - Non-bend plane residuals at scoring plane").fill(deltaY);
- aida.histogram1D("Bottom positron tracks - z residuals at scoring plane").fill(deltaZ);
- } else {
- aida.histogram1D("Bottom electron tracks - Bend plane residuals at scoring plane").fill(deltaX);
- aida.histogram1D("Bottom electron tracks - Non-bend plane residuals at scoring plane").fill(deltaY);
- aida.histogram1D("Bottom electron tracks - z residuals at scoring plane").fill(deltaZ);
- }
- }
-
- aida.histogram1D("Bend plane residuals at scoring plane").fill(deltaX);
- aida.histogram1D("Non-bend plane residuals at scoring plane").fill(deltaY);
- aida.histogram1D("z residuals at target").fill(deltaZ);
- aida.histogram2D("Bend plane residuals vs momentum at scoring plane").fill(p, deltaX);
- aida.histogram2D("Non-bend plane residuals vs momentum at scoring plane").fill(p, deltaY);
- }
-
- if(!event.hasCollection(LCRelation.class, trackToMCParticleRelationsName)) return;
-
- List<LCRelation> trackToMCParticleRelations = event.get(LCRelation.class, trackToMCParticleRelationsName);
-
- for(LCRelation trackToMCParticleRelation : trackToMCParticleRelations){
-
- // Get the track
- Track track = (Track) trackToMCParticleRelation.getFrom();
-
- // Get the track momentum
- double[] momentum = BaseTrackState.computeMomentum(track.getTrackStates().get(0), bField.y());
- double p = Math.sqrt(momentum[0]*momentum[0] + momentum[1]*momentum[1] + momentum[2]*momentum[2]);
- this.printVerbose("Track momentum: " + p);
-
- // Get the corresponding MC particle
- MCParticle particle = (MCParticle) trackToMCParticleRelation.getTo();
-
- // Extrapolate the track to the origin
- Hep3Vector trackPositionAtOrigin = TrackUtils.extrapolateTrack(track, particle.getOriginZ());
-
- // Find the residual between the extrapolated track and the position of the scoring plane at the origin
- double deltaX = trackPositionAtOrigin.x() - particle.getOriginX();
- double deltaY = trackPositionAtOrigin.y() - particle.getOriginY();
- double deltaZ = trackPositionAtOrigin.z() - particle.getOriginZ();
-
- if(track.getTrackerHits().get(0).getPosition()[2] > 0){
- if(track.getTrackStates().get(0).getOmega() > 0){
- aida.histogram1D("Top positron tracks - Bend plane residual at target").fill(deltaX);
- aida.histogram1D("Top positron tracks - Non-bend plane residuals at target").fill(deltaY);
- aida.histogram1D("Top positron tracks - z residuals at target").fill(deltaZ);
- } else {
- aida.histogram1D("Top electron tracks - Bend plane residual at target").fill(deltaX);
- aida.histogram1D("Top electron tracks - Non-bend plane residuals at target").fill(deltaY);
- aida.histogram1D("Top electron tracks - z residuals at target").fill(deltaZ);
- }
- } else {
-
- if(track.getTrackStates().get(0).getOmega() > 0){
- aida.histogram1D("Bottom positron tracks - Bend plane residuals at target").fill(deltaX);
- aida.histogram1D("Bottom positron tracks - Non-bend plane residuals at target").fill(deltaY);
- aida.histogram1D("Bottom positron tracks - z residuals at target").fill(deltaZ);
- } else {
- aida.histogram1D("Bottom electron tracks - Bend plane residuals at target").fill(deltaX);
- aida.histogram1D("Bottom electron tracks - Non-bend plane residuals at target").fill(deltaY);
- aida.histogram1D("Bottom electron tracks - z residuals at target").fill(deltaZ);
- }
- }
-
- aida.histogram1D("Bend plane residuals at target").fill(deltaX);
- aida.histogram1D("Non-bend plane residuals at target").fill(deltaY);
- aida.histogram1D("z residuals at target").fill(deltaZ);
- aida.histogram2D("Bend plane residuals vs momentum at target").fill(p, deltaX);
- aida.histogram2D("Non-bend plane residuals vs momentum at target").fill(p, deltaY);
- }
- }
-
- @Override
- protected void endOfData(){
-
- IHistogram2D histogram = aida.histogram2D("Bend plane residuals vs momentum at scoring plane");
- int binsX = histogram.xAxis().bins();
- for(int binX = 0; binX < binsX; binX++){
- PlotUtils.getYProjection(binX, histogram);
- }
-
- histogram = aida.histogram2D("Non-bend plane residuals vs momentum at scoring plane");
- binsX = histogram.xAxis().bins();
- for(int binX = 0; binX < binsX; binX++){
- PlotUtils.getYProjection(binX, histogram);
- }
-
- histogram = aida.histogram2D("Bend plane residuals vs momentum at target");
- binsX = histogram.xAxis().bins();
- for(int binX = 0; binX < binsX; binX++){
- PlotUtils.getYProjection(binX, histogram);
- }
-
- histogram = aida.histogram2D("Non-bend plane residuals vs momentum at target");
- binsX = histogram.xAxis().bins();
- for(int binX = 0; binX < binsX; binX++){
- PlotUtils.getYProjection(binX, histogram);
- }
- }
-
- /**
- * Print a message if verbose has been enabled.
- *
- * @param message : message to print.
- */
- private void printVerbose(String message){
- if(verbose)
- System.out.println(this.getClass().getSimpleName() + ": " + message);
- }
-
+ AIDA aida = null;
+ List<IPlotter> plotters;
+
+ Hep3Vector bField = null;
+
+ boolean verbose = false;
+
+ // Collection Names
+ String matchedEcalScoringPlaneHitsCollectionName = "MatchedTrackerHitsEcal";
+ String trackToScoringPlaneHitRelationsName = "TrackToEcalScoringPlaneHitRelations";
+ String trackToMCParticleRelationsName = "TrackToMCParticleRelations";
+
+ /**
+ * Enable/disable verbose mode
+ *
+ * @param verbose : true to enable, false otherwise
+ */
+ public void setVerbose(boolean verbose){
+ this.verbose = verbose;
+ }
+
+ public void detectorChanged(Detector detector){
+
+ // Get the magnetic field from the geometry
+ bField = detector.getFieldMap().getField(new BasicHep3Vector(0,0,0));
+
+ //-----------------------//
+ //--- Setup all plots ---//
+ //-----------------------//
+
+ // Setup AIDA
+ aida = AIDA.defaultInstance();
+ aida.tree().cd("/");
+
+ // Instantiate a list to hold the collection of plotters
+ plotters = new ArrayList<IPlotter>();
+ IPlotter plotter = null;
+
+ //--- Plots of scoring plane positions ---//
+ //----------------------------------------//
+ plotter = PlotUtils.setupPlotter("Positions of Scoring plane hits matched to tracks", 2, 2);
+ PlotUtils.setup1DRegion(plotter, "Scoring plane hit position - x", 0, "x (mm)",
+ aida.histogram1D("Scoring plane hit position - x", 100, -400, 400));
+ PlotUtils.setup1DRegion(plotter, "Scoring plane hit position - y", 1, "y (mm)",
+ aida.histogram1D("Scoring plane hit position - y", 100, -200, 200));
+ PlotUtils.setup1DRegion(plotter, "Scoring plane hit position - z", 2, "z (mm)",
+ aida.histogram1D("Scoring plane hit position - z", 100, 1000, 1500));
+ PlotUtils.setup2DRegion(plotter, "Scoring plane hit position - x-y", 3, "x (mm)", "y (mm)",
+ aida.histogram2D("Scoring plane hit position - x-y", 100, -400, 400, 100, -200, 200));
+ plotters.add(plotter);
+
+ //--- Plots of residuals at scoring plane ---//
+ //-------------------------------------------//
+ plotter = PlotUtils.setupPlotter("Residuals at scoring plane", 3, 3);
+ PlotUtils.setup1DRegion(plotter, "Top tracks - Bend plane residuals at scoring plane", 0, "x_{ep} - x_{sp} (mm)",
+ aida.histogram1D("Top electron tracks - Bend plane residual at scoring plane", 60, -30, 30));
+ PlotUtils.setup1DRegion(plotter, "Top tracks - Bend plane residuals at scoring plane", 0, "x_{ep} - x_{sp} (mm)",
+ aida.histogram1D("Top positron tracks - Bend plane residual at scoring plane", 60, -30, 30));
+ PlotUtils.setup1DRegion(plotter, "Bottom tracks - Bend plane residuals at scoring plane", 1, "x_{ep} - x_{sp} (mm)",
+ aida.histogram1D("Bottom electron tracks - Bend plane residuals at scoring plane", 60, -30, 30));
+ PlotUtils.setup1DRegion(plotter, "Bottom tracks - Bend plane residuals at scoring plane", 1, "x_{ep} - x_{sp} (mm)",
+ aida.histogram1D("Bottom positron tracks - Bend plane residuals at scoring plane", 60, -30, 30));
+ PlotUtils.setup1DRegion(plotter, "Bend plane residuals at scoring plane", 2, "x_{ep} - x_{sp} (mm)",
+ aida.histogram1D("Bend plane residuals at scoring plane", 60, -30, 30));
+ PlotUtils.setup1DRegion(plotter, "Bend plane residuals at scoring plane", 2, "x_{ep} - x_{sp} (mm)",
+ aida.histogram1D("Bend plane residuals at scoring plane", 60, -30, 30));
+ PlotUtils.setup1DRegion(plotter, "Top tracks - Non-bend plane residuals at scoring plane", 3,"y_{ep} - y_{sp} (mm)",
+ aida.histogram1D("Top electron tracks - Non-bend plane residuals at scoring plane", 30, -15, 15));
+ PlotUtils.setup1DRegion(plotter, "Top tracks - Non-bend plane residuals at scoring plane", 3,"y_{ep} - y_{sp} (mm)",
+ aida.histogram1D("Top positron tracks - Non-bend plane residuals at scoring plane", 30, -15, 15));
+ PlotUtils.setup1DRegion(plotter, "Bottom tracks - Non-bend plane residuals at scoring plane", 4,"y_{ep} - y_{sp} (mm)",
+ aida.histogram1D("Bottom electron tracks - Non-bend plane residuals at scoring plane", 30, -15, 15));
+ PlotUtils.setup1DRegion(plotter, "Bottom tracks - Non-bend plane residuals at scoring plane", 4,"y_{ep} - y_{sp} (mm)",
+ aida.histogram1D("Bottom positron tracks - Non-bend plane residuals at scoring plane", 30, -15, 15));
+ PlotUtils.setup1DRegion(plotter, "Non-bend plane residuals at scoring plane", 5,"y_{ep} - y_{sp} (mm)",
+ aida.histogram1D("Non-bend plane residuals at scoring plane", 30, -15, 15));
+ PlotUtils.setup1DRegion(plotter, "Top tracks - z residuals at scoring plane", 6, "z_{ep} - z_{sp} (mm)",
+ aida.histogram1D("Top electron tracks - z residuals at scoring plane", 10, -5, 5));
+ PlotUtils.setup1DRegion(plotter, "Top tracks - z residuals at scoring plane", 6, "z_{ep} - z_{sp} (mm)",
+ aida.histogram1D("Top positron tracks - z residuals at scoring plane", 10, -5, 5));
+ PlotUtils.setup1DRegion(plotter, "Bottom tracks - z residuals at scoring plane", 7, "z_{ep} - z_{sp} (mm)",
+ aida.histogram1D("Bottom electron tracks - z residuals at scoring plane", 10, -5, 5));
+ PlotUtils.setup1DRegion(plotter, "Bottom tracks - z residuals at scoring plane", 7, "z_{ep} - z_{sp} (mm)",
+ aida.histogram1D("Bottom positron tracks - z residuals at scoring plane", 10, -5, 5));
+ PlotUtils.setup1DRegion(plotter, "z residuals at scoring plane", 7, "z_{ep} - z_{sp} (mm)",
+ aida.histogram1D("z residuals at scoring plane", 10, -5, 5));
+ plotters.add(plotter);
+
+ //--- Plots of residuals at target ---//
+ //------------------------------------//
+ plotter = PlotUtils.setupPlotter("Residuals at target", 3, 3);
+ PlotUtils.setup1DRegion(plotter, "Top tracks - Bend plane residuals at target", 0, "x_{ep} - x_{t} (mm)",
+ aida.histogram1D("Top electron tracks - Bend plane residual at target", 40, -4, 4));
+ PlotUtils.setup1DRegion(plotter, "Top tracks - Bend plane residuals at target", 0, "x_{ep} - x_{t} (mm)",
+ aida.histogram1D("Top positron tracks - Bend plane residual at target", 40, -4, 4));
+ PlotUtils.setup1DRegion(plotter, "Bottom tracks - Bend plane residuals at target", 1, "x_{ep} - x_{t} (mm)",
+ aida.histogram1D("Bottom electron tracks - Bend plane residuals at target", 40, -4, 4));
+ PlotUtils.setup1DRegion(plotter, "Bottom tracks - Bend plane residuals at target", 1, "x_{ep} - x_{t} (mm)",
+ aida.histogram1D("Bottom positron tracks - Bend plane residuals at target", 40, -4, 4));
+ PlotUtils.setup1DRegion(plotter, "Bend plane residuals at target", 2, "x_{ep} - x_{t} (mm)",
+ aida.histogram1D("Bend plane residuals at target", 40, -4, 4));
+ PlotUtils.setup1DRegion(plotter, "Top tracks - Non-bend plane residuals at target", 3, "y_{ep} - y_{t} (mm)",
+ aida.histogram1D("Top electron tracks - Non-bend plane residuals at target", 20, -2, 2));
+ PlotUtils.setup1DRegion(plotter, "Top tracks - Non-bend plane residuals at target", 3, "y_{ep} - y_{t} (mm)",
+ aida.histogram1D("Top positron tracks - Non-bend plane residuals at target", 20, -2, 2));
+ PlotUtils.setup1DRegion(plotter, "Bottom tracks - Non-bend plane residuals at target", 4, "y_{ep} - y_{t} (mm)",
+ aida.histogram1D("Bottom electron tracks - Non-bend plane residuals at target", 20, -2, 2));
+ PlotUtils.setup1DRegion(plotter, "Bottom tracks - Non-bend plane residuals at target", 4, "y_{ep} - y_{t} (mm)",
+ aida.histogram1D("Bottom positron tracks - Non-bend plane residuals at target", 20, -2, 2));
+ PlotUtils.setup1DRegion(plotter, "Non-bend plane residuals at target", 5, "y_{ep} - y_{t} (mm)",
+ aida.histogram1D("Non-bend plane residuals at target", 20, -2, 2));
+ PlotUtils.setup1DRegion(plotter, "Top tracks - z residuals at target", 6, "z_{ep} - z_{t} (mm)",
+ aida.histogram1D("Top electron tracks - z residuals at target", 50, -5, 5));
+ PlotUtils.setup1DRegion(plotter, "Top tracks - z residuals at target", 6, "z_{ep} - z_{t} (mm)",
+ aida.histogram1D("Top positron tracks - z residuals at target", 50, -5, 5));
+ PlotUtils.setup1DRegion(plotter, "Bottom tracks - z residuals at target", 7, "z_{ep} - z_{t} (mm)",
+ aida.histogram1D("Bottom electron tracks - z residuals at target", 50, -5, 5));
+ PlotUtils.setup1DRegion(plotter, "Bottom tracks - z residuals at target", 7, "z_{ep} - z_{t} (mm)",
+ aida.histogram1D("Bottom positron tracks - z residuals at target", 50, -5, 5));
+ PlotUtils.setup1DRegion(plotter, "z residuals at target", 8, "z_{ep} - z_{t} (mm)",
+ aida.histogram1D("z residuals at target", 50, -5, 5));
+ plotters.add(plotter);
+
+ //--- Plot of residuals at scoring plane vs momentum ---//
+ //------------------------------------------------------//
+ plotter = PlotUtils.setupPlotter("Residuals vs Momentum", 2, 2);
+ PlotUtils.setup2DRegion(plotter, "Bend plane residuals vs momentum at scoring plane", 0,
+ "Momentum (GeV)", "x_{ep} - x_{sp} (mm)",
+ aida.histogram2D("Bend plane residuals vs momentum at scoring plane", 5, 0, 2.5, 60, -30, 30));
+ PlotUtils.setup2DRegion(plotter, "Non-bend plane residuals vs momentum at scoring plane", 1,
+ "Momentum (GeV)", "y_{ep} - y_{sp} (mm)",
+ aida.histogram2D("Non-bend plane residuals vs momentum at scoring plane", 5, 0, 2.5, 60, -30, 30));
+ PlotUtils.setup2DRegion(plotter, "Bend plane residuals vs momentum at target", 2,
+ "Momentum (GeV)", "x_{ep} - x_{t} (mm)",
+ aida.histogram2D("Bend plane residuals vs momentum at target", 5, 0, 2.5, 60, -3, 3));
+ PlotUtils.setup2DRegion(plotter, "Non-bend plane residuals vs momentum at target", 3,
+ "Momentum (GeV)", "y_{ep} - y_{t} (mm)",
+ aida.histogram2D("Non-bend plane residuals vs momentum at target", 5, 0, 2.5, 50, -2.5, 2.5));
+ plotters.add(plotter);
+
+ for(IPlotter iPlotter : plotters){
+ iPlotter.show();
+ }
+
+ }
+
+ public void process(EventHeader event){
+
+ // If the event doesn't contain an LCRelation between a track and its
+ // corresponding ECal scoring plane hit, skip the event.
+ if(!event.hasCollection(LCRelation.class, trackToScoringPlaneHitRelationsName)) return;
+
+ List<LCRelation> trackToScoringPlaneHitRelations = event.get(LCRelation.class, trackToScoringPlaneHitRelationsName);
+
+ for(LCRelation trackToScoringPlaneHitRelation : trackToScoringPlaneHitRelations){
+
+ // Get the track
+ Track track = (Track) trackToScoringPlaneHitRelation.getFrom();
+
+ // Get the track momentum
+ double[] momentum = BaseTrackState.computeMomentum(track.getTrackStates().get(0), bField.y());
+ double p = Math.sqrt(momentum[0]*momentum[0] + momentum[1]*momentum[1] + momentum[2]*momentum[2]);
+ this.printVerbose("Track momentum: " + p);
+
+ // Get the corresponding scoring plane hit
+ SimTrackerHit scoringPlaneHit = (SimTrackerHit) trackToScoringPlaneHitRelation.getTo();
+ Hep3Vector scoringPlaneHitPosition = scoringPlaneHit.getPositionVec();
+ this.printVerbose("Scoring plane hit position: " + scoringPlaneHitPosition.toString());
+
+ // Fill the scoring plane position histograms
+ aida.histogram1D("Scoring plane hit position - x").fill(scoringPlaneHitPosition.x());
+ aida.histogram1D("Scoring plane hit position - y").fill(scoringPlaneHitPosition.y());
+ aida.histogram1D("Scoring plane hit position - z").fill(scoringPlaneHitPosition.z());
+ aida.histogram2D("Scoring plane hit position - x-y").fill(scoringPlaneHitPosition.x(), scoringPlaneHitPosition.y());
+
+ // Extrapolate the track to the scoring plane position
+ Hep3Vector trackPositionAtScoringPlane = TrackUtils.extrapolateTrack(track, scoringPlaneHitPosition.z());
+ this.printVerbose("Extrapolated track position: " + trackPositionAtScoringPlane.toString());
+
+ // Find the residual between the extrapolated track position and the scoring plane hit position
+ double deltaX = trackPositionAtScoringPlane.x() - scoringPlaneHitPosition.x();
+ double deltaY = trackPositionAtScoringPlane.y() - scoringPlaneHitPosition.y();
+ // This should be 0 but it serves as a sanity check.
+ double deltaZ = trackPositionAtScoringPlane.z() - scoringPlaneHitPosition.z();
+
+ if(track.getTrackerHits().get(0).getPosition()[2] > 0){
+ if(track.getTrackStates().get(0).getOmega() > 0){
+ aida.histogram1D("Top positron tracks - Bend plane residual at scoring plane").fill(deltaX);
+ aida.histogram1D("Top positron tracks - Non-bend plane residuals at scoring plane").fill(deltaY);
+ aida.histogram1D("Top positron tracks - z residuals at scoring plane").fill(deltaZ);
+ } else {
+ aida.histogram1D("Top electron tracks - Bend plane residual at scoring plane").fill(deltaX);
+ aida.histogram1D("Top electron tracks - Non-bend plane residuals at scoring plane").fill(deltaY);
+ aida.histogram1D("Top electron tracks - z residuals at scoring plane").fill(deltaZ);
+ }
+ } else {
+ if(track.getTrackStates().get(0).getOmega() > 0){
+ aida.histogram1D("Bottom positron tracks - Bend plane residuals at scoring plane").fill(deltaX);
+ aida.histogram1D("Bottom positron tracks - Non-bend plane residuals at scoring plane").fill(deltaY);
+ aida.histogram1D("Bottom positron tracks - z residuals at scoring plane").fill(deltaZ);
+ } else {
+ aida.histogram1D("Bottom electron tracks - Bend plane residuals at scoring plane").fill(deltaX);
+ aida.histogram1D("Bottom electron tracks - Non-bend plane residuals at scoring plane").fill(deltaY);
+ aida.histogram1D("Bottom electron tracks - z residuals at scoring plane").fill(deltaZ);
+ }
+ }
+
+ aida.histogram1D("Bend plane residuals at scoring plane").fill(deltaX);
+ aida.histogram1D("Non-bend plane residuals at scoring plane").fill(deltaY);
+ aida.histogram1D("z residuals at target").fill(deltaZ);
+ aida.histogram2D("Bend plane residuals vs momentum at scoring plane").fill(p, deltaX);
+ aida.histogram2D("Non-bend plane residuals vs momentum at scoring plane").fill(p, deltaY);
+ }
+
+ if(!event.hasCollection(LCRelation.class, trackToMCParticleRelationsName)) return;
+
+ List<LCRelation> trackToMCParticleRelations = event.get(LCRelation.class, trackToMCParticleRelationsName);
+
+ for(LCRelation trackToMCParticleRelation : trackToMCParticleRelations){
+
+ // Get the track
+ Track track = (Track) trackToMCParticleRelation.getFrom();
+
+ // Get the track momentum
+ double[] momentum = BaseTrackState.computeMomentum(track.getTrackStates().get(0), bField.y());
+ double p = Math.sqrt(momentum[0]*momentum[0] + momentum[1]*momentum[1] + momentum[2]*momentum[2]);
+ this.printVerbose("Track momentum: " + p);
+
+ // Get the corresponding MC particle
+ MCParticle particle = (MCParticle) trackToMCParticleRelation.getTo();
+
+ // Extrapolate the track to the origin
+ Hep3Vector trackPositionAtOrigin = TrackUtils.extrapolateTrack(track, particle.getOriginZ());
+
+ // Find the residual between the extrapolated track and the position of the scoring plane at the origin
+ double deltaX = trackPositionAtOrigin.x() - particle.getOriginX();
+ double deltaY = trackPositionAtOrigin.y() - particle.getOriginY();
+ double deltaZ = trackPositionAtOrigin.z() - particle.getOriginZ();
+
+ if(track.getTrackerHits().get(0).getPosition()[2] > 0){
+ if(track.getTrackStates().get(0).getOmega() > 0){
+ aida.histogram1D("Top positron tracks - Bend plane residual at target").fill(deltaX);
+ aida.histogram1D("Top positron tracks - Non-bend plane residuals at target").fill(deltaY);
+ aida.histogram1D("Top positron tracks - z residuals at target").fill(deltaZ);
+ } else {
+ aida.histogram1D("Top electron tracks - Bend plane residual at target").fill(deltaX);
+ aida.histogram1D("Top electron tracks - Non-bend plane residuals at target").fill(deltaY);
+ aida.histogram1D("Top electron tracks - z residuals at target").fill(deltaZ);
+ }
+ } else {
+
+ if(track.getTrackStates().get(0).getOmega() > 0){
+ aida.histogram1D("Bottom positron tracks - Bend plane residuals at target").fill(deltaX);
+ aida.histogram1D("Bottom positron tracks - Non-bend plane residuals at target").fill(deltaY);
+ aida.histogram1D("Bottom positron tracks - z residuals at target").fill(deltaZ);
+ } else {
+ aida.histogram1D("Bottom electron tracks - Bend plane residuals at target").fill(deltaX);
+ aida.histogram1D("Bottom electron tracks - Non-bend plane residuals at target").fill(deltaY);
+ aida.histogram1D("Bottom electron tracks - z residuals at target").fill(deltaZ);
+ }
+ }
+
+ aida.histogram1D("Bend plane residuals at target").fill(deltaX);
+ aida.histogram1D("Non-bend plane residuals at target").fill(deltaY);
+ aida.histogram1D("z residuals at target").fill(deltaZ);
+ aida.histogram2D("Bend plane residuals vs momentum at target").fill(p, deltaX);
+ aida.histogram2D("Non-bend plane residuals vs momentum at target").fill(p, deltaY);
+ }
+ }
+
+ @Override
+ protected void endOfData(){
+
+ IHistogram2D histogram = aida.histogram2D("Bend plane residuals vs momentum at scoring plane");
+ int binsX = histogram.xAxis().bins();
+ for(int binX = 0; binX < binsX; binX++){
+ PlotUtils.getYProjection(binX, histogram);
+ }
+
+ histogram = aida.histogram2D("Non-bend plane residuals vs momentum at scoring plane");
+ binsX = histogram.xAxis().bins();
+ for(int binX = 0; binX < binsX; binX++){
+ PlotUtils.getYProjection(binX, histogram);
+ }
+
+ histogram = aida.histogram2D("Bend plane residuals vs momentum at target");
+ binsX = histogram.xAxis().bins();
+ for(int binX = 0; binX < binsX; binX++){
+ PlotUtils.getYProjection(binX, histogram);
+ }
+
+ histogram = aida.histogram2D("Non-bend plane residuals vs momentum at target");
+ binsX = histogram.xAxis().bins();
+ for(int binX = 0; binX < binsX; binX++){
+ PlotUtils.getYProjection(binX, histogram);
+ }
+ }
+
+ /**
+ * Print a message if verbose has been enabled.
+ *
+ * @param message : message to print.
+ */
+ private void printVerbose(String message){
+ if(verbose)
+ System.out.println(this.getClass().getSimpleName() + ": " + message);
+ }
+
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/LheToStdhep.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/LheToStdhep.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/LheToStdhep.java Wed Apr 27 11:11:32 2016
@@ -15,7 +15,7 @@
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
@@ -32,248 +32,248 @@
* TODO: Make this converter more generic.
*/
public class LheToStdhep {
-
- private static final int N_PARTICLE_INDEX = 0;
- private static final int PDG_ID_INDEX = 1;
- private static final int STATUS_INDEX = 2;
- private static final int FIRST_MOTHER_INDEX = 3;
- private static final int SECOND_MOTHER_INDEX = 4;
- private static final int FIRST_DAUGHTER_INDEX = 5;
- private static final int SECOND_DAUGHTER_INDEX = 6;
-
- private static double sigmaX = 0.2;
- private static double sigmaY = 0.02;
- private static double sigmaZ = 0.0;
-
- private static double offsetX = 0;
- private static double offsetY = 0;
- private static double offsetZ = 0.03;
-
- static int eventNumber = 0;
-
-
- public static void main(String[] args) throws IOException{
-
- String lheFileName = null;
- String stdhepFileName = "output.stdhep";
-
- // Instantiate te command line parser
- CommandLineParser parser = new DefaultParser();
-
- // Create the Options
- // TODO: Add ability to parse list of files.
- // Allow a user to pass tag.gz files
- Options options = new Options();
- options.addOption("i", "input", true, "Input LHE file name");
- options.addOption("o", "output", true, "Output Stdhep file name");
-
- try {
- // Parse the command line arguments
- CommandLine line = parser.parse(options, args);
-
- // If the file is not specified, notify the user and exit the program
- if(!line.hasOption("i")){
- System.out.println("Please specify an LHE file to process");
- System.exit(0);
- }
- lheFileName = line.getOptionValue("i");
-
- // Check if the user has specified the output file name and that the
- // extension is stdhep. If not, add the extension
- if(line.hasOption("o")){
- stdhepFileName = line.getOptionValue("o");
- }
- } catch(ParseException e){
- System.out.println("Unexpected exception: " + e.getMessage());
- }
-
- convertToStdhep(lheFileName, stdhepFileName);
-
- }
-
- /**
- *
- */
- static private void convertToStdhep(String lheFileName, String stdhepFileName) throws IOException{
- List<Element> events = readLhe(lheFileName);
-
- StdhepWriter writer = new StdhepWriter(
- stdhepFileName,
- "Import Stdhep Events",
- "Imported from LHE generated from MadGraph",
- events.size()
- );
- writer.setCompatibilityMode(false);
-
- for(Element event : events){
- writeEvent(event, writer);
- }
- writer.close();
- }
-
- /**
- *
- */
- private static List<Element> readLhe(String lheFileName){
-
- // Instantiate the SAX parser used to build the JDOM document
- SAXBuilder builder = new SAXBuilder();
-
- // Open the lhe file
- File lheFile = new File(lheFileName);
-
- // Parse the lhe file and build the JDOM document
- Document document = null;
- List<Element> eventNodes = null;
- try {
-
- document = (Document) builder.build(lheFile);
-
- // Get the root node
- Element rootNode = document.getRootElement();
-
- // Get a list of all nodes of type event
- eventNodes = rootNode.getChildren("event");
-
- } catch (JDOMException e) {
- e.printStackTrace();
-
- } catch (IOException e) {
- e.printStackTrace();
- }
-
- return eventNodes;
- }
-
- /**
- *
- */
- private static void writeEvent(Element event, StdhepWriter writer) throws IOException{
-
- int numberOfParticles = 0;
- int particleIndex = 0;
- int pdgID[] = null;
- int particleStatus[] = null;
- int motherParticles[] = null;
- int daughterParticles[] = null;
- double particleMomentum[] = null;
- double particleVertex[] = null;
-
- Random generator = new Random();
-
- eventNumber++;
-
- System.out.println("#================================================#\n#");
- System.out.println("# Event: " + eventNumber);
-
-
- // Get the text within the event element node. An element node contains
- // information describing the event and it's particles. The PDG ID of
- // a particle along with it's kinematics is listed on it's own line.
- // In order to parse the information for each particle, the text is
- // split using the newline character as a delimiter.
- String[] eventData = event.getTextTrim().split("\n");
-
- for(int datumIndex = 0; datumIndex < eventData.length; datumIndex++){
-
- // Split a line by whitespace
- String[] eventTokens = eventData[datumIndex].split("\\s+");
-
- if(datumIndex == 0){
-
- numberOfParticles = Integer.valueOf(eventTokens[N_PARTICLE_INDEX]);
- System.out.println("# Number of particles: " + numberOfParticles + "\n#");
- System.out.println("#================================================#");
-
- // Reset all arrays used to build the Stdhep event
- particleIndex = 0;
- particleStatus = new int[numberOfParticles];
- pdgID = new int[numberOfParticles];
- motherParticles = new int[numberOfParticles*2];
- daughterParticles = new int[numberOfParticles*2];
- particleMomentum = new double[numberOfParticles*5];
- particleVertex = new double[numberOfParticles*4];
-
- continue;
- }
-
- // Get the PDG ID of the particle
- pdgID[particleIndex] = Integer.valueOf(eventTokens[PDG_ID_INDEX]);
-
-
- System.out.println(">>> PDG ID: " + pdgID[particleIndex]);
-
- // Get the status of the particle (initial state = -1, final state = 1, resonance = 2)
- particleStatus[particleIndex] = Integer.valueOf(eventTokens[STATUS_INDEX]);
- if(particleStatus[particleIndex] == -1) particleStatus[particleIndex] = 3;
- System.out.println(">>>> Particle Status: " + particleStatus[particleIndex]);
-
- // Get the mothers of a particle. If the particle is a trident electron, then assign it
- // a mother value of 10 so it's distinguishable from the beam electron.
- if(pdgID[particleIndex] == 611){
- motherParticles[particleIndex*2] = 10;
- // If the PDG ID is equal to 611/-611 (trident electron) change it back to 11/-11.
- // Otherwise, SLIC won't do anything with them.
- pdgID[particleIndex] = 11;
- } else if(pdgID[particleIndex] == -611){
- motherParticles[particleIndex*2] = 10;
- pdgID[particleIndex] = -11;
- } else {
- motherParticles[particleIndex*2] = Integer.valueOf(eventTokens[FIRST_MOTHER_INDEX]);
- }
- motherParticles[particleIndex*2 + 1] = Integer.valueOf(eventTokens[SECOND_MOTHER_INDEX]);
- System.out.println(">>>> Mothers: 1) " + motherParticles[particleIndex*2] + " 2) " + motherParticles[particleIndex*2 + 1]);
-
- // Get the daughter particles
- daughterParticles[particleIndex*2] = Integer.valueOf(eventTokens[FIRST_DAUGHTER_INDEX]);
- daughterParticles[particleIndex*2 + 1] = Integer.valueOf(eventTokens[SECOND_DAUGHTER_INDEX]);
- System.out.println(">>>> Daughter: 1) " + daughterParticles[particleIndex*2] + " 2) " + daughterParticles[particleIndex*2 + 1]);
-
- // Get the particle momentum, its mass and energy
- particleMomentum[particleIndex*5] = Double.valueOf(eventTokens[7]); // px
- particleMomentum[particleIndex*5 + 1] = Double.valueOf(eventTokens[8]); // py
- particleMomentum[particleIndex*5 + 2] = Double.valueOf(eventTokens[9]); // pz
- particleMomentum[particleIndex*5 + 3] = Double.valueOf(eventTokens[10]); // Particle Energy
- particleMomentum[particleIndex*5 + 4] = Double.valueOf(eventTokens[11]); // Particle Mass
-
- // Rotate the particle by 30 mrad around the beam axis
- Hep3Vector rotatedMomentum =
- rotateToDetector(particleMomentum[particleIndex*5],
- particleMomentum[particleIndex*5+1],
- particleMomentum[particleIndex*5+1]);
-
- particleMomentum[particleIndex*5] = rotatedMomentum.x();
- particleMomentum[particleIndex*5 + 1] = rotatedMomentum.y();
- particleMomentum[particleIndex*5 + 2] = rotatedMomentum.z();
-
- // Set the origin of the particle
- Hep3Vector rotatedVertex = rotateToDetector(sigmaX*generator.nextGaussian() + offsetX,
- sigmaY*generator.nextGaussian() + offsetY,
- sigmaZ*generator.nextGaussian() + offsetZ);
- particleVertex[particleIndex*4] = rotatedVertex.x();
- particleVertex[particleIndex*4+1] = rotatedVertex.y();
- particleVertex[particleIndex*4+2] = rotatedVertex.z();
- particleVertex[particleIndex*4+3] = 0;
-
- // Increment the particle number
- particleIndex++;
-
- System.out.println(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>");
- }
-
- // Create the Stdhep event and write it
- StdhepEvent stdhepEvent = new StdhepEvent(eventNumber, numberOfParticles, particleStatus,
- pdgID, motherParticles, daughterParticles, particleMomentum, particleVertex);
- writer.writeRecord(stdhepEvent);
- }
-
- /**
- *
- */
- private static Hep3Vector rotateToDetector(double x, double y, double z){
- IRotation3D rotation = new RotationGeant(0.0, 0.03, 0.0);
- Hep3Vector vector = new BasicHep3Vector(x, y, z);
- return rotation.rotated(vector);
- }
+
+ private static final int N_PARTICLE_INDEX = 0;
+ private static final int PDG_ID_INDEX = 1;
+ private static final int STATUS_INDEX = 2;
+ private static final int FIRST_MOTHER_INDEX = 3;
+ private static final int SECOND_MOTHER_INDEX = 4;
+ private static final int FIRST_DAUGHTER_INDEX = 5;
+ private static final int SECOND_DAUGHTER_INDEX = 6;
+
+ private static double sigmaX = 0.2;
+ private static double sigmaY = 0.02;
+ private static double sigmaZ = 0.0;
+
+ private static double offsetX = 0;
+ private static double offsetY = 0;
+ private static double offsetZ = 0.03;
+
+ static int eventNumber = 0;
+
+
+ public static void main(String[] args) throws IOException{
+
+ String lheFileName = null;
+ String stdhepFileName = "output.stdhep";
+
+ // Instantiate te command line parser
+ CommandLineParser parser = new PosixParser();
+
+ // Create the Options
+ // TODO: Add ability to parse list of files.
+ // Allow a user to pass tag.gz files
+ Options options = new Options();
+ options.addOption("i", "input", true, "Input LHE file name");
+ options.addOption("o", "output", true, "Output Stdhep file name");
+
+ try {
+ // Parse the command line arguments
+ CommandLine line = parser.parse(options, args);
+
+ // If the file is not specified, notify the user and exit the program
+ if(!line.hasOption("i")){
+ System.out.println("Please specify an LHE file to process");
+ System.exit(0);
+ }
+ lheFileName = line.getOptionValue("i");
+
+ // Check if the user has specified the output file name and that the
+ // extension is stdhep. If not, add the extension
+ if(line.hasOption("o")){
+ stdhepFileName = line.getOptionValue("o");
+ }
+ } catch(ParseException e){
+ System.out.println("Unexpected exception: " + e.getMessage());
+ }
+
+ convertToStdhep(lheFileName, stdhepFileName);
+
+ }
+
+ /**
+ *
+ */
+ static private void convertToStdhep(String lheFileName, String stdhepFileName) throws IOException{
+ List<Element> events = readLhe(lheFileName);
+
+ StdhepWriter writer = new StdhepWriter(
+ stdhepFileName,
+ "Import Stdhep Events",
+ "Imported from LHE generated from MadGraph",
+ events.size()
+ );
+ writer.setCompatibilityMode(false);
+
+ for(Element event : events){
+ writeEvent(event, writer);
+ }
+ writer.close();
+ }
+
+ /**
+ *
+ */
+ private static List<Element> readLhe(String lheFileName){
+
+ // Instantiate the SAX parser used to build the JDOM document
+ SAXBuilder builder = new SAXBuilder();
+
+ // Open the lhe file
+ File lheFile = new File(lheFileName);
+
+ // Parse the lhe file and build the JDOM document
+ Document document = null;
+ List<Element> eventNodes = null;
+ try {
+
+ document = (Document) builder.build(lheFile);
+
+ // Get the root node
+ Element rootNode = document.getRootElement();
+
+ // Get a list of all nodes of type event
+ eventNodes = rootNode.getChildren("event");
+
+ } catch (JDOMException e) {
+ e.printStackTrace();
+
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+ return eventNodes;
+ }
+
+ /**
+ *
+ */
+ private static void writeEvent(Element event, StdhepWriter writer) throws IOException{
+
+ int numberOfParticles = 0;
+ int particleIndex = 0;
+ int pdgID[] = null;
+ int particleStatus[] = null;
+ int motherParticles[] = null;
+ int daughterParticles[] = null;
+ double particleMomentum[] = null;
+ double particleVertex[] = null;
+
+ Random generator = new Random();
+
+ eventNumber++;
+
+ System.out.println("#================================================#\n#");
+ System.out.println("# Event: " + eventNumber);
+
+
+ // Get the text within the event element node. An element node contains
+ // information describing the event and it's particles. The PDG ID of
+ // a particle along with it's kinematics is listed on it's own line.
+ // In order to parse the information for each particle, the text is
+ // split using the newline character as a delimiter.
+ String[] eventData = event.getTextTrim().split("\n");
+
+ for(int datumIndex = 0; datumIndex < eventData.length; datumIndex++){
+
+ // Split a line by whitespace
+ String[] eventTokens = eventData[datumIndex].split("\\s+");
+
+ if(datumIndex == 0){
+
+ numberOfParticles = Integer.valueOf(eventTokens[N_PARTICLE_INDEX]);
+ System.out.println("# Number of particles: " + numberOfParticles + "\n#");
+ System.out.println("#================================================#");
+
+ // Reset all arrays used to build the Stdhep event
+ particleIndex = 0;
+ particleStatus = new int[numberOfParticles];
+ pdgID = new int[numberOfParticles];
+ motherParticles = new int[numberOfParticles*2];
+ daughterParticles = new int[numberOfParticles*2];
+ particleMomentum = new double[numberOfParticles*5];
+ particleVertex = new double[numberOfParticles*4];
+
+ continue;
+ }
+
+ // Get the PDG ID of the particle
+ pdgID[particleIndex] = Integer.valueOf(eventTokens[PDG_ID_INDEX]);
+
+
+ System.out.println(">>> PDG ID: " + pdgID[particleIndex]);
+
+ // Get the status of the particle (initial state = -1, final state = 1, resonance = 2)
+ particleStatus[particleIndex] = Integer.valueOf(eventTokens[STATUS_INDEX]);
+ if(particleStatus[particleIndex] == -1) particleStatus[particleIndex] = 3;
+ System.out.println(">>>> Particle Status: " + particleStatus[particleIndex]);
+
+ // Get the mothers of a particle. If the particle is a trident electron, then assign it
+ // a mother value of 10 so it's distinguishable from the beam electron.
+ if(pdgID[particleIndex] == 611){
+ motherParticles[particleIndex*2] = 10;
+ // If the PDG ID is equal to 611/-611 (trident electron) change it back to 11/-11.
+ // Otherwise, SLIC won't do anything with them.
+ pdgID[particleIndex] = 11;
+ } else if(pdgID[particleIndex] == -611){
+ motherParticles[particleIndex*2] = 10;
+ pdgID[particleIndex] = -11;
+ } else {
+ motherParticles[particleIndex*2] = Integer.valueOf(eventTokens[FIRST_MOTHER_INDEX]);
+ }
+ motherParticles[particleIndex*2 + 1] = Integer.valueOf(eventTokens[SECOND_MOTHER_INDEX]);
+ System.out.println(">>>> Mothers: 1) " + motherParticles[particleIndex*2] + " 2) " + motherParticles[particleIndex*2 + 1]);
+
+ // Get the daughter particles
+ daughterParticles[particleIndex*2] = Integer.valueOf(eventTokens[FIRST_DAUGHTER_INDEX]);
+ daughterParticles[particleIndex*2 + 1] = Integer.valueOf(eventTokens[SECOND_DAUGHTER_INDEX]);
+ System.out.println(">>>> Daughter: 1) " + daughterParticles[particleIndex*2] + " 2) " + daughterParticles[particleIndex*2 + 1]);
+
+ // Get the particle momentum, its mass and energy
+ particleMomentum[particleIndex*5] = Double.valueOf(eventTokens[7]); // px
+ particleMomentum[particleIndex*5 + 1] = Double.valueOf(eventTokens[8]); // py
+ particleMomentum[particleIndex*5 + 2] = Double.valueOf(eventTokens[9]); // pz
+ particleMomentum[particleIndex*5 + 3] = Double.valueOf(eventTokens[10]); // Particle Energy
+ particleMomentum[particleIndex*5 + 4] = Double.valueOf(eventTokens[11]); // Particle Mass
+
+ // Rotate the particle by 30 mrad around the beam axis
+ Hep3Vector rotatedMomentum =
+ rotateToDetector(particleMomentum[particleIndex*5],
+ particleMomentum[particleIndex*5+1],
+ particleMomentum[particleIndex*5+1]);
+
+ particleMomentum[particleIndex*5] = rotatedMomentum.x();
+ particleMomentum[particleIndex*5 + 1] = rotatedMomentum.y();
+ particleMomentum[particleIndex*5 + 2] = rotatedMomentum.z();
+
+ // Set the origin of the particle
+ Hep3Vector rotatedVertex = rotateToDetector(sigmaX*generator.nextGaussian() + offsetX,
+ sigmaY*generator.nextGaussian() + offsetY,
+ sigmaZ*generator.nextGaussian() + offsetZ);
+ particleVertex[particleIndex*4] = rotatedVertex.x();
+ particleVertex[particleIndex*4+1] = rotatedVertex.y();
+ particleVertex[particleIndex*4+2] = rotatedVertex.z();
+ particleVertex[particleIndex*4+3] = 0;
+
+ // Increment the particle number
+ particleIndex++;
+
+ System.out.println(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>");
+ }
+
+ // Create the Stdhep event and write it
+ StdhepEvent stdhepEvent = new StdhepEvent(eventNumber, numberOfParticles, particleStatus,
+ pdgID, motherParticles, daughterParticles, particleMomentum, particleVertex);
+ writer.writeRecord(stdhepEvent);
+ }
+
+ /**
+ *
+ */
+ private static Hep3Vector rotateToDetector(double x, double y, double z){
+ IRotation3D rotation = new RotationGeant(0.0, 0.03, 0.0);
+ Hep3Vector vector = new BasicHep3Vector(x, y, z);
+ return rotation.rotated(vector);
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/PlotUtils.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/PlotUtils.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/PlotUtils.java Wed Apr 27 11:11:32 2016
@@ -20,71 +20,71 @@
*/
public class PlotUtils {
- // Default ctor
- public PlotUtils(){}
+ // Default ctor
+ public PlotUtils(){}
- public static IPlotter setupPlotter(String title, int regionX, int regionY){
- IPlotter plotter = AIDA.defaultInstance().analysisFactory().createPlotterFactory().create(title);
- plotter.setTitle(title);
-
- if(regionX < 0 || regionY < 0) throw new RuntimeException("Region dimensions need to be greater than 0!");
- else if(regionX != 0 || regionY != 0) plotter.createRegions(regionX, regionY);
-
- plotter.style().statisticsBoxStyle().setVisible(false);
- plotter.style().dataStyle().errorBarStyle().setVisible(false);
- plotter.setParameter("plotterWidth", "800");
- plotter.setParameter("plotterHeight", "800");
-
- return plotter;
-
- }
+ public static IPlotter setupPlotter(String title, int regionX, int regionY){
+ IPlotter plotter = AIDA.defaultInstance().analysisFactory().createPlotterFactory().create(title);
+ plotter.setTitle(title);
+
+ if(regionX < 0 || regionY < 0) throw new RuntimeException("Region dimensions need to be greater than 0!");
+ else if(regionX != 0 || regionY != 0) plotter.createRegions(regionX, regionY);
+
+ plotter.style().statisticsBoxStyle().setVisible(false);
+ plotter.style().dataStyle().errorBarStyle().setVisible(false);
+ plotter.setParameter("plotterWidth", "800");
+ plotter.setParameter("plotterHeight", "800");
+
+ return plotter;
+
+ }
public static void setup2DRegion(IPlotter plotter, String title, int region, String xTitle, String yTitle, IHistogram2D histo){
-
- // Check if the specified region is valid
- if(region > plotter.numberOfRegions())
- throw new RuntimeException("Region is invalid! " + title + " contains " + plotter.numberOfRegions() + " regions");
-
- plotter.region(region).style().xAxisStyle().setLabel(xTitle);
- plotter.region(region).style().xAxisStyle().labelStyle().setFontSize(14);
- plotter.region(region).style().yAxisStyle().setLabel(yTitle);
- plotter.region(region).style().yAxisStyle().labelStyle().setFontSize(14);
- plotter.region(region).style().xAxisStyle().setVisible(true);
- plotter.region(region).style().yAxisStyle().setVisible(true);
- plotter.region(region).style().setParameter("hist2DStyle", "colorMap");
- plotter.region(region).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
-
- if(histo != null) plotter.region(region).plot(histo);
+
+ // Check if the specified region is valid
+ if(region > plotter.numberOfRegions())
+ throw new RuntimeException("Region is invalid! " + title + " contains " + plotter.numberOfRegions() + " regions");
+
+ plotter.region(region).style().xAxisStyle().setLabel(xTitle);
+ plotter.region(region).style().xAxisStyle().labelStyle().setFontSize(14);
+ plotter.region(region).style().yAxisStyle().setLabel(yTitle);
+ plotter.region(region).style().yAxisStyle().labelStyle().setFontSize(14);
+ plotter.region(region).style().xAxisStyle().setVisible(true);
+ plotter.region(region).style().yAxisStyle().setVisible(true);
+ plotter.region(region).style().setParameter("hist2DStyle", "colorMap");
+ plotter.region(region).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+
+ if(histo != null) plotter.region(region).plot(histo);
}
public static void setup2DRegion(IPlotter plotter, String title, int region, String xTitle, String yTitle, ICloud2D cloud, IPlotterStyle style){
-
- // Check if the specified region is valid
- if(region > plotter.numberOfRegions())
- throw new RuntimeException("Region is invalid! " + title + " contains " + plotter.numberOfRegions() + " regions");
-
- plotter.region(region).style().xAxisStyle().setLabel(xTitle);
- plotter.region(region).style().xAxisStyle().labelStyle().setFontSize(14);
- String[] pars = plotter.region(region).style().xAxisStyle().availableParameters();
- plotter.region(region).style().yAxisStyle().setLabel(yTitle);
- plotter.region(region).style().yAxisStyle().labelStyle().setFontSize(14);
- plotter.region(region).style().xAxisStyle().setVisible(true);
- plotter.region(region).style().yAxisStyle().setVisible(true);
- plotter.region(region).style().setParameter("showAsScatterPlot", "true");
-
- if(cloud != null) plotter.region(region).plot(cloud, style);
+
+ // Check if the specified region is valid
+ if(region > plotter.numberOfRegions())
+ throw new RuntimeException("Region is invalid! " + title + " contains " + plotter.numberOfRegions() + " regions");
+
+ plotter.region(region).style().xAxisStyle().setLabel(xTitle);
+ plotter.region(region).style().xAxisStyle().labelStyle().setFontSize(14);
+ String[] pars = plotter.region(region).style().xAxisStyle().availableParameters();
+ plotter.region(region).style().yAxisStyle().setLabel(yTitle);
+ plotter.region(region).style().yAxisStyle().labelStyle().setFontSize(14);
+ plotter.region(region).style().xAxisStyle().setVisible(true);
+ plotter.region(region).style().yAxisStyle().setVisible(true);
+ plotter.region(region).style().setParameter("showAsScatterPlot", "true");
+
+ if(cloud != null) plotter.region(region).plot(cloud, style);
}
public static void setup1DRegion(IPlotter plotter, String title, int region, String xTitle, IHistogram1D histo){
-
- plotter.region(region).style().xAxisStyle().setLabel(xTitle);
- plotter.region(region).style().xAxisStyle().labelStyle().setFontSize(14);
- plotter.region(region).style().xAxisStyle().setVisible(true);
- plotter.region(region).style().dataStyle().fillStyle().setVisible(false);
- plotter.region(region).style().dataStyle().lineStyle().setThickness(3);
-
- if(histo != null) plotter.region(region).plot(histo);
+
+ plotter.region(region).style().xAxisStyle().setLabel(xTitle);
+ plotter.region(region).style().xAxisStyle().labelStyle().setFontSize(14);
+ plotter.region(region).style().xAxisStyle().setVisible(true);
+ plotter.region(region).style().dataStyle().fillStyle().setVisible(false);
+ plotter.region(region).style().dataStyle().lineStyle().setThickness(3);
+
+ if(histo != null) plotter.region(region).plot(histo);
}
/**
@@ -98,7 +98,7 @@
int ix = (layer - 1) / 2;
int iy = 0;
if (!((HpsSiSensor) sensor).isTopLayer()){
- iy += 2;
+ iy += 2;
}
if (layer % 2 == 0) {
iy += 1;
@@ -108,35 +108,35 @@
}
public static IHistogram1D getYProjection(int binX, IHistogram2D histogram){
- int binsY = histogram.yAxis().bins();
- double yMin = histogram.yAxis().lowerEdge();
- double yMax = histogram.yAxis().upperEdge();
-
- IHistogram1D projection
- = AIDA.defaultInstance().histogram1D(histogram.title() + "_" + binX, binsY, yMin, yMax);
- projection.reset();
-
- double dataY = 0;
- for(int binY = 0; binY < binsY; binY++){
- dataY = histogram.binEntries(binX, binY);
- projection.fill(yMin, dataY);
- yMin++;
- }
-
- return projection;
+ int binsY = histogram.yAxis().bins();
+ double yMin = histogram.yAxis().lowerEdge();
+ double yMax = histogram.yAxis().upperEdge();
+
+ IHistogram1D projection
+ = AIDA.defaultInstance().histogram1D(histogram.title() + "_" + binX, binsY, yMin, yMax);
+ projection.reset();
+
+ double dataY = 0;
+ for(int binY = 0; binY < binsY; binY++){
+ dataY = histogram.binEntries(binX, binY);
+ projection.fill(yMin, dataY);
+ yMin++;
+ }
+
+ return projection;
}
public static double[] fitToGuassian(IHistogram1D histogram){
-
- double[] fitParameters = {0, 0};
- IFitter fitter = AIDA.defaultInstance().analysisFactory().createFitFactory().createFitter();
- IFitResult fitResult = fitter.fit(histogram, "g");
- int meanIndex = fitResult.fittedFunction().indexOfParameter("mean");
- fitParameters[0] = fitResult.fittedParameters()[meanIndex];
+
+ double[] fitParameters = {0, 0};
+ IFitter fitter = AIDA.defaultInstance().analysisFactory().createFitFactory().createFitter();
+ IFitResult fitResult = fitter.fit(histogram, "g");
+ int meanIndex = fitResult.fittedFunction().indexOfParameter("mean");
+ fitParameters[0] = fitResult.fittedParameters()[meanIndex];
- int sigmaIndex = fitResult.fittedFunction().indexOfParameter("sigma");
- fitParameters[1] = fitResult.fittedParameters()[sigmaIndex];
-
- return fitParameters;
+ int sigmaIndex = fitResult.fittedFunction().indexOfParameter("sigma");
+ fitParameters[1] = fitResult.fittedParameters()[sigmaIndex];
+
+ return fitParameters;
}
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/ReconstructedParticleChecker.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/ReconstructedParticleChecker.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/ReconstructedParticleChecker.java Wed Apr 27 11:11:32 2016
@@ -29,92 +29,92 @@
*
*/
public class ReconstructedParticleChecker extends Driver {
-
- private AIDA aida;
- private List<IPlotter> plotters = new ArrayList<IPlotter>();
+
+ private AIDA aida;
+ private List<IPlotter> plotters = new ArrayList<IPlotter>();
- IHistogram1D xPositionResidual;
- IHistogram1D yPositionResidual;
- IHistogram1D zPositionResidual;
- IHistogram1D r;
-
- // Collection Names
- private String finalStateParticlesCollectionName = "FinalStateParticles";
-
- boolean debug = true;
- int plotterIndex = 0;
-
- public ReconstructedParticleChecker(){}
-
- protected void detectorChanged(Detector detector){
- super.detectorChanged(detector);
-
- // Setup AIDA
- aida = AIDA.defaultInstance();
- aida.tree().cd("/");
-
- plotters.add(PlotUtils.setupPlotter("Track-Cluster Position Residual", 2, 2));
- xPositionResidual = aida.histogram1D("x Residual", 100, -100, 100);
- yPositionResidual = aida.histogram1D("y Residual", 100, -100, 100);
- zPositionResidual = aida.histogram1D("z Residual", 100, -100, 100);
- r = aida.histogram1D("r", 100, -100, 100);
- PlotUtils.setup1DRegion(plotters.get(plotterIndex), "x Residual", 0, "delta x [mm]", xPositionResidual);
- PlotUtils.setup1DRegion(plotters.get(plotterIndex), "y Residual", 1, "delta y [mm]", yPositionResidual);
- PlotUtils.setup1DRegion(plotters.get(plotterIndex), "z Residual", 2, "delta z [mm]", zPositionResidual);
- PlotUtils.setup1DRegion(plotters.get(plotterIndex), "r", 3, "r [mm]", r);
-
-
- for(IPlotter plotter : plotters){
- plotter.show();
- }
- }
-
- public void process(EventHeader event){
-
- // If the event doesn't contain any final state reconstructed
- // particles, skip the event
- if(!event.hasCollection(ReconstructedParticle.class, finalStateParticlesCollectionName)){
- this.printDebug("Event does not contain ReconstructedParticles");
- return;
- }
-
- // Get the collections of reconstructed final state particles from the
- // event
- List<ReconstructedParticle> finalStateParticles
- = event.get(ReconstructedParticle.class, finalStateParticlesCollectionName);
-
-
- // Loop over all of the reconstructed particles in the event
- for(ReconstructedParticle finalStateParticle : finalStateParticles){
-
- // Get the list of clusters from the event
- List<Cluster> ecalClusters = finalStateParticle.getClusters();
- this.printDebug("Number of Ecal clusters: " + ecalClusters.size());
- if(ecalClusters.isEmpty()){
- this.printDebug("Number of Ecal clusters: " + ecalClusters.size());
- this.printDebug("List of Ecal cluster is empty ... skipping");
- continue;
- }
-
- // Get the list of tracks from the event
- List<Track> tracks = finalStateParticle.getTracks();
- if(tracks.isEmpty()){
- this.printDebug("List of tracks is empty ... skipping");
- continue;
- }
-
- Hep3Vector ecalPosition = new BasicHep3Vector(ecalClusters.get(0).getPosition());
- Hep3Vector trackPositionAtEcal = TrackUtils.extrapolateTrack(tracks.get(0),ecalPosition.z());
- xPositionResidual.fill(trackPositionAtEcal.x() - ecalPosition.x());
- yPositionResidual.fill(trackPositionAtEcal.y() - ecalPosition.y());
- zPositionResidual.fill(trackPositionAtEcal.z() - ecalPosition.z());
- r.fill(VecOp.sub(trackPositionAtEcal, ecalPosition).magnitude());
- }
-
- }
-
- private void printDebug(String debugMessage){
- if(debug)
- System.out.println(this.getClass().getSimpleName() + ": " + debugMessage);
- }
+ IHistogram1D xPositionResidual;
+ IHistogram1D yPositionResidual;
+ IHistogram1D zPositionResidual;
+ IHistogram1D r;
+
+ // Collection Names
+ private String finalStateParticlesCollectionName = "FinalStateParticles";
+
+ boolean debug = true;
+ int plotterIndex = 0;
+
+ public ReconstructedParticleChecker(){}
+
+ protected void detectorChanged(Detector detector){
+ super.detectorChanged(detector);
+
+ // Setup AIDA
+ aida = AIDA.defaultInstance();
+ aida.tree().cd("/");
+
+ plotters.add(PlotUtils.setupPlotter("Track-Cluster Position Residual", 2, 2));
+ xPositionResidual = aida.histogram1D("x Residual", 100, -100, 100);
+ yPositionResidual = aida.histogram1D("y Residual", 100, -100, 100);
+ zPositionResidual = aida.histogram1D("z Residual", 100, -100, 100);
+ r = aida.histogram1D("r", 100, -100, 100);
+ PlotUtils.setup1DRegion(plotters.get(plotterIndex), "x Residual", 0, "delta x [mm]", xPositionResidual);
+ PlotUtils.setup1DRegion(plotters.get(plotterIndex), "y Residual", 1, "delta y [mm]", yPositionResidual);
+ PlotUtils.setup1DRegion(plotters.get(plotterIndex), "z Residual", 2, "delta z [mm]", zPositionResidual);
+ PlotUtils.setup1DRegion(plotters.get(plotterIndex), "r", 3, "r [mm]", r);
+
+
+ for(IPlotter plotter : plotters){
+ plotter.show();
+ }
+ }
+
+ public void process(EventHeader event){
+
+ // If the event doesn't contain any final state reconstructed
+ // particles, skip the event
+ if(!event.hasCollection(ReconstructedParticle.class, finalStateParticlesCollectionName)){
+ this.printDebug("Event does not contain ReconstructedParticles");
+ return;
+ }
+
+ // Get the collections of reconstructed final state particles from the
+ // event
+ List<ReconstructedParticle> finalStateParticles
+ = event.get(ReconstructedParticle.class, finalStateParticlesCollectionName);
+
+
+ // Loop over all of the reconstructed particles in the event
+ for(ReconstructedParticle finalStateParticle : finalStateParticles){
+
+ // Get the list of clusters from the event
+ List<Cluster> ecalClusters = finalStateParticle.getClusters();
+ this.printDebug("Number of Ecal clusters: " + ecalClusters.size());
+ if(ecalClusters.isEmpty()){
+ this.printDebug("Number of Ecal clusters: " + ecalClusters.size());
+ this.printDebug("List of Ecal cluster is empty ... skipping");
+ continue;
+ }
+
+ // Get the list of tracks from the event
+ List<Track> tracks = finalStateParticle.getTracks();
+ if(tracks.isEmpty()){
+ this.printDebug("List of tracks is empty ... skipping");
+ continue;
+ }
+
+ Hep3Vector ecalPosition = new BasicHep3Vector(ecalClusters.get(0).getPosition());
+ Hep3Vector trackPositionAtEcal = TrackUtils.extrapolateTrack(tracks.get(0),ecalPosition.z());
+ xPositionResidual.fill(trackPositionAtEcal.x() - ecalPosition.x());
+ yPositionResidual.fill(trackPositionAtEcal.y() - ecalPosition.y());
+ zPositionResidual.fill(trackPositionAtEcal.z() - ecalPosition.z());
+ r.fill(VecOp.sub(trackPositionAtEcal, ecalPosition).magnitude());
+ }
+
+ }
+
+ private void printDebug(String debugMessage){
+ if(debug)
+ System.out.println(this.getClass().getSimpleName() + ": " + debugMessage);
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SharedHitAnalysis.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SharedHitAnalysis.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SharedHitAnalysis.java Wed Apr 27 11:11:32 2016
@@ -98,14 +98,14 @@
}
- protected void detectorChanged(Detector detector){
-
- for (int layer = 1; layer <= 6; layer++) {
-
- topLayerToStereoHit.put(layer, new ArrayList<TrackerHit>());
- bottomLayerToStereoHit.put(layer, new ArrayList<TrackerHit>());
- }
-
+ protected void detectorChanged(Detector detector){
+
+ for (int layer = 1; layer <= 6; layer++) {
+
+ topLayerToStereoHit.put(layer, new ArrayList<TrackerHit>());
+ bottomLayerToStereoHit.put(layer, new ArrayList<TrackerHit>());
+ }
+
tree = IAnalysisFactory.create().createTreeFactory().create();
histogramFactory = IAnalysisFactory.create().createHistogramFactory(tree);
@@ -159,18 +159,18 @@
plotters.get("Track Parameters").region(4).plot(trackPlots.get("tan_lambda"), this.createStyle(1, "", ""));
plotters.get("Track Parameters").region(4).plot(trackPlots.get("tan_lambda - shared strip hit"), this.createStyle(2, "", ""));
plotters.get("Track Parameters").region(4).plot(trackPlots.get("tan_lambda - l1 Isolation"), this.createStyle(3, "", ""));
-
- for (IPlotter plotter : plotters.values()) {
- plotter.show();
- }
- }
-
- @SuppressWarnings({ "unchecked", "rawtypes" })
+
+ for (IPlotter plotter : plotters.values()) {
+ plotter.show();
+ }
+ }
+
+ @SuppressWarnings({ "unchecked", "rawtypes" })
public void process(EventHeader event){
// If the event doesn't have any tracks, skip it
- if(!event.hasCollection(Track.class, trackCollectionName)) return;
-
+ if(!event.hasCollection(Track.class, trackCollectionName)) return;
+
// Get the collection of tracks from the event
List<Track> tracks = event.get(Track.class, trackCollectionName);
@@ -198,13 +198,13 @@
this.mapStereoHits(stereoHits);
// Loop over all of the tracks in the event
- for(Track track : tracks){
-
- boolean sharedHitTrack = false;
+ for(Track track : tracks){
+
+ boolean sharedHitTrack = false;
boolean l1Isolation = true;
-
- // Fill the track parameter plots
-
+
+ // Fill the track parameter plots
+
// Loop through all of the stereo hits associated with a track
for (TrackerHit rotatedStereoHit : track.getTrackerHits()) {
@@ -277,15 +277,15 @@
trackPlots.get("tan_lambda - l1 Isolation").fill(TrackUtils.getTanLambda(track));
trackPlots.get("chi2 - l1 Isolation").fill(track.getChi2());
}
- }
- }
-
+ }
+ }
+
private void mapStereoHits(List<TrackerHit> stereoHits) {
- for (int layer = 1; layer <= 6; layer++) {
- topLayerToStereoHit.get(layer).clear();
- bottomLayerToStereoHit.get(layer).clear();;
- }
+ for (int layer = 1; layer <= 6; layer++) {
+ topLayerToStereoHit.get(layer).clear();
+ bottomLayerToStereoHit.get(layer).clear();
+ }
for (TrackerHit stereoHit : stereoHits) {
HpsSiSensor sensor = (HpsSiSensor) ((RawTrackerHit) stereoHit.getRawHits().get(0)).getDetectorElement();
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtClusterAnalysis.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtClusterAnalysis.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtClusterAnalysis.java Wed Apr 27 11:11:32 2016
@@ -50,28 +50,28 @@
// Plotting
ITree tree;
IHistogramFactory histogramFactory;
- IPlotterFactory plotterFactory = IAnalysisFactory.create().createPlotterFactory();
- protected Map<String, IPlotter> plotters = new HashMap<String, IPlotter>();
-
- // All clusters
- private Map<String, IHistogram1D> clusterChargePlots = new HashMap<String, IHistogram1D>();
- private Map<String, IHistogram1D> singleHitClusterChargePlots = new HashMap<String, IHistogram1D>();
- private Map<String, IHistogram1D> multHitClusterChargePlots = new HashMap<String, IHistogram1D>();
- private Map<String, IHistogram1D> signalToNoisePlots = new HashMap<String, IHistogram1D>();
- private Map<String, IHistogram1D> singleHitSignalToNoisePlots = new HashMap<String, IHistogram1D>();
- private Map<String, IHistogram1D> multHitSignalToNoisePlots = new HashMap<String, IHistogram1D>();
- private Map<String, IHistogram1D> clusterSizePlots = new HashMap<String, IHistogram1D>();
- private Map<String, IHistogram1D> clusterTimePlots = new HashMap<String, IHistogram1D>();
- private Map<String, IHistogram2D> clusterChargeVsTimePlots = new HashMap<String, IHistogram2D>();
-
- // Clusters on track
- private Map<String, IHistogram1D> trackClusterChargePlots = new HashMap<String, IHistogram1D>();
- private Map<String, IHistogram1D> trackHitSignalToNoisePlots = new HashMap<String, IHistogram1D>();
- private Map<String, IHistogram1D> trackClusterTimePlots = new HashMap<String, IHistogram1D>();
- private Map<String, IHistogram2D> trackClusterChargeVsMomentum = new HashMap<String, IHistogram2D>();
- private Map<String, IHistogram2D> trackClusterChargeVsCosTheta = new HashMap<String, IHistogram2D>();
- private Map<String, IHistogram2D> trackClusterChargeVsSinPhi = new HashMap<String, IHistogram2D>();
-
+ IPlotterFactory plotterFactory = IAnalysisFactory.create().createPlotterFactory();
+ protected Map<String, IPlotter> plotters = new HashMap<String, IPlotter>();
+
+ // All clusters
+ private Map<String, IHistogram1D> clusterChargePlots = new HashMap<String, IHistogram1D>();
+ private Map<String, IHistogram1D> singleHitClusterChargePlots = new HashMap<String, IHistogram1D>();
+ private Map<String, IHistogram1D> multHitClusterChargePlots = new HashMap<String, IHistogram1D>();
+ private Map<String, IHistogram1D> signalToNoisePlots = new HashMap<String, IHistogram1D>();
+ private Map<String, IHistogram1D> singleHitSignalToNoisePlots = new HashMap<String, IHistogram1D>();
+ private Map<String, IHistogram1D> multHitSignalToNoisePlots = new HashMap<String, IHistogram1D>();
+ private Map<String, IHistogram1D> clusterSizePlots = new HashMap<String, IHistogram1D>();
+ private Map<String, IHistogram1D> clusterTimePlots = new HashMap<String, IHistogram1D>();
+ private Map<String, IHistogram2D> clusterChargeVsTimePlots = new HashMap<String, IHistogram2D>();
+
+ // Clusters on track
+ private Map<String, IHistogram1D> trackClusterChargePlots = new HashMap<String, IHistogram1D>();
+ private Map<String, IHistogram1D> trackHitSignalToNoisePlots = new HashMap<String, IHistogram1D>();
+ private Map<String, IHistogram1D> trackClusterTimePlots = new HashMap<String, IHistogram1D>();
+ private Map<String, IHistogram2D> trackClusterChargeVsMomentum = new HashMap<String, IHistogram2D>();
+ private Map<String, IHistogram2D> trackClusterChargeVsCosTheta = new HashMap<String, IHistogram2D>();
+ private Map<String, IHistogram2D> trackClusterChargeVsSinPhi = new HashMap<String, IHistogram2D>();
+
// Detector name
private static final String SUBDETECTOR_NAME = "Tracker";
@@ -92,29 +92,29 @@
private int computePlotterRegion(HpsSiSensor sensor) {
- if (sensor.getLayerNumber() < 7) {
- if (sensor.isTopLayer()) {
- return 6*(sensor.getLayerNumber() - 1);
- } else {
- return 6*(sensor.getLayerNumber() - 1) + 1;
- }
- } else {
-
- if (sensor.isTopLayer()) {
- if (sensor.getSide() == HpsSiSensor.POSITRON_SIDE) {
- return 6*(sensor.getLayerNumber() - 7) + 2;
- } else {
- return 6*(sensor.getLayerNumber() - 7) + 3;
- }
- } else if (sensor.isBottomLayer()) {
- if (sensor.getSide() == HpsSiSensor.POSITRON_SIDE) {
- return 6*(sensor.getLayerNumber() - 7) + 4;
- } else {
- return 6*(sensor.getLayerNumber() - 7) + 5;
- }
- }
- }
- return -1;
+ if (sensor.getLayerNumber() < 7) {
+ if (sensor.isTopLayer()) {
+ return 6*(sensor.getLayerNumber() - 1);
+ } else {
+ return 6*(sensor.getLayerNumber() - 1) + 1;
+ }
+ } else {
+
+ if (sensor.isTopLayer()) {
+ if (sensor.getSide() == HpsSiSensor.POSITRON_SIDE) {
+ return 6*(sensor.getLayerNumber() - 7) + 2;
+ } else {
+ return 6*(sensor.getLayerNumber() - 7) + 3;
+ }
+ } else if (sensor.isBottomLayer()) {
+ if (sensor.getSide() == HpsSiSensor.POSITRON_SIDE) {
+ return 6*(sensor.getLayerNumber() - 7) + 4;
+ } else {
+ return 6*(sensor.getLayerNumber() - 7) + 5;
+ }
+ }
+ }
+ return -1;
}
protected void detectorChanged(Detector detector) {
@@ -249,12 +249,12 @@
.get(sensor.getName()));
}
- for (IPlotter plotter : plotters.values()) {
- plotter.show();
- }
- }
-
- @SuppressWarnings({ "unchecked", "rawtypes" })
+ for (IPlotter plotter : plotters.values()) {
+ plotter.show();
+ }
+ }
+
+ @SuppressWarnings({ "unchecked", "rawtypes" })
public void process(EventHeader event) {
if (runNumber == -1) runNumber = event.getRunNumber();
@@ -354,18 +354,18 @@
this.mapReconstructedParticlesToTracks(tracks, fsParticles);
// Loop over all of the tracks in the event
- for(Track track : tracks){
+ for(Track track : tracks){
// Calculate the momentum of the track
double p = this.getReconstructedParticle(track).getMomentum().magnitude();
-
- for (TrackerHit rotatedStereoHit : track.getTrackerHits()) {
-
- // Get the HelicalTrackHit corresponding to the RotatedHelicalTrackHit
- // associated with a track
+
+ for (TrackerHit rotatedStereoHit : track.getTrackerHits()) {
+
+ // Get the HelicalTrackHit corresponding to the RotatedHelicalTrackHit
+ // associated with a track
Set<TrackerHit> trackClusters = stereoHitToClusters.allFrom(hthToRotatedHth.from(rotatedStereoHit));
-
- for (TrackerHit trackCluster : trackClusters) {
+
+ for (TrackerHit trackCluster : trackClusters) {
// Get the raw hits composing this cluster and use them to calculate the amplitude of the hit
double amplitudeSum = 0;
@@ -405,9 +405,9 @@
trackClusterChargeVsCosTheta.get(sensor.getName()).fill(TrackUtils.getCosTheta(track), amplitudeSum);
trackClusterChargeVsSinPhi.get(sensor.getName()).fill(Math.sin(TrackUtils.getPhi0(track)), amplitudeSum);
//trackClusterTimePlots.get(sensor.getName()).fill(trackCluster.time());
- }
- }
- }
+ }
+ }
+ }
}
public void endOfData() {
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtDataRates.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtDataRates.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtDataRates.java Wed Apr 27 11:11:32 2016
@@ -12,81 +12,81 @@
public class SvtDataRates extends Driver {
- //Map<VOLUMES, double[]> rawHitsPerLayer = new HashMap<VOLUMES, double[]>();
- double[][] rawHitsPerLayer = new double[12][4];
-
- //public enum VOLUMES { TOP, BOTTOM };
-
- // Collection Names
- String rawTrackerHitCollectionName = "SVTRawTrackerHits";
+ //Map<VOLUMES, double[]> rawHitsPerLayer = new HashMap<VOLUMES, double[]>();
+ double[][] rawHitsPerLayer = new double[12][4];
+
+ //public enum VOLUMES { TOP, BOTTOM };
+
+ // Collection Names
+ String rawTrackerHitCollectionName = "SVTRawTrackerHits";
- double totalEvents = 0;
- int totalLayersPerVolume = 0;
-
- public SvtDataRates(){}
-
- //static {
- // hep.aida.jfree.AnalysisFactory.register();
- //}
-
- protected void detectorChanged(Detector detector){
-
- List<HpsSiSensor> sensors = detector.getDetectorElement().findDescendants(HpsSiSensor.class);
- for(HpsSiSensor sensor : sensors){
- this.printDebug("Layer: " + sensor.getLayerNumber() + " Module: " + sensor.getModuleNumber());
- }
- totalLayersPerVolume = sensors.size()/2;
- //for(VOLUMES volume : VOLUMES.values()){
- // rawHitsPerLayer.put(volume, new double[totalLayersPerVolume]);
- //}
- }
-
- protected void process(EventHeader event){
-
- if(!event.hasCollection(RawTrackerHit.class, rawTrackerHitCollectionName)){
- return;
- }
-
- totalEvents++;
-
- List<RawTrackerHit> rawHits = event.get(RawTrackerHit.class, rawTrackerHitCollectionName);
- for(RawTrackerHit rawHit : rawHits){
-
- HpsSiSensor sensor = (HpsSiSensor) rawHit.getDetectorElement();
- int layer = sensor.getLayerNumber();
- int module = sensor.getModuleNumber();
- //if(sensor.isTopLayer()){
- // rawHitsPerLayer.get(VOLUMES.TOP)[layer]++;
- //} else {
- // rawHitsPerLayer.get(VOLUMES.BOTTOM)[layer]++;
- //}
-
- rawHitsPerLayer[layer-1][module]++;
- }
- }
-
- protected void endOfData(){
-
- //for(VOLUMES volume : VOLUMES.values()){
- //System.out.println("Volume: " + volume);
- //System.out.println("Hits per layer per event: ");
- //for(int layer = 0; layer < totalLayersPerVolume; layer++){
- // System.out.println("Layer: " + (layer+1) + ": " + rawHitsPerLayer.get(volume)[layer]/totalEvents);
- //}
- //}
-
- for(int layer = 0; layer < 12; layer++){
-
- for(int module = 0; module < 4; module++){
- System.out.println("Layer: " + layer +
- " Module: " + module +
- " Hits Per Layer: " + rawHitsPerLayer[layer][module]/totalEvents);
- }
- }
- }
+ double totalEvents = 0;
+ int totalLayersPerVolume = 0;
+
+ public SvtDataRates(){}
+
+ //static {
+ // hep.aida.jfree.AnalysisFactory.register();
+ //}
+
+ protected void detectorChanged(Detector detector){
+
+ List<HpsSiSensor> sensors = detector.getDetectorElement().findDescendants(HpsSiSensor.class);
+ for(HpsSiSensor sensor : sensors){
+ this.printDebug("Layer: " + sensor.getLayerNumber() + " Module: " + sensor.getModuleNumber());
+ }
+ totalLayersPerVolume = sensors.size()/2;
+ //for(VOLUMES volume : VOLUMES.values()){
+ // rawHitsPerLayer.put(volume, new double[totalLayersPerVolume]);
+ //}
+ }
+
+ protected void process(EventHeader event){
+
+ if(!event.hasCollection(RawTrackerHit.class, rawTrackerHitCollectionName)){
+ return;
+ }
+
+ totalEvents++;
+
+ List<RawTrackerHit> rawHits = event.get(RawTrackerHit.class, rawTrackerHitCollectionName);
+ for(RawTrackerHit rawHit : rawHits){
+
+ HpsSiSensor sensor = (HpsSiSensor) rawHit.getDetectorElement();
+ int layer = sensor.getLayerNumber();
+ int module = sensor.getModuleNumber();
+ //if(sensor.isTopLayer()){
+ // rawHitsPerLayer.get(VOLUMES.TOP)[layer]++;
+ //} else {
+ // rawHitsPerLayer.get(VOLUMES.BOTTOM)[layer]++;
+ //}
+
+ rawHitsPerLayer[layer-1][module]++;
+ }
+ }
+
+ protected void endOfData(){
+
+ //for(VOLUMES volume : VOLUMES.values()){
+ //System.out.println("Volume: " + volume);
+ //System.out.println("Hits per layer per event: ");
+ //for(int layer = 0; layer < totalLayersPerVolume; layer++){
+ // System.out.println("Layer: " + (layer+1) + ": " + rawHitsPerLayer.get(volume)[layer]/totalEvents);
+ //}
+ //}
+
+ for(int layer = 0; layer < 12; layer++){
+
+ for(int module = 0; module < 4; module++){
+ System.out.println("Layer: " + layer +
+ " Module: " + module +
+ " Hits Per Layer: " + rawHitsPerLayer[layer][module]/totalEvents);
+ }
+ }
+ }
- private void printDebug(String debugMessage){
- System.out.println(this.getClass().getSimpleName() + ": " + debugMessage);
- }
-
+ private void printDebug(String debugMessage){
+ System.out.println(this.getClass().getSimpleName() + ": " + debugMessage);
+ }
+
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtHitCorrelations.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtHitCorrelations.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtHitCorrelations.java Wed Apr 27 11:11:32 2016
@@ -28,12 +28,12 @@
*/
public class SvtHitCorrelations extends Driver {
- // TODO: Add documentation
- static {
- hep.aida.jfree.AnalysisFactory.register();
- }
+ // TODO: Add documentation
+ static {
+ hep.aida.jfree.AnalysisFactory.register();
+ }
- // Plotting
+ // Plotting
ITree tree;
IHistogramFactory histogramFactory;
IPlotterFactory plotterFactory = IAnalysisFactory.create().createPlotterFactory();
@@ -59,46 +59,46 @@
boolean enableBottomAxialAxial = false;
boolean enableBottomAxialStereo = false;
- /**
- *
- */
- public void setEnableTopAxialAxial(boolean enableTopAxialAxial){
- this.enableTopAxialAxial = enableTopAxialAxial;
- }
-
- /**
- *
- */
- public void setEnableTopAxialStereo(boolean enableTopAxialStereo){
- this.enableTopAxialStereo = enableTopAxialStereo;
- }
-
- /**
- *
- */
- public void setEnableBottomAxialAxial(boolean enableBottomAxialAxial){
- this.enableBottomAxialAxial = enableBottomAxialAxial;
- }
-
- /**
- *
- */
- public void setEnableBottomAxialStereo(boolean enableBottomAxialStereo){
- this.enableBottomAxialStereo = enableBottomAxialStereo;
- }
-
- /**
- *
- */
- private int computePlotterRegion(HpsSiSensor firstSensor, HpsSiSensor secondSensor) {
- return (this.getLayerNumber(firstSensor) - 1) + (this.getLayerNumber(secondSensor) - 1)*6;
- }
-
- protected void detectorChanged(Detector detector){
-
+ /**
+ *
+ */
+ public void setEnableTopAxialAxial(boolean enableTopAxialAxial){
+ this.enableTopAxialAxial = enableTopAxialAxial;
+ }
+
+ /**
+ *
+ */
+ public void setEnableTopAxialStereo(boolean enableTopAxialStereo){
+ this.enableTopAxialStereo = enableTopAxialStereo;
+ }
+
+ /**
+ *
+ */
+ public void setEnableBottomAxialAxial(boolean enableBottomAxialAxial){
+ this.enableBottomAxialAxial = enableBottomAxialAxial;
+ }
+
+ /**
+ *
+ */
+ public void setEnableBottomAxialStereo(boolean enableBottomAxialStereo){
+ this.enableBottomAxialStereo = enableBottomAxialStereo;
+ }
+
+ /**
+ *
+ */
+ private int computePlotterRegion(HpsSiSensor firstSensor, HpsSiSensor secondSensor) {
+ return (this.getLayerNumber(firstSensor) - 1) + (this.getLayerNumber(secondSensor) - 1)*6;
+ }
+
+ protected void detectorChanged(Detector detector){
+
tree = IAnalysisFactory.create().createTreeFactory().create();
histogramFactory = IAnalysisFactory.create().createHistogramFactory(tree);
-
+
sensors = detector.getSubdetector(SUBDETECTOR_NAME).getDetectorElement().findDescendants(HpsSiSensor.class);
if (sensors.size() == 0) {
@@ -178,55 +178,55 @@
}
for (IPlotter plotter : plotters.values()) plotter.show();
- }
-
- public void process(EventHeader event){
-
+ }
+
+ public void process(EventHeader event){
+
if (runNumber == -1) runNumber = event.getRunNumber();
-
- if(!event.hasCollection(RawTrackerHit.class, rawTrackerHitCollectionName)) return;
-
- List<RawTrackerHit> rawHits = event.get(RawTrackerHit.class, rawTrackerHitCollectionName);
-
- String plotName = "";
- for(RawTrackerHit firstRawHit : rawHits){
-
- HpsSiSensor firstSensor = (HpsSiSensor) firstRawHit.getDetectorElement();
- int firstChannel = firstRawHit.getIdentifierFieldValue("strip");
-
- for(RawTrackerHit secondRawHit : rawHits){
-
- HpsSiSensor secondSensor = (HpsSiSensor) secondRawHit.getDetectorElement();
- int secondChannel = secondRawHit.getIdentifierFieldValue("strip");
-
- if(firstSensor.isTopLayer() && secondSensor.isTopLayer()){
- if(enableTopAxialAxial && firstSensor.isAxial() && secondSensor.isAxial()){
-
- plotName = "Top Axial Layer " + this.getLayerNumber(firstSensor)
- + " vs Top Axial Layer " + this.getLayerNumber(secondSensor);
- topAxialAxialPlots.get(plotName).fill(firstChannel, secondChannel);
- } else if (enableTopAxialStereo && firstSensor.isAxial() && secondSensor.isStereo()) {
-
- plotName = "Top Axial Layer " + this.getLayerNumber(firstSensor)
- + " vs Top Stereo Layer " + this.getLayerNumber(secondSensor);
- topAxialStereoPlots.get(plotName).fill(firstChannel, secondChannel);
- }
- } else if (firstSensor.isBottomLayer() && secondSensor.isBottomLayer()) {
- if(enableBottomAxialAxial && firstSensor.isAxial() && secondSensor.isAxial()){
-
- plotName = "Bottom Axial Layer " + this.getLayerNumber(firstSensor)
- + " vs Bottom Axial Layer " + this.getLayerNumber(secondSensor);
- bottomAxialAxialPlots.get(plotName).fill(firstChannel, secondChannel);
- } else if (enableBottomAxialStereo && firstSensor.isAxial() && secondSensor.isStereo()) {
-
- plotName = "Bottom Axial Layer " + this.getLayerNumber(firstSensor)
- + " vs Bottom Stereo Layer " + this.getLayerNumber(secondSensor);
- bottomAxialStereoPlots.get(plotName).fill(firstChannel, secondChannel);
- }
- }
- }
- }
- }
+
+ if(!event.hasCollection(RawTrackerHit.class, rawTrackerHitCollectionName)) return;
+
+ List<RawTrackerHit> rawHits = event.get(RawTrackerHit.class, rawTrackerHitCollectionName);
+
+ String plotName = "";
+ for(RawTrackerHit firstRawHit : rawHits){
+
+ HpsSiSensor firstSensor = (HpsSiSensor) firstRawHit.getDetectorElement();
+ int firstChannel = firstRawHit.getIdentifierFieldValue("strip");
+
+ for(RawTrackerHit secondRawHit : rawHits){
+
+ HpsSiSensor secondSensor = (HpsSiSensor) secondRawHit.getDetectorElement();
+ int secondChannel = secondRawHit.getIdentifierFieldValue("strip");
+
+ if(firstSensor.isTopLayer() && secondSensor.isTopLayer()){
+ if(enableTopAxialAxial && firstSensor.isAxial() && secondSensor.isAxial()){
+
+ plotName = "Top Axial Layer " + this.getLayerNumber(firstSensor)
+ + " vs Top Axial Layer " + this.getLayerNumber(secondSensor);
+ topAxialAxialPlots.get(plotName).fill(firstChannel, secondChannel);
+ } else if (enableTopAxialStereo && firstSensor.isAxial() && secondSensor.isStereo()) {
+
+ plotName = "Top Axial Layer " + this.getLayerNumber(firstSensor)
+ + " vs Top Stereo Layer " + this.getLayerNumber(secondSensor);
+ topAxialStereoPlots.get(plotName).fill(firstChannel, secondChannel);
+ }
+ } else if (firstSensor.isBottomLayer() && secondSensor.isBottomLayer()) {
+ if(enableBottomAxialAxial && firstSensor.isAxial() && secondSensor.isAxial()){
+
+ plotName = "Bottom Axial Layer " + this.getLayerNumber(firstSensor)
+ + " vs Bottom Axial Layer " + this.getLayerNumber(secondSensor);
+ bottomAxialAxialPlots.get(plotName).fill(firstChannel, secondChannel);
+ } else if (enableBottomAxialStereo && firstSensor.isAxial() && secondSensor.isStereo()) {
+
+ plotName = "Bottom Axial Layer " + this.getLayerNumber(firstSensor)
+ + " vs Bottom Stereo Layer " + this.getLayerNumber(secondSensor);
+ bottomAxialStereoPlots.get(plotName).fill(firstChannel, secondChannel);
+ }
+ }
+ }
+ }
+ }
public void endOfData() {
@@ -240,12 +240,12 @@
e.printStackTrace();
}
}
-
-
- private int getLayerNumber(HpsSiSensor sensor) {
- return (int) Math.ceil(((double) sensor.getLayerNumber())/2);
- }
-
+
+
+ private int getLayerNumber(HpsSiSensor sensor) {
+ return (int) Math.ceil(((double) sensor.getLayerNumber())/2);
+ }
+
IPlotterStyle createStyle(String xAxisTitle, String yAxisTitle) {
// Create a default style
@@ -279,5 +279,5 @@
return style;
}
-
+
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtQA.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtQA.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtQA.java Wed Apr 27 11:11:32 2016
@@ -54,7 +54,7 @@
int channelNumber = 0;
int plotterIndex = 0;
- int apvNumber = 0;
+ int apvNumber = 0;
double totalNumberEvents = 0;
double totalNumberOfRawHitEvents = 0;
double[] totalTopSamples = new double[6];
@@ -210,7 +210,7 @@
*
*/
public void setEnableTotalNumberOfHitsPlots(boolean enableTotalNumberOfHitsPlots){
- this.enableTotalNumberOfHitsPlots = enableTotalNumberOfHitsPlots;
+ this.enableTotalNumberOfHitsPlots = enableTotalNumberOfHitsPlots;
}
/**
@@ -391,13 +391,13 @@
}
if(enableTotalNumberOfHitsPlots){
- title = "Total Number of RawTrackerHits";
- plotters.add(PlotUtils.setupPlotter(title, 0, 0));
- plotters.get(plotterIndex).style().statisticsBoxStyle().setVisible(true);
- histo1D = aida.histogram1D(title, 100, 0, 75);
- histos1D.add(histo1D);
- PlotUtils.setup1DRegion(plotters.get(plotterIndex), title, 0, "Number of RawTrackerHits", histo1D);
- plotterIndex++;
+ title = "Total Number of RawTrackerHits";
+ plotters.add(PlotUtils.setupPlotter(title, 0, 0));
+ plotters.get(plotterIndex).style().statisticsBoxStyle().setVisible(true);
+ histo1D = aida.histogram1D(title, 100, 0, 75);
+ histos1D.add(histo1D);
+ PlotUtils.setup1DRegion(plotters.get(plotterIndex), title, 0, "Number of RawTrackerHits", histo1D);
+ plotterIndex++;
}
for(IPlotter plotter : plotters) plotter.show();
@@ -488,7 +488,7 @@
aida.histogram1D(title).fill(sample);
}
title = "Shaper Signal Amplitude";
- aida.histogram1D(title).fill(fit.getAmp());
+ aida.histogram1D(title).fill(fit.getAmp());
System.out.println("Amplitude: " + fit.getAmp());
title="t0";
aida.histogram1D(title).fill(fit.getT0());
@@ -595,9 +595,9 @@
topSamples[sampleN-1] += samples[sampleN-1] - sensor.getPedestal(channel, sampleN-1);
}
else{
- aida.histogram2D("APV Sample Number vs Sample Amplitude - Bottom").fill(sampleN, samples[sampleN-1] - sensor.getPedestal(channel, sampleN-1));
- totalBottomSamples[sampleN-1]++;
- bottomSamples[sampleN-1] += samples[sampleN - 1] - sensor.getPedestal(channel, sampleN-1);
+ aida.histogram2D("APV Sample Number vs Sample Amplitude - Bottom").fill(sampleN, samples[sampleN-1] - sensor.getPedestal(channel, sampleN-1));
+ totalBottomSamples[sampleN-1]++;
+ bottomSamples[sampleN-1] += samples[sampleN - 1] - sensor.getPedestal(channel, sampleN-1);
}
}
}
@@ -659,7 +659,7 @@
int channel = ((RawTrackerHit) hts.rawhits().get(0)).getIdentifierFieldValue("strip");
if(sensorName.equals("all")){
- aida.histogram2D(sensor.getName() + " - t0 Resolution vs Channel #").fill(channel, meanT0 - hts.time());
+ aida.histogram2D(sensor.getName() + " - t0 Resolution vs Channel #").fill(channel, meanT0 - hts.time());
} else {
if(sensor.getName().equals(sensorName)){
aida.histogram1D(sensorName + " - Hit Time Resolution").fill(meanT0 - hts.time());
@@ -675,67 +675,67 @@
@Override
public void endOfData(){
- String title;
-
-
-
+ String title;
+
+
+
String plotName;
- if(enableOccupancy){
- for(HpsSiSensor sensor : sensors){
- title = sensor.getName() + " - Occupancy";
- // Scale the hits per channel by the number of events
- aida.histogram1D(title).scale(1/totalNumberEvents);
-
- // Write the occupancies to a file
- if(sensor.isTopLayer()){
- plotName = outputFile + "_top_";
- } else {
- plotName = outputFile + "_bottom_";
- }
-
- if(sensor.getLayerNumber() < 10){
- plotName += "0" + sensor.getLayerNumber() + ".dat";
- } else {
- plotName += sensor.getLayerNumber() + ".dat";
- }
-
- // Open the output files stream
- if(plotName != null){
- try{
- output = new BufferedWriter(new FileWriter(plotName));
- for(int channel = 0; channel < 640; channel++){
- output.write(channel + " " + aida.histogram1D(title).binHeight(channel) + "\n");
- }
- output.close();
- } catch(Exception e) {
- System.out.println(this.getClass().getSimpleName() + " :Error! " + e.getMessage());
- }
- }
- }
- }
-
- if(enableT0Plots){
- int bins = aida.histogram1D(sensorName + " - Hit Time Resolution").axis().bins();
- for(int bin = 0; bin < bins; bin++){
- System.out.println(bin + " " + aida.histogram1D(sensorName + " - Hit Time Resolution").binHeight(bin));
- }
- }
-
-
- /*
+ if(enableOccupancy){
for(HpsSiSensor sensor : sensors){
- if(outputFile != null && sensorName.equals(sensor.getName())){
- try{
- for(int channel = 0; channel < 639; channel++){
- output.write(channel + " " + this.getOccupancy(sensor, channel) + "\n");
- }
- output.close();
- } catch(IOException e){
- System.out.println(this.getClass().getSimpleName() + ": Error! " + e.getMessage());
- }
- }
-
- System.out.println("%===================================================================%");
+ title = sensor.getName() + " - Occupancy";
+ // Scale the hits per channel by the number of events
+ aida.histogram1D(title).scale(1/totalNumberEvents);
+
+ // Write the occupancies to a file
+ if(sensor.isTopLayer()){
+ plotName = outputFile + "_top_";
+ } else {
+ plotName = outputFile + "_bottom_";
+ }
+
+ if(sensor.getLayerNumber() < 10){
+ plotName += "0" + sensor.getLayerNumber() + ".dat";
+ } else {
+ plotName += sensor.getLayerNumber() + ".dat";
+ }
+
+ // Open the output files stream
+ if(plotName != null){
+ try{
+ output = new BufferedWriter(new FileWriter(plotName));
+ for(int channel = 0; channel < 640; channel++){
+ output.write(channel + " " + aida.histogram1D(title).binHeight(channel) + "\n");
+ }
+ output.close();
+ } catch(Exception e) {
+ System.out.println(this.getClass().getSimpleName() + " :Error! " + e.getMessage());
+ }
+ }
+ }
+ }
+
+ if(enableT0Plots){
+ int bins = aida.histogram1D(sensorName + " - Hit Time Resolution").axis().bins();
+ for(int bin = 0; bin < bins; bin++){
+ System.out.println(bin + " " + aida.histogram1D(sensorName + " - Hit Time Resolution").binHeight(bin));
+ }
+ }
+
+
+ /*
+ for(HpsSiSensor sensor : sensors){
+ if(outputFile != null && sensorName.equals(sensor.getName())){
+ try{
+ for(int channel = 0; channel < 639; channel++){
+ output.write(channel + " " + this.getOccupancy(sensor, channel) + "\n");
+ }
+ output.close();
+ } catch(IOException e){
+ System.out.println(this.getClass().getSimpleName() + ": Error! " + e.getMessage());
+ }
+ }
+
+ System.out.println("%===================================================================%");
System.out.println(sensor.getName() + " Bad Channels");
System.out.println("%===================================================================%");
for(int index = 0; index < 640; index++){
@@ -769,21 +769,21 @@
}
if(enableSamples){
- double sigma = 0;
- double[] topMean = new double[6];
- double[] bottomMean = new double[6];
-
+ double sigma = 0;
+ double[] topMean = new double[6];
+ double[] bottomMean = new double[6];
+
System.out.println("%===================================================================% \n");
- for(int index = 0; index < topSamples.length; index++){
- topMean[index] = topSamples[index]/totalTopSamples[index];
- System.out.println("Top sample " + index + " mean: " + topMean[index]);
- }
-
+ for(int index = 0; index < topSamples.length; index++){
+ topMean[index] = topSamples[index]/totalTopSamples[index];
+ System.out.println("Top sample " + index + " mean: " + topMean[index]);
+ }
+
System.out.println("\n%===================================================================% \n");
- for(int index = 0; index < bottomSamples.length; index++){
- bottomMean[index] = bottomSamples[index]/totalBottomSamples[index];
- System.out.println("Bottom sample " + index + " mean: " + bottomMean[index]);
- }
+ for(int index = 0; index < bottomSamples.length; index++){
+ bottomMean[index] = bottomSamples[index]/totalBottomSamples[index];
+ System.out.println("Bottom sample " + index + " mean: " + bottomMean[index]);
+ }
System.out.println("\n%===================================================================% \n");
}
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtTrackAnalysis.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtTrackAnalysis.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtTrackAnalysis.java Wed Apr 27 11:11:32 2016
@@ -46,8 +46,8 @@
IPlotterFactory plotterFactory = IAnalysisFactory.create().createPlotterFactory();
protected Map<String, IPlotter> plotters = new HashMap<String, IPlotter>();
private Map<String, IHistogram1D> trackPlots = new HashMap<String, IHistogram1D>();
- private Map<String, IHistogram1D> clusterChargePlots = new HashMap<String, IHistogram1D>();
- private Map<String, IHistogram1D> clusterSizePlots = new HashMap<String, IHistogram1D>();
+ private Map<String, IHistogram1D> clusterChargePlots = new HashMap<String, IHistogram1D>();
+ private Map<String, IHistogram1D> clusterSizePlots = new HashMap<String, IHistogram1D>();
private List<HpsSiSensor> sensors;
private Map<RawTrackerHit, LCRelation> fittedRawTrackerHitMap
@@ -64,67 +64,67 @@
private int runNumber = -1;
- int npositive = 0;
- int nnegative = 0;
- double ntracks = 0;
- double ntracksTop = 0;
- double ntracksBottom = 0;
- double nTwoTracks = 0;
- double nevents = 0;
-
- double d0Cut = -9999;
-
- // Flags
- boolean electronCut = false;
- boolean positronCut = false;
-
+ int npositive = 0;
+ int nnegative = 0;
+ double ntracks = 0;
+ double ntracksTop = 0;
+ double ntracksBottom = 0;
+ double nTwoTracks = 0;
+ double nevents = 0;
+
+ double d0Cut = -9999;
+
+ // Flags
+ boolean electronCut = false;
+ boolean positronCut = false;
+
/**
* Default Constructor
*/
- public SvtTrackAnalysis(){
- }
-
- public void setEnableElectronCut(boolean electronCut) {
- this.electronCut = electronCut;
- }
-
- public void setEnablePositronCut(boolean positronCut) {
- this.positronCut = positronCut;
- }
-
- public void setD0Cut(double d0Cut) {
- this.d0Cut = d0Cut;
- }
-
- private int computePlotterRegion(HpsSiSensor sensor) {
-
- if (sensor.getLayerNumber() < 7) {
- if (sensor.isTopLayer()) {
- return 6*(sensor.getLayerNumber() - 1);
- } else {
- return 6*(sensor.getLayerNumber() - 1) + 1;
- }
- } else {
-
- if (sensor.isTopLayer()) {
- if (sensor.getSide() == HpsSiSensor.POSITRON_SIDE) {
- return 6*(sensor.getLayerNumber() - 7) + 2;
- } else {
- return 6*(sensor.getLayerNumber() - 7) + 3;
- }
- } else if (sensor.isBottomLayer()) {
- if (sensor.getSide() == HpsSiSensor.POSITRON_SIDE) {
- return 6*(sensor.getLayerNumber() - 7) + 4;
- } else {
- return 6*(sensor.getLayerNumber() - 7) + 5;
- }
- }
- }
- return -1;
- }
-
- protected void detectorChanged(Detector detector){
-
+ public SvtTrackAnalysis(){
+ }
+
+ public void setEnableElectronCut(boolean electronCut) {
+ this.electronCut = electronCut;
+ }
+
+ public void setEnablePositronCut(boolean positronCut) {
+ this.positronCut = positronCut;
+ }
+
+ public void setD0Cut(double d0Cut) {
+ this.d0Cut = d0Cut;
+ }
+
+ private int computePlotterRegion(HpsSiSensor sensor) {
+
+ if (sensor.getLayerNumber() < 7) {
+ if (sensor.isTopLayer()) {
+ return 6*(sensor.getLayerNumber() - 1);
+ } else {
+ return 6*(sensor.getLayerNumber() - 1) + 1;
+ }
+ } else {
+
+ if (sensor.isTopLayer()) {
+ if (sensor.getSide() == HpsSiSensor.POSITRON_SIDE) {
+ return 6*(sensor.getLayerNumber() - 7) + 2;
+ } else {
+ return 6*(sensor.getLayerNumber() - 7) + 3;
+ }
+ } else if (sensor.isBottomLayer()) {
+ if (sensor.getSide() == HpsSiSensor.POSITRON_SIDE) {
+ return 6*(sensor.getLayerNumber() - 7) + 4;
+ } else {
+ return 6*(sensor.getLayerNumber() - 7) + 5;
+ }
+ }
+ }
+ return -1;
+ }
+
+ protected void detectorChanged(Detector detector){
+
tree = IAnalysisFactory.create().createTreeFactory().create();
histogramFactory = IAnalysisFactory.create().createHistogramFactory(tree);
@@ -195,66 +195,66 @@
}
- //--- Track Extrapolation ---//
- //---------------------------//
- /*plotters.add(aida.analysisFactory().createPlotterFactory().create("Track Position at Ecal"));
- plotters.get(nPlotters).region(0).plot(aida.histogram2D("Track Position at Ecal", 200, -350, 350, 200, -100, 100));
- plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
- plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ //--- Track Extrapolation ---//
+ //---------------------------//
+ /*plotters.add(aida.analysisFactory().createPlotterFactory().create("Track Position at Ecal"));
+ plotters.get(nPlotters).region(0).plot(aida.histogram2D("Track Position at Ecal", 200, -350, 350, 200, -100, 100));
+ plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
+ plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
plotters.get(nPlotters).style();
- nPlotters++;
+ nPlotters++;
plotters.add(aida.analysisFactory().createPlotterFactory().create("Track Position at Harp"));
plotters.get(nPlotters).region(0).plot(aida.histogram2D("Track Position at Harp", 200, -200, 200, 100, -50, 50));
- plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
- plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
- nPlotters++;
+ plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
+ plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
+ nPlotters++;
plotters.add(aida.analysisFactory().createPlotterFactory().create("Track Position at Ecal: curvature < 0"));
plotters.get(nPlotters).region(0).plot(aida.histogram2D("Track Position at Ecal: curvature < 0",200, -350, 350, 200, -100, 100));
- plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
- plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
- nPlotters++;
-
+ plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
+ plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
+ nPlotters++;
+
plotters.add(aida.analysisFactory().createPlotterFactory().create("Track Position at Harp: curvature < 0"));
plotters.get(nPlotters).region(0).plot(aida.histogram2D("Track Position at Harp: curvature < 0", 200, -200, 200, 100, -50, 50));
- plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
- plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
- nPlotters++;
-
+ plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
+ plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
+ nPlotters++;
+
plotters.add(aida.analysisFactory().createPlotterFactory().create("Track Position at Ecal: curvature > 0"));
plotters.get(nPlotters).region(0).plot(aida.histogram2D("Track Position at Ecal: curvature > 0", 200, -350, 350, 200, -100, 100));
- plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
- plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
- nPlotters++;
-
+ plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
+ plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
+ nPlotters++;
+
plotters.add(aida.analysisFactory().createPlotterFactory().create("Track Position at Harp: curvature > 0"));
plotters.get(nPlotters).region(0).plot(aida.histogram2D("Track Position at Harp: curvature > 0", 200, -200, 200, 100, -50, 50));
- plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
- plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
- nPlotters++;
-
- plotters.add(aida.analysisFactory().createPlotterFactory().create("Track Position at Ecal: Two Tracks"));
- plotters.get(nPlotters).region(0).plot(aida.histogram2D("Track Position at Ecal: Two Tracks", 200, -350, 350, 200, -100, 100));
- plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
- plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
+ plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
+ nPlotters++;
+
+ plotters.add(aida.analysisFactory().createPlotterFactory().create("Track Position at Ecal: Two Tracks"));
+ plotters.get(nPlotters).region(0).plot(aida.histogram2D("Track Position at Ecal: Two Tracks", 200, -350, 350, 200, -100, 100));
+ plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
+ plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
plotters.get(nPlotters).style();
- nPlotters++;
+ nPlotters++;
plotters.add(aida.analysisFactory().createPlotterFactory().create("Track Position at Harp: Two Tracks"));
plotters.get(nPlotters).region(0).plot(aida.histogram2D("Track Position at Harp: Two Tracks", 200, -200, 200, 100, -50, 50));
- plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
- plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
- nPlotters++;
-
+ plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
+ plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
+ nPlotters++;
+
//--- Momentum ---//
//----------------//
@@ -262,99 +262,99 @@
plotters.get(nPlotters).region(0).plot(aida.histogram1D("Px", 100, 0, 5));
plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
plotters.get(nPlotters).style().dataStyle().errorBarStyle().setVisible(false);
- nPlotters++;
+ nPlotters++;
plotters.add(aida.analysisFactory().createPlotterFactory().create("Py"));
plotters.get(nPlotters).region(0).plot(aida.histogram1D("Py", 100, 0, 5));
plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
plotters.get(nPlotters).style().dataStyle().errorBarStyle().setVisible(false);
- nPlotters++;
+ nPlotters++;
plotters.add(aida.analysisFactory().createPlotterFactory().create("Pz"));
plotters.get(nPlotters).region(0).plot(aida.histogram1D("Pz", 100, 0, 5));
plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
plotters.get(nPlotters).style().dataStyle().errorBarStyle().setVisible(false);
- nPlotters++;
-
+ nPlotters++;
+
plotters.add(aida.analysisFactory().createPlotterFactory().create("Px: C > 0"));
plotters.get(nPlotters).region(0).plot(aida.histogram1D("Px: C > 0", 100, 0, 5));
plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
plotters.get(nPlotters).style().dataStyle().errorBarStyle().setVisible(false);
- nPlotters++;
+ nPlotters++;
plotters.add(aida.analysisFactory().createPlotterFactory().create("Py: C > 0"));
plotters.get(nPlotters).region(0).plot(aida.histogram1D("Py: C > 0", 100, 0, 5));
plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
plotters.get(nPlotters).style().dataStyle().errorBarStyle().setVisible(false);
- nPlotters++;
+ nPlotters++;
plotters.add(aida.analysisFactory().createPlotterFactory().create("Pz: C > 0"));
plotters.get(nPlotters).region(0).plot(aida.histogram1D("Pz: C > 0", 100, 0, 5));
plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
plotters.get(nPlotters).style().dataStyle().errorBarStyle().setVisible(false);
- nPlotters++;
-
+ nPlotters++;
+
plotters.add(aida.analysisFactory().createPlotterFactory().create("Px: C < 0"));
plotters.get(nPlotters).region(0).plot(aida.histogram1D("Px: C < 0", 100, 0, 5));
plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
plotters.get(nPlotters).style().dataStyle().errorBarStyle().setVisible(false);
- nPlotters++;
+ nPlotters++;
plotters.add(aida.analysisFactory().createPlotterFactory().create("Py: C < 0"));
plotters.get(nPlotters).region(0).plot(aida.histogram1D("Py: C < 0", 100, 0, 5));
plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
plotters.get(nPlotters).style().dataStyle().errorBarStyle().setVisible(false);
- nPlotters++;
+ nPlotters++;
plotters.add(aida.analysisFactory().createPlotterFactory().create("Pz: C < 0"));
plotters.get(nPlotters).region(0).plot(aida.histogram1D("Pz: C < 0", 100, 0, 5));
plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
plotters.get(nPlotters).style().dataStyle().errorBarStyle().setVisible(false);
- nPlotters++;
-
+ nPlotters++;
+
plotters.add(aida.analysisFactory().createPlotterFactory().create("Px: Two Tracks"));
plotters.get(nPlotters).region(0).plot(aida.histogram1D("Px: Two Tracks", 100, 0, 5));
plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
plotters.get(nPlotters).style().dataStyle().errorBarStyle().setVisible(false);
- nPlotters++;
+ nPlotters++;
plotters.add(aida.analysisFactory().createPlotterFactory().create("E over P"));
plotters.get(nPlotters).region(0).plot(aida.histogram1D("E over P", 100, 0, 5));
plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
plotters.get(nPlotters).style().dataStyle().errorBarStyle().setVisible(false);
- nPlotters++;
-
- plotters.add(aida.analysisFactory().createPlotterFactory().create("E versus P"));
- plotters.get(nPlotters).region(0).plot(aida.histogram2D("E versus P", 100, 0, 1500, 100, 0, 4000));
- plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
- plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
- nPlotters++;
+ nPlotters++;
+
+ plotters.add(aida.analysisFactory().createPlotterFactory().create("E versus P"));
+ plotters.get(nPlotters).region(0).plot(aida.histogram2D("E versus P", 100, 0, 1500, 100, 0, 4000));
+ plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
+ plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
+ nPlotters++;
//--- Cluster Matching ---//
//------------------------//
plotters.add(aida.analysisFactory().createPlotterFactory().create("XY Difference between Ecal Cluster and Track Position"));
plotters.get(nPlotters).region(0).plot(aida.histogram2D("XY Difference between Ecal Cluster and Track Position", 200, -200, 200, 100, -50, 50));
- plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
- plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
- nPlotters++;
- */
- for (IPlotter plotter : plotters.values()) {
- plotter.show();
- }
- }
-
- @SuppressWarnings({ "unchecked", "rawtypes" })
+ plotters.get(nPlotters).region(0).style().setParameter("hist2DStyle", "colorMap");
+ plotters.get(nPlotters).region(0).style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ plotters.get(nPlotters).style().statisticsBoxStyle().setVisible(false);
+ nPlotters++;
+ */
+ for (IPlotter plotter : plotters.values()) {
+ plotter.show();
+ }
+ }
+
+ @SuppressWarnings({ "unchecked", "rawtypes" })
public void process(EventHeader event){
- nevents++;
-
- // Get the run number from the event
+ nevents++;
+
+ // Get the run number from the event
if (runNumber == -1) runNumber = event.getRunNumber();
-
+
// If the event doesn't have any tracks, skip it
- if(!event.hasCollection(Track.class, trackCollectionName)) return;
-
+ if(!event.hasCollection(Track.class, trackCollectionName)) return;
+
// Get the collection of tracks from the event
List<Track> tracks = event.get(Track.class, trackCollectionName);
@@ -386,17 +386,17 @@
trackPlots.get("Number of tracks").fill(tracks.size());
// Loop over all of the tracks in the event
- for(Track track : tracks){
-
- if (TrackUtils.getR(track) < 0 && electronCut) continue;
-
- if (TrackUtils.getR(track) > 0 && positronCut) continue;
-
- if (d0Cut != -9999 && Math.abs(TrackUtils.getDoca(track)) < d0Cut) continue;
-
- trackPlots.get("Track charge").fill(TrackUtils.getR(track), 1);
-
- // Fill the track parameter plots
+ for(Track track : tracks){
+
+ if (TrackUtils.getR(track) < 0 && electronCut) continue;
+
+ if (TrackUtils.getR(track) > 0 && positronCut) continue;
+
+ if (d0Cut != -9999 && Math.abs(TrackUtils.getDoca(track)) < d0Cut) continue;
+
+ trackPlots.get("Track charge").fill(TrackUtils.getR(track), 1);
+
+ // Fill the track parameter plots
trackPlots.get("doca").fill(TrackUtils.getDoca(track));
trackPlots.get("z0").fill(TrackUtils.getZ0(track));
trackPlots.get("sin(phi0)").fill(TrackUtils.getPhi0(track));
@@ -444,8 +444,8 @@
}
}
}
-
- public void endOfData() {
+
+ public void endOfData() {
String rootFile = "run" + runNumber + "_track_analysis.root";
RootFileStore store = new RootFileStore(rootFile);
@@ -473,7 +473,7 @@
fittedRawTrackerHitMap.put(FittedRawTrackerHit.getRawTrackerHit(fittedHit), fittedHit);
}
}
-
+
/**
*
* @param rawHit
@@ -489,101 +489,101 @@
/*
ntracks++;
- Hep3Vector positionEcal = TrackUtils.getTrackPositionAtEcal(track);
- System.out.println("Position at Ecal: " + positionEcal);
- Hep3Vector positionConverter = TrackUtils.extrapolateTrack(track,-700);
-
- aida.histogram2D("Track Position at Ecal").fill(positionEcal.y(), positionEcal.z());
- aida.histogram2D("Track Position at Harp").fill(positionConverter.y(), positionConverter.z());
-
- if(positionEcal.z() > 0 ) ntracksTop++;
- else if(positionEcal.z() < 0) ntracksBottom++;
+ Hep3Vector positionEcal = TrackUtils.getTrackPositionAtEcal(track);
+ System.out.println("Position at Ecal: " + positionEcal);
+ Hep3Vector positionConverter = TrackUtils.extrapolateTrack(track,-700);
+
+ aida.histogram2D("Track Position at Ecal").fill(positionEcal.y(), positionEcal.z());
+ aida.histogram2D("Track Position at Harp").fill(positionConverter.y(), positionConverter.z());
+
+ if(positionEcal.z() > 0 ) ntracksTop++;
+ else if(positionEcal.z() < 0) ntracksBottom++;
*/
-
-
+
+
/*
- aida.histogram1D("Px").fill(track.getTrackStates().get(0).getMomentum()[0]);
- aida.histogram1D("Py").fill(track.getTrackStates().get(0).getMomentum()[1]);
- aida.histogram1D("Pz").fill(track.getTrackStates().get(0).getMomentum()[2]);
- aida.histogram1D("ChiSquared").fill(track.getChi2());
-
- if(Math.signum(TrackUtils.getR(track)) < 0){
- aida.histogram2D("Track Position at Ecal: curvature < 0").fill(positionEcal.y(), positionEcal.z());
- aida.histogram2D("Track Position at Harp: curvature < 0").fill(positionConverter.y(), positionConverter.z());
- aida.histogram1D("Px: C < 0").fill(track.getTrackStates().get(0).getMomentum()[0]);
- aida.histogram1D("Py: C < 0").fill(track.getTrackStates().get(0).getMomentum()[1]);
- aida.histogram1D("Pz: C < 0").fill(track.getTrackStates().get(0).getMomentum()[2]);
- nnegative++;
- } else if(Math.signum(TrackUtils.getR(track)) > 0){
- aida.histogram2D("Track Position at Ecal: curvature > 0").fill(positionEcal.y(), positionEcal.z());
- aida.histogram2D("Track Position at Harp: curvature > 0").fill(positionConverter.y(), positionConverter.z());
- aida.histogram1D("Px: C > 0").fill(track.getTrackStates().get(0).getMomentum()[0]);
- aida.histogram1D("Px: C > 0").fill(track.getTrackStates().get(0).getMomentum()[1]);
- aida.histogram1D("Px: C > 0").fill(track.getTrackStates().get(0).getMomentum()[2]);
- npositive++;
- }
-
- if(tracks.size() > 1){
- aida.histogram2D("Track Position at Ecal: Two Tracks").fill(positionEcal.y(), positionEcal.z());
- aida.histogram2D("Track Position at Harp: Two Tracks").fill(positionConverter.y(), positionConverter.z());
- aida.histogram1D("Px: Two Tracks").fill(track.getTrackStates().get(0).getMomentum()[0]);
- if(tracks.size() == 2) nTwoTracks++;
- }
-
- trackToEcalPosition.put(positionEcal, track);
- ecalPos.add(positionEcal);
- }
-
- if(!event.hasCollection(Cluster.class, "EcalClusters")) return;
- List<Cluster> clusters = event.get(Cluster.class, "EcalClusters");
-
-
- for(Hep3Vector ecalP : ecalPos){
- double xdiff = 1000;
- double ydiff = 1000;
- for(Cluster cluster : clusters){
- double xd = ecalP.y() - cluster.getPosition()[0];
- double yd = ecalP.z() - cluster.getPosition()[1];
- if(yd < ydiff){
- xdiff = xd;
- ydiff = yd;
- trackToCluster.put(trackToEcalPosition.get(ecalP),cluster);
- }
- }
- clusters.remove(trackToCluster.get(trackToEcalPosition.get(ecalP)));
- aida.histogram2D("XY Difference between Ecal Cluster and Track Position").fill(xdiff, ydiff);
- }
-
- for(Map.Entry<Track, Cluster> entry : trackToCluster.entrySet()){
- double Energy = entry.getValue().getEnergy();
- Track track = entry.getKey();
- double pTotal = Math.sqrt(track.getTrackStates().get(0).getMomentum()[0]*track.getTrackStates().get(0).getMomentum()[0] + track.getTrackStates().get(0).getMomentum()[1]*track.getTrackStates().get(0).getMomentum()[1] + track.getTrackStates().get(0).getMomentum()[2]*track.getTrackStates().get(0).getMomentum()[2]);
-
- double ep = Energy/(pTotal*1000);
-
- System.out.println("Energy: " + Energy + "P: " + pTotal + " E over P: " + ep);
-
- aida.histogram1D("E over P").fill(ep);
- aida.histogram2D("E versus P").fill(Energy, pTotal*1000);
- }
-
- for(Cluster cluster : clusters){
- double[] clusterPosition = cluster.getPosition();
-
- System.out.println("Cluster Position: [" + clusterPosition[0] + ", " + clusterPosition[1] + ", " + clusterPosition[2]+ "]");
- }
-
- double ratio = nnegative/npositive;
- System.out.println("Ratio of Negative to Position Tracks: " + ratio);
-
- double tracksRatio = ntracks/nevents;
- double tracksTopRatio = ntracksTop/nevents;
- double tracksBottomRatio = ntracksBottom/nevents;
- double twoTrackRatio = nTwoTracks/nevents;
- System.out.println("Number of tracks per event: " + tracksRatio);
- System.out.println("Number of top tracks per event: " + tracksTopRatio);
- System.out.println("Number of bottom tracks per event: " + tracksBottomRatio);
- System.out.println("Number of two track events: " + twoTrackRatio);
- }*/
-
-
+ aida.histogram1D("Px").fill(track.getTrackStates().get(0).getMomentum()[0]);
+ aida.histogram1D("Py").fill(track.getTrackStates().get(0).getMomentum()[1]);
+ aida.histogram1D("Pz").fill(track.getTrackStates().get(0).getMomentum()[2]);
+ aida.histogram1D("ChiSquared").fill(track.getChi2());
+
+ if(Math.signum(TrackUtils.getR(track)) < 0){
+ aida.histogram2D("Track Position at Ecal: curvature < 0").fill(positionEcal.y(), positionEcal.z());
+ aida.histogram2D("Track Position at Harp: curvature < 0").fill(positionConverter.y(), positionConverter.z());
+ aida.histogram1D("Px: C < 0").fill(track.getTrackStates().get(0).getMomentum()[0]);
+ aida.histogram1D("Py: C < 0").fill(track.getTrackStates().get(0).getMomentum()[1]);
+ aida.histogram1D("Pz: C < 0").fill(track.getTrackStates().get(0).getMomentum()[2]);
+ nnegative++;
+ } else if(Math.signum(TrackUtils.getR(track)) > 0){
+ aida.histogram2D("Track Position at Ecal: curvature > 0").fill(positionEcal.y(), positionEcal.z());
+ aida.histogram2D("Track Position at Harp: curvature > 0").fill(positionConverter.y(), positionConverter.z());
+ aida.histogram1D("Px: C > 0").fill(track.getTrackStates().get(0).getMomentum()[0]);
+ aida.histogram1D("Px: C > 0").fill(track.getTrackStates().get(0).getMomentum()[1]);
+ aida.histogram1D("Px: C > 0").fill(track.getTrackStates().get(0).getMomentum()[2]);
+ npositive++;
+ }
+
+ if(tracks.size() > 1){
+ aida.histogram2D("Track Position at Ecal: Two Tracks").fill(positionEcal.y(), positionEcal.z());
+ aida.histogram2D("Track Position at Harp: Two Tracks").fill(positionConverter.y(), positionConverter.z());
+ aida.histogram1D("Px: Two Tracks").fill(track.getTrackStates().get(0).getMomentum()[0]);
+ if(tracks.size() == 2) nTwoTracks++;
+ }
+
+ trackToEcalPosition.put(positionEcal, track);
+ ecalPos.add(positionEcal);
+ }
+
+ if(!event.hasCollection(Cluster.class, "EcalClusters")) return;
+ List<Cluster> clusters = event.get(Cluster.class, "EcalClusters");
+
+
+ for(Hep3Vector ecalP : ecalPos){
+ double xdiff = 1000;
+ double ydiff = 1000;
+ for(Cluster cluster : clusters){
+ double xd = ecalP.y() - cluster.getPosition()[0];
+ double yd = ecalP.z() - cluster.getPosition()[1];
+ if(yd < ydiff){
+ xdiff = xd;
+ ydiff = yd;
+ trackToCluster.put(trackToEcalPosition.get(ecalP),cluster);
+ }
+ }
+ clusters.remove(trackToCluster.get(trackToEcalPosition.get(ecalP)));
+ aida.histogram2D("XY Difference between Ecal Cluster and Track Position").fill(xdiff, ydiff);
+ }
+
+ for(Map.Entry<Track, Cluster> entry : trackToCluster.entrySet()){
+ double Energy = entry.getValue().getEnergy();
+ Track track = entry.getKey();
+ double pTotal = Math.sqrt(track.getTrackStates().get(0).getMomentum()[0]*track.getTrackStates().get(0).getMomentum()[0] + track.getTrackStates().get(0).getMomentum()[1]*track.getTrackStates().get(0).getMomentum()[1] + track.getTrackStates().get(0).getMomentum()[2]*track.getTrackStates().get(0).getMomentum()[2]);
+
+ double ep = Energy/(pTotal*1000);
+
+ System.out.println("Energy: " + Energy + "P: " + pTotal + " E over P: " + ep);
+
+ aida.histogram1D("E over P").fill(ep);
+ aida.histogram2D("E versus P").fill(Energy, pTotal*1000);
+ }
+
+ for(Cluster cluster : clusters){
+ double[] clusterPosition = cluster.getPosition();
+
+ System.out.println("Cluster Position: [" + clusterPosition[0] + ", " + clusterPosition[1] + ", " + clusterPosition[2]+ "]");
+ }
+
+ double ratio = nnegative/npositive;
+ System.out.println("Ratio of Negative to Position Tracks: " + ratio);
+
+ double tracksRatio = ntracks/nevents;
+ double tracksTopRatio = ntracksTop/nevents;
+ double tracksBottomRatio = ntracksBottom/nevents;
+ double twoTrackRatio = nTwoTracks/nevents;
+ System.out.println("Number of tracks per event: " + tracksRatio);
+ System.out.println("Number of top tracks per event: " + tracksTopRatio);
+ System.out.println("Number of bottom tracks per event: " + tracksBottomRatio);
+ System.out.println("Number of two track events: " + twoTrackRatio);
+ }*/
+
+
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtTrackRecoEfficiency.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtTrackRecoEfficiency.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/SvtTrackRecoEfficiency.java Wed Apr 27 11:11:32 2016
@@ -100,7 +100,7 @@
* Set the name of the file to output efficiency data to
*/
public void setEfficiencyOutputFile(String efficiencyOutputFile){
- this.efficiencyOutputFile = efficiencyOutputFile;
+ this.efficiencyOutputFile = efficiencyOutputFile;
}
/**
@@ -124,31 +124,31 @@
* @param message : debug message
*/
private void printDebug(String message){
- if(debug){
- System.out.println(this.getClass().getSimpleName() + ": " + message);
- }
+ if(debug){
+ System.out.println(this.getClass().getSimpleName() + ": " + message);
+ }
}
/**
*
*/
protected void detectorChanged(Detector detector){
- super.detectorChanged(detector);
-
- sensors = detector.getSubdetector("Tracker").getDetectorElement().findDescendants(HpsSiSensor.class);
-
+ super.detectorChanged(detector);
+
+ sensors = detector.getSubdetector("Tracker").getDetectorElement().findDescendants(HpsSiSensor.class);
+
// setup AIDA
aida = AIDA.defaultInstance();
aida.tree().cd("/");
// Open the output file stream
if(efficiencyOutputFile != null && momentumOutputFile != null){
- try{
- efficiencyOutput = new BufferedWriter(new FileWriter(efficiencyOutputFile));
+ try{
+ efficiencyOutput = new BufferedWriter(new FileWriter(efficiencyOutputFile));
momentumOutput = new BufferedWriter(new FileWriter(momentumOutputFile));
- } catch(Exception e){
- System.out.println(this.getClass().getSimpleName() + ": Error! " + e.getMessage());
- }
+ } catch(Exception e){
+ System.out.println(this.getClass().getSimpleName() + ": Error! " + e.getMessage());
+ }
}
// Get the total number of SVT layers
@@ -164,28 +164,28 @@
}
if(trackingEfficiencyPlots){
- plotters.add(PlotUtils.setupPlotter("Track Momentum", 0, 0));
- histo1D.add(aida.histogram1D("Momentum - Reconstructed Tracks", 14, 0, 5.6));
- PlotUtils.setup1DRegion(plotters.get(plotterIndex), "Reconstructed Tracks", 0, "Momentum [GeV]", histo1D.get(histo1DIndex));
+ plotters.add(PlotUtils.setupPlotter("Track Momentum", 0, 0));
+ histo1D.add(aida.histogram1D("Momentum - Reconstructed Tracks", 14, 0, 5.6));
+ PlotUtils.setup1DRegion(plotters.get(plotterIndex), "Reconstructed Tracks", 0, "Momentum [GeV]", histo1D.get(histo1DIndex));
histo1DIndex++;
histo1D.add(aida.histogram1D("Momentum - Findable Tracks", 14, 0, 5.6));
- PlotUtils.setup1DRegion(plotters.get(plotterIndex), "Findable Tracks", 0, "Momentum [GeV]", histo1D.get(histo1DIndex));
- plotterIndex++;
- histo1DIndex++;
+ PlotUtils.setup1DRegion(plotters.get(plotterIndex), "Findable Tracks", 0, "Momentum [GeV]", histo1D.get(histo1DIndex));
+ plotterIndex++;
+ histo1DIndex++;
}
for(IPlotter plotter : plotters){
- plotter.show();
+ plotter.show();
}
}
private String samplesToString(short[] samples){
- String sampleList = "[ ";
- for(short sample : samples){
- sampleList += Short.toString(sample) + ", ";
- }
- sampleList += "]";
- return sampleList;
+ String sampleList = "[ ";
+ for(short sample : samples){
+ sampleList += Short.toString(sample) + ", ";
+ }
+ sampleList += "]";
+ return sampleList;
}
/**
@@ -196,7 +196,7 @@
@Override
protected void process(EventHeader event){
- // For now, only look at events with a single track
+ // For now, only look at events with a single track
if(event.get(Track.class, trackCollectionName).size() > 1) return;
eventNumber++;
@@ -204,46 +204,46 @@
if(!event.hasCollection(SimTrackerHit.class, simTrackerHitCollectionName)) return;
List<SimTrackerHit> simTrackerHits = event.get(SimTrackerHit.class, simTrackerHitCollectionName);
this.printDebug("\nEvent " + eventNumber + " contains " + simTrackerHits.size() + " SimTrackerHits");
- // Loop through all SimTrackerHits and confirm that a corresponding RawTrackerHit was created
- for(SimTrackerHit simTrackHit : simTrackerHits){
-
- this.printDebug("SimTrackerHit Layer Number: " + simTrackHit.getLayerNumber());
- }
+ // Loop through all SimTrackerHits and confirm that a corresponding RawTrackerHit was created
+ for(SimTrackerHit simTrackHit : simTrackerHits){
+
+ this.printDebug("SimTrackerHit Layer Number: " + simTrackHit.getLayerNumber());
+ }
// Get the list of RawTrackerHits and add them to the sensor readout
List<RawTrackerHit> rawHits = event.get(RawTrackerHit.class, rawTrackerHitCollectionName);
String volume;
for(RawTrackerHit rawHit : rawHits){
- HpsSiSensor sensor = (HpsSiSensor) rawHit.getDetectorElement();
- if(sensor.isTopLayer()){
- volume = "Top Volume ";
- } else {
- volume = "Bottom Volume ";
- }
- this.printDebug(volume + "RawTrackerHit Channel #: " + rawHit.getIdentifierFieldValue("strip") + " Layer Number: " + rawHit.getLayerNumber()
- + " Samples: " + samplesToString(rawHit.getADCValues()));
+ HpsSiSensor sensor = (HpsSiSensor) rawHit.getDetectorElement();
+ if(sensor.isTopLayer()){
+ volume = "Top Volume ";
+ } else {
+ volume = "Bottom Volume ";
+ }
+ this.printDebug(volume + "RawTrackerHit Channel #: " + rawHit.getIdentifierFieldValue("strip") + " Layer Number: " + rawHit.getLayerNumber()
+ + " Samples: " + samplesToString(rawHit.getADCValues()));
((HpsSiSensor) rawHit.getDetectorElement()).getReadout().addHit(rawHit);
}
if(event.hasCollection(SiTrackerHit.class, siTrackerHitCollectionName)){
- List<SiTrackerHit> hitlist = event.get(SiTrackerHit.class, siTrackerHitCollectionName);
- for(SiTrackerHit siTrackerHit : hitlist){
- this.printDebug("Cluster is comprised by the following raw hits:");
- for(RawTrackerHit rawHit : siTrackerHit.getRawHits()){
- this.printDebug("RawTrackerHit Channel #: " + rawHit.getIdentifierFieldValue("strip") + " Layer Number: " + rawHit.getLayerNumber());
- }
- }
+ List<SiTrackerHit> hitlist = event.get(SiTrackerHit.class, siTrackerHitCollectionName);
+ for(SiTrackerHit siTrackerHit : hitlist){
+ this.printDebug("Cluster is comprised by the following raw hits:");
+ for(RawTrackerHit rawHit : siTrackerHit.getRawHits()){
+ this.printDebug("RawTrackerHit Channel #: " + rawHit.getIdentifierFieldValue("strip") + " Layer Number: " + rawHit.getLayerNumber());
+ }
+ }
}
// Get the MC Particles associated with the SimTrackerHits
List<MCParticle> mcParticles = event.getMCParticles();
if(debug){
- String particleList = "[ ";
- for(MCParticle mcParticle : mcParticles){
- particleList += mcParticle.getPDGID() + ", ";
- }
- particleList += "]";
- this.printDebug("MC Particles: " + particleList);
+ String particleList = "[ ";
+ for(MCParticle mcParticle : mcParticles){
+ particleList += mcParticle.getPDGID() + ", ";
+ }
+ particleList += "]";
+ this.printDebug("MC Particles: " + particleList);
}
// Get the magnetic field
@@ -264,10 +264,10 @@
Set<SimTrackerHit> trackerHits = findable.getSimTrackerHits(mcParticle);
if(this.isSameSvtVolume(trackerHits)){
if(debug){
- this.printDebug("Track is findable");
- this.printDebug("MC particle momentum: " + mcParticle.getMomentum().toString());
- }
-
+ this.printDebug("Track is findable");
+ this.printDebug("MC particle momentum: " + mcParticle.getMomentum().toString());
+ }
+
findableTracks++;
trackIsFindable = true;
}
@@ -303,8 +303,8 @@
if(!mcParticles.isEmpty() && trackingEfficiencyPlots){
// If the list still contains MC Particles, a matching track wasn't found
- this.printDebug("No matching track found");
-
+ this.printDebug("No matching track found");
+
// Check that all stereoHits were correctly assigned to an MCParticle
for(MCParticle mcParticle : mcParticles){
@@ -326,7 +326,7 @@
// Determine if the MC particle passed through the top or bottom SVT volume
for(SimTrackerHit simHit : simHits){
- HpsSiSensor sensor = (HpsSiSensor) simHit.getDetectorElement();
+ HpsSiSensor sensor = (HpsSiSensor) simHit.getDetectorElement();
if(sensor.isTopLayer()){
this.printDebug("MC Particle passed through the top layer");
isTopTrack = true;
@@ -376,7 +376,7 @@
{
int volumeIndex = 0;
for(SimTrackerHit simTrackerHit : simTrackerHits){
- HpsSiSensor sensor = (HpsSiSensor) simTrackerHit.getDetectorElement();
+ HpsSiSensor sensor = (HpsSiSensor) simTrackerHit.getDetectorElement();
if(sensor.isTopLayer()) volumeIndex++;
else volumeIndex--;
}
@@ -454,24 +454,24 @@
@Override
public void endOfData()
{
-
+
if(trackingEfficiencyPlots && efficiencyOutputFile != null && momentumOutputFile != null){
- try{
- int bins = aida.histogram1D("Momentum - Findable Tracks").axis().bins();
- for(int index = 0; index < bins; index++){
- if(aida.histogram1D("Momentum - Reconstructed Tracks").binEntries(index) == 0) efficiencyOutput.write(index + " " + 0 + "\n");
- else efficiencyOutput.write(index + " " + aida.histogram1D("Momentum - Reconstructed Tracks").binEntries(index) + "\n");
-
+ try{
+ int bins = aida.histogram1D("Momentum - Findable Tracks").axis().bins();
+ for(int index = 0; index < bins; index++){
+ if(aida.histogram1D("Momentum - Reconstructed Tracks").binEntries(index) == 0) efficiencyOutput.write(index + " " + 0 + "\n");
+ else efficiencyOutput.write(index + " " + aida.histogram1D("Momentum - Reconstructed Tracks").binEntries(index) + "\n");
+
if(aida.histogram1D("Momentum - Findable Tracks").binEntries(index) == 0) momentumOutput.write(index + " " + 0 + "\n");
- else momentumOutput.write(index + " " + aida.histogram1D("Momentum - Findable Tracks").binEntries(index) + "\n");
- }
- efficiencyOutput.close();
+ else momentumOutput.write(index + " " + aida.histogram1D("Momentum - Findable Tracks").binEntries(index) + "\n");
+ }
+ efficiencyOutput.close();
momentumOutput.close();
- } catch(IOException e){
- System.out.println(this.getClass().getSimpleName() + ": Error! " + e.getMessage());
- }
+ } catch(IOException e){
+ System.out.println(this.getClass().getSimpleName() + ": Error! " + e.getMessage());
+ }
}
-
+
System.out.println("%===============================================================%");
System.out.println("%============== Track Reconstruction Efficiencies ==============%");
System.out.println("%===============================================================%\n%");
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/TestRunTrackReconEfficiency.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/TestRunTrackReconEfficiency.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/omoreno/TestRunTrackReconEfficiency.java Wed Apr 27 11:11:32 2016
@@ -60,18 +60,18 @@
boolean topTrigger = false;
// Collection Names
- String stereoHitCollectionName = "HelicalTrackHits";
- String trackCollectionName = "MatchedTracks";
- String ecalClustersCollectionName = "EcalClusters";
- String triggerDataCollectionName = "TriggerBank";
+ String stereoHitCollectionName = "HelicalTrackHits";
+ String trackCollectionName = "MatchedTracks";
+ String ecalClustersCollectionName = "EcalClusters";
+ String triggerDataCollectionName = "TriggerBank";
// Plots
IHistogram1D findableTrackMomentum;
IHistogram1D totalTrackMomentum;
- IHistogram1D xPositionResidual;
- IHistogram1D yPositionResidual;
- IHistogram1D zPositionResidual;
- IHistogram1D r;
+ IHistogram1D xPositionResidual;
+ IHistogram1D yPositionResidual;
+ IHistogram1D zPositionResidual;
+ IHistogram1D r;
/**
* Dflt Ctor
@@ -89,7 +89,7 @@
*
*/
public void setThresholdEnergy(double thresholdEnergy){
- this.thresholdEnergy = thresholdEnergy;
+ this.thresholdEnergy = thresholdEnergy;
}
public void setClusterEnergyDifference(double energyDifference){
@@ -117,15 +117,15 @@
plotterIndex++;
// Create plot for diffence in track and cluster position
- plotters.add(PlotUtils.setupPlotter("Track-Cluster Position Residual", 2, 2));
- xPositionResidual = aida.histogram1D("x Residual", 100, -100, 100);
- yPositionResidual = aida.histogram1D("y Residual", 100, -100, 100);
- zPositionResidual = aida.histogram1D("z Residual", 100, -100, 100);
- r = aida.histogram1D("r", 100, -100, 100);
- PlotUtils.setup1DRegion(plotters.get(plotterIndex), "x Residual", 0, "delta x [mm]", xPositionResidual);
- PlotUtils.setup1DRegion(plotters.get(plotterIndex), "y Residual", 1, "delta y [mm]", yPositionResidual);
- PlotUtils.setup1DRegion(plotters.get(plotterIndex), "z Residual", 2, "delta z [mm]", zPositionResidual);
- PlotUtils.setup1DRegion(plotters.get(plotterIndex), "r", 3, "r [mm]", r);
+ plotters.add(PlotUtils.setupPlotter("Track-Cluster Position Residual", 2, 2));
+ xPositionResidual = aida.histogram1D("x Residual", 100, -100, 100);
+ yPositionResidual = aida.histogram1D("y Residual", 100, -100, 100);
+ zPositionResidual = aida.histogram1D("z Residual", 100, -100, 100);
+ r = aida.histogram1D("r", 100, -100, 100);
+ PlotUtils.setup1DRegion(plotters.get(plotterIndex), "x Residual", 0, "delta x [mm]", xPositionResidual);
+ PlotUtils.setup1DRegion(plotters.get(plotterIndex), "y Residual", 1, "delta y [mm]", yPositionResidual);
+ PlotUtils.setup1DRegion(plotters.get(plotterIndex), "z Residual", 2, "delta z [mm]", zPositionResidual);
+ PlotUtils.setup1DRegion(plotters.get(plotterIndex), "r", 3, "r [mm]", r);
plotterIndex++;
// Show all of the plotters
@@ -154,138 +154,138 @@
// If the event has a single Ecal cluster satisfying the threshold cut,
// check if there is a track that is well matched to the cluster
if(ecalClusters.size() == 1){
- Cluster ecalCluster = ecalClusters.get(0);
-
- // If the cluster is above the energy threshold, then the track should
- // be findable
- if(!isClusterAboveEnergyThreshold(ecalCluster)) return;
- findableSingleTracks++;
-
- double[] clusterPosition = ecalCluster.getPosition();
-
- if(clusterPosition[0] > 0 && clusterPosition[1] > 0) findableSingleTracksQuad1++;
- else if(clusterPosition[0] < 0 && clusterPosition[1] > 0) findableSingleTracksQuad2++;
- else if(clusterPosition[0] < 0 && clusterPosition[1] < 0) findableSingleTracksQuad3++;
- else if(clusterPosition[0] > 0 && clusterPosition[1] < 0) findableSingleTracksQuad4++;
-
- if(!isClusterMatchedToTrack(ecalCluster, tracks)) return;
- foundSingleTracks++;
-
- if(clusterPosition[0] > 0 && clusterPosition[1] > 0) foundSingleTracksQuad1++;
- else if(clusterPosition[0] < 0 && clusterPosition[1] > 0) foundSingleTracksQuad2++;
- else if(clusterPosition[0] < 0 && clusterPosition[1] < 0) foundSingleTracksQuad3++;
- else if(clusterPosition[0] > 0 && clusterPosition[1] < 0) foundSingleTracksQuad4++;
+ Cluster ecalCluster = ecalClusters.get(0);
+
+ // If the cluster is above the energy threshold, then the track should
+ // be findable
+ if(!isClusterAboveEnergyThreshold(ecalCluster)) return;
+ findableSingleTracks++;
+
+ double[] clusterPosition = ecalCluster.getPosition();
+
+ if(clusterPosition[0] > 0 && clusterPosition[1] > 0) findableSingleTracksQuad1++;
+ else if(clusterPosition[0] < 0 && clusterPosition[1] > 0) findableSingleTracksQuad2++;
+ else if(clusterPosition[0] < 0 && clusterPosition[1] < 0) findableSingleTracksQuad3++;
+ else if(clusterPosition[0] > 0 && clusterPosition[1] < 0) findableSingleTracksQuad4++;
+
+ if(!isClusterMatchedToTrack(ecalCluster, tracks)) return;
+ foundSingleTracks++;
+
+ if(clusterPosition[0] > 0 && clusterPosition[1] > 0) foundSingleTracksQuad1++;
+ else if(clusterPosition[0] < 0 && clusterPosition[1] > 0) foundSingleTracksQuad2++;
+ else if(clusterPosition[0] < 0 && clusterPosition[1] < 0) foundSingleTracksQuad3++;
+ else if(clusterPosition[0] > 0 && clusterPosition[1] < 0) foundSingleTracksQuad4++;
}
// Only look at events which have two Ecal cluster
if(ecalClusters.size() != 2) return;
// Check that the Ecal clusters are in opposite Ecal volumes. If
- // they don't, skip the event.
- if(!hasClustersInOppositeVolumes(ecalClusters)){
- this.printDebug("Ecal clusters are not in opposite volumes");
- return;
- }
- nOppositeVolume++;
+ // they don't, skip the event.
+ if(!hasClustersInOppositeVolumes(ecalClusters)){
+ this.printDebug("Ecal clusters are not in opposite volumes");
+ return;
+ }
+ nOppositeVolume++;
- // Check that the Ecal clusters lie within some pre-defined window. If
- // they don't, skip the event.
+ // Check that the Ecal clusters lie within some pre-defined window. If
+ // they don't, skip the event.
if(!isClusterWithinWindow(ecalClusters.get(0)) || !isClusterWithinWindow(ecalClusters.get(1))){
- this.printDebug("Ecal cluster falls outside of window.");
- return;
+ this.printDebug("Ecal cluster falls outside of window.");
+ return;
}
nWithinWindow++;
// Check that the Ecal clusters are above the threshold energy. If
// they don't, skip the event.
if(!isClusterAboveEnergyThreshold(ecalClusters.get(0)) || !isClusterAboveEnergyThreshold(ecalClusters.get(1))){
- this.printDebug("Ecal cluster energies are below threshold.");
- return;
- }
- nAboveThreshold++;
+ this.printDebug("Ecal cluster energies are below threshold.");
+ return;
+ }
+ nAboveThreshold++;
- // Check that the difference between the Ecal cluster energies is
+ // Check that the difference between the Ecal cluster energies is
// reasonable
- double energyDiff = Math.abs(ecalClusters.get(0).getEnergy() - ecalClusters.get(1).getEnergy());
- if(energyDiff > energyDifference){
- this.printDebug("The energy difference between the two clusters is too great.");
- return;
- }
+ double energyDiff = Math.abs(ecalClusters.get(0).getEnergy() - ecalClusters.get(1).getEnergy());
+ if(energyDiff > energyDifference){
+ this.printDebug("The energy difference between the two clusters is too great.");
+ return;
+ }
// Check if the event contains a collection of tracks. If it doesn't,
// move on to the next event.
if(!event.hasCollection(Track.class, trackCollectionName)){
- this.printDebug("Event doesn't contain a collection of tracks!");
- return;
+ this.printDebug("Event doesn't contain a collection of tracks!");
+ return;
}
// If there are no tracks in the collection, move on to the next event.
if(tracks.isEmpty()){
- this.printDebug("Event doesn't contain any tracks!");
- return;
+ this.printDebug("Event doesn't contain any tracks!");
+ return;
}
// Sort the tracks by SVT volume
- topTracks = new ArrayList<Track>();
- botTracks = new ArrayList<Track>();
+ topTracks = new ArrayList<Track>();
+ botTracks = new ArrayList<Track>();
for(Track track : tracks){
- if(track.getTrackStates().get(0).getZ0() > 0) topTracks.add(track);
- else if(track.getTrackStates().get(0).getZ0() < 0) botTracks.add(track);
- }
-
- // Get the trigger information from the event
- List<GenericObject> triggerData = event.get(GenericObject.class, triggerDataCollectionName);
- GenericObject triggerDatum = triggerData.get(0);
- if(triggerDatum.getIntVal(4) > 0){
- this.printDebug("Ecal triggered by top cluster");
- topTrigger = true;
- } else if(triggerDatum.getIntVal(5) > 0){
- this.printDebug("Ecal triggered by bottom cluster");
- topTrigger = false;
- }
-
- // Match a track to the trigger cluster
- Cluster matchedCluster = null;
- for(Cluster ecalCluster : ecalClusters){
- if(ecalCluster.getPosition()[1] > 0 && topTrigger){
- if(!isClusterMatchedToTrack(ecalCluster, topTracks)){
- this.printDebug("Trigger cluster-track match was not found.");
- return;
- }
- matchedCluster = ecalCluster;
- findableBottomTracks++;
- break;
- } else if( ecalCluster.getPosition()[1] < 0 && !topTrigger){
- if(!isClusterMatchedToTrack(ecalCluster, botTracks)){
- this.printDebug("Trigger cluster-track match was not found.");
- return;
- }
- matchedCluster = ecalCluster;
- findableTopTracks++;
- break;
- }
- }
- if(matchedCluster != null) ecalClusters.remove(matchedCluster);
- nTrigClusterTrackMatch++;
-
+ if(track.getTrackStates().get(0).getZ0() > 0) topTracks.add(track);
+ else if(track.getTrackStates().get(0).getZ0() < 0) botTracks.add(track);
+ }
+
+ // Get the trigger information from the event
+ List<GenericObject> triggerData = event.get(GenericObject.class, triggerDataCollectionName);
+ GenericObject triggerDatum = triggerData.get(0);
+ if(triggerDatum.getIntVal(4) > 0){
+ this.printDebug("Ecal triggered by top cluster");
+ topTrigger = true;
+ } else if(triggerDatum.getIntVal(5) > 0){
+ this.printDebug("Ecal triggered by bottom cluster");
+ topTrigger = false;
+ }
+
+ // Match a track to the trigger cluster
+ Cluster matchedCluster = null;
+ for(Cluster ecalCluster : ecalClusters){
+ if(ecalCluster.getPosition()[1] > 0 && topTrigger){
+ if(!isClusterMatchedToTrack(ecalCluster, topTracks)){
+ this.printDebug("Trigger cluster-track match was not found.");
+ return;
+ }
+ matchedCluster = ecalCluster;
+ findableBottomTracks++;
+ break;
+ } else if( ecalCluster.getPosition()[1] < 0 && !topTrigger){
+ if(!isClusterMatchedToTrack(ecalCluster, botTracks)){
+ this.printDebug("Trigger cluster-track match was not found.");
+ return;
+ }
+ matchedCluster = ecalCluster;
+ findableTopTracks++;
+ break;
+ }
+ }
+ if(matchedCluster != null) ecalClusters.remove(matchedCluster);
+ nTrigClusterTrackMatch++;
+
// If the cluster passes all requirements, then there is likely a track
// associated with it
findableTracks++;
// Now check if a track is associated with the non-trigger cluster
if(topTrigger){
- if(!isClusterMatchedToTrack(ecalClusters.get(0), botTracks)){
- this.printDebug("Non trigger cluster-track match was not found.");
- return;
- }
- totalBottomTracks++;
+ if(!isClusterMatchedToTrack(ecalClusters.get(0), botTracks)){
+ this.printDebug("Non trigger cluster-track match was not found.");
+ return;
+ }
+ totalBottomTracks++;
} else if(!topTrigger){
- if(!isClusterMatchedToTrack(ecalClusters.get(0), topTracks)){
- this.printDebug("Non trigger cluster-track match was not found.");
- return;
- }
- totalTopTracks++;
+ if(!isClusterMatchedToTrack(ecalClusters.get(0), topTracks)){
+ this.printDebug("Non trigger cluster-track match was not found.");
+ return;
+ }
+ totalTopTracks++;
}
++totalTracks;
}
@@ -305,67 +305,67 @@
*
*/
private boolean isClusterWithinWindow(Cluster clusterPosition){
- return true;
+ return true;
}
/**
*
*/
private boolean isClusterAboveEnergyThreshold(Cluster ecalCluster){
- if(ecalCluster.getEnergy() > thresholdEnergy) return true;
- return false;
+ if(ecalCluster.getEnergy() > thresholdEnergy) return true;
+ return false;
}
/**
*
*/
private boolean hasClustersInOppositeVolumes(List<Cluster> ecalClusters){
- this.printPosition(ecalClusters.get(0).getPosition());
- this.printPosition(ecalClusters.get(1).getPosition());
- if((ecalClusters.get(0).getPosition()[1] > 0 && ecalClusters.get(1).getPosition()[1] < 0)
- || (ecalClusters.get(0).getPosition()[1] < 0 && ecalClusters.get(1).getPosition()[1] > 0)){
- return true;
- }
- return false;
+ this.printPosition(ecalClusters.get(0).getPosition());
+ this.printPosition(ecalClusters.get(1).getPosition());
+ if((ecalClusters.get(0).getPosition()[1] > 0 && ecalClusters.get(1).getPosition()[1] < 0)
+ || (ecalClusters.get(0).getPosition()[1] < 0 && ecalClusters.get(1).getPosition()[1] > 0)){
+ return true;
+ }
+ return false;
}
/**
*
*/
private boolean isClusterMatchedToTrack(Cluster cluster, List<Track> tracks){
- Hep3Vector clusterPos = new BasicHep3Vector(cluster.getPosition());
- double rMax = Double.MAX_VALUE;
- Track matchedTrack = null;
- for(Track track : tracks){
-
- Hep3Vector trkPosAtShowerMax = TrackUtils.extrapolateTrack(track,clusterPos.z());
- if(Double.isNaN(trkPosAtShowerMax.x()) || Double.isNaN(trkPosAtShowerMax.y())){
- this.printDebug("Invalid track position");
- return false;
- }
- this.printDebug("Track position at shower max: " + trkPosAtShowerMax.toString());
+ Hep3Vector clusterPos = new BasicHep3Vector(cluster.getPosition());
+ double rMax = Double.MAX_VALUE;
+ Track matchedTrack = null;
+ for(Track track : tracks){
+
+ Hep3Vector trkPosAtShowerMax = TrackUtils.extrapolateTrack(track,clusterPos.z());
+ if(Double.isNaN(trkPosAtShowerMax.x()) || Double.isNaN(trkPosAtShowerMax.y())){
+ this.printDebug("Invalid track position");
+ return false;
+ }
+ this.printDebug("Track position at shower max: " + trkPosAtShowerMax.toString());
- // Find the distance between the track position at shower
- // max and the cluster position
- double r = VecOp.sub(trkPosAtShowerMax, clusterPos).magnitude();
- this.printDebug("Distance between Ecal cluster and track position at shower max: " + r + " mm");
-
- // Check if the track is the closest to the cluster. If it is, then
- // save the track and contineu looping over all other tracks
- if (r < rMax /*&& r <= maxTrackClusterDistance*/) {
- rMax = r;
- matchedTrack = track;
- }
- }
- if(matchedTrack != null) return true;
- return false;
+ // Find the distance between the track position at shower
+ // max and the cluster position
+ double r = VecOp.sub(trkPosAtShowerMax, clusterPos).magnitude();
+ this.printDebug("Distance between Ecal cluster and track position at shower max: " + r + " mm");
+
+ // Check if the track is the closest to the cluster. If it is, then
+ // save the track and contineu looping over all other tracks
+ if (r < rMax /*&& r <= maxTrackClusterDistance*/) {
+ rMax = r;
+ matchedTrack = track;
+ }
+ }
+ if(matchedTrack != null) return true;
+ return false;
}
/**
*
*/
private void printPosition(double[] position){
- this.printDebug("[ " + position[0] + ", " + position[1] + ", " + position[2] + " ]");
+ this.printDebug("[ " + position[0] + ", " + position[1] + ", " + position[2] + " ]");
}
@@ -373,28 +373,28 @@
public void endOfData(){
System.out.println("%===================================================================% \n");
if(findableSingleTracks > 0){
- System.out.println("% Total single track efficiency: " + foundSingleTracks + " / " + findableSingleTracks + " = " + (foundSingleTracks/findableSingleTracks)*100 + "%");
+ System.out.println("% Total single track efficiency: " + foundSingleTracks + " / " + findableSingleTracks + " = " + (foundSingleTracks/findableSingleTracks)*100 + "%");
}
if(findableSingleTracksQuad1 > 0){
- System.out.println("% Total single track efficiency - Quad 1: " + foundSingleTracksQuad1 + " / " + findableSingleTracksQuad1 + " = " + (foundSingleTracksQuad1/findableSingleTracksQuad1)*100 + "%");
+ System.out.println("% Total single track efficiency - Quad 1: " + foundSingleTracksQuad1 + " / " + findableSingleTracksQuad1 + " = " + (foundSingleTracksQuad1/findableSingleTracksQuad1)*100 + "%");
}
if(findableSingleTracksQuad2 > 0){
- System.out.println("% Total single track efficiency - Quad 2: " + foundSingleTracksQuad2 + " / " + findableSingleTracksQuad2 + " = " + (foundSingleTracksQuad2/findableSingleTracksQuad2)*100 + "%");
+ System.out.println("% Total single track efficiency - Quad 2: " + foundSingleTracksQuad2 + " / " + findableSingleTracksQuad2 + " = " + (foundSingleTracksQuad2/findableSingleTracksQuad2)*100 + "%");
}
if(findableSingleTracksQuad3 > 0){
- System.out.println("% Total single track efficiency - Quad 3: " + foundSingleTracksQuad3 + " / " + findableSingleTracksQuad3 + " = " + (foundSingleTracksQuad3/findableSingleTracksQuad3)*100 + "%");
+ System.out.println("% Total single track efficiency - Quad 3: " + foundSingleTracksQuad3 + " / " + findableSingleTracksQuad3 + " = " + (foundSingleTracksQuad3/findableSingleTracksQuad3)*100 + "%");
}
if(findableSingleTracksQuad4 > 0){
- System.out.println("% Total single track efficiency - Quad 4: " + foundSingleTracksQuad4 + " / " + findableSingleTracksQuad4 + " = " + (foundSingleTracksQuad4/findableSingleTracksQuad4)*100 + "%");
+ System.out.println("% Total single track efficiency - Quad 4: " + foundSingleTracksQuad4 + " / " + findableSingleTracksQuad4 + " = " + (foundSingleTracksQuad4/findableSingleTracksQuad4)*100 + "%");
}
if(nOppositeVolume > 0){
- System.out.println("% Total events passing opposite volume requirement: " + nOppositeVolume + " / " + eventNumber + " = " + (nOppositeVolume/eventNumber)*100 + "%");
+ System.out.println("% Total events passing opposite volume requirement: " + nOppositeVolume + " / " + eventNumber + " = " + (nOppositeVolume/eventNumber)*100 + "%");
}
if(nAboveThreshold > 0){
- System.out.println("% Total events with both clusters above energy threshold: " + nAboveThreshold + " / " + eventNumber + " = " + (nAboveThreshold/eventNumber)*100 + "%");
+ System.out.println("% Total events with both clusters above energy threshold: " + nAboveThreshold + " / " + eventNumber + " = " + (nAboveThreshold/eventNumber)*100 + "%");
}
if(nTrigClusterTrackMatch > 0){
- System.out.println("% Total events with a trigger cluster-track match: " + nTrigClusterTrackMatch + " / " + eventNumber + " = " + (nTrigClusterTrackMatch/eventNumber)*100 + "%");
+ System.out.println("% Total events with a trigger cluster-track match: " + nTrigClusterTrackMatch + " / " + eventNumber + " = " + (nTrigClusterTrackMatch/eventNumber)*100 + "%");
}
if(findableTracks > 0){
System.out.println("% Total Track Reconstruction Efficiency: " + totalTracks + " / " + findableTracks + " = " + (totalTracks / findableTracks) * 100 + "%");
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/CmpGenToFittedTracksDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/CmpGenToFittedTracksDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/CmpGenToFittedTracksDriver.java Wed Apr 27 11:11:32 2016
@@ -12,6 +12,7 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.hps.users.phansson.testrun.TrigRateDriver;
import org.lcsim.event.EventHeader;
import org.lcsim.event.Track;
import org.lcsim.event.TrackerHit;
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/DataTrackerFakeHitDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/DataTrackerFakeHitDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/DataTrackerFakeHitDriver.java Wed Apr 27 11:11:32 2016
@@ -16,10 +16,12 @@
import java.util.List;
import java.util.Set;
+
//===> import org.hps.conditions.deprecated.SvtUtils;
-import org.hps.recon.tracking.HPSTrack;
+import org.hps.recon.tracking.HpsHelicalTrackFit;
import org.hps.recon.tracking.TrackUtils;
import org.hps.recon.tracking.TrackerHitUtils;
+import org.hps.recon.tracking.WTrack;
import org.lcsim.detector.IDetectorElement;
import org.lcsim.detector.ITransform3D;
import org.lcsim.detector.ITranslation3D;
@@ -194,12 +196,12 @@
// Obtain the tracks from the event
- if (!event.hasCollection(HPSTrack.class, trackCollectionName)) {
+ if (!event.hasCollection(HpsHelicalTrackFit.class, trackCollectionName)) {
this.printDebug("No HPSTracks were found, skipping event");
simHits = null;
return;
}
- List<HPSTrack> tracks = event.get(HPSTrack.class, trackCollectionName);
+ List<HpsHelicalTrackFit> tracks = event.get(HpsHelicalTrackFit.class, trackCollectionName);
if (debug) {
System.out.println(this.getClass().getSimpleName() + ": found " + tracks.size() + " tracks (" + this.trackCollectionName + ")");
@@ -223,13 +225,13 @@
System.out.println(this.getClass().getSimpleName() + ": Add hits for " + tracks.size() + " tracks (" + this.trackCollectionName + ")");
}
- for (HPSTrack helix : tracks) {
+ for (HpsHelicalTrackFit helix : tracks) {
if (debug) {
System.out.println(this.getClass().getSimpleName() + ": trying to add hits for this track");
}
// Get the MC Particle associated with this track
- MCParticle mcParticle = helix.getMCParticle();
+ MCParticle mcParticle = helix.getMcParticle();
if (debug) {
System.out.println(this.getClass().getSimpleName() + helix.toString());
@@ -238,8 +240,8 @@
System.out.println(this.getClass().getSimpleName() + ": create a WTrack object");
}
- WTrack wtrack = new WTrack(helix, Math.abs(_bfield.z()), true); //remove sign from B-field (assumed to go along z-direction)
-
+ WTrack wtrack = new WTrack(helix, Math.abs(_bfield.z()));
+
if (debug) {
System.out.println(this.getClass().getSimpleName() + ": " + wtrack.toString());
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/FastTrackResidualDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/FastTrackResidualDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/FastTrackResidualDriver.java Wed Apr 27 11:11:32 2016
@@ -78,11 +78,11 @@
}
public void detectorChanged(Detector detector) {
- // Get the Subdetector.
- ecal = detector.getSubdetector(ecalName);
-
- // Cache ref to decoder.
- dec = ecal.getIDDecoder();
+ // Get the Subdetector.
+ ecal = detector.getSubdetector(ecalName);
+
+ // Cache ref to decoder.
+ dec = ecal.getIDDecoder();
//Ecal geometry
crystalX = (13.3 + 16.0) / 2;
@@ -793,7 +793,7 @@
style.setParameter("hist2DStyle", "colorMap");
style.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
((PlotterRegion) plotter_ecalhitmult.region(idx)).getPlot().setAllowUserInteraction(false);
- ((PlotterRegion) plotter_ecalhitmult.region(idx)).getPlot().setAllowPopupMenus(false);
+ ((PlotterRegion) plotter_ecalhitmult.region(idx)).getPlot().setAllowPopupMenus(false);
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/ParticleHelixProducer.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/ParticleHelixProducer.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/ParticleHelixProducer.java Wed Apr 27 11:11:32 2016
@@ -13,7 +13,7 @@
import java.util.List;
import org.hps.analysis.ecal.HPSMCParticlePlotsDriver;
-import org.hps.recon.tracking.HPSTrack;
+import org.hps.recon.tracking.HpsHelicalTrackFit;
import org.hps.recon.tracking.TrackerHitUtils;
import org.lcsim.constants.Constants;
import org.lcsim.event.EventHeader;
@@ -137,7 +137,7 @@
//Make new tracks based on the MC particles
//List<HelicalTrackFit> tracks = new ArrayList<HelicalTrackFit>();
- List<HPSTrack> tracks = new ArrayList<HPSTrack>();
+ List<HpsHelicalTrackFit> tracks = new ArrayList<HpsHelicalTrackFit>();
if (event.hasCollection(MCParticle.class)) {
List<MCParticle> mcparticles = event.get(MCParticle.class).get(0);
@@ -251,7 +251,7 @@
pars[3] = hpc.getZ0();
pars[4] = hpc.getSlopeSZPlane();
//HelicalTrackFit htf = this.trackUtils.makeHelicalTrackFit(pars);
- HPSTrack htf = this.makeHPSTrack(pars, part);
+ HpsHelicalTrackFit htf = this.makeHPSTrack(pars, part);
tracks.add(htf);
if (debug) {
System.out.println(this.getClass().getSimpleName() + ": MC particle created HelicalTrackFit " + htf.toString());
@@ -281,7 +281,7 @@
}
this.printDebug("created " + tracks.size() + " MC particle helix tracks");
- event.put(this.trackOutputCollectionName, tracks, HPSTrack.class, 0);
+ event.put(this.trackOutputCollectionName, tracks, HpsHelicalTrackFit.class, 0);
_totalTracks += tracks.size();
}
@@ -292,9 +292,12 @@
* @param mcParticle : MC particle associated to this HelicalTrackFit
* @return HpsHelicalTrackFit :
*/
- public HPSTrack makeHPSTrack(double[] helixParameters, MCParticle mcParticle) {
- return new HPSTrack(helixParameters, new SymmetricMatrix(5), new double[2], new int[2],
- new HashMap<HelicalTrackHit, Double>(), new HashMap<HelicalTrackHit, MultipleScatter>(), mcParticle);
+ public HpsHelicalTrackFit makeHPSTrack(double[] helixParameters, MCParticle mcParticle) {
+ HpsHelicalTrackFit helicalTrackFit = new HpsHelicalTrackFit(helixParameters, new SymmetricMatrix(5), new double[2], new int[2],
+ new HashMap<HelicalTrackHit, Double>(), new HashMap<HelicalTrackHit, MultipleScatter>());
+ helicalTrackFit.setMcParticle(mcParticle);
+ return helicalTrackFit;
+
}
/**
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/SimpleResiduals.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/SimpleResiduals.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/SimpleResiduals.java Wed Apr 27 11:11:32 2016
@@ -98,11 +98,11 @@
}
public void detectorChanged(Detector detector) {
- // Get the Subdetector.
- ecal = detector.getSubdetector(ecalName);
-
- // Cache ref to decoder.
- dec = ecal.getIDDecoder();
+ // Get the Subdetector.
+ ecal = detector.getSubdetector(ecalName);
+
+ // Cache ref to decoder.
+ dec = ecal.getIDDecoder();
}
public SimpleResiduals() {
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/TrackingReconstructionPlots.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/TrackingReconstructionPlots.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/TrackingReconstructionPlots.java Wed Apr 27 11:11:32 2016
@@ -50,7 +50,8 @@
import org.lcsim.util.aida.AIDA;
/**
- *
+ * Analysis class to check recon.
+ *
* @author phansson
*/
public class TrackingReconstructionPlots extends Driver {
@@ -89,12 +90,10 @@
IPlotter plotter55;
IPlotter plotter6;
IPlotter plotter66;
- IPlotter plotter7;
IPlotter plotter8;
IPlotter plotter88;
IPlotter plotter888;
IPlotter plotter8888;
- IPlotter plotter9;
IPlotter top1;
IPlotter top2;
IPlotter top3;
@@ -114,11 +113,11 @@
private boolean showPlots = true;
private double _bfield;
private static Logger LOGGER = Logger.getLogger(TrackingReconstructionPlots.class.getName());
+ private List<HpsSiSensor> sensors = new ArrayList<HpsSiSensor>();
@Override
protected void detectorChanged(Detector detector) {
aida.tree().cd("/");
- List<HpsSiSensor> sensors = new ArrayList<HpsSiSensor>();
for(HpsSiSensor s : detector.getDetectorElement().findDescendants(HpsSiSensor.class)) {
if(s.getName().startsWith("module_") && s.getName().endsWith("sensor0")) {
sensors.add(s);
@@ -128,833 +127,15 @@
Hep3Vector bfieldvec = detector.getFieldMap().getField(new BasicHep3Vector(0., 0., 1.));
_bfield = bfieldvec.y();
-
- IAnalysisFactory fac = aida.analysisFactory();
- plotter = fac.createPlotterFactory().create("HPS Tracking Plots");
- plotter.setTitle("Momentum");
- IPlotterStyle style = plotter.style();
- style.dataStyle().fillStyle().setColor("yellow");
- style.dataStyle().errorBarStyle().setVisible(false);
- plotter.createRegions(2, 2);
- //plotterFrame.addPlotter(plotter);
-
- trkPx = aida.histogram1D("Track Momentum (Px)", 25, -0.25, 0.25);
- IHistogram1D trkPy = aida.histogram1D("Track Momentum (Py)", 25, -0.5, 0.5);
- IHistogram1D trkPz = aida.histogram1D("Track Momentum (Pz)", 25, 0, 1.5);
- IHistogram1D trkChi2 = aida.histogram1D("Track Chi2", 25, 0, 25.0);
-
- plotter.region(0).plot(trkPx);
- plotter.region(1).plot(trkPy);
- plotter.region(2).plot(trkPz);
- plotter.region(3).plot(trkChi2);
-
- if(showPlots) plotter.show();
-
-// ******************************************************************
- top1 = fac.createPlotterFactory().create("Top Tracking Plots");
- top1.setTitle("Top Momentum");
- IPlotterStyle stop1 = top1.style();
- stop1.dataStyle().fillStyle().setColor("green");
- stop1.dataStyle().errorBarStyle().setVisible(false);
- top1.createRegions(2, 2);
- //topFrame.addPlotter(top1);
-
- IHistogram1D toptrkPx = aida.histogram1D("Top Track Momentum (Px)", 25, -0.25, 0.25);
- IHistogram1D toptrkPy = aida.histogram1D("Top Track Momentum (Py)", 25, -0.5, 0.5);
- IHistogram1D toptrkPz = aida.histogram1D("Top Track Momentum (Pz)", 25, 0, 1.5);
- IHistogram1D toptrkChi2 = aida.histogram1D("Top Track Chi2", 25, 0, 25.0);
-
- top1.region(0).plot(toptrkPx);
- top1.region(1).plot(toptrkPy);
- top1.region(2).plot(toptrkPz);
- top1.region(3).plot(toptrkChi2);
-
- if(showPlots) top1.show();
-
- bot1 = fac.createPlotterFactory().create("Bottom Tracking Plots");
- bot1.setTitle("Bottom Momentum");
- IPlotterStyle sbot1 = bot1.style();
- sbot1.dataStyle().fillStyle().setColor("blue");
- sbot1.dataStyle().errorBarStyle().setVisible(false);
- bot1.createRegions(2, 2);
- //bottomFrame.addPlotter(bot1);
-
- IHistogram1D bottrkPx = aida.histogram1D("Bottom Track Momentum (Px)", 25, -0.25, 0.25);
- IHistogram1D bottrkPy = aida.histogram1D("Bottom Track Momentum (Py)", 25, -0.5, 0.5);
- IHistogram1D bottrkPz = aida.histogram1D("Bottom Track Momentum (Pz)", 25, 0, 1.5);
- IHistogram1D bottrkChi2 = aida.histogram1D("Bottom Track Chi2", 25, 0, 25.0);
-
- bot1.region(0).plot(bottrkPx);
- bot1.region(1).plot(bottrkPy);
- bot1.region(2).plot(bottrkPz);
- bot1.region(3).plot(bottrkChi2);
-
- if(showPlots) bot1.show();
-
-// ******************************************************************
- IHistogram1D trkd0 = aida.histogram1D("d0 ", 25, -10.0, 10.0);
- IHistogram1D trkphi = aida.histogram1D("sinphi ", 25, -0.2, 0.2);
- IHistogram1D trkomega = aida.histogram1D("omega ", 25, -0.0025, 0.0025);
- IHistogram1D trklam = aida.histogram1D("tan(lambda) ", 25, -0.1, 0.1);
- IHistogram1D trkz0 = aida.histogram1D("z0 ", 25, -6.0, 6.0);
-
- plotter22 = fac.createPlotterFactory().create("HPS Track Params");
- plotter22.setTitle("Track parameters");
- //plotterFrame.addPlotter(plotter22);
- IPlotterStyle style22 = plotter22.style();
- style22.dataStyle().fillStyle().setColor("yellow");
- style22.dataStyle().errorBarStyle().setVisible(false);
- plotter22.createRegions(2, 3);
- plotter22.region(0).plot(trkd0);
- plotter22.region(1).plot(trkphi);
- plotter22.region(2).plot(trkomega);
- plotter22.region(3).plot(trklam);
- plotter22.region(4).plot(trkz0);
-
- if(showPlots) plotter22.show();
-
- // ******************************************************************
-
-
- trkd0 = aida.histogram1D("d0 Top", 25, -10.0, 10.0);
- trkphi = aida.histogram1D("sinphi Top", 25, -0.2, 0.2);
- trkomega = aida.histogram1D("omega Top", 25, -0.0025, 0.0025);
- trklam = aida.histogram1D("tan(lambda) Top", 25, -0.1, 0.1);
- trkz0 = aida.histogram1D("z0 Top", 25, -6.0, 6.0);
-
- plotter2221 = fac.createPlotterFactory().create("HPS Track Params");
- plotter2221.setTitle("Track parameters");
- //plotterFrame.addPlotter(plotter22);
- IPlotterStyle style2221 = plotter2221.style();
- style2221.dataStyle().fillStyle().setColor("yellow");
- style2221.dataStyle().errorBarStyle().setVisible(false);
- plotter2221.createRegions(2, 3);
- plotter2221.region(0).plot(trkd0);
- plotter2221.region(1).plot(trkphi);
- plotter2221.region(2).plot(trkomega);
- plotter2221.region(3).plot(trklam);
- plotter2221.region(4).plot(trkz0);
-
- if(showPlots) plotter2221.show();
-
-
- // ******************************************************************
-
-
- trkd0 = aida.histogram1D("d0 Bottom", 25, -10.0, 10.0);
- trkphi = aida.histogram1D("sinphi Bottom", 25, -0.2, 0.2);
- trkomega = aida.histogram1D("omega Bottom", 25, -0.0025, 0.0025);
- trklam = aida.histogram1D("tan(lambda) Bottom", 25, -0.1, 0.1);
- trkz0 = aida.histogram1D("z0 Bottom", 25, -6.0, 6.0);
-
- plotter2222 = fac.createPlotterFactory().create("HPS Track Params");
- plotter2222.setTitle("Track parameters");
- //plotterFrame.addPlotter(plotter22);
- IPlotterStyle style2222 = plotter2222.style();
- style2222.dataStyle().fillStyle().setColor("yellow");
- style2222.dataStyle().errorBarStyle().setVisible(false);
- plotter2222.createRegions(2, 3);
- plotter2222.region(0).plot(trkd0);
- plotter2222.region(1).plot(trkphi);
- plotter2222.region(2).plot(trkomega);
- plotter2222.region(3).plot(trklam);
- plotter2222.region(4).plot(trkz0);
-
- if(showPlots) plotter2222.show();
-
-
-
- // ******************************************************************
-
-
- plotter2 = fac.createPlotterFactory().create("HPS Tracking Plots");
- plotter2.setTitle("Track extrapolation");
- //plotterFrame.addPlotter(plotter2);
- IPlotterStyle style2 = plotter2.style();
- style2.dataStyle().fillStyle().setColor("yellow");
- style2.dataStyle().errorBarStyle().setVisible(false);
- plotter2.createRegions(2, 4);
- IHistogram1D xAtConverter = aida.histogram1D("X (mm) @ Z=-60cm", 50, -50, 50);
- IHistogram1D yAtConverter = aida.histogram1D("Y (mm) @ Z=-60cm", 50, -20, 20);
- IHistogram1D xAtColl = aida.histogram1D("X (mm) @ Z=-150cm", 50, -200, 200);
- IHistogram1D yAtColl = aida.histogram1D("Y (mm) @ Z=-150cm", 50, -200, 200);
- IHistogram1D xAtEcal = aida.histogram1D("X (mm) @ ECAL", 50, -500, 500);
- IHistogram1D yAtEcal = aida.histogram1D("Y (mm) @ ECAL", 50, -100, 100);
- IHistogram1D xAtEcal2 = aida.histogram1D("X (mm) @ ECAL (Pz>1)", 50, -500, 500);
- IHistogram1D yAtEcal2 = aida.histogram1D("Y (mm) @ ECAL (Pz>1)", 50, -100, 100);
-
- plotter2.region(0).plot(xAtConverter);
- plotter2.region(4).plot(yAtConverter);
- plotter2.region(1).plot(xAtColl);
- plotter2.region(5).plot(yAtColl);
- plotter2.region(2).plot(xAtEcal);
- plotter2.region(6).plot(yAtEcal);
- plotter2.region(3).plot(xAtEcal2);
- plotter2.region(7).plot(yAtEcal2);
-
- if(showPlots) plotter2.show();
-
- // ******************************************************************
-
- plotter222 = fac.createPlotterFactory().create("HPS Tracking Plots");
- plotter222.setTitle("HPS Tracking Plots");
- //plotterFrame.addPlotter(plotter222);
- IPlotterStyle style222 = plotter222.style();
- style222.dataStyle().fillStyle().setColor("yellow");
- style222.dataStyle().errorBarStyle().setVisible(false);
- plotter222.createRegions(2, 2);
-
- IHistogram1D nHits = aida.histogram1D("Hits per Track", 4, 3, 7);
- nTracks = aida.histogram1D("Tracks per Event", 3, 0, 3);
- IHistogram1D nHitsCluster = aida.histogram1D("Hits in Cluster (HitOnTrack)", 4, 0, 4);
-
-
- plotter222.region(0).plot(nHits);
- plotter222.region(1).plot(nTracks);
- plotter222.region(2).plot(nHitsCluster);
-
- if(showPlots) plotter222.show();
-
-
- // ******************************************************************
-
- plotter22299 = fac.createPlotterFactory().create("HPS Tracking Plots Top");
- plotter22299.setTitle("HPS Tracking Plots Top");
- //plotterFrame.addPlotter(plotter22299);
- IPlotterStyle style22299 = plotter22299.style();
- style22299.dataStyle().fillStyle().setColor("yellow");
- style22299.dataStyle().errorBarStyle().setVisible(false);
- plotter22299.createRegions(2, 2);
-
- IHistogram1D nHitsTop = aida.histogram1D("Hits per Track Top", 4, 3, 7);
- nTracksTop = aida.histogram1D("Tracks per Event Top", 3, 0, 3);
- IHistogram1D nHitsClusterTop = aida.histogram1D("Hits in Cluster (HitOnTrack) Top", 4, 0, 4);
-
-
- plotter22299.region(0).plot(nHitsTop);
- plotter22299.region(1).plot(nTracksTop);
- plotter22299.region(2).plot(nHitsClusterTop);
-
- if(showPlots) plotter22299.show();
-
-// ******************************************************************
-
- plotter22298 = fac.createPlotterFactory().create("HPS Tracking Plots Bottom");
- plotter22298.setTitle("HPS Tracking Plots Bottom");
- //plotterFrame.addPlotter(plotter22298);
- IPlotterStyle style22298 = plotter22298.style();
- style22298.dataStyle().fillStyle().setColor("yellow");
- style22298.dataStyle().errorBarStyle().setVisible(false);
- plotter22298.createRegions(2, 2);
-
- IHistogram1D nHitsBot = aida.histogram1D("Hits per Track Bot", 4, 3, 7);
- nTracksBot = aida.histogram1D("Tracks per Event Bot", 3, 0, 3);
- IHistogram1D nHitsClusterBot = aida.histogram1D("Hits in Cluster (HitOnTrack) Bot", 4, 0, 4);
-
-
- plotter22298.region(0).plot(nHitsBot);
- plotter22298.region(1).plot(nTracksBot);
- plotter22298.region(2).plot(nHitsClusterBot);
-
- if(showPlots) plotter22298.show();
-
-
- // ******************************************************************
-
-
- plotter2223 = fac.createPlotterFactory().create("Cluster Amp Plots");
- plotter2223.setTitle("Other");
- //plotterFrame.addPlotter(plotter222);
- IPlotterStyle style2223 = plotter2223.style();
- style2223.dataStyle().fillStyle().setColor("yellow");
- style2223.dataStyle().errorBarStyle().setVisible(false);
- plotter2223.createRegions(2, 2);
-
-
-
- IHistogram1D amp = aida.histogram1D("Amp (HitOnTrack)", 50, 0, 5000);
- IHistogram1D ampcl = aida.histogram1D("Cluster Amp (HitOnTrack)", 50, 0, 5000);
- IHistogram1D amp2 = aida.histogram1D("Amp Pz>0.8 (HitOnTrack)", 50, 0, 5000);
- IHistogram1D ampcl2 = aida.histogram1D("Cluster Amp Pz>0.8 (HitOnTrack)", 50, 0, 5000);
-
-
- plotter2223.region(0).plot(amp);
- plotter2223.region(1).plot(amp2);
- plotter2223.region(2).plot(ampcl);
- plotter2223.region(3).plot(ampcl2);
-
- if(showPlots) plotter2223.show();
-
-// ******************************************************************
-
-
- plotter2224 = fac.createPlotterFactory().create("t0 Plots");
- plotter2224.setTitle("Other");
- IPlotterStyle style2224 = plotter2224.style();
- style2224.dataStyle().fillStyle().setColor("yellow");
- style2224.dataStyle().errorBarStyle().setVisible(false);
- plotter2224.createRegions(2, 2);
-
- IHistogram1D t0 = aida.histogram1D("t0 (HitOnTrack)", 50, -100, 100);
- IHistogram1D t0cl = aida.histogram1D("Cluster t0 (HitOnTrack)", 50, -100, 100);
- IHistogram1D t02 = aida.histogram1D("t0 Pz>0.8 (HitOnTrack)", 50, -100, 100);
- IHistogram1D t0cl2 = aida.histogram1D("Cluster t0 Pz>0.8 (HitOnTrack)", 50, -100, 100);
-
- plotter2224.region(0).plot(t0);
- plotter2224.region(1).plot(t0cl);
- plotter2224.region(2).plot(t02);
- plotter2224.region(3).plot(t0cl2);
-
- if(showPlots) plotter2224.show();
-
-
- // ******************************************************************
-
- plotter3 = fac.createPlotterFactory().create("HPS Layer Residual Plots");
- plotter3.setTitle("Layer Residuals");
- //plotterFrame.addPlotter(plotter3);
- IPlotterStyle style3 = plotter3.style();
- style3.dataStyle().fillStyle().setColor("yellow");
- style3.dataStyle().errorBarStyle().setVisible(false);
- plotter3.createRegions(6, 2);
-
-
-
- IHistogram1D mod1ResX = aida.histogram1D("Layer 1 Residual X(mm)", 25, -1, 1);
- IHistogram1D mod1ResY = aida.histogram1D("Layer 1 Residual Y(mm)", 25, -0.04, 0.04);
-
- IHistogram1D mod2ResX = aida.histogram1D("Layer 2 Residual X(mm)", 25, -2, 2);
- IHistogram1D mod2ResY = aida.histogram1D("Layer 2 Residual Y(mm)", 25, -1, 1);
-
- IHistogram1D mod3ResX = aida.histogram1D("Layer 3 Residual X(mm)", 25, -2.5, 2.5);
- IHistogram1D mod3ResY = aida.histogram1D("Layer 3 Residual Y(mm)", 25, -1.5, 1.5);
-
- IHistogram1D mod4ResX = aida.histogram1D("Layer 4 Residual X(mm)", 25, -3.0, 3.0);
- IHistogram1D mod4ResY = aida.histogram1D("Layer 4 Residual Y(mm)", 25, -2, 2);
-
- IHistogram1D mod5ResX = aida.histogram1D("Layer 5 Residual X(mm)", 25, -4, 4);
- IHistogram1D mod5ResY = aida.histogram1D("Layer 5 Residual Y(mm)", 25, -3, 3);
-
- IHistogram1D mod6ResX = aida.histogram1D("Layer 6 Residual X(mm)", 25, -5, 5);
- IHistogram1D mod6ResY = aida.histogram1D("Layer 6 Residual Y(mm)", 25, -3, 3);
-
- plotter3.region(0).plot(mod1ResX);
- plotter3.region(2).plot(mod2ResX);
- plotter3.region(4).plot(mod3ResX);
- plotter3.region(6).plot(mod4ResX);
- plotter3.region(8).plot(mod5ResX);
- plotter3.region(10).plot(mod6ResX);
-
- plotter3.region(1).plot(mod1ResY);
- plotter3.region(3).plot(mod2ResY);
- plotter3.region(5).plot(mod3ResY);
- plotter3.region(7).plot(mod4ResY);
- plotter3.region(9).plot(mod5ResY);
- plotter3.region(11).plot(mod6ResY);
-
- if(showPlots) plotter3.show();
-
-
-
- plotter3_11 = fac.createPlotterFactory().create("HPS Strip Residual Plots");
- plotter3_11.setTitle("Strip Residuals (Top)");
- //plotterFrame.addPlotter(plotter3_11);
- IPlotterStyle style3_11 = plotter3_11.style();
- style3_11.dataStyle().fillStyle().setColor("yellow");
- style3_11.dataStyle().errorBarStyle().setVisible(false);
- plotter3_11.createRegions(6, 6);
- int i=0;
- for(HpsSiSensor sensor : sensors) {
- double min = 0.0;
- double max = 0.0;
- if(sensor.getName().contains("L1")) {
- min=-0.04; max=0.04;
- } else if(sensor.getName().contains("L2")) {
- min=-1; max=1;
- } else if(sensor.getName().contains("L3")) {
- min=-1.5; max=1.5;
- } else if(sensor.getName().contains("L4")) {
- min=-3; max=3;
- } else if(sensor.getName().contains("L5")) {
- min=-4; max=4;
- } else if(sensor.getName().contains("L6")) {
- min=-5; max=5;
- } else {
- throw new RuntimeException("Invalid sensor name: " + sensor.getName());
- }
- IHistogram1D resX = aida.histogram1D(sensor.getName() + " strip residual (mm)", 50, min, max);
- plotter3_11.region(i).plot(resX);
- i++;
- }
-
- if(showPlots) plotter3_11.show();
-
-
- plotter3_1 = fac.createPlotterFactory().create("HPS Residual Plots (Single hit per layer)");
- plotter3_1.setTitle("Residuals (Top)");
- //plotterFrame.addPlotter(plotter3_1);
- IPlotterStyle style3_1 = plotter3_1.style();
- style3_1.dataStyle().fillStyle().setColor("yellow");
- style3_1.dataStyle().errorBarStyle().setVisible(false);
- plotter3_1.createRegions(6, 2);
-
- IHistogram1D mod1ResX_Top = aida.histogram1D("Layer 1 Residual X(mm) Top", 25, -1, 1);
- IHistogram1D mod1ResY_Top = aida.histogram1D("Layer 1 Residual Y(mm) Top", 25, -0.04, 0.04);
-
- IHistogram1D mod2ResX_Top = aida.histogram1D("Layer 2 Residual X(mm) Top", 25, -2, 2);
- IHistogram1D mod2ResY_Top = aida.histogram1D("Layer 2 Residual Y(mm) Top", 25, -1, 1);
-
- IHistogram1D mod3ResX_Top = aida.histogram1D("Layer 3 Residual X(mm) Top", 25, -2.5, 2.5);
- IHistogram1D mod3ResY_Top = aida.histogram1D("Layer 3 Residual Y(mm) Top", 25, -1.5, 1.5);
-
- IHistogram1D mod4ResX_Top = aida.histogram1D("Layer 4 Residual X(mm) Top", 25, -3.0, 3.0);
- IHistogram1D mod4ResY_Top = aida.histogram1D("Layer 4 Residual Y(mm) Top", 25, -2, 2);
-
- IHistogram1D mod5ResX_Top = aida.histogram1D("Layer 5 Residual X(mm) Top", 25, -4, 4);
- IHistogram1D mod5ResY_Top = aida.histogram1D("Layer 5 Residual Y(mm) Top", 25, -3, 3);
-
- IHistogram1D mod6ResX_Top = aida.histogram1D("Layer 6 Residual X(mm) Top", 25, -5, 5);
- IHistogram1D mod6ResY_Top = aida.histogram1D("Layer 6 Residual Y(mm) Top", 25, -3, 3);
-
-
- plotter3_1.region(0).plot(mod1ResX_Top);
- plotter3_1.region(2).plot(mod2ResX_Top);
- plotter3_1.region(4).plot(mod3ResX_Top);
- plotter3_1.region(6).plot(mod4ResX_Top);
- plotter3_1.region(8).plot(mod5ResX_Top);
- plotter3_1.region(10).plot(mod6ResX_Top);
-
- plotter3_1.region(1).plot(mod1ResY_Top);
- plotter3_1.region(3).plot(mod2ResY_Top);
- plotter3_1.region(5).plot(mod3ResY_Top);
- plotter3_1.region(7).plot(mod4ResY_Top);
- plotter3_1.region(9).plot(mod5ResY_Top);
- plotter3_1.region(11).plot(mod6ResY_Top);
-
- if(showPlots) plotter3_1.show();
-
- plotter3_2 = fac.createPlotterFactory().create("HPS Residual Plots (Single strip cluster per layer)");
- plotter3_2.setTitle("Residuals (Bottom)");
- //plotterFrame.addPlotter(plotter3_2);
- IPlotterStyle style3_2 = plotter3_2.style();
- style3_2.dataStyle().fillStyle().setColor("yellow");
- style3_2.dataStyle().errorBarStyle().setVisible(false);
- plotter3_2.createRegions(6, 2);
-
- IHistogram1D mod1ResX_Bottom = aida.histogram1D("Layer 1 Residual X(mm) Bottom", 25, -1, 1);
- IHistogram1D mod1ResY_Bottom = aida.histogram1D("Layer 1 Residual Y(mm) Bottom", 25, -0.04, 0.04);
-
- IHistogram1D mod2ResX_Bottom = aida.histogram1D("Layer 2 Residual X(mm) Bottom", 25, -2, 2);
- IHistogram1D mod2ResY_Bottom = aida.histogram1D("Layer 2 Residual Y(mm) Bottom", 25, -1, 1);
-
- IHistogram1D mod3ResX_Bottom = aida.histogram1D("Layer 3 Residual X(mm) Bottom", 25, -2.5, 2.5);
- IHistogram1D mod3ResY_Bottom = aida.histogram1D("Layer 3 Residual Y(mm) Bottom", 25, -1.5, 1.5);
-
- IHistogram1D mod4ResX_Bottom = aida.histogram1D("Layer 4 Residual X(mm) Bottom", 25, -3.0, 3.0);
- IHistogram1D mod4ResY_Bottom = aida.histogram1D("Layer 4 Residual Y(mm) Bottom", 25, -2, 2);
-
- IHistogram1D mod5ResX_Bottom = aida.histogram1D("Layer 5 Residual X(mm) Bottom", 25, -4, 4);
- IHistogram1D mod5ResY_Bottom = aida.histogram1D("Layer 5 Residual Y(mm) Bottom", 25, -3, 3);
-
- IHistogram1D mod6ResX_Bottom = aida.histogram1D("Layer 6 Residual X(mm) Bottom", 25, -5, 5);
- IHistogram1D mod6ResY_Bottom = aida.histogram1D("Layer 6 Residual Y(mm) Bottom", 25, -3, 3);
-
- plotter3_2.region(0).plot(mod1ResX_Bottom);
- plotter3_2.region(2).plot(mod2ResX_Bottom);
- plotter3_2.region(4).plot(mod3ResX_Bottom);
- plotter3_2.region(6).plot(mod4ResX_Bottom);
- plotter3_2.region(8).plot(mod5ResX_Bottom);
- plotter3_2.region(10).plot(mod6ResX_Bottom);
-
- plotter3_2.region(1).plot(mod1ResY_Bottom);
- plotter3_2.region(3).plot(mod2ResY_Bottom);
- plotter3_2.region(5).plot(mod3ResY_Bottom);
- plotter3_2.region(7).plot(mod4ResY_Bottom);
- plotter3_2.region(9).plot(mod5ResY_Bottom);
- plotter3_2.region(11).plot(mod6ResY_Bottom);
-
- if(showPlots) plotter3_2.show();
-
- plotter4 = fac.createPlotterFactory().create("HPS Track and ECal Plots");
- plotter4.setTitle("Track and ECal Correlations");
- //plotterFrame.addPlotter(plotter4);
- IPlotterStyle style4 = plotter4.style();
- style4.setParameter("hist2DStyle", "colorMap");
- style4.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- style4.dataStyle().fillStyle().setColor("yellow");
- style4.dataStyle().errorBarStyle().setVisible(false);
- plotter4.createRegions(2, 3);
-
- IHistogram2D eVsP = aida.histogram2D("Energy Vs Momentum", 50, 0, 0.50, 50, 0, 1.5);
- IHistogram1D eOverP = aida.histogram1D("Energy Over Momentum", 50, 0, 2);
-
- IHistogram1D distX = aida.histogram1D("deltaX", 50, -100, 100);
- IHistogram1D distY = aida.histogram1D("deltaY", 50, -40, 40);
-
- IHistogram2D xEcalVsTrk = aida.histogram2D("X ECal Vs Track", 100, -400, 400, 100, -400, 400);
- IHistogram2D yEcalVsTrk = aida.histogram2D("Y ECal Vs Track", 100, -100, 100, 100, -100, 100);
-
- plotter4.region(0).plot(eVsP);
- plotter4.region(3).plot(eOverP);
- plotter4.region(1).plot(distX);
- plotter4.region(4).plot(distY);
- plotter4.region(2).plot(xEcalVsTrk);
- plotter4.region(5).plot(yEcalVsTrk);
-
- if(showPlots) plotter4.show();
-
- // ******************************************************************
- top2 = fac.createPlotterFactory().create("Top ECal Plots");
- top2.setTitle("Top ECal Correlations");
- IPlotterStyle stop2 = top2.style();
- stop2.dataStyle().fillStyle().setColor("green");
- stop2.dataStyle().errorBarStyle().setVisible(false);
- stop2.setParameter("hist2DStyle", "colorMap");
- stop2.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- top2.createRegions(2, 3);
- //topFrame.addPlotter(top2);
-
- IHistogram2D topeVsP = aida.histogram2D("Top Energy Vs Momentum", 50, 0, 0.500, 50, 0, 1.5);
- IHistogram1D topeOverP = aida.histogram1D("Top Energy Over Momentum", 50, 0, 2);
-
- IHistogram1D topdistX = aida.histogram1D("Top deltaX", 50, -100, 100);
- IHistogram1D topdistY = aida.histogram1D("Top deltaY", 50, -40, 40);
-
- IHistogram2D topxEcalVsTrk = aida.histogram2D("Top X ECal Vs Track", 100, -400, 400, 100, -100, 100);
- IHistogram2D topyEcalVsTrk = aida.histogram2D("Top Y ECal Vs Track", 100, 0, 100, 100, 0, 100);
-
- top2.region(0).plot(topeVsP);
- top2.region(3).plot(topeOverP);
- top2.region(1).plot(topdistX);
- top2.region(4).plot(topdistY);
- top2.region(2).plot(topxEcalVsTrk);
- top2.region(5).plot(topyEcalVsTrk);
-
- if(showPlots) top2.show();
-
- bot2 = fac.createPlotterFactory().create("Bottom ECal Plots");
- bot2.setTitle("Bottom ECal Correlations");
- IPlotterStyle sbot2 = bot2.style();
- sbot2.dataStyle().fillStyle().setColor("green");
- sbot2.dataStyle().errorBarStyle().setVisible(false);
- sbot2.setParameter("hist2DStyle", "colorMap");
- sbot2.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- bot2.createRegions(2, 3);
- //bottomFrame.addPlotter(bot2);
-
- IHistogram2D BottomeVsP = aida.histogram2D("Bottom Energy Vs Momentum", 50, 0, 0.500, 50, 0, 1.5);
- IHistogram1D BottomeOverP = aida.histogram1D("Bottom Energy Over Momentum", 50, 0, 2);
-
- IHistogram1D BottomdistX = aida.histogram1D("Bottom deltaX", 50, -100, 100);
- IHistogram1D BottomdistY = aida.histogram1D("Bottom deltaY", 50, -40, 40);
-
- IHistogram2D BottomxEcalVsTrk = aida.histogram2D("Bottom X ECal Vs Track", 100, -400, 400, 100, -400, 400);
- IHistogram2D BottomyEcalVsTrk = aida.histogram2D("Bottom Y ECal Vs Track", 100, -100, 0, 100, -100, 0);
-
- bot2.region(0).plot(BottomeVsP);
- bot2.region(3).plot(BottomeOverP);
- bot2.region(1).plot(BottomdistX);
- bot2.region(4).plot(BottomdistY);
- bot2.region(2).plot(BottomxEcalVsTrk);
- bot2.region(5).plot(BottomyEcalVsTrk);
-
- if(showPlots) bot2.show();
-
-
- // ******************************************************************
- top3 = fac.createPlotterFactory().create("Top ECal Plots");
- top3.setTitle("Top ECal More Correlations");
- IPlotterStyle stop3 = top3.style();
- stop3.dataStyle().fillStyle().setColor("green");
- stop3.dataStyle().errorBarStyle().setVisible(false);
- stop3.setParameter("hist2DStyle", "colorMap");
- stop3.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- top3.createRegions(1, 2);
- //topFrame.addPlotter(top3);
-
- IHistogram2D topdistXvsX = aida.histogram2D("Top deltaX vs X", 51, -400, 400, 25, -100, 100);
- IHistogram2D topdistYvsY = aida.histogram2D("Top deltaY vs Y", 51, 0, 100, 25, -40, 40);
-
- top3.region(0).plot(topdistXvsX);
- top3.region(1).plot(topdistYvsY);
-
- if(showPlots) top3.show();
-
- bot3 = fac.createPlotterFactory().create("Bottom ECal Plots");
- bot3.setTitle("Bottom ECal More Correlations");
- IPlotterStyle sbot3 = bot3.style();
- sbot3.dataStyle().fillStyle().setColor("green");
- sbot3.dataStyle().errorBarStyle().setVisible(false);
- sbot3.setParameter("hist2DStyle", "colorMap");
- sbot3.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- bot3.createRegions(1, 2);
- //bottomFrame.addPlotter(bot3);
-
- IHistogram2D botdistXvsX = aida.histogram2D("Bottom deltaX vs X", 51, -400, 400, 25, -100, 100);
- IHistogram2D botdistYvsY = aida.histogram2D("Bottom deltaY vs Y", 51, -100, 0, 25, -40, 40);
-
- bot3.region(0).plot(botdistXvsX);
- bot3.region(1).plot(botdistYvsY);
-
- if(showPlots) bot3.show();
-
- // ******************************************************************
- top4 = fac.createPlotterFactory().create("Track Matching Plots");
- top4.setTitle("Track Matching Plots");
- IPlotterStyle stop4 = top4.style();
- stop4.dataStyle().fillStyle().setColor("green");
- stop4.dataStyle().errorBarStyle().setVisible(false);
- stop4.setParameter("hist2DStyle", "colorMap");
- stop4.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- top4.createRegions(2, 3);
- //topFrame.addPlotter(top4);
-
- IHistogram1D trackmatchN = aida.histogram1D("Tracks matched", 3, 0, 3);
- IHistogram1D toptrackmatchN = aida.histogram1D("Tracks matched Top", 3, 0, 3);
- IHistogram1D bottrackmatchN = aida.histogram1D("Tracks matched Bottom", 3, 0, 3);
- IHistogram1D trackmatchN2 = aida.histogram1D("Tracks matched (Pz>0.8)", 3, 0, 3);
- IHistogram1D toptrackmatchN2 = aida.histogram1D("Tracks matched Top (Pz>0.8)", 3, 0, 3);
- IHistogram1D bottrackmatchN2 = aida.histogram1D("Tracks matched Bottom (Pz>0.8)", 3, 0, 3);
-
- top4.region(0).plot(trackmatchN);
- top4.region(1).plot(toptrackmatchN);
- top4.region(2).plot(bottrackmatchN);
- top4.region(3).plot(trackmatchN2);
- top4.region(4).plot(toptrackmatchN2);
- top4.region(5).plot(bottrackmatchN2);
-
- if(showPlots) top4.show();
-
- // ******************************************************************
- top44 = fac.createPlotterFactory().create("e+e- Plots");
- top44.setTitle("e+e- Plots");
- IPlotterStyle stop44 = top44.style();
- stop44.dataStyle().fillStyle().setColor("green");
- stop44.dataStyle().errorBarStyle().setVisible(false);
- stop44.setParameter("hist2DStyle", "colorMap");
- stop44.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- top44.createRegions(2,4);
- //topFrame.addPlotter(top44);
-
- IHistogram2D trackPCorr = aida.histogram2D("p(e-) vs p(e+) max", 25, 0, 1.2, 25, 0, 1.2);
- IHistogram1D ne = aida.histogram1D("n(e-)", 3, 0, 3);
- IHistogram1D np = aida.histogram1D("n(e+)", 3, 0, 3);
- IHistogram1D pem = aida.histogram1D("p(e-) max", 25, 0, 1.5);
- IHistogram1D pe = aida.histogram1D("p(e-)", 25, 0, 1.5);
- IHistogram1D ppm = aida.histogram1D("p(e+) max", 25, 0, 1.5);
- IHistogram1D pp = aida.histogram1D("p(e+)", 25, 0, 1.5);
-
- top44.region(0).plot(trackPCorr);
- top44.region(1).plot(ne);
- top44.region(2).plot(np);
- top44.region(3).plot(pe);
- top44.region(4).plot(pp);
- top44.region(5).plot(pem);
- top44.region(6).plot(ppm);
-
- if(showPlots) top44.show();
-
-
-
-// ******************************************************************
- plotter5 = fac.createPlotterFactory().create("HPS Hit Positions");
- plotter5.setTitle("Hit Positions: Top");
- //plotterFrame.addPlotter(plotter5);
- IPlotterStyle style5 = plotter5.style();
- style5.setParameter("hist2DStyle", "colorMap");
- style5.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- style5.dataStyle().fillStyle().setColor("yellow");
- style5.dataStyle().errorBarStyle().setVisible(false);
- plotter5.createRegions(1, 2);
-
- IHistogram2D l1Pos = aida.histogram2D("Layer 1 HTH Position: Top", 50, -55, 55, 55, -25, 25);
- IHistogram2D l7Pos = aida.histogram2D("Layer 7 HTH Position: Top", 50, -55, 55, 55, -25, 25);
-
- plotter5.region(0).plot(l1Pos);
- plotter5.region(1).plot(l7Pos);
-
- if(showPlots) plotter5.show();
-
- plotter5_1 = fac.createPlotterFactory().create("HPS Hit Positions");
- plotter5_1.setTitle("Hit Positions: Bottom");
- //plotterFrame.addPlotter(plotter5_1);
- IPlotterStyle style5_1 = plotter5_1.style();
- style5_1.setParameter("hist2DStyle", "colorMap");
- style5_1.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- style5_1.dataStyle().fillStyle().setColor("yellow");
- style5_1.dataStyle().errorBarStyle().setVisible(false);
- plotter5_1.createRegions(1, 2);
-
-
- IHistogram2D l1PosBot = aida.histogram2D("Layer 1 HTH Position: Bottom", 50, -55, 55, 55, -25, 25);
- IHistogram2D l7PosBot = aida.histogram2D("Layer 7 HTH Position: Bottom", 50, -55, 55, 55, -25, 25);
- plotter5_1.region(0).plot(l1PosBot);
- plotter5_1.region(1).plot(l7PosBot);
-
- if(showPlots) plotter5_1.show();
-
- plotter55 = fac.createPlotterFactory().create("HPS Hit Positions");
- plotter55.setTitle("Helical Track Hits");
- //plotterFrame.addPlotter(plotter55);
- IPlotterStyle style55 = plotter55.style();
- style55.dataStyle().fillStyle().setColor("Green");
- style55.dataStyle().errorBarStyle().setVisible(false);
- style55.dataStyle().markerStyle().setSize(20);
- plotter55.createRegions(1, 2);
-
- IProfile avgLayersTopPlot = aida.profile1D("Number of Stereo Hits per layer in Top Half", 13, 0, 13);
- IProfile avgLayersBottomPlot = aida.profile1D("Number of Stereo Hits per layer in Bottom Half", 13, 0, 13);
-
- plotter55.region(0).plot(avgLayersTopPlot);
- plotter55.region(1).plot(avgLayersBottomPlot);
-
- if(showPlots) plotter55.show();
-
- plotter6 = fac.createPlotterFactory().create("HPS ECAL Hit Positions");
- plotter6.setTitle("ECAL Positions");
- //plotterFrame.addPlotter(plotter6);
- IPlotterStyle style6 = plotter6.style();
- style6.setParameter("hist2DStyle", "colorMap");
- style6.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- style6.dataStyle().fillStyle().setColor("yellow");
- style6.dataStyle().errorBarStyle().setVisible(false);
- plotter6.createRegions(4, 2);
-
- IHistogram2D topECal = aida.histogram2D("Top ECal Cluster Position", 50, -400, 400, 10, 0, 100);
- IHistogram2D botECal = aida.histogram2D("Bottom ECal Cluster Position", 50, -400, 400, 10, -100, 0);
- IHistogram2D topECal1 = aida.histogram2D("Top ECal Cluster Position (>0 tracks)", 50, -400, 400, 10, 0, 100);
- IHistogram2D botECal1 = aida.histogram2D("Bottom ECal Cluster Position (>0 tracks)", 50, -400, 400, 10, -100, 0);
- IHistogram2D topECal2 = aida.histogram2D("Top ECal Cluster Position (E>0.1,>0 tracks)", 50, -400, 400, 10, 0, 100);
- IHistogram2D botECal2 = aida.histogram2D("Bottom ECal Cluster Position (E>0.1,>0 tracks)", 50, -400, 400, 10, -100, 0);
- IHistogram2D topECal3 = aida.histogram2D("Top ECal Cluster Position w_E (E>0.1,>0 tracks)", 50, -400, 400, 10, 0, 100);
- IHistogram2D botECal3 = aida.histogram2D("Bottom ECal Cluster Position w_E (E>0.1,>0 tracks)", 50, -400, 400, 10, -100, 0);
-
- plotter6.region(0).plot(topECal);
- plotter6.region(1).plot(botECal);
- plotter6.region(2).plot(topECal1);
- plotter6.region(3).plot(botECal1);
- plotter6.region(4).plot(topECal2);
- plotter6.region(5).plot(botECal2);
- plotter6.region(6).plot(topECal3);
- plotter6.region(7).plot(botECal3);
-
- if(showPlots) plotter6.show();
-
-
- plotter66 = fac.createPlotterFactory().create("HPS ECAL Basic Plots");
- plotter66.setTitle("ECAL Basic Plots");
- //plotterFrame.addPlotter(plotter6);
- IPlotterStyle style66 = plotter66.style();
- style66.dataStyle().fillStyle().setColor("yellow");
- style66.dataStyle().errorBarStyle().setVisible(false);
- plotter66.createRegions(2, 2);
-
- IHistogram1D topECalE = aida.histogram1D("Top ECal Cluster Energy", 50, 0, 2);
- IHistogram1D botECalE = aida.histogram1D("Bottom ECal Cluster Energy", 50, 0, 2);
- IHistogram1D topECalN = aida.histogram1D("Number of Clusters Top", 6, 0, 6);
- IHistogram1D botECalN = aida.histogram1D("Number of Clusters Bot", 6, 0, 6);
-
- plotter66.region(0).plot(topECalE);
- plotter66.region(1).plot(botECalE);
- plotter66.region(2).plot(botECalN);
- plotter66.region(3).plot(topECalN);
-
- if(showPlots) plotter66.show();
-
-
- plotter7 = fac.createPlotterFactory().create("HPS ECAL Hit Positions");
- plotter7.setTitle("Basic Misc Stuff");
- //plotterFrame.addPlotter(plotter7);
- IPlotterStyle style7 = plotter7.style();
- style7.setParameter("hist2DStyle", "colorMap");
- style7.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- style7.dataStyle().fillStyle().setColor("yellow");
- style7.dataStyle().errorBarStyle().setVisible(false);
- plotter7.createRegions(2, 2);
-
- IHistogram2D quadrants = aida.histogram2D("Charge vs Slope", 2, -1, 1, 2, -1, 1);
- plotter7.region(0).plot(quadrants);
-
- if(showPlots) plotter7.show();
-
- plotter8 = fac.createPlotterFactory().create("HPS Strip Hit From Stereo Multiplicity");
- plotter8.setTitle("Strip Hit Multiplicity");
- //plotterFrame.addPlotter(plotter8);
- IPlotterStyle style8 = plotter8.style();
- style8.dataStyle().fillStyle().setColor("yellow");
- style8.dataStyle().errorBarStyle().setVisible(false);
- plotter8.createRegions(6, 6);
- i=0;
- for(SiSensor sensor : sensors) {
- IHistogram1D resX = aida.histogram1D(sensor.getName() + " strip hits from stereo", 10, 0, 10);
- plotter8.region(i).plot(resX);
- i++;
- }
-
- if(showPlots) plotter8.show();
-
- plotter88 = fac.createPlotterFactory().create("HPS Strip Hit Multiplicity");
- plotter88.setTitle("Strip Hit Multiplicity");
- //plotterFrame.addPlotter(plotter88);
- plotter88.setStyle(style8);
- plotter88.createRegions(6, 6);
- i=0;
- for(SiSensor sensor : sensors) {
- IHistogram1D resX = aida.histogram1D(sensor.getName() + " strip hits", 10, 0, 10);
- plotter88.region(i).plot(resX);
- i++;
- }
-
- if(showPlots) plotter88.show();
-
-
- plotter9 = fac.createPlotterFactory().create("HPS Strip Hit On Track Multiplicity");
- plotter9.setTitle("Strip Hit Multiplicity");
- //plotterFrame.addPlotter(plotter9);
- IPlotterStyle style9 = plotter9.style();
- style9.dataStyle().fillStyle().setColor("yellow");
- style9.dataStyle().errorBarStyle().setVisible(false);
- plotter9.createRegions(6, 6);
- i=0;
- for(SiSensor sensor : sensors) {
- IHistogram1D resX = aida.histogram1D(sensor.getName() + " strip hits on track", 3, 0, 3);
- plotter9.region(i).plot(resX);
- i++;
- }
-
- if(showPlots) plotter9.show();
-
-
- plotter888 = fac.createPlotterFactory().create("HPS Strip Hit Isolation");
- plotter888.setTitle("Strip Hit Isolation");
- //plotterFrame.addPlotter(plotter88);
- plotter888.setStyle(style8);
- plotter888.createRegions(6, 6);
- i=0;
- for(SiSensor sensor : sensors) {
- IHistogram1D resX = aida.histogram1D(sensor.getName() + " strip hits iso", 50, 0, 5);
- plotter888.region(i).plot(resX);
- i++;
- }
-
- if(showPlots) plotter888.show();
-
- plotter8888 = fac.createPlotterFactory().create("HPS Strip Hit On Track Isolation");
- plotter8888.setTitle("Strip Hit On Track Isolation");
- //plotterFrame.addPlotter(plotter88);
- plotter8888.setStyle(style8);
- plotter8888.createRegions(6, 6);
- i=0;
- for(SiSensor sensor : sensors) {
- IHistogram1D resX = aida.histogram1D(sensor.getName() + " strip hits iso on track", 50, 0, 5);
- plotter8888.region(i).plot(resX);
- i++;
- }
-
- if(showPlots) plotter8888.show();
-
-
+
+ setupPlots();
}
+
+
+
+
+
+
public TrackingReconstructionPlots() {
LOGGER.setLevel(Level.WARNING);
@@ -1170,13 +351,8 @@
int isTop = -1;
if (trk.getTrackerHits().get(0).getPosition()[2] > 0) {
- isTop = 0;//make plot look pretty
- }
- int charge = trk.getCharge();
- if (charge > 0) {
- charge = 0;//make plot look pretty
- }// System.out.println("Charge = " + charge + "; isTop = " + isTop);
- aida.histogram2D("Charge vs Slope").fill(charge, isTop);
+ isTop = 0;
+ }
if (isTop == 0) {
aida.histogram1D("Top Track Momentum (Px)").fill(trk.getTrackStates().get(0).getMomentum()[1]);
aida.histogram1D("Top Track Momentum (Py)").fill(trk.getTrackStates().get(0).getMomentum()[2]);
@@ -1304,8 +480,6 @@
layersBot[htc.Layer() - 1]++;
Hep3Vector sensorPos = ((SiSensor) ((RawTrackerHit) htc.getRawHits().get(0)).getDetectorElement()).getGeometry().getPosition();
if (htc.Layer() == 1) {
-// System.out.println(sensorPos.toString());
-// System.out.println("Hit X = " + x + "; Hit Y = " + y);
aida.histogram2D("Layer 1 HTH Position: Bottom").fill(x - sensorPos.x(), y - sensorPos.y());
}
if (htc.Layer() == 7) {
@@ -1319,7 +493,7 @@
double clusterSum = 0;
double clusterT0 = 0;
int nHitsCluster = 0;
-
+
for (RawTrackerHit rawHit : (List<RawTrackerHit>) hts.rawhits()) {
if(event.hasCollection(LCRelation.class, "SVTFittedRawTrackerHits")) {
List<LCRelation> fittedHits = event.get(LCRelation.class, "SVTFittedRawTrackerHits");
@@ -1361,7 +535,6 @@
}
for(Map.Entry<HpsSiSensor,Integer> sensor : stripHitsOnTrack.entrySet()) {
- aida.histogram1D(sensor.getKey().getName() + " strip hits on track").fill(sensor.getValue());
aida.histogram1D(sensor.getKey().getName() + " strip hits iso on track").fill(stripHitsIsoOnTrack.get(sensor.getKey()));
}
@@ -1645,6 +818,813 @@
+
+private void setupPlots() {
+
+
+ IAnalysisFactory fac = aida.analysisFactory();
+ plotter = fac.createPlotterFactory().create("HPS Tracking Plots");
+ plotter.setTitle("Momentum");
+ IPlotterStyle style = plotter.style();
+ style.dataStyle().fillStyle().setColor("yellow");
+ style.dataStyle().errorBarStyle().setVisible(false);
+ plotter.createRegions(2, 2);
+ //plotterFrame.addPlotter(plotter);
+
+ trkPx = aida.histogram1D("Track Momentum (Px)", 25, -0.25, 0.25);
+ IHistogram1D trkPy = aida.histogram1D("Track Momentum (Py)", 25, -0.5, 0.5);
+ IHistogram1D trkPz = aida.histogram1D("Track Momentum (Pz)", 25, 0, 1.5);
+ IHistogram1D trkChi2 = aida.histogram1D("Track Chi2", 25, 0, 25.0);
+
+ plotter.region(0).plot(trkPx);
+ plotter.region(1).plot(trkPy);
+ plotter.region(2).plot(trkPz);
+ plotter.region(3).plot(trkChi2);
+
+ if(showPlots) plotter.show();
+
+// ******************************************************************
+ top1 = fac.createPlotterFactory().create("Top Tracking Plots");
+ top1.setTitle("Top Momentum");
+ IPlotterStyle stop1 = top1.style();
+ stop1.dataStyle().fillStyle().setColor("green");
+ stop1.dataStyle().errorBarStyle().setVisible(false);
+ top1.createRegions(2, 2);
+ //topFrame.addPlotter(top1);
+
+ IHistogram1D toptrkPx = aida.histogram1D("Top Track Momentum (Px)", 25, -0.25, 0.25);
+ IHistogram1D toptrkPy = aida.histogram1D("Top Track Momentum (Py)", 25, -0.5, 0.5);
+ IHistogram1D toptrkPz = aida.histogram1D("Top Track Momentum (Pz)", 25, 0, 1.5);
+ IHistogram1D toptrkChi2 = aida.histogram1D("Top Track Chi2", 25, 0, 25.0);
+
+ top1.region(0).plot(toptrkPx);
+ top1.region(1).plot(toptrkPy);
+ top1.region(2).plot(toptrkPz);
+ top1.region(3).plot(toptrkChi2);
+
+ if(showPlots) top1.show();
+
+ bot1 = fac.createPlotterFactory().create("Bottom Tracking Plots");
+ bot1.setTitle("Bottom Momentum");
+ IPlotterStyle sbot1 = bot1.style();
+ sbot1.dataStyle().fillStyle().setColor("blue");
+ sbot1.dataStyle().errorBarStyle().setVisible(false);
+ bot1.createRegions(2, 2);
+ //bottomFrame.addPlotter(bot1);
+
+ IHistogram1D bottrkPx = aida.histogram1D("Bottom Track Momentum (Px)", 25, -0.25, 0.25);
+ IHistogram1D bottrkPy = aida.histogram1D("Bottom Track Momentum (Py)", 25, -0.5, 0.5);
+ IHistogram1D bottrkPz = aida.histogram1D("Bottom Track Momentum (Pz)", 25, 0, 1.5);
+ IHistogram1D bottrkChi2 = aida.histogram1D("Bottom Track Chi2", 25, 0, 25.0);
+
+ bot1.region(0).plot(bottrkPx);
+ bot1.region(1).plot(bottrkPy);
+ bot1.region(2).plot(bottrkPz);
+ bot1.region(3).plot(bottrkChi2);
+
+ if(showPlots) bot1.show();
+
+// ******************************************************************
+ IHistogram1D trkd0 = aida.histogram1D("d0 ", 25, -10.0, 10.0);
+ IHistogram1D trkphi = aida.histogram1D("sinphi ", 25, -0.2, 0.2);
+ IHistogram1D trkomega = aida.histogram1D("omega ", 25, -0.0025, 0.0025);
+ IHistogram1D trklam = aida.histogram1D("tan(lambda) ", 25, -0.1, 0.1);
+ IHistogram1D trkz0 = aida.histogram1D("z0 ", 25, -6.0, 6.0);
+
+ plotter22 = fac.createPlotterFactory().create("HPS Track Params");
+ plotter22.setTitle("Track parameters");
+ //plotterFrame.addPlotter(plotter22);
+ IPlotterStyle style22 = plotter22.style();
+ style22.dataStyle().fillStyle().setColor("yellow");
+ style22.dataStyle().errorBarStyle().setVisible(false);
+ plotter22.createRegions(2, 3);
+ plotter22.region(0).plot(trkd0);
+ plotter22.region(1).plot(trkphi);
+ plotter22.region(2).plot(trkomega);
+ plotter22.region(3).plot(trklam);
+ plotter22.region(4).plot(trkz0);
+
+ if(showPlots) plotter22.show();
+
+ // ******************************************************************
+
+
+ trkd0 = aida.histogram1D("d0 Top", 25, -10.0, 10.0);
+ trkphi = aida.histogram1D("sinphi Top", 25, -0.2, 0.2);
+ trkomega = aida.histogram1D("omega Top", 25, -0.0025, 0.0025);
+ trklam = aida.histogram1D("tan(lambda) Top", 25, -0.1, 0.1);
+ trkz0 = aida.histogram1D("z0 Top", 25, -6.0, 6.0);
+
+ plotter2221 = fac.createPlotterFactory().create("HPS Track Params");
+ plotter2221.setTitle("Track parameters");
+ //plotterFrame.addPlotter(plotter22);
+ IPlotterStyle style2221 = plotter2221.style();
+ style2221.dataStyle().fillStyle().setColor("yellow");
+ style2221.dataStyle().errorBarStyle().setVisible(false);
+ plotter2221.createRegions(2, 3);
+ plotter2221.region(0).plot(trkd0);
+ plotter2221.region(1).plot(trkphi);
+ plotter2221.region(2).plot(trkomega);
+ plotter2221.region(3).plot(trklam);
+ plotter2221.region(4).plot(trkz0);
+
+ if(showPlots) plotter2221.show();
+
+
+ // ******************************************************************
+
+
+ trkd0 = aida.histogram1D("d0 Bottom", 25, -10.0, 10.0);
+ trkphi = aida.histogram1D("sinphi Bottom", 25, -0.2, 0.2);
+ trkomega = aida.histogram1D("omega Bottom", 25, -0.0025, 0.0025);
+ trklam = aida.histogram1D("tan(lambda) Bottom", 25, -0.1, 0.1);
+ trkz0 = aida.histogram1D("z0 Bottom", 25, -6.0, 6.0);
+
+ plotter2222 = fac.createPlotterFactory().create("HPS Track Params");
+ plotter2222.setTitle("Track parameters");
+ //plotterFrame.addPlotter(plotter22);
+ IPlotterStyle style2222 = plotter2222.style();
+ style2222.dataStyle().fillStyle().setColor("yellow");
+ style2222.dataStyle().errorBarStyle().setVisible(false);
+ plotter2222.createRegions(2, 3);
+ plotter2222.region(0).plot(trkd0);
+ plotter2222.region(1).plot(trkphi);
+ plotter2222.region(2).plot(trkomega);
+ plotter2222.region(3).plot(trklam);
+ plotter2222.region(4).plot(trkz0);
+
+ if(showPlots) plotter2222.show();
+
+
+
+ // ******************************************************************
+
+
+ plotter2 = fac.createPlotterFactory().create("HPS Tracking Plots");
+ plotter2.setTitle("Track extrapolation");
+ //plotterFrame.addPlotter(plotter2);
+ IPlotterStyle style2 = plotter2.style();
+ style2.dataStyle().fillStyle().setColor("yellow");
+ style2.dataStyle().errorBarStyle().setVisible(false);
+ plotter2.createRegions(2, 4);
+ IHistogram1D xAtConverter = aida.histogram1D("X (mm) @ Z=-60cm", 50, -50, 50);
+ IHistogram1D yAtConverter = aida.histogram1D("Y (mm) @ Z=-60cm", 50, -20, 20);
+ IHistogram1D xAtColl = aida.histogram1D("X (mm) @ Z=-150cm", 50, -200, 200);
+ IHistogram1D yAtColl = aida.histogram1D("Y (mm) @ Z=-150cm", 50, -200, 200);
+ IHistogram1D xAtEcal = aida.histogram1D("X (mm) @ ECAL", 50, -500, 500);
+ IHistogram1D yAtEcal = aida.histogram1D("Y (mm) @ ECAL", 50, -100, 100);
+ IHistogram1D xAtEcal2 = aida.histogram1D("X (mm) @ ECAL (Pz>1)", 50, -500, 500);
+ IHistogram1D yAtEcal2 = aida.histogram1D("Y (mm) @ ECAL (Pz>1)", 50, -100, 100);
+
+ plotter2.region(0).plot(xAtConverter);
+ plotter2.region(4).plot(yAtConverter);
+ plotter2.region(1).plot(xAtColl);
+ plotter2.region(5).plot(yAtColl);
+ plotter2.region(2).plot(xAtEcal);
+ plotter2.region(6).plot(yAtEcal);
+ plotter2.region(3).plot(xAtEcal2);
+ plotter2.region(7).plot(yAtEcal2);
+
+ if(showPlots) plotter2.show();
+
+ // ******************************************************************
+
+ plotter222 = fac.createPlotterFactory().create("HPS Tracking Plots");
+ plotter222.setTitle("HPS Tracking Plots");
+ //plotterFrame.addPlotter(plotter222);
+ IPlotterStyle style222 = plotter222.style();
+ style222.dataStyle().fillStyle().setColor("yellow");
+ style222.dataStyle().errorBarStyle().setVisible(false);
+ plotter222.createRegions(2, 2);
+
+ IHistogram1D nHits = aida.histogram1D("Hits per Track", 4, 3, 7);
+ nTracks = aida.histogram1D("Tracks per Event", 3, 0, 3);
+ IHistogram1D nHitsCluster = aida.histogram1D("Hits in Cluster (HitOnTrack)", 4, 0, 4);
+
+
+ plotter222.region(0).plot(nHits);
+ plotter222.region(1).plot(nTracks);
+ plotter222.region(2).plot(nHitsCluster);
+
+ if(showPlots) plotter222.show();
+
+
+ // ******************************************************************
+
+ plotter22299 = fac.createPlotterFactory().create("HPS Tracking Plots Top");
+ plotter22299.setTitle("HPS Tracking Plots Top");
+ //plotterFrame.addPlotter(plotter22299);
+ IPlotterStyle style22299 = plotter22299.style();
+ style22299.dataStyle().fillStyle().setColor("yellow");
+ style22299.dataStyle().errorBarStyle().setVisible(false);
+ plotter22299.createRegions(2, 2);
+
+ IHistogram1D nHitsTop = aida.histogram1D("Hits per Track Top", 4, 3, 7);
+ nTracksTop = aida.histogram1D("Tracks per Event Top", 3, 0, 3);
+ IHistogram1D nHitsClusterTop = aida.histogram1D("Hits in Cluster (HitOnTrack) Top", 4, 0, 4);
+
+
+ plotter22299.region(0).plot(nHitsTop);
+ plotter22299.region(1).plot(nTracksTop);
+ plotter22299.region(2).plot(nHitsClusterTop);
+
+ if(showPlots) plotter22299.show();
+
+// ******************************************************************
+
+ plotter22298 = fac.createPlotterFactory().create("HPS Tracking Plots Bottom");
+ plotter22298.setTitle("HPS Tracking Plots Bottom");
+ //plotterFrame.addPlotter(plotter22298);
+ IPlotterStyle style22298 = plotter22298.style();
+ style22298.dataStyle().fillStyle().setColor("yellow");
+ style22298.dataStyle().errorBarStyle().setVisible(false);
+ plotter22298.createRegions(2, 2);
+
+ IHistogram1D nHitsBot = aida.histogram1D("Hits per Track Bot", 4, 3, 7);
+ nTracksBot = aida.histogram1D("Tracks per Event Bot", 3, 0, 3);
+ IHistogram1D nHitsClusterBot = aida.histogram1D("Hits in Cluster (HitOnTrack) Bot", 4, 0, 4);
+
+
+ plotter22298.region(0).plot(nHitsBot);
+ plotter22298.region(1).plot(nTracksBot);
+ plotter22298.region(2).plot(nHitsClusterBot);
+
+ if(showPlots) plotter22298.show();
+
+
+ // ******************************************************************
+
+
+ plotter2223 = fac.createPlotterFactory().create("Cluster Amp Plots");
+ plotter2223.setTitle("Other");
+ //plotterFrame.addPlotter(plotter222);
+ IPlotterStyle style2223 = plotter2223.style();
+ style2223.dataStyle().fillStyle().setColor("yellow");
+ style2223.dataStyle().errorBarStyle().setVisible(false);
+ plotter2223.createRegions(2, 2);
+
+
+
+ IHistogram1D amp = aida.histogram1D("Amp (HitOnTrack)", 50, 0, 5000);
+ IHistogram1D ampcl = aida.histogram1D("Cluster Amp (HitOnTrack)", 50, 0, 5000);
+ IHistogram1D amp2 = aida.histogram1D("Amp Pz>0.8 (HitOnTrack)", 50, 0, 5000);
+ IHistogram1D ampcl2 = aida.histogram1D("Cluster Amp Pz>0.8 (HitOnTrack)", 50, 0, 5000);
+
+
+ plotter2223.region(0).plot(amp);
+ plotter2223.region(1).plot(amp2);
+ plotter2223.region(2).plot(ampcl);
+ plotter2223.region(3).plot(ampcl2);
+
+ if(showPlots) plotter2223.show();
+
+// ******************************************************************
+
+
+ plotter2224 = fac.createPlotterFactory().create("t0 Plots");
+ plotter2224.setTitle("Other");
+ IPlotterStyle style2224 = plotter2224.style();
+ style2224.dataStyle().fillStyle().setColor("yellow");
+ style2224.dataStyle().errorBarStyle().setVisible(false);
+ plotter2224.createRegions(2, 2);
+
+ IHistogram1D t0 = aida.histogram1D("t0 (HitOnTrack)", 50, -100, 100);
+ IHistogram1D t0cl = aida.histogram1D("Cluster t0 (HitOnTrack)", 50, -100, 100);
+ IHistogram1D t02 = aida.histogram1D("t0 Pz>0.8 (HitOnTrack)", 50, -100, 100);
+ IHistogram1D t0cl2 = aida.histogram1D("Cluster t0 Pz>0.8 (HitOnTrack)", 50, -100, 100);
+
+ plotter2224.region(0).plot(t0);
+ plotter2224.region(1).plot(t0cl);
+ plotter2224.region(2).plot(t02);
+ plotter2224.region(3).plot(t0cl2);
+
+ if(showPlots) plotter2224.show();
+
+
+ // ******************************************************************
+
+ plotter3 = fac.createPlotterFactory().create("HPS Layer Residual Plots");
+ plotter3.setTitle("Layer Residuals");
+ //plotterFrame.addPlotter(plotter3);
+ IPlotterStyle style3 = plotter3.style();
+ style3.dataStyle().fillStyle().setColor("yellow");
+ style3.dataStyle().errorBarStyle().setVisible(false);
+ plotter3.createRegions(6, 2);
+
+
+
+ IHistogram1D mod1ResX = aida.histogram1D("Layer 1 Residual X(mm)", 25, -1, 1);
+ IHistogram1D mod1ResY = aida.histogram1D("Layer 1 Residual Y(mm)", 25, -0.04, 0.04);
+
+ IHistogram1D mod2ResX = aida.histogram1D("Layer 2 Residual X(mm)", 25, -2, 2);
+ IHistogram1D mod2ResY = aida.histogram1D("Layer 2 Residual Y(mm)", 25, -1, 1);
+
+ IHistogram1D mod3ResX = aida.histogram1D("Layer 3 Residual X(mm)", 25, -2.5, 2.5);
+ IHistogram1D mod3ResY = aida.histogram1D("Layer 3 Residual Y(mm)", 25, -1.5, 1.5);
+
+ IHistogram1D mod4ResX = aida.histogram1D("Layer 4 Residual X(mm)", 25, -3.0, 3.0);
+ IHistogram1D mod4ResY = aida.histogram1D("Layer 4 Residual Y(mm)", 25, -2, 2);
+
+ IHistogram1D mod5ResX = aida.histogram1D("Layer 5 Residual X(mm)", 25, -4, 4);
+ IHistogram1D mod5ResY = aida.histogram1D("Layer 5 Residual Y(mm)", 25, -3, 3);
+
+ IHistogram1D mod6ResX = aida.histogram1D("Layer 6 Residual X(mm)", 25, -5, 5);
+ IHistogram1D mod6ResY = aida.histogram1D("Layer 6 Residual Y(mm)", 25, -3, 3);
+
+ plotter3.region(0).plot(mod1ResX);
+ plotter3.region(2).plot(mod2ResX);
+ plotter3.region(4).plot(mod3ResX);
+ plotter3.region(6).plot(mod4ResX);
+ plotter3.region(8).plot(mod5ResX);
+ plotter3.region(10).plot(mod6ResX);
+
+ plotter3.region(1).plot(mod1ResY);
+ plotter3.region(3).plot(mod2ResY);
+ plotter3.region(5).plot(mod3ResY);
+ plotter3.region(7).plot(mod4ResY);
+ plotter3.region(9).plot(mod5ResY);
+ plotter3.region(11).plot(mod6ResY);
+
+ if(showPlots) plotter3.show();
+
+
+
+ plotter3_11 = fac.createPlotterFactory().create("HPS Strip Residual Plots");
+ plotter3_11.setTitle("Strip Residuals (Top)");
+ //plotterFrame.addPlotter(plotter3_11);
+ IPlotterStyle style3_11 = plotter3_11.style();
+ style3_11.dataStyle().fillStyle().setColor("yellow");
+ style3_11.dataStyle().errorBarStyle().setVisible(false);
+ plotter3_11.createRegions(6, 6);
+ int i=0;
+ for(HpsSiSensor sensor : sensors) {
+ double min = 0.0;
+ double max = 0.0;
+ if(sensor.getName().contains("L1")) {
+ min=-0.04; max=0.04;
+ } else if(sensor.getName().contains("L2")) {
+ min=-1; max=1;
+ } else if(sensor.getName().contains("L3")) {
+ min=-1.5; max=1.5;
+ } else if(sensor.getName().contains("L4")) {
+ min=-3; max=3;
+ } else if(sensor.getName().contains("L5")) {
+ min=-4; max=4;
+ } else if(sensor.getName().contains("L6")) {
+ min=-5; max=5;
+ } else {
+ throw new RuntimeException("Invalid sensor name: " + sensor.getName());
+ }
+ IHistogram1D resX = aida.histogram1D(sensor.getName() + " strip residual (mm)", 50, min, max);
+ plotter3_11.region(i).plot(resX);
+ i++;
+ }
+
+ if(showPlots) plotter3_11.show();
+
+
+ plotter3_1 = fac.createPlotterFactory().create("HPS Residual Plots (Single hit per layer)");
+ plotter3_1.setTitle("Residuals (Top)");
+ //plotterFrame.addPlotter(plotter3_1);
+ IPlotterStyle style3_1 = plotter3_1.style();
+ style3_1.dataStyle().fillStyle().setColor("yellow");
+ style3_1.dataStyle().errorBarStyle().setVisible(false);
+ plotter3_1.createRegions(6, 2);
+
+ IHistogram1D mod1ResX_Top = aida.histogram1D("Layer 1 Residual X(mm) Top", 25, -1, 1);
+ IHistogram1D mod1ResY_Top = aida.histogram1D("Layer 1 Residual Y(mm) Top", 25, -0.04, 0.04);
+
+ IHistogram1D mod2ResX_Top = aida.histogram1D("Layer 2 Residual X(mm) Top", 25, -2, 2);
+ IHistogram1D mod2ResY_Top = aida.histogram1D("Layer 2 Residual Y(mm) Top", 25, -1, 1);
+
+ IHistogram1D mod3ResX_Top = aida.histogram1D("Layer 3 Residual X(mm) Top", 25, -2.5, 2.5);
+ IHistogram1D mod3ResY_Top = aida.histogram1D("Layer 3 Residual Y(mm) Top", 25, -1.5, 1.5);
+
+ IHistogram1D mod4ResX_Top = aida.histogram1D("Layer 4 Residual X(mm) Top", 25, -3.0, 3.0);
+ IHistogram1D mod4ResY_Top = aida.histogram1D("Layer 4 Residual Y(mm) Top", 25, -2, 2);
+
+ IHistogram1D mod5ResX_Top = aida.histogram1D("Layer 5 Residual X(mm) Top", 25, -4, 4);
+ IHistogram1D mod5ResY_Top = aida.histogram1D("Layer 5 Residual Y(mm) Top", 25, -3, 3);
+
+ IHistogram1D mod6ResX_Top = aida.histogram1D("Layer 6 Residual X(mm) Top", 25, -5, 5);
+ IHistogram1D mod6ResY_Top = aida.histogram1D("Layer 6 Residual Y(mm) Top", 25, -3, 3);
+
+
+ plotter3_1.region(0).plot(mod1ResX_Top);
+ plotter3_1.region(2).plot(mod2ResX_Top);
+ plotter3_1.region(4).plot(mod3ResX_Top);
+ plotter3_1.region(6).plot(mod4ResX_Top);
+ plotter3_1.region(8).plot(mod5ResX_Top);
+ plotter3_1.region(10).plot(mod6ResX_Top);
+
+ plotter3_1.region(1).plot(mod1ResY_Top);
+ plotter3_1.region(3).plot(mod2ResY_Top);
+ plotter3_1.region(5).plot(mod3ResY_Top);
+ plotter3_1.region(7).plot(mod4ResY_Top);
+ plotter3_1.region(9).plot(mod5ResY_Top);
+ plotter3_1.region(11).plot(mod6ResY_Top);
+
+ if(showPlots) plotter3_1.show();
+
+ plotter3_2 = fac.createPlotterFactory().create("HPS Residual Plots (Single strip cluster per layer)");
+ plotter3_2.setTitle("Residuals (Bottom)");
+ //plotterFrame.addPlotter(plotter3_2);
+ IPlotterStyle style3_2 = plotter3_2.style();
+ style3_2.dataStyle().fillStyle().setColor("yellow");
+ style3_2.dataStyle().errorBarStyle().setVisible(false);
+ plotter3_2.createRegions(6, 2);
+
+ IHistogram1D mod1ResX_Bottom = aida.histogram1D("Layer 1 Residual X(mm) Bottom", 25, -1, 1);
+ IHistogram1D mod1ResY_Bottom = aida.histogram1D("Layer 1 Residual Y(mm) Bottom", 25, -0.04, 0.04);
+
+ IHistogram1D mod2ResX_Bottom = aida.histogram1D("Layer 2 Residual X(mm) Bottom", 25, -2, 2);
+ IHistogram1D mod2ResY_Bottom = aida.histogram1D("Layer 2 Residual Y(mm) Bottom", 25, -1, 1);
+
+ IHistogram1D mod3ResX_Bottom = aida.histogram1D("Layer 3 Residual X(mm) Bottom", 25, -2.5, 2.5);
+ IHistogram1D mod3ResY_Bottom = aida.histogram1D("Layer 3 Residual Y(mm) Bottom", 25, -1.5, 1.5);
+
+ IHistogram1D mod4ResX_Bottom = aida.histogram1D("Layer 4 Residual X(mm) Bottom", 25, -3.0, 3.0);
+ IHistogram1D mod4ResY_Bottom = aida.histogram1D("Layer 4 Residual Y(mm) Bottom", 25, -2, 2);
+
+ IHistogram1D mod5ResX_Bottom = aida.histogram1D("Layer 5 Residual X(mm) Bottom", 25, -4, 4);
+ IHistogram1D mod5ResY_Bottom = aida.histogram1D("Layer 5 Residual Y(mm) Bottom", 25, -3, 3);
+
+ IHistogram1D mod6ResX_Bottom = aida.histogram1D("Layer 6 Residual X(mm) Bottom", 25, -5, 5);
+ IHistogram1D mod6ResY_Bottom = aida.histogram1D("Layer 6 Residual Y(mm) Bottom", 25, -3, 3);
+
+ plotter3_2.region(0).plot(mod1ResX_Bottom);
+ plotter3_2.region(2).plot(mod2ResX_Bottom);
+ plotter3_2.region(4).plot(mod3ResX_Bottom);
+ plotter3_2.region(6).plot(mod4ResX_Bottom);
+ plotter3_2.region(8).plot(mod5ResX_Bottom);
+ plotter3_2.region(10).plot(mod6ResX_Bottom);
+
+ plotter3_2.region(1).plot(mod1ResY_Bottom);
+ plotter3_2.region(3).plot(mod2ResY_Bottom);
+ plotter3_2.region(5).plot(mod3ResY_Bottom);
+ plotter3_2.region(7).plot(mod4ResY_Bottom);
+ plotter3_2.region(9).plot(mod5ResY_Bottom);
+ plotter3_2.region(11).plot(mod6ResY_Bottom);
+
+ if(showPlots) plotter3_2.show();
+
+ plotter4 = fac.createPlotterFactory().create("HPS Track and ECal Plots");
+ plotter4.setTitle("Track and ECal Correlations");
+ //plotterFrame.addPlotter(plotter4);
+ IPlotterStyle style4 = plotter4.style();
+ style4.setParameter("hist2DStyle", "colorMap");
+ style4.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ style4.dataStyle().fillStyle().setColor("yellow");
+ style4.dataStyle().errorBarStyle().setVisible(false);
+ plotter4.createRegions(2, 3);
+
+ IHistogram2D eVsP = aida.histogram2D("Energy Vs Momentum", 50, 0, 0.50, 50, 0, 1.5);
+ IHistogram1D eOverP = aida.histogram1D("Energy Over Momentum", 50, 0, 2);
+
+ IHistogram1D distX = aida.histogram1D("deltaX", 50, -100, 100);
+ IHistogram1D distY = aida.histogram1D("deltaY", 50, -40, 40);
+
+ IHistogram2D xEcalVsTrk = aida.histogram2D("X ECal Vs Track", 100, -400, 400, 100, -400, 400);
+ IHistogram2D yEcalVsTrk = aida.histogram2D("Y ECal Vs Track", 100, -100, 100, 100, -100, 100);
+
+ plotter4.region(0).plot(eVsP);
+ plotter4.region(3).plot(eOverP);
+ plotter4.region(1).plot(distX);
+ plotter4.region(4).plot(distY);
+ plotter4.region(2).plot(xEcalVsTrk);
+ plotter4.region(5).plot(yEcalVsTrk);
+
+ if(showPlots) plotter4.show();
+
+ // ******************************************************************
+ top2 = fac.createPlotterFactory().create("Top ECal Plots");
+ top2.setTitle("Top ECal Correlations");
+ IPlotterStyle stop2 = top2.style();
+ stop2.dataStyle().fillStyle().setColor("green");
+ stop2.dataStyle().errorBarStyle().setVisible(false);
+ stop2.setParameter("hist2DStyle", "colorMap");
+ stop2.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ top2.createRegions(2, 3);
+ //topFrame.addPlotter(top2);
+
+ IHistogram2D topeVsP = aida.histogram2D("Top Energy Vs Momentum", 50, 0, 0.500, 50, 0, 1.5);
+ IHistogram1D topeOverP = aida.histogram1D("Top Energy Over Momentum", 50, 0, 2);
+
+ IHistogram1D topdistX = aida.histogram1D("Top deltaX", 50, -100, 100);
+ IHistogram1D topdistY = aida.histogram1D("Top deltaY", 50, -40, 40);
+
+ IHistogram2D topxEcalVsTrk = aida.histogram2D("Top X ECal Vs Track", 100, -400, 400, 100, -100, 100);
+ IHistogram2D topyEcalVsTrk = aida.histogram2D("Top Y ECal Vs Track", 100, 0, 100, 100, 0, 100);
+
+ top2.region(0).plot(topeVsP);
+ top2.region(3).plot(topeOverP);
+ top2.region(1).plot(topdistX);
+ top2.region(4).plot(topdistY);
+ top2.region(2).plot(topxEcalVsTrk);
+ top2.region(5).plot(topyEcalVsTrk);
+
+ if(showPlots) top2.show();
+
+ bot2 = fac.createPlotterFactory().create("Bottom ECal Plots");
+ bot2.setTitle("Bottom ECal Correlations");
+ IPlotterStyle sbot2 = bot2.style();
+ sbot2.dataStyle().fillStyle().setColor("green");
+ sbot2.dataStyle().errorBarStyle().setVisible(false);
+ sbot2.setParameter("hist2DStyle", "colorMap");
+ sbot2.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ bot2.createRegions(2, 3);
+ //bottomFrame.addPlotter(bot2);
+
+ IHistogram2D BottomeVsP = aida.histogram2D("Bottom Energy Vs Momentum", 50, 0, 0.500, 50, 0, 1.5);
+ IHistogram1D BottomeOverP = aida.histogram1D("Bottom Energy Over Momentum", 50, 0, 2);
+
+ IHistogram1D BottomdistX = aida.histogram1D("Bottom deltaX", 50, -100, 100);
+ IHistogram1D BottomdistY = aida.histogram1D("Bottom deltaY", 50, -40, 40);
+
+ IHistogram2D BottomxEcalVsTrk = aida.histogram2D("Bottom X ECal Vs Track", 100, -400, 400, 100, -400, 400);
+ IHistogram2D BottomyEcalVsTrk = aida.histogram2D("Bottom Y ECal Vs Track", 100, -100, 0, 100, -100, 0);
+
+ bot2.region(0).plot(BottomeVsP);
+ bot2.region(3).plot(BottomeOverP);
+ bot2.region(1).plot(BottomdistX);
+ bot2.region(4).plot(BottomdistY);
+ bot2.region(2).plot(BottomxEcalVsTrk);
+ bot2.region(5).plot(BottomyEcalVsTrk);
+
+ if(showPlots) bot2.show();
+
+
+ // ******************************************************************
+ top3 = fac.createPlotterFactory().create("Top ECal Plots");
+ top3.setTitle("Top ECal More Correlations");
+ IPlotterStyle stop3 = top3.style();
+ stop3.dataStyle().fillStyle().setColor("green");
+ stop3.dataStyle().errorBarStyle().setVisible(false);
+ stop3.setParameter("hist2DStyle", "colorMap");
+ stop3.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ top3.createRegions(1, 2);
+ //topFrame.addPlotter(top3);
+
+ IHistogram2D topdistXvsX = aida.histogram2D("Top deltaX vs X", 51, -400, 400, 25, -100, 100);
+ IHistogram2D topdistYvsY = aida.histogram2D("Top deltaY vs Y", 51, 0, 100, 25, -40, 40);
+
+ top3.region(0).plot(topdistXvsX);
+ top3.region(1).plot(topdistYvsY);
+
+ if(showPlots) top3.show();
+
+ bot3 = fac.createPlotterFactory().create("Bottom ECal Plots");
+ bot3.setTitle("Bottom ECal More Correlations");
+ IPlotterStyle sbot3 = bot3.style();
+ sbot3.dataStyle().fillStyle().setColor("green");
+ sbot3.dataStyle().errorBarStyle().setVisible(false);
+ sbot3.setParameter("hist2DStyle", "colorMap");
+ sbot3.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ bot3.createRegions(1, 2);
+ //bottomFrame.addPlotter(bot3);
+
+ IHistogram2D botdistXvsX = aida.histogram2D("Bottom deltaX vs X", 51, -400, 400, 25, -100, 100);
+ IHistogram2D botdistYvsY = aida.histogram2D("Bottom deltaY vs Y", 51, -100, 0, 25, -40, 40);
+
+ bot3.region(0).plot(botdistXvsX);
+ bot3.region(1).plot(botdistYvsY);
+
+ if(showPlots) bot3.show();
+
+ // ******************************************************************
+ top4 = fac.createPlotterFactory().create("Track Matching Plots");
+ top4.setTitle("Track Matching Plots");
+ IPlotterStyle stop4 = top4.style();
+ stop4.dataStyle().fillStyle().setColor("green");
+ stop4.dataStyle().errorBarStyle().setVisible(false);
+ stop4.setParameter("hist2DStyle", "colorMap");
+ stop4.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ top4.createRegions(2, 3);
+ //topFrame.addPlotter(top4);
+
+ IHistogram1D trackmatchN = aida.histogram1D("Tracks matched", 3, 0, 3);
+ IHistogram1D toptrackmatchN = aida.histogram1D("Tracks matched Top", 3, 0, 3);
+ IHistogram1D bottrackmatchN = aida.histogram1D("Tracks matched Bottom", 3, 0, 3);
+ IHistogram1D trackmatchN2 = aida.histogram1D("Tracks matched (Pz>0.8)", 3, 0, 3);
+ IHistogram1D toptrackmatchN2 = aida.histogram1D("Tracks matched Top (Pz>0.8)", 3, 0, 3);
+ IHistogram1D bottrackmatchN2 = aida.histogram1D("Tracks matched Bottom (Pz>0.8)", 3, 0, 3);
+
+ top4.region(0).plot(trackmatchN);
+ top4.region(1).plot(toptrackmatchN);
+ top4.region(2).plot(bottrackmatchN);
+ top4.region(3).plot(trackmatchN2);
+ top4.region(4).plot(toptrackmatchN2);
+ top4.region(5).plot(bottrackmatchN2);
+
+ if(showPlots) top4.show();
+
+ // ******************************************************************
+ top44 = fac.createPlotterFactory().create("e+e- Plots");
+ top44.setTitle("e+e- Plots");
+ IPlotterStyle stop44 = top44.style();
+ stop44.dataStyle().fillStyle().setColor("green");
+ stop44.dataStyle().errorBarStyle().setVisible(false);
+ stop44.setParameter("hist2DStyle", "colorMap");
+ stop44.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ top44.createRegions(2,4);
+ //topFrame.addPlotter(top44);
+
+ IHistogram2D trackPCorr = aida.histogram2D("p(e-) vs p(e+) max", 25, 0, 1.2, 25, 0, 1.2);
+ IHistogram1D ne = aida.histogram1D("n(e-)", 3, 0, 3);
+ IHistogram1D np = aida.histogram1D("n(e+)", 3, 0, 3);
+ IHistogram1D pem = aida.histogram1D("p(e-) max", 25, 0, 1.5);
+ IHistogram1D pe = aida.histogram1D("p(e-)", 25, 0, 1.5);
+ IHistogram1D ppm = aida.histogram1D("p(e+) max", 25, 0, 1.5);
+ IHistogram1D pp = aida.histogram1D("p(e+)", 25, 0, 1.5);
+
+ top44.region(0).plot(trackPCorr);
+ top44.region(1).plot(ne);
+ top44.region(2).plot(np);
+ top44.region(3).plot(pe);
+ top44.region(4).plot(pp);
+ top44.region(5).plot(pem);
+ top44.region(6).plot(ppm);
+
+ if(showPlots) top44.show();
+
+
+
+// ******************************************************************
+ plotter5 = fac.createPlotterFactory().create("HPS Hit Positions");
+ plotter5.setTitle("Hit Positions: Top");
+ //plotterFrame.addPlotter(plotter5);
+ IPlotterStyle style5 = plotter5.style();
+ style5.setParameter("hist2DStyle", "colorMap");
+ style5.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ style5.dataStyle().fillStyle().setColor("yellow");
+ style5.dataStyle().errorBarStyle().setVisible(false);
+ plotter5.createRegions(1, 2);
+
+ IHistogram2D l1Pos = aida.histogram2D("Layer 1 HTH Position: Top", 50, -55, 55, 55, -25, 25);
+ IHistogram2D l7Pos = aida.histogram2D("Layer 7 HTH Position: Top", 50, -55, 55, 55, -25, 25);
+
+ plotter5.region(0).plot(l1Pos);
+ plotter5.region(1).plot(l7Pos);
+
+ if(showPlots) plotter5.show();
+
+ plotter5_1 = fac.createPlotterFactory().create("HPS Hit Positions");
+ plotter5_1.setTitle("Hit Positions: Bottom");
+ //plotterFrame.addPlotter(plotter5_1);
+ IPlotterStyle style5_1 = plotter5_1.style();
+ style5_1.setParameter("hist2DStyle", "colorMap");
+ style5_1.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ style5_1.dataStyle().fillStyle().setColor("yellow");
+ style5_1.dataStyle().errorBarStyle().setVisible(false);
+ plotter5_1.createRegions(1, 2);
+
+
+ IHistogram2D l1PosBot = aida.histogram2D("Layer 1 HTH Position: Bottom", 50, -55, 55, 55, -25, 25);
+ IHistogram2D l7PosBot = aida.histogram2D("Layer 7 HTH Position: Bottom", 50, -55, 55, 55, -25, 25);
+ plotter5_1.region(0).plot(l1PosBot);
+ plotter5_1.region(1).plot(l7PosBot);
+
+ if(showPlots) plotter5_1.show();
+
+ plotter55 = fac.createPlotterFactory().create("HPS Hit Positions");
+ plotter55.setTitle("Helical Track Hits");
+ //plotterFrame.addPlotter(plotter55);
+ IPlotterStyle style55 = plotter55.style();
+ style55.dataStyle().fillStyle().setColor("Green");
+ style55.dataStyle().errorBarStyle().setVisible(false);
+ style55.dataStyle().markerStyle().setSize(20);
+ plotter55.createRegions(1, 2);
+
+ IProfile avgLayersTopPlot = aida.profile1D("Number of Stereo Hits per layer in Top Half", 13, 0, 13);
+ IProfile avgLayersBottomPlot = aida.profile1D("Number of Stereo Hits per layer in Bottom Half", 13, 0, 13);
+
+ plotter55.region(0).plot(avgLayersTopPlot);
+ plotter55.region(1).plot(avgLayersBottomPlot);
+
+ if(showPlots) plotter55.show();
+
+ plotter6 = fac.createPlotterFactory().create("HPS ECAL Hit Positions");
+ plotter6.setTitle("ECAL Positions");
+ //plotterFrame.addPlotter(plotter6);
+ IPlotterStyle style6 = plotter6.style();
+ style6.setParameter("hist2DStyle", "colorMap");
+ style6.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ style6.dataStyle().fillStyle().setColor("yellow");
+ style6.dataStyle().errorBarStyle().setVisible(false);
+ plotter6.createRegions(4, 2);
+
+ IHistogram2D topECal = aida.histogram2D("Top ECal Cluster Position", 50, -400, 400, 10, 0, 100);
+ IHistogram2D botECal = aida.histogram2D("Bottom ECal Cluster Position", 50, -400, 400, 10, -100, 0);
+ IHistogram2D topECal1 = aida.histogram2D("Top ECal Cluster Position (>0 tracks)", 50, -400, 400, 10, 0, 100);
+ IHistogram2D botECal1 = aida.histogram2D("Bottom ECal Cluster Position (>0 tracks)", 50, -400, 400, 10, -100, 0);
+ IHistogram2D topECal2 = aida.histogram2D("Top ECal Cluster Position (E>0.1,>0 tracks)", 50, -400, 400, 10, 0, 100);
+ IHistogram2D botECal2 = aida.histogram2D("Bottom ECal Cluster Position (E>0.1,>0 tracks)", 50, -400, 400, 10, -100, 0);
+ IHistogram2D topECal3 = aida.histogram2D("Top ECal Cluster Position w_E (E>0.1,>0 tracks)", 50, -400, 400, 10, 0, 100);
+ IHistogram2D botECal3 = aida.histogram2D("Bottom ECal Cluster Position w_E (E>0.1,>0 tracks)", 50, -400, 400, 10, -100, 0);
+
+ plotter6.region(0).plot(topECal);
+ plotter6.region(1).plot(botECal);
+ plotter6.region(2).plot(topECal1);
+ plotter6.region(3).plot(botECal1);
+ plotter6.region(4).plot(topECal2);
+ plotter6.region(5).plot(botECal2);
+ plotter6.region(6).plot(topECal3);
+ plotter6.region(7).plot(botECal3);
+
+ if(showPlots) plotter6.show();
+
+
+ plotter66 = fac.createPlotterFactory().create("HPS ECAL Basic Plots");
+ plotter66.setTitle("ECAL Basic Plots");
+ //plotterFrame.addPlotter(plotter6);
+ IPlotterStyle style66 = plotter66.style();
+ style66.dataStyle().fillStyle().setColor("yellow");
+ style66.dataStyle().errorBarStyle().setVisible(false);
+ plotter66.createRegions(2, 2);
+
+ IHistogram1D topECalE = aida.histogram1D("Top ECal Cluster Energy", 50, 0, 2);
+ IHistogram1D botECalE = aida.histogram1D("Bottom ECal Cluster Energy", 50, 0, 2);
+ IHistogram1D topECalN = aida.histogram1D("Number of Clusters Top", 6, 0, 6);
+ IHistogram1D botECalN = aida.histogram1D("Number of Clusters Bot", 6, 0, 6);
+
+ plotter66.region(0).plot(topECalE);
+ plotter66.region(1).plot(botECalE);
+ plotter66.region(2).plot(botECalN);
+ plotter66.region(3).plot(topECalN);
+
+ if(showPlots) plotter66.show();
+
+
+
+
+ plotter8 = fac.createPlotterFactory().create("HPS Strip Hit From Stereo Multiplicity");
+ plotter8.setTitle("Strip Hit Multiplicity");
+ //plotterFrame.addPlotter(plotter8);
+ IPlotterStyle style8 = plotter8.style();
+ style8.dataStyle().fillStyle().setColor("yellow");
+ style8.dataStyle().errorBarStyle().setVisible(false);
+ plotter8.createRegions(6, 6);
+ i=0;
+ for(SiSensor sensor : sensors) {
+ IHistogram1D resX = aida.histogram1D(sensor.getName() + " strip hits from stereo", 10, 0, 10);
+ plotter8.region(i).plot(resX);
+ i++;
+ }
+
+ if(showPlots) plotter8.show();
+
+ plotter88 = fac.createPlotterFactory().create("HPS Strip Hit Multiplicity");
+ plotter88.setTitle("Strip Hit Multiplicity");
+ //plotterFrame.addPlotter(plotter88);
+ plotter88.setStyle(style8);
+ plotter88.createRegions(6, 6);
+ i=0;
+ for(SiSensor sensor : sensors) {
+ IHistogram1D resX = aida.histogram1D(sensor.getName() + " strip hits", 10, 0, 10);
+ plotter88.region(i).plot(resX);
+ i++;
+ }
+
+ if(showPlots) plotter88.show();
+
+
+
+
+
+ plotter888 = fac.createPlotterFactory().create("HPS Strip Hit Isolation");
+ plotter888.setTitle("Strip Hit Isolation");
+ //plotterFrame.addPlotter(plotter88);
+ plotter888.setStyle(style8);
+ plotter888.createRegions(6, 6);
+ i=0;
+ for(SiSensor sensor : sensors) {
+ IHistogram1D resX = aida.histogram1D(sensor.getName() + " strip hits iso", 50, 0, 5);
+ plotter888.region(i).plot(resX);
+ i++;
+ }
+
+ if(showPlots) plotter888.show();
+
+ plotter8888 = fac.createPlotterFactory().create("HPS Strip Hit On Track Isolation");
+ plotter8888.setTitle("Strip Hit On Track Isolation");
+ //plotterFrame.addPlotter(plotter88);
+ plotter8888.setStyle(style8);
+ plotter8888.createRegions(6, 6);
+ i=0;
+ for(SiSensor sensor : sensors) {
+ IHistogram1D resX = aida.histogram1D(sensor.getName() + " strip hits iso on track", 50, 0, 5);
+ plotter8888.region(i).plot(resX);
+ i++;
+ }
+
+ if(showPlots) plotter8888.show();
+
+
+ }
+
+
+
+
private Cluster findClosestCluster(Hep3Vector posonhelix, List<Cluster> clusters) {
Cluster closest = null;
double minDist = 9999;
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/TriggerTurnOnAnalysis.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/TriggerTurnOnAnalysis.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/TriggerTurnOnAnalysis.java Wed Apr 27 11:11:32 2016
@@ -10,12 +10,15 @@
import hep.aida.IHistogramFactory;
import hep.aida.IPlotter;
import hep.physics.vec.Hep3Vector;
+
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
+
import org.hps.analysis.ecal.HPSMCParticlePlotsDriver;
+import org.hps.users.phansson.testrun.TrigRateDriver;
import org.lcsim.event.Cluster;
import org.lcsim.event.EventHeader;
import org.lcsim.event.GenericObject;
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/TruthMomentumResolutionDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/TruthMomentumResolutionDriver.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/phansson/TruthMomentumResolutionDriver.java Wed Apr 27 11:11:32 2016
@@ -18,6 +18,7 @@
import java.util.logging.Logger;
import org.hps.analysis.ecal.HPSMCParticlePlotsDriver;
+import org.hps.users.phansson.testrun.TrigRateDriver;
import org.lcsim.constants.Constants;
import org.lcsim.event.EventHeader;
import org.lcsim.event.MCParticle;
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/rafo/test1.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/rafo/test1.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/rafo/test1.java Wed Apr 27 11:11:32 2016
@@ -4,15 +4,15 @@
import org.lcsim.util.Driver;
public class test1 extends Driver {
- private int clusterID;
+ private int clusterID;
- public void process(EventHeader event) {
- System.out.println("The cluster ID = " + clusterID);
- }
+ public void process(EventHeader event) {
+ System.out.println("The cluster ID = " + clusterID);
+ }
- public void setClusterID(int clusterID) {
- this.clusterID = clusterID;
- }
+ public void setClusterID(int clusterID) {
+ this.clusterID = clusterID;
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/GetChargeFromScalersMultirun.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/GetChargeFromScalersMultirun.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/GetChargeFromScalersMultirun.java Wed Apr 27 11:11:32 2016
@@ -18,144 +18,144 @@
*
*/
public class GetChargeFromScalersMultirun {
- private static ArrayList<String> runs;
- private static ArrayList<Date> starts;
- private static ArrayList<Date> ends;
+ private static ArrayList<String> runs;
+ private static ArrayList<Date> starts;
+ private static ArrayList<Date> ends;
- public static void main(String[] arg) throws FileNotFoundException, ParseException{
- String inputFile = arg[0];
- String timingInfoFile = arg[1];
- String outputFile = arg[2];
-
-
- readTimingInfoFile(timingInfoFile);
-
- Map map = getCharges(runs, starts, ends, inputFile);
- mergeBiasIntervals(map);
-
- ArrayList<String> keys = new ArrayList(map.keySet());
- Collections.sort(keys);
-
-
- PrintWriter pw = new PrintWriter(new File(outputFile));
- for(String s : keys){
- pw.println(s + "\t" + map.get(s));
- System.out.println(s + "\t" + map.get(s));
- }
- pw.close();
- }
+ public static void main(String[] arg) throws FileNotFoundException, ParseException{
+ String inputFile = arg[0];
+ String timingInfoFile = arg[1];
+ String outputFile = arg[2];
+
+
+ readTimingInfoFile(timingInfoFile);
+
+ Map map = getCharges(runs, starts, ends, inputFile);
+ mergeBiasIntervals(map);
+
+ ArrayList<String> keys = new ArrayList(map.keySet());
+ Collections.sort(keys);
+
+
+ PrintWriter pw = new PrintWriter(new File(outputFile));
+ for(String s : keys){
+ pw.println(s + "\t" + map.get(s));
+ System.out.println(s + "\t" + map.get(s));
+ }
+ pw.close();
+ }
- static void readTimingInfoFile(String s) throws FileNotFoundException, ParseException{
- Scanner scanner = new Scanner(new File(s));
- scanner.useDelimiter("[\n\t]");
- runs = new ArrayList();
- starts = new ArrayList();
- ends = new ArrayList();
- DateFormat df = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss z");
-
- while(scanner.hasNext()){
- runs.add(scanner.next());
- String n = scanner.next();
- if(n.matches("\\d+"))
- starts.add(new Date(Long.parseLong(n)));
- else
- starts.add(df.parse(n));
- n = scanner.next();
- if(n.matches("\\d+"))
- ends.add(new Date(Long.parseLong(n)));
- else
- ends.add(df.parse(n));
-
- }
- scanner.close();
- }
-
-
-
-
-
- /**
- * returns charge in microCoulombs
- * @param runs names of the runs
- * @param starts starting times of the runs
- * @param ends ending times of the runs
- * @param file the mya file that we need to use
- * @return a map relating the runs to the charges (uC).
- * @throws FileNotFoundException
- */
-
- static Map<String, Double> getCharges(ArrayList<String> runs, ArrayList<Date> starts, ArrayList<Date> ends, String file) throws FileNotFoundException{
- Scanner s = new Scanner(new File(file));
- HashMap<String, Double> map = new HashMap();
- long prev = 0;
- long time = 0;
- for(int i = 0; i< runs.size(); i++){
- long endt = ends.get(i).getTime();
- long startt = starts.get(i).getTime();
-
- double charge = 0;
- boolean started = false;
- double prevval = 0;
- if(time > endt){
- s.close();
- s = new Scanner(new File(file));
- }
- inner : while(s.hasNext()){
- String var = s.next();
- prev = time;
- time = s.nextLong()*1000; //convert from s to ms
+ static void readTimingInfoFile(String s) throws FileNotFoundException, ParseException{
+ Scanner scanner = new Scanner(new File(s));
+ scanner.useDelimiter("[\n\t]");
+ runs = new ArrayList();
+ starts = new ArrayList();
+ ends = new ArrayList();
+ DateFormat df = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss z");
+
+ while(scanner.hasNext()){
+ runs.add(scanner.next());
+ String n = scanner.next();
+ if(n.matches("\\d+"))
+ starts.add(new Date(Long.parseLong(n)));
+ else
+ starts.add(df.parse(n));
+ n = scanner.next();
+ if(n.matches("\\d+"))
+ ends.add(new Date(Long.parseLong(n)));
+ else
+ ends.add(df.parse(n));
+
+ }
+ scanner.close();
+ }
+
+
+
+
+
+ /**
+ * returns charge in microCoulombs
+ * @param runs names of the runs
+ * @param starts starting times of the runs
+ * @param ends ending times of the runs
+ * @param file the mya file that we need to use
+ * @return a map relating the runs to the charges (uC).
+ * @throws FileNotFoundException
+ */
+
+ static Map<String, Double> getCharges(ArrayList<String> runs, ArrayList<Date> starts, ArrayList<Date> ends, String file) throws FileNotFoundException{
+ Scanner s = new Scanner(new File(file));
+ HashMap<String, Double> map = new HashMap();
+ long prev = 0;
+ long time = 0;
+ for(int i = 0; i< runs.size(); i++){
+ long endt = ends.get(i).getTime();
+ long startt = starts.get(i).getTime();
+
+ double charge = 0;
+ boolean started = false;
+ double prevval = 0;
+ if(time > endt){
+ s.close();
+ s = new Scanner(new File(file));
+ }
+ inner : while(s.hasNext()){
+ String var = s.next();
+ prev = time;
+ time = s.nextLong()*1000; //convert from s to ms
- double val = s.nextDouble();
- if(!var.equals("scaler_calc1"))
- continue;
-
- if(!started && time> startt){ //first sample in the run
- charge += (val)/2.*(time-startt);
- started= true;
- }
-
- else if(time > startt && endt > time){ //middle samples in the run
- charge += (val/*+prevval*/)/*/2.*/*(time-prev);
-
- }
-
- if(endt < time){ //last sample that is in the run
- charge += (/*prev*/val)/2.*(endt-prev);
- break inner;
- }
- prevval = val;
- }
- charge/=1e6;
- map.put(runs.get(i), charge);
- }
- s.close();
- return map;
+ double val = s.nextDouble();
+ if(!var.equals("scaler_calc1"))
+ continue;
+
+ if(!started && time> startt){ //first sample in the run
+ charge += (val)/2.*(time-startt);
+ started= true;
+ }
+
+ else if(time > startt && endt > time){ //middle samples in the run
+ charge += (val/*+prevval*/)/*/2.*/*(time-prev);
+
+ }
+
+ if(endt < time){ //last sample that is in the run
+ charge += (/*prev*/val)/2.*(endt-prev);
+ break inner;
+ }
+ prevval = val;
+ }
+ charge/=1e6;
+ map.put(runs.get(i), charge);
+ }
+ s.close();
+ return map;
- }
-
- /**
- * If the subsections of the runs in which the bias is on are labeled according to a scheme,
- they will be added together.
- for instance, 5779a, 5779b, 5779c, etc. will be added up as 5779bias.
- * @param map the map of run names (and portions of runs that have bias labeled as [run number][a,b,c,d...],
- * corresponding to the total charge in that run (or piece of a run).
- */
- static void mergeBiasIntervals(Map<String, Double> map){
- Map<String, Double> map2 = new HashMap();
- for(Map.Entry<String, Double> entry : map.entrySet()){
- String key1 = entry.getKey();
- if(!entry.getKey().matches("\\d+a"))
- continue;
- double charge = entry.getValue();
- for(Map.Entry<String, Double> entry2 : map.entrySet()){
- if(entry2.getKey().matches(key1.substring(0, 4) + "[b-z]"))
- charge += entry2.getValue();
- }
- map2.put(key1.substring(0, 4) + "bias", charge);
-
- }
- for(Map.Entry<String, Double> e : map2.entrySet()){
- map.put(e.getKey(), e.getValue());
- }
- }
+ }
+
+ /**
+ * If the subsections of the runs in which the bias is on are labeled according to a scheme,
+ they will be added together.
+ for instance, 5779a, 5779b, 5779c, etc. will be added up as 5779bias.
+ * @param map the map of run names (and portions of runs that have bias labeled as [run number][a,b,c,d...],
+ * corresponding to the total charge in that run (or piece of a run).
+ */
+ static void mergeBiasIntervals(Map<String, Double> map){
+ Map<String, Double> map2 = new HashMap();
+ for(Map.Entry<String, Double> entry : map.entrySet()){
+ String key1 = entry.getKey();
+ if(!entry.getKey().matches("\\d+a"))
+ continue;
+ double charge = entry.getValue();
+ for(Map.Entry<String, Double> entry2 : map.entrySet()){
+ if(entry2.getKey().matches(key1.substring(0, 4) + "[b-z]"))
+ charge += entry2.getValue();
+ }
+ map2.put(key1.substring(0, 4) + "bias", charge);
+
+ }
+ for(Map.Entry<String, Double> e : map2.entrySet()){
+ map.put(e.getKey(), e.getValue());
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/HitrateHistograms.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/HitrateHistograms.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/HitrateHistograms.java Wed Apr 27 11:11:32 2016
@@ -19,109 +19,109 @@
import org.lcsim.util.Driver;
public class HitrateHistograms extends Driver{
- HashMap<String, IHistogram2D[]> hist2d = new HashMap<String, IHistogram2D[]>();
+ HashMap<String, IHistogram2D[]> hist2d = new HashMap<String, IHistogram2D[]>();
- IAnalysisFactory af = IAnalysisFactory.create();
- IHistogramFactory hf = af.createHistogramFactory(af.createTreeFactory().create());
- IPlotterFactory pf = af.createPlotterFactory();
- public HitrateHistograms(){
- addHistograms2D("Ecal", 1, -30, 30, -8, 16);
- addHistograms2D("muon", 8, -30, 30, -8, 16);
+ IAnalysisFactory af = IAnalysisFactory.create();
+ IHistogramFactory hf = af.createHistogramFactory(af.createTreeFactory().create());
+ IPlotterFactory pf = af.createPlotterFactory();
+ public HitrateHistograms(){
+ addHistograms2D("Ecal", 1, -30, 30, -8, 16);
+ addHistograms2D("muon", 8, -30, 30, -8, 16);
- try {
- ecalDecoder = new IDDecoder(new IDDescriptor("system:0:6,layer:6:2,ix:8:-8,iy:16:-6"));
- } catch (IDException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
+ try {
+ ecalDecoder = new IDDecoder(new IDDescriptor("system:0:6,layer:6:2,ix:8:-8,iy:16:-6"));
+ } catch (IDException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
- }
- private void addHistograms2D(String detector, int nLayers, int ixMin, int ixMax, int iyMin, int iyMax){
- IPlotter plotter1 = pf.create("detector");
- int nx = (int)Math.ceil(Math.sqrt(nLayers));
- int ny = (int)Math.ceil(nLayers/(double)nx);
- plotter1.createRegions(nx, ny);
+ }
+ private void addHistograms2D(String detector, int nLayers, int ixMin, int ixMax, int iyMin, int iyMax){
+ IPlotter plotter1 = pf.create("detector");
+ int nx = (int)Math.ceil(Math.sqrt(nLayers));
+ int ny = (int)Math.ceil(nLayers/(double)nx);
+ plotter1.createRegions(nx, ny);
- hist2d.put(detector,new IHistogram2D[nLayers]);
- for(int i = 0; i< nLayers; i++){
- hist2d.get(detector)[i]
- = hf.createHistogram2D(detector + " layer " + (i+1), ixMax-ixMin, ixMin, ixMax, iyMax-iyMin, iyMin, iyMax);
- plotter1.region(i).plot(hist2d.get(detector)[i]);
+ hist2d.put(detector,new IHistogram2D[nLayers]);
+ for(int i = 0; i< nLayers; i++){
+ hist2d.get(detector)[i]
+ = hf.createHistogram2D(detector + " layer " + (i+1), ixMax-ixMin, ixMin, ixMax, iyMax-iyMin, iyMin, iyMax);
+ plotter1.region(i).plot(hist2d.get(detector)[i]);
- }
- }
+ }
+ }
- IDDecoder ecalDecoder;
- private float recency;
+ IDDecoder ecalDecoder;
+ private float recency;
- public void process(EventHeader header){
+ public void process(EventHeader header){
- //System.out.println(header.keys());
+ //System.out.println(header.keys());
- for(SIOSimCalorimeterHit hit: header.get(SIOSimCalorimeterHit.class,"EcalHits")){
- int fieldCount = hit.getIDDecoder().getFieldCount();
- ecalDecoder.setID(hit.getCellID());
- int ix = ecalDecoder.getValue("ix");
- int iy = ecalDecoder.getValue("iy");
- int layer = ecalDecoder.getValue("layer");
- hist2d.get("Ecal")[layer].fill(ix, iy);
- }
-
-
- /*if(recency != 0)
- for(IHistogram2D[] hists : hist2d.values()){
- for(IHistogram2D hist : hists){
- hf.
- }
- }*/
- //}
- }
- /**
- * "recency" is a parameter used to determine how to time-weight the
- * histogram so that the more recent events are more highly weighted.
- * A recency of zero means that there is no time weightedness.
- * A recency of R means that each event is weighted by a factor of (1-R)^n,
- * where n is the number of events that have taken place since the event shown.
- * @param recency
- */
- //public void setRecency(float recency){
- //this.recency = recency;
- //}
+ for(SIOSimCalorimeterHit hit: header.get(SIOSimCalorimeterHit.class,"EcalHits")){
+ int fieldCount = hit.getIDDecoder().getFieldCount();
+ ecalDecoder.setID(hit.getCellID());
+ int ix = ecalDecoder.getValue("ix");
+ int iy = ecalDecoder.getValue("iy");
+ int layer = ecalDecoder.getValue("layer");
+ hist2d.get("Ecal")[layer].fill(ix, iy);
+ }
+
+
+ /*if(recency != 0)
+ for(IHistogram2D[] hists : hist2d.values()){
+ for(IHistogram2D hist : hists){
+ hf.
+ }
+ }*/
+ //}
+ }
+ /**
+ * "recency" is a parameter used to determine how to time-weight the
+ * histogram so that the more recent events are more highly weighted.
+ * A recency of zero means that there is no time weightedness.
+ * A recency of R means that each event is weighted by a factor of (1-R)^n,
+ * where n is the number of events that have taken place since the event shown.
+ * @param recency
+ */
+ //public void setRecency(float recency){
+ //this.recency = recency;
+ //}
- public void startOfData(){
+ public void startOfData(){
- }
- public void endOfData(){
+ }
+ public void endOfData(){
- for(String name : hist2d.keySet()){
- IHistogram2D[] hists = hist2d.get(name);
- System.out.println(name);
- for(int i = 0; i< hists.length; i++){
- double max = hists[i].maxBinHeight();
- double total = hists[i].sumAllBinHeights();
- double ratioPercent = 100.*max/(double)total;
- System.out.printf(" layer %d: %.2f %% of hits were in the most populated bin\n",i, ratioPercent);
- }
- }
- }
- public static void main(String arg[]) throws IOException{
- LCIOReader lcReader = new LCIOReader(new File(arg[0]));
- HitrateHistograms driver = new HitrateHistograms();
- driver.startOfData();
- for(int i=0;i<1000;i++){
- EventHeader event = lcReader.read();
+ for(String name : hist2d.keySet()){
+ IHistogram2D[] hists = hist2d.get(name);
+ System.out.println(name);
+ for(int i = 0; i< hists.length; i++){
+ double max = hists[i].maxBinHeight();
+ double total = hists[i].sumAllBinHeights();
+ double ratioPercent = 100.*max/(double)total;
+ System.out.printf(" layer %d: %.2f %% of hits were in the most populated bin\n",i, ratioPercent);
+ }
+ }
+ }
+ public static void main(String arg[]) throws IOException{
+ LCIOReader lcReader = new LCIOReader(new File(arg[0]));
+ HitrateHistograms driver = new HitrateHistograms();
+ driver.startOfData();
+ for(int i=0;i<1000;i++){
+ EventHeader event = lcReader.read();
- if(event == null)
- break;
- driver.process(event);
+ if(event == null)
+ break;
+ driver.process(event);
- }
- driver.endOfData();
- }
+ }
+ driver.endOfData();
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/PulserFilter.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/PulserFilter.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/PulserFilter.java Wed Apr 27 11:11:32 2016
@@ -25,65 +25,65 @@
public class PulserFilter extends Driver{
- public void process(EventHeader event) {
+ public void process(EventHeader event) {
- // only keep pulser triggers:
- if (!event.hasCollection(GenericObject.class,"TriggerBank"))
- throw new Driver.NextEventException();
- boolean isPulser=false;
- for (GenericObject gob : event.get(GenericObject.class,"TriggerBank"))
- {
- if (!(AbstractIntData.getTag(gob) == TIData.BANK_TAG)) continue;
- TIData tid = new TIData(gob);
- if (tid.isPulserTrigger())
- {
- isPulser=true;
- break;
- }
- }
+ // only keep pulser triggers:
+ if (!event.hasCollection(GenericObject.class,"TriggerBank"))
+ throw new Driver.NextEventException();
+ boolean isPulser=false;
+ for (GenericObject gob : event.get(GenericObject.class,"TriggerBank"))
+ {
+ if (!(AbstractIntData.getTag(gob) == TIData.BANK_TAG)) continue;
+ TIData tid = new TIData(gob);
+ if (tid.isPulserTrigger())
+ {
+ isPulser=true;
+ break;
+ }
+ }
- // don't drop any events with EPICS data or scalers data
- // (could also do this via event tag=31)
- final EpicsData edata = EpicsData.read(event);
- if (edata != null) return;
- ScalerData sdata = ScalerData.read(event);
- if(sdata != null) return;
+ // don't drop any events with EPICS data or scalers data
+ // (could also do this via event tag=31)
+ final EpicsData edata = EpicsData.read(event);
+ if (edata != null) return;
+ ScalerData sdata = ScalerData.read(event);
+ if(sdata != null) return;
- if (!isPulser) throw new Driver.NextEventException();
+ if (!isPulser) throw new Driver.NextEventException();
- }
+ }
- public static void main(String arg[]) throws IOException{
- ConditionsDriver hack = new ConditionsDriver();
- hack.setDetectorName("HPS-EngRun2015-Nominal-v1");
- hack.setFreeze(true);
- hack.setRunNumber(Integer.parseInt(arg[2]));
- hack.initialize();
- PulserFilter pf = new PulserFilter();
- LCIOWriter writer = new LCIOWriter(arg[1]);
- File file = new File(arg[0]);
- LCIOReader reader = new LCIOReader(file);
- System.out.println(file.getPath());
+ public static void main(String arg[]) throws IOException{
+ ConditionsDriver hack = new ConditionsDriver();
+ hack.setDetectorName("HPS-EngRun2015-Nominal-v1");
+ hack.setFreeze(true);
+ hack.setRunNumber(Integer.parseInt(arg[2]));
+ hack.initialize();
+ PulserFilter pf = new PulserFilter();
+ LCIOWriter writer = new LCIOWriter(arg[1]);
+ File file = new File(arg[0]);
+ LCIOReader reader = new LCIOReader(file);
+ System.out.println(file.getPath());
- try{
- while(true){
- try{
- EventHeader eh = reader.read();
- if(eh.getEventNumber() %100 == 0)
- System.out.println(eh.getEventNumber());
- pf.process(eh);
- writer.write(eh);
- }catch(Driver.NextEventException e){
+ try{
+ while(true){
+ try{
+ EventHeader eh = reader.read();
+ if(eh.getEventNumber() %100 == 0)
+ System.out.println(eh.getEventNumber());
+ pf.process(eh);
+ writer.write(eh);
+ }catch(Driver.NextEventException e){
- }
- }
- }catch(IOException e){
- e.printStackTrace();
- reader.close();
- }
+ }
+ }
+ }catch(IOException e){
+ e.printStackTrace();
+ reader.close();
+ }
- writer.close();
- }
+ writer.close();
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/StyleUtil.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/StyleUtil.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/StyleUtil.java Wed Apr 27 11:11:32 2016
@@ -1,56 +1,195 @@
package org.hps.users.spaul;
+import java.awt.Component;
+import java.awt.Graphics2D;
+import java.awt.image.BufferedImage;
+import java.io.File;
import java.util.Arrays;
+import java.util.Random;
-import hep.aida.IAnalysisFactory;
-import hep.aida.IFunction;
-import hep.aida.IPlotterRegion;
+import javax.imageio.ImageIO;
+
+import hep.aida.*;
+import hep.aida.ref.plotter.PlotterUtilities;
public class StyleUtil {
-
- public static void stylize(IPlotterRegion r, String title, String lx, String ly, double xmin, double xmax, double ymin, double ymax){
- r.setTitle(title);
- stylize(r, lx, ly);
- r.setXLimits(xmin, xmax);
- r.setYLimits(ymin, ymax);
- }
- public static void stylize(IPlotterRegion r, String title, String lx, String ly){
- r.setTitle(title);
- stylize(r, lx, ly);
- }
- public static void stylize(IPlotterRegion r, String lx, String ly){
-
- r.style().titleStyle().textStyle().setFontSize(22);
- r.style().xAxisStyle().setLabel(lx);
- r.style().xAxisStyle().labelStyle().setFontSize(16);
- r.style().xAxisStyle().tickLabelStyle().setFontSize(14);
- r.style().yAxisStyle().setLabel(ly);
- r.style().yAxisStyle().labelStyle().setFontSize(16);
- r.style().yAxisStyle().tickLabelStyle().setFontSize(14);
- r.style().statisticsBoxStyle().setParameter("backgroundColor", "white");
- r.style().statisticsBoxStyle().boxStyle().backgroundStyle().setColor("White");
- r.style().statisticsBoxStyle().boxStyle().backgroundStyle().setParameter("color", "white");
- r.style().statisticsBoxStyle().boxStyle().backgroundStyle().setOpacity(100);
- System.out.println(Arrays.toString(r.style().dataStyle().availableParameters()));
- r.style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
- r.style().dataStyle().fillStyle().setParameter("showZeroHeightBins", "false");
- //r.style().dataStyle().setParameter("showDataInStatisticsBox", "false");
- r.style().setParameter("hist2DStyle", "colorMap");
- //r.style().dataBoxStyle()
- }
- static void addHorizontalLine(IPlotterRegion r, double y, String name){
- IAnalysisFactory af = IAnalysisFactory.create();
- IFunction f = af.createFunctionFactory(af.createTreeFactory().create()).createFunctionByName(name, "p0");
- f.setParameter("p0", y);
- r.plot(f);
- }
- public static void addParabola(IPlotterRegion region, double p0,
- double p1, double p2, String string) {
- IAnalysisFactory af = IAnalysisFactory.create();
- IFunction f = af.createFunctionFactory(af.createTreeFactory().create()).createFunctionByName(string, "p2");
- f.setParameter("p0", p0);
- f.setParameter("p1", p1);
- f.setParameter("p2", p2);
- region.plot(f);
- }
+
+ public static void stylize(IPlotterRegion r, String title, String lx, String ly, double xmin, double xmax, double ymin, double ymax){
+ r.setTitle(title);
+ stylize(r, lx, ly);
+ r.setXLimits(xmin, xmax);
+ r.setYLimits(ymin, ymax);
+ }
+ public static void stylize(IPlotterRegion r, String title, String lx, String ly){
+ r.setTitle(title);
+ stylize(r, lx, ly);
+ }
+ public static void stylize(IPlotterRegion r, String lx, String ly){
+
+ r.style().titleStyle().textStyle().setFontSize(22);
+ r.style().xAxisStyle().setLabel(lx);
+ r.style().xAxisStyle().labelStyle().setFontSize(16);
+ r.style().xAxisStyle().tickLabelStyle().setFontSize(14);
+ r.style().yAxisStyle().setLabel(ly);
+ r.style().yAxisStyle().labelStyle().setFontSize(16);
+ r.style().yAxisStyle().tickLabelStyle().setFontSize(14);
+ // r.style().statisticsBoxStyle().set;
+ //debugPrint());
+ r.style().legendBoxStyle().textStyle().setFontSize(16);
+ r.style().statisticsBoxStyle().textStyle().setFontSize(16);
+
+ //r.style().dataStyle().showInLegendBox(false);
+
+ r.style().legendBoxStyle().boxStyle().foregroundStyle().setOpacity(1.0);
+ r.style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ r.style().dataStyle().fillStyle().setParameter("showZeroHeightBins", "false");
+ //debugPrint(r.style().dataStyle().availableParameters()));
+ //r.style().dataStyle().setParameter("showDataInStatisticsBox", "false");
+ r.style().setParameter("hist2DStyle", "colorMap");
+ //r.style().dataBoxStyle()
+ }
+ public static void addHorizontalLine(IPlotterRegion r, double y, String name){
+ IAnalysisFactory af = IAnalysisFactory.create();
+ IFunction f = af.createFunctionFactory(af.createTreeFactory().create()).createFunctionByName(name, "p0");
+ f.setParameter("p0", y);
+ r.plot(f);
+ }
+ public static void addParabola(IPlotterRegion region, double p0,
+ double p1, double p2, String string) {
+ IAnalysisFactory af = IAnalysisFactory.create();
+ IFunction f = af.createFunctionFactory(af.createTreeFactory().create()).createFunctionByName(string, "p2");
+ f.setParameter("p0", p0);
+ f.setParameter("p1", p1);
+ f.setParameter("p2", p2);
+ region.plot(f);
+ }
+ public static void noFillHistogramBars(IPlotterRegion region) {
+ region.style().dataStyle().setParameter("fillHistogramBars", "false");
+ region.style().dataStyle().fillStyle().setVisible(false);
+
+
+ region.style().dataStyle().lineStyle().setParameter("colorRotateMethod", "regionOverlayIndex");
+ region.style().dataStyle().lineStyle().setParameter("colorRotate", "black, red, green, blue");
+ region.style().dataStyle().lineStyle().setParameter("thickness", "3");
+ region.style().dataStyle().outlineStyle().setParameter("colorRotateMethod", "regionOverlayIndex");
+ //debug = true;
+ debugPrint(region.style().dataStyle().outlineStyle().availableParameters());
+ region.style().dataStyle().outlineStyle().setParameter("colorRotate", "black, red, green, blue");
+ region.style().dataStyle().outlineStyle().setParameter("thickness", "3");
+ region.style().dataStyle().errorBarStyle().setVisible(false);
+ debugPrint(region.style().dataStyle().lineStyle().availableParameterOptions("colorRotateMethod"));
+ }
+ public static void setSize(IPlotter p, int width, int height){
+ p.setParameter("plotterWidth", width +"");
+ p.setParameter("plotterHeight", height +"");
+ }
+
+ public static void setLog(IPlotterRegion r){
+
+ r.style().yAxisStyle().setParameter("scale", "log");
+ r.style().gridStyle().setUnits(100);
+ debugPrint(r.style().gridStyle().availableParameters());
+
+ }
+ static boolean debug = false;
+ static void debugPrint(String[] stuff){
+ if(debug){
+ System.out.println(Arrays.toString(stuff));
+ }
+ }
+ public static void main(String arg[]){
+ IAnalysisFactory af = IAnalysisFactory.create();
+ IHistogramFactory hf = af.createHistogramFactory(af.createTreeFactory().create());
+
+ IPlotter p = af.createPlotterFactory().create();
+ debugPrint(p.availableParameters());
+ p.createRegions(1, 2);
+ IHistogram1D h1 = hf.createHistogram1D("blah", 100, -5, 5);
+ IHistogram1D h2 = hf.createHistogram1D("bleh", 100, -5, 5);
+ Random random = new Random();
+ for(int i = 0; i< 100000; i++){
+ h1.fill(random.nextGaussian());
+ h2.fill(random.nextGaussian()*2);
+ }
+ hideLegendAndStats(p.region(1));
+ noFillHistogramBars(p.region(0));
+ stylize(p.region(0), "title", "x axis label", "y axis label");
+ stylize(p.region(1), "stuff", "x axis label", "y axis label");
+ p.region(0).plot(h1);
+ p.region(0).plot(h2);
+
+
+ IHistogram2D h3 = hf.createHistogram2D("blah", 100, -5, 5, 100, -5,5);
+
+ for(int i = 0; i< 100000; i++){
+ h3.fill(random.nextGaussian(), random.nextGaussian());
+ }
+
+ p.region(1).plot(h3);
+
+
+
+ p.show();
+
+ p = af.createPlotterFactory().create();
+ debugPrint(p.availableParameters());
+ p.createRegions(1, 2);
+
+ p.region(0).plot(h1);
+ setLog(p.region(0));
+
+
+
+ p.show();
+ }
+ public static void hideLegendAndStats(IPlotterRegion r){
+ r.style().statisticsBoxStyle().setVisible(false);
+ r.style().legendBoxStyle().setVisible(false);
+ }
+ public static IPlotterStyle smoothCurveStyle(IPlotterFactory pf) {
+ IPlotterStyle style = pf.createPlotterStyle();
+ debugPrint(style.dataStyle().availableParameters());
+
+ style.dataStyle().markerStyle().setVisible(false);
+
+ return style;
+ }
+ public static void writeToFile(IPlotter plotter, String filename, String filetype){
+ //JFrame frame = new JFrame()
+ //if(plotter.)
+ //plotter.hide();
+ //plotter.show();
+ //PlotterUtilities.writeToFile(plotter, filename, filetype, null);
+ try {
+
+
+ //PlotterUtilities.writeToFile(plotter, filename, filetype, null);
+ Thread.sleep(1000);
+ Component c = PlotterUtilities.componentForPlotter(plotter);
+ int width = Integer.parseInt(plotter.parameterValue("plotterWidth"));
+ int height = Integer.parseInt(plotter.parameterValue("plotterHeight"));
+ if(width <= 0){
+ width = 300;
+ plotter.setParameter("plotterWidth", Integer.toString(width));
+ }
+ if(height <= 0){
+ height = 300;
+
+ plotter.setParameter("plotterHeight", Integer.toString(height));
+ }
+
+ c.setSize(width, height);
+ BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
+ Graphics2D graphics2D = image.createGraphics();
+ c.paint(graphics2D);
+ ImageIO.write(image,filetype, new File(filename));
+ Runtime.getRuntime().exec("open " + filename);
+ System.out.println("saved");
+
+ } catch (Exception e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/SumEverything.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/SumEverything.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/SumEverything.java Wed Apr 27 11:11:32 2016
@@ -2,6 +2,10 @@
import java.io.File;
import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+
+import org.lcsim.util.aida.AIDA;
import hep.aida.IAnalysisFactory;
import hep.aida.IHistogram1D;
@@ -15,75 +19,163 @@
// full of aida output files. Then add up all of the histograms in each sub folder,
// and put the sums in separate files in a folder called "sums"
public class SumEverything {
- public static void main(String arg[]) throws IllegalArgumentException, IOException{
- if(arg.length > 1){
- twoArg(arg[0], arg[1]);
- }
- else{
- oneArg(arg[0]);
- }
- }
- static void oneArg(String indir) throws IllegalArgumentException, IOException{
- File outdir = new File(indir + "/sums");
- outdir.mkdir();
- for(File subdirf : new File(indir).listFiles()){
- String subdir = subdirf.getAbsolutePath();
- if(subdir.matches(".*/sums/?"))
- continue;
- String split[] = subdir.split("/");
- String outfile = indir + "/sums/" + split[split.length-1] + ".aida";
- twoArg(subdir, outfile);
- }
- new File(indir + "/sums/total.aida").delete();
- twoArg(outdir.getAbsolutePath(), indir + "/sums/total.aida");
-
- }
+ public static void main(String arg[]) throws IllegalArgumentException, IOException{
+ if(arg.length == 2){
+ twoArg(arg[0], arg[1]);
+ }
+ else if(arg.length == 1){
+ oneArg(arg[0]);
+ }
+ else
+ polyArg(arg);
+ }
+ static void oneArg(final String indir) throws IllegalArgumentException, IOException{
+ File outdir = new File(indir + "/sums");
+ outdir.mkdir();
+ ArrayList<Thread> threads = new ArrayList<Thread>();
+ for(final File subdirf : new File(indir).listFiles()){
+ Thread t = new Thread(){
+ public void run(){
- static void twoArg(String indir, String out) throws IllegalArgumentException, IOException{
- IAnalysisFactory af = IAnalysisFactory.create();
- ITreeFactory tf = af.createTreeFactory();
- new File(out).delete();
- ITree tree0 = tf.create(out, "xml", false, true);
- IHistogramFactory hf = af.createHistogramFactory(tree0);
-
+ String subdir = subdirf.getAbsolutePath();
+ if(subdir.matches(".*/sums/?"))
+ return;
+ String split[] = subdir.split("/");
+ String outfile = indir + "/sums/" + split[split.length-1] + ".aida";
+ try {
+ twoArg(subdir, outfile);
+ } catch (IllegalArgumentException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ } catch (IOException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ }
+ };
+ threads.add(t);
+ t.start();
+ }
+ for(Thread t : threads){
+ try {
+ t.join();
+ } catch (InterruptedException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ }
+ new File(indir + "/sums/total.aida").delete();
+ twoArg(outdir.getAbsolutePath(), indir + "/sums/total.aida");
- int j = 0;
- for(File s : new File(indir).listFiles()){
- ITree tree = af.createTreeFactory().create(s.getAbsolutePath(),"xml");
+ }
-
- if(j == 0){
- String [] names = tree.listObjectNames();
- tree0.mount("/tmp", tree, "/");
- for(String name : names){
- Object o = tree.find(name);
- if(o instanceof IHistogram1D || o instanceof IHistogram2D)
- tree0.cp("/tmp/" + name, name);
- }
- tree0.unmount("/tmp");
- tree.close();
-
- }
- else{
- //tree.
- String [] names = tree.listObjectNames();
- tree0.mount("/tmp", tree, "/");
- for(String name : names){
- Object o = tree.find(name);
- if(o instanceof IHistogram1D)
- ((IHistogram1D)tree0.find(name)).add((IHistogram1D)o);
- if(o instanceof IHistogram2D)
- ((IHistogram2D)tree0.find(name)).add((IHistogram2D)o);
- }
- tree0.unmount("/tmp");
- tree.close();
- }
- tree.close();
- j++;
- System.out.println(j + " files have been read");
- }
- tree0.commit();
- }
+
+ static void twoArg(String indir, String out) throws IllegalArgumentException, IOException{
+
+ run(new File(indir).listFiles(), out);
+ }
+ static void run(File[] files, String out) throws IllegalArgumentException, IOException{
+
+ long timeStart = System.currentTimeMillis();
+ IAnalysisFactory af = IAnalysisFactory.create();
+ //AIDA.defaultInstance().
+ ITreeFactory tf = af.createTreeFactory();
+ new File(out).delete();
+ ITree outtree = tf.createTree(out, "xml", ITreeFactory.RECREATE);
+ IHistogramFactory hf = af.createHistogramFactory(outtree);
+ int j = 0;
+ String names[] = null;
+ for(File s : files){
+ System.gc();
+ if(!s.getAbsolutePath().endsWith("aida"))
+ continue;
+ try{
+
+ ITree tree = tf.createTree(s.getAbsolutePath(), "xml", ITreeFactory.READONLY);//.create(s.getAbsolutePath(),"xml");
+
+
+ if(j == 0){
+ names = tree.listObjectNames("/", true);
+ System.out.println(Arrays.toString(names));
+ //outtree.mount("/tmp", tree, "/");
+ for(String name : names){
+ if(name.endsWith("/")){
+ outtree.mkdirs(name);
+ continue;
+ }
+ Object o = tree.find(name);
+ if(o instanceof IHistogram1D)
+ hf.createCopy(name,(IHistogram1D)o);
+ if(o instanceof IHistogram2D)
+ hf.createCopy(name,(IHistogram2D)o);
+
+ }
+ //outtree.unmount("/tmp");
+ //tree.close();
+
+ }
+ else{
+ //tree.
+ //String [] names = tree.listObjectNames("/", true);
+ //outtree.mount("/tmp", tree, "/");
+ //System.out.println(Arrays.toString(names));
+ for(String name : names){
+ if(name.endsWith("/"))
+ continue;
+ Object o = null;
+ try{
+ o = tree.find(name);
+ } catch(IllegalArgumentException e){
+ System.err.println("couldn't find object called " + name + " in file " + s);
+ throw e;
+ }
+ if(o instanceof IHistogram1D){
+ if(((IHistogram1D)o).allEntries() != 0)
+ ((IHistogram1D)outtree.find(name)).add((IHistogram1D)o);
+ }
+ if(o instanceof IHistogram2D)
+ if(((IHistogram2D)o).allEntries() != 0)
+ ((IHistogram2D)outtree.find(name)).add((IHistogram2D)o);
+ }
+ //outtree.unmount("/tmp");
+ //tree.close();
+ }
+
+ tree.close();
+ j++;
+ System.out.println(j + " files have been read (" +(System.currentTimeMillis()-timeStart)/j + " ms per file)");
+
+ } catch(IllegalArgumentException e){
+ //print the filename
+ System.out.println("Exception happened at file " + s.getAbsolutePath());
+
+ e.printStackTrace();
+ }
+
+ }
+ outtree.commit();
+ System.out.println("summed file " + out +" commited. Total time = " + (System.currentTimeMillis()-timeStart)/1000 + " seconds");
+
+ }
+
+ static void polyArg(String[] arg) throws IllegalArgumentException, IOException{
+ ArrayList<File> files = new ArrayList<File>();
+ boolean nextIsOutput = false;
+ for(String a : arg){
+ if(a.equals("-o")){
+ nextIsOutput = true;
+ continue;
+ }
+ if(nextIsOutput){
+ run(files.toArray(new File[0]), a);
+ nextIsOutput = false;
+ files.clear();
+ continue;
+ }
+ files.add(new File(a));
+
+ }
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/BinGenerator.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/BinGenerator.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/BinGenerator.java Wed Apr 27 11:11:32 2016
@@ -5,76 +5,86 @@
import java.io.PrintWriter;
public class BinGenerator {
- public static void main(String arg[]) throws FileNotFoundException{
- int nBins = 20;
+ public static void main(String arg[]) throws FileNotFoundException{
+ int nBins = 32;
- PrintStream pw = new PrintStream("generatedbins.txt");
- pw.println(nBins);
- double[] thetaBins = getThetaBins(nBins);
- for(int i = 0; i< nBins; i++){
- double thetaMin = thetaBins[i];
- double thetaMax = thetaBins[i+1];
- double phiBounds[] = getPhiBounds(thetaMin, thetaMax);
- pw.printf("%d %.4f %.4f ", phiBounds.length/2, thetaMin, thetaMax);
- for(int j = 0; j< phiBounds.length; j++){
- pw.printf("%.4f ", phiBounds[j]);
- }
- pw.println();
- }
- ShowCustomBinning.main(new String[]{"generatedbins.txt"});
+ PrintStream pw = new PrintStream("generatedbins.txt");
+ pw.println(nBins);
+ double[] thetaBins = getThetaBins(nBins);
+ for(int i = 0; i< nBins; i++){
+ double thetaMin = thetaBins[i];
+ double thetaMax = thetaBins[i+1];
+ double phiBounds[] = getPhiBounds(thetaMin, thetaMax);
+ pw.printf("%d %.4f %.4f ", phiBounds.length/2, thetaMin, thetaMax);
+ for(int j = 0; j< phiBounds.length; j++){
+ pw.printf("%.4f ", phiBounds[j]);
+ }
+ pw.println();
+ }
+ ShowCustomBinning.main(new String[]{"generatedbins.txt"});
- }
+ }
- private static double[] getThetaBins(int nBins){
- /*double thetaMin = 0.035;
- double dTheta = .2/nBins;
- double[] bins = new double[nBins +1];
- for(int i = 0; i< nBins+1; i++){
- bins[i] = thetaMin+dTheta*i;
- }
- return bins; */
- double thetaMax = .145;
- double thetaMin = .035;
+ private static double[] getThetaBins(int nBins){
+ /*double thetaMin = 0.035;
+ double dTheta = .2/nBins;
+ double[] bins = new double[nBins +1];
+ for(int i = 0; i< nBins+1; i++){
+ bins[i] = thetaMin+dTheta*i;
+ }
+ return bins; */
+ double thetaMax = .200;
+ double thetaMin = .040;
- double[] bins = new double[nBins +1];
- double xMin = 1/(thetaMax*thetaMax);
- double xMax = 1/(thetaMin*thetaMin);
- for(int i = 0; i< nBins+1; i++){
- double x = xMax - i*(xMax-xMin)/nBins;
- bins[i] = Math.pow(x, -.5);
- }
- return bins;
- }
- private static double[] getPhiBounds(double thetaMin, double thetaMax){
- double phiBins[] = new double[6];
- double dphi = .01;
- int edgeNumber = 0;
+ double[] bins = new double[nBins +1];
+ for(int i = 0; i<nBins+1; i++){
+ bins[i] = thetaMin+i*(thetaMax-thetaMin)/nBins;
+ }
+ return bins;
+ /*double xMin = 1/(thetaMax*thetaMax);
+ double xMax = 1/(thetaMin*thetaMin);
+ for(int i = 0; i< nBins+1; i++){
+ double x = xMax - i*(xMax-xMin)/nBins;
+ bins[i] = Math.pow(x, -.5);
+ }
+ return bins;*/
+ }
+ private static double[] getPhiBounds(double thetaMin, double thetaMax){
+ double phiBins[] = new double[6];
+ double dphi = .01;
+ int edgeNumber = 0;
- boolean prevInRange = false;
- for(double phi = 0; phi< 3.14; phi+= dphi){
- boolean inRange = EcalUtil.fid_ECal_spherical(thetaMin, phi) && EcalUtil.fid_ECal_spherical(thetaMax, phi)
- && EcalUtil.fid_ECal_spherical(thetaMin, -phi) && EcalUtil.fid_ECal_spherical(thetaMax, -phi);
- if(inRange && !prevInRange)
- phiBins[edgeNumber++] = phi;
- if(prevInRange && !inRange)
- phiBins[edgeNumber++] = phi-dphi;
- prevInRange = inRange;
- }
- if(phiBins[2] == 0)
- return new double[]{phiBins[0], phiBins[1]};
- if(phiBins[4] == 0)
- return new double[]{phiBins[0], phiBins[1],phiBins[2], phiBins[3]};
-
- //3 segments: choose the largest two
- if(phiBins[4] != 0 && phiBins[1] - phiBins[0] > phiBins[3]-phiBins[2] && phiBins[5] - phiBins[4] > phiBins[3]-phiBins[2]){
- return new double[]{phiBins[0], phiBins[1],phiBins[4], phiBins[5]};
- }
- if(phiBins[4] != 0 && phiBins[3] - phiBins[2] > phiBins[1]-phiBins[0] && phiBins[5] - phiBins[4] > phiBins[1]-phiBins[0]){
- return new double[]{phiBins[2], phiBins[3],phiBins[4], phiBins[5]};
- }
- return new double[]{phiBins[0], phiBins[1],phiBins[2], phiBins[3]};
-
-
- }
+ boolean prevInRange = false;
+ for(double phi = 0; phi< 3.14; phi+= dphi){
+
+ // make the angular cuts on the tracks such that the particles that go into that cut
+ // are expected to be within 4 mm (~= 2 times the angular resolution of 1.5 mrad) of
+ // the ecal cuts.
+ double d = 4;
+
+ boolean inRange = EcalUtil.fid_ECal_spherical_more_strict(thetaMin, phi, d) && EcalUtil.fid_ECal_spherical_more_strict(thetaMax, phi, d)
+ && EcalUtil.fid_ECal_spherical_more_strict(thetaMin, -phi, d) && EcalUtil.fid_ECal_spherical_more_strict(thetaMax, -phi, d);
+ if(inRange && !prevInRange)
+ phiBins[edgeNumber++] = phi;
+ if(prevInRange && !inRange)
+ phiBins[edgeNumber++] = phi-dphi;
+ prevInRange = inRange;
+ }
+ if(phiBins[2] == 0)
+ return new double[]{phiBins[0], phiBins[1]};
+ if(phiBins[4] == 0)
+ return new double[]{phiBins[0], phiBins[1],phiBins[2], phiBins[3]};
+
+ //3 segments: choose the largest two
+ if(phiBins[4] != 0 && phiBins[1] - phiBins[0] > phiBins[3]-phiBins[2] && phiBins[5] - phiBins[4] > phiBins[3]-phiBins[2]){
+ return new double[]{phiBins[0], phiBins[1],phiBins[4], phiBins[5]};
+ }
+ if(phiBins[4] != 0 && phiBins[3] - phiBins[2] > phiBins[1]-phiBins[0] && phiBins[5] - phiBins[4] > phiBins[1]-phiBins[0]){
+ return new double[]{phiBins[2], phiBins[3],phiBins[4], phiBins[5]};
+ }
+ return new double[]{phiBins[0], phiBins[1],phiBins[2], phiBins[3]};
+
+
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/CustomBinning.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/CustomBinning.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/CustomBinning.java Wed Apr 27 11:11:32 2016
@@ -5,92 +5,76 @@
import java.util.Scanner;
public class CustomBinning {
- public CustomBinning(File f) throws FileNotFoundException{
- Scanner s = new Scanner(f);
+ public CustomBinning(File f) throws FileNotFoundException{
+ Scanner s = new Scanner(f);
- nTheta = s.nextInt(); //number of bins in theta;
- thetaMax = new double[nTheta];
- thetaMin = new double[nTheta];
-
- phiMax = new double[nTheta][];
- phiMin = new double[nTheta][];
- int i = 0;
- while(s.hasNext()){ //new row
- int nPhi = s.nextInt();
- thetaMin[i] = s.nextDouble();
- thetaMax[i] = s.nextDouble();
-
- phiMax[i] = new double[nPhi];
- phiMin[i] = new double[nPhi];
- for(int j = 0; j<nPhi; j++){
- phiMin[i][j] = s.nextDouble();
- phiMax[i][j] = s.nextDouble();
- }
- i++;
- }
- }
- double[][] phiMax;
- double[][] phiMin;
- public double thetaMax[], thetaMin[];
- public int nTheta;
+ nTheta = s.nextInt(); //number of bins in theta;
+ thetaMax = new double[nTheta];
+ thetaMin = new double[nTheta];
+
+ phiMax = new double[nTheta][];
+ phiMin = new double[nTheta][];
+ int i = 0;
+ while(s.hasNext()){ //new row
+ int nPhi = s.nextInt();
+ thetaMin[i] = s.nextDouble();
+ thetaMax[i] = s.nextDouble();
+
+ phiMax[i] = new double[nPhi];
+ phiMin[i] = new double[nPhi];
+ for(int j = 0; j<nPhi; j++){
+ phiMin[i][j] = s.nextDouble();
+ phiMax[i][j] = s.nextDouble();
+ }
+ i++;
+ }
+ }
+ double[][] phiMax;
+ double[][] phiMin;
+ public double thetaMax[], thetaMin[];
+ public int nTheta;
- double getSteradians(int binNumber){
- double t1 = thetaMin[binNumber];
- double t2 = thetaMax[binNumber];
- double dCos = Math.cos(t1)-Math.cos(t2);
- double dPhiTot = 0;
- for(int i = 0; i< phiMax[binNumber].length; i++){
- dPhiTot += phiMax[binNumber][i]-phiMin[binNumber][i];
- }
- return 2*dPhiTot*dCos; //factor of two because top and bottom
- }
- boolean inRange(double theta, double phi){
- phi = Math.abs(phi);
- /*int i =(int) Math.floor((theta-theta0)/deltaTheta);
- if(i>= nTheta || i<0)
- return false;*/
- if(theta > thetaMax[nTheta-1] || theta < thetaMin[0])
- return false;
- int i;
- boolean found = false;
- for(i = 0; i< nTheta; i++){
- if(theta > thetaMin[i] && theta < thetaMax[i]){
- found = true;
- break;
- }
- }
- if(!found)
- return false;
-
- for(int j = 0; j<phiMax[i].length; j++){
- if(phi>phiMin[i][j] && phi< phiMax[i][j])
- return true;
- }
- return false;
+ double getSteradians(int binNumber){
+ double t1 = thetaMin[binNumber];
+ double t2 = thetaMax[binNumber];
+ double dCos = Math.cos(t1)-Math.cos(t2);
+ double dPhiTot = 0;
+ for(int i = 0; i< phiMax[binNumber].length; i++){
+ dPhiTot += phiMax[binNumber][i]-phiMin[binNumber][i];
+ }
+ return 2*dPhiTot*dCos; //factor of two because top and bottom
+ }
+ boolean inRange(double theta, double phi){
+ phi = Math.abs(phi);
+ /*int i =(int) Math.floor((theta-theta0)/deltaTheta);
+ if(i>= nTheta || i<0)
+ return false;*/
+ if(theta > thetaMax[nTheta-1] || theta < thetaMin[0])
+ return false;
+ int i;
+ boolean found = false;
+ for(i = 0; i< nTheta; i++){
+ if(theta > thetaMin[i] && theta < thetaMax[i]){
+ found = true;
+ break;
+ }
+ }
+ if(!found)
+ return false;
+
+ for(int j = 0; j<phiMax[i].length; j++){
+ if(phi>phiMin[i][j] && phi< phiMax[i][j])
+ return true;
+ }
+ return false;
- }
- public double getTotSteradians() {
- double tot = 0;
- for(int i = 0; i<nTheta; i++){
- tot += getSteradians(i);
- }
- return tot;
- }
- /**
- * @param bin
- * @param a = 2E/M
- * @return the integral of 1/sin^4(th/2)*cos^2(th/2)/(1+a*sin^2(th/2)) times dPhi,
- * which appears in the integral of mott scattering.
- */
- public double mottIntegralFactor(double a, int bin){
- double dPhi = 0;
- for(int i = 0; i< phiMax[bin].length; i++)
- dPhi += 2*(phiMax[bin][0] - phiMin[bin][0]); //factor of 2 from top and bottom
-
- double csc2 = Math.pow(Math.sin(thetaMax[bin]/2), -2);
- double Imax = (-csc2+(1+a)*Math.log(a+2*csc2));
- csc2 = Math.pow(Math.sin(thetaMin[bin]/2), -2);
- double Imin = (-csc2+(1+a)*Math.log(a+2*csc2));
- return 2*dPhi*(Imax-Imin);
- }
+ }
+ public double getTotSteradians() {
+ double tot = 0;
+ for(int i = 0; i<nTheta; i++){
+ tot += getSteradians(i);
+ }
+ return tot;
+ }
+
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/DisplayHistograms.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/DisplayHistograms.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/DisplayHistograms.java Wed Apr 27 11:11:32 2016
@@ -3,7 +3,8 @@
import java.io.IOException;
public class DisplayHistograms {
- public static void main(String arg[]) throws IllegalArgumentException, IOException{
- MakeHistograms.main(new String[]{arg[0]});
- }
+ public static void main(String arg[]) throws IllegalArgumentException, IOException{
+ //System.out.println("dognabo");
+ MakeHistograms.main(new String[]{arg[0]});
+ }
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/EcalUtil.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/EcalUtil.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/EcalUtil.java Wed Apr 27 11:11:32 2016
@@ -12,153 +12,232 @@
import org.lcsim.event.Cluster;
public class EcalUtil {
- /**
- * Holly's algorithm.
- * @param x x position of cluster
- * @param y y position of cluster
- * @param e energy of cluster
- * @param pid type of particle (11 = electron; -1 = positron)
- * @return array of doubles
- * <br> [0] theta = atan(py/pz)
- * <br> [1] phi = atan(px/pz)
- */
- static double [] toHollysCoordinates(double x, double y, double e, int pid){
- if(pid == 11){
- return new double[]{0.00071 * y +0.000357, 0.00071*x + 0.00003055*e + 0.04572/e +0.0006196};
- } else if(pid == -11){
- return new double[]{0.00071 * y +0.000357, 0.00071*x - 0.0006465*e + 0.045757/e +0.003465};
- }
- return null;
- }
-
- static double getSteradians(double x, double y, double e,int pid, double dA){
- double thetaPhi[] = toHollysCoordinates(x, y, e, pid);
- return .00071*.00071*dA/Math.sqrt(thetaPhi[0]*thetaPhi[0]+ thetaPhi[1]*thetaPhi[1]+1);
- }
-
- static boolean isSeedEdge(Cluster c){
- CalorimeterHit seedhit = (CalorimeterHit)c.getCalorimeterHits().get(0);
- // seedhit
- int ix = seedhit.getIdentifierFieldValue("ix");
- int iy = seedhit.getIdentifierFieldValue("iy");
-
- //seedhit.get
- return isEdge(ix, iy);
- }
- static boolean isEdge(int ix, int iy){
- if(iy == 5 || iy == 1 || iy == -1 || iy == -5)
- return true;
- if(ix == -23 || ix == 23)
- return true;
- if((iy == 2 || iy == -2) && (ix >=-11 && ix <= -1))
- return true;
- return false;
- }
- public static boolean fid_ECal(Cluster c){
- return fid_ECal(c.getPosition()[0], c.getPosition()[1]);
- }
- public static boolean fid_ECal(double x, double y)
- {
- y = Math.abs(y);
-
- boolean in_fid = false;
- double x_edge_low = -262.74;
- double x_edge_high = 347.7;
- double y_edge_low = 33.54;
- double y_edge_high = 75.18;
-
- double x_gap_low = -106.66;
- double x_gap_high = 42.17;
- double y_gap_high = 47.18;
-
- y = Math.abs(y);
-
- if( x > x_edge_low && x < x_edge_high && y > y_edge_low && y < y_edge_high )
- {
- if( !(x > x_gap_low && x < x_gap_high && y > y_edge_low && y < y_gap_high) )
- {
- in_fid = true;
- }
- }
-
- return in_fid;
- }
- static double[] toSphericalFromBeam(double pxpz, double pypz){
- double x = pxpz, y = pypz, z = 1;
- double beamTilt = .03057;
- double xtemp = Math.cos(beamTilt)*x - Math.sin(beamTilt)*z;
- double ztemp = Math.cos(beamTilt)*z + Math.sin(beamTilt)*x;
- double ytemp = y;
-
- double theta = Math.atan(Math.hypot(xtemp, ytemp)/ ztemp);
- double phi = Math.atan2(ytemp, xtemp);
-
-
- return new double[]{theta, phi};
- }
- static Map<Integer, double[]> map = new HashMap();
- static void readMap() throws FileNotFoundException{
- Scanner s = new Scanner(new File("ecal_positions.txt"));
-
-
- while(s.hasNext()){
- int ix =s.nextInt();
- int iy = s.nextInt();
- double x = s.nextDouble();
- double y =s.nextDouble();
- map.put(ix+100*iy, new double[]{x, y});
-
- }
- s.close();
- }
- static double getArea(int ix, int iy){
- int ixp = ix+1;
- if(ixp == 0)
- ixp = 1;
- int ixm = ix-1;
- if(ixm == 0)
- ixm = -1;
- double[] plus = map.get(ixp+100*(iy+1));
- double[] minus = map.get(ixm+100*(iy-1));
- return (plus[0]-minus[0])*(plus[1]-minus[1])/4;
- }
-
- public static double[] getThetaPhiSpherical(double x,double y){
- double hcoord[] = toHollysCoordinates(x,y, 1.056, 11);
- return toSphericalFromBeam(Math.tan(hcoord[1]),Math.tan(hcoord[0]));
- }
- /*beam tilt*/
- static double tilt = .03057;
- //assuming FEE electron.
-
- public static double[] getXY(double theta, double phi){
-
- double ux = Math.cos(phi)*Math.sin(theta)*Math.cos(tilt)+Math.cos(theta)*Math.sin(tilt);
- double uy = Math.sin(phi)*Math.sin(theta);
- double uz = Math.cos(theta)*Math.cos(tilt)-Math.cos(phi)*Math.sin(theta)*Math.sin(tilt);
- double pxpz = ux/uz;
- double pypz = uy/uz;
- //holly's coordinates:
- double h1 = Math.atan(pypz);
- double h2 = Math.atan(pxpz);
- //0.00071 * y +0.000357,
- //0.00071*x + 0.00003055*e + 0.04572/e +0.0006196
- double y = (h1-0.000357)/0.00071;
- double e = 1.056;
- double x = (h2 - 0.00003055*e - 0.04572/e -0.0006196)/0.00071;
- return new double[]{x,y};
- }
- public static boolean fid_ECal_spherical(double theta, double phi){
- double[] xy = getXY(theta, phi);
- double x = xy[0];
- double y = xy[1];
- return fid_ECal(x, y);
- }
- public static void main(String arg[]){
- double x = 0, y = 0;
- double sp[] = getThetaPhiSpherical(x,y);
- System.out.println(Arrays.toString(getXY(sp[0], sp[1])));
- }
+
+ static int[] getCrystalIndex(Cluster c){
+ if(map == null){
+ try{
+ readMap();
+ }catch(Exception e){
+ e.printStackTrace();
+ }
+ }
+ c.getPosition();
+ int bestix = 0, bestiy = 0;
+ double bestdist = 100000;
+
+ double cx = c.getPosition()[0];
+ double cy = c.getPosition()[1];
+ for(int ix = -23; ix<= 23; ix++){
+ if(!map.containsKey(200+ix))
+ continue;
+ double x = map.get(200+ix)[0];
+ if(Math.abs(x-cx)<bestdist){
+ bestdist = Math.abs(x-cx);
+ bestix = ix;
+ }
+ }
+
+ bestdist = 100000;
+ for(int iy = -5; iy<= 5; iy++){
+ if(!map.containsKey(100*iy + bestix))
+ continue;
+ double y = map.get(100*iy + bestix)[1];
+ if(Math.abs(y-cy)<bestdist){
+ bestdist = Math.abs(y-cy);
+ bestiy = iy;
+ }
+ }
+ return new int[]{bestix, bestiy};
+ }
+
+
+ /**
+ * Holly's algorithm.
+ * @param x x position of cluster
+ * @param y y position of cluster
+ * @param e energy of cluster
+ * @param pid type of particle (11 = electron; -1 = positron)
+ * @return array of doubles
+ * <br> [0] theta = atan(py/pz)
+ * <br> [1] phi = atan(px/pz)
+ */
+ static double [] toHollysCoordinates(double x, double y, double e, int pid){
+ if(pid == 11){
+ return new double[]{0.00071 * y +0.000357, 0.00071*x + 0.00003055*e + 0.04572/e +0.0006196};
+ } else if(pid == -11){
+ return new double[]{0.00071 * y +0.000357, 0.00071*x - 0.0006465*e + 0.045757/e +0.003465};
+ }
+ return null;
+ }
+
+ static double getSteradians(double x, double y, double e,int pid, double dA){
+ double thetaPhi[] = toHollysCoordinates(x, y, e, pid);
+ return .00071*.00071*dA/Math.sqrt(thetaPhi[0]*thetaPhi[0]+ thetaPhi[1]*thetaPhi[1]+1);
+ }
+
+ static boolean isSeedEdge(Cluster c){
+ CalorimeterHit seedhit = (CalorimeterHit)c.getCalorimeterHits().get(0);
+ // seedhit
+ int ix = seedhit.getIdentifierFieldValue("ix");
+ int iy = seedhit.getIdentifierFieldValue("iy");
+
+ //seedhit.get
+ return isEdge(ix, iy);
+ }
+ static boolean isEdge(int ix, int iy){
+ if(iy == 5 || iy == 1 || iy == -1 || iy == -5)
+ return true;
+ if(ix == -23 || ix == 23)
+ return true;
+ if((iy == 2 || iy == -2) && (ix >=-11 && ix <= -1))
+ return true;
+ return false;
+ }
+ public static boolean fid_ECal(Cluster c){
+ return fid_ECal(c.getPosition()[0], c.getPosition()[1]);
+ }
+ public static boolean fid_ECal(double x, double y)
+ {
+ y = Math.abs(y);
+
+ boolean in_fid = false;
+ double x_edge_low = -262.74;
+ double x_edge_high = 347.7;
+ double y_edge_low = 33.54;
+ double y_edge_high = 75.18;
+
+ double x_gap_low = -106.66;
+ double x_gap_high = 42.17;
+ double y_gap_high = 47.18;
+
+ y = Math.abs(y);
+
+ if( x > x_edge_low && x < x_edge_high && y > y_edge_low && y < y_edge_high )
+ {
+ if( !(x > x_gap_low && x < x_gap_high && y > y_edge_low && y < y_gap_high) )
+ {
+ in_fid = true;
+ }
+ }
+
+ return in_fid;
+ }
+ /**
+ *
+ * @param x
+ * @param y
+ * @param d the additional distance from the edge of the ecal in addition
+ * to what is required by fid_Cal(double, double)
+ * @return
+ */
+ public static boolean fid_ecal_more_strict(double x, double y, double d){
+ y = Math.abs(y);
+
+ boolean in_fid = false;
+
+ double x_edge_low = -262.74 + d;
+ double x_edge_high = 347.7 - d;
+ double y_edge_low = 33.54 + d;
+ double y_edge_high = 75.18 - d;
+
+ double x_gap_low = -106.66 - d;
+ double x_gap_high = 42.17 + d;
+ double y_gap_high = 47.18 + d;
+
+ y = Math.abs(y);
+
+ if( x > x_edge_low && x < x_edge_high && y > y_edge_low && y < y_edge_high )
+ {
+ if( !(x > x_gap_low && x < x_gap_high && y > y_edge_low && y < y_gap_high) )
+ {
+ in_fid = true;
+ }
+ }
+
+ return in_fid;
+ }
+ static double[] toSphericalFromBeam(double pxpz, double pypz){
+ double x = pxpz, y = pypz, z = 1;
+ double beamTilt = .03057;
+ double xtemp = Math.cos(beamTilt)*x - Math.sin(beamTilt)*z;
+ double ztemp = Math.cos(beamTilt)*z + Math.sin(beamTilt)*x;
+ double ytemp = y;
+
+ double theta = Math.atan(Math.hypot(xtemp, ytemp)/ ztemp);
+ double phi = Math.atan2(ytemp, xtemp);
+
+
+ return new double[]{theta, phi};
+ }
+ static Map<Integer, double[]> map ;
+ static void readMap() throws FileNotFoundException{
+ Scanner s = new Scanner(new File(System.getenv("HOME") + "/ecal_positions.txt"));
+ map = new HashMap();
+
+ while(s.hasNext()){
+ int ix =s.nextInt();
+ int iy = s.nextInt();
+ double x = s.nextDouble();
+ double y =s.nextDouble();
+ map.put(ix+100*iy, new double[]{x, y});
+
+ }
+ s.close();
+ }
+ static double getArea(int ix, int iy){
+ int ixp = ix+1;
+ if(ixp == 0)
+ ixp = 1;
+ int ixm = ix-1;
+ if(ixm == 0)
+ ixm = -1;
+ double[] plus = map.get(ixp+100*(iy+1));
+ double[] minus = map.get(ixm+100*(iy-1));
+ return (plus[0]-minus[0])*(plus[1]-minus[1])/4;
+ }
+
+ public static double[] getThetaPhiSpherical(double x,double y){
+ double hcoord[] = toHollysCoordinates(x,y, 1.056, 11);
+ return toSphericalFromBeam(Math.tan(hcoord[1]),Math.tan(hcoord[0]));
+ }
+ /*beam tilt*/
+ static double tilt = .03057;
+ //assuming FEE electron.
+
+ public static double[] getXY(double theta, double phi){
+
+ double ux = Math.cos(phi)*Math.sin(theta)*Math.cos(tilt)+Math.cos(theta)*Math.sin(tilt);
+ double uy = Math.sin(phi)*Math.sin(theta);
+ double uz = Math.cos(theta)*Math.cos(tilt)-Math.cos(phi)*Math.sin(theta)*Math.sin(tilt);
+ double pxpz = ux/uz;
+ double pypz = uy/uz;
+ //holly's coordinates:
+ double h1 = Math.atan(pypz);
+ double h2 = Math.atan(pxpz);
+ //0.00071 * y +0.000357,
+ //0.00071*x + 0.00003055*e + 0.04572/e +0.0006196
+ double y = (h1-0.000357)/0.00071;
+ double e = 1.056;
+ double x = (h2 - 0.00003055*e - 0.04572/e -0.0006196)/0.00071;
+ return new double[]{x,y};
+ }
+ public static boolean fid_ECal_spherical(double theta, double phi){
+ double[] xy = getXY(theta, phi);
+ double x = xy[0];
+ double y = xy[1];
+ return fid_ECal(x, y);
+ }
+ public static boolean fid_ECal_spherical_more_strict(double theta, double phi, double d){
+ double[] xy = getXY(theta, phi);
+ double x = xy[0];
+ double y = xy[1];
+ return fid_ecal_more_strict(x, y, d);
+ }
+ public static void main(String arg[]){
+ double x = 0, y = 0;
+ double sp[] = getThetaPhiSpherical(x,y);
+ System.out.println(Arrays.toString(getXY(sp[0], sp[1])));
+ }
}
-
-
+
+
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/MakeHistograms.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/MakeHistograms.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/MakeHistograms.java Wed Apr 27 11:11:32 2016
@@ -2,12 +2,15 @@
import java.io.File;
import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import org.hps.conditions.ConditionsDriver;
+import org.hps.recon.tracking.TrackType;
import org.hps.record.triggerbank.AbstractIntData;
import org.hps.record.triggerbank.TIData;
import org.lcsim.event.CalorimeterHit;
@@ -15,9 +18,12 @@
import org.lcsim.event.EventHeader;
import org.lcsim.event.GenericObject;
import org.lcsim.event.ReconstructedParticle;
+import org.lcsim.event.Track;
+import org.lcsim.event.TrackState;
import org.lcsim.lcio.LCIOReader;
import hep.aida.IAnalysisFactory;
+import hep.aida.IDataPointSetFactory;
import hep.aida.IHistogram1D;
import hep.aida.IHistogram2D;
import hep.aida.IHistogramFactory;
@@ -29,232 +35,448 @@
import org.hps.users.spaul.StyleUtil;
public class MakeHistograms {
- static boolean display = false;
- static CustomBinning cb;
- public static void main(String arg[]) throws IllegalArgumentException, IOException{
- if(arg.length == 1){
- File file = new File(arg[0]);
- String path = arg[0];
- if(file.isDirectory()){
- org.hps.users.spaul.SumEverything.main(new String[]{path, "temp.aida"});
- path = "temp.aida";
- }
- IAnalysisFactory af = IAnalysisFactory.create();
- ITreeFactory tf = af.createTreeFactory();
- ITree tree0 = tf.create(path, "xml");
- extractHistograms(tree0);
- setupPlotter(af);
-
- } else{
-
- String input = arg[0];
- String output = arg[1];
- cb = new CustomBinning(new File(arg[2]));
- if(arg.length == 5)
- display = true;
- IAnalysisFactory af = IAnalysisFactory.create();
- ITree tree = af.createTreeFactory().create(output,"xml",false,true);
- IHistogramFactory hf = af.createHistogramFactory(tree);
- setupHistograms(hf);
- if(display){
- setupPlotter(af);
- }
- ConditionsDriver hack = new ConditionsDriver();
- //hack.setXmlConfigResource("/u/group/hps/hps_soft/detector-data/detectors/HPS-EngRun2015-Nominal-v3");
- hack.setDetectorName("HPS-EngRun2015-Nominal-v3");
- hack.setFreeze(true);
- hack.setRunNumber(Integer.parseInt(arg[3]));
- hack.initialize();
- LCIOReader reader = new LCIOReader(new File(input));
- //reader.open(input);
- //reader.
- EventHeader event = reader.read();
- int nEvents = 0;
- try{
- outer : while(event != null){
- processEvent(event);
-
- //System.out.println(Q2);
-
- event = reader.read();
- }
- } catch (Exception e){
- e.printStackTrace();
- }
- tree.commit();
- tree.close();
- }
-
- }
-
- static IHistogram2D h1, h2, h2a, h2b, h2c;
- static IHistogram2D h4,h4a;
- static IHistogram1D h3, h3a;
- static IHistogram1D h5, h6;
-
- private static void extractHistograms(ITree tree0) {
- h2 = (IHistogram2D) tree0.find("theta vs phi");
- h2a = (IHistogram2D) tree0.find("theta vs phi cut");
- h2b = (IHistogram2D) tree0.find("theta vs phi cut alt");
- h2c = (IHistogram2D) tree0.find("theta vs phi alt");
-
- h3 = (IHistogram1D) tree0.find("theta");
- h4 = (IHistogram2D) tree0.find("px\\/pz vs py\\/pz");
- h4a = (IHistogram2D) tree0.find("px\\/pz vs py\\/pz cut");
- h5 = (IHistogram1D) tree0.find("energy");
-
- }
- static void setupHistograms(IHistogramFactory hf){
- //h1 = hf.createHistogram2D("px\\/pz vs py\\/pz", 160, -.16, .24, 160, -.2, .2);
-
-
-
-
- h2 = hf.createHistogram2D("theta vs phi", 300, 0, .3, 314, -3.14, 3.14);
-
- h2a = hf.createHistogram2D("theta vs phi cut", 300, 0, .3, 314, -3.14, 3.14);
-
- double thetaBins[] = new double[cb.nTheta+1];
- for(int i = 0; i<cb.nTheta; i++){
- thetaBins[i] = cb.thetaMin[i];
- }
-
- thetaBins[thetaBins.length-1] = cb.thetaMax[cb.nTheta-1];
-
- double phiBins[] = new double[315];
- for(int i = 0; i<315; i++){
- phiBins[i] = i/50.-3.14; //every 10 mrad;
- }
-
- //identical to h2a, except different binning
- h2b = hf.createHistogram2D("theta vs phi cut alt", "theta vs phi cut alt", thetaBins, phiBins);
- h2c = hf.createHistogram2D("theta vs phi alt", "theta vs phi alt", thetaBins, phiBins);
-
- h3 = hf.createHistogram1D("theta", "theta", thetaBins);
-
- h4 = hf.createHistogram2D("px\\/pz vs py\\/pz", 160, -.16, .24, 160, -.2, .2);
- h4a = hf.createHistogram2D("px\\/pz vs py\\/pz cut", 160, -.16, .24, 160, -.2, .2);
-
- h5 = hf.createHistogram1D("energy", 75, 0, 1.5);
- }
- static void setupPlotter(IAnalysisFactory af){
- IPlotterFactory pf = af.createPlotterFactory();
- IPlotter p = pf.create();
- p.createRegions(2,2);
- p.region(0).plot(h2);
-
- StyleUtil.stylize(p.region(0), "theta", "phi");
- p.region(1).plot(h2a);
- StyleUtil.stylize(p.region(1), "theta", "phi");
- p.region(2).plot(h3);
- StyleUtil.stylize(p.region(2), "theta", "# of particles");
- p.region(3).plot(h5);
- StyleUtil.stylize(p.region(3), "energy", "# of particles");
-
- p.show();
- //new window for the next plot
- IPlotter p2 = pf.create();
- p2.region(0).plot(h2b);
- StyleUtil.stylize(p2.region(0), "theta", "phi");
-
- p2.show();
-
- //new window for the next plot
- IPlotter p3 = pf.create();
- p3.region(0).plot(h2c);
- StyleUtil.stylize(p3.region(0), "theta", "phi");
-
- p3.show();
-
- //new window for the next plot
- IPlotter p4 = pf.create();
- p4.region(0).plot(h4);
- StyleUtil.stylize(p4.region(0), "px/pz", "py/pz");
-
- p4.show();
-
- //new window for the next plot
- IPlotter p5 = pf.create();
- p5.region(0).plot(h4a);
- StyleUtil.stylize(p5.region(0), "px/pz", "py/pz");
-
- p5.show();
- }
- private static void processEvent(EventHeader event) {
- if(event.getEventNumber() %1000 == 0)
- System.out.println("event number " + event.getEventNumber());
-
- for (GenericObject gob : event.get(GenericObject.class,"TriggerBank"))
- {
- if (!(AbstractIntData.getTag(gob) == TIData.BANK_TAG)) continue;
- TIData tid = new TIData(gob);
- if (!tid.isSingle1Trigger())
- {
- return;
- }
- }
- List<ReconstructedParticle> particles = event.get(ReconstructedParticle.class, "FinalStateParticles");
-
- for(ReconstructedParticle p : particles){
-
- boolean isGood = addParticle(p);
-
-
- }
- }
-
- static double eMin = .8;
- static double eMax = 1.2;
- static double beamEnergy = 1.057;
-
- static double beamTilt = .03057;
- static double maxChi2 = 50;
- static boolean addParticle(ReconstructedParticle part){
-
- if(part.getTracks().size() == 0)
- return false;
- if(part.getTracks().get(0).getChi2()>maxChi2){
- return false;
- }
- if(part.getClusters().size() == 0)
- return false;
- Cluster c = part.getClusters().get(0);
- double time = c.getCalorimeterHits().get(0).getTime();
- if(EcalUtil.fid_ECal(c)){
- if(c.getCalorimeterHits().size() < 3)
- return false;
- if(time>40 && time <48)
- h5.fill(c.getEnergy());
- if(c.getEnergy() > eMin && c.getEnergy() < eMax && (time >40 && time < 48)) {
-
- Hep3Vector p = part.getMomentum();
-
- double px = p.x(), pz = p.z();
- double pxtilt = px*Math.cos(beamTilt)-pz*Math.sin(beamTilt);
- double py = p.y();
- double pztilt = pz*Math.cos(beamTilt)+px*Math.sin(beamTilt);
-
- double theta = Math.atan(Math.hypot(pxtilt, py)/pztilt);
- double phi =Math.atan2(py, pxtilt);
-
- h2.fill(theta, phi);
- h2c.fill(theta, phi);
-
- h4.fill(px/pz, py/pz);
-
- if(cb.inRange(theta, phi)){
- h2a.fill(theta, phi);
- h2b.fill(theta, phi);
- h3.fill(theta);
- h4a.fill(px/pz, py/pz);
- }
-
-
- return true;
- }
-
- }
- return false;
- }
-
+
+
+
+ static boolean display = false;
+ static CustomBinning cb;
+ public static void main(String arg[]) throws IllegalArgumentException, IOException{
+ if(arg.length == 1){
+ File file = new File(arg[0]);
+ String path = arg[0];
+ if(file.isDirectory()){
+ org.hps.users.spaul.SumEverything.main(new String[]{path, "temp.aida"});
+ path = "temp.aida";
+ }
+ IAnalysisFactory af = IAnalysisFactory.create();
+ ITreeFactory tf = af.createTreeFactory();
+ ITree tree0 = tf.create(path, "xml");
+ extractHistograms(tree0);
+ setupPlotter(af);
+
+ } else{
+
+ String input = arg[0];
+ String output = arg[1];
+ cb = new CustomBinning(new File(arg[2]));
+ if(arg[arg.length -1].equals("display"))
+ display = true;
+ IAnalysisFactory af = IAnalysisFactory.create();
+ ITree tree = af.createTreeFactory().create(output,"xml",false,true);
+ IHistogramFactory hf = af.createHistogramFactory(tree);
+ setupHistograms(hf);
+ if(display){
+ setupPlotter(af);
+ }
+ ConditionsDriver hack = new ConditionsDriver();
+ //hack.setXmlConfigResource("/u/group/hps/hps_soft/detector-data/detectors/HPS-EngRun2015-Nominal-v3");
+ hack.setDetectorName("HPS-EngRun2015-Nominal-v3");
+ hack.setFreeze(true);
+ hack.setRunNumber(Integer.parseInt(arg[3]));
+
+ hack.initialize();
+ beamTiltY = Double.parseDouble(arg[4]);
+ beamTiltX = Double.parseDouble(arg[5]);
+ LCIOReader reader = new LCIOReader(new File(input));
+ //reader.open(input);
+ //reader.
+ EventHeader event = reader.read();
+ int nEvents = 0;
+ try{
+ outer : while(event != null){
+ processEvent(event);
+
+ //System.out.println(Q2);
+
+ event = reader.read();
+ }
+ } catch (Exception e){
+ e.printStackTrace();
+ }
+ tree.commit();
+ tree.close();
+ }
+
+ }
+
+ static IHistogram2D h1, h2, h2a, h2b, h2c;
+ static IHistogram2D h4,h4a;
+ static IHistogram1D h3, /*h3a,*/ h3_t, h3_b;
+ static IHistogram1D h5, h5a;
+ //static IHistogram2D h6, h6a;
+ static IHistogram1D h7, h7a;
+ static IHistogram1D h8;
+ static IHistogram1D h9_t, h9_b;
+ static IHistogram1D h10_t, h10_b;
+ private static IHistogram1D h4y;
+
+ private static void extractHistograms(ITree tree0) {
+ h1 = (IHistogram2D) tree0.find("theta vs energy");
+
+ h2 = (IHistogram2D) tree0.find("theta vs phi");
+ h2a = (IHistogram2D) tree0.find("theta vs phi cut");
+ h2b = (IHistogram2D) tree0.find("theta vs phi cut alt");
+ h2c = (IHistogram2D) tree0.find("theta vs phi alt");
+
+ h3 = (IHistogram1D) tree0.find("theta");
+ //h3a = (IHistogram1D) tree0.find("theta isolated ");
+ h3_t = (IHistogram1D) tree0.find("theta top");
+ h3_b = (IHistogram1D) tree0.find("theta bottom");
+
+ h4 = (IHistogram2D) tree0.find("px\\/pz vs py\\/pz");
+ h4a = (IHistogram2D) tree0.find("px\\/pz vs py\\/pz cut");
+ System.out.println(h4a.xAxis().bins());
+ h5 = (IHistogram1D) tree0.find("energy top");
+ h5 = (IHistogram1D) tree0.find("energy bottom");
+
+// h6 = (IHistogram2D) tree0.find("cluster");
+// h6a = (IHistogram2D) tree0.find("cluster matched");
+ h7 = (IHistogram1D) tree0.find("y top");
+ h7a = (IHistogram1D) tree0.find("y bottom");
+ h8 = (IHistogram1D) tree0.find("seed energy");
+
+
+ h9_t = (IHistogram1D) tree0.find("pz top");
+ h9_b = (IHistogram1D) tree0.find("pz bottom");
+
+
+ h10_t = (IHistogram1D) tree0.find("clustsize top");
+ h10_b = (IHistogram1D) tree0.find("clustsize bottom");
+
+ }
+ static void setupHistograms(IHistogramFactory hf){
+ //h1 = hf.createHistogram2D("px\\/pz vs py\\/pz", 160, -.16, .24, 160, -.2, .2);
+
+
+
+
+ h2 = hf.createHistogram2D("theta vs phi", 300, 0, .3, 314, -3.14, 3.14);
+
+ h2a = hf.createHistogram2D("theta vs phi cut", 300, 0, .3, 314, -3.14, 3.14);
+
+ double thetaBins[] = new double[cb.nTheta+1];
+ for(int i = 0; i<cb.nTheta; i++){
+ thetaBins[i] = cb.thetaMin[i];
+ }
+
+ thetaBins[thetaBins.length-1] = cb.thetaMax[cb.nTheta-1];
+
+ double phiBins[] = new double[315];
+ for(int i = 0; i<315; i++){
+ phiBins[i] = i/50.-3.14; //every 10 mrad;
+ }
+
+ double eBins[] = new double[66];
+ for(int i = 0; i<66; i++){
+ eBins[i] = i/50.; //every 20 MeV up to 1300 MeV
+ }
+
+
+ h1 = hf.createHistogram2D("theta vs energy", "theta vs energy", thetaBins, eBins);
+
+
+ //identical to h2a, except different binning
+ h2b = hf.createHistogram2D("theta vs phi cut alt", "theta vs phi cut alt", thetaBins, phiBins);
+ h2c = hf.createHistogram2D("theta vs phi alt", "theta vs phi alt", thetaBins, phiBins);
+
+ h3 = hf.createHistogram1D("theta", "theta", thetaBins);
+// h3a = hf.createHistogram1D("theta isolated ", "theta isolated", thetaBins);
+
+ h3_t = hf.createHistogram1D("theta top", "theta top", thetaBins);
+ h3_b = hf.createHistogram1D("theta bottom", "theta bottom", thetaBins);
+
+
+ h4 = hf.createHistogram2D("px\\/pz vs py\\/pz", 300, -.16, .24, 300, -.2, .2);
+ h4a = hf.createHistogram2D("px\\/pz vs py\\/pz cut", 300, -.16, .24, 300, -.2, .2);
+ h4y = hf.createHistogram1D("py\\pz", 1200, -.06, .06);
+
+ h5 = hf.createHistogram1D("energy top", 75, 0, 1.5);
+ h5a = hf.createHistogram1D("energy bottom", 75, 0, 1.5);
+
+
+ h9_t = hf.createHistogram1D("pz top", 75, 0, 1.5);
+ h9_b = hf.createHistogram1D("pz bottom", 75, 0, 1.5);
+
+// h6 = hf.createHistogram2D("cluster", 47, -23.5, 23.5, 11, -5.5, 5.5);
+// h6a = hf.createHistogram2D("cluster matched", 47, -23.5, 23.5, 11, -5.5, 5.5);
+
+ h7 = hf.createHistogram1D("y top", 500, 0, 100);
+
+ h7a = hf.createHistogram1D("y bottom", 500, 0, 100);
+
+ h8 = hf.createHistogram1D("seed energy", 120, 0, 1.2);
+
+ h10_t = hf.createHistogram1D("clustsize top", 10,0, 10);
+ h10_b = hf.createHistogram1D("clustsize bottom", 10,0, 10);
+ }
+ static void setupPlotter(IAnalysisFactory af){
+ IPlotterFactory pf = af.createPlotterFactory();
+ IPlotter p = pf.create();
+ p.createRegions(2,2);
+ p.region(0).plot(h2);
+ StyleUtil.stylize(p.region(0), "theta", "phi");
+// p.region(1).plot(h3a);
+ StyleUtil.stylize(p.region(1), "theta", "# of particles");
+ p.region(2).plot(h9_t);
+ p.region(2).plot(h9_b);
+ StyleUtil.noFillHistogramBars(p.region(2));
+ StyleUtil.stylize(p.region(2), "pztilt" ,"pztilt", "# of particles");
+ p.region(3).plot(h5);
+ p.region(3).plot(h5a);
+ StyleUtil.noFillHistogramBars(p.region(3));
+ StyleUtil.stylize(p.region(3), "energy", "# of particles");
+
+ p.show();
+
+ //new window for the next plot
+ IPlotter p2 = pf.create();
+ p2.region(0).plot(h2b);
+ //IDataPointSetFactory dpsf = af.createDataPointSetFactory(af.createTreeFactory().create());
+
+ StyleUtil.stylize(p2.region(0), "theta", "phi");
+
+ p2.show();
+
+ //new window for the next plot
+ IPlotter p3 = pf.create();
+ p3.region(0).plot(h2c);
+ StyleUtil.stylize(p3.region(0), "theta", "phi");
+
+ p3.show();
+
+ //new window for the next plot
+ IPlotter p4 = pf.create();
+ p4.region(0).plot(h4);
+ StyleUtil.stylize(p4.region(0), "px/pz", "py/pz");
+
+ p4.show();
+
+ //new window for the next plot
+ IPlotter p5 = pf.create();
+ p5.region(0).plot(h4a);
+ StyleUtil.stylize(p5.region(0), "px/pz", "py/pz");
+
+ p5.show();
+
+ IPlotter p6 = pf.create("efficiency");
+ p6.createRegions(1,2);
+// p6.region(0).plot(h6);
+ StyleUtil.stylize(p6.region(0), "ix", "iy");
+// p6.region(1).plot(h6a);
+ StyleUtil.stylize(p6.region(1), "ix", "iy");
+ p6.show();
+
+ IPlotter p7 = pf.create("theta vs energy");
+ //p6.createRegions(1,2);
+ p7.region(0).plot(h1);
+ StyleUtil.stylize(p7.region(0), "theta", "energy");
+ // StyleUtil.stylize(p6.region(1), "ix", "iy");
+ p7.show();
+
+ IPlotter p8 = pf.create("y");
+ //p6.createRegions(1,2);
+ p8.region(0).plot(h7);
+ p8.region(0).plot(h7a);
+ StyleUtil.stylize(p8.region(0), "y", "# of particles");
+ // StyleUtil.stylize(p6.region(1), "ix", "iy");
+ p8.show();
+
+ IPlotter p9 = pf.create("theta: top vs. bottom");
+ //p6.createRegions(1,2);
+ p9.region(0).plot(h3_t);
+ p9.region(0).plot(h3_b);
+ StyleUtil.stylize(p9.region(0), "theta", "theta", "# of particles");
+ StyleUtil.noFillHistogramBars(p9.region(0));
+ //StyleUtil.stylize(p6.region(1), "ix", "iy");
+ p9.show();
+
+ IPlotter p10 = pf.create("seed energy");
+ //p6.createRegions(1,2);
+ p10.createRegions(2,1);
+ p10.region(0).plot(h8);
+ StyleUtil.stylize(p10.region(0), "seed energy", "seed energy (GeV)", "# of particles");
+
+ p10.region(1).plot(h10_t);
+ p10.region(1).plot(h10_b);
+ StyleUtil.noFillHistogramBars(p10.region(1));
+ StyleUtil.stylize(p10.region(1), "clust size", "n ecal hits", "# of particles");
+
+ //StyleUtil.noFillHistogramBars(p10.region(0));
+ //StyleUtil.stylize(p6.region(1), "ix", "iy");
+ p10.show();
+
+ }
+ private static void processEvent(EventHeader event) {
+ if(event.getEventNumber() %10000 == 0)
+ System.out.println("event number " + event.getEventNumber());
+
+ for (GenericObject gob : event.get(GenericObject.class,"TriggerBank"))
+ {
+ if (!(AbstractIntData.getTag(gob) == TIData.BANK_TAG)) continue;
+ TIData tid = new TIData(gob);
+ if (!tid.isSingle1Trigger())
+ {
+ return;
+ }
+ }
+ List<ReconstructedParticle> particles = event.get(ReconstructedParticle.class, "FinalStateParticles");
+ particles = RemoveDuplicateParticles.removeDuplicateParticles(particles);
+ outer : for(ReconstructedParticle p : particles){
+
+
+ boolean isGood = addParticle(p);
+
+
+ }
+
+ }
+
+
+
    // Accepted cluster-energy window [eMin, eMax] around the beam energy
    // (presumably GeV, matching beamEnergy below — confirm against Cluster.getEnergy()).
    static double eMin = .8;
    static double eMax = 1.2;
    // Nominal beam energy; only used as a reference value here.
    static double beamEnergy = 1.057;

    // Beam tilt angles (rad) used in addParticle to rotate momenta into the
    // beam frame.  beamTiltY is never assigned in this file, so it defaults to 0.
    static double beamTiltX = .03057;
    static double beamTiltY;
    // Maximum accepted track chi2.
    static double maxChi2 = 50;
    //maximum difference between the reconstructed energy and momentum
    static double maxdE = .5;

    // Minimum seed-crystal (highest single hit) energy for a cluster to pass.
    static double seedEnergyCut = .4;
+
+ static boolean addParticle(ReconstructedParticle part){
+ if(part.getTracks().size() != 0){
+ if(part.getMomentum().magnitudeSquared() > .8
+ && part.getTracks().get(0).getChi2() > maxChi2){
+ h4y.fill(part.getMomentum().y()/part.getMomentum().z());
+ }
+ }
+ if(part.getCharge() != -1)
+ return false;
+ if(part.getClusters().size() == 0)
+ return false;
+ Cluster c = part.getClusters().get(0);
+ double time = c.getCalorimeterHits().get(0).getTime();
+
+ if(!(time>40 && time <50))
+ return false;
+ double seedEnergy = 0;
+ for(CalorimeterHit hit : c.getCalorimeterHits()){
+ if(hit.getCorrectedEnergy() > seedEnergy)
+ seedEnergy = hit.getCorrectedEnergy();
+ }
+ h8.fill(seedEnergy);
+
+
+ if(seedEnergy < seedEnergyCut)
+ return false;
+
+ if(c.getPosition()[1] > 0){
+ h10_t.fill(c.getSize());
+ }
+ else{
+ h10_b.fill(c.getSize());
+ }
+
+
+ if(c.getCalorimeterHits().size() < 3)
+ return false;
+
+
+ if(c.getEnergy() > eMin && c.getEnergy() < eMax){
+ if(c.getPosition()[1] > 0)
+ h7.fill(c.getPosition()[1]);
+ else if(c.getPosition()[1] < 0)
+ h7a.fill(-c.getPosition()[1]);
+ }
+
+ if(EcalUtil.fid_ECal(c)){
+
+ if(c.getPosition()[1] > 0){
+ h5.fill(c.getEnergy());
+ }
+ else{
+ h5a.fill(c.getEnergy());
+ }
+ if(part.getTracks().size() == 0)
+ return false;
+ Track t = part.getTracks().get(0);
+ if(t.getChi2()>maxChi2){
+ return false;
+ }
+ if(!TrackType.isGBL(t.getType()))
+ return false;
+
+
+
+ Hep3Vector p = part.getMomentum();
+
+
+
+ double px = p.x(), pz = p.z();
+ double pxtilt = px*Math.cos(beamTiltX)-pz*Math.sin(beamTiltX);
+ double py = p.y();
+ double pztilt = pz*Math.cos(beamTiltX)+px*Math.sin(beamTiltX);
+
+ double pytilt = py*Math.cos(beamTiltY)-pztilt*Math.sin(beamTiltY);
+ pztilt = pz*Math.cos(beamTiltY) + pytilt*Math.sin(beamTiltY);
+
+ if(Math.abs(pztilt - c.getEnergy()) > maxdE)
+ return false;
+ if(c.getPosition()[1] > 0)
+ h9_t.fill(pztilt);
+ else
+ h9_b.fill(pztilt);
+
+ double theta = Math.atan(Math.hypot(pxtilt, pytilt)/pztilt);
+ double phi =Math.atan2(pytilt, pxtilt);
+ boolean inRange = cb.inRange(theta, phi);
+ if(inRange)
+ h1.fill(theta, c.getEnergy());
+
+
+
+ if(c.getEnergy() > eMin && c.getEnergy() < eMax) {
+
+
+
+ h2.fill(theta, phi);
+ h2c.fill(theta, phi);
+
+ h4.fill(px/pz, py/pz);
+ //h4y.fill(py/pz);
+
+ if(inRange){
+
+ //System.out.println(c.getEnergy() + " " + t.getType());
+ /*for(TrackState ts : t.getTrackStates()){
+ if(ts.getLocation() == TrackState.AtIP)
+ System.out.println(Arrays.toString(
+ ts.getReferencePoint()));
+ }*/
+ h2a.fill(theta, phi);
+ h2b.fill(theta, phi);
+
+ h3.fill(theta);
+ if(py > 0)
+ h3_t.fill(theta);
+ else
+ h3_b.fill(theta);
+ //if(h3_t.sumBinHeights()+h3_b.sumBinHeights() != h3.sumBinHeights())
+ //System.out.println("NABO ERROR");
+
+
+ h4a.fill(px/pz, py/pz);
+ }
+
+
+ return true;
+ }
+
+ }
+ return false;
+ }
+
}
Modified: java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/ShowCustomBinning.java
=============================================================================
--- java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/ShowCustomBinning.java (original)
+++ java/branches/HPSJAVA-409/users/src/main/java/org/hps/users/spaul/feecc/ShowCustomBinning.java Wed Apr 27 11:11:32 2016
@@ -2,10 +2,16 @@
import java.awt.Canvas;
import java.awt.Color;
+import java.awt.Container;
+import java.awt.Font;
import java.awt.Graphics;
+import java.awt.Graphics2D;
+import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileNotFoundException;
-
+import java.io.IOException;
+
+import javax.imageio.ImageIO;
import javax.swing.JFrame;
import hep.aida.IAnalysisFactory;
@@ -18,201 +24,277 @@
import hep.aida.ITreeFactory;
public class ShowCustomBinning extends Canvas{
- /**
- * show Rafo's fiducial cuts translated into rotated theta and phi,
- * and overlay this with my bins in x and y.
- * @param arg
- * @throws FileNotFoundException
- */
- public static void main(String arg[]) throws FileNotFoundException{
- JFrame frame = new JFrame();
- frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
- frame.add(new ShowCustomBinning(new File(arg[0])));
- frame.setSize(800, 800);
- frame.setVisible(true);
-
- }
- public void paint(Graphics g){
- drawEcalOutline(g);
- drawFidEcalOutline(g);
- drawCustomBinRectangles(g);
- }
-
- void drawFidEcalOutline(Graphics g){
- g.setColor(Color.GRAY);
- double x_edge_low = -262.74;
- double x_edge_high = 347.7;
- double y_edge_low = 33.54;
- double y_edge_high = 75.18;
-
- double x_gap_low = -106.66;
- double x_gap_high = 42.17;
- double y_gap_high = 47.18;
- double x1,y1, x2, y2;
- double nPoints = 500;
- for(int i = 0; i< nPoints-1; i++){
- x1 = x_gap_high+i/nPoints*(x_edge_high-x_gap_high);
- x2 = x_gap_high+(i+1)/nPoints*(x_edge_high-x_gap_high);
- y1 = y_edge_low;
- y2 = y1;
- drawEcalFaceLine(g, x1, y1, x2, y2);
- drawEcalFaceLine(g, x1, -y1, x2, -y2);
-
-
- x1 = x_edge_low+i/nPoints*(x_gap_low-x_edge_low);
- x2 = x_edge_low+(i+1)/nPoints*(x_gap_low-x_edge_low);
- y1 = y2 = y_edge_low;
- drawEcalFaceLine(g, x1, y1, x2, y2);
- drawEcalFaceLine(g, x1, -y1, x2, -y2);
-
-
-
- x1 = x_gap_low+i/nPoints*(x_gap_high-x_gap_low);
- x2 = x_gap_low+(i+1)/nPoints*(x_gap_high-x_gap_low);
- y1 = y2 = y_gap_high;
- drawEcalFaceLine(g, x1, y1, x2, y2);
- drawEcalFaceLine(g, x1, -y1, x2, -y2);
-
- x1 = x_edge_low+i/nPoints*(x_edge_high-x_edge_low);
- x2 = x_edge_low+(i+1)/nPoints*(x_edge_high-x_edge_low);
- y1 = y2 = y_edge_high;
- drawEcalFaceLine(g, x1, y1, x2, y2);
- drawEcalFaceLine(g, x1, -y1, x2, -y2);
- }
- drawEcalFaceLine(g, x_gap_low, y_edge_low, x_gap_low, y_gap_high);
- drawEcalFaceLine(g, x_gap_high, y_edge_low, x_gap_high, y_gap_high);
-
- drawEcalFaceLine(g, x_edge_low, y_edge_low, x_edge_low, y_edge_high);
- drawEcalFaceLine(g, x_edge_high, y_edge_low, x_edge_high, y_edge_high);
-
-
- drawEcalFaceLine(g, x_gap_low, -y_edge_low, x_gap_low, -y_gap_high);
- drawEcalFaceLine(g, x_gap_high, -y_edge_low, x_gap_high, -y_gap_high);
-
- drawEcalFaceLine(g, x_edge_low, -y_edge_low, x_edge_low, -y_edge_high);
- drawEcalFaceLine(g, x_edge_high, -y_edge_low, x_edge_high, -y_edge_high);
-
- }
-
- void drawEcalOutline(Graphics g){
- g.setColor(Color.BLACK);
- double x_edge_low = -276.50;
- double x_edge_high = 361.55;
- double y_edge_low = 20.17;
- double y_edge_high = 89;
-
- double x_gap_low = -93.30;
- double x_gap_high = 28.93;
- double y_gap_high = 33.12;
- double x1,y1, x2, y2;
- double nPoints = 500;
- for(int i = 0; i< nPoints-1; i++){
- x1 = x_gap_high+i/nPoints*(x_edge_high-x_gap_high);
- x2 = x_gap_high+(i+1)/nPoints*(x_edge_high-x_gap_high);
- y1 = y_edge_low;
- y2 = y1;
- drawEcalFaceLine(g, x1, y1, x2, y2);
- drawEcalFaceLine(g, x1, -y1, x2, -y2);
-
-
- x1 = x_edge_low+i/nPoints*(x_gap_low-x_edge_low);
- x2 = x_edge_low+(i+1)/nPoints*(x_gap_low-x_edge_low);
- y1 = y2 = y_edge_low;
- drawEcalFaceLine(g, x1, y1, x2, y2);
- drawEcalFaceLine(g, x1, -y1, x2, -y2);
-
-
-
- x1 = x_gap_low+i/nPoints*(x_gap_high-x_gap_low);
- x2 = x_gap_low+(i+1)/nPoints*(x_gap_high-x_gap_low);
- y1 = y2 = y_gap_high;
- drawEcalFaceLine(g, x1, y1, x2, y2);
- drawEcalFaceLine(g, x1, -y1, x2, -y2);
-
- x1 = x_edge_low+i/nPoints*(x_edge_high-x_edge_low);
- x2 = x_edge_low+(i+1)/nPoints*(x_edge_high-x_edge_low);
- y1 = y2 = y_edge_high;
- drawEcalFaceLine(g, x1, y1, x2, y2);
- drawEcalFaceLine(g, x1, -y1, x2, -y2);
- }
- drawEcalFaceLine(g, x_gap_low, y_edge_low, x_gap_low, y_gap_high);
- drawEcalFaceLine(g, x_gap_high, y_edge_low, x_gap_high, y_gap_high);
-
- drawEcalFaceLine(g, x_edge_low, y_edge_low, x_edge_low, y_edge_high);
- drawEcalFaceLine(g, x_edge_high, y_edge_low, x_edge_high, y_edge_high);
-
-
- drawEcalFaceLine(g, x_gap_low, -y_edge_low, x_gap_low, -y_gap_high);
- drawEcalFaceLine(g, x_gap_high, -y_edge_low, x_gap_high, -y_gap_high);
-
- drawEcalFaceLine(g, x_edge_low, -y_edge_low, x_edge_low, -y_edge_high);
- drawEcalFaceLine(g, x_edge_high, -y_edge_low, x_edge_high, -y_edge_high);
- }
-
- CustomBinning binning;
-
- ShowCustomBinning(File file) throws FileNotFoundException{
- this.binning = new CustomBinning(file);
- print(this.binning);
- }
- Color altBin1 = new Color(0, 0, 128);
- Color altBin2 = new Color(0,128,0);
- void drawCustomBinRectangles(Graphics g){
- for(int i = 0; i<binning.nTheta; i++){
- g.setColor(i%2 == 0 ? altBin1 : altBin2);
- for(int j = 0; j<binning.phiMax[i].length; j++){
- double phi1 = binning.phiMax[i][j];
- double phi2 = binning.phiMin[i][j];
- double theta1 = binning.thetaMin[i];
- double theta2 = binning.thetaMax[i];
-
- int x =getX(theta1)+1, y = getY(phi1), w = getX(theta2)-getX(theta1), h = getY(phi2)-getY(phi1);
- g.drawRect(x, y, w, h);
- x =getX(theta1)+1; y = getY(-phi2); w = getX(theta2)-getX(theta1); h = getY(-phi1)-getY(-phi2);
-
- g.drawRect(x, y, w, h);
-
-
- }
- }
- }
-
-
- void drawEcalFaceLine(Graphics g, double x1, double y1, double x2, double y2){
-
- double[] polar1 = EcalUtil.getThetaPhiSpherical(x1, y1);
- double[] polar2 = EcalUtil.getThetaPhiSpherical(x2, y2);
- g.drawLine(getX(polar1[0]), getY(polar1[1]), getX(polar2[0]), getY(polar2[1]));
-
- }
- int getX(double theta){
- return (int)(this.getWidth()*theta/.3);
- }
- int getY(double phi){
- return (int)(this.getHeight()*(3.2-phi)/6.4);
- }
- static void print(CustomBinning binning){
- System.out.println(" Bin \\# & $\\theta_{\\textrm{min}}$ & $\\theta_{\\textrm{max}}$ & $\\phi_{\\textrm{min 1}}$ & $\\phi_{\\textrm{max 1}}$ & $\\phi_{\\textrm{min 2}}$ & $\\phi_{\\textrm{max 2}}$ & Solid angle \\\\");
- for(int i = 0; i<binning.nTheta; i++){
- if(binning.phiMax[i].length == 1)
- System.out.printf("%d & %.0f & %.0f & %.0f & %.0f & -- & -- & %.0f \\\\\n",
- i+1,
- binning.thetaMin[i]*1000,
- binning.thetaMax[i]*1000,
- binning.phiMin[i][0]*1000,
- binning.phiMax[i][0]*1000.,
- binning.getSteradians(i)*1e6);
- if(binning.phiMax[i].length == 2)
- System.out.printf("%d & %.0f & %.0f & %.0f & %.0f & %.0f & %.0f & %.0f \\\\\n",
- i+1,
- binning.thetaMin[i]*1000,
- binning.thetaMax[i]*1000,
- binning.phiMin[i][0]*1000,
- binning.phiMax[i][0]*1000,
- binning.phiMin[i][1]*1000,
- binning.phiMax[i][1]*1000,
- binning.getSteradians(i)*1e6);
- }
- System.out.println("total " + binning.getTotSteradians()*1e6 + " microsteradians");
- }
+ /**
+ * show Rafo's fiducial cuts translated into rotated theta and phi,
+ * and overlay this with my bins in x and y.
+ * @param arg
+ * @throws FileNotFoundException
+ */
+ public static void main(String arg[]) throws FileNotFoundException{
+ JFrame frame = new JFrame();
+ frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
+ Canvas c = new ShowCustomBinning(new File(arg[0]));
+ String outdir = arg[1];
+ frame.add(c);
+ frame.setSize(1200, 800);
+ frame.setVisible(true);
+
+
+ try {
+ BufferedImage im = new BufferedImage(c.getWidth(), c.getHeight(), BufferedImage.TYPE_INT_ARGB);
+ c.paint(im.getGraphics());
+ ImageIO.write(im, "PNG", new File(outdir +"/bins.png"));
+ } catch (IOException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+
+ frame = new JFrame();
+ frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
+ c = new ShowCustomBinningXY(new File(arg[0]));
+ frame.add(c);
+ frame.setSize(1200, 615);
+ frame.setVisible(true);
+
+ try {
+ BufferedImage im = new BufferedImage(c.getWidth(), c.getHeight(), BufferedImage.TYPE_INT_ARGB);
+ c.paint(im.getGraphics());
+ ImageIO.write(im, "PNG", new File(outdir + "/bins_xy.png"));
+ } catch (IOException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ }
    /**
     * Renders the full display: both ECal outlines, the custom bin
     * rectangles, and the theta/phi axes (drawn last, on top).
     */
    public void paint(Graphics g){
        // Large font for the axis labels drawn by drawXAxis/drawYAxis.
        g.setFont(new Font(Font.DIALOG, Font.PLAIN, 24));

        drawEcalOutline(g);
        drawFidEcalOutline(g);
        drawCustomBinRectangles(g);
        // Axes in black, over everything else.
        g.setColor(Color.BLACK);
        drawXAxis(g);
        drawYAxis(g);
    }
+
    /**
     * Draws the fiducial (inner) ECal outline in gray, mirrored about the
     * beam plane.  Edge coordinates on the ECal face are mapped into rotated
     * (theta, phi) by drawEcalFaceLine; the horizontal edges are approximated
     * by many short segments because a straight line in (x, y) becomes a
     * curve in (theta, phi), while the vertical edges are drawn as single
     * segments.
     */
    void drawFidEcalOutline(Graphics g){
        g.setColor(Color.GRAY);
        // Fiducial-region edges on the ECal face (units per EcalUtil,
        // presumably mm — confirm against EcalUtil.getThetaPhiSpherical).
        double x_edge_low = -262.74;
        double x_edge_high = 347.7;
        double y_edge_low = 33.54;
        double y_edge_high = 75.18;

        // The beam-gap cutout in the middle of each half.
        double x_gap_low = -106.66;
        double x_gap_high = 42.17;
        double y_gap_high = 47.18;
        double x1,y1, x2, y2;
        double nPoints = 500;
        for(int i = 0; i< nPoints-1; i++){
            // Inner edge, right of the gap (drawn for +y and mirrored to -y).
            x1 = x_gap_high+i/nPoints*(x_edge_high-x_gap_high);
            x2 = x_gap_high+(i+1)/nPoints*(x_edge_high-x_gap_high);
            y1 = y_edge_low;
            y2 = y1;
            drawEcalFaceLine(g, x1, y1, x2, y2);
            drawEcalFaceLine(g, x1, -y1, x2, -y2);


            // Inner edge, left of the gap.
            x1 = x_edge_low+i/nPoints*(x_gap_low-x_edge_low);
            x2 = x_edge_low+(i+1)/nPoints*(x_gap_low-x_edge_low);
            y1 = y2 = y_edge_low;
            drawEcalFaceLine(g, x1, y1, x2, y2);
            drawEcalFaceLine(g, x1, -y1, x2, -y2);



            // Top of the gap cutout.
            x1 = x_gap_low+i/nPoints*(x_gap_high-x_gap_low);
            x2 = x_gap_low+(i+1)/nPoints*(x_gap_high-x_gap_low);
            y1 = y2 = y_gap_high;
            drawEcalFaceLine(g, x1, y1, x2, y2);
            drawEcalFaceLine(g, x1, -y1, x2, -y2);

            // Outer edge, full width.
            x1 = x_edge_low+i/nPoints*(x_edge_high-x_edge_low);
            x2 = x_edge_low+(i+1)/nPoints*(x_edge_high-x_edge_low);
            y1 = y2 = y_edge_high;
            drawEcalFaceLine(g, x1, y1, x2, y2);
            drawEcalFaceLine(g, x1, -y1, x2, -y2);
        }
        // Vertical edges (gap sides and far left/right), top half then bottom.
        drawEcalFaceLine(g, x_gap_low, y_edge_low, x_gap_low, y_gap_high);
        drawEcalFaceLine(g, x_gap_high, y_edge_low, x_gap_high, y_gap_high);

        drawEcalFaceLine(g, x_edge_low, y_edge_low, x_edge_low, y_edge_high);
        drawEcalFaceLine(g, x_edge_high, y_edge_low, x_edge_high, y_edge_high);


        drawEcalFaceLine(g, x_gap_low, -y_edge_low, x_gap_low, -y_gap_high);
        drawEcalFaceLine(g, x_gap_high, -y_edge_low, x_gap_high, -y_gap_high);

        drawEcalFaceLine(g, x_edge_low, -y_edge_low, x_edge_low, -y_edge_high);
        drawEcalFaceLine(g, x_edge_high, -y_edge_low, x_edge_high, -y_edge_high);

    }
+
    /**
     * Draws the outer ECal outline in black, mirrored about the beam plane.
     * Same construction as drawFidEcalOutline but with the physical (outer)
     * edge coordinates.
     *
     * NOTE(review): this method duplicates drawFidEcalOutline except for the
     * color and the eight edge constants — a shared helper taking those as
     * parameters would remove the duplication.
     */
    void drawEcalOutline(Graphics g){
        // Previous (fiducial) values, kept by the original author for reference:
        /*double x_edge_low = -262.74;
        double x_edge_high = 347.7;
        double y_edge_low = 33.54;
        double y_edge_high = 75.18;

        double x_gap_low = -106.66;
        double x_gap_high = 42.17;
        double y_gap_high = 47.18;*/

        g.setColor(Color.BLACK);
        // Outer edges of the ECal face (units per EcalUtil, presumably mm — confirm).
        double x_edge_low = -269.56;
        double x_edge_high = 354.52;
        double y_edge_low = 26.72;
        double y_edge_high = 82;

        // The beam-gap cutout in the middle of each half.
        double x_gap_low = -99.84;
        double x_gap_high = 33.35;
        double y_gap_high = 40.36;
        double x1,y1, x2, y2;
        double nPoints = 500;
        for(int i = 0; i< nPoints-1; i++){
            // Inner edge, right of the gap (drawn for +y and mirrored to -y).
            x1 = x_gap_high+i/nPoints*(x_edge_high-x_gap_high);
            x2 = x_gap_high+(i+1)/nPoints*(x_edge_high-x_gap_high);
            y1 = y_edge_low;
            y2 = y1;
            drawEcalFaceLine(g, x1, y1, x2, y2);
            drawEcalFaceLine(g, x1, -y1, x2, -y2);


            // Inner edge, left of the gap.
            x1 = x_edge_low+i/nPoints*(x_gap_low-x_edge_low);
            x2 = x_edge_low+(i+1)/nPoints*(x_gap_low-x_edge_low);
            y1 = y2 = y_edge_low;
            drawEcalFaceLine(g, x1, y1, x2, y2);
            drawEcalFaceLine(g, x1, -y1, x2, -y2);



            // Top of the gap cutout.
            x1 = x_gap_low+i/nPoints*(x_gap_high-x_gap_low);
            x2 = x_gap_low+(i+1)/nPoints*(x_gap_high-x_gap_low);
            y1 = y2 = y_gap_high;
            drawEcalFaceLine(g, x1, y1, x2, y2);
            drawEcalFaceLine(g, x1, -y1, x2, -y2);

            // Outer edge, full width.
            x1 = x_edge_low+i/nPoints*(x_edge_high-x_edge_low);
            x2 = x_edge_low+(i+1)/nPoints*(x_edge_high-x_edge_low);
            y1 = y2 = y_edge_high;
            drawEcalFaceLine(g, x1, y1, x2, y2);
            drawEcalFaceLine(g, x1, -y1, x2, -y2);
        }
        // Vertical edges (gap sides and far left/right), top half then bottom.
        drawEcalFaceLine(g, x_gap_low, y_edge_low, x_gap_low, y_gap_high);
        drawEcalFaceLine(g, x_gap_high, y_edge_low, x_gap_high, y_gap_high);

        drawEcalFaceLine(g, x_edge_low, y_edge_low, x_edge_low, y_edge_high);
        drawEcalFaceLine(g, x_edge_high, y_edge_low, x_edge_high, y_edge_high);


        drawEcalFaceLine(g, x_gap_low, -y_edge_low, x_gap_low, -y_gap_high);
        drawEcalFaceLine(g, x_gap_high, -y_edge_low, x_gap_high, -y_gap_high);

        drawEcalFaceLine(g, x_edge_low, -y_edge_low, x_edge_low, -y_edge_high);
        drawEcalFaceLine(g, x_edge_high, -y_edge_low, x_edge_high, -y_edge_high);
    }
+
    // The custom (theta, phi) binning read from the input file.
    CustomBinning binning;

    /**
     * Loads the custom binning from the given file and prints it to stdout
     * as LaTeX table rows (see {@link #print}).
     *
     * @param file the binning definition file
     * @throws FileNotFoundException if the file cannot be opened
     */
    ShowCustomBinning(File file) throws FileNotFoundException{
        this.binning = new CustomBinning(file);
        print(this.binning);
    }
    // Outline colors, alternating between adjacent theta bins.
    Color altBin1 = new Color(0, 0, 128);
    Color altBin2 = new Color(0,128,0);
    // Matching lighter fill colors for the bin interiors.
    Color fillBin1 = new Color(196, 196, 255);
    Color fillBin2 = new Color(196,255,196);
+ void drawCustomBinRectangles(Graphics g){
+ for(int i = 0; i<binning.nTheta; i++){
+ g.setColor(i%2 == 0 ? altBin1 : altBin2);
+ for(int j = 0; j<binning.phiMax[i].length; j++){
+ double phi1 = binning.phiMax[i][j];
+ double phi2 = binning.phiMin[i][j];
+ double theta1 = binning.thetaMin[i];
+ double theta2 = binning.thetaMax[i];
+
+ int x =getX(theta1)+1, y = getY(phi1), w = getX(theta2)-getX(theta1), h = getY(phi2)-getY(phi1);
+
+ g.setColor(i%2 == 0 ? fillBin1 : fillBin2);
+ g.fillRect(x, y, w, h);
+ g.setColor(i%2 == 0 ? altBin1 : altBin2);
+ g.drawRect(x, y, w, h);
+ x =getX(theta1)+1; y = getY(-phi2); w = getX(theta2)-getX(theta1); h = getY(-phi1)-getY(-phi2);
+ g.setColor(i%2 == 0 ? fillBin1 : fillBin2);
+ g.fillRect(x, y, w, h);
+ g.setColor(i%2 == 0 ? altBin1 : altBin2);
+ g.drawRect(x, y, w, h);
+ }
+
+ }
+ }
+ void drawXAxis(Graphics g){
+ //x axis
+ g.drawString("θ", getX(.28), getY(0) - 40);
+ g.drawLine(getX(0), getY(0), getX(.28), getY(0));
+ for(int i = 0; i< 28; i++){
+ if(i%5 == 0 && i != 0){
+ g.drawString(i/100.+"", getX(i/100.)-15, getY(0)-20);
+ g.drawLine(getX(i/100.), getY(0), getX(i/100.), getY(0)-15);
+ }
+ g.drawLine(getX(i/100.), getY(0), getX(i/100.), getY(0)-5);
+ }
+ }
+ void drawYAxis(Graphics g){
+ g.drawString("Ï", getX(0)+70, getY(3));
+ g.drawLine(getX(0), getY(-3), getX(0), getY(3));
+ for(int i = -30; i<= 30; i++){
+ if(i%5 == 0 && i != 0){
+ g.drawString(i/10.+"", getX(0)+20, getY(i/10.) + 5);
+
+ g.drawLine(getX(0), getY(i/10.), getX(0) + 15, getY(i/10.));
+ }
+ if(i == 0){
+ //g.drawString(i/10.+"", getX(0)+10, getY(i/10.) - 15);
+ }
+ g.drawLine(getX(0), getY(i/10.), getX(0) + 5, getY(i/10.));
+ }
+ }
+
+
+ void drawEcalFaceLine(Graphics g, double x1, double y1, double x2, double y2){
+
+ double[] polar1 = EcalUtil.getThetaPhiSpherical(x1, y1);
+ double[] polar2 = EcalUtil.getThetaPhiSpherical(x2, y2);
+ g.drawLine(getX(polar1[0]), getY(polar1[1]), getX(polar2[0]), getY(polar2[1]));
+
+ }
+ int getX(double theta){
+ return (int)(this.getWidth()*theta/.3)+left_margin;
+ }
+ int left_margin = 20;
+ int getY(double phi){
+ return (int)(this.getHeight()*(3.2-phi)/6.4);
+ }
+ static void print(CustomBinning binning){
+ System.out.println(" Bin \\# & $\\theta_{\\textrm{min}}$ & $\\theta_{\\textrm{max}}$ & $\\phi_{\\textrm{min 1}}$ & $\\phi_{\\textrm{max 1}}$ & $\\phi_{\\textrm{min 2}}$ & $\\phi_{\\textrm{max 2}}$ & Solid angle \\\\");
+ for(int i = 0; i<binning.nTheta; i++){
+ if(binning.phiMax[i].length == 1)
+ System.out.printf("%d & %.0f & %.0f & %.0f & %.0f & -- & -- & %.0f \\\\\n",
+ i+1,
+ binning.thetaMin[i]*1000,
+ binning.thetaMax[i]*1000,
+ binning.phiMin[i][0]*1000,
+ binning.phiMax[i][0]*1000.,
+ binning.getSteradians(i)*1e6);
+ if(binning.phiMax[i].length == 2)
+ System.out.printf("%d & %.0f & %.0f & %.0f & %.0f & %.0f & %.0f & %.0f \\\\\n",
+ i+1,
+ binning.thetaMin[i]*1000,
+ binning.thetaMax[i]*1000,
+ binning.phiMin[i][0]*1000,
+ binning.phiMax[i][0]*1000,
+ binning.phiMin[i][1]*1000,
+ binning.phiMax[i][1]*1000,
+ binning.getSteradians(i)*1e6);
+ }
+ System.out.println("total " + binning.getTotSteradians()*1e6 + " microsteradians");
+ }
}
Modified: java/branches/HPSJAVA-409/util/pom.xml
=============================================================================
--- java/branches/HPSJAVA-409/util/pom.xml (original)
+++ java/branches/HPSJAVA-409/util/pom.xml Wed Apr 27 11:11:32 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.9-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/util/</url>
Modified: java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/CalculateAcceptanceFromMadGraph.java
=============================================================================
--- java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/CalculateAcceptanceFromMadGraph.java (original)
+++ java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/CalculateAcceptanceFromMadGraph.java Wed Apr 27 11:11:32 2016
@@ -21,7 +21,7 @@
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.lcsim.fit.helicaltrack.HelixParamCalculator;
public class CalculateAcceptanceFromMadGraph {
@@ -165,7 +165,7 @@
// Set up command line parsing.
Options options = createCommandLineOptions();
- CommandLineParser parser = new DefaultParser();
+ CommandLineParser parser = new PosixParser();
// Parse command line arguments.
CommandLine cl = null;
Modified: java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/ConvertToStdhep.java
=============================================================================
--- java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/ConvertToStdhep.java (original)
+++ java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/ConvertToStdhep.java Wed Apr 27 11:11:32 2016
@@ -15,7 +15,6 @@
import java.util.ArrayList;
import java.util.List;
-import hep.io.stdhep.StdhepBeginRun;
import hep.io.stdhep.StdhepEndRun;
import hep.io.stdhep.StdhepEvent;
import hep.io.stdhep.StdhepWriter;
@@ -31,7 +30,7 @@
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.lcsim.detector.IRotation3D;
import org.lcsim.detector.RotationGeant;
@@ -95,7 +94,7 @@
// Set up command line parsing.
Options options = createCommandLineOptions();
- CommandLineParser parser = new DefaultParser();
+ CommandLineParser parser = new PosixParser();
// Parse command line arguments.
CommandLine cl = null;
@@ -502,10 +501,10 @@
}*/
static private double getDecayLength(double gamma){
- Random generator = new Random();
- double a = generator.nextDouble();
- double l = -gamma*_declength*Math.log(1-a);
- return l;
+ Random generator = new Random();
+ double a = generator.nextDouble();
+ double l = -gamma*_declength*Math.log(1-a);
+ return l;
}
/*
@@ -724,7 +723,7 @@
double gamma = ApEnergy / ApMass;
if (expDecay) {
decLen = getDecayLength(gamma);
- // decLen = getDecayLength(maxWght, gamma);
+ // decLen = getDecayLength(maxWght, gamma);
}
if (flatDecay) {
decLen = generator.nextDouble() * maxLen;
Modified: java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/DumpLHEEventsToASCII.java
=============================================================================
--- java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/DumpLHEEventsToASCII.java (original)
+++ java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/DumpLHEEventsToASCII.java Wed Apr 27 11:11:32 2016
@@ -26,7 +26,7 @@
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.PosixParser;
import org.lcsim.detector.IRotation3D;
import org.lcsim.detector.RotationGeant;
@@ -89,7 +89,7 @@
// Set up command line parsing.
Options options = createCommandLineOptions();
- CommandLineParser parser = new DefaultParser();
+ CommandLineParser parser = new PosixParser();
// Parse command line arguments.
CommandLine cl = null;
@@ -108,7 +108,7 @@
String sigyString = String.valueOf(sigy);
eptString = convertDecimal(eptString);
if (cl.hasOption("t")) {
- trident=true;
+ trident=true;
}
if (cl.hasOption("m")) {
massString = cl.getOptionValue("m");
@@ -475,7 +475,7 @@
throw new RuntimeException("Unexpected entry for number of particles");
}
int nhep = nums.get(0).intValue();
- // System.out.println("Number of particles for event " + nevhep + ": " + nhep);
+ // System.out.println("Number of particles for event " + nevhep + ": " + nhep);
double decLen = 0;
double maxWght = 0;
@@ -503,14 +503,14 @@
if (vals.size() != 13) {
throw new RuntimeException("Unexpected entry for a particle");
}
- idhepTmp = vals.get(0).intValue();
-// System.out.println(idhepTmp);
+ idhepTmp = vals.get(0).intValue();
+// System.out.println(idhepTmp);
if (vals.get(1).intValue() == 9) {//apparently, vertices aren't counted in nhep
- nhep++;
- }
+ nhep++;
+ }
if (vals.get(1).intValue() == 1) {//ignore initial & intermediate state particles
- // System.out.println("Ok...good"+idhepTmp);
+ // System.out.println("Ok...good"+idhepTmp);
@@ -523,8 +523,8 @@
phepRec[j] = vals.get(j + 6);
if (idhepTmp == -623){
phepNuc[j] = vals.get(j + 6);
- // System.out.println("Found the recoil nucleus");
- }
+ // System.out.println("Found the recoil nucleus");
+ }
}
Modified: java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/LCIOFilterDriver.java
=============================================================================
--- java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/LCIOFilterDriver.java (original)
+++ java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/LCIOFilterDriver.java Wed Apr 27 11:11:32 2016
@@ -5,7 +5,6 @@
package org.hps.util;
import java.io.IOException;
import org.lcsim.event.EventHeader;
-import org.lcsim.event.Track;
import org.lcsim.util.Driver;
import org.lcsim.lcio.LCIOWriter;
Modified: java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/MergeBunches.java
=============================================================================
--- java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/MergeBunches.java (original)
+++ java/branches/HPSJAVA-409/util/src/main/java/org/hps/util/MergeBunches.java Wed Apr 27 11:11:32 2016
@@ -34,7 +34,6 @@
import org.lcsim.lcio.LCIOUtil;
import org.lcsim.lcio.LCIOWriter;
import org.lcsim.lcio.SIOMCParticle;
-import org.lcsim.util.loop.LCIODriver;
public class MergeBunches extends Driver {
@@ -455,7 +454,6 @@
* Copies an mc particle and stores it together with the copy in a map.
* Adds it to the list of mc particles as well as the overlay mc particles.
* Also copies and keeps all ancestors.
- * @param event
* @param particle
*/
protected void addOverlayMcParticle(MCParticle particle) {
|