Author: [log in to unmask]
Date: Tue Jan 26 18:16:25 2016
New Revision: 4139
Log:
Updates to dev branch.
Added:
java/branches/jeremy-dev/analysis/src/main/java/org/hps/analysis/plots/
java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/daqconfig/TriggerConfigEvioProcessor.java
- copied, changed from r4065, java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/daqconfig/DAQConfigEvioProcessor.java
java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/triggerbank/TriggerConfigData.java
- copied, changed from r4065, java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfig.java
Removed:
java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/daqconfig/DAQConfigEvioProcessor.java
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfig.java
Modified:
java/branches/jeremy-dev/conditions/src/main/java/org/hps/conditions/run/RunSpreadsheet.java
java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java
java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java
java/branches/jeremy-dev/logging/src/main/resources/org/hps/logging/config/test_logging.properties
java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/scalers/ScalerUtilities.java
java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/svt/SvtConfigEvioProcessor.java
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunManager.java
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/ScalerDataDaoImpl.java
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigDao.java
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigDaoImpl.java
java/branches/jeremy-dev/steering-files/src/main/resources/org/hps/steering/production/DataQualityRecon.lcsim
java/branches/jeremy-dev/steering-files/src/main/resources/org/hps/steering/production/DataQualityRecon_Pass2.lcsim
Modified: java/branches/jeremy-dev/conditions/src/main/java/org/hps/conditions/run/RunSpreadsheet.java
=============================================================================
--- java/branches/jeremy-dev/conditions/src/main/java/org/hps/conditions/run/RunSpreadsheet.java (original)
+++ java/branches/jeremy-dev/conditions/src/main/java/org/hps/conditions/run/RunSpreadsheet.java Tue Jan 26 18:16:25 2016
@@ -264,7 +264,7 @@
try {
addRunData(new RunData(record));
} catch (NumberFormatException e) {
- e.printStackTrace();
+ //e.printStackTrace();
}
}
}
Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java
=============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java (original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java Tue Jan 26 18:16:25 2016
@@ -243,7 +243,7 @@
// Check that there is at least one file format enabled for filtering.
if (this.config.getFileFormats().isEmpty()) {
- throw new IllegalStateException("At least one file format must be provided with the -f switch.");
+ throw new IllegalStateException("At least one file format must be provided with the -o switch.");
}
LOGGER.info("Done parsing command line options.");
Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java
=============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java (original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java Tue Jan 26 18:16:25 2016
@@ -29,6 +29,8 @@
*
* @author Jeremy McCormick, SLAC
*/
+// TODO: add physics events count
+// TODO: remove trigger rate and TI time offset
final class EvioMetadataReader implements FileMetadataReader {
/**
@@ -56,6 +58,7 @@
public Map<String, Object> getMetadata(final File file) throws IOException {
long events = 0;
+ int physicsEvents = 0;
int badEvents = 0;
int blinded = 0;
Long run = null;
@@ -63,7 +66,7 @@
Integer lastHeadTimestamp = null;
Integer lastPhysicsEvent = null;
Integer firstPhysicsEvent = null;
- double triggerRate = 0;
+ Double triggerRate = null;
long lastTI = 0;
long minTIDelta = 0;
long maxTIDelta = 0;
@@ -188,6 +191,8 @@
LOGGER.finer("set first physics event " + firstPhysicsEvent);
}
}
+
+ ++physicsEvents;
}
// Count trigger types for this event.
@@ -201,7 +206,8 @@
// Activate TI time offset processor.
tiProcessor.process(evioEvent);
- } catch (IOException | EvioException e) {
+ //} catch (IOException | NegativeArraySizeException | EvioException e) {
+ } catch (Exception e) {
// Trap event processing errors.
badEvents++;
LOGGER.warning("error processing EVIO event " + evioEvent.getEventNumber());
@@ -237,6 +243,14 @@
// Create and fill the metadata map.
final Map<String, Object> metadataMap = new LinkedHashMap<String, Object>();
+ try {
+ if (run == null) {
+ run = new Long(EvioFileUtilities.getRunFromName(file));
+ }
+ } catch (Exception e) {
+ throw new RuntimeException("Unable to determine run number from data or file name.", e);
+ }
+
// Set built-in system metadata.
metadataMap.put("runMin", run);
metadataMap.put("runMax", run);
@@ -251,28 +265,56 @@
metadataMap.put("BLINDED", blinded);
// First and last timestamps which may come from control or physics events.
- metadataMap.put("FIRST_HEAD_TIMESTAMP", firstHeadTimestamp);
- metadataMap.put("LAST_HEAD_TIMESTAMP", lastHeadTimestamp);
+ if (firstHeadTimestamp != null) {
+ metadataMap.put("FIRST_HEAD_TIMESTAMP", firstHeadTimestamp);
+ } else {
+ metadataMap.put("FIRST_HEAD_TIMESTAMP", 0L);
+ }
+
+ if (lastHeadTimestamp != null) {
+ metadataMap.put("LAST_HEAD_TIMESTAMP", lastHeadTimestamp);
+ } else {
+ metadataMap.put("LAST_HEAD_TIMESTAMP", 0L);
+ }
// First and last physics event numbers.
- metadataMap.put("FIRST_PHYSICS_EVENT", firstPhysicsEvent);
- metadataMap.put("LAST_PHYSICS_EVENT", lastPhysicsEvent);
+ if (firstPhysicsEvent != null) {
+ metadataMap.put("FIRST_PHYSICS_EVENT", firstPhysicsEvent);
+ } else {
+ metadataMap.put("FIRST_PHYSICS_EVENT", 0L);
+ }
+
+ if (lastPhysicsEvent != null) {
+ metadataMap.put("LAST_PHYSICS_EVENT", lastPhysicsEvent);
+ } else {
+ metadataMap.put("LAST_PHYSICS_EVENT", 0L);
+ }
// TI times and offset.
metadataMap.put("FIRST_TI_TIME", firstTI);
metadataMap.put("LAST_TI_TIME", lastTI);
metadataMap.put("TI_TIME_DELTA", maxTIDelta - minTIDelta);
- // TI time offset (stored as string because of bug in MySQL datacat backend).
- metadataMap.put("TI_TIME_OFFSET", tiProcessor.getTiTimeOffset());
+ // TI time offset.
+ //metadataMap.put("TI_TIME_OFFSET", tiProcessor.getTiTimeOffset());
// Event counts.
metadataMap.put("BAD_EVENTS", badEvents);
- // Trigger rate in KHz.
- DecimalFormat df = new DecimalFormat("#.##");
- df.setRoundingMode(RoundingMode.CEILING);
- metadataMap.put("TRIGGER_RATE", Double.parseDouble(df.format(triggerRate)));
+ // Physics event count.
+ metadataMap.put("PHYSICS_EVENTS", physicsEvents);
+
+ // Trigger rate in Hz to 2 decimal places.
+ /*
+ if (triggerRate != null && !Double.isInfinite(triggerRate) && !Double.isNaN(triggerRate)) {
+ DecimalFormat df = new DecimalFormat("#.##");
+ df.setRoundingMode(RoundingMode.CEILING);
+ LOGGER.info("setting trigger rate " + triggerRate);
+ metadataMap.put("TRIGGER_RATE", Double.parseDouble(df.format(triggerRate)));
+ } else {
+ metadataMap.put("TRIGGER_RATE", 0);
+ }
+ */
// Trigger type counts.
for (Entry<TriggerType, Integer> entry : triggerCounts.entrySet()) {
Modified: java/branches/jeremy-dev/logging/src/main/resources/org/hps/logging/config/test_logging.properties
=============================================================================
--- java/branches/jeremy-dev/logging/src/main/resources/org/hps/logging/config/test_logging.properties (original)
+++ java/branches/jeremy-dev/logging/src/main/resources/org/hps/logging/config/test_logging.properties Tue Jan 26 18:16:25 2016
@@ -59,7 +59,7 @@
org.hps.record.evio.level = WARNING
org.hps.record.scalers.level = WARNING
org.hps.record.triggerbank.level = WARNING
-org.hps.record.svt.level = WARNING
+org.hps.record.svt.level = INFO
# tracking
org.hps.recon.tracking.level = WARNING
Copied: java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/daqconfig/TriggerConfigEvioProcessor.java (from r4065, java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/daqconfig/DAQConfigEvioProcessor.java)
=============================================================================
--- java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/daqconfig/DAQConfigEvioProcessor.java (original)
+++ java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/daqconfig/TriggerConfigEvioProcessor.java Tue Jan 26 18:16:25 2016
@@ -8,6 +8,8 @@
import org.hps.record.evio.EvioBankTag;
import org.hps.record.evio.EvioEventProcessor;
import org.hps.record.evio.EvioEventUtilities;
+import org.hps.record.triggerbank.TriggerConfigData;
+import org.hps.record.triggerbank.TriggerConfigData.Crate;
import org.jlab.coda.jevio.BaseStructure;
import org.jlab.coda.jevio.EvioEvent;
@@ -19,19 +21,13 @@
*
* @author Jeremy McCormick, SLAC
*/
-public class DAQConfigEvioProcessor extends EvioEventProcessor {
+public class TriggerConfigEvioProcessor extends EvioEventProcessor {
- private Logger LOGGER = Logger.getLogger(DAQConfigEvioProcessor.class.getPackage().getName());
-
- private DAQConfig daqConfig = null;
-
- private Map<Integer, String> stringData = new HashMap<Integer, String>();
-
+ private Logger LOGGER = Logger.getLogger(TriggerConfigEvioProcessor.class.getPackage().getName());
+
+ private TriggerConfigData triggerConfig = null;
private Integer run = null;
-
- private int timestamp;
-
- private int currentTimestamp;
+ private int timestamp = 0;
/**
* Process EVIO events to extract DAQ config data.
@@ -55,22 +51,13 @@
BaseStructure headBank = EvioEventUtilities.getHeadBank(evioEvent);
if (headBank != null) {
if (headBank.getIntData()[3] != 0) {
- currentTimestamp = headBank.getIntData()[3];
- LOGGER.finest("set timestamp " + currentTimestamp + " from head bank");
+ timestamp = headBank.getIntData()[3];
+ LOGGER.finest("set timestamp " + timestamp + " from head bank");
}
}
-
+
// Parse config data from the EVIO banks.
- EvioDAQParser evioParser = parseEvioData(evioEvent);
-
- // Was there a valid config created from the EVIO event?
- if (evioParser != null) {
- // Set the current DAQ config object.
- ConfigurationManager.updateConfiguration(evioParser);
- daqConfig = ConfigurationManager.getInstance();
- timestamp = currentTimestamp;
- }
-
+ parseEvioData(evioEvent);
}
} catch (Exception e) {
LOGGER.log(Level.WARNING, "Error parsing DAQ config from EVIO.", e);
@@ -83,9 +70,8 @@
* @param evioEvent the EVIO event
* @return a parser object if the event has valid config data; otherwise <code>null</code>
*/
- private EvioDAQParser parseEvioData(EvioEvent evioEvent) {
- EvioDAQParser parser = null;
- int configBanks = 0;
+ private void parseEvioData(EvioEvent evioEvent) {
+ Map<Crate, String> stringData = null;
for (BaseStructure bank : evioEvent.getChildrenList()) {
if (bank.getChildCount() <= 0) {
continue;
@@ -97,57 +83,35 @@
LOGGER.warning("Trigger config bank is missing string data.");
} else {
try {
- if (parser == null) {
- parser = new EvioDAQParser();
- stringData.clear();
+ if (stringData == null) {
+ stringData = new HashMap<Crate, String>();
}
- LOGGER.fine("raw string data" + subBank.getStringData()[0]);
- stringData.put(crate, subBank.getStringData()[0]);
- LOGGER.info("Parsing DAQ config from crate " + crate + ".");
- parser.parse(crate, run, subBank.getStringData());
- ++configBanks;
+ //LOGGER.fine("got raw trigger config string data ..." + '\n' + subBank.getStringData()[0]);
+ stringData.put(TriggerConfigData.Crate.fromCrateNumber(crate), subBank.getStringData()[0]);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Failed to parse DAQ config.", e);
+ LOGGER.log(Level.WARNING, "Failed to parse crate " + crate + " config.", e);
}
}
}
}
}
- if (configBanks >= 4 || parser == null) {
- if (parser != null) {
- LOGGER.info("DAQ config was created from event " + evioEvent.getEventNumber() + " with " + configBanks + " banks.");
+ if (stringData != null) {
+ TriggerConfigData currentConfig = new TriggerConfigData(stringData, timestamp);
+ if (currentConfig.isValid()) {
+ triggerConfig = currentConfig;
+ LOGGER.warning("Found valid config in event num " + evioEvent.getEventNumber());
+ } else {
+ LOGGER.warning("Skipping invalid config from event num " + evioEvent.getEventNumber());
}
- return parser;
- } else {
- LOGGER.warning("Not enough banks were found to build DAQ config.");
- return null;
}
}
-
- /**
- * Get the DAQ config.
- *
- * @return the DAQ config
- */
- public DAQConfig getDAQConfig() {
- return this.daqConfig;
- }
-
+
/**
* Get a map of bank number to string data for the current config.
*
* @return a map of bank to trigger config data
*/
- public Map<Integer, String> getTriggerConfigData() {
- return this.stringData;
- }
-
- /**
- * Get the timestamp associated with the config.
- *
- * @return the timestamp
- */
- public int getTimestamp() {
- return timestamp;
+ public TriggerConfigData getTriggerConfigData() {
+ return this.triggerConfig;
}
}
Modified: java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/scalers/ScalerUtilities.java
=============================================================================
--- java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/scalers/ScalerUtilities.java (original)
+++ java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/scalers/ScalerUtilities.java Tue Jan 26 18:16:25 2016
@@ -74,7 +74,6 @@
// [67]/[68] = CLOCK
final double clock = (double) clockGated / (double) clockUngated;
- // Compute the live times.
final double[] liveTimes = new double[3];
liveTimes[LiveTimeIndex.FCUP_TDC.ordinal()] = fcupTdc;
liveTimes[LiveTimeIndex.FCUP_TRG.ordinal()] = fcupTrg;
Modified: java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/svt/SvtConfigEvioProcessor.java
=============================================================================
--- java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/svt/SvtConfigEvioProcessor.java (original)
+++ java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/svt/SvtConfigEvioProcessor.java Tue Jan 26 18:16:25 2016
@@ -58,12 +58,18 @@
config = new SvtConfigData(timestamp);
}
if (stringData.length > 0) {
+ System.out.println("found string data with length " + stringData.length);
+ for (int i = 0; i < stringData.length; i++) {
+ System.out.println("Printing raw string data " + i + " ...");
+ System.out.println(stringData[i]);
+ System.out.println("End print raw string data");
+ }
if (!stringData[0].trim().isEmpty()) {
- LOGGER.info("Adding SVT config data with len " + stringData[0].length() + " ..." + '\n' + stringData[0]);
+ LOGGER.info("Adding SVT config data with len " + stringData[0].length() + " ..." + '\n' + stringData[0]);
config.setData(RocTag.fromTag(bank.getHeader().getTag()), stringData[0]);
++configBanks;
} else {
- LOGGER.warning("String data has no XML content.");
+ LOGGER.warning("String data has no content.");
}
} else {
LOGGER.warning("String data has zero len.");
@@ -73,7 +79,7 @@
}
}
}
- }
+ }
if (config != null) {
LOGGER.info("Adding SVT config " + evioEvent.getEventNumber() + " with " + configBanks
+ " banks and timestamp " + timestamp + " from event " + evioEvent.getEventNumber());
Copied: java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/triggerbank/TriggerConfigData.java (from r4065, java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfig.java)
=============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfig.java (original)
+++ java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/triggerbank/TriggerConfigData.java Tue Jan 26 18:16:25 2016
@@ -1,4 +1,4 @@
-package org.hps.run.database;
+package org.hps.record.triggerbank;
import java.util.Map;
import java.util.Map.Entry;
@@ -12,27 +12,40 @@
*
* @author Jeremy McCormick, SLAC
*/
-final class TriggerConfig {
+public final class TriggerConfigData {
+
+ public enum Crate {
+ CONFIG1(37),
+ CONFIG2(39),
+ CONFIG3(46),
+ CONFIG4(58);
+
+ private int crate;
+
+ private Crate(int crate) {
+ this.crate = crate;
+ }
+
+ public int crate() {
+ return crate;
+ }
+
+ public static Crate fromCrateNumber(int crateNumber) {
+ for (Crate crate : Crate.values()) {
+ if (crate.crate() == crateNumber) {
+ return crate;
+ }
+ }
+ return null;
+ }
+ }
+
+ private int timestamp;
+ private Map<Crate, String> data;
- /**
- * Expected number of string banks in trigger config.
- */
- static final int DATA_LENGTH = 4;
-
- /*
- * Mapping of trigger config database fields to their crate numbers.
- */
- static final int CONFIG1 = 37;
- static final int CONFIG2 = 39;
- static final int CONFIG3 = 46;
- static final int CONFIG4 = 58;
-
- private int timestamp;
- private Map<Integer, String> data;
-
- TriggerConfig(Map<Integer, String> data, int timestamp) {
+ public TriggerConfigData(Map<Crate, String> data, int timestamp) {
if (data == null) {
- throw new RuntimeException("The data is null.");
+ throw new RuntimeException("The data map is null.");
}
this.data = data;
this.timestamp = timestamp;
@@ -43,28 +56,38 @@
*
* @return the config's timestamp
*/
- int getTimestamp() {
+ public int getTimestamp() {
return timestamp;
}
/**
- * Get the config data as a map from bank numbers to strings.
+ * Get the config data as a map from crates to strings.
*
* @return the config data
*/
- Map<Integer, String> getData() {
+ public Map<Crate, String> getData() {
return data;
}
/**
- * Return <code>true</code> if the config is valid which means it has
- * four, non-null string data banks.
+ * Return <code>true</code> if the config is valid which means it has four string banks with the correct crate
+ * numbers and non-null data strings.
*
* @return <code>true</code> if config is valid
*/
- boolean isValid() {
- return data.size() == DATA_LENGTH && data.get(CONFIG1) != null && data.get(CONFIG2) != null
- && data.get(CONFIG3) != null && data.get(CONFIG4) != null;
+ public boolean isValid() {
+ if (data.size() != Crate.values().length) {
+ return false;
+ }
+ for (Crate crate : Crate.values()) {
+ if (!data.containsKey(crate)) {
+ return false;
+ }
+ if (data.get(crate) == null) {
+ return false;
+ }
+ }
+ return true;
}
/**
@@ -73,10 +96,10 @@
* @param the run number (needed by configuration manager)
* @return the DAQ config object
*/
- DAQConfig loadDAQConfig(int run) {
+ public DAQConfig loadDAQConfig(int run) {
EvioDAQParser parser = new EvioDAQParser();
- for (Entry<Integer, String> entry : data.entrySet()) {
- parser.parse(entry.getKey(), run, new String[] {entry.getValue()});
+ for (Entry<Crate, String> entry : data.entrySet()) {
+ parser.parse(entry.getKey().crate(), run, new String[] {entry.getValue()});
}
ConfigurationManager.updateConfiguration(parser);
return ConfigurationManager.getInstance();
Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java
=============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java (original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java Tue Jan 26 18:16:25 2016
@@ -152,7 +152,7 @@
final List<EpicsVariable> variables = epicsVariableDao.getEpicsVariables(epicsType);
selectEpicsData = connection.prepareStatement("SELECT * FROM " + epicsType.getTableName()
+ " LEFT JOIN epics_headers ON " + epicsType.getTableName() + ".epics_header_id = epics_headers.id"
- + " WHERE epics_headers.run = ?");
+ + " WHERE epics_headers.run = ? ORDER BY epics_headers.sequence");
selectEpicsData.setInt(1, run);
ResultSet resultSet = selectEpicsData.executeQuery();
while (resultSet.next()) {
Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java
=============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java (original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java Tue Jan 26 18:16:25 2016
@@ -11,12 +11,11 @@
import java.util.logging.Logger;
import org.hps.conditions.database.ConnectionParameters;
-import org.hps.conditions.database.DatabaseConditionsManager;
import org.hps.conditions.run.RunSpreadsheet;
import org.hps.conditions.run.RunSpreadsheet.RunData;
import org.hps.record.AbstractRecordProcessor;
import org.hps.record.daqconfig.DAQConfig;
-import org.hps.record.daqconfig.DAQConfigEvioProcessor;
+import org.hps.record.daqconfig.TriggerConfigEvioProcessor;
import org.hps.record.epics.EpicsData;
import org.hps.record.epics.EpicsRunProcessor;
import org.hps.record.evio.EventTagConstant;
@@ -28,16 +27,15 @@
import org.hps.record.scalers.ScalerUtilities;
import org.hps.record.scalers.ScalerUtilities.LiveTimeIndex;
import org.hps.record.scalers.ScalersEvioProcessor;
-import org.hps.record.svt.SvtConfigData;
-import org.hps.record.svt.SvtConfigEvioProcessor;
import org.hps.record.triggerbank.AbstractIntData.IntBankDefinition;
import org.hps.record.triggerbank.HeadBankData;
import org.hps.record.triggerbank.TiTimeOffsetEvioProcessor;
+import org.hps.record.triggerbank.TriggerConfigData;
+import org.hps.record.triggerbank.TriggerConfigData.Crate;
import org.jlab.coda.jevio.BaseStructure;
import org.jlab.coda.jevio.EvioEvent;
import org.jlab.coda.jevio.EvioException;
import org.jlab.coda.jevio.EvioReader;
-import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
import org.srs.datacat.client.Client;
import org.srs.datacat.model.DatasetModel;
import org.srs.datacat.model.DatasetResultSetModel;
@@ -119,12 +117,12 @@
/**
* List of SVT configuration bank data.
*/
- private List<SvtConfigData> svtConfigs;
+ //private List<SvtConfigData> svtConfigs;
/**
* The trigger config object.
*/
- private TriggerConfig config;
+ private TriggerConfigData config;
/**
* Reload run data after insert for debugging.
@@ -164,8 +162,8 @@
List<ScalerData> scalerData = runManager.getScalerData();
LOGGER.info("loaded " + scalerData.size() + " scaler records");
- List<SvtConfigData> svtConfigs = runManager.getSvtConfigData();
- LOGGER.info("loaded " + svtConfigs.size() + " SVT configurations");
+ //List<SvtConfigData> svtConfigs = runManager.getSvtConfigData();
+ //LOGGER.info("loaded " + svtConfigs.size() + " SVT configurations");
LOGGER.info("printing DAQ config ...");
DAQConfig daqConfig = runManager.getDAQConfig();
@@ -262,7 +260,7 @@
runFactory.getRunSummaryDao().insertRunSummary(runSummary);
// Insert the EPICS data.
- if (epicsData != null) {
+ if (epicsData != null && !epicsData.isEmpty()) {
LOGGER.info("inserting EPICS data");
runFactory.getEpicsDataDao().insertEpicsData(epicsData);
} else {
@@ -278,19 +276,19 @@
}
// Insert SVT config data.
- if (this.svtConfigs != null) {
- LOGGER.info("inserting SVT config");
- runFactory.getSvtConfigDao().insertSvtConfigs(svtConfigs, getRun());
- } else {
- LOGGER.warning("no SVT config to insert");
- }
+ //if (this.svtConfigs != null) {
+ // LOGGER.info("inserting SVT config");
+ // runFactory.getSvtConfigDao().insertSvtConfigs(svtConfigs, getRun());
+ //} else {
+ // LOGGER.warning("no SVT config to insert");
+ //}
// Insert trigger config data.
if (this.config != null) {
LOGGER.info("inserting trigger config");
runFactory.getTriggerConfigDao().insertTriggerConfig(config, getRun());
} else {
- LOGGER.warning("no trigger config to inesrt");
+ LOGGER.warning("no trigger config to insert");
}
LOGGER.info("done inserting run " + getRun());
@@ -311,6 +309,7 @@
} else {
LOGGER.info("no scaler data");
}
+ /*
if (svtConfigs != null) {
for (SvtConfigData config : svtConfigs) {
try {
@@ -322,8 +321,9 @@
} else {
LOGGER.info("no SVT config");
}
+ */
if (config != null) {
- for (Entry<Integer, String> entry : config.getData().entrySet()) {
+ for (Entry<Crate, String> entry : config.getData().entrySet()) {
LOGGER.info("trigger config data " + entry.getKey() + " with timestamp " + config.getTimestamp() + " ..." + entry.getValue());
}
} else {
@@ -343,6 +343,7 @@
}
// Initialize the conditions system because the DAQ config processor needs it.
+ /*
try {
DatabaseConditionsManager dbManager = DatabaseConditionsManager.getInstance();
DatabaseConditionsManager.getInstance().setDetector(detectorName, runSummary.getRun());
@@ -350,6 +351,7 @@
} catch (ConditionsNotFoundException e) {
throw new RuntimeException(e);
}
+ */
// List of processors to execute in the job.
ArrayList<AbstractRecordProcessor<EvioEvent>> processors = new ArrayList<AbstractRecordProcessor<EvioEvent>>();
@@ -368,12 +370,12 @@
processors.add(tiProcessor);
// Processor for getting DAQ config.
- DAQConfigEvioProcessor daqProcessor = new DAQConfigEvioProcessor();
+ TriggerConfigEvioProcessor daqProcessor = new TriggerConfigEvioProcessor();
processors.add(daqProcessor);
// Processor for getting the SVT XML config.
- SvtConfigEvioProcessor svtProcessor = new SvtConfigEvioProcessor();
- processors.add(svtProcessor);
+ //SvtConfigEvioProcessor svtProcessor = new SvtConfigEvioProcessor();
+ //processors.add(svtProcessor);
// Run the job using the EVIO loop.
EvioLoop loop = new EvioLoop();
@@ -398,12 +400,10 @@
scalerData = scalersProcessor.getScalerData();
// Set SVT config data strings.
- svtConfigs = svtProcessor.getSvtConfigs();
+ //svtConfigs = svtProcessor.getSvtConfigs();
// Set trigger config object.
- if (!daqProcessor.getTriggerConfigData().isEmpty()) {
- config = new TriggerConfig(daqProcessor.getTriggerConfigData(), daqProcessor.getTimestamp());
- }
+ config = daqProcessor.getTriggerConfigData();
LOGGER.info("done processing EVIO files");
}
@@ -558,6 +558,12 @@
return this;
}
+ /**
+ * Set the datacat site.
+ *
+ * @param site the datacat site
+ * @return this object
+ */
RunDatabaseBuilder setSite(String site) {
this.site = site;
return this;
@@ -734,16 +740,17 @@
private void updateLivetimes(ScalersEvioProcessor scalersProcessor) {
LOGGER.fine("updating livetime calculations");
ScalerData scalers = scalersProcessor.getCurrentScalerData();
- if (scalers == null) {
- throw new IllegalStateException("No scaler data was found by the EVIO processor.");
- }
- double[] livetimes = ScalerUtilities.getLiveTimes(scalers);
- runSummary.setLivetimeClock(livetimes[LiveTimeIndex.CLOCK.ordinal()]);
- runSummary.setLivetimeFcupTdc(livetimes[LiveTimeIndex.FCUP_TDC.ordinal()]);
- runSummary.setLivetimeFcupTrg(livetimes[LiveTimeIndex.FCUP_TRG.ordinal()]);
- LOGGER.info("clock livetime = " + runSummary.getLivetimeClock());
- LOGGER.info("fcup tdc livetime = " + runSummary.getLivetimeFcupTdc());
- LOGGER.info("fcup trg livetime = " + runSummary.getLivetimeFcupTrg());
+ if (scalers != null) {
+ double[] livetimes = ScalerUtilities.getLiveTimes(scalers);
+ runSummary.setLivetimeClock(livetimes[LiveTimeIndex.CLOCK.ordinal()]);
+ runSummary.setLivetimeFcupTdc(livetimes[LiveTimeIndex.FCUP_TDC.ordinal()]);
+ runSummary.setLivetimeFcupTrg(livetimes[LiveTimeIndex.FCUP_TRG.ordinal()]);
+ LOGGER.info("clock livetime = " + runSummary.getLivetimeClock());
+ LOGGER.info("fcup tdc livetime = " + runSummary.getLivetimeFcupTdc());
+ LOGGER.info("fcup trg livetime = " + runSummary.getLivetimeFcupTrg());
+ } else {
+ LOGGER.warning("Could not calculate livetimes; no scaler data was found by the EVIO processor.");
+ }
}
/**
Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunManager.java
=============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunManager.java (original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunManager.java Tue Jan 26 18:16:25 2016
@@ -10,6 +10,7 @@
import org.hps.record.epics.EpicsData;
import org.hps.record.scalers.ScalerData;
import org.hps.record.svt.SvtConfigData;
+import org.hps.record.triggerbank.TriggerConfigData;
import org.lcsim.conditions.ConditionsEvent;
import org.lcsim.conditions.ConditionsListener;
@@ -220,7 +221,7 @@
*/
public DAQConfig getDAQConfig() {
this.checkRunNumber();
- TriggerConfig config = factory.getTriggerConfigDao().getTriggerConfig(run);
+ TriggerConfigData config = factory.getTriggerConfigDao().getTriggerConfig(run);
return config.loadDAQConfig(run);
}
Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java
=============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java (original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java Tue Jan 26 18:16:25 2016
@@ -51,13 +51,6 @@
RunSummaryDaoImpl(final Connection connection) {
if (connection == null) {
throw new IllegalArgumentException("The connection is null.");
- }
- try {
- if (connection.isClosed()) {
- throw new IllegalArgumentException("The connection is closed.");
- }
- } catch (SQLException e) {
- throw new RuntimeException(e);
}
this.connection = connection;
}
Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/ScalerDataDaoImpl.java
=============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/ScalerDataDaoImpl.java (original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/ScalerDataDaoImpl.java Tue Jan 26 18:16:25 2016
@@ -96,7 +96,8 @@
PreparedStatement selectScalers = null;
final List<ScalerData> scalerDataList = new ArrayList<ScalerData>();
try {
- selectScalers = this.connection.prepareStatement("SELECT * FROM scalers WHERE run = ? ORDER BY event");
+ selectScalers = this.connection.prepareStatement("SELECT * FROM scalers WHERE run = ? ORDER BY event");
selectScalers.setInt(1, run);
final ResultSet resultSet = selectScalers.executeQuery();
while (resultSet.next()) {
Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigDao.java
=============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigDao.java (original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigDao.java Tue Jan 26 18:16:25 2016
@@ -1,7 +1,9 @@
package org.hps.run.database;
+import org.hps.record.triggerbank.TriggerConfigData;
+
/**
- * Database interface for getting raw trigger config data.
+ * Database interface for getting raw trigger config data and inserting into run db.
*
* @author Jeremy McCormick, SLAC
*/
@@ -13,7 +15,7 @@
* @param run the run number
* @return the trigger config
*/
- TriggerConfig getTriggerConfig(int run);
+ TriggerConfigData getTriggerConfig(int run);
/**
* Insert a trigger config.
@@ -21,7 +23,7 @@
* @param config the trigger config
* @param run the run number
*/
- void insertTriggerConfig(TriggerConfig config, int run);
+ void insertTriggerConfig(TriggerConfigData config, int run);
/**
* Delete a trigger config by run.
Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigDaoImpl.java
=============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigDaoImpl.java (original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigDaoImpl.java Tue Jan 26 18:16:25 2016
@@ -7,6 +7,9 @@
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.Map;
+
+import org.hps.record.triggerbank.TriggerConfigData;
+import org.hps.record.triggerbank.TriggerConfigData.Crate;
final class TriggerConfigDaoImpl implements TriggerConfigDao {
@@ -37,7 +40,7 @@
@Override
- public void insertTriggerConfig(TriggerConfig config, int run) {
+ public void insertTriggerConfig(TriggerConfigData config, int run) {
if (!config.isValid()) {
throw new RuntimeException("The trigger config is not valid.");
}
@@ -46,14 +49,14 @@
preparedStatement = connection.prepareStatement(INSERT);
preparedStatement.setInt(1, run);
preparedStatement.setInt(2, config.getTimestamp());
- Map<Integer, String> data = config.getData();
- if (data.size() != TriggerConfig.DATA_LENGTH) {
+ Map<Crate, String> data = config.getData();
+ if (data.size() != TriggerConfigData.Crate.values().length) {
throw new IllegalArgumentException("The trigger config data has the wrong length.");
}
- preparedStatement.setBytes(3, data.get(TriggerConfig.CONFIG1).getBytes());
- preparedStatement.setBytes(4, data.get(TriggerConfig.CONFIG2).getBytes());
- preparedStatement.setBytes(5, data.get(TriggerConfig.CONFIG3).getBytes());
- preparedStatement.setBytes(6, data.get(TriggerConfig.CONFIG4).getBytes());
+ preparedStatement.setBytes(3, data.get(TriggerConfigData.Crate.CONFIG1).getBytes());
+ preparedStatement.setBytes(4, data.get(TriggerConfigData.Crate.CONFIG2).getBytes());
+ preparedStatement.setBytes(5, data.get(TriggerConfigData.Crate.CONFIG3).getBytes());
+ preparedStatement.setBytes(6, data.get(TriggerConfigData.Crate.CONFIG4).getBytes());
preparedStatement.executeUpdate();
} catch (SQLException e) {
throw new RuntimeException(e);
@@ -87,33 +90,33 @@
}
@Override
- public TriggerConfig getTriggerConfig(int run) {
+ public TriggerConfigData getTriggerConfig(int run) {
PreparedStatement preparedStatement = null;
- TriggerConfig config = null;
+ TriggerConfigData config = null;
try {
preparedStatement = connection.prepareStatement(SELECT);
preparedStatement.setInt(1, run);
ResultSet resultSet = preparedStatement.executeQuery();
if (resultSet.next()) {
- Map<Integer, String> data = new LinkedHashMap<Integer, String>();
+ Map<Crate, String> data = new LinkedHashMap<Crate, String>();
int timestamp = resultSet.getInt("timestamp");
Clob clob = resultSet.getClob("config1");
if (clob != null) {
- data.put(TriggerConfig.CONFIG1, clob.getSubString(1, (int) clob.length()));
+ data.put(TriggerConfigData.Crate.CONFIG1, clob.getSubString(1, (int) clob.length()));
}
clob = resultSet.getClob("config2");
if (clob != null) {
- data.put(TriggerConfig.CONFIG2, clob.getSubString(1, (int) clob.length()));
+ data.put(TriggerConfigData.Crate.CONFIG2, clob.getSubString(1, (int) clob.length()));
}
clob = resultSet.getClob("config3");
if (clob != null) {
- data.put(TriggerConfig.CONFIG3, clob.getSubString(1, (int) clob.length()));
+ data.put(TriggerConfigData.Crate.CONFIG3, clob.getSubString(1, (int) clob.length()));
}
clob = resultSet.getClob("config4");
if (clob != null) {
- data.put(TriggerConfig.CONFIG4, clob.getSubString(1, (int) clob.length()));
+ data.put(TriggerConfigData.Crate.CONFIG4, clob.getSubString(1, (int) clob.length()));
}
- config = new TriggerConfig(data, timestamp);
+ config = new TriggerConfigData(data, timestamp);
}
} catch (SQLException e) {
throw new RuntimeException(e);
Modified: java/branches/jeremy-dev/steering-files/src/main/resources/org/hps/steering/production/DataQualityRecon.lcsim
=============================================================================
--- java/branches/jeremy-dev/steering-files/src/main/resources/org/hps/steering/production/DataQualityRecon.lcsim (original)
+++ java/branches/jeremy-dev/steering-files/src/main/resources/org/hps/steering/production/DataQualityRecon.lcsim Tue Jan 26 18:16:25 2016
@@ -15,43 +15,53 @@
<driver name="FinalStateMonitoring"/>
<driver name="V0Monitoring"/>
<driver name="TridentMonitoring"/>
-<!-- Singles0 -->
+ <driver name="TrackingMonitoringGBL"/>
+ <driver name="FinalStateMonitoringGBL"/>
+ <driver name="V0MonitoringGBL"/>
+ <driver name="TridentMonitoringGBL"/>
+ <!-- Singles0 -->
<driver name="EcalMonitoringSingles0"/>
-<!-- <driver name="SVTMonitoringSingles0"/> -->
<driver name="TrackingMonitoringSingles0"/>
+ <driver name="TrackingMonitoringGBLSingles0"/>
<driver name="TrackingResidualsSingles0"/>
<driver name="FinalStateMonitoringSingles0"/>
<driver name="V0MonitoringSingles0"/>
<driver name="TridentMonitoringSingles0"/>
-<!-- Singles1 -->
+ <!-- Singles1 -->
<driver name="EcalMonitoringSingles1"/>
- <!-- <driver name="SVTMonitoringSingles1"/> -->
<driver name="TrackingMonitoringSingles1"/>
+ <driver name="TrackingMonitoringGBLSingles1"/>
<driver name="TrackingResidualsSingles1"/>
<driver name="FinalStateMonitoringSingles1"/>
<driver name="V0MonitoringSingles1"/>
<driver name="TridentMonitoringSingles1"/>
-<!-- Pairs0 -->
+ <driver name="FinalStateMonitoringGBLSingles1"/>
+ <driver name="V0MonitoringGBLSingles1"/>
+ <driver name="TridentMonitoringGBLSingles1"/>
+ <!-- Pairs0 -->
<driver name="EcalMonitoringPairs0"/>
- <!-- <driver name="SVTMonitoringPairs0"/> -->
<driver name="TrackingMonitoringPairs0"/>
+ <driver name="TrackingMonitoringGBLPairs0"/>
<driver name="TrackingResidualsPairs0"/>
<driver name="FinalStateMonitoringPairs0"/>
<driver name="V0MonitoringPairs0"/>
<driver name="TridentMonitoringPairs0"/>
-<!-- Pairs1 -->
+ <!-- Pairs1 -->
<driver name="EcalMonitoringPairs1"/>
- <!-- <driver name="SVTMonitoringPairs1"/> -->
<driver name="TrackingMonitoringPairs1"/>
+ <driver name="TrackingMonitoringGBLPairs1"/>
<driver name="TrackingResidualsPairs1"/>
- <driver name="FinalStateMonitoringPairs1"/>
+ <driver name="FinalStateMonitoringPairs1"/>
<driver name="V0MonitoringPairs1"/>
- <driver name="TridentMonitoringPairs1"/>
+ <driver name="TridentMonitoringPairs1"/>
+ <driver name="FinalStateMonitoringGBLPairs1"/>
+ <driver name="V0MonitoringGBLPairs1"/>
+ <driver name="TridentMonitoringGBLPairs1"/>
+ <!-- -->
<driver name="AidaSaveDriver"/>
<driver name="CleanupDriver"/>
</execute>
<drivers>
- <!-- <driver name="DQMDatabaseDriver" type="org.hps.analysis.dataquality.DQMDatabaseDriver"/> -->
<driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
<eventInterval>1000</eventInterval>
</driver>
@@ -59,10 +69,10 @@
<readoutCollections>SVTRawTrackerHits</readoutCollections>
</driver>
<driver name="AidaSaveDriver" type="org.lcsim.job.AidaSaveDriver">
- <outputFileName>${outputFile}.root</outputFileName>
- </driver>
-
-<!-- all triggers -->
+ <outputFileName>${outputFile}</outputFileName>
+ </driver>
+
+ <!-- all triggers -->
<driver name="EcalMonitoring" type="org.hps.analysis.dataquality.EcalMonitoring">
<triggerType>all</triggerType>
</driver>
@@ -70,9 +80,9 @@
<triggerType>all</triggerType>
</driver>
<driver name="TrackingMonitoring" type="org.hps.analysis.dataquality.TrackingMonitoring">
- <overwriteDB>false</overwriteDB>
- <triggerType>all</triggerType>
- </driver>
+ <triggerType>all</triggerType>
+ </driver>
+
<driver name="TrackingResiduals" type="org.hps.analysis.dataquality.TrackingResiduals">
<triggerType>all</triggerType>
</driver>
@@ -80,21 +90,40 @@
<triggerType>all</triggerType>
</driver>
<driver name="V0Monitoring" type="org.hps.analysis.dataquality.V0Monitoring">
- <triggerType>all</triggerType>
+ <triggerType>all</triggerType>
</driver>
<driver name="TridentMonitoring" type="org.hps.analysis.dataquality.TridentMonitoring">
<triggerType>all</triggerType>
</driver>
-
-<!-- singles0 triggers -->
+ <driver name="TrackingMonitoringGBL" type="org.hps.analysis.dataquality.TrackingMonitoring">
+ <trackCollectionName>GBLTracks</trackCollectionName>
+ <triggerType>all</triggerType>
+ </driver>
+ <driver name="FinalStateMonitoringGBL" type="org.hps.analysis.dataquality.FinalStateMonitoring">
+ <triggerType>all</triggerType>
+ <isGBL>true</isGBL>
+ </driver>
+ <driver name="V0MonitoringGBL" type="org.hps.analysis.dataquality.V0Monitoring">
+ <triggerType>all</triggerType>
+ <isGBL>true</isGBL>
+ </driver>
+ <driver name="TridentMonitoringGBL" type="org.hps.analysis.dataquality.TridentMonitoring">
+ <triggerType>all</triggerType>
+ <isGBL>true</isGBL>
+ </driver>
+
+ <!-- singles0 triggers -->
<driver name="EcalMonitoringSingles0" type="org.hps.analysis.dataquality.EcalMonitoring">
<triggerType>singles0</triggerType>
</driver>
<driver name="SVTMonitoringSingles0" type="org.hps.analysis.dataquality.SvtMonitoring">
<triggerType>singles0</triggerType>
</driver>
+ <driver name="TrackingMonitoringGBLSingles0" type="org.hps.analysis.dataquality.TrackingMonitoring">
+ <trackCollectionName>GBLTracks</trackCollectionName>
+ <triggerType>singles0</triggerType>
+ </driver>
<driver name="TrackingMonitoringSingles0" type="org.hps.analysis.dataquality.TrackingMonitoring">
- <overwriteDB>false</overwriteDB>
<triggerType>singles0</triggerType>
</driver>
<driver name="TrackingResidualsSingles0" type="org.hps.analysis.dataquality.TrackingResiduals">
@@ -104,21 +133,24 @@
<triggerType>singles0</triggerType>
</driver>
<driver name="V0MonitoringSingles0" type="org.hps.analysis.dataquality.V0Monitoring">
- <triggerType>singles0</triggerType>
+ <triggerType>singles0</triggerType>
</driver>
<driver name="TridentMonitoringSingles0" type="org.hps.analysis.dataquality.TridentMonitoring">
<triggerType>singles0</triggerType>
</driver>
-<!-- singles1 triggers -->
+ <!-- singles1 triggers -->
<driver name="EcalMonitoringSingles1" type="org.hps.analysis.dataquality.EcalMonitoring">
<triggerType>singles1</triggerType>
</driver>
<driver name="SVTMonitoringSingles1" type="org.hps.analysis.dataquality.SvtMonitoring">
<triggerType>singles1</triggerType>
- </driver>
- <driver name="TrackingMonitoringSingles1" type="org.hps.analysis.dataquality.TrackingMonitoring">
- <overwriteDB>false</overwriteDB>
+ </driver>
+ <driver name="TrackingMonitoringSingles1" type="org.hps.analysis.dataquality.TrackingMonitoring">
+ <triggerType>singles1</triggerType>
+ </driver>
+ <driver name="TrackingMonitoringGBLSingles1" type="org.hps.analysis.dataquality.TrackingMonitoring">
+ <trackCollectionName>GBLTracks</trackCollectionName>
<triggerType>singles1</triggerType>
</driver>
<driver name="TrackingResidualsSingles1" type="org.hps.analysis.dataquality.TrackingResiduals">
@@ -127,22 +159,37 @@
<driver name="FinalStateMonitoringSingles1" type="org.hps.analysis.dataquality.FinalStateMonitoring">
<triggerType>singles1</triggerType>
</driver>
- <driver name="V0MonitoringSingles1" type="org.hps.analysis.dataquality.V0Monitoring">
- <triggerType>singles1</triggerType>
+ <driver name="V0MonitoringSingles1" type="org.hps.analysis.dataquality.V0Monitoring">
+ <triggerType>singles1</triggerType>
</driver>
<driver name="TridentMonitoringSingles1" type="org.hps.analysis.dataquality.TridentMonitoring">
- <triggerType>singles1</triggerType>
- </driver>
-
-<!-- pairs0 triggers -->
+ <triggerType>singles1</triggerType>
+ </driver>
+ <driver name="FinalStateMonitoringGBLSingles1" type="org.hps.analysis.dataquality.FinalStateMonitoring">
+ <triggerType>singles1</triggerType>
+ <isGBL>true</isGBL>
+ </driver>
+ <driver name="V0MonitoringGBLSingles1" type="org.hps.analysis.dataquality.V0Monitoring">
+ <triggerType>singles1</triggerType>
+ <isGBL>true</isGBL>
+ </driver>
+ <driver name="TridentMonitoringGBLSingles1" type="org.hps.analysis.dataquality.TridentMonitoring">
+ <triggerType>singles1</triggerType>
+ <isGBL>true</isGBL>
+ </driver>
+
+ <!-- pairs0 triggers -->
<driver name="EcalMonitoringPairs0" type="org.hps.analysis.dataquality.EcalMonitoring">
<triggerType>pairs0</triggerType>
</driver>
<driver name="SVTMonitoringPairs0" type="org.hps.analysis.dataquality.SvtMonitoring">
<triggerType>pairs0</triggerType>
</driver>
- <driver name="TrackingMonitoringPairs0" type="org.hps.analysis.dataquality.TrackingMonitoring">
- <overwriteDB>false</overwriteDB>
+ <driver name="TrackingMonitoringPairs0" type="org.hps.analysis.dataquality.TrackingMonitoring">
+ <triggerType>pairs0</triggerType>
+ </driver>
+ <driver name="TrackingMonitoringGBLPairs0" type="org.hps.analysis.dataquality.TrackingMonitoring">
+ <trackCollectionName>GBLTracks</trackCollectionName>
<triggerType>pairs0</triggerType>
</driver>
<driver name="TrackingResidualsPairs0" type="org.hps.analysis.dataquality.TrackingResiduals">
@@ -152,23 +199,25 @@
<triggerType>pairs0</triggerType>
</driver>
<driver name="V0MonitoringPairs0" type="org.hps.analysis.dataquality.V0Monitoring">
- <triggerType>pairs0</triggerType>
+ <triggerType>pairs0</triggerType>
</driver>
<driver name="TridentMonitoringPairs0" type="org.hps.analysis.dataquality.TridentMonitoring">
<triggerType>pairs0</triggerType>
</driver>
-
-<!-- pairs1 triggers -->
+ <!-- pairs1 triggers -->
<driver name="EcalMonitoringPairs1" type="org.hps.analysis.dataquality.EcalMonitoring">
<triggerType>pairs1</triggerType>
</driver>
<driver name="SVTMonitoringPairs1" type="org.hps.analysis.dataquality.SvtMonitoring">
<triggerType>pairs1</triggerType>
</driver>
- <driver name="TrackingMonitoringPairs1" type="org.hps.analysis.dataquality.TrackingMonitoring">
- <overwriteDB>false</overwriteDB>
+ <driver name="TrackingMonitoringPairs1" type="org.hps.analysis.dataquality.TrackingMonitoring">
+ <triggerType>pairs1</triggerType>
+ </driver>
+ <driver name="TrackingMonitoringGBLPairs1" type="org.hps.analysis.dataquality.TrackingMonitoring">
+ <trackCollectionName>GBLTracks</trackCollectionName>
<triggerType>pairs1</triggerType>
</driver>
<driver name="TrackingResidualsPairs1" type="org.hps.analysis.dataquality.TrackingResiduals">
@@ -178,11 +227,24 @@
<triggerType>pairs1</triggerType>
</driver>
<driver name="V0MonitoringPairs1" type="org.hps.analysis.dataquality.V0Monitoring">
- <triggerType>pairs1</triggerType>
+ <triggerType>pairs1</triggerType>
</driver>
<driver name="TridentMonitoringPairs1" type="org.hps.analysis.dataquality.TridentMonitoring">
<triggerType>pairs1</triggerType>
</driver>
+ <driver name="FinalStateMonitoringGBLPairs1" type="org.hps.analysis.dataquality.FinalStateMonitoring">
+ <triggerType>pairs1</triggerType>
+ <isGBL>true</isGBL>
+ </driver>
+ <driver name="V0MonitoringGBLPairs1" type="org.hps.analysis.dataquality.V0Monitoring">
+ <triggerType>pairs1</triggerType>
+ <isGBL>true</isGBL>
+ </driver>
+ <driver name="TridentMonitoringGBLPairs1" type="org.hps.analysis.dataquality.TridentMonitoring">
+ <triggerType>pairs1</triggerType>
+ <isGBL>true</isGBL>
+ </driver>
+
<driver name="CleanupDriver" type="org.lcsim.recon.tracking.digitization.sisim.config.ReadoutCleanupDriver"/>
</drivers>
Modified: java/branches/jeremy-dev/steering-files/src/main/resources/org/hps/steering/production/DataQualityRecon_Pass2.lcsim
=============================================================================
--- java/branches/jeremy-dev/steering-files/src/main/resources/org/hps/steering/production/DataQualityRecon_Pass2.lcsim (original)
+++ java/branches/jeremy-dev/steering-files/src/main/resources/org/hps/steering/production/DataQualityRecon_Pass2.lcsim Tue Jan 26 18:16:25 2016
@@ -69,7 +69,7 @@
<readoutCollections>SVTRawTrackerHits</readoutCollections>
</driver>
<driver name="AidaSaveDriver" type="org.lcsim.job.AidaSaveDriver">
- <outputFileName>${outputFile}.root</outputFileName>
+ <outputFileName>${outputFile}</outputFileName>
</driver>
<!-- all triggers -->
|