HPS-SVN Archives (HPS-SVN@LISTSERV.SLAC.STANFORD.EDU), December 2015

Subject: r4005 - in /java/branches/jeremy-dev: crawler/src/main/java/org/hps/crawler/ record-util/src/main/java/org/hps/record/ record-util/src/main/java/org/hps/record/daqconfig/ record-util/src/main/java/org/hps/record/svt/ record-util/src/main/java/org/hps/record/util/ run-database/src/main/java/org/hps/run/database/
From: [log in to unmask]
Reply-To: Notification of commits to the hps svn repository <[log in to unmask]>
Date: Thu, 3 Dec 2015 04:58:23 -0000
Content-Type: text/plain
Parts/Attachments: text/plain (1994 lines)

Author: [log in to unmask]
Date: Wed Dec  2 20:58:15 2015
New Revision: 4005

Log:
More dev work on run database; trigger config moved to separate table.

Added:
    java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/util/
    java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/util/FileUtilities.java
      - copied, changed from r3998, java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/CrawlerFileUtilities.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfig.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigDao.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigDaoImpl.java
Removed:
    java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/CrawlerFileUtilities.java
Modified:
    java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/AidaMetadataReader.java
    java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java
    java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/LcioReconMetadataReader.java
    java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/PathFilter.java
    java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/RootDqmMetadataReader.java
    java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/AbstractRecordQueue.java
    java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/daqconfig/DAQConfigEvioProcessor.java
    java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/svt/SvtConfigEvioProcessor.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsType.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseDaoFactory.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunManager.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummary.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDao.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryImpl.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/ScalerDataDaoImpl.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/SvtConfigDao.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/SvtConfigDaoImpl.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/package-info.java
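
For orientation, a rough sketch of the new trigger config DAO surface, inferred only from how it is called in the hunks below (createTriggerConfigDao, insertTriggerConfig, deleteTriggerConfig, getTriggerConfig, and the TriggerConfig accessors); the contents of the added files are not included in this message, so anything beyond those call sites is an assumption.

    // Inferred sketch only -- not the committed source of the added files.
    interface TriggerConfigDao {
        void insertTriggerConfig(TriggerConfig config, int run);   // used by RunDatabaseBuilder.insertRun()
        void deleteTriggerConfig(int run);                         // used by RunManager.deleteRun()
        TriggerConfig getTriggerConfig(int run);                   // used by RunManager.getDAQConfig()
    }

    // TriggerConfig appears to wrap the per-crate string data plus a timestamp:
    //     new TriggerConfig(Map<Integer, String> data, int timestamp)
    // with getData(), getTimestamp() and loadDAQConfig(run) accessors.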

Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/AidaMetadataReader.java
 =============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/AidaMetadataReader.java	(original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/AidaMetadataReader.java	Wed Dec  2 20:58:15 2015
@@ -4,6 +4,8 @@
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
+
+import org.hps.record.util.FileUtilities;
 
 /**
  * This is a metadata reader for ROOT DQM files.
@@ -22,7 +24,7 @@
     @Override
     public Map<String, Object> getMetadata(final File file) throws IOException {
         final Map<String, Object> metadata = new HashMap<String, Object>();
-        final int run = CrawlerFileUtilities.getRunFromFileName(file);
+        final int run = FileUtilities.getRunFromFileName(file);
         metadata.put("runMin", run);
         metadata.put("runMax", run);
         return metadata;

Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java
 =============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java	(original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java	Wed Dec  2 20:58:15 2015
@@ -24,6 +24,7 @@
 import org.hps.datacat.client.DatacatClientFactory;
 import org.hps.datacat.client.DatasetFileFormat;
 import org.hps.datacat.client.DatasetSite;
+import org.hps.record.util.FileUtilities;
 
 /**
  * Command line file crawler for populating the data catalog.
@@ -266,7 +267,6 @@
                 for (String arg : cl.getArgList()) {
                     config.addPath(arg);
                 }
-                
             }
 
         } catch (final ParseException e) {
@@ -360,8 +360,8 @@
 
                 // Use file on JLAB cache disk if necessary.
                 File actualFile = file;
-                if (CrawlerFileUtilities.isMssFile(file)) {
-                    actualFile = CrawlerFileUtilities.getCachedFile(file);
+                if (FileUtilities.isMssFile(file)) {
+                    actualFile = FileUtilities.getCachedFile(file);
                     LOGGER.info("using cached file " + actualFile.getPath());
                 }
                 
@@ -373,7 +373,7 @@
                 } else {
                     // Assign run number even if metadata is not enabled.
                     metadata = new HashMap<String, Object>();
-                    int run = CrawlerFileUtilities.getRunFromFileName(file);
+                    int run = FileUtilities.getRunFromFileName(file);
                     metadata.put("runMin", run);
                     metadata.put("runMax", run);
                     metadata.put("scanStatus", "UNSCANNED");

Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/LcioReconMetadataReader.java
 =============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/LcioReconMetadataReader.java	(original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/LcioReconMetadataReader.java	Wed Dec  2 20:58:15 2015
@@ -90,7 +90,6 @@
         metadata.put("DETECTOR", detectorName);
         metadata.put("COLLECTIONS", sb.toString());
         
-        
         return metadata;
     }
 }

Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/PathFilter.java
 =============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/PathFilter.java	(original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/PathFilter.java	Wed Dec  2 20:58:15 2015
@@ -32,8 +32,9 @@
     @Override
     public boolean accept(File pathname) {
         for (String acceptPath : paths) {
-            if (pathname.getPath().equals(acceptPath)) {
-                LOGGER.info("accepted path " + pathname);                
+            // FIXME: Use endsWith, equals or contains here????
+            if (pathname.getPath().endsWith(acceptPath)) {
+                LOGGER.info("accepted path " + pathname);
                 return true;
             }
         }
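
The FIXME above asks whether endsWith, equals or contains is the right test. As a quick illustration (the paths below are made up, not taken from the repository), the three checks behave differently when the configured accept path is relative:

    // Hypothetical accept path and file path, for illustration only.
    String acceptPath = "engrun2015/data";
    String filePath   = "/mss/hallb/hps/engrun2015/data";
    filePath.equals(acceptPath);    // false: only an exact absolute match would pass
    filePath.endsWith(acceptPath);  // true:  a relative suffix is enough (the behavior chosen here)
    filePath.contains(acceptPath);  // true:  would also match fragments in the middle of a path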

Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/RootDqmMetadataReader.java
 =============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/RootDqmMetadataReader.java	(original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/RootDqmMetadataReader.java	Wed Dec  2 20:58:15 2015
@@ -4,6 +4,8 @@
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
+
+import org.hps.record.util.FileUtilities;
 
 /**
  * This is a metadata reader for ROOT DQM files.
@@ -22,7 +24,7 @@
     @Override
     public Map<String, Object> getMetadata(final File file) throws IOException {
         final Map<String, Object> metadata = new HashMap<String, Object>();
-        final int run = CrawlerFileUtilities.getRunFromFileName(file);
+        final int run = FileUtilities.getRunFromFileName(file);
         metadata.put("runMin", run);
         metadata.put("runMax", run);
         return metadata;

Modified: java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/AbstractRecordQueue.java
 =============================================================================
--- java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/AbstractRecordQueue.java	(original)
+++ java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/AbstractRecordQueue.java	Wed Dec  2 20:58:15 2015
@@ -102,7 +102,7 @@
      * @throws NoSuchRecordException if there are no records available from the queue
      */
     @Override
-    public void next() throws IOException, NoSuchRecordException {
+    public synchronized void next() throws IOException, NoSuchRecordException {
         try {
             if (this.timeOutMillis > 0L) {
                 // Poll the queue for the next record until timeout is exceeded.

Modified: java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/daqconfig/DAQConfigEvioProcessor.java
 =============================================================================
--- java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/daqconfig/DAQConfigEvioProcessor.java	(original)
+++ java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/daqconfig/DAQConfigEvioProcessor.java	Wed Dec  2 20:58:15 2015
@@ -15,7 +15,7 @@
  * Copied and modified from code in {@link org.hps.evio.TriggerConfigEvioReader} to extract DAQ config without
  * needing an output LCSim event.
  * <p>
- * Only the last valid DAQ config object will be saved.
+ * Only the last valid DAQ config object is available once the job is finished.
  * 
  * @author Jeremy McCormick, SLAC
  */
@@ -28,6 +28,10 @@
     private Map<Integer, String> stringData = new HashMap<Integer, String>();
     
     private Integer run = null;
+    
+    private int timestamp;
+    
+    private int currentTimestamp;
 
     /**
      * Process EVIO events to extract DAQ config data.
@@ -43,10 +47,19 @@
                 } catch (NullPointerException e) {
                 }
             }
-
+                        
             // Can only start parsing DAQ banks once the run is set.
             if (run != null) {
                 
+                // Set current timestamp from head bank.
+                BaseStructure headBank = EvioEventUtilities.getHeadBank(evioEvent);
+                if (headBank != null) {
+                    if (headBank.getIntData()[3] != 0) {
+                        currentTimestamp = headBank.getIntData()[3];
+                        LOGGER.finest("set timestamp " + currentTimestamp + " from head bank");
+                    }
+                }
+                                
                 // Parse config data from the EVIO banks.
                 EvioDAQParser evioParser = parseEvioData(evioEvent);
             
@@ -55,7 +68,9 @@
                     // Set the current DAQ config object.
                     ConfigurationManager.updateConfiguration(evioParser);
                     daqConfig = ConfigurationManager.getInstance();
+                    timestamp = currentTimestamp;
                 }
+                                
             }
         } catch (Exception e) {
             LOGGER.log(Level.WARNING, "Error parsing DAQ config from EVIO.", e);
@@ -119,11 +134,20 @@
     }
     
     /**
-     * Get a map of bank number to its string data for the current config.
+     * Get a map of bank number to string data for the current config.
      * 
      * @return a map of bank to trigger config data
      */
     public Map<Integer, String> getTriggerConfigData() {
         return this.stringData;
     }
+    
+    /**
+     * Get the timestamp associated with the config.
+     * 
+     * @return the timestamp
+     */
+    public int getTimestamp() {
+        return timestamp;
+    }
 }

Modified: java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/svt/SvtConfigEvioProcessor.java
 =============================================================================
--- java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/svt/SvtConfigEvioProcessor.java	(original)
+++ java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/svt/SvtConfigEvioProcessor.java	Wed Dec  2 20:58:15 2015
@@ -34,9 +34,9 @@
         BaseStructure headBank = EvioEventUtilities.getHeadBank(evioEvent);
         int configBanks = 0;
         if (headBank != null) {
-            if (headBank.getIntData()[0] != 0) {
-                timestamp = headBank.getIntData()[0];
-                LOGGER.info("set timestamp " + timestamp);
+            if (headBank.getIntData()[3] != 0) {
+                timestamp = headBank.getIntData()[3];
+                //LOGGER.finest("set timestamp " + timestamp + " from head bank");
             }
         }
         for (BaseStructure bank : evioEvent.getChildrenList()) {
@@ -75,8 +75,8 @@
             }
         } 
         if (config != null) {
-            LOGGER.info("Adding SVT config " + evioEvent.getEventNumber() + " with " + configBanks 
-                    + " banks from event " + evioEvent.getEventNumber());
+            LOGGER.info("Adding SVT config " + evioEvent.getEventNumber() + " with " + configBanks
+                    + " banks and timestamp " + timestamp + " from event " + evioEvent.getEventNumber());
             this.configs.add(config);
         }
     }
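
Both processors now read the run timestamp from word 3 of the head bank rather than word 0. A condensed sketch of that shared pattern (the extra length check is an added safety assumption, not present in either hunk):

    BaseStructure headBank = EvioEventUtilities.getHeadBank(evioEvent);
    if (headBank != null && headBank.getIntData().length > 3 && headBank.getIntData()[3] != 0) {
        timestamp = headBank.getIntData()[3];  // keep the last non-zero timestamp seen
    }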

Copied: java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/util/FileUtilities.java (from r3998, java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/CrawlerFileUtilities.java)
 =============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/CrawlerFileUtilities.java	(original)
+++ java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/util/FileUtilities.java	Wed Dec  2 20:58:15 2015
@@ -1,4 +1,4 @@
-package org.hps.crawler;
+package org.hps.record.util;
 
 import java.io.File;
 
@@ -7,15 +7,15 @@
  *
  * @author Jeremy McCormick, SLAC
  */
-final class CrawlerFileUtilities {
-
+public final class FileUtilities {
+    
     /**
      * Get run number from file name assuming it looks like "hps_001234".
      *
      * @param file the file
      * @return the run number
      */
-    static int getRunFromFileName(final File file) {
+    public static int getRunFromFileName(final File file) {
         final String name = file.getName();
         return Integer.parseInt(name.substring(4, 10));
     }
@@ -60,4 +60,7 @@
     public static boolean isMssFile(final File file) {
         return file.getPath().startsWith("/mss");
     }
+    
+    private FileUtilities() {
+    }
 }
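
A minimal usage sketch of the relocated utility class, based on the javadoc and method bodies above; the file path is hypothetical, and the exact cache mapping done by getCachedFile is an assumption since its body is not shown in this hunk:

    File file = new File("/mss/hallb/hps/engrun2015/data/hps_001234.evio.0");  // made-up path
    int run = FileUtilities.getRunFromFileName(file);  // 1234, parsed from characters 4-9 of the name
    if (FileUtilities.isMssFile(file)) {               // true: the path starts with "/mss"
        file = FileUtilities.getCachedFile(file);      // presumably the /cache copy used at JLAB
    }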

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsType.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsType.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsType.java	Wed Dec  2 20:58:15 2015
@@ -13,11 +13,11 @@
     /**
      * 20S EPICS data.
      */
-    EPICS_20s(10),
+    EPICS_20s(20),
     /**
      * 2S EPICS data.
      */
-    EPICS_2s(1);
+    EPICS_2s(2);
 
     /**
      * Get the type from an int.

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java	Wed Dec  2 20:58:15 2015
@@ -3,6 +3,7 @@
 import java.io.File;
 import java.io.IOException;
 import java.sql.Connection;
+import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashSet;
@@ -18,10 +19,9 @@
 import org.hps.conditions.run.RunSpreadsheet;
 import org.hps.conditions.run.RunSpreadsheet.RunData;
 import org.hps.datacat.client.DatacatClient;
-import org.hps.datacat.client.DatacatClientFactory;
 import org.hps.datacat.client.Dataset;
-import org.hps.datacat.client.DatasetSite;
 import org.hps.record.AbstractRecordProcessor;
+import org.hps.record.daqconfig.DAQConfig;
 import org.hps.record.daqconfig.DAQConfigEvioProcessor;
 import org.hps.record.epics.EpicsData;
 import org.hps.record.epics.EpicsRunProcessor;
@@ -39,6 +39,7 @@
 import org.hps.record.triggerbank.AbstractIntData.IntBankDefinition;
 import org.hps.record.triggerbank.HeadBankData;
 import org.hps.record.triggerbank.TiTimeOffsetEvioProcessor;
+import org.hps.record.util.FileUtilities;
 import org.jlab.coda.jevio.BaseStructure;
 import org.jlab.coda.jevio.EvioEvent;
 import org.jlab.coda.jevio.EvioException;
@@ -48,7 +49,7 @@
 /**
  * Builds a complete {@link RunSummary} object from various data sources, including the data catalog and the run
  * spreadsheet, so that it is ready to be inserted into the run database using the DAO interfaces.  This class also 
- * extracts EPICS and scaler records from the EVIO data for insertion into run database tables.
+ * extracts EPICS data, scaler data, trigger config and SVT config information.
  * <p>
  * The setters and some other methods follow the builder pattern and so can be chained by the caller.
  * 
@@ -97,6 +98,11 @@
      * List of EVIO files.
      */
     private List<File> evioFiles;
+    
+    /**
+     * List of EVIO files with cache paths.
+     */
+    private List<File> cacheFiles;
 
     /**
      * Allow replacement of information in the database (off by default).
@@ -127,7 +133,45 @@
      * List of SVT configuration bank data.
      */
     private List<SvtConfigData> svtConfigs;
-        
+    
+    /**
+     * The trigger config object.
+     */
+    private TriggerConfig config;
+    
+    /**
+     * Reload run data after insert for debugging.
+     */
+    private boolean reload;
+    
+    /**
+     * Reload state for the current run number (used for testing after a database insert).
+     */
+    static void reload(Connection connection, int run) {
+        
+        RunManager runManager = new RunManager(connection);
+        runManager.setRun(run);
+
+        RunSummary runSummary = runManager.getRunSummary();
+
+        LOGGER.info("loaded run summary ..." + '\n' + runSummary);
+
+        LOGGER.info("loaded " + runManager.getEpicsData(EpicsType.EPICS_2s).size() + " EPICS 2S records");
+        LOGGER.info("loaded " + runManager.getEpicsData(EpicsType.EPICS_20s).size() + " EPICS 20S records");
+
+        List<ScalerData> scalerData = runManager.getScalerData();
+        LOGGER.info("loaded " + scalerData.size() + " scaler records");
+
+        List<SvtConfigData> svtConfigs = runManager.getSvtConfigData();
+        LOGGER.info("loaded " + svtConfigs.size() + " SVT configurations");
+            
+        LOGGER.info("printing DAQ config ...");
+        DAQConfig daqConfig = runManager.getDAQConfig();
+        daqConfig.printConfig();
+        
+        runManager.closeConnection();
+    }
+                      
     /**
      * Create an empty run summary.
      * 
@@ -163,16 +207,26 @@
             throw new IllegalStateException("No EVIO datasets for run " + getRun() + " were found in the data catalog.");
         }
         
-        // Map file to dataset.
+        // Map files to datasets.
         for (final Dataset dataset : datasets) {
             evioDatasets.put(new File(dataset.getLocations().get(0).getResource()), dataset);
         }
         
-        // Create the list of EVIO files.
+        // Create the list of sorted EVIO files.
         evioFiles = new ArrayList<File>();
         evioFiles.addAll(evioDatasets.keySet());
         EvioFileUtilities.sortBySequence(evioFiles);
         
+        // Create a list of files with cache paths in case running at JLAB.
+        cacheFiles = new ArrayList<File>();
+        for (File file : evioFiles) {
+            if (FileUtilities.isMssFile(file)) {
+                cacheFiles.add(FileUtilities.getCachedFile(file));
+            } else {
+                cacheFiles.add(file);
+            }
+        }        
+        
         LOGGER.info("found " + evioFiles.size() + " EVIO file(s) for run " + runSummary.getRun());
     }
    
@@ -184,19 +238,7 @@
     int getRun() {
         return runSummary.getRun();
     }
-
-    /**
-     * Initialize the datacat client.
-     */
-    private void initializeDatacat() {
-
-        LOGGER.info("initializing data catalog client");
-
-        // DEBUG: use dev datacat server; prod should use default JLAB connection
-        datacatClient = new DatacatClientFactory().createClient("http://localhost:8080/datacat-v0.4-SNAPSHOT/r",
-                DatasetSite.SLAC, "HPS");
-    }
-
+    
     /**
      * Insert the run data into the database using the current connection.
      */
@@ -235,49 +277,17 @@
             LOGGER.warning("no SVT config to insert");
         }
         
-        try {
-            connection.close();
-        } catch (Exception e) {
-            LOGGER.log(Level.WARNING, e.getMessage(), e);
-        }
-               
+        // Insert trigger config data.
+        if (this.config != null) {
+            LOGGER.info("inserting trigger config");
+            runFactory.createTriggerConfigDao().insertTriggerConfig(config, getRun());
+        } else {
+            LOGGER.warning("no trigger config to inesrt");
+        }
+                       
         LOGGER.info("done inserting run " + getRun());
     }
-    
-    /**
-     * Reload state for the current run number into this object (used for testing after a database insert).
-     * 
-     * @param load <code>true</code> if this method should be executed (skipped if <code>false</code>)
-     * @return this object
-     */
-    RunDatabaseBuilder load(boolean load) {
-        if (load) {
-            RunManager runManager = new RunManager(connectionParameters.createConnection());
-            runManager.setRun(getRun());
-
-            this.runSummary = RunSummaryImpl.class.cast(runManager.getRunSummary());
-
-            LOGGER.info("loaded run summary ..." + '\n' + runSummary);
-
-            epicsData = new ArrayList<EpicsData>();
-            epicsData.addAll(runManager.getEpicsData(EpicsType.EPICS_2s));
-            epicsData.addAll(runManager.getEpicsData(EpicsType.EPICS_20s));
-            LOGGER.info("loaded " + epicsData.size() + " EPICS records");
-
-            scalerData = runManager.getScalerData();
-            LOGGER.info("loaded " + scalerData.size() + " scaler records");
-
-            svtConfigs = runManager.getSvtConfigData();
-            LOGGER.info("loaded " + svtConfigs.size() + " SVT configurations");
-
-            runManager.closeConnection();
-        } else {
-            LOGGER.info("load is skipped");
-        }
-        
-        return this;
-    }
-    
+          
     /**
      * Print summary information to the log.
      */
@@ -304,9 +314,9 @@
         } else {
             LOGGER.info("no SVT config");
         }
-        if (runSummary.getTriggerConfigData() != null) {
-            for (Entry<Integer, String> entry : runSummary.getTriggerConfigData().entrySet()) {
-                LOGGER.info("trigger config data " + entry.getKey() + " ..." + entry.getValue());
+        if (config != null) {
+            for (Entry<Integer, String> entry : config.getData().entrySet()) {
+                LOGGER.info("trigger config data " + entry.getKey() + " with timestamp " + config.getTimestamp() + " ..." + entry.getValue());
             }
         } else {
             LOGGER.info("no trigger config");
@@ -328,7 +338,7 @@
             throw new IllegalStateException("The detector name was not set.");
         }
 
-        // Initialize the conditions system.
+        // Initialize the conditions system because the DAQ config processor needs it.
         try {
             DatabaseConditionsManager dbManager = DatabaseConditionsManager.getInstance();
             DatabaseConditionsManager.getInstance().setDetector(detectorName, runSummary.getRun());
@@ -344,8 +354,12 @@
         ScalersEvioProcessor scalersProcessor = new ScalersEvioProcessor();
         scalersProcessor.setResetEveryEvent(false);
         processors.add(scalersProcessor);
-
-        // Processor for calculating TI time offset.
+        
+        // Processor for getting EPICS data.
+        EpicsRunProcessor epicsProcessor = new EpicsRunProcessor();
+        processors.add(epicsProcessor);
+
+        // Processor for calculating the TI time offset.
         TiTimeOffsetEvioProcessor tiProcessor = new TiTimeOffsetEvioProcessor();
         processors.add(tiProcessor);
 
@@ -357,14 +371,10 @@
         SvtConfigEvioProcessor svtProcessor = new SvtConfigEvioProcessor();
         processors.add(svtProcessor);
 
-        // Processor for getting EPICS data.
-        EpicsRunProcessor epicsProcessor = new EpicsRunProcessor();
-        processors.add(epicsProcessor);
-
         // Run the job using the EVIO loop.
         EvioLoop loop = new EvioLoop();
         loop.addProcessors(processors);
-        EvioFileSource source = new EvioFileSource(evioFiles);
+        EvioFileSource source = new EvioFileSource(cacheFiles);
         loop.setEvioFileSource(source);
         loop.loop(-1);
 
@@ -374,13 +384,6 @@
         // Set TI time offset.
         runSummary.setTiTimeOffset(tiProcessor.getTiTimeOffset());
 
-        // Set DAQ config object.
-        runSummary.setDAQConfig(daqProcessor.getDAQConfig());
-
-        // Set map of crate number to string trigger config data.
-        runSummary.setTriggerConfigData(daqProcessor.getTriggerConfigData());
-        LOGGER.info("found " + daqProcessor.getTriggerConfigData().size() + " valid SVT config events");
-
         // Set EPICS data list.
         epicsData = epicsProcessor.getEpicsData();
 
@@ -389,6 +392,11 @@
 
         // Set SVT config data strings.
         svtConfigs = svtProcessor.getSvtConfigs();
+        
+        // Set trigger config object.
+        if (!daqProcessor.getTriggerConfigData().isEmpty()) {            
+            config = new TriggerConfig(daqProcessor.getTriggerConfigData(), daqProcessor.getTimestamp());
+        }
 
         LOGGER.info("done processing EVIO files");
     }
@@ -406,9 +414,10 @@
             throw new IllegalStateException("The run summary was never created.");
         }        
         
-        // Setup datacat client.
-        initializeDatacat();
-        
+        if (this.datacatClient == null) {
+            throw new IllegalStateException("The datacat client was not set.");
+        }
+                
         // Find EVIO datasets in the datacat.
         findEvioDatasets();
 
@@ -447,11 +456,17 @@
         if (!dryRun) {
             // Update the database.
             updateDatabase();
+            
+            if (reload) {
+                LOGGER.info("reloading data for run " + getRun() + " ...");
+                reload(connectionParameters.createConnection(), getRun());
+            }
+            
         } else {
             // Dry run so database is not updated.
             LOGGER.info("Dry run enabled so no updates were performed.");
         }
-        
+                        
         return this;
     }
 
@@ -463,6 +478,17 @@
      */
     RunDatabaseBuilder setConnectionParameters(ConnectionParameters connectionParameters) {
         this.connectionParameters = connectionParameters;
+        return this;
+    }
+    
+    /**
+     * Set the datacat client for querying the data catalog.
+     * 
+     * @param datacatClient the datacat client
+     * @return this object
+     */
+    RunDatabaseBuilder setDatacatClient(DatacatClient datacatClient) {
+        this.datacatClient = datacatClient;
         return this;
     }
 
@@ -487,6 +513,17 @@
     RunDatabaseBuilder setDryRun(boolean dryRun) {
         this.dryRun = dryRun;
         LOGGER.config("dryRun = " + this.dryRun);
+        return this;
+    }
+    
+    /**
+     * Set whether data should be reloaded at end (as debug check).
+     * 
+     * @param reload <code>true</code> to reload data at end of job
+     * @return this object
+     */
+    RunDatabaseBuilder setReload(boolean reload) {
+        this.reload = reload;
         return this;
     }
 
@@ -539,27 +576,46 @@
         LOGGER.fine("updating the run database");
         
         // Initialize the run manager.
-        RunManager runManager = new RunManager(connectionParameters.createConnection());
+        Connection connection = connectionParameters.createConnection();
+        RunManager runManager = new RunManager(connection);
         runManager.setRun(runSummary.getRun());
-
-        // Does run exist?
-        if (runManager.runExists()) {
+        
+        // Turn off autocommit to start transaction.
+        try {
+            connection.setAutoCommit(false);
+
+            // Does run exist?
+            if (runManager.runExists()) {
             
-            LOGGER.info("run already exists");
+                LOGGER.info("run already exists");
             
-            // If replacement is not enabled and run exists, then this is a fatal exception.
-            if (!replace) {
-                throw new RuntimeException("Run already exists (use -x option to enable replacement).");
-            }
-
-            // Delete the run so insert statements can be used to rebuild it.
-            LOGGER.info("deleting existing run");
-            runManager.deleteRun();
-        }
-
-        // Insert the run data into the database.
-        LOGGER.info("inserting the run data");
-        insertRun(runManager.getConnection());
+                // If replacement is not enabled and run exists, then this is a fatal exception.
+                if (!replace) {
+                    throw new RuntimeException("Run already exists (use -x option to enable replacement).");
+                }
+
+                // Delete the run so insert statements can be used to rebuild it.
+                LOGGER.info("deleting existing run");
+                runManager.deleteRun();
+            }
+
+            // Insert the run data into the database.
+            LOGGER.info("inserting the run data");
+            insertRun(connection);
+        
+            // Commit the transaction.                                 
+            LOGGER.info("committing to run db ...");
+            connection.commit();
+            LOGGER.info("done committing");
+            
+        } catch (Exception e1) {
+            try {
+                LOGGER.log(Level.SEVERE, "Error occurred updating database; rolling back transaction...", e1);
+                connection.rollback();
+            } catch (SQLException e2) {
+                throw new RuntimeException(e2);
+            }
+        }        
 
         // Close the database connection.
         runManager.closeConnection();
@@ -571,7 +627,7 @@
     private void updateEndTimestamp() {
         LOGGER.info("updating end timestamp");
         IntBankDefinition headBankDefinition = new IntBankDefinition(HeadBankData.class, new int[] {0x2e, 0xe10f});
-        File lastEvioFile = evioFiles.get(evioFiles.size() - 1);
+        File lastEvioFile = cacheFiles.get(cacheFiles.size() - 1);
         EvioReader reader = null;
         Integer endTimestamp = null;
         try {
@@ -618,16 +674,22 @@
         RunData data = runSpreadsheet.getRunMap().get(runSummary.getRun());        
         if (data != null) {
             LOGGER.info("found run data ..." + '\n' + data.getRecord());
+            
+            // Trigger config name.
             String triggerConfigName = data.getRecord().get("trigger_config");
             if (triggerConfigName != null) {
                 runSummary.setTriggerConfigName(triggerConfigName);
                 LOGGER.info("set trigger config name <" + runSummary.getTriggerConfigName() + "> from spreadsheet");
             }
+            
+            // Notes.
             String notes = data.getRecord().get("notes");
             if (notes != null) {
                 runSummary.setNotes(notes);
                 LOGGER.info("set notes <" + runSummary.getNotes() + "> from spreadsheet");
             }
+            
+            // Target.
             String target = data.getRecord().get("target");
             if (target != null) {
                 runSummary.setTarget(target);
@@ -653,9 +715,9 @@
         runSummary.setLivetimeClock(livetimes[LiveTimeIndex.CLOCK.ordinal()]);
         runSummary.setLivetimeFcupTdc(livetimes[LiveTimeIndex.FCUP_TDC.ordinal()]);
         runSummary.setLivetimeFcupTrg(livetimes[LiveTimeIndex.FCUP_TRG.ordinal()]);
-        LOGGER.info("clock livetime set to " + runSummary.getLivetimeClock());
-        LOGGER.info("fcup tdc livetime set to " + runSummary.getLivetimeFcupTdc());
-        LOGGER.info("fcup trg livetime set to " + runSummary.getLivetimeFcupTrg());
+        LOGGER.info("clock livetime = " + runSummary.getLivetimeClock());
+        LOGGER.info("fcup tdc livetime = " + runSummary.getLivetimeFcupTdc());
+        LOGGER.info("fcup trg livetime = " + runSummary.getLivetimeFcupTrg());
     }
 
     /**
@@ -663,7 +725,7 @@
      */
     private void updateStartTimestamps() {
         LOGGER.fine("updating start timestamps");
-        File firstEvioFile = evioFiles.get(0);
+        File firstEvioFile = cacheFiles.get(0);
         int sequence = EvioFileUtilities.getSequenceFromName(firstEvioFile);
         if (sequence != 0) {
             LOGGER.warning("first file does not have sequence 0");

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java	Wed Dec  2 20:58:15 2015
@@ -8,6 +8,10 @@
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.hps.conditions.database.ConnectionParameters;
+import org.hps.datacat.client.DatacatClient;
+import org.hps.datacat.client.DatacatClientFactory;
+import org.hps.datacat.client.DatacatConstants;
+import org.hps.datacat.client.DatasetSite;
 
 /**
  * Command line tool for inserting records into the run database.
@@ -19,7 +23,7 @@
     /**
      * Command line options for the crawler.
      */
-    private static final Options OPTIONS = new Options();    
+    private static final Options OPTIONS = new Options();
 
     /**
      * Statically define the command options.
@@ -28,12 +32,15 @@
         OPTIONS.addOption("h", "help", false, "print help and exit (overrides all other arguments)");
         OPTIONS.addOption("r", "run", true, "run to update");
         OPTIONS.addOption("p", "connection-properties", true, "database connection properties file (required)");       
-        OPTIONS.addOption("D", "dry-run", false, "dry run which will not update the database");
+        OPTIONS.addOption("Y", "dry-run", false, "dry run which will not update the database");
         OPTIONS.addOption("x", "replace", false, "allow deleting and replacing an existing run");
         OPTIONS.addOption("s", "spreadsheet", true, "path to run database spreadsheet (CSV format)");
-        OPTIONS.addOption("d", "detector", true, "conditions system detector name");        
+        OPTIONS.addOption("d", "detector", true, "conditions system detector name");
         OPTIONS.addOption("N", "no-evio-processing", false, "skip processing of all EVIO files");
         OPTIONS.addOption("L", "load", false, "load back run information after inserting (for debugging)");
+        OPTIONS.addOption("u", "url", true, "datacat URL");
+        OPTIONS.addOption("S", "site", true, "datacat site (e.g. SLAC or JLAB)");        
+        // TODO: add -D option for defining metadata values
     }
 
     /**
@@ -79,12 +86,17 @@
     /**
      * Load back run information after insert (for debugging).
      */
-    private boolean load = false;
+    private boolean reload = false;
     
     /**
      * Database connection parameters.
      */
     private ConnectionParameters connectionParameters = null;
+    
+    /**
+     * Datacat client to use for connecting to data catalog.
+     */
+    private DatacatClient datacatClient = null;
     
     /**
      * Parse command line options and return reference to <code>this</code> object.
@@ -125,7 +137,7 @@
             }
             
             // Dry run.
-            if (cl.hasOption("D")) {
+            if (cl.hasOption("Y")) {
                 this.dryRun = true;
             }
             
@@ -154,8 +166,20 @@
             
             // Load back run info at end of job.
             if (cl.hasOption("L")) {
-                this.load = true;
-            }
+                this.reload = true;
+            }
+            
+            // Setup datacat client.
+            DatasetSite site = DatasetSite.JLAB;            
+            String url = DatacatConstants.BASE_URL;            
+            String rootFolder = DatacatConstants.ROOT_FOLDER;            
+            if (cl.hasOption("u")) {
+                url = cl.getOptionValue("u");
+            }
+            if (cl.hasOption("S")) {
+                site = DatasetSite.valueOf(cl.getOptionValue("S"));
+            }
+            datacatClient = new DatacatClientFactory().createClient(url, site, rootFolder);
             
         } catch (final ParseException e) {
             throw new RuntimeException(e);
@@ -172,12 +196,12 @@
             .createRunSummary(run)
             .setDetectorName(detectorName)
             .setConnectionParameters(connectionParameters)
+            .setDatacatClient(datacatClient)
             .setDryRun(dryRun)
             .setReplace(replace)
             .skipEvioProcessing(skipEvioProcessing)
             .setSpreadsheetFile(spreadsheetFile)
-            .run()
-            .load(load);
-    }
-        
+            .setReload(reload)
+            .run();
+    }        
 }
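
For reference, a hypothetical invocation of the updated tool; the run number, detector name, jar and file names are made up, and the main entry point is assumed rather than shown in this revision. The dev datacat URL is the one previously hard-coded in RunDatabaseBuilder:

    java -cp hps-distribution.jar org.hps.run.database.RunDatabaseCommandLine \
        -r 1234 -d HPS-Detector-Name -p connection.properties -s run_spreadsheet.csv \
        -u http://localhost:8080/datacat-v0.4-SNAPSHOT/r -S SLAC -x -L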

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseDaoFactory.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseDaoFactory.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseDaoFactory.java	Wed Dec  2 20:58:15 2015
@@ -78,4 +78,13 @@
     SvtConfigDao createSvtConfigDao() {
         return new SvtConfigDaoImpl(connection);
     }
+    
+    /**
+     * Get the trigger config DAO.
+     * 
+     * @return the trigger config DAO
+     */
+    TriggerConfigDao createTriggerConfigDao() {
+        return new TriggerConfigDaoImpl(connection);
+    }
 }

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunManager.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunManager.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunManager.java	Wed Dec  2 20:58:15 2015
@@ -6,6 +6,7 @@
 import java.util.logging.Logger;
 
 import org.hps.conditions.database.ConnectionParameters;
+import org.hps.record.daqconfig.DAQConfig;
 import org.hps.record.epics.EpicsData;
 import org.hps.record.scalers.ScalerData;
 import org.hps.record.svt.SvtConfigData;
@@ -28,6 +29,7 @@
         List<EpicsData> epicsData = null;
         List<ScalerData> scalerData = null;
         List<SvtConfigData> svtConfigData = null;
+        DAQConfig daqConfig = null;
     }
 
     /**
@@ -71,7 +73,7 @@
     /**
      * The data cache of run information.
      */
-    private DataCache dataCache;
+    private DataCache cache;
 
     /**
      * Factory for creating database API objects.
@@ -140,15 +142,12 @@
      *
      * @param run the run number
      */
-    void deleteRun() {
-        
+    void deleteRun() {        
         factory.createEpicsDataDao().deleteEpicsData(EpicsType.EPICS_2s, run);
         factory.createEpicsDataDao().deleteEpicsData(EpicsType.EPICS_20s, run);
-        
-        factory.createScalerDataDao().deleteScalerData(run);
-        
+        factory.createScalerDataDao().deleteScalerData(run);        
         factory.createSvtConfigDao().deleteSvtConfigs(run);
-        
+        factory.createTriggerConfigDao().deleteTriggerConfig(run);
         factory.createRunSummaryDao().deleteRunSummary(run);
     }
 
@@ -169,11 +168,11 @@
      */
     public List<EpicsData> getEpicsData(final EpicsType epicsType) {
         this.checkRunNumber();
-        if (this.dataCache.epicsData == null) {
+        if (this.cache.epicsData == null) {
             LOGGER.info("loading EPICS data for run " + this.run);
-            this.dataCache.epicsData = factory.createEpicsDataDao().getEpicsData(epicsType, this.run);
-        }
-        return this.dataCache.epicsData;
+            this.cache.epicsData = factory.createEpicsDataDao().getEpicsData(epicsType, this.run);
+        }
+        return this.cache.epicsData;
     }
 
     /**
@@ -211,10 +210,10 @@
      */
     public RunSummary getRunSummary() {
         this.checkRunNumber();
-        if (this.dataCache.runSummary == null) {
-            this.dataCache.runSummary = factory.createRunSummaryDao().getRunSummary(this.run);
-        }
-        return this.dataCache.runSummary;
+        if (this.cache.runSummary == null) {
+            this.cache.runSummary = factory.createRunSummaryDao().getRunSummary(this.run);
+        }
+        return this.cache.runSummary;
     }
 
     /**
@@ -224,11 +223,11 @@
      */
     public List<ScalerData> getScalerData() {
         this.checkRunNumber();
-        if (this.dataCache.scalerData == null) {
+        if (this.cache.scalerData == null) {
             LOGGER.info("loading scaler data for run " + this.run);
-            this.dataCache.scalerData = factory.createScalerDataDao().getScalerData(run);
-        }
-        return this.dataCache.scalerData;
+            this.cache.scalerData = factory.createScalerDataDao().getScalerData(run);
+        }
+        return this.cache.scalerData;
     }
     
     /**
@@ -238,11 +237,25 @@
      */
     public List<SvtConfigData> getSvtConfigData() {
         this.checkRunNumber();
-        if (this.dataCache.svtConfigData == null) {
+        if (this.cache.svtConfigData == null) {
             LOGGER.info("loading SVT configuration data for run " + this.run);
-            this.dataCache.svtConfigData = factory.createSvtConfigDao().getSvtConfigs(run);
-        }
-        return this.dataCache.svtConfigData;
+            this.cache.svtConfigData = factory.createSvtConfigDao().getSvtConfigs(run);
+        }
+        return this.cache.svtConfigData;
+    }
+    
+    /**
+     * Get the DAQ configuration for the run.
+     * 
+     * @return the DAQ configuration for the run
+     */
+    public DAQConfig getDAQConfig() {
+        this.checkRunNumber();
+        if (this.cache.daqConfig == null) {
+            TriggerConfig config = factory.createTriggerConfigDao().getTriggerConfig(run);
+            cache.daqConfig = config.loadDAQConfig(run);
+        }
+        return this.cache.daqConfig;
     }
      
     /**
@@ -268,10 +281,10 @@
      */
     public boolean runExists() {
         this.checkRunNumber();
-        if (this.dataCache.runExists == null) {
-            this.dataCache.runExists = factory.createRunSummaryDao().runExists(this.run);
-        }
-        return this.dataCache.runExists;
+        if (this.cache.runExists == null) {
+            this.cache.runExists = factory.createRunSummaryDao().runSummaryExists(this.run);
+        }
+        return this.cache.runExists;
     }
 
     /**
@@ -281,7 +294,7 @@
      * @return <code>true</code> if the run exists in the database
      */
     boolean runExists(final int run) {
-        return factory.createRunSummaryDao().runExists(run);
+        return factory.createRunSummaryDao().runSummaryExists(run);
     }
 
     /**
@@ -299,7 +312,7 @@
             this.run = run;
 
             // Reset the data cache.
-            this.dataCache = new DataCache();
+            this.cache = new DataCache();
         }
     }
 }

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummary.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummary.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummary.java	Wed Dec  2 20:58:15 2015
@@ -1,9 +1,6 @@
 package org.hps.run.database;
 
 import java.util.Date;
-import java.util.Map;
-
-import org.hps.record.daqconfig.DAQConfig;
 
 /**
  * This is an API for accessing run summary information which is persisted as a row in the <i>run_summaries</i> table.
@@ -18,27 +15,12 @@
  */
 public interface RunSummary {
 
-    /*
-     * Mapping of trigger config fields to crate numbers.
-     */
-    public static final int TRIGGER_CONFIG1 = 37;
-    public static final int TRIGGER_CONFIG2 = 39;
-    public static final int TRIGGER_CONFIG3 = 46;
-    public static final int TRIGGER_CONFIG4 = 58;
-
     /**
      * Get the creation date of this record.
      *
      * @return the creation date of this record
      */
     Date getCreated();
-
-    /**
-     * Get the trigger config.
-     * 
-     * @return the trigger config
-     */
-    DAQConfig getDAQConfig();
 
     /**
      * Get the END event timestamp or the timestamp from the last head bank if END is not present.
@@ -125,13 +107,6 @@
     Integer getTotalFiles();
 
     /**
-     * Get a map of crate number to trigger config data.
-     * 
-     * @return the map of crate number to trigger config data
-     */
-    Map<Integer, String> getTriggerConfigData();
-
-    /**
      * Get the trigger config name (from the run spreadsheet).
      * 
      * @return the trigger config name

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDao.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDao.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDao.java	Wed Dec  2 20:58:15 2015
@@ -15,13 +15,6 @@
      * @param run the run number
      */
     void deleteRunSummary(int run);
-
-    /**
-     * Delete a run summary but not its objects.
-     *
-     * @param runSummary the run summary object
-     */
-    void deleteRunSummary(RunSummary runSummary);
 
     /**
      * Get the list of run numbers.
@@ -51,12 +44,5 @@
      * @param run the run number
      * @return <code>true</code> if <code>run</code> exists in the database
      */
-    boolean runExists(int run);
-
-    /**
-     * Update a run summary.
-     *
-     * @param runSummary the run summary to update
-     */
-    void updateRunSummary(RunSummary runSummary);    
+    boolean runSummaryExists(int run);
 }

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java	Wed Dec  2 20:58:15 2015
@@ -1,14 +1,11 @@
 package org.hps.run.database;
 
-import java.sql.Clob;
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.ArrayList;
-import java.util.LinkedHashMap;
 import java.util.List;
-import java.util.Map;
 import java.util.logging.Logger;
 
 /**
@@ -19,11 +16,6 @@
 final class RunSummaryDaoImpl implements RunSummaryDao {
 
     /**
-     * Expected number of string banks in trigger config.
-     */
-    private static final int TRIGGER_CONFIG_LEN = 4;
-
-    /**
      * Delete by run number.
      */
     private static final String DELETE = "DELETE FROM run_summaries WHERE run = ?";
@@ -32,23 +24,15 @@
      * Insert a record for a run.
      */
     private static final String INSERT = "INSERT INTO run_summaries (run, nevents, nfiles, prestart_timestamp,"
-            + " go_timestamp, end_timestamp, trigger_rate, trigger_config_name, trigger_config1, trigger_config2," 
-            + " trigger_config3, trigger_config4, ti_time_offset, livetime_clock, livetime_fcup_tdc, livetime_fcup_trg,"
-            + " target, notes, created) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, NOW())";
+            + " go_timestamp, end_timestamp, trigger_rate, trigger_config_name, ti_time_offset," 
+            + " livetime_clock, livetime_fcup_tdc, livetime_fcup_trg, target, notes, created, updated)"
+            + " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, NOW(), NOW())";
                      
     /**
      * Select record by run number.
      */
     private static final String SELECT = "SELECT * FROM run_summaries WHERE run = ?";
-        
-    /**
-     * Update information for a run.
-     */
-    private static final String UPDATE = "UPDATE run_summaries SET nevents = ?, nfiles = ?, prestart_timestamp = ?,"
-            + " go_timestamp = ?, end_timestamp = ?, trigger_rate = ?, trigger_config_name = ?, trigger_config1 = ?,"
-            + " trigger_config2 = ?, trigger_config3 = ?, trigger_config4 = ?, ti_time_offset = ?, livetime_clock = ?,"
-            + " livetime_fcup_tdc = ?, livetime_fcup_trg = ?, target = ?, notes = ?, created WHERE run = ?";
-
+           
     /**
      * Initialize the logger.
      */
@@ -102,32 +86,7 @@
             }
         }
     }
-
-    /**
-     * Delete a run summary but not its objects.
-     *
-     * @param runSummary the run summary object
-     */
-    @Override
-    public void deleteRunSummary(final RunSummary runSummary) {
-        PreparedStatement preparedStatement = null;
-        try {
-            preparedStatement = connection.prepareStatement(DELETE);
-            preparedStatement.setInt(1, runSummary.getRun());
-            preparedStatement.executeUpdate();
-        } catch (final SQLException e) {
-            throw new RuntimeException(e);
-        } finally {
-            if (preparedStatement != null) {
-                try {
-                    preparedStatement.close();
-                } catch (final SQLException e) {
-                    e.printStackTrace();
-                }
-            }
-        }
-    }
-
+   
     /**
      * Get the list of run numbers.
      *
@@ -183,10 +142,6 @@
             runSummary.setEndTimestamp(resultSet.getInt("end_timestamp"));
             runSummary.setTriggerRate(resultSet.getDouble("trigger_rate"));
             runSummary.setTriggerConfigName(resultSet.getString("trigger_config_name"));
-            Map<Integer, String> triggerConfigData = createTriggerConfigData(resultSet);
-            if (!triggerConfigData.isEmpty()) {
-                runSummary.setTriggerConfigData(triggerConfigData);
-            } 
             runSummary.setTiTimeOffset(resultSet.getLong("ti_time_offset"));
             runSummary.setLivetimeClock(resultSet.getDouble("livetime_clock"));
             runSummary.setLivetimeFcupTdc(resultSet.getDouble("livetime_fcup_tdc"));
@@ -207,34 +162,6 @@
             }
         }
         return runSummary;
-    }
-
-    /**
-     * Create trigger config data from result set.
-     * 
-     * @param resultSet the result set with the run summary record
-     * @return the trigger config data as a map of bank number to string data
-     * @throws SQLException if there is an error querying the database
-     */
-    private Map<Integer, String> createTriggerConfigData(final ResultSet resultSet) throws SQLException {
-        Map<Integer, String> triggerConfigData = new LinkedHashMap<Integer, String>();
-        Clob clob = resultSet.getClob("trigger_config1");            
-        if (clob != null) {
-            triggerConfigData.put(RunSummary.TRIGGER_CONFIG1, clob.getSubString(1, (int) clob.length()));
-        }
-        clob = resultSet.getClob("trigger_config2");
-        if (clob != null) {
-            triggerConfigData.put(RunSummary.TRIGGER_CONFIG2, clob.getSubString(1, (int) clob.length()));
-        }
-        clob = resultSet.getClob("trigger_config3");
-        if (clob != null) {
-            triggerConfigData.put(RunSummary.TRIGGER_CONFIG3, clob.getSubString(1, (int) clob.length()));
-        }
-        clob = resultSet.getClob("trigger_config4");
-        if (clob != null) {
-            triggerConfigData.put(RunSummary.TRIGGER_CONFIG4, clob.getSubString(1, (int) clob.length()));
-        }
-        return triggerConfigData;
     }
       
     /**
@@ -255,14 +182,12 @@
             preparedStatement.setInt(6, runSummary.getEndTimestamp());
             preparedStatement.setDouble(7, runSummary.getTriggerRate());
             preparedStatement.setString(8, runSummary.getTriggerConfigName());
-            Map<Integer, String> triggerData = runSummary.getTriggerConfigData();
-            prepareTriggerData(preparedStatement, triggerData);
-            preparedStatement.setLong(13, runSummary.getTiTimeOffset());
-            preparedStatement.setDouble(14, runSummary.getLivetimeClock());
-            preparedStatement.setDouble(15, runSummary.getLivetimeFcupTdc());
-            preparedStatement.setDouble(16, runSummary.getLivetimeFcupTrg());
-            preparedStatement.setString(17, runSummary.getTarget());
-            preparedStatement.setString(18, runSummary.getNotes());
+            preparedStatement.setLong(9, runSummary.getTiTimeOffset());
+            preparedStatement.setDouble(10, runSummary.getLivetimeClock());
+            preparedStatement.setDouble(11, runSummary.getLivetimeFcupTdc());
+            preparedStatement.setDouble(12, runSummary.getLivetimeFcupTrg());
+            preparedStatement.setString(13, runSummary.getTarget());
+            preparedStatement.setString(14, runSummary.getNotes());
             LOGGER.fine(preparedStatement.toString());
             preparedStatement.executeUpdate();
         } catch (final SQLException e) {
@@ -277,30 +202,6 @@
             }
         }
     }
-
-    /**
-     * Set trigger config data on prepared statement.
-     * @param preparedStatement the prepared statement
-     * @param triggerData the trigger config data
-     * @throws SQLException if there is an error querying the database
-     */
-    private void prepareTriggerData(PreparedStatement preparedStatement, Map<Integer, String> triggerData)
-            throws SQLException {
-        if (triggerData != null && !triggerData.isEmpty()) {
-            if (triggerData.size() != TRIGGER_CONFIG_LEN) {
-                throw new IllegalArgumentException("The trigger config data has the wrong length.");
-            }
-            preparedStatement.setBytes(9, triggerData.get(RunSummary.TRIGGER_CONFIG1).getBytes());
-            preparedStatement.setBytes(10, triggerData.get(RunSummary.TRIGGER_CONFIG2).getBytes());
-            preparedStatement.setBytes(11, triggerData.get(RunSummary.TRIGGER_CONFIG3).getBytes());
-            preparedStatement.setBytes(12, triggerData.get(RunSummary.TRIGGER_CONFIG4).getBytes());
-        } else {
-            preparedStatement.setBytes(9, null);
-            preparedStatement.setBytes(10, null);
-            preparedStatement.setBytes(11, null);
-            preparedStatement.setBytes(12, null);
-        }
-    }
    
     /**
      * Return <code>true</code> if a run summary exists in the database for the run number.
@@ -309,7 +210,7 @@
      * @return <code>true</code> if run exists in the database
      */
     @Override
-    public boolean runExists(final int run) {
+    public boolean runSummaryExists(final int run) {
         PreparedStatement preparedStatement = null;
         try {
             preparedStatement = connection.prepareStatement("SELECT run FROM run_summaries where run = ?");
@@ -328,58 +229,4 @@
             }
         }
     }
-
-    /**
-     * Update a run summary.
-     *
-     * @param runSummary the run summary to update
-     */
-    @Override
-    public void updateRunSummary(final RunSummary runSummary) {
-        PreparedStatement preparedStatement = null;
-        try {
-            preparedStatement = connection.prepareStatement(UPDATE);                       
-            preparedStatement.setInt(1, runSummary.getTotalEvents());
-            preparedStatement.setInt(2, runSummary.getTotalFiles());
-            preparedStatement.setInt(3, runSummary.getPrestartTimestamp());
-            preparedStatement.setInt(4, runSummary.getGoTimestamp());
-            preparedStatement.setInt(5, runSummary.getEndTimestamp());
-            preparedStatement.setDouble(6, runSummary.getTriggerRate());
-            preparedStatement.setString(7, runSummary.getTriggerConfigName());
-            Map<Integer, String> triggerData = runSummary.getTriggerConfigData();
-            if (triggerData != null && !triggerData.isEmpty()) {
-                if (triggerData.size() != 4) {
-                    throw new IllegalArgumentException("The trigger config data has the wrong length.");
-                }
-                preparedStatement.setBytes(8, triggerData.get(RunSummary.TRIGGER_CONFIG1).getBytes());
-                preparedStatement.setBytes(9, triggerData.get(RunSummary.TRIGGER_CONFIG2).getBytes());
-                preparedStatement.setBytes(10, triggerData.get(RunSummary.TRIGGER_CONFIG3).getBytes());
-                preparedStatement.setBytes(11, triggerData.get(RunSummary.TRIGGER_CONFIG4).getBytes());
-            } else {
-                preparedStatement.setBytes(8, null);
-                preparedStatement.setBytes(9, null);
-                preparedStatement.setBytes(10, null);
-                preparedStatement.setBytes(11, null);
-            }
-            preparedStatement.setLong(12, runSummary.getTiTimeOffset());
-            preparedStatement.setDouble(13, runSummary.getLivetimeClock());
-            preparedStatement.setDouble(14, runSummary.getLivetimeFcupTdc());
-            preparedStatement.setDouble(15, runSummary.getLivetimeFcupTrg());
-            preparedStatement.setString(16, runSummary.getTarget());
-            preparedStatement.setString(17, runSummary.getNotes());
-            preparedStatement.setInt(18, runSummary.getRun());
-            LOGGER.fine(preparedStatement.toString());
-            preparedStatement.executeUpdate();
-        } catch (final SQLException e) {
-            throw new RuntimeException(e);
-        } finally {
-            if (preparedStatement != null) {
-                try {
-                    preparedStatement.close();
-                } catch (final SQLException e) {
-                    e.printStackTrace();
-                }
-            }
-        }
-    }      
 }

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryImpl.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryImpl.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryImpl.java	Wed Dec  2 20:58:15 2015
@@ -1,12 +1,6 @@
 package org.hps.run.database;
 
 import java.util.Date;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.hps.record.daqconfig.ConfigurationManager;
-import org.hps.record.daqconfig.DAQConfig;
-import org.hps.record.daqconfig.EvioDAQParser;
 
 /**
  * Implementation of {@link RunSummary} for retrieving information from the run database.
@@ -21,11 +15,6 @@
     private Date created;
 
     /**
-     * DAQ config object built from string data.
-     */
-    private DAQConfig daqConfig;
-
-    /**
      * Timestamp of END event.
      */
     private Integer endTimestamp;
@@ -84,12 +73,7 @@
      * The total number of files in the run.
      */
     private Integer totalFiles;
-
-    /**
-     * Map of crate number to trigger config string data.
-     */
-    private Map<Integer, String> triggerConfigData;
-
+   
     /**
      * Get the name of the trigger config file.
      */
@@ -120,11 +104,6 @@
     }
 
     @Override
-    public DAQConfig getDAQConfig() {
-        return this.daqConfig;
-    }
-
-    @Override
     public Integer getEndTimestamp() {
         return endTimestamp;
     }
@@ -183,12 +162,7 @@
     public Integer getTotalFiles() {
         return this.totalFiles;
     }
-
-    @Override
-    public Map<Integer, String> getTriggerConfigData() {
-        return this.triggerConfigData;
-    }
-
+   
     @Override
     public String getTriggerConfigName() {
         return this.triggerConfigName;
@@ -205,55 +179,82 @@
     }
 
     /**
-     * Load DAQ config object from trigger config string data.
-     */
-    private void loadDAQConfig() {
-        if (this.triggerConfigData != null && !this.triggerConfigData.isEmpty()) {
-            EvioDAQParser parser = new EvioDAQParser();
-            for (Entry<Integer, String> entry : this.triggerConfigData.entrySet()) {
-                parser.parse(entry.getKey(), this.getRun(), new String[] {entry.getValue()});
-            }
-            ConfigurationManager.updateConfiguration(parser);
-            daqConfig = ConfigurationManager.getInstance();
-        }
-    }
-
+     * Set the creation date of the run summary.
+     * 
+     * @param created the creation date
+     */
     void setCreated(Date created) {
         this.created = created;
     }
 
-    void setDAQConfig(DAQConfig daqConfig) {
-        this.daqConfig = daqConfig;
-    }
-
+    /**
+     * Set the end timestamp.
+     * 
+     * @param endTimestamp the end timestamp
+     */
     void setEndTimestamp(Integer endTimestamp) {
         this.endTimestamp = endTimestamp;
     }
 
+    /**
+     * Set the GO timestamp.
+     * 
+     * @param goTimestamp the GO timestamp
+     */
     void setGoTimestamp(Integer goTimestamp) {
         this.goTimestamp = goTimestamp;
     }
 
+    /**
+     * Set the clock livetime. 
+     * 
+     * @param livetimeClock the clock livetime
+     */
     void setLivetimeClock(Double livetimeClock) {
         this.livetimeClock = livetimeClock;
     }
 
+    /**
+     * Set the FCUP TDC livetime.
+     * 
+     * @param livetimeTdc the FCUP TDC livetime
+     */
     void setLivetimeFcupTdc(Double livetimeTdc) {
         this.livetimeTdc = livetimeTdc;
     }
 
+    /**
+     * Set the FCUP TRG livetime.
+     * 
+     * @param livetimeTrg the FCUP TRG livetime
+     */
     void setLivetimeFcupTrg(Double livetimeTrg) {
         this.livetimeTrg = livetimeTrg;
     }
 
+    /**
+     * Set the notes.
+     * 
+     * @param notes the notes
+     */
     void setNotes(String notes) {
         this.notes = notes;
     }
 
+    /**
+     * Set the PRESTART timestamp.
+     * 
+     * @param prestartTimestamp the PRESTART timestamp
+     */
     void setPrestartTimestamp(Integer prestartTimestamp) {
         this.prestartTimestamp = prestartTimestamp;
     }
 
+    /**
+     * Set the target description.
+     * 
+     * @param target the target description
+     */
     void setTarget(String target) {
         this.target = target;
     }
@@ -286,19 +287,6 @@
     }
 
     /**
-     * Build the DAQ config from the trigger config string data.
-     * 
-     * @param triggerConfigData a map of crate number to the trigger config string data from the bank
-     */
-    void setTriggerConfigData(Map<Integer, String> triggerConfigData) {
-        this.triggerConfigData = triggerConfigData;
-        // Load DAQ config if not already set.
-        if (daqConfig == null) {
-            loadDAQConfig();
-        }
-    }
-
-    /**
      * Set the trigger config file.
      * 
      * @param triggerConfigName the trigger config file
@@ -316,6 +304,11 @@
         this.triggerRate = triggerRate;
     }
 
+    /**
+     * Set the updated date of the summary.
+     * 
+     * @param updated the updated date
+     */
     void setUpdated(Date updated) {
         this.updated = updated;
     }
@@ -337,7 +330,6 @@
                 + ", goTimestamp: " + this.getGoTimestamp()
                 + ", endTimestamp: " + this.getEndTimestamp()
                 + ", triggerConfigFile: " + this.getTriggerConfigName()
-                + ", DAQConfig: " + (this.getDAQConfig() != null ? true : false)
                 + ", triggerRate: " + this.getTriggerRate()
                 + ", livetimeClock: " + this.getLivetimeClock()
                 + ", livetimeTdc: " + this.getLivetimeFcupTdc()

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/ScalerDataDaoImpl.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/ScalerDataDaoImpl.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/ScalerDataDaoImpl.java	Wed Dec  2 20:58:15 2015
@@ -18,15 +18,9 @@
 final class ScalerDataDaoImpl implements ScalerDataDao {
 
     /**
-     * SQL query strings.
+     * Insert a record.
      */
-    private static final class ScalerDataQuery {
-
-        /**
-         * Insert a record.
-         */
-        private static final String INSERT = createInsertSql();
-    }
+    private static final String INSERT = createInsertSql();    
 
     /**
      * Create insert SQL for scaler data.
@@ -139,7 +133,7 @@
     public void insertScalerData(final List<ScalerData> scalerDataList, final int run) {
         PreparedStatement insertScalers = null;
         try {
-            insertScalers = this.connection.prepareStatement(ScalerDataQuery.INSERT);
+            insertScalers = this.connection.prepareStatement(INSERT);
             for (final ScalerData scalerData : scalerDataList) {
                 insertScalers.setInt(1, run);
                 insertScalers.setInt(2, scalerData.getEventId());

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/SvtConfigDao.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/SvtConfigDao.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/SvtConfigDao.java	Wed Dec  2 20:58:15 2015
@@ -9,7 +9,7 @@
  * 
  * @author Jeremy McCormick, SLAC
  */
-public interface SvtConfigDao {
+interface SvtConfigDao {
    
     /**
      * Insert SVT configurations.

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/SvtConfigDaoImpl.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/SvtConfigDaoImpl.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/SvtConfigDaoImpl.java	Wed Dec  2 20:58:15 2015
@@ -16,7 +16,7 @@
  * 
  * @author Jeremy McCormick, SLAC
  */
-public class SvtConfigDaoImpl implements SvtConfigDao {
+final class SvtConfigDaoImpl implements SvtConfigDao {
 
     private Connection connection = null;
     

Added: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfig.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfig.java	(added)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfig.java	Wed Dec  2 20:58:15 2015
@@ -0,0 +1,84 @@
+package org.hps.run.database;
+
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.hps.record.daqconfig.ConfigurationManager;
+import org.hps.record.daqconfig.DAQConfig;
+import org.hps.record.daqconfig.EvioDAQParser;
+
+/**
+ * Raw trigger config string data with an associated timestamp.
+ * 
+ * @author Jeremy McCormick, SLAC
+ */
+final class TriggerConfig {
+    
+    /**
+     * Expected number of string banks in trigger config.
+     */
+    static final int DATA_LENGTH = 4;
+    
+    /*
+     * Mapping of trigger config database fields to their crate numbers.
+     */
+    static final int CONFIG1 = 37;
+    static final int CONFIG2 = 39;
+    static final int CONFIG3 = 46;
+    static final int CONFIG4 = 58;
+        
+    private int timestamp;
+    private Map<Integer, String> data;
+    
+    TriggerConfig(Map<Integer, String> data, int timestamp) {
+        if (data == null) {
+            throw new RuntimeException("The data is null.");
+        }
+        this.data = data;
+        this.timestamp = timestamp;
+    }
+    
+    /**
+     * Get the config's timestamp.
+     * 
+     * @return the config's timestamp
+     */
+    int getTimestamp() {
+        return timestamp;
+    }
+    
+    /**
+     * Get the config data as a map from crate numbers to config string data.
+     * 
+     * @return the config data
+     */
+    Map<Integer, String> getData() {
+        return data;
+    }
+    
+    /**
+     * Return <code>true</code> if the config is valid, meaning it has four
+     * non-null string data banks.
+     *  
+     * @return <code>true</code> if config is valid
+     */
+    boolean isValid() {
+        return data.size() == DATA_LENGTH && data.get(CONFIG1) != null && data.get(CONFIG2) != null
+                && data.get(CONFIG3) != null && data.get(CONFIG4) != null;
+    }
+         
+    /**
+     * Load DAQ config object from trigger config string data.
+     * 
+     * @param run the run number (needed by the configuration manager)
+     * @return the DAQ config object
+     */
+    DAQConfig loadDAQConfig(int run) {
+        EvioDAQParser parser = new EvioDAQParser();
+        for (Entry<Integer, String> entry : data.entrySet()) {
+            parser.parse(entry.getKey(), run, new String[] {entry.getValue()});
+        }
+        ConfigurationManager.updateConfiguration(parser);
+        return ConfigurationManager.getInstance();
+    }    
+}
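
For reference, a minimal usage sketch of the new TriggerConfig class (not part of r4005). The example class name, method name, and the raw config strings are hypothetical, and the sketch would have to live in the org.hps.run.database package since TriggerConfig is package-private.

    package org.hps.run.database;

    import java.util.LinkedHashMap;
    import java.util.Map;

    import org.hps.record.daqconfig.DAQConfig;

    // Hypothetical sketch: build a TriggerConfig from raw crate-keyed string data
    // and load a DAQConfig from it using the API added in this commit.
    final class TriggerConfigUsageSketch {

        static DAQConfig load(String crate37, String crate39, String crate46, String crate58,
                int timestamp, int run) {
            Map<Integer, String> data = new LinkedHashMap<Integer, String>();
            data.put(TriggerConfig.CONFIG1, crate37); // crate 37
            data.put(TriggerConfig.CONFIG2, crate39); // crate 39
            data.put(TriggerConfig.CONFIG3, crate46); // crate 46
            data.put(TriggerConfig.CONFIG4, crate58); // crate 58
            TriggerConfig config = new TriggerConfig(data, timestamp);
            if (!config.isValid()) {
                throw new IllegalStateException("Trigger config is missing one or more data banks.");
            }
            return config.loadDAQConfig(run);
        }
    }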

Added: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigDao.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigDao.java	(added)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigDao.java	Wed Dec  2 20:58:15 2015
@@ -0,0 +1,32 @@
+package org.hps.run.database;
+
+/**
+ * Database interface for getting raw trigger config data.
+ * 
+ * @author Jeremy McCormick, SLAC
+ */
+interface TriggerConfigDao {
+    
+    /**
+     * Get a trigger config by run number.
+     * 
+     * @param run the run number
+     * @return the trigger config
+     */
+    TriggerConfig getTriggerConfig(int run);
+    
+    /**
+     * Insert a trigger config.
+     * 
+     * @param config the trigger config
+     * @param run the run number
+     */
+    void insertTriggerConfig(TriggerConfig config, int run);
+            
+    /**
+     * Delete a trigger config by run.
+     * 
+     * @param run the run number
+     */
+    void deleteTriggerConfig(int run);
+}
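
A short sketch of how the DAO interface might be used to replace the stored config for a run (hypothetical, not part of the commit; the helper class and method names are invented, and a concrete TriggerConfigDao instance is assumed to be supplied by the caller).

    package org.hps.run.database;

    // Hypothetical sketch: replace a run's trigger config by deleting any
    // existing row before inserting the new one.
    final class TriggerConfigReplaceSketch {

        static void replace(TriggerConfigDao dao, TriggerConfig config, int run) {
            if (dao.getTriggerConfig(run) != null) {
                dao.deleteTriggerConfig(run); // remove the old row first
            }
            dao.insertTriggerConfig(config, run);
        }
    }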

Added: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigDaoImpl.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigDaoImpl.java	(added)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigDaoImpl.java	Wed Dec  2 20:58:15 2015
@@ -0,0 +1,129 @@
+package org.hps.run.database;
+
+import java.sql.Clob;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+final class TriggerConfigDaoImpl implements TriggerConfigDao {
+      
+    private static final String INSERT =
+            "INSERT INTO trigger_configs (run, timestamp, config1, config2, config3, config4)"
+                + " VALUES (?, ?, ?, ?, ?, ?)";
+    
+    private static final String SELECT =  "SELECT * FROM trigger_configs WHERE run = ?";
+    
+    private static final String DELETE = "DELETE FROM trigger_configs WHERE run = ?";
+    
+    /**
+     * The database connection.

+     */
+    private final Connection connection;
+
+    /**
+     * Create object for managing trigger config data in the run database.
+     *
+     * @param connection the database connection
+     */
+    TriggerConfigDaoImpl(final Connection connection) {
+        if (connection == null) {
+            throw new IllegalArgumentException("The connection is null.");
+        }
+        this.connection = connection;
+    }
+    
+
+    @Override
+    public void insertTriggerConfig(TriggerConfig config, int run) {
+        if (!config.isValid()) {
+            throw new RuntimeException("The trigger config is not valid.");
+        }
+        PreparedStatement preparedStatement = null;
+        try {
+            preparedStatement = connection.prepareStatement(INSERT);
+            preparedStatement.setInt(1, run);
+            preparedStatement.setInt(2, config.getTimestamp());
+            Map<Integer, String> data = config.getData();
+            if (data.size() != TriggerConfig.DATA_LENGTH) {
+                throw new IllegalArgumentException("The trigger config data has the wrong length.");
+            }
+            preparedStatement.setBytes(3, data.get(TriggerConfig.CONFIG1).getBytes());
+            preparedStatement.setBytes(4, data.get(TriggerConfig.CONFIG2).getBytes());
+            preparedStatement.setBytes(5, data.get(TriggerConfig.CONFIG3).getBytes());
+            preparedStatement.setBytes(6, data.get(TriggerConfig.CONFIG4).getBytes());
+            preparedStatement.executeUpdate();
+        } catch (SQLException e) {
+            throw new RuntimeException(e);
+        } finally {
+            try {
+                preparedStatement.close();
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+    }
+
+    @Override
+    public void deleteTriggerConfig(int run) {
+        PreparedStatement preparedStatement = null;
+        try {
+            preparedStatement = connection.prepareStatement(DELETE);
+            preparedStatement.setInt(1, run);
+            preparedStatement.executeUpdate();
+        } catch (final SQLException e) {
+            throw new RuntimeException(e);
+        } finally {
+            if (preparedStatement != null) {
+                try {
+                    preparedStatement.close();
+                } catch (final SQLException e) {
+                    e.printStackTrace();
+                }
+            }
+        }       
+    }
+    
+    @Override
+    public TriggerConfig getTriggerConfig(int run) {
+        PreparedStatement preparedStatement = null;
+        TriggerConfig config = null;
+        try {
+            preparedStatement = connection.prepareStatement(SELECT);
+            preparedStatement.setInt(1, run);
+            ResultSet resultSet = preparedStatement.executeQuery();
+            if (resultSet.next()) {
+                Map<Integer, String> data = new LinkedHashMap<Integer, String>();
+                int timestamp = resultSet.getInt("timestamp");
+                Clob clob = resultSet.getClob("config1");
+                if (clob != null) {
+                    data.put(TriggerConfig.CONFIG1, clob.getSubString(1, (int) clob.length()));
+                }
+                clob = resultSet.getClob("config2");
+                if (clob != null) {
+                    data.put(TriggerConfig.CONFIG2, clob.getSubString(1, (int) clob.length()));
+                }
+                clob = resultSet.getClob("config3");
+                if (clob != null) {
+                    data.put(TriggerConfig.CONFIG3, clob.getSubString(1, (int) clob.length()));
+                }
+                clob = resultSet.getClob("config4");
+                if (clob != null) {
+                    data.put(TriggerConfig.CONFIG4, clob.getSubString(1, (int) clob.length()));
+                }
+                config = new TriggerConfig(data, timestamp);
+            }
+        } catch (SQLException e) {
+            throw new RuntimeException(e);
+        } finally {
+            try {
+                preparedStatement.close();
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+        return config;
+    }
+}
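
For completeness, a sketch of reading a run's trigger config back through the new DAO implementation (hypothetical, not part of the commit). The JDBC URL, driver, and credentials are placeholders, and the example class must sit in org.hps.run.database because the DAO types are package-private.

    package org.hps.run.database;

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;
    import java.util.Map;

    // Hypothetical sketch: fetch the trigger config for a run and report how
    // much string data each crate bank holds.
    final class TriggerConfigReadSketch {

        public static void main(String[] args) throws SQLException {
            int run = Integer.parseInt(args[0]);
            // Placeholder connection details; substitute the real run database URL.
            Connection connection = DriverManager.getConnection(
                    "jdbc:mysql://localhost:3306/run_db", "user", "password");
            try {
                TriggerConfigDao dao = new TriggerConfigDaoImpl(connection);
                TriggerConfig config = dao.getTriggerConfig(run);
                if (config == null) {
                    System.out.println("No trigger config found for run " + run);
                    return;
                }
                for (Map.Entry<Integer, String> entry : config.getData().entrySet()) {
                    System.out.println("crate " + entry.getKey() + ": "
                            + entry.getValue().length() + " characters");
                }
            } finally {
                connection.close();
            }
        }
    }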

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/package-info.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/package-info.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/package-info.java	Wed Dec  2 20:58:15 2015
@@ -1,4 +1,4 @@
 /**
- * API for accessing the HPS run database.
+ * API for accessing and updating the HPS run database.
  */
 package org.hps.run.database;
