Print

Print


Author: [log in to unmask]
Date: Wed Feb 10 14:26:49 2016
New Revision: 4196

Log:
Update dev branch with changes for run db, datacat and file crawling.

Added:
    java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/MetadataWriter.java
    java/branches/jeremy-dev/crawler/src/main/python/
    java/branches/jeremy-dev/crawler/src/main/python/crawler/
    java/branches/jeremy-dev/crawler/src/main/python/crawler/create_dataset_from_metadata.py   (with props)
    java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/triggerbank/TiTimeOffsetCalculator.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/AbstractRunBuilder.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/DatabaseUpdater.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/DatacatBuilder.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/DatacatUtilities.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/LivetimeBuilder.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/SpreadsheetBuilder.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigBuilder.java
    java/branches/jeremy-dev/run-database/src/test/java/org/hps/run/database/RunBuilderTest.java
Modified:
    java/branches/jeremy-dev/conditions/src/main/java/org/hps/conditions/run/RunSpreadsheet.java
    java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatAddFile.java
    java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatHelper.java
    java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java
    java/branches/jeremy-dev/distribution/pom.xml
    java/branches/jeremy-dev/logging/src/main/resources/org/hps/logging/config/test_logging.properties
    java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/daqconfig/TriggerConfigEvioProcessor.java
    java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/triggerbank/TiTimeOffsetEvioProcessor.java
    java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/triggerbank/TriggerConfigData.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsDataDao.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDao.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java
    java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryImpl.java

Modified: java/branches/jeremy-dev/conditions/src/main/java/org/hps/conditions/run/RunSpreadsheet.java
 =============================================================================
--- java/branches/jeremy-dev/conditions/src/main/java/org/hps/conditions/run/RunSpreadsheet.java	(original)
+++ java/branches/jeremy-dev/conditions/src/main/java/org/hps/conditions/run/RunSpreadsheet.java	Wed Feb 10 14:26:49 2016
@@ -99,11 +99,14 @@
      * @param file the CSV file
      */
     public RunSpreadsheet(final File file) {
+        if (file == null) {
+            throw new IllegalArgumentException("The file argument is null.");
+        }
         this.file = file;
         try {
             this.fromCsv(this.file);
         } catch (final Exception e) {
-            throw new RuntimeException();
+            throw new RuntimeException("Failed to parse run spreadsheet.", e);
         }
     }
 

Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatAddFile.java
 =============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatAddFile.java	(original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatAddFile.java	Wed Feb 10 14:26:49 2016
@@ -13,21 +13,18 @@
 import org.srs.datacat.model.DatasetModel;
 
 /**
- * Command line file crawler for populating the data catalog.
+ * Command line tool for adding files to the data catalog.
  *
  * @author Jeremy McCormick, SLAC
  */
 public final class DatacatAddFile {
 
-    /**
-     * Setup the logger.
-     */
     private static final Logger LOGGER = Logger.getLogger(DatacatCrawler.class.getPackage().getName());
     
-    private List<File> paths;
+    private List<File> paths = new ArrayList<File>();
     
     /**
-     * Command line options for the crawler.
+     * Command line options.
      */
     private static final Options OPTIONS = new Options();
 
@@ -95,7 +92,6 @@
                         
             // List of paths.
             if (!cl.getArgList().isEmpty()) {
-                paths = new ArrayList<File>();
                 for (String arg : cl.getArgList()) {                    
                     paths.add(new File(arg));
                 }
@@ -129,7 +125,7 @@
     }
 
     /**
-     * Print the usage statement for this tool to the console and then exit the program.
+     * Print the usage statement and then exit.
      */
     private void printUsage() {
         final HelpFormatter help = new HelpFormatter();

Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatHelper.java
 =============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatHelper.java	(original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatHelper.java	Wed Feb 10 14:26:49 2016
@@ -37,17 +37,17 @@
     /*
      * Static map of strings to file formats.
      */
-    private static final Map<String, FileFormat> formatMap = new HashMap<String, FileFormat>();
+    private static final Map<String, FileFormat> FORMATS = new HashMap<String, FileFormat>();
     static {
         for (final FileFormat format : FileFormat.values()) {
-            formatMap.put(format.extension(), format);
+            FORMATS.put(format.extension(), format);
         }
     }
     
     /* 
      * System metadata fields. 
      */
-    private static final Set<String> SYSTEM_METADATA = new HashSet<String>();
+    static final Set<String> SYSTEM_METADATA = new HashSet<String>();
     static {
         SYSTEM_METADATA.add("eventCount");
         SYSTEM_METADATA.add("size");
@@ -56,9 +56,13 @@
         SYSTEM_METADATA.add("checksum");
         SYSTEM_METADATA.add("scanStatus");
     }
-   
-    /**
-     * Create metadata for a file using its specific reader.
+    
+    static final boolean isSystemMetadata(String name) {
+        return SYSTEM_METADATA.contains(name);
+    }
+           
+    /**
+     * Create metadata for a file using its {@link FileMetadataReader}.
      *
      * @param file the file
      * @return the metadata for the file
@@ -82,6 +86,7 @@
         } catch (final IOException e) {
             throw new RuntimeException(e);
         }
+        metadata.put("scanStatus", "OK");
         return metadata;
     }
 
@@ -128,7 +133,7 @@
             name = stripEvioFileNumber(name);
         }
         final String extension = name.substring(name.lastIndexOf(".") + 1);
-        return formatMap.get(extension);
+        return FORMATS.get(extension);
     }
 
     /**

Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java
 =============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java	(original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java	Wed Feb 10 14:26:49 2016
@@ -11,11 +11,11 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
+import org.hps.record.evio.EventTagConstant;
 import org.hps.record.evio.EvioEventUtilities;
 import org.hps.record.evio.EvioFileUtilities;
 import org.hps.record.triggerbank.AbstractIntData.IntBankDefinition;
 import org.hps.record.triggerbank.HeadBankData;
-import org.hps.record.triggerbank.TIData;
 import org.hps.record.triggerbank.TiTimeOffsetEvioProcessor;
 import org.hps.record.triggerbank.TriggerType;
 import org.jlab.coda.jevio.BaseStructure;
@@ -24,13 +24,10 @@
 import org.jlab.coda.jevio.EvioReader;
 
 /**
- * Reads metadata from EVIO files, including the event count, run min and run max expected by the datacat, as well as
- * many custom field values applicable to HPS EVIO raw data.
+ * Creates detailed metadata for the datacat from an EVIO input file.
  * 
  * @author Jeremy McCormick, SLAC
  */
-// TODO: add physics events count
-// TODO: remove trigger rate and TI time offset 
 final class EvioMetadataReader implements FileMetadataReader {
 
     /**
@@ -42,11 +39,6 @@
      * Head bank definition.
      */
     private static IntBankDefinition HEAD_BANK = new IntBankDefinition(HeadBankData.class, new int[] {0x2e, 0xe10f});
-
-    /**
-     * TI data bank definition.
-     */
-    private static IntBankDefinition TI_BANK = new IntBankDefinition(TIData.class, new int[] {0x2e, 0xe10a});
 
     /**
      * Get the EVIO file metadata.
@@ -57,7 +49,7 @@
     @Override
     public Map<String, Object> getMetadata(final File file) throws IOException {
         
-        long events = 0;
+        long totalEvents = 0;
         int physicsEvents = 0;
         int badEvents = 0;
         int blinded = 0;
@@ -66,12 +58,12 @@
         Integer lastHeadTimestamp = null;
         Integer lastPhysicsEvent = null;
         Integer firstPhysicsEvent = null;
+        Integer prestartTimestamp = null;
+        Integer endTimestamp = null;
+        Integer goTimestamp = null;
         Double triggerRate = null;
-        long lastTI = 0;
-        long minTIDelta = 0;
-        long maxTIDelta = 0;
-        long firstTI = 0;
-        
+        
+        // Processor for calculating TI time offsets.
         TiTimeOffsetEvioProcessor tiProcessor = new TiTimeOffsetEvioProcessor();
 
         // Create map for counting trigger types.
@@ -83,7 +75,7 @@
         // Get the file number from the name.
         final int fileNumber = EvioFileUtilities.getSequenceFromName(file);
 
-        // Files with a sequence number that is not divisible by 10 are blinded (Eng Run 2015 scheme).
+        // File numbers indivisible by 10 are blinded (Eng Run 2015 scheme).
         if (!(fileNumber % 10 == 0)) {
             blinded = 1;
         }
@@ -106,22 +98,22 @@
             EvioEvent evioEvent = null;
 
             // Event read loop.
-            fileLoop: while (true) {
+            eventLoop: while (true) {
                 try {
                     // Parse next event.
                     evioEvent = evioReader.parseNextEvent();
 
                     // End of file.
                     if (evioEvent == null) {
-                        LOGGER.fine("EOF after " + events + " events");
-                        break fileLoop;
+                        LOGGER.fine("EOF after " + totalEvents + " events.");
+                        break eventLoop;
                     }
                     
                     // Increment event count (doesn't count events that can't be parsed).
-                    ++events;
+                    ++totalEvents;
 
                     // Debug print event number and tag.
-                    LOGGER.finest("parsed event " + evioEvent.getEventNumber() + " with tag 0x"
+                    LOGGER.finest("Parsed event " + evioEvent.getEventNumber() + " with tag 0x"
                             + String.format("%08x", evioEvent.getHeader().getTag()));
 
                     // Get head bank.
@@ -139,7 +131,7 @@
                                 // First header timestamp.
                                 if (firstHeadTimestamp == null) {
                                     firstHeadTimestamp = thisTimestamp;
-                                    LOGGER.finer("first head timestamp " + firstHeadTimestamp + " from event "
+                                    LOGGER.finer("First head timestamp " + firstHeadTimestamp + " from event "
                                             + evioEvent.getEventNumber());
                                 }
 
@@ -151,31 +143,12 @@
                             if (run == null) {
                                 if (headBankData[1] != 0) {
                                     run = (long) headBankData[1];
-                                    LOGGER.finer("run " + run + " from event " + evioEvent.getEventNumber());
+                                    LOGGER.finer("Run number " + run + " from event " + evioEvent.getEventNumber());
                                 }
                             }
                         }
                     }
-
-                    // Process trigger bank data for TI times (copied from Sho's BasicEvioFileReader class).
-                    BaseStructure tiBank = TI_BANK.findBank(evioEvent);
-                    if (tiBank != null) {
-                        TIData tiData = new TIData(tiBank.getIntData());
-                        if (lastTI == 0) {
-                            firstTI = tiData.getTime();
-                        }
-                        lastTI = tiData.getTime();
-                        if (thisTimestamp != 0) {
-                            long delta = thisTimestamp * 1000000000L - tiData.getTime();
-                            if (minTIDelta == 0 || minTIDelta > delta) {
-                                minTIDelta = delta;
-                            }
-                            if (maxTIDelta == 0 || maxTIDelta < delta) {
-                                maxTIDelta = delta;
-                            }
-                        }
-                    }
-
+                    
                     if (EvioEventUtilities.isPhysicsEvent(evioEvent)) {
                                                 
                         final int[] eventIdData = EvioEventUtilities.getEventIdData(evioEvent);
@@ -188,11 +161,24 @@
                             // Set the first physics event.
                             if (firstPhysicsEvent == null) {
                                 firstPhysicsEvent = eventIdData[0];
-                                LOGGER.finer("set first physics event " + firstPhysicsEvent);
+                                LOGGER.finer("Set first physics event " + firstPhysicsEvent);
                             }
                         }
                         
                         ++physicsEvents;
+                    } else if (EvioEventUtilities.isControlEvent(evioEvent)) {
+                        int[] controlData = EvioEventUtilities.getControlEventData(evioEvent);
+                        if (controlData[0] != 0) {
+                            if (EventTagConstant.PRESTART.isEventTag(evioEvent)) {
+                                prestartTimestamp = controlData[0];
+                            }                        
+                            if (EventTagConstant.GO.isEventTag(evioEvent)) {
+                                goTimestamp = controlData[0];
+                            }
+                            if (EventTagConstant.END.isEventTag(evioEvent)) {
+                                endTimestamp = controlData[0];
+                            }
+                        }
                     }
 
                     // Count trigger types for this event.
@@ -200,17 +186,16 @@
                     for (TriggerType mask : triggerTypes) {
                         int count = triggerCounts.get(mask) + 1;
                         triggerCounts.put(mask, count);
-                        LOGGER.finest("incremented " + mask.name() + " to " + count);
+                        LOGGER.finest("Incremented " + mask.name() + " to " + count);
                     }
                     
                     // Activate TI time offset processor.
                     tiProcessor.process(evioEvent);
                     
-                //} catch (IOException | NegativeArraySizeException | EvioException e) {
                 } catch (Exception e) {  
-                    // Trap event processing errors.
+                    // Trap all event processing errors.
                     badEvents++;
-                    LOGGER.warning("error processing EVIO event " + evioEvent.getEventNumber());
+                    LOGGER.warning("Error processing EVIO event " + evioEvent.getEventNumber());
                 }
             }
         } catch (final EvioException e) {
@@ -222,22 +207,23 @@
                 try {
                     evioReader.close();
                 } catch (IOException e) {
-                    LOGGER.log(Level.WARNING, "error closing EVIO reader", e);
+                    LOGGER.log(Level.WARNING, "Error closing EVIO reader", e);
                 }
             }
         }
 
-        LOGGER.info("done reading " + events + " events from " + file.getPath());
+        LOGGER.info("Done reading " + totalEvents + " events from " + file.getPath());
 
         // Rough trigger rate calculation.
         try {
-            if (firstHeadTimestamp != null && lastHeadTimestamp != null && events > 0) {
-                triggerRate = calculateTriggerRate(firstHeadTimestamp, lastHeadTimestamp, events);
+            if (firstHeadTimestamp != null && lastHeadTimestamp != null && totalEvents > 0 
+                    && (firstHeadTimestamp - lastHeadTimestamp != 0)) {
+                triggerRate = calculateTriggerRate(firstHeadTimestamp, lastHeadTimestamp, totalEvents);
             } else {
                 LOGGER.log(Level.WARNING, "Missing information for calculating trigger rate.");
             }
         } catch (Exception e) {
-            LOGGER.log(Level.WARNING, "Error calculating trigger rate.", e);
+            LOGGER.log(Level.WARNING, "Error calculating the trigger rate.", e);
         }
 
         // Create and fill the metadata map.
@@ -248,15 +234,15 @@
                 run = new Long(EvioFileUtilities.getRunFromName(file));
             }
         } catch (Exception e) {
-            throw new RuntimeException("Unable to determine run number from data or file name.", e);
-        }
-
-        // Set built-in system metadata.
+            throw new RuntimeException("Failed to get run number from event data or file name.", e);
+        }
+
+        // Set locationExtras metadata.
         metadataMap.put("runMin", run);
         metadataMap.put("runMax", run);
-        metadataMap.put("eventCount", events);
+        metadataMap.put("eventCount", totalEvents);
         metadataMap.put("size", size);
-        metadataMap.put("checksum", checksum);
+        metadataMap.put("checksum", checksum);     
         
         // File sequence number.
         metadataMap.put("FILE", fileNumber);
@@ -267,54 +253,52 @@
         // First and last timestamps which may come from control or physics events.
         if (firstHeadTimestamp != null) {
             metadataMap.put("FIRST_HEAD_TIMESTAMP", firstHeadTimestamp);
-        } else {
-            metadataMap.put("FIRST_HEAD_TIMESTAMP", 0L);
-        }
+        } 
         
         if (lastHeadTimestamp != null) {
             metadataMap.put("LAST_HEAD_TIMESTAMP", lastHeadTimestamp);
-        } else {
-            metadataMap.put("LAST_HEAD_TIMESTAMP", 0L);
-        }
+        } 
 
         // First and last physics event numbers.
         if (firstPhysicsEvent != null) {
             metadataMap.put("FIRST_PHYSICS_EVENT", firstPhysicsEvent);
-        } else {
-            metadataMap.put("FIRST_PHYSICS_EVENT", 0L);
-        }
+        } 
         
         if (lastPhysicsEvent != null) {
             metadataMap.put("LAST_PHYSICS_EVENT", lastPhysicsEvent);
-        } else {
-            metadataMap.put("LAST_PHYSICS_EVENT", 0L);
+        }
+        
+        // Timestamps which are only set if the corresponding control events were found in the file.
+        if (prestartTimestamp != null) {
+            metadataMap.put("PRESTART_TIMESTAMP", prestartTimestamp);
+        }
+        if (endTimestamp != null) {
+            metadataMap.put("END_TIMESTAMP", endTimestamp);
+        }
+        if (goTimestamp != null) {
+            metadataMap.put("GO_TIMESTAMP", goTimestamp);
         }
 
         // TI times and offset.
-        metadataMap.put("FIRST_TI_TIME", firstTI);
-        metadataMap.put("LAST_TI_TIME", lastTI);
-        metadataMap.put("TI_TIME_DELTA", maxTIDelta - minTIDelta);
-        
-        // TI time offset.
-        //metadataMap.put("TI_TIME_OFFSET", tiProcessor.getTiTimeOffset());
-
+        metadataMap.put("TI_TIME_MIN_OFFSET", new Long(tiProcessor.getMinOffset()).toString());
+        metadataMap.put("TI_TIME_MAX_OFFSET", new Long(tiProcessor.getMaxOffset()).toString());
+        metadataMap.put("TI_TIME_N_OUTLIERS", tiProcessor.getNumOutliers());
+        
         // Event counts.
         metadataMap.put("BAD_EVENTS", badEvents);
         
         // Physics event count.
-        metadataMap.put("PHYSICS_EVENTS",  physicsEvents);
-        
-        // Trigger rate in Hz to 2 decimal places.
-        /*
+        metadataMap.put("PHYSICS_EVENTS", physicsEvents);
+        
+        // Rough trigger rate.
         if (triggerRate != null && !Double.isInfinite(triggerRate) && !Double.isNaN(triggerRate)) {
             DecimalFormat df = new DecimalFormat("#.##");
             df.setRoundingMode(RoundingMode.CEILING);
-            LOGGER.info("setting trigger rate " + triggerRate);
+            LOGGER.info("Setting trigger rate to " + triggerRate + " Hz.");
             metadataMap.put("TRIGGER_RATE", Double.parseDouble(df.format(triggerRate)));
         } else {
-            metadataMap.put("TRIGGER_RATE", 0);
-        }
-        */
+            LOGGER.warning("Failed to calculate trigger rate.");
+        }        
 
         // Trigger type counts.
         for (Entry<TriggerType, Integer> entry : triggerCounts.entrySet()) {
@@ -327,7 +311,7 @@
         for (Entry<String, Object> entry : metadataMap.entrySet()) {
             sb.append("  " + entry.getKey() + " = " + entry.getValue() + '\n');
         }
-        LOGGER.info("file metadata ..." + '\n' + sb.toString());
+        LOGGER.info("File metadata ..." + '\n' + sb.toString());
 
         // Return the completed metadata map.
         return metadataMap;

Added: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/MetadataWriter.java
 =============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/MetadataWriter.java	(added)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/MetadataWriter.java	Wed Feb 10 14:26:49 2016
@@ -0,0 +1,137 @@
+package org.hps.crawler;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Logger;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+
+/**
+ * Creates metadata for a file and writes the results to a Python snippet that can be used as input to the SRS datacat.
+ * 
+ * @author Jeremy McCormick, SLAC
+ */
+public final class MetadataWriter {
+    
+    private static final Logger LOGGER = Logger.getLogger(MetadataWriter.class.getPackage().getName());        
+    private static final Options OPTIONS = new Options();
+    
+    private List<File> inputFiles;
+    private File outputDir = new File(".");    
+
+    static {
+        OPTIONS.addOption("h", "help", false, "print help and exit (overrides all other arguments)");
+        OPTIONS.addOption("d", "dir", true, "directory where metadata files should be written");
+    }
+            
+    public static void main(final String[] args) {
+        new MetadataWriter().parse(args).run();
+    }
+    
+    private MetadataWriter parse(final String[] args) {
+        
+        try { 
+            final CommandLine cl = new DefaultParser().parse(OPTIONS, args);
+
+            // Print help.
+            if (cl.hasOption("h") || args.length == 0) {
+                this.printUsage();
+            }
+                        
+            // List of input files.
+            if (!cl.getArgList().isEmpty()) {
+                inputFiles = new ArrayList<File>();
+                for (String arg : cl.getArgList()) {                    
+                    inputFiles.add(new File(arg));
+                }
+            } else {
+                printUsage();
+            }            
+            if (this.inputFiles.isEmpty()) {
+                throw new RuntimeException("Missing at least one input file to process.");
+            }
+            
+            // Output directory for metadata files.
+            if (cl.hasOption("d")) {
+                outputDir = new File(cl.getOptionValue("d"));
+                if (!outputDir.isDirectory()) {
+                    throw new IllegalArgumentException("The file " + outputDir.getPath() + " is not a directory.");
+                }
+            }
+         
+        } catch (final ParseException e) {
+            throw new RuntimeException("Error parsing command line options.", e);
+        }
+
+        LOGGER.info("Done parsing command line options.");
+
+        return this;
+    }
+
+    private void printUsage() {
+        final HelpFormatter help = new HelpFormatter();
+        help.printHelp(80, "MetadataWriter [options] file1 file2 [...]", "", OPTIONS, "");
+        System.exit(0);
+    }
+
+    private void run() {
+        for (File file : inputFiles) {
+            LOGGER.info("Creating metadata for " + file.getPath() + " ...");
+            Map<String, Object> metadata = DatacatHelper.createMetadata(file);
+            String metadataFileName = this.outputDir + File.separator + file.getName() + ".metadata";
+            writeString(toPyDict(metadata), new File(metadataFileName));            
+            LOGGER.info("Wrote metadata for " + file.getPath() + " to " + metadataFileName);
+        }
+    }
+               
+    private static String toPyDict(Map<String, Object> metadata) {
+        StringBuffer sb = new StringBuffer();
+        sb.append("{");
+        for (String name : DatacatHelper.SYSTEM_METADATA) {
+            if (metadata.containsKey(name)) {
+                Object value = metadata.get(name);
+                if (value instanceof Number) {
+                    sb.append("\"" + name + "\" : " + metadata.get(name) + ", ");
+                } else {
+                    sb.append("\"" + name + "\" : \"" + metadata.get(name) + "\", ");
+                }
+            }            
+        }
+        sb.setLength(sb.length() - 2);
+        sb.append(", \"versionMetadata\" : {");
+        for (Map.Entry<String, Object> entry : metadata.entrySet()) {
+            if (!DatacatHelper.isSystemMetadata(entry.getKey())) {
+               Object value = entry.getValue();
+               String name = entry.getKey();
+               if (value instanceof Number) {
+                   sb.append("\"" + name + "\" : " + metadata.get(name) + ", ");
+               } else {
+                   sb.append("\"" + name + "\" : \"" + metadata.get(name) + "\", ");
+               }
+            }
+        }
+        sb.setLength(sb.length() - 2);
+        sb.append("}");
+        sb.append("}");
+        return sb.toString();
+    }
+    
+    private static void writeString(String dictString, File file) {
+        try {
+            FileWriter fileWriter = new FileWriter(file);
+            fileWriter.write(dictString);
+            fileWriter.flush();
+            fileWriter.close();
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+}

Added: java/branches/jeremy-dev/crawler/src/main/python/crawler/create_dataset_from_metadata.py
 =============================================================================
--- java/branches/jeremy-dev/crawler/src/main/python/crawler/create_dataset_from_metadata.py	(added)
+++ java/branches/jeremy-dev/crawler/src/main/python/crawler/create_dataset_from_metadata.py	Wed Feb 10 14:26:49 2016
@@ -0,0 +1,136 @@
+#!/usr/bin/env python
+
+"""
+Insert files into datacat using previously written .metadata files or allow updating
+the metadata of an existing dataset if the "-u" option is specified.
+
+The dataset's resource directory and target folder in the datacat must be provided explicitly
+with command line arguments. 
+
+author: Jeremy McCormick, SLAC
+"""
+
+import os, sys, glob, argparse
+
+from datacat import *
+from datacat.error import DcException
+from datacat.model import Dataset
+
+# assumes datacat config is in current working dir
+client = client_from_config_file(path=os.getcwd() + '/default.cfg')
+
+def get_data_format(name):
+    if name.endswith('.aida'):
+        return 'AIDA'
+    elif name.endswith('.slcio'):
+        return 'LCIO'
+    elif name.endswith('.root'):
+        return 'ROOT' 
+    elif '.evio' in os.path.basename(name):
+        return 'EVIO'
+    raise Exception('Failed to get data format for %s' % name)
+
+def get_data_type(name, format):
+    if format == 'EVIO':
+        return 'RAW'
+    elif format == 'LCIO' and '_recon' in name:
+        return 'RECON'
+    elif format == 'ROOT' and '_dst' in name:
+        return 'DST'
+    elif format == 'ROOT' and '_dqm' in name:
+        return 'DQM'
+    elif format == 'AIDA' and '_dqm' in name:
+        return 'DQM'    
+    raise Exception('Failed to get data type for %s' % name)
+
+# define CL options
+parser = argparse.ArgumentParser(description='insert or update datasets from metadata files')
+parser.add_argument('--basedir', '-b', dest='basedir', nargs=1, help='base dir containing metadata files to read', required = True)
+parser.add_argument('--folder', '-f', dest='folder', nargs=1, help='target folder in the datacat',  required = True)
+parser.add_argument('--resource', '-r', dest='resource', nargs=1, help='actual directory of the files', required = True)
+parser.add_argument('--site', '-s', dest='site', nargs=1, help='datacat site (default JLAB)', default=['JLAB']) # TODO: default to dir of .metadata file
+parser.add_argument('--update', '-u', dest='update', help='allow updates to metadata of existing files', action='store_true')
+args = parser.parse_args()
+
+basedir = args.basedir[0]
+folder = args.folder[0]
+if args.resource[0] is not None:
+    resource = args.resource[0]
+else:
+    resource = basedir
+site = args.site[0]
+allow_update = args.update
+
+metadata_files = glob.glob(basedir + '/*.metadata')
+
+if len(metadata_files) == 0:
+    raise Exception("No metadata files found in %s dir." % basedir) 
+        
+for metadata_file in metadata_files:
+    
+    metadata = eval(open(metadata_file).read())
+    if not isinstance(metadata, dict):
+        raise Exception("Input metadata from %s is not a dict." % metadata_file)
+    
+    locationExtras = {}
+    for k, v in metadata.iteritems():
+        if k != 'versionMetadata':
+           locationExtras[k] = v
+        else:
+            versionMetadata = v
+
+    if versionMetadata is None:
+        versionMetadata = {}
+
+    # TODO: check for empty metadata here (should have some)
+
+    name = os.path.basename(metadata_file).replace('.metadata', '')
+    data_format = get_data_format(name)
+    data_type = get_data_type(name, data_format)
+    
+    print "adding dataset ..."
+    print "folder = %s" % folder
+    print "name = %s" % name
+    print "data_format = %s" % data_format
+    print "data_type = %s" % data_type
+    print "site = %s" % site
+    print "resource = %s" % (resource + '/' + name)
+    print "versionMetadata = " + repr(versionMetadata)
+    print "locationExtras = " + repr(locationExtras)
+    print
+    
+    dataset_exists = False    
+    try:
+        p = client.path("%s/%s" % (folder, name))
+        if isinstance(p, Dataset):
+           dataset_exists = True 
+    except DcException:
+        pass
+    
+    if not dataset_exists:
+        print "Creating new dataset for %s ..." % name
+        try:
+            client.mkds(folder,
+                        name,
+                        data_type,
+                        data_format, 
+                        site=site, 
+                        resource=resource + '/' + name,
+                        versionMetadata=versionMetadata,
+                        locationExtras=locationExtras)
+            print "%s was added successfully." % name
+        except DcException as e:
+            print 'Insert of %s failed!' % name
+            print repr(e)
+    else:
+        if allow_update:
+            print "Updating metadata on existing dataset %s ..." % name
+            try:
+                if 'checksum' in metadata:
+                    del metadata['checksum']
+                client.patchds(folder + '/' + name, metadata)
+            except DcException as e:
+                print "Update of %s failed!" % name
+                print repr(e)                
+        else:
+            raise Exception("Dataset already exists and updates are not allowed.")                

Modified: java/branches/jeremy-dev/distribution/pom.xml
 =============================================================================
--- java/branches/jeremy-dev/distribution/pom.xml	(original)
+++ java/branches/jeremy-dev/distribution/pom.xml	Wed Feb 10 14:26:49 2016
@@ -88,35 +88,31 @@
                                 </program>
                                 <program>
                                     <mainClass>org.hps.job.JobManager</mainClass>
-                                    <id>job</id>
+                                    <id>job-manager</id>
                                 </program>
                                 <program>
                                     <mainClass>org.hps.conditions.cli.CommandLineTool</mainClass>
-                                    <id>conddb</id>
-                                </program>
-                                <program>
-                                    <mainClass>org.hps.crawler.DatacatCrawler</mainClass>
-                                    <id>crawler</id>
+                                    <id>conditions-cli</id>
                                 </program>
                                 <program>
                                     <mainClass>org.hps.run.database.RunDatabaseCommandLine</mainClass>
-                                    <id>rundb</id>
+                                    <id>run-database-cli</id>
                                 </program>
                                 <program>
                                     <mainClass>org.hps.monitoring.application.Main</mainClass>
-                                    <id>monapp</id>
+                                    <id>monitoring-app</id>
                                 </program>
                                 <program>
                                     <mainClass>org.lcsim.geometry.compact.converter.Main</mainClass>
-                                    <id>detcnv</id>
+                                    <id>detector-converter</id>
                                 </program>
                                 <program>
                                     <mainClass>org.hps.record.evio.EvioFileProducer</mainClass>
-                                    <id>evio_file_producer</id>
+                                    <id>evio-file-producer</id>
                                 </program>
                                 <program>
                                     <mainClass>org.jlab.coda.et.apps.StartEt</mainClass>
-                                    <id>et_server</id>
+                                    <id>et-server</id>
                                     <commandLineArguments>
                                         <commandLineArgument>-f</commandLineArgument>
                                         <commandLineArgument>ETBuffer</commandLineArgument>
@@ -124,6 +120,18 @@
                                         <commandLineArgument>20000</commandLineArgument>
                                         <commandLineArgument>-v</commandLineArgument>
                                     </commandLineArguments>
+                                </program>
+                                <program>
+                                    <mainClass>org.hps.crawler.MetadataWriter</mainClass>
+                                    <id>dc-create-metadata</id>
+                                </program>
+                                <program>
+                                    <mainClass>org.hps.crawler.DatacatAddFile</mainClass>
+                                    <id>dc-add-file</id>
+                                </program>
+                                <program>
+                                    <mainClass>org.hps.crawler.DatacatCrawler</mainClass>
+                                    <id>dc-crawler</id>
                                 </program>
                             </programs>
                         </configuration>

Modified: java/branches/jeremy-dev/logging/src/main/resources/org/hps/logging/config/test_logging.properties
 =============================================================================
--- java/branches/jeremy-dev/logging/src/main/resources/org/hps/logging/config/test_logging.properties	(original)
+++ java/branches/jeremy-dev/logging/src/main/resources/org/hps/logging/config/test_logging.properties	Wed Feb 10 14:26:49 2016
@@ -66,7 +66,7 @@
 org.hps.recon.tracking.gbl.level = WARNING
 
 # run-database
-org.hps.run.database.level = WARNING
+org.hps.run.database.level = ALL
 
 # monitoring-application
 org.hps.monitoring.application.model.level = WARNING

Modified: java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/daqconfig/TriggerConfigEvioProcessor.java
 =============================================================================
--- java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/daqconfig/TriggerConfigEvioProcessor.java	(original)
+++ java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/daqconfig/TriggerConfigEvioProcessor.java	Wed Feb 10 14:26:49 2016
@@ -14,10 +14,11 @@
 import org.jlab.coda.jevio.EvioEvent;
 
 /**
- * Copied and modified from code in {@link org.hps.evio.TriggerConfigEvioReader} to extract DAQ config without
- * needing an output LCSim event.
+ * Extracts DAQ config strings from an EVIO event stream, saving a reference to the most recent
+ * {@link org.hps.record.triggerbank.TriggerConfigData} object.
  * <p>
- * Only the last valid DAQ config object is available once the job is finished.
+ * When event processing is completed, the <code>triggerConfig</code> variable should reference
+ * the last valid DAQ config and can be accessed using the {@link #getTriggerConfigData()} method.
  * 
  * @author Jeremy McCormick, SLAC
  */
@@ -25,8 +26,7 @@
 
     private Logger LOGGER = Logger.getLogger(TriggerConfigEvioProcessor.class.getPackage().getName());
             
-    private TriggerConfigData triggerConfig = null;    
-    private Integer run = null;
+    private TriggerConfigData triggerConfig = null;
     private int timestamp = 0;
 
     /**
@@ -35,32 +35,21 @@
     @Override
     public void process(EvioEvent evioEvent) {       
         try {            
-            // Initialize the run number if necessary.
-            if (run == null) {
-                try {
-                    run = EvioEventUtilities.getRunNumber(evioEvent);
-                    LOGGER.info("run " + run);
-                } catch (NullPointerException e) {
+           
+            // Set current timestamp from head bank.
+            BaseStructure headBank = EvioEventUtilities.getHeadBank(evioEvent);
+            if (headBank != null) {
+                if (headBank.getIntData()[3] != 0) {
+                    timestamp = headBank.getIntData()[3];
+                    LOGGER.finest("Set timestamp " + timestamp + " from head bank.");
                 }
             }
-                        
-            // Can only start parsing DAQ banks once the run is set.
-            if (run != null) {
                 
-                // Set current timestamp from head bank.
-                BaseStructure headBank = EvioEventUtilities.getHeadBank(evioEvent);
-                if (headBank != null) {
-                    if (headBank.getIntData()[3] != 0) {
-                        timestamp = headBank.getIntData()[3];
-                        LOGGER.finest("set timestamp " + timestamp + " from head bank");
-                    }
-                }
-                
-                // Parse config data from the EVIO banks.
-                parseEvioData(evioEvent);                                                          
-            }
+            // Parse config data from the EVIO banks.
+            parseEvioData(evioEvent);                                                          
+            
         } catch (Exception e) {
-            LOGGER.log(Level.WARNING, "Error parsing DAQ config from EVIO.", e);
+            LOGGER.log(Level.SEVERE, "Error parsing DAQ config from EVIO.", e);
         }
     }
     
@@ -72,46 +61,68 @@
      */
     private void parseEvioData(EvioEvent evioEvent) {
         Map<Crate, String> stringData = null;
+        // Loop over top banks.
         for (BaseStructure bank : evioEvent.getChildrenList()) {
             if (bank.getChildCount() <= 0) {
                 continue;
             }
-            int crate = bank.getHeader().getTag();
+            int crateNumber = bank.getHeader().getTag();
+            // Loop over sub-banks.
             for (BaseStructure subBank : bank.getChildrenList()) {
+                // In trigger config bank?
                 if (EvioBankTag.TRIGGER_CONFIG.equals(subBank)) {
-                    if (subBank.getStringData() == null) {
-                        LOGGER.warning("Trigger config bank is missing string data.");
-                    } else {
+                    // Has a valid string array?
+                    if (subBank.getStringData() != null) {                    
                         try { 
+                            
+                            // Make sure string data map is initialized for this event.
                             if (stringData == null) {
                                 stringData = new HashMap<Crate, String>();
-                            }
-                            //LOGGER.fine("got raw trigger config string data ..." + '\n' + subBank.getStringData()[0]);
-                            stringData.put(TriggerConfigData.Crate.fromCrateNumber(crate), subBank.getStringData()[0]);
+                            }                                                       
+                            
+                            // Get the Crate enum from crate number (if this returns null then the crate is ignored).
+                            Crate crate = Crate.fromCrateNumber(crateNumber);
+                            
+                            // Is crate number valid?
+                            if (crate != null) {
+                                
+                                // Is there valid string data in the array?
+                                if (subBank.getStringData().length > 0) {
+                                    // Add string data to map.
+                                    stringData.put(crate, subBank.getStringData()[0]);
+                                    LOGGER.info("Added crate " + crate.getCrateNumber() + " data ..." + '\n' + subBank.getStringData()[0]);
+                                } /*else { 
+                                    LOGGER.warning("The string bank has no data.");
+                                }*/
+                            } 
                         } catch (Exception e) {
-                            LOGGER.log(Level.WARNING, "Failed to parse crate " + crate + " config.", e);
+                            LOGGER.log(Level.SEVERE, "Error parsing DAQ config from crate " + crateNumber, e);
+                            e.printStackTrace();
                         }
                     }
-                }
+                } /*else {
+                    LOGGER.warning("Trigger config bank is missing string data.");
+                }*/
             }
         }
         if (stringData != null) {
+            LOGGER.info("Found " + stringData.size() + " config data strings in event " + evioEvent.getEventNumber());
             TriggerConfigData currentConfig = new TriggerConfigData(stringData, timestamp);
             if (currentConfig.isValid()) {
                 triggerConfig = currentConfig;
-                LOGGER.warning("Found valid config in event num " + evioEvent.getEventNumber());
+                LOGGER.info("Found valid DAQ config data in event num " + evioEvent.getEventNumber());
             } else {
-                LOGGER.warning("Skipping invalid config from event num "  + evioEvent.getEventNumber());
+                LOGGER.warning("Skipping invalid DAQ config data in event num "  + evioEvent.getEventNumber());
             }
         }
     }
    
     /**
-     * Get a map of bank number to string data for the current config.
+     * Get the last valid set of config data that was found in the event stream.
      * 
-     * @return a map of bank to trigger config data
+     * @return a map of bank number to the corresponding trigger config string data
      */
     public TriggerConfigData getTriggerConfigData() {
         return this.triggerConfig;
     }
-}
+}

Added: java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/triggerbank/TiTimeOffsetCalculator.java
 =============================================================================
--- java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/triggerbank/TiTimeOffsetCalculator.java	(added)
+++ java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/triggerbank/TiTimeOffsetCalculator.java	Wed Feb 10 14:26:49 2016
@@ -0,0 +1,56 @@
+package org.hps.record.triggerbank;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Calculate TI time offset given lists of min and max offsets and the number of outliers.
+ * 
+ * @author Jeremy McCormick, SLAC
+ */
+public class TiTimeOffsetCalculator {
+    
+    /* Constants from TiTimeOffsetEvioProcessor. */
+    private final static int MAX_OUTLIERS = 10;
+    private final static double MIN_RANGE = 0.99e9;
+    
+    private List<Long> minOffsets = new ArrayList<Long>();
+    private List<Long> maxOffsets = new ArrayList<Long>();
+    private int totalOutliers;
+    
+    public void addMinOffset(long minOffset) {
+        minOffsets.add(minOffset);
+    }
+    
+    public void addMaxOffset(long maxOffset) {
+        maxOffsets.add(maxOffset);
+    }
+    
+    public void addNumOutliers(int nOutliers) {
+        totalOutliers += nOutliers;
+    }
+    
+    public long calculateTimeOffset() {
+        
+        if (minOffsets.size() == 0) {
+            throw new RuntimeException("The min offsets list has no data.");
+        }
+        if (maxOffsets.size() == 0) {
+            throw new RuntimeException("The max offsets list has no data.");
+        }
+        
+        Collections.sort(minOffsets);
+        Collections.sort(maxOffsets);
+        
+        long minOffset = minOffsets.get(0);
+        long maxOffset = maxOffsets.get(maxOffsets.size() - 1);
+                
+        final long offsetRange = maxOffset - minOffset;
+        if (offsetRange > MIN_RANGE && totalOutliers < MAX_OUTLIERS) {
+            return minOffset;
+        } else {
+            return 0L;
+        }
+    }
+}

Modified: java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/triggerbank/TiTimeOffsetEvioProcessor.java
 =============================================================================
--- java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/triggerbank/TiTimeOffsetEvioProcessor.java	(original)
+++ java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/triggerbank/TiTimeOffsetEvioProcessor.java	Wed Feb 10 14:26:49 2016
@@ -56,6 +56,18 @@
         }
     }
     
+    public long getMinOffset() {
+        return this.minOffset;
+    }
+    
+    public long getMaxOffset() {
+        return this.maxOffset;
+    }
+    
+    public int getNumOutliers() {
+        return this.nOutliers;
+    }
+    
     public long getTiTimeOffset() {
         final long offsetRange = maxOffset - minOffset;
         if (offsetRange > minRange && nOutliers < maxOutliers) {

Modified: java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/triggerbank/TriggerConfigData.java
 =============================================================================
--- java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/triggerbank/TriggerConfigData.java	(original)
+++ java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/triggerbank/TriggerConfigData.java	Wed Feb 10 14:26:49 2016
@@ -26,20 +26,20 @@
             this.crate = crate;
         }
         
-        public int crate() {
+        public int getCrateNumber() {
             return crate;
         }
         
         public static Crate fromCrateNumber(int crateNumber) {
             for (Crate crate : Crate.values()) {
-                if (crate.crate() == crateNumber) {
+                if (crate.getCrateNumber() == crateNumber) {
                     return crate;
                 }
             }
             return null;
-        }
+        }              
     }
-              
+                  
     private int timestamp;
     private Map<Crate, String> data;
     
@@ -99,7 +99,7 @@
     public DAQConfig loadDAQConfig(int run) {
         EvioDAQParser parser = new EvioDAQParser();
         for (Entry<Crate, String> entry : data.entrySet()) {
-            parser.parse(entry.getKey().crate(), run, new String[] {entry.getValue()});
+            parser.parse(entry.getKey().getCrateNumber(), run, new String[] {entry.getValue()});
         }
         ConfigurationManager.updateConfiguration(parser);
         return ConfigurationManager.getInstance();

Added: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/AbstractRunBuilder.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/AbstractRunBuilder.java	(added)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/AbstractRunBuilder.java	Wed Feb 10 14:26:49 2016
@@ -0,0 +1,35 @@
+package org.hps.run.database;
+
+/**
+ * Class for incrementally building records for the run database.
+ * <p>
+ * Classes that add information to the run summary or create objects
+ * for insertion into the run database should extend this class.
+ * 
+ * @author Jeremy McCormick, SLAC
+ */
+public abstract class AbstractRunBuilder {
+    
+    private RunSummaryImpl runSummary;
+        
+    void setRunSummary(RunSummaryImpl runSummary) {
+        this.runSummary = runSummary;
+    }
+    
+    RunSummaryImpl getRunSummary() {
+        return runSummary;
+    }
+    
+    int getRun() {
+        if (this.runSummary == null) {
+            throw new IllegalStateException("The run summary object was never set.");
+        }
+        return this.runSummary.getRun();
+    }
+    
+    /**
+     * Abstract method that sub-classes should implement to update the run summary or 
+     * create objects for insertion into the database.
+     */
+    abstract void build();
+}

Added: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/DatabaseUpdater.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/DatabaseUpdater.java	(added)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/DatabaseUpdater.java	Wed Feb 10 14:26:49 2016
@@ -0,0 +1,61 @@
+package org.hps.run.database;
+
+import java.sql.Connection;
+
+import org.hps.record.triggerbank.TriggerConfigData;
+
+// TODO: add EPICS and scaler update
+
+public class DatabaseUpdater {
+    
+    private Connection connection;
+    private TriggerConfigData triggerConfig;
+    private RunSummary runSummary;
+    private boolean updateExisting = false;
+                      
+    DatabaseUpdater(Connection connection) {
+        this.connection = connection;
+    }
+    
+    void setTriggerConfigData(TriggerConfigData triggerConfig) {
+        this.triggerConfig = triggerConfig;
+    }
+    
+    void setRunSummary(RunSummary runSummary) {
+        this.runSummary = runSummary;
+    }
+    
+    void setUpdateExisting(boolean updateExisting) {
+        this.updateExisting = updateExisting;
+    }
+
+    void update() {
+
+        int run = runSummary.getRun();
+        
+        final DaoProvider runFactory = new DaoProvider(connection);
+        final RunSummaryDao runSummaryDao = runFactory.getRunSummaryDao();
+        
+        RunManager runManager = new RunManager();
+        runManager.setRun(runSummary.getRun());
+        if (runManager.runExists()) {
+            if (updateExisting) {
+                runSummaryDao.updateRunSummary(runSummary);
+            } else {
+                throw new RuntimeException("Run already exists and updates are not allowed.");
+            }
+        } else {
+            runSummaryDao.insertRunSummary(runSummary);
+        }        
+        
+        final TriggerConfigDao configDao = runFactory.getTriggerConfigDao();
+        if (configDao.getTriggerConfig(run) != null) {
+            if (updateExisting) {
+                configDao.deleteTriggerConfig(run);
+            } else {
+                throw new RuntimeException("Run already exists and updates are not allowed.");
+            }
+        }
+        configDao.insertTriggerConfig(this.triggerConfig, run);
+    }
+}

Added: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/DatacatBuilder.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/DatacatBuilder.java	(added)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/DatacatBuilder.java	Wed Feb 10 14:26:49 2016
@@ -0,0 +1,191 @@
+package org.hps.run.database;
+
+import java.io.File;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Logger;
+
+import org.hps.record.triggerbank.TiTimeOffsetCalculator;
+import org.srs.datacat.client.Client;
+import org.srs.datacat.client.exception.DcClientException;
+import org.srs.datacat.model.DatasetModel;
+import org.srs.datacat.model.DatasetResultSetModel;
+import org.srs.datacat.model.dataset.DatasetWithViewModel;
+import org.srs.datacat.shared.DatasetLocation;
+
+final class DatacatBuilder extends AbstractRunBuilder {
+    
+    private static final Logger LOGGER = Logger.getLogger(DatacatBuilder.class.getPackage().getName());
+    
+    private static final String[] METADATA_FIELDS = {
+        "TI_TIME_MIN_OFFSET", 
+        "TI_TIME_MAX_OFFSET", 
+        "TI_TIME_N_OUTLIERS", 
+        "END_TIMESTAMP",
+        "GO_TIMESTAMP",
+        "PRESTART_TIMESTAMP"
+    };
+    
+    private Client datacatClient;
+    private String site;
+    private String folder;    
+    private List<File> files;
+                
+    private static long calculateTiTimeOffset(DatasetResultSetModel results) {
+        TiTimeOffsetCalculator calc = new TiTimeOffsetCalculator();
+        for (DatasetModel ds : results) {
+            DatasetWithViewModel view = (DatasetWithViewModel) ds;
+            Map<String, Object> metadata = view.getMetadataMap();                        
+            if (metadata.containsKey("TI_TIME_MIN_OFFSET")) {
+                calc.addMinOffset(Long.parseLong((String) metadata.get("TI_TIME_MIN_OFFSET")));
+            }
+            if (metadata.containsKey("TI_TIME_MAX_OFFSET")) {
+                calc.addMaxOffset(Long.parseLong((String) metadata.get("TI_TIME_MAX_OFFSET")));
+            }
+            if (metadata.containsKey("TI_TIME_N_OUTLIERS")) {
+                calc.addNumOutliers((int) (long) metadata.get("TI_TIME_N_OUTLIERS"));
+            }
+        }
+        return calc.calculateTimeOffset();
+    }
+    
+    private static long getTotalEvents(DatasetResultSetModel results) {
+        long totalEvents = 0;
+        for (DatasetModel ds : results) {
+            DatasetWithViewModel view = (DatasetWithViewModel) ds;
+            DatasetLocation loc = (DatasetLocation) view.getViewInfo().getLocations().iterator().next();
+            totalEvents += loc.getEventCount();
+        }
+        return totalEvents;
+    }
+    
+    private static Integer getPrestartTimestamp(DatasetResultSetModel results) {
+        DatasetWithViewModel ds = (DatasetWithViewModel) results.getResults().get(0);
+        if (ds.getMetadataMap().containsKey("PRESTART_TIMESTAMP")) {
+            return (int) (long) ds.getMetadataMap().get("PRESTART_TIMESTAMP");
+        } else {
+            return null;
+        }
+    }
+    
+    private static Integer getEndTimestamp(DatasetResultSetModel results) {        
+        DatasetWithViewModel ds = (DatasetWithViewModel) results.getResults().get(results.getResults().size() - 1);
+        if (ds.getMetadataMap().containsKey("END_TIMESTAMP")) {
+            return (int) (long) ds.getMetadataMap().get("END_TIMESTAMP");
+        } else {
+            return null;
+        }
+    }
+    
+    
+    private static Integer getGoTimestamp(DatasetResultSetModel results) {
+        DatasetWithViewModel ds = (DatasetWithViewModel) results.getResults().get(0);
+        if (ds.getMetadataMap().containsKey("GO_TIMESTAMP")) {
+            return (int) (long) ds.getMetadataMap().get("GO_TIMESTAMP");
+        } else {
+            return null;
+        }
+    }
+    
+    private static double calculateTriggerRate(Integer startTimestamp, Integer endTimestamp, long nEvents) {
+        if (startTimestamp == null) {
+            throw new IllegalArgumentException("The start timestamp is null.");
+        }
+        if (endTimestamp == null) {
+            throw new IllegalArgumentException("The end timestamp is null.");
+        }
+        if (endTimestamp - startTimestamp == 0) {
+            throw new IllegalArgumentException("The start and end timestamp are the same.");
+        }
+        if (nEvents == 0) {
+            throw new IllegalArgumentException("The number of events is zero.");
+        }
+        double triggerRate = (double) nEvents / ((double) endTimestamp - (double) startTimestamp);
+        return triggerRate;
+    }
+        
+    void build() {
+        
+        if (getRunSummary() == null) {
+            throw new RuntimeException("The run summary was not set.");
+        }        
+        if (this.datacatClient == null) {
+            throw new RuntimeException("The datacat client was not set.");
+        }        
+        if (this.folder == null) {
+            throw new RuntimeException("The target folder was not set.");
+        }
+        if (this.site == null) {
+            throw new RuntimeException("The site was not set.");
+        }
+        
+        DatasetResultSetModel results = null;
+        try {
+            results = findDatasets();
+        } catch (DcClientException e) {
+            System.err.println("HTTP status: " + e.getStatusCode());
+            throw new RuntimeException(e);
+        }
+        
+        files = DatacatUtilities.toFileList(results);
+        
+        if (results.getResults().isEmpty()) {
+            throw new RuntimeException("No results found for datacat search.");
+        }
+        
+        long tiTimeOffset = calculateTiTimeOffset(results);
+        getRunSummary().setTiTimeOffset(tiTimeOffset);
+        
+        long totalEvents = getTotalEvents(results);
+        getRunSummary().setTotalEvents(totalEvents);
+        
+        int nFiles = results.getResults().size();
+        getRunSummary().setTotalFiles(nFiles);
+        
+        int prestartTimestamp = getPrestartTimestamp(results);
+        getRunSummary().setPrestartTimestamp(prestartTimestamp);
+        
+        int goTimestamp = getGoTimestamp(results);
+        getRunSummary().setGoTimestamp(goTimestamp);
+        
+        int endTimestamp = getEndTimestamp(results);
+        getRunSummary().setEndTimestamp(endTimestamp);
+        
+        double triggerRate = calculateTriggerRate(prestartTimestamp, endTimestamp, totalEvents);
+        getRunSummary().setTriggerRate(triggerRate);        
+    }
+                         
+    private DatasetResultSetModel findDatasets() {
+        
+        LOGGER.info("finding EVIO datasets for run " + getRun() + " in " + this.folder + " at " + this.site + " ...");
+        
+        DatasetResultSetModel results = datacatClient.searchForDatasets(
+                this.folder,
+                "current",
+                this.site,
+                "fileFormat eq 'EVIO' AND dataType eq 'RAW' AND runMin eq " + getRun(),
+                new String[] {"FILE"},
+                METADATA_FIELDS
+                );
+        
+        LOGGER.info("found " + results.getResults().size() + " EVIO datasets for run " + getRun());
+                               
+        return results;
+    }    
+    
+    void setSite(String site) {
+        this.site = site;
+    }
+    
+    void setDatacatClient(Client datacatClient) {
+        this.datacatClient = datacatClient;
+    }
+    
+    void setFolder(String folder) {
+        this.folder = folder;
+    }
+    
+    List<File> getFileList() {
+        return files;
+    }
+}

Added: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/DatacatUtilities.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/DatacatUtilities.java	(added)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/DatacatUtilities.java	Wed Feb 10 14:26:49 2016
@@ -0,0 +1,29 @@
+package org.hps.run.database;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.srs.datacat.model.DatasetModel;
+import org.srs.datacat.model.DatasetResultSetModel;
+import org.srs.datacat.model.dataset.DatasetWithViewModel;
+
+final class DatacatUtilities {
+    
+    private DatacatUtilities() {
+        throw new RuntimeException("Do not instantiate this class.");
+    }
+    
+    static final List<File> toFileList(DatasetResultSetModel datasets) {
+        List<File> files = new ArrayList<File>();
+        for (DatasetModel dataset : datasets.getResults()) {
+            String resource = 
+                    ((DatasetWithViewModel) dataset).getViewInfo().getLocations().iterator().next().getResource();
+            if (resource.startsWith("/ss")) {
+                resource = "/cache" + resource;
+            }
+            files.add(new File(resource));
+        }
+        return files;
+    }
+}

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsDataDao.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsDataDao.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsDataDao.java	Wed Feb 10 14:26:49 2016
@@ -16,7 +16,7 @@
      *
      * @param run the run number
      */
-    public void deleteEpicsData(EpicsType epicsType, final int run);
+    public void deleteEpicsData(EpicsType epicsType, int run);
 
     /**
      * Get EPICS data by run.
@@ -34,5 +34,5 @@
      *
      * @param epicsDataList the list of EPICS data
      */
-    void insertEpicsData(List<EpicsData> epicsDataList);   
+    void insertEpicsData(List<EpicsData> epicsDataList, int run);
 }

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java	Wed Feb 10 14:26:49 2016
@@ -192,12 +192,14 @@
     /**
      * Insert a list of EPICS data into the database.
      * <p>
-     * The run number comes from the header information.
+     * By default, the run number from the header will be used, but it will be overridden
+     * if it does not match the <code>run</code> argument.  (There are a few data files
+     * where the run in the EPICS header is occasionally wrong.)
      *
      * @param epicsDataList the list of EPICS data
      */
     @Override
-    public void insertEpicsData(final List<EpicsData> epicsDataList) {
+    public void insertEpicsData(final List<EpicsData> epicsDataList, int run) {
         if (epicsDataList.isEmpty()) {
             throw new IllegalArgumentException("The EPICS data list is empty.");
         }
@@ -211,9 +213,11 @@
                 if (epicsHeader == null) {
                     throw new IllegalArgumentException("The EPICS data is missing a header.");
                 }
-                insertHeaderStatement.setInt(1, epicsHeader.getRun());
+                insertHeaderStatement.setInt(1, run); /* Don't use run from bank as it is sometimes wrong! */
                 insertHeaderStatement.setInt(2, epicsHeader.getSequence());
                 insertHeaderStatement.setInt(3, epicsHeader.getTimestamp());
+                LOGGER.finer("creating EPICs record with run = " + run + " ; seq = " 
+                        + epicsHeader.getSequence() + "; ts = " + epicsHeader.getTimestamp());
                 final int rowsCreated = insertHeaderStatement.executeUpdate();
                 if (rowsCreated == 0) {
                     throw new SQLException("Creation of EPICS header record failed; no rows affected.");

Added: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/LivetimeBuilder.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/LivetimeBuilder.java	(added)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/LivetimeBuilder.java	Wed Feb 10 14:26:49 2016
@@ -0,0 +1,86 @@
+package org.hps.run.database;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+
+import org.hps.record.evio.EvioFileUtilities;
+import org.hps.record.scalers.ScalerData;
+import org.hps.record.scalers.ScalerUtilities;
+import org.hps.record.scalers.ScalersEvioProcessor;
+import org.hps.record.scalers.ScalerUtilities.LiveTimeIndex;
+import org.jlab.coda.jevio.EvioEvent;
+import org.jlab.coda.jevio.EvioException;
+import org.jlab.coda.jevio.EvioReader;
+
+/**
+ * Finds the last scaler data bank in a run's EVIO files, scanning from the
+ * last file backwards, and uses it to set the clock, FCUP TDC and FCUP TRG
+ * livetime measurements on the run summary.
+ */
+public class LivetimeBuilder extends AbstractRunBuilder {
+
+    /* EVIO files for the run; scanned from last to first. */
+    private List<File> files;
+
+    /* Last scaler data found in the run, or null if none was found. */
+    private ScalerData scalerData;
+
+    /**
+     * Set the list of EVIO files to scan for scaler data.
+     *
+     * @param files the run's EVIO files in run order
+     */
+    void setFiles(List<File> files) {
+        this.files = files;
+    }
+
+    /**
+     * Scan the EVIO files for the last scaler data bank and set the livetime
+     * measurements on the run summary if one is found.
+     *
+     * @throws RuntimeException if the file list was never set or a file cannot be read
+     */
+    @Override
+    void build() {
+        if (files == null) {
+            throw new RuntimeException("The list of files was never set.");
+        }
+        ScalersEvioProcessor processor = new ScalersEvioProcessor();
+        processor.setResetEveryEvent(false);
+        /* Scan backwards; the final scaler bank is most likely in the last file. */
+        for (int fileIndex = files.size() - 1; scalerData == null && fileIndex >= 0; fileIndex--) {
+            File file = files.get(fileIndex);
+            EvioReader reader = null;
+            try {
+                reader = EvioFileUtilities.open(file, true);
+                EvioEvent evioEvent = reader.parseNextEvent();
+                while (evioEvent != null) {
+                    processor.process(evioEvent);
+                    evioEvent = reader.parseNextEvent();
+                }
+                if (processor.getCurrentScalerData() != null) {
+                    scalerData = processor.getCurrentScalerData();
+                }
+            } catch (EvioException | IOException e) {
+                throw new RuntimeException("Error processing EVIO file: " + file.getPath(), e);
+            } finally {
+                /* Always close the reader to avoid leaking file handles. */
+                if (reader != null) {
+                    try {
+                        reader.close();
+                    } catch (final Exception e) {
+                        e.printStackTrace();
+                    }
+                }
+            }
+        }
+
+        if (scalerData != null) {
+            double[] livetimes = ScalerUtilities.getLiveTimes(scalerData);
+            getRunSummary().setLivetimeClock(livetimes[LiveTimeIndex.CLOCK.ordinal()]);
+            getRunSummary().setLivetimeFcupTdc(livetimes[LiveTimeIndex.FCUP_TDC.ordinal()]);
+            getRunSummary().setLivetimeFcupTrg(livetimes[LiveTimeIndex.FCUP_TRG.ordinal()]);
+        }
+    }
+}

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java	Wed Feb 10 14:26:49 2016
@@ -6,6 +6,7 @@
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 import java.util.Map.Entry;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -29,6 +30,7 @@
 import org.hps.record.scalers.ScalersEvioProcessor;
 import org.hps.record.triggerbank.AbstractIntData.IntBankDefinition;
 import org.hps.record.triggerbank.HeadBankData;
+import org.hps.record.triggerbank.TiTimeOffsetCalculator;
 import org.hps.record.triggerbank.TiTimeOffsetEvioProcessor;
 import org.hps.record.triggerbank.TriggerConfigData;
 import org.hps.record.triggerbank.TriggerConfigData.Crate;
@@ -40,6 +42,7 @@
 import org.srs.datacat.model.DatasetModel;
 import org.srs.datacat.model.DatasetResultSetModel;
 import org.srs.datacat.model.dataset.DatasetWithViewModel;
+import org.srs.datacat.shared.DatasetLocation;
 
 /**
  * Builds a complete {@link RunSummary} object from various data sources, including the data catalog and the run
@@ -143,7 +146,7 @@
      * Default folder for file search.
      */
     private String folder;
-        
+    
     /**
      * Reload state for the current run number for testing.
      */
@@ -208,15 +211,13 @@
     private void findEvioDatasets() {
         
         LOGGER.info("finding EVIO datasets for run " + getRun() + " in folder " + this.folder + " at site " + this.site);
-                
+        
         DatasetResultSetModel results = datacatClient.searchForDatasets(
                 this.folder,
                 "current",
                 this.site,
                 "fileFormat eq 'EVIO' AND dataType eq 'RAW' AND runMin eq " + getRun(),
                 null,
-                null,
-                null,
                 null
                 );
         
@@ -262,7 +263,7 @@
         // Insert the EPICS data.
         if (epicsData != null && !epicsData.isEmpty()) {
             LOGGER.info("inserting EPICS data");
-            runFactory.getEpicsDataDao().insertEpicsData(epicsData);
+            runFactory.getEpicsDataDao().insertEpicsData(epicsData, getRun());
         } else {
             LOGGER.warning("no EPICS data to insert");
         }
@@ -663,12 +664,21 @@
         EvioReader reader = null;
         Integer endTimestamp = null;
         try {
-            reader = EvioFileUtilities.open(lastEvioFile, true);
-            EvioEvent evioEvent = reader.parseNextEvent();
-            while (evioEvent != null) {
+            reader = EvioFileUtilities.open(lastEvioFile, true);            
+            while (true) {
+                if (reader.getNumEventsRemaining() == 0) {
+                    break;
+                }
+                EvioEvent evioEvent = null;
+                try {                                   
+                    evioEvent = reader.parseNextEvent();
+                } catch (Exception e) {
+                    LOGGER.severe("Error parsing EVIO event; skipping to next event.");
+                    continue;
+                }
                 if (EventTagConstant.END.matches(evioEvent)) {
                     endTimestamp = EvioEventUtilities.getControlEventData(evioEvent)[0];
-                    LOGGER.fine("found END timestamp " + endTimestamp);
+                    LOGGER.fine("found END timestamp " + endTimestamp + " in event " + evioEvent.getEventNumber());
                     break;
                 }
                 BaseStructure headBank = headBankDefinition.findBank(evioEvent);
@@ -677,10 +687,9 @@
                         endTimestamp = headBank.getIntData()[0];
                     }
                 }
-                evioEvent = reader.parseNextEvent();
-            }
-        } catch (IOException | EvioException e) {
-            throw new RuntimeException("Error reading first EVIO file.", e);
+            }
+        } catch (IOException | EvioException e2) {
+            throw new RuntimeException("Error getting END timestamp.", e2);
         } finally {
             if (reader != null) {
                 try {
@@ -690,7 +699,9 @@
                 }
             }
         }
-        runSummary.setEndTimestamp(endTimestamp);
+        if (endTimestamp != null) {
+            runSummary.setEndTimestamp(endTimestamp);
+        }
         LOGGER.fine("end timestamp was set to " + endTimestamp);
     }
 
@@ -816,16 +827,15 @@
             startTimestamp = runSummary.getGoTimestamp();
         } else if (runSummary.getPrestartTimestamp() != null) {
             startTimestamp = runSummary.getPrestartTimestamp();
-        } else {
-            LOGGER.warning("Could not get starting timestamp for trigger rate calculation.");
-        }
-        if (runSummary.getEndTimestamp() != null && startTimestamp != null) {
+        } 
+        Integer endTimestamp = runSummary.getEndTimestamp();
+        if (endTimestamp!= null && startTimestamp != null && runSummary.getTotalEvents() > 0) {
             double triggerRate = ((double) runSummary.getTotalEvents() /
                     ((double) runSummary.getEndTimestamp() - (double) runSummary.getGoTimestamp()));
             runSummary.setTriggerRate(triggerRate);
             LOGGER.info("trigger rate set to " + runSummary.getTriggerRate());
         } else {
-            LOGGER.warning("Skipped trigger rate calculation because a timestamp is missing.");
+            LOGGER.warning("Skipped trigger rate calculation due to missing data.");
         }
     }
 }

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDao.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDao.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDao.java	Wed Feb 10 14:26:49 2016
@@ -45,4 +45,11 @@
      * @return <code>true</code> if <code>run</code> exists in the database
      */
     boolean runSummaryExists(int run);
+    
+    /**
+     * Update a run summary that already exists.
+     * 
+     * @param runSummary the run summary to update
+     */
+    void updateRunSummary(RunSummary runSummary);
 }

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java	Wed Feb 10 14:26:49 2016
@@ -27,6 +27,12 @@
             + " go_timestamp, end_timestamp, trigger_rate, trigger_config_name, ti_time_offset," 
             + " livetime_clock, livetime_fcup_tdc, livetime_fcup_trg, target, notes, created, updated)"
             + " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, NOW(), NOW())";
+    
+    private static final String UPDATE = "UPDATE run_summaries SET nevents = ?, nfiles = ?, prestart_timestamp = ?,"
+            + " go_timestamp = ?, end_timestamp = ?, trigger_rate = ?, trigger_config_name = ?, ti_time_offset = ?," 
+            + " livetime_clock = ?, livetime_fcup_tdc = ?, livetime_fcup_trg = ?, target = ?, notes = ?, updated = NOW()"
+            + " WHERE run = ?";
+    
                      
     /**
      * Select record by run number.
@@ -196,6 +202,41 @@
             }
         }
     }
+    
+    @Override
+    public void updateRunSummary(RunSummary runSummary) {
+        PreparedStatement preparedStatement = null;
+        try {
+            preparedStatement = connection.prepareStatement(UPDATE);                                   
+            preparedStatement.setLong(1, runSummary.getTotalEvents());
+            preparedStatement.setInt(2, runSummary.getTotalFiles());
+            preparedStatement.setObject(3, runSummary.getPrestartTimestamp());
+            preparedStatement.setObject(4, runSummary.getGoTimestamp());
+            preparedStatement.setObject(5, runSummary.getEndTimestamp());
+            preparedStatement.setObject(6, runSummary.getTriggerRate());
+            preparedStatement.setObject(7, runSummary.getTriggerConfigName());
+            preparedStatement.setObject(8, runSummary.getTiTimeOffset());
+            preparedStatement.setObject(9, runSummary.getLivetimeClock());
+            preparedStatement.setObject(10, runSummary.getLivetimeFcupTdc());
+            preparedStatement.setObject(11, runSummary.getLivetimeFcupTrg());
+            preparedStatement.setObject(12, runSummary.getTarget());
+            preparedStatement.setObject(13, runSummary.getNotes());
+            preparedStatement.setInt(14, runSummary.getRun());
+            LOGGER.fine(preparedStatement.toString());
+            preparedStatement.executeUpdate();
+        } catch (final SQLException e) {
+            throw new RuntimeException(e);
+        } finally {
+            if (preparedStatement != null) {
+                try {
+                    preparedStatement.close();
+                } catch (final SQLException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+    }
+    
    
     /**
      * Return <code>true</code> if a run summary exists in the database for the run number.

Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryImpl.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryImpl.java	(original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryImpl.java	Wed Feb 10 14:26:49 2016
@@ -75,7 +75,7 @@
     private Integer totalFiles;
    
     /**
-     * Get the name of the trigger config file.
+     * Name of the trigger config file.
      */
     private String triggerConfigName;
 

Added: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/SpreadsheetBuilder.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/SpreadsheetBuilder.java	(added)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/SpreadsheetBuilder.java	Wed Feb 10 14:26:49 2016
@@ -0,0 +1,77 @@
+package org.hps.run.database;
+
+import java.io.File;
+import java.util.logging.Logger;
+
+import org.hps.conditions.run.RunSpreadsheet;
+import org.hps.conditions.run.RunSpreadsheet.RunData;
+
+/**
+ * Updates a {@link RunSummary} with information from the run spreadsheet
+ * (CSV format), namely the trigger config name, the shift notes and the
+ * target setting.
+ *
+ * @author Jeremy McCormick, SLAC
+ * @see RunSummary
+ * @see RunSummaryImpl
+ */
+final class SpreadsheetBuilder extends AbstractRunBuilder {
+
+    private static final Logger LOGGER = Logger.getLogger(SpreadsheetBuilder.class.getPackage().getName());
+
+    /* The run spreadsheet file in CSV format. */
+    private File spreadsheetFile;
+
+    /**
+     * Set the run spreadsheet file.
+     *
+     * @param spreadsheetFile file object pointing to the run spreadsheet (CSV format)
+     */
+    void setSpreadsheetFile(File spreadsheetFile) {
+        this.spreadsheetFile = spreadsheetFile;
+    }
+
+    /**
+     * Update the current run summary from information in the run spreadsheet.
+     *
+     * @throws IllegalStateException if the spreadsheet file or run summary was never set
+     */
+    @Override
+    void build() {
+        if (this.spreadsheetFile == null) {
+            throw new IllegalStateException("The spreadsheet file was never set.");
+        }
+        if (getRunSummary() == null) {
+            throw new IllegalStateException("The run summary was never set.");
+        }
+        LOGGER.fine("updating from spreadsheet file " + spreadsheetFile.getPath());
+        RunSpreadsheet runSpreadsheet = new RunSpreadsheet(spreadsheetFile);
+        RunData data = runSpreadsheet.getRunMap().get(getRunSummary().getRun());
+        if (data != null) {
+            LOGGER.info("found run data ..." + '\n' + data.getRecord());
+
+            // Trigger config name.
+            String triggerConfigName = data.getRecord().get("trigger_config");
+            if (triggerConfigName != null) {
+                getRunSummary().setTriggerConfigName(triggerConfigName);
+                LOGGER.info("set trigger config name <" + getRunSummary().getTriggerConfigName() + "> from spreadsheet");
+            }
+
+            // Notes.
+            String notes = data.getRecord().get("notes");
+            if (notes != null) {
+                getRunSummary().setNotes(notes);
+                LOGGER.info("set notes <" + getRunSummary().getNotes() + "> from spreadsheet");
+            }
+
+            // Target.
+            String target = data.getRecord().get("target");
+            if (target != null) {
+                getRunSummary().setTarget(target);
+                LOGGER.info("set target <" + getRunSummary().getTarget() + "> from spreadsheet");
+            }
+        } else {
+            LOGGER.warning("No record for this run was found in spreadsheet.");
+        }
+    }
+}

Added: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigBuilder.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigBuilder.java	(added)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/TriggerConfigBuilder.java	Wed Feb 10 14:26:49 2016
@@ -0,0 +1,85 @@
+package org.hps.run.database;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+
+import org.hps.record.daqconfig.TriggerConfigEvioProcessor;
+import org.hps.record.evio.EvioFileUtilities;
+import org.hps.record.triggerbank.TriggerConfigData;
+import org.jlab.coda.jevio.EvioEvent;
+import org.jlab.coda.jevio.EvioException;
+import org.jlab.coda.jevio.EvioReader;
+
+/**
+ * Extracts the DAQ trigger configuration for a run by scanning its EVIO files
+ * from last to first until a valid {@link TriggerConfigData} is found.
+ */
+public class TriggerConfigBuilder extends AbstractRunBuilder {
+
+    /* Valid trigger configuration data, or null if none has been found. */
+    private TriggerConfigData triggerConfig;
+
+    /* EVIO files for the run; scanned from last to first. */
+    private List<File> files = null;
+
+    /**
+     * Set the list of EVIO files to scan for the trigger configuration.
+     *
+     * @param files the run's EVIO files in run order
+     */
+    void setFiles(List<File> files) {
+        this.files = files;
+    }
+
+    /**
+     * Scan the EVIO files for a valid trigger configuration, stopping at the
+     * first one found.
+     *
+     * @throws RuntimeException if the file list was never set or a file cannot be read
+     */
+    @Override
+    void build() {
+        if (files == null) {
+            throw new RuntimeException("The list of files was never set.");
+        }
+        TriggerConfigEvioProcessor processor = new TriggerConfigEvioProcessor();
+        for (int fileIndex = files.size() - 1; triggerConfig == null && fileIndex >= 0; fileIndex--) {
+            File file = files.get(fileIndex);
+            EvioReader reader = null;
+            try {
+                reader = EvioFileUtilities.open(file, true);
+                EvioEvent evioEvent = reader.parseNextEvent();
+                while (evioEvent != null) {
+                    processor.process(evioEvent);
+                    if (processor.getTriggerConfigData() != null
+                            && processor.getTriggerConfigData().isValid()) {
+                        triggerConfig = processor.getTriggerConfigData();
+                        break;
+                    }
+                    evioEvent = reader.parseNextEvent();
+                }
+            } catch (EvioException | IOException e) {
+                throw new RuntimeException("Error processing EVIO file: " + file.getPath(), e);
+            } finally {
+                /* Always close the reader to avoid leaking file handles. */
+                if (reader != null) {
+                    try {
+                        reader.close();
+                    } catch (final Exception e) {
+                        e.printStackTrace();
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * Get the trigger configuration found by {@link #build()}.
+     *
+     * @return the trigger configuration or <code>null</code> if none was found
+     */
+    TriggerConfigData getTriggerConfigData() {
+        return triggerConfig;
+    }
+}

Added: java/branches/jeremy-dev/run-database/src/test/java/org/hps/run/database/RunBuilderTest.java
 =============================================================================
--- java/branches/jeremy-dev/run-database/src/test/java/org/hps/run/database/RunBuilderTest.java	(added)
+++ java/branches/jeremy-dev/run-database/src/test/java/org/hps/run/database/RunBuilderTest.java	Wed Feb 10 14:26:49 2016
@@ -0,0 +1,63 @@
+package org.hps.run.database;
+
+import java.io.File;
+import java.sql.Connection;
+import java.util.List;
+
+import junit.framework.TestCase;
+
+import org.hps.conditions.database.ConnectionParameters;
+import org.srs.datacat.client.ClientBuilder;
+
+public class RunBuilderTest extends TestCase {
+    
+    private static final int RUN = 5403;
+    private static String DATACAT_URL = "http://localhost:8080/datacat-v0.5-SNAPSHOT/r";
+    private static String SPREADSHEET = "/work/hps/rundb/HPS_Runs_2015_Sheet1.csv";
+    private static String FOLDER = "/HPS/test";
+    private static String SITE = "SLAC";
+    
+    private static final ConnectionParameters CONNECTION_PARAMETERS = 
+            new ConnectionParameters("root", "derp", "hps_run_db", "localhost");
+        
+    public void testRunBuilder() throws Exception {
+        
+        RunSummaryImpl runSummary = new RunSummaryImpl(RUN);
+        
+        // datacat
+        DatacatBuilder datacatBuilder = new DatacatBuilder();
+        datacatBuilder.setDatacatClient(new ClientBuilder().setUrl(DATACAT_URL).build());
+        datacatBuilder.setFolder(FOLDER);
+        datacatBuilder.setSite(SITE);
+        datacatBuilder.setRunSummary(runSummary);
+        datacatBuilder.build();
+        
+        List<File> files = datacatBuilder.getFileList();
+        
+        // livetime measurements
+        LivetimeBuilder livetimeBuilder = new LivetimeBuilder();
+        livetimeBuilder.setRunSummary(runSummary);
+        livetimeBuilder.setFiles(files);
+        livetimeBuilder.build();
+        
+        // trigger config
+        TriggerConfigBuilder configBuilder = new TriggerConfigBuilder();
+        configBuilder.setFiles(files);
+        configBuilder.build();
+        
+        // run spreadsheet
+        SpreadsheetBuilder spreadsheetBuilder = new SpreadsheetBuilder();
+        spreadsheetBuilder.setSpreadsheetFile(new File(SPREADSHEET));
+        spreadsheetBuilder.setRunSummary(datacatBuilder.getRunSummary());
+        spreadsheetBuilder.build();
+        
+        // database updater
+        Connection connection = CONNECTION_PARAMETERS.createConnection();
+        DatabaseUpdater updater = new DatabaseUpdater(connection);
+        updater.setRunSummary(runSummary);
+        System.out.println("built run summary ...");
+        System.out.println(runSummary);
+        //updater.setTriggerConfigData(configBuilder.getTriggerConfigData());
+        //updater.update();
+    }
+}