Commit in java/trunk/conditions/src on MAIN
main/java/org/hps/conditions/AbstractConditionsObject.java  +1  322 -> 323
                            /BasicConditionsObjectFactory.java  -68  322 removed
                            /ConditionsConverterRegistery.java  -84  322 removed
                            /ConditionsDriver.java  +2 -2  322 -> 323
                            /ConditionsObject.java  +1 -5  322 -> 323
                            /ConditionsObjectCollection.java  +10 -1  322 -> 323
                            /ConditionsObjectConverter.java  +128  added 323
                            /ConditionsObjectFactory.java  -38  322 removed
                            /ConditionsRecordConverter.java  +5 -10  322 -> 323
                            /ConditionsTableConstants.java  +3 -2  322 -> 323
                            /ConditionsTableMetaData.java  -37  322 removed
                            /ConditionsTableMetaData.java  +53  added 323
                            /ConditionsTableMetaDataXMLLoader.java  +99  added 323
                            /ConditionsTableRegistry.java  -114  322 removed
                            /ConnectionParameters.java  +12  322 -> 323
                            /ConverterXMLLoader.java  +45  added 323
                            /DatabaseConditionsConverter.java  -34  322 removed
                            /DatabaseConditionsManager.java  +156  added 323
                            /DatabaseConditionsReader.java  +19 -38  322 -> 323
                            /QueryBuilder.java  +41  added 323
                            /TestRunConditionsReader.java  +5 -2  322 -> 323
main/java/org/hps/conditions/beam/BeamConverterRegistry.java  +12  added 323
                                 /BeamCurrent.java  +4  322 -> 323
                                 /BeamCurrentCollection.java  -16  322 removed
                                 /BeamCurrentConverter.java  -77  322 removed
main/java/org/hps/conditions/ecal/EcalBadChannel.java  +4  322 -> 323
                                 /EcalBadChannelCollection.java  -18  322 removed
                                 /EcalBadChannelConverter.java  -80  322 removed
                                 /EcalCalibration.java  +4  322 -> 323
                                 /EcalCalibrationCollection.java  -19  322 removed
                                 /EcalCalibrationConverter.java  -84  322 removed
                                 /EcalChannel.java  +61  322 -> 323
                                 /EcalChannelCollection.java  -74  322 removed
                                 /EcalChannelConverter.java  -85  322 removed
                                 /EcalConditions.java  +8 -2  322 -> 323
                                 /EcalConditionsConverter.java  +10 -9  322 -> 323
                                 /EcalConditionsLoader.java  +2 -1  322 -> 323
                                 /EcalConverterRegistry.java  +39  added 323
                                 /EcalGain.java  +4  322 -> 323
                                 /EcalGainCollection.java  -17  322 removed
                                 /EcalGainConverter.java  -85  322 removed
main/java/org/hps/conditions/svt/SvtBadChannel.java  +4  322 -> 323
                                /SvtBadChannelCollection.java  -20  322 removed
                                /SvtBadChannelConverter.java  -80  322 removed
                                /SvtCalibration.java  +5 -1  322 -> 323
                                /SvtCalibrationCollection.java  -19  322 removed
                                /SvtCalibrationConverter.java  -95  322 removed
                                /SvtChannel.java  +57  322 -> 323
                                /SvtChannelCollection.java  -69  322 removed
                                /SvtChannelConverter.java  -78  322 removed
                                /SvtConditions.java  +4  322 -> 323
                                /SvtConditionsConverter.java  +10 -8  322 -> 323
                                /SvtConditionsLoader.java  +3  322 -> 323
                                /SvtConverterRegistry.java  +61  added 323
                                /SvtDaqMapping.java  +55 -1  322 -> 323
                                /SvtDaqMappingCollection.java  -67  322 removed
                                /SvtDaqMappingConverter.java  -89  322 removed
                                /SvtGain.java  +4  322 -> 323
                                /SvtGainCollection.java  -17  322 removed
                                /SvtGainConverter.java  -98  322 removed
                                /SvtPulseParameters.java  +4  322 -> 323
                                /SvtPulseParametersCollection.java  -16  322 removed
                                /SvtPulseParametersConverter.java  -102  322 removed
                                /SvtTimeShift.java  +17  322 -> 323
                                /SvtTimeShiftCollection.java  -28  322 removed
                                /SvtTimeShiftConverter.java  -85  322 removed
main/resources/org/hps/conditions/config/conditions_database_testrun_2013.xml  +187  added 323
main/scripts/mysql_backup.sh  +16  added 323
            /mysql_backup_tables_only.sh  +16  added 323
            /mysql_console.sh  +2  added 323
main/sql/conditions_database_tables_only.sql  +270  added 323
        /conditions_database_testrun_2012_full.sql  +409  added 323
        /conditions_db_tables_only.sql  -267  322 removed
        /conditions_db_testrun_full.sql  -406  322 removed
test/java/org/hps/conditions/ConditionsDatabaseObjectTest.java  -110  322 removed
                            /ConditionsDriverTest.java  +12 -4  322 -> 323
                            /DatabaseConditionsManagerTest.java  +30  added 323
                            /DatabaseConditionsReaderTest.java  -46  322 removed
test/java/org/hps/conditions/beam/BeamCurrentTest.java  +11 -3  322 -> 323
test/java/org/hps/conditions/ecal/EcalConditionsConverterTest.java  +16 -19  322 -> 323
                                 /EcalConditionsLoaderTest.java  +16 -17  322 -> 323
test/java/org/hps/conditions/svt/SvtConditionsConverterTest.java  +15 -16  322 -> 323
                                /SvtConditionsLoaderTest.java  +17 -18  322 -> 323
+1969 -2781
16 added + 34 removed + 33 modified, total 83 files
Checking in all my work from yesterday.  Big rewrite of the conditions system to be more generic.  All objects besides ConditionsRecord are now completely generic and use a common method.  Add some scripts.  Add database config file.  More changes coming.
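
For orientation, here is a minimal usage sketch of the generic access path this commit introduces, based on the DatabaseConditionsManager, ConditionsObjectConverter and BeamCurrent code below. The detector name, run number and conditions key are illustrative placeholders, not values taken from this commit; only the config resource path corresponds to the file added here.

    import org.hps.conditions.DatabaseConditionsManager;
    import org.hps.conditions.beam.BeamCurrent.BeamCurrentCollection;

    public class ConditionsUsageSketch {
        public static void main(String[] args) {
            // Create the manager and register it as the default conditions manager.
            DatabaseConditionsManager manager = DatabaseConditionsManager.createInstance();

            // Read connection parameters, table meta data and converters from the new XML config.
            manager.configure("/org/hps/conditions/config/conditions_database_testrun_2013.xml");

            // Placeholder detector name and run number.
            manager.setDetectorName("HPS-TestRun-v5");
            manager.setRunNumber(1351);
            manager.setup();

            // Every conditions type is now fetched through the same generic call;
            // the key "beam_current" is an assumed table name, shown for illustration.
            BeamCurrentCollection beamCurrent =
                    manager.getConditionsData(BeamCurrentCollection.class, "beam_current");
        }
    }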

java/trunk/conditions/src/main/java/org/hps/conditions
AbstractConditionsObject.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/AbstractConditionsObject.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/AbstractConditionsObject.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -10,6 +10,7 @@
  * The abstract implementation of {@link ConditionsObject}.
  * @author Jeremy McCormick <[log in to unmask]>
  */
+// FIXME: Database query methods need to be rewritten to use QueryBuilder (which itself needs to be written).
 public abstract class AbstractConditionsObject implements ConditionsObject {
 
     private ConnectionManager _connectionManager = null;

java/trunk/conditions/src/main/java/org/hps/conditions
BasicConditionsObjectFactory.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/BasicConditionsObjectFactory.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/BasicConditionsObjectFactory.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,68 +0,0 @@
-package org.hps.conditions;
-
-import org.hps.conditions.AbstractConditionsObject.FieldValueMap;
-
-/**
- * The basic implementation of the {@link ConditionsObjectFactory} interface.
- * 
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class BasicConditionsObjectFactory implements ConditionsObjectFactory {
-    
-    private ConnectionManager _connectionManager;
-    private ConditionsTableRegistry _tableRegistry;
-    
-    protected BasicConditionsObjectFactory(ConnectionManager connectionManager, ConditionsTableRegistry tableRegistry) {
-        _connectionManager = connectionManager;
-        _tableRegistry = tableRegistry;
-    }
-    
-    /**
-     * This method is the primary one in the API for creating new conditions objects.
-     */
-    @SuppressWarnings("unchecked")
-    public <T> T createObject(
-            Class<? extends ConditionsObject> klass, 
-            String tableName,  
-            int rowId,
-            FieldValueMap fieldValues,
-            boolean isReadOnly) throws ConditionsObjectException {                
-        ConditionsObject newObject = null;
-        try {
-            newObject = klass.newInstance();
-        } catch (InstantiationException x) { 
-            throw new RuntimeException(x);
-        } catch (IllegalAccessException x) {
-            throw new RuntimeException(x);
-        }        
-        if (rowId != -1)
-            newObject.setRowId(rowId);
-        newObject.setFieldValues(fieldValues);
-        newObject.setConnectionManager(_connectionManager);
-        ConditionsTableMetaData tableMetaData = _tableRegistry.getTableMetaData(tableName);
-        if (tableMetaData == null) {
-            throw new ConditionsObjectException("No meta data found for table: " + tableName);
-        }
-        newObject.setTableMetaData(tableMetaData);
-        if (isReadOnly)
-            newObject.setIsReadOnly();
-        return (T)newObject;
-    }   
-    
-    /**
-     * Get the <code>ConditionsTableRegistry</code> that will be used by the factory to get
-     * table meta data.
-     * @return The conditions table registry.
-     */
-    public ConditionsTableRegistry getTableRegistry() {
-        return _tableRegistry;
-    }
-    
-    /**
-     * Get table meta data by name from the registry.
-     * @return The table meta data or null if does not exist.
-     */
-    public ConditionsTableMetaData getTableMetaData(String name) {
-        return _tableRegistry.getTableMetaData(name);
-    }
-}

java/trunk/conditions/src/main/java/org/hps/conditions
ConditionsConverterRegistery.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsConverterRegistery.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsConverterRegistery.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,84 +0,0 @@
-package org.hps.conditions;
-
-import org.hps.conditions.beam.BeamCurrentConverter;
-import org.hps.conditions.ecal.EcalBadChannelConverter;
-import org.hps.conditions.ecal.EcalCalibrationConverter;
-import org.hps.conditions.ecal.EcalChannelConverter;
-import org.hps.conditions.ecal.EcalConditionsConverter;
-import org.hps.conditions.ecal.EcalGainConverter;
-import org.hps.conditions.svt.SvtPulseParametersConverter;
-import org.hps.conditions.svt.SvtBadChannelConverter;
-import org.hps.conditions.svt.SvtCalibrationConverter;
-import org.hps.conditions.svt.SvtChannelConverter;
-import org.hps.conditions.svt.SvtConditionsConverter;
-import org.hps.conditions.svt.SvtDaqMappingConverter;
-import org.hps.conditions.svt.SvtGainConverter;
-import org.hps.conditions.svt.SvtTimeShiftConverter;
-import org.lcsim.conditions.ConditionsManager;
-
-/**
- * This class registers the full set of conditions converters onto the manager.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-class ConditionsConverterRegistery {
-    
-    /**
-     * This method will register all the conditions converters onto the given manager.
-     * @param manager The conditions manager.
-     */
-    static void register(ConditionsManager manager) {
-        
-        // Create the table meta data registry.
-        ConditionsTableRegistry tableRegistry = new ConditionsTableRegistry();
-        tableRegistry.registerDefaultTableMetaData();
-        
-        // Create the object factory.
-        ConditionsObjectFactory factory = 
-                new BasicConditionsObjectFactory(ConnectionManager.getConnectionManager(), tableRegistry);
-                
-        // ConditionsRecords with validity meta data.
-        manager.registerConditionsConverter(new ConditionsRecordConverter(factory));
-        
-        // SVT combined conditions.
-        manager.registerConditionsConverter(new SvtConditionsConverter(factory));
-
-        // SVT channel map.
-        manager.registerConditionsConverter(new SvtChannelConverter(factory));
-        
-        // SVT DAQ map.
-        manager.registerConditionsConverter(new SvtDaqMappingConverter(factory));
-                        
-        // SVT gains.  
-        manager.registerConditionsConverter(new SvtGainConverter(factory));
-        
-        // SVT pulse parameters.
-        manager.registerConditionsConverter(new SvtPulseParametersConverter(factory));
-        
-        // SVT calibrations.
-        manager.registerConditionsConverter(new SvtCalibrationConverter(factory));
-        
-        // SVT time shift by sensor.
-        manager.registerConditionsConverter(new SvtTimeShiftConverter(factory));
-        
-        // SVT bad channels.
-        manager.registerConditionsConverter(new SvtBadChannelConverter(factory));
-        
-        // ECAL combined conditions.
-        manager.registerConditionsConverter(new EcalConditionsConverter(factory));
-        
-        // ECAL channel map.
-        manager.registerConditionsConverter(new EcalChannelConverter(factory));        
-        
-        // ECAL bad channels.
-        manager.registerConditionsConverter(new EcalBadChannelConverter(factory));
-        
-        // ECAL gains.
-        manager.registerConditionsConverter(new EcalGainConverter(factory));
-        
-        // ECAL calibrations.
-        manager.registerConditionsConverter(new EcalCalibrationConverter(factory));
-                
-        // Beam current condition.
-        manager.registerConditionsConverter(new BeamCurrentConverter(factory));                        
-    }
-}

java/trunk/conditions/src/main/java/org/hps/conditions
ConditionsDriver.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsDriver.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsDriver.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -16,11 +16,11 @@
  * @author Jeremy McCormick <[log in to unmask]>
  */
 public class ConditionsDriver extends Driver {
-
+    
     /**
      * This method updates a new detector with SVT and ECal conditions data.
      */
-    public void detectorChanged(Detector detector) {        
+    public void detectorChanged(Detector detector) {
         loadSvtConditions(detector);       
         loadEcalConditions(detector);
     }

java/trunk/conditions/src/main/java/org/hps/conditions
ConditionsObject.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsObject.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsObject.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -131,9 +131,5 @@
     /**
      * Set the object to read only mode.  This cannot be changed back once it is set.
      */
-    void setIsReadOnly();
-    
-    // String getTableName();
-    
-    // String[] getFieldNames();
+    void setIsReadOnly();    
 }

java/trunk/conditions/src/main/java/org/hps/conditions
ConditionsObjectCollection.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsObjectCollection.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsObjectCollection.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -9,7 +9,7 @@
 
     List<T> objects = new ArrayList<T>();    
     ConditionsTableMetaData _tableMetaData;
-    int _collectionId;
+    int _collectionId = -1;
     boolean _isReadOnly;
     boolean _isDirty;
     boolean _isNew;
@@ -84,6 +84,7 @@
         }
     }
 
+    // TODO: This method needs to get the next collection ID if it doesn't have one already.
     public void insertAll() throws ConditionsObjectException, SQLException {
         if (!isNew()) {
             throw new ConditionsObjectException("Collection already exists in the database.");
@@ -113,4 +114,12 @@
     public boolean isNew() {
         return _isNew;
     }
+    
+    void setCollectionId(int collectionId) {
+        _collectionId = collectionId;
+    }
+    
+    void setIsReadOnly(boolean isReadOnly) {
+        _isReadOnly = isReadOnly;
+    }
 }
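
The TODO added to insertAll() above is left open in this commit; one plausible way to obtain the next collection ID is to ask the database, using the same ConnectionManager.query() call that ConditionsObjectConverter uses below. A minimal sketch under that assumption (the helper itself is hypothetical and not part of this commit):

    package org.hps.conditions;

    import java.sql.ResultSet;
    import java.sql.SQLException;

    class CollectionIdSketch {
        // Hypothetical helper: fetch the next unused collection ID for a table.
        static int getNextCollectionId(String tableName) throws SQLException {
            ResultSet resultSet = ConnectionManager.getConnectionManager()
                    .query("SELECT MAX(collection_id) + 1 FROM " + tableName);
            resultSet.next();
            int next = resultSet.getInt(1);
            // An empty table yields SQL NULL; start numbering at 1 in that case.
            return resultSet.wasNull() ? 1 : next;
        }
    }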

java/trunk/conditions/src/main/java/org/hps/conditions
ConditionsObjectConverter.java added at 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsObjectConverter.java	                        (rev 0)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsObjectConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -0,0 +1,128 @@
+package org.hps.conditions;
+
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+
+import org.hps.conditions.AbstractConditionsObject.FieldValueMap;
+import org.lcsim.conditions.ConditionsConverter;
+import org.lcsim.conditions.ConditionsManager;
+
+public abstract class ConditionsObjectConverter<T> implements ConditionsConverter<T> {
+    
+    public ConditionsObjectConverter() {        
+    }
+    
+    /**
+     * Classes that extend this must define this method to specify what type the converter
+     * is able to handle.
+     * @return The Class that this converter handles.
+     */
+    public abstract Class getType();
+    
+    @SuppressWarnings({ "unchecked", "rawtypes" })
+    public T getData(ConditionsManager conditionsManager, String name) {
+        
+        // This type of converter only works with the DatabaseConditionsManager class.
+        DatabaseConditionsManager databaseConditionsManager = null;
+        if (conditionsManager instanceof DatabaseConditionsManager) {
+            databaseConditionsManager = (DatabaseConditionsManager)conditionsManager;
+        } else {
+            throw new RuntimeException("This converter requires a ConditionsManager of type DatabaseConditionsManager.");
+        }
+        
+        // Get the table meta data from the key given by the caller.
+        ConditionsTableMetaData tableMetaData = databaseConditionsManager.findTableMetaData(name);
+        
+        // Create a collection to return.
+        ConditionsObjectCollection collection;
+        try {
+             collection = tableMetaData.getCollectionClass().newInstance();
+        } catch (InstantiationException | IllegalAccessException e) {
+            throw new RuntimeException(e);
+        }
+        
+        // Get the ConditionsRecord with the meta-data, which will use the current run number from the manager.        
+        ConditionsRecordCollection conditionsRecords = ConditionsRecord.find(conditionsManager, name);
+                
+        if (conditionsRecords.size() == 0) {
+            // There were no records returned, which is a fatal error.
+            throw new RuntimeException("No conditions found with key: " + name);
+        } else if (conditionsRecords.size() > 1) {            
+            if (!allowMultipleCollections())
+                // If there are multiple records returned but this is not allowed by the converter, 
+                // then this is a fatal error.
+                throw new RuntimeException("Multiple conditions records returned but this is not allowed.");
+        } else {
+            // The collection ID is only set on the collection object if all rows have the same
+            // collection ID.  Otherwise, the collection contains a mix of objects with different 
+            // collectionIDs and has no meaningful ID of its own.
+            collection.setCollectionId(conditionsRecords.get(0).getCollectionId());
+        }
+
+        // Loop over conditions records.  This will usually just be one record.
+        for (ConditionsRecord conditionsRecord : conditionsRecords) {
+                    
+            // Get the table name.
+            String tableName = conditionsRecord.getTableName();
+            
+            // Get the collection ID.
+            int collectionId = conditionsRecord.getCollectionId();
+            
+            // Build a select query.
+            String query = QueryBuilder.buildSelect(tableName, collectionId, tableMetaData.getFieldNames(), "id ASC");
+        
+            // Query the database.
+            ResultSet resultSet = ConnectionManager.getConnectionManager().query(query);
+            
+            try {
+                // Loop over rows.
+                while (resultSet.next()) {
+                    // Create new ConditionsObject.
+                    ConditionsObject newObject = createConditionsObject(resultSet, tableMetaData);
+                    
+                    // Add new object to collection, which will also assign it a collection ID if applicable.
+                    collection.add(newObject);
+                }
+            } catch (SQLException e) {
+                throw new RuntimeException(e);
+            }                 
+        }
+        
+        // Return new collection.
+        return (T)collection;
+    }
+
+    private ConditionsObject createConditionsObject(ResultSet resultSet, 
+            ConditionsTableMetaData tableMetaData) throws SQLException {
+        ResultSetMetaData metaData = resultSet.getMetaData();
+        int rowId = resultSet.getInt(1);
+        int ncols = metaData.getColumnCount();
+        FieldValueMap fieldValues = new FieldValueMap();
+        for (int i=2; i<=ncols; i++) {
+            fieldValues.put(metaData.getColumnName(i), resultSet.getObject(i));
+        }  
+        ConditionsObject newObject = null;
+        try {
+             newObject = tableMetaData.getObjectClass().newInstance();
+        } catch (InstantiationException | IllegalAccessException e) {
+            throw new RuntimeException(e);
+        }
+        try {
+            newObject.setRowId(rowId);
+        } catch (ConditionsObjectException e) {
+            throw new RuntimeException(e);
+        }
+        try {
+            newObject.setTableMetaData(tableMetaData);
+        } catch (ConditionsObjectException e) {
+            throw new RuntimeException(e);
+        }
+        newObject.setFieldValues(fieldValues);
+        return newObject;
+    }
+    
+    public boolean allowMultipleCollections() {
+        return false;
+    }
+}
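
With the generic getData() above, a concrete converter reduces to naming the collection type it produces (see BeamConverterRegistry later in this commit). The allowMultipleCollections() hook defaults to false; a hypothetical converter that merges rows from several conditions records would override it, roughly as sketched here (all class names in this sketch are placeholders):

    package org.hps.conditions.example; // hypothetical package, illustration only

    import org.hps.conditions.AbstractConditionsObject;
    import org.hps.conditions.ConditionsObjectCollection;
    import org.hps.conditions.ConditionsObjectConverter;

    public class ExampleConverterSketch {

        // Placeholder condition object and collection, standing in for any real pair.
        public static class ExampleObject extends AbstractConditionsObject {
        }

        public static class ExampleCollection extends ConditionsObjectCollection<ExampleObject> {
        }

        // Converter that accepts rows from more than one conditions record.
        public static final class ExampleConverter extends ConditionsObjectConverter<ExampleCollection> {

            public Class getType() {
                return ExampleCollection.class;
            }

            public boolean allowMultipleCollections() {
                return true;
            }
        }
    }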

java/trunk/conditions/src/main/java/org/hps/conditions
ConditionsObjectFactory.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsObjectFactory.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsObjectFactory.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,38 +0,0 @@
-package org.hps.conditions;
-
-import org.hps.conditions.AbstractConditionsObject.FieldValueMap;
-
-/**
- * This is the primary interface in the API for generically instantiating {@link ConditionsObject} objects
- * from the database.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public interface ConditionsObjectFactory {
-        
-    /**
-     * Create a <code>ConditionsObject</code> generically, given a concrete class,
-     * an associated table in the conditions database, and a map of field values.
-     * 
-     * The collection ID should be assigned externally to this method by adding it 
-     * to a <code>ConditionsObjectCollection</code>.
-     * 
-     * @param klass The concrete Class to be instantiated, which must have a zero argument constructor.
-     * @param tableName The name of the table in the conditions database.
-     * @param collectionId The unique collection ID which should be set to -1 for a new collection.
-     * @param fieldValues The field values of the object.
-     * @return The new ConditionsObject with concrete type <code>klass</code>.
-     * @throws ConditionsObjectException if there is a problem creating the object.
-     */
-    public <T> T createObject(
-            Class<? extends ConditionsObject> klass,
-            String tableName,
-            int rowId,
-            FieldValueMap fieldValues,
-            boolean isReadOnly) throws ConditionsObjectException;
-    
-    /**
-     * Get the registry of table meta data used by this factory.
-     * @return The registry of table meta data.
-     */
-    public ConditionsTableRegistry getTableRegistry();
-}

java/trunk/conditions/src/main/java/org/hps/conditions
ConditionsRecordConverter.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsRecordConverter.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsRecordConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -3,6 +3,7 @@
 import java.sql.ResultSet;
 import java.sql.SQLException;
 
+import org.lcsim.conditions.ConditionsConverter;
 import org.lcsim.conditions.ConditionsManager;
 
 /**
@@ -10,16 +11,9 @@
  * @author Jeremy McCormick <[log in to unmask]>
  * @version $Id: ConditionsRecordConverter.java,v 1.5 2013/10/15 23:24:47 jeremy Exp $
  */
-public class ConditionsRecordConverter extends DatabaseConditionsConverter<ConditionsRecordCollection> {
-           
+public class ConditionsRecordConverter implements ConditionsConverter<ConditionsRecordCollection> {
+                       
     /**
-     * Class constructor.
-     */
-    public ConditionsRecordConverter(ConditionsObjectFactory objectFactory) {
-        super(objectFactory);
-    }
-        
-    /**
      * Get the ConditionsRecords for a run.  This method ignores the name argument 
      * and will fetch all conditions records for the current run.
      * @param manager The current conditions manager.
@@ -30,7 +24,8 @@
                                 
         ConditionsRecordCollection records = new ConditionsRecordCollection();
         
-        ConnectionManager connectionManager = this.getConnectionManager();
+        ConnectionManager connectionManager = ConnectionManager.getConnectionManager();
+        
         String tableName = connectionManager.getConnectionParameters().getConditionsTable();
         
         String query = "SELECT * from " 

java/trunk/conditions/src/main/java/org/hps/conditions
ConditionsTableConstants.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsTableConstants.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsTableConstants.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,8 +1,9 @@
 package org.hps.conditions;
 
 /**
- * This is a static set of data defining default table names and lookup key values for conditions data.
- * Unless otherwise requested via the converter interface, these are always the same.
+ * This is a static set of constants defining default table names and lookup key values for conditions data.
+ * The actual table names for the conditions data are defined externally in an XML configuration file read by the 
+ * {@link DatabaseConditionsManager}.
  */
 public final class ConditionsTableConstants {
     

java/trunk/conditions/src/main/java/org/hps/conditions
ConditionsTableMetaData.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsTableMetaData.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsTableMetaData.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,37 +0,0 @@
-package org.hps.conditions;
-
-import java.util.Set;
-
-/**
- * This class contains basic meta data information about tables in the conditions
- * database, including their name and list of fields.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class ConditionsTableMetaData {
-    
-    String _tableName;
-    Set<String> _fieldNames = null;
-    
-    ConditionsTableMetaData(String tableName, Set<String> fieldNames) {
-        _tableName = tableName;
-        _fieldNames = fieldNames;
-    }
-        
-    /*
-    ConditionsTableMetaData(String tableName, String[] fields) {
-        _tableName = tableName;
-        _fieldNames = new HashSet<String>();
-        for (String field : fields) {
-            _fieldNames.add(field);
-        }
-    }
-    */
-    
-    Set<String> getFieldNames() {
-        return _fieldNames;
-    }
-    
-    String getTableName() {
-        return _tableName;
-    }       
-}

java/trunk/conditions/src/main/java/org/hps/conditions
ConditionsTableMetaData.java added at 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsTableMetaData.java	                        (rev 0)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsTableMetaData.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -0,0 +1,53 @@
+package org.hps.conditions;
+
+import java.util.LinkedHashSet;
+import java.util.Set;
+
+/**
+ * This class provides meta data about a conditions table, including a
+ * list of conditions data fields (not including collection ID or row ID
+ * which are assumed).  It also has references to the classes which are
+ * used to map the data onto Java classes via the {@link ConditionsObject}
+ * and {@link ConditionsObjectCollection} APIs.
+ * 
+ * @author Jeremy McCormick <[log in to unmask]>
+ *
+ */
+public class ConditionsTableMetaData {
+    
+    String _tableName;
+    Class<? extends ConditionsObject> _objectClass;
+    Class<? extends ConditionsObjectCollection<?>> _collectionClass;
+    Set<String> _fieldNames = new LinkedHashSet<String>();
+        
+    ConditionsTableMetaData(String tableName, 
+            Class<? extends ConditionsObject> objectClass, 
+            Class<? extends ConditionsObjectCollection<?>> collectionClass) {
+        _tableName = tableName;
+        _objectClass = objectClass;
+        _collectionClass = collectionClass;
+    }
+    
+    Class<? extends ConditionsObject> getObjectClass() {
+        return _objectClass;
+    }
+    
+    Class<? extends ConditionsObjectCollection<?>> getCollectionClass() {
+        return _collectionClass;
+    }
+    
+    String[] getFieldNames() {
+        return _fieldNames.toArray(new String[]{});
+    }
+       
+    void addField(String name) {
+        if (_fieldNames.contains(name)) {
+            throw new RuntimeException("The table meta data already has a field called " + name);
+        }
+        _fieldNames.add(name);
+    }
+    
+    public String getTableName() {
+        return _tableName;
+    }
+}
\ No newline at end of file

java/trunk/conditions/src/main/java/org/hps/conditions
ConditionsTableMetaDataXMLLoader.java added at 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsTableMetaDataXMLLoader.java	                        (rev 0)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsTableMetaDataXMLLoader.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -0,0 +1,99 @@
+package org.hps.conditions;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.jdom.Element;
+
+/**
+ * This class loads an XML configuration of conditions table meta data.
+ * 
+ * @author Jeremy McCormick <[log in to unmask]>
+ */
+class ConditionsTableMetaDataXMLLoader {
+    
+    List<ConditionsTableMetaData> _tableDataList = null;
+        
+    @SuppressWarnings("unchecked")
+    /**
+     * This method expects an XML element containing child "table" elements.
+     * @param element
+     */
+    void load(Element element) {
+        
+        _tableDataList = new ArrayList<ConditionsTableMetaData>();
+        
+        for (Iterator<?> iterator = element.getChildren("table").iterator(); iterator.hasNext();) {
+            Element tableElement = (Element)iterator.next();
+            String tableName = tableElement.getAttributeValue("name");
+            
+            //System.out.println("tableName: " + tableName);
+            
+            Element classesElement = tableElement.getChild("classes");
+            Element classElement = classesElement.getChild("object");
+            Element collectionElement = classesElement.getChild("collection");
+            
+            String className = classElement.getAttributeValue("class");
+            String collectionName = collectionElement.getAttributeValue("class");
+            
+            //System.out.println("className: " + className);
+            //System.out.println("collectionName: " + collectionName);
+            
+            Class<? extends ConditionsObject> objectClass;
+            Class<?> rawObjectClass;
+            try {
+                rawObjectClass = Class.forName(className);
+                //System.out.println("created raw object class: " + rawObjectClass.getSimpleName());
+                if (!ConditionsObject.class.isAssignableFrom(rawObjectClass)) {
+                    throw new RuntimeException("The class " + rawObjectClass.getSimpleName() + " does not extend ConditionsObject.");
+                }
+                objectClass = (Class<? extends ConditionsObject>)rawObjectClass;
+                //System.out.println("created ConditionsObject class: " + objectClass.getSimpleName());
+            } catch (ClassNotFoundException e) {
+                throw new RuntimeException(e);
+            }            
+            
+            Class<? extends ConditionsObjectCollection<?>> collectionClass;
+            Class<?> rawCollectionClass;
+            try {
+                rawCollectionClass = Class.forName(collectionName);
+                //System.out.println("created raw collection class: " + rawCollectionClass.getSimpleName());
+                if (!ConditionsObjectCollection.class.isAssignableFrom(rawCollectionClass))
+                    throw new RuntimeException("The class " + rawCollectionClass.getSimpleName() + " does not extend ConditionsObjectCollection.");
+                collectionClass = (Class<? extends ConditionsObjectCollection<?>>)rawCollectionClass;
+            } catch (ClassNotFoundException e) {
+                throw new RuntimeException(e);
+            }
+            
+            ConditionsTableMetaData tableData = new ConditionsTableMetaData(tableName, objectClass, collectionClass);
+            
+            Element fieldsElement = tableElement.getChild("fields");
+            
+            for (Iterator<?> fieldsIterator = fieldsElement.getChildren("field").iterator(); fieldsIterator.hasNext();) {
+                Element fieldElement = (Element)fieldsIterator.next();
+                
+                String fieldName = fieldElement.getAttributeValue("name");                                
+                //System.out.println("field: " + fieldName);
+                
+                tableData.addField(fieldName);
+            }
+            
+            _tableDataList.add(tableData);
+            
+            //System.out.println();
+        }                      
+    }    
+    
+    List<ConditionsTableMetaData> getTableMetaDataList() {
+        return _tableDataList;
+    }
+    
+    ConditionsTableMetaData findTableMetaData(String name) {
+        for (ConditionsTableMetaData metaData : _tableDataList) {
+            if (metaData.getTableName().equals(name))
+                return metaData;
+        }
+        return null;
+    }
+}
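
Judging from the element and attribute names parsed above, a single table entry in the new XML config presumably has the shape sketched below. The table name and field are borrowed from the old ConditionsTableRegistry defaults for the beam current table; the actual contents of conditions_database_testrun_2013.xml are not shown in this listing, so treat this purely as an assumed example.

    <table name="beam_current">
        <classes>
            <object class="org.hps.conditions.beam.BeamCurrent"/>
            <collection class="org.hps.conditions.beam.BeamCurrent$BeamCurrentCollection"/>
        </classes>
        <fields>
            <field name="beam_current"/>
        </fields>
    </table>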

java/trunk/conditions/src/main/java/org/hps/conditions
ConditionsTableRegistry.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsTableRegistry.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ConditionsTableRegistry.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,114 +0,0 @@
-package org.hps.conditions;
-
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * A central registry of {@link ConditionsTableMetaData} objects for use by converters.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class ConditionsTableRegistry {
-    
-    Map<String, ConditionsTableMetaData> _tableMetaDataMap = new HashMap<String, ConditionsTableMetaData>();
-    
-    public ConditionsTableMetaData getTableMetaData(String name) {
-        return _tableMetaDataMap.get(name);
-    }
-    
-    void addTableMetaData(ConditionsTableMetaData tableMetaData) {
-        if (_tableMetaDataMap.get(tableMetaData.getTableName()) != null) {
-            throw new IllegalArgumentException("Table data already exists for " + tableMetaData.getTableName());
-        }
-        _tableMetaDataMap.put(tableMetaData.getTableName(), tableMetaData);        
-    }
-    
-    void registerDefaultTableMetaData() {
-        
-        Set<String> fields;
-        
-        // SVT gains
-        fields = new HashSet<String>();
-        fields.add("svt_channel_id");
-        fields.add("gain");
-        fields.add("offset");
-        addTableMetaData(new ConditionsTableMetaData(ConditionsTableConstants.SVT_GAINS, fields));
-        
-        // SVT pulse parameters
-        fields = new HashSet<String>();
-        fields.add("svt_channel_id");
-        fields.add("amplitude");
-        fields.add("t0");
-        fields.add("tp");
-        fields.add("chisq");
-        addTableMetaData(new ConditionsTableMetaData(ConditionsTableConstants.SVT_PULSE_PARAMETERS, fields));
-
-        // SVT gains
-        fields = new HashSet<String>();
-        fields.add("svt_channel_id");
-        fields.add("noise");
-        fields.add("pedestal");
-        addTableMetaData(new ConditionsTableMetaData(ConditionsTableConstants.SVT_CALIBRATIONS, fields));
-        
-        // SVT channels
-        fields = new HashSet<String>();
-        fields.add("channel_id");
-        fields.add("fpga");
-        fields.add("hybrid");
-        fields.add("channel");
-        addTableMetaData(new ConditionsTableMetaData(ConditionsTableConstants.SVT_CHANNELS, fields));
-
-        // SVT time shift
-        fields = new HashSet<String>();
-        fields.add("fpga");
-        fields.add("hybrid");
-        fields.add("time_shift");
-        addTableMetaData(new ConditionsTableMetaData(ConditionsTableConstants.SVT_TIME_SHIFTS, fields));
-        
-        // SVT bad channels
-        fields = new HashSet<String>();
-        fields.add("svt_channel_id");
-        addTableMetaData(new ConditionsTableMetaData(ConditionsTableConstants.SVT_BAD_CHANNELS, fields));
-        
-        // SVT DAQ map
-        fields = new HashSet<String>();
-        fields.add("half");
-        fields.add("layer");
-        fields.add("fpga");
-        fields.add("hybrid");
-        addTableMetaData(new ConditionsTableMetaData(ConditionsTableConstants.SVT_DAQ_MAP, fields));
-        
-        // ECal bad channels
-        fields = new HashSet<String>();
-        fields.add("ecal_channel_id");
-        addTableMetaData(new ConditionsTableMetaData(ConditionsTableConstants.ECAL_BAD_CHANNELS, fields));
-        
-        // ECal gains
-        fields = new HashSet<String>();
-        fields.add("ecal_channel_id");
-        fields.add("gain");
-        addTableMetaData(new ConditionsTableMetaData(ConditionsTableConstants.ECAL_GAINS, fields));
-        
-        // ECal calibrations
-        fields = new HashSet<String>();
-        fields.add("ecal_channel_id");
-        fields.add("noise");
-        fields.add("pedestal");
-        addTableMetaData(new ConditionsTableMetaData(ConditionsTableConstants.ECAL_CALIBRATIONS, fields));
-        
-        // ECal channels
-        fields = new HashSet<String>();
-        fields.add("crate");
-        fields.add("slot");
-        fields.add("channel");
-        fields.add("x");
-        fields.add("y"); 
-        addTableMetaData(new ConditionsTableMetaData(ConditionsTableConstants.ECAL_CHANNELS, fields));
-        
-        // Beam current
-        fields = new HashSet<String>();
-        fields.add("beam_current");
-        addTableMetaData(new ConditionsTableMetaData(ConditionsTableConstants.BEAM_CURRENT, fields));
-    }
-}

java/trunk/conditions/src/main/java/org/hps/conditions
ConnectionParameters.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ConnectionParameters.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ConnectionParameters.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -5,6 +5,8 @@
 import java.sql.SQLException;
 import java.util.Properties;
 
+import org.jdom.Element;
+
 /**
  * This class encapsulates the parameters for connecting to a database, 
  * including hostname, port, user and password.  It can also create and 
@@ -126,4 +128,14 @@
         String conditionsTable = properties.getProperty("conditionsTable").toString();
         return new ConnectionParameters(user, password, database, hostname, port, conditionsTable);
     }    
+    
+    public static final ConnectionParameters fromXML(Element element) {
+        String user = element.getChild("user").getText();
+        String password = element.getChild("password").getText();
+        String database = element.getChild("database").getText();
+        String hostname = element.getChild("hostname").getText();
+        int port = Integer.parseInt(element.getChild("port").getText());
+        String conditionsTable = element.getChild("conditions_table").getText();
+        return new ConnectionParameters(user, password, database, hostname, port, conditionsTable);        
+    }
 }

java/trunk/conditions/src/main/java/org/hps/conditions
ConverterXMLLoader.java added at 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ConverterXMLLoader.java	                        (rev 0)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ConverterXMLLoader.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -0,0 +1,45 @@
+package org.hps.conditions;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.jdom.Element;
+import org.lcsim.conditions.ConditionsConverter;
+
+/**
+ * This class reads in an XML configuration specifying a list of converter classes,
+ * e.g. from the config file for the {@link DatabaseConditionsManager}.
+ *  
+ * @author Jeremy McCormick <[log in to unmask]>
+ */
+public class ConverterXMLLoader {
+    
+    List<ConditionsConverter> _converterList;
+    
+    void load(Element element) {
+        _converterList = new ArrayList<ConditionsConverter>();
+        for (Iterator iterator = element.getChildren("converter").iterator(); iterator.hasNext(); ) {
+            Element converterElement = (Element)iterator.next();
+            try {
+                Class converterClass = Class.forName(converterElement.getAttributeValue("class"));
+                if (ConditionsConverter.class.isAssignableFrom(converterClass)) {
+                    try {
+                        //System.out.println("adding converter: " + converterClass.getSimpleName());
+                        _converterList.add((ConditionsConverter)converterClass.newInstance());
+                    } catch (InstantiationException | IllegalAccessException e) {
+                        throw new RuntimeException(e);
+                    }
+                } else {
+                    throw new RuntimeException("The converter class " + converterClass.getSimpleName() + " does not extend the correct base type.");
+                }
+            } catch (ClassNotFoundException e) {
+                throw new RuntimeException(e);
+            }
+        }
+    }
+    
+    List<ConditionsConverter> getConverterList() {
+        return _converterList;
+    }
+}
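
Similarly, the converters section that this loader consumes would list one converter element per class, each named by its fully qualified (possibly nested) class name; an assumed fragment:

    <converters>
        <converter class="org.hps.conditions.ConditionsRecordConverter"/>
        <converter class="org.hps.conditions.beam.BeamConverterRegistry$BeamCurrentConverter"/>
        <!-- one element per registered converter class -->
    </converters>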

java/trunk/conditions/src/main/java/org/hps/conditions
DatabaseConditionsConverter.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/DatabaseConditionsConverter.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/DatabaseConditionsConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,34 +0,0 @@
-package org.hps.conditions;
-
-import org.lcsim.conditions.ConditionsConverter;
-
-/**
- * The abstract base class for database conditions converters to extend.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public abstract class DatabaseConditionsConverter<T> implements ConditionsConverter<T> {   
-	    
-    protected ConditionsObjectFactory _objectFactory;
-    
-    public DatabaseConditionsConverter(ConditionsObjectFactory objectFactory) {
-        _objectFactory = objectFactory;
-    }
-    
-    /**
-     * Get the the {@link ConnectionManager} associated with this converter.
-     * For now, this calls the singleton method of the ConnectionManager
-     * to get its instance.
-     * @return The ConnectionManager of this converter.
-     */
-    protected ConnectionManager getConnectionManager() {
-        return ConnectionManager.getConnectionManager();
-    }
-        
-    protected void setObjectFactory(ConditionsObjectFactory objectFactory) {
-        _objectFactory = objectFactory;
-    }               
-    
-    public final ConditionsTableMetaData getTableMetaData(String tableName) {
-        return _objectFactory.getTableRegistry().getTableMetaData(tableName);
-    }
-}
\ No newline at end of file

java/trunk/conditions/src/main/java/org/hps/conditions
DatabaseConditionsManager.java added at 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/DatabaseConditionsManager.java	                        (rev 0)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/DatabaseConditionsManager.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -0,0 +1,156 @@
+package org.hps.conditions;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.List;
+
+import org.jdom.Document;
+import org.jdom.JDOMException;
+import org.jdom.input.SAXBuilder;
+import org.lcsim.conditions.ConditionsConverter;
+import org.lcsim.conditions.ConditionsManager;
+import org.lcsim.geometry.Detector;
+import org.lcsim.util.loop.LCSimConditionsManagerImplementation;
+
+/**
+ * This class should be used as the top-level ConditionsManager for HPS when legacy access to text
+ * conditions is not needed.
+ */
+public class DatabaseConditionsManager extends LCSimConditionsManagerImplementation {
+
+    static DatabaseConditionsManager _instance;
+    int _runNumber = -1;
+    String _detectorName;    
+    ConnectionManager _connectionManager;
+    ConditionsTableMetaDataXMLLoader _loader;
+    ConverterXMLLoader _converters;
+
+    public DatabaseConditionsManager() {
+    }
+    
+    /**
+     * Create a static instance of this class and register it as the default conditions manager.
+     */
+    public static DatabaseConditionsManager createInstance() {
+        _instance = new DatabaseConditionsManager();
+        ConditionsManager.setDefaultConditionsManager(_instance); // FIXME: This probably should not be called here.
+        return _instance;
+    }
+
+    /**
+     * Perform setup for the current detector and run number.
+     */
+    public void setup() {
+        try {
+            //setConditionsReader(new DatabaseConditionsReader(getConditionsReader()), _detectorName);
+            setDetector(_detectorName, _runNumber);
+        } catch (ConditionsNotFoundException e) {
+            throw new RuntimeException(e);
+        }
+    }
+    
+    public void setRunNumber(int runNumber) {
+        _runNumber = runNumber;
+    }
+
+    public void setDetectorName(String detectorName) {
+        _detectorName = detectorName;
+    }
+    
+    public int getRunNumber() {
+        return _runNumber;
+    }
+
+    public String getDetectorName() {
+        return this.getDetector();
+    }
+    
+    public Detector getDetectorObject() {
+        return getCachedConditions(Detector.class, "compact.xml").getCachedData();
+    }
+    
+    public <T> T getConditionsData(Class<T> klass, String name) {
+        return getCachedConditions(klass, name).getCachedData();
+    }
+    
+    /**
+     * Configure this object from an XML file.
+     * @param file The XML file.
+     */
+    public void configure(File file) {                        
+        try {
+            configure(new FileInputStream(file));
+        } catch (FileNotFoundException e) {
+            throw new RuntimeException(e);
+        }
+    }
+    
+    /**
+     * Configure this object from an embedded XML resource.
+     * @param resource The embedded XML resource.
+     */
+    public void configure(String resource) {
+        configure(getClass().getResourceAsStream(resource)); 
+    }
+    
+    List<ConditionsTableMetaData> getTableMetaDataList() {
+        return _loader.getTableMetaDataList();
+    }
+    
+    ConditionsTableMetaData findTableMetaData(String name) {
+        return _loader.findTableMetaData(name);
+    }
+    
+    private void configure(InputStream in) {
+        
+        // Create XML document.
+        Document config = createDocument(in);
+
+        // Load the connection parameters from XML.
+        loadConnectionParameters(config);
+        
+        // Load the table meta data from XML.
+        loadTableMetaData(config);
+        
+        // Load the converter classes from XML.
+        loadConverters(config);
+    }
+
+    private Document createDocument(InputStream in) {
+        // Create an XML document from an InputStream.
+        SAXBuilder builder = new SAXBuilder();
+        Document config = null;
+        try {
+            config = builder.build(in);
+        } catch (JDOMException | IOException e) {
+            throw new RuntimeException(e);
+        }
+        return config;
+    }
+
+    private void loadConverters(Document config) {
+        // Load the list of converters from the "converters" section of the config document.
+        _converters = new ConverterXMLLoader();
+        _converters.load(config.getRootElement().getChild("converters"));
+        
+        // Register the converters with this manager.
+        for (ConditionsConverter converter : _converters.getConverterList()) {
+            registerConditionsConverter(converter);
+        }
+    }
+
+    private void loadTableMetaData(Document config) {
+        // Load table meta data from the "tables" section of the config document.
+        _loader = new ConditionsTableMetaDataXMLLoader();
+        _loader.load(config.getRootElement().getChild("tables"));
+    }
+
+    private void loadConnectionParameters(Document config) {
+        // Setup the connection parameters from the "connection" section of the config document.
+        ConnectionManager.getConnectionManager().setConnectionParameters(
+                ConnectionParameters.fromXML(config.getRootElement().getChild("connection")));
+    }
+}
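
Putting the three loaders together, configure() evidently expects a document whose root element contains connection, tables and converters children, with the connection fields matching ConnectionParameters.fromXML(). The sketch below shows that assumed overall shape with placeholder values; it is not the content of the conditions_database_testrun_2013.xml file added in this commit.

    <!-- Root element name is not checked by the code; only its children are read. -->
    <conditions_config>
        <connection>
            <user>dbuser</user>
            <password>dbpass</password>
            <database>conditions_dev</database>
            <hostname>localhost</hostname>
            <port>3306</port>
            <conditions_table>conditions</conditions_table>
        </connection>
        <tables>
            <!-- table entries as sketched after ConditionsTableMetaDataXMLLoader above -->
        </tables>
        <converters>
            <!-- converter entries as sketched after ConverterXMLLoader above -->
        </converters>
    </conditions_config>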

java/trunk/conditions/src/main/java/org/hps/conditions
DatabaseConditionsReader.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/DatabaseConditionsReader.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/DatabaseConditionsReader.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -19,40 +19,22 @@
  * the conditions and their meta data in a fully generic fashion.
  * </p>
  * 
- * <p>
- * In order to override the default database connection parameters, the system property
- * <code>hps.conditions.db.configuration</code> should point to a properties file defining 
- * the variables read by ConnectionParameters (see that class for details).  Otherwise, the 
- * defaults will be used to connect to a test database at SLAC.
- * <p>
- * 
- * <p>
- * Setting custom connection properties would look something like the following from the CL:
- * </p>
- * 
- * <p><code>java -Dhps.conditions.db.configuration=/path/to/my/config.prop [...]</code></p>
- * 
- * <p>
- * Currently, this class should "know" directly about all the converters that are needed for loading
- * conditions data via the <code>registerConditionsConverters</code> method.  
- * </p>
- * 
  * @author Jeremy McCormick <[log in to unmask]>
  * @version $Id: DatabaseConditionsReader.java,v 1.21 2013/10/18 06:08:55 jeremy Exp $ 
  */
 public class DatabaseConditionsReader extends ConditionsReader {
         
     /** Database connection. */
-    private Connection connection = null;    
+    private Connection _connection = null;    
     
     /** Base ConditionsReader for getting the Detector. */
-    private final ConditionsReader reader;
+    private final ConditionsReader _reader;
     
     /** The current run number to determine if conditions are already loaded. */
-    private int currentRun = Integer.MIN_VALUE;
+    private int _currentRun = Integer.MIN_VALUE;
         
     /** The logger for printing messages. */
-    static Logger logger = null;
+    static Logger _logger = null;
 
     /**
      * Class constructor taking a ConditionsReader.  This constructor is automatically called 
@@ -62,8 +44,7 @@
      * @param reader The basic ConditionsReader allowing access to the detector.
      */
     public DatabaseConditionsReader(ConditionsReader reader) {
-        this.reader = reader;        
-        
+        _reader = reader;
         setupLogger();
     }
     
@@ -71,13 +52,13 @@
      * Setup the logger.
      */
     private final void setupLogger() {
-        if (logger == null) {
-            logger = Logger.getLogger(this.getClass().getSimpleName());
-            logger.setUseParentHandlers(false);
-            logger.setLevel(Level.ALL);
+        if (_logger == null) {
+            _logger = Logger.getLogger(getClass().getSimpleName());
+            _logger.setUseParentHandlers(false);
+            _logger.setLevel(Level.ALL);
             ConsoleHandler handler = new ConsoleHandler();
             handler.setFormatter(new ConditionsFormatter());
-            logger.addHandler(handler);
+            _logger.addHandler(handler);
         }
     }
     
@@ -101,20 +82,20 @@
      */
     public boolean update(ConditionsManager manager, String detectorName, int run) throws IOException {
                 
-        logger.info("updating detector <" + detectorName + "> for run <" + run + "> ...");
+        _logger.info("updating detector <" + detectorName + "> for run <" + run + "> ...");
         
         // Check if conditions are already cached for the run.
-        if (run == currentRun) {
-            logger.warning("Conditions already cached for run <" + run + ">.");
+        if (run == _currentRun) {
+            _logger.warning("Conditions already cached for run <" + run + ">.");
             return false;
         }
             
         // Register the converters on the manager.         
         // FIXME: This should really only happen once instead of being called here every time.
-        ConditionsConverterRegistery.register(manager);
+        //ConditionsConverterRegistery.register(manager);
                 
         // Open a connection to the database.
-        connection = ConnectionManager.getConnectionManager().createConnection();
+        _connection = ConnectionManager.getConnectionManager().createConnection();
         
         // Cache the ConditionsRecords.
         try {
@@ -125,8 +106,8 @@
                                
         // Close the database connection.
         try {
-            connection.close();
-            connection = null;
+            _connection.close();
+            _connection = null;
         } catch (SQLException x) {
             throw new IOException("Failed to close connection", x);
         }
@@ -138,7 +119,7 @@
      * Close the base reader.
      */
     public void close() throws IOException {
-        reader.close();
+        _reader.close();
     }
 
     /**
@@ -146,7 +127,7 @@
      * @return An InputStream with the conditions for <code>type</code>.
      */
     public InputStream open(String name, String type) throws IOException {
-        return reader.open(name, type);
+        return _reader.open(name, type);
     }
      
     /**

java/trunk/conditions/src/main/java/org/hps/conditions
QueryBuilder.java added at 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/QueryBuilder.java	                        (rev 0)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/QueryBuilder.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -0,0 +1,41 @@
+package org.hps.conditions;
+
+
+class QueryBuilder {
+
+    static String buildSelect(String tableName, int collectionId, String[] fields, String order) {
+        StringBuffer buff = new StringBuffer();
+        buff.append("SELECT ");
+        if (fields == null) {
+            buff.append("* ");
+        } else {
+            // Always implicitly include the row ID.
+            buff.append("id, ");
+            for (String fieldName : fields) {
+                buff.append(fieldName + ", ");
+            }
+            buff.delete(buff.length()-2, buff.length()-1);
+        }
+        buff.append(" FROM " + tableName);
+        if (collectionId != -1)
+            buff.append(" WHERE collection_id = " + collectionId);
+        if (order != null) {
+            buff.append(" ORDER BY " + order);
+        }
+        System.out.println("QueryBuilder.buildSelect: " + buff.toString());
+        return buff.toString();
+    }
+    
+    static String buildUpdate(String tableName, int rowId, String[] fields, Object[] values) {
+        return null;
+    }
+    
+    static String buildInsert(String tableName, String[] fields, Object[] values) {
+        return null;
+    }
+    
+    static String buildDelete(String tableName, int rowId) {
+        return null;
+    }
+    
+}
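
A minimal usage sketch of the new QueryBuilder.buildSelect method; the table and field names below are
illustrative only and are not taken from the conditions schema. Note that both the class and its methods
are package-private, so callers must live in org.hps.conditions:

    // Select two named fields from collection 42 of a hypothetical table, ordered by row ID.
    String[] fields = {"svt_channel_id", "noise"};
    String query = QueryBuilder.buildSelect("example_table", 42, fields, "id ASC");
    // Yields (note the harmless double space left by the trailing-comma trim):
    // SELECT id, svt_channel_id, noise  FROM example_table WHERE collection_id = 42 ORDER BY id ASC

    // A null field array selects all columns, and a collection ID of -1 drops the WHERE clause.
    String selectAll = QueryBuilder.buildSelect("example_table", -1, null, null);
    // Yields: SELECT *  FROM example_table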

java/trunk/conditions/src/main/java/org/hps/conditions
TestRunConditionsReader.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/TestRunConditionsReader.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/TestRunConditionsReader.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -18,11 +18,15 @@
     private String detectorName = null;
     //private int run;
 
-    public TestRunConditionsReader(ConditionsReader reader) {
+    public TestRunConditionsReader(ConditionsReader reader) {        
         super(reader);
+        System.out.println("TestRunConditionsReader - " + reader.getClass().getSimpleName());
     }
 
     public InputStream open(String name, String type) throws IOException {
+        
+        System.out.println(this.getClass().getSimpleName() + ".open - " + name);
+        
         // 1) Check the detector base directory.
         InputStream in = getClass().getResourceAsStream("/" + detectorName + "/" + name + "." + type);
         if (in == null) {
@@ -44,7 +48,6 @@
     public void close() throws IOException {
     }
 
-
     public boolean update(ConditionsManager manager, String detectorName, int run) throws IOException {
 //            loadCalibsByRun(run);
 //        Detector detector = manager.getCachedConditions(Detector.class,"compact.xml").getCachedData();

java/trunk/conditions/src/main/java/org/hps/conditions/beam
BeamConverterRegistry.java added at 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/beam/BeamConverterRegistry.java	                        (rev 0)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/beam/BeamConverterRegistry.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -0,0 +1,12 @@
+package org.hps.conditions.beam;
+
+import org.hps.conditions.ConditionsObjectConverter;
+import org.hps.conditions.beam.BeamCurrent.BeamCurrentCollection;
+
+public class BeamConverterRegistry {   
+    public static final class BeamCurrentConverter extends ConditionsObjectConverter<BeamCurrentCollection> {
+        public Class getType() {
+            return BeamCurrentCollection.class;
+        }
+    }
+}
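
Once the nested BeamCurrentConverter is registered with the conditions manager, client code retrieves the
collection through the usual lcsim conditions API. A hedged sketch, assuming the converter has been
registered and that the conditions set is keyed by a hypothetical name "beam_current":

    import org.hps.conditions.beam.BeamCurrent.BeamCurrentCollection;
    import org.lcsim.conditions.ConditionsManager;

    ConditionsManager manager = ConditionsManager.defaultInstance();
    BeamCurrentCollection beamCurrents =
            manager.getCachedConditions(BeamCurrentCollection.class, "beam_current").getCachedData();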

java/trunk/conditions/src/main/java/org/hps/conditions/beam
BeamCurrent.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/beam/BeamCurrent.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/beam/BeamCurrent.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,12 +1,16 @@
 package org.hps.conditions.beam;
 
 import org.hps.conditions.AbstractConditionsObject;
+import org.hps.conditions.ConditionsObjectCollection;
 
 /**
  * This class is a simple data holder for the integrated beam current condition.
  * @author Jeremy McCormick <[log in to unmask]>
  */
 public class BeamCurrent extends AbstractConditionsObject {
+    
+    public static class BeamCurrentCollection extends ConditionsObjectCollection<BeamCurrent> {
+    }
             
     /**
      * Get the integrated beam current.
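
The collection type now lives inside the object class it holds, replacing the separate top-level
collection files removed below. A hedged sketch of what a hypothetical new beam condition would look like
under this convention (the class name and field name are illustrative, and getFieldValue is assumed to
resolve to the requested numeric type as in the existing conditions objects):

    package org.hps.conditions.beam;

    import org.hps.conditions.AbstractConditionsObject;
    import org.hps.conditions.ConditionsObjectCollection;

    public class BeamEnergy extends AbstractConditionsObject {

        // The collection is declared as a static nested class, mirroring BeamCurrentCollection above.
        public static class BeamEnergyCollection extends ConditionsObjectCollection<BeamEnergy> {
        }

        // Illustrative accessor reading a hypothetical beam_energy field.
        public double getBeamEnergy() {
            return getFieldValue("beam_energy");
        }
    }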

java/trunk/conditions/src/main/java/org/hps/conditions/beam
BeamCurrentCollection.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/beam/BeamCurrentCollection.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/beam/BeamCurrentCollection.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,16 +0,0 @@
-package org.hps.conditions.beam;
-
-import org.hps.conditions.ConditionsObjectCollection;
-import org.hps.conditions.ConditionsTableMetaData;
-
-public class BeamCurrentCollection extends ConditionsObjectCollection<BeamCurrent> {
-    
-    /**
-     * Class constructor.
-     */
-    BeamCurrentCollection(ConditionsTableMetaData tableMetaData, int collectionId, boolean isReadOnly) {
-        super(tableMetaData, collectionId, isReadOnly);
-    }
-    
-
-}

java/trunk/conditions/src/main/java/org/hps/conditions/beam
BeamCurrentConverter.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/beam/BeamCurrentConverter.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/beam/BeamCurrentConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,77 +0,0 @@
-package org.hps.conditions.beam;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.hps.conditions.AbstractConditionsObject.FieldValueMap;
-import org.hps.conditions.ConditionsObjectException;
-import org.hps.conditions.ConditionsObjectFactory;
-import org.hps.conditions.ConditionsRecord;
-import org.hps.conditions.ConnectionManager;
-import org.hps.conditions.DatabaseConditionsConverter;
-import org.lcsim.conditions.ConditionsManager;
-
-/**
- * This class creates a {@link BeamCurrent} from the conditions database.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class BeamCurrentConverter extends DatabaseConditionsConverter<BeamCurrentCollection> {
-    
-    public BeamCurrentConverter(ConditionsObjectFactory objectFactory) {
-        super(objectFactory);
-    }
-    
-    /**
-     * Get the conditions data.
-     * @param manager The current conditions manager.
-     * @param name The name of the conditions set.
-     */
-    public BeamCurrentCollection getData(ConditionsManager manager, String name) {
-        
-        // Get the ConditionsRecord with the meta-data, which will use the current run number from the manager.
-        ConditionsRecord record = ConditionsRecord.find(manager, name).get(0);
-               
-        // Get the table name, field name, and field value defining the applicable conditions.
-        String tableName = record.getTableName();
-        int collectionId = record.getCollectionId();
-        
-        // Collection to be returned to caller.
-        BeamCurrentCollection collection = new BeamCurrentCollection(getTableMetaData(name), collectionId, true);
-                        
-        // Get the connection manager.
-        ConnectionManager connectionManager = ConnectionManager.getConnectionManager();
-                                                                                            
-        // Construct the query to find matching records using the ID field.
-        String query = "SELECT id, beam_current FROM "
-                + tableName + " WHERE collection_id = " + collectionId;
-            
-        // Execute the query and get the results.
-        ResultSet resultSet = connectionManager.query(query);
-                
-        try {
-            // Loop over the records.            
-            while(resultSet.next()) {                                                             
-                int rowId = resultSet.getInt(1);
-                FieldValueMap fieldValues = new FieldValueMap();
-                fieldValues.put("beam_current", resultSet.getDouble(2));
-                BeamCurrent newObject = _objectFactory.createObject(BeamCurrent.class, tableName, rowId, fieldValues, true);                
-                collection.add(newObject);
-            }            
-        } catch (SQLException x) {
-            throw new RuntimeException("Database error.", x);
-        } catch (ConditionsObjectException x){
-            throw new RuntimeException("Error converting to " + getType().getSimpleName() + "type", x);
-        }
-        
-        // Return collection of gain objects to caller.
-        return collection;
-    }
-
-    /**
-     * Get the type handled by this converter.     
-     * @return The type handled by this converter.
-     */
-    public Class<BeamCurrentCollection> getType() {
-        return BeamCurrentCollection.class;
-    }        
-}

java/trunk/conditions/src/main/java/org/hps/conditions/ecal
EcalBadChannel.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalBadChannel.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalBadChannel.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,9 +1,13 @@
 package org.hps.conditions.ecal;
 
 import org.hps.conditions.AbstractConditionsObject;
+import org.hps.conditions.ConditionsObjectCollection;
 
 public class EcalBadChannel extends AbstractConditionsObject {
     
+    public static class EcalBadChannelCollection extends ConditionsObjectCollection<EcalBadChannel> {    
+    }
+    
     int getChannelId() {
         return getFieldValue("ecal_channel_id");
     }

java/trunk/conditions/src/main/java/org/hps/conditions/ecal
EcalBadChannelCollection.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalBadChannelCollection.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalBadChannelCollection.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,18 +0,0 @@
-package org.hps.conditions.ecal;
-
-import org.hps.conditions.ConditionsObjectCollection;
-import org.hps.conditions.ConditionsTableMetaData;
-
-/**
- * A collection of bad channel IDs in the ECAL.
- */
-public class EcalBadChannelCollection extends ConditionsObjectCollection<EcalBadChannel> {
-    
-    public EcalBadChannelCollection() {        
-    }
-    
-    public EcalBadChannelCollection(ConditionsTableMetaData tableMetaData, int collectionId, boolean isReadOnly) {
-        super(tableMetaData, collectionId, isReadOnly);
-    }
-    
-}

java/trunk/conditions/src/main/java/org/hps/conditions/ecal
EcalBadChannelConverter.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalBadChannelConverter.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalBadChannelConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,80 +0,0 @@
-package org.hps.conditions.ecal;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.hps.conditions.AbstractConditionsObject.FieldValueMap;
-import org.hps.conditions.ConditionsObjectException;
-import org.hps.conditions.ConditionsObjectFactory;
-import org.hps.conditions.ConditionsRecord;
-import org.hps.conditions.ConditionsRecordCollection;
-import org.hps.conditions.ConnectionManager;
-import org.hps.conditions.DatabaseConditionsConverter;
-import org.lcsim.conditions.ConditionsManager;
-
-/**
- * This class creates a {@link EcalBadChannelCollection} representing bad readout channels.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class EcalBadChannelConverter extends DatabaseConditionsConverter<EcalBadChannelCollection> {
-
-    public EcalBadChannelConverter(ConditionsObjectFactory objectFactory) {
-        super(objectFactory);
-    }
-    
-    /**
-     * Create the collection from the conditions database. 
-     * @param manager The conditions manager.
-     * @param name The name of the conditions set.
-     */
-    public EcalBadChannelCollection getData(ConditionsManager manager, String name) {
-
-        // Collection to be returned to caller.
-        EcalBadChannelCollection collection = new EcalBadChannelCollection();
-
-        // Get the ConditionsRecord with the meta-data, which will use the
-        // current run number from the manager.
-        ConditionsRecordCollection records = ConditionsRecord.find(manager, name);
-
-        // Loop over ConditionsRecords.  For this particular type of condition, multiple
-        // sets of bad channels are possible.
-        for (ConditionsRecord record : records) {
-        
-            // Get the table name, field name, and field value defining the
-            // applicable conditions.
-            String tableName = record.getTableName();
-            int collectionId = record.getCollectionId();
-
-            // Query for getting back bad channel records.
-            String query = "SELECT id, ecal_channel_id FROM " + tableName + " WHERE collection_id = " 
-                    + collectionId + " ORDER BY id ASC";
-            ResultSet resultSet = ConnectionManager.getConnectionManager().query(query);
-            
-            // Loop over the records.
-            try {
-                while (resultSet.next()) {                    
-                    int rowId = resultSet.getInt(1);                                       
-                    FieldValueMap fieldValues = new FieldValueMap();
-                    fieldValues.put("ecal_channel_id", resultSet.getInt(2));                    
-                    EcalBadChannel newObject = _objectFactory.createObject(EcalBadChannel.class, tableName, rowId, fieldValues, true);                    
-                    collection.add(newObject);
-                }
-            } catch (SQLException x) {
-                throw new RuntimeException(x);
-            } catch (ConditionsObjectException x) {
-                throw new RuntimeException("Error converting to " + getType().getSimpleName() + " type.");
-            }
-        }
-               
-        return collection;
-    }
-
-    /**
-     * Get the type handled by this converter.
-     * 
-     * @return The type handled by this converter.
-     */
-    public Class<EcalBadChannelCollection> getType() {
-        return EcalBadChannelCollection.class;
-    }
-}

java/trunk/conditions/src/main/java/org/hps/conditions/ecal
EcalCalibration.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalCalibration.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalCalibration.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,6 +1,7 @@
 package org.hps.conditions.ecal;
 
 import org.hps.conditions.AbstractConditionsObject;
+import org.hps.conditions.ConditionsObjectCollection;
 
 /**
  * This class is a simplistic representation of ECal pedestal and noise
@@ -10,6 +11,9 @@
  */
 public class EcalCalibration extends AbstractConditionsObject {
     
+    public static class EcalCalibrationCollection extends ConditionsObjectCollection<EcalCalibration> {
+    }
+    
     /**
      * Get the channel ID.
      * @return The channel ID.

java/trunk/conditions/src/main/java/org/hps/conditions/ecal
EcalCalibrationCollection.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalCalibrationCollection.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalCalibrationCollection.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,19 +0,0 @@
-package org.hps.conditions.ecal;
-
-import org.hps.conditions.ConditionsObjectCollection;
-import org.hps.conditions.ConditionsTableMetaData;
-
-/**
- * This class represents a list of {@link EcalCalibration} objects and their ECAL channel IDs.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class EcalCalibrationCollection extends ConditionsObjectCollection<EcalCalibration> {
-
-    /**
-     * Class constructor.
-     */
-    EcalCalibrationCollection(ConditionsTableMetaData tableMetaData, int collectionId, boolean isReadOnly) {
-        super(tableMetaData, collectionId, isReadOnly);
-    }
-
-}

java/trunk/conditions/src/main/java/org/hps/conditions/ecal
EcalCalibrationConverter.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalCalibrationConverter.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalCalibrationConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,84 +0,0 @@
-package org.hps.conditions.ecal;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.hps.conditions.AbstractConditionsObject.FieldValueMap;
-import org.hps.conditions.ConditionsObjectException;
-import org.hps.conditions.ConditionsObjectFactory;
-import org.hps.conditions.ConditionsRecord;
-import org.hps.conditions.ConnectionManager;
-import org.hps.conditions.DatabaseConditionsConverter;
-import org.lcsim.conditions.ConditionsManager;
-
-/**
- * This class creates a list of {@link EcalCalibrationCollection} from the
- * conditions database.
- * 
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class EcalCalibrationConverter extends DatabaseConditionsConverter<EcalCalibrationCollection> {
-    
-    public EcalCalibrationConverter(ConditionsObjectFactory objectFactory) {
-        super(objectFactory);
-    }
-    
-    /**
-     * Create the calibration collection from the conditions database.
-     * @param manager The conditions manager.
-     * @param name The name of the conditions set.
-     */
-    public EcalCalibrationCollection getData(ConditionsManager manager, String name) {
-
-        // Get the ConditionsRecord with the meta-data, which will use the
-        // current run number from the manager.
-        ConditionsRecord record = ConditionsRecord.find(manager, name).get(0);
-
-        // Get the table name, field name, and field value defining the
-        // applicable conditions.
-        String tableName = record.getTableName();
-        int collectionId = record.getCollectionId();
-        
-        // Collection to be returned to caller.
-        EcalCalibrationCollection collection = new EcalCalibrationCollection(getTableMetaData(name), collectionId, true);
-
-        // References to database objects.
-        ResultSet resultSet = null;
-        ConnectionManager connectionManager = getConnectionManager();
-
-        // The query to get conditions.
-        String query = "SELECT id, ecal_channel_id, pedestal, noise FROM " 
-                + tableName + " WHERE collection_id = " + collectionId 
-                + " ORDER BY ecal_channel_id ASC";
-
-        // Execute the query.
-        resultSet = connectionManager.query(query);
-
-        try {
-            // Loop over the records.
-            while (resultSet.next()) {
-                int rowId = resultSet.getInt(1);                 
-                FieldValueMap fieldValues = new FieldValueMap();
-                fieldValues.put("ecal_channel_id", resultSet.getInt(2));
-                fieldValues.put("pedestal", resultSet.getDouble(3));
-                fieldValues.put("noise", resultSet.getDouble(4));
-                EcalCalibration newObject = _objectFactory.createObject(EcalCalibration.class, tableName, rowId, fieldValues, true);                
-                collection.add(newObject);
-            }
-        } catch (SQLException x) {
-            throw new RuntimeException("Database error", x);
-        } catch (ConditionsObjectException x) {
-            throw new RuntimeException("Error converting to " + getType().getSimpleName() + " object", x);
-        }
-        
-        return collection;
-    }
-
-    /**
-     * Get the type handled by this converter.
-     * @return The type handled by this converter.
-     */
-    public Class<EcalCalibrationCollection> getType() {
-        return EcalCalibrationCollection.class;
-    }
-}

java/trunk/conditions/src/main/java/org/hps/conditions/ecal
EcalChannel.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalChannel.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalChannel.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,6 +1,7 @@
 package org.hps.conditions.ecal;
 
 import org.hps.conditions.AbstractConditionsObject;
+import org.hps.conditions.ConditionsObjectCollection;
 
 /**
  * This class encapsulates all the setup information about a single ECal channel, e.g. one crystal.
@@ -15,6 +16,66 @@
  */
 public class EcalChannel extends AbstractConditionsObject {
     
+    public static final class DaqId {
+        public int crate;
+        public int slot;
+        public int channel;
+    }
+    
+    public static final class GeometryId {
+        public int x;
+        public int y;
+    }
+    
+    public static final class ChannelId {
+        public int id;
+    }
+    
+    public static class EcalChannelCollection extends ConditionsObjectCollection<EcalChannel> {
+                    
+        /**
+         * Find a channel by using DAQ information.
+         * @param daqId The DAQ ID holding the crate number,
+         *              slot number, and channel number of the
+         *              crystal to look up.
+         * @return The matching channel or null if it does not exist.
+         */
+        public EcalChannel findChannel(DaqId daqId) {
+            for (EcalChannel channel : getObjects()) {
+                if (channel.getCrate() == daqId.crate 
+                        && channel.getSlot() == daqId.slot 
+                        && channel.getChannel() == daqId.channel) {
+                    return channel;
+                }
+            }
+            return null;
+        }
+        
+        /**
+         * Find a channel by using its physical ID information.
+         * @param geometryId The geometry ID holding the x and y
+         *                   indices of the crystal to look up.
+         * @return The matching channel or null if it does not exist.
+         */
+        public EcalChannel findChannel(GeometryId geometryId) {
+            for (EcalChannel channel : getObjects()) {
+                if (channel.getX() == geometryId.x && channel.getY() == geometryId.y) {
+                    return channel;
+                }
+            }
+            return null;
+        }
+        
+        public EcalChannel findChannel(ChannelId channelId) {
+            for (EcalChannel channel : getObjects()) {
+                if (channel.getChannelId() == channelId.id) {
+                    return channel;
+                }
+            }
+            return null;
+        }
+    }
+    
     /**
      * Get the crate number.
      * @return The crate number.
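
A usage sketch for the new ID holder classes and lookup methods; "channels" stands for an
EcalChannelCollection obtained elsewhere (normally from the conditions manager), and the numeric values
are illustrative:

    import org.hps.conditions.ecal.EcalChannel.DaqId;
    import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
    import org.hps.conditions.ecal.EcalChannel.GeometryId;

    // Look up a crystal from its DAQ address.
    DaqId daqId = new DaqId();
    daqId.crate = 1;
    daqId.slot = 2;
    daqId.channel = 3;
    EcalChannel byDaq = channels.findChannel(daqId);        // null if no channel matches

    // Look up a crystal from its geometric (x, y) indices.
    GeometryId geometryId = new GeometryId();
    geometryId.x = 5;
    geometryId.y = -1;
    EcalChannel byGeometry = channels.findChannel(geometryId);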

java/trunk/conditions/src/main/java/org/hps/conditions/ecal
EcalChannelCollection.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalChannelCollection.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalChannelCollection.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,74 +0,0 @@
-package org.hps.conditions.ecal;
-
-import org.hps.conditions.ConditionsObjectCollection;
-
-/**
- * This class maps ID values from the database to detailed ECal channel information.
- * There should really only be one of these data structures per job, as the EcalChannel 
- * objects are used as unique identifiers in the {@link EcalConditions} class.
- */
-public class EcalChannelCollection extends ConditionsObjectCollection<EcalChannel> {
-    
-    public static final class DaqId {
-        public int crate;
-        public int slot;
-        public int channel;
-    }
-    
-    public static final class GeometryId {
-        public int x;
-        public int y;
-    }
-    
-    public static final class ChannelId {
-        public int id;
-    }
-    
-    /**
-     * Class constructor.
-     */
-    EcalChannelCollection() {        
-    }
-    
-    /**
-     * Find a channel by using DAQ information.
-     * @param crate The crate number.
-     * @param slot The slot number.
-     * @param channelNumber The channel number.
-     * @return The matching channel or null if does not exist.
-     */
-    public EcalChannel findChannel(DaqId daqId) {
-        for (EcalChannel channel : getObjects()) {
-            if (channel.getCrate() == daqId.crate 
-                    && channel.getSlot() == daqId.slot 
-                    && channel.getChannel() == daqId.channel) {
-                return channel;
-            }
-        }
-        return null;
-    }
-    
-    /**
-     * Find a channel by using its physical ID information.
-     * @param x The x value.
-     * @param y The y value.
-     * @return The matching channel or null if does not exist.
-     */
-    public EcalChannel findChannel(GeometryId geometryId) {
-        for (EcalChannel channel : getObjects()) {
-            if (channel.getX() == geometryId.x && channel.getY() == geometryId.y) {
-                return channel;
-            }
-        }
-        return null;
-    }
-    
-    public EcalChannel findChannel(ChannelId channelId) {
-        for (EcalChannel channel : getObjects()) {
-            if (channel.getChannelId() == channelId.id) {
-                return channel;
-            }
-        }
-        return null;
-    }
-}
\ No newline at end of file

java/trunk/conditions/src/main/java/org/hps/conditions/ecal
EcalChannelConverter.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalChannelConverter.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalChannelConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,85 +0,0 @@
-package org.hps.conditions.ecal;
-
-import static org.hps.conditions.ConditionsTableConstants.ECAL_CHANNELS;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.hps.conditions.AbstractConditionsObject.FieldValueMap;
-import org.hps.conditions.ConditionsObjectException;
-import org.hps.conditions.ConditionsObjectFactory;
-import org.hps.conditions.ConditionsRecord;
-import org.hps.conditions.ConditionsRecordCollection;
-import org.hps.conditions.ConnectionManager;
-import org.hps.conditions.DatabaseConditionsConverter;
-import org.lcsim.conditions.ConditionsManager;
-
-/**
- * This class creates the {@link EcalChannelCollection} from the conditions table
- * containing the channel data.
- * 
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class EcalChannelConverter extends DatabaseConditionsConverter<EcalChannelCollection> {
-
-    public EcalChannelConverter(ConditionsObjectFactory objectFactory) {
-        super(objectFactory);
-    }
-    
-    /**
-     * Load the data from the conditions database.
-     * @param manager The conditions manager.
-     * @param name The name of the conditions set.
-     */
-    public EcalChannelCollection getData(ConditionsManager manager, String name) {
-
-        // References to database objects.
-        ResultSet resultSet = null;
-        ConnectionManager connectionManager = getConnectionManager();
-        
-        ConditionsRecord record = ConditionsRecord.find(manager, name).get(0);
-        int collectionId = record.getCollectionId();
-        String tableName = record.getTableName();
-        
-        // Collection to be returned to caller.
-        EcalChannelCollection collection = new EcalChannelCollection();
-
-        // Query to retrieve channel data.
-        String query = "SELECT id, channel_id, x, y, crate, slot, channel FROM " + name
-                + " WHERE collection_id = " + collectionId;
-
-        // Execute the query and get the results.
-        resultSet = connectionManager.query(query);
-
-        try {
-            // Loop over the records.
-            while (resultSet.next()) {
-                
-                int rowId = resultSet.getInt(1);                                       
-                FieldValueMap fieldValues = new FieldValueMap();
-                fieldValues.put("channel_id", resultSet.getInt(2));
-                fieldValues.put("x", resultSet.getInt(3));
-                fieldValues.put("y", resultSet.getInt(4));
-                fieldValues.put("crate", resultSet.getInt(5));
-                fieldValues.put("slot", resultSet.getInt(6));
-                fieldValues.put("channel", resultSet.getInt(7));                                
-                EcalChannel newObject = _objectFactory.createObject(EcalChannel.class, tableName, rowId, fieldValues, true);                    
-                collection.add(newObject);
-            }
-        } catch (SQLException x) {
-            throw new RuntimeException("Database error.", x);
-        } catch (ConditionsObjectException x) {
-            throw new RuntimeException("Error converting to " + getType().getSimpleName() + " type.", x);
-        }
-        
-        return collection;
-    }
-
-    /**
-     * Get the type that this converter handles.
-     * @return The type handled by this converter.
-     */
-    public Class<EcalChannelCollection> getType() {
-        return EcalChannelCollection.class;
-    }
-}

java/trunk/conditions/src/main/java/org/hps/conditions/ecal
EcalConditions.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalConditions.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalConditions.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -3,10 +3,16 @@
 import java.util.HashMap;
 import java.util.Map;
 
+import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
+
 /**
  * This class provides access to all ECal conditions from the database,
- * including gain, pedestal and bad channel settings, per crystal. 
+ * including gain, pedestal and bad channel settings, per crystal.
  * 
+ * Unlike most conditions data types, it does not extend
+ * {@link org.hps.conditions.ConditionsObject}, because it is a composite
+ * object containing data assembled from many other <code>ConditionsObject</code>s.
+ * 
  * @author Jeremy McCormick <[log in to unmask]>
  */
 public class EcalConditions {
@@ -121,5 +127,5 @@
             buff.append('\n');
         }
         return buff.toString();
-    }
+    }    
 }
\ No newline at end of file

java/trunk/conditions/src/main/java/org/hps/conditions/ecal
EcalConditionsConverter.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsConverter.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -5,9 +5,12 @@
 import static org.hps.conditions.ConditionsTableConstants.ECAL_CHANNELS;
 import static org.hps.conditions.ConditionsTableConstants.ECAL_GAINS;
 
-import org.hps.conditions.ConditionsObjectFactory;
-import org.hps.conditions.DatabaseConditionsConverter;
-import org.hps.conditions.ecal.EcalChannelCollection.ChannelId;
+import org.hps.conditions.ecal.EcalBadChannel.EcalBadChannelCollection;
+import org.hps.conditions.ecal.EcalCalibration.EcalCalibrationCollection;
+import org.hps.conditions.ecal.EcalChannel.ChannelId;
+import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
+import org.hps.conditions.ecal.EcalGain.EcalGainCollection;
+import org.lcsim.conditions.ConditionsConverter;
 import org.lcsim.conditions.ConditionsManager;
 
 /**
@@ -15,15 +18,13 @@
  * from the database, based on the current run number known by the conditions manager.
  * @author Jeremy McCormick <[log in to unmask]>
  */
-public class EcalConditionsConverter extends DatabaseConditionsConverter<EcalConditions> {
-    
-    public EcalConditionsConverter(ConditionsObjectFactory objectFactory) {
-        super(objectFactory);
-    }
-    
+// FIXME: This converter is not needed.  The data loading can be done from within EcalConditions itself.
+public class EcalConditionsConverter implements ConditionsConverter<EcalConditions> {
+      
     /**
      * Create ECAL conditions object containing all data for the current run.
      */
+    // TODO: This should be a method on EcalConditions itself.
     public EcalConditions getData(ConditionsManager manager, String name) {
         
         // Create new, empty conditions object to fill with data.
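
With the converter reduced to the plain lcsim ConditionsConverter interface, user code still obtains the
composite EcalConditions object through the manager. A hedged sketch, assuming the converter is registered
and that the conditions set is keyed by a hypothetical name "ecal_conditions":

    EcalConditions ecalConditions =
            manager.getCachedConditions(EcalConditions.class, "ecal_conditions").getCachedData();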

java/trunk/conditions/src/main/java/org/hps/conditions/ecal
EcalConditionsLoader.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsLoader.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsLoader.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -2,7 +2,8 @@
 
 import java.util.List;
 
-import org.hps.conditions.ecal.EcalChannelCollection.GeometryId;
+import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
+import org.hps.conditions.ecal.EcalChannel.GeometryId;
 import org.lcsim.detector.converter.compact.EcalCrystal;
 import org.lcsim.geometry.Detector;
 

java/trunk/conditions/src/main/java/org/hps/conditions/ecal
EcalConverterRegistry.java added at 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalConverterRegistry.java	                        (rev 0)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalConverterRegistry.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -0,0 +1,39 @@
+package org.hps.conditions.ecal;
+
+import org.hps.conditions.ConditionsObjectConverter;
+import org.hps.conditions.ecal.EcalBadChannel.EcalBadChannelCollection;
+import org.hps.conditions.ecal.EcalCalibration.EcalCalibrationCollection;
+import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
+import org.hps.conditions.ecal.EcalGain.EcalGainCollection;
+
+
+public class EcalConverterRegistry {
+    
+    public static final class EcalBadChannelConverter extends ConditionsObjectConverter<EcalBadChannelCollection> {         
+        public Class getType() {
+            return EcalBadChannelCollection.class;
+        }                
+
+        public boolean allowMultipleCollections() {
+            return true;
+        }        
+    }
+    
+    public static final class EcalCalibrationConverter extends ConditionsObjectConverter<EcalCalibrationCollection> {         
+        public Class getType() {
+            return EcalCalibrationCollection.class;
+        }
+    }
+    
+    public static final class EcalChannelConverter extends ConditionsObjectConverter<EcalChannelCollection> {         
+        public Class getType() {
+            return EcalChannelCollection.class;
+        }                
+    }
+    
+    public static final class EcalGainConverter extends ConditionsObjectConverter<EcalGainCollection> {         
+        public Class getType() {
+            return EcalGainCollection.class;
+        }
+    }
+}
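
Only EcalBadChannelConverter overrides allowMultipleCollections(), matching the removed converter of the
same name, which looped over several ConditionsRecords because more than one bad-channel set can be valid
for a run. A hedged sketch of a converter for a hypothetical new collection type needing the same behavior
(EcalTimeShiftCollection does not exist in this commit and is shown only to illustrate the pattern):

    public static final class EcalTimeShiftConverter extends ConditionsObjectConverter<EcalTimeShiftCollection> {
        public Class getType() {
            return EcalTimeShiftCollection.class;
        }

        // Merge every matching conditions set, as is done for bad channels.
        public boolean allowMultipleCollections() {
            return true;
        }
    }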

java/trunk/conditions/src/main/java/org/hps/conditions/ecal
EcalGain.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalGain.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalGain.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,6 +1,7 @@
 package org.hps.conditions.ecal;
 
 import org.hps.conditions.AbstractConditionsObject;
+import org.hps.conditions.ConditionsObjectCollection;
 
 /**
  * This class is a simplistic representation of gain values from the ECal
@@ -8,6 +9,9 @@
  * @author Jeremy McCormick <[log in to unmask]>
  */
 public class EcalGain extends AbstractConditionsObject {
+    
+    public static class EcalGainCollection extends ConditionsObjectCollection<EcalGain> {
+    }
                
     /**
      * Get the gain value.

java/trunk/conditions/src/main/java/org/hps/conditions/ecal
EcalGainCollection.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalGainCollection.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalGainCollection.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,17 +0,0 @@
-package org.hps.conditions.ecal;
-
-import org.hps.conditions.ConditionsObjectCollection;
-import org.hps.conditions.ConditionsTableMetaData;
-
-/**
- * This class maps ECAL channel IDs from the database to ECal gain parameters.
- */
-public class EcalGainCollection extends ConditionsObjectCollection<EcalGain> {
-    
-    /**
-     * Class constructor.
-     */
-    EcalGainCollection(ConditionsTableMetaData tableMetaData, int collectionId, boolean isReadOnly) {
-        super(tableMetaData, collectionId, isReadOnly);
-    }
-}

java/trunk/conditions/src/main/java/org/hps/conditions/ecal
EcalGainConverter.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalGainConverter.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalGainConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,85 +0,0 @@
-package org.hps.conditions.ecal;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.hps.conditions.AbstractConditionsObject.FieldValueMap;
-import org.hps.conditions.ConditionsObjectException;
-import org.hps.conditions.ConditionsObjectFactory;
-import org.hps.conditions.ConditionsRecord;
-import org.hps.conditions.ConditionsTableMetaData;
-import org.hps.conditions.ConnectionManager;
-import org.hps.conditions.DatabaseConditionsConverter;
-import org.lcsim.conditions.ConditionsManager;
-
-/**
- * This class creates an {@link EcalGainCollection} from the appropriate
- * conditions database information.
- */
-public class EcalGainConverter extends DatabaseConditionsConverter<EcalGainCollection> {
-
-    
-    public EcalGainConverter(ConditionsObjectFactory objectFactory) {
-        super(objectFactory);
-    }
-    
-    /**
-     * Create the collection from the conditions database.
-     * @param manager The conditions manager.
-     * @param name The name of the conditions set.
-     */
-    public EcalGainCollection getData(ConditionsManager manager, String name) {
-
-        // Get the ConditionsRecord with the meta-data, which will use the
-        // current run number from the manager.
-        ConditionsRecord record = ConditionsRecord.find(manager, name).get(0);
-
-        // Get the table name, field name, and field value defining the
-        // applicable conditions.
-        String tableName = record.getTableName();
-        int collectionId = record.getCollectionId();
-        
-        // Objects for building the return value.
-        ConditionsTableMetaData tableMetaData = _objectFactory.getTableRegistry().getTableMetaData(tableName);
-        EcalGainCollection collection = 
-                new EcalGainCollection(tableMetaData, collectionId, true); 
-
-        // References to database objects.
-        ConnectionManager connectionManager = getConnectionManager();
-
-        // Database query on ecal gain table.
-        String query = "SELECT id, ecal_channel_id, gain FROM " 
-                + tableName + " WHERE collection_id = " 
-                + collectionId + " ORDER BY id ASC";
-
-        // Execute the query and get the results.
-        ResultSet resultSet = connectionManager.query(query);
-
-        try {
-            // Loop over the records.
-            while (resultSet.next()) {                
-                int rowId = resultSet.getInt(1);                 
-                FieldValueMap fieldValues = new FieldValueMap();
-                fieldValues.put("ecal_channel_id", resultSet.getInt(2));
-                fieldValues.put("gain", resultSet.getDouble(3));
-                EcalGain newObject = _objectFactory.createObject(EcalGain.class, tableName, rowId, fieldValues, true);                
-                collection.add(newObject);
-            }
-        } catch (SQLException x) {
-            throw new RuntimeException("Database error.", x);
-        } catch (ConditionsObjectException x) {
-            throw new RuntimeException("Error converting to " + getType().getSimpleName() + " type.", x);
-        }
-        
-        return collection;
-    }
-
-    /**
-     * Get the type handled by this converter.
-     * @return The type handled by this converter.
-     */
-    public Class<EcalGainCollection> getType() {
-        return EcalGainCollection.class;
-    }
-
-}

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtBadChannel.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtBadChannel.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtBadChannel.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,9 +1,13 @@
 package org.hps.conditions.svt;
 
 import org.hps.conditions.AbstractConditionsObject;
+import org.hps.conditions.ConditionsObjectCollection;
 
 public class SvtBadChannel extends AbstractConditionsObject {
     
+    public static class SvtBadChannelCollection extends ConditionsObjectCollection<SvtBadChannel> {      
+    }
+   
     public int getChannelId() {
         return getFieldValue("svt_channel_id");
     }

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtBadChannelCollection.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtBadChannelCollection.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtBadChannelCollection.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,20 +0,0 @@
-package org.hps.conditions.svt;
-
-import org.hps.conditions.ConditionsObjectCollection;
-import org.hps.conditions.ConditionsTableMetaData;
-
-/**
- * This class represents a set of bad channels in the SVT by their channel IDs
- * from the conditions database.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class SvtBadChannelCollection extends ConditionsObjectCollection<SvtBadChannel> {
-    
-    public SvtBadChannelCollection() {        
-    }
-    
-    public SvtBadChannelCollection(ConditionsTableMetaData tableMetaData, int collectionId, boolean isReadOnly) {
-        super(tableMetaData, collectionId, isReadOnly);
-    }
-    
-}

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtBadChannelConverter.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtBadChannelConverter.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtBadChannelConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,80 +0,0 @@
-package org.hps.conditions.svt;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.hps.conditions.AbstractConditionsObject.FieldValueMap;
-import org.hps.conditions.ConditionsObjectException;
-import org.hps.conditions.ConditionsObjectFactory;
-import org.hps.conditions.ConditionsRecord;
-import org.hps.conditions.ConditionsRecordCollection;
-import org.hps.conditions.ConditionsTableMetaData;
-import org.hps.conditions.ConnectionManager;
-import org.hps.conditions.DatabaseConditionsConverter;
-import org.lcsim.conditions.ConditionsManager;
-
-/**
- * This class creates an {@link SvtBadChannelCollection} representing bad readout channels
- * in the SVT.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class SvtBadChannelConverter extends DatabaseConditionsConverter<SvtBadChannelCollection> {
-
-    public SvtBadChannelConverter(ConditionsObjectFactory objectFactory) {
-        super(objectFactory);
-    }
-    
-    /**
-     * Create the collection from the conditions database. 
-     * @param manager The conditions manager.
-     * @param name The name of the conditions set.
-     */
-    public SvtBadChannelCollection getData(ConditionsManager manager, String name) {
-
-        // Get the ConditionsRecord with the meta-data, which will use the
-        // current run number from the manager.
-        ConditionsRecordCollection records = ConditionsRecord.find(manager, name);
-
-        SvtBadChannelCollection collection = new SvtBadChannelCollection();
-        
-        // Loop over ConditionsRecords.  For this particular type of condition, multiple
-        // sets of bad channels with overlapping validity are okay.
-        for (ConditionsRecord record : records) {
-        
-            String tableName = record.getTableName();
-            int collectionId = record.getCollectionId();
-            
-            // Query for getting back bad channel records.
-            String query = "SELECT id, svt_channel_id FROM " + tableName 
-                    + " WHERE collection_id = " + collectionId 
-                    + " ORDER BY id ASC";
-            
-            ResultSet resultSet = ConnectionManager.getConnectionManager().query(query);
-            
-            // Loop over the records.
-            try {
-                while (resultSet.next()) {
-                    int rowId = resultSet.getInt(1);                    
-                    FieldValueMap fieldValues = new FieldValueMap();
-                    fieldValues.put("svt_channel_id", resultSet.getInt(2));                    
-                    SvtBadChannel newObject = _objectFactory.createObject(SvtBadChannel.class, tableName, rowId, fieldValues, true);                    
-                    collection.add(newObject);
-                }
-            } catch (SQLException x) {
-                throw new RuntimeException("Database error", x);
-            } catch (ConditionsObjectException x) {
-                throw new RuntimeException("Error converting to SvtBadChannel object.", x);
-            }
-        }
-               
-        return collection;
-    }
-
-    /**
-     * Get the type handled by this converter.
-     * @return The type handled by this converter.
-     */
-    public Class<SvtBadChannelCollection> getType() {
-        return SvtBadChannelCollection.class;
-    }
-}

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtCalibration.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtCalibration.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtCalibration.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,13 +1,17 @@
 package org.hps.conditions.svt;
 
 import org.hps.conditions.AbstractConditionsObject;
+import org.hps.conditions.ConditionsObjectCollection;
 
 /**
  * This class represents a noise and pedestal measurement for an SVT channel.
  * @author Jeremy McCormick <[log in to unmask]>
  */
 public class SvtCalibration extends AbstractConditionsObject {
-                
+
+    public static class SvtCalibrationCollection extends ConditionsObjectCollection<SvtCalibration> {
+    }
+    
     /**
      * Get the channel ID.
      * @return The channel ID.

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtCalibrationCollection.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtCalibrationCollection.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtCalibrationCollection.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,19 +0,0 @@
-package org.hps.conditions.svt;
-
-import org.hps.conditions.ConditionsObjectCollection;
-import org.hps.conditions.ConditionsTableMetaData;
-
-/**
- * This class is a collection of {@link SvtCalibration} objects associated to their 
- * SVT channel IDs from the database.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class SvtCalibrationCollection extends ConditionsObjectCollection<SvtCalibration> {
-
-    /**
-     * Class constructor.
-     */
-    SvtCalibrationCollection(ConditionsTableMetaData tableMetaData, int collectionId, boolean isReadOnly) {
-        super(tableMetaData, collectionId, isReadOnly);
-    }
-}

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtCalibrationConverter.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtCalibrationConverter.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtCalibrationConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,95 +0,0 @@
-package org.hps.conditions.svt;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.hps.conditions.AbstractConditionsObject.FieldValueMap;
-import org.hps.conditions.ConditionsObjectException;
-import org.hps.conditions.ConditionsObjectFactory;
-import org.hps.conditions.ConditionsRecord;
-import org.hps.conditions.ConditionsTableMetaData;
-import org.hps.conditions.ConnectionManager;
-import org.hps.conditions.DatabaseConditionsConverter;
-import org.lcsim.conditions.ConditionsManager;
-
-/**
- * This class creates a {@link SvtCalibrationCollection} from the conditions
- * database.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class SvtCalibrationConverter extends DatabaseConditionsConverter<SvtCalibrationCollection> {
-
-    /**
-     * Class constructor.
-     */
-    public SvtCalibrationConverter(ConditionsObjectFactory objectFactory) {
-        super(objectFactory);
-    }
-
-    /**
-     * Get the SVT channel constants for this run by named set.     
-     * @param manager The current conditions manager.
-     * @param name The name of the conditions set.
-     * @return The channel constants data.
-     */
-    public SvtCalibrationCollection getData(ConditionsManager manager, String name) {
-
-        // Get the ConditionsRecord with the meta-data, which will use the
-        // current run number from the manager.
-        ConditionsRecord record = ConditionsRecord.find(manager, name).get(0);
-
-        // Get the table name, field name, and field value defining the
-        // applicable conditions.
-        String tableName = record.getTableName();
-        int collectionId = record.getCollectionId();
-
-        // Objects for building the return value.
-        SvtCalibrationCollection collection = 
-                new SvtCalibrationCollection(this.getTableMetaData(tableName), collectionId, true); 
-
-        // Get a connection from the manager.
-        ConnectionManager connectionManager = getConnectionManager();
-
-        // Construct the query to find matching calibration records.
-        String query = "SELECT id, svt_channel_id, noise, pedestal FROM " + tableName 
-                + " WHERE collection_id = " + collectionId 
-                + " ORDER BY svt_channel_id ASC";
-
-        // Execute the query and get the results.
-        ResultSet resultSet = connectionManager.query(query);
-
-        try {
-            // Loop over the calibration records.
-            while (resultSet.next()) {
-                
-                int rowId = resultSet.getInt(1);
-                
-                FieldValueMap fieldValues = new FieldValueMap();
-                fieldValues.put("svt_channel_id", resultSet.getInt(2));
-                fieldValues.put("noise", resultSet.getDouble(3));
-                fieldValues.put("pedestal", resultSet.getDouble(4));
-                SvtCalibration newObject = _objectFactory.createObject(
-                        SvtCalibration.class, tableName, rowId, fieldValues, true);
-                
-                collection.add(newObject);
-            }
-        } catch (SQLException x) {
-            throw new RuntimeException("Database error.", x);
-        } catch (ConditionsObjectException x) {
-            throw new RuntimeException("Error converting to SvtCalibration object");
-        }
-        
-        // Return the collection of channel constants to caller.
-        return collection;
-    }
-
-    /**
-     * Get the type handled by this converter.
-     * 
-     * @return The type handled by this converter, which is
-     *         <code>ConditionsRecordCollection</code>.
-     */
-    public Class<SvtCalibrationCollection> getType() {
-        return SvtCalibrationCollection.class;
-    }
-}
\ No newline at end of file

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtChannel.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtChannel.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtChannel.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,6 +1,14 @@
 package org.hps.conditions.svt;
 
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 import org.hps.conditions.AbstractConditionsObject;
+import org.hps.conditions.ConditionsObjectCollection;
+import org.lcsim.hps.util.Pair;
 
 /**
  * This class represents SVT channel setup information, including hybrid, FPGA, and channel numbers.
@@ -8,6 +16,55 @@
  */
 public class SvtChannel extends AbstractConditionsObject {
     
+    public static class SvtChannelCollection extends ConditionsObjectCollection<SvtChannel> {
+        
+        Map<Integer, SvtChannel> channelMap = new HashMap<Integer, SvtChannel>();
+            
+        public void add(SvtChannel channel) {
+            // Add to map.
+            if (channelMap.containsKey(channel.getChannelId())) {
+                throw new IllegalArgumentException("Channel ID already exists: " + channel.getChannelId());
+            }
+            channelMap.put(channel.getChannelId(), channel);
+            
+            // Add to collection.
+            super.add(channel);
+        }
+        
+        public SvtChannel findChannel(int channelId) {
+            return channelMap.get(channelId);
+        }
+        
+        /**
+         * Find channels that match a DAQ pair (FPGA, hybrid).
+         * @param pair The DAQ pair.
+         * @return The channels matching the DAQ pair, or an empty collection if none match.
+         */
+        public Collection<SvtChannel> find(Pair<Integer,Integer> pair) {
+            List<SvtChannel> channels = new ArrayList<SvtChannel>(); 
+            int fpga = pair.getFirstElement();
+            int hybrid = pair.getSecondElement();
+            for (SvtChannel channel : this.getObjects()) {
+                if (channel.getFpga() == fpga && channel.getHybrid() == hybrid) {
+                    channels.add(channel);
+                }
+            }
+            return channels;
+        }
+                   
+        /**
+         * Convert this object to a human readable string.
+         * @return This object converted to a string.
+         */
+        public String toString() {        
+            StringBuffer buff = new StringBuffer();
+            for (SvtChannel channel : this.getObjects()) {
+                buff.append(channel.toString() + '\n');
+            }
+            return buff.toString();
+        }
+    }
+    
     /**
      * Get the channel ID.
      * @return The channel ID.
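
A usage sketch for the new nested SvtChannelCollection; "svtChannels" stands for a populated collection,
the FPGA and hybrid numbers are illustrative, and the Pair constructor is assumed to take its two elements
directly:

    import java.util.Collection;

    import org.hps.conditions.svt.SvtChannel.SvtChannelCollection;
    import org.lcsim.hps.util.Pair;

    // Direct lookup by channel ID through the internal map.
    SvtChannel channel = svtChannels.findChannel(1234);

    // All channels read out by a given (FPGA, hybrid) DAQ pair.
    Collection<SvtChannel> onHybrid = svtChannels.find(new Pair<Integer, Integer>(3, 1));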

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtChannelCollection.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtChannelCollection.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtChannelCollection.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,69 +0,0 @@
-package org.hps.conditions.svt;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.hps.conditions.ConditionsObjectCollection;
-import org.hps.conditions.ConditionsTableMetaData;
-import org.lcsim.hps.util.Pair;
-
-/**
- * This class represents a map between SVT channels and their IDs from the channels table
- * in the conditions database.  It can be used to lookup information stored in the {@link SvtConditions} object.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class SvtChannelCollection extends ConditionsObjectCollection<SvtChannel> {
-    
-    Map<Integer, SvtChannel> channelMap = new HashMap<Integer, SvtChannel>();
-    
-    SvtChannelCollection(ConditionsTableMetaData tableMetaData, int collectionId, boolean isReadOnly) {
-        super(tableMetaData, collectionId, isReadOnly);
-    }
-    
-    public void add(SvtChannel channel) {
-        // Add to map.
-        if (channelMap.containsKey(channel.getChannelId())) {
-            throw new IllegalArgumentException("Channel ID already exists: " + channel.getChannelId());
-        }
-        channelMap.put(channel.getChannelId(), channel);
-        
-        // Add to collection.
-        super.add(channel);
-    }
-    
-    public SvtChannel findChannel(int channelId) {
-        return channelMap.get(channelId);
-    }
-    
-    /**
-     * Find channels that match a DAQ pair (FPGA, hybrid).
-     * @param pair The DAQ pair.
-     * @return The channels matching the DAQ pair or null if not found.
-     */
-    public Collection<SvtChannel> find(Pair<Integer,Integer> pair) {
-        List<SvtChannel> channels = new ArrayList<SvtChannel>(); 
-        int fpga = pair.getFirstElement();
-        int hybrid = pair.getSecondElement();
-        for (SvtChannel channel : this.getObjects()) {
-            if (channel.getFpga() == fpga && channel.getHybrid() == hybrid) {
-                channels.add(channel);
-            }
-        }
-        return channels;
-    }
-               
-    /**
-     * Convert this object to a human readable string.
-     * @return This object converted to a string.
-     */
-    public String toString() {        
-        StringBuffer buff = new StringBuffer();
-        for (SvtChannel channel : this.getObjects()) {
-            buff.append(channel.toString() + '\n');
-        }
-        return buff.toString();
-    }
-}

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtChannelConverter.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtChannelConverter.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtChannelConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,78 +0,0 @@
-package org.hps.conditions.svt;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.hps.conditions.AbstractConditionsObject.FieldValueMap;
-import org.hps.conditions.ConditionsObjectException;
-import org.hps.conditions.ConditionsObjectFactory;
-import org.hps.conditions.ConditionsRecord;
-import org.hps.conditions.ConnectionManager;
-import org.hps.conditions.DatabaseConditionsConverter;
-import org.lcsim.conditions.ConditionsManager;
-
-/**
- * This class converts a table of SVT channel setup data into an {@link SvtChannelCollection}.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-// TODO: This needs to support different collectionIDs.
-public class SvtChannelConverter extends DatabaseConditionsConverter<SvtChannelCollection> {
-
-    public SvtChannelConverter(ConditionsObjectFactory objectFactory) {
-        super(objectFactory);
-    }
-    
-    /**
-     * Create the channel map from the conditions database.
-     * @param manager The current conditions manager.
-     * @param name The name of the conditions set.
-     */
-    public SvtChannelCollection getData(ConditionsManager manager, String name) {
-
-        // Get the connection manager.
-        ConnectionManager connectionManager = getConnectionManager();
-        
-        ConditionsRecord record = ConditionsRecord.find(manager, name).get(0);        
-        int collectionId = record.getCollectionId();
-        
-        // Objects for building the return value.
-        SvtChannelCollection channels = new SvtChannelCollection(
-                _objectFactory.getTableRegistry().getTableMetaData(name),
-                collectionId,
-                true);
-        
-        // Construct the query to get the channel data.
-        String query = "SELECT id, channel_id, fpga, hybrid, channel FROM " + name
-                + " WHERE collection_id = " + collectionId;
-
-        // Execute the query and get the results.
-        ResultSet resultSet = connectionManager.query(query);
-
-        try {
-            // Loop over records.
-            while (resultSet.next()) {
-                int rowId = resultSet.getInt(1);
-                FieldValueMap fieldValues = new FieldValueMap();
-                fieldValues.put("channel_id", resultSet.getInt(2));
-                fieldValues.put("fpga", resultSet.getInt(3));
-                fieldValues.put("hybrid", resultSet.getInt(4));
-                fieldValues.put("channel", resultSet.getInt(5));                
-                SvtChannel newObject = _objectFactory.createObject(SvtChannel.class, name, rowId, fieldValues, true);                
-                channels.add(newObject);
-            }
-        } catch (SQLException x) {
-            throw new RuntimeException("Database error.", x);
-        } catch (ConditionsObjectException x) {
-            throw new RuntimeException("Error converting to SvtChannel object.", x);
-        }
-        return channels;
-    }
-
-    /**
-     * Get the type handled by this converter.
-     * @return The type handled by this converter.
-     */
-    public Class<SvtChannelCollection> getType() {
-        return SvtChannelCollection.class;
-    }
-}

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtConditions.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtConditions.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtConditions.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -3,6 +3,10 @@
 import java.util.HashMap;
 import java.util.Map;
 
+import org.hps.conditions.svt.SvtChannel.SvtChannelCollection;
+import org.hps.conditions.svt.SvtDaqMapping.SvtDaqMappingCollection;
+import org.hps.conditions.svt.SvtTimeShift.SvtTimeShiftCollection;
+
 /**
  * This class contains all SVT conditions data by readout channel.
 * {@link SvtChannel} objects from the {@link SvtChannelMap} should be 

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtConditionsConverter.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtConditionsConverter.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtConditionsConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -8,20 +8,22 @@
 import static org.hps.conditions.ConditionsTableConstants.SVT_PULSE_PARAMETERS;
 import static org.hps.conditions.ConditionsTableConstants.SVT_TIME_SHIFTS;
 
-import org.hps.conditions.ConditionsObjectFactory;
-import org.hps.conditions.DatabaseConditionsConverter;
+import org.hps.conditions.svt.SvtBadChannel.SvtBadChannelCollection;
+import org.hps.conditions.svt.SvtCalibration.SvtCalibrationCollection;
+import org.hps.conditions.svt.SvtChannel.SvtChannelCollection;
+import org.hps.conditions.svt.SvtDaqMapping.SvtDaqMappingCollection;
+import org.hps.conditions.svt.SvtGain.SvtGainCollection;
+import org.hps.conditions.svt.SvtPulseParameters.SvtPulseParametersCollection;
+import org.hps.conditions.svt.SvtTimeShift.SvtTimeShiftCollection;
+import org.lcsim.conditions.ConditionsConverter;
 import org.lcsim.conditions.ConditionsManager;
 
 /**
  * This class creates an {@link SvtConditions} object from the database,
  * based on the current run number known by the conditions manager.
  */
-public class SvtConditionsConverter extends DatabaseConditionsConverter<SvtConditions> {
-      
-    public SvtConditionsConverter(ConditionsObjectFactory objectFactory) {
-        super(objectFactory);
-    }
-    
+public class SvtConditionsConverter implements ConditionsConverter<SvtConditions> {
+          
     /**
      * Create and return the SVT conditions object.  
      * @param manager The current conditions manager.

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtConditionsLoader.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtConditionsLoader.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtConditionsLoader.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -3,6 +3,9 @@
 import java.util.Collection;
 import java.util.List;
 
+import org.hps.conditions.svt.SvtChannel.SvtChannelCollection;
+import org.hps.conditions.svt.SvtDaqMapping.SvtDaqMappingCollection;
+import org.hps.conditions.svt.SvtTimeShift.SvtTimeShiftCollection;
 import org.lcsim.detector.tracker.silicon.HpsSiSensor;
 import org.lcsim.geometry.Detector;
 import org.lcsim.hps.util.Pair;

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtConverterRegistry.java added at 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtConverterRegistry.java	                        (rev 0)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtConverterRegistry.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -0,0 +1,61 @@
+package org.hps.conditions.svt;
+
+import org.hps.conditions.ConditionsObjectConverter;
+import org.hps.conditions.svt.SvtBadChannel.SvtBadChannelCollection;
+import org.hps.conditions.svt.SvtCalibration.SvtCalibrationCollection;
+import org.hps.conditions.svt.SvtChannel.SvtChannelCollection;
+import org.hps.conditions.svt.SvtDaqMapping.SvtDaqMappingCollection;
+import org.hps.conditions.svt.SvtGain.SvtGainCollection;
+import org.hps.conditions.svt.SvtPulseParameters.SvtPulseParametersCollection;
+import org.hps.conditions.svt.SvtTimeShift.SvtTimeShiftCollection;
+
+
+@SuppressWarnings({"unchecked", "rawtypes"})
+public class SvtConverterRegistry {
+           
+    public static class SvtBadChannelConverter extends ConditionsObjectConverter<SvtBadChannelCollection> {         
+        public Class getType() {
+            return SvtBadChannelCollection.class;
+        }                
+
+        public boolean allowMultipleCollections() {
+            return true;
+        }        
+    }
+    
+    public static class SvtCalibrationConverter extends ConditionsObjectConverter<SvtCalibrationCollection> {
+        public Class getType() {
+            return SvtCalibrationCollection.class;
+        }
+    }
+        
+    public static class SvtChannelConverter extends ConditionsObjectConverter<SvtChannelCollection> {
+        public Class getType() {
+            return SvtChannelCollection.class;
+        }
+    }
+    
+    public static class SvtDaqMappingConverter extends ConditionsObjectConverter<SvtDaqMappingCollection> {
+        public Class getType() {
+            return SvtDaqMappingCollection.class;
+        }
+    }
+    
+    public static class SvtGainConverter extends ConditionsObjectConverter<SvtGainCollection> {
+        public Class getType() {
+            return SvtGainCollection.class;
+        }
+    }
+    
+    public static class SvtPulseParametersConverter extends ConditionsObjectConverter<SvtPulseParametersCollection> {
+        public Class getType() {
+            return SvtPulseParametersCollection.class;
+        }
+    }
+    
+    public static class SvtTimeShiftConverter extends ConditionsObjectConverter<SvtTimeShiftCollection> {
+        public Class getType() {
+            return SvtTimeShiftCollection.class;
+        }
+    }
+}
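
The nested classes above carry no logic of their own; each one only binds a concrete collection type to the new generic ConditionsObjectConverter so that the XML configuration (see conditions_database_testrun_2013.xml below) can name it by its binary class name, e.g. org.hps.conditions.svt.SvtConverterRegistry$SvtChannelConverter. A sketch of turning such an entry into an instance by reflection, assuming the converters keep an accessible no-argument constructor (the wrapper class is illustrative only, not the commit's actual loader):

    import org.hps.conditions.svt.SvtConverterRegistry.SvtChannelConverter;

    // Illustrative only: instantiate a converter from an XML "class" attribute value.
    public class ConverterLookupSketch {
        public static void main(String[] args) throws Exception {
            String className = "org.hps.conditions.svt.SvtConverterRegistry$SvtChannelConverter";
            SvtChannelConverter converter = (SvtChannelConverter) Class.forName(className).newInstance();
            System.out.println("handles: " + converter.getType().getSimpleName());
        }
    }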

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtDaqMapping.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMapping.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMapping.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,10 +1,64 @@
 package org.hps.conditions.svt;
 
 import org.hps.conditions.AbstractConditionsObject;
+import org.hps.conditions.ConditionsObjectCollection;
+import org.lcsim.hps.util.Pair;
 
-
 public class SvtDaqMapping extends AbstractConditionsObject {
 
+    public static class SvtDaqMappingCollection extends ConditionsObjectCollection<SvtDaqMapping> {
+        
+        /**
+         * Flag values for top or bottom half.
+         */
+        public static final int TOP_HALF = 0;
+        public static final int BOTTOM_HALF = 1;
+              
+        /**
+         * Get a DAQ pair (FPGA, hybrid) by top/bottom number and layer number.
+         * @param half Value indicating top or bottom half of detector.
+         * @param layerNumber The layer number.
+         * @return The DAQ pair for the half and layer number or null if it does not exist.
+         */
+        Pair<Integer,Integer> get(int half, int layerNumber) {
+            for (SvtDaqMapping object : this.getObjects()) {
+                if (object.getHalf() == half && object.getLayerNumber() == layerNumber) {
+                    return new Pair<Integer, Integer>(object.getFpgaNumber(), object.getHybridNumber());
+                }
+            }
+            return null;
+        }
+        
+        /**
+         * Convert this object to a string.
+         * @return This object converted to a string.
+         */
+        public String toString() {
+            StringBuffer buff = new StringBuffer();
+            buff.append("half");
+            buff.append(" ");
+            buff.append("layer");
+            buff.append(" ");
+            buff.append("fpga");
+            buff.append(" ");
+            buff.append("hybrid");
+            buff.append('\n');
+            buff.append("----------------------");
+            buff.append('\n');
+            for (SvtDaqMapping object : getObjects()) {
+                buff.append(object.getHalf());
+                buff.append("    ");
+                buff.append(String.format("%-2d", object.getLayerNumber()));
+                buff.append("    ");                
+                buff.append(object.getFpgaNumber());
+                buff.append("    ");
+                buff.append(object.getHybridNumber());
+                buff.append('\n');
+            }        
+            return buff.toString();
+        }    
+    }
+    
     public int getHalf() {
         return getFieldValue("half");
     }

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtDaqMappingCollection.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMappingCollection.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMappingCollection.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,67 +0,0 @@
-package org.hps.conditions.svt;
-
-import org.hps.conditions.ConditionsObjectCollection;
-import org.hps.conditions.ConditionsTableMetaData;
-import org.lcsim.hps.util.Pair;
-
-/**
- * This class establishes the mapping between layer numbers and DAQ pair (FPGA, hybrid)
- * for the top and bottom halves of the detector. 
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class SvtDaqMappingCollection extends ConditionsObjectCollection<SvtDaqMapping> {
-    
-    /**
-     * Flag values for top or bottom half.
-     */
-    public static final int TOP_HALF = 0;
-    public static final int BOTTOM_HALF = 1;
-    
-    public SvtDaqMappingCollection(ConditionsTableMetaData tableMetaData, int collectionId, boolean isReadOnly) {
-        super(tableMetaData, collectionId, isReadOnly);
-    }
-      
-    /**
-     * Get a DAQ pair (FPGA, hybrid) by top/bottom number and layer number.
-     * @param half Value indicating top or bottom half of detector.
-     * @param layerNumber The layer number.
-     * @return The DAQ pair for the half and layer number or null if does not exist.
-     */
-    Pair<Integer,Integer> get(int half, int layerNumber) {
-        for (SvtDaqMapping object : this.getObjects()) {
-            if (object.getHalf() == half && object.getLayerNumber() == layerNumber) {
-                return new Pair<Integer, Integer>(object.getFpgaNumber(), object.getHybridNumber());
-            }
-        }
-        return null;
-    }
-    
-    /**
-     * Convert this object to a string.
-     * @return This object converted to a string.
-     */
-    public String toString() {
-        StringBuffer buff = new StringBuffer();
-        buff.append("half");
-        buff.append(" ");
-        buff.append("layer");
-        buff.append(" ");
-        buff.append("fpga");
-        buff.append(" ");
-        buff.append("hybrid");
-        buff.append('\n');
-        buff.append("----------------------");
-        buff.append('\n');
-        for (SvtDaqMapping object : getObjects()) {
-            buff.append(object.getHalf());
-            buff.append("    ");
-            buff.append(String.format("%-2d", object.getLayerNumber()));
-            buff.append("    ");                
-            buff.append(object.getFpgaNumber());
-            buff.append("    ");
-            buff.append(object.getHybridNumber());
-            buff.append('\n');
-        }        
-        return buff.toString();
-    }    
-}
\ No newline at end of file

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtDaqMappingConverter.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMappingConverter.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMappingConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,89 +0,0 @@
-package org.hps.conditions.svt;
-
-import static org.hps.conditions.ConditionsTableConstants.SVT_DAQ_MAP;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.hps.conditions.AbstractConditionsObject.FieldValueMap;
-import org.hps.conditions.ConditionsObjectException;
-import org.hps.conditions.ConditionsObjectFactory;
-import org.hps.conditions.ConditionsRecord;
-import org.hps.conditions.ConnectionManager;
-import org.hps.conditions.DatabaseConditionsConverter;
-import org.lcsim.conditions.ConditionsManager;
-
-/**
- * This class creates a {@link SvtDaqMappingCollection} from the conditions database.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class SvtDaqMappingConverter extends DatabaseConditionsConverter<SvtDaqMappingCollection> {
-
-    public SvtDaqMappingConverter(ConditionsObjectFactory objectFactory) {
-        super(objectFactory);
-    }
-    
-    /**
-     * Create an {@link SvtDaqMappingCollection} object from the database.
-     */
-    public SvtDaqMappingCollection getData(ConditionsManager manager, String name) {
-        
-        // Use default key name if not set.
-        if (name == null) {
-            name = SVT_DAQ_MAP;
-        }
-                
-        // Get the ConditionsRecord with the meta-data, which will use the current run number from the manager.
-        ConditionsRecord record = ConditionsRecord.find(manager, name).get(0);
-               
-        // Get the table name, field name, and field value defining the applicable conditions.
-        String tableName = record.getTableName();
-        int collectionId = record.getCollectionId();
-        
-        // The object to be returned to caller.
-        SvtDaqMappingCollection collection = 
-               new SvtDaqMappingCollection(this.getTableMetaData(tableName), collectionId, true); 
-                        
-        // Get the connection manager.
-        ConnectionManager connectionManager = ConnectionManager.getConnectionManager();
-                                                                                            
-        // Construct the query to find matching calibration records using the ID field.
-        String query = "SELECT id, half, layer, hybrid, fpga FROM " + tableName 
-                + " WHERE collection_id = " + collectionId
-                + " ORDER BY half ASC, layer ASC";
-                   
-        // Execute the query and get the results.
-        ResultSet resultSet = connectionManager.query(query);
-               
-        try {
-            // Loop over the database records.
-            while(resultSet.next()) {                          
-                int rowId = resultSet.getInt(1);                
-                FieldValueMap fieldValues = new FieldValueMap();
-                fieldValues.put("half", resultSet.getInt(2));
-                fieldValues.put("layer", resultSet.getInt(3));
-                fieldValues.put("hybrid", resultSet.getInt(4));
-                fieldValues.put("fpga", resultSet.getInt(5));
-                SvtDaqMapping newObject = _objectFactory.createObject(
-                        SvtDaqMapping.class, tableName, rowId, fieldValues, true);
-                collection.add(newObject);
-            }            
-        } catch (SQLException x) {
-            throw new RuntimeException("Database error.", x);
-        } catch (ConditionsObjectException x) {
-            throw new RuntimeException("Error creating object of " + getType().getSimpleName() + " type.", x);
-        }
-        
-        // Return DAQ map to caller.
-        return collection;
-    }
-
-    /**
-     * Get the type handled by this converter.
-     * @return The type handled by this converter.
-     */
-    public Class<SvtDaqMappingCollection> getType() {
-        return SvtDaqMappingCollection.class;
-    }
-
-}

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtGain.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtGain.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtGain.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,6 +1,7 @@
 package org.hps.conditions.svt;
 
 import org.hps.conditions.AbstractConditionsObject;
+import org.hps.conditions.ConditionsObjectCollection;
 
 /**
  * This class represents gain measurements for a single SVT channel.
@@ -8,6 +9,9 @@
  */
 public class SvtGain extends AbstractConditionsObject {
     
+    public static class SvtGainCollection extends ConditionsObjectCollection<SvtGain> {
+    }
+    
     /**
      * Get the channel ID.
      * @return The channel ID.

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtGainCollection.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtGainCollection.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtGainCollection.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,17 +0,0 @@
-package org.hps.conditions.svt;
-
-import org.hps.conditions.ConditionsObjectCollection;
-import org.hps.conditions.ConditionsTableMetaData;
-
-
-/**
- * This class represents a list of {@link SvtGain} objects associated 
- * with their SVT channel IDs from the database.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class SvtGainCollection extends ConditionsObjectCollection<SvtGain> {
-        
-    public SvtGainCollection(ConditionsTableMetaData tableMetaData, int collectionId, boolean isReadOnly) {
-        super(tableMetaData, collectionId, isReadOnly);
-    }
-}

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtGainConverter.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtGainConverter.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtGainConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,98 +0,0 @@
-package org.hps.conditions.svt;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.hps.conditions.AbstractConditionsObject.FieldValueMap;
-import org.hps.conditions.ConditionsObjectException;
-import org.hps.conditions.ConditionsObjectFactory;
-import org.hps.conditions.ConditionsRecord;
-import org.hps.conditions.ConditionsTableMetaData;
-import org.hps.conditions.ConnectionManager;
-import org.hps.conditions.DatabaseConditionsConverter;
-import org.lcsim.conditions.ConditionsManager;
-
-/**
- * This class creates a {@link SvtGainCollection} from the conditions database.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class SvtGainConverter extends DatabaseConditionsConverter<SvtGainCollection> {
-    
-    /**
-     * Class constructor.
-     */
-    public SvtGainConverter(ConditionsObjectFactory objectFactory) {
-        super(objectFactory);
-    }
-
-    /**
-     * Get the SVT channel constants for this run by named set.
-     * @param manager The current conditions manager.
-     * @param name The name of the conditions set.
-     * @return The channel constants data.
-     */
-    public SvtGainCollection getData(ConditionsManager manager, String name) {
-        
-        // Get the ConditionsRecord with the meta-data, which will use the current run number from the manager.
-        ConditionsRecord record = ConditionsRecord.find(manager, name).get(0);
-               
-        // Get the table name, field name, and field value defining the applicable conditions.
-        String tableName = record.getTableName();
-        int collectionId = record.getCollectionId();
-                
-        // Objects for building the return value.
-        ConditionsTableMetaData tableMetaData = _objectFactory.getTableRegistry().getTableMetaData(tableName);
-        SvtGainCollection collection = 
-                new SvtGainCollection(tableMetaData, collectionId, true); 
-        
-        // Get the connection manager.
-        ConnectionManager connectionManager = ConnectionManager.getConnectionManager();
-                                                                                            
-        // Construct the query to find matching calibration records using the ID field.
-        String query = "SELECT id, svt_channel_id, gain, offset FROM " + tableName 
-                + " WHERE collection_id = " + collectionId
-                + " ORDER BY svt_channel_id ASC";
-            
-        // Execute the query and get the results.
-        ResultSet resultSet = connectionManager.query(query);
-               
-        try {
-            // Loop over the gain records.            
-            while(resultSet.next()) {         
-                
-                // Get the object parameters from the ResultSet.
-                int rowId = resultSet.getInt(1);
-                FieldValueMap fieldValues = new FieldValueMap();
-                fieldValues.put("svt_channel_id", resultSet.getInt(2));
-                fieldValues.put("gain", resultSet.getDouble(3));
-                fieldValues.put("offset", resultSet.getDouble(4));
-                
-                // Create the object using the factory.
-                SvtGain newObject = _objectFactory.createObject(
-                        SvtGain.class,
-                        tableName,
-                        rowId,
-                        fieldValues,
-                        true);
-                
-                // Add the object to the collection. 
-                collection.add(newObject);
-            }            
-        } catch (SQLException x1) {
-            throw new RuntimeException("Database error.", x1);
-        } catch (ConditionsObjectException x2) {
-            throw new RuntimeException("Error converting to SvtGain object.", x2);
-        }
-        
-        // Return collection of gain objects to caller.
-        return collection;
-    }
-
-    /**
-     * Get the type handled by this converter.     
-     * @return The type handled by this converter.
-     */
-    public Class<SvtGainCollection> getType() {
-        return SvtGainCollection.class;
-    }        
-}
\ No newline at end of file

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtPulseParameters.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtPulseParameters.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtPulseParameters.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,6 +1,7 @@
 package org.hps.conditions.svt;
 
 import org.hps.conditions.AbstractConditionsObject;
+import org.hps.conditions.ConditionsObjectCollection;
 
 /**
  * This class represents the pulse parameters for an SVT channel.
@@ -8,6 +9,9 @@
  */
 public class SvtPulseParameters extends AbstractConditionsObject {
     
+    public static class SvtPulseParametersCollection extends ConditionsObjectCollection<SvtPulseParameters> {    
+    }
+    
     /**
      * Get the SVT channel ID.
      * @return The SVT channel ID.

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtPulseParametersCollection.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtPulseParametersCollection.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtPulseParametersCollection.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,16 +0,0 @@
-package org.hps.conditions.svt;
-
-import org.hps.conditions.ConditionsObjectCollection;
-import org.hps.conditions.ConditionsTableMetaData;
-
-/**
- * A collection of {@link SvtPulseParameters} objects stored by SVT channel ID.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class SvtPulseParametersCollection extends ConditionsObjectCollection<SvtPulseParameters> {
-	
-    public SvtPulseParametersCollection(ConditionsTableMetaData tableMetaData, int collectionId, boolean isReadOnly) {
-        super(tableMetaData, collectionId, isReadOnly);
-    }
-    
-}

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtPulseParametersConverter.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtPulseParametersConverter.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtPulseParametersConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,102 +0,0 @@
-package org.hps.conditions.svt;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.hps.conditions.AbstractConditionsObject.FieldValueMap;
-import org.hps.conditions.ConditionsObjectException;
-import org.hps.conditions.ConditionsObjectFactory;
-import org.hps.conditions.ConditionsRecord;
-import org.hps.conditions.ConditionsTableMetaData;
-import org.hps.conditions.ConnectionManager;
-import org.hps.conditions.DatabaseConditionsConverter;
-import org.lcsim.conditions.ConditionsManager;
-
-/**
- * This class creates a {@link SvtPulseParametersCollection} object from the
- * conditions database.
- * 
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class SvtPulseParametersConverter extends DatabaseConditionsConverter<SvtPulseParametersCollection> {
-    
-    public SvtPulseParametersConverter(ConditionsObjectFactory objectFactory) {
-        super(objectFactory);
-    }
-    
-    /**
-     * Get the pulse parameters by channel for this run by named conditions set.
-     * 
-     * @param manager The current conditions manager.
-     * @param name The name of the conditions set.
-     * @return The channel constants data.
-     */
-    public SvtPulseParametersCollection getData(ConditionsManager manager, String name) {
-
-        // Get the ConditionsRecord with the meta-data, which will use the
-        // current run number from the manager.
-        ConditionsRecord record = ConditionsRecord.find(manager, name).get(0);
-
-        // Get the table name, field name, and field value defining the
-        // applicable conditions.
-        String tableName = record.getTableName();
-        int collectionId = record.getCollectionId();
-
-        // Object for building the return value.
-        ConditionsTableMetaData tableMetaData = _objectFactory
-                .getTableRegistry().getTableMetaData(tableName);
-        SvtPulseParametersCollection collection = new SvtPulseParametersCollection(
-                tableMetaData, collectionId, true);
-
-        // Connection objects.
-        ConnectionManager connectionManager = getConnectionManager();
-
-        // Construct the query to find matching calibration records.
-        String query = "SELECT id, svt_channel_id, amplitude, t0, tp, chisq FROM " + tableName
-                + " WHERE collection_id = " + collectionId
-                + " ORDER BY id ASC";
-
-        // Execute the query and get the results.
-        ResultSet resultSet = connectionManager.query(query);
-
-        try {
-            // Loop over the calibration records.
-            while (resultSet.next()) {
-
-                // Get row ID from the database.
-                int rowId = resultSet.getInt(1);
-
-                // Set the field values for the new object.
-                FieldValueMap fieldValues = new FieldValueMap();
-                fieldValues.put("svt_channel_id", resultSet.getInt(2));
-                fieldValues.put("amplitude", resultSet.getDouble(3));
-                fieldValues.put("t0", resultSet.getDouble(4));
-                fieldValues.put("tp", resultSet.getDouble(5));
-                fieldValues.put("chisq", resultSet.getDouble(6));
-
-                // Create the object using the factory.
-                SvtPulseParameters newObject;
-                newObject = _objectFactory.createObject(SvtPulseParameters.class,
-                        tableName, rowId, fieldValues, true);
-
-                // Add the object to the collection.
-                collection.add(newObject);
-            }
-        } catch (SQLException x) {
-            throw new RuntimeException("Database error.", x);
-        } catch (ConditionsObjectException x) {
-            throw new RuntimeException("Error converting to SvtPulseParameters object.", x);
-        }
-
-        // Return the collection of channel constants to caller.
-        return collection;
-    }
-
-    /**
-     * Get the type handled by this converter.
-     * @return The type handled by this converter.
-     */
-    public Class<SvtPulseParametersCollection> getType() {
-        return SvtPulseParametersCollection.class;
-    }
-}

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtTimeShift.java 322 -> 323
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtTimeShift.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtTimeShift.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,6 +1,8 @@
 package org.hps.conditions.svt;
 
 import org.hps.conditions.AbstractConditionsObject;
+import org.hps.conditions.ConditionsObjectCollection;
+import org.lcsim.hps.util.Pair;
 
 /**
  * This class is a data holder for associating a time shift with a specific sensor
@@ -9,6 +11,21 @@
  */
 public class SvtTimeShift extends AbstractConditionsObject {
     
+    public static class SvtTimeShiftCollection extends ConditionsObjectCollection<SvtTimeShift> {
+        
+        SvtTimeShiftCollection find(Pair<Integer,Integer> pair) {
+            SvtTimeShiftCollection timeShifts = new SvtTimeShiftCollection();
+            int fpga = pair.getFirstElement();
+            int hybrid = pair.getSecondElement();
+            for (SvtTimeShift timeShift : getObjects()) {
+                if (timeShift.getFpga() == fpga && timeShift.getHybrid() == hybrid) {
+                    timeShifts.add(timeShift);
+                }
+            }
+            return timeShifts;
+        }
+    }
+    
     /**
      * Get the FPGA number.
      * @return The FPGA number.
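
Taken together, the nested collections compose: SvtDaqMappingCollection.get(half, layer) resolves a layer to its (FPGA, hybrid) pair, and SvtTimeShiftCollection.find(pair), like SvtChannelCollection.find(pair) earlier, uses that pair as the lookup key. A sketch of the chain, placed in org.hps.conditions.svt because both lookup methods are package-private (the helper class and the pre-populated collections are assumptions, not part of this commit):

    package org.hps.conditions.svt;

    import org.hps.conditions.svt.SvtDaqMapping.SvtDaqMappingCollection;
    import org.hps.conditions.svt.SvtTimeShift.SvtTimeShiftCollection;
    import org.lcsim.hps.util.Pair;

    // Illustrative only: resolve the DAQ pair for layer 1 of the top half,
    // then print the time shifts registered for that (FPGA, hybrid).
    class DaqLookupSketch {
        static void printTimeShifts(SvtDaqMappingCollection daqMap, SvtTimeShiftCollection timeShifts) {
            Pair<Integer, Integer> daqPair = daqMap.get(SvtDaqMappingCollection.TOP_HALF, 1);
            if (daqPair == null) {
                return;
            }
            for (SvtTimeShift timeShift : timeShifts.find(daqPair).getObjects()) {
                System.out.println(timeShift);
            }
        }
    }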

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtTimeShiftCollection.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtTimeShiftCollection.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtTimeShiftCollection.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,28 +0,0 @@
-package org.hps.conditions.svt;
-
-import org.hps.conditions.ConditionsObjectCollection;
-import org.hps.conditions.ConditionsTableMetaData;
-import org.lcsim.hps.util.Pair;
-
-/**
- * A simple collection of {@link SvtTimeShift} objects.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class SvtTimeShiftCollection extends ConditionsObjectCollection<SvtTimeShift> {
-
-    SvtTimeShiftCollection(ConditionsTableMetaData tableMetaData, int collectionId, boolean isReadOnly) {
-        super(tableMetaData, collectionId, isReadOnly);
-    }
-    
-    SvtTimeShiftCollection find(Pair<Integer,Integer> pair) {
-        SvtTimeShiftCollection timeShifts = new SvtTimeShiftCollection(this.getTableMetaData(), -1, false);
-        int fpga = pair.getFirstElement();
-        int hybrid = pair.getSecondElement();
-        for (SvtTimeShift timeShift : getObjects()) {
-            if (timeShift.getFpga() == fpga && timeShift.getHybrid() == hybrid) {
-                timeShifts.add(timeShift);
-            }
-        }
-        return timeShifts;
-    }
-}

java/trunk/conditions/src/main/java/org/hps/conditions/svt
SvtTimeShiftConverter.java removed after 322
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtTimeShiftConverter.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtTimeShiftConverter.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,85 +0,0 @@
-package org.hps.conditions.svt;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.hps.conditions.AbstractConditionsObject.FieldValueMap;
-import org.hps.conditions.ConditionsObjectException;
-import org.hps.conditions.ConditionsObjectFactory;
-import org.hps.conditions.ConditionsRecord;
-import org.hps.conditions.ConnectionManager;
-import org.hps.conditions.DatabaseConditionsConverter;
-import org.lcsim.conditions.ConditionsManager;
-
-/**
- * This class creates a {@link SvtGainCollection} from the conditions database.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class SvtTimeShiftConverter extends DatabaseConditionsConverter<SvtTimeShiftCollection> {
-    
-    public SvtTimeShiftConverter(ConditionsObjectFactory objectFactory) {
-        super(objectFactory);
-    }
-    
-    /**
-     * Get the SVT channel constants for this run by named set.
-     * @param manager The current conditions manager.
-     * @param name The name of the conditions set.
-     * @return The channel constants data.
-     */
-    public SvtTimeShiftCollection getData(ConditionsManager manager, String name) {
-        
-        // Get the ConditionsRecord with the meta-data, which will use the current run number from the manager.
-        ConditionsRecord record = ConditionsRecord.find(manager, name).get(0);
-               
-        // Get the table name, field name, and field value defining the applicable conditions.
-        String tableName = record.getTableName();
-        int collectionId = record.getCollectionId();
-                
-        // Collection that will be returned. 
-        SvtTimeShiftCollection collection = 
-                new SvtTimeShiftCollection(_objectFactory.getTableRegistry().getTableMetaData(tableName), 
-                        collectionId, true);
-        
-        // Get the connection manager.
-        ConnectionManager connectionManager = ConnectionManager.getConnectionManager();
-                                                                                            
-        // Construct the query to find matching records.
-        String query = "SELECT id, fpga, hybrid, time_shift FROM " + tableName 
-                + " WHERE collection_id = " + collectionId;
-            
-        // Execute the query and get the results.
-        ResultSet resultSet = connectionManager.query(query);
-               
-        try {
-            // Loop over the records.            
-            while(resultSet.next()) {                                 
-                int rowId = resultSet.getInt(1);
-
-                FieldValueMap fieldValues = new FieldValueMap();
-                fieldValues.put("fpga", resultSet.getInt(2));
-                fieldValues.put("hybrid", resultSet.getInt(3));
-                fieldValues.put("time_shift", resultSet.getDouble(4));
-                
-                SvtTimeShift newObject = _objectFactory.createObject(SvtTimeShift.class, tableName, rowId, fieldValues, true);
-                
-                collection.add(newObject);
-            }            
-        } catch (SQLException x) {
-            throw new RuntimeException("Database error.", x);
-        } catch (ConditionsObjectException x) {
-            throw new RuntimeException("Error creating SvtTimeShift object.", x);
-        }
-        
-        // Return collection to caller.
-        return collection;
-    }
-
-    /**
-     * Get the type handled by this converter.     
-     * @return The type handled by this converter.
-     */
-    public Class<SvtTimeShiftCollection> getType() {
-        return SvtTimeShiftCollection.class;
-    }        
-}
\ No newline at end of file

java/trunk/conditions/src/main/resources/org/hps/conditions/config
conditions_database_testrun_2013.xml added at 323
--- java/trunk/conditions/src/main/resources/org/hps/conditions/config/conditions_database_testrun_2013.xml	                        (rev 0)
+++ java/trunk/conditions/src/main/resources/org/hps/conditions/config/conditions_database_testrun_2013.xml	2014-03-20 17:38:04 UTC (rev 323)
@@ -0,0 +1,187 @@
+<conditions_database name="hps_conditions_test" version="1.0" author="jeremym">
+
+    <connection>
+        <user>rd_hps_cond_ro</user>
+        <password>2jumpinphotons.</password>
+        <database>rd_hps_cond</database>
+        <hostname>mysql-node03.slac.stanford.edu</hostname>
+        <port>3306</port>
+        <read_only>true</read_only>
+        <conditions_table>conditions_dev</conditions_table>
+    </connection>
+
+    <converters>
+
+        <!-- ConditionsRecord converter -->
+        <converter class="org.hps.conditions.ConditionsRecordConverter"/>
+
+        <!-- SVT converters -->
+        <converter class="org.hps.conditions.svt.SvtConditionsConverter"/>
+        <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtBadChannelConverter"/>
+        <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtCalibrationConverter"/>
+        <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtChannelConverter"/>
+        <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtDaqMappingConverter"/>
+        <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtGainConverter"/>
+        <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtPulseParametersConverter"/>
+        <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtTimeShiftConverter"/>
+
+        <!-- ECal converters -->
+        <converter class="org.hps.conditions.ecal.EcalConditionsConverter"/>
+        <converter class="org.hps.conditions.ecal.EcalConverterRegistry$EcalBadChannelConverter"/>
+        <converter class="org.hps.conditions.ecal.EcalConverterRegistry$EcalChannelConverter"/>
+        <converter class="org.hps.conditions.ecal.EcalConverterRegistry$EcalGainConverter"/>
+        <converter class="org.hps.conditions.ecal.EcalConverterRegistry$EcalCalibrationConverter"/>
+
+        <!-- Beam conditions converter -->
+        <converter class="org.hps.conditions.beam.BeamConverterRegistry$BeamCurrentConverter"/>
+    </converters>
+
+    <tables>
+    
+        <table name="svt_channels">
+            <classes>
+                <object class="org.hps.conditions.svt.SvtChannel"/>
+                <collection class="org.hps.conditions.svt.SvtChannel$SvtChannelCollection"/>
+            </classes>
+            <fields>
+                <field name="channel_id" />
+                <field name="fpga" />
+                <field name="hybrid" />
+                <field name="channel" />
+            </fields>
+        </table>
+    
+        <table name="svt_gains">
+            <classes>
+                <object class="org.hps.conditions.svt.SvtGain"/> 
+                <collection class="org.hps.conditions.svt.SvtGain$SvtGainCollection"/>
+            </classes>            
+            <fields>
+                <field name="svt_channel_id" />
+                <field name="gain" />
+                <field name="offset" />
+            </fields>
+        </table>
+        
+        <table name="svt_pulse_parameters">
+            <classes>
+                <object class="org.hps.conditions.svt.SvtPulseParameters"/>
+                <collection class="org.hps.conditions.svt.SvtPulseParameters$SvtPulseParametersCollection"/>
+            </classes>
+            <fields>
+                <field name="svt_channel_id" />
+                <field name="amplitude" />
+                <field name="t0" />
+                <field name="tp" />
+                <field name="chisq" />
+            </fields>        
+        </table>
+        
+        <table name="svt_calibrations">
+            <classes>
+                <object class="org.hps.conditions.svt.SvtCalibration"/>
+                <collection class="org.hps.conditions.svt.SvtCalibration$SvtCalibrationCollection"/>
+            </classes>
+            <fields>
+                <field name="svt_channel_id" />
+                <field name="noise" />
+                <field name="pedestal" />
+            </fields>        
+        </table>
+        
+        <table name="svt_time_shifts">
+            <classes>
+                <object class="org.hps.conditions.svt.SvtTimeShift"/>
+                <collection class="org.hps.conditions.svt.SvtTimeShift$SvtTimeShiftCollection"/>
+            </classes>
+            <fields>
+                <field name="fpga" />
+                <field name="hybrid" />
+                <field name="time_shift" />
+            </fields>        
+        </table>
+        
+        <table name="svt_bad_channels">
+            <classes>
+                <object class="org.hps.conditions.svt.SvtBadChannel"/>
+                <collection class="org.hps.conditions.svt.SvtBadChannel$SvtBadChannelCollection"/>
+            </classes>
+            <fields>
+                <field name="svt_channel_id" />
+            </fields>        
+        </table>
+        
+        <table name="svt_daq_map">
+            <classes>
+                <object class="org.hps.conditions.svt.SvtDaqMapping"/>
+                <collection class="org.hps.conditions.svt.SvtDaqMapping$SvtDaqMappingCollection"/>
+            </classes>
+            <fields>
+                <field name="half" />
+                <field name="layer" />
+                <field name="fpga" />
+                <field name="hybrid" />
+            </fields>  
+        </table>
+        
+        <table name="ecal_bad_channels">
+            <classes>
+                <object class="org.hps.conditions.ecal.EcalBadChannel"/>
+                <collection class="org.hps.conditions.ecal.EcalBadChannel$EcalBadChannelCollection"/>
+            </classes>
+            <fields>
+                <field name="ecal_channel_id" />
+            </fields>  
+        </table>
+        
+        <table name="ecal_gains">
+            <classes>
+                <object class="org.hps.conditions.ecal.EcalGain"/>
+                <collection class="org.hps.conditions.ecal.EcalGain$EcalGainCollection"/>
+            </classes>
+            <fields>
+                <field name="ecal_channel_id" />
+                <field name="gain" />
+            </fields>  
+        </table>
+        
+        <table name="ecal_calibrations">
+            <classes>
+                <object class="org.hps.conditions.ecal.EcalCalibration"/>
+                <collection class="org.hps.conditions.ecal.EcalCalibration$EcalCalibrationCollection"/>
+            </classes>
+            <fields>
+                <field name="ecal_channel_id" />
+                <field name="noise" />
+                <field name="pedestal" />
+            </fields>  
+        </table>
+        
+        <table name="ecal_channels">
+            <classes>
+                <object class="org.hps.conditions.ecal.EcalChannel"/>
+                <collection class="org.hps.conditions.ecal.EcalChannel$EcalChannelCollection"/>
+            </classes>
+            <fields>
+                <field name="channel_id" />                
+                <field name="crate" />
+                <field name="slot" />
+                <field name="channel" />
+                <field name="x" />
+                <field name="y" />
+            </fields>  
+        </table>
+        
+        <table name="beam_current">
+            <classes>
+                <object class="org.hps.conditions.beam.BeamCurrent"/>
+                <collection class="org.hps.conditions.beam.BeamCurrent$BeamCurrentCollection"/>
+            </classes>
+            <fields>
+                <field name="beam_current" />
+            </fields>
+        </table>
+                       
+    </tables>
+
+</conditions_database>
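
The new configuration file groups everything the database layer needs: a <connection> block, the list of <converter> classes to register, and a <tables> section mapping each table name to its object and collection classes plus its field names. How the commit's own XML loaders consume this is not shown here; as an independent sketch, the converter entries can be listed with the standard DOM API (the file path is illustrative and assumes the document is available on disk):

    import javax.xml.parsers.DocumentBuilderFactory;

    import org.w3c.dom.Document;
    import org.w3c.dom.Element;
    import org.w3c.dom.NodeList;

    // Illustrative only: print the converter classes declared in the config file.
    public class ConfigDumpSketch {
        public static void main(String[] args) throws Exception {
            Document doc = DocumentBuilderFactory.newInstance()
                    .newDocumentBuilder()
                    .parse("conditions_database_testrun_2013.xml");
            NodeList converters = doc.getElementsByTagName("converter");
            for (int i = 0; i < converters.getLength(); i++) {
                System.out.println(((Element) converters.item(i)).getAttribute("class"));
            }
        }
    }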

java/trunk/conditions/src/main/scripts
mysql_backup.sh added at 323
--- java/trunk/conditions/src/main/scripts/mysql_backup.sh	                        (rev 0)
+++ java/trunk/conditions/src/main/scripts/mysql_backup.sh	2014-03-20 17:38:04 UTC (rev 323)
@@ -0,0 +1,16 @@
+#!/bin/sh
+mysqldump -p2jumpinphotons. -h mysql-node03.slac.stanford.edu -P 3306 -u rd_hps_cond_ro rd_hps_cond \
+beam_current \
+conditions_dev \
+ecal_bad_channels \
+ecal_calibrations \
+ecal_channels \
+ecal_gains \
+svt_bad_channels \
+svt_bad_channels_scratch \
+svt_calibrations \
+svt_channels \
+svt_daq_map \
+svt_gains \
+svt_pulse_parameters \
+svt_time_shifts > conditions_database_testrun_2012_full.sql

java/trunk/conditions/src/main/scripts
mysql_backup_tables_only.sh added at 323
--- java/trunk/conditions/src/main/scripts/mysql_backup_tables_only.sh	                        (rev 0)
+++ java/trunk/conditions/src/main/scripts/mysql_backup_tables_only.sh	2014-03-20 17:38:04 UTC (rev 323)
@@ -0,0 +1,16 @@
+#!/bin/sh
+mysqldump -d -p2jumpinphotons. -h mysql-node03.slac.stanford.edu -P 3306 -u rd_hps_cond_ro rd_hps_cond \
+beam_current \
+conditions_dev \
+ecal_bad_channels \
+ecal_calibrations \
+ecal_channels \
+ecal_gains \
+svt_bad_channels \
+svt_bad_channels_scratch \
+svt_calibrations \
+svt_channels \
+svt_daq_map \
+svt_gains \
+svt_pulse_parameters \
+svt_time_shifts > conditions_database_tables_only.sql 

java/trunk/conditions/src/main/scripts
mysql_console.sh added at 323
--- java/trunk/conditions/src/main/scripts/mysql_console.sh	                        (rev 0)
+++ java/trunk/conditions/src/main/scripts/mysql_console.sh	2014-03-20 17:38:04 UTC (rev 323)
@@ -0,0 +1,2 @@
+#!/bin/sh
+mysql -D rd_hps_cond -h mysql-node03.slac.stanford.edu -P 3306 -u rd_hps_cond_ro -p2jumpinphotons.

java/trunk/conditions/src/main/sql
conditions_database_tables_only.sql added at 323
--- java/trunk/conditions/src/main/sql/conditions_database_tables_only.sql	                        (rev 0)
+++ java/trunk/conditions/src/main/sql/conditions_database_tables_only.sql	2014-03-20 17:38:04 UTC (rev 323)
@@ -0,0 +1,270 @@
+-- MySQL dump 10.13  Distrib 5.1.73, for redhat-linux-gnu (x86_64)
+--
+-- Host: mysql-node03.slac.stanford.edu    Database: rd_hps_cond
+-- ------------------------------------------------------
+-- Server version	5.5.23-log
+
+/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+/*!40101 SET NAMES utf8 */;
+/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
+/*!40103 SET TIME_ZONE='+00:00' */;
+/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
+/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
+/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
+/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
+
+--
+-- Table structure for table `beam_current`
+--
+
+DROP TABLE IF EXISTS `beam_current`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `beam_current` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `beam_current` double DEFAULT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=10 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `conditions_dev`
+--
+
+DROP TABLE IF EXISTS `conditions_dev`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `conditions_dev` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `run_start` int(11) NOT NULL,
+  `run_end` int(11) NOT NULL,
+  `updated` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  `created` datetime NOT NULL,
+  `valid_from` datetime DEFAULT NULL,
+  `valid_to` datetime DEFAULT NULL,
+  `created_by` varchar(255) DEFAULT NULL,
+  `notes` blob,
+  `name` varchar(40) NOT NULL,
+  `format_version` varchar(16) DEFAULT NULL,
+  `table_name` varchar(50) NOT NULL,
+  `collection_id` int(11) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=33 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `ecal_bad_channels`
+--
+
+DROP TABLE IF EXISTS `ecal_bad_channels`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `ecal_bad_channels` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `ecal_channel_id` int(11) NOT NULL,
+  `collection_id` int(11) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=45 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `ecal_calibrations`
+--
+
+DROP TABLE IF EXISTS `ecal_calibrations`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `ecal_calibrations` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `ecal_channel_id` int(11) NOT NULL,
+  `collection_id` int(11) NOT NULL,
+  `pedestal` double NOT NULL,
+  `noise` double NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=556 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `ecal_channels`
+--
+
+DROP TABLE IF EXISTS `ecal_channels`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `ecal_channels` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `channel_id` int(11) NOT NULL,
+  `x` smallint(6) NOT NULL,
+  `y` smallint(6) NOT NULL,
+  `crate` smallint(6) NOT NULL,
+  `slot` smallint(6) NOT NULL,
+  `channel` smallint(6) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=443 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `ecal_gains`
+--
+
+DROP TABLE IF EXISTS `ecal_gains`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `ecal_gains` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `ecal_channel_id` int(11) NOT NULL,
+  `gain` double NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=443 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `svt_bad_channels`
+--
+
+DROP TABLE IF EXISTS `svt_bad_channels`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `svt_bad_channels` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `svt_channel_id` int(11) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=3117 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `svt_bad_channels_scratch`
+--
+
+DROP TABLE IF EXISTS `svt_bad_channels_scratch`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `svt_bad_channels_scratch` (
+  `channel` int(10) unsigned DEFAULT NULL,
+  `fpga` int(10) unsigned DEFAULT NULL,
+  `hybrid` int(10) unsigned DEFAULT NULL
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `svt_calibrations`
+--
+
+DROP TABLE IF EXISTS `svt_calibrations`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `svt_calibrations` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `svt_channel_id` int(11) NOT NULL,
+  `noise` double NOT NULL,
+  `pedestal` double NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=12801 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `svt_channels`
+--
+
+DROP TABLE IF EXISTS `svt_channels`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `svt_channels` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `channel_id` int(11) NOT NULL,
+  `fpga` int(11) NOT NULL,
+  `hybrid` int(11) NOT NULL,
+  `channel` int(11) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=12801 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `svt_daq_map`
+--
+
+DROP TABLE IF EXISTS `svt_daq_map`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `svt_daq_map` (
+  `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `half` tinyint(3) unsigned NOT NULL,
+  `layer` tinyint(3) unsigned NOT NULL,
+  `hybrid` tinyint(3) unsigned NOT NULL,
+  `fpga` tinyint(3) unsigned NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=21 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `svt_gains`
+--
+
+DROP TABLE IF EXISTS `svt_gains`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `svt_gains` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `svt_channel_id` int(11) NOT NULL,
+  `gain` double NOT NULL,
+  `offset` double NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=13087 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `svt_pulse_parameters`
+--
+
+DROP TABLE IF EXISTS `svt_pulse_parameters`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `svt_pulse_parameters` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `svt_channel_id` int(11) NOT NULL,
+  `amplitude` double NOT NULL,
+  `t0` double NOT NULL,
+  `tp` double NOT NULL,
+  `chisq` double NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=13113 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Table structure for table `svt_time_shifts`
+--
+
+DROP TABLE IF EXISTS `svt_time_shifts`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `svt_time_shifts` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `fpga` tinyint(3) unsigned NOT NULL,
+  `hybrid` tinyint(3) unsigned NOT NULL,
+  `time_shift` double NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=21 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
+
+/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
+/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
+/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
+/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
+/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
+/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
+/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
+
+-- Dump completed on 2014-03-19 16:58:56
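
[Note: Illustrative sketch, not part of the commit. In the schema above every data table carries a collection_id column, so one coherent set of values is read with a single predicate; collection 1 below is an assumed example value.]

    -- illustrative only: fetch the gain values belonging to one collection
    SELECT ecal_channel_id, gain
      FROM ecal_gains
     WHERE collection_id = 1;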

java/trunk/conditions/src/main/sql
conditions_database_testrun_2012_full.sql added at 323
--- java/trunk/conditions/src/main/sql/conditions_database_testrun_2012_full.sql	                        (rev 0)
+++ java/trunk/conditions/src/main/sql/conditions_database_testrun_2012_full.sql	2014-03-20 17:38:04 UTC (rev 323)
@@ -0,0 +1,409 @@
+-- MySQL dump 10.13  Distrib 5.1.73, for redhat-linux-gnu (x86_64)
+--
+-- Host: mysql-node03.slac.stanford.edu    Database: rd_hps_cond
+-- ------------------------------------------------------
+-- Server version	5.5.23-log
+
+/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+/*!40101 SET NAMES utf8 */;
+/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
+/*!40103 SET TIME_ZONE='+00:00' */;
+/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
+/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
+/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
+/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
+
+--
+-- Table structure for table `beam_current`
+--
+
+DROP TABLE IF EXISTS `beam_current`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `beam_current` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `beam_current` double DEFAULT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=10 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `beam_current`
+--
+
+LOCK TABLES `beam_current` WRITE;
+/*!40000 ALTER TABLE `beam_current` DISABLE KEYS */;
+INSERT INTO `beam_current` VALUES (1,1,54879.7343788147),(2,2,26928.0426635742),(3,3,204325.132622242),(4,4,148839.141475141),(5,5,92523.9428218845),(6,6,91761.4541434497),(7,7,209883.979889035),(8,8,110298.553449392),(9,9,8556.8459701538);
+/*!40000 ALTER TABLE `beam_current` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `conditions_dev`
+--
+
+DROP TABLE IF EXISTS `conditions_dev`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `conditions_dev` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `run_start` int(11) NOT NULL,
+  `run_end` int(11) NOT NULL,
+  `updated` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  `created` datetime NOT NULL,
+  `valid_from` datetime DEFAULT NULL,
+  `valid_to` datetime DEFAULT NULL,
+  `created_by` varchar(255) DEFAULT NULL,
+  `notes` blob,
+  `name` varchar(40) NOT NULL,
+  `format_version` varchar(16) DEFAULT NULL,
+  `table_name` varchar(50) NOT NULL,
+  `collection_id` int(11) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=33 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `conditions_dev`
+--
+
+LOCK TABLES `conditions_dev` WRITE;
+/*!40000 ALTER TABLE `conditions_dev` DISABLE KEYS */;
+INSERT INTO `conditions_dev` VALUES (1,777,1365,'2013-09-30 23:05:51','2013-09-20 13:19:55',NULL,NULL,'jeremym',NULL,'svt_calibrations',NULL,'svt_calibrations',1),(2,777,1365,'2013-09-30 23:05:11','2013-09-24 16:48:39',NULL,NULL,'jeremym',NULL,'ecal_gains',NULL,'ecal_gains',1),(3,777,1365,'2013-09-30 23:05:11','2013-09-30 16:02:49',NULL,NULL,'jeremym',NULL,'ecal_bad_channels',NULL,'ecal_bad_channels',1),(4,777,1365,'2013-10-02 19:39:25','2013-09-30 17:25:21',NULL,NULL,'jeremym',NULL,'ecal_calibrations',NULL,'ecal_calibrations',1),(5,777,1365,'2013-10-02 19:39:25','2013-10-02 12:39:11',NULL,NULL,'jeremym',NULL,'svt_bad_channels',NULL,'svt_bad_channels',1),(6,777,1365,'2013-10-03 21:59:50','2013-10-03 14:59:33',NULL,NULL,'jeremym',NULL,'svt_pulse_parameters',NULL,'svt_pulse_parameters',1),(7,777,1365,'2013-10-10 23:02:42','2013-10-03 16:51:19',NULL,NULL,'jeremym',NULL,'svt_gains',NULL,'svt_gains',1),(8,777,1365,'2013-10-10 23:02:42','2013-10-10 16:02:27',NULL,NULL,'jeremym',N!
 ULL,'svt_daq[...]
+/*!40000 ALTER TABLE `conditions_dev` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `ecal_bad_channels`
+--
+
+DROP TABLE IF EXISTS `ecal_bad_channels`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `ecal_bad_channels` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `ecal_channel_id` int(11) NOT NULL,
+  `collection_id` int(11) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=45 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `ecal_bad_channels`
+--
+
+LOCK TABLES `ecal_bad_channels` WRITE;
+/*!40000 ALTER TABLE `ecal_bad_channels` DISABLE KEYS */;
+INSERT INTO `ecal_bad_channels` VALUES (1,20,1),(2,234,1),(3,235,1),(4,236,1),(5,238,1),(6,240,1),(7,256,1),(8,264,1),(9,266,1),(10,268,1),(11,270,1),(12,272,1),(13,274,1),(14,276,1),(15,278,1),(16,280,1),(17,282,1),(18,284,1),(19,286,1),(20,290,1),(21,292,1),(22,293,1),(23,300,1),(24,306,1),(25,308,1),(26,310,1),(27,346,1),(28,348,1),(29,370,1),(30,418,1),(31,420,1),(32,422,1),(33,424,1),(34,425,1),(35,426,1),(36,428,1),(37,430,1),(38,431,1),(39,432,1),(40,434,1),(41,436,1),(42,438,1),(43,440,1),(44,442,1);
+/*!40000 ALTER TABLE `ecal_bad_channels` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `ecal_calibrations`
+--
+
+DROP TABLE IF EXISTS `ecal_calibrations`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `ecal_calibrations` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `ecal_channel_id` int(11) NOT NULL,
+  `collection_id` int(11) NOT NULL,
+  `pedestal` double NOT NULL,
+  `noise` double NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=556 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `ecal_calibrations`
+--
+
+LOCK TABLES `ecal_calibrations` WRITE;
+/*!40000 ALTER TABLE `ecal_calibrations` DISABLE KEYS */;
+INSERT INTO `ecal_calibrations` VALUES (1,1,1,84.753881,5.458664),(2,3,1,84.855231,5.079355),(3,5,1,116.119123,5.02221),(4,7,1,94.705737,5.134416),(5,9,1,104.188526,5.368471),(6,11,1,121.932283,5.208102),(7,13,1,110.893363,5.202804),(8,15,1,122.005624,4.972769),(9,17,1,139.244094,5.071151),(10,19,1,110.365354,5.474363),(11,21,1,154.716085,4.728363),(12,23,1,109.64342,4.53666),(13,25,1,78.065467,4.574049),(14,27,1,143.976715,4.54093),(15,29,1,90.033971,4.610056),(16,31,1,148.486614,5.02935),(17,33,1,123.077728,4.688464),(18,35,1,113.468279,4.547754),(19,37,1,110.754331,5.533226),(20,39,1,110.415636,6.380486),(21,41,1,109.32036,4.726009),(22,43,1,89.884139,4.777839),(23,45,1,147.128796,4.669933),(24,47,1,100.65928,4.735584),(25,49,1,127.842857,5.609905),(26,51,1,101.690664,4.818428),(27,53,1,106.753656,4.694521),(28,55,1,93.061305,4.732574),(29,57,1,111.990889,4.726564),(30,59,1,103.664679,4.932656),(31,61,1,103.072891,4.731993),(32,63,1,123.154218,4.385393),(33,65,1,130.8633!
 3,4.529927),[...]
+/*!40000 ALTER TABLE `ecal_calibrations` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `ecal_channels`
+--
+
+DROP TABLE IF EXISTS `ecal_channels`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `ecal_channels` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `channel_id` int(11) NOT NULL,
+  `x` smallint(6) NOT NULL,
+  `y` smallint(6) NOT NULL,
+  `crate` smallint(6) NOT NULL,
+  `slot` smallint(6) NOT NULL,
+  `channel` smallint(6) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=443 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `ecal_channels`
+--
+
+LOCK TABLES `ecal_channels` WRITE;
+/*!40000 ALTER TABLE `ecal_channels` DISABLE KEYS */;
+INSERT INTO `ecal_channels` VALUES (1,1,1,-23,1,1,10,0),(2,1,2,-23,-1,2,10,0),(3,1,3,-23,2,1,10,1),(4,1,4,-23,-2,2,10,1),(5,1,5,-23,3,1,10,2),(6,1,6,-23,-3,2,10,2),(7,1,7,-23,4,1,10,3),(8,1,8,-23,-4,2,10,3),(9,1,9,-23,5,1,10,4),(10,1,10,-23,-5,2,10,4),(11,1,11,-22,1,1,10,5),(12,1,12,-22,-1,2,10,5),(13,1,13,-22,2,1,10,6),(14,1,14,-22,-2,2,10,6),(15,1,15,-22,3,1,10,7),(16,1,16,-22,-3,2,10,7),(17,1,17,-22,4,1,10,8),(18,1,18,-22,-4,2,10,8),(19,1,19,-22,5,1,10,9),(20,1,20,-22,-5,2,10,9),(21,1,21,-21,1,1,10,10),(22,1,22,-21,-1,2,10,10),(23,1,23,-21,2,1,10,11),(24,1,24,-21,-2,2,10,11),(25,1,25,-21,3,1,10,12),(26,1,26,-21,-3,2,10,12),(27,1,27,-21,4,1,10,13),(28,1,28,-21,-4,2,10,13),(29,1,29,-21,5,1,10,14),(30,1,30,-21,-5,2,10,14),(31,1,31,-20,1,1,10,15),(32,1,32,-20,-1,2,10,15),(33,1,33,-20,2,1,13,0),(34,1,34,-20,-2,2,13,0),(35,1,35,-20,3,1,13,1),(36,1,36,-20,-3,2,13,1),(37,1,37,-20,4,1,13,2),(38,1,38,-20,-4,2,13,2),(39,1,39,-20,5,1,13,3),(40,1,40,-20,-5,2,13,3),(41,1,41,-19,1,1,13!
 ,4),(42,1,42[...]
+/*!40000 ALTER TABLE `ecal_channels` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `ecal_gains`
+--
+
+DROP TABLE IF EXISTS `ecal_gains`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `ecal_gains` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `ecal_channel_id` int(11) NOT NULL,
+  `gain` double NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=443 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `ecal_gains`
+--
+
+LOCK TABLES `ecal_gains` WRITE;
+/*!40000 ALTER TABLE `ecal_gains` DISABLE KEYS */;
+INSERT INTO `ecal_gains` VALUES (1,1,1,0.133127),(2,1,3,0.11578),(3,1,5,0.134208),(4,1,7,0.155521),(5,1,9,0.13377),(6,1,11,0.157064),(7,1,13,0.130944),(8,1,15,0.140505),(9,1,17,0.166728),(10,1,19,0.13377),(11,1,21,0.179666),(12,1,23,0.196879),(13,1,25,0.212828),(14,1,27,0.212381),(15,1,29,0.212381),(16,1,31,0.204436),(17,1,33,0.21458),(18,1,35,0.208637),(19,1,37,0.212381),(20,1,39,0.212381),(21,1,41,0.207319),(22,1,43,0.19391),(23,1,45,0.199045),(24,1,47,0.214641),(25,1,49,0.214641),(26,1,51,0.204768),(27,1,53,0.219602),(28,1,55,0.213419),(29,1,57,0.214641),(30,1,59,0.214641),(31,1,61,0.238475),(32,1,63,0.251577),(33,1,65,0.26581),(34,1,67,0.26104),(35,1,69,0.26104),(36,1,71,0.236384),(37,1,73,0.269346),(38,1,75,0.230097),(39,1,77,0.26104),(40,1,79,0.26104),(41,1,81,0.188842),(42,1,83,0.220022),(43,1,85,0.24098),(44,1,87,0.222194),(45,1,89,0.222194),(46,1,91,0.213146),(47,1,93,0.21083),(48,1,95,0.213309),(49,1,97,0.222194),(50,1,99,0.222194),(51,1,101,0.228191),(52,1,103,0.!
 190975),(53,[...]
+/*!40000 ALTER TABLE `ecal_gains` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `svt_bad_channels`
+--
+
+DROP TABLE IF EXISTS `svt_bad_channels`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `svt_bad_channels` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `svt_channel_id` int(11) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=3117 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `svt_bad_channels`
+--
+
+LOCK TABLES `svt_bad_channels` WRITE;
+/*!40000 ALTER TABLE `svt_bad_channels` DISABLE KEYS */;
+INSERT INTO `svt_bad_channels` VALUES (1,1,33),(2,1,129),(3,1,161),(4,1,257),(5,1,289),(6,1,385),(7,1,417),(8,1,545),(9,1,639),(10,1,673),(11,1,769),(12,1,801),(13,1,897),(14,1,929),(15,1,986),(16,1,1025),(17,1,1057),(18,1,1153),(19,1,1185),(20,1,1279),(21,1,1281),(22,1,1282),(23,1,1313),(24,1,1409),(25,1,1415),(26,1,1537),(27,1,1793),(28,1,1825),(29,1,1919),(30,1,1920),(31,1,1921),(32,1,2081),(33,1,2323),(34,1,2338),(35,1,2352),(36,1,2353),(37,1,2382),(38,1,2383),(39,1,2384),(40,1,2385),(41,1,2559),(42,1,2560),(43,1,2642),(44,1,3809),(45,1,5587),(46,1,5931),(47,1,7028),(48,1,7998),(49,1,7999),(50,1,8575),(51,2,2383),(52,2,3585),(53,2,3586),(54,2,3587),(55,2,3588),(56,2,3589),(57,2,3590),(58,2,3591),(59,2,3592),(60,2,3593),(61,2,3594),(62,2,3595),(63,2,3596),(64,2,3597),(65,2,3598),(66,2,3599),(67,2,3600),(68,2,3601),(69,2,3602),(70,2,3603),(71,2,3604),(72,2,3605),(73,2,3606),(74,2,3607),(75,2,3608),(76,2,3609),(77,2,3610),(78,2,3611),(79,2,3612),(80,2,3613),(81,2,3614),(82!
 ,2,3615),(83[...]
+/*!40000 ALTER TABLE `svt_bad_channels` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `svt_bad_channels_scratch`
+--
+
+DROP TABLE IF EXISTS `svt_bad_channels_scratch`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `svt_bad_channels_scratch` (
+  `channel` int(10) unsigned DEFAULT NULL,
+  `fpga` int(10) unsigned DEFAULT NULL,
+  `hybrid` int(10) unsigned DEFAULT NULL
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `svt_bad_channels_scratch`
+--
+
+LOCK TABLES `svt_bad_channels_scratch` WRITE;
+/*!40000 ALTER TABLE `svt_bad_channels_scratch` DISABLE KEYS */;
+/*!40000 ALTER TABLE `svt_bad_channels_scratch` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `svt_calibrations`
+--
+
+DROP TABLE IF EXISTS `svt_calibrations`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `svt_calibrations` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `svt_channel_id` int(11) NOT NULL,
+  `noise` double NOT NULL,
+  `pedestal` double NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=12801 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `svt_calibrations`
+--
+
+LOCK TABLES `svt_calibrations` WRITE;
+/*!40000 ALTER TABLE `svt_calibrations` DISABLE KEYS */;
+INSERT INTO `svt_calibrations` VALUES (1,1,1,144.439,4128.2),(2,1,2,73.8021,4221.44),(3,1,3,68.9208,4202.87),(4,1,4,67.5162,4212.69),(5,1,5,67.0612,4226.35),(6,1,6,66.8148,4277.98),(7,1,7,67.0852,4190.05),(8,1,8,66.6319,4249.81),(9,1,9,66.5855,4187.58),(10,1,10,66.5094,4280.37),(11,1,11,66.8626,4253.08),(12,1,12,67.2756,4201.57),(13,1,13,66.6692,4126.93),(14,1,14,66.3733,4240.99),(15,1,15,67.1244,4234.24),(16,1,16,67.267,4119.42),(17,1,17,67.417,4184.48),(18,1,18,66.8996,4141.2),(19,1,19,67.0237,4197.29),(20,1,20,67.2383,4252.4),(21,1,21,65.4174,4366.29),(22,1,22,65.4644,4314.95),(23,1,23,65.3006,4136.94),(24,1,24,66.7159,4287.62),(25,1,25,66.0618,4280.76),(26,1,26,65.9341,4197.06),(27,1,27,66.236,4256.05),(28,1,28,65.792,4295.79),(29,1,29,65.2681,4184.34),(30,1,30,65.2526,4164.29),(31,1,31,66.5423,4191.31),(32,1,32,65.142,4357.32),(33,1,33,110.69,4267.37),(34,1,34,70.185,4181.88),(35,1,35,65.4172,4263.31),(36,1,36,65.6101,4290.78),(37,1,37,64.9324,4295.53),(38,1,38,64.6467!
 ,4234.7),(39[...]
+/*!40000 ALTER TABLE `svt_calibrations` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `svt_channels`
+--
+
+DROP TABLE IF EXISTS `svt_channels`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `svt_channels` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `channel_id` int(11) NOT NULL,
+  `fpga` int(11) NOT NULL,
+  `hybrid` int(11) NOT NULL,
+  `channel` int(11) NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=12801 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `svt_channels`
+--
+
+LOCK TABLES `svt_channels` WRITE;
+/*!40000 ALTER TABLE `svt_channels` DISABLE KEYS */;
+INSERT INTO `svt_channels` VALUES (1,1,1,0,0,0),(2,1,2,0,0,1),(3,1,3,0,0,2),(4,1,4,0,0,3),(5,1,5,0,0,4),(6,1,6,0,0,5),(7,1,7,0,0,6),(8,1,8,0,0,7),(9,1,9,0,0,8),(10,1,10,0,0,9),(11,1,11,0,0,10),(12,1,12,0,0,11),(13,1,13,0,0,12),(14,1,14,0,0,13),(15,1,15,0,0,14),(16,1,16,0,0,15),(17,1,17,0,0,16),(18,1,18,0,0,17),(19,1,19,0,0,18),(20,1,20,0,0,19),(21,1,21,0,0,20),(22,1,22,0,0,21),(23,1,23,0,0,22),(24,1,24,0,0,23),(25,1,25,0,0,24),(26,1,26,0,0,25),(27,1,27,0,0,26),(28,1,28,0,0,27),(29,1,29,0,0,28),(30,1,30,0,0,29),(31,1,31,0,0,30),(32,1,32,0,0,31),(33,1,33,0,0,32),(34,1,34,0,0,33),(35,1,35,0,0,34),(36,1,36,0,0,35),(37,1,37,0,0,36),(38,1,38,0,0,37),(39,1,39,0,0,38),(40,1,40,0,0,39),(41,1,41,0,0,40),(42,1,42,0,0,41),(43,1,43,0,0,42),(44,1,44,0,0,43),(45,1,45,0,0,44),(46,1,46,0,0,45),(47,1,47,0,0,46),(48,1,48,0,0,47),(49,1,49,0,0,48),(50,1,50,0,0,49),(51,1,51,0,0,50),(52,1,52,0,0,51),(53,1,53,0,0,52),(54,1,54,0,0,53),(55,1,55,0,0,54),(56,1,56,0,0,55),(57,1,57,0,0,56),(58,1,58,0,0,!
 57),(59,1,59[...]
+/*!40000 ALTER TABLE `svt_channels` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `svt_daq_map`
+--
+
+DROP TABLE IF EXISTS `svt_daq_map`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `svt_daq_map` (
+  `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `half` tinyint(3) unsigned NOT NULL,
+  `layer` tinyint(3) unsigned NOT NULL,
+  `hybrid` tinyint(3) unsigned NOT NULL,
+  `fpga` tinyint(3) unsigned NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=21 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `svt_daq_map`
+--
+
+LOCK TABLES `svt_daq_map` WRITE;
+/*!40000 ALTER TABLE `svt_daq_map` DISABLE KEYS */;
+INSERT INTO `svt_daq_map` VALUES (1,1,1,1,1,4),(2,1,1,2,0,4),(3,1,1,3,0,3),(4,1,1,4,2,4),(5,1,1,5,2,3),(6,1,1,6,1,3),(7,1,1,7,1,6),(8,1,1,8,0,6),(9,1,1,9,2,6),(10,1,1,10,2,5),(11,1,0,1,1,0),(12,1,0,2,0,0),(13,1,0,3,0,1),(14,1,0,4,2,0),(15,1,0,5,2,1),(16,1,0,6,1,1),(17,1,0,7,1,2),(18,1,0,8,0,2),(19,1,0,9,2,2),(20,1,0,10,0,5);
+/*!40000 ALTER TABLE `svt_daq_map` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `svt_gains`
+--
+
+DROP TABLE IF EXISTS `svt_gains`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `svt_gains` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `svt_channel_id` int(11) NOT NULL,
+  `gain` double NOT NULL,
+  `offset` double NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=13087 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `svt_gains`
+--
+
+LOCK TABLES `svt_gains` WRITE;
+/*!40000 ALTER TABLE `svt_gains` DISABLE KEYS */;
+INSERT INTO `svt_gains` VALUES (1,1,1,0.09041,168.54959),(2,1,2,0.09132,129.06331),(3,1,3,0.09269,121.73973),(4,1,4,0.09228,127.13628),(5,1,5,0.09474,116.85173),(6,1,6,0.0927,120.09408),(7,1,7,0.09368,125.54723),(8,1,8,0.09324,132.5109),(9,1,9,0.08913,118.85577),(10,1,10,0.08708,123.87396),(11,1,11,0.0887,115.30546),(12,1,12,0.08889,115.51142),(13,1,13,0.09074,107.8372),(14,1,14,0.08878,114.82484),(15,1,15,0.08972,121.13764),(16,1,16,0.08986,122.56405),(17,1,17,0.08839,120.55956),(18,1,18,0.08729,114.11262),(19,1,19,0.08823,114.78429),(20,1,20,0.08827,109.46955),(21,1,21,0.09049,103.61712),(22,1,22,0.0886,114.31911),(23,1,23,0.08998,105.11322),(24,1,24,0.08981,123.84481),(25,1,25,0.08868,115.05402),(26,1,26,0.08712,109.20123),(27,1,27,0.08794,106.37902),(28,1,28,0.08801,109.43693),(29,1,29,0.08965,105.21076),(30,1,30,0.08795,107.71108),(31,1,31,0.08927,109.57722),(32,1,32,0.08945,112.41569),(33,1,33,0.08953,189.18797),(34,1,34,0.08911,104.52473),(35,1,35,0.09018,106.40301),!
 (36,1,36,0.0[...]
+/*!40000 ALTER TABLE `svt_gains` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `svt_pulse_parameters`
+--
+
+DROP TABLE IF EXISTS `svt_pulse_parameters`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `svt_pulse_parameters` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `svt_channel_id` int(11) NOT NULL,
+  `amplitude` double NOT NULL,
+  `t0` double NOT NULL,
+  `tp` double NOT NULL,
+  `chisq` double NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=13113 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `svt_pulse_parameters`
+--
+
+LOCK TABLES `svt_pulse_parameters` WRITE;
+/*!40000 ALTER TABLE `svt_pulse_parameters` DISABLE KEYS */;
+INSERT INTO `svt_pulse_parameters` VALUES (1,1,1,2566.76,36.2141,49.5314,45.3601),(2,1,2,2557.65,34.8792,51.4634,1628.44),(3,1,3,2580.55,34.8269,52.3254,2590.75),(4,1,4,2570.08,34.9907,52.7496,2574.52),(5,1,5,2634.25,34.5281,52.3736,3388.62),(6,1,6,2579.41,34.612,52.3403,2630.34),(7,1,7,2607.43,34.6304,52.4653,3507.53),(8,1,8,2587.5,35.5808,53.9765,4339.65),(9,1,9,2470.49,35.1321,52.6679,1584.38),(10,1,10,2431.07,34.681,52.9538,3366.13),(11,1,11,2463.05,34.8131,52.8844,3758.31),(12,1,12,2464.5,34.9735,53.026,3184.54),(13,1,13,2515.97,34.6483,52.5323,3943.82),(14,1,14,2465.73,34.6363,52.6423,2967.8),(15,1,15,2499.96,34.6024,52.5392,3803.47),(16,1,16,2478.32,35.5557,54.264,4804.85),(17,1,17,2455.4,35.2826,52.6523,1719.91),(18,1,18,2424.47,34.8248,52.8653,3729.28),(19,1,19,2447.52,34.8615,53.0984,3929.35),(20,1,20,2447.24,35.057,53.049,3495.45),(21,1,21,2502.89,34.7726,52.4264,3836.1),(22,1,22,2458.68,34.7871,52.4676,2901.43),(23,1,23,2486.05,34.7067,52.6529,3915.48),(24,1,24,!
 2483.28,35.7[...]
+/*!40000 ALTER TABLE `svt_pulse_parameters` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `svt_time_shifts`
+--
+
+DROP TABLE IF EXISTS `svt_time_shifts`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `svt_time_shifts` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `collection_id` int(11) NOT NULL,
+  `fpga` tinyint(3) unsigned NOT NULL,
+  `hybrid` tinyint(3) unsigned NOT NULL,
+  `time_shift` double NOT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=21 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `svt_time_shifts`
+--
+
+LOCK TABLES `svt_time_shifts` WRITE;
+/*!40000 ALTER TABLE `svt_time_shifts` DISABLE KEYS */;
+INSERT INTO `svt_time_shifts` VALUES (1,1,0,1,1.5),(2,1,4,1,1),(3,1,0,0,1.6),(4,1,4,0,1.3),(5,1,1,0,-2.8),(6,1,3,0,-3),(7,1,0,2,2),(8,1,4,2,3.5),(9,1,1,2,-1.9),(10,1,3,2,0),(11,1,1,1,-2.2),(12,1,3,1,-0.5),(13,1,2,1,1.8),(14,1,6,1,-0.8),(15,1,2,0,1.2),(16,1,6,0,-1.1),(17,1,2,2,2.5),(18,1,6,2,3.5),(19,1,5,0,0),(20,1,5,2,-2.7);
+/*!40000 ALTER TABLE `svt_time_shifts` ENABLE KEYS */;
+UNLOCK TABLES;
+/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
+
+/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
+/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
+/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
+/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
+/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
+/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
+/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
+
+-- Dump completed on 2014-03-19 16:58:10
[Note: Some over-long lines of diff output only partially shown]
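
[Note: Illustrative sketch, not part of the commit. In the dump above a conditions_dev record maps a run range and a logical name to a table_name/collection_id pair; run 1351 and the follow-up SELECT are assumed example values.]

    -- illustrative only: find which collection applies to a given run
    SELECT table_name, collection_id
      FROM conditions_dev
     WHERE name = 'svt_calibrations'
       AND 1351 BETWEEN run_start AND run_end;

    -- then read that collection from the named table, e.g.
    SELECT svt_channel_id, noise, pedestal
      FROM svt_calibrations
     WHERE collection_id = 1;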

java/trunk/conditions/src/main/sql
conditions_db_tables_only.sql removed after 322
--- java/trunk/conditions/src/main/sql/conditions_db_tables_only.sql	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/sql/conditions_db_tables_only.sql	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,267 +0,0 @@
--- MySQL dump 10.13  Distrib 5.1.73, for redhat-linux-gnu (x86_64)
---
--- Host: mysql-node03.slac.stanford.edu    Database: rd_hps_cond
--- ------------------------------------------------------
--- Server version	5.5.23-log
-
-/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
-/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
-/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
-/*!40101 SET NAMES utf8 */;
-/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
-/*!40103 SET TIME_ZONE='+00:00' */;
-/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
-/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
-/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
-/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
-
---
--- Table structure for table `beam_current`
---
-
-DROP TABLE IF EXISTS `beam_current`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `beam_current` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `set_id` int(11) DEFAULT NULL,
-  `beam_current` double DEFAULT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=10 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `conditions_dev`
---
-
-DROP TABLE IF EXISTS `conditions_dev`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `conditions_dev` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `run_start` int(11) NOT NULL,
-  `run_end` int(11) NOT NULL,
-  `updated` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
-  `created` datetime NOT NULL,
-  `valid_from` datetime DEFAULT NULL,
-  `valid_to` datetime DEFAULT NULL,
-  `created_by` varchar(255) DEFAULT NULL,
-  `notes` blob,
-  `name` varchar(40) NOT NULL,
-  `format_version` varchar(16) DEFAULT NULL,
-  `table_name` varchar(50) NOT NULL,
-  `field_name` varchar(50) NOT NULL,
-  `field_value` int(11) NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=31 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `ecal_bad_channels`
---
-
-DROP TABLE IF EXISTS `ecal_bad_channels`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `ecal_bad_channels` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `ecal_channel_id` int(11) NOT NULL,
-  `set_id` int(11) NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=45 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `ecal_calibrations`
---
-
-DROP TABLE IF EXISTS `ecal_calibrations`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `ecal_calibrations` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `ecal_channel_id` int(11) NOT NULL,
-  `set_id` int(11) DEFAULT NULL,
-  `pedestal` double NOT NULL,
-  `noise` double NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=556 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `ecal_channels`
---
-
-DROP TABLE IF EXISTS `ecal_channels`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `ecal_channels` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `x` smallint(6) NOT NULL,
-  `y` smallint(6) NOT NULL,
-  `crate` smallint(6) NOT NULL,
-  `slot` smallint(6) NOT NULL,
-  `channel` smallint(6) NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=443 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `ecal_gains`
---
-
-DROP TABLE IF EXISTS `ecal_gains`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `ecal_gains` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `set_id` int(11) NOT NULL,
-  `ecal_channel_id` int(11) NOT NULL,
-  `gain` double NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=443 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `svt_bad_channels`
---
-
-DROP TABLE IF EXISTS `svt_bad_channels`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `svt_bad_channels` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `set_id` int(11) NOT NULL,
-  `svt_channel_id` int(11) NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=3117 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `svt_bad_channels_scratch`
---
-
-DROP TABLE IF EXISTS `svt_bad_channels_scratch`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `svt_bad_channels_scratch` (
-  `channel` int(10) unsigned DEFAULT NULL,
-  `fpga` int(10) unsigned DEFAULT NULL,
-  `hybrid` int(10) unsigned DEFAULT NULL
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `svt_calibrations`
---
-
-DROP TABLE IF EXISTS `svt_calibrations`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `svt_calibrations` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `set_id` int(11) NOT NULL,
-  `svt_channel_id` int(11) NOT NULL,
-  `noise` double NOT NULL,
-  `pedestal` double NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=12801 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `svt_channels`
---
-
-DROP TABLE IF EXISTS `svt_channels`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `svt_channels` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `fpga` int(11) NOT NULL,
-  `hybrid` int(11) NOT NULL,
-  `channel` int(11) NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=12801 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `svt_daq_map`
---
-
-DROP TABLE IF EXISTS `svt_daq_map`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `svt_daq_map` (
-  `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
-  `set_id` int(10) unsigned NOT NULL,
-  `half` tinyint(3) unsigned NOT NULL,
-  `layer` tinyint(3) unsigned NOT NULL,
-  `hybrid` tinyint(3) unsigned NOT NULL,
-  `fpga` tinyint(3) unsigned NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=21 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `svt_gains`
---
-
-DROP TABLE IF EXISTS `svt_gains`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `svt_gains` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `set_id` int(11) NOT NULL,
-  `svt_channel_id` int(11) NOT NULL,
-  `gain` double NOT NULL,
-  `offset` double NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=13087 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `svt_pulse_parameters`
---
-
-DROP TABLE IF EXISTS `svt_pulse_parameters`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `svt_pulse_parameters` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `set_id` int(11) NOT NULL,
-  `svt_channel_id` int(11) NOT NULL,
-  `amplitude` double NOT NULL,
-  `t0` double NOT NULL,
-  `tp` double NOT NULL,
-  `chisq` double NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=13113 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Table structure for table `svt_time_shifts`
---
-
-DROP TABLE IF EXISTS `svt_time_shifts`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `svt_time_shifts` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `set_id` int(10) unsigned NOT NULL,
-  `fpga` tinyint(3) unsigned NOT NULL,
-  `hybrid` tinyint(3) unsigned NOT NULL,
-  `time_shift` double NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=21 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
-
-/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
-/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
-/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
-/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
-/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
-/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-
--- Dump completed on 2014-03-13 15:02:29
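
[Note: Illustrative sketch, not part of the commit. The removed schema above keyed data rows on set_id and recorded the linking column per conditions record as field_name/field_value; a lookup under that layout would have read roughly as follows, with run 1351 as an assumed example value.]

    -- superseded pattern, sketch only
    SELECT table_name, field_name, field_value
      FROM conditions_dev
     WHERE name = 'svt_gains'
       AND 1351 BETWEEN run_start AND run_end;

    -- then, using the returned field name and value:
    SELECT svt_channel_id, gain, offset
      FROM svt_gains
     WHERE set_id = 1;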

java/trunk/conditions/src/main/sql
conditions_db_testrun_full.sql removed after 322
--- java/trunk/conditions/src/main/sql/conditions_db_testrun_full.sql	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/main/sql/conditions_db_testrun_full.sql	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,406 +0,0 @@
--- MySQL dump 10.13  Distrib 5.1.73, for redhat-linux-gnu (x86_64)
---
--- Host: mysql-node03.slac.stanford.edu    Database: rd_hps_cond
--- ------------------------------------------------------
--- Server version	5.5.23-log
-
-/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
-/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
-/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
-/*!40101 SET NAMES utf8 */;
-/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
-/*!40103 SET TIME_ZONE='+00:00' */;
-/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
-/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
-/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
-/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
-
---
--- Table structure for table `beam_current`
---
-
-DROP TABLE IF EXISTS `beam_current`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `beam_current` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `set_id` int(11) DEFAULT NULL,
-  `beam_current` double DEFAULT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=10 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `beam_current`
---
-
-LOCK TABLES `beam_current` WRITE;
-/*!40000 ALTER TABLE `beam_current` DISABLE KEYS */;
-INSERT INTO `beam_current` VALUES (1,1,54879.7343788147),(2,2,26928.0426635742),(3,3,204325.132622242),(4,4,148839.141475141),(5,5,92523.9428218845),(6,6,91761.4541434497),(7,7,209883.979889035),(8,8,110298.553449392),(9,9,8556.8459701538);
-/*!40000 ALTER TABLE `beam_current` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `conditions_dev`
---
-
-DROP TABLE IF EXISTS `conditions_dev`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `conditions_dev` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `run_start` int(11) NOT NULL,
-  `run_end` int(11) NOT NULL,
-  `updated` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
-  `created` datetime NOT NULL,
-  `valid_from` datetime DEFAULT NULL,
-  `valid_to` datetime DEFAULT NULL,
-  `created_by` varchar(255) DEFAULT NULL,
-  `notes` blob,
-  `name` varchar(40) NOT NULL,
-  `format_version` varchar(16) DEFAULT NULL,
-  `table_name` varchar(50) NOT NULL,
-  `field_name` varchar(50) NOT NULL,
-  `field_value` int(11) NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=31 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `conditions_dev`
---
-
-LOCK TABLES `conditions_dev` WRITE;
-/*!40000 ALTER TABLE `conditions_dev` DISABLE KEYS */;
-INSERT INTO `conditions_dev` VALUES (1,777,1365,'2013-09-30 23:05:51','2013-09-20 13:19:55',NULL,NULL,'jeremym',NULL,'svt_calibrations',NULL,'svt_calibrations','set_id',1),(2,777,1365,'2013-09-30 23:05:11','2013-09-24 16:48:39',NULL,NULL,'jeremym',NULL,'ecal_gains',NULL,'ecal_gains','set_id',1),(3,777,1365,'2013-09-30 23:05:11','2013-09-30 16:02:49',NULL,NULL,'jeremym',NULL,'ecal_bad_channels',NULL,'ecal_bad_channels','set_id',1),(4,777,1365,'2013-10-02 19:39:25','2013-09-30 17:25:21',NULL,NULL,'jeremym',NULL,'ecal_calibrations',NULL,'ecal_calibrations','set_id',1),(5,777,1365,'2013-10-02 19:39:25','2013-10-02 12:39:11',NULL,NULL,'jeremym',NULL,'svt_bad_channels',NULL,'svt_bad_channels','set_id',1),(6,777,1365,'2013-10-03 21:59:50','2013-10-03 14:59:33',NULL,NULL,'jeremym',NULL,'svt_pulse_parameters',NULL,'svt_pulse_parameters','set_id',1),(7,777,1365,'2013-10-10 23:02:42','2013-10-03 16:51:19',NULL,NULL,'jeremym',NULL,'svt_gains',NULL,'svt_gains','set_id',1),(8,777,1365,'2!
 013-10-10 23[...]
-/*!40000 ALTER TABLE `conditions_dev` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `ecal_bad_channels`
---
-
-DROP TABLE IF EXISTS `ecal_bad_channels`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `ecal_bad_channels` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `ecal_channel_id` int(11) NOT NULL,
-  `set_id` int(11) NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=45 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `ecal_bad_channels`
---
-
-LOCK TABLES `ecal_bad_channels` WRITE;
-/*!40000 ALTER TABLE `ecal_bad_channels` DISABLE KEYS */;
-INSERT INTO `ecal_bad_channels` VALUES (1,20,1),(2,234,1),(3,235,1),(4,236,1),(5,238,1),(6,240,1),(7,256,1),(8,264,1),(9,266,1),(10,268,1),(11,270,1),(12,272,1),(13,274,1),(14,276,1),(15,278,1),(16,280,1),(17,282,1),(18,284,1),(19,286,1),(20,290,1),(21,292,1),(22,293,1),(23,300,1),(24,306,1),(25,308,1),(26,310,1),(27,346,1),(28,348,1),(29,370,1),(30,418,1),(31,420,1),(32,422,1),(33,424,1),(34,425,1),(35,426,1),(36,428,1),(37,430,1),(38,431,1),(39,432,1),(40,434,1),(41,436,1),(42,438,1),(43,440,1),(44,442,1);
-/*!40000 ALTER TABLE `ecal_bad_channels` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `ecal_calibrations`
---
-
-DROP TABLE IF EXISTS `ecal_calibrations`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `ecal_calibrations` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `ecal_channel_id` int(11) NOT NULL,
-  `set_id` int(11) DEFAULT NULL,
-  `pedestal` double NOT NULL,
-  `noise` double NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=556 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `ecal_calibrations`
---
-
-LOCK TABLES `ecal_calibrations` WRITE;
-/*!40000 ALTER TABLE `ecal_calibrations` DISABLE KEYS */;
-INSERT INTO `ecal_calibrations` VALUES (1,1,1,84.753881,5.458664),(2,3,1,84.855231,5.079355),(3,5,1,116.119123,5.02221),(4,7,1,94.705737,5.134416),(5,9,1,104.188526,5.368471),(6,11,1,121.932283,5.208102),(7,13,1,110.893363,5.202804),(8,15,1,122.005624,4.972769),(9,17,1,139.244094,5.071151),(10,19,1,110.365354,5.474363),(11,21,1,154.716085,4.728363),(12,23,1,109.64342,4.53666),(13,25,1,78.065467,4.574049),(14,27,1,143.976715,4.54093),(15,29,1,90.033971,4.610056),(16,31,1,148.486614,5.02935),(17,33,1,123.077728,4.688464),(18,35,1,113.468279,4.547754),(19,37,1,110.754331,5.533226),(20,39,1,110.415636,6.380486),(21,41,1,109.32036,4.726009),(22,43,1,89.884139,4.777839),(23,45,1,147.128796,4.669933),(24,47,1,100.65928,4.735584),(25,49,1,127.842857,5.609905),(26,51,1,101.690664,4.818428),(27,53,1,106.753656,4.694521),(28,55,1,93.061305,4.732574),(29,57,1,111.990889,4.726564),(30,59,1,103.664679,4.932656),(31,61,1,103.072891,4.731993),(32,63,1,123.154218,4.385393),(33,65,1,130.8633!
 3,4.529927),[...]
-/*!40000 ALTER TABLE `ecal_calibrations` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `ecal_channels`
---
-
-DROP TABLE IF EXISTS `ecal_channels`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `ecal_channels` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `x` smallint(6) NOT NULL,
-  `y` smallint(6) NOT NULL,
-  `crate` smallint(6) NOT NULL,
-  `slot` smallint(6) NOT NULL,
-  `channel` smallint(6) NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=443 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `ecal_channels`
---
-
-LOCK TABLES `ecal_channels` WRITE;
-/*!40000 ALTER TABLE `ecal_channels` DISABLE KEYS */;
-INSERT INTO `ecal_channels` VALUES (1,-23,1,1,10,0),(2,-23,-1,2,10,0),(3,-23,2,1,10,1),(4,-23,-2,2,10,1),(5,-23,3,1,10,2),(6,-23,-3,2,10,2),(7,-23,4,1,10,3),(8,-23,-4,2,10,3),(9,-23,5,1,10,4),(10,-23,-5,2,10,4),(11,-22,1,1,10,5),(12,-22,-1,2,10,5),(13,-22,2,1,10,6),(14,-22,-2,2,10,6),(15,-22,3,1,10,7),(16,-22,-3,2,10,7),(17,-22,4,1,10,8),(18,-22,-4,2,10,8),(19,-22,5,1,10,9),(20,-22,-5,2,10,9),(21,-21,1,1,10,10),(22,-21,-1,2,10,10),(23,-21,2,1,10,11),(24,-21,-2,2,10,11),(25,-21,3,1,10,12),(26,-21,-3,2,10,12),(27,-21,4,1,10,13),(28,-21,-4,2,10,13),(29,-21,5,1,10,14),(30,-21,-5,2,10,14),(31,-20,1,1,10,15),(32,-20,-1,2,10,15),(33,-20,2,1,13,0),(34,-20,-2,2,13,0),(35,-20,3,1,13,1),(36,-20,-3,2,13,1),(37,-20,4,1,13,2),(38,-20,-4,2,13,2),(39,-20,5,1,13,3),(40,-20,-5,2,13,3),(41,-19,1,1,13,4),(42,-19,-1,2,13,4),(43,-19,2,1,13,5),(44,-19,-2,2,13,5),(45,-19,3,1,13,6),(46,-19,-3,2,13,6),(47,-19,4,1,13,7),(48,-19,-4,2,13,7),(49,-19,5,1,13,8),(50,-19,-5,2,13,8),(51,-18,1,1,13,9),(52,-18!
 ,-1,2,13,9),[...]
-/*!40000 ALTER TABLE `ecal_channels` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `ecal_gains`
---
-
-DROP TABLE IF EXISTS `ecal_gains`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `ecal_gains` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `set_id` int(11) NOT NULL,
-  `ecal_channel_id` int(11) NOT NULL,
-  `gain` double NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=443 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `ecal_gains`
---
-
-LOCK TABLES `ecal_gains` WRITE;
-/*!40000 ALTER TABLE `ecal_gains` DISABLE KEYS */;
-INSERT INTO `ecal_gains` VALUES (1,1,1,0.133127),(2,1,3,0.11578),(3,1,5,0.134208),(4,1,7,0.155521),(5,1,9,0.13377),(6,1,11,0.157064),(7,1,13,0.130944),(8,1,15,0.140505),(9,1,17,0.166728),(10,1,19,0.13377),(11,1,21,0.179666),(12,1,23,0.196879),(13,1,25,0.212828),(14,1,27,0.212381),(15,1,29,0.212381),(16,1,31,0.204436),(17,1,33,0.21458),(18,1,35,0.208637),(19,1,37,0.212381),(20,1,39,0.212381),(21,1,41,0.207319),(22,1,43,0.19391),(23,1,45,0.199045),(24,1,47,0.214641),(25,1,49,0.214641),(26,1,51,0.204768),(27,1,53,0.219602),(28,1,55,0.213419),(29,1,57,0.214641),(30,1,59,0.214641),(31,1,61,0.238475),(32,1,63,0.251577),(33,1,65,0.26581),(34,1,67,0.26104),(35,1,69,0.26104),(36,1,71,0.236384),(37,1,73,0.269346),(38,1,75,0.230097),(39,1,77,0.26104),(40,1,79,0.26104),(41,1,81,0.188842),(42,1,83,0.220022),(43,1,85,0.24098),(44,1,87,0.222194),(45,1,89,0.222194),(46,1,91,0.213146),(47,1,93,0.21083),(48,1,95,0.213309),(49,1,97,0.222194),(50,1,99,0.222194),(51,1,101,0.228191),(52,1,103,0.!
 190975),(53,[...]
-/*!40000 ALTER TABLE `ecal_gains` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `svt_bad_channels`
---
-
-DROP TABLE IF EXISTS `svt_bad_channels`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `svt_bad_channels` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `set_id` int(11) NOT NULL,
-  `svt_channel_id` int(11) NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=3117 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `svt_bad_channels`
---
-
-LOCK TABLES `svt_bad_channels` WRITE;
-/*!40000 ALTER TABLE `svt_bad_channels` DISABLE KEYS */;
-INSERT INTO `svt_bad_channels` VALUES (1,1,33),(2,1,129),(3,1,161),(4,1,257),(5,1,289),(6,1,385),(7,1,417),(8,1,545),(9,1,639),(10,1,673),(11,1,769),(12,1,801),(13,1,897),(14,1,929),(15,1,986),(16,1,1025),(17,1,1057),(18,1,1153),(19,1,1185),(20,1,1279),(21,1,1281),(22,1,1282),(23,1,1313),(24,1,1409),(25,1,1415),(26,1,1537),(27,1,1793),(28,1,1825),(29,1,1919),(30,1,1920),(31,1,1921),(32,1,2081),(33,1,2323),(34,1,2338),(35,1,2352),(36,1,2353),(37,1,2382),(38,1,2383),(39,1,2384),(40,1,2385),(41,1,2559),(42,1,2560),(43,1,2642),(44,1,3809),(45,1,5587),(46,1,5931),(47,1,7028),(48,1,7998),(49,1,7999),(50,1,8575),(51,2,2383),(52,2,3585),(53,2,3586),(54,2,3587),(55,2,3588),(56,2,3589),(57,2,3590),(58,2,3591),(59,2,3592),(60,2,3593),(61,2,3594),(62,2,3595),(63,2,3596),(64,2,3597),(65,2,3598),(66,2,3599),(67,2,3600),(68,2,3601),(69,2,3602),(70,2,3603),(71,2,3604),(72,2,3605),(73,2,3606),(74,2,3607),(75,2,3608),(76,2,3609),(77,2,3610),(78,2,3611),(79,2,3612),(80,2,3613),(81,2,3614),(82!
 ,2,3615),(83[...]
-/*!40000 ALTER TABLE `svt_bad_channels` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `svt_bad_channels_scratch`
---
-
-DROP TABLE IF EXISTS `svt_bad_channels_scratch`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `svt_bad_channels_scratch` (
-  `channel` int(10) unsigned DEFAULT NULL,
-  `fpga` int(10) unsigned DEFAULT NULL,
-  `hybrid` int(10) unsigned DEFAULT NULL
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `svt_bad_channels_scratch`
---
-
-LOCK TABLES `svt_bad_channels_scratch` WRITE;
-/*!40000 ALTER TABLE `svt_bad_channels_scratch` DISABLE KEYS */;
-/*!40000 ALTER TABLE `svt_bad_channels_scratch` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `svt_calibrations`
---
-
-DROP TABLE IF EXISTS `svt_calibrations`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `svt_calibrations` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `set_id` int(11) NOT NULL,
-  `svt_channel_id` int(11) NOT NULL,
-  `noise` double NOT NULL,
-  `pedestal` double NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=12801 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `svt_calibrations`
---
-
-LOCK TABLES `svt_calibrations` WRITE;
-/*!40000 ALTER TABLE `svt_calibrations` DISABLE KEYS */;
-INSERT INTO `svt_calibrations` VALUES (1,1,1,144.439,4128.2),(2,1,2,73.8021,4221.44),(3,1,3,68.9208,4202.87),(4,1,4,67.5162,4212.69),(5,1,5,67.0612,4226.35),(6,1,6,66.8148,4277.98),(7,1,7,67.0852,4190.05),(8,1,8,66.6319,4249.81),(9,1,9,66.5855,4187.58),(10,1,10,66.5094,4280.37),(11,1,11,66.8626,4253.08),(12,1,12,67.2756,4201.57),(13,1,13,66.6692,4126.93),(14,1,14,66.3733,4240.99),(15,1,15,67.1244,4234.24),(16,1,16,67.267,4119.42),(17,1,17,67.417,4184.48),(18,1,18,66.8996,4141.2),(19,1,19,67.0237,4197.29),(20,1,20,67.2383,4252.4),(21,1,21,65.4174,4366.29),(22,1,22,65.4644,4314.95),(23,1,23,65.3006,4136.94),(24,1,24,66.7159,4287.62),(25,1,25,66.0618,4280.76),(26,1,26,65.9341,4197.06),(27,1,27,66.236,4256.05),(28,1,28,65.792,4295.79),(29,1,29,65.2681,4184.34),(30,1,30,65.2526,4164.29),(31,1,31,66.5423,4191.31),(32,1,32,65.142,4357.32),(33,1,33,110.69,4267.37),(34,1,34,70.185,4181.88),(35,1,35,65.4172,4263.31),(36,1,36,65.6101,4290.78),(37,1,37,64.9324,4295.53),(38,1,38,64.6467!
 ,4234.7),(39[...]
-/*!40000 ALTER TABLE `svt_calibrations` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `svt_channels`
---
-
-DROP TABLE IF EXISTS `svt_channels`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `svt_channels` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `fpga` int(11) NOT NULL,
-  `hybrid` int(11) NOT NULL,
-  `channel` int(11) NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=12801 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `svt_channels`
---
-
-LOCK TABLES `svt_channels` WRITE;
-/*!40000 ALTER TABLE `svt_channels` DISABLE KEYS */;
-INSERT INTO `svt_channels` VALUES (1,0,0,0),(2,0,0,1),(3,0,0,2),(4,0,0,3),(5,0,0,4),(6,0,0,5),(7,0,0,6),(8,0,0,7),(9,0,0,8),(10,0,0,9),(11,0,0,10),(12,0,0,11),(13,0,0,12),(14,0,0,13),(15,0,0,14),(16,0,0,15),(17,0,0,16),(18,0,0,17),(19,0,0,18),(20,0,0,19),(21,0,0,20),(22,0,0,21),(23,0,0,22),(24,0,0,23),(25,0,0,24),(26,0,0,25),(27,0,0,26),(28,0,0,27),(29,0,0,28),(30,0,0,29),(31,0,0,30),(32,0,0,31),(33,0,0,32),(34,0,0,33),(35,0,0,34),(36,0,0,35),(37,0,0,36),(38,0,0,37),(39,0,0,38),(40,0,0,39),(41,0,0,40),(42,0,0,41),(43,0,0,42),(44,0,0,43),(45,0,0,44),(46,0,0,45),(47,0,0,46),(48,0,0,47),(49,0,0,48),(50,0,0,49),(51,0,0,50),(52,0,0,51),(53,0,0,52),(54,0,0,53),(55,0,0,54),(56,0,0,55),(57,0,0,56),(58,0,0,57),(59,0,0,58),(60,0,0,59),(61,0,0,60),(62,0,0,61),(63,0,0,62),(64,0,0,63),(65,0,0,64),(66,0,0,65),(67,0,0,66),(68,0,0,67),(69,0,0,68),(70,0,0,69),(71,0,0,70),(72,0,0,71),(73,0,0,72),(74,0,0,73),(75,0,0,74),(76,0,0,75),(77,0,0,76),(78,0,0,77),(79,0,0,78),(80,0,0,79),(81,0,0,80),(!
 82,0,0,81),([...]
-/*!40000 ALTER TABLE `svt_channels` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `svt_daq_map`
---
-
-DROP TABLE IF EXISTS `svt_daq_map`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `svt_daq_map` (
-  `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
-  `set_id` int(10) unsigned NOT NULL,
-  `half` tinyint(3) unsigned NOT NULL,
-  `layer` tinyint(3) unsigned NOT NULL,
-  `hybrid` tinyint(3) unsigned NOT NULL,
-  `fpga` tinyint(3) unsigned NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=21 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `svt_daq_map`
---
-
-LOCK TABLES `svt_daq_map` WRITE;
-/*!40000 ALTER TABLE `svt_daq_map` DISABLE KEYS */;
-INSERT INTO `svt_daq_map` VALUES (1,1,1,1,1,4),(2,1,1,2,0,4),(3,1,1,3,0,3),(4,1,1,4,2,4),(5,1,1,5,2,3),(6,1,1,6,1,3),(7,1,1,7,1,6),(8,1,1,8,0,6),(9,1,1,9,2,6),(10,1,1,10,2,5),(11,1,0,1,1,0),(12,1,0,2,0,0),(13,1,0,3,0,1),(14,1,0,4,2,0),(15,1,0,5,2,1),(16,1,0,6,1,1),(17,1,0,7,1,2),(18,1,0,8,0,2),(19,1,0,9,2,2),(20,1,0,10,0,5);
-/*!40000 ALTER TABLE `svt_daq_map` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `svt_gains`
---
-
-DROP TABLE IF EXISTS `svt_gains`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `svt_gains` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `set_id` int(11) NOT NULL,
-  `svt_channel_id` int(11) NOT NULL,
-  `gain` double NOT NULL,
-  `offset` double NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=13087 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `svt_gains`
---
-
-LOCK TABLES `svt_gains` WRITE;
-/*!40000 ALTER TABLE `svt_gains` DISABLE KEYS */;
-INSERT INTO `svt_gains` VALUES (1,1,1,0.09041,168.54959),(2,1,2,0.09132,129.06331),(3,1,3,0.09269,121.73973),(4,1,4,0.09228,127.13628),(5,1,5,0.09474,116.85173),(6,1,6,0.0927,120.09408),(7,1,7,0.09368,125.54723),(8,1,8,0.09324,132.5109),(9,1,9,0.08913,118.85577),(10,1,10,0.08708,123.87396),(11,1,11,0.0887,115.30546),(12,1,12,0.08889,115.51142),(13,1,13,0.09074,107.8372),(14,1,14,0.08878,114.82484),(15,1,15,0.08972,121.13764),(16,1,16,0.08986,122.56405),(17,1,17,0.08839,120.55956),(18,1,18,0.08729,114.11262),(19,1,19,0.08823,114.78429),(20,1,20,0.08827,109.46955),(21,1,21,0.09049,103.61712),(22,1,22,0.0886,114.31911),(23,1,23,0.08998,105.11322),(24,1,24,0.08981,123.84481),(25,1,25,0.08868,115.05402),(26,1,26,0.08712,109.20123),(27,1,27,0.08794,106.37902),(28,1,28,0.08801,109.43693),(29,1,29,0.08965,105.21076),(30,1,30,0.08795,107.71108),(31,1,31,0.08927,109.57722),(32,1,32,0.08945,112.41569),(33,1,33,0.08953,189.18797),(34,1,34,0.08911,104.52473),(35,1,35,0.09018,106.40301),!
 (36,1,36,0.0[...]
-/*!40000 ALTER TABLE `svt_gains` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `svt_pulse_parameters`
---
-
-DROP TABLE IF EXISTS `svt_pulse_parameters`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `svt_pulse_parameters` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `set_id` int(11) NOT NULL,
-  `svt_channel_id` int(11) NOT NULL,
-  `amplitude` double NOT NULL,
-  `t0` double NOT NULL,
-  `tp` double NOT NULL,
-  `chisq` double NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=13113 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `svt_pulse_parameters`
---
-
-LOCK TABLES `svt_pulse_parameters` WRITE;
-/*!40000 ALTER TABLE `svt_pulse_parameters` DISABLE KEYS */;
-INSERT INTO `svt_pulse_parameters` VALUES (1,1,1,2566.76,36.2141,49.5314,45.3601),(2,1,2,2557.65,34.8792,51.4634,1628.44),(3,1,3,2580.55,34.8269,52.3254,2590.75),(4,1,4,2570.08,34.9907,52.7496,2574.52),(5,1,5,2634.25,34.5281,52.3736,3388.62),(6,1,6,2579.41,34.612,52.3403,2630.34),(7,1,7,2607.43,34.6304,52.4653,3507.53),(8,1,8,2587.5,35.5808,53.9765,4339.65),(9,1,9,2470.49,35.1321,52.6679,1584.38),(10,1,10,2431.07,34.681,52.9538,3366.13),(11,1,11,2463.05,34.8131,52.8844,3758.31),(12,1,12,2464.5,34.9735,53.026,3184.54),(13,1,13,2515.97,34.6483,52.5323,3943.82),(14,1,14,2465.73,34.6363,52.6423,2967.8),(15,1,15,2499.96,34.6024,52.5392,3803.47),(16,1,16,2478.32,35.5557,54.264,4804.85),(17,1,17,2455.4,35.2826,52.6523,1719.91),(18,1,18,2424.47,34.8248,52.8653,3729.28),(19,1,19,2447.52,34.8615,53.0984,3929.35),(20,1,20,2447.24,35.057,53.049,3495.45),(21,1,21,2502.89,34.7726,52.4264,3836.1),(22,1,22,2458.68,34.7871,52.4676,2901.43),(23,1,23,2486.05,34.7067,52.6529,3915.48),(24,1,24,!
 2483.28,35.7[...]
-/*!40000 ALTER TABLE `svt_pulse_parameters` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `svt_time_shifts`
---
-
-DROP TABLE IF EXISTS `svt_time_shifts`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `svt_time_shifts` (
-  `id` int(11) NOT NULL AUTO_INCREMENT,
-  `set_id` int(10) unsigned NOT NULL,
-  `fpga` tinyint(3) unsigned NOT NULL,
-  `hybrid` tinyint(3) unsigned NOT NULL,
-  `time_shift` double NOT NULL,
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=21 DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `svt_time_shifts`
---
-
-LOCK TABLES `svt_time_shifts` WRITE;
-/*!40000 ALTER TABLE `svt_time_shifts` DISABLE KEYS */;
-INSERT INTO `svt_time_shifts` VALUES (1,1,0,1,1.5),(2,1,4,1,1),(3,1,0,0,1.6),(4,1,4,0,1.3),(5,1,1,0,-2.8),(6,1,3,0,-3),(7,1,0,2,2),(8,1,4,2,3.5),(9,1,1,2,-1.9),(10,1,3,2,0),(11,1,1,1,-2.2),(12,1,3,1,-0.5),(13,1,2,1,1.8),(14,1,6,1,-0.8),(15,1,2,0,1.2),(16,1,6,0,-1.1),(17,1,2,2,2.5),(18,1,6,2,3.5),(19,1,5,0,0),(20,1,5,2,-2.7);
-/*!40000 ALTER TABLE `svt_time_shifts` ENABLE KEYS */;
-UNLOCK TABLES;
-/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
-
-/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
-/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
-/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
-/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
-/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
-/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-
--- Dump completed on 2014-03-11 17:04:14
[Note: Some over-long lines of diff output are only partially shown]

java/trunk/conditions/src/test/java/org/hps/conditions
ConditionsDatabaseObjectTest.java removed after 322
--- java/trunk/conditions/src/test/java/org/hps/conditions/ConditionsDatabaseObjectTest.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/test/java/org/hps/conditions/ConditionsDatabaseObjectTest.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,110 +0,0 @@
-package org.hps.conditions;
-
-import java.io.File;
-import java.util.HashSet;
-import java.util.Set;
-
-import junit.framework.TestCase;
-
-import org.hps.conditions.AbstractConditionsObject.FieldValueMap;
-
-/**
- * Test the basic functionality of a {@link ConditionsObject} on a dummy database.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class ConditionsDatabaseObjectTest extends TestCase {
-    
-    String dummyTableName = "dummy_table";
-    String dummyFieldName = "dummy_field1";
-    float firstValue = 1.234f;
-    float secondValue = 5.678f;
-    
-    public void testDummy() {
-    
-        // Connect to local test database.
-        ConnectionManager connectionManager = new ConnectionManager();
-        //ConnectionManager connectionManager = new DummyConnectionManager();
-        connectionManager.setupFromProperties(new File("./src/main/config/dummy_db.properties"));
-        
-        // Setup table meta data information.
-        Set<String> dummyFieldNames = new HashSet<String>();
-        dummyFieldNames.add(dummyFieldName);               
-        ConditionsTableMetaData tableMetaData = new ConditionsTableMetaData(dummyTableName, dummyFieldNames);
-        
-        // Create a dummy data object with a single field value.
-        FieldValueMap fieldValues = new FieldValueMap();
-        fieldValues.put(dummyFieldName, firstValue);
-        ConditionsObject dummyObject = new DummyConditionsObject(connectionManager, tableMetaData, 1, fieldValues);        
-         
-        try {
-            // Insert the object into the database.
-            dummyObject.insert();
-            int key = dummyObject.getRowId();
-
-            // Set a new field value and push update to the database.
-            dummyObject.setFieldValue(dummyFieldName, secondValue);
-            dummyObject.update();
-            
-            // Load an object in read only mode.
-            DummyConditionsObject readOnlyObject = new DummyConditionsObject(connectionManager, tableMetaData, key);            
-            readOnlyObject.select();
-            try {
-                readOnlyObject.delete();
-                throw new RuntimeException("Should not get here.");
-            } catch (ConditionsObjectException x) {
-                System.out.println("Caught error: " + x.getMessage());
-            }
-
-            // Delete the object from the database.
-            dummyObject.delete();
-            
-            // Try to select a non-existant object to see that exception is thrown.
-            try {
-                dummyObject.select();
-                throw new RuntimeException("Should not get here.");
-            } catch (ConditionsObjectException x) {
-                System.out.println("Caught error: " + x.getMessage());
-            }
-                                    
-        } catch (Exception x) {
-            throw new RuntimeException(x);
-        }        
-    }
-    
-    public static class DummyConditionsObject extends AbstractConditionsObject {
-        
-        // Create a new object.
-        DummyConditionsObject(ConnectionManager connectionManager,
-                ConditionsTableMetaData tableMetaData,
-                int setId,
-                FieldValueMap fieldValues) {       
-            super(connectionManager, tableMetaData, setId, fieldValues);
-        }
-        
-        // Load an existing object in read only mode.
-        DummyConditionsObject(
-                ConnectionManager connectionManager,
-                ConditionsTableMetaData tableMetaData,
-                int rowId) {
-            super(connectionManager, tableMetaData, rowId, true);
-        }
-        
-    }
-    
-    /*
-    public class DummyConnectionManager extends ConnectionManager {
-        
-        public ResultSet query(String query) {
-            System.out.println("Dummy query method ...");
-            System.out.println(query);            
-            return null;
-        }
-        
-        public int update(String query) {
-            System.out.println("Dummy update method ...");
-            System.out.println(query);            
-            return 1;
-        }
-    }
-    */
-}

java/trunk/conditions/src/test/java/org/hps/conditions
ConditionsDriverTest.java 322 -> 323
--- java/trunk/conditions/src/test/java/org/hps/conditions/ConditionsDriverTest.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/test/java/org/hps/conditions/ConditionsDriverTest.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -21,7 +21,7 @@
  */
 public class ConditionsDriverTest extends TestCase {
     
-    /** This test file has a few events from the "good runs" of the Test Run. */
+    /** This test file has a few events from each of the "good runs" of the 2012 Test Run. */
     private static final String TEST_FILE_URL = "http://www.lcsim.org/test/hps/conditions_test.slcio";
     
     /** Answer key for number of bad channels by run. */
@@ -46,15 +46,23 @@
      */
     public void test() throws Exception {
 
-        // Cache file locally from URL.
+        // Cache a data file from the www.
         FileCache cache = new FileCache();
         File testFile = cache.getCachedFile(new URL(TEST_FILE_URL));
         
-        // Run the ConditionsDriver over test data containing multiple runs from the Test Run.
+        // Create the record loop.        
         LCSimLoop loop = new LCSimLoop();
+        
+        // Reconfigure the conditions system to override the manager created by LCSimLoop.
+        DatabaseConditionsManager conditionsManager = DatabaseConditionsManager.createInstance();
+        conditionsManager.configure("/org/hps/conditions/config/conditions_database_testrun_2013.xml");
+        
+        // Configure the loop.
         loop.setLCIORecordSource(testFile);
         loop.add(new ConditionsDriver());  
         loop.add(new SvtBadChannelChecker());
+        
+        // Run over all events.
         loop.loop(-1, null);
     }
     
@@ -64,7 +72,7 @@
      */
     class SvtBadChannelChecker extends Driver {
         
-        int currentRun = Integer.MIN_VALUE;
+        int currentRun = -1;
         
         /**
          * This method will check the number of bad channels against the answer key

java/trunk/conditions/src/test/java/org/hps/conditions
DatabaseConditionsManagerTest.java added at 323
--- java/trunk/conditions/src/test/java/org/hps/conditions/DatabaseConditionsManagerTest.java	                        (rev 0)
+++ java/trunk/conditions/src/test/java/org/hps/conditions/DatabaseConditionsManagerTest.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -0,0 +1,30 @@
+package org.hps.conditions;
+
+import junit.framework.TestCase;
+
+public class DatabaseConditionsManagerTest extends TestCase {
+    
+    String detectorName = "HPS-conditions-test";
+    int runNumber = 1351;
+    DatabaseConditionsManager conditionsManager = new DatabaseConditionsManager();
+    
+    public void setUp() {
+        // Create and configure the conditions manager.
+        conditionsManager = DatabaseConditionsManager.createInstance();
+        conditionsManager.configure("/org/hps/conditions/config/conditions_database_testrun_2013.xml");
+        conditionsManager.setDetectorName(detectorName);
+        conditionsManager.setRunNumber(runNumber);
+        conditionsManager.setup();
+    }
+    
+    @SuppressWarnings("rawtypes")
+    public void testLoad() {                       
+        // Load data from every table registered with the manager.
+        for (ConditionsTableMetaData metaData : conditionsManager.getTableMetaDataList()) {
+            System.out.println(">>>> loading conditions from table: " + metaData.getTableName());
+            ConditionsObjectCollection conditionsObjects = 
+                    conditionsManager.getConditionsData(metaData.getCollectionClass(), metaData.getTableName());
+            System.out.println("  " + conditionsObjects.getObjects().size() + " " + conditionsObjects.get(0).getClass().getSimpleName() + " objects were created.");
+        }
+    }        
+}

java/trunk/conditions/src/test/java/org/hps/conditions
DatabaseConditionsReaderTest.java removed after 322
--- java/trunk/conditions/src/test/java/org/hps/conditions/DatabaseConditionsReaderTest.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/test/java/org/hps/conditions/DatabaseConditionsReaderTest.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,46 +0,0 @@
-package org.hps.conditions;
-
-import java.io.PrintStream;
-
-import junit.framework.TestCase;
-
-import org.lcsim.conditions.CachedConditions;
-import org.lcsim.conditions.ConditionsManager;
-import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
-
-/**
- * This class tests the DatabaseConditionsReader on dummy data.
- * 
- * @author jeremym
- */
-public class DatabaseConditionsReaderTest extends TestCase {
-    
-    /** Example detector from hps-detectors. */
-    private final String detectorName = "HPS-conditions-test";
-    
-    /** Run number of conditions set. */
-    private final int runNumber = 777;
-        
-    /** Print output. */
-    private final PrintStream ps = System.out;
-    
-    /**
-     * Create the manager, load the detector, and then get the conditions meta-data
-     * for the selected conditions set.  Finally, use the sample converter to create 
-     * an SvtCalibrationConstants object from the database rows.
-     */
-    public void test() {
-	ConditionsManager manager = ConditionsManager.defaultInstance();
-	try {
-	    manager.setDetector(detectorName, runNumber);
-	} catch (ConditionsNotFoundException e) {
-	    throw new RuntimeException(e);
-	}
-			
-	CachedConditions<ConditionsRecordCollection> c2 = manager.getCachedConditions(ConditionsRecordCollection.class, "conditions_records");
-	ConditionsRecordCollection rc = c2.getCachedData();
-	for (ConditionsRecord r : rc) {
-	    ps.println(r.toString());
-	}	      
-    }    
-}

java/trunk/conditions/src/test/java/org/hps/conditions/beam
BeamCurrentTest.java 322 -> 323
--- java/trunk/conditions/src/test/java/org/hps/conditions/beam/BeamCurrentTest.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/test/java/org/hps/conditions/beam/BeamCurrentTest.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -1,5 +1,7 @@
 package org.hps.conditions.beam;
 
+import static org.hps.conditions.ConditionsTableConstants.BEAM_CURRENT;
+
 import java.io.File;
 import java.net.URL;
 import java.util.HashMap;
@@ -7,14 +9,14 @@
 
 import junit.framework.TestCase;
 
+import org.hps.conditions.DatabaseConditionsManager;
+import org.hps.conditions.beam.BeamCurrent.BeamCurrentCollection;
 import org.lcsim.conditions.ConditionsManager;
 import org.lcsim.event.EventHeader;
 import org.lcsim.util.Driver;
 import org.lcsim.util.cache.FileCache;
 import org.lcsim.util.loop.LCSimLoop;
 
-import static org.hps.conditions.ConditionsTableConstants.BEAM_CURRENT;
-
 /**
  * This test checks the beam current values by run.
  * @author Jeremy McCormick <[log in to unmask]>
@@ -50,8 +52,14 @@
         FileCache cache = new FileCache();
         File testFile = cache.getCachedFile(new URL(TEST_FILE_URL));
         
-        // Run the ConditionsDriver over test data containing multiple runs from the Test Run.
+        // Create the LCSimLoop.
         LCSimLoop loop = new LCSimLoop();
+        
+        // Reconfigure the conditions system to override the manager created by LCSimLoop.
+        DatabaseConditionsManager conditionsManager = DatabaseConditionsManager.createInstance();
+        conditionsManager.configure("/org/hps/conditions/config/conditions_database_testrun_2013.xml");
+        
+        // Configure and run the loop.
         loop.setLCIORecordSource(testFile);
         loop.add(new BeamCurrentChecker());
         loop.loop(-1, null);

java/trunk/conditions/src/test/java/org/hps/conditions/ecal
EcalConditionsConverterTest.java 322 -> 323
--- java/trunk/conditions/src/test/java/org/hps/conditions/ecal/EcalConditionsConverterTest.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/test/java/org/hps/conditions/ecal/EcalConditionsConverterTest.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -2,10 +2,8 @@
 
 import junit.framework.TestCase;
 
-import org.lcsim.conditions.ConditionsManager;
-import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
 import org.hps.conditions.ConnectionManager;
-import org.lcsim.util.loop.LCSimConditionsManagerImplementation;
+import org.hps.conditions.DatabaseConditionsManager;
 
 /**
  * Tests that a {@link EcalConditions} objects loads without errors.
@@ -13,25 +11,24 @@
  */
 public class EcalConditionsConverterTest extends TestCase {
     
-    /** An example detector from hps-detectors. */
-    private static final String detectorName = "HPS-conditions-test";
+    final String detectorName = "HPS-conditions-test";
+    final int runNumber = 777;
     
-    /** The run number of the conditions set in the database. */
-    private static final int runNumber = 777;
-        
+    DatabaseConditionsManager conditionsManager = new DatabaseConditionsManager();
+    
+    public void setUp() {
+        // Create and configure the conditions manager.
+        conditionsManager = DatabaseConditionsManager.createInstance();
+        conditionsManager.configure("/org/hps/conditions/config/conditions_database_testrun_2013.xml");
+        conditionsManager.setDetectorName(detectorName);
+        conditionsManager.setRunNumber(runNumber);
+        conditionsManager.setup();
+    }
+            
     public void test() {
-        
-        // Setup the conditions manager.        
-        ConditionsManager.setDefaultConditionsManager(new LCSimConditionsManagerImplementation());
-        ConditionsManager manager = ConditionsManager.defaultInstance();
-        try {
-            manager.setDetector(detectorName, runNumber);
-        } catch (ConditionsNotFoundException e) {
-            throw new RuntimeException(e);
-        }
-                                        
+                                                
         // Test that the manager gets ECAL conditions.
-        EcalConditions conditions = manager.getCachedConditions(EcalConditions.class, "ecal_conditions").getCachedData();        
+        EcalConditions conditions = conditionsManager.getCachedConditions(EcalConditions.class, "ecal_conditions").getCachedData();        
         assertNotNull(conditions);
         System.out.println(conditions);
         

java/trunk/conditions/src/test/java/org/hps/conditions/ecal
EcalConditionsLoaderTest.java 322 -> 323
--- java/trunk/conditions/src/test/java/org/hps/conditions/ecal/EcalConditionsLoaderTest.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/test/java/org/hps/conditions/ecal/EcalConditionsLoaderTest.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -4,14 +4,11 @@
 
 import junit.framework.TestCase;
 
-import org.lcsim.conditions.ConditionsManager;
-import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
+import org.hps.conditions.ConnectionManager;
+import org.hps.conditions.DatabaseConditionsManager;
 import org.lcsim.detector.converter.compact.EcalCrystal;
 import org.lcsim.geometry.Detector;
-import org.lcsim.util.loop.LCSimConditionsManagerImplementation;
 
-import org.hps.conditions.ConnectionManager;
-
 /**
  * This test loads ECal conditions data onto the detector 
  * and checks some of the results for basic validity.  
@@ -41,26 +38,28 @@
     
     // The total number of crystals that should be processed.
     private static final int CRYSTAL_COUNT = 442;
+    
+    DatabaseConditionsManager conditionsManager;
+    
+    public void setUp() {
+        // Create and configure the conditions manager.
+        conditionsManager = DatabaseConditionsManager.createInstance();
+        conditionsManager.configure("/org/hps/conditions/config/conditions_database_testrun_2013.xml");
+        conditionsManager.setDetectorName(detectorName);
+        conditionsManager.setRunNumber(runNumber);
+        conditionsManager.setup();
+    }
                                            
     /**
      * Load SVT conditions data onto the detector and perform basic checks afterwards.
      */
     public void test() {
-        
-        // Setup the conditions manager.        
-        ConditionsManager.setDefaultConditionsManager(new LCSimConditionsManagerImplementation());
-        ConditionsManager manager = ConditionsManager.defaultInstance();
-        try {
-            manager.setDetector(detectorName, runNumber);
-        } catch (ConditionsNotFoundException e) {
-            throw new RuntimeException(e);
-        }
-        
+                
         // Get the detector.
-        Detector detector = manager.getCachedConditions(Detector.class, "compact.xml").getCachedData();
+        Detector detector = conditionsManager.getCachedConditions(Detector.class, "compact.xml").getCachedData();
         
         // Get conditions.
-        EcalConditions conditions = manager.getCachedConditions(EcalConditions.class, "ecal_conditions").getCachedData();
+        EcalConditions conditions = conditionsManager.getCachedConditions(EcalConditions.class, "ecal_conditions").getCachedData();
 
         // Load conditions onto detector.
         EcalConditionsLoader loader = new EcalConditionsLoader();

java/trunk/conditions/src/test/java/org/hps/conditions/svt
SvtConditionsConverterTest.java 322 -> 323
--- java/trunk/conditions/src/test/java/org/hps/conditions/svt/SvtConditionsConverterTest.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/test/java/org/hps/conditions/svt/SvtConditionsConverterTest.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -2,11 +2,8 @@
 
 import junit.framework.TestCase;
 
-import org.lcsim.conditions.ConditionsManager;
-import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
-import org.lcsim.util.loop.LCSimConditionsManagerImplementation;
-
 import org.hps.conditions.ConnectionManager;
+import org.hps.conditions.DatabaseConditionsManager;
 
 /**
  * This test loads and prints {@link SvtConditions}, which internally uses the  
@@ -21,23 +18,25 @@
     
     /** The run number of the conditions set in the database. */
     private static final int runNumber = 777;
-            
+    
+    DatabaseConditionsManager conditionsManager;
+    
+    public void setUp() {
+        // Create and configure the conditions manager.
+        conditionsManager = DatabaseConditionsManager.createInstance();
+        conditionsManager.configure("/org/hps/conditions/config/conditions_database_testrun_2013.xml");
+        conditionsManager.setDetectorName(detectorName);
+        conditionsManager.setRunNumber(runNumber);
+        conditionsManager.setup();
+    }
+    
     /**
      * Load and print all SVT conditions for a certain run number.
      */
     public void test() {
-        
-        // Setup the conditions manager.        
-        ConditionsManager.setDefaultConditionsManager(new LCSimConditionsManagerImplementation());
-        ConditionsManager manager = ConditionsManager.defaultInstance();
-        try {
-            manager.setDetector(detectorName, runNumber);
-        } catch (ConditionsNotFoundException e) {
-            throw new RuntimeException(e);
-        }
-        
+                
         // Get conditions and print them out.
-        SvtConditions svt = manager.getCachedConditions(SvtConditions.class, "svt_conditions").getCachedData();
+        SvtConditions svt = conditionsManager.getCachedConditions(SvtConditions.class, "svt_conditions").getCachedData();
         assertNotNull(svt);
         System.out.println(svt);
         System.out.println("Successfully loaded SVT conditions!");

java/trunk/conditions/src/test/java/org/hps/conditions/svt
SvtConditionsLoaderTest.java 322 -> 323
--- java/trunk/conditions/src/test/java/org/hps/conditions/svt/SvtConditionsLoaderTest.java	2014-03-19 08:01:41 UTC (rev 322)
+++ java/trunk/conditions/src/test/java/org/hps/conditions/svt/SvtConditionsLoaderTest.java	2014-03-20 17:38:04 UTC (rev 323)
@@ -4,14 +4,11 @@
 
 import junit.framework.TestCase;
 
-import org.lcsim.conditions.ConditionsManager;
-import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
+import org.hps.conditions.ConnectionManager;
+import org.hps.conditions.DatabaseConditionsManager;
 import org.lcsim.detector.tracker.silicon.HpsSiSensor;
 import org.lcsim.geometry.Detector;
-import org.lcsim.util.loop.LCSimConditionsManagerImplementation;
 
-import org.hps.conditions.ConnectionManager;
-
 /**
  * This test loads {@link SvtConditions} data onto the detector and then checks that 
  * all channels of each sensor have non-zero data values for applicable parameters. 
@@ -37,26 +34,28 @@
     
     // Total number of sensors that should be processed.
     private static final int SENSOR_COUNT = 12800;
-            
+    
+    DatabaseConditionsManager conditionsManager;
+    
+    public void setUp() {
+        // Create and configure the conditions manager.
+        conditionsManager = DatabaseConditionsManager.createInstance();
+        conditionsManager.configure("/org/hps/conditions/config/conditions_database_testrun_2013.xml");
+        conditionsManager.setDetectorName(detectorName);
+        conditionsManager.setRunNumber(runNumber);
+        conditionsManager.setup();
+    }
+    
     /**
      * Load SVT conditions data onto the detector and perform basic checks afterwards.
      */
     public void test() {
-        
-        // Setup the conditions manager.        
-        ConditionsManager.setDefaultConditionsManager(new LCSimConditionsManagerImplementation());
-        ConditionsManager manager = ConditionsManager.defaultInstance();
-        try {
-            manager.setDetector(detectorName, runNumber);
-        } catch (ConditionsNotFoundException e) {
-            throw new RuntimeException(e);
-        }
-        
+               
         // Get the detector.
-        Detector detector = manager.getCachedConditions(Detector.class, "compact.xml").getCachedData();
+        Detector detector = conditionsManager.getCachedConditions(Detector.class, "compact.xml").getCachedData();
         
         // Get conditions.
-        SvtConditions conditions = manager.getCachedConditions(SvtConditions.class, "svt_conditions").getCachedData();
+        SvtConditions conditions = conditionsManager.getCachedConditions(SvtConditions.class, "svt_conditions").getCachedData();
 
         // Load conditions onto detector.
         SvtConditionsLoader loader = new SvtConditionsLoader();
SVNspam 0.1