Author: [log in to unmask]
Date: Tue Oct 6 18:49:26 2015
New Revision: 3780
Log:
[HPSJAVA-616] Cleanup logging API usage.
Added:
java/trunk/logging/
java/trunk/logging/pom.xml
java/trunk/logging/src/
java/trunk/logging/src/main/
java/trunk/logging/src/main/java/
java/trunk/logging/src/main/java/org/
java/trunk/logging/src/main/java/org/hps/
java/trunk/logging/src/main/java/org/hps/logging/
java/trunk/logging/src/main/java/org/hps/logging/config/
java/trunk/logging/src/main/java/org/hps/logging/config/DefaultLoggingConfig.java
java/trunk/logging/src/main/resources/
java/trunk/logging/src/main/resources/org/
java/trunk/logging/src/main/resources/org/hps/
java/trunk/logging/src/main/resources/org/hps/logging/
java/trunk/logging/src/main/resources/org/hps/logging/config/
java/trunk/logging/src/main/resources/org/hps/logging/config/logging.properties
Modified:
java/trunk/analysis/src/main/java/org/hps/analysis/dataquality/DQMDatabaseManager.java
java/trunk/analysis/src/main/java/org/hps/analysis/trigger/TriggerTurnOnDriver.java
java/trunk/analysis/src/main/java/org/hps/analysis/trigger/TriggerTurnOnSSPDriver.java
java/trunk/conditions/pom.xml
java/trunk/conditions/src/main/java/org/hps/conditions/api/AbstractConditionsObjectConverter.java
java/trunk/conditions/src/main/java/org/hps/conditions/cli/AddCommand.java
java/trunk/conditions/src/main/java/org/hps/conditions/cli/CommandLineTool.java
java/trunk/conditions/src/main/java/org/hps/conditions/cli/LoadCommand.java
java/trunk/conditions/src/main/java/org/hps/conditions/cli/PrintCommand.java
java/trunk/conditions/src/main/java/org/hps/conditions/cli/RunSummaryCommand.java
java/trunk/conditions/src/main/java/org/hps/conditions/cli/TagCommand.java
java/trunk/conditions/src/main/java/org/hps/conditions/database/DatabaseConditionsManager.java
java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsConverter.java
java/trunk/conditions/src/main/java/org/hps/conditions/svt/AbstractSvtConditionsConverter.java
java/trunk/conditions/src/main/java/org/hps/conditions/svt/CalibrationHandler.java
java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtBiasConditionsLoader.java
java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtBiasMyaDumpReader.java
java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtConditionsLoader.java
java/trunk/conditions/src/test/java/org/hps/conditions/RunNumberTest.java
java/trunk/conditions/src/test/java/org/hps/conditions/api/ConditionsTagTest.java
java/trunk/conditions/src/test/java/org/hps/conditions/database/DatabaseConditionsManagerTest.java
java/trunk/conditions/src/test/java/org/hps/conditions/svt/SvtBadChannelTest.java
java/trunk/conditions/src/test/java/org/hps/conditions/svt/SvtTimingConstantsTest.java
java/trunk/conditions/src/test/java/org/hps/conditions/svt/TestRunSvtBadChannelsTest.java
java/trunk/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java
java/trunk/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java
java/trunk/crawler/src/main/java/org/hps/crawler/FileFormatFilter.java
java/trunk/crawler/src/main/java/org/hps/crawler/RunSummaryMap.java
java/trunk/datacat-client/src/main/java/org/hps/datacat/client/DatacatClientImpl.java
java/trunk/detector-model/src/main/java/org/hps/detector/svt/SvtDetectorSetup.java
java/trunk/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerBuilder.java
java/trunk/detector-model/src/main/java/org/lcsim/geometry/compact/converter/ReadSurveyOutput.java
java/trunk/detector-model/src/test/java/org/hps/detector/SvtAlignmentTest.java
java/trunk/detector-model/src/test/java/org/lcsim/detector/converter/compact/HPSTracker2ConverterTest.java
java/trunk/distribution/pom.xml
java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/AbstractClusterer.java
java/trunk/ecal-recon/src/test/java/org/hps/recon/ecal/cluster/ClustererTest.java
java/trunk/evio/src/main/java/org/hps/evio/AbstractSvtEvioReader.java
java/trunk/evio/src/main/java/org/hps/evio/EcalEvioReader.java
java/trunk/evio/src/main/java/org/hps/evio/EvioToLcio.java
java/trunk/evio/src/main/java/org/hps/evio/LCSimEngRunEventBuilder.java
java/trunk/evio/src/main/java/org/hps/evio/LCSimTestRunEventBuilder.java
java/trunk/evio/src/main/java/org/hps/evio/SvtEventHeaderChecker.java
java/trunk/evio/src/main/java/org/hps/evio/SvtEvioReader.java
java/trunk/evio/src/test/java/org/hps/evio/SvtEvioReaderTest.java
java/trunk/integration-tests/src/test/java/org/hps/APrimeReconTest.java
java/trunk/integration-tests/src/test/java/org/hps/MCFilteredReconTest.java
java/trunk/integration-tests/src/test/java/org/hps/MockDataReconTest.java
java/trunk/integration-tests/src/test/java/org/hps/ReadoutNoPileupTest.java
java/trunk/integration-tests/src/test/java/org/hps/ReadoutToEvioTest.java
java/trunk/integration-tests/src/test/java/org/hps/ReadoutToLcioTest.java
java/trunk/integration-tests/src/test/java/org/hps/SimpleMCReconTest.java
java/trunk/integration-tests/src/test/java/org/hps/SimpleSvtReadoutTest.java
java/trunk/integration-tests/src/test/java/org/hps/SteeringFilesTest.java
java/trunk/integration-tests/src/test/java/org/hps/TestRunReadoutToEvioTest.java
java/trunk/monitoring-app/src/main/java/org/hps/monitoring/application/model/AbstractModel.java
java/trunk/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/GblTrackingReconstructionPlots.java
java/trunk/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SampleZeroHVBiasChecker.java
java/trunk/monitoring-util/src/main/java/org/hps/monitoring/plotting/ExportPdf.java
java/trunk/parent/pom.xml
java/trunk/pom.xml
java/trunk/recon/src/main/java/org/hps/recon/filtering/SvtAlignmentFilter.java
java/trunk/record-util/src/main/java/org/hps/record/epics/EpicsEvioProcessor.java
java/trunk/record-util/src/main/java/org/hps/record/epics/EpicsRunProcessor.java
java/trunk/record-util/src/main/java/org/hps/record/evio/EvioEventSkimmer.java
java/trunk/record-util/src/main/java/org/hps/record/evio/EvioFileMetadataAdapter.java
java/trunk/record-util/src/main/java/org/hps/record/evio/EvioFileMetadataProcessor.java
java/trunk/record-util/src/main/java/org/hps/record/evio/EvioFileProducer.java
java/trunk/record-util/src/main/java/org/hps/record/evio/EvioFileUtilities.java
java/trunk/record-util/src/main/java/org/hps/record/evio/EvioLoopAdapter.java
java/trunk/record-util/src/main/java/org/hps/record/scalers/ScalersEvioProcessor.java
java/trunk/record-util/src/main/java/org/hps/record/triggerbank/SSPCluster.java
java/trunk/record-util/src/main/java/org/hps/record/triggerbank/SSPTrigger.java
java/trunk/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java
java/trunk/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java
java/trunk/run-database/src/main/java/org/hps/run/database/RunManager.java
java/trunk/run-database/src/main/java/org/hps/run/database/RunProcessor.java
java/trunk/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java
java/trunk/tracking/src/main/java/org/hps/recon/tracking/TrackDataDriver.java
java/trunk/tracking/src/main/java/org/hps/recon/tracking/gbl/HpsGblRefitter.java
java/trunk/tracking/src/main/java/org/hps/recon/tracking/gbl/MakeGblTracks.java
java/trunk/users/src/main/java/org/hps/users/jeremym/EvioFileScanner.java
java/trunk/users/src/main/java/org/hps/users/phansson/ReadSurveyRotations.java
java/trunk/users/src/main/java/org/hps/users/phansson/SvtHeaderAnalysisDriver.java
java/trunk/users/src/main/java/org/hps/users/phansson/SvtOldHeaderAnalysisDriver.java
java/trunk/users/src/main/java/org/hps/users/phansson/TrackExtrapolationTestDriver.java
java/trunk/users/src/main/java/org/hps/users/phansson/TrackingReconstructionPlots.java
Modified: java/trunk/analysis/src/main/java/org/hps/analysis/dataquality/DQMDatabaseManager.java
=============================================================================
--- java/trunk/analysis/src/main/java/org/hps/analysis/dataquality/DQMDatabaseManager.java (original)
+++ java/trunk/analysis/src/main/java/org/hps/analysis/dataquality/DQMDatabaseManager.java Tue Oct 6 18:49:26 2015
@@ -17,117 +17,87 @@
import org.hps.conditions.database.ConnectionParameters;
/**
- * Manages the DQM database connection and access
- * re-uses ConnectionParameters and TableMetaData classes from conditionsDB
- * as they do exactly what we want here.
+ * Manages the DQM database connection and access re-uses ConnectionParameters and TableMetaData classes from
+ * conditionsDB as they do exactly what we want here.
+ *
* @author Matt Graham <[log in to unmask]>
- */
+ */
@SuppressWarnings("rawtypes")
-public class DQMDatabaseManager{
+public class DQMDatabaseManager {
int runNumber = -1;
String detectorName;
- List<TableMetaData> tableMetaData;
+ List<TableMetaData> tableMetaData;
File connectionPropertiesFile;
-
- static Logger logger = null;
+
+ private static final Logger LOGGER = Logger.getLogger(DQMDatabaseManager.class.getPackage().getName());
ConnectionParameters connectionParameters;
Connection connection;
String dqmTableName;
boolean wasConfigured = false;
boolean isConnected = false;
-
+
// FIXME: Prefer using the ConditionsManager's instance if possible.
- static private DQMDatabaseManager instance;
+ static private DQMDatabaseManager instance;
/**
* Class constructor, which is only package accessible.
*/
DQMDatabaseManager() {
System.out.println("Making the instance of DQMDatabaseManager");
-
- }
-
- /**
- * Simple log formatter for this class.
- */
- private static final class LogFormatter extends Formatter {
-
- public String format(LogRecord record) {
- StringBuilder sb = new StringBuilder();
- sb.append(record.getLoggerName() + " [ " + record.getLevel() + " ] " + record.getMessage() + '\n');
- return sb.toString();
- }
- }
-
- /**
- * Setup the logger for this class, with initial level of ALL.
- */
- static {
- logger = Logger.getLogger(DQMDatabaseManager.class.getSimpleName());
- logger.setUseParentHandlers(false);
- logger.setLevel(Level.ALL);
- ConsoleHandler handler = new ConsoleHandler();
- handler.setLevel(Level.ALL);
- handler.setFormatter(new LogFormatter());
- logger.addHandler(handler);
- logger.config("logger initialized with level " + handler.getLevel());
- }
-
-
- /**
- * Register this conditions manager as the global default.
- */
- void register() {
+
+ }
+
+ /**
+ * Register this conditions manager as the global default.
+ */
+ void register() {
instance = this;
}
-
-
- /**
- * Get the static instance of this class, which must have been
- * registered first from a call to {@link #register()}.
+
+ /**
+ * Get the static instance of this class, which must have been registered first from a call to {@link #register()}.
+ *
* @return The static instance of the manager.
*/
public static DQMDatabaseManager getInstance() {
return instance;
}
-
-
-
+
public void setup() {
if (!isConnected())
openConnection();
else
- logger.log(Level.CONFIG, "using existing connection {0}", connectionParameters.getConnectionString());
- }
-
-
-
+ LOGGER.log(Level.CONFIG, "using existing connection {0}", connectionParameters.getConnectionString());
+ }
+
/**
* Set the path to a properties file containing connection settings.
+ *
* @param file The properties file
*/
public void setConnectionProperties(File file) {
- logger.config("setting connection prop file " + file.getPath());
+ LOGGER.config("setting connection prop file " + file.getPath());
if (!file.exists())
- throw new IllegalArgumentException("The connection properties file does not exist: " + connectionPropertiesFile.getPath());
- connectionParameters = ConnectionParameters.fromProperties(file);
- }
-
+ throw new IllegalArgumentException("The connection properties file does not exist: "
+ + connectionPropertiesFile.getPath());
+ connectionParameters = ConnectionParameters.fromProperties(file);
+ }
+
/**
* Set the connection parameters from an embedded resource.
+ *
* @param resource The classpath resource
*/
public void setConnectionResource(String resource) {
- logger.config("setting connection resource " + resource);
- connectionParameters = ConnectionParameters.fromResource(resource);
- }
-
-
+ LOGGER.config("setting connection resource " + resource);
+ connectionParameters = ConnectionParameters.fromResource(resource);
+ }
/**
* Get the next collection ID for a database conditions table.
+ *
* @param tableName The name of the table.
* @return The next collection ID.
*/
@@ -145,12 +115,13 @@
} catch (SQLException e) {
throw new RuntimeException(e);
}
- logger.fine("new collection ID " + collectionId + " created for table " + tableName);
+ LOGGER.fine("new collection ID " + collectionId + " created for table " + tableName);
return collectionId;
}
/**
* Get the list of table meta data.
+ *
* @return The list of table meta data.
*/
public List<TableMetaData> getTableMetaDataList() {
@@ -159,6 +130,7 @@
/**
* Find a table's meta data.
+ *
* @param name The name of the table.
* @return The table's meta data or null if does not exist.
*/
@@ -169,9 +141,10 @@
}
return null;
}
-
+
/**
* Find meta data by collection class type.
+ *
* @param type The collection class.
* @return The table meta data.
*/
@@ -183,14 +156,15 @@
}
return null;
}
-
- /**
- * This method can be used to perform a database SELECT query.
+
+ /**
+ * This method can be used to perform a database SELECT query.
+ *
* @param query The SQL query string.
* @return The ResultSet from the query or null.
*/
public ResultSet selectQuery(String query) {
- logger.fine(query);
+ LOGGER.fine(query);
ResultSet result = null;
Statement statement = null;
try {
@@ -198,72 +172,74 @@
result = statement.executeQuery(query);
} catch (SQLException x) {
throw new RuntimeException("Error in query: " + query, x);
- }
+ }
return result;
}
-
+
/**
* Perform a SQL query with an update command like INSERT, DELETE or UPDATE.
+ *
* @param query The SQL query string.
* @return The keys of the rows affected.
*/
- public List<Integer> updateQuery(String query) {
- logger.fine(query);
+ public List<Integer> updateQuery(String query) {
+ LOGGER.fine(query);
List<Integer> keys = new ArrayList<Integer>();
Statement statement = null;
try {
statement = connection.createStatement();
- statement.executeUpdate(query, Statement.RETURN_GENERATED_KEYS);
- ResultSet resultSet = statement.getGeneratedKeys();
+ statement.executeUpdate(query, Statement.RETURN_GENERATED_KEYS);
+ ResultSet resultSet = statement.getGeneratedKeys();
while (resultSet.next()) {
int key = resultSet.getInt(1);
keys.add(key);
}
} catch (SQLException x) {
- throw new RuntimeException("Error in SQL query: " + query, x);
+ throw new RuntimeException("Error in SQL query: " + query, x);
} finally {
close(statement);
}
return keys;
}
-
+
/**
* Set the log level.
+ *
* @param level The log level.
*/
public void setLogLevel(Level level) {
- logger.config("setting log level to " + level);
- logger.setLevel(level);
- logger.getHandlers()[0].setLevel(level);
- }
-
- /**
- * Get the name of the DQM table
+ LOGGER.config("setting log level to " + level);
+ LOGGER.setLevel(level);
+ }
+
+ /**
+ * Get the name of the DQM table
*/
public String getDQMTableName() {
return dqmTableName;
}
-
-
/**
* Return true if the connection parameters are valid, e.g. non-null.
+ *
* @return true if connection parameters are non-null
*/
public boolean hasConnectionParameters() {
return connectionParameters != null;
}
-
+
/**
* Return if the manager was configured e.g. from an XML configuration file.
+ *
* @return true if manager was configured
*/
public boolean wasConfigured() {
return wasConfigured;
}
-
+
/**
* Close a JDBC <code>Statement</code>.
+ *
* @param statement the Statement to close
*/
static void close(Statement statement) {
@@ -272,36 +248,36 @@
if (!statement.isClosed())
statement.close();
else
- logger.log(Level.WARNING, "Statement is already closed!");
+ LOGGER.log(Level.WARNING, "Statement is already closed!");
} catch (SQLException x) {
throw new RuntimeException("Failed to close statement.", x);
}
}
}
-
+
/**
* Close a JDBC <code>ResultSet</code>, or rather the Statement connected to it.
+ *
* @param resultSet the ResultSet to close
*/
- static void close(ResultSet resultSet) {
+ static void close(ResultSet resultSet) {
if (resultSet != null) {
try {
Statement statement = resultSet.getStatement();
if (!statement.isClosed())
statement.close();
else
- logger.log(Level.WARNING, "Statement is already closed!");
+ LOGGER.log(Level.WARNING, "Statement is already closed!");
} catch (SQLException x) {
throw new RuntimeException("Failed to close statement.", x);
}
}
}
-
+
private boolean isConnected() {
return isConnected;
}
-
/**
* Open the database connection.
*/
@@ -309,22 +285,22 @@
if (connectionParameters == null)
throw new RuntimeException("The connection parameters were not configured.");
connection = connectionParameters.createConnection();
- logger.log(Level.CONFIG, "created connection {0}", connectionParameters.getConnectionString());
+ LOGGER.log(Level.CONFIG, "created connection {0}", connectionParameters.getConnectionString());
isConnected = true;
}
-
+
/**
* Close the database connection.
*/
public void closeConnection() {
- logger.config("closing connection");
+ LOGGER.config("closing connection");
if (connection != null) {
try {
if (!connection.isClosed()) {
connection.close();
- logger.config("connection closed");
+ LOGGER.config("connection closed");
} else {
- logger.config("connection already closed");
+ LOGGER.config("connection already closed");
}
} catch (SQLException e) {
throw new RuntimeException(e);
@@ -333,14 +309,11 @@
connection = null;
connectionParameters = null;
}
-
+
@Override
public void finalize() {
if (isConnected())
closeConnection();
}
-
-
-
-
+
}
Modified: java/trunk/analysis/src/main/java/org/hps/analysis/trigger/TriggerTurnOnDriver.java
=============================================================================
--- java/trunk/analysis/src/main/java/org/hps/analysis/trigger/TriggerTurnOnDriver.java (original)
+++ java/trunk/analysis/src/main/java/org/hps/analysis/trigger/TriggerTurnOnDriver.java Tue Oct 6 18:49:26 2015
@@ -10,7 +10,6 @@
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.hps.analysis.trigger.util.SinglesTrigger;
@@ -19,14 +18,12 @@
import org.hps.record.triggerbank.AbstractIntData;
import org.hps.record.triggerbank.SSPCluster;
import org.hps.record.triggerbank.SSPData;
-import org.hps.util.BasicLogFormatter;
import org.lcsim.event.Cluster;
import org.lcsim.event.EventHeader;
import org.lcsim.event.GenericObject;
import org.lcsim.geometry.Detector;
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
-import org.lcsim.util.log.LogUtil;
/**
* @author Per Hansson Adrian <[log in to unmask]>, Matt Solt <[log in to unmask]>
@@ -34,7 +31,7 @@
*/
public class TriggerTurnOnDriver extends Driver {
- private static Logger logger = LogUtil.create(TriggerTurnOnDriver.class, new BasicLogFormatter(), Level.INFO);
+ private static Logger LOGGER = Logger.getLogger(TriggerTurnOnDriver.class.getPackage().getName());
private final String ecalClusterCollectionName = "EcalClustersCorr";
IPlotter plotter;
IPlotter plotter2;
@@ -121,18 +118,18 @@
if(!triggerDecisions.passed(TriggerType.PULSER))
return;
- logger.fine("pulser trigger fired");
+ LOGGER.fine("pulser trigger fired");
if(triggerDecisions.passed(TriggerType.SINGLES1))
- logger.fine("Singles1 trigger fired");
+ LOGGER.fine("Singles1 trigger fired");
if(triggerDecisions.passed(TriggerType.SINGLES1_SIM)) {
- logger.fine("Sim Singles1 trigger fired");
+ LOGGER.fine("Sim Singles1 trigger fired");
nSimSingles1++;
}
if(triggerDecisions.passed(TriggerType.SINGLES1_RESULTS)) {
- logger.fine("Results Singles1 trigger fired");
+ LOGGER.fine("Results Singles1 trigger fired");
nResultSingles1++;
}
@@ -172,7 +169,7 @@
// fill numerator
if (triggerDecisions.passed(TriggerType.SINGLES1_SIM)) {
- logger.fine("Eureka. They both fired.");
+ LOGGER.fine("Eureka. They both fired.");
if(clusterEMax != null) {
clusterE_RandomSingles1.fill(clusterEMax.getEnergy());
if(clusters.size() == 1) {
@@ -192,8 +189,8 @@
@Override
protected void endOfData() {
- logger.info("Processed " + nEventsProcessed);
- logger.info("nResSingles1 " + nResultSingles1 + " nSimSingles1 " + nSimSingles1);
+ LOGGER.info("Processed " + nEventsProcessed);
+ LOGGER.info("nResSingles1 " + nResultSingles1 + " nSimSingles1 " + nSimSingles1);
clusterE_RandomSingles1_trigEff = aida.histogramFactory().divide("trigEff", clusterE_RandomSingles1, clusterE_Random);
clusterEOne_RandomSingles1_trigEff = aida.histogramFactory().divide("trigEffEone", clusterEOne_RandomSingles1, clusterEOne_Random);
int r = 0;
@@ -204,7 +201,7 @@
r++;
}
}
- logger.info("entries in clusterE_RandomSingles1_trigEff: " + Integer.toString(clusterE_RandomSingles1_trigEff.allEntries()));
+ LOGGER.info("entries in clusterE_RandomSingles1_trigEff: " + Integer.toString(clusterE_RandomSingles1_trigEff.allEntries()));
plotter.region(2).plot(clusterE_RandomSingles1_trigEff);
plotter2.region(2).plot(clusterEOne_RandomSingles1_trigEff);
Modified: java/trunk/analysis/src/main/java/org/hps/analysis/trigger/TriggerTurnOnSSPDriver.java
=============================================================================
--- java/trunk/analysis/src/main/java/org/hps/analysis/trigger/TriggerTurnOnSSPDriver.java (original)
+++ java/trunk/analysis/src/main/java/org/hps/analysis/trigger/TriggerTurnOnSSPDriver.java Tue Oct 6 18:49:26 2015
@@ -10,7 +10,6 @@
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.hps.analysis.trigger.util.SinglesTrigger;
@@ -19,14 +18,11 @@
import org.hps.record.triggerbank.AbstractIntData;
import org.hps.record.triggerbank.SSPCluster;
import org.hps.record.triggerbank.SSPData;
-import org.hps.util.BasicLogFormatter;
-import org.lcsim.event.Cluster;
import org.lcsim.event.EventHeader;
import org.lcsim.event.GenericObject;
import org.lcsim.geometry.Detector;
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
-import org.lcsim.util.log.LogUtil;
/**
* @author Per Hansson Adrian <[log in to unmask]>, Matt Solt <[log in to unmask]>
@@ -34,7 +30,7 @@
*/
public class TriggerTurnOnSSPDriver extends Driver {
- private static Logger logger = LogUtil.create(TriggerTurnOnSSPDriver.class, new BasicLogFormatter(), Level.INFO);
+ private static Logger LOGGER = Logger.getLogger(TriggerTurnOnSSPDriver.class.getPackage().getName());
private final String ecalClusterCollectionName = "EcalClustersCorr";
IPlotter plotter;
IPlotter plotter2;
@@ -121,18 +117,18 @@
if(!triggerDecisions.passed(TriggerType.PULSER))
return;
- logger.fine("pulser trigger fired");
+ LOGGER.fine("pulser trigger fired");
if(triggerDecisions.passed(TriggerType.SINGLES1))
- logger.fine("Singles1 trigger fired");
+ LOGGER.fine("Singles1 trigger fired");
if(triggerDecisions.passed(TriggerType.SINGLES1_SIM)) {
- logger.fine("Sim Singles1 trigger fired");
+ LOGGER.fine("Sim Singles1 trigger fired");
nSimSingles1++;
}
if(triggerDecisions.passed(TriggerType.SINGLES1_RESULTS)) {
- logger.fine("Results Singles1 trigger fired");
+ LOGGER.fine("Results Singles1 trigger fired");
nResultSingles1++;
}
@@ -171,7 +167,7 @@
// fill numerator
if (triggerDecisions.passed(TriggerType.SINGLES1_SIM)) {
- logger.fine("Eureka. They both fired.");
+ LOGGER.fine("Eureka. They both fired.");
if(clusterEMax != null) {
clusterE_RandomSingles1.fill(clusterEMax.getEnergy());
if(clusters.size() == 1) {
@@ -190,8 +186,8 @@
@Override
protected void endOfData() {
- logger.info("Processed " + nEventsProcessed);
- logger.info("nResSingles1 " + nResultSingles1 + " nSimSingles1 " + nSimSingles1);
+ LOGGER.info("Processed " + nEventsProcessed);
+ LOGGER.info("nResSingles1 " + nResultSingles1 + " nSimSingles1 " + nSimSingles1);
clusterE_RandomSingles1_trigEff = aida.histogramFactory().divide("SSP_trigEff", clusterE_RandomSingles1, clusterE_Random);
clusterEOne_RandomSingles1_trigEff = aida.histogramFactory().divide("SSP_trigEffEone", clusterEOne_RandomSingles1, clusterEOne_Random);
int r = 0;
@@ -202,7 +198,7 @@
r++;
}
}
- logger.info("entries in clusterE_RandomSingles1_trigEff: " + Integer.toString(clusterE_RandomSingles1_trigEff.allEntries()));
+ LOGGER.info("entries in clusterE_RandomSingles1_trigEff: " + Integer.toString(clusterE_RandomSingles1_trigEff.allEntries()));
plotter.region(2).plot(clusterE_RandomSingles1_trigEff);
plotter2.region(2).plot(clusterEOne_RandomSingles1_trigEff);
Modified: java/trunk/conditions/pom.xml
=============================================================================
--- java/trunk/conditions/pom.xml (original)
+++ java/trunk/conditions/pom.xml Tue Oct 6 18:49:26 2015
@@ -52,6 +52,11 @@
<artifactId>hps-detector-data</artifactId>
</dependency>
<dependency>
+ <groupId>org.hps</groupId>
+ <artifactId>hps-logging</artifactId>
+ <scope>runtime</scope>
+ </dependency>
+ <dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.26</version>
Modified: java/trunk/conditions/src/main/java/org/hps/conditions/api/AbstractConditionsObjectConverter.java
=============================================================================
--- java/trunk/conditions/src/main/java/org/hps/conditions/api/AbstractConditionsObjectConverter.java (original)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/api/AbstractConditionsObjectConverter.java Tue Oct 6 18:49:26 2015
@@ -1,6 +1,7 @@
package org.hps.conditions.api;
import java.sql.SQLException;
+import java.util.logging.Logger;
import org.hps.conditions.api.ConditionsRecord.ConditionsRecordCollection;
import org.hps.conditions.database.DatabaseConditionsManager;
@@ -16,8 +17,14 @@
* @author Jeremy McCormick, SLAC
* @param <T> The type of the returned data which should be a class extending {@link BaseConditionsObjectCollection}.
*/
+// TODO: Move to conditions.database package (not an API class).
public abstract class AbstractConditionsObjectConverter<T> implements ConditionsConverter<T> {
+ /**
+ * Initialize the logger.
+ */
+ private static final Logger LOGGER = Logger.getLogger(AbstractConditionsObjectConverter.class.getPackage().getName());
+
/**
* Create a conditions object collection.
*
@@ -119,7 +126,7 @@
throw new RuntimeException(e);
}
- DatabaseConditionsManager.getLogger().info("loading conditions set..." + '\n' + conditionsRecord);
+ LOGGER.info("loading conditions set..." + '\n' + conditionsRecord);
// Select the objects into the collection by the collection ID.
try {
Modified: java/trunk/conditions/src/main/java/org/hps/conditions/cli/AddCommand.java
=============================================================================
--- java/trunk/conditions/src/main/java/org/hps/conditions/cli/AddCommand.java (original)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/cli/AddCommand.java Tue Oct 6 18:49:26 2015
@@ -13,7 +13,6 @@
import org.hps.conditions.api.FieldValuesMap;
import org.hps.conditions.api.TableRegistry;
import org.hps.conditions.database.DatabaseConditionsManager;
-import org.lcsim.util.log.LogUtil;
/**
* This is a command for the conditions CLI that will add a conditions record, making a conditions set with a particular
@@ -24,9 +23,9 @@
final class AddCommand extends AbstractCommand {
/**
- * Setup logger.
+ * Initialize the logger.
*/
- private static final Logger LOGGER = LogUtil.create(AddCommand.class);
+ private static final Logger LOGGER = Logger.getLogger(AddCommand.class.getPackage().getName());
/**
* Define command line options.
Modified: java/trunk/conditions/src/main/java/org/hps/conditions/cli/CommandLineTool.java
=============================================================================
--- java/trunk/conditions/src/main/java/org/hps/conditions/cli/CommandLineTool.java (original)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/cli/CommandLineTool.java Tue Oct 6 18:49:26 2015
@@ -14,7 +14,6 @@
import org.apache.commons.cli.PosixParser;
import org.hps.conditions.database.DatabaseConditionsManager;
import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
-import org.lcsim.util.log.LogUtil;
/**
* This class is a command-line tool for performing commands on the conditions database using sub-commands for
@@ -28,9 +27,9 @@
public final class CommandLineTool {
/**
- * Setup logging.
- */
- private static final Logger LOGGER = LogUtil.create(CommandLineTool.class);
+ * Initialize the logger.
+ */
+ private static final Logger LOGGER = Logger.getLogger(CommandLineTool.class.getPackage().getName());
private static Options OPTIONS = new Options();
@@ -180,9 +179,9 @@
// Set the conditions manager log level (does not affect logger of this class or sub-commands).
if (commandLine.hasOption("l")) {
- final Level level = Level.parse(commandLine.getOptionValue("l"));
- this.conditionsManager.setLogLevel(level);
- LOGGER.config("conditions manager log level will be set to " + level.toString());
+ final Level newLevel = Level.parse(commandLine.getOptionValue("l"));
+ Logger.getLogger(DatabaseConditionsManager.class.getPackage().getName()).setLevel(newLevel);
+ LOGGER.config("conditions manager log level will be set to " + newLevel.toString());
}
// Connection properties.
Modified: java/trunk/conditions/src/main/java/org/hps/conditions/cli/LoadCommand.java
=============================================================================
--- java/trunk/conditions/src/main/java/org/hps/conditions/cli/LoadCommand.java (original)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/cli/LoadCommand.java Tue Oct 6 18:49:26 2015
@@ -2,7 +2,6 @@
import java.io.File;
import java.sql.SQLException;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.cli.CommandLine;
@@ -13,8 +12,6 @@
import org.hps.conditions.api.DatabaseObjectException;
import org.hps.conditions.api.TableMetaData;
import org.hps.conditions.database.DatabaseConditionsManager;
-import org.lcsim.util.log.LogUtil;
-import org.lcsim.util.log.MessageOnlyLogFormatter;
/**
* This is a sub-command to add conditions data using an input text file. The file should be ASCII text that is
@@ -27,9 +24,9 @@
final class LoadCommand extends AbstractCommand {
/**
- * Setup the logger.
+ * Initialize the logger.
*/
- private static final Logger LOGGER = LogUtil.create(LoadCommand.class, new MessageOnlyLogFormatter(), Level.ALL);
+ private static final Logger LOGGER = Logger.getLogger(CommandLineTool.class.getPackage().getName());
/**
* Define command options.
Modified: java/trunk/conditions/src/main/java/org/hps/conditions/cli/PrintCommand.java
=============================================================================
--- java/trunk/conditions/src/main/java/org/hps/conditions/cli/PrintCommand.java (original)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/cli/PrintCommand.java Tue Oct 6 18:49:26 2015
@@ -18,7 +18,6 @@
import org.hps.conditions.api.ConditionsRecord.ConditionsRecordCollection;
import org.hps.conditions.api.TableMetaData;
import org.hps.conditions.database.DatabaseConditionsManager;
-import org.lcsim.util.log.LogUtil;
/**
* This sub-command of the conditions CLI prints conditions conditions table data by run number to the console or
@@ -29,9 +28,9 @@
final class PrintCommand extends AbstractCommand {
/**
- * Setup logger.
- */
- private static final Logger LOGGER = LogUtil.create(PrintCommand.class);
+ * Initialize the logger.
+ */
+ private static final Logger LOGGER = Logger.getLogger(PrintCommand.class.getPackage().getName());
/**
* Defines command options.
Modified: java/trunk/conditions/src/main/java/org/hps/conditions/cli/RunSummaryCommand.java
=============================================================================
--- java/trunk/conditions/src/main/java/org/hps/conditions/cli/RunSummaryCommand.java (original)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/cli/RunSummaryCommand.java Tue Oct 6 18:49:26 2015
@@ -3,7 +3,6 @@
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.cli.CommandLine;
@@ -14,8 +13,6 @@
import org.hps.conditions.api.ConditionsRecord.ConditionsRecordCollection;
import org.hps.conditions.api.TableMetaData;
import org.hps.conditions.database.DatabaseConditionsManager;
-import org.lcsim.util.log.LogUtil;
-import org.lcsim.util.log.MessageOnlyLogFormatter;
/**
* This is a sub-command to print out collection meta data for the current conditions configuration of tag, detector
@@ -29,10 +26,9 @@
final class RunSummaryCommand extends AbstractCommand {
/**
- * Setup logger.
+ * Initialize the logger.
*/
- private static final Logger LOGGER = LogUtil.create(RunSummaryCommand.class, new MessageOnlyLogFormatter(),
- Level.INFO);
+ private static final Logger LOGGER = Logger.getLogger(RunSummaryCommand.class.getPackage().getName());
/**
* Define command options.
Modified: java/trunk/conditions/src/main/java/org/hps/conditions/cli/TagCommand.java
=============================================================================
--- java/trunk/conditions/src/main/java/org/hps/conditions/cli/TagCommand.java (original)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/cli/TagCommand.java Tue Oct 6 18:49:26 2015
@@ -19,8 +19,6 @@
import org.hps.conditions.api.TableMetaData;
import org.hps.conditions.api.TableRegistry;
import org.hps.conditions.database.MultipleCollectionsAction;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Create a conditions system tag.
@@ -35,9 +33,9 @@
final class TagCommand extends AbstractCommand {
/**
- * Setup the logger.
- */
- private static final Logger LOGGER = LogUtil.create(TagCommand.class, new DefaultLogFormatter(), Level.ALL);
+ * Initialize the logger.
+ */
+ private static final Logger LOGGER = Logger.getLogger(TagCommand.class.getPackage().getName());
/**
* Defines command options.
@@ -144,7 +142,6 @@
// Conditions system configuration.
this.getManager().setXmlConfig("/org/hps/conditions/config/conditions_database_no_svt.xml");
- this.getManager().setLogLevel(Level.ALL);
// Find all the applicable conditions records by their run number ranges.
ConditionsRecordCollection tagConditionsRecordCollection = this.findConditionsRecords(runStart, runEnd);
Modified: java/trunk/conditions/src/main/java/org/hps/conditions/database/DatabaseConditionsManager.java
=============================================================================
--- java/trunk/conditions/src/main/java/org/hps/conditions/database/DatabaseConditionsManager.java (original)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/database/DatabaseConditionsManager.java Tue Oct 6 18:49:26 2015
@@ -132,15 +132,6 @@
}
/**
- * Get the Logger for this class, which can be used by related sub-classes if they do not have their own logger.
- *
- * @return the Logger for this class
- */
- public static Logger getLogger() {
- return LOGGER;
- }
-
- /**
* Utility method to determine if a run number is from the 2012 Test Run.
*
* @param runNumber the run number
@@ -834,11 +825,6 @@
LOGGER.config("isTestRun = " + this.isTestRun);
}
- element = node.getChild("logLevel");
- if (element != null) {
- this.setLogLevel(Level.parse(element.getText()));
- }
-
element = node.getChild("closeConnectionAfterInitialize");
if (element != null) {
this.closeConnectionAfterInitialize = Boolean.parseBoolean(element.getText());
@@ -1055,16 +1041,6 @@
}
/**
- * Set the log level.
- *
- * @param level the new log level
- */
- public void setLogLevel(final Level level) {
- LOGGER.config("setting log level to " + level);
- LOGGER.setLevel(level);
- }
-
- /**
* Set the name of the SVT subdetector.
*
* @param svtName the name of the SVT subdetector
Modified: java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsConverter.java
=============================================================================
--- java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsConverter.java (original)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsConverter.java Tue Oct 6 18:49:26 2015
@@ -1,6 +1,5 @@
package org.hps.conditions.ecal;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.hps.conditions.api.ConditionsObjectCollection;
@@ -33,13 +32,10 @@
public class EcalConditionsConverter implements ConditionsConverter<EcalConditions> {
/**
- * Setup logger.
- */
- private static Logger LOGGER = Logger.getLogger(EcalConditionsConverter.class.getName());
- static {
- LOGGER.setLevel(Level.ALL);
- }
-
+ * Initialize the logger.
+ */
+ private static Logger LOGGER = Logger.getLogger(EcalConditionsConverter.class.getPackage().getName());
+
/**
* Create combined ECAL conditions object containing all data for the current run.
*
@@ -110,7 +106,7 @@
}
} else {
// If time shifts do not exist it is not a fatal error.
- DatabaseConditionsManager.getLogger().warning("no conditions found for EcalTimeShiftCollection");
+ LOGGER.warning("no conditions found for EcalTimeShiftCollection");
}
// Set the channel pulse width if it exists in the database.
@@ -123,7 +119,7 @@
}
} else {
// If pulse widths do not exist it is not a fatal error.
- DatabaseConditionsManager.getLogger().warning("no conditions found for EcalPulseWidthCollection");
+ LOGGER.warning("no conditions found for EcalPulseWidthCollection");
}
// Return the conditions object to caller.
Modified: java/trunk/conditions/src/main/java/org/hps/conditions/svt/AbstractSvtConditionsConverter.java
=============================================================================
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/AbstractSvtConditionsConverter.java (original)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/AbstractSvtConditionsConverter.java Tue Oct 6 18:49:26 2015
@@ -11,7 +11,6 @@
import org.hps.conditions.svt.SvtShapeFitParameters.SvtShapeFitParametersCollection;
import org.lcsim.conditions.ConditionsConverter;
import org.lcsim.conditions.ConditionsManager;
-import org.lcsim.util.log.LogUtil;
/**
* Abstract class providing some of the common methods used in creating SVT conditions objects from the database.
@@ -22,14 +21,13 @@
public abstract class AbstractSvtConditionsConverter<T extends AbstractSvtConditions> implements ConditionsConverter<T> {
/**
- * Initialize logging.
+ * Initialize the logger.
*/
- static Logger logger = LogUtil.create(AbstractSvtConditionsConverter.class);
+ private static Logger LOGGER = Logger.getLogger(AbstractSvtConditionsConverter.class.getPackage().getName());
/**
* The combined detector conditions object.
*/
- // FIXME: Should be private with accessor methods.
protected T conditions;
/**
@@ -73,7 +71,7 @@
}
}
} catch (final RuntimeException e) {
- logger.warning("A set of SVT bad channels was not found.");
+ LOGGER.warning("A set of SVT bad channels was not found.");
}
// Get the gains and offsets from the conditions database
Modified: java/trunk/conditions/src/main/java/org/hps/conditions/svt/CalibrationHandler.java
=============================================================================
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/CalibrationHandler.java (original)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/CalibrationHandler.java Tue Oct 6 18:49:26 2015
@@ -1,14 +1,11 @@
package org.hps.conditions.svt;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.hps.conditions.api.ConditionsObjectException;
import org.hps.conditions.database.DatabaseConditionsManager;
import org.hps.conditions.svt.SvtCalibration.SvtCalibrationCollection;
import org.hps.conditions.svt.SvtChannel.SvtChannelCollection;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
@@ -23,8 +20,7 @@
/**
* Initialize the logger.
*/
- private static Logger logger = LogUtil.create(SvtConditionsLoader.class.getSimpleName(), new DefaultLogFormatter(),
- Level.INFO);
+ private static Logger LOGGER = Logger.getLogger(CalibrationHandler.class.getPackage().getName());
/**
* Baseline sample ID (0-5).
@@ -161,7 +157,7 @@
break;
case "Hybrid":
this.hybridID = Integer.parseInt(attributes.getValue("id"));
- logger.info("Processing calibrations for FEB " + this.febID + " Hybrid " + this.hybridID);
+ LOGGER.info("Processing calibrations for FEB " + this.febID + " Hybrid " + this.hybridID);
break;
case "channel":
this.channel = Integer.parseInt(attributes.getValue("id"));
Modified: java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtBiasConditionsLoader.java
=============================================================================
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtBiasConditionsLoader.java (original)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtBiasConditionsLoader.java Tue Oct 6 18:49:26 2015
@@ -1,6 +1,11 @@
package org.hps.conditions.svt;
-import hep.aida.*;
+import hep.aida.IDataPoint;
+import hep.aida.IDataPointSet;
+import hep.aida.IDataPointSetFactory;
+import hep.aida.IPlotter;
+import hep.aida.IPlotterStyle;
+
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
@@ -17,8 +22,8 @@
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
-import java.util.logging.Level;
import java.util.logging.Logger;
+
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
@@ -38,9 +43,7 @@
import org.hps.conditions.svt.SvtMotorMyaDataReader.SvtPositionMyaRange;
import org.hps.conditions.svt.SvtMotorMyaDataReader.SvtPositionRunRange;
import org.hps.conditions.svt.SvtMotorPosition.SvtMotorPositionCollection;
-import org.hps.util.BasicLogFormatter;
import org.lcsim.util.aida.AIDA;
-import org.lcsim.util.log.LogUtil;
/**
* @author Per Hansson Adrian <[log in to unmask]>
@@ -49,7 +52,11 @@
public class SvtBiasConditionsLoader {
private static final Set<String> FIELDS = new HashSet<String>();
- private static Logger logger = LogUtil.create(SvtBiasConditionsLoader.class, new BasicLogFormatter(), Level.INFO);
+
+ /**
+ * Initialize the logger.
+ */
+ private static Logger LOGGER = Logger.getLogger(SvtBiasConditionsLoader.class.getPackage().getName());
/**
* Setup conditions.
@@ -113,7 +120,7 @@
*/
private static boolean isValid(RunData data) {
if (data.getStartDate() == null || data.getEndDate() == null || data.getStartDate().before((new GregorianCalendar(1999, 1, 1)).getTime())) {
- logger.fine("This run data is not valid: " + data.toString());
+ LOGGER.fine("This run data is not valid: " + data.toString());
return false;
}
if (data.getStartDate().after(data.getEndDate())) {
@@ -125,18 +132,18 @@
//private static Options options = null;
public static RunMap getRunMapFromSpreadSheet(String path) {
// Load in CSV records from the exported run spreadsheet.
- logger.info(path);
+ LOGGER.info(path);
final RunSpreadsheet runSheet = new RunSpreadsheet(new File(path));
// Find the run ranges that have the same fields values.
final List<RunRange> ranges = RunRange.findRunRanges(runSheet, FIELDS);
- logger.info("Found " + ranges.size() + " ranges.");
+ LOGGER.info("Found " + ranges.size() + " ranges.");
for (RunRange range : ranges) {
- logger.fine(range.toString());
+ LOGGER.fine(range.toString());
}
// find the run records (has converted dates and stuff) for these ranges
RunMap runmap = runSheet.getRunMap(ranges);
- logger.info("Found " + runmap.size() + " runs in the run map.");
+ LOGGER.info("Found " + runmap.size() + " runs in the run map.");
return runmap;
}
@@ -205,13 +212,13 @@
// Load MYA dump
if (cl.hasOption("m")) {
List<SvtBiasMyaRange> biasRanges = SvtBiasMyaDataReader.readMyaData(new File(cl.getOptionValue("m")), 178.0, 2000, cl.hasOption("d"));
- logger.info("Got " + biasRanges.size() + " bias ranges");
+ LOGGER.info("Got " + biasRanges.size() + " bias ranges");
biasRunRanges = SvtBiasMyaDataReader.findOverlappingRanges(runList, biasRanges);
}
if (cl.hasOption("p")) {
List<SvtPositionMyaRange> positionRanges = SvtMotorMyaDataReader.readMyaData(new File(cl.getOptionValue("p")), 200, 10000);
- logger.info("Got " + positionRanges.size() + " position ranges");
+ LOGGER.info("Got " + positionRanges.size() + " position ranges");
positionRunRanges = SvtMotorMyaDataReader.findOverlappingRanges(runList, positionRanges);
}
@@ -221,7 +228,7 @@
if (cl.hasOption("s")) {
if (cl.hasOption("m")) {
for (SvtBiasRunRange r : biasRunRanges) {
- logger.info(r.toString());
+ LOGGER.info(r.toString());
if (r.getRun().getRun() > 5600) {//9999999999999.0) {
//if(dpsRuns.size()/4.0<500) {//9999999999999.0) {
addPoint(dpsRuns, r.getRun().getStartDate().getTime(), 0.0);
@@ -240,7 +247,7 @@
}
if (cl.hasOption("p")) {
for (SvtPositionRunRange r : positionRunRanges) {
- logger.info(r.toString());
+ LOGGER.info(r.toString());
if (r.getRun().getRun() > 5600) {//9999999999999.0) {
//if(dpsRuns.size()/4.0<500) {//9999999999999.0) {
for (SvtPositionMyaRange br : r.getRanges()) {
@@ -276,22 +283,22 @@
}
private static void loadBiasesToConditionsDB(List<SvtBiasRunRange> ranges) {
- logger.info("Load to DB...");
+ LOGGER.info("Load to DB...");
// Create a new collection for each run
List<Integer> runsadded = new ArrayList<Integer>();
for (SvtBiasRunRange range : ranges) {
- logger.info("Loading " + range.toString());
+ LOGGER.info("Loading " + range.toString());
RunData rundata = range.getRun();
if (runsadded.contains(rundata.getRun())) {
- logger.warning("Run " + Integer.toString(rundata.getRun()) + " was already added?");
+ LOGGER.warning("Run " + Integer.toString(rundata.getRun()) + " was already added?");
throw new RuntimeException("Run " + Integer.toString(rundata.getRun()) + " was already added?");
}
runsadded.add(rundata.getRun());
if (range.getRanges().isEmpty()) {
- logger.info("No bias range for run " + range.getRun().getRun());
+ LOGGER.info("No bias range for run " + range.getRun().getRun());
continue;
}
@@ -322,7 +329,7 @@
condition.setFieldValue("collection_id", collectionId);
condition.setTableMetaData(MANAGER.findTableMetaData("conditions"));
condition.setConnection(MANAGER.getConnection());
- logger.info(condition.toString());
+ LOGGER.info(condition.toString());
try {
@@ -348,22 +355,22 @@
}
private static void loadPositionsToConditionsDB(List<SvtPositionRunRange> ranges) {
- logger.info("Load to DB...");
+ LOGGER.info("Load to DB...");
// Create a new collection for each run
List<Integer> runsadded = new ArrayList<Integer>();
for (SvtPositionRunRange range : ranges) {
- logger.info("Loading " + range.toString());
+ LOGGER.info("Loading " + range.toString());
RunData rundata = range.getRun();
if (runsadded.contains(rundata.getRun())) {
- logger.warning("Run " + Integer.toString(rundata.getRun()) + " was already added?");
+ LOGGER.warning("Run " + Integer.toString(rundata.getRun()) + " was already added?");
throw new RuntimeException("Run " + Integer.toString(rundata.getRun()) + " was already added?");
}
runsadded.add(rundata.getRun());
if (range.getRanges().isEmpty()) {
- logger.info("No position range for run " + range.getRun().getRun());
+ LOGGER.info("No position range for run " + range.getRun().getRun());
continue;
}
@@ -394,7 +401,7 @@
condition.setFieldValue("collection_id", collectionId);
condition.setTableMetaData(MANAGER.findTableMetaData("conditions"));
condition.setConnection(MANAGER.getConnection());
- logger.info(condition.toString());
+ LOGGER.info(condition.toString());
try {
Modified: java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtBiasMyaDumpReader.java
=============================================================================
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtBiasMyaDumpReader.java (original)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtBiasMyaDumpReader.java Tue Oct 6 18:49:26 2015
@@ -10,20 +10,18 @@
import java.util.Date;
import java.util.List;
import java.util.TimeZone;
-import java.util.logging.Level;
import java.util.logging.Logger;
-import org.hps.conditions.run.RunSpreadsheet;
import org.hps.conditions.run.RunSpreadsheet.RunData;
-import org.hps.util.BasicLogFormatter;
-import org.lcsim.util.log.LogUtil;
public class SvtBiasMyaDumpReader {
- private static Logger logger = LogUtil.create(SvtBiasMyaDumpReader.class, new BasicLogFormatter(), Level.INFO);
-
+ /**
+ * Initialize the logger.
+ */
+ private static Logger LOGGER = Logger.getLogger(SvtBiasMyaDumpReader.class.getPackage().getName());
public static void main(String[] args) {
@@ -51,7 +49,7 @@
private void readFromFile(File file) {
addEntries(readMyaDump(file));
- logger.info("Got " + getEntries().size() + " entries from " + file.getName());
+ LOGGER.info("Got " + getEntries().size() + " entries from " + file.getName());
}
public void buildFromFiles(String[] args) {
@@ -90,7 +88,7 @@
private void printRanges() {
for( SvtBiasMyaRange r : biasRanges) {
- logger.info(r.toString());
+ LOGGER.info(r.toString());
}
}
@@ -143,14 +141,14 @@
if( e.getValue() > BIASVALUEON) {
if (range==null) {
- logger.fine("BIAS ON: " + e.toString());
+ LOGGER.fine("BIAS ON: " + e.toString());
range = new SvtBiasMyaRange();
range.setStart(e);
}
} else {
//close it
if (range!=null) {
- logger.fine("BIAS TURNED OFF: " + e.toString());
+ LOGGER.fine("BIAS TURNED OFF: " + e.toString());
range.setEnd(e);
this.biasRanges.add(range);
range = null;
@@ -158,7 +156,7 @@
}
eprev = e;
}
- logger.info("Built " + this.biasRanges.size() + " ranges");
+ LOGGER.info("Built " + this.biasRanges.size() + " ranges");
}
@@ -188,13 +186,13 @@
public static final class SvtBiasMyaRanges extends ArrayList<SvtBiasMyaRange> {
public SvtBiasMyaRanges() {}
public SvtBiasMyaRanges findOverlappingRanges(Date date_start, Date date_end) {
- logger.fine("look for overlaps from " + date_start.toString() + " to " + date_end.toString());
+ LOGGER.fine("look for overlaps from " + date_start.toString() + " to " + date_end.toString());
SvtBiasMyaRanges overlaps = new SvtBiasMyaRanges();
for(SvtBiasMyaRange range : this) {
- logger.fine("loop bias range " + range.toString());
+ LOGGER.fine("loop bias range " + range.toString());
if( range.overlap(date_start,date_end) ) {
overlaps.add(range);
- logger.fine("overlap found!! ");
+ LOGGER.fine("overlap found!! ");
}
}
return overlaps;
Modified: java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtConditionsLoader.java
=============================================================================
--- java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtConditionsLoader.java (original)
+++ java/trunk/conditions/src/main/java/org/hps/conditions/svt/SvtConditionsLoader.java Tue Oct 6 18:49:26 2015
@@ -1,7 +1,6 @@
package org.hps.conditions.svt;
import java.io.File;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.cli.CommandLine;
@@ -17,8 +16,6 @@
import org.hps.conditions.svt.SvtChannel.SvtChannelCollection;
import org.hps.conditions.svt.SvtDaqMapping.SvtDaqMappingCollection;
import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Command line tool used to load SVT conditions into the conditions database.
@@ -45,8 +42,7 @@
/**
* Initialize the logger.
*/
- private static Logger logger = LogUtil.create(SvtConditionsLoader.class.getName(), new DefaultLogFormatter(),
- Level.INFO);
+ private static Logger LOGGER = Logger.getLogger(SvtConditionsLoader.class.getPackage().getName());
/**
* SVT channels table name.
@@ -78,7 +74,7 @@
return;
}
final int runNumber = Integer.valueOf(commandLine.getOptionValue("r"));
- logger.info("Run number set to " + runNumber);
+ LOGGER.info("Run number set to " + runNumber);
// Initialize the conditions system and load the conditions onto the
// detector object
@@ -106,7 +102,7 @@
// to the conditions database.
if (commandLine.hasOption("c")) {
final File calibrationFile = new File(commandLine.getOptionValue("c"));
- logger.info("Loading calibrations from file " + calibrationFile.getAbsolutePath());
+ LOGGER.info("Loading calibrations from file " + calibrationFile.getAbsolutePath());
try {
// Parse the calibration file and retrieve the calibrations collection.
@@ -121,11 +117,11 @@
// Set the collection ID.
final int collectionID = DatabaseConditionsManager.getInstance().getCollectionId(calibrations, null);
calibrations.setCollectionId(collectionID);
- logger.info("Using collection ID " + collectionID);
+ LOGGER.info("Using collection ID " + collectionID);
// Load the calibrations
calibrations.insert();
- logger.info("A total of " + calibrations.size()
+ LOGGER.info("A total of " + calibrations.size()
+ " SvtCalibrations were loaded successfully into the database.");
// Create a conditions record associated with the set of conditions that were just loaded.
@@ -144,7 +140,7 @@
// conditions database.
if (commandLine.hasOption("d")) {
final File daqMapFile = new File(commandLine.getOptionValue("d"));
- logger.info("Loading DAQ map from file " + daqMapFile.getAbsolutePath());
+ LOGGER.info("Loading DAQ map from file " + daqMapFile.getAbsolutePath());
try {
// Parse the DAQ map file
@@ -159,12 +155,12 @@
// Set the collection ID
int collectionID = DatabaseConditionsManager.getInstance().getCollectionId(daqMapping, null);
daqMapping.setCollectionId(collectionID);
- logger.info("Using collection ID " + collectionID);
+ LOGGER.info("Using collection ID " + collectionID);
// Load the DAQ map
daqMapping.insert();
- logger.info("DAQ map has been loaded successfully");
- logger.fine(daqMapping.toString());
+ LOGGER.info("DAQ map has been loaded successfully");
+ LOGGER.fine(daqMapping.toString());
// Create a conditions record associated with the set of
// conditions that were just loaded.
@@ -173,7 +169,7 @@
"Engineering run DAQ map. Loaded using SvtConditionsLoader.", "eng_run");
conditionsRecord.insert();
- logger.info("Loading the collection of SvtChannel's");
+ LOGGER.info("Loading the collection of SvtChannel's");
final SvtChannelCollection svtChannels = reader.getSvtChannelCollection();
// Set the table meta data
@@ -184,10 +180,10 @@
// Set the collection ID
collectionID = DatabaseConditionsManager.getInstance().getCollectionId(svtChannels, null);
svtChannels.setCollectionId(collectionID);
- logger.info("Using collection ID " + collectionID);
+ LOGGER.info("Using collection ID " + collectionID);
svtChannels.insert();
- logger.info("A total of " + svtChannels.size()
+ LOGGER.info("A total of " + svtChannels.size()
+ " SvtChannels were successfully loaded into the database.");
// Create a conditions record associated with the set of
Modified: java/trunk/conditions/src/test/java/org/hps/conditions/RunNumberTest.java
=============================================================================
--- java/trunk/conditions/src/test/java/org/hps/conditions/RunNumberTest.java (original)
+++ java/trunk/conditions/src/test/java/org/hps/conditions/RunNumberTest.java Tue Oct 6 18:49:26 2015
@@ -95,8 +95,6 @@
final File testFile = cache.getCachedFile(new URL(URL));
// Create the record loop.
- final DatabaseConditionsManager manager = DatabaseConditionsManager.getInstance();
- manager.setLogLevel(Level.WARNING);
final LCSimLoop loop = new LCSimLoop();
// Configure the loop.
Modified: java/trunk/conditions/src/test/java/org/hps/conditions/api/ConditionsTagTest.java
=============================================================================
--- java/trunk/conditions/src/test/java/org/hps/conditions/api/ConditionsTagTest.java (original)
+++ java/trunk/conditions/src/test/java/org/hps/conditions/api/ConditionsTagTest.java Tue Oct 6 18:49:26 2015
@@ -104,7 +104,6 @@
*/
public void testPass1Tag() throws Exception {
- MANAGER.setLogLevel(Level.WARNING);
MANAGER.addTag("pass1");
Map<Integer, int[]> collectionMap = buildCollectionMap();
Modified: java/trunk/conditions/src/test/java/org/hps/conditions/database/DatabaseConditionsManagerTest.java
=============================================================================
--- java/trunk/conditions/src/test/java/org/hps/conditions/database/DatabaseConditionsManagerTest.java (original)
+++ java/trunk/conditions/src/test/java/org/hps/conditions/database/DatabaseConditionsManagerTest.java Tue Oct 6 18:49:26 2015
@@ -1,6 +1,4 @@
package org.hps.conditions.database;
-
-import java.util.logging.Level;
import junit.framework.TestCase;
@@ -27,7 +25,6 @@
public void testDatabaseConditionsManager() throws Exception {
DatabaseConditionsManager manager = DatabaseConditionsManager.getInstance();
- manager.setLogLevel(Level.ALL);
// Check initial state.
TestCase.assertTrue("The conditions manager instance is null.", manager != null);
Modified: java/trunk/conditions/src/test/java/org/hps/conditions/svt/SvtBadChannelTest.java
=============================================================================
--- java/trunk/conditions/src/test/java/org/hps/conditions/svt/SvtBadChannelTest.java (original)
+++ java/trunk/conditions/src/test/java/org/hps/conditions/svt/SvtBadChannelTest.java Tue Oct 6 18:49:26 2015
@@ -5,7 +5,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.logging.Level;
import junit.framework.TestCase;
@@ -112,8 +111,6 @@
loop.setLCIORecordSource(testFile);
loop.add(new SvtBadChannelChecker());
- DatabaseConditionsManager.getInstance().setLogLevel(Level.OFF);
-
// Run over all events.
loop.loop(-1, null);
}
Modified: java/trunk/conditions/src/test/java/org/hps/conditions/svt/SvtTimingConstantsTest.java
=============================================================================
--- java/trunk/conditions/src/test/java/org/hps/conditions/svt/SvtTimingConstantsTest.java (original)
+++ java/trunk/conditions/src/test/java/org/hps/conditions/svt/SvtTimingConstantsTest.java Tue Oct 6 18:49:26 2015
@@ -34,7 +34,6 @@
*/
public void testSvtTimingConstants() throws Exception {
final DatabaseConditionsManager manager = DatabaseConditionsManager.getInstance();
- manager.setLogLevel(Level.SEVERE);
// manager.setConnectionResource("/org/hps/conditions/config/jeremym_dev_connection.prop");
for (final int run : RUNS) {
manager.setDetector(DETECTOR, run);
Modified: java/trunk/conditions/src/test/java/org/hps/conditions/svt/TestRunSvtBadChannelsTest.java
=============================================================================
--- java/trunk/conditions/src/test/java/org/hps/conditions/svt/TestRunSvtBadChannelsTest.java (original)
+++ java/trunk/conditions/src/test/java/org/hps/conditions/svt/TestRunSvtBadChannelsTest.java Tue Oct 6 18:49:26 2015
@@ -50,7 +50,6 @@
final DatabaseConditionsManager conditionsManager = DatabaseConditionsManager.getInstance();
conditionsManager.setXmlConfig("/org/hps/conditions/config/conditions_database_testrun_2012.xml");
- conditionsManager.setLogLevel(Level.WARNING);
for (int i = 0; i < RUN_NUMBERS.length; i++) {
Modified: java/trunk/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java
=============================================================================
--- java/trunk/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java (original)
+++ java/trunk/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java Tue Oct 6 18:49:26 2015
@@ -28,8 +28,6 @@
import org.hps.datacat.client.DatacatClient;
import org.hps.datacat.client.DatacatClientFactory;
import org.hps.datacat.client.DatasetFileFormat;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Command line file crawler for populating the data catalog.
@@ -118,7 +116,7 @@
/**
* Setup the logger.
*/
- private static final Logger LOGGER = LogUtil.create(DatacatCrawler.class, new DefaultLogFormatter(), Level.CONFIG);
+ private static final Logger LOGGER = Logger.getLogger(DatacatCrawler.class.getPackage().getName());
/**
* Command line options for the crawler.
Modified: java/trunk/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java
=============================================================================
--- java/trunk/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java (original)
+++ java/trunk/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java Tue Oct 6 18:49:26 2015
@@ -5,7 +5,6 @@
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.hps.record.evio.EventTagConstant;
@@ -14,8 +13,6 @@
import org.jlab.coda.jevio.EvioEvent;
import org.jlab.coda.jevio.EvioException;
import org.jlab.coda.jevio.EvioReader;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Reads metadata from EVIO files.
@@ -25,9 +22,9 @@
public class EvioMetadataReader implements FileMetadataReader {
/**
- * Class logger.
+ * Initialize the logger.
*/
- private static Logger LOGGER = LogUtil.create(EvioMetadataReader.class, new DefaultLogFormatter(), Level.ALL);
+ private static Logger LOGGER = Logger.getLogger(EvioMetadataReader.class.getPackage().getName());
/**
* Get the EVIO file metadata.
Modified: java/trunk/crawler/src/main/java/org/hps/crawler/FileFormatFilter.java
=============================================================================
--- java/trunk/crawler/src/main/java/org/hps/crawler/FileFormatFilter.java (original)
+++ java/trunk/crawler/src/main/java/org/hps/crawler/FileFormatFilter.java Tue Oct 6 18:49:26 2015
@@ -3,12 +3,9 @@
import java.io.File;
import java.io.FileFilter;
import java.util.Set;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.hps.datacat.client.DatasetFileFormat;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Filter files on their format.
@@ -20,9 +17,9 @@
public class FileFormatFilter implements FileFilter {
/**
- * Setup logger.
+ * Initialize the logger.
*/
- private static final Logger LOGGER = LogUtil.create(FileFormatFilter.class, new DefaultLogFormatter(), Level.ALL);
+ private static final Logger LOGGER = Logger.getLogger(FileFormatFilter.class.getPackage().getName());
/**
* The file format.
Modified: java/trunk/crawler/src/main/java/org/hps/crawler/RunSummaryMap.java
=============================================================================
--- java/trunk/crawler/src/main/java/org/hps/crawler/RunSummaryMap.java (original)
+++ java/trunk/crawler/src/main/java/org/hps/crawler/RunSummaryMap.java Tue Oct 6 18:49:26 2015
@@ -6,7 +6,6 @@
import org.hps.run.database.RunSummary;
import org.hps.run.database.RunSummaryImpl;
-import org.lcsim.util.log.LogUtil;
/**
* This class maps run numbers to {@link RunSummary} objects.
@@ -17,9 +16,9 @@
final class RunSummaryMap extends HashMap<Integer, RunSummaryImpl> {
/**
- * Setup logging.
+ * Initialize the logger.
*/
- private static final Logger LOGGER = LogUtil.create(RunSummaryMap.class);
+ private static Logger LOGGER = Logger.getLogger(RunSummaryMap.class.getPackage().getName());
/**
* Get the collection of {@link RunSummary} objects.
Modified: java/trunk/datacat-client/src/main/java/org/hps/datacat/client/DatacatClientImpl.java
=============================================================================
--- java/trunk/datacat-client/src/main/java/org/hps/datacat/client/DatacatClientImpl.java (original)
+++ java/trunk/datacat-client/src/main/java/org/hps/datacat/client/DatacatClientImpl.java Tue Oct 6 18:49:26 2015
@@ -9,12 +9,9 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.json.JSONObject;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Implementation of {@link DatacatClient} interface for working with SRS datacat REST API.
@@ -26,7 +23,7 @@
/**
* Setup class logging.
*/
- private static Logger LOGGER = LogUtil.create(DatacatClientImpl.class, new DefaultLogFormatter(), Level.ALL);
+ private static final Logger LOGGER = Logger.getLogger(DatacatClientImpl.class.getPackage().getName());
/**
* The root directory (e.g. should be 'HPS').
Modified: java/trunk/detector-model/src/main/java/org/hps/detector/svt/SvtDetectorSetup.java
=============================================================================
--- java/trunk/detector-model/src/main/java/org/hps/detector/svt/SvtDetectorSetup.java (original)
+++ java/trunk/detector-model/src/main/java/org/hps/detector/svt/SvtDetectorSetup.java Tue Oct 6 18:49:26 2015
@@ -26,7 +26,6 @@
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.detector.tracker.silicon.HpsTestRunSiSensor;
import org.lcsim.geometry.compact.Subdetector;
-import org.lcsim.util.log.LogUtil;
/**
* This class puts {@link SvtConditions} data onto <code>HpsSiSensor</code> objects.
@@ -39,10 +38,7 @@
/**
* Initialize logger.
*/
- private static Logger logger = LogUtil.create(SvtDetectorSetup.class);
- static {
- logger.setLevel(Level.ALL);
- }
+ private static final Logger LOGGER = Logger.getLogger(SvtDetectorSetup.class.getPackage().getName());
/**
* The number of noise samples.
@@ -68,7 +64,7 @@
* Constructor that uses the default detector name.
*/
public SvtDetectorSetup() {
- logger.info("hi");
+ LOGGER.info("hi");
}
/**
@@ -77,7 +73,7 @@
* @param svtName the name of the SVT subdetector
*/
public SvtDetectorSetup(final String svtName) {
- logger.info("hi");
+ LOGGER.info("hi");
this.svtName = svtName;
}
@@ -88,29 +84,29 @@
*/
@Override
public void conditionsChanged(final ConditionsEvent event) {
- logger.info("conditions changed hook activated");
+ LOGGER.info("conditions changed hook activated");
if (this.enabled) {
- logger.info("I am enabled");
+ LOGGER.info("I am enabled");
final DatabaseConditionsManager manager = (DatabaseConditionsManager) event.getConditionsManager();
final Subdetector subdetector = manager.getDetectorObject().getSubdetector(this.svtName);
if (subdetector != null) {
- logger.info("found the SVT");
+ LOGGER.info("found the SVT");
if (manager.isTestRun()) {
final TestRunSvtConditions svtConditions = manager.getCachedConditions(TestRunSvtConditions.class,
"test_run_svt_conditions").getCachedData();
this.loadTestRun(subdetector, svtConditions);
} else {
- logger.info("activating default setup (not test run)");
+ LOGGER.info("activating default setup (not test run)");
final SvtConditions svtConditions = manager.getCachedConditions(SvtConditions.class,
"svt_conditions").getCachedData();
this.loadDefault(subdetector, svtConditions);
}
} else {
- logger.warning("no SVT detector was found so SvtDetectorSetup was NOT activated");
+ LOGGER.warning("no SVT detector was found so SvtDetectorSetup was NOT activated");
this.enabled = false;
}
} else {
- logger.config("disabled");
+ LOGGER.config("disabled");
}
}
@@ -122,13 +118,13 @@
*/
void loadDefault(final Subdetector subdetector, final SvtConditions conditions) {
- logger.info("loading default SVT conditions onto subdetector " + subdetector.getName());
+ LOGGER.info("loading default SVT conditions onto subdetector " + subdetector.getName());
// Find sensor objects.
final List<HpsSiSensor> sensors = subdetector.getDetectorElement().findDescendants(HpsSiSensor.class);
- logger.info("setting up " + sensors.size() + " SVT sensors");
+ LOGGER.info("setting up " + sensors.size() + " SVT sensors");
final SvtChannelCollection channelMap = conditions.getChannelMap();
- logger.info("channel map has " + conditions.getChannelMap().size() + " entries");
+ LOGGER.info("channel map has " + conditions.getChannelMap().size() + " entries");
final SvtDaqMappingCollection daqMap = conditions.getDaqMap();
final SvtT0ShiftCollection t0Shifts = conditions.getT0Shifts();
@@ -213,13 +209,13 @@
*/
void loadTestRun(final Subdetector subdetector, final TestRunSvtConditions conditions) {
- logger.info("loading Test Run SVT conditions onto subdetector " + subdetector.getName());
+ LOGGER.info("loading Test Run SVT conditions onto subdetector " + subdetector.getName());
// Find sensor objects.
final List<HpsSiSensor> sensors = subdetector.getDetectorElement().findDescendants(HpsSiSensor.class);
- logger.info("setting up " + sensors.size() + " SVT sensors");
+ LOGGER.info("setting up " + sensors.size() + " SVT sensors");
final TestRunSvtChannelCollection channelMap = conditions.getChannelMap();
- logger.info("channel map has " + channelMap.size() + " entries");
+ LOGGER.info("channel map has " + channelMap.size() + " entries");
final TestRunSvtDaqMappingCollection daqMap = conditions.getDaqMap();
final TestRunSvtT0ShiftCollection t0Shifts = conditions.getT0Shifts();
@@ -307,8 +303,8 @@
* @param level the log level
*/
public void setLogLevel(final Level level) {
- logger.setLevel(level);
- logger.getHandlers()[0].setLevel(level);
+ LOGGER.setLevel(level);
+ for (final java.util.logging.Handler handler : LOGGER.getHandlers()) handler.setLevel(level);
}
/**
Modified: java/trunk/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerBuilder.java
=============================================================================
--- java/trunk/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerBuilder.java (original)
+++ java/trunk/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerBuilder.java Tue Oct 6 18:49:26 2015
@@ -16,11 +16,10 @@
import org.lcsim.conditions.ConditionsManager;
import org.lcsim.detector.Transform3D;
import org.lcsim.geometry.compact.converter.HPSTestRunTracker2014GeometryDefinition.BaseModule;
-import org.lcsim.util.log.LogUtil;
public abstract class HPSTrackerBuilder {
- private static final Logger LOGGER = LogUtil.create(HPSTrackerBuilder.class);
+ private static final Logger LOGGER = Logger.getLogger(HPSTrackerBuilder.class.getPackage().getName());
private boolean debug = true;
public List<BaseModuleBundle> modules = new ArrayList<BaseModuleBundle>();
Modified: java/trunk/detector-model/src/main/java/org/lcsim/geometry/compact/converter/ReadSurveyOutput.java
=============================================================================
--- java/trunk/detector-model/src/main/java/org/lcsim/geometry/compact/converter/ReadSurveyOutput.java (original)
+++ java/trunk/detector-model/src/main/java/org/lcsim/geometry/compact/converter/ReadSurveyOutput.java Tue Oct 6 18:49:26 2015
@@ -13,7 +13,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -21,9 +20,6 @@
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
-import org.hps.util.BasicLogFormatter;
-import org.lcsim.geometry.compact.converter.HPSTracker2014GeometryDefinition.SvtBox;
-import org.lcsim.util.log.LogUtil;
/**
* @author Per Hansson Adrian <[log in to unmask]>
@@ -31,7 +27,7 @@
*/
public class ReadSurveyOutput {
- private static Logger logger = LogUtil.create(ReadSurveyOutput.class, new BasicLogFormatter(),Level.INFO);
+ private static final Logger logger = Logger.getLogger(ReadSurveyOutput.class.getPackage().getName());
List<CSVRecord> records;
Modified: java/trunk/detector-model/src/test/java/org/hps/detector/SvtAlignmentTest.java
=============================================================================
--- java/trunk/detector-model/src/test/java/org/hps/detector/SvtAlignmentTest.java (original)
+++ java/trunk/detector-model/src/test/java/org/hps/detector/SvtAlignmentTest.java Tue Oct 6 18:49:26 2015
@@ -1,6 +1,4 @@
package org.hps.detector;
-
-import java.util.logging.Level;
import junit.framework.TestCase;
@@ -45,9 +43,8 @@
public void testSvtAlignment() throws Exception {
final DatabaseConditionsManager manager = DatabaseConditionsManager.getInstance();
- manager.setLogLevel(Level.WARNING);
int runIndex = 0;
- for (int run : RUNS) {
+ for (int run : RUNS) {
System.out.println();
System.out.println("loading run " + run + " SVT alignments for detector " + DETECTOR_NAME + " ...");
Modified: java/trunk/detector-model/src/test/java/org/lcsim/detector/converter/compact/HPSTracker2ConverterTest.java
=============================================================================
--- java/trunk/detector-model/src/test/java/org/lcsim/detector/converter/compact/HPSTracker2ConverterTest.java (original)
+++ java/trunk/detector-model/src/test/java/org/lcsim/detector/converter/compact/HPSTracker2ConverterTest.java Tue Oct 6 18:49:26 2015
@@ -2,7 +2,6 @@
import java.io.InputStream;
import java.util.List;
-import java.util.logging.Level;
import java.util.logging.Logger;
import junit.framework.Test;
@@ -15,20 +14,17 @@
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.geometry.Detector;
import org.lcsim.geometry.GeometryReader;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Unit test for the HPSTracker2Coverter.
*
- * @author Jeremy McCormick <[log in to unmask]>
- * @author Omar Moreno <[log in to unmask]>
+ * @author Jeremy McCormick, SLAC
+ * @author Omar Moreno, UCSC
*/
public class HPSTracker2ConverterTest extends TestCase {
// Initialize the logger
- private static Logger logger = LogUtil.create(HPSTracker2Converter.class.getName(),
- new DefaultLogFormatter(), Level.INFO);
+ private static final Logger LOGGER = Logger.getLogger(HPSTracker2ConverterTest.class.getPackage().getName());
Detector detector = null;
@@ -67,14 +63,14 @@
public void testHPSTracker2Converter() {
// Test if the correct number of sensors was created.
- logger.info("Checking if the correct number of sensors were created.");
+ LOGGER.info("Checking if the correct number of sensors were created.");
List<HpsSiSensor> sensors = detector.getSubdetector(SUBDETECTOR_NAME).getDetectorElement().findDescendants(HpsSiSensor.class);
assertTrue("[ " + this.getClass().getSimpleName() + " ]: The wrong number of sensors were created.",
sensors.size() == TOTAL_NUMBER_OF_SENSORS);
- logger.info("Total number of sensors that were created: " + sensors.size());
+ LOGGER.info("Total number of sensors that were created: " + sensors.size());
// Test if the sensors that were created are instances of HpsSiSensor
- logger.info("Checking if sensors were initialized correctly");
+ LOGGER.info("Checking if sensors were initialized correctly");
for(HpsSiSensor sensor : sensors) {
assertTrue("[ " + this.getClass().getSimpleName() + " ]: Sensor is of wrong type: " + sensor.getClass().getSimpleName(),
sensor instanceof HpsSiSensor);
@@ -83,20 +79,20 @@
assertTrue("[ " + this.getClass().getSimpleName() + " ]: Wrong number of sense electrodes found.",
sensor.getSenseElectrodes(ChargeCarrier.HOLE).getNCells() == NUMBER_OF_SENSE_STRIPS);
- logger.info(sensor.toString());
+ LOGGER.info(sensor.toString());
}
- logger.info("Sensors were all initialized correctly.");
+ LOGGER.info("Sensors were all initialized correctly.");
// Check that the correct number of stereo layers were created
- logger.info("Checking if the correct number of stereo layers were created.");
+ LOGGER.info("Checking if the correct number of stereo layers were created.");
List<SvtStereoLayer> stereoLayers = ((HpsTracker2) detector.getSubdetector(SUBDETECTOR_NAME).getDetectorElement()).getStereoPairs();
// Check that the number of stereo layers created is as expected
assertTrue("[ " + this.getClass().getSimpleName() + " ]: The wrong number of stereo layers were created.",
stereoLayers.size() == TOTAL_NUMBER_OF_STEREO_LAYERS);
- logger.info("Total number of stereo layers created: " + stereoLayers.size());
+ LOGGER.info("Total number of stereo layers created: " + stereoLayers.size());
for(SvtStereoLayer stereoLayer : stereoLayers){
- logger.fine(stereoLayer.toString());
+ LOGGER.fine(stereoLayer.toString());
// The sensors comprising the stereo layer should belong to the same detector volume
assertTrue("[ " + this.getClass().getSimpleName() + " ]: Sensors belong to different detector volumes.",
@@ -104,7 +100,7 @@
// If the stereo layer is part of the top detector volume, the axial layers have an odd layer number.
// If the stereo layer is part of the bottom detector volume, the axial layers have an even layer number.
- logger.info("Checking if the layers are oriented correctly.");
+ LOGGER.info("Checking if the layers are oriented correctly.");
if(stereoLayer.getAxialSensor().isTopLayer()){
assertTrue("[ " + this.getClass().getSimpleName() + " ]: Sensors composing the stereo layer are flipped",
stereoLayer.getAxialSensor().getLayerNumber()%2 == 1);
Modified: java/trunk/distribution/pom.xml
=============================================================================
--- java/trunk/distribution/pom.xml (original)
+++ java/trunk/distribution/pom.xml Tue Oct 6 18:49:26 2015
@@ -74,6 +74,7 @@
<binFileExtensions>
<unix>.sh</unix>
</binFileExtensions>
+ <extraJvmArguments>-Djava.util.logging.config.class=org.hps.logging.config.DefaultLoggingConfig</extraJvmArguments>
<programs>
<program>
<mainClass>org.hps.evio.EvioToLcio</mainClass>
@@ -141,5 +142,9 @@
<groupId>org.hps</groupId>
<artifactId>hps-job</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.hps</groupId>
+ <artifactId>hps-logging</artifactId>
+ </dependency>
</dependencies>
</project>
Modified: java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/AbstractClusterer.java
=============================================================================
--- java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/AbstractClusterer.java (original)
+++ java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/AbstractClusterer.java Tue Oct 6 18:49:26 2015
@@ -1,7 +1,6 @@
package org.hps.recon.ecal.cluster;
import java.util.List;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.hps.conditions.database.DatabaseConditionsManager;
@@ -27,10 +26,7 @@
*/
public abstract class AbstractClusterer implements Clusterer {
- private static Logger logger = Logger.getLogger(AbstractClusterer.class.getName());
- static {
- logger.setLevel(Level.ALL);
- }
+ private static final Logger LOGGER = Logger.getLogger(AbstractClusterer.class.getPackage().getName());
protected HPSEcal3 ecal;
protected NeighborMap neighborMap;
@@ -97,7 +93,7 @@
*/
@Override
public void conditionsChanged(ConditionsEvent event) {
- logger.info("conditions change hook");
+ LOGGER.info("conditions change hook");
// Default setup of ECAL subdetector.
this.ecal = (HPSEcal3) DatabaseConditionsManager.getInstance().getDetectorObject().getSubdetector("Ecal");
Modified: java/trunk/ecal-recon/src/test/java/org/hps/recon/ecal/cluster/ClustererTest.java
=============================================================================
--- java/trunk/ecal-recon/src/test/java/org/hps/recon/ecal/cluster/ClustererTest.java (original)
+++ java/trunk/ecal-recon/src/test/java/org/hps/recon/ecal/cluster/ClustererTest.java Tue Oct 6 18:49:26 2015
@@ -140,7 +140,6 @@
// Initialize the conditions system.
DatabaseConditionsManager.resetInstance();
- DatabaseConditionsManager.getInstance().setLogLevel(Level.WARNING);
}
/**
Modified: java/trunk/evio/src/main/java/org/hps/evio/AbstractSvtEvioReader.java
=============================================================================
--- java/trunk/evio/src/main/java/org/hps/evio/AbstractSvtEvioReader.java (original)
+++ java/trunk/evio/src/main/java/org/hps/evio/AbstractSvtEvioReader.java Tue Oct 6 18:49:26 2015
@@ -4,13 +4,11 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.hps.evio.SvtEvioExceptions.SvtEvioHeaderException;
import org.hps.evio.SvtEvioExceptions.SvtEvioReaderException;
import org.hps.record.svt.SvtHeaderDataInfo;
-import org.hps.util.BasicLogFormatter;
import org.hps.util.Pair;
import org.jlab.coda.jevio.BaseStructure;
import org.jlab.coda.jevio.DataType;
@@ -25,8 +23,6 @@
import org.lcsim.event.base.BaseRawTrackerHit;
import org.lcsim.geometry.Subdetector;
import org.lcsim.lcio.LCIOUtil;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Abstract SVT EVIO reader used to convert SVT bank sample blocks to
@@ -40,11 +36,9 @@
public abstract class AbstractSvtEvioReader extends EvioReader {
public static final String SVT_HEADER_COLLECTION_NAME = "SvtHeaders";
-
// Initialize the logger
- protected static Level logLevel = Level.INFO;
- public static Logger logger = LogUtil.create(AbstractSvtEvioReader.class.getName(), new BasicLogFormatter(), Level.INFO);
+ public static final Logger LOGGER = Logger.getLogger(AbstractSvtEvioReader.class.getPackage().getName());
// A Map from DAQ pair (FPGA/Hybrid or FEB ID/FEB Hybrid ID) to the
// corresponding sensor
@@ -161,7 +155,7 @@
*/
public boolean makeHits(EvioEvent event, EventHeader lcsimEvent) throws SvtEvioReaderException {
- logger.fine("Physics Event: " + event.toString());
+ LOGGER.fine("Physics Event: " + event.toString());
// Retrieve the ROC banks encapsulated by the physics bank. The ROC
// bank range is set in the subclass.
@@ -169,15 +163,15 @@
for (int rocBankTag = this.getMinRocBankTag();
rocBankTag <= this.getMaxRocBankTag(); rocBankTag++) {
- logger.fine("Retrieving ROC bank: " + rocBankTag);
+ LOGGER.fine("Retrieving ROC bank: " + rocBankTag);
List<BaseStructure> matchingRocBanks = this.getMatchingBanks(event, rocBankTag);
if (matchingRocBanks == null) {
- logger.fine("ROC bank " + rocBankTag + " was not found!");
+ LOGGER.fine("ROC bank " + rocBankTag + " was not found!");
continue;
}
rocBanks.addAll(matchingRocBanks);
}
- logger.fine("Total ROC banks found: " + rocBanks.size());
+ LOGGER.fine("Total ROC banks found: " + rocBanks.size());
// Return false if ROC banks weren't found
if (rocBanks.isEmpty()) return false;
@@ -196,9 +190,9 @@
// Loop over the SVT ROC banks and process all samples
for (BaseStructure rocBank : rocBanks) {
- logger.fine("ROC bank: " + rocBank.toString());
-
- logger.fine("Processing ROC bank " + rocBank.getHeader().getTag());
+ LOGGER.fine("ROC bank: " + rocBank.toString());
+
+ LOGGER.fine("Processing ROC bank " + rocBank.getHeader().getTag());
// If the ROC bank doesn't contain any data, raise an exception
if (rocBank.getChildCount() == 0) {
@@ -208,25 +202,25 @@
// Get the data banks containing the SVT samples.
List<BaseStructure> dataBanks = rocBank.getChildren();
- logger.fine("Total data banks found: " + dataBanks.size());
+ LOGGER.fine("Total data banks found: " + dataBanks.size());
// Loop over all of the data banks contained by the ROC banks and
// processed them
for (BaseStructure dataBank : dataBanks) {
- logger.fine("Processing data bank: " + dataBank.toString());
+ LOGGER.fine("Processing data bank: " + dataBank.toString());
// Check that the bank is valid
if (!this.isValidDataBank(dataBank)) continue;
// Get the int data encapsulated by the data bank
int[] data = dataBank.getIntData();
- logger.fine("Total number of integers contained by the data bank: " + data.length);
+ LOGGER.fine("Total number of integers contained by the data bank: " + data.length);
// Check that a complete set of samples exist
int sampleCount = data.length - this.getDataHeaderLength()
- this.getDataTailLength();
- logger.fine("Total number of samples: " + sampleCount);
+ LOGGER.fine("Total number of samples: " + sampleCount);
if (sampleCount % 4 != 0) {
throw new SvtEvioReaderException("[ "
+ this.getClass().getSimpleName()
@@ -249,7 +243,7 @@
int multisampleHeaderData[] = new int[sampleCount];
int multisampleHeaderIndex = 0;
- logger.fine("sampleCount " + sampleCount);
+ LOGGER.fine("sampleCount " + sampleCount);
// Loop through all of the samples and make hits
for (int samplesN = 0; samplesN < sampleCount; samplesN += 4) {
@@ -257,11 +251,11 @@
int[] samples = new int[4];
System.arraycopy(data, this.getDataHeaderLength() + samplesN, samples, 0, samples.length);
- logger.fine("samplesN " + samplesN + " multisampleHeaderCount " + multisampleHeaderIndex);
+ LOGGER.fine("samplesN " + samplesN + " multisampleHeaderCount " + multisampleHeaderIndex);
if(SvtEvioUtils.isMultisampleHeader(samples))
- logger.fine("this is a header multisample for apv " + SvtEvioUtils.getApvFromMultiSample(samples) + " ch " + SvtEvioUtils.getChannelNumber(samples));
+ LOGGER.fine("this is a header multisample for apv " + SvtEvioUtils.getApvFromMultiSample(samples) + " ch " + SvtEvioUtils.getChannelNumber(samples));
else
- logger.fine("this is a data multisample for apv " + SvtEvioUtils.getApvFromMultiSample(samples) + " ch " + SvtEvioUtils.getChannelNumber(samples));
+ LOGGER.fine("this is a data multisample for apv " + SvtEvioUtils.getApvFromMultiSample(samples) + " ch " + SvtEvioUtils.getChannelNumber(samples));
// Extract data words from multisample header
@@ -272,7 +266,7 @@
rawHits.add(this.makeHit(samples));
}
- logger.fine("got " + multisampleHeaderIndex + " multisampleHeaderIndex for " + sampleCount + " sampleCount");
+ LOGGER.fine("got " + multisampleHeaderIndex + " multisampleHeaderIndex for " + sampleCount + " sampleCount");
// add multisample header tails to header data object
this.setMultiSampleHeaders(headerData, multisampleHeaderIndex, multisampleHeaderData);
@@ -280,7 +274,7 @@
}
}
- logger.fine("Total number of RawTrackerHits created: " + rawHits.size());
+ LOGGER.fine("Total number of RawTrackerHits created: " + rawHits.size());
// Turn on 64-bit cell ID.
int flag = LCIOUtil.bitSet(0, 31, true);
@@ -317,13 +311,13 @@
}
protected int extractMultisampleHeaderData(int[] samples, int index, int[] multisampleHeaderData) {
- logger.finest("extractMultisampleHeaderData: index " + index);
+ LOGGER.finest("extractMultisampleHeaderData: index " + index);
if( SvtEvioUtils.isMultisampleHeader(samples) && !SvtEvioUtils.isMultisampleTail(samples) ) {
- logger.finest("extractMultisampleHeaderData: this is a multisample header so add the words to index " + index);
+ LOGGER.finest("extractMultisampleHeaderData: this is a multisample header so add the words to index " + index);
System.arraycopy(samples, 0, multisampleHeaderData, index, samples.length);
return samples.length;
} else {
- logger.finest("extractMultisampleHeaderData: this is a NOT multisample header ");
+ LOGGER.finest("extractMultisampleHeaderData: this is a NOT multisample header ");
return 0;
}
}
Modified: java/trunk/evio/src/main/java/org/hps/evio/EcalEvioReader.java
=============================================================================
--- java/trunk/evio/src/main/java/org/hps/evio/EcalEvioReader.java (original)
+++ java/trunk/evio/src/main/java/org/hps/evio/EcalEvioReader.java Tue Oct 6 18:49:26 2015
@@ -5,7 +5,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.hps.conditions.database.DatabaseConditionsManager;
@@ -33,7 +32,6 @@
import org.lcsim.event.base.BaseRawTrackerHit;
import org.lcsim.geometry.Subdetector;
import org.lcsim.lcio.LCIOConstants;
-import org.lcsim.util.log.LogUtil;
/**
*
@@ -70,10 +68,7 @@
private final Map<List<Integer>, Integer> genericHitCount = new HashMap<List<Integer>, Integer>();
- private static final Logger logger = LogUtil.create(EcalEvioReader.class);
- static {
- logger.setLevel(Level.INFO);
- }
+ private static final Logger LOGGER = Logger.getLogger(EcalEvioReader.class.getPackage().getName());
public EcalEvioReader(int topBankTag, int botBankTag) {
this.topBankTag = topBankTag;
@@ -132,7 +127,7 @@
// CompositeData cdata = slotBank.getCompositeData();
if (slotBank.getHeader().getTag() != bankTag) {
bankTag = slotBank.getHeader().getTag();
- logger.info(String.format("ECal format tag: 0x%x\n", bankTag));
+ LOGGER.info(String.format("ECal format tag: 0x%x", bankTag));
}
switch (slotBank.getHeader().getTag()) {
case EventConstants.ECAL_WINDOW_BANK_TAG:
@@ -440,9 +435,9 @@
// Lowered the log level on these. Otherwise they print too much. --JM
if (count < 10) {
- logger.finer(String.format("Crate %d, slot %d, channel %d not found in map", hit.getCrate(), hit.getSlot(), hit.getChannel()));
+ LOGGER.finer(String.format("Crate %d, slot %d, channel %d not found in map", hit.getCrate(), hit.getSlot(), hit.getChannel()));
} else if (count == 10) {
- logger.fine(String.format("Crate %d, slot %d, channel %d not found in map: silencing further warnings for this channel", hit.getCrate(), hit.getSlot(), hit.getChannel()));
+ LOGGER.fine(String.format("Crate %d, slot %d, channel %d not found in map: silencing further warnings for this channel", hit.getCrate(), hit.getSlot(), hit.getChannel()));
}
}
Modified: java/trunk/evio/src/main/java/org/hps/evio/EvioToLcio.java
=============================================================================
--- java/trunk/evio/src/main/java/org/hps/evio/EvioToLcio.java (original)
+++ java/trunk/evio/src/main/java/org/hps/evio/EvioToLcio.java Tue Oct 6 18:49:26 2015
@@ -38,8 +38,6 @@
import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
import org.lcsim.event.EventHeader;
import org.lcsim.lcio.LCIOWriter;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* <p>
@@ -83,7 +81,7 @@
/**
* Setup logging for this class.
*/
- private static Logger LOGGER = LogUtil.create(EvioToLcio.class, new DefaultLogFormatter(), Level.INFO);
+ private static final Logger LOGGER = Logger.getLogger(EvioToLcio.class.getPackage().getName());
/**
* Run the EVIO to LCIO converter from the command line.
@@ -270,7 +268,7 @@
LOGGER.setLevel(level);
// Set log level on conditions manager.
- DatabaseConditionsManager.getInstance().setLogLevel(level);
+ Logger.getLogger(DatabaseConditionsManager.class.getPackage().getName()).setLevel(level);
}
// Add all extra arguments to the EVIO file list.
@@ -470,7 +468,6 @@
throw new RuntimeException("EVIO file " + evioFile.getPath() + " does not exist.");
}
LOGGER.info("Opening EVIO file " + evioFileName + " ...");
- LOGGER.getHandlers()[0].flush();
// Open the EVIO reader.
try {
Modified: java/trunk/evio/src/main/java/org/hps/evio/LCSimEngRunEventBuilder.java
=============================================================================
--- java/trunk/evio/src/main/java/org/hps/evio/LCSimEngRunEventBuilder.java (original)
+++ java/trunk/evio/src/main/java/org/hps/evio/LCSimEngRunEventBuilder.java Tue Oct 6 18:49:26 2015
@@ -29,8 +29,6 @@
import org.jlab.coda.jevio.EvioEvent;
import org.lcsim.conditions.ConditionsEvent;
import org.lcsim.event.EventHeader;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* This is the {@link org.hps.record.LCSimEventBuilder} implementation for the
@@ -51,8 +49,7 @@
/**
* Setup logger.
*/
- private static final Logger LOGGER = LogUtil.create(LCSimEngRunEventBuilder.class, new DefaultLogFormatter(),
- Level.INFO);
+ private static final Logger LOGGER = Logger.getLogger(LCSimEngRunEventBuilder.class.getPackage().getName());
/**
* EVIO processor for extracting EPICS data.
Modified: java/trunk/evio/src/main/java/org/hps/evio/LCSimTestRunEventBuilder.java
=============================================================================
--- java/trunk/evio/src/main/java/org/hps/evio/LCSimTestRunEventBuilder.java (original)
+++ java/trunk/evio/src/main/java/org/hps/evio/LCSimTestRunEventBuilder.java Tue Oct 6 18:49:26 2015
@@ -5,6 +5,7 @@
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
+
import org.hps.record.LCSimEventBuilder;
import org.hps.record.evio.EvioEventConstants;
import org.hps.record.evio.EvioEventUtilities;
@@ -18,8 +19,6 @@
import org.lcsim.conditions.ConditionsManager;
import org.lcsim.event.EventHeader;
import org.lcsim.event.base.BaseLCSimEvent;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Build LCSim events from Test Run 2012 EVIO data.
@@ -34,7 +33,7 @@
protected long time = 0; //most recent event time (ns), taken from prestart and end events, and trigger banks (if any)
protected int sspCrateBankTag = 0x1; //bank ID of the crate containing the SSP
protected int sspBankTag = 0xe106; //SSP bank's tag
- protected static Logger LOGGER = LogUtil.create(LCSimTestRunEventBuilder.class, new DefaultLogFormatter(), Level.INFO);
+ protected static final Logger LOGGER = Logger.getLogger(LCSimTestRunEventBuilder.class.getPackage().getName());
protected List<IntBankDefinition> intBanks = null;
public LCSimTestRunEventBuilder() {
@@ -42,7 +41,6 @@
svtReader = new TestRunSvtEvioReader();
intBanks = new ArrayList<IntBankDefinition>();
intBanks.add(new IntBankDefinition(TestRunTriggerData.class, new int[]{sspCrateBankTag, sspBankTag}));
- LOGGER.setLevel(Level.FINE);
}
public void setEcalHitCollectionName(String ecalHitCollectionName) {
Modified: java/trunk/evio/src/main/java/org/hps/evio/SvtEventHeaderChecker.java
=============================================================================
--- java/trunk/evio/src/main/java/org/hps/evio/SvtEventHeaderChecker.java (original)
+++ java/trunk/evio/src/main/java/org/hps/evio/SvtEventHeaderChecker.java Tue Oct 6 18:49:26 2015
@@ -8,10 +8,14 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.hps.evio.SvtEvioExceptions.*;
+import org.hps.evio.SvtEvioExceptions.SvtEvioHeaderApvBufferAddressException;
+import org.hps.evio.SvtEvioExceptions.SvtEvioHeaderApvFrameCountException;
+import org.hps.evio.SvtEvioExceptions.SvtEvioHeaderApvReadErrorException;
+import org.hps.evio.SvtEvioExceptions.SvtEvioHeaderException;
+import org.hps.evio.SvtEvioExceptions.SvtEvioHeaderMultisampleErrorBitException;
+import org.hps.evio.SvtEvioExceptions.SvtEvioHeaderOFErrorException;
+import org.hps.evio.SvtEvioExceptions.SvtEvioHeaderSkipCountException;
import org.hps.record.svt.SvtHeaderDataInfo;
-import org.hps.util.BasicLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
*
@@ -22,7 +26,7 @@
*/
public class SvtEventHeaderChecker {
- private static Logger logger = LogUtil.create(SvtEventHeaderChecker.class.getSimpleName(), new BasicLogFormatter(), Level.INFO);
+ private static final Logger LOGGER = Logger.getLogger(SvtEventHeaderChecker.class.getPackage().getName());
/**
* Check the integrity of the SVT header information.
@@ -30,7 +34,7 @@
* @throws SvtEvioHeaderException
*/
public static void checkSvtHeaders(List<SvtHeaderDataInfo> headers) throws SvtEvioHeaderException {
- logger.fine("check " + headers.size() + " headers ");
+ LOGGER.fine("check " + headers.size() + " headers ");
int[] bufferAddresses = new int[6];
int[] firstFrameCounts = new int[6];
boolean firstHeader = true;
@@ -41,13 +45,13 @@
int count;
int multisampleHeaderTailerrorBit;
for( SvtHeaderDataInfo headerDataInfo : headers ) {
- logger.fine("checking header: " + headerDataInfo.toString());
+ LOGGER.fine("checking header: " + headerDataInfo.toString());
// Check the multisample header information
int nMultisampleHeaders = headerDataInfo.getNumberOfMultisampleHeaders();
for(int iMultisampleHeader = 0; iMultisampleHeader < nMultisampleHeaders; iMultisampleHeader++) {
- logger.fine("iMultisampleHeader " + iMultisampleHeader);
+ LOGGER.fine("iMultisampleHeader " + iMultisampleHeader);
multisampleHeader = SvtHeaderDataInfo.getMultisampleHeader(iMultisampleHeader, headerDataInfo);
@@ -80,8 +84,8 @@
}
// print debug
- if(logger.getLevel().intValue() >= Level.FINE.intValue()) {
- logger.fine(getMultisampleDebugString(headerDataInfo, SvtEvioUtils.getMultisampleTailWord(multisampleHeader)) +
+ if(LOGGER.isLoggable(Level.FINE)) {
+ LOGGER.fine(getMultisampleDebugString(headerDataInfo, SvtEvioUtils.getMultisampleTailWord(multisampleHeader)) +
getDebugString(bufAddresses, frameCounts, readError));
}
@@ -120,7 +124,7 @@
count = -1;
for (int iFrame=0; iFrame<frameCounts.length; ++iFrame) {
- logger.fine("frame count " + iFrame + " " + frameCounts[iFrame] + " ( " + Integer.toHexString( frameCounts[iFrame]) + " )");
+ LOGGER.fine("frame count " + iFrame + " " + frameCounts[iFrame] + " ( " + Integer.toHexString( frameCounts[iFrame]) + " )");
if( frameCounts[iFrame] > 15 || (count < 15 && frameCounts[iFrame] < count) || ( count == 15 && frameCounts[iFrame] != 0 ) ) {
throw new SvtEvioHeaderApvFrameCountException("The APV frame counts in this events are invalid " +
@@ -131,7 +135,7 @@
}
for (int iReadError=0; iReadError<readError.length; ++iReadError) {
- logger.fine("read error " + iReadError + " " + readError[iReadError] + " ( " + Integer.toHexString( readError[iReadError]) + " )");
+ LOGGER.fine("read error " + iReadError + " " + readError[iReadError] + " ( " + Integer.toHexString( readError[iReadError]) + " )");
if( readError[iReadError] != 1) {// active low
throw new SvtEvioHeaderApvReadErrorException("Read error occurred " +
getMultisampleDebugString(headerDataInfo, SvtEvioUtils.getMultisampleTailWord(multisampleHeader)) +
@@ -156,11 +160,11 @@
public static void checkSvtHeaderData(SvtHeaderDataInfo header) throws SvtEvioHeaderException {
int tail = header.getTail();
- if(logger.getLevel().intValue() >= Level.FINE.intValue()) {
- logger.fine("checkSvtHeaderData tail " + tail + "( " + Integer.toHexString(tail) + " )");
- logger.fine("checkSvtHeaderData errorbit " + Integer.toHexString(SvtEvioUtils.getSvtTailSyncErrorBit(tail)));
- logger.fine("checkSvtHeaderData OFerrorbit " + Integer.toHexString(SvtEvioUtils.getSvtTailOFErrorBit(tail)));
- logger.fine("checkSvtHeaderData skipcount " + Integer.toHexString(SvtEvioUtils.getSvtTailMultisampleSkipCount(tail)));
+ if(LOGGER.getLevel().intValue() >= Level.FINE.intValue()) {
+ LOGGER.fine("checkSvtHeaderData tail " + tail + "( " + Integer.toHexString(tail) + " )");
+ LOGGER.fine("checkSvtHeaderData errorbit " + Integer.toHexString(SvtEvioUtils.getSvtTailSyncErrorBit(tail)));
+ LOGGER.fine("checkSvtHeaderData OFerrorbit " + Integer.toHexString(SvtEvioUtils.getSvtTailOFErrorBit(tail)));
+ LOGGER.fine("checkSvtHeaderData skipcount " + Integer.toHexString(SvtEvioUtils.getSvtTailMultisampleSkipCount(tail)));
}
if( SvtEvioUtils.getSvtTailSyncErrorBit(tail) != 0) {
throw new SvtEvioExceptions.SvtEvioHeaderSyncErrorException("This SVT header had a SyncError " + header.toString());
@@ -171,7 +175,7 @@
else if( SvtEvioUtils.getSvtTailMultisampleSkipCount(tail) != 0) {
throw new SvtEvioHeaderSkipCountException("This header had a skipCount " + SvtEvioUtils.getSvtTailMultisampleSkipCount(tail) + " error " + header.toString());
}
- logger.fine("checkSvtHeaderData passed all I guess");
+ LOGGER.fine("checkSvtHeaderData passed all I guess");
}
Modified: java/trunk/evio/src/main/java/org/hps/evio/SvtEvioReader.java
=============================================================================
--- java/trunk/evio/src/main/java/org/hps/evio/SvtEvioReader.java (original)
+++ java/trunk/evio/src/main/java/org/hps/evio/SvtEvioReader.java Tue Oct 6 18:49:26 2015
@@ -98,7 +98,7 @@
for (HpsSiSensor sensor : sensors) {
Pair<Integer, Integer> daqPair
= new Pair<Integer, Integer>(sensor.getFebID(), sensor.getFebHybridID());
- logger.fine("FEB ID: " + sensor.getFebID()
+ LOGGER.fine("FEB ID: " + sensor.getFebID()
+ " Hybrid ID: " + sensor.getFebHybridID());
daqPairToSensor.put(daqPair, sensor);
}
Modified: java/trunk/evio/src/test/java/org/hps/evio/SvtEvioReaderTest.java
=============================================================================
--- java/trunk/evio/src/test/java/org/hps/evio/SvtEvioReaderTest.java (original)
+++ java/trunk/evio/src/test/java/org/hps/evio/SvtEvioReaderTest.java Tue Oct 6 18:49:26 2015
@@ -1,23 +1,18 @@
package org.hps.evio;
+
+import java.io.File;
+import java.net.URL;
+import java.util.logging.Logger;
import junit.framework.TestCase;
-import java.io.File;
-import java.net.URL;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.jlab.coda.jevio.EvioReader;
-import org.jlab.coda.jevio.EvioEvent;
-
-import org.lcsim.event.EventHeader;
+import org.hps.conditions.database.DatabaseConditionsManager;
+import org.hps.record.LCSimEventBuilder;
+import org.hps.record.evio.EvioEventUtilities;
+import org.jlab.coda.jevio.EvioEvent;
+import org.jlab.coda.jevio.EvioReader;
+import org.lcsim.event.EventHeader;
import org.lcsim.util.cache.FileCache;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
-
-import org.hps.record.evio.EvioEventUtilities;
-import org.hps.record.LCSimEventBuilder;
-import org.hps.conditions.database.DatabaseConditionsManager;
/**
* Test used to check the EVIO reader that will be used for the engineering
@@ -28,8 +23,7 @@
public class SvtEvioReaderTest extends TestCase {
// Initialize the logger
- protected static Logger logger = LogUtil.create(AbstractSvtEvioReader.class.getName(),
- new DefaultLogFormatter(), Level.INFO);
+ protected static Logger LOGGER = Logger.getLogger(SvtEvioReaderTest.class.getPackage().getName());
public void testSvtEvioReaderTest() throws Exception {
@@ -38,7 +32,7 @@
File evioFile = fileCache.getCachedFile(
new URL("http://www.lcsim.org/test/hps-java/svt_evio_reader_test.evio"));
- logger.info("Opening file " + evioFile);
+ LOGGER.info("Opening file " + evioFile);
// Instantiate the EVIO reader and open the file
EvioReader evioReader = new EvioReader(evioFile);
@@ -55,7 +49,7 @@
// Check that the file contains the expected number of events
int eventCount = evioReader.getEventCount();
- logger.info("File " + evioFile + " contains " + eventCount + " events.");
+ LOGGER.info("File " + evioFile + " contains " + eventCount + " events.");
// Loop through the EVIO events and process them.
@@ -65,10 +59,10 @@
// Only process physics events
if (!EvioEventUtilities.isPhysicsEvent(evioEvent)) continue;
- logger.info("Found physics event.");
+ LOGGER.info("Found physics event.");
EventHeader lcsimEvent = eventBuilder.makeLCSimEvent(evioEvent);
- logger.info("Created LCSim event # " + lcsimEvent.getEventNumber());
+ LOGGER.info("Created LCSim event # " + lcsimEvent.getEventNumber());
// Process the event using the SVT evio reader
svtReader.processEvent(evioEvent, lcsimEvent);
Modified: java/trunk/integration-tests/src/test/java/org/hps/APrimeReconTest.java
=============================================================================
--- java/trunk/integration-tests/src/test/java/org/hps/APrimeReconTest.java (original)
+++ java/trunk/integration-tests/src/test/java/org/hps/APrimeReconTest.java Tue Oct 6 18:49:26 2015
@@ -39,7 +39,6 @@
File inputFile = cache.getCachedFile(new URL("http://www.lcsim.org/test/hps-java/APrimeReconTest/" + fileName));
// Run the reconstruction.
JobManager job = new JobManager();
- DatabaseConditionsManager.getInstance().setLogLevel(Level.WARNING);
File outputFile = new TestOutputFile(this.getClass().getSimpleName() + File.separator + this.getClass().getSimpleName() + "_recon");
job.addVariableDefinition("outputFile", outputFile.getPath());
job.addInputFile(inputFile);
Modified: java/trunk/integration-tests/src/test/java/org/hps/MCFilteredReconTest.java
=============================================================================
--- java/trunk/integration-tests/src/test/java/org/hps/MCFilteredReconTest.java (original)
+++ java/trunk/integration-tests/src/test/java/org/hps/MCFilteredReconTest.java Tue Oct 6 18:49:26 2015
@@ -4,11 +4,9 @@
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
import junit.framework.TestCase;
-import org.hps.conditions.database.DatabaseConditionsManager;
import org.hps.job.JobManager;
import org.hps.readout.ecal.FADCEcalReadoutDriver;
import org.hps.users.meeg.FilterMCBunches;
@@ -66,7 +64,6 @@
// 2) Run readout simulation.
JobManager job = new JobManager();
- DatabaseConditionsManager.getInstance().setLogLevel(Level.WARNING);
File readoutOutputFile = new TestOutputFile(this.getClass().getSimpleName() + File.separator + this.getClass().getSimpleName() + "_readout");
job.addVariableDefinition("outputFile", readoutOutputFile.getPath());
job.addInputFile(filteredOutputFile);
Modified: java/trunk/integration-tests/src/test/java/org/hps/MockDataReconTest.java
=============================================================================
--- java/trunk/integration-tests/src/test/java/org/hps/MockDataReconTest.java (original)
+++ java/trunk/integration-tests/src/test/java/org/hps/MockDataReconTest.java Tue Oct 6 18:49:26 2015
@@ -94,7 +94,6 @@
System.out.println("running recon using steering resource " + steeringResource);
JobManager jobManager = new JobManager();
- DatabaseConditionsManager.getInstance().setLogLevel(Level.WARNING);
jobManager.addVariableDefinition("outputFile", outputFile.getPath());
jobManager.addInputFile(mockDataFile);
jobManager.setup(steeringResource);
Modified: java/trunk/integration-tests/src/test/java/org/hps/ReadoutNoPileupTest.java
=============================================================================
--- java/trunk/integration-tests/src/test/java/org/hps/ReadoutNoPileupTest.java (original)
+++ java/trunk/integration-tests/src/test/java/org/hps/ReadoutNoPileupTest.java Tue Oct 6 18:49:26 2015
@@ -37,7 +37,6 @@
File inputFile = cache.getCachedFile(new URL("http://www.lcsim.org/test/hps-java/ReadoutNoPileupTest.slcio"));
JobManager job = new JobManager();
- DatabaseConditionsManager.getInstance().setLogLevel(Level.WARNING);
job.addInputFile(inputFile);
File outputFile = new TestOutputFile(this.getClass().getSimpleName() + File.separator + this.getClass().getSimpleName() + "_readout");
job.addVariableDefinition("outputFile", outputFile.getPath());
Modified: java/trunk/integration-tests/src/test/java/org/hps/ReadoutToEvioTest.java
=============================================================================
--- java/trunk/integration-tests/src/test/java/org/hps/ReadoutToEvioTest.java (original)
+++ java/trunk/integration-tests/src/test/java/org/hps/ReadoutToEvioTest.java Tue Oct 6 18:49:26 2015
@@ -34,7 +34,6 @@
File inputFile = cache.getCachedFile(new URL("http://www.lcsim.org/test/hps-java/ReadoutToEvioTest.slcio"));
JobManager job = new JobManager();
- DatabaseConditionsManager.getInstance().setLogLevel(Level.WARNING);
job.addInputFile(inputFile);
File outputFile = new TestOutputFile(this.getClass().getSimpleName() + File.separator + this.getClass().getSimpleName() + "_readout");
job.addVariableDefinition("outputFile", outputFile.getPath());
Modified: java/trunk/integration-tests/src/test/java/org/hps/ReadoutToLcioTest.java
=============================================================================
--- java/trunk/integration-tests/src/test/java/org/hps/ReadoutToLcioTest.java (original)
+++ java/trunk/integration-tests/src/test/java/org/hps/ReadoutToLcioTest.java Tue Oct 6 18:49:26 2015
@@ -35,7 +35,6 @@
File inputFile = cache.getCachedFile(new URL("http://www.lcsim.org/test/hps-java/ReadoutToLcioTest.slcio"));
JobManager job = new JobManager();
- DatabaseConditionsManager.getInstance().setLogLevel(Level.WARNING);
job.addInputFile(inputFile);
File outputFile = new TestOutputFile(this.getClass().getSimpleName() + File.separator + this.getClass().getSimpleName());
job.addVariableDefinition("outputFile", outputFile.getPath());
Modified: java/trunk/integration-tests/src/test/java/org/hps/SimpleMCReconTest.java
=============================================================================
--- java/trunk/integration-tests/src/test/java/org/hps/SimpleMCReconTest.java (original)
+++ java/trunk/integration-tests/src/test/java/org/hps/SimpleMCReconTest.java Tue Oct 6 18:49:26 2015
@@ -34,7 +34,6 @@
// Run the reconstruction.
JobManager job = new JobManager();
- DatabaseConditionsManager.getInstance().setLogLevel(Level.WARNING);
File outputFile = new TestOutputFile(this.getClass().getSimpleName() + File.separator + this.getClass().getSimpleName() + "_recon");
job.addVariableDefinition("outputFile", outputFile.getPath());
job.addInputFile(inputFile);
Modified: java/trunk/integration-tests/src/test/java/org/hps/SimpleSvtReadoutTest.java
=============================================================================
--- java/trunk/integration-tests/src/test/java/org/hps/SimpleSvtReadoutTest.java (original)
+++ java/trunk/integration-tests/src/test/java/org/hps/SimpleSvtReadoutTest.java Tue Oct 6 18:49:26 2015
@@ -47,7 +47,6 @@
FinalCheckDriver checker = new FinalCheckDriver();
JobManager job = new JobManager();
- DatabaseConditionsManager.getInstance().setLogLevel(Level.WARNING);
job.addInputFile(inputFile);
job.addVariableDefinition("outputFile", outputFile.getPath());
job.setup("/org/hps/steering/readout/HPS2014TruthReadoutToLcio.lcsim");
Modified: java/trunk/integration-tests/src/test/java/org/hps/SteeringFilesTest.java
=============================================================================
--- java/trunk/integration-tests/src/test/java/org/hps/SteeringFilesTest.java (original)
+++ java/trunk/integration-tests/src/test/java/org/hps/SteeringFilesTest.java Tue Oct 6 18:49:26 2015
@@ -111,7 +111,6 @@
* @throws Exception If there was an error initializing from one of the steering files.
*/
public void testSteeringFiles() throws Exception {
- DatabaseConditionsManager.getInstance().setLogLevel(Level.SEVERE);
final List<String> steeringResources = SteeringFileCatalog.find();
for (final String steeringResource : steeringResources) {
if (!SKIP_STEERING_FILES.contains(steeringResource)) {
Modified: java/trunk/integration-tests/src/test/java/org/hps/TestRunReadoutToEvioTest.java
=============================================================================
--- java/trunk/integration-tests/src/test/java/org/hps/TestRunReadoutToEvioTest.java (original)
+++ java/trunk/integration-tests/src/test/java/org/hps/TestRunReadoutToEvioTest.java Tue Oct 6 18:49:26 2015
@@ -30,7 +30,6 @@
File inputFile = cache.getCachedFile(new URL("http://www.lcsim.org/test/hps-java/TestRunReadoutToEvioTest.slcio"));
JobManager job = new JobManager();
- DatabaseConditionsManager.getInstance().setLogLevel(Level.WARNING);
DatabaseConditionsManager conditionsManager = DatabaseConditionsManager.getInstance();
DatabaseConditionsManager.getInstance().setDetector("HPS-TestRun-v5", 1351);
conditionsManager.freeze();
Added: java/trunk/logging/pom.xml
=============================================================================
--- java/trunk/logging/pom.xml (added)
+++ java/trunk/logging/pom.xml Tue Oct 6 18:49:26 2015
@@ -0,0 +1,17 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>hps-logging</artifactId>
+ <name>logging</name>
+ <description>global logging configuration</description>
+ <parent>
+ <groupId>org.hps</groupId>
+ <artifactId>hps-parent</artifactId>
+ <relativePath>../parent/pom.xml</relativePath>
+ <version>3.4.1-SNAPSHOT</version>
+ </parent>
+ <scm>
+ <url>http://java.freehep.org/svn/repos/hps/list/java/trunk/logging/</url>
+ <connection>scm:svn:svn://svn.freehep.org/hps/java/trunk/logging/</connection>
+ <developerConnection>scm:svn:svn://svn.freehep.org/hps/java/trunk/logging/</developerConnection>
+ </scm>
+</project>
Added: java/trunk/logging/src/main/java/org/hps/logging/config/DefaultLoggingConfig.java
=============================================================================
--- java/trunk/logging/src/main/java/org/hps/logging/config/DefaultLoggingConfig.java (added)
+++ java/trunk/logging/src/main/java/org/hps/logging/config/DefaultLoggingConfig.java Tue Oct 6 18:49:26 2015
@@ -0,0 +1,33 @@
+package org.hps.logging.config;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.logging.LogManager;
+
+/**
+ * Read the default logging configuration and load it into the global log manager.
+ *
+ * @author Jeremy McCormick, SLAC
+ */
+public class DefaultLoggingConfig {
+
+ /**
+ * Class constructor which reads in a logging properties file from a classpath resource.
+ * <p>
+ * The default configuration will only be activated if there is no file or class specified
+ * from a system property.
+ */
+ public DefaultLoggingConfig() {
+ // Activate the default config if the logging system properties are not set.
+ if (System.getProperty("java.util.logging.config.class") == null
+ && System.getProperty("java.util.logging.config.file") == null) {
+ InputStream inputStream = DefaultLoggingConfig.class.getResourceAsStream("logging.properties");
+ try {
+ LogManager.getLogManager().readConfiguration(inputStream);
+ } catch (SecurityException | IOException e) {
+ throw new RuntimeException("Initialization of default logging configuration failed.", e);
+ }
+ }
+ }
+
+}
Added: java/trunk/logging/src/main/resources/org/hps/logging/config/logging.properties
=============================================================================
--- java/trunk/logging/src/main/resources/org/hps/logging/config/logging.properties (added)
+++ java/trunk/logging/src/main/resources/org/hps/logging/config/logging.properties Tue Oct 6 18:49:26 2015
@@ -0,0 +1,68 @@
+#
+# Global logging configuration for HPS Java packages.
+#
+# Jeremy McCormick, SLAC
+#
+
+# default global level
+.level = WARNING
+
+# default handler which prints to console
+handlers = java.util.logging.ConsoleHandler
+
+# prints level (4$), message (5$), timestamp (1$), source (2$) and (optionally) an exception (6$).
+java.util.logging.SimpleFormatter.format = %4$s: %5$s [%1$tc] %2$s%6$s%n
+
+# configure the console handler
+java.util.logging.ConsoleHandler.level = ALL
+java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter
+
+# conditions
+org.hps.conditions.api.level = WARNING
+org.hps.conditions.database.level = WARNING
+org.hps.conditions.cli.level = INFO
+org.hps.conditions.ecal.level = WARNING
+org.hps.conditions.svt.level = WARNING
+
+# monitoring-drivers
+org.hps.monitoring.drivers.svt.level = INFO
+org.hps.monitoring.plotting.level = INFO
+
+# evio
+org.hps.evio.level = INFO
+
+# analysis
+org.hps.analysis.trigger.level = INFO
+org.hps.analysis.dataquality.level = INFO
+
+# crawler
+org.hps.crawler.level = INFO
+
+# ecal-recon
+org.hps.recon.ecal.level = WARNING
+org.hps.recon.ecal.cluster.level = WARNING
+
+# recon
+org.hps.recon.filtering.level = WARNING
+
+# record-util
+org.hps.record.epics.level = WARNING
+org.hps.record.evio.level = WARNING
+org.hps.record.scalers.level = WARNING
+org.hps.record.triggerbank.level = WARNING
+
+# tracking
+org.hps.recon.tracking.level = WARNING
+org.hps.recon.tracking.gbl.level = WARNING
+
+# run-database
+org.hps.run.database.level = INFO
+
+# monitoring-application
+org.hps.monitoring.application.model.level = WARNING
+org.hps.monitoring.application.level = ALL
+
+# detector-model
+org.lcsim.detector.converter.compact.level = INFO
+org.lcsim.geometry.compact.converter.level = INFO
+org.hps.detector.svt.level = ALL
Modified: java/trunk/monitoring-app/src/main/java/org/hps/monitoring/application/model/AbstractModel.java
=============================================================================
--- java/trunk/monitoring-app/src/main/java/org/hps/monitoring/application/model/AbstractModel.java (original)
+++ java/trunk/monitoring-app/src/main/java/org/hps/monitoring/application/model/AbstractModel.java Tue Oct 6 18:49:26 2015
@@ -15,9 +15,6 @@
import javassist.Modifier;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
-
/**
* An abstract class which updates a set of listeners when there are property changes to a backing model.
*
@@ -28,8 +25,7 @@
/**
* Setup logging.
*/
- private static final Logger LOGGER = LogUtil.create(AbstractModel.class.getName(), new DefaultLogFormatter(),
- Level.INFO);
+ private static final Logger LOGGER = Logger.getLogger(AbstractModel.class.getPackage().getName());
/**
* This method will extract property names from a class, which in this package's conventions are statically
Modified: java/trunk/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/GblTrackingReconstructionPlots.java
=============================================================================
--- java/trunk/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/GblTrackingReconstructionPlots.java (original)
+++ java/trunk/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/GblTrackingReconstructionPlots.java Tue Oct 6 18:49:26 2015
@@ -1,4 +1,11 @@
package org.hps.monitoring.drivers.svt;
+
+import hep.aida.IAnalysisFactory;
+import hep.aida.IHistogram1D;
+import hep.aida.IPlotter;
+import hep.aida.IPlotterStyle;
+import hep.physics.vec.BasicHep3Vector;
+import hep.physics.vec.Hep3Vector;
import java.io.IOException;
import java.util.ArrayList;
@@ -8,33 +15,21 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import hep.aida.IAnalysisFactory;
-import hep.aida.IHistogram1D;
-import hep.aida.IPlotter;
-import hep.aida.IPlotterStyle;
-import hep.physics.vec.BasicHep3Vector;
-import hep.physics.vec.Hep3Vector;
-
import org.hps.analysis.ecal.HPSMCParticlePlotsDriver;
import org.hps.recon.tracking.TrackUtils;
-import org.hps.recon.tracking.gbl.HpsGblRefitter;
-import org.hps.util.BasicLogFormatter;
-import org.lcsim.constants.Constants;
import org.lcsim.event.EventHeader;
import org.lcsim.event.MCParticle;
import org.lcsim.event.Track;
-import org.lcsim.event.base.ParticleTypeClassifier;
import org.lcsim.fit.helicaltrack.HelicalTrackFit;
import org.lcsim.geometry.Detector;
import org.lcsim.recon.tracking.seedtracker.SeedCandidate;
import org.lcsim.recon.tracking.seedtracker.SeedTrack;
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
-import org.lcsim.util.log.LogUtil;
public class GblTrackingReconstructionPlots extends Driver {
private double _bfield;
- private static Logger logger = LogUtil.create(GblTrackingReconstructionPlots.class, new BasicLogFormatter());
+ private static Logger LOGGER = Logger.getLogger(GblTrackingReconstructionPlots.class.getPackage().getName());
private AIDA aida = AIDA.defaultInstance();
private String outputPlots = null;
private final String trackCollectionName = "MatchedTracks";
@@ -69,7 +64,7 @@
public GblTrackingReconstructionPlots() {
// TODO Auto-generated constructor stub
- logger.setLevel(Level.INFO);
+ LOGGER.setLevel(Level.INFO);
}
public void setOutputPlots(String output) {
@@ -168,14 +163,14 @@
if(event.hasCollection(Track.class, trackCollectionName)) {
tracks = event.get(Track.class, trackCollectionName);
} else {
- logger.warning("no seed track collection");
+ LOGGER.warning("no seed track collection");
tracks = new ArrayList<Track>();
}
List<Track> gblTracks;
if(event.hasCollection(Track.class, gblTrackCollectionName)) {
gblTracks = event.get(Track.class, gblTrackCollectionName);
} else {
- logger.warning("no gbl track collection");
+ LOGGER.warning("no gbl track collection");
gblTracks = new ArrayList<Track>();
}
@@ -185,7 +180,7 @@
mcparticles = event.get(MCParticle.class).get(0);
fsParticles = HPSMCParticlePlotsDriver.makeGenFSParticleList(mcparticles);
} else {
- logger.warning("no gbl track collection");
+ LOGGER.warning("no gbl track collection");
mcparticles = new ArrayList<MCParticle>();
fsParticles = new ArrayList<MCParticle>();
}
@@ -193,10 +188,10 @@
- logger.info("Number of Tracks = " + tracks.size());
- logger.info("Number of GBL Tracks = " + gblTracks.size());
- logger.info("Number of MC particles = " + mcparticles.size());
- logger.info("Number of FS MC particles = " + fsParticles.size());
+ LOGGER.info("Number of Tracks = " + tracks.size());
+ LOGGER.info("Number of GBL Tracks = " + gblTracks.size());
+ LOGGER.info("Number of MC particles = " + mcparticles.size());
+ LOGGER.info("Number of FS MC particles = " + fsParticles.size());
@@ -206,33 +201,33 @@
MCParticle part = TrackUtils.getMatchedTruthParticle(track);
trackTruthMatch.put(track, part);
if(part!=null) {
- logger.info("Match track with q " + track.getCharge() + " p " + track.getMomentum()[0] + "," + track.getMomentum()[1] + "," + track.getMomentum()[2]);
+ LOGGER.info("Match track with q " + track.getCharge() + " p " + track.getMomentum()[0] + "," + track.getMomentum()[1] + "," + track.getMomentum()[2]);
} else {
- logger.info("no match for track with q " + track.getCharge() + " p " + track.getMomentum()[0] + "," + track.getMomentum()[1] + "," + track.getMomentum()[2]);
+ LOGGER.info("no match for track with q " + track.getCharge() + " p " + track.getMomentum()[0] + "," + track.getMomentum()[1] + "," + track.getMomentum()[2]);
}
}
for(Track track : gblTracks) {
- logger.info("Track:");
+ LOGGER.info("Track:");
SeedTrack st = (SeedTrack)track;
SeedCandidate seed = st.getSeedCandidate();
HelicalTrackFit htf = seed.getHelix();
- logger.info(htf.toString());
+ LOGGER.info(htf.toString());
HelicalTrackFit pHTF = null;
double pTruth = -1.;
double pTrackTruth = -1.;
if(trackTruthMatch.get(track)==null) {
- logger.info("no truth mc particle for this track");
+ LOGGER.info("no truth mc particle for this track");
} else {
MCParticle part = trackTruthMatch.get(track);
pTruth = part.getMomentum().magnitude();
pHTF = TrackUtils.getHTF(part,Math.abs(_bfield));
pTrackTruth = pHTF.p(Math.abs(_bfield));
- logger.info("part: " + trackTruthMatch.get(track).getPDGID());
- logger.info("pHTF:");
- logger.info(pHTF.toString());
- logger.info("pTruth="+pTruth+" pTrackTruth="+pTrackTruth);
+ LOGGER.info("part: " + trackTruthMatch.get(track).getPDGID());
+ LOGGER.info("pHTF:");
+ LOGGER.info(pHTF.toString());
+ LOGGER.info("pTruth="+pTruth+" pTrackTruth="+pTrackTruth);
}
@@ -250,7 +245,7 @@
double phiGbl = track.getTrackStates().get(0).getPhi();
double slopeGbl = track.getTrackStates().get(0).getTanLambda();
double pGbl = getMag(track.getTrackStates().get(0).getMomentum());
- logger.info("pGbl="+pGbl);
+ LOGGER.info("pGbl="+pGbl);
if(pHTF!=null) {
double d0Truth = pHTF.dca();
@@ -258,7 +253,7 @@
double CTruth = pHTF.curvature();
double phiTruth = pHTF.phi0();
double slopeTruth = pHTF.slope();
- logger.info("d0 " + d0 + " d0 trugh " + d0Truth);
+ LOGGER.info("d0 " + d0 + " d0 trugh " + d0Truth);
d0Diff.fill(d0-d0Truth);
z0Diff.fill(z0-z0Truth);
phiDiff.fill(phi-phiTruth);
@@ -308,7 +303,7 @@
try {
aida.saveAs(outputPlots);
} catch (IOException ex) {
- logger.log(Level.SEVERE,"aid problem saving file",ex);
+ LOGGER.log(Level.SEVERE,"aid problem saving file",ex);
}
}
//plotterFrame.dispose();
Modified: java/trunk/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SampleZeroHVBiasChecker.java
=============================================================================
--- java/trunk/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SampleZeroHVBiasChecker.java (original)
+++ java/trunk/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/svt/SampleZeroHVBiasChecker.java Tue Oct 6 18:49:26 2015
@@ -20,7 +20,6 @@
import java.util.logging.Logger;
import org.hps.analysis.trigger.util.TriggerDataUtils;
-import org.hps.conditions.api.ConditionsRecord.ConditionsRecordCollection;
import org.hps.conditions.database.DatabaseConditionsManager;
import org.hps.conditions.run.RunSpreadsheet;
import org.hps.conditions.svt.SvtBiasConditionsLoader;
@@ -32,7 +31,6 @@
import org.hps.record.epics.EpicsData;
import org.hps.record.triggerbank.AbstractIntData;
import org.hps.record.triggerbank.HeadBankData;
-import org.hps.util.BasicLogFormatter;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.event.EventHeader;
import org.lcsim.event.GenericObject;
@@ -40,7 +38,6 @@
import org.lcsim.geometry.Detector;
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
-import org.lcsim.util.log.LogUtil;
/**
* @author Per Hansson Adrian <[log in to unmask]>
@@ -49,7 +46,7 @@
public class SampleZeroHVBiasChecker extends Driver {
// Logger
- Logger logger = LogUtil.create(getName(), new BasicLogFormatter(), Level.INFO);
+ private static Logger LOGGER = Logger.getLogger(SampleZeroHVBiasChecker.class.getPackage().getName());
static {
hep.aida.jfree.AnalysisFactory.register();
@@ -226,10 +223,10 @@
//SvtBiasConditionsLoader.setTimeOffset(Calendar.)
runRanges = SvtBiasMyaDataReader.findOverlappingRanges(runmap, ranges);
- logger.info("Print all " + runRanges.size() + " bias run ranges:");
+ LOGGER.info("Print all " + runRanges.size() + " bias run ranges:");
for (SvtBiasRunRange r : runRanges) {
if (debug) {
- logger.info(r.toString());
+ LOGGER.info(r.toString());
}
pWriter.println(r.toString());
}
@@ -276,11 +273,11 @@
epicsData = EpicsData.read(event);
if (epicsData != null) {
- logger.info(epicsData.toString());
+ LOGGER.info(epicsData.toString());
if (epicsData.getKeys().contains("SVT:bias:top:0:v_sens")) {
epicsBiasValue = epicsData.getValue("SVT:bias:top:0:v_sens");
- logger.info("epicsBiasValue = " + Double.toString(epicsBiasValue));
+ LOGGER.info("epicsBiasValue = " + Double.toString(epicsBiasValue));
if (epicsBiasValue > 178.0) {
hvOnEpics = true;
@@ -289,7 +286,7 @@
}
}
} else {
- logger.fine("no epics information in this event");
+ LOGGER.fine("no epics information in this event");
}
// Read the timestamp for the event
@@ -305,14 +302,14 @@
System.out.println("hvOnMya is " + (hvOnMya ? "ON" : "OFF") + " hvOnEpics " + (hvOnEpics ? "ON" : "OFF") + " hvOnConditions " + (hvOnConditions ? "ON" : "OFF") + " hvOnEventFlag " + (hvOnEventFlag ? "ON" : "OFF") + " for Run " + event.getRunNumber() + " Event " + event.getEventNumber() + " date " + newEventDate.toString() + " epoch " + newEventDate.getTime());
// check what the DB has
if (svtBiasConstants != null) {
- logger.info("there are " + svtBiasConstants.size() + " constants to search");
+ LOGGER.info("there are " + svtBiasConstants.size() + " constants to search");
for (SvtBiasConstant constant : svtBiasConstants) {
- logger.info("start " + constant.getStart() + " end " + constant.getEnd() + " value " + constant.getValue());
+ LOGGER.info("start " + constant.getStart() + " end " + constant.getEnd() + " value " + constant.getValue());
}
SvtBiasConstant constant = svtBiasConstants.find(newEventDate);
- logger.info(constant == null ? "No constant found!" : ("Found constant " + "start " + constant.getStart() + " end " + constant.getEnd() + " value " + constant.getValue()));
+ LOGGER.info(constant == null ? "No constant found!" : ("Found constant " + "start " + constant.getStart() + " end " + constant.getEnd() + " value " + constant.getValue()));
}
}
@@ -326,7 +323,7 @@
// only do this analysis where there is a date availabe.
if (eventDate != null) {
if (debug) {
- logger.info("eventDate " + eventDate.toString());
+ LOGGER.info("eventDate " + eventDate.toString());
}
eventCount++;
@@ -344,7 +341,7 @@
// print the cases where epics and run range do not agree
if (hvOnMya != hvOnEpics && epicsBiasValue > 0.) {
if (debug) {
- logger.warning("hvOnMya is " + (hvOnMya ? "ON" : "OFF") + " hvOnEpics " + (hvOnEpics ? "ON" : "OFF") + " for Run " + event.getRunNumber() + " Event " + event.getEventNumber() + " date " + eventDate.toString() + " epoch " + eventDate.getTime() + " hvOn " + (hvOnMya ? "YES" : "NO") + " hvOnEpics " + (hvOnEpics ? "YES" : "NO"));
+ LOGGER.warning("hvOnMya is " + (hvOnMya ? "ON" : "OFF") + " hvOnEpics " + (hvOnEpics ? "ON" : "OFF") + " for Run " + event.getRunNumber() + " Event " + event.getEventNumber() + " date " + eventDate.toString() + " epoch " + eventDate.getTime() + " hvOn " + (hvOnMya ? "YES" : "NO") + " hvOnEpics " + (hvOnEpics ? "YES" : "NO"));
}
pWriter.println("Run " + event.getRunNumber() + " Event " + event.getEventNumber() + " date " + eventDate.toString() + " epoch " + eventDate.getTime() + " hvOn " + (hvOnMya ? "YES" : "NO"));
eventCountEpicsDisagree++;
@@ -353,7 +350,7 @@
// print the cases where the HV is OFF
if (!hvOnMya) {
if (debug) {
- logger.info("Run " + event.getRunNumber() + " Event " + event.getEventNumber() + " date " + eventDate.toString() + " epoch " + eventDate.getTime() + " hvOnMya " + (hvOnMya ? "YES" : "NO") + " hvOnEpics " + (hvOnEpics ? "YES" : "NO"));
+ LOGGER.info("Run " + event.getRunNumber() + " Event " + event.getEventNumber() + " date " + eventDate.toString() + " epoch " + eventDate.getTime() + " hvOnMya " + (hvOnMya ? "YES" : "NO") + " hvOnEpics " + (hvOnEpics ? "YES" : "NO"));
}
pWriter.println("Run " + event.getRunNumber() + " Event " + event.getEventNumber() + " date " + eventDate.toString() + " epoch " + eventDate.getTime() + " hvOnMya " + (hvOnMya ? "YES" : "NO") + " hvOnEpics " + (hvOnEpics ? "YES" : "NO"));
eventCountHvOff++;
@@ -427,14 +424,14 @@
@Override
public void endOfData() {
- logger.info("eventCount " + Integer.toString(eventCount) + " eventCountHvOff " + Integer.toString(eventCountHvOff) + " eventCountEpicsDisagree " + Integer.toString(eventCountEpicsDisagree));
+ LOGGER.info("eventCount " + Integer.toString(eventCount) + " eventCountHvOff " + Integer.toString(eventCountHvOff) + " eventCountEpicsDisagree " + Integer.toString(eventCountEpicsDisagree));
pWriter.println("eventCount " + Integer.toString(eventCount) + " eventCountHvOff " + Integer.toString(eventCountHvOff) + " eventCountEpicsDisagree " + Integer.toString(eventCountEpicsDisagree));
try {
pWriter.close();
fWriter.close();
} catch (IOException ex) {
- logger.log(Level.SEVERE, null, ex);
+ LOGGER.log(Level.SEVERE, null, ex);
}
}
Modified: java/trunk/monitoring-util/src/main/java/org/hps/monitoring/plotting/ExportPdf.java
=============================================================================
--- java/trunk/monitoring-util/src/main/java/org/hps/monitoring/plotting/ExportPdf.java (original)
+++ java/trunk/monitoring-util/src/main/java/org/hps/monitoring/plotting/ExportPdf.java Tue Oct 6 18:49:26 2015
@@ -9,11 +9,7 @@
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;
-import java.util.logging.Level;
import java.util.logging.Logger;
-
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
import com.itextpdf.text.BadElementException;
import com.itextpdf.text.Document;
@@ -35,7 +31,7 @@
/**
* Setup logging.
*/
- private static Logger LOGGER = LogUtil.create(ExportPdf.class, new DefaultLogFormatter(), Level.ALL);
+ private static final Logger LOGGER = Logger.getLogger(ExportPdf.class.getPackage().getName());
/**
* Do not allow class instantiation.
Modified: java/trunk/parent/pom.xml
=============================================================================
--- java/trunk/parent/pom.xml (original)
+++ java/trunk/parent/pom.xml Tue Oct 6 18:49:26 2015
@@ -244,6 +244,11 @@
<artifactId>hps-job</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.hps</groupId>
+ <artifactId>hps-logging</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<!-- Next are external dependencies used in multiple modules. -->
<dependency>
<groupId>org.jlab.coda</groupId>
@@ -444,6 +449,10 @@
<name>org.lcsim.cacheDir</name>
<value>${org.lcsim.cacheDir}</value>
</property>
+ <property>
+ <name>java.util.logging.config.class</name>
+ <value>org.hps.logging.config.DefaultLoggingConfig</value>
+ </property>
</systemProperties>
</configuration>
</plugin>
Modified: java/trunk/pom.xml
=============================================================================
--- java/trunk/pom.xml (original)
+++ java/trunk/pom.xml Tue Oct 6 18:49:26 2015
@@ -139,6 +139,7 @@
<module>evio</module>
<module>integration-tests</module>
<module>job</module>
+ <module>logging</module>
<module>monitoring-util</module>
<module>monitoring-drivers</module>
<module>monitoring-app</module>
Modified: java/trunk/recon/src/main/java/org/hps/recon/filtering/SvtAlignmentFilter.java
=============================================================================
--- java/trunk/recon/src/main/java/org/hps/recon/filtering/SvtAlignmentFilter.java (original)
+++ java/trunk/recon/src/main/java/org/hps/recon/filtering/SvtAlignmentFilter.java Tue Oct 6 18:49:26 2015
@@ -7,16 +7,12 @@
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.hps.recon.tracking.TrackUtils;
-import org.hps.util.BasicLogFormatter;
import org.lcsim.event.Cluster;
import org.lcsim.event.EventHeader;
import org.lcsim.event.Track;
import org.lcsim.lcio.LCIOConstants;
-import org.lcsim.util.log.LogUtil;
/**
*
Modified: java/trunk/record-util/src/main/java/org/hps/record/epics/EpicsEvioProcessor.java
=============================================================================
--- java/trunk/record-util/src/main/java/org/hps/record/epics/EpicsEvioProcessor.java (original)
+++ java/trunk/record-util/src/main/java/org/hps/record/epics/EpicsEvioProcessor.java Tue Oct 6 18:49:26 2015
@@ -8,8 +8,6 @@
import org.hps.record.evio.EvioEventProcessor;
import org.jlab.coda.jevio.BaseStructure;
import org.jlab.coda.jevio.EvioEvent;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* This is an EVIO event processor that will read EPICS events (event tag 31) and turn them into {@link EpicsData}
@@ -22,8 +20,7 @@
/**
* Setup class logger.
*/
- private static final Logger LOGGER = LogUtil
- .create(EpicsEvioProcessor.class, new DefaultLogFormatter(), Level.INFO);
+ private static final Logger LOGGER = Logger.getLogger(EpicsEvioProcessor.class.getPackage().getName());
/**
* The current EPICS data object.
Modified: java/trunk/record-util/src/main/java/org/hps/record/epics/EpicsRunProcessor.java
=============================================================================
--- java/trunk/record-util/src/main/java/org/hps/record/epics/EpicsRunProcessor.java (original)
+++ java/trunk/record-util/src/main/java/org/hps/record/epics/EpicsRunProcessor.java Tue Oct 6 18:49:26 2015
@@ -4,13 +4,10 @@
import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.hps.record.evio.EvioEventProcessor;
import org.jlab.coda.jevio.EvioEvent;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Creates a list of EPICS data found in EVIO events across an entire job.
@@ -20,9 +17,9 @@
public final class EpicsRunProcessor extends EvioEventProcessor {
/**
- * Setup class logger.
+ * Initialize the logger.
*/
- private static final Logger LOGGER = LogUtil.create(EpicsRunProcessor.class, new DefaultLogFormatter(), Level.INFO);
+ private static final Logger LOGGER = Logger.getLogger(EpicsRunProcessor.class.getPackage().getName());
/**
* The current EPICS data block from the EVIO events (last one that was found).
Modified: java/trunk/record-util/src/main/java/org/hps/record/evio/EvioEventSkimmer.java
=============================================================================
--- java/trunk/record-util/src/main/java/org/hps/record/evio/EvioEventSkimmer.java (original)
+++ java/trunk/record-util/src/main/java/org/hps/record/evio/EvioEventSkimmer.java Tue Oct 6 18:49:26 2015
@@ -18,8 +18,6 @@
import org.jlab.coda.jevio.EventWriter;
import org.jlab.coda.jevio.EvioEvent;
import org.jlab.coda.jevio.EvioReader;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Skim EVIO events into a new file based on a list of event numbers to include.
@@ -30,9 +28,9 @@
public class EvioEventSkimmer {
/**
- * Setup the logger.
- */
- private static Logger LOGGER = LogUtil.create(EvioEventSkimmer.class, new DefaultLogFormatter(), Level.CONFIG);
+ * Initialize the logger.
+ */
+ private static final Logger LOGGER = Logger.getLogger(EvioEventSkimmer.class.getPackage().getName());
/**
* Define command line options.
Modified: java/trunk/record-util/src/main/java/org/hps/record/evio/EvioFileMetadataAdapter.java
=============================================================================
--- java/trunk/record-util/src/main/java/org/hps/record/evio/EvioFileMetadataAdapter.java (original)
+++ java/trunk/record-util/src/main/java/org/hps/record/evio/EvioFileMetadataAdapter.java Tue Oct 6 18:49:26 2015
@@ -3,7 +3,6 @@
import java.io.File;
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.freehep.record.loop.AbstractLoopListener;
@@ -12,20 +11,19 @@
import org.freehep.record.loop.RecordEvent;
import org.freehep.record.loop.RecordListener;
import org.jlab.coda.jevio.EvioEvent;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Loop adapter for creating a list of metadata from processed EVIO files.
*
* @author Jeremy McCormick, SLAC
*/
+// TODO: delete me
public class EvioFileMetadataAdapter extends AbstractLoopListener implements RecordListener, LoopListener {
/**
- * Setup logging.
+ * Initialize the logger.
*/
- private static Logger LOGGER = LogUtil.create(EvioFileMetadataProcessor.class, new DefaultLogFormatter(), Level.ALL);
+ private static final Logger LOGGER = Logger.getLogger(EvioFileMetadataAdapter.class.getPackage().getName());
/**
* The EVIO file currently being processed.
Modified: java/trunk/record-util/src/main/java/org/hps/record/evio/EvioFileMetadataProcessor.java
=============================================================================
--- java/trunk/record-util/src/main/java/org/hps/record/evio/EvioFileMetadataProcessor.java (original)
+++ java/trunk/record-util/src/main/java/org/hps/record/evio/EvioFileMetadataProcessor.java Tue Oct 6 18:49:26 2015
@@ -2,17 +2,17 @@
import java.io.File;
import java.util.Date;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.jlab.coda.jevio.EvioEvent;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
-// TODO: add doc
+// TODO: delete me
public class EvioFileMetadataProcessor extends EvioEventProcessor {
- private static Logger LOGGER = LogUtil.create(EvioFileMetadataProcessor.class, new DefaultLogFormatter(), Level.ALL);
+ /**
+ * Initialize the logger.
+ */
+ private static final Logger LOGGER = Logger.getLogger(EvioFileMetadataProcessor.class.getPackage().getName());
private File evioFile = null;
private Date startDate = null;
Modified: java/trunk/record-util/src/main/java/org/hps/record/evio/EvioFileProducer.java
=============================================================================
--- java/trunk/record-util/src/main/java/org/hps/record/evio/EvioFileProducer.java (original)
+++ java/trunk/record-util/src/main/java/org/hps/record/evio/EvioFileProducer.java Tue Oct 6 18:49:26 2015
@@ -27,8 +27,6 @@
import org.jlab.coda.jevio.EventWriter;
import org.jlab.coda.jevio.EvioEvent;
import org.jlab.coda.jevio.EvioReader;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* A command line utility for streaming EVIO files to an ET server.
@@ -78,12 +76,11 @@
* Minimum port number of ET server (lower port numbers not allowed).
*/
private static final int ET_PORT_MIN = 1024;
-
- /**
- * Setup the logger.
- */
- private static final Logger LOGGER = LogUtil
- .create(EvioFileProducer.class, new DefaultLogFormatter(), Level.CONFIG);
+
+ /**
+ * Initialize the logger.
+ */
+ private static final Logger LOGGER = Logger.getLogger(EvioFileProducer.class.getPackage().getName());
/**
* The command line options.
Modified: java/trunk/record-util/src/main/java/org/hps/record/evio/EvioFileUtilities.java
=============================================================================
--- java/trunk/record-util/src/main/java/org/hps/record/evio/EvioFileUtilities.java (original)
+++ java/trunk/record-util/src/main/java/org/hps/record/evio/EvioFileUtilities.java Tue Oct 6 18:49:26 2015
@@ -8,7 +8,6 @@
import org.jlab.coda.jevio.EvioException;
import org.jlab.coda.jevio.EvioReader;
-import org.lcsim.util.log.LogUtil;
/**
* A miscellaneous collection of EVIO file utility methods.
@@ -18,9 +17,9 @@
public final class EvioFileUtilities {
/**
- * Setup class logger.
+ * Initialize the logger.
*/
- private static final Logger LOGGER = LogUtil.create(EvioFileUtilities.class);
+ private static final Logger LOGGER = Logger.getLogger(EvioFileUtilities.class.getPackage().getName());
/**
* Milliseconds constant for conversion to/from second.
Modified: java/trunk/record-util/src/main/java/org/hps/record/evio/EvioLoopAdapter.java
=============================================================================
--- java/trunk/record-util/src/main/java/org/hps/record/evio/EvioLoopAdapter.java (original)
+++ java/trunk/record-util/src/main/java/org/hps/record/evio/EvioLoopAdapter.java Tue Oct 6 18:49:26 2015
@@ -2,7 +2,6 @@
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.freehep.record.loop.AbstractLoopListener;
@@ -11,8 +10,6 @@
import org.freehep.record.loop.RecordEvent;
import org.freehep.record.loop.RecordListener;
import org.jlab.coda.jevio.EvioEvent;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* A loop adapter for the {@link EvioLoop} which manages and activates a list of {@link EvioEventProcessor} objects.
@@ -22,9 +19,9 @@
public final class EvioLoopAdapter extends AbstractLoopListener implements RecordListener, LoopListener {
/**
- * Setup class logger.
+ * Initialize the logger.
*/
- private final Logger LOGGER = LogUtil.create(EvioLoopAdapter.class, new DefaultLogFormatter(), Level.ALL);
+ private static final Logger LOGGER = Logger.getLogger(EvioLoopAdapter.class.getPackage().getName());
/**
* List of event processors to activate.
Modified: java/trunk/record-util/src/main/java/org/hps/record/scalers/ScalersEvioProcessor.java
=============================================================================
--- java/trunk/record-util/src/main/java/org/hps/record/scalers/ScalersEvioProcessor.java (original)
+++ java/trunk/record-util/src/main/java/org/hps/record/scalers/ScalersEvioProcessor.java Tue Oct 6 18:49:26 2015
@@ -4,14 +4,10 @@
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.hps.record.evio.EvioEventProcessor;
-import org.hps.record.evio.EvioEventUtilities;
import org.jlab.coda.jevio.EvioEvent;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* This is an EVIO event processor for creating a {@link ScalerData} object from scaler bank data.
@@ -20,8 +16,10 @@
*/
public class ScalersEvioProcessor extends EvioEventProcessor {
- private static final Logger LOGGER = LogUtil.create(ScalersEvioProcessor.class, new DefaultLogFormatter(),
- Level.ALL);
+ /**
+ * Initialize the logger.
+ */
+ private static final Logger LOGGER = Logger.getLogger(ScalersEvioProcessor.class.getPackage().getName());
/**
* Currently cached ScalerData object which was created by the process method.
Modified: java/trunk/record-util/src/main/java/org/hps/record/triggerbank/SSPCluster.java
=============================================================================
--- java/trunk/record-util/src/main/java/org/hps/record/triggerbank/SSPCluster.java (original)
+++ java/trunk/record-util/src/main/java/org/hps/record/triggerbank/SSPCluster.java Tue Oct 6 18:49:26 2015
@@ -1,9 +1,8 @@
package org.hps.record.triggerbank;
-import java.util.logging.Level;
import java.util.logging.Logger;
-import org.lcsim.util.log.LogUtil;
+import org.hps.record.scalers.ScalersEvioProcessor;
/**
* Class <code>SSPCluster</code> stores all of the information on
@@ -26,11 +25,10 @@
private final int t;
private final double e;
- // Output potential errors or messages.
- private static Logger logger = LogUtil.create(SSPCluster.class);
- static {
- logger.setLevel(Level.WARNING);
- }
+ /**
+ * Initialize the logger.
+ */
+ private static final Logger LOGGER = Logger.getLogger(SSPCluster.class.getPackage().getName());
/**
* Creates a new <code>SSPCluster</code> object.
@@ -43,15 +41,15 @@
public SSPCluster(int ix, int iy, int energy, int hits, int time) {
// Make sure that the input values are valid.
if(ix == 0 || ix < -23 || ix > 23) {
- logger.warning(String.format("Received out-of-bounds ix value of %d.", ix));
+ LOGGER.warning(String.format("Received out-of-bounds ix value of %d.", ix));
} if(iy == 0 || iy < -5 || iy > 5) {
- logger.warning(String.format("Received out-of-bounds iy value of %d.", iy));
+ LOGGER.warning(String.format("Received out-of-bounds iy value of %d.", iy));
} if(energy < 0) {
- logger.warning("Received negative energy for cluster.");
+ LOGGER.warning("Received negative energy for cluster.");
} if(hits <= 0) {
- logger.warning("Received cluster with zero or fewer hits.");
+ LOGGER.warning("Received cluster with zero or fewer hits.");
} if(time < 0) {
- logger.warning("Received cluster with negative time.");
+ LOGGER.warning("Received cluster with negative time.");
}
// Define the cluster parameters.
@@ -62,7 +60,7 @@
this.n = hits;
// Indicate that the cluster was made.
- logger.fine(String.format("Constructed cluster at (%3d, %3d) at time %3d ns with energy %4d MeV and %d hits.",
+ LOGGER.fine(String.format("Constructed cluster at (%3d, %3d) at time %3d ns with energy %4d MeV and %d hits.",
ix, iy, time, energy, hits));
}
Modified: java/trunk/record-util/src/main/java/org/hps/record/triggerbank/SSPTrigger.java
=============================================================================
--- java/trunk/record-util/src/main/java/org/hps/record/triggerbank/SSPTrigger.java (original)
+++ java/trunk/record-util/src/main/java/org/hps/record/triggerbank/SSPTrigger.java Tue Oct 6 18:49:26 2015
@@ -1,9 +1,8 @@
package org.hps.record.triggerbank;
-import java.util.logging.Level;
import java.util.logging.Logger;
-import org.lcsim.util.log.LogUtil;
+import org.hps.record.scalers.ScalersEvioProcessor;
/**
* Class <code>SSPTrigger</code> represents the data output by the SSP
@@ -18,11 +17,10 @@
protected final int time;
protected final int data;
- // Logger to output status messages.
- protected static Logger logger = LogUtil.create(SSPTrigger.class);
- static {
- logger.setLevel(Level.WARNING);
- }
+ /**
+ * Initialize the logger.
+ */
+ private static final Logger LOGGER = Logger.getLogger(SSPTrigger.class.getPackage().getName());
/**
* Instantiates a new <code>SSPTrigger</code> with the indicated
@@ -34,7 +32,7 @@
public SSPTrigger(int type, int time, int data) {
// Log any issues with processing the trigger.
if(!SSPTriggerFactory.isKnownTriggerType(type)) {
- logger.warning(String.format("Trigger type %d is not recognized.", type));
+ LOGGER.warning(String.format("Trigger type %d is not recognized.", type));
}
// Store the trigger data.
@@ -43,7 +41,7 @@
this.data = data;
// Note that a trigger was made.
- logger.fine(String.format("Constructed trigger of type %d occurred at time %3d with data %d.",
+ LOGGER.fine(String.format("Constructed trigger of type %d occurred at time %3d with data %d.",
type, time, data));
}
Modified: java/trunk/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java
=============================================================================
--- java/trunk/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java (original)
+++ java/trunk/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java Tue Oct 6 18:49:26 2015
@@ -9,13 +9,10 @@
import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.hps.record.epics.EpicsData;
import org.hps.record.epics.EpicsHeader;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Implementation of database operations for EPICS data.
@@ -25,10 +22,9 @@
final class EpicsDataDaoImpl implements EpicsDataDao {
/**
- * Setup class logger.
- */
- private static final Logger LOGGER = LogUtil
- .create(EpicsDataDaoImpl.class, new DefaultLogFormatter(), Level.INFO);
+ * Initialize the logger.
+ */
+ private static final Logger LOGGER = Logger.getLogger(EpicsDataDaoImpl.class.getPackage().getName());
/**
* The database connection.
Modified: java/trunk/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java
=============================================================================
--- java/trunk/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java (original)
+++ java/trunk/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java Tue Oct 6 18:49:26 2015
@@ -9,7 +9,6 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.cli.CommandLine;
@@ -23,8 +22,6 @@
import org.hps.datacat.client.Dataset;
import org.hps.datacat.client.DatasetMetadata;
import org.hps.record.evio.EvioFileUtilities;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Command line tool for updating the run database from EVIO files registered in the data catalog.
@@ -56,10 +53,9 @@
}
/**
- * Setup the logger.
- */
- private static final Logger LOGGER = LogUtil.create(RunDatabaseCommandLine.class, new DefaultLogFormatter(),
- Level.ALL);
+ * Initialize the logger.
+ */
+ private static final Logger LOGGER = Logger.getLogger(RunDatabaseCommandLine.class.getPackage().getName());
/**
* Command line options for the crawler.
Modified: java/trunk/run-database/src/main/java/org/hps/run/database/RunManager.java
=============================================================================
--- java/trunk/run-database/src/main/java/org/hps/run/database/RunManager.java (original)
+++ java/trunk/run-database/src/main/java/org/hps/run/database/RunManager.java Tue Oct 6 18:49:26 2015
@@ -3,7 +3,6 @@
import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.hps.conditions.database.ConnectionParameters;
@@ -12,8 +11,6 @@
import org.hps.record.triggerbank.TriggerConfig;
import org.lcsim.conditions.ConditionsEvent;
import org.lcsim.conditions.ConditionsListener;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Manages read-only access to the run database and creates a {@link RunSummary} for a specific run.
@@ -47,9 +44,9 @@
private static RunManager INSTANCE;
/**
- * The class's logger.
- */
- private static Logger LOGGER = LogUtil.create(RunManager.class, new DefaultLogFormatter(), Level.ALL);
+ * Initialize the logger.
+ */
+ private static final Logger LOGGER = Logger.getLogger(RunManager.class.getPackage().getName());
/**
* Get the global instance of the {@link RunManager}.
Modified: java/trunk/run-database/src/main/java/org/hps/run/database/RunProcessor.java
=============================================================================
--- java/trunk/run-database/src/main/java/org/hps/run/database/RunProcessor.java (original)
+++ java/trunk/run-database/src/main/java/org/hps/run/database/RunProcessor.java Tue Oct 6 18:49:26 2015
@@ -1,24 +1,16 @@
package org.hps.run.database;
import java.io.File;
-import java.util.Collections;
import java.util.List;
-import java.util.logging.Level;
import java.util.logging.Logger;
-import org.hps.datacat.client.DatasetFileFormat;
import org.hps.record.epics.EpicsRunProcessor;
-import org.hps.record.evio.EvioFileMetadata;
-import org.hps.record.evio.EvioFileMetadataAdapter;
-import org.hps.record.evio.EvioFileSequenceComparator;
import org.hps.record.evio.EvioFileSource;
import org.hps.record.evio.EvioLoop;
import org.hps.record.scalers.ScalersEvioProcessor;
import org.hps.record.triggerbank.TiTimeOffsetEvioProcessor;
import org.hps.record.triggerbank.TriggerConfig;
import org.hps.record.triggerbank.TriggerConfigVariable;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Processes EVIO files from a run and extracts meta data for updating the run database.
@@ -28,9 +20,9 @@
public final class RunProcessor {
/**
- * Setup logger.
+ * Initialize the logger.
*/
- private static final Logger LOGGER = LogUtil.create(RunProcessor.class, new DefaultLogFormatter(), Level.FINE);
+ private static final Logger LOGGER = Logger.getLogger(RunProcessor.class.getPackage().getName());
/**
* Processor for extracting EPICS information.
Modified: java/trunk/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java
=============================================================================
--- java/trunk/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java (original)
+++ java/trunk/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java Tue Oct 6 18:49:26 2015
@@ -9,12 +9,9 @@
import java.util.GregorianCalendar;
import java.util.List;
import java.util.TimeZone;
-import java.util.logging.Level;
import java.util.logging.Logger;
import org.hps.record.epics.EpicsData;
-import org.lcsim.util.log.DefaultLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* Implementation of database operations for {@link RunSummary} objects in the run database.
@@ -56,9 +53,9 @@
private static Calendar CALENDAR = new GregorianCalendar(TimeZone.getTimeZone("America/New_York"));
/**
- * Setup class logging.
- */
- private static final Logger LOGGER = LogUtil.create(RunSummaryDaoImpl.class, new DefaultLogFormatter(), Level.ALL);
+ * Initialize the logger.
+ */
+ private static final Logger LOGGER = Logger.getLogger(RunSummaryDaoImpl.class.getPackage().getName());
/**
* The database connection.
Modified: java/trunk/tracking/src/main/java/org/hps/recon/tracking/TrackDataDriver.java
=============================================================================
--- java/trunk/tracking/src/main/java/org/hps/recon/tracking/TrackDataDriver.java (original)
+++ java/trunk/tracking/src/main/java/org/hps/recon/tracking/TrackDataDriver.java Tue Oct 6 18:49:26 2015
@@ -4,40 +4,37 @@
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
import java.util.logging.Logger;
-import org.hps.util.BasicLogFormatter;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.event.EventHeader;
+import org.lcsim.event.GenericObject;
import org.lcsim.event.LCRelation;
import org.lcsim.event.RawTrackerHit;
+import org.lcsim.event.RelationalTable;
import org.lcsim.event.Track;
import org.lcsim.event.TrackState;
import org.lcsim.event.TrackerHit;
import org.lcsim.event.base.BaseLCRelation;
-import org.lcsim.event.RelationalTable;
import org.lcsim.event.base.BaseRelationalTable;
import org.lcsim.fit.helicaltrack.HelicalTrackCross;
import org.lcsim.fit.helicaltrack.HelicalTrackHit;
import org.lcsim.fit.helicaltrack.HelicalTrackStrip;
import org.lcsim.geometry.Detector;
import org.lcsim.geometry.FieldMap;
-import org.lcsim.recon.tracking.seedtracker.SeedTrack;
import org.lcsim.util.Driver;
-import org.lcsim.util.log.LogUtil;
/**
* Driver used to persist additional {@link Track} information via a
* {@link GenericObject}.
*
- * @author <a href="mailto:[log in to unmask]">Omar Moreno</a>
- * @author <a href="[log in to unmask]">Sho Uemura</a>
+ * @author Omar Moreno, UCSC
+ * @author Sho Uemura, SLAC
*/
public final class TrackDataDriver extends Driver {
/** logger **/
- private static Logger logger = LogUtil.create(TrackDataDriver.class.getSimpleName(), new BasicLogFormatter(), Level.WARNING);
+ private static final Logger LOGGER = Logger.getLogger(TrackDataDriver.class.getPackage().getName());
/** The B field map */
@@ -260,7 +257,7 @@
// Add a track state that contains the extrapolated track position and
// parameters at the face of the Ecal.
//
- logger.info("Extrapolating track with type " + Integer.toString(track.getType()) );
+ LOGGER.info("Extrapolating track with type " + Integer.toString(track.getType()) );
// Extrapolate the track to the face of the Ecal and get the TrackState
if( TrackType.isGBL(track.getType())) {
@@ -282,16 +279,16 @@
//track.getTrackStates().add(stateEcalIP);
} else {
- logger.info("Extrapolate seed track to ECal from vertex");
+ LOGGER.info("Extrapolate seed track to ECal from vertex");
TrackState state = TrackUtils.extrapolateTrackUsingFieldMap(track, extStartPos, ecalPosition, stepSize, bFieldMap);
track.getTrackStates().add(state);
}
- logger.info(Integer.toString(track.getTrackStates().size()) + " track states for this track at this point:");
+ LOGGER.info(Integer.toString(track.getTrackStates().size()) + " track states for this track at this point:");
for(TrackState state : track.getTrackStates()) {
String s = "type " + Integer.toString(track.getType()) + " location " + Integer.toString(state.getLocation()) + " refPoint (" + state.getReferencePoint()[0] + " " + state.getReferencePoint()[1] + " " + state.getReferencePoint()[2] + ") " + " params: ";
for(int i=0;i<5;++i) s += String.format(" %f", state.getParameter(i));
- logger.info(s);
+ LOGGER.info(s);
}
Modified: java/trunk/tracking/src/main/java/org/hps/recon/tracking/gbl/HpsGblRefitter.java
=============================================================================
--- java/trunk/tracking/src/main/java/org/hps/recon/tracking/gbl/HpsGblRefitter.java (original)
+++ java/trunk/tracking/src/main/java/org/hps/recon/tracking/gbl/HpsGblRefitter.java Tue Oct 6 18:49:26 2015
@@ -1,11 +1,12 @@
package org.hps.recon.tracking.gbl;
+import static java.lang.Math.abs;
+import static java.lang.Math.sin;
+import static java.lang.Math.sqrt;
import hep.physics.vec.BasicHep3Vector;
import hep.physics.vec.Hep3Vector;
import hep.physics.vec.VecOp;
-import static java.lang.Math.abs;
-import static java.lang.Math.sin;
-import static java.lang.Math.sqrt;
+
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -13,6 +14,7 @@
import java.util.logging.Formatter;
import java.util.logging.Level;
import java.util.logging.Logger;
+
import org.hps.recon.tracking.TrackUtils;
import org.hps.recon.tracking.gbl.matrix.Matrix;
import org.hps.recon.tracking.gbl.matrix.SymMatrix;
@@ -26,24 +28,19 @@
import org.lcsim.geometry.Detector;
import org.lcsim.geometry.compact.converter.MilleParameter;
import org.lcsim.util.Driver;
-import org.lcsim.util.log.LogUtil;
/**
* A Driver which refits tracks using GBL. Modeled on the hps-dst code written
* by Per Hansson and Omar Moreno. Requires the GBL Collections and Relations to
* be present in the event.
*
- * @author Norman A Graf
- * @author Per Hansson Adrian <[log in to unmask]>
- *
- * @version $Id: HpsGblRefitter.java 3460 2015-08-29 01:45:39Z
- * [log in to unmask] $
+ * @author Norman A Graf, SLAC
+ * @author Per Hansson Adrian, SLAC
*/
public class HpsGblRefitter extends Driver {
static Formatter f = new BasicLogFormatter();
- private static Logger logger = LogUtil.create(HpsGblRefitter.class.getSimpleName(), f, Level.WARNING);
- //private static final Logger logger = Logger.getLogger(HpsGblRefitter.class.getName());
+ private static final Logger LOGGER = Logger.getLogger(HpsGblRefitter.class.getPackage().getName());
private boolean _debug = false;
private final String trackCollectionName = "MatchedTracks";
private final String track2GblTrackRelationName = "TrackToGBLTrack";
@@ -71,8 +68,8 @@
public HpsGblRefitter() {
_makeTracks = new MakeGblTracks();
_makeTracks.setDebug(_debug);
- logger.setLevel(Level.WARNING);
- System.out.println("level " + logger.getLevel().toString());
+ LOGGER.setLevel(Level.WARNING);
+ System.out.println("level " + LOGGER.getLevel().toString());
}
//@Override
@@ -154,11 +151,11 @@
// loop over the tracks and do the GBL fit
List<FittedGblTrajectory> trackFits = new ArrayList<FittedGblTrajectory>();
- logger.info("Trying to fit " + stripsGblMap.size() + " tracks");
+ LOGGER.info("Trying to fit " + stripsGblMap.size() + " tracks");
for (GBLTrackData t : stripsGblMap.keySet()) {
FittedGblTrajectory traj = fit(stripsGblMap.get(t), bfac, _debug);
if (traj != null) {
- logger.info("GBL fit successful");
+ LOGGER.info("GBL fit successful");
if (_debug) {
System.out.printf("%s: GBL fit successful.\n", getClass().getSimpleName());
}
@@ -169,18 +166,18 @@
traj.set_seed(gblToSeedMap.get(t));
trackFits.add(traj);
} else {
- logger.info("GBL fit failed");
+ LOGGER.info("GBL fit failed");
if (_debug) {
System.out.printf("%s: GBL fit failed.\n", getClass().getSimpleName());
}
}
}
- logger.info(event.get(Track.class, trackCollectionName).size() + " tracks in collection \"" + trackCollectionName + "\"");
- logger.info(gblObjMap.size() + " tracks in gblObjMap");
- logger.info(gblToSeedMap.size() + " tracks in gblToSeedMap");
- logger.info(stripsGblMap.size() + " tracks in stripsGblMap");
- logger.info(trackFits.size() + " fitted GBL tracks before adding to event");
+ LOGGER.info(event.get(Track.class, trackCollectionName).size() + " tracks in collection \"" + trackCollectionName + "\"");
+ LOGGER.info(gblObjMap.size() + " tracks in gblObjMap");
+ LOGGER.info(gblToSeedMap.size() + " tracks in gblToSeedMap");
+ LOGGER.info(stripsGblMap.size() + " tracks in stripsGblMap");
+ LOGGER.info(trackFits.size() + " fitted GBL tracks before adding to event");
_makeTracks.Process(event, trackFits, bfield);
@@ -423,9 +420,9 @@
for (int i = 0; i < milleParameters.size(); ++i) {
logders += labGlobal.get(i) + "\t" + addDer.get(0, i) + "\n";
}
- logger.info("\n" + logders);
-
- logger.info("uRes " + strip.getId() + " uRes " + uRes + " pred (" + strip.getTrackPos().x() + "," + strip.getTrackPos().y() + "," + strip.getTrackPos().z() + ") s(3D) " + strip.getPath3D());
+ LOGGER.info("\n" + logders);
+
+ LOGGER.info("uRes " + strip.getId() + " uRes " + uRes + " pred (" + strip.getTrackPos().x() + "," + strip.getTrackPos().y() + "," + strip.getTrackPos().z() + ") s(3D) " + strip.getPath3D());
//go to next point
s += step;
@@ -451,7 +448,7 @@
double[] dVals = new double[2];
int[] iVals = new int[1];
traj.fit(dVals, iVals, "");
- logger.info("fit result: Chi2=" + dVals[0] + " Ndf=" + iVals[0] + " Lost=" + dVals[1]);
+ LOGGER.info("fit result: Chi2=" + dVals[0] + " Ndf=" + iVals[0] + " Lost=" + dVals[1]);
Vector aCorrection = new Vector(5);
SymMatrix aCovariance = new SymMatrix(5);
traj.getResults(1, aCorrection, aCovariance);
@@ -462,7 +459,7 @@
aCovariance.print(6, 4);
}
- logger.fine("locPar " + aCorrection.toString());
+ LOGGER.fine("locPar " + aCorrection.toString());
//
return new FittedGblTrajectory(traj, dVals[0], iVals[0], dVals[1]);
Modified: java/trunk/tracking/src/main/java/org/hps/recon/tracking/gbl/MakeGblTracks.java
=============================================================================
--- java/trunk/tracking/src/main/java/org/hps/recon/tracking/gbl/MakeGblTracks.java (original)
+++ java/trunk/tracking/src/main/java/org/hps/recon/tracking/gbl/MakeGblTracks.java Tue Oct 6 18:49:26 2015
@@ -1,5 +1,6 @@
package org.hps.recon.tracking.gbl;
+import static org.hps.recon.tracking.gbl.GBLOutput.getPerToClPrj;
import hep.physics.matrix.SymmetricMatrix;
import hep.physics.vec.BasicHep3Vector;
import hep.physics.vec.Hep3Matrix;
@@ -13,11 +14,9 @@
import org.apache.commons.math3.util.Pair;
import org.hps.recon.tracking.TrackType;
-import static org.hps.recon.tracking.gbl.GBLOutput.getPerToClPrj;
import org.hps.recon.tracking.gbl.matrix.Matrix;
import org.hps.recon.tracking.gbl.matrix.SymMatrix;
import org.hps.recon.tracking.gbl.matrix.Vector;
-import org.hps.util.BasicLogFormatter;
import org.lcsim.constants.Constants;
import org.lcsim.event.EventHeader;
import org.lcsim.event.Track;
@@ -29,7 +28,6 @@
import org.lcsim.lcio.LCIOConstants;
import org.lcsim.recon.tracking.seedtracker.SeedCandidate;
import org.lcsim.recon.tracking.seedtracker.SeedTrack;
-import org.lcsim.util.log.LogUtil;
/**
* A class that creates track objects from fitted GBL trajectories and adds them
@@ -41,7 +39,7 @@
public class MakeGblTracks {
private String _TrkCollectionName = "GBLTracks";
- private static Logger logger = LogUtil.create(MakeGblTracks.class, new BasicLogFormatter(), Level.OFF);
+ private static Logger LOGGER = Logger.getLogger(MakeGblTracks.class.getPackage().getName());
/**
* Creates a new instance of MakeTracks.
@@ -51,9 +49,9 @@
public void setDebug(boolean debug) {
if (debug) {
- logger.setLevel(Level.INFO);
+ LOGGER.setLevel(Level.INFO);
} else {
- logger.setLevel(Level.OFF);
+ LOGGER.setLevel(Level.OFF);
}
}
@@ -69,7 +67,7 @@
List<Track> tracks = new ArrayList<Track>();
- logger.info("adding " + gblTrajectories.size() + " of fitted GBL tracks to the event");
+ LOGGER.info("adding " + gblTrajectories.size() + " of fitted GBL tracks to the event");
for (FittedGblTrajectory fittedTraj : gblTrajectories) {
@@ -83,7 +81,7 @@
tracks.add(trk);
}
- logger.info("adding " + Integer.toString(tracks.size()) + " Gbl tracks to event with " + event.get(Track.class, "MatchedTracks").size() + " matched tracks");
+ LOGGER.info("adding " + Integer.toString(tracks.size()) + " Gbl tracks to event with " + event.get(Track.class, "MatchedTracks").size() + " matched tracks");
// Put the tracks back into the event and exit
int flag = 1 << LCIOConstants.TRBIT_HITS;
@@ -124,10 +122,10 @@
// Add the track to the list of tracks
// tracks.add(trk);
- logger.info(String.format("helix chi2 %f ndf %d gbl chi2 %f ndf %d\n", helix.chisqtot(), helix.ndf()[0] + helix.ndf()[1], trk.getChi2(), trk.getNDF()));
- if (logger.getLevel().intValue() <= Level.INFO.intValue()) {
+ LOGGER.info(String.format("helix chi2 %f ndf %d gbl chi2 %f ndf %d\n", helix.chisqtot(), helix.ndf()[0] + helix.ndf()[1], trk.getChi2(), trk.getNDF()));
+ if (LOGGER.getLevel().intValue() <= Level.INFO.intValue()) {
for (int i = 0; i < 5; ++i) {
- logger.info(String.format("param %d: %.10f -> %.10f helix-gbl= %f", i, helix.parameters()[i], trk.getTrackParameter(i), helix.parameters()[i] - trk.getTrackParameter(i)));
+ LOGGER.info(String.format("param %d: %.10f -> %.10f helix-gbl= %f", i, helix.parameters()[i], trk.getTrackParameter(i), helix.parameters()[i] - trk.getTrackParameter(i)));
}
}
return trk;
@@ -151,9 +149,9 @@
double phi0 = helix.phi0();
double lambda = Math.atan(helix.slope());
- logger.info("GblPoint: " + point.toString() + "( " + point.name() + ")");
- logger.info(String.format("original helix: d0=%f, z0=%f, omega=%f, tanlambda=%f, phi0=%f, p=%f", helix.dca(), helix.z0(), helix.curvature(), helix.slope(), helix.phi0(), helix.p(Math.abs(bfield))));
- logger.info("original helix covariance:\n" + helix.covariance());
+ LOGGER.info("GblPoint: " + point.toString() + "( " + point.name() + ")");
+ LOGGER.info(String.format("original helix: d0=%f, z0=%f, omega=%f, tanlambda=%f, phi0=%f, p=%f", helix.dca(), helix.z0(), helix.curvature(), helix.slope(), helix.phi0(), helix.p(Math.abs(bfield))));
+ LOGGER.info("original helix covariance:\n" + helix.covariance());
// get corrections from GBL fit
Vector locPar = new Vector(5);
@@ -175,7 +173,7 @@
double xTCorr = locPar.get(FittedGblTrajectory.GBLPARIDX.XT.getValue());
double yTCorr = locPar.get(FittedGblTrajectory.GBLPARIDX.YT.getValue());
- logger.info((helix.slope() > 0 ? "top: " : "bot ") + "qOverPCorr " + qOverPCorr + " xtPrimeCorr " + xTPrimeCorr + " yTPrimeCorr " + yTPrimeCorr + " xTCorr " + xTCorr + " yTCorr " + yTCorr);
+ LOGGER.info((helix.slope() > 0 ? "top: " : "bot ") + "qOverPCorr " + qOverPCorr + " xtPrimeCorr " + xTPrimeCorr + " yTPrimeCorr " + yTPrimeCorr + " xTCorr " + xTCorr + " yTCorr " + yTCorr);
// calculate new d0 and z0
// Hep3Matrix perToClPrj = traj.get_track_data().getPrjPerToCl();
@@ -205,9 +203,9 @@
//calculate new phi0
double phi0_gbl = phi0 + xTPrimeCorr - corrPer.x() * C_gbl;
- logger.info("qOverP=" + qOverP + " qOverPCorr=" + qOverPCorr + " qOverP_gbl=" + qOverP_gbl + " ==> pGbl=" + 1.0 / qOverP_gbl + " C_gbl=" + C_gbl);
-
- logger.info(String.format("corrected helix: d0=%f, z0=%f, omega=%f, tanlambda=%f, phi0=%f, p=%f", dca_gbl, z0_gbl, C_gbl, slope_gbl, phi0_gbl, Math.abs(1 / qOverP_gbl)));
+ LOGGER.info("qOverP=" + qOverP + " qOverPCorr=" + qOverPCorr + " qOverP_gbl=" + qOverP_gbl + " ==> pGbl=" + 1.0 / qOverP_gbl + " C_gbl=" + C_gbl);
+
+ LOGGER.info(String.format("corrected helix: d0=%f, z0=%f, omega=%f, tanlambda=%f, phi0=%f, p=%f", dca_gbl, z0_gbl, C_gbl, slope_gbl, phi0_gbl, Math.abs(1 / qOverP_gbl)));
/*
// Strandlie, Wittek, NIMA 566, 2006
@@ -275,7 +273,7 @@
}
}
}
- logger.info("corrected helix covariance:\n" + cov);
+ LOGGER.info("corrected helix covariance:\n" + cov);
double parameters_gbl[] = new double[5];
parameters_gbl[HelicalTrackFit.dcaIndex] = dca_gbl;
Modified: java/trunk/users/src/main/java/org/hps/users/jeremym/EvioFileScanner.java
=============================================================================
--- java/trunk/users/src/main/java/org/hps/users/jeremym/EvioFileScanner.java (original)
+++ java/trunk/users/src/main/java/org/hps/users/jeremym/EvioFileScanner.java Tue Oct 6 18:49:26 2015
@@ -36,7 +36,6 @@
import org.jlab.coda.jevio.EvioEvent;
import org.jlab.coda.jevio.EvioException;
import org.jlab.coda.jevio.EvioReader;
-import org.lcsim.util.log.LogUtil;
/**
* A utility for scanning EVIO files by run.
@@ -333,7 +332,7 @@
}
}
- private static final Logger LOGGER = LogUtil.create(EvioFileVisitor.class);
+ private static final Logger LOGGER = Logger.getLogger(EvioFileScanner.class.getName());
private static final long MILLISECONDS = 1000L;
private static final Options OPTIONS = new Options();
Modified: java/trunk/users/src/main/java/org/hps/users/phansson/ReadSurveyRotations.java
=============================================================================
--- java/trunk/users/src/main/java/org/hps/users/phansson/ReadSurveyRotations.java (original)
+++ java/trunk/users/src/main/java/org/hps/users/phansson/ReadSurveyRotations.java Tue Oct 6 18:49:26 2015
@@ -7,11 +7,9 @@
import hep.physics.vec.Hep3Vector;
import java.io.BufferedReader;
-import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
-import java.io.FileWriter;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.ArrayList;
@@ -24,8 +22,6 @@
import org.apache.commons.math3.geometry.euclidean.threed.Rotation;
import org.apache.commons.math3.geometry.euclidean.threed.RotationOrder;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
-import org.hps.util.BasicLogFormatter;
-import org.lcsim.util.log.LogUtil;
/**
* @author Per Hansson Adrian <[log in to unmask]>
@@ -33,7 +29,10 @@
*/
public class ReadSurveyRotations {
- final static Logger logger = LogUtil.create(ReadSurveyRotations.class.getSimpleName(), new BasicLogFormatter(), Level.INFO);
+ final static Logger LOGGER = Logger.getLogger(ReadSurveyRotations.class.getSimpleName());
+ static {
+ LOGGER.setLevel(Level.INFO);
+ }
String name;
String parent;
@@ -48,10 +47,10 @@
List<ReadSurveyRotations> rotSurvey = getRotations(args[0]);
- logger.info("Found " + rotSurvey.size() + " survey rotations");
+ LOGGER.info("Found " + rotSurvey.size() + " survey rotations");
List<ReadSurveyRotations> rotIdeal = getRotations(args[1]);
- logger.info("Found " + rotIdeal.size() + " ideal rotations");
+ LOGGER.info("Found " + rotIdeal.size() + " ideal rotations");
@@ -149,8 +148,8 @@
Hep3Vector v = getVector(matcher.group(4));
Hep3Vector w = getVector(matcher.group(5));
if(Math.abs(u.magnitude()-1.0)>0.0001 || Math.abs(v.magnitude()-1.0)>0.0001 ||Math.abs(w.magnitude()-1.0)>0.0001 ) {
- logger.warning(line);
- logger.warning("name: " + name + " unit vectors: " + u.toString() + " " + v.toString() + " " + w.toString());
+ LOGGER.warning(line);
+ LOGGER.warning("name: " + name + " unit vectors: " + u.toString() + " " + v.toString() + " " + w.toString());
throw new RuntimeException("error reading vectors");
}
ReadSurveyRotations rot = new ReadSurveyRotations(name, parent, u, v, w);
Modified: java/trunk/users/src/main/java/org/hps/users/phansson/SvtHeaderAnalysisDriver.java
=============================================================================
--- java/trunk/users/src/main/java/org/hps/users/phansson/SvtHeaderAnalysisDriver.java (original)
+++ java/trunk/users/src/main/java/org/hps/users/phansson/SvtHeaderAnalysisDriver.java Tue Oct 6 18:49:26 2015
@@ -19,15 +19,12 @@
import org.hps.evio.SvtEvioReader;
import org.hps.evio.SvtEvioUtils;
import org.hps.record.svt.SvtHeaderDataInfo;
-import org.hps.record.triggerbank.AbstractIntData;
-import org.hps.record.triggerbank.HeadBankData;
import org.hps.util.BasicLogFormatter;
import org.lcsim.event.EventHeader;
import org.lcsim.event.GenericObject;
import org.lcsim.geometry.Detector;
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
-import org.lcsim.util.log.LogUtil;
/**
* @author Per Hansson Adrian <[log in to unmask]>
@@ -38,7 +35,7 @@
private final AIDA aida = AIDA.defaultInstance();
private final String HeaderCollectionName = "SvtHeaders";
- private final Logger logger = LogUtil.create(SvtHeaderAnalysisDriver.class.getSimpleName(), new BasicLogFormatter(), Level.INFO);
+ private final Logger logger = Logger.getLogger(SvtHeaderAnalysisDriver.class.getSimpleName());
private int nEventsProcessed = 0;
private Date eventDate = new Date(0);
private IHistogram2D rceSyncErrorCount;
@@ -64,7 +61,7 @@
*
*/
public SvtHeaderAnalysisDriver() {
-
+ logger.setLevel(Level.INFO);
}
public void setLogFileName(String name) {
Modified: java/trunk/users/src/main/java/org/hps/users/phansson/SvtOldHeaderAnalysisDriver.java
=============================================================================
--- java/trunk/users/src/main/java/org/hps/users/phansson/SvtOldHeaderAnalysisDriver.java (original)
+++ java/trunk/users/src/main/java/org/hps/users/phansson/SvtOldHeaderAnalysisDriver.java Tue Oct 6 18:49:26 2015
@@ -18,15 +18,12 @@
import org.hps.analysis.trigger.util.TriggerDataUtils;
import org.hps.evio.SvtEvioReader;
import org.hps.evio.SvtEvioUtils;
-import org.hps.record.triggerbank.AbstractIntData;
-import org.hps.record.triggerbank.HeadBankData;
import org.hps.util.BasicLogFormatter;
import org.lcsim.event.EventHeader;
import org.lcsim.event.GenericObject;
import org.lcsim.geometry.Detector;
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
-import org.lcsim.util.log.LogUtil;
/**
* @author Per Hansson Adrian <[log in to unmask]>
@@ -37,7 +34,7 @@
private final AIDA aida = AIDA.defaultInstance();
private final String HeaderCollectionName = "SvtHeaders";
- private final Logger logger = LogUtil.create(SvtOldHeaderAnalysisDriver.class.getSimpleName(), new BasicLogFormatter(), Level.INFO);
+ private final Logger LOGGER = Logger.getLogger(SvtOldHeaderAnalysisDriver.class.getSimpleName());
private int nEventsProcessed = 0;
private Date eventDate = new Date(0);
private IHistogram2D rceSyncErrorCount;
@@ -80,7 +77,7 @@
FileHandler fh;
try {
fh = new FileHandler(logFileName);
- logger.addHandler(fh);
+ LOGGER.addHandler(fh);
fh.setFormatter(new BasicLogFormatter());
} catch (SecurityException | IOException e1) {
e1.printStackTrace();
@@ -110,7 +107,7 @@
if(event.hasCollection(GenericObject.class, triggerBankCollectionName)) {
Date currentEventDate = TriggerDataUtils.getEventTimeStamp(event, triggerBankCollectionName);
if( currentEventDate == null) {
- logger.info("Couldn't get event date from trigger bank for processed " + nEventsProcessed);
+ LOGGER.info("Couldn't get event date from trigger bank for processed " + nEventsProcessed);
// throw new RuntimeException("Couldn't get event date from trigger bank!");
} else {
eventDate = currentEventDate;
@@ -118,17 +115,17 @@
}
// log start of run
if( nEventsProcessed == 0 )
- logger.info("startOfRun: run " + event.getRunNumber() + " event " + event.getEventNumber() + " processed " + nEventsProcessed + " date " + eventDate.toString());
+ LOGGER.info("startOfRun: run " + event.getRunNumber() + " event " + event.getEventNumber() + " processed " + nEventsProcessed + " date " + eventDate.toString());
if( !event.hasCollection(GenericObject.class, HeaderCollectionName) )
return;
List<GenericObject> headers = event.get(GenericObject.class, HeaderCollectionName);
- logger.fine("Found " + headers.size() + " SvtHeaders in event " + event.getEventNumber() + " run " + event.getRunNumber());
+ LOGGER.fine("Found " + headers.size() + " SvtHeaders in event " + event.getEventNumber() + " run " + event.getRunNumber());
for(GenericObject header : headers ) {
- logger.fine("nint " + header.getNInt());
+ LOGGER.fine("nint " + header.getNInt());
int roc = SvtOldHeaderDataInfo.getNum(header);
// find the errors in the header
@@ -144,18 +141,18 @@
// print header errors to log
if( syncError != 0) {
- logger.info("syncError: run " + event.getRunNumber() + " event " + event.getEventNumber() + " date " + " processed " + nEventsProcessed + " date " + eventDate.toString()
+ LOGGER.info("syncError: run " + event.getRunNumber() + " event " + event.getEventNumber() + " date " + " processed " + nEventsProcessed + " date " + eventDate.toString()
+ " roc " + roc);
printEverything = true;
}
if( oFError != 0) {
- logger.info("oFError: run " + event.getRunNumber() + " event " + event.getEventNumber() + " date " + " processed " + nEventsProcessed + " date " + eventDate.toString()
+ LOGGER.info("oFError: run " + event.getRunNumber() + " event " + event.getEventNumber() + " date " + " processed " + nEventsProcessed + " date " + eventDate.toString()
+ " roc " + roc);
printEverything = true;
}
for(int i=0; i < skipCount; ++i) {
if( oFError != 0) {
- logger.info("skipCount: run " + event.getRunNumber() + " event " + event.getEventNumber() + " date " + " processed " + nEventsProcessed + " date " + eventDate.toString()
+ LOGGER.info("skipCount: run " + event.getRunNumber() + " event " + event.getEventNumber() + " date " + " processed " + nEventsProcessed + " date " + eventDate.toString()
+ " roc " + roc);
}
printEverything = true;
@@ -165,17 +162,17 @@
// Check for multisample tail error bit
int nMultisamples = SvtEvioUtils.getSvtTailMultisampleCount(SvtOldHeaderDataInfo.getTail(header));
- logger.info(nMultisamples + " multisamples");
+ LOGGER.info(nMultisamples + " multisamples");
int multisampleErrorBits = 0;
for(int iMultisample = 0; iMultisample != nMultisamples; ++iMultisample) {
int multisampleHeader = SvtOldHeaderDataInfo.getMultisample(iMultisample, header);
- logger.log(printEverything ? Level.INFO : Level.FINE, "found multisample tail: " + Integer.toHexString(multisampleHeader));
+ LOGGER.log(printEverything ? Level.INFO : Level.FINE, "found multisample tail: " + Integer.toHexString(multisampleHeader));
int multisampleErrorBit = SvtEvioUtils.getErrorBitFromMultisampleHeader(multisampleHeader);
checkBitValueRange(multisampleErrorBit);
- logger.log(printEverything ? Level.INFO : Level.FINE, "found multisample tail error bit: " + multisampleErrorBit);
+ LOGGER.log(printEverything ? Level.INFO : Level.FINE, "found multisample tail error bit: " + multisampleErrorBit);
if( multisampleErrorBit != 0) {
multisampleErrorBits++;
- logger.info("multisample tail error: run " + event.getRunNumber() + " event " + event.getEventNumber() + " date " + eventDate.toString()
+ LOGGER.info("multisample tail error: run " + event.getRunNumber() + " event " + event.getEventNumber() + " date " + eventDate.toString()
+ " roc " + roc + " feb " + SvtEvioUtils.getFebIDFromMultisampleTail(multisampleHeader)
+ " hybrid " + SvtEvioUtils.getFebHybridIDFromMultisampleTail(multisampleHeader)
+ " apv " + SvtEvioUtils.getApvFromMultisampleTail(multisampleHeader));
@@ -208,12 +205,12 @@
@Override
protected void endOfData() {
- logger.info("endOfData: processed " + nEventsProcessed + " date " + eventDate.toString());
- logger.info("nRceSvtHeaders " + nRceSvtHeaders);
- logger.info("nRceSyncErrorCountN " + nRceSyncErrorCountN);
- logger.info("nRceOFErrorCount " + nRceOFErrorCount);
- logger.info("nRceSkipCount " + nRceSkipCount);
- logger.info("nRceMultisampleErrorCount " + nRceMultisampleErrorCount);
+ LOGGER.info("endOfData: processed " + nEventsProcessed + " date " + eventDate.toString());
+ LOGGER.info("nRceSvtHeaders " + nRceSvtHeaders);
+ LOGGER.info("nRceSyncErrorCountN " + nRceSyncErrorCountN);
+ LOGGER.info("nRceOFErrorCount " + nRceOFErrorCount);
+ LOGGER.info("nRceSkipCount " + nRceSkipCount);
+ LOGGER.info("nRceMultisampleErrorCount " + nRceMultisampleErrorCount);
}
Modified: java/trunk/users/src/main/java/org/hps/users/phansson/TrackExtrapolationTestDriver.java
=============================================================================
--- java/trunk/users/src/main/java/org/hps/users/phansson/TrackExtrapolationTestDriver.java (original)
+++ java/trunk/users/src/main/java/org/hps/users/phansson/TrackExtrapolationTestDriver.java Tue Oct 6 18:49:26 2015
@@ -20,7 +20,6 @@
import java.util.logging.Logger;
import org.hps.recon.tracking.TrackUtils;
-import org.hps.util.BasicLogFormatter;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.event.EventHeader;
import org.lcsim.event.RawTrackerHit;
@@ -33,14 +32,16 @@
import org.lcsim.geometry.compact.converter.HPSTrackerBuilder;
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
-import org.lcsim.util.log.LogUtil;
/**
* @author Per Hansson Adrian <[log in to unmask]>
*
*/
public class TrackExtrapolationTestDriver extends Driver {
- private static Logger logger = LogUtil.create(TrackExtrapolationTestDriver.class.getName(), new BasicLogFormatter(), Level.INFO);
+ private static Logger LOGGER = Logger.getLogger(TrackExtrapolationTestDriver.class.getName());
+ static {
+ LOGGER.setLevel(Level.INFO);
+ }
private AIDA aida = AIDA.defaultInstance();
IHistogram1D res_trackPos_Y;
IHistogram1D res_trackPos_X;
@@ -127,14 +128,14 @@
layerTrackPos.put(layer, trackPosition);
layerTrackPos3DField.put(layer, trackPosition3DField);
- logger.fine("layer " + layer + " stereohitposition " + stereoHitPosition.toString());
+ LOGGER.fine("layer " + layer + " stereohitposition " + stereoHitPosition.toString());
if( prevStereoHitPosition != null)
- logger.fine("prevStereoHitPosition " + prevStereoHitPosition.toString());
- logger.fine("trackPos " + layerTrackPos.get(layer).toString());
+ LOGGER.fine("prevStereoHitPosition " + prevStereoHitPosition.toString());
+ LOGGER.fine("trackPos " + layerTrackPos.get(layer).toString());
if( trackPosition3DField != null) {
- logger.fine("trackPosition3DField " + trackPosition3DField.toString());
+ LOGGER.fine("trackPosition3DField " + trackPosition3DField.toString());
} else {
- logger.fine("trackPosition3DField no prev layer ");
+ LOGGER.fine("trackPosition3DField no prev layer ");
}
if(layer == 6 ) {
Modified: java/trunk/users/src/main/java/org/hps/users/phansson/TrackingReconstructionPlots.java
=============================================================================
--- java/trunk/users/src/main/java/org/hps/users/phansson/TrackingReconstructionPlots.java (original)
+++ java/trunk/users/src/main/java/org/hps/users/phansson/TrackingReconstructionPlots.java Tue Oct 6 18:49:26 2015
@@ -25,7 +25,6 @@
import org.hps.recon.tracking.StraightLineTrack;
import org.hps.recon.tracking.TrackUtils;
import org.hps.recon.tracking.gbl.HelicalTrackStripGbl;
-import org.hps.util.BasicLogFormatter;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.detector.tracker.silicon.SiSensor;
import org.lcsim.event.Cluster;
@@ -49,7 +48,6 @@
import org.lcsim.recon.tracking.seedtracker.SeedTrack;
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
-import org.lcsim.util.log.LogUtil;
/**
*
@@ -115,7 +113,7 @@
HelixConverter converter = new HelixConverter(0);
private boolean showPlots = true;
private double _bfield;
- private static Logger logger = LogUtil.create(TrackingReconstructionPlots.class, new BasicLogFormatter());
+ private static Logger LOGGER = Logger.getLogger(TrackingReconstructionPlots.class.getName());
@Override
protected void detectorChanged(Detector detector) {
@@ -126,7 +124,7 @@
sensors.add(s);
}
}
- logger.info("Found " + sensors.size() + " SiSensors.");
+ LOGGER.info("Found " + sensors.size() + " SiSensors.");
Hep3Vector bfieldvec = detector.getFieldMap().getField(new BasicHep3Vector(0., 0., 1.));
_bfield = bfieldvec.y();
@@ -959,7 +957,7 @@
}
public TrackingReconstructionPlots() {
- logger.setLevel(Level.WARNING);
+ LOGGER.setLevel(Level.WARNING);
}
public void setOutputPlots(String output) {
@@ -1007,7 +1005,7 @@
double stripIsoMin = 9999.9;
for (SiTrackerHitStrip1D stripHitOther : stripClusters) {
- logger.fine(stripHit.getPositionAsVector().toString() + " c.f. " + stripHitOther.getPositionAsVector().toString());
+ LOGGER.fine(stripHit.getPositionAsVector().toString() + " c.f. " + stripHitOther.getPositionAsVector().toString());
if(stripHitOther.equals(stripHit)) {
continue;
@@ -1225,7 +1223,7 @@
else isTopLayer=false;
HelicalTrackStripGbl stripGbl = new HelicalTrackStripGbl(strip, true);
Map<String, Double> stripResiduals = TrackUtils.calculateLocalTrackHitResiduals(helicalTrackFit, stripGbl, 0.,0.,_bfield);
- logger.fine("Sensor " + sensor.getName() + " ures = " + stripResiduals.get("ures"));
+ LOGGER.fine("Sensor " + sensor.getName() + " ures = " + stripResiduals.get("ures"));
aida.histogram1D(sensor.getName() + " strip residual (mm)").fill(stripResiduals.get("ures"));
|