Author: [log in to unmask]
Date: Wed Dec 16 15:25:21 2015
New Revision: 4065
Log:
[HPSJAVA-636] Use standard SRS datacat Java client plus other minor changes and reorg.
Added:
java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DataType.java
java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatHelper.java
- copied, changed from r4058, java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatUtilities.java
java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/FileFormat.java
java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/FileUtilities.java
- copied, changed from r4058, java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/util/FileUtilities.java
java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/Site.java
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/DaoProvider.java
- copied, changed from r4058, java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseDaoFactory.java
Removed:
java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatUtilities.java
java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/FileSet.java
java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/util/FileUtilities.java
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseDaoFactory.java
Modified:
java/branches/jeremy-dev/crawler/pom.xml
java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/AidaMetadataReader.java
java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/CrawlerConfig.java
java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/CrawlerFileVisitor.java
java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java
java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java
java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/FileFormatFilter.java
java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/RootDqmMetadataReader.java
java/branches/jeremy-dev/run-database/pom.xml
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunManager.java
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummary.java
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java
java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryImpl.java
Modified: java/branches/jeremy-dev/crawler/pom.xml
=============================================================================
--- java/branches/jeremy-dev/crawler/pom.xml (original)
+++ java/branches/jeremy-dev/crawler/pom.xml Wed Dec 16 15:25:21 2015
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.5-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/crawler/</url>
@@ -17,7 +17,11 @@
<dependencies>
<dependency>
<groupId>org.hps</groupId>
- <artifactId>hps-run-database</artifactId>
+ <artifactId>hps-record-util</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>srs</groupId>
+ <artifactId>org-srs-datacat-client</artifactId>
</dependency>
</dependencies>
</project>
Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/AidaMetadataReader.java
=============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/AidaMetadataReader.java (original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/AidaMetadataReader.java Wed Dec 16 15:25:21 2015
@@ -4,8 +4,6 @@
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
-
-import org.hps.record.util.FileUtilities;
/**
* This is a metadata reader for ROOT DQM files.
Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/CrawlerConfig.java
=============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/CrawlerConfig.java (original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/CrawlerConfig.java Wed Dec 16 15:25:21 2015
@@ -11,14 +11,9 @@
import java.util.Set;
import org.hps.conditions.database.ConnectionParameters;
-import org.hps.datacat.client.DatacatConstants;
-import org.hps.datacat.client.DatasetFileFormat;
-import org.hps.datacat.client.DatasetSite;
/**
* Full configuration information for the {@link Crawler} class.
- * <p>
- * Method chaining of setters is supported.
*
* @author Jeremy McCormick, SLAC
*/
@@ -46,14 +41,9 @@
private String datacatFolder = null;
/**
- * Set whether extraction of metadata is enabled.
- */
- private boolean enableMetadata;
-
- /**
* Set of accepted file formats.
*/
- private Set<DatasetFileFormat> formats = new HashSet<DatasetFileFormat>();
+ private Set<FileFormat> formats = new HashSet<FileFormat>();
/**
* The maximum depth to crawl.
@@ -68,7 +58,7 @@
/**
* The dataset site for the datacat.
*/
- private DatasetSite site = DatasetSite.JLAB;
+ private Site site = Site.JLAB;
/**
* A timestamp to use for filtering input files on their creation date.
@@ -88,13 +78,8 @@
/**
* Base URL of datacat client.
*/
- private String baseUrl = DatacatConstants.BASE_URL;
-
- /**
- * Root URL of datacat client (e.g. 'HPS').
- */
- private String rootFolder = DatacatConstants.ROOT_FOLDER;
-
+ private String baseUrl = DatacatHelper.DATACAT_URL;
+
/**
* Set of paths used for filtering files (file's path must match one of these).
*/
@@ -113,7 +98,7 @@
* Add the default file formats.
*/
CrawlerConfig addDefaultFileFormats() {
- final List<DatasetFileFormat> defaultFormats = Arrays.asList(DatasetFileFormat.values());
+ final List<FileFormat> defaultFormats = Arrays.asList(FileFormat.values());
this.formats.addAll(defaultFormats);
return this;
}
@@ -123,9 +108,8 @@
*
* @param format the file format
*/
- CrawlerConfig addFileFormat(final DatasetFileFormat format) {
+ void addFileFormat(final FileFormat format) {
this.formats.add(format);
- return this;
}
/**
@@ -142,7 +126,7 @@
*
* @return the data catalog folder
*/
- String datacatFolder() {
+ String folder() {
return this.datacatFolder;
}
@@ -151,25 +135,16 @@
*
* @return the dataset site
*/
- DatasetSite datasetSite() {
+ Site site() {
return this.site;
}
/**
- * Return <code>true</code> if metadata extraction from files is enabled.
- *
- * @return <code>true</code> if metadata extraction is enabled
- */
- boolean enableMetaData() {
- return this.enableMetadata;
- }
-
- /**
* Get the file formats for filtering.
*
* @return the file formats for filtering
*/
- Set<DatasetFileFormat> getFileFormats() {
+ Set<FileFormat> getFileFormats() {
return this.formats;
}
@@ -183,7 +158,7 @@
}
/**
- * Get the root directory for the file search.
+ * Get the root directory for the file search.
*
* @return the root directory for the file search
*/
@@ -197,9 +172,8 @@
* @param acceptRuns the list of acceptable run numbers
* @return this object
*/
- CrawlerConfig setAcceptRuns(final Set<Integer> acceptRuns) {
+ void setAcceptRuns(final Set<Integer> acceptRuns) {
this.acceptRuns = acceptRuns;
- return this;
}
/**
@@ -208,9 +182,8 @@
* @param connectionParameters the database connection parameters
* @return this object
*/
- CrawlerConfig setConnection(final ConnectionParameters connectionParameters) {
+ void setConnection(final ConnectionParameters connectionParameters) {
this.connectionParameters = connectionParameters;
- return this;
}
/**
@@ -218,9 +191,8 @@
*
* @param datacatFolder the data catalog folder
*/
- CrawlerConfig setDatacatFolder(final String datacatFolder) {
+ void setDatacatFolder(final String datacatFolder) {
this.datacatFolder = datacatFolder;
- return this;
}
/**
@@ -228,9 +200,8 @@
*
* @return this object
*/
- CrawlerConfig setDatasetSite(final DatasetSite site) {
+ void setSite(final Site site) {
this.site = site;
- return this;
}
/**
@@ -239,31 +210,18 @@
* @param dryRun set to <code>true</code> to enable dry run
* @return this object
*/
- CrawlerConfig setDryRun(boolean dryRun) {
+ void setDryRun(boolean dryRun) {
this.dryRun = dryRun;
- return this;
- }
-
-
- /**
- * Set whether metadata extraction is enabled.
- *
- * @param enableMetadata <code>true</code> to enable metadata
- * @return this object
- */
- CrawlerConfig setEnableMetadata(final boolean enableMetadata) {
- this.enableMetadata = enableMetadata;
- return this;
- }
+ }
+
/**
* Set the max depth.
*
* @param maxDepth the max depth
*/
- CrawlerConfig setMaxDepth(final Integer maxDepth) {
+ void setMaxDepth(final Integer maxDepth) {
this.maxDepth = maxDepth;
- return this;
}
/**
@@ -272,9 +230,8 @@
* @param rootDir the root directory for the file search
* @return this object
*/
- CrawlerConfig setRootDir(final File rootDir) {
+ void setRootDir(final File rootDir) {
this.rootDir = rootDir;
- return this;
}
/**
@@ -285,9 +242,8 @@
* @param timestamp the date for filtering files
* @return this object
*/
- CrawlerConfig setTimestamp(final Date timestamp) {
+ void setTimestamp(final Date timestamp) {
this.timestamp = timestamp;
- return this;
}
/**
@@ -299,9 +255,8 @@
* @param timestamp the date string for filtering files
* @return this object
*/
- CrawlerConfig setTimestamp(final String timestampString) throws ParseException {
+ void setTimestamp(final String timestampString) throws ParseException {
TIMESTAMP_FORMAT.parse(timestampString);
- return this;
}
/**
@@ -310,9 +265,8 @@
* @param timestampFile the timestamp file for date filtering
* @return this object
*/
- CrawlerConfig setTimestampFile(final File timestampFile) {
+ void setTimestampFile(final File timestampFile) {
this.timestampFile = timestampFile;
- return this;
}
/**
@@ -344,26 +298,38 @@
return this.dryRun;
}
- void setBaseUrl(String baseUrl) {
+ /**
+ * Set the data catalog URL.
+ *
+ * @param baseUrl the data catalog URL
+ */
+ void setDatacatUrl(String baseUrl) {
this.baseUrl = baseUrl;
}
- String baseUrl() {
+ /**
+ * Get the data catalog URL.
+ *
+ * @return the data catalog URL
+ */
+ String datacatUrl() {
return this.baseUrl;
}
-
- void setRootFolder(String rootFolder) {
- this.rootFolder = rootFolder;
- }
-
- String rootFolder() {
- return this.rootFolder;
- }
-
+
+ /**
+ * Add a path for filtering files.
+ *
+ * @param path the path for filtering
+ */
void addPath(String path) {
this.paths.add(path);
}
+ /**
+ * Get the list of paths for filtering.
+ *
+ * @return the list of paths for filtering
+ */
Set<String> paths() {
return this.paths;
}
Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/CrawlerFileVisitor.java
=============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/CrawlerFileVisitor.java (original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/CrawlerFileVisitor.java Wed Dec 16 15:25:21 2015
@@ -8,8 +8,6 @@
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.List;
-
-import org.hps.datacat.client.DatasetFileFormat;
/**
* Visitor which creates a {@link FileSet} from walking a directory tree.
@@ -24,7 +22,7 @@
/**
* The run log containing information about files from each run.
*/
- private final FileSet fileSet = new FileSet();
+ private final List<File> files = new ArrayList<File>();
/**
* A list of file filters to apply.
@@ -62,8 +60,8 @@
*
* @return the file set from visiting the directory tree
*/
- FileSet getFileSet() {
- return this.fileSet;
+ List<File> getFiles() {
+ return this.files;
}
/**
@@ -76,8 +74,7 @@
public FileVisitResult visitFile(final Path path, final BasicFileAttributes attrs) {
final File file = path.toFile();
if (this.accept(file)) {
- final DatasetFileFormat format = DatacatUtilities.getFileFormat(file);
- fileSet.addFile(format, file);
+ files.add(file);
}
return FileVisitResult.CONTINUE;
}
Added: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DataType.java
=============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DataType.java (added)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DataType.java Wed Dec 16 15:25:21 2015
@@ -0,0 +1,29 @@
+package org.hps.crawler;
+
+/**
+ * Dataset types for HPS.
+ *
+ * @author Jeremy McCormick, SLAC
+ */
+public enum DataType {
+ /**
+ * Data quality management plots.
+ */
+ DQM,
+ /**
+ * Raw data (EVIO).
+ */
+ RAW,
+ /**
+ * Reconstructed data (usually LCIO).
+ */
+ RECON,
+ /**
+ * Data Summary Tape files (ROOT).
+ */
+ DST,
+ /**
+ * Test type (don't use in production).
+ */
+ TEST;
+}
Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java
=============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java (original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatCrawler.java Wed Dec 16 15:25:21 2015
@@ -2,15 +2,14 @@
import java.io.File;
import java.io.IOException;
+import java.net.URISyntaxException;
import java.nio.file.FileVisitOption;
import java.nio.file.Files;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Date;
import java.util.EnumSet;
-import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
-import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
@@ -20,18 +19,15 @@
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.hps.datacat.client.DatacatClient;
-import org.hps.datacat.client.DatacatClientFactory;
-import org.hps.datacat.client.DatasetFileFormat;
-import org.hps.datacat.client.DatasetSite;
-import org.hps.record.util.FileUtilities;
+import org.srs.datacat.client.Client;
+import org.srs.datacat.client.ClientBuilder;
+import org.srs.datacat.model.DatasetModel;
/**
* Command line file crawler for populating the data catalog.
*
* @author Jeremy McCormick, SLAC
*/
-// TODO: add support for patching metadata if resource exists
public final class DatacatCrawler {
/**
@@ -50,7 +46,7 @@
private static final Options OPTIONS = new Options();
static {
final StringBuffer buffer = new StringBuffer();
- for (final DatasetFileFormat format : DatasetFileFormat.values()) {
+ for (final FileFormat format : FileFormat.values()) {
buffer.append(format.name() + " ");
}
buffer.setLength(buffer.length() - 1);
@@ -67,7 +63,6 @@
OPTIONS.addOption("f", "folder", true, "datacat folder");
OPTIONS.addOption("h", "help", false, "print help and exit (overrides all other arguments)");
OPTIONS.addOption("o", "format", true, "add a file format for filtering: " + AVAILABLE_FORMATS);
- OPTIONS.addOption("m", "metadata", false, "create metadata for datasets");
OPTIONS.addOption("r", "run", true, "add a run number to accept");
OPTIONS.addOption("s", "site", true, "datacat site");
OPTIONS.addOption("t", "timestamp-file", true, "existing or new timestamp file name");
@@ -96,34 +91,13 @@
private final DefaultParser parser = new DefaultParser();
/**
- * The data catalog client interface.
- */
- private DatacatClient datacatClient;
-
- /**
- * Throw an exception if the path doesn't exist in the data catalog or it is not a folder.
- *
- * @param folder the folder in the datacat
- * @throws RuntimeException if the given path does not exist or it is not a folder
- */
- private void checkFolder(final String folder) {
- if (!datacatClient.exists(folder)) {
- throw new RuntimeException("The folder " + folder + " does not exist in the data catalog.");
- }
- if (!datacatClient.isFolder(folder)) {
- throw new RuntimeException("The path " + folder + " is not a folder.");
- }
- }
-
- /**
* Parse command line options.
*
* @param args the command line arguments
* @return this object (for method chaining)
*/
private DatacatCrawler parse(final String[] args) {
- config = new CrawlerConfig();
-
+
LOGGER.config("parsing command line options");
this.config = new CrawlerConfig();
@@ -210,29 +184,27 @@
// Configure enabled file formats.
if (cl.hasOption("o")) {
for (final String arg : cl.getOptionValues("o")) {
- DatasetFileFormat format = null;
+ FileFormat format = null;
try {
- format = DatasetFileFormat.valueOf(arg);
+ format = FileFormat.valueOf(arg);
} catch (IllegalArgumentException | NullPointerException e) {
throw new IllegalArgumentException("The format " + arg + " is not valid.", e);
}
LOGGER.config("adding format " + format.name());
this.config.addFileFormat(format);
}
- } else {
- throw new RuntimeException("The -o argument with data format must be supplied at least once.");
- }
-
- // Enable metadata extraction from files.
- if (cl.hasOption("m")) {
- config.setEnableMetadata(true);
- LOGGER.config("metadata extraction enabled");
+ }
+
+ // Enable the default set of file formats.
+ if (this.config.getFileFormats().isEmpty()) {
+ LOGGER.config("enabling default file formats");
+ this.config.addDefaultFileFormats();
}
// Datacat folder.
if (cl.hasOption("f")) {
config.setDatacatFolder(cl.getOptionValue("f"));
- LOGGER.config("set datacat folder to " + config.datacatFolder());
+ LOGGER.config("set datacat folder to " + config.folder());
} else {
throw new RuntimeException("The -f argument with the datacat folder is required.");
}
@@ -247,22 +219,25 @@
}
// Dataset site (defaults to JLAB).
- DatasetSite site = DatasetSite.JLAB;
+ Site site = Site.JLAB;
if (cl.hasOption("s")) {
- site = DatasetSite.valueOf(cl.getOptionValue("s"));
+ site = Site.valueOf(cl.getOptionValue("s"));
}
LOGGER.config("dataset site " + site);
- config.setDatasetSite(site);
+ config.setSite(site);
// Dry run.
if (cl.hasOption("D")) {
config.setDryRun(true);
}
+ // Data catalog URL.
if (cl.hasOption("u")) {
- config.setBaseUrl(cl.getOptionValue("u"));
- }
-
+ config.setDatacatUrl(cl.getOptionValue("u"));
+ LOGGER.config("datacat URL " + config.datacatUrl());
+ }
+
+ // List of paths.
if (!cl.getArgList().isEmpty()) {
for (String arg : cl.getArgList()) {
config.addPath(arg);
@@ -278,7 +253,7 @@
throw new IllegalStateException("At least one file format must be provided with the -f switch.");
}
- LOGGER.info("done parsing command line options");
+ LOGGER.info("Done parsing command line options.");
return this;
}
@@ -296,13 +271,7 @@
* Run the crawler job.
*/
private void run() {
-
- LOGGER.config("creating datacat client with url = " + config.baseUrl() + "; site = " + config.datasetSite() + "; rootFolder = " + config.rootFolder());
- datacatClient = new DatacatClientFactory().createClient(config.baseUrl(), config.datasetSite(), config.rootFolder());
-
- // Check the datacat folder which must already exist.
- this.checkFolder(config.datacatFolder());
-
+
// Create the file visitor for crawling the root directory with the given date filter.
final CrawlerFileVisitor visitor = new CrawlerFileVisitor();
@@ -312,6 +281,7 @@
LOGGER.config("added timestamp filter " + config.timestamp());
}
+ // Add path filter.
if (!config.paths().isEmpty()) {
visitor.addFilter(new PathFilter(config.paths()));
StringBuffer sb = new StringBuffer();
@@ -325,83 +295,27 @@
// Add file format filter.
visitor.addFilter(new FileFormatFilter(config.getFileFormats()));
- // Run number filter.
+ // Add run number filter.
if (!config.acceptRuns().isEmpty()) {
visitor.addFilter(new RunFilter(config.acceptRuns()));
}
- // Walk the file tree using the visitor with the enabled filters.
+ // Walk the file tree and get list of files.
this.walk(visitor);
+
+ // Insert datasets if files were found.
+ if (!visitor.getFiles().isEmpty()) {
+ List<DatasetModel> datasets = DatacatHelper.createDatasets(visitor.getFiles(), config.folder(), config.site().toString());
+ LOGGER.info("built " + datasets.size() + " datasets");
+ DatacatHelper.addDatasets(datasets, config.folder(), config.datacatUrl());
+ LOGGER.info("added datasets to datacat");
+ } else {
+ LOGGER.warning("No files were found by the crawler.");
+ }
- LOGGER.info(visitor.getFileSet().toString());
-
- // Update the data catalog.
- if (!visitor.getFileSet().isEmpty()) {
- this.updateDatacat(visitor.getFileSet());
- } else {
- LOGGER.warning("no files found");
- }
- }
-
- /**
- * Update the data catalog.
- *
- * @param runMap the map of run information including the EVIO file list
- */
- private void updateDatacat(final FileSet fileSet) {
- for (final DatasetFileFormat fileFormat : config.getFileFormats()) {
- List<File> formatFiles = fileSet.get(fileFormat);
- LOGGER.info("adding " + formatFiles.size() + " files with format " + fileFormat.name());
- for (final File file : formatFiles) {
-
- LOGGER.info("adding file " + file.getAbsolutePath());
-
- Map<String, Object> metadata = new HashMap<String, Object>();
-
- // Use file on JLAB cache disk if necessary.
- File actualFile = file;
- if (FileUtilities.isMssFile(file)) {
- actualFile = FileUtilities.getCachedFile(file);
- LOGGER.info("using cached file " + actualFile.getPath());
- }
-
- if (config.enableMetaData()) {
- // Create metadata map for file.
- LOGGER.info("creating metadata for " + actualFile.getPath());
- metadata = DatacatUtilities.createMetadata(actualFile);
- metadata.put("scanStatus", "OK");
- } else {
- // Assign run number even if metadata is not enabled.
- metadata = new HashMap<String, Object>();
- int run = FileUtilities.getRunFromFileName(file);
- metadata.put("runMin", run);
- metadata.put("runMax", run);
- metadata.put("scanStatus", "UNSCANNED");
- }
-
- // Register file in the catalog.
- if (!config.dryRun()) {
- int response = DatacatUtilities.addFile(
- datacatClient,
- config.datacatFolder(),
- file,
- actualFile.length(),
- config.datasetSite(),
- metadata);
- LOGGER.info("HTTP response " + response);
- if (response >= 400) {
- // Throw exception if response from server indicates an error occurred.
- throw new RuntimeException("HTTP error code " + response + " was received from server.");
- }
- } else {
- LOGGER.info("Skipped update on " + file.getPath() + " because dry run is enabled.");
- }
- }
- LOGGER.info("Successfully added " + formatFiles.size() + " " + fileFormat + " files to data catalog.");
- }
- LOGGER.info("Done updating data catalog.");
- }
-
+ LOGGER.info("Done!");
+ }
+
/**
* Walk the directory tree to find files for the runs that are being processed in the job.
*
Copied: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatHelper.java (from r4058, java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatUtilities.java)
=============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatUtilities.java (original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/DatacatHelper.java Wed Dec 16 15:25:21 2015
@@ -2,70 +2,77 @@
import java.io.File;
import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
import java.util.Map;
-
-import org.hps.datacat.client.DatacatClient;
-import org.hps.datacat.client.DatasetDataType;
-import org.hps.datacat.client.DatasetFileFormat;
-import org.hps.datacat.client.DatasetSite;
-import org.hps.record.evio.EvioFileUtilities;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import org.srs.datacat.client.Client;
+import org.srs.datacat.client.ClientBuilder;
+import org.srs.datacat.model.DatasetModel;
+import org.srs.datacat.shared.Dataset;
+import org.srs.datacat.shared.Provider;
/**
- * Datacat utilities for the crawler.
+ * Datacat helper functions for the crawler.
*
* @author Jeremy McCormick, SLAC
*/
-class DatacatUtilities {
-
- /**
- * Static map of strings to dataset file formats.
- */
- private static Map<String, DatasetFileFormat> formatMap = new HashMap<String, DatasetFileFormat>();
+class DatacatHelper {
+
+ /*
+ * Default base URL for datacat.
+ */
+ static final String DATACAT_URL = "http://hpsweb.jlab.org/datacat/r";
+
+ /*
+ * Static map of strings to file formats.
+ */
+ private static final Map<String, FileFormat> formatMap = new HashMap<String, FileFormat>();
static {
- for (final DatasetFileFormat format : DatasetFileFormat.values()) {
+ for (final FileFormat format : FileFormat.values()) {
formatMap.put(format.extension(), format);
}
}
-
- /**
- * Add a file to the data catalog.
- *
- * @param client the data catalog client
- * @param folder the folder name e.g. "data/raw"
- * @param fileMetadata the file's meta data including the path
- * @param fileFormat the file's format (EVIO, LCIO etc.)
- * @param dataType the file's data type (RAW, RECON, etc.)
- * @return the HTTP response code
- */
- static int addFile(final DatacatClient client, final String folder, final File file, long fileLength,
- final DatasetSite site, final Map<String, Object> metadata) {
-
- // Get the dataset format and type.
- final DatasetFileFormat fileFormat = DatacatUtilities.getFileFormat(file);
- final DatasetDataType dataType = DatacatUtilities.getDataType(file);
-
- // Add the dataset to the data catalog using the REST API.
- return client.addDataset(folder, dataType, file.getAbsolutePath(), fileLength, site, fileFormat, file.getName(), metadata);
- }
-
- /**
- * Create metadata for a file.
+
+ /*
+ * System metadata fields.
+ */
+ private static final Set<String> SYSTEM_METADATA = new HashSet<String>();
+ static {
+ SYSTEM_METADATA.add("eventCount");
+ SYSTEM_METADATA.add("size");
+ SYSTEM_METADATA.add("runMin");
+ SYSTEM_METADATA.add("runMax");
+ SYSTEM_METADATA.add("checksum");
+ SYSTEM_METADATA.add("scanStatus");
+ }
+
+ /**
+ * Create metadata for a file using its specific reader.
*
* @param file the file
* @return the metadata for the file
*/
static Map<String, Object> createMetadata(final File file) {
- final DatasetFileFormat fileFormat = DatacatUtilities.getFileFormat(file);
- final DatasetDataType dataType = DatacatUtilities.getDataType(file);
- final FileMetadataReader reader = DatacatUtilities.getFileMetaDataReader(fileFormat, dataType);
+ File actualFile = file;
+ if (FileUtilities.isMssFile(file)) {
+ actualFile = FileUtilities.getCachedFile(file);
+ }
+ final FileFormat fileFormat = DatacatHelper.getFileFormat(file);
+ final DataType dataType = DatacatHelper.getDataType(file);
+ final FileMetadataReader reader = DatacatHelper.getFileMetaDataReader(fileFormat, dataType);
if (reader == null) {
throw new RuntimeException("No metadata reader found for format " + fileFormat.name() + " and type "
+ dataType.name() + ".");
}
Map<String, Object> metadata;
try {
- metadata = reader.getMetadata(file);
+ metadata = reader.getMetadata(actualFile);
} catch (final IOException e) {
throw new RuntimeException(e);
}
@@ -78,24 +85,24 @@
* @param file the file
* @return the file's data type
*/
- static DatasetDataType getDataType(final File file) {
- final DatasetFileFormat fileFormat = getFileFormat(file);
- DatasetDataType dataType = null;
+ static DataType getDataType(final File file) {
+ final FileFormat fileFormat = getFileFormat(file);
+ DataType dataType = null;
if (fileFormat == null) {
throw new IllegalArgumentException("File has unknown format: " + file.getAbsolutePath());
}
- if (fileFormat.equals(DatasetFileFormat.EVIO)) {
- dataType = DatasetDataType.RAW;
- } else if (fileFormat.equals(DatasetFileFormat.LCIO)) {
- dataType = DatasetDataType.RECON;
- } else if (fileFormat.equals(DatasetFileFormat.ROOT)) {
+ if (fileFormat.equals(FileFormat.EVIO)) {
+ dataType = DataType.RAW;
+ } else if (fileFormat.equals(FileFormat.LCIO)) {
+ dataType = DataType.RECON;
+ } else if (fileFormat.equals(FileFormat.ROOT)) {
if (file.getName().contains("_dqm")) {
- dataType = DatasetDataType.DQM;
+ dataType = DataType.DQM;
} else if (file.getName().contains("_dst")) {
- dataType = DatasetDataType.DST;
+ dataType = DataType.DST;
}
- } else if (fileFormat.equals(DatasetFileFormat.AIDA)) {
- dataType = DatasetDataType.DQM;
+ } else if (fileFormat.equals(FileFormat.AIDA)) {
+ dataType = DataType.DQM;
}
if (dataType == null) {
throw new IllegalArgumentException("Could not determine data type for format: " + fileFormat.name());
@@ -109,9 +116,9 @@
* @param pathname the file
* @return the file format of the file
*/
- static DatasetFileFormat getFileFormat(final File pathname) {
+ static FileFormat getFileFormat(final File pathname) {
String name = pathname.getName();
- if (name.contains(DatasetFileFormat.EVIO.extension()) && !name.endsWith(DatasetFileFormat.EVIO.extension())) {
+ if (name.contains(FileFormat.EVIO.extension()) && !name.endsWith(FileFormat.EVIO.extension())) {
name = stripEvioFileNumber(name);
}
final String extension = name.substring(name.lastIndexOf(".") + 1);
@@ -125,17 +132,17 @@
* @param dataType the data type
* @return the file metadata reader
*/
- static FileMetadataReader getFileMetaDataReader(final DatasetFileFormat fileFormat, final DatasetDataType dataType) {
+ static FileMetadataReader getFileMetaDataReader(final FileFormat fileFormat, final DataType dataType) {
FileMetadataReader reader = null;
- if (fileFormat.equals(DatasetFileFormat.LCIO)) {
+ if (fileFormat.equals(FileFormat.LCIO)) {
reader = new LcioReconMetadataReader();
- } else if (fileFormat.equals(DatasetFileFormat.EVIO)) {
+ } else if (fileFormat.equals(FileFormat.EVIO)) {
reader = new EvioMetadataReader();
- } else if (fileFormat.equals(DatasetFileFormat.ROOT) && dataType.equals(DatasetDataType.DST)) {
+ } else if (fileFormat.equals(FileFormat.ROOT) && dataType.equals(DataType.DST)) {
reader = new RootDstMetadataReader();
- } else if (fileFormat.equals(DatasetFileFormat.ROOT) && dataType.equals(DatasetDataType.DQM)) {
+ } else if (fileFormat.equals(FileFormat.ROOT) && dataType.equals(DataType.DQM)) {
reader = new RootDqmMetadataReader();
- } else if (fileFormat.equals(DatasetFileFormat.AIDA)) {
+ } else if (fileFormat.equals(FileFormat.AIDA)) {
reader = new AidaMetadataReader();
}
return reader;
@@ -149,9 +156,109 @@
*/
static String stripEvioFileNumber(final String name) {
String strippedName = name;
- if (!name.endsWith(DatasetFileFormat.EVIO.extension())) {
+ if (!name.endsWith(FileFormat.EVIO.extension())) {
strippedName = name.substring(0, name.lastIndexOf("."));
}
return strippedName;
}
+
+
+
+ /**
+ * Create a dataset for insertion into the data catalog.
+ *
+ * @param file the file on disk
+ * @param metadata the metadata map
+ * @param folder the datacat folder
+ * @param site the datacat site
+ * @param dataType the data type
+ * @param fileFormat the file format
+ * @return the created dataset
+ */
+ static DatasetModel createDataset(
+ File file,
+ Map<String, Object> metadata,
+ String folder,
+ String site,
+ String dataType,
+ String fileFormat) {
+
+ Provider provider = new Provider();
+
+ Dataset.Builder datasetBuilder = provider.getDatasetBuilder();
+ datasetBuilder.versionId(1);
+ datasetBuilder.master(true);
+ datasetBuilder.name(file.getName());
+ datasetBuilder.resource(file.getPath());
+ datasetBuilder.size((Long) metadata.get("size"));
+ datasetBuilder.scanStatus("OK");
+ datasetBuilder.dataType(dataType);
+ datasetBuilder.fileFormat(fileFormat);
+ datasetBuilder.site(site);
+ if (metadata.get("eventCount") != null) {
+ datasetBuilder.eventCount((Long) metadata.get("eventCount"));
+ }
+ datasetBuilder.runMin((Long) metadata.get("runMin"));
+ datasetBuilder.runMax((Long) metadata.get("runMax"));
+ datasetBuilder.checksum((String) metadata.get("checksum"));
+
+ // Create user metadata leaving out system metadata fields.
+ Map<String, Object> userMetadata = new HashMap<String, Object>();
+ for (Entry<String, Object> metadataEntry : metadata.entrySet()) {
+ if (!SYSTEM_METADATA.contains(metadataEntry.getKey())) {
+ userMetadata.put(metadataEntry.getKey(), metadataEntry.getValue());
+ }
+ }
+ datasetBuilder.versionMetadata(userMetadata);
+
+ // Build and return the dataset.
+ return datasetBuilder.build();
+ }
+
+ /**
+ * Create datasets from a list of files.
+ *
+ * @param files the list of files
+ * @return the list of datasets
+ */
+ static List<DatasetModel> createDatasets(List<File> files, String folder, String site) {
+ List<DatasetModel> datasets = new ArrayList<DatasetModel>();
+ for (File file : files) {
+ Map<String, Object> metadata = createMetadata(file);
+ DataType dataType = DatacatHelper.getDataType(file);
+ FileFormat fileFormat = DatacatHelper.getFileFormat(file);
+ DatasetModel dataset = DatacatHelper.createDataset(
+ file,
+ metadata,
+ folder,
+ site,
+ dataType.toString(),
+ fileFormat.toString());
+ datasets.add(dataset);
+ }
+ return datasets;
+ }
+
+ /**
+ * Add datasets to the data catalog.
+ *
+ * @param datasets the list of datasets
+ * @param folder the target folder
+ * @param url the datacat URL
+ */
+ static void addDatasets(List<DatasetModel> datasets, String folder, String url) {
+ Client client = null;
+ try {
+ client = new ClientBuilder().setUrl(url).build();
+ } catch (URISyntaxException e) {
+ throw new RuntimeException("Bad datacat URL.", e);
+ }
+ for (DatasetModel dataset : datasets) {
+ client.createDataset(folder, dataset);
+ }
+ }
}
Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java
=============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java (original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java Wed Dec 16 15:25:21 2015
@@ -57,10 +57,12 @@
@Override
public Map<String, Object> getMetadata(final File file) throws IOException {
- int events = 0;
+ LOGGER.info("creating metadata for " + file.getPath());
+
+ long events = 0;
int badEvents = 0;
- boolean blinded = true;
- Integer run = null;
+ int blinded = 0;
+ Long run = null;
Integer firstHeadTimestamp = null;
Integer lastHeadTimestamp = null;
Integer lastPhysicsEvent = null;
@@ -82,10 +84,21 @@
// Get the file number from the name.
final int fileNumber = EvioFileUtilities.getSequenceFromName(file);
- // Files with sequence number divisible by 10 are unblinded (Eng Run 2015 scheme).
- if (fileNumber % 10 == 0) {
- blinded = false;
- }
+ // Files with a sequence number that is not divisible by 10 are blinded (Eng Run 2015 scheme).
+ if (!(fileNumber % 10 == 0)) {
+ blinded = 1;
+ }
+
+ // Get file size.
+ long size = 0;
+ File cacheFile = file;
+ if (FileUtilities.isMssFile(file)) {
+ cacheFile = FileUtilities.getCachedFile(file);
+ }
+ size = cacheFile.length();
+
+ // Compute MD5 checksum string.
+ String checksum = FileUtilities.createMD5Checksum(cacheFile);
EvioReader evioReader = null;
try {
@@ -105,6 +118,7 @@
break fileLoop;
}
+ // Increment event count (doesn't count events that can't be parsed).
++events;
// Debug print event number and tag.
@@ -137,7 +151,7 @@
// Run number.
if (run == null) {
if (headBankData[1] != 0) {
- run = headBankData[1];
+ run = (long) headBankData[1];
LOGGER.info("run " + run + " from event " + evioEvent.getEventNumber());
}
}
@@ -192,7 +206,7 @@
tiProcessor.process(evioEvent);
} catch (IOException | EvioException e) {
- // Trap event processing errors (not counted in event total).
+ // Trap event processing errors.
badEvents++;
LOGGER.warning("error processing EVIO event " + evioEvent.getEventNumber());
}
@@ -211,7 +225,7 @@
}
}
- LOGGER.info("done reading " + events + " events");
+ LOGGER.info("done reading " + events + " events from " + file.getPath());
// Rough trigger rate calculation.
try {
@@ -231,6 +245,8 @@
metadataMap.put("runMin", run);
metadataMap.put("runMax", run);
metadataMap.put("eventCount", events);
+ metadataMap.put("size", size);
+ metadataMap.put("checksum", checksum);
// File sequence number.
metadataMap.put("FILE", fileNumber);
@@ -252,13 +268,13 @@
metadataMap.put("TI_TIME_DELTA", maxTIDelta - minTIDelta);
- // TI time offset (stored as string because of bug in MySQL datacat backend).
+ // TI time offset (stored directly as a long; string workaround for MySQL datacat backend no longer needed).
- metadataMap.put("TI_TIME_OFFSET", ((Long) tiProcessor.getTiTimeOffset()).toString());
+ metadataMap.put("TI_TIME_OFFSET", tiProcessor.getTiTimeOffset());
// Event counts.
metadataMap.put("BAD_EVENTS", badEvents);
// Trigger rate in KHz.
- DecimalFormat df = new DecimalFormat("#.####");
+ DecimalFormat df = new DecimalFormat("#.##");
df.setRoundingMode(RoundingMode.CEILING);
metadataMap.put("TRIGGER_RATE", Double.parseDouble(df.format(triggerRate)));
@@ -280,14 +296,14 @@
}
/**
- * Calculate the trigger rate in KHz.
+ * Calculate the trigger rate in Hz.
*
* @param firstTimestamp the first physics timestamp
* @param lastTimestamp the last physics timestamp
* @param events the number of physics events
 * @return the trigger rate calculation in Hz
*/
- private double calculateTriggerRate(Integer firstTimestamp, Integer lastTimestamp, int events) {
- return ((double) events / ((double) lastTimestamp - (double) firstTimestamp)) / 1000.;
+ private double calculateTriggerRate(Integer firstTimestamp, Integer lastTimestamp, long events) {
+ return ((double) events / ((double) lastTimestamp - (double) firstTimestamp));
}
}
Added: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/FileFormat.java
=============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/FileFormat.java (added)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/FileFormat.java Wed Dec 16 15:25:21 2015
@@ -0,0 +1,61 @@
+package org.hps.crawler;
+
+
+/**
+ * Dataset file formats for HPS.
+ *
+ * @author Jeremy McCormick, SLAC
+ */
+public enum FileFormat {
+
+ /**
+ * EVIO data format.
+ */
+ EVIO(),
+ /**
+ * LCIO data format (note custom file extension).
+ */
+ LCIO("slcio"),
+ /**
+ * ROOT files.
+ */
+ ROOT(),
+ /**
+ * AIDA files.
+ */
+ AIDA(),
+ /**
+ * Testing only (do not use in production).
+ */
+ TEST(null);
+
+ /**
+ * The file extension of the format.
+ */
+ private String extension;
+
+ /**
+ * Create a file format with an extension.
+ *
+ * @param extension the file's extension
+ */
+ private FileFormat(String extension) {
+ this.extension = extension;
+ }
+
+ /**
+ * Create a file format with default extension (lower case of enum name).
+ */
+ private FileFormat() {
+ this.extension = this.name().toLowerCase();
+ }
+
+ /**
+ * Get the format's file extension.
+ *
+ * @return the format file extension
+ */
+ public String extension() {
+ return extension;
+ }
+}
Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/FileFormatFilter.java
=============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/FileFormatFilter.java (original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/FileFormatFilter.java Wed Dec 16 15:25:21 2015
@@ -3,8 +3,6 @@
import java.io.File;
import java.io.FileFilter;
import java.util.Set;
-
-import org.hps.datacat.client.DatasetFileFormat;
/**
* Filter files on their format.
@@ -18,14 +16,14 @@
/**
* The file format.
*/
- private final Set<DatasetFileFormat> formats;
+ private final Set<FileFormat> formats;
/**
* Create a new filter with the given format.
*
* @param format the file format
*/
- FileFormatFilter(final Set<DatasetFileFormat> formats) {
+ FileFormatFilter(final Set<FileFormat> formats) {
if (formats == null) {
throw new IllegalArgumentException("The formats collection is null.");
}
@@ -42,7 +40,7 @@
*/
@Override
public boolean accept(final File pathname) {
- final DatasetFileFormat fileFormat = DatacatUtilities.getFileFormat(pathname);
+ final FileFormat fileFormat = DatacatHelper.getFileFormat(pathname);
if (fileFormat != null) {
return formats.contains(fileFormat);
} else {
Copied: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/FileUtilities.java (from r4058, java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/util/FileUtilities.java)
=============================================================================
--- java/branches/jeremy-dev/record-util/src/main/java/org/hps/record/util/FileUtilities.java (original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/FileUtilities.java Wed Dec 16 15:25:21 2015
@@ -1,6 +1,11 @@
-package org.hps.record.util;
+package org.hps.crawler;
import java.io.File;
+import java.io.IOException;
+
+import com.google.common.hash.HashCode;
+import com.google.common.hash.Hashing;
+import com.google.common.io.Files;
/**
* File utilities for the datacat crawler.
@@ -15,7 +20,7 @@
* @param file the file
* @return the run number
*/
- public static int getRunFromFileName(final File file) {
+ static int getRunFromFileName(final File file) {
final String name = file.getName();
return Integer.parseInt(name.substring(4, 10));
}
@@ -30,7 +35,7 @@
* @return the cached file path (prepends "/cache" to the path)
* @throws IllegalArgumentException if the file is not on the MSS (e.g. path does not start with "/mss")
*/
- public static File getCachedFile(final File mssFile) {
+ static File getCachedFile(final File mssFile) {
if (!isMssFile(mssFile)) {
throw new IllegalArgumentException("File " + mssFile.getPath() + " is not on the JLab MSS.");
}
@@ -47,7 +52,7 @@
* @param file the file
* @return <code>true</code> if the file is a cached file
*/
- public static boolean isCachedFile(final File file) {
+ static boolean isCachedFile(final File file) {
return file.getPath().startsWith("/cache");
}
@@ -57,8 +62,18 @@
* @param file the file
* @return <code>true</code> if the file is on the MSS
*/
- public static boolean isMssFile(final File file) {
+ static boolean isMssFile(final File file) {
return file.getPath().startsWith("/mss");
+ }
+
+ /**
+ * Create an MD5 checksum for the file.
+ *
+ * @param file the file to hash
+ */
+ static String createMD5Checksum(File file) throws IOException {
+ HashCode md5 = Files.hash(file, Hashing.md5());
+ return md5.toString();
}
private FileUtilities() {
Modified: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/RootDqmMetadataReader.java
=============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/RootDqmMetadataReader.java (original)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/RootDqmMetadataReader.java Wed Dec 16 15:25:21 2015
@@ -4,8 +4,6 @@
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
-
-import org.hps.record.util.FileUtilities;
/**
* This is a metadata reader for ROOT DQM files.
Added: java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/Site.java
=============================================================================
--- java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/Site.java (added)
+++ java/branches/jeremy-dev/crawler/src/main/java/org/hps/crawler/Site.java Wed Dec 16 15:25:21 2015
@@ -0,0 +1,21 @@
+package org.hps.crawler;
+
+/**
+ * Site of a dataset (all sites, SLAC, or JLAB).
+ *
+ * @author Jeremy McCormick, SLAC
+ */
+public enum Site {
+ /**
+ * All sites.
+ */
+ all,
+ /**
+ * SLAC site.
+ */
+ SLAC,
+ /**
+ * JLAB site.
+ */
+ JLAB;
+}
Modified: java/branches/jeremy-dev/run-database/pom.xml
=============================================================================
--- java/branches/jeremy-dev/run-database/pom.xml (original)
+++ java/branches/jeremy-dev/run-database/pom.xml Wed Dec 16 15:25:21 2015
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.4.2-SNAPSHOT</version>
+ <version>3.5-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/run-database/</url>
@@ -20,8 +20,8 @@
<artifactId>hps-record-util</artifactId>
</dependency>
<dependency>
- <groupId>org.hps</groupId>
- <artifactId>hps-datacat-client</artifactId>
+ <groupId>srs</groupId>
+ <artifactId>org-srs-datacat-client</artifactId>
</dependency>
</dependencies>
</project>
Copied: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/DaoProvider.java (from r4058, java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseDaoFactory.java)
=============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseDaoFactory.java (original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/DaoProvider.java Wed Dec 16 15:25:21 2015
@@ -4,29 +4,37 @@
import java.sql.SQLException;
/**
- * Factory for creating database API objects for interacting with the run database.
+ * Provider for creating database API objects for interacting with the run database.
*
* @author Jeremy McCormick, SLAC
*/
-final class RunDatabaseDaoFactory {
+final class DaoProvider {
/**
* The database connection.
*/
private final Connection connection;
+
+ /* Object DAO interfaces created on demand. */
+ private EpicsDataDao epicsDao;
+ private EpicsVariableDao epicsVariableDao;
+ private RunSummaryDao runSummaryDao;
+ private ScalerDataDao scalerDao;
+ private SvtConfigDao svtDao;
+ private TriggerConfigDao configDao;
/**
* Create a new factory.
*
* @param connection the database connection
*/
- RunDatabaseDaoFactory(final Connection connection) {
+ DaoProvider(final Connection connection) {
if (connection == null) {
throw new IllegalArgumentException("The connection is null.");
}
try {
if (connection.isClosed()) {
- throw new IllegalStateException("The connection is closed.");
+ throw new IllegalStateException("The connection has already been closed.");
}
} catch (final SQLException e) {
throw new IllegalStateException("Error when checking connection status.", e);
@@ -39,8 +47,11 @@
*
* @return the EPICS DAO
*/
- EpicsDataDao createEpicsDataDao() {
- return new EpicsDataDaoImpl(connection);
+ EpicsDataDao getEpicsDataDao() {
+ if (epicsDao == null) {
+ epicsDao = new EpicsDataDaoImpl(connection);
+ }
+ return epicsDao;
}
/**
@@ -48,8 +59,11 @@
*
* @return the EPICS variable DAO
*/
- EpicsVariableDao createEpicsVariableDao() {
- return new EpicsVariableDaoImpl(connection);
+ EpicsVariableDao getEpicsVariableDao() {
+ if (epicsVariableDao == null) {
+ epicsVariableDao = new EpicsVariableDaoImpl(connection);
+ }
+ return epicsVariableDao;
}
/**
@@ -57,8 +71,11 @@
*
* @return the run summary DAO
*/
- RunSummaryDao createRunSummaryDao() {
- return new RunSummaryDaoImpl(connection);
+ RunSummaryDao getRunSummaryDao() {
+ if (runSummaryDao == null) {
+ runSummaryDao = new RunSummaryDaoImpl(connection);
+ }
+ return runSummaryDao;
}
/**
@@ -66,8 +83,11 @@
*
* @return the scaler data DAO
*/
- ScalerDataDao createScalerDataDao() {
- return new ScalerDataDaoImpl(connection);
+ ScalerDataDao getScalerDataDao() {
+ if (scalerDao == null) {
+ scalerDao = new ScalerDataDaoImpl(connection);
+ }
+ return scalerDao;
}
/**
@@ -75,8 +95,11 @@
*
* @return the SVT config DAO
*/
- SvtConfigDao createSvtConfigDao() {
- return new SvtConfigDaoImpl(connection);
+ SvtConfigDao getSvtConfigDao() {
+ if (svtDao == null) {
+ svtDao = new SvtConfigDaoImpl(connection);
+ }
+ return svtDao;
}
/**
@@ -84,7 +107,10 @@
*
* @return the trigger config DAO
*/
- TriggerConfigDao createTriggerConfigDao() {
- return new TriggerConfigDaoImpl(connection);
+ TriggerConfigDao getTriggerConfigDao() {
+ if (configDao == null) {
+ configDao = new TriggerConfigDaoImpl(connection);
+ }
+ return configDao;
}
}
Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java
=============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java (original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/EpicsDataDaoImpl.java Wed Dec 16 15:25:21 2015
@@ -74,6 +74,9 @@
/**
* Delete all EPICS data for a run from the database.
+ * <p>
+ * Only the <code>epics_header</code> records are deleted and the child records
+ * are deleted automatically via a <code>CASCADE</code>.
*
* @param run the run number
*/
@@ -242,7 +245,7 @@
if (dataRowsCreated == 0) {
throw new SQLException("Creation of EPICS data failed; no rows affected.");
}
- LOGGER.fine("inserted EPICS data with run " + epicsHeader.getRun() + "; seq " + epicsHeader.getSequence() + "; timestamp "
+ LOGGER.finer("inserted EPICS data with run " + epicsHeader.getRun() + "; seq " + epicsHeader.getSequence() + "; timestamp "
+ epicsHeader.getTimestamp());
insertStatement.close();
}
Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java
=============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java (original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java Wed Dec 16 15:25:21 2015
@@ -5,12 +5,8 @@
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.LinkedHashSet;
import java.util.List;
-import java.util.Map;
import java.util.Map.Entry;
-import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
@@ -18,8 +14,6 @@
import org.hps.conditions.database.DatabaseConditionsManager;
import org.hps.conditions.run.RunSpreadsheet;
import org.hps.conditions.run.RunSpreadsheet.RunData;
-import org.hps.datacat.client.DatacatClient;
-import org.hps.datacat.client.Dataset;
import org.hps.record.AbstractRecordProcessor;
import org.hps.record.daqconfig.DAQConfig;
import org.hps.record.daqconfig.DAQConfigEvioProcessor;
@@ -39,12 +33,15 @@
import org.hps.record.triggerbank.AbstractIntData.IntBankDefinition;
import org.hps.record.triggerbank.HeadBankData;
import org.hps.record.triggerbank.TiTimeOffsetEvioProcessor;
-import org.hps.record.util.FileUtilities;
import org.jlab.coda.jevio.BaseStructure;
import org.jlab.coda.jevio.EvioEvent;
import org.jlab.coda.jevio.EvioException;
import org.jlab.coda.jevio.EvioReader;
import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
+import org.srs.datacat.client.Client;
+import org.srs.datacat.model.DatasetModel;
+import org.srs.datacat.model.DatasetResultSetModel;
+import org.srs.datacat.model.dataset.DatasetWithViewModel;
/**
* Builds a complete {@link RunSummary} object from various data sources, including the data catalog and the run
@@ -70,40 +67,30 @@
private ConnectionParameters connectionParameters;
/**
- * Data catalog client API.
- */
- private DatacatClient datacatClient;
-
- /**
* Detector name for initializing conditions system.
*/
private String detectorName;
/**
- * Dry run to not perform database updates (off by default).
+ * Enable dry run to not perform database updates (off by default).
*/
private boolean dryRun = false;
/**
- * List of EPICS data from the run.
+ * List of EPICS data read from the EVIO files.
*/
private List<EpicsData> epicsData;
/**
- * Map of EVIO files to their dataset objects.
- */
- private Map<File, Dataset> evioDatasets;
-
- /**
- * List of EVIO files.
+ * List of EVIO datasets found in the datacat for the run.
+ */
+ private List<DatasetModel> evioDatasets;
+
+ /**
+ * List of EVIO files for processing.
*/
private List<File> evioFiles;
-
- /**
- * List of EVIO files with cache path/
- */
- private List<File> cacheFiles;
-
+
/**
* Allow replacement of information in the database (off by default).
*/
@@ -115,7 +102,7 @@
private RunSummaryImpl runSummary;
/**
- * List of scaler data from the run.
+ * List of scaler data read from the EVIO files.
*/
private List<ScalerData> scalerData;
@@ -125,7 +112,7 @@
private boolean skipEvioProcessing = false;
/**
- * Path to run spreadsheet CSV file (not used by default).
+ * Run spreadsheet CSV file with supplementary information (not used by default).
*/
private File spreadsheetFile;
@@ -145,7 +132,17 @@
private boolean reload;
/**
- * Reload state for the current run number (used for testing after a database insert).
+ * Data catalog client interface.
+ */
+ private Client datacatClient;
+
+ /**
+ * Datacat site to use.
+ */
+ private String site;
+
+ /**
+ * Reload state for the current run number for testing.
*/
static void reload(Connection connection, int run) {
@@ -182,52 +179,50 @@
runSummary = new RunSummaryImpl(run);
return this;
}
+
+ /**
+ * Create the EVIO file list from the data catalog datasets.
+ */
+ private void createEvioFileList() {
+ this.evioFiles = new ArrayList<File>();
+
+ for (DatasetModel dataset : this.evioDatasets) {
+ String resource =
+ ((DatasetWithViewModel) dataset).getViewInfo().getLocations().iterator().next().getResource();
+ File file = new File(resource);
+ if (file.getPath().startsWith("/mss")) {
+ file = new File("/cache" + resource);
+ }
+ this.evioFiles.add(file);
+ }
+ EvioFileUtilities.sortBySequence(this.evioFiles);
+ }
/**
* Find EVIO files in the data catalog.
*/
private void findEvioDatasets() {
+
LOGGER.info("finding EVIO datasets for run " + getRun());
-
- // Metadata to return from search.
- final Set<String> metadata = new LinkedHashSet<String>();
- metadata.add("runMin");
- metadata.add("eventCount");
-
- // Initialize map of files to datasets.
- evioDatasets = new HashMap<File, Dataset>();
-
- // Find datasets in the datacat using a search.
- final List<Dataset> datasets = datacatClient.findDatasets(
- "data/raw",
+
+ DatasetResultSetModel results = datacatClient.searchForDatasets(
+ "/HPS/data/raw",
+ "current",
+ this.site,
"fileFormat eq 'EVIO' AND dataType eq 'RAW' AND runMin eq " + getRun(),
- metadata);
- if (datasets.isEmpty()) {
- // No files for the run in datacat is a fatal error.
- throw new IllegalStateException("No EVIO datasets for run " + getRun() + " were found in the data catalog.");
- }
-
- // Map files to datasets.
- for (final Dataset dataset : datasets) {
- evioDatasets.put(new File(dataset.getLocations().get(0).getResource()), dataset);
- }
-
- // Create the list of sorted EVIO files.
- evioFiles = new ArrayList<File>();
- evioFiles.addAll(evioDatasets.keySet());
- EvioFileUtilities.sortBySequence(evioFiles);
-
- // Create a list of files with cache paths in case running at JLAB.
- cacheFiles = new ArrayList<File>();
- for (File file : evioFiles) {
- if (FileUtilities.isMssFile(file)) {
- cacheFiles.add(FileUtilities.getCachedFile(file));
- } else {
- cacheFiles.add(file);
- }
- }
-
- LOGGER.info("found " + evioFiles.size() + " EVIO file(s) for run " + runSummary.getRun());
+ null,
+ null,
+ null,
+ null
+ );
+
+ this.evioDatasets = results.getResults();
+
+ if (this.evioDatasets.isEmpty()) {
+ throw new RuntimeException("No EVIO datasets found in data catalog for run " + getRun() + ".");
+ }
+
+ this.evioFiles = new ArrayList<File>();
}
/**
@@ -247,16 +242,16 @@
LOGGER.info("inserting run " + runSummary.getRun() + " into db");
// Create DAO factory.
- final RunDatabaseDaoFactory runFactory = new RunDatabaseDaoFactory(connection);
+ final DaoProvider runFactory = new DaoProvider(connection);
// Insert the run summary record.
LOGGER.info("inserting run summary");
- runFactory.createRunSummaryDao().insertRunSummary(runSummary);
+ runFactory.getRunSummaryDao().insertRunSummary(runSummary);
// Insert the EPICS data.
if (epicsData != null) {
LOGGER.info("inserting EPICS data");
- runFactory.createEpicsDataDao().insertEpicsData(epicsData);
+ runFactory.getEpicsDataDao().insertEpicsData(epicsData);
} else {
LOGGER.warning("no EPICS data to insert");
}
@@ -264,7 +259,7 @@
// Insert the scaler data.
if (scalerData != null) {
LOGGER.info("inserting scaler data");
- runFactory.createScalerDataDao().insertScalerData(scalerData, getRun());
+ runFactory.getScalerDataDao().insertScalerData(scalerData, getRun());
} else {
LOGGER.warning("no scaler data to insert");
}
@@ -272,7 +267,7 @@
// Insert SVT config data.
if (this.svtConfigs != null) {
LOGGER.info("inserting SVT config");
- runFactory.createSvtConfigDao().insertSvtConfigs(svtConfigs, getRun());
+ runFactory.getSvtConfigDao().insertSvtConfigs(svtConfigs, getRun());
} else {
LOGGER.warning("no SVT config to insert");
}
@@ -280,7 +275,7 @@
// Insert trigger config data.
if (this.config != null) {
LOGGER.info("inserting trigger config");
- runFactory.createTriggerConfigDao().insertTriggerConfig(config, getRun());
+ runFactory.getTriggerConfigDao().insertTriggerConfig(config, getRun());
} else {
LOGGER.warning("no trigger config to insert");
}
@@ -330,10 +325,6 @@
LOGGER.fine("processing EVIO files");
- if (evioFiles == null || evioFiles.isEmpty()) {
- throw new IllegalStateException("No EVIO files were found.");
- }
-
if (detectorName == null) {
throw new IllegalStateException("The detector name was not set.");
}
@@ -374,10 +365,13 @@
// Run the job using the EVIO loop.
EvioLoop loop = new EvioLoop();
loop.addProcessors(processors);
- EvioFileSource source = new EvioFileSource(cacheFiles);
+ EvioFileSource source = new EvioFileSource(this.evioFiles);
loop.setEvioFileSource(source);
loop.loop(-1);
-
+
+ // Update total events from loop state.
+ runSummary.setTotalEvents(loop.getTotalCountableConsumed());
+
// Set livetime field values.
updateLivetimes(scalersProcessor);
@@ -420,6 +414,9 @@
// Find EVIO datasets in the datacat.
findEvioDatasets();
+
+ // Create list of EVIO files from datasets.
+ createEvioFileList();
// Set total number of files.
updateTotalFiles();
@@ -429,34 +426,33 @@
// Set END timestamp.
updateEndTimestamp();
-
- // Set total number of events.
- updateTotalEvents();
// Calculate trigger rate.
updateTriggerRate();
- // Run EVIO job if enabled.
+ // Run the full EVIO processing job.
if (!this.skipEvioProcessing) {
processEvioFiles();
} else {
LOGGER.info("EVIO file processing is skipped.");
}
-
- // Get extra info from spreadsheet if enabled.
+
+ // Get extra info from the spreadsheet.
if (this.spreadsheetFile != null) {
updateFromSpreadsheet();
} else {
LOGGER.info("Run spreadsheet not used.");
}
- // Print out summary info to the log before updating database.
+ // Print out summary info before updating database.
printSummary();
-
+
if (!dryRun) {
- // Update the database.
+
+ // Perform the database update; this will throw a runtime exception if there is an error.
updateDatabase();
-
+
+ // Optionally load back run information.
if (reload) {
LOGGER.info("reloading data for run " + getRun() + " ...");
reload(connectionParameters.createConnection(), getRun());
@@ -466,6 +462,8 @@
// Dry run so database is not updated.
LOGGER.info("Dry run enabled so no updates were performed.");
}
+
+ LOGGER.info("Done!");
return this;
}
@@ -487,7 +485,7 @@
* @param datacatClient the datacat client
* @return this object
*/
- RunDatabaseBuilder setDatacatClient(DatacatClient datacatClient) {
+ RunDatabaseBuilder setDatacatClient(Client datacatClient) {
this.datacatClient = datacatClient;
return this;
}
@@ -536,6 +534,11 @@
RunDatabaseBuilder setReplace(boolean replace) {
this.replace = replace;
LOGGER.config("replace = " + this.replace);
+ return this;
+ }
+
+ RunDatabaseBuilder setSite(String site) {
+ this.site = site;
return this;
}
@@ -612,8 +615,9 @@
try {
LOGGER.log(Level.SEVERE, "Error occurred updating database; rolling back transaction...", e1);
connection.rollback();
+ throw new RuntimeException("Failed to insert run.", e1);
} catch (SQLException e2) {
- throw new RuntimeException(e2);
+ throw new RuntimeException("Error performing rollback.", e2);
}
}
@@ -627,7 +631,7 @@
private void updateEndTimestamp() {
LOGGER.info("updating end timestamp");
IntBankDefinition headBankDefinition = new IntBankDefinition(HeadBankData.class, new int[] {0x2e, 0xe10f});
- File lastEvioFile = cacheFiles.get(cacheFiles.size() - 1);
+ File lastEvioFile = evioFiles.get(evioFiles.size() - 1);
LOGGER.info("setting end timestamp from file " + lastEvioFile.getPath());
EvioReader reader = null;
Integer endTimestamp = null;
@@ -726,7 +730,7 @@
*/
private void updateStartTimestamps() {
LOGGER.fine("updating start timestamps");
- File firstEvioFile = cacheFiles.get(0);
+ File firstEvioFile = evioFiles.get(0);
LOGGER.info("setting start timestamps from file " + firstEvioFile.getPath());
int sequence = EvioFileUtilities.getSequenceFromName(firstEvioFile);
if (sequence != 0) {
@@ -765,19 +769,6 @@
}
/**
- * Update the total number of events.
- */
- private void updateTotalEvents() {
- LOGGER.fine("updating total events");
- int totalEvents = 0;
- for (Entry<File, Dataset> entry : evioDatasets.entrySet()) {
- totalEvents += entry.getValue().getLocations().get(0).getEventCount();
- }
- runSummary.setTotalEvents(totalEvents);
- LOGGER.info("total events set to " + runSummary.getTotalEvents());
- }
-
- /**
* Update the total number of EVIO files in the run.
*/
private void updateTotalFiles() {
@@ -801,8 +792,8 @@
LOGGER.warning("Could not get starting timestamp for trigger rate calculation.");
}
if (runSummary.getEndTimestamp() != null && startTimestamp != null) {
- double triggerRate = ((double) runSummary.getTotalEvents() / ((double) runSummary.getEndTimestamp() - (double) runSummary
- .getGoTimestamp())) / 1000.;
+ double triggerRate = ((double) runSummary.getTotalEvents() /
+ ((double) runSummary.getEndTimestamp() - (double) runSummary.getGoTimestamp()));
runSummary.setTriggerRate(triggerRate);
LOGGER.info("trigger rate set to " + runSummary.getTriggerRate());
} else {
Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java
=============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java (original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java Wed Dec 16 15:25:21 2015
@@ -1,6 +1,7 @@
package org.hps.run.database;
import java.io.File;
+import java.net.URISyntaxException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
@@ -8,10 +9,8 @@
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.hps.conditions.database.ConnectionParameters;
-import org.hps.datacat.client.DatacatClient;
-import org.hps.datacat.client.DatacatClientFactory;
-import org.hps.datacat.client.DatacatConstants;
-import org.hps.datacat.client.DatasetSite;
+import org.srs.datacat.client.Client;
+import org.srs.datacat.client.ClientBuilder;
/**
* Command line tool for inserting records into the run database.
@@ -38,9 +37,8 @@
OPTIONS.addOption("d", "detector", true, "conditions system detector name");
OPTIONS.addOption("N", "no-evio-processing", false, "skip processing of all EVIO files");
OPTIONS.addOption("L", "load", false, "load back run information after inserting (for debugging)");
- OPTIONS.addOption("u", "url", true, "datacat URL");
- OPTIONS.addOption("S", "site", true, "datacat site (e.g. SLAC or JLAB)");
- // TODO: add -D option for defining metadata values
+ OPTIONS.addOption("u", "url", true, "data catalog URL");
+ OPTIONS.addOption("S", "site", true, "data catalog site (e.g. SLAC or JLAB)");
}
/**
@@ -49,7 +47,6 @@
* @param args the command line arguments
*/
public static void main(final String args[]) {
- // Parse command line options and run the job.
new RunDatabaseCommandLine().parse(args).run();
}
@@ -94,9 +91,19 @@
private ConnectionParameters connectionParameters = null;
/**
- * Datacat client to use for connecting to data catalog.
- */
- private DatacatClient datacatClient = null;
+ * Data catalog client interface.
+ */
+ private Client datacatClient = null;
+
+ /**
+ * Data catalog site.
+ */
+ private String site = "JLAB";
+
+ /**
+ * Data catalog URL.
+ */
+ private String url = "http://hpsweb.jlab.org/datacat/r";
/**
* Parse command line options and return reference to <code>this</code> object.
@@ -169,17 +176,22 @@
this.reload = true;
}
- // Setup datacat client.
- DatasetSite site = DatasetSite.JLAB;
- String url = DatacatConstants.BASE_URL;
- String rootFolder = DatacatConstants.ROOT_FOLDER;
+ // Data catalog URL.
if (cl.hasOption("u")) {
url = cl.getOptionValue("u");
}
+
+ // Site in the data catalog.
if (cl.hasOption("S")) {
- site = DatasetSite.valueOf(cl.getOptionValue("S"));
- }
- datacatClient = new DatacatClientFactory().createClient(url, site, rootFolder);
+ site = cl.getOptionValue("S");
+ }
+
+ // Initialize the data catalog client.
+ try {
+ datacatClient = new ClientBuilder().setUrl(url).build();
+ } catch (URISyntaxException e) {
+ throw new RuntimeException("Bad datacat URL.", e);
+ }
} catch (final ParseException e) {
throw new RuntimeException(e);
@@ -197,6 +209,7 @@
.setDetectorName(detectorName)
.setConnectionParameters(connectionParameters)
.setDatacatClient(datacatClient)
+ .setSite(site)
.setDryRun(dryRun)
.setReplace(replace)
.skipEvioProcessing(skipEvioProcessing)
Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunManager.java
=============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunManager.java (original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunManager.java Wed Dec 16 15:25:21 2015
@@ -21,18 +21,6 @@
public final class RunManager implements ConditionsListener {
/**
- * Simple class for caching data.
- */
- private class DataCache {
- Boolean runExists = null;
- RunSummary runSummary = null;
- List<EpicsData> epicsData = null;
- List<ScalerData> scalerData = null;
- List<SvtConfigData> svtConfigData = null;
- DAQConfig daqConfig = null;
- }
-
- /**
* The default connection parameters for read-only access to the run database.
*/
private static ConnectionParameters DEFAULT_CONNECTION_PARAMETERS = new ConnectionParameters("hpsuser",
@@ -71,14 +59,9 @@
private final ConnectionParameters connectionParameters = DEFAULT_CONNECTION_PARAMETERS;
/**
- * The data cache of run information.
- */
- private DataCache cache;
-
- /**
* Factory for creating database API objects.
*/
- private final RunDatabaseDaoFactory factory;
+ private final DaoProvider factory;
/**
* The run number; the -1 value indicates that this has not been set externally yet.
@@ -91,7 +74,7 @@
public RunManager() {
this.connection = DEFAULT_CONNECTION_PARAMETERS.createConnection();
this.openConnection();
- factory = new RunDatabaseDaoFactory(this.connection);
+ factory = new DaoProvider(this.connection);
}
/**
@@ -102,7 +85,7 @@
public RunManager(final Connection connection) {
this.connection = connection;
this.openConnection();
- factory = new RunDatabaseDaoFactory(this.connection);
+ factory = new DaoProvider(this.connection);
}
/**
@@ -110,7 +93,7 @@
*/
private void checkRunNumber() {
if (this.run == null) {
- throw new IllegalStateException("The run number was not set.");
+ throw new IllegalStateException("The run number was never set.");
}
}
@@ -143,12 +126,12 @@
* @param run the run number
*/
void deleteRun() {
- factory.createEpicsDataDao().deleteEpicsData(EpicsType.EPICS_2S, run);
- factory.createEpicsDataDao().deleteEpicsData(EpicsType.EPICS_20S, run);
- factory.createScalerDataDao().deleteScalerData(run);
- factory.createSvtConfigDao().deleteSvtConfigs(run);
- factory.createTriggerConfigDao().deleteTriggerConfig(run);
- factory.createRunSummaryDao().deleteRunSummary(run);
+ factory.getEpicsDataDao().deleteEpicsData(EpicsType.EPICS_2S, run);
+ factory.getEpicsDataDao().deleteEpicsData(EpicsType.EPICS_20S, run);
+ factory.getScalerDataDao().deleteScalerData(run);
+ factory.getSvtConfigDao().deleteSvtConfigs(run);
+ factory.getTriggerConfigDao().deleteTriggerConfig(run);
+ factory.getRunSummaryDao().deleteRunSummary(run);
}
/**
@@ -168,11 +151,7 @@
*/
public List<EpicsData> getEpicsData(final EpicsType epicsType) {
this.checkRunNumber();
- if (this.cache.epicsData == null) {
- LOGGER.info("loading EPICS data for run " + this.run);
- this.cache.epicsData = factory.createEpicsDataDao().getEpicsData(epicsType, this.run);
- }
- return this.cache.epicsData;
+ return factory.getEpicsDataDao().getEpicsData(epicsType, this.run);
}
/**
@@ -182,7 +161,8 @@
* @return the EPICS data for the current run
*/
public List<EpicsVariable> getEpicsVariables(final EpicsType epicsType) {
- return factory.createEpicsVariableDao().getEpicsVariables(epicsType);
+ this.checkRunNumber();
+ return factory.getEpicsVariableDao().getEpicsVariables(epicsType);
}
/**
@@ -200,7 +180,7 @@
* @return the complete list of run numbers
*/
public List<Integer> getRuns() {
- return new RunSummaryDaoImpl(this.connection).getRuns();
+ return factory.getRunSummaryDao().getRuns();
}
/**
@@ -210,10 +190,7 @@
*/
public RunSummary getRunSummary() {
this.checkRunNumber();
- if (this.cache.runSummary == null) {
- this.cache.runSummary = factory.createRunSummaryDao().getRunSummary(this.run);
- }
- return this.cache.runSummary;
+ return factory.getRunSummaryDao().getRunSummary(this.run);
}
/**
@@ -223,11 +200,7 @@
*/
public List<ScalerData> getScalerData() {
this.checkRunNumber();
- if (this.cache.scalerData == null) {
- LOGGER.info("loading scaler data for run " + this.run);
- this.cache.scalerData = factory.createScalerDataDao().getScalerData(run);
- }
- return this.cache.scalerData;
+ return factory.getScalerDataDao().getScalerData(run);
}
/**
@@ -237,11 +210,7 @@
*/
public List<SvtConfigData> getSvtConfigData() {
this.checkRunNumber();
- if (this.cache.svtConfigData == null) {
- LOGGER.info("loading SVT configuration data for run " + this.run);
- this.cache.svtConfigData = factory.createSvtConfigDao().getSvtConfigs(run);
- }
- return this.cache.svtConfigData;
+ return factory.getSvtConfigDao().getSvtConfigs(run);
}
/**
@@ -251,11 +220,8 @@
*/
public DAQConfig getDAQConfig() {
this.checkRunNumber();
- if (this.cache.daqConfig == null) {
- TriggerConfig config = factory.createTriggerConfigDao().getTriggerConfig(run);
- cache.daqConfig = config.loadDAQConfig(run);
- }
- return this.cache.daqConfig;
+ TriggerConfig config = factory.getTriggerConfigDao().getTriggerConfig(run);
+ return config.loadDAQConfig(run);
}
/**
@@ -280,11 +246,7 @@
* @return <code>true</code> if the run exists in the database
*/
public boolean runExists() {
- this.checkRunNumber();
- if (this.cache.runExists == null) {
- this.cache.runExists = factory.createRunSummaryDao().runSummaryExists(this.run);
- }
- return this.cache.runExists;
+ return factory.getRunSummaryDao().runSummaryExists(this.run);
}
/**
@@ -294,7 +256,7 @@
* @return <code>true</code> if the run exists in the database
*/
boolean runExists(final int run) {
- return factory.createRunSummaryDao().runSummaryExists(run);
+ return factory.getRunSummaryDao().runSummaryExists(run);
}
/**
@@ -310,9 +272,6 @@
// Set the run number.
this.run = run;
-
- // Reset the data cache.
- this.cache = new DataCache();
}
}
}
Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummary.java
=============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummary.java (original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummary.java Wed Dec 16 15:25:21 2015
@@ -97,7 +97,7 @@
*
* @return the total number of events in the run
*/
- Integer getTotalEvents();
+ Long getTotalEvents();
/**
* Get the total number of EVIO files in this run.
Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java
=============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java (original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryDaoImpl.java Wed Dec 16 15:25:21 2015
@@ -135,7 +135,7 @@
throw new IllegalArgumentException("Run " + run + " does not exist in database.");
}
runSummary = new RunSummaryImpl(run);
- runSummary.setTotalEvents(resultSet.getInt("nevents"));
+ runSummary.setTotalEvents(resultSet.getLong("nevents"));
runSummary.setTotalFiles(resultSet.getInt("nfiles"));
runSummary.setPrestartTimestamp(resultSet.getInt("prestart_timestamp"));
runSummary.setGoTimestamp(resultSet.getInt("go_timestamp"));
@@ -175,7 +175,7 @@
try {
preparedStatement = connection.prepareStatement(INSERT);
preparedStatement.setInt(1, runSummary.getRun());
- preparedStatement.setInt(2, runSummary.getTotalEvents());
+ preparedStatement.setLong(2, runSummary.getTotalEvents());
preparedStatement.setInt(3, runSummary.getTotalFiles());
/* Use setObject on the rest as they may be null. */
preparedStatement.setObject(4, runSummary.getPrestartTimestamp());
Modified: java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryImpl.java
=============================================================================
--- java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryImpl.java (original)
+++ java/branches/jeremy-dev/run-database/src/main/java/org/hps/run/database/RunSummaryImpl.java Wed Dec 16 15:25:21 2015
@@ -67,7 +67,7 @@
/**
* The total events found in the run across all files.
*/
- private Integer totalEvents;
+ private Long totalEvents;
/**
* The total number of files in the run.
@@ -154,7 +154,7 @@
}
@Override
- public Integer getTotalEvents() {
+ public Long getTotalEvents() {
return this.totalEvents;
}
@@ -273,7 +273,7 @@
*
* @param totalEvents the total number of physics events in the run
*/
- void setTotalEvents(final Integer totalEvents) {
+ void setTotalEvents(final Long totalEvents) {
this.totalEvents = totalEvents;
}
|