12 added files
java/sandbox/ecal-readout-sim
--- java/sandbox/ecal-readout-sim/pom.xml (rev 0)
+++ java/sandbox/ecal-readout-sim/pom.xml 2014-03-25 23:53:28 UTC (rev 347)
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>hps-ecal-readout-sim</artifactId>
+ <name>hps-ecal-readout-sim</name>
+ <description>HPS ECAL readout simulation</description>
+
+ <parent>
+ <groupId>org.hps</groupId>
+ <artifactId>hps-parent</artifactId>
+ <relativePath>../parent/pom.xml</relativePath>
+ <version>3.0.2-SNAPSHOT</version>
+ </parent>
+
+ <scm>
+ <url>http://java.freehep.org/svn/repos/hps/list/java/trunk/ecal-readout-sim/</url>
+ <connection>scm:svn:svn://svn.freehep.org/hps/java/trunk/ecal-readout-sim/</connection>
+ <developerConnection>scm:svn:svn://svn.freehep.org/hps/java/trunk/ecal-readout-sim/</developerConnection>
+ </scm>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.lcsim</groupId>
+ <artifactId>lcsim-distribution</artifactId>
+ <version>${lcsimVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.hps</groupId>
+ <artifactId>hps-ecal-recon</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.hps</groupId>
+ <artifactId>hps-conditions</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.hps</groupId>
+ <artifactId>hps-util</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-math3</artifactId>
+ <version>3.2</version>
+ </dependency>
+ </dependencies>
+
+</project>
java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal
--- java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/DummyTriggerDriver.java (rev 0)
+++ java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/DummyTriggerDriver.java 2014-03-25 23:53:28 UTC (rev 347)
@@ -0,0 +1,24 @@
+package org.hps.readout.ecal;
+
+import org.lcsim.event.EventHeader;
+import org.lcsim.hps.util.ClockSingleton;
+
+/**
+ * Free-running trigger - triggers on every Nth event
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: DummyTriggerDriver.java,v 1.3 2013/04/02 01:11:11 meeg Exp $
+ */
+public class DummyTriggerDriver extends TriggerDriver {
+
+    int period = 100; //trigger on every Nth bunch; default every 100th
+
+    public void setPeriod(int period) {
+        this.period = period;
+    }
+
+    @Override
+    public boolean triggerDecision(EventHeader event) {
+        return (ClockSingleton.getClock() % period == 0); //fire whenever the global bunch clock is a multiple of period
+    }
+}
java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal
--- java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/EcalReadoutDriver.java (rev 0)
+++ java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/EcalReadoutDriver.java 2014-03-25 23:53:28 UTC (rev 347)
@@ -0,0 +1,154 @@
+package org.hps.readout.ecal;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.EventHeader;
+import org.lcsim.hps.util.ClockSingleton;
+import org.lcsim.lcio.LCIOConstants;
+
+/**
+ * Performs readout of ECal hits.
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: EcalReadoutDriver.java,v 1.4 2013/03/20 01:03:32 meeg Exp $
+ */
+public abstract class EcalReadoutDriver<T> extends TriggerableDriver {
+
+    String ecalCollectionName;
+    String ecalRawCollectionName = "EcalRawHits";
+    String ecalReadoutName = "EcalHits";
+    Class hitClass; //concrete type of the hits written to ecalRawCollectionName; set by subclasses
+    //hit type as in org.lcsim.recon.calorimetry.CalorimeterHitType
+    int hitType = 0;
+    //number of bunches in readout cycle
+    int readoutCycle = 1;
+    //minimum readout value to write a hit
+    double threshold = 0.0;
+    //LCIO flags
+    int flags = 0;
+    //readout period in ns
+    double readoutPeriod = 2.0;
+    //readout period time offset in ns
+    double readoutOffset = 0.0;
+    //readout period counter
+    int readoutCounter;
+    public static boolean readoutBit = false; //NOTE(review): mutable public static shared by all instances — confirm this is intended
+    protected boolean debug = false;
+
+    public EcalReadoutDriver() {
+        flags += 1 << LCIOConstants.CHBIT_LONG; //store position
+        flags += 1 << LCIOConstants.RCHBIT_ID1; //store cell ID
+        triggerDelay = 100.0;
+    }
+
+    public void setDebug(boolean debug) {
+        this.debug = debug;
+    }
+
+    public void setEcalReadoutName(String ecalReadoutName) {
+        this.ecalReadoutName = ecalReadoutName;
+    }
+
+    public void setEcalRawCollectionName(String ecalRawCollectionName) {
+        this.ecalRawCollectionName = ecalRawCollectionName;
+    }
+
+    public void setEcalCollectionName(String ecalCollectionName) {
+        this.ecalCollectionName = ecalCollectionName;
+    }
+
+    public void setReadoutCycle(int readoutCycle) {
+        this.readoutCycle = readoutCycle;
+        if (readoutCycle > 0) {
+            this.readoutPeriod = readoutCycle * ClockSingleton.getDt(); //keep the period consistent with the bunch clock
+        }
+    }
+
+    public void setReadoutOffset(double readoutOffset) {
+        this.readoutOffset = readoutOffset;
+    }
+
+    public void setReadoutPeriod(double readoutPeriod) {
+        this.readoutPeriod = readoutPeriod;
+        this.readoutCycle = -1; //negative cycle selects the free-running readoutPeriod branch in process()
+    }
+
+    public void setThreshold(double threshold) {
+        this.threshold = threshold;
+    }
+
+    @Override
+    public void startOfData() {
+        super.startOfData();
+        if (ecalCollectionName == null) {
+            throw new RuntimeException("The parameter ecalCollectionName was not set!");
+        }
+
+        readoutCounter = 0;
+
+        initReadout();
+    }
+
+    @Override
+    public void process(EventHeader event) {
+        //System.out.println(this.getClass().getCanonicalName() + " - process");
+        // Get the list of ECal hits.
+        if (event.hasCollection(CalorimeterHit.class, ecalCollectionName)) {
+            List<CalorimeterHit> hits = event.get(CalorimeterHit.class, ecalCollectionName);
+
+            //write hits into buffers
+            putHits(hits);
+        }
+
+        ArrayList<T> newHits = null;
+
+        //if at the end of a readout cycle, write buffers to hits
+        if (readoutCycle > 0) {
+            if ((ClockSingleton.getClock() + 1) % readoutCycle == 0) { //last bunch of the current readout cycle
+                if (newHits == null) {
+                    newHits = new ArrayList<T>();
+                }
+                readHits(newHits);
+                readoutCounter++;
+            }
+        } else {
+            while (ClockSingleton.getTime() - readoutTime() + ClockSingleton.getDt() >= readoutPeriod) { //flush every readout period that has fully elapsed
+                if (newHits == null) {
+                    newHits = new ArrayList<T>();
+                }
+                readHits(newHits);
+                readoutCounter++;
+            }
+        }
+
+        if (newHits != null) {
+            event.put(ecalRawCollectionName, newHits, hitClass, flags, ecalReadoutName);
+        }
+
+        checkTrigger(event);
+    }
+
+    protected double readoutTime() {
+        return readoutCounter * readoutPeriod + readoutOffset; //start time (ns) of the current readout period
+    }
+
+    //read analog signal out of buffers and make hits; reset buffers
+    protected abstract void readHits(List<T> hits);
+
+    //add deposited energy to buffers
+    //must be run every event, even if the list is empty
+    protected abstract void putHits(List<CalorimeterHit> hits);
+
+    @Override
+    protected void processTrigger(EventHeader event) { //no-op here; subclasses override to emit triggered readout
+    }
+
+    //initialize buffers
+    protected abstract void initReadout();
+
+    public int getTimestampType() {
+        return ReadoutTimestamp.SYSTEM_ECAL;
+    }
+}
\ No newline at end of file
java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal
--- java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCEcalReadoutDriver.java (rev 0)
+++ java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCEcalReadoutDriver.java 2014-03-25 23:53:28 UTC (rev 347)
@@ -0,0 +1,571 @@
+package org.hps.readout.ecal;
+
+import static org.hps.recon.ecal.ECalUtils.ecalReadoutPeriod;
+import static org.hps.recon.ecal.ECalUtils.fallTime;
+import static org.hps.recon.ecal.ECalUtils.maxVolt;
+import static org.hps.recon.ecal.ECalUtils.nBit;
+import static org.hps.recon.ecal.ECalUtils.readoutGain;
+import static org.hps.recon.ecal.ECalUtils.riseTime;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.PriorityQueue;
+import java.util.Set;
+
+import org.hps.conditions.deprecated.EcalConditions;
+import org.hps.recon.ecal.ECalUtils;
+import org.hps.recon.ecal.HPSRawCalorimeterHit;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.RawCalorimeterHit;
+import org.lcsim.event.RawTrackerHit;
+import org.lcsim.event.base.BaseRawCalorimeterHit;
+import org.lcsim.event.base.BaseRawTrackerHit;
+import org.lcsim.geometry.Detector;
+import org.lcsim.geometry.Subdetector;
+import org.lcsim.geometry.subdetector.HPSEcal3;
+import org.lcsim.hps.util.ClockSingleton;
+import org.lcsim.hps.util.RandomGaussian;
+import org.lcsim.hps.util.RingBuffer;
+import org.lcsim.lcio.LCIOConstants;
+
+/**
+ * Performs readout of ECal hits. Simulates time evolution of preamp output
+ * pulse.
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: FADCEcalReadoutDriver.java,v 1.4 2013/10/31 00:11:02 meeg Exp $
+ */
+public class FADCEcalReadoutDriver extends EcalReadoutDriver<RawCalorimeterHit> {
+
+    // Repeated here from EventConstants in evio module to avoid depending on it.
+    private static final int ECAL_WINDOW_MODE = 1;
+    private static final int ECAL_PULSE_MODE = 2;
+    private static final int ECAL_PULSE_INTEGRAL_MODE = 3;
+
+    String ecalName = "Ecal";
+    Subdetector ecal;
+    //buffer for preamp signals (units of volts, no pedestal)
+    private Map<Long, RingBuffer> signalMap = null;
+    //ADC pipeline for readout (units of ADC counts)
+    private Map<Long, FADCPipeline> pipelineMap = null;
+    //buffer for window sums
+    private Map<Long, Double> sumMap = null;
+    //buffer for timestamps
+    private Map<Long, Integer> timeMap = null;
+    //queue for hits to be output to clusterer
+    private PriorityQueue<HPSRawCalorimeterHit> outputQueue = null;
+    //length of ring buffer (in readout cycles)
+    private int bufferLength = 100;
+    //length of readout pipeline (in readout cycles)
+    private int pipelineLength = 2000;
+    //switch between two pulse shape functions
+    private boolean useCRRCShape = true;
+    //shaper time constant in ns; negative values generate square pulses of the given width (for test run sim)
+    private double tp = 14.0;
+    //delay (number of readout periods) between start of summing window and output of hit to clusterer
+    private int delay0 = 32;
+    //start of readout window relative to trigger time (in readout cycles)
+    //in FADC documentation, "Programmable Latency" or PL
+    private int readoutLatency = 100;
+    //number of ADC samples to read out
+    //in FADC documentation, "Programmable Trigger Window" or PTW
+    private int readoutWindow = 100;
+    //number of ADC samples to read out before each rising threshold crossing
+    //in FADC documentation, "number of samples before" or NSB
+    private int numSamplesBefore = 5;
+    //number of ADC samples to read out after each rising threshold crossing
+    //in FADC documentation, "number of samples after" or NSA
+    private int numSamplesAfter = 30;
+// private HPSEcalConverter converter = null;
+    //output buffer for hits
+    private LinkedList<HPSRawCalorimeterHit> buffer = new LinkedList<HPSRawCalorimeterHit>();
+    //number of readout periods for which a given hit stays in the buffer
+    private int coincidenceWindow = 2;
+    //output collection name for hits read out from trigger
+    private String ecalReadoutCollectionName = "EcalReadoutHits";
+    private int mode = ECAL_PULSE_INTEGRAL_MODE; //one of the three ECAL_*_MODE constants above; validated in setMode()
+    private int readoutThreshold = 50; //ADC counts above pedestal (compared against pedestal+threshold in readPulses/readIntegrals)
+    private int triggerThreshold = 50; //ADC counts, compared against the pedestal-subtracted signal in readHits()
+    //amplitude ADC counts/GeV
+// private double gain = 0.5*1000 * 80.0 / 60;
+    private double scaleFactor = 128;
+    private double fixedGain = -1; //negative means use per-channel gain from conditions; positive overrides it
+    private boolean constantTriggerWindow = false;
+    private boolean addNoise = false;
+    private double pePerMeV = 2.0; //photoelectrons per MeV, used to calculate noise
+
+    public FADCEcalReadoutDriver() {
+        flags = 0;
+        flags += 1 << LCIOConstants.RCHBIT_TIME; //store hit time (RCHBIT_TIME; previous comment "store cell ID" was a copy-paste error)
+        hitClass = HPSRawCalorimeterHit.class;
+        setReadoutPeriod(ecalReadoutPeriod);
+// converter = new HPSEcalConverter(null);
+    }
+
+ public void setAddNoise(boolean addNoise) {
+ this.addNoise = addNoise;
+ }
+
+ public void setConstantTriggerWindow(boolean constantTriggerWindow) {
+ this.constantTriggerWindow = constantTriggerWindow;
+ }
+
+ public void setFixedGain(double fixedGain) {
+ this.fixedGain = fixedGain;
+ }
+
+ public void setEcalName(String ecalName) {
+ this.ecalName = ecalName;
+ }
+
+ public void setReadoutThreshold(int readoutThreshold) {
+ this.readoutThreshold = readoutThreshold;
+ }
+
+ public void setScaleFactor(double scaleFactor) {
+ this.scaleFactor = scaleFactor;
+ }
+
+ public void setTriggerThreshold(int triggerThreshold) {
+ this.triggerThreshold = triggerThreshold;
+ }
+
+ public void setEcalReadoutCollectionName(String ecalReadoutCollectionName) {
+ this.ecalReadoutCollectionName = ecalReadoutCollectionName;
+ }
+
+ public void setNumSamplesAfter(int numSamplesAfter) {
+ this.numSamplesAfter = numSamplesAfter;
+ }
+
+ public void setNumSamplesBefore(int numSamplesBefore) {
+ this.numSamplesBefore = numSamplesBefore;
+ }
+
+ public void setReadoutLatency(int readoutLatency) {
+ this.readoutLatency = readoutLatency;
+ }
+
+ public void setReadoutWindow(int readoutWindow) {
+ this.readoutWindow = readoutWindow;
+ }
+
+ public void setCoincidenceWindow(int coincidenceWindow) {
+ this.coincidenceWindow = coincidenceWindow;
+ }
+
+ public void setUseCRRCShape(boolean useCRRCShape) {
+ this.useCRRCShape = useCRRCShape;
+ }
+
+ public void setTp(double tp) {
+ this.tp = tp;
+ }
+
+// public void setFallTime(double fallTime) {
+// this.fallTime = fallTime;
+// }
+ public void setPePerMeV(double pePerMeV) {
+ this.pePerMeV = pePerMeV;
+ }
+
+// public void setRiseTime(double riseTime) {
+// this.riseTime = riseTime;
+// }
+ public void setDelay0(int delay0) {
+ this.delay0 = delay0;
+ }
+
+ public void setBufferLength(int bufferLength) {
+ this.bufferLength = bufferLength;
+ resetFADCBuffers();
+ }
+
+ public void setPipelineLength(int pipelineLength) {
+ this.pipelineLength = pipelineLength;
+ resetFADCBuffers();
+ }
+
+ public void setMode(int mode) {
+ this.mode = mode;
+ if (mode != ECAL_WINDOW_MODE && mode != ECAL_PULSE_MODE && mode != ECAL_PULSE_INTEGRAL_MODE) {
+ throw new IllegalArgumentException("invalid mode " + mode);
+ }
+ }
+
+ /**
+ * Return the map of preamp signal buffers. For debug only.
+ *
+ * @return
+ */
+ public Map<Long, RingBuffer> getSignalMap() {
+ return signalMap;
+ }
+
+ /**
+ * Return the map of FADC pipelines. For debug only.
+ *
+ * @return
+ */
+ public Map<Long, FADCPipeline> getPipelineMap() {
+ return pipelineMap;
+ }
+
+    @Override
+    protected void readHits(List<RawCalorimeterHit> hits) {
+
+        for (Long cellID : signalMap.keySet()) {
+            RingBuffer signalBuffer = signalMap.get(cellID);
+
+            FADCPipeline pipeline = pipelineMap.get(cellID);
+            pipeline.step();
+
+            double currentValue = signalBuffer.currentValue() * ((Math.pow(2, nBit) - 1) / maxVolt); //12-bit ADC with maxVolt V range
+            double pedestal = EcalConditions.physicalToPedestal(cellID);
+            pipeline.writeValue(Math.min((int) Math.round(pedestal + currentValue), (int) Math.pow(2, nBit))); //ADC can't return a value larger than 4095; 4096 (overflow) is returned for any input >2V
+            //System.out.println(signalBuffer.currentValue() + " " + currentValue + " " + pipeline.currentValue());
+
+            Double sum = sumMap.get(cellID);
+            if (sum == null && currentValue > triggerThreshold) { //rising threshold crossing: open a summing window for this cell
+                timeMap.put(cellID, readoutCounter);
+                if (constantTriggerWindow) {
+                    double sumBefore = 0; //seed the sum with the NSB samples already in the pipeline
+                    for (int i = 0; i < numSamplesBefore; i++) {
+                        if (debug) {
+                            System.out.format("trigger %d, %d: %d\n", cellID, i, pipeline.getValue(numSamplesBefore - i - 1));
+                        }
+                        sumBefore += pipeline.getValue(numSamplesBefore - i - 1);
+                    }
+                    sumMap.put(cellID, sumBefore);
+                } else {
+                    sumMap.put(cellID, currentValue);
+                }
+            }
+            if (sum != null) { //a summing window is already open: accumulate, or close and emit
+                if (constantTriggerWindow) {
+                    if (timeMap.get(cellID) + numSamplesAfter >= readoutCounter) { //still within the fixed NSA window
+                        if (debug) {
+                            System.out.format("trigger %d, %d: %d\n", cellID, readoutCounter - timeMap.get(cellID) + numSamplesBefore - 1, pipeline.getValue(0));
+                        }
+                        sumMap.put(cellID, sum + pipeline.getValue(0));
+                    } else if (timeMap.get(cellID) + delay0 <= readoutCounter) { //window closed and output delay elapsed: emit hit
+// System.out.printf("sum = %f\n", sum);
+                        outputQueue.add(new HPSRawCalorimeterHit(cellID,
+                                (int) Math.round(sum / scaleFactor),
+                                64 * timeMap.get(cellID),
+                                readoutCounter - timeMap.get(cellID) + 1));
+                        sumMap.remove(cellID);
+                    }
+                } else {
+                    if (currentValue < triggerThreshold || timeMap.get(cellID) + delay0 == readoutCounter) { //pulse fell below threshold, or max window length reached
+// System.out.printf("sum = %f\n",sum);
+                        outputQueue.add(new HPSRawCalorimeterHit(cellID,
+                                (int) Math.round((sum + currentValue) / scaleFactor),
+                                64 * timeMap.get(cellID),
+                                readoutCounter - timeMap.get(cellID) + 1));
+                        sumMap.remove(cellID);
+                    } else {
+                        sumMap.put(cellID, sum + currentValue);
+                    }
+                }
+            }
+            signalBuffer.step();
+        }
+        while (outputQueue.peek() != null && outputQueue.peek().getTimeStamp() / 64 <= readoutCounter - delay0) { //move due hits from the queue into the coincidence buffer
+            if (outputQueue.peek().getTimeStamp() / 64 < readoutCounter - delay0) {
+                System.out.println("Stale hit in output queue");
+                outputQueue.poll();
+            } else {
+                buffer.add(outputQueue.poll());
+            }
+        }
+        while (!buffer.isEmpty() && buffer.peek().getTimeStamp() / 64 <= readoutCounter - delay0 - coincidenceWindow) { //drop hits older than the coincidence window
+            buffer.remove();
+        }
+        if (debug) {
+            for (RawCalorimeterHit hit : buffer) {
+                System.out.format("new hit: energy %d\n", hit.getAmplitude());
+            }
+        }
+
+        hits.addAll(buffer);
+    }
+
+ @Override
+ public void startOfData() {
+ super.startOfData();
+ if (ecalReadoutCollectionName == null) {
+ throw new RuntimeException("The parameter ecalReadoutCollectionName was not set!");
+ }
+ }
+
+ @Override
+ protected void processTrigger(EventHeader event) {
+ switch (mode) {
+ case ECAL_WINDOW_MODE:
+ if (debug) {
+ System.out.println("Reading out ECal in window mode");
+ }
+ event.put(ecalReadoutCollectionName, readWindow(), RawTrackerHit.class, 0, ecalReadoutName);
+ break;
+ case ECAL_PULSE_MODE:
+ if (debug) {
+ System.out.println("Reading out ECal in pulse mode");
+ }
+ event.put(ecalReadoutCollectionName, readPulses(), RawTrackerHit.class, 0, ecalReadoutName);
+ break;
+ case ECAL_PULSE_INTEGRAL_MODE:
+ if (debug) {
+ System.out.println("Reading out ECal in integral mode");
+ }
+ event.put(ecalReadoutCollectionName, readIntegrals(), RawCalorimeterHit.class, flags, ecalReadoutName);
+ break;
+ }
+ }
+
+    @Override
+    public double readoutDeltaT() {
+        double triggerTime = ClockSingleton.getTime() + triggerDelay; //time at which the pending trigger fires
+        int cycle = (int) Math.floor((triggerTime - readoutOffset + ClockSingleton.getDt()) / readoutPeriod); //readout cycle containing the trigger
+        double readoutTime = (cycle - readoutLatency) * readoutPeriod + readoutOffset - ClockSingleton.getDt(); //window start: PL cycles before the trigger cycle
+        return readoutTime;
+    }
+
+ protected short[] getWindow(long cellID) {
+ FADCPipeline pipeline = pipelineMap.get(cellID);
+ short[] adcValues = new short[readoutWindow];
+ for (int i = 0; i < readoutWindow; i++) {
+ adcValues[i] = (short) pipeline.getValue(readoutLatency - i - 1);
+// if (adcValues[i] != 0) {
+// System.out.println("getWindow: " + adcValues[i] + " at i = " + i);
+// }
+ }
+ return adcValues;
+ }
+
+ protected List<RawTrackerHit> readWindow() {
+// System.out.println("Reading FADC data");
+ List<RawTrackerHit> hits = new ArrayList<RawTrackerHit>();
+ for (Long cellID : pipelineMap.keySet()) {
+ short[] adcValues = getWindow(cellID);
+ hits.add(new BaseRawTrackerHit(cellID, 0, adcValues));
+ }
+ return hits;
+ }
+
+ protected List<RawTrackerHit> readPulses() {
+// System.out.println("Reading FADC data");
+ List<RawTrackerHit> hits = new ArrayList<RawTrackerHit>();
+ for (Long cellID : pipelineMap.keySet()) {
+ short[] window = getWindow(cellID);
+ short[] adcValues = null;
+ int pointerOffset = 0;
+ int numSamplesToRead = 0;
+ int thresholdCrossing = 0;
+ for (int i = 0; i < readoutWindow; i++) {
+ if (numSamplesToRead != 0) {
+ adcValues[adcValues.length - numSamplesToRead] = window[i - pointerOffset];
+ numSamplesToRead--;
+ if (numSamplesToRead == 0) {
+ hits.add(new BaseRawTrackerHit(cellID, thresholdCrossing, adcValues));
+ }
+ } else if ((i == 0 || window[i - 1] <= EcalConditions.physicalToPedestal(cellID) + readoutThreshold) && window[i] > EcalConditions.physicalToPedestal(cellID) + readoutThreshold) {
+ thresholdCrossing = i;
+ pointerOffset = Math.min(numSamplesBefore, i);
+ numSamplesToRead = pointerOffset + Math.min(numSamplesAfter, readoutWindow - i - pointerOffset - 1);
+ adcValues = new short[numSamplesToRead];
+ }
+ }
+ }
+ return hits;
+ }
+
+ protected List<RawCalorimeterHit> readIntegrals() {
+// System.out.println("Reading FADC data");
+ List<RawCalorimeterHit> hits = new ArrayList<RawCalorimeterHit>();
+ for (Long cellID : pipelineMap.keySet()) {
+ short[] window = getWindow(cellID);
+ int adcSum = 0;
+ int pointerOffset = 0;
+ int numSamplesToRead = 0;
+ int thresholdCrossing = 0;
+ if (window != null) {
+ for (int i = 0; i < readoutWindow; i++) {
+ if (numSamplesToRead != 0) {
+ if (debug) {
+ System.out.format("readout %d, %d: %d\n", cellID, numSamplesBefore + numSamplesAfter - numSamplesToRead, window[i - pointerOffset]);
+ }
+ adcSum += window[i - pointerOffset];
+ numSamplesToRead--;
+ if (numSamplesToRead == 0) {
+ hits.add(new BaseRawCalorimeterHit(cellID, adcSum, 64 * thresholdCrossing));
+ }
+ } else if ((i == 0 || window[i - 1] <= EcalConditions.physicalToPedestal(cellID) + readoutThreshold) && window[i] > EcalConditions.physicalToPedestal(cellID) + readoutThreshold) {
+ thresholdCrossing = i;
+ pointerOffset = Math.min(numSamplesBefore, i);
+ numSamplesToRead = pointerOffset + Math.min(numSamplesAfter, readoutWindow - i - pointerOffset - 1);
+ adcSum = 0;
+ }
+ }
+ }
+ }
+ return hits;
+ }
+
+ @Override
+ protected void putHits(List<CalorimeterHit> hits) {
+ //fill the readout buffers
+ for (CalorimeterHit hit : hits) {
+ RingBuffer eDepBuffer = signalMap.get(hit.getCellID());
+ double energyAmplitude = hit.getRawEnergy();
+ if (addNoise) {
+ //add preamp noise and photoelectron Poisson noise in quadrature
+ double noise;
+ if (!useCRRCShape) {
+ noise = Math.sqrt(Math.pow(EcalConditions.physicalToNoise(hit.getCellID()) * EcalConditions.physicalToGain(hit.getCellID()) * ECalUtils.gainFactor * ECalUtils.ecalReadoutPeriod, 2) + hit.getRawEnergy() * ECalUtils.MeV / pePerMeV);
+ } else {
+ noise = Math.sqrt(Math.pow(EcalConditions.physicalToNoise(hit.getCellID()) * EcalConditions.physicalToGain(hit.getCellID()) * ECalUtils.MeV, 2) + hit.getRawEnergy() * ECalUtils.MeV / pePerMeV);
+ }
+ energyAmplitude += RandomGaussian.getGaussian(0, noise);
+ }
+ for (int i = 0; i < bufferLength; i++) {
+ eDepBuffer.addToCell(i, energyAmplitude * pulseAmplitude((i + 1) * readoutPeriod + readoutTime() - (ClockSingleton.getTime() + hit.getTime()), hit.getCellID()));
+ }
+ }
+ }
+
+ @Override
+ protected void initReadout() {
+ //initialize buffers
+ sumMap = new HashMap<Long, Double>();
+ timeMap = new HashMap<Long, Integer>();
+ outputQueue = new PriorityQueue(20, new HPSRawCalorimeterHit.TimeComparator());
+ resetFADCBuffers();
+ }
+
+ @Override
+ public void detectorChanged(Detector detector) {
+ // Get the Subdetector.
+ ecal = detector.getSubdetector(ecalName);
+ resetFADCBuffers();
+ }
+
+ private boolean resetFADCBuffers() {
+ if (ecal == null) {
+ return false;
+ }
+ signalMap = new HashMap<Long, RingBuffer>();
+ pipelineMap = new HashMap<Long, FADCPipeline>();
+ Set<Long> cells = ((HPSEcal3) ecal).getNeighborMap().keySet();
+ for (Long cellID : cells) {
+ signalMap.put(cellID, new RingBuffer(bufferLength));
+ pipelineMap.put(cellID, new FADCPipeline(pipelineLength, (int) Math.round(EcalConditions.physicalToPedestal(cellID))));
+ }
+ return true;
+ }
+
+ private double pulseAmplitude(double time, long cellID) {
+ if (useCRRCShape) {
+ if (time <= 0.0) {
+ return 0.0;
+ }
+
+ //normalization constant from cal gain (MeV/integral bit) to amplitude gain (amplitude bit/GeV)
+ double gain;
+ if (fixedGain > 0) {
+ gain = readoutPeriod / (fixedGain * ECalUtils.MeV * ((Math.pow(2, nBit) - 1) / maxVolt));
+ } else {
+ gain = readoutPeriod / (EcalConditions.physicalToGain(cellID) * ECalUtils.MeV * ((Math.pow(2, nBit) - 1) / maxVolt));
+ }
+
+ if (tp > 0.0) {
+ return gain * ((time / tp) * Math.exp(1.0 - time / tp)) / (tp * Math.E);
+ } else {
+ if (time < -tp) {
+ return 1.0;
+ } else {
+ return 0.0;
+ }
+ }
+ } else { // According to measurements the output signal can be fitted by two gaussians, one for the rise of the signal, one for the fall
+ // Time corresponds to t-(t_eve+pulseDelay) such that the maximum of the amplitude is reached pulseDelay ns after the event t_eve
+ // Without the coefficient, the integral is equal to 1
+ if (time <= 0.0) {
+ return 0.0;
+ }
+
+ //if fixedGain is set, multiply the default gain by this factor
+ double corrGain = 1.0;
+ if (fixedGain > 0) {
+ corrGain = fixedGain;
+ } else {
+ corrGain = 1.0 / EcalConditions.physicalToGain(cellID);
+ }
+
+ double norm = ((riseTime + fallTime) / 2) * Math.sqrt(2 * Math.PI); //to ensure the total integral is equal to 1: = 33.8
+
+ if (time < 3 * riseTime) {
+ return corrGain * readoutGain * funcGaus(time - 3 * riseTime, riseTime) / norm;
+ } else {
+ return corrGain * readoutGain * funcGaus(time - 3 * riseTime, fallTime) / norm;
+ }
+ }
+ }
+
+ // Gaussian function needed for the calculation of the pulse shape amplitude
+ public static double funcGaus(double t, double sig) {
+ return Math.exp(-t * t / (2 * sig * sig));
+ }
+
+ public class FADCPipeline {
+
+ private int[] array;
+ private int size;
+ private int ptr;
+
+ public FADCPipeline(int size) {
+ this.size = size;
+ array = new int[size]; //initialized to 0
+ ptr = 0;
+ }
+
+ //construct pipeline with a nonzero initial value
+ public FADCPipeline(int size, int init) {
+ this.size = size;
+ array = new int[size];
+ for (int i = 0; i < size; i++) {
+ array[i] = init;
+ }
+ ptr = 0;
+ }
+
+ /**
+ * Write value to current cell
+ */
+ public void writeValue(int val) {
+ array[ptr] = val;
+ }
+
+    /**
+     * Advance the pipeline pointer to the next cell, wrapping around at the end
+     */
+    public void step() {
+        ptr++;
+        if (ptr == size) {
+            ptr = 0;
+        }
+    }
+
+ //return content of specified cell (pos=0 for current cell)
+ public int getValue(int pos) {
+ if (pos >= size || pos < 0) {
+ throw new ArrayIndexOutOfBoundsException();
+ }
+ return array[((ptr - pos) % size + size) % size];
+ }
+ }
+}
java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal
--- java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCTriggerDriver.java (rev 0)
+++ java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCTriggerDriver.java 2014-03-25 23:53:28 UTC (rev 347)
@@ -0,0 +1,522 @@
+package org.hps.readout.ecal;
+
+import hep.aida.IHistogram1D;
+import hep.aida.IHistogram2D;
+
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Queue;
+
+import org.hps.recon.ecal.ECalUtils;
+import org.hps.recon.ecal.HPSEcalCluster;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.EventHeader;
+import org.lcsim.geometry.Detector;
+import org.lcsim.hps.util.ClockSingleton;
+import org.lcsim.util.aida.AIDA;
+
+/**
+ * Reads clusters and makes trigger decision using opposite quadrant criterion.
+ * Prints triggers to file if file path specified.
+ *
+ * @author Omar Moreno <[log in to unmask]>
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: FADCTriggerDriver.java,v 1.4 2013/09/02 21:56:56 phansson Exp $
+ */
public class FADCTriggerDriver extends TriggerDriver {

    // NOTE(review): nTriggers/totalEvents are never updated in this class; the
    // trigger count printed in printCounts() uses numTriggers, presumably
    // inherited from TriggerDriver -- TODO confirm.
    int nTriggers;
    int totalEvents;
    // Beam energy in units of ECalUtils.GeV; the thresholds below are stored
    // as fractions of it.
    protected double beamEnergy = 2.2 * ECalUtils.GeV;
    // Minimum number of calorimeter hits required per cluster.
    private int minHitCount = 1;
    // Per-cluster energy window, as fractions of the beam energy.
    private double clusterEnergyHigh = 1.85 / 2.2;
    private double clusterEnergyLow = .1 / 2.2;
    // Pair energy-sum and energy-difference thresholds, as fractions of the beam energy.
    private double energySumThreshold = 1.0;
    private double energyDifferenceThreshold = 1.5 / 2.2;
    private double maxCoplanarityAngle = 35; // degrees
// private double energyDistanceDistance = 250; // mm
// private double energyDistanceThreshold = 0.8 / 2.2;
    // X-intercept (mm) and Y-intercept (fraction of beam energy) of the
    // energy-vs-distance cut line; see energyDistanceCut().
    private double energyDistanceDistance = 200; // mm
    private double energyDistanceThreshold = 0.5;
    // maximum time difference between two clusters, in units of readout cycles (4 ns).
    private int pairCoincidence = 2;
    // Cut-flow counters: number of pairs surviving each successive trigger
    // condition; reported by printCounts() at end of data.
    int allPairs;
    int oppositeQuadrantCount;
    int clusterEnergyCount;
    int energySumCount;
    int energyDifferenceCount;
    int energyDistanceCount;
    int coplanarityCount;
    AIDA aida = AIDA.defaultInstance();
    // Diagnostic histograms: "All" versions are filled for every pair; the
    // others are "N-1" plots, filled only when all cuts except the plotted one pass.
    IHistogram2D clusterHitCount2DAll, clusterEnergy2DAll, clusterSumDiff2DAll, energyDistance2DAll, clusterAngles2DAll, clusterCoplanarity2DAll;
    IHistogram2D clusterHitCount2D, clusterEnergy2D, clusterSumDiff2D, energyDistance2D, clusterAngles2D, clusterCoplanarity2D;
    IHistogram1D triggerBits1D, triggerTimes1D;
    // Period (in clock ticks) used to fold trigger times in triggerTimes1D.
    int truthPeriod = 250;
    // If true, additionally require the pair's clusters to lie in opposite quadrants.
    private boolean useQuadrants = false;
    protected String clusterCollectionName = "EcalClusters";
    // FIFO queues of lists of clusters in each ECal half.
    // Each list corresponds to one readout cycle.
    private Queue<List<HPSEcalCluster>> topClusterQueue = null;
    private Queue<List<HPSEcalCluster>> botClusterQueue = null;

    // Trigger cut flags; the constructor argument is the bit position used in
    // the trigger-bits histogram bitmask.
    private enum Flag {

        CLUSTER_HITCOUNT(4), CLUSTER_ENERGY(3), ENERGY_SUM_DIFF(2), ENERGY_DISTANCE(1), COPLANARITY(0);
        private final int index;

        Flag(int i) {
            index = i;
        }

        // Packs a set of flags into an int bitmask for histogramming.
        static int bitmask(EnumSet<Flag> flags) {
            int mask = 0;
            for (Flag flag : flags) {
                mask |= 1 << flag.index;
            }
            return mask;
        }
    }

    /**
     * Sets the name of the cluster collection read from each event.
     */
    public void setClusterCollectionName(String clusterCollectionName) {
        this.clusterCollectionName = clusterCollectionName;
    }

    /**
     * Sets the beam energy (in GeV) and, for the known configurations
     * (1.1, 2.2, 6.6 GeV), reconfigures the trigger cut thresholds.
     * Any other value only rescales beamEnergy and leaves the default cut
     * fractions in place.
     * NOTE(review): getBeamEnergyFromDetector() can return -1.0 for an
     * unrecognized detector, which would make beamEnergy negative here --
     * confirm this cannot happen in practice.
     */
    public void setBeamEnergy(double beamEnergy) {
        if (beamEnergy == 1.1) {
            System.out.println(this.getClass().getSimpleName() + ": Setting trigger for 1.1 GeV beam");
            maxCoplanarityAngle = 90;
            clusterEnergyHigh = .7 / beamEnergy;
            clusterEnergyLow = .1 / beamEnergy;
            energySumThreshold = 0.8 / beamEnergy;
        } else if (beamEnergy == 2.2) {
            System.out.println(this.getClass().getSimpleName() + ": Setting trigger for 2.2 GeV beam");
            maxCoplanarityAngle = 45;
            clusterEnergyHigh = 1.6 / beamEnergy;
            clusterEnergyLow = .1 / beamEnergy;
            energySumThreshold = 1.7 / beamEnergy;
        } else if (beamEnergy == 6.6) {
            System.out.println(this.getClass().getSimpleName() + ": Setting trigger for 6.6 GeV beam");
            maxCoplanarityAngle = 60;
            clusterEnergyHigh = 5.0 / beamEnergy;
            clusterEnergyLow = .1 / beamEnergy;
            energySumThreshold = 5.5 / beamEnergy;
        }
        this.beamEnergy = beamEnergy * ECalUtils.GeV;
    }

    /**
     * Infers the beam energy (in GeV) from the detector name; returns -1.0 if
     * the name matches none of the known patterns.
     */
    protected double getBeamEnergyFromDetector(Detector detector) {
        if (detector.getName().contains("1pt1")) {
            return 1.1;
        } else if (detector.getName().contains("2pt2")) {
            return 2.2;
        } else if (detector.getName().contains("6pt6")) {
            return 6.6;
        } else {
            return -1.0;
        }
    }

    /**
     * Sets the period (clock ticks) used to fold trigger times for histogramming.
     */
    public void setTruthPeriod(int truthPeriod) {
        this.truthPeriod = truthPeriod;
    }

    /**
     * Sets the pair coincidence window, in readout cycles (4 ns each).
     */
    public void setPairCoincidence(int pairCoincidence) {
        this.pairCoincidence = pairCoincidence;
    }

    /**
     * Configures cuts from the detector name and books all diagnostic histograms.
     */
    @Override
    public void detectorChanged(Detector detector) {
        setBeamEnergy(this.getBeamEnergyFromDetector(detector));

        clusterHitCount2DAll = aida.histogram2D("All cluster pairs: hit count (less energetic vs. more energetic)", 9, 0.5, 9.5, 9, 0.5, 9.5);
        clusterSumDiff2DAll = aida.histogram2D("All cluster pairs: energy difference vs. sum", 100, 0.0, 2 * beamEnergy, 100, 0.0, beamEnergy);
        clusterEnergy2DAll = aida.histogram2D("All cluster pairs: energy (less energetic vs. more energetic)", 100, 0.0, 2 * beamEnergy, 100, 0.0, beamEnergy);
        energyDistance2DAll = aida.histogram2D("All cluster pairs: distance vs. energy (less energetic cluster)", 100, 0.0, 0.5 * beamEnergy, 25, 0.0, 400.0);
        clusterCoplanarity2DAll = aida.histogram2D("All cluster pairs: cluster angle uncoplanarity vs. less energetic cluster angle", 100, -180.0, 180.0, 100, -180.0, 180.0);
        clusterAngles2DAll = aida.histogram2D("All cluster pairs: cluster angle (less energetic vs. more energetic)", 100, -180.0, 180.0, 100, -180.0, 180.0);

        clusterHitCount2D = aida.histogram2D("Passed other cuts: hit count (less energetic vs. more energetic)", 9, 0.5, 9.5, 9, 0.5, 9.5);
        clusterSumDiff2D = aida.histogram2D("Passed other cuts: energy difference vs. sum", 100, 0.0, 2 * beamEnergy, 100, 0.0, beamEnergy);
        clusterEnergy2D = aida.histogram2D("Passed other cuts: energy (less energetic vs. more energetic)", 100, 0.0, 2 * beamEnergy, 100, 0.0, beamEnergy);
        energyDistance2D = aida.histogram2D("Passed other cuts: distance vs. energy (less energetic cluster)", 100, 0.0, 0.5 * beamEnergy, 25, 0.0, 400.0);
        clusterCoplanarity2D = aida.histogram2D("Passed other cuts: cluster angle uncoplanarity vs. less energetic cluster angle", 100, -180.0, 180.0, 100, -180.0, 180.0);
        clusterAngles2D = aida.histogram2D("Passed other cuts: cluster angle (less energetic vs. more energetic)", 100, -180.0, 180.0, 100, -180.0, 180.0);

        triggerBits1D = aida.histogram1D(detector.getDetectorName() + " : " + clusterCollectionName + " : trigger bits", 33, -1.5, 31.5);
        triggerTimes1D = aida.histogram1D(detector.getDetectorName() + " : " + clusterCollectionName + " : trigger times", truthPeriod, -0.5, truthPeriod - 0.5);
    }

    /**
     * Initializes the cluster queues and resets the cut-flow counters.
     * The top queue holds 2*pairCoincidence+1 cycles while the bottom queue
     * holds pairCoincidence+1; this asymmetry implements the pairing scheme
     * described in getClusterPairsTopBot().
     */
    @Override
    public void startOfData() {
        //initialize queues and fill with empty lists
        topClusterQueue = new LinkedList<List<HPSEcalCluster>>();
        botClusterQueue = new LinkedList<List<HPSEcalCluster>>();
        for (int i = 0; i < 2 * pairCoincidence + 1; i++) {
            topClusterQueue.add(new ArrayList<HPSEcalCluster>());
        }
        for (int i = 0; i < pairCoincidence + 1; i++) {
            botClusterQueue.add(new ArrayList<HPSEcalCluster>());
        }
        super.startOfData();
        if (clusterCollectionName == null) {
            throw new RuntimeException("The parameter clusterCollectionName was not set!");
        }

        allPairs = 0;
        oppositeQuadrantCount = 0;
        clusterEnergyCount = 0;
        energySumCount = 0;
        energyDifferenceCount = 0;
        energyDistanceCount = 0;
        coplanarityCount = 0;
    }

    /**
     * Feeds the event's clusters into the top/bottom queues, then delegates to
     * the base class (which eventually calls triggerDecision()).
     */
    @Override
    public void process(EventHeader event) {
        if (event.hasCollection(HPSEcalCluster.class, clusterCollectionName)) {
            // this needs to run every readout cycle whether or not trigger is live
            updateClusterQueues(event.get(HPSEcalCluster.class, clusterCollectionName));
        }
        super.process(event);
    }

    /**
     * Trigger decision: evaluate the pair cuts if the event has clusters;
     * otherwise no trigger.
     */
    @Override
    protected boolean triggerDecision(EventHeader event) {
        // Get the list of raw ECal hits.
        if (event.hasCollection(HPSEcalCluster.class, clusterCollectionName)) {
            return testTrigger();
        } else {
            return false;
        }
    }

    /**
     * Evaluates all trigger cuts on every top-bottom cluster pair currently in
     * the coincidence window, fills the diagnostic and N-1 histograms, and
     * updates the cut-flow counters.
     *
     * @return true if at least one pair passes all cuts
     */
    public boolean testTrigger() {
        boolean trigger = false;

        List<HPSEcalCluster[]> clusterPairs = getClusterPairsTopBot();

        //--- Apply Trigger Cuts ---//

        // Iterate through all cluster pairs present in the event. If at least
        // one of the cluster pairs satisfies all of the trigger conditions,
        // a trigger signal is sent to all other detectors.
        for (HPSEcalCluster[] clusterPair : clusterPairs) {

            // Set of cuts this pair passes; converted to a bitmask for histogramming.
            EnumSet<Flag> bits = EnumSet.noneOf(Flag.class);

            if (outputStream != null) {
                outputStream.printf("Event %d: cluster pair (energy %f in quadrant %d (%s), energy %f in quadrant %d (%s))\n",
                        ClockSingleton.getClock(),
                        clusterPair[0].getEnergy(), ECalUtils.getQuadrant(clusterPair[0]), clusterPair[0].getSeedHit().getPositionVec().toString(),
                        clusterPair[1].getEnergy(), ECalUtils.getQuadrant(clusterPair[1]), clusterPair[1].getSeedHit().getPositionVec().toString());
            }

            allPairs++;

            if (useQuadrants) {
                // Require that the event have at least two clusters in opposite
                // quadrants
                if (!oppositeQuadrantsCut(clusterPair)) {
                    if (outputStream != null) {
                        outputStream.println("Failed opposite quadrant cut");
                    }
                    continue;
                }
                oppositeQuadrantCount++;
            }

            // Require the components of a cluster pair to have at least one
            // hit each (should always be true)
            if (clusterHitCount(clusterPair)) {
                bits.add(Flag.CLUSTER_HITCOUNT);
            }

            bits.add(Flag.ENERGY_SUM_DIFF);
            // Require the sum of the energies of the components of the
            // cluster pair to be less than the
            // (Beam Energy)*(Sampling Fraction) ( 2 GeV for the Test Run )
            if (!energySum(clusterPair)) {
                bits.remove(Flag.ENERGY_SUM_DIFF);
            }

            // Require the components of a cluster pair to have an energy in
            // the range of 100 MeV to 1.85 GeV
            if (clusterECut(clusterPair)) {
                bits.add(Flag.CLUSTER_ENERGY);
            }

            // Require the difference in energy of the components of the
            // cluster pair to be less than 1.5 GeV
            if (!energyDifference(clusterPair)) {
                bits.remove(Flag.ENERGY_SUM_DIFF);
            }

            // Apply a low energy cluster vs. distance cut of the form
            // E_low + .0032 GeV/mm < .8 GeV
            if (energyDistanceCut(clusterPair)) {
                bits.add(Flag.ENERGY_DISTANCE);
            }

            // Require that the two clusters are coplanar with the beam within
            // 35 degrees
            if (coplanarityCut(clusterPair)) {
                bits.add(Flag.COPLANARITY);
            }

            // Cut-flow accounting: cuts are applied in sequence so each counter
            // reflects pairs surviving all previous cuts. energySum() and
            // energyDifference() are intentionally re-evaluated here.
            if (bits.contains(Flag.CLUSTER_ENERGY)) {
                clusterEnergyCount++;
                if (energySum(clusterPair)) {
                    energySumCount++;
                    if (energyDifference(clusterPair)) {
                        energyDifferenceCount++;
                        if (bits.contains(Flag.ENERGY_DISTANCE)) {
                            energyDistanceCount++;
                            if (bits.contains(Flag.COPLANARITY)) {
                                coplanarityCount++;
                            } else if (outputStream != null) {
                                outputStream.println("Failed coplanarity cut");
                            }
                        } else if (outputStream != null) {
                            outputStream.println("Failed energy-distance cut");
                        }
                    } else if (outputStream != null) {
                        outputStream.println("Failed energy difference cut");
                    }
                } else if (outputStream != null) {
                    outputStream.println("Failed energy sum cut");
                }
            } else if (outputStream != null) {
                outputStream.println("Failed cluster energy cut");
            }

            // Unconditional "all pairs" histograms.
            clusterHitCount2DAll.fill(clusterPair[0].getCalorimeterHits().size(), clusterPair[1].getCalorimeterHits().size());
            clusterSumDiff2DAll.fill(clusterPair[0].getEnergy() + clusterPair[1].getEnergy(), clusterPair[0].getEnergy() - clusterPair[1].getEnergy());
            clusterEnergy2DAll.fill(clusterPair[0].getEnergy(), clusterPair[1].getEnergy());
            energyDistance2DAll.fill(clusterPair[1].getEnergy(), getClusterDistance(clusterPair[1]));
            clusterCoplanarity2DAll.fill(getClusterAngle(clusterPair[1]), pairUncoplanarity(clusterPair));
            clusterAngles2DAll.fill(getClusterAngle(clusterPair[0]), getClusterAngle(clusterPair[1]));

            // N-1 histograms: fill each plot only when every cut except the
            // plotted one(s) passed (complementOf = "all flags but these").
            if (bits.containsAll(EnumSet.complementOf(EnumSet.of(Flag.CLUSTER_HITCOUNT)))) {
                clusterHitCount2D.fill(clusterPair[0].getCalorimeterHits().size(), clusterPair[1].getCalorimeterHits().size());
            }

            if (bits.containsAll(EnumSet.complementOf(EnumSet.of(Flag.ENERGY_SUM_DIFF, Flag.CLUSTER_ENERGY)))) { //cluster energy, energy-distance, coplanarity
                clusterSumDiff2D.fill(clusterPair[0].getEnergy() + clusterPair[1].getEnergy(), clusterPair[0].getEnergy() - clusterPair[1].getEnergy());
                clusterEnergy2D.fill(clusterPair[0].getEnergy(), clusterPair[1].getEnergy());
            }
            if (bits.containsAll(EnumSet.complementOf(EnumSet.of(Flag.ENERGY_DISTANCE)))) {
                energyDistance2D.fill(clusterPair[1].getEnergy(), getClusterDistance(clusterPair[1]));
            }
            if (bits.containsAll(EnumSet.complementOf(EnumSet.of(Flag.COPLANARITY)))) {
                clusterCoplanarity2D.fill(getClusterAngle(clusterPair[1]), pairUncoplanarity(clusterPair));
                clusterAngles2D.fill(getClusterAngle(clusterPair[0]), getClusterAngle(clusterPair[1]));
            }

            triggerBits1D.fill(Flag.bitmask(bits));

            if (bits.containsAll(EnumSet.allOf(Flag.class))) {
                // If all cuts are pased, we have a trigger
                if (outputStream != null) {
                    outputStream.println("Passed all cuts");
                }
                trigger = true;
            }
        }
        if (trigger) {
            // The -1 bin of triggerBits1D counts triggered events.
            triggerBits1D.fill(-1);
            triggerTimes1D.fill(ClockSingleton.getClock() % truthPeriod);
        }
        return trigger;
    }

    /**
     * Prints the cut-flow summary to the output file (if any) and to stdout.
     */
    @Override
    public void endOfData() {
        if (outputStream != null) {
            printCounts(outputStream);
        }
        printCounts(new PrintWriter(System.out));
        super.endOfData();
    }

    /**
     * Writes the cut-flow counters to the given writer and closes it.
     * NOTE(review): closing the writer here also closes the underlying stream,
     * including the System.out wrapper passed from endOfData() -- confirm this
     * is intended.
     */
    private void printCounts(PrintWriter writer) {
        writer.printf("Number of pairs: %d\n", allPairs);
        writer.printf("Number of cluster pairs after successive trigger conditions:\n");
        if (useQuadrants) {
            writer.printf("Opposite quadrants: %d\n", oppositeQuadrantCount);
        }
        writer.printf("Cluster energy: %d\n", clusterEnergyCount);
        writer.printf("Energy sum: %d\n", energySumCount);
        writer.printf("Energy difference: %d\n", energyDifferenceCount);
        writer.printf("Energy-distance cut: %d\n", energyDistanceCount);
        writer.printf("Coplanarity: %d\n", coplanarityCount);
        writer.printf("Trigger count: %d\n", numTriggers);
        writer.close();
    }

    /**
     * Splits the event's clusters by ECal half (iy > 0 is top) and rotates the
     * FIFO queues by one readout cycle.
     *
     * @param ecalClusters clusters reconstructed in the current readout cycle
     */
    protected void updateClusterQueues(List<HPSEcalCluster> ecalClusters) {
        ArrayList<HPSEcalCluster> topClusterList = new ArrayList<HPSEcalCluster>();
        ArrayList<HPSEcalCluster> botClusterList = new ArrayList<HPSEcalCluster>();
        for (HPSEcalCluster ecalCluster : ecalClusters) {
//            System.out.format("add cluster\t%f\t%d\n", ecalCluster.getSeedHit().getTime(), ecalCluster.getSeedHit().getIdentifierFieldValue("iy"));
            if (ecalCluster.getSeedHit().getIdentifierFieldValue("iy") > 0) {
                topClusterList.add(ecalCluster);
            } else {
                botClusterList.add(ecalCluster);
            }
        }

        topClusterQueue.add(topClusterList);
        botClusterQueue.add(botClusterList);
        topClusterQueue.remove();
        botClusterQueue.remove();
    }

    /**
     * Builds all top-bottom cluster pairs within the coincidence window from
     * the internal queues; the more energetic cluster goes first in each pair.
     *
     * @return list of cluster pairs (each a 2-element array)
     */
    protected List<HPSEcalCluster[]> getClusterPairsTopBot() {
        // Make a list of cluster pairs
        List<HPSEcalCluster[]> clusterPairs = new ArrayList<HPSEcalCluster[]>();

        // Loop over all top-bottom pairs of clusters; higher-energy cluster goes first in the pair
        // To apply pair coincidence time, use only bottom clusters from the
        // readout cycle pairCoincidence readout cycles ago, and top clusters
        // from all 2*pairCoincidence+1 previous readout cycles
        for (HPSEcalCluster botCluster : botClusterQueue.element()) {
            for (List<HPSEcalCluster> topClusters : topClusterQueue) {
                for (HPSEcalCluster topCluster : topClusters) {
//                    System.out.format("%f\t%f\n", topCluster.getSeedHit().getTime(), botCluster.getSeedHit().getTime());
                    if (topCluster.getEnergy() > botCluster.getEnergy()) {
                        HPSEcalCluster[] clusterPair = {topCluster, botCluster};
                        clusterPairs.add(clusterPair);
                    } else {
                        HPSEcalCluster[] clusterPair = {botCluster, topCluster};
                        clusterPairs.add(clusterPair);
                    }
                }
            }
        }
        return clusterPairs;
    }

    /**
     * Checks if the ECal clusters making up a cluster pair lie in opposite
     * quadrants
     *
     * @param clusterPair : pair of clusters
     * @return true if opposite quadrants, false otherwise
     */
    protected boolean oppositeQuadrantsCut(HPSEcalCluster[] clusterPair) {
        int quad1 = ECalUtils.getQuadrant(clusterPair[0]);
        int quad2 = ECalUtils.getQuadrant(clusterPair[1]);

        //if clusters are in the same quadrant, they're not opposite quadrants
        if (quad1 == quad2) {
            return false;
        } //opposite pairs of quadrants are either both even (2 and 4) or both odd (1 and 3)
        else {
            return ((quad1 & 1) == (quad2 & 1));
        }
    }

    /**
     * Checks if the ECal clusters making up a cluster pair both have at least
     * the minimum number of hits.
     *
     * @param clusterPair: pair of clusters
     * @return true if pair passes cut, false if fail
     */
    protected boolean clusterHitCount(HPSEcalCluster[] clusterPair) {
        return (clusterPair[0].getCalorimeterHits().size() >= minHitCount
                && clusterPair[1].getCalorimeterHits().size() >= minHitCount);
    }

    /**
     * Checks if the ECal clusters making up a cluster pair lie above the low
     * energy threshold and below the high energy threshold
     *
     * @param clusterPair : pair of clusters
     * @return true if both clusters are in the energy window, false otherwise
     */
    protected boolean clusterECut(HPSEcalCluster[] clusterPair) {
        return (clusterPair[0].getEnergy() < beamEnergy * clusterEnergyHigh
                && clusterPair[1].getEnergy() < beamEnergy * clusterEnergyHigh
                && clusterPair[0].getEnergy() > beamEnergy * clusterEnergyLow
                && clusterPair[1].getEnergy() > beamEnergy * clusterEnergyLow);
    }

    /**
     * Checks if the sum of the energies of ECal clusters making up a cluster
     * pair is below an energy sum threshold.
     * Note: takes the generic Cluster type, unlike the other cuts.
     *
     * @param clusterPair : pair of clusters
     * @return true if the pair passes the cut, false otherwise
     */
    protected boolean energySum(Cluster[] clusterPair) {
        double clusterESum = clusterPair[0].getEnergy() + clusterPair[1].getEnergy();
        return (clusterESum < beamEnergy * energySumThreshold);
    }

    /**
     * Checks if the energy difference between the ECal clusters making up a
     * cluster pair is below an energy difference threshold
     *
     * @param clusterPair : pair of clusters
     * @return true if the pair passes the cut, false otherwise
     */
    protected boolean energyDifference(HPSEcalCluster[] clusterPair) {
        double clusterEDifference = clusterPair[0].getEnergy() - clusterPair[1].getEnergy();

        return (clusterEDifference < beamEnergy * energyDifferenceThreshold);
    }

    /**
     * Require that the distance from the beam of the lowest energy cluster in a
     * cluster pair satisfies the following E_low + d_b*.0032 GeV/mm < .8 GeV
     *
     * @param clusterPair : pair of clusters
     * @return true if the pair passes the cut, false otherwise
     */
    protected boolean energyDistanceCut(HPSEcalCluster[] clusterPair) {
        // clusterPair[1] is the less energetic cluster by construction.
        HPSEcalCluster lowEnergyCluster = clusterPair[1];

        // Calculate its position
        double lowEClusterDistance = getClusterDistance(clusterPair[1]);
        // event passes cut if above the line with X- and Y-intercepts defined by energyDistanceDistance and beamEnergy*energyDistanceThreshold
        double clusterDistvsE = lowEnergyCluster.getEnergy() + lowEClusterDistance * beamEnergy * energyDistanceThreshold / energyDistanceDistance;

        return (clusterDistvsE > beamEnergy * energyDistanceThreshold);
    }

    /**
     * Checks if a cluster pair is coplanar to the beam within a given angle
     *
     * @param clusterPair : pair of clusters
     * @return true if the pair passes the cut, false otherwise
     */
    protected boolean coplanarityCut(HPSEcalCluster[] clusterPair) {
        return (Math.abs(pairUncoplanarity(clusterPair)) < maxCoplanarityAngle);
    }

    // Finds the angle between clusters in the pair. Each angle is folded into
    // [0, 180) so that back-to-back clusters give an uncoplanarity near zero.
    protected double pairUncoplanarity(HPSEcalCluster[] clusterPair) {
        double cluster1Angle = (getClusterAngle(clusterPair[0]) + 180.0) % 180.0;
        double cluster2Angle = (getClusterAngle(clusterPair[1]) + 180.0) % 180.0;

        return cluster2Angle - cluster1Angle;
    }

    // Returns the azimuthal angle of the cluster's seed hit, in degrees, in the
    // range -180 to 180.
    protected double getClusterAngle(HPSEcalCluster cluster) {
        double position[] = cluster.getSeedHit().getPosition();
        return Math.toDegrees(Math.atan2(position[1], position[0]));
    }

    // Returns the transverse distance (mm) of the cluster's seed hit from the beam axis.
    protected double getClusterDistance(HPSEcalCluster cluster) {
        return Math.hypot(cluster.getSeedHit().getPosition()[0], cluster.getSeedHit().getPosition()[1]);
    }
}
\ No newline at end of file
java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal
--- java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCTriggerVariableDriver.java (rev 0)
+++ java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCTriggerVariableDriver.java 2014-03-25 23:53:28 UTC (rev 347)
@@ -0,0 +1,172 @@
+/*
+ * To change this template, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package org.hps.readout.ecal;
+
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.hps.recon.ecal.HPSEcalCluster;
+import org.lcsim.event.EventHeader;
+import org.lcsim.geometry.Detector;
+
+/**
+ * Dumps trigger variables to text file
+ * @author phansson <[log in to unmask]>
+ * @version $id: $
+ */
+public class FADCTriggerVariableDriver extends FADCTriggerDriver {
+ private int _pairs = 0;
+
+ public FADCTriggerVariableDriver() {
+ }
+
+ @Override
+ public void startOfData() {
+ if(!"".equals(outputFileName)) {
+ try {
+ outputStream = new PrintWriter(outputFileName);
+ } catch (FileNotFoundException ex) {
+ Logger.getLogger(FADCTriggerVariableDriver.class.getName()).log(Level.SEVERE, null, ex);
+ }
+ } else {
+ throw new RuntimeException("Need to supply a output file!");
+ }
+
+ String str = "event/I:beamenergy/F:pairid/I:cl1E/F:cl1posx/F:cl1posy/F:cl2E/F:cl2posx/F:cl2posy/F";
+
+ outputStream.println(str);
+
+ }
+
+
+
+ @Override
+ public void detectorChanged(Detector detector) {
+ setBeamEnergy(getBeamEnergyFromDetector(detector));
+ }
+
+
+ @Override
+ public void process(EventHeader event) {
+ // super.process(event);
+
+ if (event.hasCollection(HPSEcalCluster.class, clusterCollectionName)) {
+
+
+ List<HPSEcalCluster> clusters = event.get(HPSEcalCluster.class, clusterCollectionName);
+
+ //System.out.printf("%d ecal clusters in event\n", clusters.size());
+ //System.out.printf("%s: %d clusters\n",this.getClass().getSimpleName(),clusters.size());
+ //for(HPSEcalCluster cl : clusters) {
+ // System.out.printf("%s: cl E %f x %f y %f \n",this.getClass().getSimpleName(),cl.getEnergy(),cl.getPosition()[0],cl.getPosition()[1]);
+ //}
+ List<HPSEcalCluster> unique_clusters = this.getUniqueClusters(clusters);
+ //System.out.printf("%s: %d unique clusters\n",this.getClass().getSimpleName(),unique_clusters.size());
+ //for(HPSEcalCluster cl : unique_clusters) {
+ // System.out.printf("%s: cl E %f x %f y %f \n",this.getClass().getSimpleName(),cl.getEnergy(),cl.getPosition()[0],cl.getPosition()[1]);
+ //}
+
+ updateClusterQueues(unique_clusters);
+ List<HPSEcalCluster[]> clusterPairs = getClusterPairsTopBot();
+ boolean foundClusterPairs = !clusterPairs.isEmpty();
+
+ if (foundClusterPairs) {
+
+ int ipair = 0;
+ for(HPSEcalCluster[] pair : clusterPairs) {
+
+ String evString = String.format("%d %f %d ", event.getEventNumber(),this.beamEnergy,ipair);
+ for(int icluster = 0; icluster!=2; icluster++ ) {
+
+ HPSEcalCluster cluster = pair[icluster];
+
+ //int quad = ECalUtils.getQuadrant(cluster);
+ double E = cluster.getEnergy();
+ double pos[] = cluster.getSeedHit().getPosition();
+ //System.out.printf("x %f y %f ix %d iy %d \n", pos[0], pos[1], cluster.getSeedHit().getIdentifierFieldValue("ix"), cluster.getSeedHit().getIdentifierFieldValue("iy"));
+
+ evString += String.format("%f %f %f ", E, pos[0], pos[1]);
+ }
+ //System.out.printf("%s\n",evString);
+ outputStream.println(evString);
+ ++ipair;
+ ++_pairs;
+ } // pairs
+ }
+
+ } // has clusters
+ else {
+ //System.out.printf("No ecal cluster collection in event %d \n", event.getEventNumber());
+ }
+
+
+
+ }
+
+ @Override
+ public void endOfData() {
+
+ System.out.printf("%s: processed %d pairs\n",this.getClass().getSimpleName(),this._pairs);
+
+ outputStream.close();
+
+
+ }
+
+
+
+ private List<HPSEcalCluster> getUniqueClusters(List<HPSEcalCluster> clusters) {
+ List<HPSEcalCluster> unique = new ArrayList<HPSEcalCluster>();
+ for(HPSEcalCluster loop_cl : clusters) {
+ HPSEcalClusterCmp loop_clCmp = new HPSEcalClusterCmp(loop_cl);
+ boolean found = false;
+ for(HPSEcalCluster cl : unique) {
+ if( loop_clCmp.compareTo(cl) == 0 ) {
+ found = true;
+ }
+ }
+ if( !found ) {
+ unique.add(loop_cl);
+ }
+ }
+ return unique;
+ }
+
+
+ private static class HPSEcalClusterCmp implements Comparable<HPSEcalCluster> {
+ private HPSEcalCluster _cluster;
+ public HPSEcalClusterCmp(HPSEcalCluster cl) {
+ set_cluster(cl);
+ }
+ @Override
+ public int compareTo(HPSEcalCluster cl) {
+ if(cl.getEnergy()==get_cluster().getEnergy() && cl.getPosition()[0]==get_cluster().getPosition()[0] && cl.getPosition()[1]==get_cluster().getPosition()[1] ) {
+ return 0;
+ } else {
+ if( cl.getEnergy() > get_cluster().getEnergy()) {
+ return 1;
+ } else {
+ return -1;
+ }
+ }
+ }
+ public HPSEcalCluster get_cluster() {
+ return _cluster;
+ }
+ public void set_cluster(HPSEcalCluster _cluster) {
+ this._cluster = _cluster;
+ }
+
+ }
+
+}
+
+
+
+
java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal
--- java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/ReadoutTimestamp.java (rev 0)
+++ java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/ReadoutTimestamp.java 2014-03-25 23:53:28 UTC (rev 347)
@@ -0,0 +1,108 @@
+package org.hps.readout.ecal;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.GenericObject;
+
+/**
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: ReadoutTimestamp.java,v 1.1 2013/03/20 00:09:42 meeg Exp $
+ */
+public class ReadoutTimestamp implements GenericObject {
+
+ public static final String collectionName = "ReadoutTimestamps";
+ public static final int SYSTEM_TRIGGER = 0;
+ public static final int SYSTEM_TRACKER = 1;
+ public static final int SYSTEM_ECAL = 2;
+ private int system;
+ private double time;
+
+ public ReadoutTimestamp(int system, double time) {
+ this.system = system;
+ this.time = time;
+ }
+
+ public static void addTimestamp(TriggerableDriver triggerable, EventHeader event) {
+ ReadoutTimestamp timestamp = new ReadoutTimestamp(triggerable.getTimestampType(), triggerable.readoutDeltaT());
+ /*
+ if (TriggerDriver.class.isInstance(triggerable)) {
+ timestamp = new ReadoutTimestamp(SYSTEM_TRIGGER, triggerable.readoutDeltaT());
+ } else if (SimpleSvtReadout.class.isInstance(triggerable)) {
+ timestamp = new ReadoutTimestamp(SYSTEM_TRACKER, triggerable.readoutDeltaT());
+ } else if (EcalReadoutDriver.class.isInstance(triggerable)) {
+ timestamp = new ReadoutTimestamp(SYSTEM_ECAL, triggerable.readoutDeltaT());
+ }*/
+ addTimestamp(timestamp, event);
+ }
+
+ public static void addTimestamp(ReadoutTimestamp timestamp, EventHeader event) {
+ List<ReadoutTimestamp> timestamps;
+ if (event.hasCollection(ReadoutTimestamp.class, collectionName)) {
+ timestamps = event.get(ReadoutTimestamp.class, collectionName);
+ } else {
+ timestamps = new ArrayList<ReadoutTimestamp>();
+ event.put(collectionName, timestamps, ReadoutTimestamp.class, 0);
+ }
+ timestamps.add(timestamp);
+ }
+
+ public static double getTimestamp(int system, EventHeader event) {
+ if (event.hasCollection(GenericObject.class, collectionName)) {
+ List<GenericObject> timestamps = event.get(GenericObject.class, collectionName);
+ for (GenericObject timestamp : timestamps) {
+ if (timestamp.getIntVal(0) == system) {
+ return timestamp.getDoubleVal(0);
+ }
+ }
+ return 0;
+ } else {
+ return 0;
+ }
+ }
+
+ @Override
+ public int getNInt() {
+ return 1;
+ }
+
+ @Override
+ public int getNFloat() {
+ return 0;
+ }
+
+ @Override
+ public int getNDouble() {
+ return 1;
+ }
+
+ @Override
+ public int getIntVal(int index) {
+ if (index == 0) {
+ return system;
+ } else {
+ throw new ArrayIndexOutOfBoundsException();
+ }
+ }
+
+ @Override
+ public float getFloatVal(int index) {
+ throw new ArrayIndexOutOfBoundsException();
+ }
+
+ @Override
+ public double getDoubleVal(int index) {
+ if (index == 0) {
+ return time;
+ } else {
+ throw new ArrayIndexOutOfBoundsException();
+ }
+ }
+
+ @Override
+ public boolean isFixedSize() {
+ return true;
+ }
+}
java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal
--- java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/SimpleEcalReadoutDriver.java (rev 0)
+++ java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/SimpleEcalReadoutDriver.java 2014-03-25 23:53:28 UTC (rev 347)
@@ -0,0 +1,59 @@
+package org.hps.readout.ecal;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.hps.recon.ecal.HPSCalorimeterHit;
+import org.lcsim.event.CalorimeterHit;
+
+/**
+ * Performs readout of ECal hits.
+ * No time evolution - this just integrates all hits in a cycle.
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: SimpleEcalReadoutDriver.java,v 1.1 2013/02/25 22:39:26 meeg Exp $
+ */
+public class SimpleEcalReadoutDriver extends EcalReadoutDriver<HPSCalorimeterHit> {
+ //buffer for deposited energy
+ Map<Long, Double> eDepMap = null;
+
+ public SimpleEcalReadoutDriver() {
+ hitClass = HPSCalorimeterHit.class;
+ }
+
+ @Override
+ protected void readHits(List<HPSCalorimeterHit> hits) {
+ for (Long cellID : eDepMap.keySet()) {
+// int ix = dec.getValue("ix");
+// int iy = dec.getValue("iy");
+// //temporary hack to disable crystals and flip X coordinate
+// int side = dec.getValue("side");
+// if (iy == 1 && ix*side >= -10 && ix*side <= -2)
+// continue;
+ if (eDepMap.get(cellID) > threshold)
+ hits.add(new HPSCalorimeterHit(eDepMap.get(cellID), readoutTime(), cellID, hitType));
+ }
+ //reset hit integration
+ eDepMap = new HashMap<Long, Double>();
+ }
+
+ @Override
+ protected void putHits(List<CalorimeterHit> hits) {
+ //fill the readout buffers
+ for (CalorimeterHit hit : hits) {
+ Double eDep = eDepMap.get(hit.getCellID());
+ if (eDep == null) {
+ eDepMap.put(hit.getCellID(), hit.getRawEnergy());
+ } else {
+ eDepMap.put(hit.getCellID(), eDep + hit.getRawEnergy());
+ }
+ }
+ }
+
+ @Override
+ protected void initReadout() {
+ //initialize buffers
+ eDepMap = new HashMap<Long, Double>();
+ }
+}
java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal
--- java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/TestRunTriggerDriver.java (rev 0)
+++ java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/TestRunTriggerDriver.java 2014-03-25 23:53:28 UTC (rev 347)
@@ -0,0 +1,109 @@
+package org.hps.readout.ecal;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.hps.recon.ecal.HPSEcalCluster;
+import org.hps.recon.ecal.TriggerData;
+import org.lcsim.event.EventHeader;
+
+/**
+ * Reads clusters and makes a trigger decision when any cluster exceeds the
+ * energy threshold, recording separate top-half and bottom-half trigger bits.
+ * Prints triggers to file if file path specified.
+ *
+ * @author Omar Moreno <[log in to unmask]>
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: TestRunTriggerDriver.java,v 1.2 2013/03/20 01:21:29 meeg Exp $
+ */
+public class TestRunTriggerDriver extends TriggerDriver {
+
+    // True if any readout cycle in the current 4-cycle window fired the trigger.
+    boolean triggerThisCycle = false;
+    // Number of readout cycles seen; a decision is only reported every 4th cycle.
+    int cycleCounter = 0;
+    // Minimum cluster energy to fire the trigger (same units as
+    // cluster.getEnergy(); default 10 — TODO confirm units against HPSEcalCluster).
+    private double clusterEnergyLow = 10;
+    // Trigger count ignoring the dead-time veto applied by TriggerDriver.
+    int deadtimelessTriggerCount;
+    // Rolling bit masks of top/bottom-half cluster triggers, one bit per cycle.
+    private int topBits = 0, botBits = 0;
+    protected String clusterCollectionName = "EcalClusters";
+
+    public TestRunTriggerDriver() {
+    }
+
+    /**
+     * Set the minimum cluster energy required to fire the trigger.
+     *
+     * @param clusterEnergyLow threshold in the same units as cluster energy
+     */
+    public void setClusterEnergyLow(double clusterEnergyLow) {
+        this.clusterEnergyLow = clusterEnergyLow;
+    }
+
+    /**
+     * Build a TriggerData bank from the accumulated top/bottom trigger bits
+     * and put it into the event.
+     */
+    @Override
+    protected void makeTriggerData(EventHeader event, String collectionName) {
+        int[] trigArray = new int[8];
+        trigArray[TriggerData.TOP_TRIG] = topBits;
+        trigArray[TriggerData.BOT_TRIG] = botBits;
+        trigArray[TriggerData.AND_TRIG] = topBits & botBits;
+        trigArray[TriggerData.OR_TRIG] = topBits | botBits;
+        TriggerData tData = new TriggerData(trigArray);
+        List<TriggerData> triggerList = new ArrayList<TriggerData>();
+        triggerList.add(tData);
+        event.put(collectionName, triggerList, TriggerData.class, 0);
+    }
+
+    public void setClusterCollectionName(String clusterCollectionName) {
+        this.clusterCollectionName = clusterCollectionName;
+    }
+
+    @Override
+    public void startOfData() {
+        super.startOfData();
+        if (clusterCollectionName == null) {
+            throw new RuntimeException("The parameter clusterCollectionName was not set!");
+        }
+
+        deadtimelessTriggerCount = 0;
+    }
+
+    /**
+     * Evaluate the trigger on every readout cycle, but only report a decision
+     * every fourth cycle; a trigger in any cycle of the window fires it.
+     */
+    @Override
+    protected boolean triggerDecision(EventHeader event) {
+        if (event.hasCollection(HPSEcalCluster.class, clusterCollectionName)) {
+            cycleCounter++;
+            if (testTrigger(event.get(HPSEcalCluster.class, clusterCollectionName))) {
+                triggerThisCycle = true;
+            }
+        }
+
+        if (cycleCounter % 4 == 0) {
+            boolean trigger = triggerThisCycle;
+            triggerThisCycle = false;
+            return trigger;
+        } else {
+            return false;
+        }
+    }
+
+    /**
+     * Test the clusters against the energy threshold and record which ECal
+     * half fired (top or bottom, by the sign of the cluster y position).
+     *
+     * @param clusters clusters for the current readout cycle
+     * @return true if any cluster is above threshold
+     */
+    public boolean testTrigger(List<HPSEcalCluster> clusters) {
+        boolean trigger = false;
+
+        // Shift the per-cycle history; the current cycle occupies bit 0.
+        topBits <<= 1;
+        botBits <<= 1;
+        for (HPSEcalCluster cluster : clusters) {
+            if (cluster.getEnergy() > clusterEnergyLow) {
+                if (cluster.getPosition()[1] > 0) {
+                    topBits |= 1;
+                } else {
+                    botBits |= 1;
+                }
+                trigger = true;
+            }
+        }
+        if (trigger) {
+            deadtimelessTriggerCount++;
+        }
+        return trigger;
+    }
+
+    @Override
+    public void endOfData() {
+        if (outputStream != null) {
+            // NOTE(review): "cluster pairs" looks like a copy-paste leftover from a
+            // pair trigger; this driver counts single-cluster triggers — confirm.
+            outputStream.printf("Number of cluster pairs after successive trigger conditions:\n");
+            outputStream.printf("Trigger count without dead time: %d\n", deadtimelessTriggerCount);
+            outputStream.printf("Trigger count: %d\n", numTriggers);
+            outputStream.close();
+        }
+        System.out.printf("Number of cluster pairs after successive trigger conditions:\n");
+        System.out.printf("Trigger count without dead time: %d\n", deadtimelessTriggerCount);
+        System.out.printf("Trigger count: %d\n", numTriggers);
+        super.endOfData();
+    }
+}
\ No newline at end of file
java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal
--- java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/TimeEvolutionEcalReadoutDriver.java (rev 0)
+++ java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/TimeEvolutionEcalReadoutDriver.java 2014-03-25 23:53:28 UTC (rev 347)
@@ -0,0 +1,94 @@
+package org.hps.readout.ecal;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.hps.recon.ecal.HPSCalorimeterHit;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.hps.util.ClockSingleton;
+import org.lcsim.hps.util.RingBuffer;
+
+/**
+ * Performs readout of ECal hits.
+ * Simulates time evolution of preamp output pulse.
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: TimeEvolutionEcalReadoutDriver.java,v 1.1 2013/02/25 22:39:26 meeg Exp $
+ */
+public class TimeEvolutionEcalReadoutDriver extends EcalReadoutDriver<HPSCalorimeterHit> {
+
+ //buffer for deposited energy
+ Map<Long, RingBuffer> eDepMap = null;
+ //length of ring buffer (in readout cycles)
+ int bufferLength = 20;
+ //shaper time constant in ns; negative values generate square pulses of the given width
+ double t0 = 18.0;
+
+ public TimeEvolutionEcalReadoutDriver() {
+ hitClass = HPSCalorimeterHit.class;
+ }
+
+ public void setT0(double t0) {
+ this.t0 = t0;
+ }
+
+ public void setBufferLength(int bufferLength) {
+ this.bufferLength = bufferLength;
+ eDepMap = new HashMap<Long, RingBuffer>();
+ }
+
+ @Override
+ protected void readHits(List<HPSCalorimeterHit> hits) {
+ for (Long cellID : eDepMap.keySet()) {
+ RingBuffer eDepBuffer = eDepMap.get(cellID);
+ if (eDepBuffer.currentValue() > threshold) {
+// int ix = dec.getValue("ix");
+// int iy = dec.getValue("iy");
+// if (iy == 1 && ix == -2)
+// System.out.printf("Time %f, output signal %f\n", ClockSingleton.getTime(), eDepBuffer.currentValue());
+ hits.add(new HPSCalorimeterHit(eDepBuffer.currentValue(), readoutTime(), cellID, hitType));
+ }
+ eDepBuffer.step();
+ }
+ }
+
+ @Override
+ protected void putHits(List<CalorimeterHit> hits) {
+ //fill the readout buffers
+ for (CalorimeterHit hit : hits) {
+// int ix = dec.getValue("ix");
+// int iy = dec.getValue("iy");
+// if (iy == 1 && ix == -2)
+// System.out.printf("Time %f, input hit %f)\n", ClockSingleton.getTime() + hit.getTime(), hit.getRawEnergy());
+
+ RingBuffer eDepBuffer = eDepMap.get(hit.getCellID());
+ if (eDepBuffer == null) {
+ eDepBuffer = new RingBuffer(bufferLength);
+ eDepMap.put(hit.getCellID(), eDepBuffer);
+ }
+ for (int i = 0; i < bufferLength; i++) {
+ eDepBuffer.addToCell(i, hit.getRawEnergy() * pulseAmplitude((i + 1) * readoutPeriod + readoutTime() - (ClockSingleton.getTime() + hit.getTime())));
+ }
+ }
+ }
+
+ @Override
+ protected void initReadout() {
+ //initialize buffers
+ eDepMap = new HashMap<Long, RingBuffer>();
+ }
+
+ private double pulseAmplitude(double time) {
+ if (time < 0.0)
+ return 0.0;
+ if (t0 > 0.0) {
+ return (time / t0) * Math.exp(1.0 - time / t0);
+ } else {
+ if (time < -t0)
+ return 1.0;
+ else
+ return 0.0;
+ }
+ }
+}
java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal
--- java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/TriggerDriver.java (rev 0)
+++ java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/TriggerDriver.java 2014-03-25 23:53:28 UTC (rev 347)
@@ -0,0 +1,187 @@
+package org.hps.readout.ecal;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.hps.recon.ecal.TriggerData;
+import org.lcsim.event.EventHeader;
+import org.lcsim.hps.util.ClockSingleton;
+import org.lcsim.lcio.LCIOWriter;
+
+/**
+ * Makes trigger decision and sends trigger to readout drivers.
+ * Prints triggers to file if file path specified.
+ * Writes trigger events to LCIO if file path specified.
+ * To implement: extend this class and write your own triggerDecision().
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: TriggerDriver.java,v 1.7 2013/09/02 21:56:56 phansson Exp $
+ */
+public abstract class TriggerDriver extends TriggerableDriver {
+
+ private boolean _DEBUG = false;
+ // Path of the plain-text trigger log; null disables file logging.
+ protected String outputFileName = null;
+ // Log sink: the file above, or System.out when _DEBUG is set, else null.
+ protected PrintWriter outputStream = null;
+ // Count of accepted triggers (after the dead-time veto).
+ protected int numTriggers;
+ // Clock tick of the last accepted trigger; MIN_VALUE means "no trigger yet".
+ private int lastTrigger = Integer.MIN_VALUE;
+ // Minimum number of clock ticks between accepted triggers; 0 disables the veto.
+ private int deadTime = 0;
+ // Set on every process() call: true iff the current event fired the trigger.
+ private static boolean triggerBit = false;
+ // Path of the LCIO file for triggered events; null disables LCIO output.
+ private String lcioFile = null;
+ LCIOWriter lcioWriter = null;
+ // All drivers notified when a trigger fires. NOTE(review): static, so shared
+ // across every TriggerDriver instance in the process — confirm intended.
+ private static final List<TriggerableDriver> triggerables = new ArrayList<TriggerableDriver>();
+
+ public TriggerDriver() {
+ triggerDelay = 50.0;
+ }
+
+ public void setLcioFile(String lcioFile) {
+ this.lcioFile = lcioFile;
+ }
+
+ /**
+ * Set dead time; 0 for no dead time
+ * @param deadTime Minimum number of clock ticks between triggers
+ */
+ public void setDeadTime(int deadTime) {
+ this.deadTime = deadTime;
+ }
+
+ public void setOutputFileName(String outputFileName) {
+ this.outputFileName = outputFileName;
+ }
+
+ /**
+ * Register this driver as a triggerable, and open the trigger log and the
+ * LCIO writer if their paths were configured.
+ */
+ @Override
+ public void startOfData() {
+ addTriggerable(this);
+
+ if (outputFileName != null) {
+ try {
+ outputStream = new PrintWriter(new PrintStream(outputFileName), true);
+ } catch (IOException ex) {
+ throw new RuntimeException("Invalid outputFilePath!");
+ }
+ } else {
+ if (_DEBUG) {
+ outputStream = new PrintWriter(System.out, true);
+ }
+ }
+
+ if (lcioFile != null) {
+ try {
+ lcioWriter = new LCIOWriter(new File(lcioFile));
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ numTriggers = 0;
+ }
+
+ /**
+ * Evaluate the trigger for this event; on a trigger (outside the dead-time
+ * window) notify all triggerables, record readout timestamps, log, write
+ * the trigger bank, and optionally write the event to LCIO.
+ */
+ @Override
+ public void process(EventHeader event) {
+ triggerBit = false; //reset trigger
+ //System.out.println(this.getClass().getCanonicalName() + " - process");
+ // Dead-time veto: only consult triggerDecision() when enough ticks elapsed.
+ if ((lastTrigger == Integer.MIN_VALUE || ClockSingleton.getClock() - lastTrigger > deadTime) && triggerDecision(event)) {
+ // NOTE(review): sendTrigger() returns false when a triggerable is not
+ // live, but the return value is ignored here, so the event is still
+ // counted and written as a trigger — confirm intended.
+ sendTrigger();
+ for (TriggerableDriver triggerable : triggerables) {
+ ReadoutTimestamp.addTimestamp(triggerable, event);
+ }
+ triggerBit = true;
+ lastTrigger = ClockSingleton.getClock();
+ numTriggers++;
+ if (_DEBUG) {
+ System.out.printf(this.getClass().getSimpleName() + ": Trigger on event %d\n", event.getEventNumber());
+ }
+ if (outputStream != null) {
+ outputStream.printf("Trigger on event %d\n", event.getEventNumber());
+ }
+
+ // If an ECal trigger signal has been sent store the trigger
+ // time offset by the trigger latencies
+ if (_DEBUG) {
+ System.out.println(this.getClass().getSimpleName() + ": Trigger added on event " + event.getEventNumber());
+ }
+
+ if (outputStream != null) {
+ outputStream.printf("trigger sent to ET event builder on event %d\n", event.getEventNumber());
+ }
+ makeTriggerData(event, "TriggerStatus");
+ if (lcioWriter != null) {
+ try {
+ lcioWriter.write(event);
+ } catch (IOException ex) {
+ Logger.getLogger(TriggerDriver.class.getName()).log(Level.SEVERE, null, ex);
+ }
+ }
+ }
+
+ // Check if there are any pending trigger bank triggers to process
+ checkTrigger(event);
+ }
+
+ /**
+ * Notify every registered triggerable of a trigger, but only if all of
+ * them are live.
+ *
+ * @return true if the trigger was delivered; false if any triggerable was
+ * not live (in which case none are notified)
+ */
+ protected static boolean sendTrigger() {
+ for (TriggerableDriver triggerable : triggerables) {
+ if (!triggerable.isLive()) {
+ return false;
+ }
+ }
+ for (TriggerableDriver triggerable : triggerables) {
+ triggerable.addTrigger();
+ }
+ return true;
+ }
+
+ public static void addTriggerable(TriggerableDriver triggerable) {
+ triggerables.add(triggerable);
+ }
+
+ // Writes the trigger bank when a pending (delayed) trigger becomes due.
+ @Override
+ protected void processTrigger(EventHeader event) {
+ if (outputStream != null) {
+ outputStream.printf("Trigger bank trigger sent on event %d\n", event.getEventNumber());
+ }
+ makeTriggerData(event, "TriggerBank");
+ }
+
+ // Subclasses implement the actual trigger algorithm here.
+ protected abstract boolean triggerDecision(EventHeader event);
+
+ /**
+ * Make a dummy TriggerData
+ */
+ protected void makeTriggerData(EventHeader event, String collectionName) {
+ TriggerData tData = new TriggerData(new int[8]);
+ List<TriggerData> triggerList = new ArrayList<TriggerData>();
+ triggerList.add(tData);
+ event.put(collectionName, triggerList, TriggerData.class, 0);
+ }
+
+ /**
+ * Print the final trigger count and close the log and the LCIO writer.
+ */
+ @Override
+ public void endOfData() {
+ if (outputStream != null) {
+ outputStream.printf("Trigger count: %d\n", numTriggers);
+ outputStream.close();
+ }
+ if (lcioWriter != null) {
+ try {
+ lcioWriter.close();
+ } catch (IOException ex) {
+ Logger.getLogger(TriggerDriver.class.getName()).log(Level.SEVERE, null, ex);
+ }
+ }
+ System.out.printf(this.getClass().getSimpleName() + ": Trigger count: %d\n", numTriggers);
+ }
+
+ // True iff the most recently processed event fired the trigger.
+ public static boolean triggerBit() {
+ return triggerBit;
+ }
+
+ public int getTimestampType() {
+ return ReadoutTimestamp.SYSTEM_TRIGGER;
+ }
+}
\ No newline at end of file
java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal
--- java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/TriggerableDriver.java (rev 0)
+++ java/sandbox/ecal-readout-sim/src/main/java/org/hps/readout/ecal/TriggerableDriver.java 2014-03-25 23:53:28 UTC (rev 347)
@@ -0,0 +1,57 @@
+package org.hps.readout.ecal;
+
+import java.util.LinkedList;
+import java.util.Queue;
+import org.lcsim.event.EventHeader;
+import org.lcsim.hps.util.ClockSingleton;
+import org.lcsim.util.Driver;
+
+/**
+ * A driver that accepts triggers from TriggerDriver.
+ * To implement, write your own processTrigger(), and call checkTrigger() somewhere in process().
+ * You might want to set your own default latency in your constructor.
+ * readoutDeltaT() and isLive() are meant to be overridden if you're doing something unusual.
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: TriggerableDriver.java,v 1.3 2013/03/20 01:03:32 meeg Exp $
+ */
+public abstract class TriggerableDriver extends Driver {
+
+ private Queue<Double> triggerTimestamps = new LinkedList<Double>();
+ protected double triggerDelay = 0.0; // [ns]
+
+ public void setTriggerDelay(double triggerDelay) {
+ this.triggerDelay = triggerDelay;
+ }
+
+ /**
+ *
+ * @return time reference for hits written by this driver in response to a trigger
+ */
+ public double readoutDeltaT() {
+ return ClockSingleton.getTime() + triggerDelay;
+ }
+
+ @Override
+ public void startOfData() {
+ TriggerDriver.addTriggerable(this);
+ }
+
+ protected abstract void processTrigger(EventHeader event);
+
+ protected void checkTrigger(EventHeader event) {
+ if (triggerTimestamps.peek() != null && ClockSingleton.getTime() >= triggerTimestamps.peek()) {
+ processTrigger(event);
+ triggerTimestamps.remove();
+ }
+ }
+
+ public void addTrigger() {
+ triggerTimestamps.add(ClockSingleton.getTime() + triggerDelay);
+ }
+
+ public boolean isLive() {
+ return true;
+ }
+
+ public abstract int getTimestampType();
+}
SVNspam 0.1