Author: [log in to unmask]
Date: Thu Sep  1 04:27:30 2016
New Revision: 4478

Log: (empty)

Added:
    java/trunk/users/src/main/java/org/hps/users/spaul/AidaHistogramUtil.java
    java/trunk/users/src/main/java/org/hps/users/spaul/BestTrack.java
    java/trunk/users/src/main/java/org/hps/users/spaul/PeakPositionVsTime.java
    java/trunk/users/src/main/java/org/hps/users/spaul/PrintAidaTree.java
    java/trunk/users/src/main/java/org/hps/users/spaul/PrintStuff.java
    java/trunk/users/src/main/java/org/hps/users/spaul/RunningPedestalPrintDriver.java
    java/trunk/users/src/main/java/org/hps/users/spaul/ScalerAttenuationCalculator.java
    java/trunk/users/src/main/java/org/hps/users/spaul/ScalerZeropointCalculator.java
    java/trunk/users/src/main/java/org/hps/users/spaul/TestAIDA2ROOT.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/BumpHunt.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/Fitters.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/GenerateFakeStuff.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/GetEpsilon2.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/GetPullDistribution.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/GetYieldPerSignal.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/GoodnessOfFitAccumulator.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/Histogram.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/IFitResult.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/LogLikelihood.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/Polynomial.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/PolynomialPlusGaussian.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/WindowResult.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/WindowSet.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/Cuts.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/GenerateFakeResolutions.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/InvariantMassCalculator.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/MakeMassHistogram.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/PlotMassResolutionsMC.java
    java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/RecoilCalculator.java
    java/trunk/users/src/main/java/org/hps/users/spaul/dimuon/
    java/trunk/users/src/main/java/org/hps/users/spaul/dimuon/DimuonFilterDriver.java
    java/trunk/users/src/main/java/org/hps/users/spaul/pulser/
    java/trunk/users/src/main/java/org/hps/users/spaul/pulser/UncorrClusterDriver.java

Added: java/trunk/users/src/main/java/org/hps/users/spaul/AidaHistogramUtil.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/AidaHistogramUtil.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/AidaHistogramUtil.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,31 @@
+package org.hps.users.spaul;
+
+import hep.aida.IHistogram1D;
+import hep.aida.IHistogramFactory;
+import hep.aida.ref.histogram.Histogram1D;
+
+public class AidaHistogramUtil {
+    public static IHistogram1D shift(final IHistogram1D h, final int nbins){
+        return new Histogram1D(){ //read-only view of h with the bin index offset by nbins
+            public double binHeight(int i){
+                return h.binHeight(i-nbins);
+            }
+            public double binError(int i){
+                return h.binError(i-nbins);
+            }
+        };
+    }
+    public static IHistogram1D shift(IHistogramFactory hf, IHistogram1D h, final int nbins){
+        IHistogram1D copy = hf.createCopy("copy", h);
+        copy.reset();
+        if(nbins > 0)
+            for(int i = 0; i< copy.axis().bins()-nbins; i++){
+                copy.fill(h.binMean(i), h.binHeight(i+nbins));
+            }
+        else 
+            for(int i = -nbins; i< copy.axis().bins(); i++){
+                copy.fill(h.binMean(i), h.binHeight(i+nbins));
+            }
+        return copy;
+    }
+}
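
For context, the second shift overload is the one that produces a standalone shifted copy through the histogram factory. A minimal usage sketch (the tree, histogram name, binning, and shift amount below are made up for illustration):

    IAnalysisFactory af = IAnalysisFactory.create();
    ITree tree = af.createTreeFactory().create();
    IHistogramFactory hf = af.createHistogramFactory(tree);
    IHistogram1D raw = hf.createHistogram1D("raw", 200, 0.0, 2.0);
    // ... fill raw ...
    IHistogram1D shifted = AidaHistogramUtil.shift(hf, raw, 3); //copy of raw with contents offset by 3 bins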

Added: java/trunk/users/src/main/java/org/hps/users/spaul/BestTrack.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/BestTrack.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/BestTrack.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,25 @@
+package org.hps.users.spaul;
+
+import java.util.List;
+
+import org.lcsim.event.Track;
+import org.lcsim.event.TrackerHit;
+
+public class BestTrack {
+	static boolean shareHits(Track t1, Track t2){
+		for(TrackerHit hit : t1.getTrackerHits()){
+			for(TrackerHit hit2 : t2.getTrackerHits()){
+				if(equals(hit, hit2))
+					return true;
+			}
+		}
+		return false;
+	}
+	
+	static double threshold = 3;
+	static boolean equals(TrackerHit hit1, TrackerHit hit2){
+		return Math.hypot(hit1.getPosition()[0]-hit2.getPosition()[0],
+					hit1.getPosition()[1]-hit2.getPosition()[1])< threshold;
+	}
+	
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/PeakPositionVsTime.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/PeakPositionVsTime.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/PeakPositionVsTime.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,103 @@
+package org.hps.users.spaul;
+
+import java.util.List;
+
+import hep.aida.IHistogram1D;
+
+import org.hps.conditions.beam.BeamEnergy.BeamEnergyCollection;
+import org.hps.recon.ecal.EcalRawConverter;
+import org.hps.record.triggerbank.AbstractIntData;
+import org.hps.record.triggerbank.TIData;
+import org.lcsim.conditions.ConditionsManager;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.GenericObject;
+import org.lcsim.util.Driver;
+import org.lcsim.util.aida.AIDA;
+
+public class PeakPositionVsTime extends Driver{
+	
+	
+	AIDA aida = AIDA.defaultInstance();
+	
+	private int skipEvents = 10000;
+	private int samplesPerPrint = 1000;
+	IHistogram1D radPeak, feePeak;
+	public void setSamplesPerPrint(int samplesPerPrint){
+		this.samplesPerPrint = samplesPerPrint;
+	}
+	public void setSkipEvents(int skipEvents){
+		this.skipEvents = skipEvents;
+	}
+	double beamEnergy = 0;
+	public void startOfData(){
+		
+		BeamEnergyCollection beamEnergyCollection = 
+            this.getConditionsManager().getCachedConditions(BeamEnergyCollection.class, "beam_energies").getCachedData();        
+        if(beamEnergyCollection != null && beamEnergyCollection.size() != 0)
+            beamEnergy = beamEnergyCollection.get(0).getBeamEnergy();
+		radPeak = aida.histogram1D("radpeak", 200, 0, 1.5*beamEnergy);
+		feePeak = aida.histogram1D("feepeak", 200, 0, 1.5*beamEnergy);
+		
+	}
+	
+	
+	@Override
+	public void process(EventHeader event){
+		int eventNum = event.getEventNumber();
+		if(eventNum < skipEvents)
+			return;
+		if(eventNum % samplesPerPrint == 0){ //print and reset the accumulated peaks once every samplesPerPrint events
+			double feePeakValue = getMax(feePeak);
+			double radPeakValue = getMax(radPeak);
+			long time = event.getTimeStamp();
+			System.out.println(time + "\t" + eventNum + "\t" + feePeakValue + "\t" + radPeakValue);
+			radPeak.reset();
+			feePeak.reset();
+		}
+		
+		
+		boolean singles1 = false, pairs1 = false;
+		for (GenericObject gob : event.get(GenericObject.class,"TriggerBank"))
+	    {
+	      if (!(AbstractIntData.getTag(gob) == TIData.BANK_TAG)) continue;
+	      TIData tid = new TIData(gob);
+	      if (tid.isSingle1Trigger()) singles1 = true;
+	      if (tid.isPair1Trigger()) pairs1 = true;
+	    }
+		
+		
+		if(! singles1 && ! pairs1)
+			return;
+		List<Cluster> clusters = event.get(Cluster.class, "EcalClustersCorr");
+		if(singles1){
+			for(Cluster cluster : clusters){
+				feePeak.fill(cluster.getEnergy());
+			}
+		}
+		if(pairs1){
+			for(Cluster c1 : clusters){
+				for(Cluster c2 : clusters){
+					double e1 = c1.getEnergy();
+					double e2 = c2.getEnergy();
+					if(e1 < .8*beamEnergy && e2 < .8*beamEnergy //avoid FEEs
+							&& c1.getPosition()[1]*c2.getPosition()[1]<0 //both on opposite halves of detector
+							){
+						radPeak.fill(e1+e2);
+					}
+				}
+			}
+		}
+		
+	}
+	double getMax(IHistogram1D hist){
+		int peakBin = 0;
+		for(int i = 0; i< hist.axis().bins(); i++){
+			if(hist.binHeight(i) > hist.binHeight(peakBin))
+				peakBin = i;
+		}
+		
+		
+		return (hist.axis().binLowerEdge(peakBin)+hist.axis().binUpperEdge(peakBin))/2.;
+	}
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/PrintAidaTree.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/PrintAidaTree.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/PrintAidaTree.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,23 @@
+package org.hps.users.spaul;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import hep.aida.IAnalysisFactory;
+import hep.aida.IHistogramFactory;
+import hep.aida.ITree;
+
+public class PrintAidaTree {
+    public static void main(String arg[]) throws IllegalArgumentException, IOException{
+        String input = arg[0];
+        IAnalysisFactory af = IAnalysisFactory.create();
+        System.out.println("opening file..");
+        ITree tree = af.createTreeFactory().create(input, "xml", true);
+        System.out.println("opened file");
+        String[] names = tree.listObjectNames(".", true);
+        String[] types = tree.listObjectTypes(".", true);
+        for(int i = 0; i<names.length; i++){
+            System.out.println(names[i] + "\t" + types[i]);
+        }
+    }
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/PrintStuff.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/PrintStuff.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/PrintStuff.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,45 @@
+package org.hps.users.spaul;
+
+import java.util.List;
+import java.util.Map;
+
+import org.hps.datacat.DatacatUtilities;
+import org.hps.datacat.EvioDatasetIndex;
+
+import org.srs.datacat.model.DatasetModel;
+import org.srs.datacat.model.DatasetResultSetModel;
+import org.srs.datacat.model.dataset.DatasetWithViewModel;
+
+public class PrintStuff {
+	public static void main(String arg[]){
+		/*for(int run = 7000; run <= 8100; run++){
+			EvioDatasetIndex edi;
+			try{
+				edi = new EvioDatasetIndex(new DatacatUtilities(), run);
+			}catch(Exception e){
+				continue;
+			}
+			for(int fileNumber = 0; fileNumber < edi.getDatasets().getCount(); fileNumber++){
+				DatasetModel dataset = edi.findByFileNumber(fileNumber);
+				DatasetWithViewModel datasetView = (DatasetWithViewModel) dataset;
+				if(datasetView == null)
+					continue;
+				Map<String, Object> metadata = datasetView.getMetadataMap();
+
+				long firstTimestamp = (Long) metadata.get("FIRST_HEAD_TIMESTAMP");
+				long lastTimestamp = (Long) metadata.get("LAST_HEAD_TIMESTAMP");
+				
+				long tiTimeOffset = Long.parseLong((String) metadata.get("TI_TIME_MIN_OFFSET"));
+				//long fileNumber = (Long) metadata.get("FILE");
+				long firstPhysicsEvent = (Long) metadata.get("FIRST_PHYSICS_EVENT");
+				long lastPhysicsEvent = (Long) metadata.get("LAST_PHYSICS_EVENT");
+				long nEvents = lastPhysicsEvent- firstPhysicsEvent+1; 
+				
+				int firstTI = 0;
+				int lastTI = 0;
+				System.out.printf("%d,%d,%d,0,%d,%d,%d,%d,%d\n", run, fileNumber, nEvents, firstTimestamp, lastTimestamp, firstTI, lastTI, tiTimeOffset);
+				
+			}
+		}*/
+	}
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/RunningPedestalPrintDriver.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/RunningPedestalPrintDriver.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/RunningPedestalPrintDriver.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,38 @@
+package org.hps.users.spaul;
+
+import org.hps.recon.ecal.EcalRawConverter;
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+import org.lcsim.util.aida.AIDA;
+
+public class RunningPedestalPrintDriver extends Driver{
+	private static int NCHANNELS = 422;
+	AIDA aida = AIDA.defaultInstance();
+	private int skipEvents = 10000;
+	private int prescale = 1000;
+	public void setPrescale(int prescale){
+		this.prescale = prescale;
+	}
+	public void setSkipEvents(int skipEvents){
+		this.skipEvents = skipEvents;
+	}
+	
+	@Override
+	public void process(EventHeader event){
+		int eventNum = event.getEventNumber();
+		if(eventNum < skipEvents)
+			return;
+		if(eventNum % prescale != 0)
+			return;
+		long time = event.getTimeStamp();
+		EcalRawConverter erc = new EcalRawConverter();
+		double[] pedestals = new double[NCHANNELS];
+		for(int cellID = 1; cellID<= NCHANNELS; cellID++){
+			pedestals[cellID-1] = erc.getSingleSamplePedestal(event, cellID);
+		}
+		String line = time + "\t" + eventNum;
+		for(int i = 0; i< NCHANNELS; i++)
+			line += "\t" + pedestals[i];
+		System.out.println(line);
+	}
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/ScalerAttenuationCalculator.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/ScalerAttenuationCalculator.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/ScalerAttenuationCalculator.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,255 @@
+package org.hps.users.spaul;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TimeZone;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.cli.PosixParser;
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVParser;
+import org.apache.commons.csv.CSVRecord;
+import org.hps.conditions.database.DatabaseConditionsManager;
+import org.hps.conditions.svt.SvtAlignmentConstant;
+import org.hps.conditions.svt.SvtBiasConstant;
+import org.hps.conditions.svt.SvtBiasConstant.SvtBiasConstantCollection;
+import org.hps.conditions.svt.SvtMotorPosition;
+import org.hps.conditions.svt.SvtMotorPosition.SvtMotorPositionCollection;
+import org.hps.conditions.svt.SvtTimingConstants;
+import org.hps.rundb.RunManager;
+import org.hps.users.meeg.SvtChargeIntegrator;
+
+/**
+ * Calculate the scaler attenuation factor for each run from a MYA dump of scalerS2b and the BPM readback, using per-run zeropoint values
+ * @author Sebouh Paul
+ *
+ */
+public class ScalerAttenuationCalculator {
+	//after the value of scalerS2b drops below this value, (at a beam trip)
+	// take 10 samples, and use the 10th sample is the zero point value
+	// take 10 samples, and use the 10th sample as the zero point value
+
+	/**
+	 *
+	 * @param args the command line arguments (requires a CSV run/file log file
+	 * and a MYA dump file.)
+	 */
+	public static void main(String[] args) {
+
+		Options options = new Options();
+		options.addOption(new Option("z", true, "use zeropoint value from a file"));
+		
+		final CommandLineParser parser = new PosixParser();
+		CommandLine cl = null;
+		
+		try {
+			cl = parser.parse(options, args);
+		} catch (ParseException e) {
+			throw new RuntimeException("Cannot parse.", e);
+		}
+
+		
+
+		Map<Integer, Long> runErrorMap = new HashMap<Integer, Long>();
+		
+
+		if (cl.getArgs().length != 2) {
+			printUsage(options);
+			return;
+		}
+
+		List<CSVRecord> records = null;
+		try {
+			FileReader reader = new FileReader(cl.getArgs()[0]);
+			final CSVParser csvParser = new CSVParser(reader, CSVFormat.DEFAULT);
+
+			records = csvParser.getRecords();
+
+			csvParser.close();
+		} catch (FileNotFoundException ex) {
+			Logger.getLogger(ScalerAttenuationCalculator.class.getName()).log(Level.SEVERE, null, ex);
+		} catch (IOException ex) {
+			Logger.getLogger(ScalerAttenuationCalculator.class.getName()).log(Level.SEVERE, null, ex);
+		}
+
+		SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
+		dateFormat.setTimeZone(TimeZone.getTimeZone("America/New_York"));
+
+		Map<Integer, Double> zeropointMap = new HashMap<Integer, Double>();
+		if(cl.hasOption("z")){
+			try {
+				BufferedReader br = new BufferedReader(new FileReader(cl.getOptionValue("z")));
+				String line;
+				System.err.println("zero point file header: " + br.readLine()); //discard the first line
+				double zeropoint = 0;
+				while ((line = br.readLine()) != null) {
+					String arr[] = line.split("[ \t]+");
+					int run = Integer.parseInt(arr[0]);
+					
+					//if the zero point is unknown (ie, a very short run),
+					//if the zero point is unknown (i.e., a very short run),
+					if(Double.parseDouble(arr[1]) != 0)
+						zeropoint = Double.parseDouble(arr[1]);
+					
+					zeropointMap.put(run, zeropoint);
+					//                    System.out.format("%d %d\n", run, errorTime);
+				}
+			} catch (FileNotFoundException ex) {
+				Logger.getLogger(SvtChargeIntegrator.class.getName()).log(Level.SEVERE, null, ex);
+			} catch (IOException ex) {
+				Logger.getLogger(SvtChargeIntegrator.class.getName()).log(Level.SEVERE, null, ex);
+			}
+		}
+		
+		try {
+			BufferedReader br = new BufferedReader(new FileReader(cl.getArgs()[1]));
+			String line;
+			System.err.println("myaData header: " + br.readLine()); //discard the first line
+			/*if (perRun) {
+				if (cl.hasOption("e")) {
+					System.out.println("run_num\tnominal_position\tnEvents\ttotalQ\ttotalQ_withbias\ttotalQ_atnom\ttotalQ_noerror\tgatedQ\tgatedQ_withbias\tgatedQ_atnom\tgatedQ_noerror\tgoodQ\tgoodQ_withbias\tgoodQ_atnom\tgoodQ_noerror");
+				} else {
+					System.out.println("run_num\tnominal_position\tnEvents\ttotalQ\ttotalQ_withbias\ttotalQ_atnom\tgatedQ\tgatedQ_withbias\tgatedQ_atnom\tgoodQ\tgoodQ_withbias\tgoodQ_atnom");
+				}
+			} else {
+				if (cl.hasOption("e")) {
+					System.out.println("run_num\tfile_num\tnominal_position\tnEvents\ttotalQ\ttotalQ_withbias\ttotalQ_atnom\ttotalQ_noerror\tgatedQ\tgatedQ_withbias\tgatedQ_atnom\tgatedQ_noerror\tgoodQ\tgoodQ_withbias\tgoodQ_atnom\tgoodQ_noerror");
+				} else {
+					System.out.println("run_num\tfile_num\tnominal_position\tnEvents\ttotalQ\ttotalQ_withbias\ttotalQ_atnom\tgatedQ\tgatedQ_withbias\tgatedQ_atnom\tgoodQ\tgoodQ_withbias\tgoodQ_atnom");
+				}
+			}*/
+			System.out.println("run_num\tzeropoint\tattenuation");
+
+			
+			Date date = null;
+			Date lastDate;
+
+			for (CSVRecord record : records) {
+				int runNum = Integer.parseInt(record.get(0));
+				
+
+				
+
+				Date startDate, endDate;
+				long firstTime, lastTime;//Unix time from head bank
+				long firstTI, lastTI;//TI timestamp from TI bank
+
+				
+					firstTime = Long.parseLong(record.get(7));
+					lastTime = Long.parseLong(record.get(8));
+					firstTI = Long.parseLong(record.get(10));
+					lastTI = Long.parseLong(record.get(11));
+
+				
+
+				
+					if (firstTime == 0 || lastTime == 0) {
+						continue;
+					}
+					startDate = new Date(firstTime * 1000);
+					endDate = new Date(lastTime * 1000);
+				
+
+				
+
+				
+				br.mark(1000);
+				
+				
+				
+				
+				double attenuation_count = 0, attenuation_sum = 0;
+				
+				//use the zeropoint for this run if one is known; otherwise fall back to zero
+				double zeropoint = zeropointMap.containsKey(runNum) ? zeropointMap.get(runNum) : 0;
+				
+				double crossing_time = 0;
+				
+				double  bpm_threshold = 40;
+				
+				while ((line = br.readLine()) != null) {
+					String arr[] = line.split(" +");
+
+					if (arr.length != 5) {
+						throw new RuntimeException("this line is not correct.");
+					}
+					lastDate = date;
+					date = dateFormat.parse(arr[0] + " " + arr[1]);
+					if (date.before(startDate)) { //not in the file's time range yet; keep reading the file 
+						continue;
+					}
+
+					double scaler, livetime, bpm;
+					if (arr[2].equals("<undefined>")) {
+						scaler = 0;
+					} else {
+						scaler = Double.parseDouble(arr[2]);
+					}
+					
+					if (arr[3].equals("<undefined>")) {
+						bpm = 0;
+					} else {
+						bpm = Double.parseDouble(arr[3]);
+					}
+
+					
+
+					if (lastDate != null) {
+						long dtStart = Math.max(startDate.getTime(), lastDate.getTime());
+						long dtEnd = Math.min(date.getTime(), endDate.getTime());
+						double dt = (dtEnd - dtStart) / 1000.0;
+						
+						// average only the events when the bpm has been above a threshold for
+						// at least 15 seconds
+						if(bpm > bpm_threshold && crossing_time == 0)
+							crossing_time =date.getTime();
+						else if(bpm < bpm_threshold)
+							crossing_time = 0;
+						else if(crossing_time != 0 && date.getTime() > crossing_time+15000 && bpm > bpm_threshold){
+							attenuation_sum+= (scaler-zeropoint)/bpm;
+							attenuation_count ++;
+						}
+						
+						
+					}
+					if (date.after(endDate)) {//this is the last interval overlapping the file's time range; backtrack so this line will be read again for the next file
+						date = lastDate;
+						try {
+							br.reset();
+						} catch (IOException e) {
+						}
+						break;
+					}
+					br.mark(1000);
+					//current = nextCurrent;
+				}
+				System.out.printf("%d\t%f\t%f\n", runNum, zeropoint, attenuation_sum/attenuation_count);
+				
+			}
+		} catch (Exception ex) {
+			Logger.getLogger(ScalerAttenuationCalculator.class.getName()).log(Level.SEVERE, null, ex);
+		}
+
+	}
+
+	private static void printUsage(Options options) {
+		HelpFormatter formatter = new HelpFormatter();
+		System.err.println("ScalerAttenuationCalculator <CSV log file> <MYA dump file>");
+		formatter.printHelp("Need to adhere to these options", options);
+	}
+}
+
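
In other words, for each run the inner loop above averages the zeropoint-subtracted scaler reading over the BPM current, using only samples taken after the BPM has been above bpm_threshold for at least 15 seconds. Schematically (a paraphrase of the code, not an official definition):

    \[ \mathrm{attenuation} = \frac{1}{N_{\mathrm{samples}}} \sum_i \frac{\mathrm{scaler}_i - \mathrm{zeropoint}}{\mathrm{bpm}_i} \]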

Added: java/trunk/users/src/main/java/org/hps/users/spaul/ScalerZeropointCalculator.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/ScalerZeropointCalculator.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/ScalerZeropointCalculator.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,209 @@
+package org.hps.users.spaul;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TimeZone;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.cli.PosixParser;
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVParser;
+import org.apache.commons.csv.CSVRecord;
+import org.hps.conditions.database.DatabaseConditionsManager;
+import org.hps.conditions.svt.SvtAlignmentConstant;
+import org.hps.conditions.svt.SvtBiasConstant;
+import org.hps.conditions.svt.SvtBiasConstant.SvtBiasConstantCollection;
+import org.hps.conditions.svt.SvtMotorPosition;
+import org.hps.conditions.svt.SvtMotorPosition.SvtMotorPositionCollection;
+import org.hps.conditions.svt.SvtTimingConstants;
+import org.hps.rundb.RunManager;
+
+/**
+ * Calculate the zeropoint value for each run from a MYA dump of scalerS2b
+ * @author Sebouh Paul
+ *
+ */
+public class ScalerZeropointCalculator {
+	//after the value of scalerS2b drops below this value, (at a beam trip)
+	// take 10 samples, and use the 10th sample as the zero point value
+	static double threshold = 500;
+
+	/**
+	 *
+	 * @param args the command line arguments (requires a CSV run/file log file
+	 * and a MYA dump file.)
+	 */
+	public static void main(String[] args) {
+
+		Options options = new Options();
+		
+		final CommandLineParser parser = new PosixParser();
+		CommandLine cl = null;
+		try {
+			cl = parser.parse(options, args);
+		} catch (ParseException e) {
+			throw new RuntimeException("Cannot parse.", e);
+		}
+
+		
+
+		Map<Integer, Long> runErrorMap = new HashMap<Integer, Long>();
+		
+
+		if (cl.getArgs().length != 2) {
+			printUsage(options);
+			return;
+		}
+
+		List<CSVRecord> records = null;
+		try {
+			FileReader reader = new FileReader(cl.getArgs()[0]);
+			final CSVParser csvParser = new CSVParser(reader, CSVFormat.DEFAULT);
+
+			records = csvParser.getRecords();
+
+			csvParser.close();
+		} catch (FileNotFoundException ex) {
+			Logger.getLogger(ScalerZeropointCalculator.class.getName()).log(Level.SEVERE, null, ex);
+		} catch (IOException ex) {
+			Logger.getLogger(ScalerZeropointCalculator.class.getName()).log(Level.SEVERE, null, ex);
+		}
+
+		SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
+		dateFormat.setTimeZone(TimeZone.getTimeZone("America/New_York"));
+
+		try {
+			BufferedReader br = new BufferedReader(new FileReader(cl.getArgs()[1]));
+			String line;
+			System.err.println("myaData header: " + br.readLine()); //discard the first line
+			/*if (perRun) {
+				if (cl.hasOption("e")) {
+					System.out.println("run_num\tnominal_position\tnEvents\ttotalQ\ttotalQ_withbias\ttotalQ_atnom\ttotalQ_noerror\tgatedQ\tgatedQ_withbias\tgatedQ_atnom\tgatedQ_noerror\tgoodQ\tgoodQ_withbias\tgoodQ_atnom\tgoodQ_noerror");
+				} else {
+					System.out.println("run_num\tnominal_position\tnEvents\ttotalQ\ttotalQ_withbias\ttotalQ_atnom\tgatedQ\tgatedQ_withbias\tgatedQ_atnom\tgoodQ\tgoodQ_withbias\tgoodQ_atnom");
+				}
+			} else {
+				if (cl.hasOption("e")) {
+					System.out.println("run_num\tfile_num\tnominal_position\tnEvents\ttotalQ\ttotalQ_withbias\ttotalQ_atnom\ttotalQ_noerror\tgatedQ\tgatedQ_withbias\tgatedQ_atnom\tgatedQ_noerror\tgoodQ\tgoodQ_withbias\tgoodQ_atnom\tgoodQ_noerror");
+				} else {
+					System.out.println("run_num\tfile_num\tnominal_position\tnEvents\ttotalQ\ttotalQ_withbias\ttotalQ_atnom\tgatedQ\tgatedQ_withbias\tgatedQ_atnom\tgoodQ\tgoodQ_withbias\tgoodQ_atnom");
+				}
+			}*/
+			System.out.println("run_num\tzeropoint");
+
+			
+			Date date = null;
+			Date lastDate;
+
+			for (CSVRecord record : records) {
+				int runNum = Integer.parseInt(record.get(0));
+				
+
+				
+
+				Date startDate, endDate;
+				long firstTime, lastTime;//Unix time from head bank
+				long firstTI, lastTI;//TI timestamp from TI bank
+
+				
+					firstTime = Long.parseLong(record.get(7));
+					lastTime = Long.parseLong(record.get(8));
+					firstTI = Long.parseLong(record.get(10));
+					lastTI = Long.parseLong(record.get(11));
+
+				
+
+				
+					if (firstTime == 0 || lastTime == 0) {
+						continue;
+					}
+					startDate = new Date(firstTime * 1000);
+					endDate = new Date(lastTime * 1000);
+				
+
+				
+
+				
+				br.mark(1000);
+				
+				
+				//find the 10th sample after the first trip begins (10 is an arbitrary number)
+				
+				double zeroPoint = 0;
+				int count = 0;
+				while ((line = br.readLine()) != null) {
+					String arr[] = line.split(" +");
+
+					if (arr.length != 4) {
+						throw new RuntimeException("this line is not correct.");
+					}
+					lastDate = date;
+					date = dateFormat.parse(arr[0] + " " + arr[1]);
+					if (date.before(startDate)) { //not in the file's time range yet; keep reading the file 
+						continue;
+					}
+
+					double current, livetime;
+					if (arr[2].equals("<undefined>")) {
+						current = 0;
+					} else {
+						current = Double.parseDouble(arr[2]);
+					}
+					
+
+					
+
+					if (lastDate != null) {
+						long dtStart = Math.max(startDate.getTime(), lastDate.getTime());
+						long dtEnd = Math.min(date.getTime(), endDate.getTime());
+						double dt = (dtEnd - dtStart) / 1000.0;
+						
+						if(current < threshold)
+							count++;
+						if(count == 10){
+							zeroPoint = current;
+							break;
+						}
+						
+					}
+					if (date.after(endDate)) {//this is the last interval overlapping the file's time range; backtrack so this line will be read again for the next file
+						date = lastDate;
+						try {
+							br.reset();
+						} catch (IOException e) {
+						}
+						break;
+					}
+					br.mark(1000);
+					//current = nextCurrent;
+				}
+				System.out.printf("%d\t%f\n", runNum, zeroPoint);
+				
+			}
+		} catch (Exception ex) {
+			Logger.getLogger(ScalerZeropointCalculator.class.getName()).log(Level.SEVERE, null, ex);
+		}
+
+	}
+
+	private static void printUsage(Options options) {
+		HelpFormatter formatter = new HelpFormatter();
+		System.err.println("ScalerZeropointCalculator <CSV log file> <MYA dump file>");
+		formatter.printHelp("Need to adhere to these options", options);
+	}
+}
+

Added: java/trunk/users/src/main/java/org/hps/users/spaul/TestAIDA2ROOT.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/TestAIDA2ROOT.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/TestAIDA2ROOT.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,15 @@
+package org.hps.users.spaul;
+
+import java.io.IOException;
+
+import hep.aida.IHistogram1D;
+
+import org.lcsim.util.aida.AIDA;
+
+public class TestAIDA2ROOT {
+	public static void main(String arg[]) throws IOException{
+		AIDA aida = AIDA.defaultInstance();
+		IHistogram1D h = aida.histogram1D("/folder/title;xaxis;yaxis", 100, 0, 10);
+		aida.saveAs("/Users/sebouhpaul/test.root");
+	}
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/BumpHunt.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/BumpHunt.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/BumpHunt.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,104 @@
+package org.hps.users.spaul.bh;
+
+import hep.aida.IAnalysisFactory;
+
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.Arrays;
+
+import org.apache.commons.math3.fitting.GaussianCurveFitter;
+import org.apache.commons.math3.fitting.PolynomialCurveFitter;
+import org.apache.commons.math3.fitting.SimpleCurveFitter;
+import org.apache.commons.math3.distribution.ChiSquaredDistribution;
+
+public class BumpHunt {
+    static int degree;
+    static int iter;
+    public static void main(String arg[]) throws FileNotFoundException{
+        
+        Histogram h = new Histogram(arg[0]);
+        WindowSet ws = new WindowSet(arg[1]);
+        degree = Integer.parseInt(arg[2]);
+        String outfile = null;
+        if(arg.length > 3)
+            outfile = arg[3];
+        results = new ArrayList<WindowResult>();
+        for(int i = 0; i< ws.N; i++){
+            try{
+                results.add(process(h, ws, i));
+            
+            } catch(Exception e){
+                //skip this window if the fit fails
+            }
+        }
+        
+        if(outfile != null){
+            PrintWriter pw = new PrintWriter(outfile);
+            for(WindowResult wr : results){
+                pw.printf("%f %f %f %f %f%n", wr.min, wr.max, wr.mean, wr.yield, wr.pull);
+            }
+            pw.close(); //flush the results to disk
+        }
+    }
+    static ArrayList<WindowResult> results;
+
+    public static WindowResult process(Histogram h, WindowSet ws, int i) {
+        double min = ws.minMass[i];
+        double max = ws.maxMass[i];
+        double res = ws.resolution[i];
+    
+        Polynomial p = Fitters.polyfit(h, min, max, degree);
+
+        //PolynomialPlusGaussian pplusg = Fitters.pplusgfit(h, p, min, max, res);
+        PolynomialPlusGaussian pplusg = Fitters.boxFit(h, p, min, max, res);
+        
+         pplusg = Fitters.pplusgfit(h, p, min, max, res, pplusg.N/.80);
+        
+        System.out.println(min + " " + max);
+        System.out.println(Arrays.toString(p.p));
+        
+        //test statistic for polynomial-only fit.
+        double ll1 = LogLikelihood.getLogLikelihood(h, min, max, p);
+        System.out.println("poly LL: " + ll1);
+        System.out.println(Arrays.toString(pplusg.p) + " " + pplusg.mean + " " + pplusg.sigma + " " + pplusg.N);
+        double ll2 = LogLikelihood.getLogLikelihood(h, min, max, pplusg);
+        System.out.println("gauss poly LL: "  + ll2);
+        
+        double p_value = new ChiSquaredDistribution(1).cumulativeProbability(ll1-ll2);
+        
+        System.out.println();
+        
+        
+        double yield = pplusg.N;
+        double yieldError = getYieldError(p, h, res, pplusg.mean);
+        
+        double pull = yield/yieldError;
+        
+        WindowResult wr = new WindowResult();
+        wr.pplusg = pplusg;
+        wr.poly = p;
+        wr.max = max;
+        wr.min = min;
+        wr.mean = pplusg.mean;
+        wr.yield = yield;
+        wr.pull = pull;
+        wr.p_value = p_value;
+        
+        return wr;
+    }
+
+
+    final static double twoSqrtPi  = 2*Math.sqrt(Math.PI); 
+    private static double getYieldError(Polynomial p, Histogram h, double res,
+            double mean) {
+        double bw = h.maxMass[0]-h.minMass[0];
+
+        return Math.sqrt(p.get(mean)*res/bw)*.345;
+    }
+    
+    
+    
+    
+    
+    
+}
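
For reference, the p_value above is the chi-squared (1 dof) cumulative probability of the drop in -2 log L when the Gaussian term is added, a Wilks-theorem-style comparison; one degree of freedom appears consistent with the yield being the only floating signal parameter, since the gradients with respect to the mean and width are zeroed out in ParametricGpG:

    \[ p\_value = F_{\chi^2_1}\!\left(\mathrm{ll1} - \mathrm{ll2}\right) \]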

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/Fitters.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/Fitters.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/Fitters.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,139 @@
+package org.hps.users.spaul.bh;
+
+import java.util.ArrayList;
+
+import org.apache.commons.math3.analysis.ParametricUnivariateFunction;
+import org.apache.commons.math3.analysis.UnivariateFunction;
+import org.apache.commons.math3.fitting.PolynomialCurveFitter;
+import org.apache.commons.math3.fitting.SimpleCurveFitter;
+import org.apache.commons.math3.fitting.WeightedObservedPoint;
+
+public class Fitters {
+    static Polynomial polyfit(Histogram h, double min, double max, int degree){
+        PolynomialCurveFitter pcf = PolynomialCurveFitter.create(degree);
+        
+        ArrayList<WeightedObservedPoint> wops = new ArrayList(); 
+        for(int j = 0; j< h.Nbins; j++){
+            if(h.minMass[j]>= min && h.maxMass[j]<= max){
+                double w = 1/h.h[j];
+                WeightedObservedPoint wop = new WeightedObservedPoint(w, (h.minMass[j]+h.maxMass[j])/2., h.h[j]);
+                wops.add(wop);
+            }
+        }
+        double fit[] = pcf.fit(wops);
+        
+        Polynomial p = new Polynomial(fit.length);
+        p.p = fit;
+        return p;
+    }
+    static PolynomialPlusGaussian pplusgfit(Histogram h, Polynomial p, double min, double max, double res, double yield_guess){
+        
+        double start[] = new double[p.k+3];
+        start[start.length-3] = (min+max)/2;
+        start[start.length-2] = res;
+
+        start[start.length-1] = yield_guess;
+        
+        SimpleCurveFitter scf = SimpleCurveFitter.create(new ParametricGpG(), start);
+        ArrayList<WeightedObservedPoint> wops = new ArrayList(); 
+        for(int j = 0; j< h.Nbins; j++){
+            if(h.minMass[j]>= min && h.maxMass[j]<= max){
+                double w = 1/h.h[j];
+                WeightedObservedPoint wop = new WeightedObservedPoint(w, (h.minMass[j]+h.maxMass[j])/2., h.h[j]);
+                wops.add(wop);
+            }
+        }
+        double[] result = scf.fit(wops);
+        PolynomialPlusGaussian ppg = new PolynomialPlusGaussian(p.k);
+        for(int i = 0; i< p.k; i++)
+            ppg.p[i] = result[i];
+        ppg.mean = result[p.k];
+        ppg.sigma = result[p.k+1];
+        ppg.N = result[p.k+2];
+        
+        return ppg;
+    }
+    
+    static PolynomialPlusGaussian boxFit(Histogram h, Polynomial p, double min, double max, double res){
+        
+         
+        double boxmin = (max+min)/2. - 1.25*res;
+        double boxmax = (max+min)/2. + 1.25*res;
+        double N = 0;
+        for(int j = 0; j< h.Nbins; j++){
+            
+            if(h.minMass[j]>= boxmin && h.maxMass[j]<= boxmax){
+                double binMean = (h.maxMass[j]+ h.minMass[j])/2.;
+                N+= h.h[j]-p.get(binMean);
+            }
+        }
+        
+        //N/=.6;
+        
+        
+        PolynomialPlusGaussian ppg = new PolynomialPlusGaussian(p.k);
+        for(int i = 0; i< p.k; i++)
+            ppg.p[i] = p.p[i];
+        ppg.mean = (max+min)/2.;
+        ppg.sigma = res;
+        ppg.N = N;
+        
+        return ppg;
+    }
+    
+    
+    
+    
+    
+    static class ParametricGpG implements
+        ParametricUnivariateFunction{
+
+            
+
+            @Override
+            public double[] gradient(double arg0, double... arg1) {
+                
+                double[] ret = new double [arg1.length];
+                double powM = 1;
+                for(int i = 0; i<arg1.length-3; i++){
+                    ret[i] = powM;
+                    powM *= arg0;
+                    
+                }
+                double mean = arg1[arg1.length-3];
+                double sigma = arg1[arg1.length-2];
+                double N = arg1[arg1.length-1];
+                
+                ret[arg1.length-3] = 0;
+                                    //-N*(mean-arg0)/Math.pow(sigma,2)*Math.exp(-Math.pow((arg0-mean)/sigma,2)/2)
+                                    //      /(sqrt2pi*sigma);
+                
+                ret[arg1.length-2] =  0;
+                             //N*Math.exp(-Math.pow((arg0-mean)/sigma,2)/2)/(sqrt2pi)
+                            //*(-1/(sigma*sigma) + (arg0-mean)*(arg0-mean)/Math.pow(sigma,4));
+                
+                ret[arg1.length-1] = Math.exp(-Math.pow((arg0-mean)/sigma,2)/2)
+                                                    /(sqrt2pi*sigma);
+                
+                return ret; 
+            }
+
+            @Override
+            public double value(double arg0, double... arg1) {
+                
+                double y = 0;
+                double powM = 1;
+                for(int i = 0; i<arg1.length-3; i++){
+                    y += powM*arg1[i];
+                    powM *= arg0;
+                }
+                double mean = arg1[arg1.length-3];
+                double sigma = arg1[arg1.length-2];
+                double N = arg1[arg1.length-1];
+                return y + N*Math.exp(-Math.pow((arg0-mean)/sigma,2)/2)
+                                                /(sqrt2pi*sigma);
+            }
+    };
+
+    static final double sqrt2pi = Math.sqrt(2*Math.PI);
+}
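
The function fitted by pplusgfit (see ParametricGpG.value above) is a degree-(k-1) polynomial background plus a Gaussian whose normalization N is the integrated peak yield, with each bin weighted by 1/h, which plays the role of an inverse Poisson variance:

    \[ f(m) = \sum_{i=0}^{k-1} p_i\, m^i + \frac{N}{\sqrt{2\pi}\,\sigma}\, \exp\!\left(-\frac{(m-\mu)^2}{2\sigma^2}\right) \]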

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/GenerateFakeStuff.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/GenerateFakeStuff.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/GenerateFakeStuff.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,50 @@
+package org.hps.users.spaul.bh;
+
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
+import java.util.Random;
+
+public class GenerateFakeStuff {
+    static  Random random = new Random();
+    static PolynomialPlusGaussian pplusg;
+    public static void main(String arg[]) throws FileNotFoundException{
+
+        PrintWriter pw1 = new PrintWriter(arg[0]);
+        double signalSize = Double.parseDouble(arg[1]);
+        pw1.println(1000);
+        
+
+        double min = 0;
+        double max = 100;
+        pplusg = new PolynomialPlusGaussian(3);
+        //pplusg.p = new double[]{10000, -300, 20};
+
+        pplusg.p = randomPoly(min, max);
+        pplusg.mean = 50;
+        pplusg.sigma = 1;
+        pplusg.N = signalSize*Math.sqrt(pplusg.get(pplusg.mean));
+
+        System.out.println(pplusg);
+        for(int i = 0; i< 1000; i++){
+            double x = i/10.;
+            double y = pplusg.get(x);
+
+            pw1.println(i/10. + " " + (i+1)/10. + " " + Math.round(y + Math.sqrt(y)*random.nextGaussian()));
+        }
+        pw1.close();
+
+        PrintWriter pw2 = new PrintWriter("windows.txt");
+        pw2.println(90);
+        for(int i = 0; i< 90; i++){
+
+            pw2.println(i + " " + (i+10) + "  " + 1);
+            
+        }
+        pw2.close();
+    }
+    private static double[] randomPoly(double min, double max) {
+    
+        double[] p = new double[]{10000*(.7+.6*random.nextDouble()), -30*random.nextGaussian(), .2*random.nextGaussian()};
+        return p;
+    }
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/GetEpsilon2.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/GetEpsilon2.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/GetEpsilon2.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,13 @@
+package org.hps.users.spaul.bh;
+
+import java.io.FileNotFoundException;
+
+public class GetEpsilon2 {
+    public static void main(String arg[]) throws FileNotFoundException{
+        String radHistFile = arg[0];
+        String bumpHuntResults = arg[1];
+        Histogram radHist = new Histogram(radHistFile);
+        
+        
+    }
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/GetPullDistribution.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/GetPullDistribution.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/GetPullDistribution.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,40 @@
+package org.hps.users.spaul.bh;
+
+import java.io.FileNotFoundException;
+
+import hep.aida.*;
+
+public class GetPullDistribution {
+    public static void main(String arg[]) throws FileNotFoundException{
+        IAnalysisFactory af = IAnalysisFactory.create();
+        ITree tree = af.createTreeFactory().create();
+        IHistogramFactory hf = af.createHistogramFactory(tree);
+
+        IHistogram1D h1 = hf.createHistogram1D("pull", 100, -10, 10);
+        IPlotter p = af.createPlotterFactory().create();
+        p.region(0).plot(h1);
+        p.show();
+
+        IHistogram1D h2 = hf.createHistogram1D("yield", 100, -1000, 1000);
+        p = af.createPlotterFactory().create();
+        p.region(0).plot(h2);
+        p.show();
+
+        IHistogram1D h3 = hf.createHistogram1D("p_value", 100, 0, 1);
+        p = af.createPlotterFactory().create();
+        p.region(0).plot(h3);
+        p.show();
+
+        for(int i = 0; i<100000; i++){
+            GenerateFakeStuff.main(new String[]{"hist.txt", "0"});
+            BumpHunt.main(new String[]{"hist.txt", "windows.txt", "4"});
+            for(WindowResult wr : BumpHunt.results){
+                //if(wr.p_value > .999){
+                    h1.fill(wr.pull);
+                    h2.fill(wr.yield);
+                //}
+                h3.fill(wr.p_value);
+            }
+        }
+    }
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/GetYieldPerSignal.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/GetYieldPerSignal.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/GetYieldPerSignal.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,82 @@
+package org.hps.users.spaul.bh;
+
+import java.io.FileNotFoundException;
+import java.util.Random;
+
+import org.hps.users.spaul.StyleUtil;
+
+import hep.aida.*;
+
+public class GetYieldPerSignal {
+    public static void main(String arg[]) throws FileNotFoundException{
+        IAnalysisFactory af = IAnalysisFactory.create();
+        ITree tree = af.createTreeFactory().create();
+        IHistogramFactory hf = af.createHistogramFactory(tree);
+
+        IHistogram1D h1 = hf.createHistogram1D("pull", 100, -10, 10);
+        IPlotter p = af.createPlotterFactory().create();
+        p.region(0).plot(h1);
+        p.show();
+
+        IHistogram1D h2 = hf.createHistogram1D("yield", 100, -5000, 5000);
+        p = af.createPlotterFactory().create();
+        p.region(0).plot(h2);
+        p.show();
+
+        IHistogram1D h3 = hf.createHistogram1D("p_value", 100, 0, 1);
+        p = af.createPlotterFactory().create();
+        p.region(0).plot(h3);
+        p.show();
+        
+        IHistogram2D h4 = hf.createHistogram2D("yield vs signal", 100, -1000, 4000, 100, -1000, 4000);
+        
+        p = af.createPlotterFactory().create();
+        p.region(0).plot(h4);
+        StyleUtil.stylize(p.region(0), "yield", "signal");
+        p.show();
+        
+        
+        WindowSet ws = new WindowSet("windows.txt");
+        BumpHunt.degree = 2;
+
+        Random r = new Random();
+        
+        //variables for a linear fit
+        double sx = 0, sy = 0,sy2 = 0, sx2 = 0, sxy = 0, n = 0;
+        for(int i = 0; i<10000; i++){
+            GenerateFakeStuff.main(new String[]{"hist.txt", "5"});
+            
+            Histogram h = new Histogram("hist.txt");
+            
+            
+            WindowResult wr = BumpHunt.process(h, ws, 45);
+            h1.fill(wr.pull);
+            h2.fill(wr.yield);
+            //}
+            h3.fill(wr.p_value);
+            //}
+            double s = r.nextDouble()*10;
+            GenerateFakeStuff.main(new String[]{"hist.txt", Double.toString(s)});
+            h = new Histogram("hist.txt");
+            
+            
+             wr = BumpHunt.process(h, ws, 45);
+            h4.fill(GenerateFakeStuff.pplusg.N, wr.yield);
+            double x = GenerateFakeStuff.pplusg.N;
+            double y = wr.yield;
+            sy += y;
+            sy2 += y*y;
+            sx +=x;
+            sx2+= x*x;
+            sxy += x*y;
+            n++;
+        }
+        double m = (sxy*n-sy*sx)/(sx2*n-sx*sx);
+        double b = sy/n-m*sx/n;
+        System.out.println("m = " + m);
+        System.out.println("b = " + b);
+        
+        
+        
+    }
+}
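
The running sums sx, sy, sx2, sxy, n accumulated in the loop above are just the closed-form least-squares fit of measured yield versus injected signal, so the printed slope and intercept correspond to:

    \[ m = \frac{n\sum x_i y_i - \sum x_i \sum y_i}{n\sum x_i^2 - \left(\sum x_i\right)^2}, \qquad b = \bar{y} - m\,\bar{x} \]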

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/GoodnessOfFitAccumulator.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/GoodnessOfFitAccumulator.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/GoodnessOfFitAccumulator.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,60 @@
+package org.hps.users.spaul.bh;
+
+public class GoodnessOfFitAccumulator{
+    private double logLikelinessSum = 0;
+    private double chi2Sum = 0;
+    /**
+     * Accumulate a data point
+     * @param h the measured value
+     * @param f the fit value
+     */
+    public void accumulate(double h, double f) {
+        logLikelinessSum += 2*(f - h + (h > 0 ? h*Math.log(h/f) : 0)); //the h*log(h/f) term vanishes for empty bins
+        chi2Sum+= (f-h)*(f-h)/h;
+        total_dof ++;
+    } 
+    /**
+     *@return -2*log(L)
+     */
+    public double getLogLikeliness() {
+        return logLikelinessSum;
+    }
+    /**
+     *@return Pearson's chi^2
+     */
+    public double getChi2() {
+        return chi2Sum;
+    }
+    /**
+     *@return the number of degrees of freedom
+     */
+    public double getDOF(){
+        return total_dof;
+    }
+    /**
+     * @return the number of parameters in the fit
+     */
+    public double getFitParameterCount(){
+        return fitParamCount;
+    }
+
+    /**
+     * resets the sums
+     */
+    public void reset(){
+        logLikelinessSum =0;
+        chi2Sum = 0;
+        total_dof = -fitParamCount;
+    }
+    private int fitParamCount;
+    private int total_dof;
+    /**
+     * Constructs an instance of GoodnessOfFit.
+     * @param fitParamCount the number of parameters in the fit
+     */
+    public GoodnessOfFitAccumulator(int fitParamCount){
+        this.fitParamCount = fitParamCount;
+        this.total_dof = -fitParamCount;
+    }
+
+}
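
Written out, the two statistics accumulated above are the Poisson deviance (returned by getLogLikeliness as -2 log L) and Pearson's chi-squared, summed over the bins passed to accumulate:

    \[ -2\ln\lambda = 2\sum_i\left[f_i - h_i + h_i\ln\frac{h_i}{f_i}\right], \qquad \chi^2 = \sum_i\frac{(f_i - h_i)^2}{h_i} \]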

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/Histogram.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/Histogram.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/Histogram.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,26 @@
+package org.hps.users.spaul.bh;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.Scanner;
+
+public class Histogram {
+    int Nbins;
+    Histogram(String s) throws FileNotFoundException{
+        this(new File(s));
+    }
+    double[] h, minMass, maxMass;
+    public Histogram(File file) throws FileNotFoundException {
+        Scanner scanner = new Scanner(file);
+        Nbins = scanner.nextInt();
+        minMass = new double[Nbins];
+        maxMass = new double[Nbins];
+        h = new double[Nbins];
+        for(int i = 0; i< Nbins; i++){
+            minMass[i] = scanner.nextDouble();
+            maxMass[i] = scanner.nextDouble();
+            h[i] = scanner.nextDouble();
+        }
+    }
+
+}
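
The Histogram constructor reads a whitespace-separated text file: the first token is the number of bins, followed by one (minMass, maxMass, count) triple per bin. A tiny hypothetical example (made-up numbers):

    3
    0.010 0.020 1523
    0.020 0.030 1342
    0.030 0.040 1180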

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/IFitResult.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/IFitResult.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/IFitResult.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,6 @@
+package org.hps.users.spaul.bh;
+
+public interface IFitResult {
+    double get(double m);
+    int paramCount();
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/LogLikelihood.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/LogLikelihood.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/LogLikelihood.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,15 @@
+package org.hps.users.spaul.bh;
+
+import org.apache.commons.math3.fitting.WeightedObservedPoint;
+
+public class LogLikelihood {
+    static double getLogLikelihood(Histogram h, double min, double max, IFitResult result){
+        GoodnessOfFitAccumulator gofa = new GoodnessOfFitAccumulator(result.paramCount());
+        for(int j = 0; j< h.Nbins; j++){
+            if(h.minMass[j]>= min && h.maxMass[j]<= max){
+                gofa.accumulate(h.h[j], result.get((h.minMass[j]+h.maxMass[j])/2.));
+            }
+        }
+        return gofa.getLogLikeliness();
+    }
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/Polynomial.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/Polynomial.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/Polynomial.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,20 @@
+package org.hps.users.spaul.bh;
+
+public class Polynomial implements IFitResult {
+    double p[];
+    int k;
+    Polynomial(int k){
+        this.p = new double[k];
+        this.k = k;
+    }
+    public double get(double m){
+        double y = 0;
+        double powM = 1;
+        for(int i = 0; i<k; i++){
+            y += powM*p[i];
+            powM *= m;
+        }
+        return y;
+    }
+    public int paramCount(){return k;}
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/PolynomialPlusGaussian.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/PolynomialPlusGaussian.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/PolynomialPlusGaussian.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,38 @@
+package org.hps.users.spaul.bh;
+
+import java.util.Arrays;
+
+public class PolynomialPlusGaussian implements IFitResult{
+
+    /**
+     * The square root of two pi
+     */
+    static final double sqrt2pi = Math.sqrt(2*Math.PI);
+    
+    
+    //parameters of the fit
+    double p[];
+    int k;
+    double mean, sigma, N;
+    
+    PolynomialPlusGaussian(int k){
+        this.p = new double[k];
+        this.k = k;
+    }
+    public double get(double m){
+        double y = 0;
+        double powM = 1;
+        for(int i = 0; i<k; i++){
+            y += powM*p[i];
+            powM *= m;
+        }
+        return y + N*Math.exp(-Math.pow((m-mean)/sigma,2)/2)
+            /(sqrt2pi*sigma);
+    }
+    public int paramCount(){
+        return k + 3;
+    }
+    public String toString(){
+        return "p + g " + Arrays.toString(p) + " " + mean + " " + sigma + " " + N; 
+    }
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/WindowResult.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/WindowResult.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/WindowResult.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,14 @@
+package org.hps.users.spaul.bh;
+
+public class WindowResult {
+    double min, max;
+    
+    IFitResult poly;
+    IFitResult pplusg;
+    
+    double mean;
+    double yield;
+    double pull;
+    
+    double p_value;
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/WindowSet.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/WindowSet.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/WindowSet.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,40 @@
+package org.hps.users.spaul.bh;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.Scanner;
+
+public class WindowSet {
+    int N;
+    
+    double minMass[];
+    double maxMass[];
+    double resolution[];
+    
+    WindowSet(String s) throws FileNotFoundException{
+        this(new File(s));
+    }
+    public WindowSet(File file) throws FileNotFoundException {
+        Scanner scanner = new Scanner(file);
+        N = scanner.nextInt();
+        minMass = new double[N];
+        maxMass = new double[N];
+        resolution = new double[N];
+        for(int i = 0; i< N; i++){
+            minMass[i] = scanner.nextDouble();
+            maxMass[i] = scanner.nextDouble();
+            resolution[i] = scanner.nextDouble();
+        }
+        printToLatexTable();
+        
+    }
+    void printToLatexTable(){
+        System.out.println("window # & center & $\\sigma_M$  & bottom & top & width \\\\");
+        for(int i = 0; i<N; i++){
+            System.out.printf("%d & %.0f & %.0f & %.0f & %.0f & %.0f \\\\\n", 
+                    i+1, (minMass[i]+maxMass[i])/2., resolution[i],
+                    minMass[i], maxMass[i], maxMass[i]-minMass[i]);
+        }
+    }
+    
+}
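
Likewise, WindowSet reads the number of windows followed by one (minMass, maxMass, resolution) triple per window, matching what GenerateFakeStuff writes to windows.txt. A hypothetical example (made-up values):

    2
    0.020 0.040 0.002
    0.025 0.045 0.0022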

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/Cuts.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/Cuts.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/Cuts.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,64 @@
+package org.hps.users.spaul.bh.test;
+
+import org.lcsim.event.Vertex;
+
+public class Cuts {
+
+    static double xtarget = 0; //NABO
+    
+    static double xmax =2, ymax = 1, 
+                vchi2max = 10,
+                v0pzmin = 1.2, v0pzmax = 2.75,
+                minP = .1, maxP = 2.0,
+                tchi2max = 50;
+
+    public static boolean passesCuts(Vertex v) {
+        double x = v.getPosition().x();
+        double y = v.getPosition().y();
+        double z = v.getPosition().z();
+        if(Math.abs(x-xtarget)> xmax || Math.abs(y) > ymax)
+            return false;
+        System.out.println("passed position test");
+        if(v.getChi2()> vchi2max)
+            return false;
+        System.out.println("passed vertex chi2 test");
+        
+        double px = v.getAssociatedParticle().getMomentum().x();
+        double py = v.getAssociatedParticle().getMomentum().y();
+        double pz = v.getAssociatedParticle().getMomentum().z();
+        
+        if(pz < v0pzmin || pz > v0pzmax)
+            return false;
+
+        System.out.println("passed pztot test");
+        
+        double px1 = v.getAssociatedParticle().getParticles().get(0).getMomentum().x();
+        double py1 = v.getAssociatedParticle().getParticles().get(0).getMomentum().y();
+        double pz1 = v.getAssociatedParticle().getParticles().get(0).getMomentum().z();
+        
+        if(pz1 < minP || pz1> maxP)
+            return false;
+        System.out.println("passed pz1 test");
+        
+        double px2 = v.getAssociatedParticle().getParticles().get(1).getMomentum().x();
+        double py2 = v.getAssociatedParticle().getParticles().get(1).getMomentum().y();
+        double pz2 = v.getAssociatedParticle().getParticles().get(1).getMomentum().z();
+        
+        if(pz2 < minP || pz2> maxP)
+            return false;
+        System.out.println("passed pz2 test");
+        
+        if(v.getAssociatedParticle().getParticles().get(0).getTracks().size() == 0
+                || v.getAssociatedParticle().getParticles().get(1).getTracks().size() == 0)
+            return false;
+        
+        double chi2_1 = v.getAssociatedParticle().getParticles().get(0).getTracks().get(0).getChi2();
+
+        double chi2_2 = v.getAssociatedParticle().getParticles().get(1).getTracks().get(0).getChi2();
+        if(chi2_1 > tchi2max || chi2_2 > tchi2max)
+            return false;
+        
+        return true;
+    }
+
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/GenerateFakeResolutions.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/GenerateFakeResolutions.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/GenerateFakeResolutions.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,27 @@
+package org.hps.users.spaul.bh.test;
+
+import java.io.IOException;
+import java.util.Random;
+
+import hep.aida.*;
+
+public class GenerateFakeResolutions {
+    public static void main(String arg[]) throws IllegalArgumentException, IOException{
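+        // Writes one AIDA XML file per test mass, each containing a "recon mass"
+        // histogram of 10000 entries smeared with a 5% Gaussian resolution, for
+        // exercising the bump-hunt code on fake data (mass units as given,
+        // presumably MeV).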
+        IAnalysisFactory af = IAnalysisFactory.create();
+        Random r = new Random();
+        int masses[] = {25, 50, 75, 100, 150, 200, 250};
+        for(int m : masses){
+            String filename = m + "_fake.aida";
+            ITree tree = af.createTreeFactory().create(filename,"xml",false,true);
+            IHistogramFactory hf = af.createHistogramFactory(tree);
+            IHistogram1D h = hf.createHistogram1D("recon mass", 300, 0, 300);
+            double dmOverM = .05;  // 5% relative mass resolution
+            for(int i = 0; i< 10000; i++){
+                h.fill(m + m*dmOverM*r.nextGaussian());
+            }
+            tree.commit();
+            tree.close();
+            System.out.println("wrote file " + filename);
+        }
+    }
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/InvariantMassCalculator.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/InvariantMassCalculator.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/InvariantMassCalculator.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,87 @@
+package org.hps.users.spaul.bh.test;
+
+import hep.physics.vec.Hep3Vector;
+
+import java.util.List;
+
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.ReconstructedParticle;
+import org.lcsim.event.Vertex;
+
+public class InvariantMassCalculator {
+    static double Ebeam = 2.2;
+    
+    static String vertexCollectionName = "TargetConstrainedV0Vertices";
+    
+    static public double getMass(Vertex v){
+        return getMassRecon(v.getAssociatedParticle().getParticles().get(0),
+                v.getAssociatedParticle().getParticles().get(1));
+    }
+    // special case: the recoil particle is also reconstructed and is passed in as part3
+    static double getMassRecon(ReconstructedParticle part1, ReconstructedParticle part2, ReconstructedParticle part3) {
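+        // Uses the beam-energy constraint E1 + E2 + E3 = Ebeam to replace the
+        // larger of E1, E2 with Ebeam minus the other two, then returns the
+        // massless-pair invariant mass m = sqrt(2*E1*E2*(1 - cos theta)),
+        // written via sin^2(theta)/(1 + cos theta), which is algebraically the
+        // same as (1 - cos theta) but better behaved at small opening angles.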
+        
+        Hep3Vector p1 = part1.getMomentum();
+        Hep3Vector p2 = part2.getMomentum();
+        
+        double E1 = getCombinedEnergy(part1);
+        double E2 = getCombinedEnergy(part2);
+        double E3 = getCombinedEnergy(part3);
+        if(E1 > E2){
+            E1 = Ebeam - E2 - E3;
+        }
+        else if(E1 < E2){
+            E2 = Ebeam - E1 - E3;
+        }
+        
+        double denom = Math.sqrt(p1.magnitudeSquared()*p2.magnitudeSquared());
+        double cos = (p1.x()*p2.x()+p1.y()*p2.y()+p1.z()*p2.z())
+                        /denom;
+        
+        double sin2 = 
+                    (Math.pow(p1.y()*p2.z()-p1.z()*p2.y(),2) +
+                    Math.pow(p1.z()*p2.x()-p1.x()*p2.z(),2) +
+                    Math.pow(p1.x()*p2.y()-p1.y()*p2.x(),2)
+                    )/(denom*denom);
+        
+        //this is equivalent to 2*sin(theta/2)*sqrt(E1*E2);
+        return Math.sqrt(2*E1*E2*sin2/(1+cos)); 
+        
+        //return Math.sqrt(2*E1*E2*(1-cos)); 
+        
+    }
+    
+    
+    static double  getMassRecon(ReconstructedParticle part1, ReconstructedParticle part2) {
+        
+        Hep3Vector p1 = part1.getMomentum();
+        Hep3Vector p2 = part2.getMomentum();
+        
+        double E1 = getCombinedEnergy(part1);
+        double E2 = getCombinedEnergy(part2);
+        
+        double denom = Math.sqrt(p1.magnitudeSquared()*p2.magnitudeSquared());
+        double cos = (p1.x()*p2.x()+p1.y()*p2.y()+p1.z()*p2.z())
+                        /denom;
+        double sin2 = 
+                    (Math.pow(p1.y()*p2.z()-p1.z()*p2.y(),2) +
+                    Math.pow(p1.z()*p2.x()-p1.x()*p2.z(),2) +
+                    Math.pow(p1.x()*p2.y()-p1.y()*p2.x(),2)
+                    )/(denom*denom);
+        
+        //this is equivalent to 2*sin(theta/2)*sqrt(E1*E2);
+        //return Math.sqrt(2*E1*E2*sin2/(1+cos)); 
+        
+        return Math.sqrt(2*E1*E2*(1-cos)); 
+        
+    }
+    
+    static double getCombinedEnergy(ReconstructedParticle part){
+        // @TODO figure out how to combine the Ecal measurement with the 
+        // SVT measurement
+        return part.getMomentum().magnitude();
+    }
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/MakeMassHistogram.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/MakeMassHistogram.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/MakeMassHistogram.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,110 @@
+package org.hps.users.spaul.bh.test;
+
+import hep.aida.IAnalysisFactory;
+import hep.aida.IHistogram1D;
+import hep.aida.IHistogram2D;
+import hep.aida.IHistogramFactory;
+import hep.aida.ITree;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+
+import org.hps.conditions.ConditionsDriver;
+import org.hps.users.spaul.feecc.CustomBinning;
+import org.hps.users.spaul.feecc.RemoveDuplicateParticles;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.MCParticle;
+import org.lcsim.event.ReconstructedParticle;
+import org.lcsim.event.Vertex;
+import org.lcsim.lcio.LCIOReader;
+
+public class MakeMassHistogram {
+
+    static String vertexCollectionName = "TargetConstrainedV0Vertices";
+    static boolean display = false;
+    static CustomBinning cb;
+    public static void main(String arg[]) throws IllegalArgumentException, IOException{
+
+
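+        // args: <input LCIO file or directory of files> <output AIDA file> <run number>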
+        String input = arg[0];
+        String output = arg[1];
+
+        IAnalysisFactory af = IAnalysisFactory.create();
+        ITree tree = af.createTreeFactory().create(output,"xml",false,true);
+        IHistogramFactory hf = af.createHistogramFactory(tree);
+        setupHistograms(tree, hf);
+
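+        // Manually load detector conditions for the hard-coded 2016 detector,
+        // frozen at the run number given on the command line.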
+        ConditionsDriver hack = new ConditionsDriver();
+        //hack.setXmlConfigResource("/u/group/hps/hps_soft/detector-data/detectors/HPS-EngRun2015-Nominal-v3");
+        hack.setDetectorName("HPS-PhysicsRun2016-2pt2-v0");
+        hack.setFreeze(true);
+        hack.setRunNumber(Integer.parseInt(arg[2]));
+        hack.initialize();
+        File file = new File(input);
+        File inputs[] = new File[]{file};
+        if(file.isDirectory()){
+            inputs = file.listFiles();
+        }
+        for(int i = 0; i< inputs.length; i++){
+            LCIOReader reader = new LCIOReader(inputs[i]);
+            //reader.open(input);
+            //reader.
+            EventHeader event = reader.read();
+            int nEvents = 0;
+            try{
+                while(event != null){
+                    processEvent(event);
+
+                    //System.out.println(Q2);
+
+                    event = reader.read();
+                }
+            } catch (Exception e){
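+                // reader.read() presumably throws at end-of-file, so this also
+                // serves as the normal exit from the loop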
+                e.printStackTrace();
+            }
+        }
+        tree.commit();
+        tree.close();
+
+    }
+
+
+
+
+
+    static IHistogram1D h1;
+
+
+    static void setupHistograms(ITree tree, IHistogramFactory hf){
+        //h1 = hf.createHistogram2D("px\\/pz vs py\\/pz", 160, -.16, .24, 160, -.2, .2);
+
+        // 1 MeV. bins. 
+        double maxMass = .4;
+        double massBin = .001;
+        h1 = hf.createHistogram1D("recon mass", (int) (maxMass/massBin), 0, maxMass);
+
+
+    }
+
+    private static void processEvent(EventHeader event) {
+        if(event.getEventNumber() %10000 == 0)
+            System.out.println("event number " + event.getEventNumber());
+
+
+
+
+        List<ReconstructedParticle> particles = event.get(ReconstructedParticle.class, "FinalStateParticles");
+        particles = RemoveDuplicateParticles.removeDuplicateParticles(particles);
+        List<Vertex> vertices = event.get(Vertex.class, vertexCollectionName);
+
+        //System.out.println(mA);
+        for(Vertex v : vertices){
+
+            if(Cuts.passesCuts(v))
+                h1.fill(InvariantMassCalculator.getMass(v));
+        }
+        //h9_t.fill(event.get(MCParticle.class, "MCParticles").get(0).getEnergy());
+
+    }
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/PlotMassResolutionsMC.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/PlotMassResolutionsMC.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/PlotMassResolutionsMC.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,53 @@
+package org.hps.users.spaul.bh.test;
+
+import java.io.IOException;
+
+import org.hps.users.spaul.StyleUtil;
+
+import hep.aida.*;
+
+public class PlotMassResolutionsMC {
+    public static void main(String[] arg) throws IllegalArgumentException, IOException{
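+        // args: pairs of <true mass> <AIDA file containing a "recon mass" histogram>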
+        IAnalysisFactory af = IAnalysisFactory.create();
+        ITreeFactory tf  = af.createTreeFactory();
+        IPlotterFactory pf = af.createPlotterFactory();
+        
+        IPlotter p1 = pf.create();
+        p1.createRegions(1,2);
+        IPlotterRegion r1 = p1.region(0);
+        IPlotterRegion r2 = p1.region(1);
+        
+        IDataPointSet dps = af.createDataPointSetFactory(af.createTreeFactory().create()).create("mass resolutions", 2);
+        
+        for(int i = 0; i<arg.length/2; i++){
+            double mass = Double.parseDouble(arg[2*i]);
+            String file = arg[2*i+1];
+            ITree tree = tf.create(file);
+            IHistogram1D massRecon = (IHistogram1D) tree.find("recon mass");
+            
+            massRecon.setTitle(mass + " MeV");
+            massRecon.scale(1/massRecon.maxBinHeight());
+            double rms = massRecon.rms();
+            double n = massRecon.entries();
+            // standard error of the sample standard deviation for roughly Gaussian data,
+            // sigma/sqrt(2(n-1)); see http://web.eecs.umich.edu/~fessler/papers/files/tr/stderr.pdf
+            double drms = rms/Math.sqrt(2*(n-1));
+            r1.plot(massRecon);
+            
+            IDataPoint dp = dps.addPoint();
+            dp.coordinate(0).setValue(mass);
+            dp.coordinate(1).setValue(rms/mass);
+            dp.coordinate(1).setErrorPlus(drms/mass);
+            dp.coordinate(1).setErrorMinus(drms/mass);
+        }
+        r2.plot(dps);
+        
+        StyleUtil.stylize(r1, "Mass Resolution", "A' mass (GeV)", "arbitrary units");
+        StyleUtil.stylize(r2, "Mass Resolution", "A' mass (GeV)", "dm/m");
+        //r2.style().dataStyle().setParameter("showLineBetweenPoints", arg1)
+        r2.setYLimits(0, .2);
+        r1.setXLimits(0, .300);
+        r2.setXLimits(0, .300);
+        
+        p1.show();
+    }
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/RecoilCalculator.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/RecoilCalculator.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/bh/test/RecoilCalculator.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,78 @@
+package org.hps.users.spaul.bh.test;
+
+import java.util.Random;
+
+import hep.aida.IAnalysisFactory;
+import hep.aida.IHistogram1D;
+import hep.aida.IHistogramFactory;
+import hep.aida.IPlotter;
+import hep.aida.ITree;
+
+public class RecoilCalculator {
+    
+    // Product of the weighted-least-squares estimates of a and b, given
+    // measurements of a, b, and their sum with weights wa, wb, wsum
+    // (inverse variances). Derived in Mathematica and translated to Java,
+    // so it may still contain mistakes.
+    static public double getProductAB(double a, double b, double sum, double wa, double wb, double wsum){
+        return ((a*wsum*wa - b*wsum*wb + sum* wsum* wb + a*wa*wb)*(-a*wsum*wa + sum*wsum*wa + 
+                   b*wsum*wb + b*wa*wb))/Math.pow(wsum*(wa + wb) + wa*wb,2);
+    }
+    static public double getProductError(double a, double b, double sum, double wa, double wb, double wsum){
+        return Math.sqrt((2*b*sum*wsum*wb*(2*wsum*wsum*(wa - wb) + wsum*wa*(wa - wb) + wa*wa*wb) + 
+                   b*b*(wsum+ wa)*wb*wb*(4*wsum*wsum + wa*wb + wsum*(wa + wb)) + 
+                   a*a*wa*wa*(wsum + wb)*(4*wsum*wsum + wa*wb + wsum*(wa + wb)) + 
+                   sum*sum*wsum*wsum*(wsum*(wa - wb)*(wa-wb)+ wa*wb*(wa + wb)) - 
+                   2*a*wsum*wa*(sum*(2*wsum*wsum*(wa - wb) + wsum*(wa - wb)*wb - wa*wb*wb) + 
+                      b*wb*(4*wsum*wsum + 3*wa*wb + 3*wsum*(wa + wb))))
+                      /Math.pow(wa*wb + wsum*(wa + wb),3));
+        
+    }
+    
+    public static void main(String arg[]){
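+        // Toy Monte Carlo exercise: the two println calls compare each formula
+        // with its arguments swapped (a <-> b), which should give ~0 if the
+        // expressions are symmetric; the histograms then show the combined
+        // product and its predicted error for Gaussian-smeared inputs.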
+        IAnalysisFactory af = IAnalysisFactory.create();
+        ITree tree = af.createTreeFactory().create();
+        IHistogramFactory hf = af.createHistogramFactory(tree);
+
+        IHistogram1D h1 = hf.createHistogram1D("stuff", 100, .4, 1);
+        IPlotter p = af.createPlotterFactory().create();
+        p.region(0).plot(h1);
+        p.show();
+        
+        IHistogram1D h2 = hf.createHistogram1D("stuff2", 100, 0, .05);
+        IPlotter p2 = af.createPlotterFactory().create();
+        p2.region(0).plot(h2);
+        p2.show();
+        
+        double smean = 2;
+        double amean = 1.5;
+        double bmean = .5;
+        
+        double ds  = .003;
+        double da = .005;
+        double db = .07;
+        
+        System.out.println(getProductAB(amean, bmean, smean, 1/(da*da), 1/(db*db), 1/(ds*ds))
+                -getProductAB(bmean, amean, smean, 1/(db*db), 1/(da*da), 1/(ds*ds)))
+                ;
+        System.out.println(getProductError(amean, bmean, smean, 1/(da*da), 1/(db*db), 1/(ds*ds))
+                -getProductError(bmean, amean, smean, 1/(db*db), 1/(da*da), 1/(ds*ds)))
+                ;
+        Random r =new Random();
+        for(int i = 0; i< 10000; i++){
+            
+            double s = r.nextGaussian()*ds + smean;
+            double a = r.nextGaussian()*da + amean;
+            double b = r.nextGaussian()*db + bmean;
+            
+            double product = getProductAB(a, b, s, 1/(da*da), 1/(db*db), 1/(ds*ds));
+            double dproduct = getProductError(a,b,s, 1/(da*da), 1/(db*db), 1/(ds*ds));
+            
+            h1.fill(product);
+            h2.fill(dproduct);
+        }
+        
+        
+        
+    }
+    
+    
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/dimuon/DimuonFilterDriver.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/dimuon/DimuonFilterDriver.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/dimuon/DimuonFilterDriver.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,59 @@
+package org.hps.users.spaul.dimuon;
+
+import java.util.List;
+
+import org.hps.record.triggerbank.AbstractIntData;
+import org.hps.record.triggerbank.TIData;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.GenericObject;
+import org.lcsim.event.Track;
+import org.lcsim.util.Driver;
+
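+// Skips (via NextEventException) any event that is not a singles trigger, has
+// no EcalClusters collection, has fewer than two GBL tracks, or has no
+// positively charged track; surviving events pass through as dimuon candidates.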
+public class DimuonFilterDriver extends Driver{
+      public void process(EventHeader event) {
+
+            
+
+            // only keep singles triggers:
+            if (!event.hasCollection(GenericObject.class,"TriggerBank"))
+              throw new Driver.NextEventException();
+            boolean isSingles=false;
+            for (GenericObject gob : event.get(GenericObject.class,"TriggerBank"))
+            {
+              if (!(AbstractIntData.getTag(gob) == TIData.BANK_TAG)) continue;
+              TIData tid = new TIData(gob);
+              if (tid.isSingle0Trigger()  || tid.isSingle1Trigger())
+              {
+                isSingles=true;
+                break;
+              }
+            }
+            if (!isSingles) throw new Driver.NextEventException();
+
+            if (!event.hasCollection(Cluster.class, "EcalClusters"))
+              throw new Driver.NextEventException();
+            
+            
+
+            if (!event.hasCollection(Track.class, "GBLTracks"))
+                throw new Driver.NextEventException();
+            
+            List<Track> tracks = event.get(Track.class,"GBLTracks");
+            
+            if(tracks.size()<2)
+                throw new Driver.NextEventException();
+            
+            boolean foundPlus = false;
+            for(Track t : tracks){
+                if(t.getCharge()==1)
+                    foundPlus = true;
+            }
+            // presumably the intended final cut: require at least one
+            // positively charged track (the mu+ candidate)
+            if(!foundPlus)
+                throw new Driver.NextEventException();
+          }
+
+
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/pulser/UncorrClusterDriver.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/pulser/UncorrClusterDriver.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/pulser/UncorrClusterDriver.java	Thu Sep  1 04:27:30 2016
@@ -0,0 +1,92 @@
+package org.hps.users.spaul.pulser;
+
+import hep.aida.IHistogram2D;
+
+import java.util.List;
+
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.EventHeader;
+import org.lcsim.geometry.Detector;
+import org.lcsim.util.Driver;
+import org.lcsim.util.aida.AIDA;
+
+public class UncorrClusterDriver extends Driver{
+	AIDA aida = AIDA.defaultInstance();
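+	// Returns true if (x, y) lies within the Ecal fiducial region: inside the
+	// outer envelope in x and |y|, but outside the central gap region
+	// (presumably the beam gap) defined by x_gap_low/high and y_gap_high.
+	// Coordinates are assumed to be in mm.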
+	public static boolean fid_ECal(double x, double y)
+    {
+        y = Math.abs(y);
+
+        boolean in_fid = false;
+        double x_edge_low = -262.74;
+        double x_edge_high = 347.7;
+        double y_edge_low = 33.54;
+        double y_edge_high = 75.18;
+
+        double x_gap_low = -106.66;
+        double x_gap_high = 42.17;
+        double y_gap_high = 47.18;
+
+
+        if( x > x_edge_low && x < x_edge_high && y > y_edge_low && y < y_edge_high )
+        {
+            if( !(x > x_gap_low && x < x_gap_high && y > y_edge_low && y < y_gap_high) )
+            {
+                in_fid = true;
+            }
+        }
+
+        return in_fid;
+    }
+	 private void processUncorrectedCluster(Cluster c){
+	    	double time = 0;
+	    	double seedEnergy = -100;
+	    	if(Math.abs(c.getPosition()[1])< 47.18)
+        		return;
+	        for(CalorimeterHit hit : c.getCalorimeterHits()){
+	        	//if(Math.abs(hit.getPositionVec().y())< 47.18)
+	        		//continue;
+	            if(hit.getCorrectedEnergy() > seedEnergy){
+	                seedEnergy = hit.getCorrectedEnergy();
+	                time = hit.getTime();
+	                
+	            }
+	        }
+	        double clusterEnergy = c.getEnergy();
+	        //c.get
+	        int size = c.getSize();
+	        
+	        
+	        energyHist.fill(clusterEnergy, time);
+	        
+	        seedHist.fill(seedEnergy, time);
+	    	sizeHist.fill(size, time);
+	    	
+	    	if(clusterEnergy>.6 && seedEnergy>.4 && size>=3){
+	    		energyHistCut.fill(clusterEnergy, time);
+	    		seedHistCut.fill(seedEnergy, time);
+	    		sizeHistCut.fill(size, time);
+	    	}
+	 }
+	 
+	 IHistogram2D energyHist, seedHist, sizeHist, 
+	 		energyHistCut, seedHistCut, sizeHistCut;
+	 @Override
+	 public void process(EventHeader event){
+		 List<Cluster> clusters = event.get(Cluster.class, "EcalClusters");
+		 for(Cluster c : clusters)
+			 processUncorrectedCluster(c);
+	 }
+	 @Override
+	 public void detectorChanged(Detector d){
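+		 // x axis: cluster energy / seed energy (GeV) or cluster size;
+		 // y axis: seed-hit time (ns, assumed), over a 0-400 range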
+		 energyHist = aida.histogram2D("uncorr/energy vs time", 100, 0, 2, 100, 0, 400);
+		 seedHist = aida.histogram2D("uncorr/seed vs time", 100, 0, 2, 100, 0, 400);
+		 sizeHist = aida.histogram2D("uncorr/size vs time", 10, 0, 10, 100, 0, 400);
+		 energyHistCut = aida.histogram2D("uncorr/energy vs time with cuts", 100, 0, 2, 100, 0, 400);
+		 seedHistCut = aida.histogram2D("uncorr/seed vs time with cuts", 100, 0, 2, 100, 0, 400);
+		 sizeHistCut = aida.histogram2D("uncorr/size vs time with cuts", 10, 0, 10, 100, 0, 400);
+	
+	 }
+}