Numass underflow update

darksnake 2017-07-31 17:06:37 +03:00
parent 31f1fa0a71
commit 7ae632ed69
14 changed files with 145 additions and 101 deletions

View File

@@ -120,7 +120,7 @@ public class NumassDataUtils {
 //        Arrays.setAll(array, i -> Math.max(0, point.getSpectrum()[i] - reference.getSpectrum()[i]));
 //        return new NumassPointImpl(
 //                point.getVoltage(),
-//                point.getStartTime(),
+//                point.getTime(),
 //                point.getLength(),
 //                array
 //        );

View File

@@ -31,12 +31,14 @@ public abstract class AbstractAnalyzer implements NumassAnalyzer {
     }
 
     /**
-     * Return unsorted stream of events including events from frames
+     * Return unsorted stream of events including events from frames.
+     * In theory, events after processing could be unsorted due to a mixture of frames and events.
+     * In practice a block usually has either frames or events, but not both.
      *
      * @param block
      * @return
      */
-    public Stream<NumassEvent> getEventStream(NumassBlock block, Meta config) {
+    public Stream<NumassEvent> getEvents(NumassBlock block, Meta config) {
         if (block.getFrames().count() == 0) {
             return block.getEvents();
         } else if (getProcessor() == null) {
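Usage sketch (reviewer note, not part of the diff): getEvents() is the renamed generic entry point for all analyzers; for a block that carries no frames it is a cheap pass-through to block.getEvents(). The `block` variable is assumed to be any NumassBlock loaded elsewhere.

    // Sketch: count events in an event-only block through the renamed accessor
    def analyzer = new SimpleAnalyzer()
    long n = analyzer.getEvents(block, Meta.empty()).count()
    println "block contains $n events"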

View File

@@ -25,7 +25,7 @@ public class SimpleAnalyzer extends AbstractAnalyzer {
     }
     public Stream<NumassEvent> getEventStream(NumassBlock block, int loChannel, int upChannel) {
-        return getEventStream(block, Meta.empty()).filter(it -> it.getChanel() >= loChannel && it.getChanel() < upChannel);
+        return getEvents(block, Meta.empty()).filter(it -> it.getChanel() >= loChannel && it.getChanel() < upChannel);
     }
     @Override

View File

@@ -31,12 +31,12 @@ public class TimeAnalyzer extends AbstractAnalyzer {
     public Values analyze(NumassBlock block, Meta config) {
         int loChannel = config.getInt("window.lo", 0);
         int upChannel = config.getInt("window.up", Integer.MAX_VALUE);
-        long t0 = config.getValue("t0").longValue();
+        long t0 = getT0(block, config);
         AtomicLong totalN = new AtomicLong(0);
         AtomicLong totalT = new AtomicLong(0);
-        extendedEventStream(block, config)
+        getEventsWithDelay(block, config)
                 .filter(pair -> {
                     short channel = pair.getKey().getChanel();
                     return channel >= loChannel && channel < upChannel;
@@ -73,6 +73,10 @@ public class TimeAnalyzer extends AbstractAnalyzer {
         }
     }
+    private long getT0(NumassBlock block, Meta config) {
+        return config.getValue("t0",0).longValue();
+    }
     /**
      * The chain of event times in nanos
      *
@@ -80,14 +84,17 @@ public class TimeAnalyzer extends AbstractAnalyzer {
      * @param config
      * @return
      */
-    public Stream<Pair<NumassEvent, Long>> extendedEventStream(NumassBlock block, Meta config) {
-        long t0 = config.getValue("t0").longValue();
+    public Stream<Pair<NumassEvent, Long>> getEventsWithDelay(NumassBlock block, Meta config) {
+        long t0 = getT0(block, config);
         AtomicReference<NumassEvent> lastEvent = new AtomicReference<>(null);
-        return super.getEventStream(block, config) //using super implementation
-                .sorted()
-                .map(event -> {
+        Stream<NumassEvent> eventStream = super.getEvents(block, config); //using super implementation
+        if (config.getBoolean("sort", false)) {
+            eventStream = eventStream.sorted();
+        }
+        return eventStream.map(event -> {
             if (lastEvent.get() == null) {
                 lastEvent.set(event);
                 return new Pair<>(event, 0L);
@@ -104,8 +111,14 @@ public class TimeAnalyzer extends AbstractAnalyzer {
         }).filter(pair -> pair.getValue() >= t0);
     }
+    /**
+     * The filtered stream of events
+     * @param block
+     * @param config
+     * @return
+     */
     @Override
-    public Stream<NumassEvent> getEventStream(NumassBlock block, Meta config) {
-        return extendedEventStream(block,config).map(Pair::getKey);
+    public Stream<NumassEvent> getEvents(NumassBlock block, Meta config) {
+        return getEventsWithDelay(block, config).map(Pair::getKey);
     }
 }
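Usage sketch (not part of the diff): after this change t0 defaults to 0 when the option is missing, and sorting is opt-in via the new "sort" flag instead of unconditional. Here `point` is assumed to be a NumassBlock obtained elsewhere; t0 is in nanoseconds.

    def analyzer = new TimeAnalyzer()
    def meta = Grind.buildMeta(t0: 30000, sort: true, "window.lo": 400, "window.up": 2000)
    // pairs of (event, delay to the previous event in ns); delays below t0 are dropped
    analyzer.getEventsWithDelay(point, meta).limit(10).forEach {
        println "channel ${it.key.chanel}, delay ${it.value} ns"
    }
    // analyze() now reads the same t0 through the private getT0() helper
    def result = analyzer.analyze(point, meta)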

View File

@@ -32,11 +32,11 @@ public class MetaBlock implements NumassBlock {
     @Override
     public Stream<NumassEvent> getEvents() {
-        return blocks.stream().flatMap(NumassBlock::getEvents);
+        return blocks.stream().flatMap(NumassBlock::getEvents).sorted(Comparator.comparing(NumassEvent::getTime));
     }
     @Override
     public Stream<NumassFrame> getFrames() {
-        return blocks.stream().flatMap(NumassBlock::getFrames);
+        return blocks.stream().flatMap(NumassBlock::getFrames).sorted(Comparator.comparing(NumassFrame::getTime));
     }
 }
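Reviewer note (sketch, not in the diff): merged blocks now emit time-ordered streams, so callers that glue several points together, as the scripts below do with new MetaBlock(points), no longer have to sort by hand.

    // Sketch: events from all sub-blocks arrive ordered by absolute time
    def merged = new MetaBlock(points)
    merged.getEvents().findFirst().ifPresent {
        println "earliest event at ${it.time}"
    }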

View File

@@ -74,7 +74,7 @@ public interface NumassAnalyzer {
     String COUNT_KEY = "count";
     String LENGTH_KEY = "length";
     String COUNT_RATE_KEY = "cr";
-    String COUNT_RATE_ERROR_KEY = "cr.err";
+    String COUNT_RATE_ERROR_KEY = "crErr";
     /**
      * Perform analysis on block. The values for count rate, its error and point length in nanos must
@@ -91,7 +91,7 @@ public interface NumassAnalyzer {
      * @param block
      * @return
      */
-    Stream<NumassEvent> getEventStream(NumassBlock block, Meta config);
+    Stream<NumassEvent> getEvents(NumassBlock block, Meta config);
     /**
      * Analyze the whole set and return the results as a table
@@ -121,7 +121,7 @@ public interface NumassAnalyzer {
         //optimized for fastest computation
         //TODO requires additional performance optimization
         AtomicLong[] spectrum = new AtomicLong[MAX_CHANNEL];
-        getEventStream(block, config).forEach(event -> {
+        getEvents(block, config).forEach(event -> {
             if (spectrum[event.getChanel()] == null) {
                 spectrum[event.getChanel()] = new AtomicLong(1);
             } else {
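Sketch (not part of the diff): the error column key changed from "cr.err" to "crErr", so any code that still uses the string literal breaks silently; reading results through the interface constants survives such renames. `analyzer`, `point` and `meta` are assumed from context.

    def result = analyzer.analyze(point, meta)
    def cr = result.getDouble(NumassAnalyzer.COUNT_RATE_KEY)          // "cr"
    def crErr = result.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY) // "crErr", formerly "cr.err"
    println "cr = $cr +/- $crErr"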

View File

@@ -32,7 +32,7 @@ public class NumassFrame {
         this.tickSize = tickSize;
     }
-    public Instant getStartTime() {
+    public Instant getTime() {
         return startTime;
     }

View File

@@ -110,7 +110,6 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
         }
     }
-
     /**
      * The name of informational meta file in numass data directory
      */
@@ -222,7 +221,7 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
     @Override
     public Instant getStartTime() {
-        if (meta.hasValue("start_time")) {
+        if (meta().hasValue("start_time")) {
             return meta().getValue("start_time").timeValue();
         } else {
             return NumassSet.super.getStartTime();

View File

@@ -16,6 +16,10 @@ dependencies {
     compile "hep.dataforge:grind-terminal" //project(':dataforge-grind:grind-terminal')
 }
+idea{
+}
 //task listActions(dependsOn: classes, type: JavaExec) {
 //    main "inr.numass.LaunchGrindShell"
 //    args "-lc"

View File

@@ -8,7 +8,7 @@ import hep.dataforge.values.Values
 import inr.numass.data.analyzers.TimeAnalyzer
 import inr.numass.data.api.NumassBlock
-import java.util.stream.DoubleStream
+import java.util.stream.LongStream
 /**
  * Created by darksnake on 27-Jun-17.
@@ -20,23 +20,28 @@ class PointAnalyzer {
     static Histogram histogram(NumassBlock point, int loChannel = 0, int upChannel = 10000, double binSize = 0.5, int binNum = 500) {
         return UnivariateHistogram.buildUniform(0d, binSize * binNum, binSize)
-                .fill(analyzer.extendedEventStream(point, Grind.buildMeta("window.lo": loChannel, "window.up": upChannel)).mapToDouble {it.value / 1000 as double})
+                .fill(analyzer.getEventsWithDelay(point, Grind.buildMeta("window.lo": loChannel, "window.up": upChannel))
+                .mapToDouble {
+                    it.value / 1000 as double
+                })
     }
-    static Histogram histogram(DoubleStream stream, double binSize = 0.5, int binNum = 500) {
-        return UnivariateHistogram.buildUniform(0d, binSize * binNum, binSize).fill(stream)
+    static Histogram histogram(LongStream stream, double binSize = 0.5, int binNum = 500) {
+        return UnivariateHistogram.buildUniform(0d, binSize * binNum, binSize).fill(stream.mapToDouble {
+            it / 1000 as double
+        })
     }
     static Values analyze(Map values = Collections.emptyMap(), NumassBlock block, Closure metaClosure = null) {
         return analyzer.analyze(block, Grind.buildMeta(values, metaClosure))
     }
-    static class Result {
-        double cr;
-        double crErr;
-        long num;
-        double t0;
-        int loChannel;
-        int upChannel;
-    }
+    //
+//    static class Result {
+//        double cr;
+//        double crErr;
+//        long num;
+//        double t0;
+//        int loChannel;
+//        int upChannel;
+//    }
 }
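Usage sketch (not in the diff): the stream overload now takes the raw inter-event delays in nanoseconds as a LongStream and performs the ns-to-us conversion itself, which is exactly what the updated script below relies on. `point` is assumed loaded elsewhere.

    def delays = new TimeAnalyzer()
            .getEventsWithDelay(point, Grind.buildMeta("window.lo": 400, "window.up": 2000))
            .mapToLong { it.value } // delay to the previous event, in nanoseconds
    // after the internal /1000 the bin size is in microseconds: 1 us bins, 500 bins
    def histogram = PointAnalyzer.histogram(delays, 1, 500).asTable()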

View File

@@ -42,11 +42,11 @@ new GrindShell(ctx).eval {
     List<NumassPoint> points = loaders.collect { loader -> loader.optPoint(hv).get() }
     def loChannel = 400;
-    def upChannel = 800;
-    def chain = new TimeAnalyzer().extendedEventStream(new MetaBlock(points), Grind.buildMeta("window.lo": loChannel, "window.up": upChannel))
-    def histogram = PointAnalyzer.histogram(chain, 5e-6, 500).asTable();
+    def upChannel = 2000;
+    def chain = new TimeAnalyzer().getEventsWithDelay(new MetaBlock(points), Grind.buildMeta("window.lo": loChannel, "window.up": upChannel)).mapToLong{it.value}
+    def histogram = PointAnalyzer.histogram(chain, 1, 500).asTable();
     println "finished histogram calculation..."

View File

@@ -8,6 +8,7 @@ import hep.dataforge.plots.fx.FXPlotManager
 import hep.dataforge.tables.ValueMap
 import inr.numass.NumassPlugin
 import inr.numass.data.PointAnalyzer
+import inr.numass.data.api.NumassAnalyzer
 import inr.numass.data.api.NumassPoint
 import inr.numass.data.api.NumassSet
 import inr.numass.data.storage.NumassStorage
@@ -29,8 +30,8 @@ new GrindShell(ctx).eval {
     NumassStorage storage = NumassStorageFactory.buildLocal(rootDir);
-    def set = "set_1"
-    def hv = 14500;
+    def set = "set_43"
+    def hv = 16000;
     def loader = storage.provide("loader::$set", NumassSet.class).get();
     def point = loader.provide("$hv", NumassPoint.class).get()
@@ -56,17 +57,9 @@ new GrindShell(ctx).eval {
     def t0 = (1..150).collect { 500 * it }
-//    point.blocks.eachWithIndex { block, index ->
-//        def statPlotPoints = t0.collect {
-//            def result = PointAnalyzer.analyze(block, t0: it, "window.lo": loChannel, "window.up": upChannel)
-//            ValueMap.ofMap("x": it / 1000, "y": result.getDouble("cr"), "y.err": result.getDouble("cr.err"));
-//        }
-//        plot.plot(name: index, frame: "stat-method", showLine: true, statPlotPoints)
-//    }
     def statPlotPoints = t0.collect {
         def result = PointAnalyzer.analyze(point, t0: it, "window.lo": loChannel, "window.up": upChannel)
-        ValueMap.ofMap("x": it / 1000, "y": result.getDouble("cr"), "y.err": result.getDouble("cr.err"));
+        ValueMap.ofMap("x": it / 1000, "y": result.getDouble("cr"), "y.err": result.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY));
     }
     plot.plot(name: "total", frame: "stat-method", showLine: true, statPlotPoints)

View File

@@ -8,6 +8,7 @@ import hep.dataforge.plots.fx.FXPlotManager
 import hep.dataforge.tables.ValueMap
 import inr.numass.NumassPlugin
 import inr.numass.data.PointAnalyzer
+import inr.numass.data.api.NumassAnalyzer
 import inr.numass.data.api.NumassPoint
 import inr.numass.data.storage.ProtoNumassPoint
@@ -19,7 +20,7 @@ ctx.pluginManager().load(NumassPlugin.class)
 new GrindShell(ctx).eval {
     PlotHelper plot = plots
-    NumassPoint point = ProtoNumassPoint.readFile(Paths.get("D:\\Work\\Numass\\data\\test\\40_kHz_5s.df"))
+    NumassPoint point = ProtoNumassPoint.readFile(Paths.get("D:\\Work\\Numass\\data\\2017_05\\Fill_3_events\\set_33\\p0(30s)(HV1=16000).df"))
     def loChannel = 0;
     def upChannel = 10000;
@@ -41,19 +42,12 @@ new GrindShell(ctx).eval {
     println "The expected count rate for 30 us delay is $trueCR"
-    def t0 = (1..150).collect { 500 * it }
+    def t0 = (1..150).collect { 1000 * it }
-//    point.blocks.eachWithIndex { block, index ->
-//        def statPlotPoints = t0.collect {
-//            def result = PointAnalyzer.analyze(block, t0: it, "window.lo": loChannel, "window.up": upChannel)
-//            ValueMap.ofMap("x": it / 1000, "y": result.getDouble("cr"), "y.err": result.getDouble("cr.err"));
-//        }
-//        plot.plot(name: index, frame: "stat-method", showLine: true, statPlotPoints)
-//    }
     def statPlotPoints = t0.collect {
         def result = PointAnalyzer.analyze(point, t0: it, "window.lo": loChannel, "window.up": upChannel)
-        ValueMap.ofMap("x": it / 1000, "y": result.getDouble("cr"), "y.err": result.getDouble("cr.err"));
+        ValueMap.ofMap("x": it / 1000, "y": result.getDouble("cr"), "y.err": result.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY));
     }
     plot.plot(name: "total", frame: "stat-method", showLine: true, thickness: 4, statPlotPoints)
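Background for the t0 scan in these two scripts (standard Poisson reasoning, not stated in the commit): for a source with true rate lambda the inter-event delays are exponential, so the fraction of events surviving a delay cut t0 is

    P(delay >= t0) = exp(-lambda * t0)

Plotting the corrected count rate against t0 (the x axis is in microseconds, hence it / 1000) should therefore flatten into a plateau at the true rate once the cut clears the pile-up region, which is what the "stat-method" frames probe.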

View File

@@ -6,10 +6,13 @@
 package inr.numass.scripts.underflow
+import hep.dataforge.cache.CachePlugin
 import hep.dataforge.context.Context
 import hep.dataforge.context.Global
-import hep.dataforge.grind.Grind
+import hep.dataforge.data.DataNode
+import hep.dataforge.data.DataSet
 import hep.dataforge.grind.GrindShell
+import hep.dataforge.grind.actions.GrindPipe
 import hep.dataforge.grind.helpers.PlotHelper
 import hep.dataforge.io.ColumnedDataWriter
 import hep.dataforge.meta.Meta
@@ -25,59 +28,84 @@ import inr.numass.data.NumassDataUtils
 import inr.numass.data.analyzers.TimeAnalyzer
 import inr.numass.data.api.NumassAnalyzer
 import inr.numass.data.api.NumassPoint
+import inr.numass.data.api.NumassSet
 import inr.numass.data.api.SimpleNumassPoint
 import inr.numass.data.storage.NumassStorage
 import inr.numass.data.storage.NumassStorageFactory
 import java.util.stream.Collectors
+import static hep.dataforge.grind.Grind.buildMeta
 import static inr.numass.data.api.NumassAnalyzer.CHANNEL_KEY
 import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY
 Context ctx = Global.instance()
 ctx.pluginManager().load(FXPlotManager)
 ctx.pluginManager().load(NumassPlugin.class)
+ctx.pluginManager().load(CachePlugin.class)
+Meta meta = buildMeta {
+    data(dir: "D:\\Work\\Numass\\data\\2017_05\\Fill_2", mask: "set_.{1,3}")
+    generate(t0: 3e4, sort: true)
+    subtract(reference: 18600)
+    fit(xlow: 450, xHigh: 700, upper: 3100, binning: 20)
+}
 new GrindShell(ctx).eval {
     //Defining root directory
-    File dataDirectory = new File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")
+    File dataDirectory = new File(meta.getString("data.dir"))
     //creating storage instance
     NumassStorage storage = NumassStorageFactory.buildLocal(dataDirectory);
     //Reading points
-    Map<Double, List<NumassPoint>> allPoints = StorageUtils
+    //Free operation. No reading done
+    List<NumassSet> sets = StorageUtils
             .loaderStream(storage)
-            .filter { it.key.matches("set_.{1,3}") }
+            .filter { it.key.matches(meta.getString("data.mask")) }
             .map {
                 println "loading ${it.key}"
-                it.value
-            }.flatMap { it.points }
-            .collect(Collectors.groupingBy { it.voltage })
-    Meta analyzerMeta = Grind.buildMeta(t0: 3e4)
-    NumassAnalyzer analyzer = new TimeAnalyzer()
-    //creating spectra
-    Map spectra = allPoints.collectEntries {
-        def point = new SimpleNumassPoint(it.key, it.value)
-        println "generating spectrum for ${point.voltage}"
-        return [(point.voltage): analyzer.getSpectrum(point, analyzerMeta)]
-    }
-    def refereceVoltage = 18600d
+                return it.value
+            }.collect(Collectors.toList());
+    NumassAnalyzer analyzer = new TimeAnalyzer();
+    def dataBuilder = DataSet.builder(NumassPoint);
+    sets.sort { it.startTime }
+            .collectMany { it.points.collect() }
+            .groupBy { it.voltage }
+            .each { key, value ->
+                def point = new SimpleNumassPoint(key as double, value as List<NumassPoint>)
+                String name = (key as Integer).toString()
+                dataBuilder.putStatic(name, point, buildMeta(voltage: key));
+            }
+    DataNode<NumassPoint> data = dataBuilder.build()
+    def generate = GrindPipe.<NumassPoint, Table> build(name: "generate") {
+        return analyzer.getSpectrum(delegate.input as NumassPoint, delegate.meta)
+    }
+    DataNode<Table> spectra = generate.run(context, data, meta.getMeta("generate"));
+    spectra = context.getFeature(CachePlugin).cacheNode("underflow", meta, spectra)
     //subtracting reference point
-    if (refereceVoltage) {
-        println "subtracting reference point ${refereceVoltage}"
-        def referencePoint = spectra[refereceVoltage]
-        spectra = spectra.findAll { it.key != refereceVoltage }.collectEntries {
-            return [(it.key): NumassDataUtils.subtractSpectrum(it.getValue() as Table, referencePoint as Table)]
-        }
-    }
+    Map<Double, Table> spectraMap
+    if (meta.hasValue("subtract.reference")) {
+        String referenceVoltage = meta["subtract.reference"].stringValue()
+        println "subtracting reference point ${referenceVoltage}"
+        def referencePoint = spectra.compute(referenceVoltage)
+        spectraMap = spectra
+                .findAll { it.name != referenceVoltage }
+                .collectEntries {
+                    return [(it.meta["voltage"].doubleValue()): NumassDataUtils.subtractSpectrum(it.get(), referencePoint)]
+                }
+    } else {
+        spectraMap = spectra.collectEntries { return [(it.meta["voltage"].doubleValue()): it.get()] }
+    }
     //Showing selected points
@@ -97,22 +125,28 @@ new GrindShell(ctx).eval {
         //configuring and plotting
         plotGroup.configure(showLine: true, showSymbol: false, showErrors: false, connectionType: "step")
         def frame = (plots as PlotHelper).getManager().getPlotFrame("Spectra")
-        frame.configure("yAxis.type": "log")
+        frame.configureValue("yAxis.type", "log")
         frame.addAll(plotGroup)
     }
-    showPoints(spectra.findAll { it.key in [16200d, 16400d, 16800d, 17000d, 17200d, 17700d] })
+    showPoints(spectraMap.findAll { it.key in [16200d, 16400d, 16800d, 17000d, 17200d, 17700d] })
+    println()
     Table correctionTable = TableTransform.filter(
-            UnderflowFitter.fitAllPoints(spectra, 450, 700, 3100, 20),
+            UnderflowFitter.fitAllPoints(
+                    spectraMap,
+                    meta["fit.xlow"].intValue(),
+                    meta["fit.xHigh"].intValue(),
+                    meta["fit.upper"].intValue(),
+                    meta["fit.binning"].intValue()
+            ),
             "correction",
             0,
-            2)
+            2
+    )
     ColumnedDataWriter.writeTable(System.out, correctionTable, "underflow parameters")
     (plots as PlotHelper).plot(correctionTable, name: "correction", frame: "Correction") {
-        adapter("x.value":"U", "y.value":"correction")
+        adapter("x.value": "U", "y.value": "correction")
     }
 }
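Reviewer note (sketch, values illustrative, not from the commit): the whole script is now driven by the single meta block at the top. The data section selects the storage directory and set mask, generate configures the analyzer (and presumably keys the cacheNode cache, so edits to it invalidate cached spectra), subtract is optional (omitting it takes the else branch and keeps the raw spectra), and fit parametrizes UnderflowFitter. A different run becomes a small edit of that block:

    Meta altMeta = buildMeta {
        data(dir: "D:\\Work\\Numass\\data\\2017_05\\Fill_2", mask: "set_1.")
        generate(t0: 2e4, sort: true)
        // no subtract section: reference subtraction is skipped
        fit(xlow: 400, xHigh: 700, upper: 3100, binning: 16)
    }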