Numass underflow
This commit is contained in:
parent 8dd7870220, commit 99baa4e3eb
@@ -45,6 +45,7 @@ public interface NumassAnalyzer {
                 .addNumber(CHANNEL_KEY, X_VALUE_KEY)
                 .addNumber(COUNT_KEY, Y_VALUE_KEY)
                 .addNumber(COUNT_RATE_KEY)
+                .addNumber(COUNT_RATE_ERROR_KEY)
                 .addNumber("binSize");
         ListTable.Builder builder = new ListTable.Builder(format);
         int loChannel = spectrum.getColumn(CHANNEL_KEY).stream().mapToInt(Value::intValue).min().orElse(0);
@@ -53,6 +54,7 @@ public interface NumassAnalyzer {
         for (int chan = loChannel; chan < upChannel - binSize; chan += binSize) {
             AtomicLong count = new AtomicLong(0);
             AtomicReference<Double> countRate = new AtomicReference<>(0d);
+            AtomicReference<Double> countRateDispersion = new AtomicReference<>(0d);

             int binLo = chan;
             int binUp = chan + binSize;
@@ -63,9 +65,10 @@ public interface NumassAnalyzer {
             }).forEach(row -> {
                 count.addAndGet(row.getValue(COUNT_KEY, 0).longValue());
                 countRate.accumulateAndGet(row.getDouble(COUNT_RATE_KEY, 0), (d1, d2) -> d1 + d2);
+                countRateDispersion.accumulateAndGet(row.getDouble(COUNT_RATE_ERROR_KEY, 0), (d1, d2) -> d1 + d2);
             });
             int bin = Math.min(binSize, upChannel - chan);
-            builder.row((double) chan + (double) bin / 2d, count.get(), countRate.get(), bin);
+            builder.row((double) chan + (double) bin / 2d, count.get(), countRate.get(), Math.sqrt(countRateDispersion.get()), bin);
         }
         return builder.build();
     }
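Editor's note: the change above threads a count-rate error through spectrumWithBinning — per-bin dispersions are accumulated and their square root is written next to the rate. A minimal standalone sketch of the same binning idea, using hypothetical plain arrays instead of the dataforge Table API. One caveat: the sketch sums squared per-channel errors (quadrature), the standard propagation rule for independent uncertainties; the diff accumulates COUNT_RATE_ERROR_KEY values directly before taking the square root, which matches only if that column already stores squared errors.

    import java.util.Locale;

    // Standalone sketch of binned-spectrum building with error propagation.
    // All names here are hypothetical; the real code operates on dataforge Tables.
    public class BinningSketch {
        public static void main(String[] args) {
            double[] rate = {5.1, 4.9, 5.3, 5.0, 4.8, 5.2};    // counts/s per channel
            double[] rateErr = {0.3, 0.3, 0.3, 0.3, 0.3, 0.3}; // 1-sigma per channel
            int binSize = 3;
            for (int lo = 0; lo < rate.length; lo += binSize) {
                int up = Math.min(lo + binSize, rate.length);
                double binRate = 0;
                double dispersion = 0; // accumulated variance of the bin
                for (int ch = lo; ch < up; ch++) {
                    binRate += rate[ch];
                    dispersion += rateErr[ch] * rateErr[ch]; // quadrature sum
                }
                double center = lo + (up - lo) / 2.0; // bin center, as in the patched row
                System.out.printf(Locale.ROOT, "bin @ %.1f: %.2f +- %.2f%n",
                        center, binRate, Math.sqrt(dispersion));
            }
        }
    }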
@@ -0,0 +1,39 @@
package inr.numass.scripts.underflow

import hep.dataforge.cache.CachePlugin
import hep.dataforge.context.Context
import hep.dataforge.context.Global
import hep.dataforge.data.DataNode
import hep.dataforge.grind.GrindShell
import hep.dataforge.io.ColumnedDataWriter
import hep.dataforge.meta.Meta
import hep.dataforge.plots.fx.FXPlotManager
import hep.dataforge.tables.Table
import inr.numass.NumassPlugin
import inr.numass.data.NumassDataUtils
import inr.numass.data.api.NumassAnalyzer

import static hep.dataforge.grind.Grind.buildMeta

Context ctx = Global.instance()
ctx.pluginManager().load(FXPlotManager)
ctx.pluginManager().load(NumassPlugin.class)
ctx.pluginManager().load(CachePlugin.class)

Meta meta = buildMeta {
    data(dir: "D:\\Work\\Numass\\data\\2017_05\\Fill_2", mask: "set_.{1,3}")
    generate(t0: 3e4, sort: true)
}

def shell = new GrindShell(ctx);

DataNode<Table> spectra = UnderflowUtils.getSpectraMap(shell, meta);

shell.eval {
    Table p17100 = NumassAnalyzer.spectrumWithBinning(spectra.optData("17100").get().get(), 20);
    Table p17000 = NumassAnalyzer.spectrumWithBinning(spectra.optData("17000").get().get(), 20);

    Table subtract = NumassDataUtils.subtractSpectrum(p17100, p17000);

    ColumnedDataWriter.writeTable(System.out, subtract, "Response function")
}
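Editor's note: the script bins the 17100 V and 17000 V spectra and subtracts the latter as a reference. Assuming NumassDataUtils.subtractSpectrum performs a channel-wise subtraction of count rates (its internals are not shown in this diff), the operation can be sketched in plain Java, with the independent errors of the two spectra combined in quadrature:

    import java.util.Locale;

    // Hypothetical channel-wise subtraction of two equally binned spectra.
    public class SubtractSketch {
        public static void main(String[] args) {
            double[] rateHi = {5.0, 4.2, 3.1}; // e.g. the 17100 V spectrum
            double[] errHi = {0.5, 0.4, 0.3};
            double[] rateLo = {1.0, 0.9, 0.8}; // e.g. the 17000 V reference
            double[] errLo = {0.2, 0.2, 0.1};
            for (int ch = 0; ch < rateHi.length; ch++) {
                double diff = rateHi[ch] - rateLo[ch];
                // independent errors add in quadrature
                double err = Math.sqrt(errHi[ch] * errHi[ch] + errLo[ch] * errLo[ch]);
                System.out.printf(Locale.ROOT, "channel %d: %.2f +- %.2f%n", ch, diff, err);
            }
        }
    }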

@@ -10,31 +10,20 @@ import hep.dataforge.cache.CachePlugin
import hep.dataforge.context.Context
import hep.dataforge.context.Global
import hep.dataforge.data.DataNode
import hep.dataforge.data.DataSet
import hep.dataforge.grind.GrindShell
import hep.dataforge.grind.actions.GrindPipe
import hep.dataforge.grind.helpers.PlotHelper
import hep.dataforge.meta.Meta
import hep.dataforge.plots.data.PlottableData
import hep.dataforge.plots.data.PlottableGroup
import hep.dataforge.plots.fx.FXPlotManager
import hep.dataforge.storage.commons.StorageUtils
import hep.dataforge.tables.Table
import hep.dataforge.tables.TableTransform
import hep.dataforge.tables.XYAdapter
import inr.numass.NumassPlugin
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.api.SimpleNumassPoint
import inr.numass.data.storage.NumassStorage
import inr.numass.data.storage.NumassStorageFactory
import javafx.application.Platform

import java.util.stream.Collectors

import static hep.dataforge.grind.Grind.buildMeta
import static inr.numass.data.api.NumassAnalyzer.CHANNEL_KEY
import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY
@@ -52,50 +41,11 @@ Meta meta = buildMeta {
 }


-new GrindShell(ctx).eval {
+def shell = new GrindShell(ctx);

-    //Defining root directory
-    File dataDirectory = new File(meta.getString("data.dir"))
+DataNode<Table> spectra = UnderflowUtils.getSpectraMap(shell, meta);

-    //creating storage instance
-
-    NumassStorage storage = NumassStorageFactory.buildLocal(dataDirectory);
-
-    //Reading points
-    //Free operation. No reading done
-    List<NumassSet> sets = StorageUtils
-            .loaderStream(storage)
-            .filter { it.key.matches(meta.getString("data.mask")) }
-            .map {
-                println "loading ${it.key}"
-                return it.value
-            }.collect(Collectors.toList());
-
-    NumassAnalyzer analyzer = new TimeAnalyzer();
-
-    def dataBuilder = DataSet.builder(NumassPoint);
-
-    sets.sort { it.startTime }
-            .collectMany { it.points.collect() }
-            .groupBy { it.voltage }
-            .each { key, value ->
-                def point = new SimpleNumassPoint(key as double, value as List<NumassPoint>)
-                String name = (key as Integer).toString()
-                dataBuilder.putStatic(name, point, buildMeta(voltage: key));
-            }
-
-    DataNode<NumassPoint> data = dataBuilder.build()
-
-    def generate = GrindPipe.<NumassPoint, Table> build(name: "generate") {
-        return analyzer.getSpectrum(delegate.input as NumassPoint, delegate.meta)
-    }
-
-    DataNode<Table> spectra = generate.run(context, data, meta.getMeta("generate"));
-    Meta id = buildMeta {
-        put meta.getMeta("data")
-        put meta.getMeta("generate")
-    }
-    spectra = context.getFeature(CachePlugin).cacheNode("underflow", id, spectra)
+shell.eval {

     //subtracting reference point
     Map<Double, Table> spectraMap
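Editor's note: the net effect of this hunk is that the storage traversal, spectrum generation, and caching move into UnderflowUtils.getSpectraMap, keyed by a Meta id assembled from the data and generate sections so that a rerun with identical configuration hits the cache. That keying idea, sketched generically in Java (a hypothetical stand-in for CachePlugin.cacheNode, not its real API):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.function.Supplier;

    // Generic sketch of configuration-keyed caching: the key is derived from the
    // configuration that influences the result, so identical reruns reuse the
    // cached value. All names are hypothetical.
    public class MetaCacheSketch {
        private final Map<String, Object> cache = new ConcurrentHashMap<>();

        @SuppressWarnings("unchecked")
        public <T> T cached(String name, String configId, Supplier<T> compute) {
            return (T) cache.computeIfAbsent(name + "|" + configId, k -> compute.get());
        }

        public static void main(String[] args) {
            MetaCacheSketch cache = new MetaCacheSketch();
            String id = "data.dir=Fill_2;generate.t0=3e4"; // stands in for the Meta id
            double first = cache.cached("underflow", id, () -> {
                System.out.println("computing...");
                return 42.0;
            });
            double second = cache.cached("underflow", id, () -> 0.0); // cache hit
            System.out.println(first + " " + second);
        }
    }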
@@ -0,0 +1,71 @@
package inr.numass.scripts.underflow

import hep.dataforge.cache.CachePlugin
import hep.dataforge.data.DataNode
import hep.dataforge.data.DataSet
import hep.dataforge.grind.GrindShell
import hep.dataforge.grind.actions.GrindPipe
import hep.dataforge.meta.Meta
import hep.dataforge.storage.commons.StorageUtils
import hep.dataforge.tables.Table
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.api.SimpleNumassPoint
import inr.numass.data.storage.NumassStorage
import inr.numass.data.storage.NumassStorageFactory

import java.util.stream.Collectors

import static hep.dataforge.grind.Grind.buildMeta

class UnderflowUtils {

    static DataNode<Table> getSpectraMap(GrindShell shell, Meta meta) {
        return shell.eval {
            //Defining root directory
            File dataDirectory = new File(meta.getString("data.dir"))

            //creating storage instance

            NumassStorage storage = NumassStorageFactory.buildLocal(dataDirectory);

            //Reading points
            //Free operation. No reading done
            List<NumassSet> sets = StorageUtils
                    .loaderStream(storage)
                    .filter { it.key.matches(meta.getString("data.mask")) }
                    .map {
                        println "loading ${it.key}"
                        return it.value
                    }.collect(Collectors.toList());

            NumassAnalyzer analyzer = new TimeAnalyzer();

            def dataBuilder = DataSet.builder(NumassPoint);

            sets.sort { it.startTime }
                    .collectMany { NumassSet set -> set.points.collect() }
                    .groupBy { NumassPoint point -> point.voltage }
                    .each { key, value ->
                        def point = new SimpleNumassPoint(key as double, value as List<NumassPoint>)
                        String name = (key as Integer).toString()
                        dataBuilder.putStatic(name, point, buildMeta(voltage: key));
                    }

            DataNode<NumassPoint> data = dataBuilder.build()

            def generate = GrindPipe.<NumassPoint, Table> build(name: "generate") {
                return analyzer.getSpectrum(delegate.input as NumassPoint, delegate.meta)
            }

            DataNode<Table> spectra = generate.run(shell.context, data, meta.getMeta("generate"));
            Meta id = buildMeta {
                put meta.getMeta("data")
                put meta.getMeta("generate")
            }
            return shell.context.getFeature(CachePlugin).cacheNode("underflow", id, spectra)
        } as DataNode<Table>
    }
}
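Editor's note: getSpectraMap merges every acquired point with the same HV setting into one SimpleNumassPoint before analysis. The same group-by-voltage step can be written with plain Java streams (Java 16+); Point is a hypothetical stand-in for NumassPoint:

    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;
    import java.util.stream.Collectors;

    // Sketch of grouping points by voltage, as getSpectraMap does before
    // building one combined point per HV setting.
    public class GroupByVoltageSketch {
        record Point(double voltage, long counts) {}

        public static void main(String[] args) {
            List<Point> points = List.of(
                    new Point(17000, 120), new Point(17100, 80),
                    new Point(17000, 118), new Point(17100, 83));
            // TreeMap keeps the voltage groups sorted, mirroring the sorted sets
            Map<Double, List<Point>> byVoltage = points.stream()
                    .collect(Collectors.groupingBy(Point::voltage, TreeMap::new,
                            Collectors.toList()));
            byVoltage.forEach((u, pts) ->
                    System.out.println(u.intValue() + ": " + pts.size() + " points"));
        }
    }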