Returned generics to DataNodes. :(

This commit is contained in:
Alexander Nozik 2017-09-26 17:04:50 +03:00
parent 0a58e4c351
commit 6422e102c8
6 changed files with 103 additions and 47 deletions

View File

@@ -14,7 +14,7 @@ import java.nio.file.Paths
Workspace numass = FileBasedWorkspace.build(Paths.get("D:/Work/Numass/sterile2016_10/workspace.groovy"))
numass.runTask("prepare", "fill_1_all").forEachDataWithType(Table) {
numass.runTask("prepare", "fill_1_all").forEachData(Table) {
Table table = it.get();
def dp18 = table.find { it["Uset"] == 18000 }
def dp17 = table.find { it["Uset"] == 17000 }

View File

@@ -0,0 +1,38 @@
// Exploration script: builds a 2D histogram of inter-event delay vs. channel
// for one point of a numass data set and dumps it as a table to stdout.
package inr.numass.scripts.times
import hep.dataforge.context.Context
import hep.dataforge.context.Global
import hep.dataforge.grind.GrindShell
import hep.dataforge.io.ColumnedDataWriter
import hep.dataforge.maths.histogram.SimpleHistogram
import hep.dataforge.meta.Meta
import hep.dataforge.plots.fx.FXPlotManager
import inr.numass.NumassPlugin
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassStorage
import inr.numass.data.storage.NumassStorageFactory
// Global context with JavaFX plotting and numass support loaded.
Context ctx = Global.instance()
ctx.pluginManager().load(FXPlotManager)
ctx.pluginManager().load(NumassPlugin)
new GrindShell(ctx).eval {
// NOTE(review): hard-coded local Windows data path — adjust before running elsewhere.
File rootDir = new File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")
NumassStorage storage = NumassStorageFactory.buildLocal(rootDir);
// Set name and HV value selecting the single point to analyze.
def set = "set_3"
def hv = 14000
// Resolve the loader for the set, then the point at the requested voltage.
// Both .get() calls throw if the set/point is absent — acceptable for a script.
def loader = storage.provide("loader::$set", NumassSet.class).get();
def point = loader.optPoint(hv).get()
// Histogram with bin origin (0, 0) and bin sizes (2, 100) along
// (delay, channel); `it.value / 1000` presumably converts delay units
// (ns -> us?) — TODO confirm. `chanel` is the property name as spelled
// by the upstream API.
def table = new SimpleHistogram([0d, 0d] as Double[], [2d, 100d] as Double[])
.fill(new TimeAnalyzer().getEventsWithDelay(point, Meta.empty()).map {
[it.value / 1000, it.key.chanel] as Double[]
}).asTable()
// Write the histogram table to stdout under the header "hist".
ColumnedDataWriter.writeTable(System.out, table, "hist")
// NOTE(review): close is not in a finally block; storage leaks if an earlier
// line throws.
storage.close()
}

View File

@@ -2,13 +2,14 @@ package inr.numass.scripts.times
import hep.dataforge.context.Context
import hep.dataforge.context.Global
import hep.dataforge.data.DataSet
import hep.dataforge.grind.Grind
import hep.dataforge.grind.GrindShell
import hep.dataforge.grind.helpers.PlotHelper
import hep.dataforge.meta.Meta
import hep.dataforge.plots.fx.FXPlotManager
import hep.dataforge.tables.ValueMap
import hep.dataforge.storage.commons.StorageUtils
import inr.numass.NumassPlugin
import inr.numass.data.PointAnalyzer
import inr.numass.data.api.NumassAnalyzer
import inr.numass.actions.TimeAnalyzedAction
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassStorage
@@ -24,55 +25,36 @@ ctx.pluginManager().load(FXPlotManager)
ctx.pluginManager().load(NumassPlugin.class)
new GrindShell(ctx).eval {
PlotHelper plot = plots
File rootDir = new File("D:\\Work\\Numass\\data\\2017_05\\Fill_3")
File rootDir = new File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")
NumassStorage storage = NumassStorageFactory.buildLocal(rootDir);
Meta meta = Grind.buildMeta(binNum: 200) {
window(lo: 500, up: 1800)
}
// def set = "set_43"
// def loader = storage.provide("loader::$set", NumassSet.class).get();
// def data = NumassUtils.pointsToNode(loader).filter { name, data ->
// return data.meta().getDouble("voltage",0) < 15000
// };
def set = "set_43"
def hv = 14000;
def loader = storage.provide("loader::$set", NumassSet.class).get();
def point = loader.provide("$hv", NumassPoint.class).get()
def dataBuilder = DataSet.builder(NumassPoint)
def loChannel = 450;
def upChannel = 3100;
def histogram = PointAnalyzer.histogram(point, loChannel, upChannel, 1, 500).asTable();
println "finished histogram calculation..."
plot.configure("histogram") {
xAxis(axisTitle: "delay", axisUnits: "us")
yAxis(type: "log")
StorageUtils.loaderStream(storage, false)
.filter { it.value instanceof NumassSet }
.forEach { pair ->
(pair.value as NumassSet).optPoint(hv).ifPresent {
dataBuilder.putData(pair.key, it, it.meta);
}
}
def data = dataBuilder.build()
plot.plot(name: "test", frame: "histogram", showLine: true, showSymbol: false, showErrors: false, connectionType: "step", histogram, {
adapter("x.value": "x", "y.value": "count")
})
def trueCR = PointAnalyzer.analyze(point, t0: 30e3, "window.lo": loChannel, "window.up": upChannel).getDouble("cr")
println "The expected count rate for 30 us delay is $trueCR"
def t0 = (1..150).collect { 500 * it }
def statPlotPoints = t0.collect {
def result = PointAnalyzer.analyze(point, t0: it, "window.lo": loChannel, "window.up": upChannel)
ValueMap.ofMap("x": it / 1000, "y": result.getDouble("cr"), "y.err": result.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY));
}
plot.plot(name: "total", frame: "stat-method", showLine: true, statPlotPoints)
// def delta = 5e-6
// def discrepancyPlotPoints = (1..20).collect { delta * it }.collect {
// def t1 = it
// def t2 = it + delta
// def result = PointAnalyzer.count(point, t1, t2, loChannel, upChannel) - (Math.exp(-trueCR * t1) - Math.exp(-trueCR * t2)) * point.length * trueCR
// ValueMap.ofMap("x.value": it + delta / 2, "y.value": result);
// }
//
// plot.plot(name: hv, frame: "discrepancy", discrepancyPlotPoints)
def result = new TimeAnalyzedAction().run(ctx, data, meta);
result.computeAll();
storage.close()
}

View File

@@ -44,7 +44,7 @@ new GrindShell(ctx).eval {
println "The expected count rate for 30 us delay is $trueCR"
def t0 = (1..150).collect { 1000 * it }
def t0 = (1..150).collect { 420 * it }
def statPlotPoints = t0.collect {

View File

@@ -49,7 +49,7 @@ public class NumassSubstractEmptySourceTask extends AbstractTask<Table> {
DataTree.Builder<Table> builder = DataTree.builder(Table.class);
DataNode<Table> rootNode = data.getCheckedNode("prepare", Table.class);
Data<? extends Table> emptySource = data.getCheckedNode("empty", Table.class).getData();
rootNode.forEachDataWithType(Table.class, input -> {
rootNode.forEachData(Table.class, input -> {
Data<? extends Table> res = subtract(input, emptySource);
res.getGoal().onComplete((r, err) -> {
if (r != null) {

View File

@@ -15,12 +15,17 @@
*/
package inr.numass.utils;
import hep.dataforge.data.DataNode;
import hep.dataforge.data.DataSet;
import hep.dataforge.io.envelopes.EnvelopeBuilder;
import hep.dataforge.io.envelopes.TaglessEnvelopeType;
import hep.dataforge.io.markup.Markedup;
import hep.dataforge.io.markup.SimpleMarkupRenderer;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.values.Values;
import inr.numass.data.api.NumassPoint;
import inr.numass.data.api.NumassSet;
import org.apache.commons.math3.analysis.UnivariateFunction;
import java.io.IOException;
@@ -116,5 +121,36 @@ public class NumassUtils {
writeEnvelope(stream, meta, out -> new SimpleMarkupRenderer(out).render(something.markup(meta)));
}
/**
 * Convert a numass set into a flat DataNode: one entry per point, named
 * {@code point_<index>} (falling back to the point's hash code when
 * {@code external_meta.point_index} is absent), plus an optional {@code hv}
 * entry for the high-voltage data when present.
 *
 * @param set the numass set to convert
 * @return a DataNode carrying the set's name
 */
public static DataNode<Object> setToNode(NumassSet set) {
    DataSet.Builder<Object> node = DataSet.builder();
    node.setName(set.getName());
    set.getPoints().forEach(p -> {
        // Per-point metadata assembled from the voltage and external metadata.
        Meta pointMeta = new MetaBuilder("point")
                .putValue("voltage", p.getVoltage())
                .putValue("index", p.meta().getInt("external_meta.point_index", -1))
                .putValue("run", p.meta().getString("external_meta.session", ""))
                .putValue("group", p.meta().getString("external_meta.group", ""));
        // Name defaults to the hash code so entries stay unique without an index.
        String entryName = "point_" + p.meta().getInt("external_meta.point_index", p.hashCode());
        node.putData(entryName, p, pointMeta);
    });
    // Attach the HV monitoring data under a fixed key, if the set has it.
    set.getHvData().ifPresent(hvTable -> node.putData("hv", hvTable, Meta.empty()));
    return node.build();
}
/**
 * Convert numass set to uniform node which consists of points.
 * <p>
 * Delegates to {@link #setToNode(NumassSet)} and narrows the result to
 * {@code NumassPoint} entries via {@code checked}; presumably this drops
 * non-point entries such as the "hv" table — verify {@code checked()}
 * semantics.
 *
 * @param set the numass set to convert
 * @return a typed node of the set's points
 */
public static DataNode<NumassPoint> pointsToNode(NumassSet set){
    return setToNode(set).checked(NumassPoint.class);
}
}