Changes in model manager

Alexander Nozik 2017-10-25 16:43:22 +03:00
parent ec844b7f5c
commit fd8255a654
16 changed files with 607 additions and 685 deletions

View File

@@ -113,7 +113,7 @@ public class TimeAnalyzer extends AbstractAnalyzer {
double countRate = (t1 * cr1 + t2 * cr2) / (t1 + t2);
double countRateErr = Math.sqrt(Math.pow(t1 * err1 / (t1 + t2), 2) + Math.pow(t2 * err1 / (t1 + t2), 2));
double countRateErr = Math.sqrt(Math.pow(t1 * err1 / (t1 + t2), 2) + Math.pow(t2 * err2 / (t1 + t2), 2));
return ValueMap.of(NAME_LIST,
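The fix above replaces a copy-pasted `err1` with `err2` in the second term, so the combined count-rate error now propagates both interval errors in quadrature:

$$\sigma_{cr} = \sqrt{\left(\frac{t_1\,\sigma_1}{t_1+t_2}\right)^2 + \left(\frac{t_2\,\sigma_2}{t_1+t_2}\right)^2}$$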

View File

@@ -49,7 +49,7 @@ Meta paramMeta = Grind.buildMeta("params") {
trap(value: 1.089, err: 0.05)
}
XYModel model = mm.buildModel(modelMeta)
XYModel model = mm.getModel(modelMeta)
ParamSet allPars = ParamSet.fromMeta(paramMeta);

View File

@@ -8,7 +8,7 @@ import hep.dataforge.grind.GrindShell
import hep.dataforge.kodex.fx.plots.PlotManager
import hep.dataforge.meta.Meta
import inr.numass.NumassPlugin
import inr.numass.actions.TimeAnalyzedAction
import inr.numass.actions.TimeAnalyzerAction
import inr.numass.data.NumassDataUtils
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
@@ -70,7 +70,7 @@ new GrindShell(ctx).eval {
// }
// def data = dataBuilder.build()
def result = new TimeAnalyzedAction().run(ctx, data, meta);
def result = new TimeAnalyzerAction().run(ctx, data, meta);
result.computeAll();

View File

@@ -6,7 +6,7 @@ import hep.dataforge.grind.Grind
import hep.dataforge.grind.GrindShell
import hep.dataforge.kodex.fx.plots.PlotManager
import inr.numass.NumassPlugin
import inr.numass.actions.TimeAnalyzedAction
import inr.numass.actions.TimeAnalyzerAction
import inr.numass.data.SimpleChainGenerator
import inr.numass.data.api.SimpleNumassPoint
import org.apache.commons.math3.random.JDKRandomGenerator
@@ -39,5 +39,5 @@ new GrindShell(ctx).eval {
def meta = Grind.buildMeta(plotHist: false)
new TimeAnalyzedAction().simpleRun(point, meta);
new TimeAnalyzerAction().simpleRun(point, meta);
}

View File

@@ -1,33 +0,0 @@
package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef;
import hep.dataforge.meta.Laminate;
import hep.dataforge.tables.Table;
import inr.numass.NumassUtils;
import inr.numass.data.analyzers.SmartAnalyzer;
import inr.numass.data.api.NumassAnalyzer;
import inr.numass.data.api.NumassSet;
import static hep.dataforge.values.ValueType.NUMBER;
import static hep.dataforge.values.ValueType.STRING;
/**
* The action reads a data set and collects the count rates into a table
* Created by darksnake on 11.07.2017.
*/
@TypedActionDef(name = "numass.analyze", inputType = NumassSet.class, outputType = Table.class)
@ValueDef(name = "window.lo", type = {NUMBER, STRING}, def = "0", info = "Lower bound for window")
@ValueDef(name = "window.up", type = {NUMBER, STRING}, def = "10000", info = "Upper bound for window")
public class AnalyzeDataAction extends OneToOneAction<NumassSet, Table> {
@Override
protected Table execute(Context context, String name, NumassSet input, Laminate inputMeta) {
//TODO add processor here
NumassAnalyzer analyzer = new SmartAnalyzer();
Table res = analyzer.analyzeSet(input, inputMeta);
output(context, name, stream -> NumassUtils.INSTANCE.write(stream, inputMeta, res));
return res;
}
}

View File

@@ -1,128 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.actions;
import hep.dataforge.actions.GroupBuilder;
import hep.dataforge.actions.ManyToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.data.DataNode;
import hep.dataforge.description.NodeDef;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.meta.Laminate;
import hep.dataforge.meta.Meta;
import hep.dataforge.tables.*;
import hep.dataforge.values.Values;
import inr.numass.NumassUtils;
import inr.numass.data.api.NumassAnalyzer;
import inr.numass.data.api.NumassPoint;
import java.util.*;
/**
* @author Darksnake
*/
@TypedActionDef(name = "numass.merge", inputType = Table.class, outputType = Table.class, info = "Merge different numass data files into one.")
@NodeDef(name = "grouping", info = "The definition of grouping rule for this merge", from = "method::hep.dataforge.actions.GroupBuilder.byMeta")
public class MergeDataAction extends ManyToOneAction<Table, Table> {
public static final String MERGE_NAME = "mergeName";
public static String[] parnames = {NumassPoint.HV_KEY, NumassPoint.LENGTH_KEY, NumassAnalyzer.COUNT_KEY, NumassAnalyzer.COUNT_RATE_KEY, NumassAnalyzer.COUNT_RATE_ERROR_KEY};
@Override
protected List<DataNode<Table>> buildGroups(Context context, DataNode<Table> input, Meta actionMeta) {
Meta meta = inputMeta(context, input.meta(), actionMeta);
List<DataNode<Table>> groups;
if (meta.hasValue("grouping.byValue")) {
groups = super.buildGroups(context, input, actionMeta);
} else {
groups = GroupBuilder.byValue(MERGE_NAME, meta.getString(MERGE_NAME, input.getName())).group(input);
}
return groups;
}
@Override
protected Table execute(Context context, String nodeName, Map<String, Table> data, Laminate meta) {
Table res = mergeDataSets(data.values());
return new ListTable(res.getFormat(), TableTransform.sort(res, NumassPoint.HV_KEY, true));
}
@Override
protected void afterGroup(Context context, String groupName, Meta outputMeta, Table output) {
output(context, groupName, stream -> NumassUtils.INSTANCE.write(stream, outputMeta, output));
}
private Values mergeDataPoints(Values dp1, Values dp2) {
if (dp1 == null) {
return dp2;
}
if (dp2 == null) {
return dp1;
}
double voltage = dp1.getValue(NumassPoint.HV_KEY).doubleValue();
double t1 = dp1.getValue(NumassPoint.LENGTH_KEY).doubleValue();
double t2 = dp2.getValue(NumassPoint.LENGTH_KEY).doubleValue();
double time = t1 + t2;
long total = dp1.getValue(NumassAnalyzer.COUNT_KEY).intValue() + dp2.getValue(NumassAnalyzer.COUNT_KEY).intValue();
double cr1 = dp1.getValue(NumassAnalyzer.COUNT_RATE_KEY).doubleValue();
double cr2 = dp2.getValue(NumassAnalyzer.COUNT_RATE_KEY).doubleValue();
double cr = (cr1 * t1 + cr2 * t2) / (t1 + t2);
double err1 = dp1.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY);
double err2 = dp2.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY);
// absolute errors add in quadrature
double crErr = Math.sqrt(err1 * err1 * t1 * t1 + err2 * err2 * t2 * t2) / time;
ValueMap.Builder map = ValueMap.of(parnames, voltage, time, total, cr, crErr).builder();
return map.build();
}
private Table mergeDataSets(Collection<Table> ds) {
//Merge all points into a single dataset
Map<Double, List<Values>> points = new LinkedHashMap<>();
for (Table d : ds) {
if (!d.getFormat().getNames().contains(parnames)) {
throw new IllegalArgumentException();
}
for (Values dp : d) {
double uset = dp.getValue(NumassPoint.HV_KEY).doubleValue();
if (!points.containsKey(uset)) {
points.put(uset, new ArrayList<>());
}
points.get(uset).add(dp);
}
}
List<Values> res = new ArrayList<>();
points.entrySet().stream().map((entry) -> {
Values curPoint = null;
for (Values newPoint : entry.getValue()) {
curPoint = mergeDataPoints(curPoint, newPoint);
}
return curPoint;
}).forEach(res::add);
return new ListTable(MetaTableFormat.forNames(parnames), res);
}
}

View File

@@ -1,122 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.actions;
import hep.dataforge.actions.GroupBuilder;
import hep.dataforge.actions.ManyToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.data.DataNode;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef;
import hep.dataforge.meta.Laminate;
import hep.dataforge.meta.Meta;
import hep.dataforge.stat.fit.FitState;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.MetaTableFormat;
import hep.dataforge.tables.Table;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Value;
import hep.dataforge.values.Values;
import inr.numass.NumassUtils;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
* @author Darksnake
*/
@TypedActionDef(name = "summary", inputType = FitState.class, outputType = Table.class, info = "Generate summary for fit results of different datasets.")
@ValueDef(name = "parnames", multiple = true, required = true, info = "List of names of parameters for which summary should be done")
public class SummaryAction extends ManyToOneAction<FitState, Table> {
public static final String SUMMARY_NAME = "sumName";
@Override
@SuppressWarnings("unchecked")
protected List<DataNode<Table>> buildGroups(Context context, DataNode input, Meta actionMeta) {
Meta meta = inputMeta(context, input.meta(), actionMeta);
List<DataNode<Table>> groups;
if (meta.hasValue("grouping.byValue")) {
groups = super.buildGroups(context, input, actionMeta);
} else {
groups = GroupBuilder.byValue(SUMMARY_NAME, meta.getString(SUMMARY_NAME, "summary")).group(input);
}
return groups;
}
@Override
protected Table execute(Context context, String nodeName, Map<String, FitState> input, Laminate meta) {
String[] parNames;
if (meta.hasValue("parnames")) {
parNames = meta.getStringArray("parnames");
} else {
throw new RuntimeException("Infering parnames not suppoerted");
}
String[] names = new String[2 * parNames.length + 2];
names[0] = "file";
for (int i = 0; i < parNames.length; i++) {
names[2 * i + 1] = parNames[i];
names[2 * i + 2] = parNames[i] + "Err";
}
names[names.length - 1] = "chi2";
ListTable.Builder res = new ListTable.Builder(MetaTableFormat.forNames(names));
double[] weights = new double[parNames.length];
Arrays.fill(weights, 0);
double[] av = new double[parNames.length];
Arrays.fill(av, 0);
input.forEach((String key, FitState value) -> {
FitState state = value;
Value[] values = new Value[names.length];
values[0] = Value.of(key);
for (int i = 0; i < parNames.length; i++) {
Value val = Value.of(state.getParameters().getDouble(parNames[i]));
values[2 * i + 1] = val;
Value err = Value.of(state.getParameters().getError(parNames[i]));
values[2 * i + 2] = err;
double weight = 1 / err.doubleValue() / err.doubleValue();
av[i] += val.doubleValue() * weight;
weights[i] += weight;
}
values[values.length - 1] = Value.of(state.getChi2());
Values point = ValueMap.of(names, (Object[]) values);
res.row(point);
});
Value[] averageValues = new Value[names.length];
averageValues[0] = Value.of("average");
averageValues[averageValues.length - 1] = Value.of(0);
for (int i = 0; i < parNames.length; i++) {
averageValues[2 * i + 1] = Value.of(av[i] / weights[i]);
averageValues[2 * i + 2] = Value.of(1 / Math.sqrt(weights[i]));
}
res.row(ValueMap.of(names, (Object[]) averageValues));
return res.build();
}
@Override
protected void afterGroup(Context context, String groupName, Meta outputMeta, Table output) {
output(context, groupName, stream -> NumassUtils.INSTANCE.write(stream, outputMeta, output));
super.afterGroup(context, groupName, outputMeta, output);
}
}

View File

@@ -1,181 +0,0 @@
package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.description.NodeDef;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef;
import hep.dataforge.meta.Laminate;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaUtils;
import hep.dataforge.names.Named;
import hep.dataforge.tables.ColumnFormat;
import hep.dataforge.tables.ColumnTable;
import hep.dataforge.tables.ListColumn;
import hep.dataforge.tables.Table;
import hep.dataforge.values.Values;
import inr.numass.NumassUtils;
import inr.numass.NumassUtilsKt;
import javafx.util.Pair;
import java.util.ArrayList;
import java.util.List;
import static hep.dataforge.values.ValueType.NUMBER;
import static hep.dataforge.values.ValueType.STRING;
import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_ERROR_KEY;
import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY;
/**
* Apply corrections and transformations to analyzed data
* Created by darksnake on 11.07.2017.
*/
@TypedActionDef(name = "numass.transform", inputType = Table.class, outputType = Table.class)
@ValueDef(name = "correction",
info = "An expression to correct count number depending on potential `U`, point length `T` and point itself as `point`")
@ValueDef(name = "utransform", info = "Expression for voltage transformation. Uses U as input")
@NodeDef(name = "correction", multiple = true, from = "method::inr.numass.actions.TransformDataAction.makeCorrection")
public class TransformDataAction extends OneToOneAction<Table, Table> {
@Override
protected Table execute(Context context, String name, Table input, Laminate meta) {
List<Correction> corrections = new ArrayList<>();
meta.optMeta("corrections").ifPresent(cors ->
MetaUtils.nodeStream(cors)
.map(Pair::getValue)
.map(this::makeCorrection)
.forEach(corrections::add)
);
if (meta.hasValue("correction")) {
final String correction = meta.getString("correction");
corrections.add(point -> NumassUtilsKt.pointExpression(correction, point));
}
ColumnTable table = ColumnTable.copy(input);
for (Correction correction : corrections) {
//adding correction columns
if (!correction.isAnonimous()) {
table = table.buildColumn(ColumnFormat.build(correction.getName(), NUMBER),
correction::corr);
if (correction.hasError()) {
table = table.buildColumn(ColumnFormat.build(correction.getName() + ".err", NUMBER),
correction::corrErr);
}
}
}
// adding original count rate and error columns
table = table.addColumn(new ListColumn(ColumnFormat.build(COUNT_RATE_KEY + ".orig", NUMBER), table.getColumn
(COUNT_RATE_KEY).stream()));
table = table.addColumn(new ListColumn(ColumnFormat.build(COUNT_RATE_ERROR_KEY + ".orig", NUMBER), table
.getColumn(COUNT_RATE_ERROR_KEY).stream()));
List<Double> cr = new ArrayList<>();
List<Double> crErr = new ArrayList<>();
table.getRows().forEach(point -> {
double correctionFactor = corrections.stream()
.mapToDouble(cor -> cor.corr(point))
.reduce((d1, d2) -> d1 * d2).orElse(1);
double relativeCorrectionError = Math.sqrt(
corrections.stream()
.mapToDouble(cor -> cor.relativeErr(point))
.reduce((d1, d2) -> d1 * d1 + d2 * d2).orElse(0)
);
double originalCR = point.getDouble(COUNT_RATE_KEY);
double originalCRErr = point.getDouble(COUNT_RATE_ERROR_KEY);
cr.add(originalCR * correctionFactor);
if (relativeCorrectionError == 0) {
crErr.add(originalCRErr * correctionFactor);
} else {
crErr.add(Math.sqrt(Math.pow(originalCRErr / originalCR, 2d) + Math.pow(relativeCorrectionError, 2d))
* originalCR);
}
});
//replacing cr column
Table res = table.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_KEY).getFormat(), cr.stream()))
.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_ERROR_KEY).getFormat(), crErr.stream()));
output(context, name, stream -> NumassUtils.INSTANCE.write(stream, meta, res));
return res;
}
@ValueDef(name = "value", type = {NUMBER, STRING}, info = "Value or function to multiply count rate")
@ValueDef(name = "err", type = {NUMBER, STRING}, info = "error of the value")
private Correction makeCorrection(Meta corrMeta) {
final String expr = corrMeta.getString("value");
final String errExpr = corrMeta.getString("err", "");
return new Correction() {
@Override
public String getName() {
return corrMeta.getString("name", corrMeta.getName());
}
@Override
public double corr(Values point) {
return NumassUtilsKt.pointExpression(expr, point);
}
@Override
public double corrErr(Values point) {
if (errExpr.isEmpty()) {
return 0;
} else {
return NumassUtilsKt.pointExpression(errExpr, point);
}
}
@Override
public boolean hasError() {
return !errExpr.isEmpty();
}
};
}
private interface Correction extends Named {
@Override
default String getName() {
return "";
}
/**
* correction coefficient
*
* @param point
* @return
*/
double corr(Values point);
/**
* correction coefficient uncertainty
*
* @param point
* @return
*/
default double corrErr(Values point) {
return 0;
}
default boolean hasError() {
return false;
}
default double relativeErr(Values point) {
double corrErr = corrErr(point);
if (corrErr == 0) {
return 0;
} else {
return corrErr / corr(point);
}
}
}
}

View File

@@ -1,100 +0,0 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package inr.numass.tasks;
import hep.dataforge.actions.GenericAction;
import hep.dataforge.cache.CachePlugin;
import hep.dataforge.context.Context;
import hep.dataforge.data.DataNode;
import hep.dataforge.data.DataTree;
import hep.dataforge.description.NodeDef;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.meta.Template;
import hep.dataforge.tables.Table;
import hep.dataforge.workspace.tasks.AbstractTask;
import hep.dataforge.workspace.tasks.TaskModel;
import inr.numass.actions.AnalyzeDataAction;
import inr.numass.actions.MergeDataAction;
import inr.numass.actions.MonitorCorrectAction;
import inr.numass.actions.TransformDataAction;
import inr.numass.data.api.NumassSet;
/**
* Prepare data task
*
* @author Alexander Nozik
*/
@NodeDef(name = "prepare")
@NodeDef(name = "monitor")
@NodeDef(name = "merge")
@Deprecated
public class NumassPrepareTask extends AbstractTask<Table> {
@Override
protected DataNode<Table> run(TaskModel model, DataNode<?> input) {
Meta config = model.meta();
Context context = model.getContext();
//acquiring initial data. The data node must not be empty
DataNode<NumassSet> data = input.getCheckedNode("data", NumassSet.class);
//preparing table data
Meta prepareMeta = config.getMeta("prepare");
DataNode<Table> tables = runAction(new AnalyzeDataAction(), context, data, prepareMeta);
tables = runAction(new TransformDataAction(), context, tables, prepareMeta);
//intermediate caching
tables = model.getContext().getFeature(CachePlugin.class).cacheNode("prepare", prepareMeta, tables);
if (config.hasMeta("monitor")) {
Meta monitorMeta = config.getMeta("monitor");
tables = runAction(new MonitorCorrectAction(), context, tables, monitorMeta);
}
//merging if needed
if (config.hasMeta("merge")) {
DataTree.Builder<Table> resultBuilder = DataTree.builder(Table.class);
DataTree.Builder<Table> tablesForMerge = new DataTree.Builder<>(tables);
config.getMetaList("merge").forEach(mergeNode -> {
Meta mergeMeta = Template.compileTemplate(mergeNode, config);
DataNode<Table> mergeData = runAction(new MergeDataAction(), context, tablesForMerge.build(), mergeMeta);
mergeData.dataStream().forEach(d -> {
resultBuilder.putData("merge." + d.getName(), d.anonymize());
});
});
tables = resultBuilder.build();
}
return tables;
}
@Override
protected void buildModel(TaskModel.Builder model, Meta meta) {
model.configure(
new MetaBuilder()
.putNode(meta.getMetaOrEmpty("prepare"))
.putNode(meta.getMetaOrEmpty("monitor"))
.putNode(meta.getMetaOrEmpty("merge"))
);
model.dependsOn("select", meta, "data");
}
private <T, R> DataNode<R> runAction(GenericAction<T, R> action, Context context, DataNode<T> data, Meta meta) {
return action.run(context, data, meta);
}
@Override
public String getName() {
return "prepare";
}
}

View File

@@ -23,7 +23,6 @@ import hep.dataforge.kodex.fx.plots.PlotContainer
import hep.dataforge.maths.MathPlugin
import hep.dataforge.meta.Meta
import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.stat.fit.FitManager
import hep.dataforge.stat.models.ModelManager
import hep.dataforge.stat.models.WeightedXYModel
import hep.dataforge.stat.models.XYModel
@@ -43,36 +42,35 @@ import org.apache.commons.math3.util.FastMath
@PluginDef(
group = "inr.numass",
name = "numass",
dependsOn = arrayOf("hep.dataforge:math", "hep.dataforge:MINUIT"),
dependsOn = arrayOf("hep.dataforge:math", "hep.dataforge:MINUIT", "hep.dataforge:actions"),
support = false,
info = "Numass data analysis tools"
)
class NumassPlugin : BasicPlugin() {
override fun attach(context: Context) {
// StorageManager.buildFrom(context);
super.attach(context)
context.pluginManager().load(NumassIO())
val fm = context.getFeature(FitManager::class.java)
loadModels(fm.modelManager)
loadModels(context.getFeature(ModelManager::class.java))
loadMath(MathPlugin.buildFrom(context))
val actions = context.pluginManager().getOrLoad(ActionManager::class.java)
actions.attach(context)
context.getFeature(ActionManager::class.java).apply {
putTask(NumassFitScanTask::class.java)
putTask(NumassFitScanSummaryTask::class.java)
putTask(NumassFitSummaryTask::class.java)
put(selectTask)
put(analyzeTask)
put(mergeTask)
put(mergeEmptyTask)
put(monitorTableTask)
put(subtractEmptyTask)
put(transformTask)
put(filterTask)
put(fitTask)
put(plotFitTask)
}
actions.putTask(NumassFitScanTask::class.java)
actions.putTask(NumassFitScanSummaryTask::class.java)
actions.putTask(NumassFitSummaryTask::class.java)
actions.put(selectTask)
actions.put(analyzeTask)
actions.put(mergeTask)
actions.put(mergeEmptyTask)
actions.put(monitorTableTask)
actions.put(subtractEmptyTask)
actions.put(transformTask)
actions.put(filterTask)
actions.put(fitTask)
}
private fun loadMath(math: MathPlugin) {
@@ -122,45 +120,45 @@ class NumassPlugin : BasicPlugin() {
// return new XYModel(spectrum, getAdapter(an));
// });
manager.addModel("scatter") { context, an ->
val A = an.getDouble("resolution", 8.3e-5)!!//8.3e-5
val from = an.getDouble("from", 0.0)!!
val to = an.getDouble("to", 0.0)!!
manager.addModel("scatter") { context, meta ->
val A = meta.getDouble("resolution", 8.3e-5)!!//8.3e-5
val from = meta.getDouble("from", 0.0)!!
val to = meta.getDouble("to", 0.0)!!
val sp: ModularSpectrum
if (from == to) {
sp = ModularSpectrum(GaussSourceSpectrum(), A)
sp = if (from == to) {
ModularSpectrum(GaussSourceSpectrum(), A)
} else {
sp = ModularSpectrum(GaussSourceSpectrum(), A, from, to)
ModularSpectrum(GaussSourceSpectrum(), A, from, to)
}
val spectrum = NBkgSpectrum(sp)
XYModel(spectrum, getAdapter(an))
XYModel(meta, getAdapter(meta), spectrum)
}
manager.addModel("scatter-empiric") { context, an ->
val eGun = an.getDouble("eGun", 19005.0)!!
manager.addModel("scatter-empiric") { context, meta ->
val eGun = meta.getDouble("eGun", 19005.0)!!
val interpolator = buildInterpolator(context, an, eGun)
val interpolator = buildInterpolator(context, meta, eGun)
val loss = EmpiricalLossSpectrum(interpolator, eGun + 5)
val spectrum = NBkgSpectrum(loss)
val weightReductionFactor = an.getDouble("weightReductionFactor", 2.0)!!
val weightReductionFactor = meta.getDouble("weightReductionFactor", 2.0)!!
WeightedXYModel(spectrum, getAdapter(an)) { dp -> weightReductionFactor }
WeightedXYModel(meta, getAdapter(meta), spectrum) { dp -> weightReductionFactor }
}
manager.addModel("scatter-empiric-variable") { context, an ->
val eGun = an.getDouble("eGun", 19005.0)!!
manager.addModel("scatter-empiric-variable") { context, meta ->
val eGun = meta.getDouble("eGun", 19005.0)!!
//build transmission with given data, annotation and smoothing
val interpolator = buildInterpolator(context, an, eGun)
val interpolator = buildInterpolator(context, meta, eGun)
val loss = VariableLossSpectrum.withData(interpolator, eGun + 5)
val tritiumBackground = an.getDouble("tritiumBkg", 0.0)!!
val tritiumBackground = meta.getDouble("tritiumBkg", 0.0)!!
val spectrum: NBkgSpectrum
if (tritiumBackground == 0.0) {
@@ -169,19 +167,17 @@ class NumassPlugin : BasicPlugin() {
spectrum = CustomNBkgSpectrum.tritiumBkgSpectrum(loss, tritiumBackground)
}
val weightReductionFactor = an.getDouble("weightReductionFactor", 2.0)!!
val weightReductionFactor = meta.getDouble("weightReductionFactor", 2.0)!!
val res = WeightedXYModel(spectrum, getAdapter(an)) { dp -> weightReductionFactor }
res.meta = an
res
WeightedXYModel(meta, getAdapter(meta), spectrum) { dp -> weightReductionFactor }
}
manager.addModel("scatter-analytic-variable") { context, an ->
val eGun = an.getDouble("eGun", 19005.0)!!
manager.addModel("scatter-analytic-variable") { context, meta ->
val eGun = meta.getDouble("eGun", 19005.0)!!
val loss = VariableLossSpectrum.withGun(eGun + 5)
val tritiumBackground = an.getDouble("tritiumBkg", 0.0)!!
val tritiumBackground = meta.getDouble("tritiumBkg", 0.0)!!
val spectrum: NBkgSpectrum
if (tritiumBackground == 0.0) {
@@ -190,39 +186,37 @@ class NumassPlugin : BasicPlugin() {
spectrum = CustomNBkgSpectrum.tritiumBkgSpectrum(loss, tritiumBackground)
}
XYModel(spectrum, getAdapter(an))
XYModel(meta, getAdapter(meta), spectrum)
}
manager.addModel("scatter-empiric-experimental") { context, an ->
val eGun = an.getDouble("eGun", 19005.0)!!
manager.addModel("scatter-empiric-experimental") { context, meta ->
val eGun = meta.getDouble("eGun", 19005.0)!!
//build transmission with given data, annotation and smoothing
val interpolator = buildInterpolator(context, an, eGun)
val interpolator = buildInterpolator(context, meta, eGun)
val smoothing = an.getDouble("lossSmoothing", 0.3)!!
val smoothing = meta.getDouble("lossSmoothing", 0.3)!!
val loss = ExperimentalVariableLossSpectrum.withData(interpolator, eGun + 5, smoothing)
val spectrum = NBkgSpectrum(loss)
val weightReductionFactor = an.getDouble("weightReductionFactor", 2.0)!!
val weightReductionFactor = meta.getDouble("weightReductionFactor", 2.0)!!
val res = WeightedXYModel(spectrum, getAdapter(an)) { dp -> weightReductionFactor }
res.meta = an
res
WeightedXYModel(meta, getAdapter(meta), spectrum) { dp -> weightReductionFactor }
}
manager.addModel("sterile") { context, meta ->
val sp = SterileNeutrinoSpectrum(context, meta)
val spectrum = NBkgSpectrum(sp)
XYModel(spectrum, getAdapter(meta))
XYModel(meta, getAdapter(meta), spectrum)
}
manager.addModel("gun") { context, an ->
manager.addModel("gun") { context, meta ->
val gsp = GunSpectrum()
val tritiumBackground = an.getDouble("tritiumBkg", 0.0)!!
val tritiumBackground = meta.getDouble("tritiumBkg", 0.0)!!
val spectrum: NBkgSpectrum
if (tritiumBackground == 0.0) {
@@ -231,7 +225,7 @@ class NumassPlugin : BasicPlugin() {
spectrum = CustomNBkgSpectrum.tritiumBkgSpectrum(gsp, tritiumBackground)
}
XYModel(spectrum, getAdapter(an))
XYModel(meta, getAdapter(meta), spectrum)
}
}

View File

@@ -0,0 +1,128 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.actions
import hep.dataforge.actions.GroupBuilder
import hep.dataforge.actions.ManyToOneAction
import hep.dataforge.context.Context
import hep.dataforge.data.DataNode
import hep.dataforge.description.NodeDef
import hep.dataforge.description.TypedActionDef
import hep.dataforge.meta.Laminate
import hep.dataforge.meta.Meta
import hep.dataforge.tables.*
import hep.dataforge.values.Values
import inr.numass.NumassUtils
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.api.NumassPoint
import java.util.*
/**
* @author Darksnake
*/
@TypedActionDef(name = "numass.merge", inputType = Table::class, outputType = Table::class, info = "Merge different numass data files into one.")
@NodeDef(name = "grouping", info = "The definition of grouping rule for this merge", from = "method::hep.dataforge.actions.GroupBuilder.byMeta")
class MergeDataAction : ManyToOneAction<Table, Table>() {
val parnames = arrayOf(NumassPoint.HV_KEY, NumassPoint.LENGTH_KEY, NumassAnalyzer.COUNT_KEY, NumassAnalyzer.COUNT_RATE_KEY, NumassAnalyzer.COUNT_RATE_ERROR_KEY)
override fun buildGroups(context: Context, input: DataNode<Table>, actionMeta: Meta): List<DataNode<Table>> {
val meta = inputMeta(context, input.meta(), actionMeta)
val groups: List<DataNode<Table>>
if (meta.hasValue("grouping.byValue")) {
groups = super.buildGroups(context, input, actionMeta)
} else {
groups = GroupBuilder.byValue(MERGE_NAME, meta.getString(MERGE_NAME, input.name)).group(input)
}
return groups
}
override fun execute(context: Context, nodeName: String, data: Map<String, Table>, meta: Laminate): Table {
val res = mergeDataSets(data.values)
return ListTable(res.format, TableTransform.sort(res, NumassPoint.HV_KEY, true))
}
override fun afterGroup(context: Context, groupName: String, outputMeta: Meta, output: Table) {
output(context, groupName) { stream -> NumassUtils.write(stream, outputMeta, output) }
}
private fun mergeDataPoints(dp1: Values?, dp2: Values?): Values? {
if (dp1 == null) {
return dp2
}
if (dp2 == null) {
return dp1
}
val voltage = dp1.getValue(NumassPoint.HV_KEY).doubleValue()
val t1 = dp1.getValue(NumassPoint.LENGTH_KEY).doubleValue()
val t2 = dp2.getValue(NumassPoint.LENGTH_KEY).doubleValue()
val time = t1 + t2
val total = (dp1.getValue(NumassAnalyzer.COUNT_KEY).intValue() + dp2.getValue(NumassAnalyzer.COUNT_KEY).intValue()).toLong()
val cr1 = dp1.getValue(NumassAnalyzer.COUNT_RATE_KEY).doubleValue()
val cr2 = dp2.getValue(NumassAnalyzer.COUNT_RATE_KEY).doubleValue()
val cr = (cr1 * t1 + cr2 * t2) / (t1 + t2)
val err1 = dp1.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)!!
val err2 = dp2.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)!!
// absolute errors add in quadrature
val crErr = Math.sqrt(err1 * err1 * t1 * t1 + err2 * err2 * t2 * t2) / time
val map = ValueMap.of(parnames, voltage, time, total, cr, crErr).builder()
return map.build()
}
private fun mergeDataSets(ds: Collection<Table>): Table {
//Merge all points into a single dataset
val points = LinkedHashMap<Double, MutableList<Values>>()
for (d in ds) {
if (!d.format.names.contains(*parnames)) {
throw IllegalArgumentException()
}
for (dp in d) {
val uset = dp.getValue(NumassPoint.HV_KEY).doubleValue()
if (!points.containsKey(uset)) {
points.put(uset, ArrayList())
}
points[uset]?.add(dp)
}
}
val res = ArrayList<Values>()
points.entries.stream().map<Values> { entry ->
var curPoint: Values? = null
for (newPoint in entry.value) {
curPoint = mergeDataPoints(curPoint, newPoint)
}
curPoint
}.forEach { res.add(it) }
return ListTable(MetaTableFormat.forNames(parnames), res)
}
companion object {
val MERGE_NAME = "mergeName"
}
}
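A quick numeric check of the `mergeDataPoints` error propagation (a sketch with illustrative numbers, not real data): merging a 10 s point at 100 Hz ± 3.2 Hz with a 20 s point at 110 Hz ± 2.4 Hz gives

val t1 = 10.0; val cr1 = 100.0; val err1 = 3.2
val t2 = 20.0; val cr2 = 110.0; val err2 = 2.4
// time-weighted count rate, as in mergeDataPoints
val cr = (cr1 * t1 + cr2 * t2) / (t1 + t2)                                       // = 3200/30 ≈ 106.67 Hz
// absolute errors add in quadrature, weighted by interval length
val crErr = Math.sqrt(err1 * err1 * t1 * t1 + err2 * err2 * t2 * t2) / (t1 + t2) // ≈ 1.92 Hz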

View File

@@ -0,0 +1,116 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.actions
import hep.dataforge.actions.GroupBuilder
import hep.dataforge.actions.ManyToOneAction
import hep.dataforge.context.Context
import hep.dataforge.data.DataNode
import hep.dataforge.description.TypedActionDef
import hep.dataforge.description.ValueDef
import hep.dataforge.meta.Laminate
import hep.dataforge.meta.Meta
import hep.dataforge.stat.fit.FitState
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.MetaTableFormat
import hep.dataforge.tables.Table
import hep.dataforge.tables.ValueMap
import hep.dataforge.values.Value
import inr.numass.NumassUtils
import java.util.*
/**
* @author Darksnake
*/
@TypedActionDef(name = "summary", inputType = FitState::class, outputType = Table::class, info = "Generate summary for fit results of different datasets.")
@ValueDef(name = "parnames", multiple = true, required = true, info = "List of names of parameters for which summary should be done")
class SummaryAction : ManyToOneAction<FitState, Table>() {
protected override fun buildGroups(context: Context, input: DataNode<FitState>, actionMeta: Meta): List<DataNode<FitState>> {
val meta = inputMeta(context, input.meta(), actionMeta)
val groups: List<DataNode<FitState>>
if (meta.hasValue("grouping.byValue")) {
groups = super.buildGroups(context, input, actionMeta)
} else {
groups = GroupBuilder.byValue(SUMMARY_NAME, meta.getString(SUMMARY_NAME, "summary")).group<FitState>(input)
}
return groups
}
override fun execute(context: Context, nodeName: String, input: Map<String, FitState>, meta: Laminate): Table {
val parNames: Array<String>
if (meta.hasValue("parnames")) {
parNames = meta.getStringArray("parnames")
} else {
throw RuntimeException("Infering parnames not suppoerted")
}
val names = arrayOfNulls<String>(2 * parNames.size + 2)
names[0] = "file"
for (i in parNames.indices) {
names[2 * i + 1] = parNames[i]
names[2 * i + 2] = parNames[i] + "Err"
}
names[names.size - 1] = "chi2"
val res = ListTable.Builder(MetaTableFormat.forNames(names))
val weights = DoubleArray(parNames.size)
Arrays.fill(weights, 0.0)
val av = DoubleArray(parNames.size)
Arrays.fill(av, 0.0)
input.forEach { key: String, value: FitState ->
val values = arrayOfNulls<Value>(names.size)
values[0] = Value.of(key)
for (i in parNames.indices) {
val `val` = Value.of(value.parameters.getDouble(parNames[i]))
values[2 * i + 1] = `val`
val err = Value.of(value.parameters.getError(parNames[i]))
values[2 * i + 2] = err
val weight = 1.0 / err.doubleValue() / err.doubleValue()
av[i] += `val`.doubleValue() * weight
weights[i] += weight
}
values[values.size - 1] = Value.of(value.chi2)
val point = ValueMap.of(names, *values as Array<Any>)
res.row(point)
}
val averageValues = arrayOfNulls<Value>(names.size)
averageValues[0] = Value.of("average")
averageValues[averageValues.size - 1] = Value.of(0)
for (i in parNames.indices) {
averageValues[2 * i + 1] = Value.of(av[i] / weights[i])
averageValues[2 * i + 2] = Value.of(1 / Math.sqrt(weights[i]))
}
res.row(ValueMap.of(names, *averageValues as Array<Any>))
return res.build()
}
override fun afterGroup(context: Context, groupName: String, outputMeta: Meta, output: Table) {
output(context, groupName) { stream -> NumassUtils.write(stream, outputMeta, output) }
super.afterGroup(context, groupName, outputMeta, output)
}
companion object {
val SUMMARY_NAME = "sumName"
}
}
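The `average` row appended in `execute` is an inverse-variance weighted mean: each fitted parameter enters with weight $w_i = 1/\sigma_i^2$, so

$$\bar{x} = \frac{\sum_i x_i/\sigma_i^2}{\sum_i 1/\sigma_i^2}, \qquad \sigma_{\bar{x}} = \frac{1}{\sqrt{\sum_i 1/\sigma_i^2}}$$

which matches the `av` and `weights` accumulators and the final `1 / Math.sqrt(weights[i])` error.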

View File

@@ -32,7 +32,7 @@ import inr.numass.data.api.NumassPoint
NodeDef(name = "plot", info = "Configuration for stat plots")
)
@TypedActionDef(name = "timeSpectrum", inputType = NumassPoint::class, outputType = Table::class)
class TimeAnalyzedAction : OneToOneAction<NumassPoint, Table>() {
class TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>() {
private val analyzer = TimeAnalyzer();
override fun execute(context: Context, name: String, input: NumassPoint, inputMeta: Laminate): Table {

View File

@@ -0,0 +1,175 @@
package inr.numass.actions
import hep.dataforge.actions.OneToOneAction
import hep.dataforge.context.Context
import hep.dataforge.description.NodeDef
import hep.dataforge.description.TypedActionDef
import hep.dataforge.description.ValueDef
import hep.dataforge.description.ValueDefs
import hep.dataforge.meta.Laminate
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaUtils
import hep.dataforge.names.Named
import hep.dataforge.tables.ColumnFormat
import hep.dataforge.tables.ColumnTable
import hep.dataforge.tables.ListColumn
import hep.dataforge.tables.Table
import hep.dataforge.values.ValueType.NUMBER
import hep.dataforge.values.ValueType.STRING
import hep.dataforge.values.Values
import inr.numass.NumassUtils
import inr.numass.data.api.NumassAnalyzer.COUNT_RATE_ERROR_KEY
import inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY
import inr.numass.pointExpression
import java.util.*
/**
* Apply corrections and transformations to analyzed data
* Created by darksnake on 11.07.2017.
*/
@TypedActionDef(name = "numass.transform", inputType = Table::class, outputType = Table::class)
@ValueDefs(
ValueDef(name = "correction", info = "An expression to correct count number depending on potential `U`, point length `T` and point itself as `point`"),
ValueDef(name = "utransform", info = "Expression for voltage transformation. Uses U as input")
)
@NodeDef(name = "correction", multiple = true, from = "method::inr.numass.actions.TransformDataAction.makeCorrection")
class TransformDataAction : OneToOneAction<Table, Table>() {
override fun execute(context: Context, name: String, input: Table, meta: Laminate): Table {
val corrections = ArrayList<Correction>()
meta.optMeta("corrections").ifPresent { cors ->
MetaUtils.nodeStream(cors)
.map<Meta> { it.value }
.map<Correction> { this.makeCorrection(it) }
.forEach { corrections.add(it) }
}
if (meta.hasValue("correction")) {
val correction = meta.getString("correction")
corrections.add(object : Correction {
override fun corr(point: Values): Double {
return pointExpression(correction, point)
}
})
}
var table = ColumnTable.copy(input)
for (correction in corrections) {
//adding correction columns
if (!correction.isAnonimous) {
table = table.buildColumn(ColumnFormat.build(correction.name, NUMBER)) { correction.corr(it) }
if (correction.hasError()) {
table = table.buildColumn(ColumnFormat.build(correction.name + ".err", NUMBER)) { correction.corrErr(it) }
}
}
}
// adding original count rate and error columns
table = table.addColumn(ListColumn(ColumnFormat.build(COUNT_RATE_KEY + ".orig", NUMBER), table.getColumn(COUNT_RATE_KEY).stream()))
table = table.addColumn(ListColumn(ColumnFormat.build(COUNT_RATE_ERROR_KEY + ".orig", NUMBER), table
.getColumn(COUNT_RATE_ERROR_KEY).stream()))
val cr = ArrayList<Double>()
val crErr = ArrayList<Double>()
table.rows.forEach { point ->
val correctionFactor = corrections.stream()
.mapToDouble { cor -> cor.corr(point) }
.reduce { d1, d2 -> d1 * d2 }.orElse(1.0)
val relativeCorrectionError = Math.sqrt(
corrections.stream()
.mapToDouble { cor -> cor.relativeErr(point) }
.reduce { d1, d2 -> d1 * d1 + d2 * d2 }.orElse(0.0)
)
val originalCR = point.getDouble(COUNT_RATE_KEY)!!
val originalCRErr = point.getDouble(COUNT_RATE_ERROR_KEY)!!
cr.add(originalCR * correctionFactor)
if (relativeCorrectionError == 0.0) {
crErr.add(originalCRErr * correctionFactor)
} else {
crErr.add(Math.sqrt(Math.pow(originalCRErr / originalCR, 2.0) + Math.pow(relativeCorrectionError, 2.0)) * originalCR)
}
}
//replacing cr column
val res = table.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_KEY).format, cr.stream()))
.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_ERROR_KEY).format, crErr.stream()))
output(context, name) { stream -> NumassUtils.write(stream, meta, res) }
return res
}
@ValueDefs(
ValueDef(name = "value", type = arrayOf(NUMBER, STRING), info = "Value or function to multiply count rate"),
ValueDef(name = "err", type = arrayOf(NUMBER, STRING), info = "error of the value")
)
private fun makeCorrection(corrMeta: Meta): Correction {
val expr = corrMeta.getString("value")
val errExpr = corrMeta.getString("err", "")
return object : Correction {
override fun getName(): String {
return corrMeta.getString("name", corrMeta.name)
}
override fun corr(point: Values): Double {
return pointExpression(expr, point)
}
override fun corrErr(point: Values): Double {
return if (errExpr.isEmpty()) {
0.0
} else {
pointExpression(errExpr, point)
}
}
override fun hasError(): Boolean {
return !errExpr.isEmpty()
}
}
}
private interface Correction : Named {
override fun getName(): String {
return ""
}
/**
* correction coefficient
*
* @param point
* @return
*/
fun corr(point: Values): Double
/**
* correction coefficient uncertainty
*
* @param point
* @return
*/
fun corrErr(point: Values): Double {
return 0.0
}
fun hasError(): Boolean {
return false
}
fun relativeErr(point: Values): Double {
val corrErr = corrErr(point)
return if (corrErr == 0.0) {
0.0
} else {
corrErr / corr(point)
}
}
}
}
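As a hedged illustration of the configuration this action consumes (the node name `deadTime` and the expression body are hypothetical; only the variables `U`, `T` and `point` are documented above), a multiplicative correction could be declared roughly like this, assuming the usual MetaBuilder putValue/putNode/build chain:

// import hep.dataforge.meta.MetaBuilder  (assumed)
// Each child of "corrections" is passed to makeCorrection; its node name becomes
// the correction name and its "value"/"err" strings are point expressions.
val transformMeta = MetaBuilder("transform")
        .putNode(MetaBuilder("corrections")
                .putNode(MetaBuilder("deadTime")
                        .putValue("value", "1.0/(1.0 - 1e-5*T)") // hypothetical expression in T
                        .putValue("err", "0.0")))
        .build()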

View File

@@ -0,0 +1,11 @@
package inr.numass.models
import hep.dataforge.stat.models.Model
import hep.dataforge.stat.models.ModelDescriptor
import hep.dataforge.stat.models.ModelFactory
import hep.dataforge.utils.ContextMetaFactory
fun model(name: String, descriptor: ModelDescriptor? = null, factory: ContextMetaFactory<Model>): ModelFactory {
return ModelFactory.build(name, descriptor, factory);
}
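A hedged usage sketch for this helper, mirroring the `sterile` registration in NumassPlugin.loadModels; the adapter here is illustrative (the plugin actually derives it from meta via a private getAdapter), and imports from the numass spectrum packages are assumed:

// Wraps the (Context, Meta) -> Model lambda into a named ModelFactory.
val sterileFactory: ModelFactory = model("sterile") { context, meta ->
    val spectrum = NBkgSpectrum(SterileNeutrinoSpectrum(context, meta))
    XYModel(meta, XYAdapter(NumassPoint.HV_KEY, NumassAnalyzer.COUNT_RATE_KEY, NumassAnalyzer.COUNT_RATE_ERROR_KEY), spectrum)
}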

View File

@@ -12,18 +12,22 @@ import hep.dataforge.kodex.task
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaUtils
import hep.dataforge.plots.PlotFrame
import hep.dataforge.plots.PlotUtils
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.data.XYFunctionPlot
import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.stat.fit.FitAction
import hep.dataforge.stat.fit.FitHelper
import hep.dataforge.stat.fit.FitResult
import hep.dataforge.stat.models.XYModel
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.Table
import hep.dataforge.tables.TableTransform
import hep.dataforge.tables.XYAdapter
import hep.dataforge.values.ValueType
import hep.dataforge.values.Values
import inr.numass.NumassUtils
import inr.numass.actions.MergeDataAction
import inr.numass.actions.MergeDataAction.MERGE_NAME
import inr.numass.actions.MergeDataAction.Companion.MERGE_NAME
import inr.numass.actions.TransformDataAction
import inr.numass.addSetMarkers
import inr.numass.data.analyzers.SmartAnalyzer
@@ -32,6 +36,8 @@ import inr.numass.data.api.NumassSet
import inr.numass.subtract
import inr.numass.unbox
import inr.numass.utils.ExpressionUtils
import java.io.PrintWriter
import java.util.stream.StreamSupport
val selectTask = task("select") {
model { meta ->
@@ -49,48 +55,46 @@ val monitorTableTask = task("monitor") {
dependsOn(selectTask, meta)
configure(meta.getMetaOrEmpty("analyzer"))
}
join<NumassSet, Table> {
result { data ->
val monitorVoltage = meta.getDouble("monitorVoltage", 16000.0);
val analyzer = SmartAnalyzer()
val analyzerMeta = meta.getMetaOrEmpty("analyzer")
//TODO add separator labels
val res = ListTable.Builder("timestamp", "count", "cr", "crErr")
.rows(
data.values.stream().parallel()
.flatMap { it.points }
.filter { it.voltage == monitorVoltage }
.map { it -> analyzer.analyzePoint(it, analyzerMeta) }
).build()
join<NumassSet, Table> { data ->
val monitorVoltage = meta.getDouble("monitorVoltage", 16000.0);
val analyzer = SmartAnalyzer()
val analyzerMeta = meta.getMetaOrEmpty("analyzer")
//TODO add separator labels
val res = ListTable.Builder("timestamp", "count", "cr", "crErr")
.rows(
data.values.stream().parallel()
.flatMap { it.points }
.filter { it.voltage == monitorVoltage }
.map { it -> analyzer.analyzePoint(it, analyzerMeta) }
).build()
if (meta.getBoolean("showPlot", true)) {
context.provide("plots", PlotManager::class.java).ifPresent {
it.display(stage = "monitor") {
configure {
"xAxis.title" to "time"
"xAxis.type" to "time"
"yAxis.title" to "Count rate"
"yAxis.units" to "Hz"
}
plots + DataPlot.plot(name, XYAdapter("timestamp", "cr", "crErr"), res)
}.also { frame ->
if (frame is JFreeChartFrame) {
//add set markers
addSetMarkers(frame, data.values)
}
context.io().out("numass.monitor", name, "dfp").use {
NumassUtils.writeEnvelope(it, PlotFrame.Wrapper().wrap(frame))
}
if (meta.getBoolean("showPlot", true)) {
context.provide("plots", PlotManager::class.java).ifPresent {
it.display(stage = "monitor") {
configure {
"xAxis.title" to "time"
"xAxis.type" to "time"
"yAxis.title" to "Count rate"
"yAxis.units" to "Hz"
}
plots + DataPlot.plot(name, XYAdapter("timestamp", "cr", "crErr"), res)
}.also { frame ->
if (frame is JFreeChartFrame) {
//add set markers
addSetMarkers(frame, data.values)
}
context.io().out("numass.monitor", name, "dfp").use {
NumassUtils.writeEnvelope(it, PlotFrame.Wrapper().wrap(frame))
}
}
}
context.io().out("numass.monitor", name).use {
NumassUtils.write(it, meta, res)
}
return@result res;
}
context.io().out("numass.monitor", name).use {
NumassUtils.write(it, meta, res)
}
return@join res;
}
}
@@ -99,12 +103,10 @@ val analyzeTask = task("analyze") {
dependsOn(selectTask, meta);
configure(MetaUtils.optEither(meta, "analyzer", "prepare").orElse(Meta.empty()))
}
pipe<NumassSet, Table> {
result { set ->
SmartAnalyzer().analyzeSet(set, meta).also { res ->
context.io().out("numass.analyze", name).use {
NumassUtils.write(it, meta, res)
}
pipe<NumassSet, Table> { set ->
SmartAnalyzer().analyzeSet(set, meta).also { res ->
context.io().out("numass.analyze", name).use {
NumassUtils.write(it, meta, res)
}
}
}
@@ -118,6 +120,15 @@ val mergeTask = task("merge") {
action<Table, Table>(MergeDataAction())
}
//val newMergeTask = task("merge") {
// model { meta ->
// dependsOn(analyzeTask, meta)
// }
// join<Table, Table> {
// byValue(MERGE_NAME)
// }
//}
val mergeEmptyTask = task("empty") {
model { meta ->
if (!meta.hasMeta("empty")) {
@@ -189,20 +200,19 @@ val filterTask = task("filter") {
model { meta ->
dependsOn(transformTask, meta)
}
pipe<Table, Table> {
result { data ->
if (meta.hasValue("from") || meta.hasValue("to")) {
val uLo = meta.getDouble("from", 0.0)!!
val uHi = meta.getDouble("to", java.lang.Double.POSITIVE_INFINITY)!!
this.log.report("Filtering finished")
TableTransform.filter(data, NumassPoint.HV_KEY, uLo, uHi)
} else if (meta.hasValue("condition")) {
TableTransform.filter(data) { ExpressionUtils.condition(meta.getString("condition"), it.unbox()) }
} else {
throw RuntimeException("No filtering condition specified")
}
pipe<Table, Table> { data ->
if (meta.hasValue("from") || meta.hasValue("to")) {
val uLo = meta.getDouble("from", 0.0)!!
val uHi = meta.getDouble("to", java.lang.Double.POSITIVE_INFINITY)!!
this.log.report("Filtering finished")
TableTransform.filter(data, NumassPoint.HV_KEY, uLo, uHi)
} else if (meta.hasValue("condition")) {
TableTransform.filter(data) { ExpressionUtils.condition(meta.getString("condition"), it.unbox()) }
} else {
throw RuntimeException("No filtering condition specified")
}
}
}
val fitTask = task("fit") {
@@ -210,5 +220,57 @@ val fitTask = task("fit") {
dependsOn(filterTask, meta)
configure(meta.getMeta("fit"))
}
action<Table, FitResult>(FitAction())
pipe<Table, FitResult> { data ->
context.io().out("numass.fit", name).use { out ->
val writer = PrintWriter(out)
writer.printf("%n*** META ***%n")
writer.println(meta.toString())
writer.flush()
FitHelper(context).fit(data, meta)
.setListenerStream(out)
.report(log)
.run()
.also {
if (meta.getBoolean("printLog", true)) {
log.print(writer)
}
}
}
}
}
val plotFitTask = task("plotFit") {
model { meta ->
dependsOn(fitTask, meta)
configure(meta.getMetaOrEmpty("plotFit"))
}
pipe<FitResult, PlotFrame> { input ->
val fitModel = input.optModel(context).orElseThrow { IllegalStateException("Can't load model") } as XYModel
val data = input.data
val adapter: XYAdapter = fitModel.adapter
val function = { x: Double -> fitModel.spectrum.value(x, input.parameters) }
val frame = PlotUtils.getPlotManager(context)
.getPlotFrame("numass.plotFit", name, meta.getMeta("frame", Meta.empty()))
val fit = XYFunctionPlot("fit").apply {
setFunction(function)
setDensity(100, false)
setSmoothing(true)
}
frame.add(fit)
// ensuring all data points are calculated explicitly
StreamSupport.stream<Values>(data.spliterator(), false)
.map { dp -> adapter.getX(dp).doubleValue() }.sorted().forEach { fit.calculateIn(it) }
frame.add(DataPlot.plot("data", adapter, data))
return@pipe frame;
}
}