minor fixes

Alexander Nozik 2017-10-22 13:21:36 +03:00
parent c341c29120
commit 6da107c49a
13 changed files with 222 additions and 191 deletions
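Most of the changes below stem from one refactoring: the Java helper class inr.numass.utils.NumassUtils is deleted and replaced by a Kotlin object NumassUtils plus top-level functions in the inr.numass package. Seen from Java, members of a Kotlin object are reached through the generated INSTANCE field, and top-level functions declared in NumassUtils.kt end up in the synthetic class NumassUtilsKt; that is why the call sites change from NumassUtils.write(...) to NumassUtils.INSTANCE.write(...) and from pointExpression(...) to NumassUtilsKt.pointExpression(...). A minimal sketch of that interop pattern (simplified signatures, not the project's real types):

package inr.numass

import java.io.OutputStream

object NumassUtils {
    // Reached from Java as NumassUtils.INSTANCE.write(stream, meta, data)
    fun write(stream: OutputStream, meta: String, data: String) {
        stream.write((meta + "\n" + data).toByteArray())
    }
}

// A top-level function in NumassUtils.kt compiles into the class NumassUtilsKt,
// so Java callers write NumassUtilsKt.pointExpression(expr, point)
fun pointExpression(expression: String, point: Map<String, Double>): Double =
    point[expression] ?: 0.0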

View File

@@ -214,7 +214,7 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
@Override
public Instant getStartTime() {
return meta.optValue("start_time").map(Value::timeValue).orElseGet(() -> NumassSet.super.getStartTime());
return getMeta().optValue("start_time").map(Value::timeValue).orElseGet(() -> NumassSet.super.getStartTime());
}
@Override

View File

@@ -8,10 +8,10 @@ package inr.numass.scripts
import hep.dataforge.grind.Grind
import hep.dataforge.values.Values
import inr.numass.NumassUtils
import inr.numass.data.api.NumassPoint
import inr.numass.data.storage.NumassDataLoader
import inr.numass.utils.NMEventGeneratorWithPulser
import inr.numass.utils.NumassUtils
import inr.numass.utils.PileUpSimulator
import inr.numass.utils.UnderflowCorrection
import org.apache.commons.math3.random.JDKRandomGenerator

View File

@@ -6,10 +6,10 @@ import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef;
import hep.dataforge.meta.Laminate;
import hep.dataforge.tables.Table;
import inr.numass.NumassUtils;
import inr.numass.data.analyzers.SmartAnalyzer;
import inr.numass.data.api.NumassAnalyzer;
import inr.numass.data.api.NumassSet;
import inr.numass.utils.NumassUtils;
import static hep.dataforge.values.ValueType.NUMBER;
import static hep.dataforge.values.ValueType.STRING;
@@ -27,7 +27,7 @@ public class AnalyzeDataAction extends OneToOneAction<NumassSet, Table> {
//TODO add processor here
NumassAnalyzer analyzer = new SmartAnalyzer();
Table res = analyzer.analyzeSet(input, inputMeta);
output(context, name, stream -> NumassUtils.write(stream, inputMeta, res));
output(context, name, stream -> NumassUtils.INSTANCE.write(stream, inputMeta, res));
return res;
}
}

View File

@@ -25,9 +25,9 @@ import hep.dataforge.meta.Laminate;
import hep.dataforge.meta.Meta;
import hep.dataforge.tables.*;
import hep.dataforge.values.Values;
import inr.numass.NumassUtils;
import inr.numass.data.api.NumassAnalyzer;
import inr.numass.data.api.NumassPoint;
import inr.numass.utils.NumassUtils;
import java.util.*;
@@ -62,7 +62,7 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
@Override
protected void afterGroup(Context context, String groupName, Meta outputMeta, Table output) {
output(context, groupName, stream -> NumassUtils.write(stream, outputMeta, output));
output(context, groupName, stream -> NumassUtils.INSTANCE.write(stream, outputMeta, output));
}
private Values mergeDataPoints(Values dp1, Values dp2) {

View File

@@ -27,9 +27,9 @@ import hep.dataforge.tables.Table;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Value;
import hep.dataforge.values.Values;
import inr.numass.NumassUtils;
import inr.numass.data.api.NumassAnalyzer;
import inr.numass.data.api.NumassPoint;
import inr.numass.utils.NumassUtils;
import javafx.util.Pair;
import org.apache.commons.math3.analysis.interpolation.SplineInterpolator;
import org.apache.commons.math3.analysis.polynomials.PolynomialSplineFunction;
@@ -136,7 +136,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
// }
Table res = new ListTable(dataList);
output(context, name, stream -> NumassUtils.write(stream, meta, res));
output(context, name, stream -> NumassUtils.INSTANCE.write(stream, meta, res));
return res;
}
@@ -201,7 +201,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
String monitorFileName = meta.getString("monitorFile", "monitor");
ListTable data = new ListTable(monitorPoints);
output(context, monitorFileName, stream -> NumassUtils.write(stream, meta, data));
output(context, monitorFileName, stream -> NumassUtils.INSTANCE.write(stream, meta, data));
// ColumnedDataWriter.writeTable(stream, TableTransform.sort(data, "Timestamp", true), "Monitor points", monitorNames);
}
}

View File

@@ -14,7 +14,7 @@ import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.Table;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Values;
import inr.numass.utils.NumassUtils;
import inr.numass.NumassUtils;
import java.io.IOException;
import java.nio.file.Path;
@@ -47,7 +47,7 @@ public class SubstractSpectrumAction extends OneToOneAction<Table, Table> {
});
Table res = builder.build();
output(context,name, stream -> NumassUtils.write(stream,inputMeta,res));
output(context,name, stream -> NumassUtils.INSTANCE.write(stream,inputMeta,res));
return res;
} catch (IOException ex) {
throw new RuntimeException("Could not read reference file", ex);

View File

@@ -30,7 +30,7 @@ import hep.dataforge.tables.Table;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Value;
import hep.dataforge.values.Values;
import inr.numass.utils.NumassUtils;
import inr.numass.NumassUtils;
import java.util.Arrays;
import java.util.List;
@@ -115,7 +115,7 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
@Override
protected void afterGroup(Context context, String groupName, Meta outputMeta, Table output) {
output(context, groupName, stream -> NumassUtils.write(stream, outputMeta, output));
output(context, groupName, stream -> NumassUtils.INSTANCE.write(stream, outputMeta, output));
super.afterGroup(context, groupName, outputMeta, output);
}

View File

@@ -14,7 +14,8 @@ import hep.dataforge.tables.ColumnTable;
import hep.dataforge.tables.ListColumn;
import hep.dataforge.tables.Table;
import hep.dataforge.values.Values;
import inr.numass.utils.NumassUtils;
import inr.numass.NumassUtils;
import inr.numass.NumassUtilsKt;
import javafx.util.Pair;
import java.util.ArrayList;
@@ -24,7 +25,6 @@ import static hep.dataforge.values.ValueType.NUMBER;
import static hep.dataforge.values.ValueType.STRING;
import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_ERROR_KEY;
import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY;
import static inr.numass.utils.NumassUtils.pointExpression;
/**
* Apply corrections and transformations to analyzed data
@@ -51,7 +51,7 @@ public class TransformDataAction extends OneToOneAction<Table, Table> {
if (meta.hasValue("correction")) {
final String correction = meta.getString("correction");
corrections.add(point -> pointExpression(correction, point));
corrections.add(point -> NumassUtilsKt.pointExpression(correction, point));
}
@@ -102,7 +102,7 @@ public class TransformDataAction extends OneToOneAction<Table, Table> {
Table res = table.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_KEY).getFormat(), cr.stream()))
.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_ERROR_KEY).getFormat(), crErr.stream()));
output(context, name, stream -> NumassUtils.write(stream, meta, res));
output(context, name, stream -> NumassUtils.INSTANCE.write(stream, meta, res));
return res;
}
@@ -120,7 +120,7 @@ public class TransformDataAction extends OneToOneAction<Table, Table> {
@Override
public double corr(Values point) {
return pointExpression(expr, point);
return NumassUtilsKt.pointExpression(expr, point);
}
@Override
@@ -128,7 +128,7 @@ public class TransformDataAction extends OneToOneAction<Table, Table> {
if (errExpr.isEmpty()) {
return 0;
} else {
return pointExpression(errExpr, point);
return NumassUtilsKt.pointExpression(errExpr, point);
}
}

View File

@@ -7,8 +7,8 @@ package inr.numass.models;
import hep.dataforge.stat.parametric.ParametricFunction;
import hep.dataforge.values.Values;
import inr.numass.NumassUtils;
import inr.numass.utils.NumassIntegrator;
import inr.numass.utils.NumassUtils;
import org.apache.commons.math3.analysis.UnivariateFunction;
/**
@@ -19,7 +19,7 @@ import org.apache.commons.math3.analysis.UnivariateFunction;
public class CustomNBkgSpectrum extends NBkgSpectrum {
public static CustomNBkgSpectrum tritiumBkgSpectrum(ParametricFunction source, double amplitude){
UnivariateFunction differentialBkgFunction = NumassUtils.tritiumBackgroundFunction(amplitude);
UnivariateFunction differentialBkgFunction = NumassUtils.INSTANCE.tritiumBackgroundFunction(amplitude);
UnivariateFunction integralBkgFunction =
(x) -> NumassIntegrator.getDefaultIntegrator()
.integrate(x, 18580d, differentialBkgFunction);

View File

@@ -21,7 +21,7 @@ import hep.dataforge.tables.Table;
import hep.dataforge.tables.TableTransform;
import hep.dataforge.workspace.tasks.AbstractTask;
import hep.dataforge.workspace.tasks.TaskModel;
import inr.numass.utils.NumassUtils;
import inr.numass.NumassUtils;
import java.util.Map;
@@ -79,7 +79,7 @@ public class NumassFitScanSummaryTask extends AbstractTask<Table> {
pars.getValue("trap"));
});
Table res = TableTransform.sort(builder.build(), "m", true);
output(context, nodeName, stream -> NumassUtils.write(stream,meta,res));
output(context, nodeName, stream -> NumassUtils.INSTANCE.write(stream,meta,res));
return res;
}

View File

@@ -1,167 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.utils;
import hep.dataforge.data.DataNode;
import hep.dataforge.data.DataSet;
import hep.dataforge.io.envelopes.DefaultEnvelopeType;
import hep.dataforge.io.envelopes.Envelope;
import hep.dataforge.io.envelopes.EnvelopeBuilder;
import hep.dataforge.io.envelopes.TaglessEnvelopeType;
import hep.dataforge.io.markup.Markedup;
import hep.dataforge.io.markup.SimpleMarkupRenderer;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.values.Values;
import inr.numass.data.api.NumassPoint;
import inr.numass.data.api.NumassSet;
import org.apache.commons.math3.analysis.UnivariateFunction;
import java.io.IOException;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;
import static java.lang.Math.*;
/**
* @author Darksnake
*/
public class NumassUtils {
/**
* Integral beta spectrum background with given amplitude (total count rate
* from)
*
* @param amplitude
* @return
*/
public static UnivariateFunction tritiumBackgroundFunction(double amplitude) {
return (e) -> {
/* pure beta spectrum */
double e0 = 18575d;
double D = e0 - e;//E0-E
if (D <= 0) {
return 0;
}
return amplitude * factor(e) * D * D;
};
}
private static double factor(double E) {
double me = 0.511006E6;
double Etot = E + me;
double pe = sqrt(E * (E + 2d * me));
double ve = pe / Etot;
double yfactor = 2d * 2d * 1d / 137.039 * Math.PI;
double y = yfactor / ve;
double Fn = y / abs(1d - exp(-y));
double Fermi = Fn * (1.002037 - 0.001427 * ve);
double res = Fermi * pe * Etot;
return res * 1E-23;
}
/**
* Evaluate groovy expression using numass point as parameter
*
* @param expression
* @param point
* @return
*/
public static double pointExpression(String expression, Values point) {
Map<String, Object> exprParams = new HashMap<>();
//Adding all point values to expression parameters
point.getNames().forEach(name -> exprParams.put(name, point.getValue(name).value()));
//Adding aliases for commonly used parameters
exprParams.put("T", point.getDouble("length"));
exprParams.put("U", point.getDouble("voltage"));
return ExpressionUtils.function(expression, exprParams);
}
/**
* Write an envelope wrapping given data to given stream
*
* @param stream
* @param meta
* @param dataWriter
* @throws IOException
*/
public static void writeEnvelope(OutputStream stream, Meta meta, Consumer<OutputStream> dataWriter) {
try {
TaglessEnvelopeType.instance.getWriter().write(
stream,
new EnvelopeBuilder()
.setMeta(meta)
.setData(dataWriter)
.build()
);
stream.flush();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public static void writeEnvelope(OutputStream stream, Envelope envelope) {
try {
DefaultEnvelopeType.instance.getWriter().write(stream, envelope);
stream.flush();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public static void write(OutputStream stream, Meta meta, Markedup something) {
writeEnvelope(stream, meta, out -> new SimpleMarkupRenderer(out).render(something.markup(meta)));
}
/**
* Convert numass set to DataNode
*
* @param set
* @return
*/
public static DataNode<Object> setToNode(NumassSet set) {
DataSet.Builder<Object> builder = DataSet.builder();
builder.setName(set.getName());
set.getPoints().forEach(point -> {
Meta pointMeta = new MetaBuilder("point")
.putValue("voltage", point.getVoltage())
.putValue("index", point.meta().getInt("external_meta.point_index", -1))
.putValue("run", point.meta().getString("external_meta.session", ""))
.putValue("group", point.meta().getString("external_meta.group", ""));
String pointName = "point_" + point.meta().getInt("external_meta.point_index", point.hashCode());
builder.putData(pointName, point, pointMeta);
});
set.getHvData().ifPresent(hv -> builder.putData("hv", hv, Meta.empty()));
return builder.build();
}
/**
* Convert numass set to uniform node which consists of points
*
* @param set
* @return
*/
public static DataNode<NumassPoint> pointsToNode(NumassSet set) {
return setToNode(set).checked(NumassPoint.class);
}
}
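For reference, the shape hard-coded in tritiumBackgroundFunction and factor above (and carried over unchanged into the Kotlin port below) is the allowed tritium beta spectrum with endpoint E0 = 18575 eV times a simple Fermi correction; reading the constants as 4·π·α and as the Z = 2 daughter charge is an interpretation, not something the code states:

    S(E) ∝ amplitude · F(E) · p_e · E_tot · (E0 − E)²   for E < E0, and 0 otherwise,
    p_e = sqrt(E · (E + 2·m_e)),   E_tot = E + m_e,   v_e = p_e / E_tot,
    F(E) = [y / |1 − exp(−y)|] · (1.002037 − 0.001427·v_e),   y = 4π / (137.039·v_e) ≈ 2πZα/v_e with Z = 2,

with m_e = 0.511006·10⁶ eV and an overall 1E-23 normalization applied inside factor().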

View File

@@ -0,0 +1,192 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass
import hep.dataforge.data.DataNode
import hep.dataforge.data.DataSet
import hep.dataforge.io.envelopes.DefaultEnvelopeType
import hep.dataforge.io.envelopes.Envelope
import hep.dataforge.io.envelopes.EnvelopeBuilder
import hep.dataforge.io.envelopes.TaglessEnvelopeType
import hep.dataforge.io.markup.Markedup
import hep.dataforge.io.markup.SimpleMarkupRenderer
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.values.Values
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.utils.ExpressionUtils
import org.apache.commons.math3.analysis.UnivariateFunction
import org.jfree.chart.plot.IntervalMarker
import org.jfree.chart.ui.RectangleInsets
import tornadofx.*
import java.awt.Color
import java.awt.Font
import java.io.IOException
import java.io.OutputStream
import java.lang.Math.*
import java.util.*
/**
* @author Darksnake
*/
object NumassUtils {
/**
* Integral beta spectrum background with given amplitude (total count rate
* from)
*
* @param amplitude
* @return
*/
fun tritiumBackgroundFunction(amplitude: Double): UnivariateFunction {
return UnivariateFunction { e: Double ->
/* pure beta spectrum */
val e0 = 18575.0
val D = e0 - e//E0-E
if (D <= 0) {
0.0
} else {
amplitude * factor(e) * D * D
}
}
}
private fun factor(E: Double): Double {
val me = 0.511006E6
val Etot = E + me
val pe = sqrt(E * (E + 2.0 * me))
val ve = pe / Etot
val yfactor = 2.0 * 2.0 * 1.0 / 137.039 * Math.PI
val y = yfactor / ve
val Fn = y / abs(1.0 - exp(-y))
val Fermi = Fn * (1.002037 - 0.001427 * ve)
val res = Fermi * pe * Etot
return res * 1E-23
}
/**
* Write an envelope wrapping given data to given stream
*
* @param stream
* @param meta
* @param dataWriter
* @throws IOException
*/
fun writeEnvelope(stream: OutputStream, meta: Meta, dataWriter: (OutputStream) -> Unit) {
try {
TaglessEnvelopeType.instance.writer.write(
stream,
EnvelopeBuilder()
.setMeta(meta)
.setData(dataWriter)
.build()
)
stream.flush()
} catch (e: IOException) {
throw RuntimeException(e)
}
}
fun writeEnvelope(stream: OutputStream, envelope: Envelope) {
try {
DefaultEnvelopeType.instance.writer.write(stream, envelope)
stream.flush()
} catch (e: IOException) {
throw RuntimeException(e)
}
}
fun write(stream: OutputStream, meta: Meta, something: Markedup) {
writeEnvelope(stream, meta) { out ->
SimpleMarkupRenderer(out).render(something.markup(meta))
}
}
/**
* Convert numass set to DataNode
*
* @param set
* @return
*/
fun setToNode(set: NumassSet): DataNode<Any> {
val builder = DataSet.builder()
builder.setName(set.name)
set.points.forEach { point ->
val pointMeta = MetaBuilder("point")
.putValue("voltage", point.voltage)
.putValue("index", point.meta().getInt("external_meta.point_index", -1))
.putValue("run", point.meta().getString("external_meta.session", ""))
.putValue("group", point.meta().getString("external_meta.group", ""))
val pointName = "point_" + point.meta().getInt("external_meta.point_index", point.hashCode())!!
builder.putData(pointName, point, pointMeta)
}
set.hvData.ifPresent { hv -> builder.putData("hv", hv, Meta.empty()) }
return builder.build()
}
/**
* Convert numass set to uniform node which consists of points
*
* @param set
* @return
*/
fun pointsToNode(set: NumassSet): DataNode<NumassPoint> {
return setToNode(set).checked(NumassPoint::class.java)
}
}
/**
* Evaluate groovy expression using numass point as parameter
*
* @param expression
* @param point
* @return
*/
fun pointExpression(expression: String, point: Values): Double {
val exprParams = HashMap<String, Any>()
//Adding all point values to expression parameters
point.names.forEach { name -> exprParams.put(name, point.getValue(name).value()) }
//Adding aliases for commonly used parameters
exprParams.put("T", point.getDouble("length"))
exprParams.put("U", point.getDouble("voltage"))
return ExpressionUtils.function(expression, exprParams)
}
/**
* Add set markers to time chart
*/
fun addSetMarkers(frame: JFreeChartFrame, sets: Collection<NumassSet>) {
val jfcPlot = frame.chart.xyPlot
val paint = Color(0.0f, 0.0f, 1.0f, 0.1f)
sets.stream().forEach {
val start = it.startTime
val stop = it.meta.optValue("end_time").map { it.timeValue() }
.orElse(start.plusSeconds(3600))
.minusSeconds(30)
val marker = IntervalMarker(start.toEpochMilli().toDouble(), stop.toEpochMilli().toDouble(), paint)
marker.label = it.name
marker.labelFont = Font("Verdana", Font.BOLD, 20)
marker.labelOffset = RectangleInsets(30.0, 30.0, 30.0, 30.0)
runLater { jfcPlot.addDomainMarker(marker) }
}
}
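A short usage sketch for the relocated helpers (illustrative only, not part of the commit; the expression string and the arguments are placeholders). From Kotlin the object and the top-level function are used directly, without the INSTANCE / NumassUtilsKt indirection needed by the Java call sites above:

import hep.dataforge.meta.Meta
import hep.dataforge.tables.Table
import hep.dataforge.values.Values
import inr.numass.NumassUtils
import inr.numass.pointExpression
import java.io.OutputStream

fun dumpAndCorrect(stream: OutputStream, meta: Meta, table: Table, point: Values): Double {
    // a Table is Markedup, so it can be wrapped into a tagless envelope directly
    NumassUtils.write(stream, meta, table)
    // "T" and "U" aliases are added to the expression scope by pointExpression itself
    return pointExpression("1.0 - 1e-6 * U", point)
}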

View File

@@ -7,12 +7,14 @@ import hep.dataforge.kodex.fx.plots.plus
import hep.dataforge.kodex.task
import hep.dataforge.plots.PlotFrame
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.Table
import hep.dataforge.tables.XYAdapter
import inr.numass.NumassUtils
import inr.numass.addSetMarkers
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.api.NumassSet
import inr.numass.utils.NumassUtils
val selectDataTask = task("select") {
model {
@@ -53,6 +55,10 @@ val monitorTableTask = task("monitor") {
}
plots + DataPlot.plot(name, XYAdapter("timestamp", "cr", "crErr"), res)
}.also { frame ->
if (frame is JFreeChartFrame) {
//add set markers
addSetMarkers(frame, data.values)
}
context.io().out("numass.monitor", name, "dfp").use {
NumassUtils.writeEnvelope(it, PlotFrame.Wrapper().wrap(frame))
}