Fixing adapter mess

Alexander Nozik 2017-11-28 15:40:32 +03:00
parent 5bcd1277f0
commit 96d7598355
28 changed files with 197 additions and 191 deletions
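The diff replaces the old XYAdapter / SpectrumDataAdapter API with the new Adapters / SpectrumAdapter one throughout the tree. A minimal before/after sketch of the call-site migration, assembled only from the hunks below (the wrapper class name is hypothetical):

import hep.dataforge.meta.Meta;
import hep.dataforge.tables.Adapters;
import hep.dataforge.tables.ValuesAdapter;
import hep.dataforge.values.Values;
import inr.numass.data.SpectrumAdapter;

class AdapterMigrationSketch {
    // old style: XYAdapter adapter = new XYAdapter(meta); double x = adapter.getX(point).doubleValue();
    static double readX(Meta adapterMeta, Values point) {
        // adapters are now built through the static Adapters factory...
        ValuesAdapter adapter = Adapters.buildAdapter(adapterMeta);
        // ...and axis values are read through static accessors instead of instance methods
        return Adapters.getXValue(adapter, point).doubleValue();
    }

    // old style: new SpectrumDataAdapter("Uset", "CR", "CRerr", "Time")
    static SpectrumAdapter numassAdapter() {
        return new SpectrumAdapter("Uset", "CR", "CRerr", "Time");
    }
}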

View File

@@ -21,7 +21,7 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import static hep.dataforge.tables.XYAdapter.*;
+import static hep.dataforge.tables.Adapters.*;
import static inr.numass.data.api.NumassAnalyzer.*;
/**
@@ -147,8 +147,8 @@ public class NumassDataUtils {
}
@NotNull
-public static SpectrumDataAdapter adapter() {
-return new SpectrumDataAdapter("Uset", "CR", "CRerr", "Time");
+public static SpectrumAdapter adapter() {
+return new SpectrumAdapter("Uset", "CR", "CRerr", "Time");
}
@@ -214,7 +214,7 @@ public class NumassDataUtils {
// * @return
// */
// public static Table setHVScale(ListTable data, double beta) {
-// SpectrumDataAdapter reader = adapter();
+// SpectrumAdapter reader = adapter();
// ListTable.Builder res = new ListTable.Builder(data.getFormat());
// for (Values dp : data) {
// double corrFactor = 1 + beta;
@@ -235,8 +235,8 @@ public class NumassDataUtils {
// * @param dtime
// * @return
// */
-// public static Table correctForDeadTime(ListTable data, SpectrumDataAdapter adapter, double dtime) {
+// public static Table correctForDeadTime(ListTable data, SpectrumAdapter adapter, double dtime) {
-//// SpectrumDataAdapter adapter = adapter();
+//// SpectrumAdapter adapter = adapter();
// ListTable.Builder res = new ListTable.Builder(data.getFormat());
// for (Values dp : data) {
// double corrFactor = 1 / (1 - dtime * adapter.getCount(dp) / adapter.getTime(dp));

View File

@@ -0,0 +1,113 @@
/*
 * Copyright 2015 Alexander Nozik.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package inr.numass.data;

import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.tables.Adapters;
import hep.dataforge.tables.BasicAdapter;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.tables.ValuesAdapter;
import hep.dataforge.values.Value;
import hep.dataforge.values.Values;

import java.util.Optional;
import java.util.stream.Stream;

import static hep.dataforge.tables.Adapters.*;

/**
 * @author Darksnake
 */
public class SpectrumAdapter extends BasicAdapter {

    private static final String POINT_LENGTH_NAME = "time";

    public SpectrumAdapter(Meta meta) {
        super(meta);
    }

    public SpectrumAdapter(String xName, String yName, String yErrName, String measurementTime) {
        super(new MetaBuilder(ValuesAdapter.ADAPTER_KEY)
                .setValue(X_VALUE_KEY, xName)
                .setValue(Y_VALUE_KEY, yName)
                .setValue(Y_ERROR_KEY, yErrName)
                .setValue(POINT_LENGTH_NAME, measurementTime)
                .build()
        );
    }

    public SpectrumAdapter(String xName, String yName, String measurementTime) {
        super(new MetaBuilder(ValuesAdapter.ADAPTER_KEY)
                .setValue(X_VALUE_KEY, xName)
                .setValue(Y_VALUE_KEY, yName)
                .setValue(POINT_LENGTH_NAME, measurementTime)
                .build()
        );
    }

    public double getTime(Values point) {
        return this.optComponent(point, POINT_LENGTH_NAME).map(Value::doubleValue).orElse(1d);
    }

    public Values buildSpectrumDataPoint(double x, long count, double t) {
        return ValueMap.of(new String[]{getComponentName(X_VALUE_KEY), getComponentName(Y_VALUE_KEY),
                        getComponentName(POINT_LENGTH_NAME)},
                x, count, t);
    }

    public Values buildSpectrumDataPoint(double x, long count, double countErr, double t) {
        return ValueMap.of(new String[]{getComponentName(X_VALUE_KEY), getComponentName(Y_VALUE_KEY),
                        getComponentName(Y_ERROR_KEY), getComponentName(POINT_LENGTH_NAME)},
                x, count, countErr, t);
    }

    @Override
    public Optional<Value> optComponent(Values values, String component) {
        switch (component) {
            case "count":
                return super.optComponent(values, Y_VALUE_KEY);
            case Y_VALUE_KEY:
                return super.optComponent(values, Y_VALUE_KEY)
                        .map(it -> it.doubleValue() / getTime(values))
                        .map(Value::of);
            case Y_ERROR_KEY:
                Optional<Value> err = super.optComponent(values, Y_ERROR_KEY);
                if (err.isPresent()) {
                    return Optional.of(Value.of(err.get().doubleValue() / getTime(values)));
                } else {
                    double y = getComponent(values, Y_VALUE_KEY).doubleValue();
                    if (y < 0) {
                        return Optional.empty();
                    } else if (y == 0) {
                        //avoid infinite weights
                        return Optional.of(Value.of(1d / getTime(values)));
                    } else {
                        return Optional.of(Value.of(Math.sqrt(y) / getTime(values)));
                    }
                }
            default:
                return super.optComponent(values, component);
        }
    }

    @Override
    public Stream<String> listComponents() {
        return Stream.concat(super.listComponents(), Stream.of(Adapters.Y_VALUE_KEY, Adapters.Y_ERROR_KEY)).distinct();
    }
}
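For reference, a small usage sketch of the new class, assuming only what is visible in this commit (the column names "Uset", "CR", "CRerr", "Time" are the ones NumassDataUtils.adapter() configures above, the demo class is hypothetical, and the commented results assume BasicAdapter resolves component keys to the configured column names as the constructors suggest):

import hep.dataforge.tables.Adapters;
import hep.dataforge.values.Value;
import hep.dataforge.values.Values;
import inr.numass.data.SpectrumAdapter;

class SpectrumAdapterSketch {
    static void demo() {
        SpectrumAdapter adapter = new SpectrumAdapter("Uset", "CR", "CRerr", "Time");

        // store a raw count together with the measurement time
        Values point = adapter.buildSpectrumDataPoint(14000.0, 1200, 60.0);

        // Y_VALUE_KEY is served as a count rate (count / time), while "count" stays raw
        double rate = adapter.optComponent(point, Adapters.Y_VALUE_KEY)
                .map(Value::doubleValue).orElse(Double.NaN); // expected 20.0
        double raw = adapter.optComponent(point, "count")
                .map(Value::doubleValue).orElse(Double.NaN); // expected 1200.0
        double time = adapter.getTime(point);                // expected 60.0
    }
}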

View File

@@ -1,116 +0,0 @@
/*
 * Copyright 2015 Alexander Nozik.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package inr.numass.data;

import hep.dataforge.exceptions.DataFormatException;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.tables.ValuesAdapter;
import hep.dataforge.tables.XYAdapter;
import hep.dataforge.values.Value;
import hep.dataforge.values.Values;

import java.util.Objects;

/**
 * @author Darksnake
 */
public class SpectrumDataAdapter extends XYAdapter {

    private static final String POINT_LENGTH_NAME = "time";

    public SpectrumDataAdapter() {
    }

    public SpectrumDataAdapter(Meta meta) {
        super(meta);
    }

    public SpectrumDataAdapter(String xName, String yName, String yErrName, String measurementTime) {
        super(new MetaBuilder(ValuesAdapter.ADAPTER_KEY)
                .setValue(X_VALUE_KEY, xName)
                .setValue(Y_VALUE_KEY, yName)
                .setValue(Y_ERROR_KEY, yErrName)
                .setValue(POINT_LENGTH_NAME, measurementTime)
                .build()
        );
    }

    public SpectrumDataAdapter(String xName, String yName, String measurementTime) {
        super(new MetaBuilder(ValuesAdapter.ADAPTER_KEY)
                .setValue(X_VALUE_KEY, xName)
                .setValue(Y_VALUE_KEY, yName)
                .setValue(POINT_LENGTH_NAME, measurementTime)
                .build()
        );
    }

    public double getTime(Values point) {
        return this.getComponent(point, POINT_LENGTH_NAME, 1d).doubleValue();
    }

    public Values buildSpectrumDataPoint(double x, long count, double t) {
        return ValueMap.of(new String[]{nameFor(X_VALUE_KEY), nameFor(Y_VALUE_KEY),
                        nameFor(POINT_LENGTH_NAME)},
                x, count, t);
    }

    public Values buildSpectrumDataPoint(double x, long count, double countErr, double t) {
        return ValueMap.of(new String[]{nameFor(X_VALUE_KEY), nameFor(Y_VALUE_KEY),
                        nameFor(Y_ERROR_KEY), nameFor(POINT_LENGTH_NAME)},
                x, count, countErr, t);
    }

    @Override
    public boolean providesYError(Values point) {
        return true;
    }

    public long getCount(Values point) {
        return super.getY(point).numberValue().longValue();
    }

    @Override
    public Value getValue(Values point, String axis) {
        if (Objects.equals(axis, XYAdapter.Y_AXIS)) {
            return Value.of(getComponent(point, Y_VALUE_KEY).doubleValue() / getTime(point));
        } else {
            return super.getValue(point, axis);
        }
    }

    @Override
    public Value getError(Values point, String axis) {
        if (Objects.equals(axis, XYAdapter.Y_AXIS)) {
            if (super.providesYError(point)) {
                return Value.of(getComponent(point, Y_ERROR_KEY).doubleValue() / getTime(point));
            } else {
                double y = getComponent(point, Y_VALUE_KEY).doubleValue();
                if (y < 0) {
                    throw new DataFormatException();
                } else if (y == 0) {
                    //avoid infinite weights
                    return Value.of(1d / getTime(point));
                } else {
                    return Value.of(Math.sqrt(y) / getTime(point));
                }
            }
        } else {
            return super.getError(point, axis);
        }
    }
}

View File

@@ -11,7 +11,7 @@ import org.jetbrains.annotations.Nullable;
import java.util.Comparator;
import java.util.stream.Stream;
-import static hep.dataforge.tables.XYAdapter.*;
+import static hep.dataforge.tables.Adapters.*;
import static inr.numass.data.api.NumassPoint.HV_KEY;
/**

View File

@@ -19,7 +19,7 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Stream;
-import static hep.dataforge.tables.XYAdapter.*;
+import static hep.dataforge.tables.Adapters.*;
import static inr.numass.data.api.NumassPoint.HV_KEY;
/**

View File

@@ -12,7 +12,7 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.IntStream;
import java.util.stream.Stream;
-import static hep.dataforge.tables.XYAdapter.*;
+import static hep.dataforge.tables.Adapters.*;
import static inr.numass.data.api.NumassPoint.HV_KEY;
/**

View File

@@ -22,7 +22,7 @@ import hep.dataforge.stat.fit.MINUITPlugin
import hep.dataforge.stat.fit.ParamSet
import hep.dataforge.stat.models.XYModel
import hep.dataforge.tables.ListTable
-import inr.numass.data.SpectrumDataAdapter
+import inr.numass.data.SpectrumAdapter
import inr.numass.models.BetaSpectrum
import inr.numass.models.ModularSpectrum
import inr.numass.models.NBkgSpectrum
@@ -51,7 +51,7 @@ sp.setCaching(false);
// ModularTritiumSpectrum beta = new ModularTritiumSpectrum(2.28e-4, 18395d, 18580d, "d:\\PlayGround\\FS.txt");
NBkgSpectrum spectrum = new NBkgSpectrum(sp);
-XYModel model = new XYModel("tritium", spectrum, new SpectrumDataAdapter());
+XYModel model = new XYModel("tritium", spectrum, new SpectrumAdapter());
ParamSet allPars = new ParamSet();

View File

@@ -24,7 +24,7 @@ import hep.dataforge.stat.fit.FitState
import hep.dataforge.stat.fit.ParamSet
import hep.dataforge.stat.models.XYModel
import inr.numass.NumassPlugin
-import inr.numass.data.SpectrumDataAdapter
+import inr.numass.data.SpectrumAdapter
import inr.numass.data.SpectrumGenerator
import inr.numass.models.NBkgSpectrum
import inr.numass.models.sterile.SterileNeutrinoSpectrum
@@ -47,7 +47,7 @@ SterileNeutrinoSpectrum sp = new SterileNeutrinoSpectrum(Global.instance(), Meta
//beta.setCaching(false);
NBkgSpectrum spectrum = new NBkgSpectrum(sp);
-XYModel model = new XYModel(Meta.empty(), new SpectrumDataAdapter(), spectrum);
+XYModel model = new XYModel(Meta.empty(), new SpectrumAdapter(), spectrum);
ParamSet allPars = new ParamSet();
@@ -81,7 +81,7 @@ SpectrumGenerator generator = new SpectrumGenerator(model, allPars, 12316);
def data = generator.generateData(DataModelUtils.getUniformSpectrumConfiguration(14000, 18500, 604800 / 100 * 100, 100));
-//data = TritiumUtils.correctForDeadTime(data, new SpectrumDataAdapter(), 10e-9);
+//data = TritiumUtils.correctForDeadTime(data, new SpectrumAdapter(), 10e-9);
// data = data.filter("X", Value.of(15510.0), Value.of(18610.0));
// allPars.setParValue("X", 0.4);

View File

@@ -20,7 +20,7 @@ import hep.dataforge.io.FittingIOUtils
import hep.dataforge.stat.fit.FitManager
import hep.dataforge.stat.fit.ParamSet
import hep.dataforge.stat.models.XYModel
-import inr.numass.data.SpectrumDataAdapter
+import inr.numass.data.SpectrumAdapter
import inr.numass.models.GunSpectrum
import inr.numass.models.NBkgSpectrum
@@ -35,7 +35,7 @@ FitManager fm = new FitManager();
GunSpectrum gsp = new GunSpectrum();
NBkgSpectrum spectrum = new NBkgSpectrum(gsp);
-XYModel model = new XYModel("gun", spectrum, new SpectrumDataAdapter());
+XYModel model = new XYModel("gun", spectrum, new SpectrumAdapter());
ParamSet allPars = new ParamSet()
.setPar("N", 1e3, 1e2)

View File

@@ -22,7 +22,7 @@ import hep.dataforge.meta.Meta
import hep.dataforge.stat.fit.ParamSet
import hep.dataforge.stat.models.XYModel
import hep.dataforge.stat.parametric.ParametricFunction
-import inr.numass.data.SpectrumDataAdapter
+import inr.numass.data.SpectrumAdapter
import inr.numass.models.NBkgSpectrum
import inr.numass.models.sterile.SterileNeutrinoSpectrum
@@ -47,7 +47,7 @@ Meta cfg = new GrindMetaBuilder().meta() {
ParametricFunction beta = new SterileNeutrinoSpectrum(Global.instance(), cfg);
NBkgSpectrum spectrum = new NBkgSpectrum(beta);
-XYModel model = new XYModel(spectrum, new SpectrumDataAdapter());
+XYModel model = new XYModel(spectrum, new SpectrumAdapter());
ParamSet allPars = new ParamSet();
@@ -66,7 +66,7 @@ FittingIOUtils.printSpectrum(Global.out(), spectrum, allPars, 14000, 18600.0, 40
//
//ListTable data = generator.generateData(DataModelUtils.getUniformSpectrumConfiguration(14000d, 18500, 2000, 90));
//
-//data = NumassUtils.correctForDeadTime(data, new SpectrumDataAdapter(), 1e-8);
+//data = NumassUtils.correctForDeadTime(data, new SpectrumAdapter(), 1e-8);
//// data = data.filter("X", Value.of(15510.0), Value.of(18610.0));
//// allPars.setParValue("X", 0.4);
//FitState state = new FitState(data, model, allPars);

View File

@@ -21,7 +21,7 @@ import hep.dataforge.stat.fit.MINUITPlugin
import hep.dataforge.stat.fit.ParamSet
import hep.dataforge.stat.models.XYModel
import hep.dataforge.tables.ListTable
-import inr.numass.data.SpectrumDataAdapter
+import inr.numass.data.SpectrumAdapter
import inr.numass.data.SpectrumGenerator
import inr.numass.models.BetaSpectrum
import inr.numass.models.ModularSpectrum
@@ -49,7 +49,7 @@ ModularSpectrum beta = new ModularSpectrum(new BetaSpectrum(), resolution, 18395
beta.setCaching(false);
NBkgSpectrum spectrum = new NBkgSpectrum(beta);
-XYModel model = new XYModel("tritium", spectrum, new SpectrumDataAdapter());
+XYModel model = new XYModel("tritium", spectrum, new SpectrumAdapter());
ParamSet allPars = new ParamSet();

View File

@@ -21,7 +21,7 @@ import hep.dataforge.stat.fit.MINUITPlugin
import hep.dataforge.stat.fit.ParamSet
import hep.dataforge.stat.models.XYModel
import hep.dataforge.tables.ListTable
-import inr.numass.data.SpectrumDataAdapter
+import inr.numass.data.SpectrumAdapter
import inr.numass.data.SpectrumGenerator
import inr.numass.models.BetaSpectrum
import inr.numass.models.ModularSpectrum
@@ -49,7 +49,7 @@ ModularSpectrum beta = new ModularSpectrum(new BetaSpectrum(), resolution, 13490
beta.setCaching(false);
NBkgSpectrum spectrum = new NBkgSpectrum(beta);
-XYModel model = new XYModel("tritium", spectrum, new SpectrumDataAdapter());
+XYModel model = new XYModel("tritium", spectrum, new SpectrumAdapter());
ParamSet allPars = new ParamSet();

View File

@@ -14,7 +14,7 @@ import hep.dataforge.stat.models.XYModel
import hep.dataforge.stat.parametric.ParametricFunction
import hep.dataforge.tables.Table
import inr.numass.NumassPlugin
-import inr.numass.data.SpectrumDataAdapter
+import inr.numass.data.SpectrumAdapter
import inr.numass.data.SpectrumGenerator
import inr.numass.models.NBkgSpectrum
import inr.numass.models.NumassModelsKt
@@ -33,7 +33,7 @@ new GrindShell(ctx).eval {
def response = new Gauss(5.0)
ParametricFunction spectrum = NumassModelsKt.convolute(beta, response)
-def model = new XYModel(Meta.empty(), new SpectrumDataAdapter(), new NBkgSpectrum(spectrum));
+def model = new XYModel(Meta.empty(), new SpectrumAdapter(Meta.empty()), new NBkgSpectrum(spectrum));
ParamSet params = morph(ParamSet, [:], "params") {
N(value: 1e+12, err: 30, lower: 0)
@@ -67,7 +67,7 @@ new GrindShell(ctx).eval {
ph.plot(data: (2000..19500).step(50).collectEntries { [it, model.value(it, params)] }, name: "spectrum-mod")
.configure(showLine: true, showSymbol: false, showErrors: false, thickness: 2, connectionType: "spline", color: "green")
-ph.plot(data: data, adapter: new SpectrumDataAdapter())
+ph.plot(data: data, adapter: new SpectrumAdapter())
.configure(color: "blue")
FitState state = new FitState(data, model, params);

View File

@@ -9,9 +9,9 @@ import hep.dataforge.grind.GrindShell
import hep.dataforge.grind.helpers.PlotHelper
import hep.dataforge.plots.PlotFrame
import hep.dataforge.plots.data.DataPlot
+import hep.dataforge.tables.Adapters
import hep.dataforge.tables.ColumnTable
import hep.dataforge.tables.Table
-import hep.dataforge.tables.XYAdapter
import inr.numass.NumassPlugin
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.SmartAnalyzer
@@ -57,7 +57,7 @@ new GrindShell(ctx).eval {
NumassAnalyzer analyzer = new SmartAnalyzer();
-def adapter = new XYAdapter(NumassAnalyzer.CHANNEL_KEY, NumassAnalyzer.COUNT_RATE_KEY, NumassAnalyzer.COUNT_RATE_ERROR_KEY)
+def adapter = Adapters.buildXYAdapter(NumassAnalyzer.CHANNEL_KEY, NumassAnalyzer.COUNT_RATE_KEY, NumassAnalyzer.COUNT_RATE_ERROR_KEY)
def t0 = 15

View File

@@ -17,9 +17,9 @@ import hep.dataforge.io.ColumnedDataWriter
import hep.dataforge.meta.Meta
import hep.dataforge.plots.PlotGroup
import hep.dataforge.plots.data.DataPlot
+import hep.dataforge.tables.Adapters
import hep.dataforge.tables.Table
import hep.dataforge.tables.TableTransform
-import hep.dataforge.tables.XYAdapter
import inr.numass.NumassPlugin
import inr.numass.data.NumassDataUtils
import javafx.application.Platform
@@ -65,7 +65,7 @@ shell.eval {
//Showing selected points
def showPoints = { Map points, int binning = 20, int loChannel = 300, int upChannel = 2000 ->
def plotGroup = new PlotGroup("points");
-def adapter = new XYAdapter(CHANNEL_KEY, COUNT_RATE_KEY)
+def adapter = Adapters.buildXYAdapter(CHANNEL_KEY, COUNT_RATE_KEY)
points.each {
plotGroup.add(
DataPlot.plot(
@@ -105,7 +105,12 @@ shell.eval {
}
Platform.runLater {
-(plots as PlotHelper).plot(correctionTable, new XYAdapter("U", "correction"), "upper_${xHigh}", "upper")
+(plots as PlotHelper).plot(
+data: correctionTable,
+adapter: Adapters.buildXYAdapter("U", "correction"),
+name: "upper_${xHigh}",
+frame: "upper"
+)
}
}
@@ -126,7 +131,12 @@ shell.eval {
)
Platform.runLater {
-(plots as PlotHelper).plot(correctionTable, new XYAdapter("U", "correction"), "lower_${xLow}", "lower")
+(plots as PlotHelper).plot(
+data: correctionTable,
+adapter: Adapters.buildXYAdapter("U", "correction"),
+name: "lower_${xLow}",
+frame: "lower"
+)
}
}
}

View File

@@ -29,8 +29,9 @@ import hep.dataforge.plots.data.XYFunctionPlot;
import hep.dataforge.stat.fit.FitResult;
import hep.dataforge.stat.fit.FitState;
import hep.dataforge.stat.models.XYModel;
+import hep.dataforge.tables.Adapters;
import hep.dataforge.tables.NavigableValuesSource;
-import hep.dataforge.tables.XYAdapter;
+import hep.dataforge.tables.ValuesAdapter;
import java.util.function.Function;
import java.util.stream.StreamSupport;
@@ -55,9 +56,9 @@ public class PlotFitResultAction extends OneToOneAction<FitResult, FitResult> {
}
XYModel model = (XYModel) state.getModel();
-XYAdapter adapter;
+ValuesAdapter adapter;
if (metaData.hasMeta("adapter")) {
-adapter = new XYAdapter(metaData.getMeta("adapter"));
+adapter = Adapters.buildAdapter(metaData.getMeta("adapter"));
} else if (state.getModel() instanceof XYModel) {
adapter = model.getAdapter();
} else {
@@ -74,7 +75,7 @@ public class PlotFitResultAction extends OneToOneAction<FitResult, FitResult> {
fit.setSmoothing(true);
// ensuring all data points are calculated explicitly
StreamSupport.stream(data.spliterator(), false)
-.map(dp -> adapter.getX(dp).doubleValue()).sorted().forEach(fit::calculateIn);
+.map(dp -> Adapters.getXValue(adapter, dp).doubleValue()).sorted().forEach(fit::calculateIn);
frame.add(fit);

View File

@@ -15,9 +15,11 @@
*/
package inr.numass.data;
+import hep.dataforge.meta.Meta;
import hep.dataforge.stat.fit.ParamSet;
import hep.dataforge.stat.models.Generator;
import hep.dataforge.stat.models.XYModel;
+import hep.dataforge.tables.Adapters;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.Table;
import hep.dataforge.values.Values;
@@ -43,7 +45,7 @@ public class SpectrumGenerator implements Generator {
private RandomDataGenerator generator;
private ParamSet params;
private XYModel source;
-private SpectrumDataAdapter adapter = new SpectrumDataAdapter();
+private SpectrumAdapter adapter = new SpectrumAdapter(Meta.empty());
public SpectrumGenerator(XYModel source, ParamSet params, int seed) {
this.source = source;
@@ -65,7 +67,7 @@ public class SpectrumGenerator implements Generator {
@Override
public Table generateData(Iterable<Values> config) {
-ListTable.Builder res = new ListTable.Builder(adapter.getFormat());
+ListTable.Builder res = new ListTable.Builder(Adapters.getXYFormat(adapter));
for (Values aConfig : config) {
res.row(this.generateDataPoint(aConfig));
}
@@ -80,7 +82,7 @@ public class SpectrumGenerator implements Generator {
* @return
*/
public Table generateExactData(Iterable<Values> config) {
-ListTable.Builder res = new ListTable.Builder(adapter.getFormat());
+ListTable.Builder res = new ListTable.Builder(Adapters.getXYFormat(adapter));
for (Values aConfig : config) {
res.row(this.generateExactDataPoint(aConfig));
}
@@ -162,16 +164,16 @@ public class SpectrumGenerator implements Generator {
}
-public SpectrumDataAdapter getAdapter() {
+public SpectrumAdapter getAdapter() {
return adapter;
}
-public void setAdapter(SpectrumDataAdapter adapter) {
+public void setAdapter(SpectrumAdapter adapter) {
this.adapter = adapter;
}
private double getX(Values point) {
-return adapter.getX(point).doubleValue();
+return Adapters.getXValue(adapter,point).doubleValue();
}
public void setGeneratorType(GeneratorType type) { public void setGeneratorType(GeneratorType type) {

View File

@@ -17,6 +17,7 @@ package inr.numass.data;
import hep.dataforge.maths.NamedMatrix;
import hep.dataforge.stat.parametric.ParametricFunction;
+import hep.dataforge.tables.Adapters;
import hep.dataforge.tables.ListTable;
import hep.dataforge.values.Values;
import org.apache.commons.math3.analysis.UnivariateFunction;
@@ -56,7 +57,7 @@ public class SpectrumInformation {
* @return
*/
public NamedMatrix getInformationMatrix(Values set, ListTable data, String... parNames) {
-SpectrumDataAdapter reader = NumassDataUtils.adapter();
+SpectrumAdapter adapter = NumassDataUtils.adapter();
String[] names = parNames;
if (names.length == 0) {
@@ -72,7 +73,7 @@ public class SpectrumInformation {
* The whole matrix has to be computed at once for each point; then the number
* of derivative calls is exactly 1.
*/
-res = res.add(getPointInfoMatrix(set, reader.getX(dp).doubleValue(), reader.getTime(dp), names).getMatrix());
+res = res.add(getPointInfoMatrix(set, Adapters.getXValue(adapter,dp).doubleValue(), adapter.getTime(dp), names).getMatrix());
}
return new NamedMatrix(names, res);

View File

@@ -19,10 +19,11 @@ import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.Table;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Values;
-import inr.numass.data.SpectrumDataAdapter;
import java.util.Scanner;
+import static hep.dataforge.tables.Adapters.X_AXIS;
/**
*
* @author Darksnake
@@ -31,7 +32,7 @@ public class DataModelUtils {
public static Table getUniformSpectrumConfiguration(double from, double to, double time, int numpoints) {
assert to != from;
-final String[] list = {SpectrumDataAdapter.X_AXIS, "time"};
+final String[] list = {X_AXIS, "time"};
ListTable.Builder res = new ListTable.Builder(list);
for (int i = 0; i < numpoints; i++) {
@@ -45,7 +46,7 @@ public class DataModelUtils {
}
public static Table getSpectrumConfigurationFromResource(String resource) {
-final String[] list = {SpectrumDataAdapter.X_AXIS, "time"};
+final String[] list = {X_AXIS, "time"};
ListTable.Builder res = new ListTable.Builder(list);
Scanner scan = new Scanner(DataModelUtils.class.getResourceAsStream(resource));
while (scan.hasNextLine()) {

View File

@@ -16,11 +16,13 @@
package inr.numass.utils;
import hep.dataforge.context.Global;
+import hep.dataforge.meta.Meta;
+import hep.dataforge.tables.Adapters;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.Table;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Values;
-import inr.numass.data.SpectrumDataAdapter;
+import inr.numass.data.SpectrumAdapter;
import java.io.IOException;
import java.nio.file.Path;
@@ -58,8 +60,8 @@ public class OldDataReader {
}
public static Table readData(String path, double Elow) {
-SpectrumDataAdapter factory = new SpectrumDataAdapter();
-ListTable.Builder res = new ListTable.Builder(factory.getFormat());
+SpectrumAdapter factory = new SpectrumAdapter(Meta.empty());
+ListTable.Builder res = new ListTable.Builder(Adapters.getXYFormat(factory));
Path file = Global.instance().getIo().getFile(path);
double x;
int count;
@@ -110,8 +112,8 @@ public class OldDataReader {
}
public static Table readDataAsGun(String path, double Elow) {
-SpectrumDataAdapter factory = new SpectrumDataAdapter();
-ListTable.Builder res = new ListTable.Builder(factory.getFormat());
+SpectrumAdapter factory = new SpectrumAdapter(Meta.empty());
+ListTable.Builder res = new ListTable.Builder(Adapters.getXYFormat(factory));
Path file = Global.instance().getIo().getFile(path);
double x;
long count;
@@ -143,8 +145,8 @@ public class OldDataReader {
}
public static Table readSpectrumData(String path) {
-SpectrumDataAdapter factory = new SpectrumDataAdapter();
-ListTable.Builder res = new ListTable.Builder(factory.getFormat());
+SpectrumAdapter factory = new SpectrumAdapter(Meta.empty());
+ListTable.Builder res = new ListTable.Builder(Adapters.getXYFormat(factory));
Path file = Global.instance().getIo().getFile(path);
double x;
double count;

View File

@@ -26,8 +26,8 @@ import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.stat.models.ModelManager
import hep.dataforge.stat.models.WeightedXYModel
import hep.dataforge.stat.models.XYModel
+import hep.dataforge.tables.Adapters
import hep.dataforge.tables.ValuesAdapter
-import hep.dataforge.tables.XYAdapter
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.api.NumassPoint
import inr.numass.models.*
@@ -259,11 +259,11 @@ class NumassPlugin : BasicPlugin() {
}
}
-private fun getAdapter(an: Meta): XYAdapter {
+private fun getAdapter(an: Meta): ValuesAdapter {
return if (an.hasMeta(ValuesAdapter.ADAPTER_KEY)) {
-XYAdapter(an.getMeta(ValuesAdapter.ADAPTER_KEY))
+Adapters.buildAdapter(an.getMeta(ValuesAdapter.ADAPTER_KEY))
} else {
-XYAdapter(NumassPoint.HV_KEY, NumassAnalyzer.COUNT_RATE_KEY, NumassAnalyzer.COUNT_RATE_ERROR_KEY)
+Adapters.buildXYAdapter(NumassPoint.HV_KEY, NumassAnalyzer.COUNT_RATE_KEY, NumassAnalyzer.COUNT_RATE_ERROR_KEY)
}
}
} }

View File

@@ -9,8 +9,8 @@ import hep.dataforge.maths.histogram.UnivariateHistogram
import hep.dataforge.meta.Laminate
import hep.dataforge.plots.PlotPlugin
import hep.dataforge.plots.data.DataPlot
+import hep.dataforge.tables.Adapters
import hep.dataforge.tables.Table
-import hep.dataforge.tables.XYAdapter
import hep.dataforge.values.ValueType
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassAnalyzer
@@ -122,7 +122,7 @@ class TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>() {
}
statPlot.append(
-XYAdapter.DEFAULT_ADAPTER.buildXYDataPoint(
+Adapters.buildXYDataPoint(
t / 1000.0,
result.getDouble("cr") / norm,
result.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY) / norm

View File

@@ -9,8 +9,8 @@ import hep.dataforge.maths.histogram.UnivariateHistogram
import hep.dataforge.meta.Laminate
import hep.dataforge.plots.PlotPlugin
import hep.dataforge.plots.data.DataPlot
+import hep.dataforge.tables.Adapters
import hep.dataforge.tables.Table
-import hep.dataforge.tables.XYAdapter
import hep.dataforge.values.ValueType
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassAnalyzer
@@ -122,7 +122,7 @@ class TimeSpectrumAction : OneToOneAction<NumassPoint, Table>() {
}
statPlot.append(
-XYAdapter.DEFAULT_ADAPTER.buildXYDataPoint(
+Adapters.buildXYDataPoint(
t / 1000.0,
result.getDouble("cr") / norm,
result.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY) / norm

View File

@@ -20,10 +20,7 @@ import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.stat.fit.FitHelper
import hep.dataforge.stat.fit.FitResult
import hep.dataforge.stat.models.XYModel
-import hep.dataforge.tables.ListTable
-import hep.dataforge.tables.Table
-import hep.dataforge.tables.TableTransform
-import hep.dataforge.tables.XYAdapter
+import hep.dataforge.tables.*
import hep.dataforge.values.ValueType
import hep.dataforge.values.Values
import inr.numass.NumassUtils
@@ -78,7 +75,7 @@ val monitorTableTask = task("monitor") {
"yAxis.title" to "Count rate"
"yAxis.units" to "Hz"
}
-plots + DataPlot.plot(name, XYAdapter("timestamp", "cr", "crErr"), res)
+plots + DataPlot.plot(name, Adapters.buildXYAdapter("timestamp", "cr", "crErr"), res)
}.also { frame ->
if (frame is JFreeChartFrame) {
//add set markers
@@ -256,7 +253,7 @@ val plotFitTask = task("plotFit") {
val data = input.data
-val adapter: XYAdapter = fitModel.adapter
+val adapter: ValuesAdapter = fitModel.adapter
val function = { x: Double -> fitModel.spectrum.value(x, input.parameters) }
@@ -273,7 +270,7 @@ val plotFitTask = task("plotFit") {
// ensuring all data points are calculated explicitly
StreamSupport.stream<Values>(data.spliterator(), false)
-.map { dp -> adapter.getX(dp).doubleValue() }.sorted().forEach { fit.calculateIn(it) }
+.map { dp -> Adapters.getXValue(adapter,dp).doubleValue() }.sorted().forEach { fit.calculateIn(it) }
frame.add(DataPlot.plot("data", adapter, data))

View File

@@ -11,7 +11,6 @@ import hep.dataforge.plots.PlotFrame
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.tables.Table
-import hep.dataforge.tables.XYAdapter
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.SimpleAnalyzer
import inr.numass.data.api.NumassAnalyzer

View File

@@ -13,7 +13,6 @@ import hep.dataforge.storage.api.TableLoader
import hep.dataforge.storage.api.ValueIndex
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.Table
-import hep.dataforge.tables.XYAdapter
import hep.dataforge.values.Values
import javafx.collections.FXCollections
import javafx.collections.MapChangeListener

View File

@@ -3,14 +3,12 @@ package inr.numass.viewer
import hep.dataforge.fx.dfIcon
import hep.dataforge.fx.plots.PlotContainer
import hep.dataforge.fx.runGoal
-import hep.dataforge.fx.ui
import hep.dataforge.kodex.configure
import hep.dataforge.meta.Meta
import hep.dataforge.plots.PlotFrame
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.tables.Table
-import hep.dataforge.tables.XYAdapter
import inr.numass.data.analyzers.SimpleAnalyzer
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.api.NumassPoint

View File

@@ -4,7 +4,6 @@ import hep.dataforge.fx.plots.PlotContainer
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.tables.ValueMap
-import hep.dataforge.tables.XYAdapter
import tornadofx.*
import java.util.*