[no commit message]

Alexander Nozik 2016-02-23 19:21:45 +03:00
parent ecf91cb442
commit 7c0a61bffb
31 changed files with 401 additions and 389 deletions
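The commit carries no message, but the hunks below show a single mechanical refactoring: several types in the hep.dataforge.data package lost their Data prefix (DataFormat → Format, DataFormatBuilder → FormatBuilder, DataSet → PointSet, ListDataSet → ListPointSet, DataAdapter → PointAdapter, XYDataAdapter → XYAdapter, DataParser → PointParser), and the numass code is updated to the new names; call shapes are unchanged. A minimal before/after sketch of the migration, using only calls that appear in the hunks below (the wrapper class FormatMigration is hypothetical, added here for illustration):

// Old names, before this commit:
//   DataFormat format = new DataFormatBuilder().addTime("timestamp").build();

import hep.dataforge.data.Format;
import hep.dataforge.data.FormatBuilder;

class FormatMigration {
    // New names, after this commit: build a point format with a time column
    // and one numeric column per peak, as the MspDevice hunk does below.
    static Format buildFormat(Iterable<String> peakNames) {
        FormatBuilder builder = new FormatBuilder().addTime("timestamp");
        for (String peakName : peakNames) {
            builder.addNumber(peakName);
        }
        return builder.build();
    }
}

Note also that four files below (ESpectrum, SpectrumGenerator, OldDataReader, TritiumUtils) gain the same static import three times; duplicate imports are legal in Java, so this compiles, but the repetition looks like an artifact of the automated rename.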

PKT8Device.java

@@ -21,7 +21,7 @@ import hep.dataforge.control.collectors.RegularPointCollector;
 import hep.dataforge.control.measurements.DataDevice;
 import hep.dataforge.control.ports.PortHandler;
 import hep.dataforge.control.ports.TcpPortHandler;
-import hep.dataforge.data.DataFormatBuilder;
+import hep.dataforge.data.FormatBuilder;
 import hep.dataforge.exceptions.ControlException;
 import hep.dataforge.exceptions.PortException;
 import hep.dataforge.exceptions.StorageException;
@@ -85,7 +85,7 @@ public class PKT8Device extends DataDevice<PKT8Device.PKT8Measurement> implement
 String suffix = Integer.toString((int) Instant.now().toEpochMilli());
 // Building data format
-DataFormatBuilder formatBuilder = new DataFormatBuilder()
+FormatBuilder formatBuilder = new FormatBuilder()
 .addTime("timestamp");
 List<String> names = new ArrayList<>();

MspDevice.java

@@ -22,8 +22,8 @@ import hep.dataforge.control.measurements.AbstractMeasurement;
 import hep.dataforge.control.measurements.Measurement;
 import hep.dataforge.control.ports.PortHandler;
 import hep.dataforge.control.ports.TcpPortHandler;
-import hep.dataforge.data.DataFormat;
-import hep.dataforge.data.DataFormatBuilder;
+import hep.dataforge.data.Format;
+import hep.dataforge.data.FormatBuilder;
 import hep.dataforge.data.DataPoint;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.exceptions.ControlException;
@@ -387,12 +387,12 @@ public class MspDevice extends SingleMeasurementDevice implements PortHandler.Po
 throw new IllegalStateException("Peak map is not initialized");
 }
-DataFormatBuilder builder = new DataFormatBuilder().addTime("timestamp");
+FormatBuilder builder = new FormatBuilder().addTime("timestamp");
 for (String peakName : this.peakMap.values()) {
 builder.addNumber(peakName);
 }
-DataFormat format = builder.build();
+Format format = builder.build();
 //TODO redo this!!!
 String run = meta().getString("numass.run", "");

VACFileReader.java

@@ -15,7 +15,6 @@
 */
 package inr.numass.readvac;
-import hep.dataforge.data.DataParser;
 import hep.dataforge.data.DataPoint;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.io.LineIterator;
@@ -31,6 +30,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import hep.dataforge.data.PointParser;
 /**
 *
@@ -55,7 +55,7 @@ public class VACFileReader implements Iterator<DataPoint> {
 }
 private final LineIterator iterator;
-private final DataParser parser;
+private final PointParser parser;
 private VACFileReader(File vacFile) throws FileNotFoundException {
 this.iterator = new LineIterator(vacFile);
@@ -63,7 +63,7 @@ public class VACFileReader implements Iterator<DataPoint> {
 parser = new LikhovidVACParser();
 }
-public VACFileReader(File vacFile, DataParser parser) throws FileNotFoundException {
+public VACFileReader(File vacFile, PointParser parser) throws FileNotFoundException {
 this.iterator = new LineIterator(vacFile);
 iterator.next();
 this.parser = parser;
@@ -117,7 +117,7 @@ public class VACFileReader implements Iterator<DataPoint> {
 return res;
 }
-private static class LikhovidVACParser implements DataParser {
+private static class LikhovidVACParser implements PointParser {
 static final Pattern pattern = Pattern.compile("(\\S* \\S*)\\s*(\\S*);\\s*(\\S*)\\s*(\\S*)\\s*(\\S*)");
 @Override
 public DataPoint parse(String str) {

VACManager.java

@@ -15,7 +15,7 @@
 */
 package inr.numass.readvac;
-import hep.dataforge.data.DataFormatBuilder;
+import hep.dataforge.data.FormatBuilder;
 import hep.dataforge.data.DataPoint;
 import hep.dataforge.exceptions.StorageException;
 import hep.dataforge.meta.Meta;
@@ -75,7 +75,7 @@ public class VACManager implements AutoCloseable {
 private static PointLoader setupLoader(Storage storage, String run) throws StorageException {
 return LoaderFactory.buildPointLoder(storage, "vactms", run, "timestamp",
-new DataFormatBuilder(names)
+new FormatBuilder(names)
 .setFormat("timestamp", ValueType.TIME)
 .build());
 }

PlotFitResultAction.java

@@ -17,8 +17,7 @@ package hep.dataforge.plotfit;
 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
-import hep.dataforge.data.DataSet;
-import hep.dataforge.data.XYDataAdapter;
+import hep.dataforge.data.XYAdapter;
 import hep.dataforge.datafitter.FitState;
 import hep.dataforge.datafitter.models.XYModel;
 import hep.dataforge.description.NodeDef;
@@ -32,6 +31,7 @@ import hep.dataforge.plots.XYPlotFrame;
 import hep.dataforge.plots.data.PlottableData;
 import hep.dataforge.plots.data.PlottableFunction;
 import org.apache.commons.math3.analysis.UnivariateFunction;
+import hep.dataforge.data.PointSet;
 /**
 *
@@ -49,16 +49,16 @@ public class PlotFitResultAction extends OneToOneAction<FitState, FitState> {
 @Override
 protected FitState execute(Logable log, Meta metaData, FitState input) {
-DataSet data = input.getDataSet();
+PointSet data = input.getDataSet();
 if (!(input.getModel() instanceof XYModel)) {
 log.logError("The fit model should be instance of XYModel for this action. Action failed!");
 return input;
 }
 XYModel model = (XYModel) input.getModel();
-XYDataAdapter adapter;
+XYAdapter adapter;
 if (metaData.hasNode("adapter")) {
-adapter = new XYDataAdapter(metaData.getNode("adapter"));
+adapter = new XYAdapter(metaData.getNode("adapter"));
 } else if (input.getModel() instanceof XYModel) {
 adapter = model.getAdapter();
 } else {

NumassPlugin.java

@@ -19,8 +19,7 @@ import hep.dataforge.actions.ActionManager;
 import hep.dataforge.context.BasicPlugin;
 import hep.dataforge.context.Context;
 import hep.dataforge.context.PluginDef;
-import hep.dataforge.data.DataAdapter;
-import hep.dataforge.data.XYDataAdapter;
+import hep.dataforge.data.XYAdapter;
 import hep.dataforge.datafitter.FitManager;
 import hep.dataforge.datafitter.FitPlugin;
 import hep.dataforge.datafitter.models.Model;
@@ -54,6 +53,7 @@ import inr.numass.models.TransmissionInterpolator;
 import inr.numass.models.VariableLossSpectrum;
 import org.apache.commons.math3.analysis.BivariateFunction;
 import org.apache.commons.math3.analysis.UnivariateFunction;
+import hep.dataforge.data.PointAdapter;
 /**
 *
@@ -265,11 +265,11 @@ public class NumassPlugin extends BasicPlugin {
 }
 }
-private XYDataAdapter getAdapter(Meta an) {
-if (an.hasNode(DataAdapter.DATA_ADAPTER_ANNOTATION_NAME)) {
-return new XYDataAdapter(an.getNode(DataAdapter.DATA_ADAPTER_ANNOTATION_NAME));
+private XYAdapter getAdapter(Meta an) {
+if (an.hasNode(PointAdapter.DATA_ADAPTER_ANNOTATION_NAME)) {
+return new XYAdapter(an.getNode(PointAdapter.DATA_ADAPTER_ANNOTATION_NAME));
 } else {
-return new XYDataAdapter("Uread", "CR", "CRerr");
+return new XYAdapter("Uread", "CR", "CRerr");
 }
 }
 }

AdjustErrorsAction.java

@@ -8,35 +8,35 @@ package inr.numass.actions;
 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.DataSet;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.io.log.Logable;
 import hep.dataforge.meta.Meta;
 import java.util.ArrayList;
 import java.util.List;
+import hep.dataforge.data.PointSet;
 /**
 * Adjust errors for all numass points in the dataset
 *
 * @author Alexander Nozik <altavir@gmail.com>
 */
-@TypedActionDef(name = "adjustErrors", inputType = DataSet.class, outputType = DataSet.class)
-public class AdjustErrorsAction extends OneToOneAction<DataSet, DataSet> {
+@TypedActionDef(name = "adjustErrors", inputType = PointSet.class, outputType = PointSet.class)
+public class AdjustErrorsAction extends OneToOneAction<PointSet, PointSet> {
 public AdjustErrorsAction(Context context, Meta annotation) {
 super(context, annotation);
 }
 @Override
-protected DataSet execute(Logable log, Meta meta, DataSet input) {
+protected PointSet execute(Logable log, Meta meta, PointSet input) {
 List<DataPoint> points = new ArrayList<>();
 for (DataPoint dp : input) {
 points.add(evalPoint(meta, dp));
 }
-return new ListDataSet(input.getName(), input.meta(), points, input.getDataFormat());
+return new ListPointSet(input.getName(), input.meta(), points, input.getDataFormat());
 }
 private DataPoint evalPoint(Meta meta, DataPoint dp) {

BorderData.java

@@ -15,7 +15,7 @@
 */
 package inr.numass.actions;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.values.Value;
 import inr.numass.data.NMFile;
@@ -27,7 +27,7 @@ import java.util.Map;
 *
 * @author Darksnake
 */
-public class BorderData extends ListDataSet {
+public class BorderData extends ListPointSet {
 private final static String[] names = {"U", "80%", "90%", "95%", "99%"};
 private final static double[] percents = {0.8, 0.9, 0.95, 0.99};

MergeDataAction.java

@@ -20,8 +20,7 @@ import hep.dataforge.content.GroupBuilder;
 import hep.dataforge.content.NamedGroup;
 import hep.dataforge.context.Context;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.DataSet;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.description.NodeDef;
 import hep.dataforge.description.TypedActionDef;
@@ -33,15 +32,16 @@ import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import hep.dataforge.data.PointSet;
 /**
 *
 * @author Darksnake
 */
-@TypedActionDef(name = "merge", inputType = DataSet.class, outputType = DataSet.class, description = "Merge different numass data files into one.")
+@TypedActionDef(name = "merge", inputType = PointSet.class, outputType = PointSet.class, description = "Merge different numass data files into one.")
 @NodeDef(name = "grouping", info = "The defenition of grouping rule for this merge", target = "method::hep.dataforge.content.GroupBuilder.byAnnotation")
 //@Parameter(name = "groupBy", def = "mergeTag", info = "Defines the name of the value by which grouping is made. The value is supposed to be a String, but in practice could be any type which could be converted to String.")
-public class MergeDataAction extends ManyToOneAction<DataSet, DataSet> {
+public class MergeDataAction extends ManyToOneAction<PointSet, PointSet> {
 public static final String MERGE_NAME = "mergeName";
 public static String[] parnames = {"Uset", "Uread", "Length", "Total", "Window", "Corrected", "CR", "CRerr"};
@@ -51,8 +51,8 @@ public class MergeDataAction extends ManyToOneAction<DataSet, DataSet> {
 }
 @Override
-protected List<NamedGroup<DataSet>> buildGroups(Meta reader, List<DataSet> input) {
-List<NamedGroup<DataSet>> groups;
+protected List<NamedGroup<PointSet>> buildGroups(Meta reader, List<PointSet> input) {
+List<NamedGroup<PointSet>> groups;
 if (reader.hasValue("grouping.byValue")) {
 groups = super.buildGroups(reader, input);
 } else {
@@ -62,17 +62,17 @@ public class MergeDataAction extends ManyToOneAction<DataSet, DataSet> {
 }
 @Override
-protected DataSet execute(Logable log, Meta reader, NamedGroup<DataSet> input) {
+protected PointSet execute(Logable log, Meta reader, NamedGroup<PointSet> input) {
 return mergeOne(log, input.getName(), input.asList());
 // List<DataSet> res = new ArrayList<>();
-// for (NamedGroup<DataSet> group : groups) {
-// res.add(mergeOne(log, group.getName(), group.asList()));
+// for (NamedGroup<DataSet> buildGroups : groups) {
+// res.add(mergeOne(log, buildGroups.getName(), buildGroups.asList()));
 // }
-// return new ContentList<>(input.getName(), DataSet.class, res);
+// return new ContentList<>(input.getName(), PointSet.class, res);
 }
-private DataSet mergeOne(Logable log, String fileName, List<DataSet> files) {
-DataSet[] data = new DataSet[files.size()];
+private PointSet mergeOne(Logable log, String fileName, List<PointSet> files) {
+PointSet[] data = new PointSet[files.size()];
 String head = "Numass data merge\n";
 String numassPath = "";
@@ -96,7 +96,7 @@ public class MergeDataAction extends ManyToOneAction<DataSet, DataSet> {
 }
 }
-DataSet res = mergeDataSets(fileName, data);
+PointSet res = mergeDataSets(fileName, data);
 /*
 * Set the path only if it is the same for all input files
@@ -116,7 +116,7 @@ public class MergeDataAction extends ManyToOneAction<DataSet, DataSet> {
 // private Map<String, List<DataSet>> buildMergeGroups(String mergeBy, NamedGroup<DataSet> input) {
 // Map<String, List<DataSet>> map = new HashMap<>();
-// for (DataSet ds : input) {
+// for (PointSet ds : input) {
 // String tag = ds.meta().getString(mergeBy, meta().getString(mergeBy, "merge"));
 // if (!map.containsKey(tag)) {
 // map.put(tag, new ArrayList<>());
@@ -167,10 +167,10 @@ public class MergeDataAction extends ManyToOneAction<DataSet, DataSet> {
 return map;
 }
-private DataSet mergeDataSets(String name, DataSet... ds) {
+private PointSet mergeDataSets(String name, PointSet... ds) {
 //Merge all points into a single data set
 Map<Double, List<DataPoint>> points = new LinkedHashMap<>();
-for (DataSet d : ds) {
+for (PointSet d : ds) {
 if (!d.getDataFormat().contains(parnames)) {
 throw new IllegalArgumentException();
 }
@@ -193,7 +193,7 @@ public class MergeDataAction extends ManyToOneAction<DataSet, DataSet> {
 res.add(curPoint);
 }
-return new ListDataSet(name, null, res);
+return new ListPointSet(name, null, res);
 }

MonitorCorrectAction.java

@@ -19,8 +19,7 @@ import hep.dataforge.actions.ActionResult;
 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.DataSet;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.description.ValueDef;
@@ -37,16 +36,17 @@ import java.util.List;
 import java.util.Map.Entry;
 import java.util.TreeMap;
 import java.util.concurrent.CopyOnWriteArrayList;
+import hep.dataforge.data.PointSet;
 /**
 *
 * @author Darksnake
 */
-@TypedActionDef(name = "monitor", inputType = DataSet.class, outputType = DataSet.class)
+@TypedActionDef(name = "monitor", inputType = PointSet.class, outputType = PointSet.class)
 @ValueDef(name = "monitorPoint", type = "NUMBER", required = true, info = "The Uset for monitor point")
 @ValueDef(name = "monitorFile", info = "The outputfile for monitor points", def = "monitor.out")
 @ValueDef(name = "calculateRelative", info = "Calculate count rate relative to average monitor point", def = "false")
-public class MonitorCorrectAction extends OneToOneAction<DataSet, DataSet> {
+public class MonitorCorrectAction extends OneToOneAction<PointSet, PointSet> {
 private static final String[] monitorNames = {"Timestamp", "Total", "CR", "CRerr"};
@@ -57,7 +57,7 @@ public class MonitorCorrectAction extends OneToOneAction<DataSet, DataSet> {
 }
 @Override
-protected DataSet execute(Logable log, Meta reader, DataSet sourceData) throws ContentException {
+protected PointSet execute(Logable log, Meta reader, PointSet sourceData) throws ContentException {
 double monitor = reader.getDouble("monitorPoint", Double.NaN);
@@ -131,7 +131,7 @@ public class MonitorCorrectAction extends OneToOneAction<DataSet, DataSet> {
 // } else {
 // format = DataFormat.of(parnames);
 // }
-DataSet data = new ListDataSet(sourceData.getName(), sourceData.meta(), dataList);
+PointSet data = new ListPointSet(sourceData.getName(), sourceData.meta(), dataList);
 OutputStream stream = buildActionOutput(data);
@@ -141,7 +141,7 @@ public class MonitorCorrectAction extends OneToOneAction<DataSet, DataSet> {
 }
 @Override
-protected void afterAction(ActionResult<DataSet> pack) throws ContentException {
+protected void afterAction(ActionResult<PointSet> pack) throws ContentException {
 printMonitorData();
 super.afterAction(pack);
 }
@@ -149,7 +149,7 @@ public class MonitorCorrectAction extends OneToOneAction<DataSet, DataSet> {
 private void printMonitorData() {
 String monitorFileName = meta().getString("monitorFile", "monitor");
 OutputStream stream = buildActionOutput(monitorFileName);
-ListDataSet data = new ListDataSet("monitor", null, monitorPoints);
+ListPointSet data = new ListPointSet("monitor", null, monitorPoints);
 ColumnedDataWriter.writeDataSet(stream, data.sort("Timestamp", true), "Monitor points", monitorNames);
 }

PrepareDataAction.java

@@ -17,10 +17,9 @@ package inr.numass.actions;
 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
-import hep.dataforge.data.DataFormat;
+import hep.dataforge.data.Format;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.DataSet;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.description.ValueDef;
@@ -36,17 +35,18 @@ import java.io.OutputStream;
 import java.time.Instant;
 import java.util.ArrayList;
 import java.util.List;
+import hep.dataforge.data.PointSet;
 /**
 *
 * @author Darksnake
 */
-@TypedActionDef(name = "prepareData", inputType = NMFile.class, outputType = DataSet.class)
+@TypedActionDef(name = "prepareData", inputType = NMFile.class, outputType = PointSet.class)
 @ValueDef(name = "lowerWindow", type = "NUMBER", def = "0", info = "Base for the window lowerWindow bound")
 @ValueDef(name = "lowerWindowSlope", type = "NUMBER", def = "0", info = "Slope for the window lowerWindow bound")
 @ValueDef(name = "upperWindow", type = "NUMBER", info = "Upper bound for window")
 @ValueDef(name = "deadTime", type = "NUMBER", def = "0", info = "Dead time in us")
-public class PrepareDataAction extends OneToOneAction<NMFile, DataSet> {
+public class PrepareDataAction extends OneToOneAction<NMFile, PointSet> {
 public static String[] parnames = {"Uset", "Uread", "Length", "Total", "Window", "Corrected", "CR", "CRerr", "Timestamp"};
@@ -62,7 +62,7 @@ public class PrepareDataAction extends OneToOneAction<NMFile, DataSet> {
 }
 @Override
-protected ListDataSet execute(Logable log, Meta reader, NMFile dataFile) throws ContentException {
+protected ListPointSet execute(Logable log, Meta reader, NMFile dataFile) throws ContentException {
 // log.logString("File %s started", dataFile.getName());
 int upper = dataFile.meta().getInt("upperWindow", this.meta().getInt("upperWindow", RawNMPoint.MAX_CHANEL - 1));
@@ -93,13 +93,13 @@ public class PrepareDataAction extends OneToOneAction<NMFile, DataSet> {
 dataList.add(new MapDataPoint(parnames, new Object[]{Uset, Uread, time, total, wind, corr, cr, crErr, timestamp}));
 }
-DataFormat format;
+Format format;
 if (!dataList.isEmpty()) {
 //Generate the format automatically from the first row
-format = DataFormat.forPoint(dataList.get(0));
+format = Format.forPoint(dataList.get(0));
 } else {
-format = DataFormat.forNames(8, parnames);
+format = Format.forNames(8, parnames);
 }
 // AnnotationBuilder builder = dataFile.meta().getBuilder();
@@ -112,7 +112,7 @@ public class PrepareDataAction extends OneToOneAction<NMFile, DataSet> {
 }
 head = head + "\n" + new XMLMetaWriter().writeString(meta(), null) + "\n";
-ListDataSet data = new ListDataSet(dataFile.getName(), dataFile.meta(), dataList, format);
+ListPointSet data = new ListPointSet(dataFile.getName(), dataFile.meta(), dataList, format);
 OutputStream stream = buildActionOutput(data);

ShowLossSpectrumAction.java

@@ -17,10 +17,9 @@ package inr.numass.actions;
 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
-import hep.dataforge.data.DataSet;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
-import hep.dataforge.data.XYDataAdapter;
+import hep.dataforge.data.XYAdapter;
 import hep.dataforge.datafitter.FitState;
 import hep.dataforge.datafitter.FitTaskResult;
 import hep.dataforge.datafitter.Param;
@@ -54,6 +53,7 @@ import org.apache.commons.math3.analysis.interpolation.UnivariateInterpolator;
 import org.apache.commons.math3.stat.StatUtils;
 import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
 import org.slf4j.LoggerFactory;
+import hep.dataforge.data.PointSet;
 /**
 *
@@ -174,7 +174,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
 ParamSet parameters = input.getParameters().getSubSet(new String[]{"exPos", "ionPos", "exW", "ionW", "exIonRatio"});
 NamedMatrix covariance = input.getCovariance();
-DataSet spreadData = generateSpread(writer, input.getName(), parameters, covariance);
+PointSet spreadData = generateSpread(writer, input.getName(), parameters, covariance);
 ColumnedDataWriter.writeDataSet(System.out, spreadData, "", spreadData.getDataFormat().asArray());
 }
 }
@@ -188,7 +188,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
 return 1d - integrator.integrate(integrand, 5d, threshold);
 }
-private double calculateIntegralExIonRatio(DataSet data, double X, double integralThreshold) {
+private double calculateIntegralExIonRatio(PointSet data, double X, double integralThreshold) {
 double scatterProb = 1 - Math.exp(-X);
 double[] x = data.getColumn("Uset").asList().stream().mapToDouble((val) -> val.doubleValue()).toArray();
@@ -232,12 +232,12 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
 new MetaBuilder("plot").setValue("plotTitle", "Ion ratio Distribution for " + name)
 );
 // XYPlotFrame frame = JFreeChartFrame.drawFrame("Ion ratio Distribution for " + name, null);
-frame.add(PlottableData.plot(hist, new XYDataAdapter("binCenter", "count")));
+frame.add(PlottableData.plot(hist, new XYAdapter("binCenter", "count")));
 return new DescriptiveStatistics(res).getStandardDeviation();
 }
-public static DataSet generateSpread(PrintWriter writer, String name, NamedDoubleSet parameters, NamedMatrix covariance) {
+public static PointSet generateSpread(PrintWriter writer, String name, NamedDoubleSet parameters, NamedMatrix covariance) {
 int numCalls = 1000;
 int gridPoints = 200;
 double a = 8;
@@ -272,7 +272,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
 }
 }
 String[] pointNames = {"e", "central", "lower", "upper", "dispersion"};
-ListDataSet res = new ListDataSet("spread", pointNames);
+ListPointSet res = new ListPointSet("spread", pointNames);
 for (int i = 0; i < gridPoints; i++) {
 res.add(new MapDataPoint(pointNames, grid[i], central[i], lower[i], upper[i], dispersion[i]));

SlicedData.java

@@ -15,8 +15,8 @@
 */
 package inr.numass.actions;
-import hep.dataforge.data.DataFormat;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.Format;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.values.Value;
 import inr.numass.data.NMFile;
@@ -30,17 +30,17 @@ import org.apache.commons.math3.util.Pair;
 *
 * @author Darksnake
 */
-public class SlicedData extends ListDataSet {
+public class SlicedData extends ListPointSet {
 private static final String TNAME = "Time";
 //format = {U,username1,username2, ...}
 private static final String UNAME = "U";
-private static DataFormat prepateFormat(Map<String,Pair<Integer,Integer>> intervals){
+private static Format prepateFormat(Map<String,Pair<Integer,Integer>> intervals){
 ArrayList<String> names = new ArrayList<>(intervals.keySet());
 names.add(0, TNAME);
 names.add(0, UNAME);
-return DataFormat.forNames(8, names);
+return Format.forNames(8, names);
 }

SummaryAction.java

@@ -19,10 +19,9 @@ import hep.dataforge.actions.ManyToOneAction;
 import hep.dataforge.content.GroupBuilder;
 import hep.dataforge.content.NamedGroup;
 import hep.dataforge.context.Context;
-import hep.dataforge.data.DataFormat;
+import hep.dataforge.data.Format;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.DataSet;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.datafitter.FitState;
 import hep.dataforge.description.TypedActionDef;
@@ -33,13 +32,14 @@ import hep.dataforge.values.Value;
 import java.io.OutputStream;
 import java.util.Arrays;
 import java.util.List;
+import hep.dataforge.data.PointSet;
 /**
 *
 * @author Darksnake
 */
-@TypedActionDef(name = "summary", inputType = FitState.class, outputType = DataSet.class, description = "Generate summary for fit results of different datasets.")
-public class SummaryAction extends ManyToOneAction<FitState, DataSet> {
+@TypedActionDef(name = "summary", inputType = FitState.class, outputType = PointSet.class, description = "Generate summary for fit results of different datasets.")
+public class SummaryAction extends ManyToOneAction<FitState, PointSet> {
 public static final String SUMMARY_NAME = "sumName";
@@ -59,7 +59,7 @@ public class SummaryAction extends ManyToOneAction<FitState, DataSet> {
 }
 @Override
-protected DataSet execute(Logable log, Meta reader, NamedGroup<FitState> input){
+protected PointSet execute(Logable log, Meta reader, NamedGroup<FitState> input){
 String[] parNames = meta().getStringArray("parnames");
 String[] names = new String[2 * parNames.length + 2];
 names[0] = "file";
@@ -72,7 +72,7 @@ public class SummaryAction extends ManyToOneAction<FitState, DataSet> {
 // boolean calculateWAV = meta().getBoolean("wav", true);
 String fileName = reader.getString(SUMMARY_NAME, "summary");
-ListDataSet res = new ListDataSet(fileName, DataFormat.forNames(8, names));
+ListPointSet res = new ListPointSet(fileName, Format.forNames(8, names));
 double[] weights = new double[parNames.length];
 Arrays.fill(weights, 0);

ESpectrum.java

@@ -15,8 +15,8 @@
 */
 package inr.numass.data;
-import hep.dataforge.data.DataFormat;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.Format;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.values.Value;
@@ -30,16 +30,19 @@ import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import static java.lang.String.format;
+import static java.lang.String.format;
+import static java.lang.String.format;
 /**
 *
 * @author Darksnake
 */
-public class ESpectrum extends ListDataSet {
+public class ESpectrum extends ListPointSet {
 private final static String binCenter = "chanel";
-private static DataFormat prepareFormat(List<NMPoint> points) {
+private static Format prepareFormat(List<NMPoint> points) {
 // ArrayList<String> names = new ArrayList<>();
 // names.add(binCenter);
 Map<String, ValueFormat> format = new LinkedHashMap<>();
@@ -49,7 +52,7 @@ public class ESpectrum extends ListDataSet {
 format.put(format("%.3f", point.getUread()), ValueFormatFactory.fixedWidth(10));
 }
-return new DataFormat(format);
+return new Format(format);
 }
 int binning = 1;

SpectrumDataAdapter.java

@@ -15,21 +15,21 @@
 */
 package inr.numass.data;
-import hep.dataforge.data.DataAdapter;
 import hep.dataforge.data.DataPoint;
 import hep.dataforge.data.MapDataPoint;
-import hep.dataforge.data.XYDataAdapter;
+import hep.dataforge.data.XYAdapter;
 import hep.dataforge.exceptions.DataFormatException;
 import hep.dataforge.exceptions.NameNotFoundException;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.meta.MetaBuilder;
 import hep.dataforge.values.Value;
+import hep.dataforge.data.PointAdapter;
 /**
 *
 * @author Darksnake
 */
-public class SpectrumDataAdapter extends XYDataAdapter {
+public class SpectrumDataAdapter extends XYAdapter {
 private static final String POINT_LENGTH_NAME = "time";
@@ -41,7 +41,7 @@ public class SpectrumDataAdapter extends XYDataAdapter {
 }
 public SpectrumDataAdapter(String xName, String yName, String yErrName, String measurementTime) {
-super(new MetaBuilder(DataAdapter.DATA_ADAPTER_ANNOTATION_NAME)
+super(new MetaBuilder(PointAdapter.DATA_ADAPTER_ANNOTATION_NAME)
 .setValue(X_NAME, xName)
 .setValue(Y_NAME, yName)
 .setValue(Y_ERR_NAME, yErrName)
@@ -51,7 +51,7 @@ public class SpectrumDataAdapter extends XYDataAdapter {
 }
 public SpectrumDataAdapter(String xName, String yName, String measurementTime) {
-super(new MetaBuilder(DataAdapter.DATA_ADAPTER_ANNOTATION_NAME)
+super(new MetaBuilder(PointAdapter.DATA_ADAPTER_ANNOTATION_NAME)
 .setValue(X_NAME, xName)
 .setValue(Y_NAME, yName)
 .setValue(POINT_LENGTH_NAME, measurementTime)

SpectrumGenerator.java

@@ -16,7 +16,7 @@
 package inr.numass.data;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.datafitter.ParamSet;
 import hep.dataforge.datafitter.models.Generator;
 import hep.dataforge.datafitter.models.XYModel;
@@ -27,6 +27,9 @@ import java.util.Iterator;
 import org.apache.commons.math3.random.JDKRandomGenerator;
 import org.apache.commons.math3.random.RandomDataGenerator;
 import org.apache.commons.math3.random.RandomGenerator;
+import static java.lang.Double.isNaN;
+import static java.lang.Double.isNaN;
+import static java.lang.Double.isNaN;
 /**
 * Data set generator for spectra. A data set is required as input,
@@ -63,8 +66,8 @@ public class SpectrumGenerator implements Generator {
 }
 @Override
-public ListDataSet generateData(Iterable<DataPoint> config) {
-ListDataSet res = new ListDataSet(adapter.getFormat());
+public ListPointSet generateData(Iterable<DataPoint> config) {
+ListPointSet res = new ListPointSet(adapter.getFormat());
 for (Iterator<DataPoint> it = config.iterator(); it.hasNext();) {
 res.add(this.generateDataPoint(it.next()));
 }

SpectrumInformation.java

@@ -16,7 +16,7 @@
 package inr.numass.data;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.functions.ParametricFunction;
 import static hep.dataforge.maths.MatrixOperations.inverse;
 import hep.dataforge.maths.NamedDoubleSet;
@@ -37,7 +37,7 @@ public class SpectrumInformation {
 this.source = source;
 }
-public NamedMatrix getExpetedCovariance(NamedDoubleSet set, ListDataSet data, String... parNames) {
+public NamedMatrix getExpetedCovariance(NamedDoubleSet set, ListPointSet data, String... parNames) {
 String[] names = parNames;
 if(names.length==0) {
 names = source.namesAsArray();
@@ -55,7 +55,7 @@ public class SpectrumInformation {
 * @param parNames
 * @return
 */
-public NamedMatrix getInformationMatrix(NamedDoubleSet set, ListDataSet data, String... parNames) {
+public NamedMatrix getInformationMatrix(NamedDoubleSet set, ListPointSet data, String... parNames) {
 SpectrumDataAdapter reader = new SpectrumDataAdapter(data.meta().getNode("aliases"));
 String[] names = parNames;

FSS.java

@@ -16,12 +16,12 @@
 package inr.numass.models;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.DataSet;
 import hep.dataforge.io.IOUtils;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.util.ArrayList;
 import org.apache.commons.math3.util.Pair;
+import hep.dataforge.data.PointSet;
 /**
 *
@@ -34,7 +34,7 @@ public class FSS{
 public FSS(File FSSFile) {
 try {
-DataSet data = IOUtils.readColumnedData(FSSFile,"E","P");
+PointSet data = IOUtils.readColumnedData(FSSFile,"E","P");
 this.points = new ArrayList<>();
 norm = 0;
 for (DataPoint dp : data) {

TransmissionInterpolator.java

@@ -19,7 +19,6 @@ import hep.dataforge.actions.ActionResult;
 import hep.dataforge.actions.RunManager;
 import hep.dataforge.context.Context;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.DataSet;
 import hep.dataforge.io.ColumnedDataReader;
 import hep.dataforge.meta.Meta;
 import java.io.File;
@@ -28,6 +27,7 @@ import java.util.ArrayList;
 import java.util.List;
 import org.apache.commons.math3.analysis.UnivariateFunction;
 import org.apache.commons.math3.analysis.interpolation.LinearInterpolator;
+import hep.dataforge.data.PointSet;
 /**
 *
@@ -47,8 +47,8 @@ public class TransmissionInterpolator implements UnivariateFunction {
 @SuppressWarnings("unchecked")
 public static TransmissionInterpolator fromAction(Context context, Meta actionAnnotation, String xName, String yName, int nSmooth, double w, double border) throws InterruptedException {
-ActionResult<DataSet> pack = RunManager.executeAction(context, actionAnnotation);
-DataSet data = pack.iterator().next().get();
+ActionResult<PointSet> pack = RunManager.executeAction(context, actionAnnotation);
+PointSet data = pack.iterator().next().get();
 return new TransmissionInterpolator(data, xName, yName, nSmooth, w, border);
 }

DataModelUtils.java

@@ -16,7 +16,7 @@
 package inr.numass.utils;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
@@ -26,10 +26,10 @@ import hep.dataforge.data.MapDataPoint;
 */
 public class DataModelUtils {
-public static ListDataSet getUniformSpectrumConfiguration(double from, double to, double time, int numpoints) {
+public static ListPointSet getUniformSpectrumConfiguration(double from, double to, double time, int numpoints) {
 assert to != from;
 final String[] list = {"x", "time"};
-ListDataSet res = new ListDataSet(list);
+ListPointSet res = new ListPointSet(list);
 for (int i = 0; i < numpoints; i++) {
 // the formula works even when the point order is reversed
@@ -41,8 +41,8 @@
 return res;
 }
-// public static ListDataSet maskDataSet(Iterable<DataPoint> data, String maskForX, String maskForY, String maskForYerr, String maskForTime) {
-// ListDataSet res = new ListDataSet(XYDataPoint.names);
+// public static ListPointSet maskDataSet(Iterable<DataPoint> data, String maskForX, String maskForY, String maskForYerr, String maskForTime) {
+// ListPointSet res = new ListPointSet(XYDataPoint.names);
 // for (DataPoint point : data) {
 // res.add(SpectrumDataPoint.maskDataPoint(point, maskForX, maskForY, maskForYerr, maskForTime));
 // }

OldDataReader.java

@@ -17,7 +17,7 @@ package inr.numass.utils;
 import hep.dataforge.context.GlobalContext;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import inr.numass.data.SpectrumDataAdapter;
 import java.io.File;
@@ -25,6 +25,9 @@ import java.io.FileNotFoundException;
 import java.util.Locale;
 import static java.util.Locale.setDefault;
 import java.util.Scanner;
+import static java.util.Locale.setDefault;
+import static java.util.Locale.setDefault;
+import static java.util.Locale.setDefault;
 /**
 *
@@ -32,9 +35,9 @@ import java.util.Scanner;
 */
 public class OldDataReader {
-public static ListDataSet readConfig(String path) throws FileNotFoundException {
+public static ListPointSet readConfig(String path) throws FileNotFoundException {
 String[] list = {"X","time","ushift"};
-ListDataSet res = new ListDataSet(list);
+ListPointSet res = new ListPointSet(list);
 File file = GlobalContext.instance().io().getFile(path);
 Scanner sc = new Scanner(file);
 sc.nextLine();
@@ -54,9 +57,9 @@ public class OldDataReader {
 return res;
 }
-public static ListDataSet readData(String path, double Elow) {
+public static ListPointSet readData(String path, double Elow) {
 SpectrumDataAdapter factory = new SpectrumDataAdapter();
-ListDataSet res = new ListDataSet(factory.getFormat());
+ListPointSet res = new ListPointSet(factory.getFormat());
 File file = GlobalContext.instance().io().getFile(path);
 double x;
 int count;
@@ -106,9 +109,9 @@ public class OldDataReader {
 return res;
 }
-public static ListDataSet readDataAsGun(String path, double Elow) {
+public static ListPointSet readDataAsGun(String path, double Elow) {
 SpectrumDataAdapter factory = new SpectrumDataAdapter();
-ListDataSet res = new ListDataSet(factory.getFormat());
+ListPointSet res = new ListPointSet(factory.getFormat());
 File file = GlobalContext.instance().io().getFile(path);
 double x;
 long count;
@@ -139,9 +142,9 @@ public class OldDataReader {
 return res;
 }
-public static ListDataSet readSpectrumData(String path){
+public static ListPointSet readSpectrumData(String path){
 SpectrumDataAdapter factory = new SpectrumDataAdapter();
-ListDataSet res = new ListDataSet(factory.getFormat());
+ListPointSet res = new ListPointSet(factory.getFormat());
 File file = GlobalContext.instance().io().getFile(path);
 double x;
 double count;

TritiumUtils.java

@@ -16,12 +16,15 @@
 package inr.numass.utils;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import inr.numass.data.SpectrumDataAdapter;
 import static java.lang.Math.abs;
 import static java.lang.Math.exp;
 import static java.lang.Math.sqrt;
 import org.apache.commons.math3.analysis.UnivariateFunction;
+import static java.lang.Math.abs;
+import static java.lang.Math.abs;
+import static java.lang.Math.abs;
 /**
 *
@@ -38,10 +41,10 @@ public class TritiumUtils {
 // * @param driftPerSecond
 // * @return
 // */
-// public static ListDataSet applyDrift(ListDataSet data, double driftPerSecond) {
+// public static ListPointSet applyDrift(ListPointSet data, double driftPerSecond) {
 // double t = 0;
 //
-// ListDataSet res = new ListDataSet(data.getDataFormat());
+// ListPointSet res = new ListPointSet(data.getDataFormat());
 // for (DataPoint d : data) {
 // SpectrumDataPoint dp = (SpectrumDataPoint) d;
 // double corrFactor = 1 + driftPerSecond * t;
@@ -60,9 +63,9 @@ public class TritiumUtils {
 * @param dtime
 * @return
 */
-public static ListDataSet correctForDeadTime(ListDataSet data, double dtime) {
+public static ListPointSet correctForDeadTime(ListPointSet data, double dtime) {
 SpectrumDataAdapter reader = new SpectrumDataAdapter(data.meta().getNode("aliases"));
-ListDataSet res = new ListDataSet(data.getDataFormat());
+ListPointSet res = new ListPointSet(data.getDataFormat());
 for (DataPoint dp : data) {
 double corrFactor = 1 / (1 - dtime * reader.getCount(dp) /reader.getTime(dp));
 res.add(reader.buildSpectrumDataPoint(reader.getX(dp).doubleValue(), (long) (reader.getCount(dp)*corrFactor),reader.getTime(dp)));
@@ -78,9 +81,9 @@ public class TritiumUtils {
 * @param beta
 * @return
 */
-public static ListDataSet setHVScale(ListDataSet data, double beta) {
+public static ListPointSet setHVScale(ListPointSet data, double beta) {
 SpectrumDataAdapter reader = new SpectrumDataAdapter(data.meta().getNode("aliases"));
-ListDataSet res = new ListDataSet(data.getDataFormat());
+ListPointSet res = new ListPointSet(data.getDataFormat());
 for (DataPoint dp : data) {
 double corrFactor = 1 + beta;
 res.add(reader.buildSpectrumDataPoint(reader.getX(dp).doubleValue()*corrFactor, reader.getCount(dp), reader.getTime(dp)));

PoissonAdapter.java

@@ -16,7 +16,7 @@
 package inr.numass.prop;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.XYDataAdapter;
+import hep.dataforge.data.XYAdapter;
 import hep.dataforge.exceptions.NameNotFoundException;
 import hep.dataforge.values.Value;
@@ -25,7 +25,7 @@ import hep.dataforge.values.Value;
 *
 * @author Darksnake
 */
-public class PoissonAdapter extends XYDataAdapter {
+public class PoissonAdapter extends XYAdapter {
 public PoissonAdapter(String xName, String yName) {
 super(xName, yName);

PropTest.java

@ -18,7 +18,6 @@ package inr.numass.prop;
import hep.dataforge.context.GlobalContext; import hep.dataforge.context.GlobalContext;
import static hep.dataforge.context.GlobalContext.out; import static hep.dataforge.context.GlobalContext.out;
import hep.dataforge.data.DataPoint; import hep.dataforge.data.DataPoint;
import hep.dataforge.data.DataSet;
import hep.dataforge.datafitter.FitManager; import hep.dataforge.datafitter.FitManager;
import hep.dataforge.datafitter.FitState; import hep.dataforge.datafitter.FitState;
import hep.dataforge.datafitter.ParamSet; import hep.dataforge.datafitter.ParamSet;
@ -30,6 +29,7 @@ import hep.dataforge.maths.RandomUtils;
import inr.numass.models.BetaSpectrum; import inr.numass.models.BetaSpectrum;
import inr.numass.models.NBkgSpectrum; import inr.numass.models.NBkgSpectrum;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import hep.dataforge.data.PointSet;
/** /**
* Hello world! * Hello world!
@ -72,7 +72,7 @@ public class PropTest {
//pm.plotFunction(trans.getProduct(bareBeta, allPars, 9000d), 1000d, 19000d, 400); //pm.plotFunction(trans.getProduct(bareBeta, allPars, 9000d), 1000d, 19000d, 400);
// pm.plotFunction(FunctionUtils.fix1stArgument(trans.getBivariateFunction(allPars), 14000d), 1000, 18000, 400); // pm.plotFunction(FunctionUtils.fix1stArgument(trans.getBivariateFunction(allPars), 14000d), 1000, 18000, 400);
HistogramGenerator generator = new HistogramGenerator(null, model, allPars); HistogramGenerator generator = new HistogramGenerator(null, model, allPars);
DataSet data = generator.generateUniformHistogram(1000d, 18500d, 350); PointSet data = generator.generateUniformHistogram(1000d, 18500d, 350);
long count = 0; long count = 0;
for (DataPoint dp : data) { for (DataPoint dp : data) {
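The loop opened above iterates the generated PointSet point by point. A minimal sketch that just tallies the points, under the assumption that generateUniformHistogram yields one DataPoint per requested bin (350 in the call above):

    // Hedged sketch: counting histogram bins; one DataPoint per bin is assumed.
    long bins = 0;
    for (DataPoint dp : data) {
        bins++;
    }
    // expected: bins == 350 for the call shown in this hunk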

View File

@ -18,8 +18,7 @@ package inr.numass.prop.ar;
import hep.dataforge.actions.OneToOneAction; import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context; import hep.dataforge.context.Context;
import hep.dataforge.data.DataPoint; import hep.dataforge.data.DataPoint;
import hep.dataforge.data.DataSet; import hep.dataforge.data.ListPointSet;
import hep.dataforge.data.ListDataSet;
import hep.dataforge.data.MapDataPoint; import hep.dataforge.data.MapDataPoint;
import hep.dataforge.datafitter.FitManager; import hep.dataforge.datafitter.FitManager;
import hep.dataforge.datafitter.FitPlugin; import hep.dataforge.datafitter.FitPlugin;
@ -38,17 +37,18 @@ import inr.numass.prop.SplitNormalSpectrum;
import java.io.OutputStream; import java.io.OutputStream;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import hep.dataforge.data.PointSet;
/** /**
* *
* @author Darksnake * @author Darksnake
*/ */
@TypedActionDef(name = "fitJNA", inputType = JNAEpisode.class, outputType = DataSet.class, description = "Fit JNA data by apropriate model") @TypedActionDef(name = "fitJNA", inputType = JNAEpisode.class, outputType = PointSet.class, description = "Fit JNA data by apropriate model")
@ValueDef(name = "saveResult", type = "BOOLEAN", def = "true", info = "Save the results of action to a file") @ValueDef(name = "saveResult", type = "BOOLEAN", def = "true", info = "Save the results of action to a file")
@ValueDef(name = "suffix", def = "", info = "Suffix for saved file") @ValueDef(name = "suffix", def = "", info = "Suffix for saved file")
@ValueDef(name = "loFitChanel", type = "NUMBER", def = "600", info = "Lo chanel to filter data for fit") @ValueDef(name = "loFitChanel", type = "NUMBER", def = "600", info = "Lo chanel to filter data for fit")
@ValueDef(name = "upFitChanel", type = "NUMBER", def = "1100", info = "Up chanel to filter data for fit") @ValueDef(name = "upFitChanel", type = "NUMBER", def = "1100", info = "Up chanel to filter data for fit")
public class FitJNAData extends OneToOneAction<JNAEpisode, DataSet> { public class FitJNAData extends OneToOneAction<JNAEpisode, PointSet> {
private final FitManager fm; private final FitManager fm;
@ -63,7 +63,7 @@ public class FitJNAData extends OneToOneAction<JNAEpisode, DataSet> {
} }
@Override @Override
protected DataSet execute(Logable log, Meta meta, JNAEpisode input){ protected PointSet execute(Logable log, Meta meta, JNAEpisode input){
List<DataPoint> res = new ArrayList<>(input.size()); List<DataPoint> res = new ArrayList<>(input.size());
Model model = buildModel(); Model model = buildModel();
@ -96,7 +96,7 @@ public class FitJNAData extends OneToOneAction<JNAEpisode, DataSet> {
res.add(point); res.add(point);
} }
DataSet data = new ListDataSet(input.getName(), input.meta(), res); PointSet data = new ListPointSet(input.getName(), input.meta(), res);
if (meta.getBoolean("saveResult")) { if (meta.getBoolean("saveResult")) {
String suffix = meta.getString("suffix"); String suffix = meta.getString("suffix");
@ -111,7 +111,7 @@ public class FitJNAData extends OneToOneAction<JNAEpisode, DataSet> {
Meta reader = readMeta(spectrum.meta()); Meta reader = readMeta(spectrum.meta());
double lowerChanel = reader.getDouble("loFitChanel"); double lowerChanel = reader.getDouble("loFitChanel");
double upperChanel = reader.getDouble("upFitChanel"); double upperChanel = reader.getDouble("upFitChanel");
DataSet data = spectrum.asDataSet().filter("chanel", lowerChanel, upperChanel); PointSet data = spectrum.asDataSet().filter("chanel", lowerChanel, upperChanel);
ParamSet params = new ParamSet() ParamSet params = new ParamSet()
.setPar("amp", 2e5, 1e3) .setPar("amp", 2e5, 1e3)
.setPar("pos", 800d, 1d) .setPar("pos", 800d, 1d)

View File

@ -17,8 +17,7 @@ package inr.numass.prop.ar;
import hep.dataforge.content.NamedMetaHolder; import hep.dataforge.content.NamedMetaHolder;
import hep.dataforge.data.DataPoint; import hep.dataforge.data.DataPoint;
import hep.dataforge.data.DataSet; import hep.dataforge.data.ListPointSet;
import hep.dataforge.data.ListDataSet;
import hep.dataforge.data.MapDataPoint; import hep.dataforge.data.MapDataPoint;
import hep.dataforge.description.NodeDef; import hep.dataforge.description.NodeDef;
import hep.dataforge.description.ValueDef; import hep.dataforge.description.ValueDef;
@ -29,6 +28,7 @@ import java.util.Collections;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import hep.dataforge.data.PointSet;
/** /**
* *
@ -61,12 +61,12 @@ public class JNASpectrum extends NamedMetaHolder {
} }
} }
public DataSet asDataSet() { public PointSet asDataSet() {
List<DataPoint> points = new ArrayList<>(); List<DataPoint> points = new ArrayList<>();
for (Map.Entry<Double, Long> point : spectrum.entrySet()) { for (Map.Entry<Double, Long> point : spectrum.entrySet()) {
points.add(new MapDataPoint(names, point.getKey(), point.getValue())); points.add(new MapDataPoint(names, point.getKey(), point.getValue()));
} }
return new ListDataSet(getName(), meta(), points); return new ListPointSet(getName(), meta(), points);
} }
public Map<Double, Long> asMap() { public Map<Double, Long> asMap() {

View File

@ -18,7 +18,6 @@ package inr.numass.prop.ar;
import hep.dataforge.actions.OneToOneAction; import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context; import hep.dataforge.context.Context;
import hep.dataforge.data.DataPoint; import hep.dataforge.data.DataPoint;
import hep.dataforge.data.DataSet;
import hep.dataforge.data.FileData; import hep.dataforge.data.FileData;
import hep.dataforge.description.TypedActionDef; import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef; import hep.dataforge.description.ValueDef;
@ -39,6 +38,7 @@ import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Scanner; import java.util.Scanner;
import hep.dataforge.data.PointSet;
/** /**
* *
@ -72,7 +72,7 @@ public class ReadJNADataAction extends OneToOneAction<FileData, JNAEpisode> {
Scanner timeScanner = new Scanner(timeFile); Scanner timeScanner = new Scanner(timeFile);
String tempFileName = reader.getString("temperatureFile", ""); String tempFileName = reader.getString("temperatureFile", "");
DataSet tempData = null; PointSet tempData = null;
if (!tempFileName.isEmpty()) { if (!tempFileName.isEmpty()) {
String[] format = {"time", "T2", "T4", "T5", "T6"}; String[] format = {"time", "T2", "T4", "T5", "T6"};
File tempFile = IOUtils.getFile(input.getInputFile(), tempFileName); File tempFile = IOUtils.getFile(input.getInputFile(), tempFileName);
@ -108,7 +108,7 @@ public class ReadJNADataAction extends OneToOneAction<FileData, JNAEpisode> {
} }
private Meta prepareAnnotation(Meta parent, double startTime, double stopTime, DataSet tempData) { private Meta prepareAnnotation(Meta parent, double startTime, double stopTime, PointSet tempData) {
MetaBuilder meta = parent.getBuilder(); MetaBuilder meta = parent.getBuilder();
meta.putValue("relativeStartTime", startTime); meta.putValue("relativeStartTime", startTime);
meta.putValue("relativeStopTime", stopTime); meta.putValue("relativeStopTime", stopTime);

View File

@ -16,7 +16,6 @@
package inr.numass.prop; package inr.numass.prop;
import hep.dataforge.context.GlobalContext; import hep.dataforge.context.GlobalContext;
import hep.dataforge.data.DataSet;
import hep.dataforge.data.FileData; import hep.dataforge.data.FileData;
import hep.dataforge.datafitter.MINUITPlugin; import hep.dataforge.datafitter.MINUITPlugin;
import hep.dataforge.io.ColumnedDataWriter; import hep.dataforge.io.ColumnedDataWriter;
@ -26,6 +25,7 @@ import inr.numass.prop.ar.JNAEpisode;
import inr.numass.prop.ar.ReadJNADataAction; import inr.numass.prop.ar.ReadJNADataAction;
import java.io.File; import java.io.File;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import hep.dataforge.data.PointSet;
/** /**
* *
@ -48,7 +48,7 @@ public class TestFit {
); );
JNAEpisode spectra = new ReadJNADataAction(GlobalContext.instance(), null).runOne(file); JNAEpisode spectra = new ReadJNADataAction(GlobalContext.instance(), null).runOne(file);
DataSet data = new FitJNAData(GlobalContext.instance(), null).runOne(spectra); PointSet data = new FitJNAData(GlobalContext.instance(), null).runOne(spectra);
ColumnedDataWriter.writeDataSet(System.out, data, "***RESULT***"); ColumnedDataWriter.writeDataSet(System.out, data, "***RESULT***");
} }

View File

@ -17,7 +17,7 @@ package inr.numass.prop;
import hep.dataforge.context.GlobalContext; import hep.dataforge.context.GlobalContext;
import hep.dataforge.data.FileData; import hep.dataforge.data.FileData;
import hep.dataforge.data.XYDataAdapter; import hep.dataforge.data.XYAdapter;
import hep.dataforge.meta.Meta; import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder; import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.plots.PlotFrame; import hep.dataforge.plots.PlotFrame;
@ -60,7 +60,7 @@ public class TestReader {
PlotFrame frame = FXPlotUtils.displayJFreeChart("JNA test", null); PlotFrame frame = FXPlotUtils.displayJFreeChart("JNA test", null);
frame.add(PlottableData.plot(sp.asDataSet(), new XYDataAdapter("chanel", "count"))); frame.add(PlottableData.plot(sp.asDataSet(), new XYAdapter("chanel", "count")));
Meta temps = sp.meta().getNode("temperature"); Meta temps = sp.meta().getNode("temperature");
@ -72,7 +72,7 @@ public class TestReader {
// double lowerChanel = 600; // double lowerChanel = 600;
// double upperChanel = 1100; // double upperChanel = 1100;
// DataSet data = sp.asDataSet().filter("chanel", lowerChanel, upperChanel); // PointSet data = sp.asDataSet().filter("chanel", lowerChanel, upperChanel);
// ParamSet params = new ParamSet() // ParamSet params = new ParamSet()
// .setPar("amp", 2e5, 1e3) // .setPar("amp", 2e5, 1e3)
// .setPar("pos", 800d, 1d) // .setPar("pos", 800d, 1d)

View File

@ -21,10 +21,9 @@ package inr.numass.viewer;
* and open the template in the editor. * and open the template in the editor.
*/ */
import hep.dataforge.data.DataPoint; import hep.dataforge.data.DataPoint;
import hep.dataforge.data.DataSet; import hep.dataforge.data.ListPointSet;
import hep.dataforge.data.ListDataSet;
import hep.dataforge.data.MapDataPoint; import hep.dataforge.data.MapDataPoint;
import hep.dataforge.data.XYDataAdapter; import hep.dataforge.data.XYAdapter;
import hep.dataforge.io.ColumnedDataWriter; import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.meta.Meta; import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder; import hep.dataforge.meta.MetaBuilder;
@ -73,6 +72,7 @@ import org.controlsfx.validation.ValidationSupport;
import org.controlsfx.validation.Validator; import org.controlsfx.validation.Validator;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import hep.dataforge.data.PointSet;
/** /**
* FXML Controller class * FXML Controller class
@ -359,7 +359,7 @@ public class NumassLoaderViewComponent extends AnchorPane implements Initializab
for (NMPoint point : points) { for (NMPoint point : points) {
String seriesName = String.format("%d: %.2f (%.2f)", points.indexOf(point), point.getUset(), point.getUread()); String seriesName = String.format("%d: %.2f (%.2f)", points.indexOf(point), point.getUset(), point.getUread());
PlottableData datum = PlottableData.plot(seriesName,new XYDataAdapter("chanel", "count"), point.getData(binning, normalize)); PlottableData datum = PlottableData.plot(seriesName,new XYAdapter("chanel", "count"), point.getData(binning, normalize));
datum.configure(plottableConfig); datum.configure(plottableConfig);
plottables.add(datum); plottables.add(datum);
} }
@ -402,7 +402,7 @@ public class NumassLoaderViewComponent extends AnchorPane implements Initializab
int loChannel = (int) channelSlider.getLowValue(); int loChannel = (int) channelSlider.getLowValue();
int upChannel = (int) channelSlider.getHighValue(); int upChannel = (int) channelSlider.getHighValue();
double dTime = getDTime(); double dTime = getDTime();
ListDataSet spectrumDataSet = new ListDataSet(names); ListPointSet spectrumDataSet = new ListPointSet(names);
for (NMPoint point : points) { for (NMPoint point : points) {
spectrumDataSet.add(new MapDataPoint(names, new Object[]{ spectrumDataSet.add(new MapDataPoint(names, new Object[]{
@ -439,7 +439,7 @@ public class NumassLoaderViewComponent extends AnchorPane implements Initializab
fileChooser.setInitialFileName(data.getName() + "_detector.out"); fileChooser.setInitialFileName(data.getName() + "_detector.out");
File destination = fileChooser.showSaveDialog(detectorPlotPane.getScene().getWindow()); File destination = fileChooser.showSaveDialog(detectorPlotPane.getScene().getWindow());
if (destination != null) { if (destination != null) {
DataSet detectorData = PlotDataUtils.collectXYDataFromPlot(detectorPlotFrame, true); PointSet detectorData = PlotDataUtils.collectXYDataFromPlot(detectorPlotFrame, true);
try { try {
ColumnedDataWriter ColumnedDataWriter
.writeDataSet(destination, detectorData, "Numass data viewer detector data export for " + data.getName(), .writeDataSet(destination, detectorData, "Numass data viewer detector data export for " + data.getName(),