Major refactoring. Immutable DataPoints and Tables
parent dc03dfebf8
commit 78e1554804
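The theme of this commit: DataPoint and Table implementations become immutable, and all in-place mutation moves to explicit builders (MapPoint.Builder, ListTable.Builder, TableFormatBuilder). The following is a minimal sketch of the new pattern, assembled only from calls that appear in the hunks below; the class name ImmutableTableSketch is hypothetical, and the API is taken from this revision of hep.dataforge, not verified against a released artifact.

    import hep.dataforge.tables.DataPoint;
    import hep.dataforge.tables.ListTable;
    import hep.dataforge.tables.MapPoint;
    import hep.dataforge.tables.Table;
    import hep.dataforge.tables.TableFormat;
    import hep.dataforge.tables.TableFormatBuilder;
    import java.time.Instant;

    public class ImmutableTableSketch {
        public static Table example() {
            // Formats are assembled via TableFormatBuilder and frozen by build()
            TableFormat format = new TableFormatBuilder()
                    .addTime("timestamp")
                    .addNumber("CR")
                    .build();

            // Points are no longer mutated directly; values go through a Builder
            MapPoint.Builder point = new MapPoint.Builder();
            point.putValue("timestamp", Instant.now());
            point.putValue("CR", 42.0);

            // Tables collect immutable rows through ListTable.Builder
            ListTable.Builder table = new ListTable.Builder(format);
            table.addRow(point.build());
            return table.build();
        }
    }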
@@ -21,7 +21,7 @@ import hep.dataforge.control.collectors.RegularPointCollector;
 import hep.dataforge.control.measurements.DataDevice;
 import hep.dataforge.control.ports.PortHandler;
 import hep.dataforge.control.ports.TcpPortHandler;
-import hep.dataforge.points.FormatBuilder;
+import hep.dataforge.tables.TableFormatBuilder;
 import hep.dataforge.exceptions.ControlException;
 import hep.dataforge.exceptions.PortException;
 import hep.dataforge.exceptions.StorageException;
@@ -85,7 +85,7 @@ public class PKT8Device extends DataDevice<PKT8Device.PKT8Measurement> implement
         String suffix = Integer.toString((int) Instant.now().toEpochMilli());

         // Building data format
-        FormatBuilder formatBuilder = new FormatBuilder()
+        TableFormatBuilder formatBuilder = new TableFormatBuilder()
                 .addTime("timestamp");
         List<String> names = new ArrayList<>();
@@ -23,15 +23,15 @@ import hep.dataforge.control.measurements.AbstractMeasurement;
 import hep.dataforge.control.measurements.Measurement;
 import hep.dataforge.control.ports.PortHandler;
 import hep.dataforge.control.ports.TcpPortHandler;
-import hep.dataforge.points.DataPoint;
-import hep.dataforge.points.MapPoint;
+import hep.dataforge.tables.DataPoint;
+import hep.dataforge.tables.MapPoint;
 import hep.dataforge.exceptions.ControlException;
 import hep.dataforge.exceptions.MeasurementException;
 import hep.dataforge.exceptions.PortException;
 import hep.dataforge.exceptions.StorageException;
 import hep.dataforge.meta.Meta;
-import hep.dataforge.points.PointFormat;
-import hep.dataforge.points.FormatBuilder;
+import hep.dataforge.tables.TableFormat;
+import hep.dataforge.tables.TableFormatBuilder;
 import hep.dataforge.storage.api.PointLoader;
 import hep.dataforge.storage.api.Storage;
 import hep.dataforge.storage.commons.LoaderFactory;
@@ -406,12 +406,12 @@ public class MspDevice extends SingleMeasurementDevice implements PortHandler.Po
             throw new IllegalStateException("Peak map is not initialized");
         }

-        FormatBuilder builder = new FormatBuilder().addTime("timestamp");
+        TableFormatBuilder builder = new TableFormatBuilder().addTime("timestamp");
         this.peakMap.values().stream().forEach((peakName) -> {
             builder.addNumber(peakName);
         });

-        PointFormat format = builder.build();
+        TableFormat format = builder.build();

         String suffix = Integer.toString((int) Instant.now().toEpochMilli());
         PointLoader loader = LoaderFactory
@@ -501,7 +501,7 @@ public class MspDevice extends SingleMeasurementDevice implements PortHandler.Po

         Instant time = Instant.now();

-        MapPoint point = new MapPoint();
+        MapPoint.Builder point = new MapPoint.Builder();
         point.putValue("timestamp", time);

         measurement.entrySet().stream().forEach((entry) -> {
@@ -515,7 +515,7 @@ public class MspDevice extends SingleMeasurementDevice implements PortHandler.Po
         forEachTypedConnection(Roles.STORAGE_ROLE, StorageConnection.class, (StorageConnection connection) -> {
             PointLoader pl = loaderMap.computeIfAbsent(connection, con -> makeLoader(con));
             try {
-                pl.push(point);
+                pl.push(point.build());
             } catch (StorageException ex) {
                 getLogger().error("Push to loader failed", ex);
             }
@@ -19,7 +19,7 @@ import hep.dataforge.context.Context;
 import hep.dataforge.context.GlobalContext;
 import hep.dataforge.control.connections.Roles;
 import hep.dataforge.control.connections.StorageConnection;
-import hep.dataforge.points.MapPoint;
+import hep.dataforge.tables.MapPoint;
 import hep.dataforge.exceptions.ControlException;
 import hep.dataforge.exceptions.PortException;
 import hep.dataforge.exceptions.StorageException;
@@ -282,7 +282,7 @@ public class MspViewController implements Initializable, MspListener {

     @Override
     public void acceptScan(Map<Integer, Double> measurement) {
-        MapPoint point = new MapPoint();
+        MapPoint.Builder point = new MapPoint.Builder();
         for (Map.Entry<Integer, Double> entry : measurement.entrySet()) {
             Double val = entry.getValue();
             if (val <= 0) {
@@ -290,7 +290,7 @@ public class MspViewController implements Initializable, MspListener {
             }
             point.putValue(Integer.toString(entry.getKey()), val);
         }
-        plottables.put(point);
+        plottables.put(point.build());
     }

     @Override
@@ -15,8 +15,8 @@
  */
 package inr.numass.readvac;

-import hep.dataforge.points.DataPoint;
-import hep.dataforge.points.MapPoint;
+import hep.dataforge.tables.DataPoint;
+import hep.dataforge.tables.MapPoint;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.values.Value;
 import java.math.BigDecimal;
@@ -15,8 +15,8 @@
  */
 package inr.numass.readvac;

-import hep.dataforge.points.DataPoint;
-import hep.dataforge.points.MapPoint;
+import hep.dataforge.tables.DataPoint;
+import hep.dataforge.tables.MapPoint;
 import hep.dataforge.io.LineIterator;
 import java.io.File;
 import java.io.FileNotFoundException;
@@ -30,7 +30,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-import hep.dataforge.points.PointParser;
+import hep.dataforge.tables.PointParser;

 /**
  *
@@ -21,7 +21,7 @@ import ch.qos.logback.core.AppenderBase;
 import ch.qos.logback.core.encoder.EchoEncoder;
 import ch.qos.logback.core.encoder.Encoder;
 import hep.dataforge.context.GlobalContext;
-import hep.dataforge.points.DataPoint;
+import hep.dataforge.tables.DataPoint;
 import hep.dataforge.io.BasicIOManager;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.meta.MetaBuilder;
@@ -15,8 +15,8 @@
  */
 package inr.numass.readvac;

-import hep.dataforge.points.FormatBuilder;
-import hep.dataforge.points.DataPoint;
+import hep.dataforge.tables.TableFormatBuilder;
+import hep.dataforge.tables.DataPoint;
 import hep.dataforge.exceptions.StorageException;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.storage.api.PointLoader;
@@ -75,8 +75,8 @@ public class VACManager implements AutoCloseable {

     private static PointLoader setupLoader(Storage storage, String run) throws StorageException {
         return LoaderFactory.buildPointLoder(storage, "vactms", run, "timestamp",
-                new FormatBuilder(names)
-                        .setFormat("timestamp", ValueType.TIME)
+                new TableFormatBuilder(names)
+                        .setType("timestamp", ValueType.TIME)
                         .build());
     }

@@ -9,7 +9,7 @@ import hep.dataforge.control.measurements.Sensor;
 import hep.dataforge.exceptions.StorageException;
 import hep.dataforge.io.MetaFileReader;
 import hep.dataforge.meta.Meta;
-import hep.dataforge.points.FormatBuilder;
+import hep.dataforge.tables.TableFormatBuilder;
 import hep.dataforge.storage.api.PointLoader;
 import hep.dataforge.storage.api.Storage;
 import hep.dataforge.storage.commons.LoaderFactory;
@@ -94,9 +94,9 @@ public class ReadVac extends Application {
             }
         }

-        FormatBuilder format = new FormatBuilder().setFormat("timestamp", ValueType.TIME);
+        TableFormatBuilder format = new TableFormatBuilder().setType("timestamp", ValueType.TIME);
         device.getSensors().stream().forEach((s) -> {
-            format.setFormat(s.getName(), ValueType.NUMBER);
+            format.setType(s.getName(), ValueType.NUMBER);
         });

         PointLoader pl = LoaderFactory.buildPointLoder(localStorage, "vactms", runName, "timestamp", format.build());
@@ -12,11 +12,11 @@ import hep.dataforge.control.devices.annotations.RoleDef;
 import hep.dataforge.control.measurements.AbstractMeasurement;
 import hep.dataforge.control.measurements.Measurement;
 import hep.dataforge.control.measurements.Sensor;
-import hep.dataforge.points.DataPoint;
+import hep.dataforge.tables.DataPoint;
 import hep.dataforge.exceptions.ControlException;
 import hep.dataforge.exceptions.MeasurementException;
-import hep.dataforge.points.MapPoint;
-import hep.dataforge.points.PointListener;
+import hep.dataforge.tables.MapPoint;
+import hep.dataforge.tables.PointListener;
 import hep.dataforge.values.Value;
 import java.time.Instant;
 import java.util.Collection;
@@ -132,12 +132,12 @@ public class VacCollectorDevice extends Sensor<DataPoint> {
     }

     private DataPoint terminator() {
-        MapPoint p = new MapPoint();
+        MapPoint.Builder p = new MapPoint.Builder();
         p.putValue("timestamp", Instant.now());
         sensorMap.keySet().stream().forEach((n) -> {
             p.putValue(n, null);
         });
-        return p;
+        return p.build();
     }

     @Override
@@ -12,7 +12,7 @@ import hep.dataforge.control.devices.DeviceListener;
 import hep.dataforge.control.measurements.Measurement;
 import hep.dataforge.control.measurements.MeasurementListener;
 import hep.dataforge.control.measurements.Sensor;
-import hep.dataforge.points.DataPoint;
+import hep.dataforge.tables.DataPoint;
 import hep.dataforge.exceptions.ControlException;
 import hep.dataforge.exceptions.MeasurementException;
 import hep.dataforge.fx.ConsoleFragment;
@@ -23,7 +23,7 @@ import hep.dataforge.plots.data.DynamicPlottable;
 import hep.dataforge.plots.data.DynamicPlottableSet;
 import hep.dataforge.plots.fx.PlotContainer;
 import hep.dataforge.plots.jfreechart.JFreeChartFrame;
-import hep.dataforge.points.FormatBuilder;
+import hep.dataforge.tables.TableFormatBuilder;
 import hep.dataforge.storage.api.PointLoader;
 import hep.dataforge.storage.api.Storage;
 import hep.dataforge.storage.commons.LoaderFactory;
@@ -259,9 +259,9 @@ public class VacCollectorController implements Initializable, DeviceListener, Me
         if (loaderFactory != null) {
             loader = loaderFactory.apply(device, localStorage);
         } else {
-            FormatBuilder format = new FormatBuilder().setFormat("timestamp", ValueType.TIME);
+            TableFormatBuilder format = new TableFormatBuilder().setType("timestamp", ValueType.TIME);
             device.getSensors().stream().forEach((s) -> {
-                format.setFormat(s.getName(), ValueType.NUMBER);
+                format.setType(s.getName(), ValueType.NUMBER);
             });

             loader = LoaderFactory.buildPointLoder(localStorage, "vactms",
@@ -17,7 +17,7 @@ package hep.dataforge.plotfit;

 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
-import hep.dataforge.points.XYAdapter;
+import hep.dataforge.tables.XYAdapter;
 import hep.dataforge.datafitter.FitState;
 import hep.dataforge.datafitter.models.XYModel;
 import hep.dataforge.description.NodeDef;
@@ -32,8 +32,8 @@ import hep.dataforge.plots.XYPlotFrame;
 import hep.dataforge.plots.data.PlottableData;
 import hep.dataforge.plots.data.PlottableFunction;
 import org.apache.commons.math3.analysis.UnivariateFunction;
-import hep.dataforge.points.PointSet;
-import hep.dataforge.points.PointSource;
+import hep.dataforge.tables.PointSource;
+import hep.dataforge.tables.Table;

 /**
  *
@@ -19,7 +19,7 @@ import hep.dataforge.actions.ActionManager;
 import hep.dataforge.context.BasicPlugin;
 import hep.dataforge.context.Context;
 import hep.dataforge.context.PluginDef;
-import hep.dataforge.points.XYAdapter;
+import hep.dataforge.tables.XYAdapter;
 import hep.dataforge.datafitter.FitManager;
 import hep.dataforge.datafitter.FitPlugin;
 import hep.dataforge.datafitter.models.ModelManager;
@@ -52,7 +52,7 @@ import inr.numass.models.TransmissionInterpolator;
 import inr.numass.models.VariableLossSpectrum;
 import org.apache.commons.math3.analysis.BivariateFunction;
 import org.apache.commons.math3.analysis.UnivariateFunction;
-import hep.dataforge.points.PointAdapter;
+import hep.dataforge.tables.PointAdapter;

 /**
  *
@@ -7,34 +7,34 @@ package inr.numass.actions;

 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
-import hep.dataforge.points.DataPoint;
-import hep.dataforge.points.ListPointSet;
-import hep.dataforge.points.MapPoint;
+import hep.dataforge.tables.DataPoint;
+import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.MapPoint;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.io.log.Logable;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
 import java.util.ArrayList;
 import java.util.List;
-import hep.dataforge.points.PointSet;
-import hep.dataforge.points.PointSource;
+import hep.dataforge.tables.PointSource;
+import hep.dataforge.tables.Table;

 /**
  * Adjust errors for all numass points in the dataset
  *
  * @author Alexander Nozik <altavir@gmail.com>
  */
-@TypedActionDef(name = "adjustErrors", inputType = PointSet.class, outputType = PointSet.class)
-public class AdjustErrorsAction extends OneToOneAction<PointSet, PointSet> {
+@TypedActionDef(name = "adjustErrors", inputType = Table.class, outputType = Table.class)
+public class AdjustErrorsAction extends OneToOneAction<Table, Table> {

     @Override
-    protected PointSet execute(Context context, Logable log, String name, Laminate meta, PointSet input) {
+    protected Table execute(Context context, Logable log, String name, Laminate meta, Table input) {
         List<DataPoint> points = new ArrayList<>();
         for (DataPoint dp : input) {
             points.add(evalPoint(meta, dp));
         }

-        return new ListPointSet(input.getFormat(), points);
+        return new ListTable(input.getFormat(), points);
     }

     private DataPoint evalPoint(Meta meta, DataPoint dp) {
@@ -65,8 +65,8 @@ public class AdjustErrorsAction extends OneToOneAction<PointSet, PointSet> {
     }

     private DataPoint adjust(DataPoint dp, Meta config) {
-        MapPoint res = new MapPoint(dp);
-        if (res.hasValue("CRerr")) {
+        MapPoint.Builder res = new MapPoint.Builder(dp);
+        if (dp.hasValue("CRerr")) {
             double instability = 0;
             if (dp.hasValue("CR")) {
                 instability = dp.getDouble("CR") * config.getDouble("instability", 0);
@@ -74,11 +74,11 @@ public class AdjustErrorsAction extends OneToOneAction<PointSet, PointSet> {

             double factor = config.getDouble("factor", 1d);
             double base = config.getDouble("base", 0);
-            double adjusted = res.getDouble("CRerr") * factor + instability + base;
+            double adjusted = dp.getDouble("CRerr") * factor + instability + base;
             res.putValue("CRerr", adjusted);
         } else {
             throw new RuntimeException("The value CRerr is not found in the data point!");
         }
-        return res;
+        return res.build();
     }
 }
@@ -15,8 +15,8 @@
  */
 package inr.numass.actions;

-import hep.dataforge.points.ListPointSet;
-import hep.dataforge.points.MapPoint;
+import hep.dataforge.tables.MapPoint;
+import hep.dataforge.tables.SimplePointSource;
 import hep.dataforge.values.Value;
 import inr.numass.data.NMFile;
 import inr.numass.data.NMPoint;
|
||||
*
|
||||
* @author Darksnake
|
||||
*/
|
||||
public class BorderData extends ListPointSet {
|
||||
public class BorderData extends SimplePointSource {
|
||||
|
||||
private final static String[] names = {"U", "80%", "90%", "95%", "99%"};
|
||||
private final static double[] percents = {0.8, 0.9, 0.95, 0.99};
|
||||
@ -79,7 +79,7 @@ public class BorderData extends ListPointSet {
|
||||
}
|
||||
}
|
||||
}
|
||||
this.add(new MapPoint(map));
|
||||
this.addRow(new MapPoint(map));
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -25,34 +25,34 @@ import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.io.log.Logable;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.meta.MetaBuilder;
-import hep.dataforge.points.DataPoint;
-import hep.dataforge.points.ListPointSet;
-import hep.dataforge.points.MapPoint;
-import hep.dataforge.points.PointSet;
-import hep.dataforge.points.PointSource;
+import hep.dataforge.tables.DataPoint;
+import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.MapPoint;
+import hep.dataforge.tables.PointSource;
 import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import hep.dataforge.tables.Table;

 /**
  *
  * @author Darksnake
  */
-@TypedActionDef(name = "merge", inputType = PointSet.class, outputType = PointSet.class, description = "Merge different numass data files into one.")
+@TypedActionDef(name = "merge", inputType = Table.class, outputType = Table.class, description = "Merge different numass data files into one.")
 @NodeDef(name = "grouping", info = "The defenition of grouping rule for this merge", target = "method::hep.dataforge.actions.GroupBuilder.byAnnotation")
-public class MergeDataAction extends ManyToOneAction<PointSet, PointSet> {
+public class MergeDataAction extends ManyToOneAction<Table, Table> {

     public static final String MERGE_NAME = "mergeName";
     public static String[] parnames = {"Uset", "Uread", "Length", "Total", "Window", "Corrected", "CR", "CRerr"};

     @Override
     @SuppressWarnings("unchecked")
-    protected List<DataNode<PointSet>> buildGroups(Context context, DataNode input, Meta actionMeta) {
+    protected List<DataNode<Table>> buildGroups(Context context, DataNode input, Meta actionMeta) {
         Meta meta = inputMeta(context, input.meta(), actionMeta);
-        List<DataNode<PointSet>> groups;
+        List<DataNode<Table>> groups;
         if (meta.hasValue("grouping.byValue")) {
             groups = super.buildGroups(context, input, actionMeta);
         } else {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected PointSet execute(Context context, Logable log, String nodeName, Map<String, PointSet> data, Meta meta) {
|
||||
PointSet res = mergeDataSets(nodeName, data.values());
|
||||
return new ListPointSet(res.getFormat(),res.sort("Uset", true));
|
||||
protected Table execute(Context context, Logable log, String nodeName, Map<String, Table> data, Meta meta) {
|
||||
Table res = mergeDataSets(nodeName, data.values());
|
||||
return new ListTable(res.getFormat(),res.sort("Uset", true));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void afterGroup(Context context, Logable log, String groupName, Meta outputMeta, PointSet output) {
|
||||
protected void afterGroup(Context context, Logable log, String groupName, Meta outputMeta, Table output) {
|
||||
OutputStream stream = buildActionOutput(context, groupName);
|
||||
ColumnedDataWriter.writeDataSet(stream, output, outputMeta.toString());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MetaBuilder outputMeta(DataNode<PointSet> input) {
|
||||
protected MetaBuilder outputMeta(DataNode<Table> input) {
|
||||
|
||||
String numassPath = input.stream().<String>map(item -> item.getValue().meta().getString("numass.path", null))
|
||||
.reduce("", (String path, String newPath) -> {
|
||||
@ -130,7 +130,7 @@ public class MergeDataAction extends ManyToOneAction<PointSet, PointSet> {
|
||||
// абсолютные ошибки складываются квадратично
|
||||
double crErr = Math.sqrt(err1 * err1 * t1 * t1 + err2 * err2 * t2 * t2) / time;
|
||||
|
||||
MapPoint map = new MapPoint(parnames, Uset, Uread, time, total, wind, corr, cr, crErr);
|
||||
MapPoint.Builder map = new MapPoint(parnames, Uset, Uread, time, total, wind, corr, cr, crErr).builder();
|
||||
|
||||
if (dp1.names().contains("relCR") && dp2.names().contains("relCR")) {
|
||||
double relCR = (dp1.getDouble("relCR") + dp2.getDouble("relCR")) / 2;
|
||||
@ -138,14 +138,14 @@ public class MergeDataAction extends ManyToOneAction<PointSet, PointSet> {
|
||||
map.putValue("relCRerr", crErr * relCR / cr);
|
||||
}
|
||||
|
||||
return map;
|
||||
return map.build();
|
||||
}
|
||||
|
||||
private PointSet mergeDataSets(String name, Collection<PointSet> ds) {
|
||||
private Table mergeDataSets(String name, Collection<Table> ds) {
|
||||
//Сливаем все точки в один набор данных
|
||||
Map<Double, List<DataPoint>> points = new LinkedHashMap<>();
|
||||
for (PointSource d : ds) {
|
||||
if (!d.getFormat().contains(parnames)) {
|
||||
if (!d.getFormat().names().contains(parnames)) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
for (DataPoint dp : d) {
|
||||
@ -169,7 +169,7 @@ public class MergeDataAction extends ManyToOneAction<PointSet, PointSet> {
|
||||
res.add(curPoint);
|
||||
});
|
||||
|
||||
return new ListPointSet(res);
|
||||
return new ListTable(res);
|
||||
|
||||
}
|
||||
|
||||
|
@@ -17,9 +17,9 @@ package inr.numass.actions;

 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
-import hep.dataforge.points.DataPoint;
-import hep.dataforge.points.ListPointSet;
-import hep.dataforge.points.MapPoint;
+import hep.dataforge.tables.DataPoint;
+import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.MapPoint;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.description.ValueDef;
 import hep.dataforge.exceptions.ContentException;
@@ -36,18 +36,18 @@ import java.util.List;
 import java.util.Map.Entry;
 import java.util.TreeMap;
 import java.util.concurrent.CopyOnWriteArrayList;
-import hep.dataforge.points.PointSet;
-import hep.dataforge.points.PointSource;
+import hep.dataforge.tables.PointSource;
+import hep.dataforge.tables.Table;

 /**
  *
  * @author Darksnake
  */
-@TypedActionDef(name = "monitor", inputType = PointSet.class, outputType = PointSet.class)
+@TypedActionDef(name = "monitor", inputType = Table.class, outputType = Table.class)
 @ValueDef(name = "monitorPoint", type = "NUMBER", required = true, info = "The Uset for monitor point")
 @ValueDef(name = "monitorFile", info = "The outputfile for monitor points", def = "monitor.out")
 @ValueDef(name = "calculateRelative", info = "Calculate count rate relative to average monitor point", def = "false")
-public class MonitorCorrectAction extends OneToOneAction<PointSet, PointSet> {
+public class MonitorCorrectAction extends OneToOneAction<Table, Table> {

     private static final String[] monitorNames = {"Timestamp", "Total", "CR", "CRerr"};

@@ -55,7 +55,7 @@ public class MonitorCorrectAction extends OneToOneAction<PointSet, PointSet> {
     //FIXME remove from state

     @Override
-    protected PointSet execute(Context context, Logable log, String name, Laminate meta, PointSet sourceData) throws ContentException {
+    protected Table execute(Context context, Logable log, String name, Laminate meta, Table sourceData) throws ContentException {

         double monitor = meta.getDouble("monitorPoint", Double.NaN);

@@ -80,8 +80,8 @@ public class MonitorCorrectAction extends OneToOneAction<PointSet, PointSet> {
         List<DataPoint> dataList = new ArrayList<>();

         for (DataPoint dp : sourceData) {
-            MapPoint point = new MapPoint(dp);
-            point.putValue("Monitor", 1.0);
+            MapPoint.Builder pb = new MapPoint.Builder(dp);
+            pb.putValue("Monitor", 1.0);
             if (!isMonitorPoint(monitor, dp) || index.isEmpty()) {
                 LocalDateTime time = getTime(dp);
                 Entry<LocalDateTime, DataPoint> previousMonitor = index.floorEntry(time);
@@ -105,31 +105,31 @@ public class MonitorCorrectAction extends OneToOneAction<PointSet, PointSet> {
                 double err = Math.sqrt(corrErr * corrErr + pointErr * pointErr) * getCR(dp);

                 if (dp.names().contains("Monitor")) {
-                    point.putValue("Monitor", Value.of(dp.getValue("Monitor").doubleValue() / corrFactor));
+                    pb.putValue("Monitor", Value.of(dp.getValue("Monitor").doubleValue() / corrFactor));
                 } else {
-                    point.putValue("Monitor", corrFactor);
+                    pb.putValue("Monitor", corrFactor);
                 }
-                point.putValue("CR", Value.of(dp.getValue("CR").doubleValue() / corrFactor));
-                point.putValue("Window", Value.of(dp.getValue("Window").doubleValue() / corrFactor));
-                point.putValue("Corrected", Value.of(dp.getValue("Corrected").doubleValue() / corrFactor));
-                point.putValue("CRerr", Value.of(err));
+                pb.putValue("CR", Value.of(dp.getValue("CR").doubleValue() / corrFactor));
+                pb.putValue("Window", Value.of(dp.getValue("Window").doubleValue() / corrFactor));
+                pb.putValue("Corrected", Value.of(dp.getValue("Corrected").doubleValue() / corrFactor));
+                pb.putValue("CRerr", Value.of(err));
             }
             if (meta.getBoolean("calculateRelative", false)) {
-                point.putValue("relCR", point.getValue("CR").doubleValue() / norm);
-                point.putValue("relCRerr", point.getValue("CRerr").doubleValue() / norm);
+                pb.putValue("relCR", pb.build().getValue("CR").doubleValue() / norm);
+                pb.putValue("relCRerr", pb.build().getValue("CRerr").doubleValue() / norm);
             }
-            dataList.add(point);
+            dataList.add(pb.build());
         }

 //        DataFormat format;
 //
 //        if (!dataList.isEmpty()) {
 //            // generate the format automatically from the first row
-//            format = DataFormat.of(dataList.get(0));
+//            format = DataFormat.of(dataList.getRow(0));
 //        } else {
 //            format = DataFormat.of(parnames);
 //        }
-        PointSet data = new ListPointSet(dataList);
+        Table data = new ListTable(dataList);

         OutputStream stream = buildActionOutput(context, name);

@@ -139,7 +139,7 @@ public class MonitorCorrectAction extends OneToOneAction<PointSet, PointSet> {
     }

     @Override
-    protected void afterAction(Context context, String name, PointSet res, Laminate meta) {
+    protected void afterAction(Context context, String name, Table res, Laminate meta) {
         printMonitorData(context, meta);
         super.afterAction(context, name, res, meta);
     }
@@ -147,7 +147,7 @@ public class MonitorCorrectAction extends OneToOneAction<PointSet, PointSet> {
     private void printMonitorData(Context context, Meta meta) {
         String monitorFileName = meta.getString("monitorFile", "monitor");
         OutputStream stream = buildActionOutput(context, monitorFileName);
-        ListPointSet data = new ListPointSet(monitorPoints);
+        ListTable data = new ListTable(monitorPoints);
         ColumnedDataWriter.writeDataSet(stream, data.sort("Timestamp", true), "Monitor points", monitorNames);
     }

@@ -17,10 +17,10 @@ package inr.numass.actions;

 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
-import hep.dataforge.points.PointFormat;
-import hep.dataforge.points.DataPoint;
-import hep.dataforge.points.ListPointSet;
-import hep.dataforge.points.MapPoint;
+import hep.dataforge.tables.TableFormat;
+import hep.dataforge.tables.DataPoint;
+import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.MapPoint;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.description.ValueDef;
 import hep.dataforge.exceptions.ContentException;
@@ -36,18 +36,18 @@ import java.io.OutputStream;
 import java.time.Instant;
 import java.util.ArrayList;
 import java.util.List;
-import hep.dataforge.points.PointSet;
+import hep.dataforge.tables.Table;

 /**
  *
  * @author Darksnake
  */
-@TypedActionDef(name = "prepareData", inputType = NMFile.class, outputType = PointSet.class)
+@TypedActionDef(name = "prepareData", inputType = NMFile.class, outputType = Table.class)
 @ValueDef(name = "lowerWindow", type = "NUMBER", def = "0", info = "Base for the window lowerWindow bound")
 @ValueDef(name = "lowerWindowSlope", type = "NUMBER", def = "0", info = "Slope for the window lowerWindow bound")
 @ValueDef(name = "upperWindow", type = "NUMBER", info = "Upper bound for window")
 @ValueDef(name = "deadTime", type = "NUMBER", def = "0", info = "Dead time in us")
-public class PrepareDataAction extends OneToOneAction<NMFile, PointSet> {
+public class PrepareDataAction extends OneToOneAction<NMFile, Table> {

     public static String[] parnames = {"Uset", "Uread", "Length", "Total", "Window", "Corrected", "CR", "CRerr", "Timestamp"};

@@ -59,7 +59,7 @@ public class PrepareDataAction extends OneToOneAction<NMFile, PointSet> {
     }

     @Override
-    protected ListPointSet execute(Context context, Logable log, String name, Laminate meta, NMFile dataFile) {
+    protected ListTable execute(Context context, Logable log, String name, Laminate meta, NMFile dataFile) {
 //        log.logString("File %s started", dataFile.getName());

         int upper = meta.getInt("upperWindow", RawNMPoint.MAX_CHANEL - 1);
@@ -90,13 +90,13 @@ public class PrepareDataAction extends OneToOneAction<NMFile, PointSet> {
             dataList.add(new MapPoint(parnames, new Object[]{Uset, Uread, time, total, wind, corr, cr, crErr, timestamp}));
         }

-        PointFormat format;
+        TableFormat format;

         if (!dataList.isEmpty()) {
             // generate the format automatically from the first row
-            format = PointFormat.forPoint(dataList.get(0));
+            format = TableFormat.forPoint(dataList.get(0));
         } else {
-            format = PointFormat.forNames(8, parnames);
+            format = TableFormat.fixedWidth(8, parnames);
         }

 //        AnnotationBuilder builder = dataFile.meta().getBuilder();
@@ -109,7 +109,7 @@ public class PrepareDataAction extends OneToOneAction<NMFile, PointSet> {
         }
        head = head + "\n" + new XMLMetaWriter().writeString(meta, null) + "\n";

-        ListPointSet data = new ListPointSet(format, dataList);
+        ListTable data = new ListTable(format, dataList);

         OutputStream stream = buildActionOutput(context, name);

@@ -17,9 +17,9 @@ package inr.numass.actions;

 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
-import hep.dataforge.points.ListPointSet;
-import hep.dataforge.points.MapPoint;
-import hep.dataforge.points.XYAdapter;
+import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.MapPoint;
+import hep.dataforge.tables.XYAdapter;
 import hep.dataforge.datafitter.FitState;
 import hep.dataforge.datafitter.FitTaskResult;
 import hep.dataforge.datafitter.Param;
@@ -34,7 +34,6 @@ import hep.dataforge.maths.NamedDoubleSet;
 import hep.dataforge.maths.NamedMatrix;
 import hep.dataforge.maths.integration.UnivariateIntegrator;
 import hep.dataforge.meta.Laminate;
-import hep.dataforge.meta.Meta;
 import hep.dataforge.meta.MetaBuilder;
 import hep.dataforge.plots.PlotsPlugin;
 import hep.dataforge.plots.XYPlotFrame;
|
||||
import org.apache.commons.math3.stat.StatUtils;
|
||||
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import hep.dataforge.points.PointSet;
|
||||
import hep.dataforge.tables.Table;
|
||||
|
||||
/**
|
||||
*
|
||||
@@ -171,8 +170,8 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {

             ParamSet parameters = input.getParameters().getSubSet(new String[]{"exPos", "ionPos", "exW", "ionW", "exIonRatio"});
             NamedMatrix covariance = input.getCovariance();
-            PointSet spreadData = generateSpread(writer, name, parameters, covariance);
-            ColumnedDataWriter.writeDataSet(System.out, spreadData, "", spreadData.getFormat().asArray());
+            Table spreadData = generateSpread(writer, name, parameters, covariance);
+            ColumnedDataWriter.writeDataSet(System.out, spreadData, "", spreadData.getFormat().namesAsArray());
         }
     }

@@ -185,7 +184,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
         return 1d - integrator.integrate(integrand, 5d, threshold);
     }

-    private double calculateIntegralExIonRatio(PointSet data, double X, double integralThreshold) {
+    private double calculateIntegralExIonRatio(Table data, double X, double integralThreshold) {
         double scatterProb = 1 - Math.exp(-X);

         double[] x = data.getColumn("Uset").asList().stream().mapToDouble((val) -> val.doubleValue()).toArray();
@@ -234,7 +233,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
         return new DescriptiveStatistics(res).getStandardDeviation();
     }

-    public static PointSet generateSpread(PrintWriter writer, String name, NamedDoubleSet parameters, NamedMatrix covariance) {
+    public static Table generateSpread(PrintWriter writer, String name, NamedDoubleSet parameters, NamedMatrix covariance) {
         int numCalls = 1000;
         int gridPoints = 200;
         double a = 8;
@@ -269,12 +268,12 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
             }
         }
         String[] pointNames = {"e", "central", "lower", "upper", "dispersion"};
-        ListPointSet res = new ListPointSet(pointNames);
+        ListTable.Builder res = new ListTable.Builder(pointNames);
         for (int i = 0; i < gridPoints; i++) {
-            res.add(new MapPoint(pointNames, grid[i], central[i], lower[i], upper[i], dispersion[i]));
+            res.addRow(new MapPoint(pointNames, grid[i], central[i], lower[i], upper[i], dispersion[i]));

         }
-        return res;
+        return res.build();
     }

 }
@@ -15,9 +15,10 @@
  */
 package inr.numass.actions;

-import hep.dataforge.points.PointFormat;
-import hep.dataforge.points.ListPointSet;
-import hep.dataforge.points.MapPoint;
+import hep.dataforge.tables.TableFormat;
+import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.MapPoint;
+import hep.dataforge.tables.SimplePointSource;
 import hep.dataforge.values.Value;
 import inr.numass.data.NMFile;
 import inr.numass.data.NMPoint;
@@ -30,17 +31,17 @@ import org.apache.commons.math3.util.Pair;
  *
  * @author Darksnake
  */
-public class SlicedData extends ListPointSet {
+public class SlicedData extends SimplePointSource {
     private static final String TNAME = "Time";
     //format = {U,username1,username2, ...}
     private static final String UNAME = "U";


-    private static PointFormat prepateFormat(Map<String,Pair<Integer,Integer>> intervals){
+    private static TableFormat prepateFormat(Map<String,Pair<Integer,Integer>> intervals){
         ArrayList<String> names = new ArrayList<>(intervals.keySet());
         names.add(0, TNAME);
         names.add(0, UNAME);
-        return PointFormat.forNames(8, names);
+        return TableFormat.fixedWidth(8, names);
     }


@@ -83,7 +84,7 @@ public class SlicedData extends ListPointSet {
                 map.put(name, Value.of(count));
             }
         }
-        this.add(new MapPoint(map));
+        this.addRow(new MapPoint(map));
     }

 }
@@ -19,10 +19,10 @@ import hep.dataforge.actions.ManyToOneAction;
 import hep.dataforge.actions.GroupBuilder;
 import hep.dataforge.context.Context;
 import hep.dataforge.data.DataNode;
-import hep.dataforge.points.PointFormat;
-import hep.dataforge.points.DataPoint;
-import hep.dataforge.points.ListPointSet;
-import hep.dataforge.points.MapPoint;
+import hep.dataforge.tables.TableFormat;
+import hep.dataforge.tables.DataPoint;
+import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.MapPoint;
 import hep.dataforge.datafitter.FitState;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.io.ColumnedDataWriter;
@@ -32,23 +32,23 @@ import hep.dataforge.values.Value;
 import java.io.OutputStream;
 import java.util.Arrays;
 import java.util.List;
-import hep.dataforge.points.PointSet;
 import java.util.Map;
+import hep.dataforge.tables.Table;

 /**
  *
  * @author Darksnake
  */
-@TypedActionDef(name = "summary", inputType = FitState.class, outputType = PointSet.class, description = "Generate summary for fit results of different datasets.")
-public class SummaryAction extends ManyToOneAction<FitState, PointSet> {
+@TypedActionDef(name = "summary", inputType = FitState.class, outputType = Table.class, description = "Generate summary for fit results of different datasets.")
+public class SummaryAction extends ManyToOneAction<FitState, Table> {

     public static final String SUMMARY_NAME = "sumName";

     @Override
     @SuppressWarnings("unchecked")
-    protected List<DataNode<PointSet>> buildGroups(Context context, DataNode input, Meta actionMeta) {
+    protected List<DataNode<Table>> buildGroups(Context context, DataNode input, Meta actionMeta) {
         Meta meta = inputMeta(context, input.meta(), actionMeta);
-        List<DataNode<PointSet>> groups;
+        List<DataNode<Table>> groups;
         if (meta.hasValue("grouping.byValue")) {
             groups = super.buildGroups(context, input, actionMeta);
         } else {
@@ -58,7 +58,7 @@ public class SummaryAction extends ManyToOneAction<FitState, PointSet> {
     }

     @Override
-    protected PointSet execute(Context context, Logable log, String nodeName, Map<String, FitState> input, Meta meta) {
+    protected Table execute(Context context, Logable log, String nodeName, Map<String, FitState> input, Meta meta) {
         String[] parNames = meta.getStringArray("parnames");
         String[] names = new String[2 * parNames.length + 2];
         names[0] = "file";
@@ -68,7 +68,7 @@ public class SummaryAction extends ManyToOneAction<FitState, PointSet> {
         }
         names[names.length - 1] = "chi2";

-        ListPointSet res = new ListPointSet(PointFormat.forNames(8, names));
+        ListTable.Builder res = new ListTable.Builder(TableFormat.fixedWidth(8, names));

         double[] weights = new double[parNames.length];
         Arrays.fill(weights, 0);
@@ -90,7 +90,7 @@ public class SummaryAction extends ManyToOneAction<FitState, PointSet> {
             }
             values[values.length - 1] = Value.of(state.getChi2());
             DataPoint point = new MapPoint(names, values);
-            res.add(point);
+            res.addRow(point);
         });

         Value[] averageValues = new Value[names.length];
@@ -102,13 +102,13 @@ public class SummaryAction extends ManyToOneAction<FitState, PointSet> {
             averageValues[2 * i + 2] = Value.of(1 / Math.sqrt(weights[i]));
         }

-        res.add(new MapPoint(names, averageValues));
+        res.addRow(new MapPoint(names, averageValues));

-        return res;
+        return res.build();
     }

     @Override
-    protected void afterGroup(Context context, Logable log, String groupName, Meta outputMeta, PointSet output) {
+    protected void afterGroup(Context context, Logable log, String groupName, Meta outputMeta, Table output) {
         OutputStream stream = buildActionOutput(context, groupName);
         ColumnedDataWriter.writeDataSet(stream, output, groupName);

@@ -15,48 +15,38 @@
  */
 package inr.numass.data;

-import hep.dataforge.points.PointFormat;
-import hep.dataforge.points.ListPointSet;
-import hep.dataforge.points.MapPoint;
+import hep.dataforge.tables.TableFormat;
+import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.MapPoint;
 import hep.dataforge.io.ColumnedDataWriter;
+import hep.dataforge.tables.SimplePointSource;
+import hep.dataforge.tables.TableFormatBuilder;
 import hep.dataforge.values.Value;
-import hep.dataforge.values.ValueFormat;
-import hep.dataforge.values.ValueFormatFactory;
 import hep.dataforge.values.ValueType;
 import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import static java.lang.String.format;
-import static java.lang.String.format;
-import static java.lang.String.format;
-import static java.lang.String.format;
-import static java.lang.String.format;
-import static java.lang.String.format;
-import static java.lang.String.format;
-import static java.lang.String.format;

 /**
  *
  * @author Darksnake
  */
-public class ESpectrum extends ListPointSet {
+public class ESpectrum extends SimplePointSource {

     private final static String binCenter = "chanel";

-    private static PointFormat prepareFormat(List<NMPoint> points) {
-//        ArrayList<String> names = new ArrayList<>();
-//        names.add(binCenter);
-        Map<String, ValueFormat> format = new LinkedHashMap<>();
-        format.put(binCenter, ValueFormatFactory.forType(ValueType.STRING));
-        for (NMPoint point : points) {
-//            names.add(format("%.3f", point.getUread()));
-            format.put(format("%.3f", point.getUread()), ValueFormatFactory.fixedWidth(10));
-        }
+    private static TableFormat prepareFormat(List<NMPoint> points) {
+        TableFormatBuilder builder = new TableFormatBuilder();

-        return new PointFormat(format);
+        builder.addString(binCenter);
+        points.stream().forEach((point) -> {
+            builder.addColumn(format("%.3f", point.getUread()), 10, ValueType.NUMBER);
+        });
+
+        return builder.build();
     }

     int binning = 1;
@@ -15,8 +15,8 @@
  */
 package inr.numass.data;

-import hep.dataforge.points.DataPoint;
-import hep.dataforge.points.MapPoint;
+import hep.dataforge.tables.DataPoint;
+import hep.dataforge.tables.MapPoint;
 import java.time.Instant;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -19,10 +19,10 @@ import hep.dataforge.exceptions.DataFormatException;
 import hep.dataforge.exceptions.NameNotFoundException;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.meta.MetaBuilder;
-import hep.dataforge.points.DataPoint;
-import hep.dataforge.points.MapPoint;
-import hep.dataforge.points.PointAdapter;
-import hep.dataforge.points.XYAdapter;
+import hep.dataforge.tables.DataPoint;
+import hep.dataforge.tables.MapPoint;
+import hep.dataforge.tables.PointAdapter;
+import hep.dataforge.tables.XYAdapter;
 import hep.dataforge.values.Value;

 /**
@@ -19,21 +19,15 @@ import hep.dataforge.datafitter.ParamSet;
 import hep.dataforge.datafitter.models.Generator;
 import hep.dataforge.datafitter.models.XYModel;
 import static hep.dataforge.maths.RandomUtils.getDefaultRandomGenerator;
-import hep.dataforge.points.DataPoint;
-import hep.dataforge.points.ListPointSet;
-import static java.lang.Double.isNaN;
+import hep.dataforge.tables.DataPoint;
+import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.Table;
 import static java.lang.Math.sqrt;
 import java.util.Iterator;
 import org.apache.commons.math3.random.JDKRandomGenerator;
 import org.apache.commons.math3.random.RandomDataGenerator;
 import org.apache.commons.math3.random.RandomGenerator;
 import static java.lang.Double.isNaN;
-import static java.lang.Double.isNaN;
-import static java.lang.Double.isNaN;
-import static java.lang.Double.isNaN;
-import static java.lang.Double.isNaN;
-import static java.lang.Double.isNaN;
-import static java.lang.Double.isNaN;

 /**
  * Data set generator for spectra. The input is a data set,
@@ -70,12 +64,12 @@ public class SpectrumGenerator implements Generator {
     }

     @Override
-    public ListPointSet generateData(Iterable<DataPoint> config) {
-        ListPointSet res = new ListPointSet(adapter.getFormat());
+    public Table generateData(Iterable<DataPoint> config) {
+        ListTable.Builder res = new ListTable.Builder(adapter.getFormat());
         for (Iterator<DataPoint> it = config.iterator(); it.hasNext();) {
-            res.add(this.generateDataPoint(it.next()));
+            res.addRow(this.generateDataPoint(it.next()));
         }
-        return res;
+        return res.build();
     }

     /**
@@ -85,12 +79,12 @@ public class SpectrumGenerator implements Generator {
      * @param config
      * @return
      */
-    public ListPointSet generateExactData(Iterable<DataPoint> config) {
-        ListPointSet res = new ListPointSet(adapter.getFormat());
+    public Table generateExactData(Iterable<DataPoint> config) {
+        ListTable.Builder res = new ListTable.Builder(adapter.getFormat());
         for (Iterator<DataPoint> it = config.iterator(); it.hasNext();) {
-            res.add(this.generateExactDataPoint(it.next()));
+            res.addRow(this.generateExactDataPoint(it.next()));
         }
-        return res;
+        return res.build();
     }

     public DataPoint generateExactDataPoint(DataPoint configPoint) {
@@ -15,8 +15,8 @@
  */
 package inr.numass.data;

-import hep.dataforge.points.DataPoint;
-import hep.dataforge.points.ListPointSet;
+import hep.dataforge.tables.DataPoint;
+import hep.dataforge.tables.ListTable;
 import hep.dataforge.functions.ParametricFunction;
 import static hep.dataforge.maths.MatrixOperations.inverse;
 import hep.dataforge.maths.NamedDoubleSet;
@@ -38,7 +38,7 @@ public class SpectrumInformation {
         this.source = source;
     }

-    public NamedMatrix getExpetedCovariance(NamedDoubleSet set, ListPointSet data, String... parNames) {
+    public NamedMatrix getExpetedCovariance(NamedDoubleSet set, ListTable data, String... parNames) {
         String[] names = parNames;
         if (names.length == 0) {
             names = source.namesAsArray();
@@ -56,7 +56,7 @@ public class SpectrumInformation {
      * @param parNames
      * @return
      */
-    public NamedMatrix getInformationMatrix(NamedDoubleSet set, ListPointSet data, String... parNames) {
+    public NamedMatrix getInformationMatrix(NamedDoubleSet set, ListTable data, String... parNames) {
         SpectrumDataAdapter reader = TritiumUtils.adapter();

         String[] names = parNames;
@@ -15,14 +15,14 @@
  */
 package inr.numass.models;

-import hep.dataforge.points.DataPoint;
+import hep.dataforge.tables.DataPoint;
 import hep.dataforge.io.IOUtils;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.util.ArrayList;
 import org.apache.commons.math3.util.Pair;
-import hep.dataforge.points.PointSet;
-import hep.dataforge.points.PointSource;
+import hep.dataforge.tables.PointSource;
+import hep.dataforge.tables.Table;

 /**
  *
@@ -19,13 +19,13 @@ package inr.numass.models;
 import hep.dataforge.functions.ParametricFunction;
 import hep.dataforge.maths.NamedDoubleSet;
 import org.apache.commons.math3.analysis.BivariateFunction;
-import hep.dataforge.names.NameSet;
+import hep.dataforge.names.NameSetContainer;

 /**
  *
  * @author Darksnake
  */
-public interface Transmission extends NameSet{
+public interface Transmission extends NameSetContainer{

     double getValue(NamedDoubleSet set, double input, double output);
     double getDeriv(String name, NamedDoubleSet set, double input, double output);
@@ -20,15 +20,15 @@ import hep.dataforge.context.Context;
 import hep.dataforge.data.DataNode;
 import hep.dataforge.io.ColumnedDataReader;
 import hep.dataforge.meta.Meta;
-import hep.dataforge.points.DataPoint;
-import hep.dataforge.points.PointSet;
-import hep.dataforge.points.PointSource;
+import hep.dataforge.tables.DataPoint;
+import hep.dataforge.tables.PointSource;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.util.ArrayList;
 import java.util.List;
 import org.apache.commons.math3.analysis.UnivariateFunction;
 import org.apache.commons.math3.analysis.interpolation.LinearInterpolator;
+import hep.dataforge.tables.Table;

 /**
  *
@@ -49,7 +49,7 @@ public class TransmissionInterpolator implements UnivariateFunction {
     @SuppressWarnings("unchecked")
     public static TransmissionInterpolator fromAction(Context context, Meta actionAnnotation,
             String xName, String yName, int nSmooth, double w, double border) throws InterruptedException {
-        DataNode<PointSet> node = ActionUtils.runConfig(context, actionAnnotation);
+        DataNode<Table> node = ActionUtils.runConfig(context, actionAnnotation);
         PointSource data = node.getData().get();
         return new TransmissionInterpolator(data, xName, yName, nSmooth, w, border);
     }
@@ -15,9 +15,10 @@
  */
 package inr.numass.utils;

-import hep.dataforge.points.DataPoint;
-import hep.dataforge.points.ListPointSet;
-import hep.dataforge.points.MapPoint;
+import hep.dataforge.tables.DataPoint;
+import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.MapPoint;
+import hep.dataforge.tables.Table;
 import java.util.Scanner;

 /**
@@ -26,37 +27,37 @@ import java.util.Scanner;
  */
 public class DataModelUtils {

-    public static ListPointSet getUniformSpectrumConfiguration(double from, double to, double time, int numpoints) {
+    public static Table getUniformSpectrumConfiguration(double from, double to, double time, int numpoints) {
         assert to != from;
         final String[] list = {"x", "time"};
-        ListPointSet res = new ListPointSet(list);
+        ListTable.Builder res = new ListTable.Builder(list);

         for (int i = 0; i < numpoints; i++) {
             // the formula works even when the points are in reverse order
             double x = from + (to - from) / (numpoints - 1) * i;
             DataPoint point = new MapPoint(list, x, time);
-            res.add(point);
+            res.addRow(point);
         }

-        return res;
+        return res.build();
     }

-    public static ListPointSet getSpectrumConfigurationFromResource(String resource) {
+    public static Table getSpectrumConfigurationFromResource(String resource) {
         final String[] list = {"x", "time"};
-        ListPointSet res = new ListPointSet(list);
+        ListTable.Builder res = new ListTable.Builder(list);
         Scanner scan = new Scanner(DataModelUtils.class.getResourceAsStream(resource));
         while (scan.hasNextLine()) {
             double x = scan.nextDouble();
             int time = scan.nextInt();
-            res.add(new MapPoint(list, x, time));
+            res.addRow(new MapPoint(list, x, time));
         }
-        return res;
+        return res.build();
     }

-//    public static ListPointSet maskDataSet(Iterable<DataPoint> data, String maskForX, String maskForY, String maskForYerr, String maskForTime) {
-//        ListPointSet res = new ListPointSet(XYDataPoint.names);
+//    public static ListTable maskDataSet(Iterable<DataPoint> data, String maskForX, String maskForY, String maskForYerr, String maskForTime) {
+//        ListTable res = new ListTable(XYDataPoint.names);
 //        for (DataPoint point : data) {
-//            res.add(SpectrumDataPoint.maskDataPoint(point, maskForX, maskForY, maskForYerr, maskForTime));
+//            res.addRow(SpectrumDataPoint.maskDataPoint(point, maskForX, maskForY, maskForYerr, maskForTime));
 //        }
 //        return res;
 //    }
@@ -16,142 +16,16 @@
 package inr.numass.utils;

 import hep.dataforge.context.GlobalContext;
-import hep.dataforge.points.DataPoint;
-import hep.dataforge.points.ListPointSet;
-import hep.dataforge.points.MapPoint;
+import hep.dataforge.tables.DataPoint;
+import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.MapPoint;
+import hep.dataforge.tables.Table;
 import inr.numass.data.SpectrumDataAdapter;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.util.Locale;
 import static java.util.Locale.setDefault;
 import java.util.Scanner;
-import static java.util.Locale.setDefault;
-import static java.util.Locale.setDefault;
-import static java.util.Locale.setDefault;
[... the old file contained over a hundred further verbatim duplicates of "import static java.util.Locale.setDefault;", all removed by this commit; the repeated removal lines are elided here ...]
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
import static java.util.Locale.setDefault;
|
||||
|
||||
/**
*
@ -159,31 +33,31 @@ import static java.util.Locale.setDefault;
*/
public class OldDataReader {

public static ListPointSet readConfig(String path) throws FileNotFoundException {
String[] list = {"X","time","ushift"};
ListPointSet res = new ListPointSet(list);
public static Table readConfig(String path) throws FileNotFoundException {
String[] list = {"X", "time", "ushift"};
ListTable.Builder res = new ListTable.Builder(list);
File file = GlobalContext.instance().io().getFile(path);
Scanner sc = new Scanner(file);
sc.nextLine();

while(sc.hasNextLine()){
while (sc.hasNextLine()) {
String line = sc.nextLine();
Scanner lineScan = new Scanner(line);
int time = lineScan.nextInt();
double u = lineScan.nextDouble();
double ushift = 0;
if(lineScan.hasNextDouble()){
if (lineScan.hasNextDouble()) {
ushift = lineScan.nextDouble();
}
DataPoint point = new MapPoint(list, u,time,ushift);
res.add(point);
DataPoint point = new MapPoint(list, u, time, ushift);
res.addRow(point);
}
return res;
return res.build();
}

public static ListPointSet readData(String path, double Elow) {
public static Table readData(String path, double Elow) {
SpectrumDataAdapter factory = new SpectrumDataAdapter();
ListPointSet res = new ListPointSet(factory.getFormat());
ListTable.Builder res = new ListTable.Builder(factory.getFormat());
File file = GlobalContext.instance().io().getFile(path);
double x;
int count;
@ -226,16 +100,16 @@ public class OldDataReader {
dummy = sc.nextDouble();
DataPoint point = factory.buildSpectrumDataPoint(x, count, time);
if (x >= Elow) {
res.add(point);
res.addRow(point);
}

}
return res;
return res.build();
}

public static ListPointSet readDataAsGun(String path, double Elow) {
public static Table readDataAsGun(String path, double Elow) {
SpectrumDataAdapter factory = new SpectrumDataAdapter();
ListPointSet res = new ListPointSet(factory.getFormat());
ListTable.Builder res = new ListTable.Builder(factory.getFormat());
File file = GlobalContext.instance().io().getFile(path);
double x;
long count;
@ -260,15 +134,15 @@ public class OldDataReader {
dummy = sc.nextDouble();
DataPoint point = factory.buildSpectrumDataPoint(x, count, time);
if (x > Elow) {
res.add(point);
res.addRow(point);
}
}
return res;
return res.build();
}

public static ListPointSet readSpectrumData(String path){
public static Table readSpectrumData(String path) {
SpectrumDataAdapter factory = new SpectrumDataAdapter();
ListPointSet res = new ListPointSet(factory.getFormat());
ListTable.Builder res = new ListTable.Builder(factory.getFormat());
File file = GlobalContext.instance().io().getFile(path);
double x;
double count;
@ -300,14 +174,14 @@ public class OldDataReader {
count = lsc.nextDouble();
cr = lsc.nextDouble();
crErr = lsc.nextDouble();
DataPoint point = factory.buildSpectrumDataPoint(x, (long)(cr*time), crErr*time, time);
DataPoint point = factory.buildSpectrumDataPoint(x, (long) (cr * time), crErr * time, time);
// SpectrumDataPoint point = new SpectrumDataPoint(x, (long) count, time);

res.add(point);
res.addRow(point);
}
}
}
return res;
return res.build();
}

}

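A hypothetical caller of the reworked readers, for orientation. It assumes a Table can be iterated as DataPoints, as the for-each loops in TritiumUtils below also assume; the file path and the Value accessors used here are illustrative:

import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.Table;
import java.io.FileNotFoundException;

public class OldDataReaderUsageSketch {
    public static void main(String[] args) throws FileNotFoundException {
        // "config.dat" is an illustrative path, not a file from this repository
        Table config = OldDataReader.readConfig("config.dat");
        for (DataPoint dp : config) {
            // column names match the "list" array in readConfig above
            double u = dp.getValue("X").doubleValue();
            double ushift = dp.getValue("ushift").doubleValue();
        }
    }
}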
@ -15,20 +15,14 @@
*/
package inr.numass.utils;

import hep.dataforge.points.DataPoint;
import hep.dataforge.points.ListPointSet;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.Table;
import inr.numass.data.SpectrumDataAdapter;
import static java.lang.Math.exp;
import static java.lang.Math.sqrt;
import org.apache.commons.math3.analysis.UnivariateFunction;
import static java.lang.Math.abs;

/**
*
@ -36,29 +30,8 @@ import static java.lang.Math.abs;
*/
public class TritiumUtils {

// /**
// * Linear drift of the intensity as a function of time. Dimension:
// * inverse seconds
// *
// * @param data
// * @param driftPerSecond
// * @return
// */
// public static ListPointSet applyDrift(ListPointSet data, double driftPerSecond) {
// double t = 0;
//
// ListPointSet res = new ListPointSet(data.getFormat());
// for (DataPoint d : data) {
// SpectrumDataPoint dp = (SpectrumDataPoint) d;
// double corrFactor = 1 + driftPerSecond * t;
// dp = new SpectrumDataPoint(dp.getX(), (long) (dp.getCount() * corrFactor), dp.getTime());
// res.add(dp);
// t += dp.getTime();
// }
// return res;
//
// }
public static ListPointSet correctForDeadTime(ListPointSet data, double dtime) {

public static Table correctForDeadTime(ListTable data, double dtime) {
return correctForDeadTime(data, adapter(), dtime);
}

@ -69,14 +42,14 @@ public class TritiumUtils {
* @param dtime
* @return
*/
public static ListPointSet correctForDeadTime(ListPointSet data, SpectrumDataAdapter adapter, double dtime) {
public static Table correctForDeadTime(ListTable data, SpectrumDataAdapter adapter, double dtime) {
// SpectrumDataAdapter adapter = adapter();
ListPointSet res = new ListPointSet(data.getFormat());
ListTable.Builder res = new ListTable.Builder(data.getFormat());
for (DataPoint dp : data) {
double corrFactor = 1 / (1 - dtime * adapter.getCount(dp) / adapter.getTime(dp));
res.add(adapter.buildSpectrumDataPoint(adapter.getX(dp).doubleValue(), (long) (adapter.getCount(dp) * corrFactor), adapter.getTime(dp)));
res.addRow(adapter.buildSpectrumDataPoint(adapter.getX(dp).doubleValue(), (long) (adapter.getCount(dp) * corrFactor), adapter.getTime(dp)));
}
return res;
return res.build();
}
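The corrFactor expression above is the standard non-paralyzable dead-time correction: the measured rate times the dead time gives the fraction of acquisition time the detector was busy, and the true count is recovered by dividing by one minus that fraction. A worked example with illustrative numbers:

public class DeadTimeSketch {
    public static void main(String[] args) {
        long count = 100000;    // registered counts (illustrative)
        double time = 10.0;     // acquisition time, seconds
        double dtime = 5e-6;    // dead time per event, seconds
        // busy fraction: measured rate (1e4 /s) times dead time (5e-6 s) = 0.05
        double busy = dtime * count / time;
        // same factor as in correctForDeadTime above: 1 / (1 - 0.05) ≈ 1.0526
        double corrFactor = 1 / (1 - busy);
        System.out.println((long) (count * corrFactor)); // ≈ 105263 corrected counts
    }
}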
/**
@ -86,14 +59,14 @@ public class TritiumUtils {
* @param beta
* @return
*/
public static ListPointSet setHVScale(ListPointSet data, double beta) {
public static Table setHVScale(ListTable data, double beta) {
SpectrumDataAdapter reader = adapter();
ListPointSet res = new ListPointSet(data.getFormat());
ListTable.Builder res = new ListTable.Builder(data.getFormat());
for (DataPoint dp : data) {
double corrFactor = 1 + beta;
res.add(reader.buildSpectrumDataPoint(reader.getX(dp).doubleValue() * corrFactor, reader.getCount(dp), reader.getTime(dp)));
res.addRow(reader.buildSpectrumDataPoint(reader.getX(dp).doubleValue() * corrFactor, reader.getCount(dp), reader.getTime(dp)));
}
return res;
return res.build();
}

public static SpectrumDataAdapter adapter() {
@ -24,8 +24,8 @@ import hep.dataforge.context.Context;
import hep.dataforge.context.DFProcess;
import hep.dataforge.context.ProcessManager;
import hep.dataforge.exceptions.StorageException;
import hep.dataforge.points.DataPoint;
import hep.dataforge.points.MapPoint;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.plots.PlotUtils;
import hep.dataforge.plots.data.DynamicPlottable;
import hep.dataforge.plots.data.DynamicPlottableSet;
@ -147,13 +147,13 @@ public class MspViewController {
* @return
*/
private DataPoint terminatorPoint(DataPoint last) {
MapPoint p = new MapPoint();
MapPoint.Builder p = new MapPoint.Builder();
p.putValue("timestamp", last.getValue("timestamp").timeValue().plusMillis(10));
for (String name : last.namesAsArray()) {
if (!name.equals("timestamp")) {
p.putValue(name, Value.NULL);
}
}
return p;
return p.build();
}
}
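terminatorPoint now assembles its point through MapPoint.Builder instead of mutating a MapPoint in place. A minimal sketch of that construction, assuming Value lives at hep.dataforge.values.Value; the peak column name is illustrative:

import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.values.Value; // assumed package for Value
import java.time.Instant;

public class TerminatorPointSketch {
    static DataPoint terminator() {
        MapPoint.Builder p = new MapPoint.Builder();
        p.putValue("timestamp", Instant.now());
        p.putValue("peak1", Value.NULL); // a NULL value breaks the plotted line
        return p.build(); // immutable DataPoint from here on
    }
}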
@ -22,10 +22,10 @@ package inr.numass.viewer;
*/
import hep.dataforge.context.Context;
import hep.dataforge.context.ProcessManager;
import hep.dataforge.points.DataPoint;
import hep.dataforge.points.ListPointSet;
import hep.dataforge.points.MapPoint;
import hep.dataforge.points.XYAdapter;
import hep.dataforge.tables.DataPoint;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.tables.XYAdapter;
import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
@ -73,7 +73,7 @@ import org.controlsfx.validation.ValidationSupport;
import org.controlsfx.validation.Validator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import hep.dataforge.points.PointSet;
import hep.dataforge.tables.Table;

/**
* FXML Controller class
@ -208,7 +208,7 @@ public class NumassLoaderViewComponent extends AnchorPane implements Initializab
// }
// Viewer.runTask(task);
// try {
// this.points = task.get();
// this.points = task.getRow();
// } catch (InterruptedException | ExecutionException ex) {
// logger.error("Can't load spectrum data points", ex);
// }
@ -411,10 +411,10 @@ public class NumassLoaderViewComponent extends AnchorPane implements Initializab
int loChannel = (int) channelSlider.getLowValue();
int upChannel = (int) channelSlider.getHighValue();
double dTime = getDTime();
ListPointSet spectrumDataSet = new ListPointSet(names);
ListTable.Builder spectrumDataSet = new ListTable.Builder(names);

for (NMPoint point : points) {
spectrumDataSet.add(new MapPoint(names, new Object[]{
spectrumDataSet.addRow(new MapPoint(names, new Object[]{
point.getUset(),
point.getUread(),
point.getLength(),
@ -434,7 +434,7 @@ public class NumassLoaderViewComponent extends AnchorPane implements Initializab
data.getName(), loChannel, upChannel, dTime);

ColumnedDataWriter
.writeDataSet(destination, spectrumDataSet, comment, false);
.writeDataSet(destination, spectrumDataSet.build(), comment, false);
} catch (IOException ex) {
LoggerFactory.getLogger(getClass()).error("Destination file not found", ex);
}
@ -448,7 +448,7 @@ public class NumassLoaderViewComponent extends AnchorPane implements Initializab
fileChooser.setInitialFileName(data.getName() + "_detector.out");
File destination = fileChooser.showSaveDialog(detectorPlotPane.getScene().getWindow());
if (destination != null) {
PointSet detectorData = PlotDataUtils.collectXYDataFromPlot(detectorPlotFrame, true);
Table detectorData = PlotDataUtils.collectXYDataFromPlot(detectorPlotFrame, true);
try {
ColumnedDataWriter
.writeDataSet(destination, detectorData, "Numass data viewer detector data export for " + data.getName(),
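Putting this hunk's pieces together, a hypothetical end-to-end export: build an immutable Table, then hand it to ColumnedDataWriter.writeDataSet with the same argument shape as the calls above. The column names and values are illustrative, and the meaning of the final boolean flag is assumed from context:

import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.MapPoint;
import hep.dataforge.tables.Table;
import java.io.File;
import java.io.IOException;

public class ExportSketch {
    public static void main(String[] args) throws IOException {
        String[] names = {"Uset", "Uread", "Length"};
        ListTable.Builder builder = new ListTable.Builder(names);
        builder.addRow(new MapPoint(names, 18500.0, 18499.7, 20)); // illustrative row
        Table table = builder.build();
        // writeDataSet(File, Table, String comment, boolean) as used in the diff above;
        // the final flag is passed as false there, matching that call here
        ColumnedDataWriter.writeDataSet(new File("spectrum.out"), table, "example export", false);
    }
}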