commit 7c0a61bffb (parent ecf91cb442)

    [no commit message]
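This commit systematically renames types in the hep.dataforge.data API: DataSet → PointSet, ListDataSet → ListPointSet, DataFormat → Format, DataFormatBuilder → FormatBuilder, DataAdapter → PointAdapter, XYDataAdapter → XYAdapter, and DataParser → PointParser. Call sites swap only the type names; constructor and method shapes stay the same. A minimal before/after sketch (hypothetical call site, not taken from this commit):

    import hep.dataforge.data.Format;
    import hep.dataforge.data.ListPointSet;
    import hep.dataforge.data.MapDataPoint;

    public class RenameExample {
        // Before: ListDataSet res = new ListDataSet("example", DataFormat.forNames(8, names));
        public static ListPointSet build() {
            String[] names = {"x", "y"};
            // After the rename: same constructor shapes, new type names.
            ListPointSet res = new ListPointSet("example", Format.forNames(8, names));
            res.add(new MapDataPoint(names, 1.0, 2.0));
            return res;
        }
    }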
@@ -21,7 +21,7 @@ import hep.dataforge.control.collectors.RegularPointCollector;
 import hep.dataforge.control.measurements.DataDevice;
 import hep.dataforge.control.ports.PortHandler;
 import hep.dataforge.control.ports.TcpPortHandler;
-import hep.dataforge.data.DataFormatBuilder;
+import hep.dataforge.data.FormatBuilder;
 import hep.dataforge.exceptions.ControlException;
 import hep.dataforge.exceptions.PortException;
 import hep.dataforge.exceptions.StorageException;
@@ -85,7 +85,7 @@ public class PKT8Device extends DataDevice<PKT8Device.PKT8Measurement> implement
         String suffix = Integer.toString((int) Instant.now().toEpochMilli());

         // Building data format
-        DataFormatBuilder formatBuilder = new DataFormatBuilder()
+        FormatBuilder formatBuilder = new FormatBuilder()
                 .addTime("timestamp");
         List<String> names = new ArrayList<>();

@@ -22,8 +22,8 @@ import hep.dataforge.control.measurements.AbstractMeasurement;
 import hep.dataforge.control.measurements.Measurement;
 import hep.dataforge.control.ports.PortHandler;
 import hep.dataforge.control.ports.TcpPortHandler;
-import hep.dataforge.data.DataFormat;
-import hep.dataforge.data.DataFormatBuilder;
+import hep.dataforge.data.Format;
+import hep.dataforge.data.FormatBuilder;
 import hep.dataforge.data.DataPoint;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.exceptions.ControlException;
@@ -387,12 +387,12 @@ public class MspDevice extends SingleMeasurementDevice implements PortHandler.Po
             throw new IllegalStateException("Peak map is not initialized");
         }

-        DataFormatBuilder builder = new DataFormatBuilder().addTime("timestamp");
+        FormatBuilder builder = new FormatBuilder().addTime("timestamp");
         for (String peakName : this.peakMap.values()) {
             builder.addNumber(peakName);
         }

-        DataFormat format = builder.build();
+        Format format = builder.build();

         //TODO Redo this!!!
         String run = meta().getString("numass.run", "");
@@ -13,130 +13,130 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package inr.numass.readvac;

-import hep.dataforge.data.DataParser;
 import hep.dataforge.data.DataPoint;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.io.LineIterator;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.time.Instant;
 import java.time.LocalDateTime;
 import java.time.ZoneOffset;
 import java.time.format.DateTimeFormatter;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import hep.dataforge.data.PointParser;

 /**
  *
  * @author Darksnake
  */
 public class VACFileReader implements Iterator<DataPoint> {

     private static final DateTimeFormatter formatter = DateTimeFormatter.ofPattern("dd.MM.yyyy HH:mm:ss");//14.04.2014 21:30:10

     public static VACFileReader fromDirectory(String dir) throws FileNotFoundException {
         File directory = new File(dir);
         String[] list = directory.list((File dir1, String name) -> name.startsWith("VacTMS") && name.endsWith(".txt"));
         if(list.length == 0){
             throw new FileNotFoundException("Data files not found in the given directory");
         }
         Arrays.sort(list);
         return new VACFileReader(new File(directory,list[list.length-1]));
     }

     public static VACFileReader fromFile(String file) throws FileNotFoundException {
         return new VACFileReader(new File(file));
     }

     private final LineIterator iterator;
-    private final DataParser parser;
+    private final PointParser parser;

     private VACFileReader(File vacFile) throws FileNotFoundException {
         this.iterator = new LineIterator(vacFile);
         iterator.next();
         parser = new LikhovidVACParser();
     }

-    public VACFileReader(File vacFile, DataParser parser) throws FileNotFoundException {
+    public VACFileReader(File vacFile, PointParser parser) throws FileNotFoundException {
         this.iterator = new LineIterator(vacFile);
         iterator.next();
         this.parser = parser;
     }

     public DataPoint get(Instant time) {
         throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
     }

     public DataPoint getLast() {
         DataPoint point = null;
         while (hasNext()) {
             point = next();
         }
         return point;
     }

     @Override
     public boolean hasNext() {
         return iterator.hasNext();
     }

     @Override
     public DataPoint next() {
         if (iterator.hasNext()) {
             return parser.parse(iterator.next());
         } else {
             return null;
         }
     }

     public List<DataPoint> updateFrom(Instant from) {
         List<DataPoint> res = new ArrayList<>();
         while (iterator.hasNext()) {
             DataPoint point = next();
             if (point != null && point.getValue("timestamp").timeValue().isAfter(from)) {
                 res.add(point);
             }
         }
         return res;
     }

     public List<DataPoint> updateFrom() {
         List<DataPoint> res = new ArrayList<>();
         while (iterator.hasNext()) {
             DataPoint point = next();
             if (point != null) {
                 res.add(point);
             }
         }
         return res;
     }

-    private static class LikhovidVACParser implements DataParser {
+    private static class LikhovidVACParser implements PointParser {
         static final Pattern pattern = Pattern.compile("(\\S* \\S*)\\s*(\\S*);\\s*(\\S*)\\s*(\\S*)\\s*(\\S*)");
         @Override
         public DataPoint parse(String str) {
             Matcher matcher = pattern.matcher(str);
             if(!matcher.matches()){
                 return null;
             }

             LocalDateTime dt = LocalDateTime.parse(matcher.group(1), formatter);
             Instant time = dt.toInstant(ZoneOffset.ofHours(0));
             String p1 = matcher.group(2);
             String p2 = matcher.group(3);
             String p3 = matcher.group(4);
             String px = matcher.group(5);


             return new MapDataPoint(VACManager.names, new Object[]{time, p1, p2, p3, px});

         }
     }

 }
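The VACFileReader change above swaps the DataParser interface for PointParser; judging by LikhovidVACParser, the contract is still a single DataPoint parse(String) method that may return null for unparseable lines. A minimal custom parser against the renamed interface (the column names here are hypothetical):

    import hep.dataforge.data.DataPoint;
    import hep.dataforge.data.MapDataPoint;
    import hep.dataforge.data.PointParser;

    public class TwoColumnParser implements PointParser {

        private static final String[] NAMES = {"timestamp", "value"};

        @Override
        public DataPoint parse(String str) {
            // Split a "time value" line on whitespace; reject malformed lines
            // with null, following the LikhovidVACParser convention.
            String[] parts = str.trim().split("\\s+");
            if (parts.length < 2) {
                return null;
            }
            return new MapDataPoint(NAMES, parts[0], Double.parseDouble(parts[1]));
        }
    }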
@@ -15,7 +15,7 @@
  */
 package inr.numass.readvac;

-import hep.dataforge.data.DataFormatBuilder;
+import hep.dataforge.data.FormatBuilder;
 import hep.dataforge.data.DataPoint;
 import hep.dataforge.exceptions.StorageException;
 import hep.dataforge.meta.Meta;
@@ -75,7 +75,7 @@ public class VACManager implements AutoCloseable {

     private static PointLoader setupLoader(Storage storage, String run) throws StorageException {
         return LoaderFactory.buildPointLoder(storage, "vactms", run, "timestamp",
-                new DataFormatBuilder(names)
+                new FormatBuilder(names)
                         .setFormat("timestamp", ValueType.TIME)
                         .build());
     }
@@ -17,8 +17,7 @@ package hep.dataforge.plotfit;

 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
-import hep.dataforge.data.DataSet;
-import hep.dataforge.data.XYDataAdapter;
+import hep.dataforge.data.XYAdapter;
 import hep.dataforge.datafitter.FitState;
 import hep.dataforge.datafitter.models.XYModel;
 import hep.dataforge.description.NodeDef;
@@ -32,6 +31,7 @@ import hep.dataforge.plots.XYPlotFrame;
 import hep.dataforge.plots.data.PlottableData;
 import hep.dataforge.plots.data.PlottableFunction;
 import org.apache.commons.math3.analysis.UnivariateFunction;
+import hep.dataforge.data.PointSet;

 /**
  *
@@ -49,16 +49,16 @@ public class PlotFitResultAction extends OneToOneAction<FitState, FitState> {
     @Override
     protected FitState execute(Logable log, Meta metaData, FitState input) {

-        DataSet data = input.getDataSet();
+        PointSet data = input.getDataSet();
         if (!(input.getModel() instanceof XYModel)) {
             log.logError("The fit model should be instance of XYModel for this action. Action failed!");
             return input;
         }
         XYModel model = (XYModel) input.getModel();

-        XYDataAdapter adapter;
+        XYAdapter adapter;
         if (metaData.hasNode("adapter")) {
-            adapter = new XYDataAdapter(metaData.getNode("adapter"));
+            adapter = new XYAdapter(metaData.getNode("adapter"));
         } else if (input.getModel() instanceof XYModel) {
             adapter = model.getAdapter();
         } else {
@@ -19,8 +19,7 @@ import hep.dataforge.actions.ActionManager;
 import hep.dataforge.context.BasicPlugin;
 import hep.dataforge.context.Context;
 import hep.dataforge.context.PluginDef;
-import hep.dataforge.data.DataAdapter;
-import hep.dataforge.data.XYDataAdapter;
+import hep.dataforge.data.XYAdapter;
 import hep.dataforge.datafitter.FitManager;
 import hep.dataforge.datafitter.FitPlugin;
 import hep.dataforge.datafitter.models.Model;
@@ -54,6 +53,7 @@ import inr.numass.models.TransmissionInterpolator;
 import inr.numass.models.VariableLossSpectrum;
 import org.apache.commons.math3.analysis.BivariateFunction;
 import org.apache.commons.math3.analysis.UnivariateFunction;
+import hep.dataforge.data.PointAdapter;

 /**
  *
@@ -265,11 +265,11 @@ public class NumassPlugin extends BasicPlugin {
         }
     }

-    private XYDataAdapter getAdapter(Meta an) {
-        if (an.hasNode(DataAdapter.DATA_ADAPTER_ANNOTATION_NAME)) {
-            return new XYDataAdapter(an.getNode(DataAdapter.DATA_ADAPTER_ANNOTATION_NAME));
+    private XYAdapter getAdapter(Meta an) {
+        if (an.hasNode(PointAdapter.DATA_ADAPTER_ANNOTATION_NAME)) {
+            return new XYAdapter(an.getNode(PointAdapter.DATA_ADAPTER_ANNOTATION_NAME));
         } else {
-            return new XYDataAdapter("Uread", "CR", "CRerr");
+            return new XYAdapter("Uread", "CR", "CRerr");
         }
     }
 }
@@ -8,35 +8,35 @@ package inr.numass.actions;
 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.DataSet;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.io.log.Logable;
 import hep.dataforge.meta.Meta;
 import java.util.ArrayList;
 import java.util.List;
+import hep.dataforge.data.PointSet;

 /**
  * Adjust errors for all numass points in the dataset
  *
  * @author Alexander Nozik <altavir@gmail.com>
  */
-@TypedActionDef(name = "adjustErrors", inputType = DataSet.class, outputType = DataSet.class)
-public class AdjustErrorsAction extends OneToOneAction<DataSet, DataSet> {
+@TypedActionDef(name = "adjustErrors", inputType = PointSet.class, outputType = PointSet.class)
+public class AdjustErrorsAction extends OneToOneAction<PointSet, PointSet> {

     public AdjustErrorsAction(Context context, Meta annotation) {
         super(context, annotation);
     }

     @Override
-    protected DataSet execute(Logable log, Meta meta, DataSet input) {
+    protected PointSet execute(Logable log, Meta meta, PointSet input) {
         List<DataPoint> points = new ArrayList<>();
         for (DataPoint dp : input) {
             points.add(evalPoint(meta, dp));
         }

-        return new ListDataSet(input.getName(), input.meta(), points, input.getDataFormat());
+        return new ListPointSet(input.getName(), input.meta(), points, input.getDataFormat());
     }

     private DataPoint evalPoint(Meta meta, DataPoint dp) {
@@ -15,7 +15,7 @@
  */
 package inr.numass.actions;

-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.values.Value;
 import inr.numass.data.NMFile;
@@ -27,7 +27,7 @@ import java.util.Map;
  *
  * @author Darksnake
  */
-public class BorderData extends ListDataSet {
+public class BorderData extends ListPointSet {

     private final static String[] names = {"U", "80%", "90%", "95%", "99%"};
     private final static double[] percents = {0.8, 0.9, 0.95, 0.99};
@@ -20,8 +20,7 @@ import hep.dataforge.content.GroupBuilder;
 import hep.dataforge.content.NamedGroup;
 import hep.dataforge.context.Context;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.DataSet;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.description.NodeDef;
 import hep.dataforge.description.TypedActionDef;
@@ -33,15 +32,16 @@ import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import hep.dataforge.data.PointSet;

 /**
  *
  * @author Darksnake
  */
-@TypedActionDef(name = "merge", inputType = DataSet.class, outputType = DataSet.class, description = "Merge different numass data files into one.")
+@TypedActionDef(name = "merge", inputType = PointSet.class, outputType = PointSet.class, description = "Merge different numass data files into one.")
 @NodeDef(name = "grouping", info = "The defenition of grouping rule for this merge", target = "method::hep.dataforge.content.GroupBuilder.byAnnotation")
 //@Parameter(name = "groupBy", def = "mergeTag", info = "Defines the name of the value by which grouping is made. The value is supposed to be a String, but in practice could be any type which could be converted to String.")
-public class MergeDataAction extends ManyToOneAction<DataSet, DataSet> {
+public class MergeDataAction extends ManyToOneAction<PointSet, PointSet> {

     public static final String MERGE_NAME = "mergeName";
     public static String[] parnames = {"Uset", "Uread", "Length", "Total", "Window", "Corrected", "CR", "CRerr"};
@@ -51,8 +51,8 @@ public class MergeDataAction extends ManyToOneAction<DataSet, DataSet> {
     }

     @Override
-    protected List<NamedGroup<DataSet>> buildGroups(Meta reader, List<DataSet> input) {
-        List<NamedGroup<DataSet>> groups;
+    protected List<NamedGroup<PointSet>> buildGroups(Meta reader, List<PointSet> input) {
+        List<NamedGroup<PointSet>> groups;
         if (reader.hasValue("grouping.byValue")) {
             groups = super.buildGroups(reader, input);
         } else {
@@ -62,17 +62,17 @@ public class MergeDataAction extends ManyToOneAction<DataSet, DataSet> {
     }

     @Override
-    protected DataSet execute(Logable log, Meta reader, NamedGroup<DataSet> input) {
+    protected PointSet execute(Logable log, Meta reader, NamedGroup<PointSet> input) {
         return mergeOne(log, input.getName(), input.asList());
 //        List<DataSet> res = new ArrayList<>();
-//        for (NamedGroup<DataSet> group : groups) {
-//            res.add(mergeOne(log, group.getName(), group.asList()));
+//        for (NamedGroup<DataSet> buildGroups : groups) {
+//            res.add(mergeOne(log, buildGroups.getName(), buildGroups.asList()));
 //        }
-//        return new ContentList<>(input.getName(), DataSet.class, res);
+//        return new ContentList<>(input.getName(), PointSet.class, res);
     }

-    private DataSet mergeOne(Logable log, String fileName, List<DataSet> files) {
-        DataSet[] data = new DataSet[files.size()];
+    private PointSet mergeOne(Logable log, String fileName, List<PointSet> files) {
+        PointSet[] data = new PointSet[files.size()];
         String head = "Numass data merge\n";

         String numassPath = "";
@@ -96,7 +96,7 @@ public class MergeDataAction extends ManyToOneAction<DataSet, DataSet> {
             }
         }

-        DataSet res = mergeDataSets(fileName, data);
+        PointSet res = mergeDataSets(fileName, data);

         /*
          * Set the path only if it is the same for all input files
@@ -116,7 +116,7 @@ public class MergeDataAction extends ManyToOneAction<DataSet, DataSet> {

 //    private Map<String, List<DataSet>> buildMergeGroups(String mergeBy, NamedGroup<DataSet> input) {
 //        Map<String, List<DataSet>> map = new HashMap<>();
-//        for (DataSet ds : input) {
+//        for (PointSet ds : input) {
 //            String tag = ds.meta().getString(mergeBy, meta().getString(mergeBy, "merge"));
 //            if (!map.containsKey(tag)) {
 //                map.put(tag, new ArrayList<>());
@@ -167,10 +167,10 @@ public class MergeDataAction extends ManyToOneAction<DataSet, DataSet> {
         return map;
     }

-    private DataSet mergeDataSets(String name, DataSet... ds) {
+    private PointSet mergeDataSets(String name, PointSet... ds) {
         //Merge all points into a single data set
         Map<Double, List<DataPoint>> points = new LinkedHashMap<>();
-        for (DataSet d : ds) {
+        for (PointSet d : ds) {
             if (!d.getDataFormat().contains(parnames)) {
                 throw new IllegalArgumentException();
             }
@@ -193,7 +193,7 @@ public class MergeDataAction extends ManyToOneAction<DataSet, DataSet> {
             res.add(curPoint);
         }

-        return new ListDataSet(name, null, res);
+        return new ListPointSet(name, null, res);

     }

@@ -19,8 +19,7 @@ import hep.dataforge.actions.ActionResult;
 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.DataSet;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.description.ValueDef;
@@ -37,16 +36,17 @@ import java.util.List;
 import java.util.Map.Entry;
 import java.util.TreeMap;
 import java.util.concurrent.CopyOnWriteArrayList;
+import hep.dataforge.data.PointSet;

 /**
  *
  * @author Darksnake
  */
-@TypedActionDef(name = "monitor", inputType = DataSet.class, outputType = DataSet.class)
+@TypedActionDef(name = "monitor", inputType = PointSet.class, outputType = PointSet.class)
 @ValueDef(name = "monitorPoint", type = "NUMBER", required = true, info = "The Uset for monitor point")
 @ValueDef(name = "monitorFile", info = "The outputfile for monitor points", def = "monitor.out")
 @ValueDef(name = "calculateRelative", info = "Calculate count rate relative to average monitor point", def = "false")
-public class MonitorCorrectAction extends OneToOneAction<DataSet, DataSet> {
+public class MonitorCorrectAction extends OneToOneAction<PointSet, PointSet> {

     private static final String[] monitorNames = {"Timestamp", "Total", "CR", "CRerr"};

@@ -57,7 +57,7 @@ public class MonitorCorrectAction extends OneToOneAction<DataSet, DataSet> {
     }

     @Override
-    protected DataSet execute(Logable log, Meta reader, DataSet sourceData) throws ContentException {
+    protected PointSet execute(Logable log, Meta reader, PointSet sourceData) throws ContentException {

         double monitor = reader.getDouble("monitorPoint", Double.NaN);

@@ -131,7 +131,7 @@ public class MonitorCorrectAction extends OneToOneAction<DataSet, DataSet> {
 //        } else {
 //            format = DataFormat.of(parnames);
 //        }
-        DataSet data = new ListDataSet(sourceData.getName(), sourceData.meta(), dataList);
+        PointSet data = new ListPointSet(sourceData.getName(), sourceData.meta(), dataList);

         OutputStream stream = buildActionOutput(data);

@@ -141,7 +141,7 @@ public class MonitorCorrectAction extends OneToOneAction<DataSet, DataSet> {
     }

     @Override
-    protected void afterAction(ActionResult<DataSet> pack) throws ContentException {
+    protected void afterAction(ActionResult<PointSet> pack) throws ContentException {
         printMonitorData();
         super.afterAction(pack);
     }
@@ -149,7 +149,7 @@ public class MonitorCorrectAction extends OneToOneAction<DataSet, DataSet> {
     private void printMonitorData() {
         String monitorFileName = meta().getString("monitorFile", "monitor");
         OutputStream stream = buildActionOutput(monitorFileName);
-        ListDataSet data = new ListDataSet("monitor", null, monitorPoints);
+        ListPointSet data = new ListPointSet("monitor", null, monitorPoints);
         ColumnedDataWriter.writeDataSet(stream, data.sort("Timestamp", true), "Monitor points", monitorNames);
     }

@@ -17,10 +17,9 @@ package inr.numass.actions;

 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
-import hep.dataforge.data.DataFormat;
+import hep.dataforge.data.Format;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.DataSet;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.description.ValueDef;
@@ -36,17 +35,18 @@ import java.io.OutputStream;
 import java.time.Instant;
 import java.util.ArrayList;
 import java.util.List;
+import hep.dataforge.data.PointSet;

 /**
  *
  * @author Darksnake
  */
-@TypedActionDef(name = "prepareData", inputType = NMFile.class, outputType = DataSet.class)
+@TypedActionDef(name = "prepareData", inputType = NMFile.class, outputType = PointSet.class)
 @ValueDef(name = "lowerWindow", type = "NUMBER", def = "0", info = "Base for the window lowerWindow bound")
 @ValueDef(name = "lowerWindowSlope", type = "NUMBER", def = "0", info = "Slope for the window lowerWindow bound")
 @ValueDef(name = "upperWindow", type = "NUMBER", info = "Upper bound for window")
 @ValueDef(name = "deadTime", type = "NUMBER", def = "0", info = "Dead time in us")
-public class PrepareDataAction extends OneToOneAction<NMFile, DataSet> {
+public class PrepareDataAction extends OneToOneAction<NMFile, PointSet> {

     public static String[] parnames = {"Uset", "Uread", "Length", "Total", "Window", "Corrected", "CR", "CRerr", "Timestamp"};

@@ -62,7 +62,7 @@ public class PrepareDataAction extends OneToOneAction<NMFile, DataSet> {
     }

     @Override
-    protected ListDataSet execute(Logable log, Meta reader, NMFile dataFile) throws ContentException {
+    protected ListPointSet execute(Logable log, Meta reader, NMFile dataFile) throws ContentException {
 //        log.logString("File %s started", dataFile.getName());

         int upper = dataFile.meta().getInt("upperWindow", this.meta().getInt("upperWindow", RawNMPoint.MAX_CHANEL - 1));
@@ -93,13 +93,13 @@ public class PrepareDataAction extends OneToOneAction<NMFile, DataSet> {
             dataList.add(new MapDataPoint(parnames, new Object[]{Uset, Uread, time, total, wind, corr, cr, crErr, timestamp}));
         }

-        DataFormat format;
+        Format format;

         if (!dataList.isEmpty()) {
             //Generate the format automatically from the first row
-            format = DataFormat.forPoint(dataList.get(0));
+            format = Format.forPoint(dataList.get(0));
         } else {
-            format = DataFormat.forNames(8, parnames);
+            format = Format.forNames(8, parnames);
         }

 //        AnnotationBuilder builder = dataFile.meta().getBuilder();
@@ -112,7 +112,7 @@ public class PrepareDataAction extends OneToOneAction<NMFile, DataSet> {
         }
         head = head + "\n" + new XMLMetaWriter().writeString(meta(), null) + "\n";

-        ListDataSet data = new ListDataSet(dataFile.getName(), dataFile.meta(), dataList, format);
+        ListPointSet data = new ListPointSet(dataFile.getName(), dataFile.meta(), dataList, format);

         OutputStream stream = buildActionOutput(data);

@@ -17,10 +17,9 @@ package inr.numass.actions;

 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
-import hep.dataforge.data.DataSet;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
-import hep.dataforge.data.XYDataAdapter;
+import hep.dataforge.data.XYAdapter;
 import hep.dataforge.datafitter.FitState;
 import hep.dataforge.datafitter.FitTaskResult;
 import hep.dataforge.datafitter.Param;
@@ -54,6 +53,7 @@ import org.apache.commons.math3.analysis.interpolation.UnivariateInterpolator;
 import org.apache.commons.math3.stat.StatUtils;
 import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
 import org.slf4j.LoggerFactory;
+import hep.dataforge.data.PointSet;

 /**
  *
@@ -174,7 +174,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {

             ParamSet parameters = input.getParameters().getSubSet(new String[]{"exPos", "ionPos", "exW", "ionW", "exIonRatio"});
             NamedMatrix covariance = input.getCovariance();
-            DataSet spreadData = generateSpread(writer, input.getName(), parameters, covariance);
+            PointSet spreadData = generateSpread(writer, input.getName(), parameters, covariance);
             ColumnedDataWriter.writeDataSet(System.out, spreadData, "", spreadData.getDataFormat().asArray());
         }
     }
@@ -188,7 +188,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
         return 1d - integrator.integrate(integrand, 5d, threshold);
     }

-    private double calculateIntegralExIonRatio(DataSet data, double X, double integralThreshold) {
+    private double calculateIntegralExIonRatio(PointSet data, double X, double integralThreshold) {
         double scatterProb = 1 - Math.exp(-X);

         double[] x = data.getColumn("Uset").asList().stream().mapToDouble((val) -> val.doubleValue()).toArray();
@@ -232,12 +232,12 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
                 new MetaBuilder("plot").setValue("plotTitle", "Ion ratio Distribution for " + name)
         );
 //        XYPlotFrame frame = JFreeChartFrame.drawFrame("Ion ratio Distribution for " + name, null);
-        frame.add(PlottableData.plot(hist, new XYDataAdapter("binCenter", "count")));
+        frame.add(PlottableData.plot(hist, new XYAdapter("binCenter", "count")));

         return new DescriptiveStatistics(res).getStandardDeviation();
     }

-    public static DataSet generateSpread(PrintWriter writer, String name, NamedDoubleSet parameters, NamedMatrix covariance) {
+    public static PointSet generateSpread(PrintWriter writer, String name, NamedDoubleSet parameters, NamedMatrix covariance) {
         int numCalls = 1000;
         int gridPoints = 200;
         double a = 8;
@@ -272,7 +272,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
             }
         }
         String[] pointNames = {"e", "central", "lower", "upper", "dispersion"};
-        ListDataSet res = new ListDataSet("spread", pointNames);
+        ListPointSet res = new ListPointSet("spread", pointNames);
         for (int i = 0; i < gridPoints; i++) {
             res.add(new MapDataPoint(pointNames, grid[i], central[i], lower[i], upper[i], dispersion[i]));

@@ -15,8 +15,8 @@
  */
 package inr.numass.actions;

-import hep.dataforge.data.DataFormat;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.Format;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.values.Value;
 import inr.numass.data.NMFile;
@@ -30,17 +30,17 @@ import org.apache.commons.math3.util.Pair;
  *
  * @author Darksnake
  */
-public class SlicedData extends ListDataSet {
+public class SlicedData extends ListPointSet {
     private static final String TNAME = "Time";
     //format = {U,username1,username2, ...}
     private static final String UNAME = "U";


-    private static DataFormat prepateFormat(Map<String,Pair<Integer,Integer>> intervals){
+    private static Format prepateFormat(Map<String,Pair<Integer,Integer>> intervals){
         ArrayList<String> names = new ArrayList<>(intervals.keySet());
         names.add(0, TNAME);
         names.add(0, UNAME);
-        return DataFormat.forNames(8, names);
+        return Format.forNames(8, names);
     }


@@ -19,10 +19,9 @@ import hep.dataforge.actions.ManyToOneAction;
 import hep.dataforge.content.GroupBuilder;
 import hep.dataforge.content.NamedGroup;
 import hep.dataforge.context.Context;
-import hep.dataforge.data.DataFormat;
+import hep.dataforge.data.Format;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.DataSet;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.datafitter.FitState;
 import hep.dataforge.description.TypedActionDef;
@@ -33,13 +32,14 @@ import hep.dataforge.values.Value;
 import java.io.OutputStream;
 import java.util.Arrays;
 import java.util.List;
+import hep.dataforge.data.PointSet;

 /**
  *
  * @author Darksnake
  */
-@TypedActionDef(name = "summary", inputType = FitState.class, outputType = DataSet.class, description = "Generate summary for fit results of different datasets.")
-public class SummaryAction extends ManyToOneAction<FitState, DataSet> {
+@TypedActionDef(name = "summary", inputType = FitState.class, outputType = PointSet.class, description = "Generate summary for fit results of different datasets.")
+public class SummaryAction extends ManyToOneAction<FitState, PointSet> {

     public static final String SUMMARY_NAME = "sumName";

@@ -59,7 +59,7 @@ public class SummaryAction extends ManyToOneAction<FitState, DataSet> {
     }

     @Override
-    protected DataSet execute(Logable log, Meta reader, NamedGroup<FitState> input){
+    protected PointSet execute(Logable log, Meta reader, NamedGroup<FitState> input){
         String[] parNames = meta().getStringArray("parnames");
         String[] names = new String[2 * parNames.length + 2];
         names[0] = "file";
@@ -72,7 +72,7 @@ public class SummaryAction extends ManyToOneAction<FitState, DataSet> {
 //        boolean calculateWAV = meta().getBoolean("wav", true);
         String fileName = reader.getString(SUMMARY_NAME, "summary");

-        ListDataSet res = new ListDataSet(fileName, DataFormat.forNames(8, names));
+        ListPointSet res = new ListPointSet(fileName, Format.forNames(8, names));

         double[] weights = new double[parNames.length];
         Arrays.fill(weights, 0);
@@ -15,8 +15,8 @@
  */
 package inr.numass.data;

-import hep.dataforge.data.DataFormat;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.Format;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.values.Value;
@@ -30,16 +30,19 @@ import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import static java.lang.String.format;
+import static java.lang.String.format;
+import static java.lang.String.format;

 /**
  *
  * @author Darksnake
  */
-public class ESpectrum extends ListDataSet {
+public class ESpectrum extends ListPointSet {

     private final static String binCenter = "chanel";

-    private static DataFormat prepareFormat(List<NMPoint> points) {
+    private static Format prepareFormat(List<NMPoint> points) {
 //        ArrayList<String> names = new ArrayList<>();
 //        names.add(binCenter);
         Map<String, ValueFormat> format = new LinkedHashMap<>();
@@ -49,7 +52,7 @@ public class ESpectrum extends ListDataSet {
             format.put(format("%.3f", point.getUread()), ValueFormatFactory.fixedWidth(10));
         }

-        return new DataFormat(format);
+        return new Format(format);
     }

     int binning = 1;

@@ -15,21 +15,21 @@
  */
 package inr.numass.data;

-import hep.dataforge.data.DataAdapter;
 import hep.dataforge.data.DataPoint;
 import hep.dataforge.data.MapDataPoint;
-import hep.dataforge.data.XYDataAdapter;
+import hep.dataforge.data.XYAdapter;
 import hep.dataforge.exceptions.DataFormatException;
 import hep.dataforge.exceptions.NameNotFoundException;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.meta.MetaBuilder;
 import hep.dataforge.values.Value;
+import hep.dataforge.data.PointAdapter;

 /**
  *
  * @author Darksnake
  */
-public class SpectrumDataAdapter extends XYDataAdapter {
+public class SpectrumDataAdapter extends XYAdapter {

     private static final String POINT_LENGTH_NAME = "time";

@@ -41,7 +41,7 @@ public class SpectrumDataAdapter extends XYDataAdapter {
     }

     public SpectrumDataAdapter(String xName, String yName, String yErrName, String measurementTime) {
-        super(new MetaBuilder(DataAdapter.DATA_ADAPTER_ANNOTATION_NAME)
+        super(new MetaBuilder(PointAdapter.DATA_ADAPTER_ANNOTATION_NAME)
                 .setValue(X_NAME, xName)
                 .setValue(Y_NAME, yName)
                 .setValue(Y_ERR_NAME, yErrName)
@@ -51,7 +51,7 @@ public class SpectrumDataAdapter extends XYDataAdapter {
     }

     public SpectrumDataAdapter(String xName, String yName, String measurementTime) {
-        super(new MetaBuilder(DataAdapter.DATA_ADAPTER_ANNOTATION_NAME)
+        super(new MetaBuilder(PointAdapter.DATA_ADAPTER_ANNOTATION_NAME)
                 .setValue(X_NAME, xName)
                 .setValue(Y_NAME, yName)
                 .setValue(POINT_LENGTH_NAME, measurementTime)
@@ -16,7 +16,7 @@
 package inr.numass.data;

 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.datafitter.ParamSet;
 import hep.dataforge.datafitter.models.Generator;
 import hep.dataforge.datafitter.models.XYModel;
@@ -27,6 +27,9 @@ import java.util.Iterator;
 import org.apache.commons.math3.random.JDKRandomGenerator;
 import org.apache.commons.math3.random.RandomDataGenerator;
 import org.apache.commons.math3.random.RandomGenerator;
+import static java.lang.Double.isNaN;
+import static java.lang.Double.isNaN;
+import static java.lang.Double.isNaN;

 /**
  * A generator of data sets for spectra. It requires a data set as input,
@@ -63,8 +66,8 @@ public class SpectrumGenerator implements Generator {
     }

     @Override
-    public ListDataSet generateData(Iterable<DataPoint> config) {
-        ListDataSet res = new ListDataSet(adapter.getFormat());
+    public ListPointSet generateData(Iterable<DataPoint> config) {
+        ListPointSet res = new ListPointSet(adapter.getFormat());
         for (Iterator<DataPoint> it = config.iterator(); it.hasNext();) {
             res.add(this.generateDataPoint(it.next()));
         }
@@ -16,7 +16,7 @@
 package inr.numass.data;

 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.functions.ParametricFunction;
 import static hep.dataforge.maths.MatrixOperations.inverse;
 import hep.dataforge.maths.NamedDoubleSet;
@@ -37,7 +37,7 @@ public class SpectrumInformation {
         this.source = source;
     }

-    public NamedMatrix getExpetedCovariance(NamedDoubleSet set, ListDataSet data, String... parNames) {
+    public NamedMatrix getExpetedCovariance(NamedDoubleSet set, ListPointSet data, String... parNames) {
         String[] names = parNames;
         if(names.length==0) {
             names = source.namesAsArray();
@@ -55,7 +55,7 @@ public class SpectrumInformation {
      * @param parNames
      * @return
      */
-    public NamedMatrix getInformationMatrix(NamedDoubleSet set, ListDataSet data, String... parNames) {
+    public NamedMatrix getInformationMatrix(NamedDoubleSet set, ListPointSet data, String... parNames) {
         SpectrumDataAdapter reader = new SpectrumDataAdapter(data.meta().getNode("aliases"));

         String[] names = parNames;
@@ -13,61 +13,61 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package inr.numass.models;

 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.DataSet;
 import hep.dataforge.io.IOUtils;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.util.ArrayList;
 import org.apache.commons.math3.util.Pair;
+import hep.dataforge.data.PointSet;

 /**
  *
  * @author Darksnake
  */
 public class FSS{
     private final ArrayList<Pair<Double,Double>> points;
     private double norm;

     public FSS(File FSSFile) {
         try {

-            DataSet data = IOUtils.readColumnedData(FSSFile,"E","P");
+            PointSet data = IOUtils.readColumnedData(FSSFile,"E","P");
             this.points = new ArrayList<>();
             norm = 0;
             for (DataPoint dp : data) {
                 Double E = dp.getValue("E").doubleValue();
                 Double P = dp.getValue("P").doubleValue();
                 this.points.add(new Pair<>(E,P));
                 norm += P;
             }
             if(points.isEmpty()) {
                 throw new Error("Error reading FSS FILE. No points.");
             }
         } catch (FileNotFoundException ex) {
             throw new Error("Error reading FSS FILE. File not found.");
         }
     }




     double getE(int n){
         return this.points.get(n).getFirst();
     }

     double getP(int n){
         return this.points.get(n).getSecond() / norm;
     }

     boolean isEmpty(){
         return points.isEmpty();
     }

     int size(){
         return points.size();

     }
 }
@@ -19,7 +19,6 @@ import hep.dataforge.actions.ActionResult;
 import hep.dataforge.actions.RunManager;
 import hep.dataforge.context.Context;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.DataSet;
 import hep.dataforge.io.ColumnedDataReader;
 import hep.dataforge.meta.Meta;
 import java.io.File;
@@ -28,6 +27,7 @@ import java.util.ArrayList;
 import java.util.List;
 import org.apache.commons.math3.analysis.UnivariateFunction;
 import org.apache.commons.math3.analysis.interpolation.LinearInterpolator;
+import hep.dataforge.data.PointSet;

 /**
  *
@@ -47,8 +47,8 @@ public class TransmissionInterpolator implements UnivariateFunction {

     @SuppressWarnings("unchecked")
     public static TransmissionInterpolator fromAction(Context context, Meta actionAnnotation, String xName, String yName, int nSmooth, double w, double border) throws InterruptedException {
-        ActionResult<DataSet> pack = RunManager.executeAction(context, actionAnnotation);
-        DataSet data = pack.iterator().next().get();
+        ActionResult<PointSet> pack = RunManager.executeAction(context, actionAnnotation);
+        PointSet data = pack.iterator().next().get();
         return new TransmissionInterpolator(data, xName, yName, nSmooth, w, border);
     }

@@ -13,39 +13,39 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package inr.numass.utils;

 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;


 /**
  *
  * @author Darksnake
  */
 public class DataModelUtils {

-    public static ListDataSet getUniformSpectrumConfiguration(double from, double to, double time, int numpoints) {
+    public static ListPointSet getUniformSpectrumConfiguration(double from, double to, double time, int numpoints) {
         assert to != from;
         final String[] list = {"x", "time"};
-        ListDataSet res = new ListDataSet(list);
+        ListPointSet res = new ListPointSet(list);

         for (int i = 0; i < numpoints; i++) {
             // the formula works even when the points are in reverse order
             double x = from + (to - from) / (numpoints - 1) * i;
             DataPoint point = new MapDataPoint(list, x,time);
             res.add(point);
         }

         return res;
     }

-//    public static ListDataSet maskDataSet(Iterable<DataPoint> data, String maskForX, String maskForY, String maskForYerr, String maskForTime) {
-//        ListDataSet res = new ListDataSet(XYDataPoint.names);
+//    public static ListPointSet maskDataSet(Iterable<DataPoint> data, String maskForX, String maskForY, String maskForYerr, String maskForTime) {
+//        ListPointSet res = new ListPointSet(XYDataPoint.names);
 //        for (DataPoint point : data) {
 //            res.add(SpectrumDataPoint.maskDataPoint(point, maskForX, maskForY, maskForYerr, maskForTime));
 //        }
 //        return res;
 //    }
 }
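A possible call site for the renamed helper above (the numbers are illustrative, not from this commit):

    // A uniform 10-point scan from 14000 to 18600, 30 seconds per point:
    ListPointSet config = DataModelUtils.getUniformSpectrumConfiguration(14000d, 18600d, 30d, 10);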
@@ -17,7 +17,7 @@ package inr.numass.utils;

 import hep.dataforge.context.GlobalContext;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import hep.dataforge.data.MapDataPoint;
 import inr.numass.data.SpectrumDataAdapter;
 import java.io.File;
@@ -25,6 +25,9 @@ import java.io.FileNotFoundException;
 import java.util.Locale;
 import static java.util.Locale.setDefault;
 import java.util.Scanner;
+import static java.util.Locale.setDefault;
+import static java.util.Locale.setDefault;
+import static java.util.Locale.setDefault;

 /**
  *
@@ -32,9 +35,9 @@ import java.util.Scanner;
  */
 public class OldDataReader {

-    public static ListDataSet readConfig(String path) throws FileNotFoundException {
+    public static ListPointSet readConfig(String path) throws FileNotFoundException {
         String[] list = {"X","time","ushift"};
-        ListDataSet res = new ListDataSet(list);
+        ListPointSet res = new ListPointSet(list);
         File file = GlobalContext.instance().io().getFile(path);
         Scanner sc = new Scanner(file);
         sc.nextLine();
@@ -54,9 +57,9 @@ public class OldDataReader {
         return res;
     }

-    public static ListDataSet readData(String path, double Elow) {
+    public static ListPointSet readData(String path, double Elow) {
         SpectrumDataAdapter factory = new SpectrumDataAdapter();
-        ListDataSet res = new ListDataSet(factory.getFormat());
+        ListPointSet res = new ListPointSet(factory.getFormat());
         File file = GlobalContext.instance().io().getFile(path);
         double x;
         int count;
@@ -106,9 +109,9 @@ public class OldDataReader {
         return res;
     }

-    public static ListDataSet readDataAsGun(String path, double Elow) {
+    public static ListPointSet readDataAsGun(String path, double Elow) {
         SpectrumDataAdapter factory = new SpectrumDataAdapter();
-        ListDataSet res = new ListDataSet(factory.getFormat());
+        ListPointSet res = new ListPointSet(factory.getFormat());
         File file = GlobalContext.instance().io().getFile(path);
         double x;
         long count;
@@ -139,9 +142,9 @@ public class OldDataReader {
         return res;
     }

-    public static ListDataSet readSpectrumData(String path){
+    public static ListPointSet readSpectrumData(String path){
         SpectrumDataAdapter factory = new SpectrumDataAdapter();
-        ListDataSet res = new ListDataSet(factory.getFormat());
+        ListPointSet res = new ListPointSet(factory.getFormat());
         File file = GlobalContext.instance().io().getFile(path);
         double x;
         double count;
@@ -16,12 +16,15 @@
 package inr.numass.utils;

 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.ListDataSet;
+import hep.dataforge.data.ListPointSet;
 import inr.numass.data.SpectrumDataAdapter;
 import static java.lang.Math.abs;
 import static java.lang.Math.exp;
 import static java.lang.Math.sqrt;
 import org.apache.commons.math3.analysis.UnivariateFunction;
+import static java.lang.Math.abs;
+import static java.lang.Math.abs;
+import static java.lang.Math.abs;

 /**
  *
@@ -38,10 +41,10 @@ public class TritiumUtils {
 //     * @param driftPerSecond
 //     * @return
 //     */
-//    public static ListDataSet applyDrift(ListDataSet data, double driftPerSecond) {
+//    public static ListPointSet applyDrift(ListPointSet data, double driftPerSecond) {
 //        double t = 0;
 //
-//        ListDataSet res = new ListDataSet(data.getDataFormat());
+//        ListPointSet res = new ListPointSet(data.getDataFormat());
 //        for (DataPoint d : data) {
 //            SpectrumDataPoint dp = (SpectrumDataPoint) d;
 //            double corrFactor = 1 + driftPerSecond * t;
@@ -60,9 +63,9 @@ public class TritiumUtils {
      * @param dtime
      * @return
      */
-    public static ListDataSet correctForDeadTime(ListDataSet data, double dtime) {
+    public static ListPointSet correctForDeadTime(ListPointSet data, double dtime) {
         SpectrumDataAdapter reader = new SpectrumDataAdapter(data.meta().getNode("aliases"));
-        ListDataSet res = new ListDataSet(data.getDataFormat());
+        ListPointSet res = new ListPointSet(data.getDataFormat());
         for (DataPoint dp : data) {
             double corrFactor = 1 / (1 - dtime * reader.getCount(dp) /reader.getTime(dp));
             res.add(reader.buildSpectrumDataPoint(reader.getX(dp).doubleValue(), (long) (reader.getCount(dp)*corrFactor),reader.getTime(dp)));
@@ -78,9 +81,9 @@ public class TritiumUtils {
      * @param beta
      * @return
      */
-    public static ListDataSet setHVScale(ListDataSet data, double beta) {
+    public static ListPointSet setHVScale(ListPointSet data, double beta) {
         SpectrumDataAdapter reader = new SpectrumDataAdapter(data.meta().getNode("aliases"));
-        ListDataSet res = new ListDataSet(data.getDataFormat());
+        ListPointSet res = new ListPointSet(data.getDataFormat());
         for (DataPoint dp : data) {
             double corrFactor = 1 + beta;
             res.add(reader.buildSpectrumDataPoint(reader.getX(dp).doubleValue()*corrFactor, reader.getCount(dp), reader.getTime(dp)));
|
@ -13,27 +13,27 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package inr.numass.prop;
|
||||
|
||||
import hep.dataforge.data.DataPoint;
|
||||
import hep.dataforge.data.XYDataAdapter;
|
||||
import hep.dataforge.exceptions.NameNotFoundException;
|
||||
import hep.dataforge.values.Value;
|
||||
|
||||
/**
|
||||
* Simple adapter for Poisson-distributed y values.
|
||||
*
|
||||
* @author Darksnake
|
||||
*/
|
||||
public class PoissonAdapter extends XYDataAdapter {
|
||||
|
||||
public PoissonAdapter(String xName, String yName) {
|
||||
super(xName, yName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Value getYerr(DataPoint point) throws NameNotFoundException {
|
||||
return Value.of(Math.sqrt(getY(point).doubleValue()));
|
||||
}
|
||||
|
||||
}
|
||||
package inr.numass.prop;
|
||||
|
||||
import hep.dataforge.data.DataPoint;
|
||||
import hep.dataforge.data.XYAdapter;
|
||||
import hep.dataforge.exceptions.NameNotFoundException;
|
||||
import hep.dataforge.values.Value;
|
||||
|
||||
/**
|
||||
* Simple adapter for Poisson-distributed y values.
|
||||
*
|
||||
* @author Darksnake
|
||||
*/
|
||||
public class PoissonAdapter extends XYAdapter {
|
||||
|
||||
public PoissonAdapter(String xName, String yName) {
|
||||
super(xName, yName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Value getYerr(DataPoint point) throws NameNotFoundException {
|
||||
return Value.of(Math.sqrt(getY(point).doubleValue()));
|
||||
}
|
||||
|
||||
}
|
||||
|
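A possible use of the adapter above (the column names are illustrative): since getYerr is derived as the square root of y, only the x and y column names need to be supplied.

    // For a DataPoint p with count = 100, adapter.getYerr(p) yields Value.of(10.0).
    PoissonAdapter adapter = new PoissonAdapter("chanel", "count");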
@@ -18,7 +18,6 @@ package inr.numass.prop;
 import hep.dataforge.context.GlobalContext;
 import static hep.dataforge.context.GlobalContext.out;
 import hep.dataforge.data.DataPoint;
-import hep.dataforge.data.DataSet;
 import hep.dataforge.datafitter.FitManager;
 import hep.dataforge.datafitter.FitState;
 import hep.dataforge.datafitter.ParamSet;
@@ -30,6 +29,7 @@ import hep.dataforge.maths.RandomUtils;
 import inr.numass.models.BetaSpectrum;
 import inr.numass.models.NBkgSpectrum;
 import java.io.FileNotFoundException;
+import hep.dataforge.data.PointSet;

 /**
  * Hello world!
@ -72,7 +72,7 @@ public class PropTest {
|
||||
//pm.plotFunction(trans.getProduct(bareBeta, allPars, 9000d), 1000d, 19000d, 400);
|
||||
// pm.plotFunction(FunctionUtils.fix1stArgument(trans.getBivariateFunction(allPars), 14000d), 1000, 18000, 400);
|
||||
HistogramGenerator generator = new HistogramGenerator(null, model, allPars);
|
||||
DataSet data = generator.generateUniformHistogram(1000d, 18500d, 350);
|
||||
PointSet data = generator.generateUniformHistogram(1000d, 18500d, 350);
|
||||
|
||||
long count = 0;
|
||||
for (DataPoint dp : data) {
|
||||
|
@ -18,8 +18,7 @@ package inr.numass.prop.ar;
import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.data.DataPoint;
import hep.dataforge.data.DataSet;
import hep.dataforge.data.ListDataSet;
import hep.dataforge.data.ListPointSet;
import hep.dataforge.data.MapDataPoint;
import hep.dataforge.datafitter.FitManager;
import hep.dataforge.datafitter.FitPlugin;
@ -38,17 +37,18 @@ import inr.numass.prop.SplitNormalSpectrum;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import hep.dataforge.data.PointSet;

/**
*
* @author Darksnake
*/
@TypedActionDef(name = "fitJNA", inputType = JNAEpisode.class, outputType = DataSet.class, description = "Fit JNA data by apropriate model")
@TypedActionDef(name = "fitJNA", inputType = JNAEpisode.class, outputType = PointSet.class, description = "Fit JNA data by apropriate model")
@ValueDef(name = "saveResult", type = "BOOLEAN", def = "true", info = "Save the results of action to a file")
@ValueDef(name = "suffix", def = "", info = "Suffix for saved file")
@ValueDef(name = "loFitChanel", type = "NUMBER", def = "600", info = "Lo chanel to filter data for fit")
@ValueDef(name = "upFitChanel", type = "NUMBER", def = "1100", info = "Up chanel to filter data for fit")
public class FitJNAData extends OneToOneAction<JNAEpisode, DataSet> {
public class FitJNAData extends OneToOneAction<JNAEpisode, PointSet> {

private final FitManager fm;

@ -63,7 +63,7 @@ public class FitJNAData extends OneToOneAction<JNAEpisode, DataSet> {
}

@Override
protected DataSet execute(Logable log, Meta meta, JNAEpisode input){
protected PointSet execute(Logable log, Meta meta, JNAEpisode input){
List<DataPoint> res = new ArrayList<>(input.size());

Model model = buildModel();
@ -96,7 +96,7 @@ public class FitJNAData extends OneToOneAction<JNAEpisode, DataSet> {
res.add(point);
}

DataSet data = new ListDataSet(input.getName(), input.meta(), res);
PointSet data = new ListPointSet(input.getName(), input.meta(), res);

if (meta.getBoolean("saveResult")) {
String suffix = meta.getString("suffix");
@ -111,7 +111,7 @@ public class FitJNAData extends OneToOneAction<JNAEpisode, DataSet> {
Meta reader = readMeta(spectrum.meta());
double lowerChanel = reader.getDouble("loFitChanel");
double upperChanel = reader.getDouble("upFitChanel");
DataSet data = spectrum.asDataSet().filter("chanel", lowerChanel, upperChanel);
PointSet data = spectrum.asDataSet().filter("chanel", lowerChanel, upperChanel);
ParamSet params = new ParamSet()
.setPar("amp", 2e5, 1e3)
.setPar("pos", 800d, 1d)
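Note how the fit window here is driven entirely by the loFitChanel/upFitChanel metadata values and applied with filter("chanel", ...) before the parameter set is seeded. A hedged plain-Java sketch of such a window filter over (chanel, count) pairs; the list-of-arrays representation is an assumption for the sketch, not the PointSet API:

import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-in for spectrum.asDataSet().filter("chanel", lo, up):
// keep only the points whose channel lies inside the fit window.
public final class FitWindowSketch {
    static List<double[]> fitWindow(List<double[]> points, double lo, double up) {
        List<double[]> res = new ArrayList<>();
        for (double[] p : points) {           // p[0] = chanel, p[1] = count
            if (p[0] >= lo && p[0] <= up) {   // inside [lo, up]
                res.add(p);
            }
        }
        return res;
    }
}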
@ -17,8 +17,7 @@ package inr.numass.prop.ar;

import hep.dataforge.content.NamedMetaHolder;
import hep.dataforge.data.DataPoint;
import hep.dataforge.data.DataSet;
import hep.dataforge.data.ListDataSet;
import hep.dataforge.data.ListPointSet;
import hep.dataforge.data.MapDataPoint;
import hep.dataforge.description.NodeDef;
import hep.dataforge.description.ValueDef;
@ -29,6 +28,7 @@ import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import hep.dataforge.data.PointSet;

/**
*
@ -61,12 +61,12 @@ public class JNASpectrum extends NamedMetaHolder {
}
}

public DataSet asDataSet() {
public PointSet asDataSet() {
List<DataPoint> points = new ArrayList<>();
for (Map.Entry<Double, Long> point : spectrum.entrySet()) {
points.add(new MapDataPoint(names, point.getKey(), point.getValue()));
}
return new ListDataSet(getName(), meta(), points);
return new ListPointSet(getName(), meta(), points);
}

public Map<Double, Long> asMap() {
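asDataSet in the hunk above is a straight histogram-to-table conversion: it walks the underlying Map<Double, Long> (the class imports LinkedHashMap, so iteration presumably follows insertion order) and emits one point per (chanel, count) entry. The same idea in plain Java collections; names and demo values are illustrative:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Plain-Java analogue of JNASpectrum.asDataSet(): channel->count map to rows.
public final class SpectrumToPointsSketch {
    public static void main(String[] args) {
        Map<Double, Long> spectrum = new LinkedHashMap<>(); // keeps fill order
        spectrum.put(600.0, 120L);
        spectrum.put(601.0, 134L);

        List<double[]> points = new ArrayList<>();
        for (Map.Entry<Double, Long> e : spectrum.entrySet()) {
            points.add(new double[]{e.getKey(), e.getValue()}); // one row per bin
        }
        System.out.println(points.size() + " points"); // prints "2 points"
    }
}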
@ -18,7 +18,6 @@ package inr.numass.prop.ar;
import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.data.DataPoint;
import hep.dataforge.data.DataSet;
import hep.dataforge.data.FileData;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef;
@ -39,6 +38,7 @@ import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import hep.dataforge.data.PointSet;

/**
*
@ -72,7 +72,7 @@ public class ReadJNADataAction extends OneToOneAction<FileData, JNAEpisode> {
Scanner timeScanner = new Scanner(timeFile);

String tempFileName = reader.getString("temperatureFile", "");
DataSet tempData = null;
PointSet tempData = null;
if (!tempFileName.isEmpty()) {
String[] format = {"time", "T2", "T4", "T5", "T6"};
File tempFile = IOUtils.getFile(input.getInputFile(), tempFileName);
@ -108,7 +108,7 @@ public class ReadJNADataAction extends OneToOneAction<FileData, JNAEpisode> {

}

private Meta prepareAnnotation(Meta parent, double startTime, double stopTime, DataSet tempData) {
private Meta prepareAnnotation(Meta parent, double startTime, double stopTime, PointSet tempData) {
MetaBuilder meta = parent.getBuilder();
meta.putValue("relativeStartTime", startTime);
meta.putValue("relativeStopTime", stopTime);
@ -16,7 +16,6 @@
package inr.numass.prop;

import hep.dataforge.context.GlobalContext;
import hep.dataforge.data.DataSet;
import hep.dataforge.data.FileData;
import hep.dataforge.datafitter.MINUITPlugin;
import hep.dataforge.io.ColumnedDataWriter;
@ -26,6 +25,7 @@ import inr.numass.prop.ar.JNAEpisode;
import inr.numass.prop.ar.ReadJNADataAction;
import java.io.File;
import java.io.FileNotFoundException;
import hep.dataforge.data.PointSet;

/**
*
@ -48,7 +48,7 @@ public class TestFit {
);
JNAEpisode spectra = new ReadJNADataAction(GlobalContext.instance(), null).runOne(file);

DataSet data = new FitJNAData(GlobalContext.instance(), null).runOne(spectra);
PointSet data = new FitJNAData(GlobalContext.instance(), null).runOne(spectra);

ColumnedDataWriter.writeDataSet(System.out, data, "***RESULT***");
}
@ -17,7 +17,7 @@ package inr.numass.prop;

import hep.dataforge.context.GlobalContext;
import hep.dataforge.data.FileData;
import hep.dataforge.data.XYDataAdapter;
import hep.dataforge.data.XYAdapter;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.plots.PlotFrame;
@ -60,7 +60,7 @@ public class TestReader {

PlotFrame frame = FXPlotUtils.displayJFreeChart("JNA test", null);

frame.add(PlottableData.plot(sp.asDataSet(), new XYDataAdapter("chanel", "count")));
frame.add(PlottableData.plot(sp.asDataSet(), new XYAdapter("chanel", "count")));

Meta temps = sp.meta().getNode("temperature");

@ -72,7 +72,7 @@ public class TestReader {

// double lowerChanel = 600;
// double upperChanel = 1100;
// DataSet data = sp.asDataSet().filter("chanel", lowerChanel, upperChanel);
// PointSet data = sp.asDataSet().filter("chanel", lowerChanel, upperChanel);
// ParamSet params = new ParamSet()
// .setPar("amp", 2e5, 1e3)
// .setPar("pos", 800d, 1d)
@ -21,10 +21,9 @@ package inr.numass.viewer;
* and open the template in the editor.
*/
import hep.dataforge.data.DataPoint;
import hep.dataforge.data.DataSet;
import hep.dataforge.data.ListDataSet;
import hep.dataforge.data.ListPointSet;
import hep.dataforge.data.MapDataPoint;
import hep.dataforge.data.XYDataAdapter;
import hep.dataforge.data.XYAdapter;
import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
@ -73,6 +72,7 @@ import org.controlsfx.validation.ValidationSupport;
import org.controlsfx.validation.Validator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import hep.dataforge.data.PointSet;

/**
* FXML Controller class
@ -359,7 +359,7 @@ public class NumassLoaderViewComponent extends AnchorPane implements Initializab
for (NMPoint point : points) {
String seriesName = String.format("%d: %.2f (%.2f)", points.indexOf(point), point.getUset(), point.getUread());

PlottableData datum = PlottableData.plot(seriesName,new XYDataAdapter("chanel", "count"), point.getData(binning, normalize));
PlottableData datum = PlottableData.plot(seriesName,new XYAdapter("chanel", "count"), point.getData(binning, normalize));
datum.configure(plottableConfig);
plottables.add(datum);
}
@ -402,7 +402,7 @@ public class NumassLoaderViewComponent extends AnchorPane implements Initializab
int loChannel = (int) channelSlider.getLowValue();
int upChannel = (int) channelSlider.getHighValue();
double dTime = getDTime();
ListDataSet spectrumDataSet = new ListDataSet(names);
ListPointSet spectrumDataSet = new ListPointSet(names);

for (NMPoint point : points) {
spectrumDataSet.add(new MapDataPoint(names, new Object[]{
@ -439,7 +439,7 @@ public class NumassLoaderViewComponent extends AnchorPane implements Initializab
fileChooser.setInitialFileName(data.getName() + "_detector.out");
File destination = fileChooser.showSaveDialog(detectorPlotPane.getScene().getWindow());
if (destination != null) {
DataSet detectorData = PlotDataUtils.collectXYDataFromPlot(detectorPlotFrame, true);
PointSet detectorData = PlotDataUtils.collectXYDataFromPlot(detectorPlotFrame, true);
try {
ColumnedDataWriter
.writeDataSet(destination, detectorData, "Numass data viewer detector data export for " + data.getName(),
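The viewer's spectrum export above builds one row per NMPoint from the slider's channel window and the dead-time setting; the exact columns are truncated in this hunk and are not reconstructed here. A hedged sketch of the kind of per-point reduction involved, combining a window sum with the same non-paralyzable dead-time correction as in TritiumUtils (every name in this helper is hypothetical):

// Hypothetical per-point reduction: sum counts in [loChannel, upChannel]
// and apply the dead-time correction to the resulting rate.
static double windowRate(long[] channelCounts, int loChannel, int upChannel,
                         double time, double dTime) {
    long sum = 0;
    for (int ch = loChannel; ch <= upChannel; ch++) {
        sum += channelCounts[ch];      // counts inside the selected window
    }
    double rate = sum / time;          // raw rate
    return rate / (1 - dTime * rate);  // dead-time corrected rate
}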