Workbench working after refactoring

darksnake 2016-03-22 11:31:54 +03:00
parent 2c6f466616
commit 793c7cefcc
7 changed files with 97 additions and 96 deletions

NumassIO.java View File

@@ -15,7 +15,6 @@
*/
package inr.numass;
import hep.dataforge.data.FileData;
import hep.dataforge.io.BasicIOManager;
import hep.dataforge.meta.Meta;
import hep.dataforge.names.Name;
@@ -98,31 +97,31 @@ public class NumassIO extends BasicIOManager {
}
}
public static RawNMFile readAsDat(FileData source, Meta config) {
public static RawNMFile readAsDat(File source, Meta config) throws IOException {
return new NumassDataReader(source, config).read();
}
public static RawNMFile readAsPaw(File source) throws FileNotFoundException {
return new NumassPawReader().readPaw(source, source.getName());
}
public static RawNMFile getNumassData(File file, Meta config) {
try {
return new NumassDataReader(source, config).read();
RawNMFile dataFile;
String extension = FilenameUtils.getExtension(file.getName()).toLowerCase();
switch (extension) {
case "paw":
dataFile = readAsPaw(file);
break;
case "dat":
dataFile = readAsDat(file, config);
break;
default:
throw new RuntimeException("Wrong file format");
}
return dataFile;
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
public static RawNMFile readAsPaw(FileData source) {
return new NumassPawReader().readPaw(source.get(), source.fileName());
}
public static RawNMFile getNumassData(FileData source, Meta config) {
RawNMFile dataFile;
String extension = FilenameUtils.getExtension(source.fileName());
switch (extension) {
case "paw":
dataFile = readAsPaw(source);
break;
case "dat":
dataFile = readAsDat(source, config);
break;
default:
throw new RuntimeException("Wrong file format");
}
return dataFile;
}
}
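
A minimal usage sketch of the refactored File-based NumassIO API (illustrative, not from this commit); the file paths are made up, and passing a MetaBuilder where a Meta is expected is an assumption:

    import hep.dataforge.meta.MetaBuilder;
    import inr.numass.NumassIO;
    import inr.numass.data.RawNMFile;
    import java.io.File;

    public class NumassIOSketch {
        public static void main(String[] args) throws Exception {
            // getNumassData dispatches on the extension: ".dat" goes to NumassDataReader,
            // ".paw" to NumassPawReader; IOExceptions are rethrown as RuntimeException
            RawNMFile dat = NumassIO.getNumassData(new File("run01.dat"), new MetaBuilder("readData"));
            // the per-format readers can also be called directly
            RawNMFile paw = NumassIO.readAsPaw(new File("transmission.paw"));
        }
    }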

MergeDataAction.java View File

@@ -15,26 +15,26 @@
*/
package inr.numass.actions;
import hep.dataforge.actions.ManyToOneAction;
import hep.dataforge.actions.GroupBuilder;
import hep.dataforge.actions.ManyToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.data.DataNode;
import hep.dataforge.points.DataPoint;
import hep.dataforge.points.ListPointSet;
import hep.dataforge.points.MapPoint;
import hep.dataforge.description.NodeDef;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.io.log.Logable;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.points.DataPoint;
import hep.dataforge.points.ListPointSet;
import hep.dataforge.points.MapPoint;
import hep.dataforge.points.PointSet;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import hep.dataforge.points.PointSet;
import java.util.Collection;
import java.util.stream.Collectors;
/**
@@ -42,7 +42,7 @@ import java.util.stream.Collectors;
* @author Darksnake
*/
@TypedActionDef(name = "merge", inputType = PointSet.class, outputType = PointSet.class, description = "Merge different numass data files into one.")
@NodeDef(name = "grouping", info = "The defenition of grouping rule for this merge", target = "method::hep.dataforge.content.GroupBuilder.byAnnotation")
@NodeDef(name = "grouping", info = "The defenition of grouping rule for this merge", target = "method::hep.dataforge.actions.GroupBuilder.byAnnotation")
public class MergeDataAction extends ManyToOneAction<PointSet, PointSet> {
public static final String MERGE_NAME = "mergeName";
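
The grouping node is now resolved against hep.dataforge.actions.GroupBuilder.byAnnotation. A hedged sketch of a merge configuration carrying such a node; the "mergeName" key is the MERGE_NAME constant above, while the "byValue" grouping key and method chaining on MetaBuilder are assumptions:

    import hep.dataforge.meta.Meta;
    import hep.dataforge.meta.MetaBuilder;

    class MergeConfigSketch {
        static Meta mergeConfig() {
            return new MetaBuilder("merge")
                    .putValue("mergeName", "combined")      // MERGE_NAME key declared by the action
                    .putNode(new MetaBuilder("grouping")    // consumed by GroupBuilder.byAnnotation
                            .putValue("byValue", "run"));   // assumed grouping option, illustrative
        }
    }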

ReadNumassDataAction.java View File

@@ -17,7 +17,6 @@ package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.data.FileData;
import hep.dataforge.description.NodeDef;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef;
@@ -27,26 +26,27 @@ import hep.dataforge.meta.Meta;
import static inr.numass.NumassIO.getNumassData;
import inr.numass.data.NMFile;
import inr.numass.data.RawNMFile;
import java.io.File;
/**
*
* @author Darksnake
*/
@TypedActionDef(name = "readData",
inputType = FileData.class, outputType = NMFile.class, description = "Read binary numass data file")
inputType = File.class, outputType = NMFile.class, description = "Read binary numass data file")
@ValueDef(name = "fileName", info = "The name of the file. By default equals file name.")
@ValueDef(name = "HVdev", info = "Divider for HV measurements. Should be set to 1.0 for numass data 2014",
def = "2.468555393226049", type = "NUMBER")
@ValueDef(name = "noUset", info = "If 'true', then Uset = Uread")
@NodeDef(name = "debunch", target = "class::inr.numass.actions.DebunchAction", info = "If given, governs debunching")
public class ReadNumassDataAction extends OneToOneAction<FileData, NMFile> {
public class ReadNumassDataAction extends OneToOneAction<File, NMFile> {
public ReadNumassDataAction(Context context, Meta an) {
super(context, an);
}
@Override
protected NMFile execute(Logable log, String name, Meta reader, FileData source) throws ContentException {
protected NMFile execute(Logable log, String name, Meta reader, File source) throws ContentException {
// log.logString("File '%s' started", source.getName());
RawNMFile raw = getNumassData(source, meta());
if (meta().getBoolean("paw", false)) {
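
A sketch of constructing the refactored action (constructor signature taken from the diff above); the config keys mirror the @ValueDef declarations, and treating a MetaBuilder as a Meta is assumed:

    import hep.dataforge.context.Context;
    import hep.dataforge.meta.Meta;
    import hep.dataforge.meta.MetaBuilder;
    import inr.numass.actions.ReadNumassDataAction;

    class ReadActionSketch {
        static ReadNumassDataAction create(Context context) {
            Meta readerMeta = new MetaBuilder("readData")
                    .putValue("HVdev", 2.468555393226049)   // default divider per the @ValueDef
                    .putValue("noUset", false);             // set true to force Uset = Uread
            return new ReadNumassDataAction(context, readerMeta);
        }
    }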

NumassDataReader.java View File

@@ -15,10 +15,9 @@
*/
package inr.numass.data;
import hep.dataforge.data.FileData;
import hep.dataforge.meta.MergeRule;
import hep.dataforge.meta.Meta;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
@@ -36,12 +35,11 @@ public class NumassDataReader {
private String name;
private final InputStream stream;
private Meta config;
private double HVdev;
private double HVdev = 2.468555393226049;
private boolean noUset = false;
public NumassDataReader(FileData data, Meta config) throws IOException {
this(data.get(), data.meta()
.getString("filename", data.fileName()), MergeRule.replace(config, data.meta()));
public NumassDataReader(File file, Meta config) throws IOException {
this(new FileInputStream(file), file.getName(), config);
}
public NumassDataReader(String file, String fname, Meta config) throws FileNotFoundException {
@@ -55,8 +53,8 @@ public class NumassDataReader {
public NumassDataReader(InputStream is, String fname, Meta config) {
this.stream = new BufferedInputStream(is);
this.name = fname;
this.config = config;
HVdev = config.getDouble("HVdev", 2.468555393226049);
noUset = config.getBoolean("noUset", false);
}
public RawNMFile read() throws IOException {
@@ -238,7 +236,7 @@ public class NumassDataReader {
point.setLength(time_out);
point.setUread(Uread / 10d / HVdev);
if (config.getBoolean("noUset", false)) {
if (noUset) {
point.setUset(Uread / 10d / HVdev);
} else {
point.setUset(voltage / 10d);
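
The reader now accepts a java.io.File directly and defaults HVdev in the field initializer. A minimal sketch (illustrative; using a bare MetaBuilder in place of a prepared Meta is an assumption):

    import hep.dataforge.meta.MetaBuilder;
    import inr.numass.data.NumassDataReader;
    import inr.numass.data.RawNMFile;
    import java.io.File;
    import java.io.IOException;

    class DatReaderSketch {
        static RawNMFile read(File datFile) throws IOException {
            // HVdev defaults to 2.468555393226049 and noUset to false via the field initializers
            return new NumassDataReader(datFile, new MetaBuilder("reader")).read();
        }
    }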

NumassPawReader.java View File

@@ -15,10 +15,9 @@
*/
package inr.numass.data;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.Locale;
import java.util.Scanner;
@@ -28,16 +27,14 @@ import java.util.Scanner;
*/
public class NumassPawReader {
public RawNMFile readPaw(InputStream stream, String name){
public RawNMFile readPaw(File file, String name) throws FileNotFoundException{
Locale.setDefault(Locale.US);
BufferedInputStream bs = new BufferedInputStream(stream);
FileInputStream bs = new FileInputStream(file);
return readPaw(new Scanner(bs), name);
}
public RawNMFile readPaw(String filePath) throws FileNotFoundException{
FileInputStream fs = new FileInputStream(filePath);
return readPaw(fs, filePath);
return readPaw(new File(filePath), filePath);
}
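
A usage sketch of the new File-based overload, mirroring how NumassIO.readAsPaw calls it; only the file path passed in by the caller is illustrative:

    import inr.numass.data.NumassPawReader;
    import inr.numass.data.RawNMFile;
    import java.io.File;
    import java.io.FileNotFoundException;

    class PawReaderSketch {
        static RawNMFile read(File pawFile) throws FileNotFoundException {
            // the second argument is the dataset name; NumassIO passes the plain file name here
            return new NumassPawReader().readPaw(pawFile, pawFile.getName());
        }
    }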

TransmissionInterpolator.java View File

@@ -15,20 +15,19 @@
*/
package inr.numass.models;
import hep.dataforge.actions.ActionResult;
import hep.dataforge.actions.RunManager;
import hep.dataforge.context.Context;
import hep.dataforge.data.DataNode;
import hep.dataforge.points.DataPoint;
import hep.dataforge.io.ColumnedDataReader;
import hep.dataforge.meta.Meta;
import hep.dataforge.points.DataPoint;
import hep.dataforge.points.PointSet;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.interpolation.LinearInterpolator;
import hep.dataforge.points.PointSet;
/**
*
@@ -47,9 +46,10 @@ public class TransmissionInterpolator implements UnivariateFunction {
}
@SuppressWarnings("unchecked")
public static TransmissionInterpolator fromAction(Context context, Meta actionAnnotation, String xName, String yName, int nSmooth, double w, double border) throws InterruptedException {
public static TransmissionInterpolator fromAction(Context context, Meta actionAnnotation,
String xName, String yName, int nSmooth, double w, double border) throws InterruptedException {
DataNode<PointSet> node = RunManager.executeAction(context, actionAnnotation);
PointSet data = node.iterator().next().get();
PointSet data = node.getData().get();
return new TransmissionInterpolator(data, xName, yName, nSmooth, w, border);
}
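
A sketch of calling the reformatted factory method; the column names and the numeric smoothing, weight and border arguments are illustrative values only:

    import hep.dataforge.context.Context;
    import hep.dataforge.meta.Meta;
    import inr.numass.models.TransmissionInterpolator;
    import org.apache.commons.math3.analysis.UnivariateFunction;

    class TransmissionSketch {
        static UnivariateFunction build(Context context, Meta actionMeta) throws InterruptedException {
            // runs the configured action and interpolates the resulting PointSet
            return TransmissionInterpolator.fromAction(context, actionMeta, "Uset", "CR", 15, 0.8, 14000);
        }
    }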

NumassWorkbenchController.java View File

@@ -9,12 +9,10 @@ import ch.qos.logback.classic.Level;
import de.jensd.shichimifx.utils.ConsoleDude;
import hep.dataforge.actions.Action;
import hep.dataforge.actions.ActionManager;
import hep.dataforge.actions.ActionResult;
import hep.dataforge.actions.ActionStateListener;
import hep.dataforge.actions.RunManager;
import hep.dataforge.context.Context;
import hep.dataforge.context.GlobalContext;
import hep.dataforge.data.DataFactory;
import hep.dataforge.data.DataNode;
import hep.dataforge.data.FileDataFactory;
import hep.dataforge.description.ActionDescriptor;
@@ -202,6 +200,15 @@ public class NumassWorkbenchController implements Initializable, StagePaneHolder
//loading data configuration
if (config.hasNode("data")) {
dataConfig = new Configuration(config.getNode("data"));
//replacing file name value with appropriate nodes
if (dataConfig.hasValue("file")) {
Value fileValue = dataConfig.getValue("file");
dataConfig.removeValue("file");
fileValue.listValue().stream().forEach((fileName) -> {
dataConfig.putNode(new MetaBuilder("file")
.putValue("path", fileName));
});
}
} else {
dataConfig = new Configuration("data");
}
@@ -299,7 +306,7 @@ public class NumassWorkbenchController implements Initializable, StagePaneHolder
clearAllStages();
new Thread(() -> {
DataNode data = new FileDataFactory().build(getContext(), getDataConfiguration());
if(data.isEmpty()){
if (data.isEmpty()) {
//FIXME evaluate error here
throw new Error("Empty data");
}
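
The first hunk in NumassWorkbenchController above expands a shorthand "file" value in the data configuration into one "file" node with a "path" value per entry. A sketch of the two equivalent shapes (the paths and method chaining on MetaBuilder are illustrative assumptions):

    import hep.dataforge.meta.MetaBuilder;

    class DataConfigSketch {
        // shorthand accepted by the workbench: a single "file" value listing the paths
        static MetaBuilder shorthand() {
            return new MetaBuilder("data")
                    .putValue("file", "run01.dat");
        }

        // normalized form produced by the controller, as consumed by FileDataFactory
        static MetaBuilder normalized() {
            return new MetaBuilder("data")
                    .putNode(new MetaBuilder("file")
                            .putValue("path", "run01.dat"));
        }
    }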