Revised workspace data loading mechanism. Not tested

Alexander Nozik 2017-02-03 20:59:04 +03:00
parent 4abcebc5aa
commit 9904f9d9b2
8 changed files with 164 additions and 151 deletions

View File

@@ -26,8 +26,11 @@ import hep.dataforge.io.XMLMetaWriter;
import hep.dataforge.meta.Laminate;
import hep.dataforge.meta.Meta;
import hep.dataforge.tables.*;
import inr.numass.debunch.DebunchReport;
import inr.numass.debunch.FrameAnalizer;
import inr.numass.storage.NMPoint;
import inr.numass.storage.NumassData;
import inr.numass.storage.NumassDataLoader;
import inr.numass.storage.RawNMPoint;
import inr.numass.utils.ExpressionUtils;
@@ -99,6 +102,14 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
utransform = Function.identity();
}
if(meta.hasMeta("debunch")){
if(dataFile instanceof NumassDataLoader){
dataFile = ((NumassDataLoader) dataFile).applyRawTransformation(raw->debunch(context,raw,meta.getMeta("debunch")));
} else {
throw new RuntimeException("Debunch not available");
}
}
List<DataPoint> dataList = new ArrayList<>();
for (NMPoint point : dataFile) {
@@ -183,6 +194,22 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
};
}
private NMPoint debunch(Context context, RawNMPoint point, Meta meta) {
int upper = meta.getInt("upperchanel", RawNMPoint.MAX_CHANEL);
int lower = meta.getInt("lowerchanel", 0);
double rejectionprob = meta.getDouble("rejectprob", 1e-10);
double framelength = meta.getDouble("framelength", 1);
double maxCR = meta.getDouble("maxcr", 500d);
double cr = point.selectChanels(lower, upper).getCR();
if (cr < maxCR) {
DebunchReport report = new FrameAnalizer(rejectionprob, framelength, lower, upper).debunchPoint(point);
return new NMPoint(report.getPoint());
} else {
return new NMPoint(point);
}
}
private interface Correction {
/**

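Note: the inline debunch step added to PrepareDataAction above is driven entirely by a "debunch" meta node. Below is a minimal sketch of such a node, using only the keys and defaults that debunch(...) reads; the MetaBuilder fluent API and the wrapping class are assumed for illustration and are not part of this commit.

// Illustrative sketch (not from the commit): a "debunch" node with the keys
// and defaults read by PrepareDataAction.debunch(...) above.
// Assumes hep.dataforge.meta.MetaBuilder's fluent setValue(...)/build() API.
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import inr.numass.storage.RawNMPoint;

class DebunchMetaExample {
    static Meta debunchMeta() {
        return new MetaBuilder("debunch")
                .setValue("lowerchanel", 0)                     // lower accepted channel
                .setValue("upperchanel", RawNMPoint.MAX_CHANEL) // upper accepted channel
                .setValue("rejectprob", 1e-10)                  // bunch rejection probability
                .setValue("framelength", 1.0)                   // frame length for FrameAnalizer
                .setValue("maxcr", 500.0)                       // debunch only below this count rate
                .build();
    }
}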
View File

@@ -1,60 +0,0 @@
package inr.numass.actions;
import hep.dataforge.actions.OneToManyAction;
import hep.dataforge.context.Context;
import hep.dataforge.meta.Laminate;
import hep.dataforge.meta.Meta;
import hep.dataforge.storage.api.Loader;
import inr.numass.debunch.DebunchReport;
import inr.numass.debunch.FrameAnalizer;
import inr.numass.storage.*;
import java.util.Map;
/**
* Created by darksnake on 29-Jan-17.
*/
public class ReadStorageAction extends OneToManyAction<NumassStorage, NumassData> {
@Override
protected Map<String, Meta> prepareMeta(Context context, String inputName, Laminate meta) {
return null;
}
@Override
protected NumassData execute(Context context, String inputName, String outputName, NumassStorage input, Laminate meta) {
try(Loader loader = input.getLoader(outputName)) {
if (loader instanceof NumassDataLoader) {
NumassDataLoader nd = (NumassDataLoader) loader;
return buildData(context, nd, meta);
} else {
throw new RuntimeException("Numass loader expected");
}
} catch (Exception ex){
throw new RuntimeException(ex);
}
}
private NumassData buildData(Context context, NumassDataLoader loader, Meta meta) {
if (meta.hasNode("debunch")) {
return loader.applyRawTransformation(rp -> debunch(context, rp, meta.getMeta("debunch")));
} else {
return loader;
}
}
private NMPoint debunch(Context context, RawNMPoint point, Meta meta) {
int upper = meta.getInt("upperchanel", RawNMPoint.MAX_CHANEL);
int lower = meta.getInt("lowerchanel", 0);
double rejectionprob = meta.getDouble("rejectprob", 1e-10);
double framelength = meta.getDouble("framelength", 1);
double maxCR = meta.getDouble("maxcr", 500d);
double cr = point.selectChanels(lower, upper).getCR();
if (cr < maxCR) {
DebunchReport report = new FrameAnalizer(rejectionprob, framelength, lower, upper).debunchPoint(point);
return new NMPoint(report.getPoint());
} else {
return new NMPoint(point);
}
}
}

View File

@@ -7,25 +7,20 @@ package inr.numass.tasks;
import hep.dataforge.actions.GenericAction;
import hep.dataforge.context.Context;
import hep.dataforge.data.*;
import hep.dataforge.data.DataFilter;
import hep.dataforge.data.DataNode;
import hep.dataforge.data.DataTree;
import hep.dataforge.description.DescriptorBuilder;
import hep.dataforge.description.NodeDescriptor;
import hep.dataforge.goals.Work;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.Template;
import hep.dataforge.storage.api.Loader;
import hep.dataforge.storage.commons.StorageUtils;
import hep.dataforge.tables.Table;
import hep.dataforge.workspace.AbstractTask;
import hep.dataforge.workspace.TaskModel;
import inr.numass.actions.MergeDataAction;
import inr.numass.actions.MonitorCorrectAction;
import inr.numass.actions.PrepareDataAction;
import inr.numass.debunch.DebunchReport;
import inr.numass.debunch.FrameAnalizer;
import inr.numass.storage.*;
import java.net.URI;
import inr.numass.storage.NumassData;
/**
* Prepare data task
@@ -40,10 +35,23 @@ public class NumassPrepareTask extends AbstractTask<Table> {
Context context = model.getContext();
//acquiring initial data; the data node must not be empty
DataNode<NumassData> data;
Meta dataMeta = config.getMeta("data");
URI storageUri = input.getCheckedData("dataRoot", URI.class).get();
DataSet.Builder<NumassData> dataBuilder = readData(getWork(model, input.getName()), context, storageUri, dataMeta);
DataNode<NumassData> data = dataBuilder.build();
if (dataMeta.hasValue("from")) {
data = input.getCheckedNode(dataMeta.getString("from"), NumassData.class);
} else {
data = input.checked(NumassData.class);
}
DataFilter filter = new DataFilter().configure(dataMeta);
data = filter.filter(data);
// Meta dataMeta = config.getMeta("data");
// URI storageUri = input.getCheckedData("dataRoot", URI.class).get();
// DataSet.Builder<NumassData> dataBuilder = readData(getWork(model, input.getName()), context, storageUri, dataMeta);
// DataNode<NumassData> data = dataBuilder.build();
//preparing table data
Meta prepareMeta = config.getMeta("prepare");
@@ -80,73 +88,73 @@ public class NumassPrepareTask extends AbstractTask<Table> {
return model;
}
private DataSet.Builder<NumassData> readData(Work callback, Context context, URI numassRoot, Meta meta) {
// private DataSet.Builder<NumassData> readData(Work callback, Context context, URI numassRoot, Meta meta) {
//
// NumassStorage storage = NumassStorage.buildNumassRoot(numassRoot, true, false);
// DataFilter filter = new DataFilter().configure(meta);
//
// boolean forwardOnly = meta.getBoolean("forwardOnly", false);
// boolean reverseOnly = meta.getBoolean("reverseOnly", false);
//// SetDirectionUtility.load(context);
//
// DataSet.Builder<NumassData> builder = DataSet.builder(NumassData.class);
// callback.setMaxProgress(StorageUtils.loaderStream(storage).count());
// StorageUtils.loaderStream(storage).forEach(pair -> {
// Loader loader = pair.getValue();
// if (loader instanceof NumassDataLoader) {
// NumassDataLoader nd = (NumassDataLoader) loader;
// Data<NumassData> datum = buildData(context, nd, meta);
// if (filter.acceptData(pair.getKey(), datum)) {
// boolean accept = true;
// if (forwardOnly || reverseOnly) {
// boolean reversed = nd.isReversed();
// accept = (reverseOnly && reversed) || (forwardOnly && !reversed);
// }
// if (accept) {
// builder.putData(pair.getKey(), datum);
// }
// }
// }
// callback.increaseProgress(1d);
// });
//
// if (meta.getBoolean("loadLegacy", false)) {
// storage.legacyFiles().forEach(nd -> {
// Data<NumassData> datum = Data.buildStatic(nd);
// if (filter.acceptData(nd.getName(), datum)) {
// builder.putData("legacy." + nd.getName(), datum);
// }
// });
// }
// //FIXME remove in later revisions
//// SetDirectionUtility.save(context);
//
// return builder;
// }
NumassStorage storage = NumassStorage.buildNumassRoot(numassRoot, true, false);
DataFilter filter = new DataFilter().configure(meta);
boolean forwardOnly = meta.getBoolean("forwardOnly", false);
boolean reverseOnly = meta.getBoolean("reverseOnly", false);
// SetDirectionUtility.load(context);
DataSet.Builder<NumassData> builder = DataSet.builder(NumassData.class);
callback.setMaxProgress(StorageUtils.loaderStream(storage).count());
StorageUtils.loaderStream(storage).forEach(pair -> {
Loader loader = pair.getValue();
if (loader instanceof NumassDataLoader) {
NumassDataLoader nd = (NumassDataLoader) loader;
Data<NumassData> datum = buildData(context, nd, meta);
if (filter.acceptData(pair.getKey(), datum)) {
boolean accept = true;
if (forwardOnly || reverseOnly) {
boolean reversed = nd.isReversed();
accept = (reverseOnly && reversed) || (forwardOnly && !reversed);
}
if (accept) {
builder.putData(pair.getKey(), datum);
}
}
}
callback.increaseProgress(1d);
});
if (meta.getBoolean("loadLegacy", false)) {
storage.legacyFiles().forEach(nd -> {
Data<NumassData> datum = Data.buildStatic(nd);
if (filter.acceptData(nd.getName(), datum)) {
builder.putData("legacy." + nd.getName(), datum);
}
});
}
//FIXME remove in later revisions
// SetDirectionUtility.save(context);
return builder;
}
private Data<NumassData> buildData(Context context, NumassDataLoader loader, Meta meta) {
if (meta.hasNode("debunch")) {
return Data.buildStatic(loader.applyRawTransformation(rp -> debunch(context, rp, meta.getMeta("debunch"))));
} else {
return Data.buildStatic(loader);
}
}
private NMPoint debunch(Context context, RawNMPoint point, Meta meta) {
int upper = meta.getInt("upperchanel", RawNMPoint.MAX_CHANEL);
int lower = meta.getInt("lowerchanel", 0);
double rejectionprob = meta.getDouble("rejectprob", 1e-10);
double framelength = meta.getDouble("framelength", 1);
double maxCR = meta.getDouble("maxcr", 500d);
double cr = point.selectChanels(lower, upper).getCR();
if (cr < maxCR) {
DebunchReport report = new FrameAnalizer(rejectionprob, framelength, lower, upper).debunchPoint(point);
return new NMPoint(report.getPoint());
} else {
return new NMPoint(point);
}
}
// private Data<NumassData> buildData(Context context, NumassDataLoader loader, Meta meta) {
// if (meta.hasNode("debunch")) {
// return Data.buildStatic(loader.applyRawTransformation(rp -> debunch(context, rp, meta.getMeta("debunch"))));
// } else {
// return Data.buildStatic(loader);
// }
// }
//
// private NMPoint debunch(Context context, RawNMPoint point, Meta meta) {
// int upper = meta.getInt("upperchanel", RawNMPoint.MAX_CHANEL);
// int lower = meta.getInt("lowerchanel", 0);
// double rejectionprob = meta.getDouble("rejectprob", 1e-10);
// double framelength = meta.getDouble("framelength", 1);
// double maxCR = meta.getDouble("maxcr", 500d);
//
// double cr = point.selectChanels(lower, upper).getCR();
// if (cr < maxCR) {
// DebunchReport report = new FrameAnalizer(rejectionprob, framelength, lower, upper).debunchPoint(point);
// return new NMPoint(report.getPoint());
// } else {
// return new NMPoint(point);
// }
// }
private <T, R> DataNode<R> runAction(GenericAction<T, R> action, Context context, DataNode<T> data, Meta meta) {
return action.run(context, data, meta);

View File

@@ -360,7 +360,7 @@ public class NumassWorkbenchController implements Initializable, StagePaneHolder
statusBar.setProgress(-1);
});
DataNode data = new FileDataFactory().load(getContext(), getDataConfiguration());
DataNode data = new FileDataFactory().build(getContext(), getDataConfiguration());
try {
ActionUtils.runAction(getContext(), data, getActionConfiguration()).computeAll();
Platform.runLater(() -> statusBar.setText("Execution complete"));

View File

@@ -28,7 +28,7 @@ import static java.util.Arrays.sort;
* @author Darksnake
*/
public class NMPoint {
//TODO transform to annotated and move some parameters to meta
//TODO andThen to annotated and move some parameters to meta
static final String[] dataNames = {"chanel", "count"};
private final int[] spectrum;
private Instant startTime;

View File

@@ -0,0 +1,37 @@
package inr.numass.storage;
import hep.dataforge.context.Context;
import hep.dataforge.data.DataFactory;
import hep.dataforge.data.DataFilter;
import hep.dataforge.data.DataTree;
import hep.dataforge.meta.Meta;
import hep.dataforge.storage.commons.StorageUtils;
/**
* Created by darksnake on 03-Feb-17.
*/
public class NumassDataFactory extends DataFactory<NumassData> {
public NumassDataFactory() {
super(NumassData.class);
}
@Override
public String getName() {
return "numass";
}
@Override
protected void buildChildren(Context context, DataTree.Builder<NumassData> builder, DataFilter filter, Meta meta) {
NumassStorage storage = NumassStorage.buildNumassRoot(
meta.getString("path"),
meta.getBoolean("readOnly", true),
meta.getBoolean("monitor", false)
);
StorageUtils.loaderStream(storage).forEach(pair -> {
if (pair.getValue() instanceof NumassData) {
builder.putStatic(pair.getKey(), (NumassData) pair.getValue());
}
});
}
}
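A possible way to exercise the new factory directly, by analogy with the FileDataFactory().build(...) call changed in NumassWorkbenchController above. The "path", "readOnly" and "monitor" keys are exactly those read in buildChildren(...); the path value, the wrapping class, and the assumption that DataFactory exposes build(Context, Meta) returning a typed DataNode are illustrative only, not part of the commit.

// Illustrative sketch (not from the commit): loading NumassData through NumassDataFactory.
import hep.dataforge.context.Context;
import hep.dataforge.data.DataNode;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import inr.numass.storage.NumassData;
import inr.numass.storage.NumassDataFactory;

class NumassDataFactoryExample {
    static DataNode<NumassData> load(Context context) {
        Meta dataMeta = new MetaBuilder("data")
                .setValue("path", "file:///data/numass/root") // hypothetical storage root
                .setValue("readOnly", true)                   // keys read in buildChildren(...)
                .setValue("monitor", false)
                .build();
        return new NumassDataFactory().build(context, dataMeta);
    }
}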

View File

@@ -56,17 +56,6 @@ public class NumassStorage extends FileStorage {
public static final String NUMASS_DATA_LOADER_TYPE = "numassData";
public static final String GROUP_META_FILE = "numass_group_meta";
protected NumassStorage(FileStorage parent, String path, Meta config) throws StorageException {
super(parent, path, config);
super.refresh();
//TODO read meta from numass_group_meta to .numass element
}
protected NumassStorage(FileObject dir, Meta config) throws StorageException {
super(dir, config);
super.refresh();
}
/**
* Create root numass storage
*
@@ -131,6 +120,17 @@
}
}
protected NumassStorage(FileStorage parent, String path, Meta config) throws StorageException {
super(parent, path, config);
super.refresh();
//TODO read meta from numass_group_meta to .numass element
}
protected NumassStorage(FileObject dir, Meta config) throws StorageException {
super(dir, config);
super.refresh();
}
@Override
protected void updateDirectoryLoaders() {
try {

View File

@@ -0,0 +1 @@
inr.numass.storage.NumassDataFactory
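The single line above appears to be a service registration entry for NumassDataFactory (presumably a META-INF/services file for the DataFactory service; the file name is not shown in this view). Under that assumption, a registered factory could be looked up by name roughly as follows; the lookup code is an illustrative sketch, not part of the commit.

// Illustrative sketch: locating the factory via java.util.ServiceLoader, assuming the
// file above registers inr.numass.storage.NumassDataFactory under the DataFactory service.
import hep.dataforge.data.DataFactory;
import java.util.ServiceLoader;

class FactoryLookupExample {
    @SuppressWarnings("rawtypes")
    static DataFactory findNumassFactory() {
        for (DataFactory factory : ServiceLoader.load(DataFactory.class)) {
            if ("numass".equals(factory.getName())) { // getName() overridden by NumassDataFactory above
                return factory;
            }
        }
        return null; // numass factory not registered
    }
}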