Trying to implement caching
This commit is contained in:
parent 45391ab0e0
commit 875123c81f
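Reviewer note: the common thread in the hunks below is that action methods stop receiving a Context argument and instead use the context the action was built with (getContext(), buildActionOutput(name), PlotsPlugin.buildFrom(getContext())). A minimal self-contained sketch of that pattern follows; the names are simplified illustrations, not the dataforge API.

// Illustrative only: before, the caller passed the context on every call;
// after, the action is bound to a context once and resolves it internally.
final class Context {
    final String name;

    Context(String name) {
        this.name = name;
    }
}

abstract class BoundAction<T, R> {
    private Context context;

    // analogue of Action.withContext(...) used by the new PrepareTask below (assumption)
    BoundAction<T, R> withContext(Context context) {
        this.context = context;
        return this;
    }

    // analogue of the getContext() call the refactored methods rely on (assumption)
    protected Context getContext() {
        return context;
    }

    // new-style signature: no Context parameter
    protected abstract R execute(String name, T input);
}

class UppercaseAction extends BoundAction<String, String> {
    @Override
    protected String execute(String name, String input) {
        // the context is read from the bound field rather than from an argument
        System.out.println("running " + name + " in context " + getContext().name);
        return input.toUpperCase();
    }
}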
@@ -45,7 +45,7 @@ import java.util.stream.StreamSupport;

 public class PlotFitResultAction extends OneToOneAction<FitState, FitState> {

     @Override
-    protected FitState execute(Context context, Reportable log, String name, Laminate metaData, FitState input) {
+    protected FitState execute(Reportable log, String name, Laminate metaData, FitState input) {

         PointSource data = input.getDataSet();
         if (!(input.getModel() instanceof XYModel)) {
@@ -66,7 +66,7 @@ public class PlotFitResultAction extends OneToOneAction<FitState, FitState> {
         Function<Double, Double> function = (x) -> model.getSpectrum().value(x, input.getParameters());

         XYPlotFrame frame = (XYPlotFrame) PlotsPlugin
-                .buildFrom(context).buildPlotFrame(getName(), name,
+                .buildFrom(getContext()).buildPlotFrame(getName(), name,
                         metaData.getNode("plot", Meta.empty()));

         PlottableXYFunction fit = new PlottableXYFunction("fit");
@@ -26,7 +26,6 @@ import hep.dataforge.exceptions.DescriptorException;
 import hep.dataforge.maths.integration.GaussRuleIntegrator;
 import hep.dataforge.maths.integration.UnivariateIntegrator;
 import hep.dataforge.meta.Meta;
 import inr.numass.storage.SetDirectionUtility;
 import java.io.PrintWriter;

 /**
@@ -37,6 +36,7 @@ public class NumassContext extends Context {

     public static UnivariateIntegrator defaultIntegrator = new GaussRuleIntegrator(300);
     public static UnivariateIntegrator highDensityIntegrator = new GaussRuleIntegrator(500);

     public NumassContext(Context parent, Meta config) {
         super(parent, "numass", config);

@@ -89,7 +89,7 @@ public class NumassPlugin extends BasicPlugin {
     }

     @Override
-    public void detach(Context context) {
+    public void detach() {

     }
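Reviewer note: the two integrators above are fixed-order Gauss rules (300 and 500 nodes); the GaussRuleIntegrator internals are not part of this diff. For readers who want to see what such a rule does, here is a rough equivalent with Apache Commons Math (an assumption; the project may implement it differently).

import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.integration.gauss.GaussIntegrator;
import org.apache.commons.math3.analysis.integration.gauss.GaussIntegratorFactory;

public class GaussRuleExample {
    public static void main(String[] args) {
        // A 300-point Gauss-Legendre rule on [0, 1], analogous in spirit to defaultIntegrator above.
        GaussIntegrator rule = new GaussIntegratorFactory().legendre(300, 0.0, 1.0);
        UnivariateFunction f = x -> Math.exp(-x) * x * x;
        System.out.println("integral = " + rule.integrate(f));
    }
}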
@@ -6,7 +6,6 @@
 package inr.numass.actions;

 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.io.reports.Reportable;
 import hep.dataforge.meta.Laminate;
@@ -27,7 +26,7 @@ import java.util.List;
 public class AdjustErrorsAction extends OneToOneAction<Table, Table> {

     @Override
-    protected Table execute(Context context, Reportable log, String name, Laminate meta, Table input) {
+    protected Table execute(Reportable log, String name, Laminate meta, Table input) {
         List<DataPoint> points = new ArrayList<>();
         for (DataPoint dp : input) {
             points.add(evalPoint(meta, dp));
@@ -16,7 +16,6 @@
 package inr.numass.actions;

 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.description.ValueDef;
 import hep.dataforge.exceptions.ContentException;
@@ -41,7 +40,7 @@ import java.io.PrintWriter;
 public class DebunchAction extends OneToOneAction<RawNMFile, RawNMFile> {

     @Override
-    protected RawNMFile execute(Context context, Reportable log, String name, Laminate meta, RawNMFile source) throws ContentException {
+    protected RawNMFile execute(Reportable log, String name, Laminate meta, RawNMFile source) throws ContentException {
         log.report("File {} started", source.getName());

         int upper = meta.getInt("upperchanel", RawNMPoint.MAX_CHANEL);
@@ -67,7 +66,7 @@ public class DebunchAction extends OneToOneAction<RawNMFile, RawNMFile> {
         });
         log.report("File {} completed", source.getName());

-        log.getReport().print(new PrintWriter(buildActionOutput(context, name)));
+        log.getReport().print(new PrintWriter(buildActionOutput(name)));

         // res.configure(source.meta());
         return res;
@@ -47,7 +47,7 @@ public class FindBorderAction extends OneToOneAction<NumassData, Table> {
     private UnivariateFunction normCorrection = e -> 1 + 13.265 * Math.exp(-e / 2343.4);

     @Override
-    protected Table execute(Context context, Reportable log, String name, Laminate meta, NumassData source) throws ContentException {
+    protected Table execute(Reportable log, String name, Laminate meta, NumassData source) throws ContentException {
         log.report("File {} started", source.getName());

         int upperBorder = meta.getInt("upper", 4094);
@@ -67,7 +67,7 @@ public class FindBorderAction extends OneToOneAction<NumassData, Table> {
         fill(dataBuilder, source, lowerBorder, upperBorder, referencePoint);
         Table bData = dataBuilder.build();

-        OutputStream stream = buildActionOutput(context, name);
+        OutputStream stream = buildActionOutput(name);

         ColumnedDataWriter.writeDataSet(stream, bData, String.format("%s : lower = %d upper = %d", name, lowerBorder, upperBorder));
@@ -50,11 +50,11 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {

     @Override
     @SuppressWarnings("unchecked")
-    protected List<DataNode<Table>> buildGroups(Context context, DataNode input, Meta actionMeta) {
-        Meta meta = inputMeta(context, input.meta(), actionMeta);
+    protected List<DataNode<Table>> buildGroups(DataNode input, Meta actionMeta) {
+        Meta meta = inputMeta(input.meta(), actionMeta);
         List<DataNode<Table>> groups;
         if (meta.hasValue("grouping.byValue")) {
-            groups = super.buildGroups(context, input, actionMeta);
+            groups = super.buildGroups(input, actionMeta);
         } else {
             groups = GroupBuilder.byValue(MERGE_NAME, meta.getString(MERGE_NAME, "merge")).group(input);
         }
@@ -62,14 +62,14 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
     }

     @Override
-    protected Table execute(Context context, Reportable log, String nodeName, Map<String, Table> data, Meta meta) {
+    protected Table execute(Reportable log, String nodeName, Map<String, Table> data, Meta meta) {
         Table res = mergeDataSets(nodeName, data.values());
         return new ListTable(res.getFormat(),res.sort("Uset", true));
     }

     @Override
-    protected void afterGroup(Context context, Reportable log, String groupName, Meta outputMeta, Table output) {
-        OutputStream stream = buildActionOutput(context, groupName);
+    protected void afterGroup(Reportable log, String groupName, Meta outputMeta, Table output) {
+        OutputStream stream = buildActionOutput(groupName);
         ColumnedDataWriter.writeDataSet(stream, output, outputMeta.toString());
     }
@@ -53,7 +53,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
     //FIXME remove from state

     @Override
-    protected Table execute(Context context, Reportable log, String name, Laminate meta, Table sourceData) throws ContentException {
+    protected Table execute(Reportable log, String name, Laminate meta, Table sourceData) throws ContentException {

         double monitor = meta.getDouble("monitorPoint", Double.NaN);

@@ -144,7 +144,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
//        }
         Table data = new ListTable(dataList);

-        OutputStream stream = buildActionOutput(context, name);
+        OutputStream stream = buildActionOutput(name);

         ColumnedDataWriter.writeDataSet(stream, data, head);

@@ -152,15 +152,15 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
     }

     @Override
-    protected void afterAction(Context context, String name, Table res, Laminate meta) {
-        printMonitorData(context, meta);
-        super.afterAction(context, name, res, meta);
+    protected void afterAction(String name, Table res, Laminate meta) {
+        printMonitorData(meta);
+        super.afterAction(name, res, meta);
     }

-    private void printMonitorData(Context context, Meta meta) {
+    private void printMonitorData(Meta meta) {
         if (!monitorPoints.isEmpty()) {
             String monitorFileName = meta.getString("monitorFile", "monitor");
-            OutputStream stream = buildActionOutput(context, monitorFileName);
+            OutputStream stream = buildActionOutput(monitorFileName);
             ListTable data = new ListTable(monitorPoints);
             ColumnedDataWriter.writeDataSet(stream, data.sort("Timestamp", true), "Monitor points", monitorNames);
         }
@@ -31,7 +31,7 @@ import java.util.Map;
 public class PileupSimulationAction extends OneToOneAction<NumassData, Map<String, NumassData>> {

     @Override
-    protected Map<String, NumassData> execute(Context context, Reportable log, String name, Laminate inputMeta, NumassData input) {
+    protected Map<String, NumassData> execute(Reportable log, String name, Laminate inputMeta, NumassData input) {
         int lowerChannel = inputMeta.getInt("lowerChannel", 1);
         int upperChannel = inputMeta.getInt("upperChannel", RawNMPoint.MAX_CHANEL - 1);
@@ -16,7 +16,6 @@
 package inr.numass.actions;

 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.description.ValueDef;
 import hep.dataforge.exceptions.ContentException;
@@ -71,7 +70,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
     }

     @Override
-    protected ListTable execute(Context context, Reportable log, String name, Laminate meta, NumassData dataFile) {
+    protected ListTable execute(Reportable log, String name, Laminate meta, NumassData dataFile) {
//        log.report("File %s started", dataFile.getName());

         int upper = meta.getInt("upperWindow", RawNMPoint.MAX_CHANEL - 1);
@@ -131,7 +130,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {

         ListTable data = new ListTable(format, dataList);

-        OutputStream stream = buildActionOutput(context, name);
+        OutputStream stream = buildActionOutput(name);

         ColumnedDataWriter.writeDataSet(stream, data, head);
//        log.logString("File %s completed", dataFile.getName());
@@ -42,19 +42,19 @@ import inr.numass.storage.RawNMFile;
 public class ReadNumassDataAction extends OneToOneAction<Binary, NMFile> {

     @Override
-    protected NMFile execute(Context context, Reportable log, String name, Laminate meta, Binary source) throws ContentException {
+    protected NMFile execute(Reportable log, String name, Laminate meta, Binary source) throws ContentException {
//        log.logString("File '%s' started", source.getName());
         RawNMFile raw = getNumassData(source, meta);
         if (meta.getBoolean("paw", false)) {
-            raw.generatePAW(buildActionOutput(context, name + ".paw"));
+            raw.generatePAW(buildActionOutput(name + ".paw"));
         }

         if (meta.getNodeNames(false).contains("debunch")) {
             DebunchAction debunch = new DebunchAction();
             Laminate laminate = new Laminate(meta.getNode("debunch"))
-                    .setValueContext(context)
+                    .setValueContext(getContext())
                     .setDescriptor(debunch.getDescriptor());
-            raw = debunch.execute(context, log, name, laminate, raw);
+            raw = debunch.execute(log, name, laminate, raw);
         }

         NMFile result = new NMFile(raw);
@@ -6,7 +6,6 @@
 package inr.numass.actions;

 import hep.dataforge.actions.GenericAction;
 import hep.dataforge.context.Context;
 import hep.dataforge.context.DFProcess;
 import hep.dataforge.context.ProcessManager.Callback;
 import hep.dataforge.data.Data;
@@ -35,7 +34,7 @@ import inr.numass.storage.SetDirectionUtility;
 public class ReadNumassStorageAction extends GenericAction<Void, NumassData> {

     @Override
-    public DataNode<NumassData> run(Context context, DataNode<Void> data, Meta actionMeta) {
+    public DataNode<NumassData> run(DataNode<Void> data, Meta actionMeta) {
         try {
             NumassStorage storage = NumassStorage.buildNumassRoot(actionMeta.getString("uri"), true, false);
             DataFilter filter = new DataFilter().configure(actionMeta);
@@ -43,45 +42,46 @@ public class ReadNumassStorageAction extends GenericAction<Void, NumassData> {
             boolean forwardOnly = actionMeta.getBoolean("forwardOnly", false);
             boolean reverseOnly = actionMeta.getBoolean("reverseOnly", false);

-            DFProcess<DataSet<NumassData>> process = context.processManager().<DataSet<NumassData>>post(getName(), (Callback callback) -> {
-                //FIXME remove in later revisions
-                SetDirectionUtility.load(context);
-
-                DataSet.Builder<NumassData> builder = DataSet.builder(NumassData.class);
-                callback.setMaxProgress(StorageUtils.loaderStream(storage).count());
-                StorageUtils.loaderStream(storage).forEach(pair -> {
-                    Loader loader = pair.getValue();
-                    if (loader instanceof NumassData) {
-                        NumassDataLoader nd = (NumassDataLoader) loader;
-                        Data<NumassData> datum = new StaticData<>(nd);
-                        if (filter.acceptData(pair.getKey(), datum)) {
-                            boolean accept = true;
-                            if (forwardOnly || reverseOnly) {
-                                boolean reversed = nd.isReversed();
-                                accept = (reverseOnly && reversed) || (forwardOnly && !reversed);
-                            }
-                            if (accept) {
-                                builder.putData(pair.getKey(), datum);
-                            }
-                        }
-                    }
-                    callback.increaseProgress(1d);
-                });
-
-                if (actionMeta.getBoolean("loadLegacy", false)) {
-                    logger().info("Loading legacy files");
-                    storage.legacyFiles().forEach(nd -> {
-                        Data<NumassData> datum = new StaticData<>(nd);
-                        if (filter.acceptData(nd.getName(), datum)) {
-                            builder.putData("legacy." + nd.getName(), datum);
-                        }
-                    });
-                }
-                //FIXME remove in later revisions
-                SetDirectionUtility.save(context);
-
-                return builder.build();
-            });
+            DFProcess<DataSet<NumassData>> process = getContext().processManager()
+                    .<DataSet<NumassData>>post(getName(), (Callback callback) -> {
+                        //FIXME remove in later revisions
+                        SetDirectionUtility.load(getContext());
+
+                        DataSet.Builder<NumassData> builder = DataSet.builder(NumassData.class);
+                        callback.setMaxProgress(StorageUtils.loaderStream(storage).count());
+                        StorageUtils.loaderStream(storage).forEach(pair -> {
+                            Loader loader = pair.getValue();
+                            if (loader instanceof NumassData) {
+                                NumassDataLoader nd = (NumassDataLoader) loader;
+                                Data<NumassData> datum = new StaticData<>(nd);
+                                if (filter.acceptData(pair.getKey(), datum)) {
+                                    boolean accept = true;
+                                    if (forwardOnly || reverseOnly) {
+                                        boolean reversed = nd.isReversed();
+                                        accept = (reverseOnly && reversed) || (forwardOnly && !reversed);
+                                    }
+                                    if (accept) {
+                                        builder.putData(pair.getKey(), datum);
+                                    }
+                                }
+                            }
+                            callback.increaseProgress(1d);
+                        });
+
+                        if (actionMeta.getBoolean("loadLegacy", false)) {
+                            logger().info("Loading legacy files");
+                            storage.legacyFiles().forEach(nd -> {
+                                Data<NumassData> datum = new StaticData<>(nd);
+                                if (filter.acceptData(nd.getName(), datum)) {
+                                    builder.putData("legacy." + nd.getName(), datum);
+                                }
+                            });
+                        }
+                        //FIXME remove in later revisions
+                        SetDirectionUtility.save(getContext());
+
+                        return builder.build();
+                    });

             return process.getTask().get();
         } catch (Exception ex) {
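Reviewer note: the commit title says "Trying to implement caching"; in this action the cache shows up as the SetDirectionUtility.load(...)/save(...) pair wrapped around the storage scan. The utility's internals are not part of this diff; below is a rough, assumption-laden sketch of what a file-backed cache of per-set direction flags could look like, not the actual SetDirectionUtility.

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.HashMap;
import java.util.Map;

// Hypothetical sketch: a map of set name -> isReversed, serialized to a file in a
// temporary directory so repeated runs can skip recomputing the direction of each set.
class DirectionCache {
    private final Map<String, Boolean> directions = new HashMap<>();

    @SuppressWarnings("unchecked")
    void load(File cacheFile) {
        if (cacheFile.exists()) {
            try (ObjectInputStream in = new ObjectInputStream(new FileInputStream(cacheFile))) {
                directions.putAll((Map<String, Boolean>) in.readObject());
            } catch (IOException | ClassNotFoundException e) {
                directions.clear(); // corrupt or stale cache: fall back to recomputation
            }
        }
    }

    void save(File cacheFile) {
        try (ObjectOutputStream out = new ObjectOutputStream(new FileOutputStream(cacheFile))) {
            out.writeObject(new HashMap<>(directions));
        } catch (IOException e) {
            // best effort: failing to persist only costs time on the next run
        }
    }

    Boolean get(String setName) {
        return directions.get(setName);
    }

    void put(String setName, boolean reversed) {
        directions.put(setName, reversed);
    }
}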
@@ -42,7 +42,7 @@ import java.util.stream.Collectors;
 public class ShowEnergySpectrumAction extends OneToOneAction<NumassData, Table> {

     @Override
-    protected Table execute(Context context, Reportable log, String name, Laminate inputMeta, NumassData input) {
+    protected Table execute(Reportable log, String name, Laminate inputMeta, NumassData input) {
         int binning = inputMeta.getInt("binning", 20);
         boolean normalize = inputMeta.getBoolean("normalize", true);
         List<NMPoint> points = input.getNMPoints();
@@ -85,14 +85,14 @@ public class ShowEnergySpectrumAction extends OneToOneAction<NumassData, Table>
             builder.row(mb.build());
         });

-        OutputStream out = buildActionOutput(context, name);
+        OutputStream out = buildActionOutput(name);
         Table table = builder.build();

         ColumnedDataWriter.writeDataSet(out, table, inputMeta.toString());

         if (inputMeta.hasNode("plot") || inputMeta.getBoolean("plot", false)) {
             XYPlotFrame frame = (XYPlotFrame) PlotsPlugin
-                    .buildFrom(context).buildPlotFrame(getName(), name,
+                    .buildFrom(getContext()).buildPlotFrame(getName(), name,
                             inputMeta.getNode("plot", Meta.empty()));
             fillDetectorData(valueMap).forEach(frame::add);
@@ -16,7 +16,6 @@
 package inr.numass.actions;

 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
 import hep.dataforge.datafitter.FitState;
 import hep.dataforge.datafitter.FitTaskResult;
 import hep.dataforge.datafitter.Param;
@@ -66,7 +65,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
     private static final String[] names = {"X", "exPos", "ionPos", "exW", "ionW", "exIonRatio"};

     @Override
-    protected FitState execute(Context context, Reportable log, String name, Laminate meta, FitState input) {
+    protected FitState execute(Reportable log, String name, Laminate meta, FitState input) {
         ParamSet pars = input.getParameters();
         if (!pars.names().contains(names)) {
             LoggerFactory.getLogger(getClass()).error("Wrong input FitState. Must be loss spectrum fit.");
@@ -75,7 +74,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {

         UnivariateFunction scatterFunction;
         boolean calculateRatio = false;
-        XYPlotFrame frame = (XYPlotFrame) PlotsPlugin.buildFrom(context)
+        XYPlotFrame frame = (XYPlotFrame) PlotsPlugin.buildFrom(getContext())
                 .buildPlotFrame(getName(), name + ".loss",
                         new MetaBuilder("plot")
                                 .setValue("plotTitle", "Differential scattering crossection for " + name)
@@ -103,12 +102,12 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
             threshold = meta.getDouble("ionThreshold", 17);
             ionRatio = calcultateIonRatio(pars, threshold);
             log.report("The ionization ratio (using threshold {}) is {}", threshold, ionRatio);
-            ionRatioError = calultateIonRatioError(context, name, input, threshold);
+            ionRatioError = calultateIonRatioError(name, input, threshold);
             log.report("the ionization ration standard deviation (using threshold {}) is {}", threshold, ionRatioError);
         }

         if (meta.getBoolean("printResult", false)) {
-            PrintWriter writer = new PrintWriter(new OutputStreamWriter(buildActionOutput(context, name), Charset.forName("UTF-8")));
+            PrintWriter writer = new PrintWriter(new OutputStreamWriter(buildActionOutput(name), Charset.forName("UTF-8")));
//            writer.println("*** FIT PARAMETERS ***");
             input.print(writer);
//            for (Param param : pars.getSubSet(names).getParams()) {
@@ -204,14 +203,14 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
         return exProb / ionProb;
     }

-    public double calultateIonRatioError(Context context, String dataNeme, FitState state, double threshold) {
+    public double calultateIonRatioError(String dataNeme, FitState state, double threshold) {
         ParamSet parameters = state.getParameters().getSubSet(new String[]{"exPos", "ionPos", "exW", "ionW", "exIonRatio"});
         NamedMatrix covariance = state.getCovariance();
-        return calultateIonRatioError(context, dataNeme, parameters, covariance, threshold);
+        return calultateIonRatioError(dataNeme, parameters, covariance, threshold);
     }

     @SuppressWarnings("Unchecked")
-    public double calultateIonRatioError(Context context, String name, NamedValueSet parameters, NamedMatrix covariance, double threshold) {
+    public double calultateIonRatioError(String name, NamedValueSet parameters, NamedMatrix covariance, double threshold) {
         int number = 10000;

         double[] res = new GaussianParameterGenerator(parameters, covariance)
@@ -223,7 +222,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {

         Histogram hist = new Histogram(0.3, 0.5, 0.002);
         hist.fill(res);
-        XYPlotFrame frame = (XYPlotFrame) PlotsPlugin.buildFrom(context)
+        XYPlotFrame frame = (XYPlotFrame) PlotsPlugin.buildFrom(getContext())
                 .buildPlotFrame(getName(), name + ".ionRatio",
                         new MetaBuilder("plot").setValue("plotTitle", "Ion ratio Distribution for " + name)
                 );
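Reviewer note: calultateIonRatioError above draws 10000 parameter vectors from a Gaussian built on the fit covariance and histograms the derived ratio; GaussianParameterGenerator and Histogram are dataforge classes not shown in this diff, but the technique is ordinary Monte Carlo error propagation. A generic sketch with Apache Commons Math (an assumption; the project may not use this library for it):

import java.util.function.ToDoubleFunction;
import org.apache.commons.math3.distribution.MultivariateNormalDistribution;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

public class SampledErrorSketch {

    // Draw parameter vectors from a Gaussian with the fit covariance, evaluate the derived
    // quantity for each draw, and report the spread as its standard deviation.
    static double sampledStdDev(double[] means, double[][] covariance,
                                ToDoubleFunction<double[]> derivedQuantity, int samples) {
        MultivariateNormalDistribution generator = new MultivariateNormalDistribution(means, covariance);
        DescriptiveStatistics stats = new DescriptiveStatistics();
        for (int i = 0; i < samples; i++) {
            stats.addValue(derivedQuantity.applyAsDouble(generator.sample()));
        }
        return stats.getStandardDeviation();
    }

    public static void main(String[] args) {
        // Toy example: ratio of two correlated parameters, 10000 draws as in the action above.
        double[] means = {2.0, 4.0};
        double[][] cov = {{0.01, 0.002}, {0.002, 0.04}};
        double sigma = sampledStdDev(means, cov, p -> p[0] / p[1], 10000);
        System.out.println("sampled standard deviation = " + sigma);
    }
}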
@@ -46,7 +46,7 @@ public class SlicingAction extends OneToOneAction<NMFile, NMFile> {
     }

     @Override
-    protected NMFile execute(Context context, Reportable log, String name, Laminate meta, NMFile source) throws ContentException {
+    protected NMFile execute(Reportable log, String name, Laminate meta, NMFile source) throws ContentException {
         boolean normalize;
         Map<String, Pair<Integer, Integer>> slicingConfig;

@@ -70,7 +70,7 @@ public class SlicingAction extends OneToOneAction<NMFile, NMFile> {

         SlicedData sData = new SlicedData(source, slicingConfig, normalize);

-        OutputStream stream = buildActionOutput(context, name);
+        OutputStream stream = buildActionOutput(name);

         ColumnedDataWriter.writeDataSet(stream, sData, null);
@@ -46,11 +46,11 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {

     @Override
     @SuppressWarnings("unchecked")
-    protected List<DataNode<Table>> buildGroups(Context context, DataNode input, Meta actionMeta) {
-        Meta meta = inputMeta(context, input.meta(), actionMeta);
+    protected List<DataNode<Table>> buildGroups(DataNode input, Meta actionMeta) {
+        Meta meta = inputMeta(input.meta(), actionMeta);
         List<DataNode<Table>> groups;
         if (meta.hasValue("grouping.byValue")) {
-            groups = super.buildGroups(context, input, actionMeta);
+            groups = super.buildGroups(input, actionMeta);
         } else {
             groups = GroupBuilder.byValue(SUMMARY_NAME, meta.getString(SUMMARY_NAME, "summary")).group(input);
         }
@@ -58,7 +58,7 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
     }

     @Override
-    protected Table execute(Context context, Reportable log, String nodeName, Map<String, FitState> input, Meta meta) {
+    protected Table execute(Reportable log, String nodeName, Map<String, FitState> input, Meta meta) {
         String[] parNames = meta.getStringArray("parnames");
         String[] names = new String[2 * parNames.length + 2];
         names[0] = "file";
@@ -108,11 +108,11 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
     }

     @Override
-    protected void afterGroup(Context context, Reportable log, String groupName, Meta outputMeta, Table output) {
-        OutputStream stream = buildActionOutput(context, groupName);
+    protected void afterGroup(Reportable log, String groupName, Meta outputMeta, Table output) {
+        OutputStream stream = buildActionOutput(groupName);
         ColumnedDataWriter.writeDataSet(stream, output, groupName);

-        super.afterGroup(context, log, groupName, outputMeta, output);
+        super.afterGroup(log, groupName, outputMeta, output);
     }

 }
numass-main/src/main/java/inr/numass/tasks/PrepareTask.java (new file, 89 lines)
@@ -0,0 +1,89 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package inr.numass.tasks;

import hep.dataforge.actions.Action;
import hep.dataforge.context.Context;
import hep.dataforge.context.ProcessManager;
import hep.dataforge.data.DataNode;
import hep.dataforge.data.DataTree;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.Template;
import hep.dataforge.tables.Table;
import hep.dataforge.workspace.GenericTask;
import hep.dataforge.workspace.TaskModel;
import hep.dataforge.workspace.TaskState;
import inr.numass.actions.MergeDataAction;
import inr.numass.actions.PrepareDataAction;
import inr.numass.actions.ReadNumassStorageAction;
import inr.numass.storage.NumassData;

/**
 *
 * @author Alexander Nozik
 */
public class PrepareTask extends GenericTask {

    /*
    <action type="readStorage" uri="file://D:\Work\Numass\data\2016_04\T2_data\">
        <include pattern="Fill_2*"/>
        <exclude pattern="Fill_2_4_Empty*"/>
        <exclude pattern="Fill_2_1.set_1*"/>
        <exclude pattern="Fill_2_1.set_2*"/>
        <exclude pattern="Fill_2_1.set_3*"/>
        <exclude pattern="Fill_2_1.set_4*"/>
    </action>
    <action type="prepareData" lowerWindow="500" upperWindow="1700" deadTime="4.9534e-6 + 1.51139e-10*U">
        <underflow function = "1.0 + 15.216 * Math.exp( - U / 2477.46 )" threshold = "14000"/>
    </action>
    <action type="monitor" monitorPoint="${numass.monitorPoint}" monitorFile="${numass.setName}_monitor"/>
    <action type="merge" mergeName="${numass.setName}"/>
    */
    @Override
    @SuppressWarnings("unchecked")
    protected TaskState transform(ProcessManager.Callback callback, Context context, TaskState state, Meta config) {
        //acquiring initial data. Data node could not be empty
        Meta dataMeta = Template.compileTemplate(config.getNode("data"), config);
        DataNode<NumassData> data = runAction(new ReadNumassStorageAction(), callback, context, DataNode.empty(), dataMeta);
        state.setData("data", data);
        //preparing table data
        Meta prepareMeta = Template.compileTemplate(config.getNode("prepare"), config);
        DataNode<Table> tables = runAction(new PrepareDataAction(), callback, context, data, prepareMeta);
        state.setData("prepare", tables);
        //merging if needed
        if (config.hasNode("merge")) {
            DataTree.Builder resultBuilder = DataTree.builder(Table.class);
            tables.dataStream().forEach(pair -> resultBuilder.putData(pair.getKey(), pair.getValue()));
            config.getNodes("merge").forEach(mergeNode -> {
                Meta mergeMeta = Template.compileTemplate(mergeNode, config);
                DataNode<Table> mergeData = runAction(new MergeDataAction(), callback, context, tables, mergeMeta);
                mergeData.dataStream().forEach(pair -> {
                    resultBuilder.putData("merge." + pair.getKey(), pair.getValue());
                });
            });
        } else {
            state.finish(tables);
        }
        return state;
    }

    private <T, R> DataNode<R> runAction(Action<T, R> action, ProcessManager.Callback callback, Context context, DataNode<T> data, Meta meta) {
        return action.withContext(context).withParentProcess(callback.processName()).run(data, meta);
    }

    @Override
    public void validate(TaskModel model) {
        if (!model.meta().hasNode("data")) {

        }
    }

    @Override
    public String getName() {
        return "numass.prepare";
    }

}
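Reviewer note: the commit message says "Trying to implement caching", but PrepareTask itself only stores intermediate results in the TaskState (setData("data", ...), setData("prepare", ...)). If those intermediates were meant to be reused across runs, a memoizing wrapper along these lines could sit in front of runAction; this is a hypothetical sketch, not part of the commit.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Supplier;

// Hypothetical: cache a computed result under a key built from the task name plus a hash of
// its configuration, so a repeated run with the same config reuses the stored value.
class ResultCache<R> {
    private final Map<String, R> cache = new ConcurrentHashMap<>();

    R getOrCompute(String key, Supplier<R> computation) {
        return cache.computeIfAbsent(key, k -> computation.get());
    }
}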
numass-main/src/main/java/inr/numass/test/TestLazyData.java (new file, 41 lines)
@@ -0,0 +1,41 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package inr.numass.test;

import hep.dataforge.actions.ActionController;
import java.util.concurrent.CompletableFuture;

/**
 *
 * @author Alexander Nozik
 */
public class TestLazyData {

    /**
     * @param args the command line arguments
     */
    public static void main(String[] args) {
        ActionController lock = new ActionController();
        CompletableFuture<String> future = lock.hold(() -> {
            System.out.println(" I am initialized");
            return "abcd";
        });
        future = future.<String>thenCompose((String res) -> CompletableFuture.supplyAsync(() -> {
            System.out.println(" I am capitalized");
            return res.toUpperCase();
        }));
        future = future.handleAsync((res, err) -> {
            System.out.println(" I am handled");
            return "handled " + res;
        });
        System.out.println("Releasing hold");
        lock.release();

//        dat1.getInFuture().thenRunAsync(() -> System.out.println(" I am finished"));
//        dat1.getInFuture().thenAcceptAsync((res) -> System.out.println(" I am finished"));
    }

}
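Reviewer note: TestLazyData drives an ActionController that holds a computation until release() is called. For readers unfamiliar with the idiom, here is a JDK-only gate with roughly the same behaviour (ActionController's real API may differ; the names below are illustrative).

import java.util.concurrent.CompletableFuture;
import java.util.function.Supplier;

// Nothing chained behind the gate runs until release() completes it.
class HoldGate {
    private final CompletableFuture<Void> gate = new CompletableFuture<>();

    <T> CompletableFuture<T> hold(Supplier<T> supplier) {
        // the supplier is evaluated only after release()
        return gate.thenApplyAsync(ignored -> supplier.get());
    }

    void release() {
        gate.complete(null);
    }
}

class HoldGateDemo {
    public static void main(String[] args) throws Exception {
        HoldGate lock = new HoldGate();
        CompletableFuture<String> future = lock.hold(() -> "abcd")
                .thenApply(String::toUpperCase)
                .handle((res, err) -> "handled " + res);
        lock.release();
        System.out.println(future.get()); // prints "handled ABCD"
    }
}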
@@ -158,6 +158,7 @@ public class NumassWorkbenchController implements Initializable, StagePaneHolder
         context.setIO(new WorkbenchIOManager(new NumassIO(), this));
         buildContextPane();
         context.getReport().addReportListener(new FXReportListener(logPane));

         // display plots iside workbench
         PlotsPlugin.buildFrom(context).setPlotHolderDelegate(this);

@@ -41,6 +41,7 @@ public class SetDirectionUtility {
         return new File(context.io().getTmpDirectory(), FILE_NAME);
     }

     @SuppressWarnings("unchecked")
     public static synchronized void load(Context context) {
         context.getLogger().info("Loading set direction utility");
         File file = cacheFile(context);