Trying to implement caching

This commit is contained in:
Alexander Nozik 2016-07-17 17:35:07 +03:00
parent 45391ab0e0
commit 875123c81f
20 changed files with 217 additions and 89 deletions

View File

@ -45,7 +45,7 @@ import java.util.stream.StreamSupport;
public class PlotFitResultAction extends OneToOneAction<FitState, FitState> { public class PlotFitResultAction extends OneToOneAction<FitState, FitState> {
@Override @Override
protected FitState execute(Context context, Reportable log, String name, Laminate metaData, FitState input) { protected FitState execute(Reportable log, String name, Laminate metaData, FitState input) {
PointSource data = input.getDataSet(); PointSource data = input.getDataSet();
if (!(input.getModel() instanceof XYModel)) { if (!(input.getModel() instanceof XYModel)) {
@ -66,7 +66,7 @@ public class PlotFitResultAction extends OneToOneAction<FitState, FitState> {
Function<Double, Double> function = (x) -> model.getSpectrum().value(x, input.getParameters()); Function<Double, Double> function = (x) -> model.getSpectrum().value(x, input.getParameters());
XYPlotFrame frame = (XYPlotFrame) PlotsPlugin XYPlotFrame frame = (XYPlotFrame) PlotsPlugin
.buildFrom(context).buildPlotFrame(getName(), name, .buildFrom(getContext()).buildPlotFrame(getName(), name,
metaData.getNode("plot", Meta.empty())); metaData.getNode("plot", Meta.empty()));
PlottableXYFunction fit = new PlottableXYFunction("fit"); PlottableXYFunction fit = new PlottableXYFunction("fit");

View File

@ -26,7 +26,6 @@ import hep.dataforge.exceptions.DescriptorException;
import hep.dataforge.maths.integration.GaussRuleIntegrator; import hep.dataforge.maths.integration.GaussRuleIntegrator;
import hep.dataforge.maths.integration.UnivariateIntegrator; import hep.dataforge.maths.integration.UnivariateIntegrator;
import hep.dataforge.meta.Meta; import hep.dataforge.meta.Meta;
import inr.numass.storage.SetDirectionUtility;
import java.io.PrintWriter; import java.io.PrintWriter;
/** /**
@ -38,6 +37,7 @@ public class NumassContext extends Context {
public static UnivariateIntegrator defaultIntegrator = new GaussRuleIntegrator(300); public static UnivariateIntegrator defaultIntegrator = new GaussRuleIntegrator(300);
public static UnivariateIntegrator highDensityIntegrator = new GaussRuleIntegrator(500); public static UnivariateIntegrator highDensityIntegrator = new GaussRuleIntegrator(500);
public NumassContext(Context parent, Meta config) { public NumassContext(Context parent, Meta config) {
super(parent, "numass", config); super(parent, "numass", config);
init(); init();

View File

@ -89,7 +89,7 @@ public class NumassPlugin extends BasicPlugin {
} }
@Override @Override
public void detach(Context context) { public void detach() {
} }

View File

@ -6,7 +6,6 @@
package inr.numass.actions; package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction; import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef; import hep.dataforge.description.TypedActionDef;
import hep.dataforge.io.reports.Reportable; import hep.dataforge.io.reports.Reportable;
import hep.dataforge.meta.Laminate; import hep.dataforge.meta.Laminate;
@ -27,7 +26,7 @@ import java.util.List;
public class AdjustErrorsAction extends OneToOneAction<Table, Table> { public class AdjustErrorsAction extends OneToOneAction<Table, Table> {
@Override @Override
protected Table execute(Context context, Reportable log, String name, Laminate meta, Table input) { protected Table execute(Reportable log, String name, Laminate meta, Table input) {
List<DataPoint> points = new ArrayList<>(); List<DataPoint> points = new ArrayList<>();
for (DataPoint dp : input) { for (DataPoint dp : input) {
points.add(evalPoint(meta, dp)); points.add(evalPoint(meta, dp));

View File

@ -16,7 +16,6 @@
package inr.numass.actions; package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction; import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef; import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef; import hep.dataforge.description.ValueDef;
import hep.dataforge.exceptions.ContentException; import hep.dataforge.exceptions.ContentException;
@ -41,7 +40,7 @@ import java.io.PrintWriter;
public class DebunchAction extends OneToOneAction<RawNMFile, RawNMFile> { public class DebunchAction extends OneToOneAction<RawNMFile, RawNMFile> {
@Override @Override
protected RawNMFile execute(Context context, Reportable log, String name, Laminate meta, RawNMFile source) throws ContentException { protected RawNMFile execute(Reportable log, String name, Laminate meta, RawNMFile source) throws ContentException {
log.report("File {} started", source.getName()); log.report("File {} started", source.getName());
int upper = meta.getInt("upperchanel", RawNMPoint.MAX_CHANEL); int upper = meta.getInt("upperchanel", RawNMPoint.MAX_CHANEL);
@ -67,7 +66,7 @@ public class DebunchAction extends OneToOneAction<RawNMFile, RawNMFile> {
}); });
log.report("File {} completed", source.getName()); log.report("File {} completed", source.getName());
log.getReport().print(new PrintWriter(buildActionOutput(context, name))); log.getReport().print(new PrintWriter(buildActionOutput(name)));
// res.configure(source.meta()); // res.configure(source.meta());
return res; return res;

View File

@ -47,7 +47,7 @@ public class FindBorderAction extends OneToOneAction<NumassData, Table> {
private UnivariateFunction normCorrection = e -> 1 + 13.265 * Math.exp(-e / 2343.4); private UnivariateFunction normCorrection = e -> 1 + 13.265 * Math.exp(-e / 2343.4);
@Override @Override
protected Table execute(Context context, Reportable log, String name, Laminate meta, NumassData source) throws ContentException { protected Table execute(Reportable log, String name, Laminate meta, NumassData source) throws ContentException {
log.report("File {} started", source.getName()); log.report("File {} started", source.getName());
int upperBorder = meta.getInt("upper", 4094); int upperBorder = meta.getInt("upper", 4094);
@ -67,7 +67,7 @@ public class FindBorderAction extends OneToOneAction<NumassData, Table> {
fill(dataBuilder, source, lowerBorder, upperBorder, referencePoint); fill(dataBuilder, source, lowerBorder, upperBorder, referencePoint);
Table bData = dataBuilder.build(); Table bData = dataBuilder.build();
OutputStream stream = buildActionOutput(context, name); OutputStream stream = buildActionOutput(name);
ColumnedDataWriter.writeDataSet(stream, bData, String.format("%s : lower = %d upper = %d", name, lowerBorder, upperBorder)); ColumnedDataWriter.writeDataSet(stream, bData, String.format("%s : lower = %d upper = %d", name, lowerBorder, upperBorder));

View File

@ -50,11 +50,11 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
@Override @Override
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
protected List<DataNode<Table>> buildGroups(Context context, DataNode input, Meta actionMeta) { protected List<DataNode<Table>> buildGroups(DataNode input, Meta actionMeta) {
Meta meta = inputMeta(context, input.meta(), actionMeta); Meta meta = inputMeta(input.meta(), actionMeta);
List<DataNode<Table>> groups; List<DataNode<Table>> groups;
if (meta.hasValue("grouping.byValue")) { if (meta.hasValue("grouping.byValue")) {
groups = super.buildGroups(context, input, actionMeta); groups = super.buildGroups(input, actionMeta);
} else { } else {
groups = GroupBuilder.byValue(MERGE_NAME, meta.getString(MERGE_NAME, "merge")).group(input); groups = GroupBuilder.byValue(MERGE_NAME, meta.getString(MERGE_NAME, "merge")).group(input);
} }
@ -62,14 +62,14 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
} }
@Override @Override
protected Table execute(Context context, Reportable log, String nodeName, Map<String, Table> data, Meta meta) { protected Table execute(Reportable log, String nodeName, Map<String, Table> data, Meta meta) {
Table res = mergeDataSets(nodeName, data.values()); Table res = mergeDataSets(nodeName, data.values());
return new ListTable(res.getFormat(),res.sort("Uset", true)); return new ListTable(res.getFormat(),res.sort("Uset", true));
} }
@Override @Override
protected void afterGroup(Context context, Reportable log, String groupName, Meta outputMeta, Table output) { protected void afterGroup(Reportable log, String groupName, Meta outputMeta, Table output) {
OutputStream stream = buildActionOutput(context, groupName); OutputStream stream = buildActionOutput(groupName);
ColumnedDataWriter.writeDataSet(stream, output, outputMeta.toString()); ColumnedDataWriter.writeDataSet(stream, output, outputMeta.toString());
} }

View File

@ -53,7 +53,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
//FIXME remove from state //FIXME remove from state
@Override @Override
protected Table execute(Context context, Reportable log, String name, Laminate meta, Table sourceData) throws ContentException { protected Table execute(Reportable log, String name, Laminate meta, Table sourceData) throws ContentException {
double monitor = meta.getDouble("monitorPoint", Double.NaN); double monitor = meta.getDouble("monitorPoint", Double.NaN);
@ -144,7 +144,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
// } // }
Table data = new ListTable(dataList); Table data = new ListTable(dataList);
OutputStream stream = buildActionOutput(context, name); OutputStream stream = buildActionOutput(name);
ColumnedDataWriter.writeDataSet(stream, data, head); ColumnedDataWriter.writeDataSet(stream, data, head);
@ -152,15 +152,15 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
} }
@Override @Override
protected void afterAction(Context context, String name, Table res, Laminate meta) { protected void afterAction(String name, Table res, Laminate meta) {
printMonitorData(context, meta); printMonitorData(meta);
super.afterAction(context, name, res, meta); super.afterAction(name, res, meta);
} }
private void printMonitorData(Context context, Meta meta) { private void printMonitorData(Meta meta) {
if (!monitorPoints.isEmpty()) { if (!monitorPoints.isEmpty()) {
String monitorFileName = meta.getString("monitorFile", "monitor"); String monitorFileName = meta.getString("monitorFile", "monitor");
OutputStream stream = buildActionOutput(context, monitorFileName); OutputStream stream = buildActionOutput(monitorFileName);
ListTable data = new ListTable(monitorPoints); ListTable data = new ListTable(monitorPoints);
ColumnedDataWriter.writeDataSet(stream, data.sort("Timestamp", true), "Monitor points", monitorNames); ColumnedDataWriter.writeDataSet(stream, data.sort("Timestamp", true), "Monitor points", monitorNames);
} }

View File

@ -31,7 +31,7 @@ import java.util.Map;
public class PileupSimulationAction extends OneToOneAction<NumassData, Map<String, NumassData>> { public class PileupSimulationAction extends OneToOneAction<NumassData, Map<String, NumassData>> {
@Override @Override
protected Map<String, NumassData> execute(Context context, Reportable log, String name, Laminate inputMeta, NumassData input) { protected Map<String, NumassData> execute(Reportable log, String name, Laminate inputMeta, NumassData input) {
int lowerChannel = inputMeta.getInt("lowerChannel", 1); int lowerChannel = inputMeta.getInt("lowerChannel", 1);
int upperChannel = inputMeta.getInt("upperChannel", RawNMPoint.MAX_CHANEL - 1); int upperChannel = inputMeta.getInt("upperChannel", RawNMPoint.MAX_CHANEL - 1);

View File

@ -16,7 +16,6 @@
package inr.numass.actions; package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction; import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef; import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef; import hep.dataforge.description.ValueDef;
import hep.dataforge.exceptions.ContentException; import hep.dataforge.exceptions.ContentException;
@ -71,7 +70,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
} }
@Override @Override
protected ListTable execute(Context context, Reportable log, String name, Laminate meta, NumassData dataFile) { protected ListTable execute(Reportable log, String name, Laminate meta, NumassData dataFile) {
// log.report("File %s started", dataFile.getName()); // log.report("File %s started", dataFile.getName());
int upper = meta.getInt("upperWindow", RawNMPoint.MAX_CHANEL - 1); int upper = meta.getInt("upperWindow", RawNMPoint.MAX_CHANEL - 1);
@ -131,7 +130,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
ListTable data = new ListTable(format, dataList); ListTable data = new ListTable(format, dataList);
OutputStream stream = buildActionOutput(context, name); OutputStream stream = buildActionOutput(name);
ColumnedDataWriter.writeDataSet(stream, data, head); ColumnedDataWriter.writeDataSet(stream, data, head);
// log.logString("File %s completed", dataFile.getName()); // log.logString("File %s completed", dataFile.getName());

View File

@ -42,19 +42,19 @@ import inr.numass.storage.RawNMFile;
public class ReadNumassDataAction extends OneToOneAction<Binary, NMFile> { public class ReadNumassDataAction extends OneToOneAction<Binary, NMFile> {
@Override @Override
protected NMFile execute(Context context, Reportable log, String name, Laminate meta, Binary source) throws ContentException { protected NMFile execute(Reportable log, String name, Laminate meta, Binary source) throws ContentException {
// log.logString("File '%s' started", source.getName()); // log.logString("File '%s' started", source.getName());
RawNMFile raw = getNumassData(source, meta); RawNMFile raw = getNumassData(source, meta);
if (meta.getBoolean("paw", false)) { if (meta.getBoolean("paw", false)) {
raw.generatePAW(buildActionOutput(context, name + ".paw")); raw.generatePAW(buildActionOutput(name + ".paw"));
} }
if (meta.getNodeNames(false).contains("debunch")) { if (meta.getNodeNames(false).contains("debunch")) {
DebunchAction debunch = new DebunchAction(); DebunchAction debunch = new DebunchAction();
Laminate laminate = new Laminate(meta.getNode("debunch")) Laminate laminate = new Laminate(meta.getNode("debunch"))
.setValueContext(context) .setValueContext(getContext())
.setDescriptor(debunch.getDescriptor()); .setDescriptor(debunch.getDescriptor());
raw = debunch.execute(context, log, name, laminate, raw); raw = debunch.execute(log, name, laminate, raw);
} }
NMFile result = new NMFile(raw); NMFile result = new NMFile(raw);

View File

@ -6,7 +6,6 @@
package inr.numass.actions; package inr.numass.actions;
import hep.dataforge.actions.GenericAction; import hep.dataforge.actions.GenericAction;
import hep.dataforge.context.Context;
import hep.dataforge.context.DFProcess; import hep.dataforge.context.DFProcess;
import hep.dataforge.context.ProcessManager.Callback; import hep.dataforge.context.ProcessManager.Callback;
import hep.dataforge.data.Data; import hep.dataforge.data.Data;
@ -35,7 +34,7 @@ import inr.numass.storage.SetDirectionUtility;
public class ReadNumassStorageAction extends GenericAction<Void, NumassData> { public class ReadNumassStorageAction extends GenericAction<Void, NumassData> {
@Override @Override
public DataNode<NumassData> run(Context context, DataNode<Void> data, Meta actionMeta) { public DataNode<NumassData> run(DataNode<Void> data, Meta actionMeta) {
try { try {
NumassStorage storage = NumassStorage.buildNumassRoot(actionMeta.getString("uri"), true, false); NumassStorage storage = NumassStorage.buildNumassRoot(actionMeta.getString("uri"), true, false);
DataFilter filter = new DataFilter().configure(actionMeta); DataFilter filter = new DataFilter().configure(actionMeta);
@ -43,45 +42,46 @@ public class ReadNumassStorageAction extends GenericAction<Void, NumassData> {
boolean forwardOnly = actionMeta.getBoolean("forwardOnly", false); boolean forwardOnly = actionMeta.getBoolean("forwardOnly", false);
boolean reverseOnly = actionMeta.getBoolean("reverseOnly", false); boolean reverseOnly = actionMeta.getBoolean("reverseOnly", false);
DFProcess<DataSet<NumassData>> process = context.processManager().<DataSet<NumassData>>post(getName(), (Callback callback) -> { DFProcess<DataSet<NumassData>> process = getContext().processManager()
//FIXME remove in later revisions .<DataSet<NumassData>>post(getName(), (Callback callback) -> {
SetDirectionUtility.load(context); //FIXME remove in later revisions
SetDirectionUtility.load(getContext());
DataSet.Builder<NumassData> builder = DataSet.builder(NumassData.class); DataSet.Builder<NumassData> builder = DataSet.builder(NumassData.class);
callback.setMaxProgress(StorageUtils.loaderStream(storage).count()); callback.setMaxProgress(StorageUtils.loaderStream(storage).count());
StorageUtils.loaderStream(storage).forEach(pair -> { StorageUtils.loaderStream(storage).forEach(pair -> {
Loader loader = pair.getValue(); Loader loader = pair.getValue();
if (loader instanceof NumassData) { if (loader instanceof NumassData) {
NumassDataLoader nd = (NumassDataLoader) loader; NumassDataLoader nd = (NumassDataLoader) loader;
Data<NumassData> datum = new StaticData<>(nd); Data<NumassData> datum = new StaticData<>(nd);
if (filter.acceptData(pair.getKey(), datum)) { if (filter.acceptData(pair.getKey(), datum)) {
boolean accept = true; boolean accept = true;
if (forwardOnly || reverseOnly) { if (forwardOnly || reverseOnly) {
boolean reversed = nd.isReversed(); boolean reversed = nd.isReversed();
accept = (reverseOnly && reversed) || (forwardOnly && !reversed); accept = (reverseOnly && reversed) || (forwardOnly && !reversed);
}
if (accept) {
builder.putData(pair.getKey(), datum);
}
}
} }
if (accept) { callback.increaseProgress(1d);
builder.putData(pair.getKey(), datum); });
}
}
}
callback.increaseProgress(1d);
});
if (actionMeta.getBoolean("loadLegacy", false)) { if (actionMeta.getBoolean("loadLegacy", false)) {
logger().info("Loading legacy files"); logger().info("Loading legacy files");
storage.legacyFiles().forEach(nd -> { storage.legacyFiles().forEach(nd -> {
Data<NumassData> datum = new StaticData<>(nd); Data<NumassData> datum = new StaticData<>(nd);
if (filter.acceptData(nd.getName(), datum)) { if (filter.acceptData(nd.getName(), datum)) {
builder.putData("legacy." + nd.getName(), datum); builder.putData("legacy." + nd.getName(), datum);
}
});
} }
//FIXME remove in later revisions
SetDirectionUtility.save(getContext());
return builder.build();
}); });
}
//FIXME remove in later revisions
SetDirectionUtility.save(context);
return builder.build();
});
return process.getTask().get(); return process.getTask().get();
} catch (Exception ex) { } catch (Exception ex) {

View File

@ -42,7 +42,7 @@ import java.util.stream.Collectors;
public class ShowEnergySpectrumAction extends OneToOneAction<NumassData, Table> { public class ShowEnergySpectrumAction extends OneToOneAction<NumassData, Table> {
@Override @Override
protected Table execute(Context context, Reportable log, String name, Laminate inputMeta, NumassData input) { protected Table execute(Reportable log, String name, Laminate inputMeta, NumassData input) {
int binning = inputMeta.getInt("binning", 20); int binning = inputMeta.getInt("binning", 20);
boolean normalize = inputMeta.getBoolean("normalize", true); boolean normalize = inputMeta.getBoolean("normalize", true);
List<NMPoint> points = input.getNMPoints(); List<NMPoint> points = input.getNMPoints();
@ -85,14 +85,14 @@ public class ShowEnergySpectrumAction extends OneToOneAction<NumassData, Table>
builder.row(mb.build()); builder.row(mb.build());
}); });
OutputStream out = buildActionOutput(context, name); OutputStream out = buildActionOutput(name);
Table table = builder.build(); Table table = builder.build();
ColumnedDataWriter.writeDataSet(out, table, inputMeta.toString()); ColumnedDataWriter.writeDataSet(out, table, inputMeta.toString());
if (inputMeta.hasNode("plot") || inputMeta.getBoolean("plot", false)) { if (inputMeta.hasNode("plot") || inputMeta.getBoolean("plot", false)) {
XYPlotFrame frame = (XYPlotFrame) PlotsPlugin XYPlotFrame frame = (XYPlotFrame) PlotsPlugin
.buildFrom(context).buildPlotFrame(getName(), name, .buildFrom(getContext()).buildPlotFrame(getName(), name,
inputMeta.getNode("plot", Meta.empty())); inputMeta.getNode("plot", Meta.empty()));
fillDetectorData(valueMap).forEach(frame::add); fillDetectorData(valueMap).forEach(frame::add);

View File

@ -16,7 +16,6 @@
package inr.numass.actions; package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction; import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.datafitter.FitState; import hep.dataforge.datafitter.FitState;
import hep.dataforge.datafitter.FitTaskResult; import hep.dataforge.datafitter.FitTaskResult;
import hep.dataforge.datafitter.Param; import hep.dataforge.datafitter.Param;
@ -66,7 +65,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
private static final String[] names = {"X", "exPos", "ionPos", "exW", "ionW", "exIonRatio"}; private static final String[] names = {"X", "exPos", "ionPos", "exW", "ionW", "exIonRatio"};
@Override @Override
protected FitState execute(Context context, Reportable log, String name, Laminate meta, FitState input) { protected FitState execute(Reportable log, String name, Laminate meta, FitState input) {
ParamSet pars = input.getParameters(); ParamSet pars = input.getParameters();
if (!pars.names().contains(names)) { if (!pars.names().contains(names)) {
LoggerFactory.getLogger(getClass()).error("Wrong input FitState. Must be loss spectrum fit."); LoggerFactory.getLogger(getClass()).error("Wrong input FitState. Must be loss spectrum fit.");
@ -75,7 +74,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
UnivariateFunction scatterFunction; UnivariateFunction scatterFunction;
boolean calculateRatio = false; boolean calculateRatio = false;
XYPlotFrame frame = (XYPlotFrame) PlotsPlugin.buildFrom(context) XYPlotFrame frame = (XYPlotFrame) PlotsPlugin.buildFrom(getContext())
.buildPlotFrame(getName(), name + ".loss", .buildPlotFrame(getName(), name + ".loss",
new MetaBuilder("plot") new MetaBuilder("plot")
.setValue("plotTitle", "Differential scattering crossection for " + name) .setValue("plotTitle", "Differential scattering crossection for " + name)
@ -103,12 +102,12 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
threshold = meta.getDouble("ionThreshold", 17); threshold = meta.getDouble("ionThreshold", 17);
ionRatio = calcultateIonRatio(pars, threshold); ionRatio = calcultateIonRatio(pars, threshold);
log.report("The ionization ratio (using threshold {}) is {}", threshold, ionRatio); log.report("The ionization ratio (using threshold {}) is {}", threshold, ionRatio);
ionRatioError = calultateIonRatioError(context, name, input, threshold); ionRatioError = calultateIonRatioError(name, input, threshold);
log.report("the ionization ration standard deviation (using threshold {}) is {}", threshold, ionRatioError); log.report("the ionization ration standard deviation (using threshold {}) is {}", threshold, ionRatioError);
} }
if (meta.getBoolean("printResult", false)) { if (meta.getBoolean("printResult", false)) {
PrintWriter writer = new PrintWriter(new OutputStreamWriter(buildActionOutput(context, name), Charset.forName("UTF-8"))); PrintWriter writer = new PrintWriter(new OutputStreamWriter(buildActionOutput(name), Charset.forName("UTF-8")));
// writer.println("*** FIT PARAMETERS ***"); // writer.println("*** FIT PARAMETERS ***");
input.print(writer); input.print(writer);
// for (Param param : pars.getSubSet(names).getParams()) { // for (Param param : pars.getSubSet(names).getParams()) {
@ -204,14 +203,14 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
return exProb / ionProb; return exProb / ionProb;
} }
public double calultateIonRatioError(Context context, String dataNeme, FitState state, double threshold) { public double calultateIonRatioError(String dataNeme, FitState state, double threshold) {
ParamSet parameters = state.getParameters().getSubSet(new String[]{"exPos", "ionPos", "exW", "ionW", "exIonRatio"}); ParamSet parameters = state.getParameters().getSubSet(new String[]{"exPos", "ionPos", "exW", "ionW", "exIonRatio"});
NamedMatrix covariance = state.getCovariance(); NamedMatrix covariance = state.getCovariance();
return calultateIonRatioError(context, dataNeme, parameters, covariance, threshold); return calultateIonRatioError(dataNeme, parameters, covariance, threshold);
} }
@SuppressWarnings("Unchecked") @SuppressWarnings("Unchecked")
public double calultateIonRatioError(Context context, String name, NamedValueSet parameters, NamedMatrix covariance, double threshold) { public double calultateIonRatioError(String name, NamedValueSet parameters, NamedMatrix covariance, double threshold) {
int number = 10000; int number = 10000;
double[] res = new GaussianParameterGenerator(parameters, covariance) double[] res = new GaussianParameterGenerator(parameters, covariance)
@ -223,7 +222,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
Histogram hist = new Histogram(0.3, 0.5, 0.002); Histogram hist = new Histogram(0.3, 0.5, 0.002);
hist.fill(res); hist.fill(res);
XYPlotFrame frame = (XYPlotFrame) PlotsPlugin.buildFrom(context) XYPlotFrame frame = (XYPlotFrame) PlotsPlugin.buildFrom(getContext())
.buildPlotFrame(getName(), name + ".ionRatio", .buildPlotFrame(getName(), name + ".ionRatio",
new MetaBuilder("plot").setValue("plotTitle", "Ion ratio Distribution for " + name) new MetaBuilder("plot").setValue("plotTitle", "Ion ratio Distribution for " + name)
); );

View File

@ -46,7 +46,7 @@ public class SlicingAction extends OneToOneAction<NMFile, NMFile> {
} }
@Override @Override
protected NMFile execute(Context context, Reportable log, String name, Laminate meta, NMFile source) throws ContentException { protected NMFile execute(Reportable log, String name, Laminate meta, NMFile source) throws ContentException {
boolean normalize; boolean normalize;
Map<String, Pair<Integer, Integer>> slicingConfig; Map<String, Pair<Integer, Integer>> slicingConfig;
@ -70,7 +70,7 @@ public class SlicingAction extends OneToOneAction<NMFile, NMFile> {
SlicedData sData = new SlicedData(source, slicingConfig, normalize); SlicedData sData = new SlicedData(source, slicingConfig, normalize);
OutputStream stream = buildActionOutput(context, name); OutputStream stream = buildActionOutput(name);
ColumnedDataWriter.writeDataSet(stream, sData, null); ColumnedDataWriter.writeDataSet(stream, sData, null);

View File

@ -46,11 +46,11 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
@Override @Override
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
protected List<DataNode<Table>> buildGroups(Context context, DataNode input, Meta actionMeta) { protected List<DataNode<Table>> buildGroups(DataNode input, Meta actionMeta) {
Meta meta = inputMeta(context, input.meta(), actionMeta); Meta meta = inputMeta(input.meta(), actionMeta);
List<DataNode<Table>> groups; List<DataNode<Table>> groups;
if (meta.hasValue("grouping.byValue")) { if (meta.hasValue("grouping.byValue")) {
groups = super.buildGroups(context, input, actionMeta); groups = super.buildGroups(input, actionMeta);
} else { } else {
groups = GroupBuilder.byValue(SUMMARY_NAME, meta.getString(SUMMARY_NAME, "summary")).group(input); groups = GroupBuilder.byValue(SUMMARY_NAME, meta.getString(SUMMARY_NAME, "summary")).group(input);
} }
@ -58,7 +58,7 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
} }
@Override @Override
protected Table execute(Context context, Reportable log, String nodeName, Map<String, FitState> input, Meta meta) { protected Table execute(Reportable log, String nodeName, Map<String, FitState> input, Meta meta) {
String[] parNames = meta.getStringArray("parnames"); String[] parNames = meta.getStringArray("parnames");
String[] names = new String[2 * parNames.length + 2]; String[] names = new String[2 * parNames.length + 2];
names[0] = "file"; names[0] = "file";
@ -108,11 +108,11 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
} }
@Override @Override
protected void afterGroup(Context context, Reportable log, String groupName, Meta outputMeta, Table output) { protected void afterGroup(Reportable log, String groupName, Meta outputMeta, Table output) {
OutputStream stream = buildActionOutput(context, groupName); OutputStream stream = buildActionOutput(groupName);
ColumnedDataWriter.writeDataSet(stream, output, groupName); ColumnedDataWriter.writeDataSet(stream, output, groupName);
super.afterGroup(context, log, groupName, outputMeta, output); super.afterGroup(log, groupName, outputMeta, output);
} }
} }

View File

@ -0,0 +1,89 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package inr.numass.tasks;
import hep.dataforge.actions.Action;
import hep.dataforge.context.Context;
import hep.dataforge.context.ProcessManager;
import hep.dataforge.data.DataNode;
import hep.dataforge.data.DataTree;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.Template;
import hep.dataforge.tables.Table;
import hep.dataforge.workspace.GenericTask;
import hep.dataforge.workspace.TaskModel;
import hep.dataforge.workspace.TaskState;
import inr.numass.actions.MergeDataAction;
import inr.numass.actions.PrepareDataAction;
import inr.numass.actions.ReadNumassStorageAction;
import inr.numass.storage.NumassData;
/**
*
* @author Alexander Nozik
*/
/**
 * Workspace task that reads raw Numass storage data, prepares per-point tables
 * and optionally merges them into combined tables.
 *
 * <p>Pipeline stages (each result is cached on the {@link TaskState} under a
 * stage key): "data" (raw storage read), "prepare" (tables), then either the
 * merged result or the prepared tables are published via {@code state.finish}.
 */
public class PrepareTask extends GenericTask {

    /*
     Example of the XML action configuration this task replaces:

    <action type="readStorage" uri="file://D:\Work\Numass\data\2016_04\T2_data\">
        <include pattern="Fill_2*"/>
        <exclude pattern="Fill_2_4_Empty*"/>
        <exclude pattern="Fill_2_1.set_1*"/>
        <exclude pattern="Fill_2_1.set_2*"/>
        <exclude pattern="Fill_2_1.set_3*"/>
        <exclude pattern="Fill_2_1.set_4*"/>
    </action>
    <action type="prepareData" lowerWindow="500" upperWindow="1700" deadTime="4.9534e-6 + 1.51139e-10*U">
        <underflow function = "1.0 + 15.216 * Math.exp( - U / 2477.46 )" threshold = "14000"/>
    </action>
    <action type="monitor" monitorPoint="${numass.monitorPoint}" monitorFile="${numass.setName}_monitor"/>
    <action type="merge" mergeName="${numass.setName}"/>
    */

    /**
     * Runs the read → prepare → (optional) merge pipeline.
     *
     * @param callback progress callback for the enclosing process
     * @param context  context used to run the delegated actions
     * @param state    task state; intermediate results are stored under "data" and "prepare"
     * @param config   task configuration; "data" and "prepare" nodes are required,
     *                 "merge" nodes are optional
     * @return the same {@code state}, finished with the final data node
     */
    @Override
    @SuppressWarnings("unchecked")
    protected TaskState transform(ProcessManager.Callback callback, Context context, TaskState state, Meta config) {
        //acquiring initial data. Data node could not be empty
        Meta dataMeta = Template.compileTemplate(config.getNode("data"), config);
        DataNode<NumassData> data = runAction(new ReadNumassStorageAction(), callback, context, DataNode.empty(), dataMeta);
        state.setData("data", data);
        //preparing table data
        Meta prepareMeta = Template.compileTemplate(config.getNode("prepare"), config);
        DataNode<Table> tables = runAction(new PrepareDataAction(), callback, context, data, prepareMeta);
        state.setData("prepare", tables);
        //merging if needed
        if (config.hasNode("merge")) {
            // Result contains all prepared tables plus each merge output under "merge.<name>"
            DataTree.Builder resultBuilder = DataTree.builder(Table.class);
            tables.dataStream().forEach(pair -> resultBuilder.putData(pair.getKey(), pair.getValue()));
            config.getNodes("merge").forEach(mergeNode -> {
                Meta mergeMeta = Template.compileTemplate(mergeNode, config);
                DataNode<Table> mergeData = runAction(new MergeDataAction(), callback, context, tables, mergeMeta);
                mergeData.dataStream().forEach(pair -> {
                    resultBuilder.putData("merge." + pair.getKey(), pair.getValue());
                });
            });
            // FIX: the merged tree was built but never published, so the task produced
            // no output whenever a "merge" node was configured; finish the state with it.
            state.finish(resultBuilder.build());
        } else {
            state.finish(tables);
        }
        return state;
    }

    // Runs a single action in the given context, attached to the parent process.
    private <T, R> DataNode<R> runAction(Action<T, R> action, ProcessManager.Callback callback, Context context, DataNode<T> data, Meta meta) {
        return action.withContext(context).withParentProcess(callback.processName()).run(data, meta);
    }

    @Override
    public void validate(TaskModel model) {
        // NOTE(review): empty guard — a model without a "data" node presumably should
        // fail validation; confirm the GenericTask.validate contract before throwing here.
        if (!model.meta().hasNode("data")) {
        }
    }

    @Override
    public String getName() {
        return "numass.prepare";
    }
}

View File

@ -0,0 +1,41 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package inr.numass.test;
import hep.dataforge.actions.ActionController;
import java.util.concurrent.CompletableFuture;
/**
*
* @author Alexander Nozik
*/
/**
 * Manual experiment with {@code ActionController}: a held computation is chained
 * with async continuations, then triggered by releasing the controller.
 */
public class TestLazyData {

    /**
     * @param args the command line arguments
     */
    public static void main(String[] args) {
        ActionController controller = new ActionController();

        // The initial supplier does not run until the controller is released.
        CompletableFuture<String> held = controller.hold(() -> {
            System.out.println(" I am initialized");
            return "abcd";
        });

        // Chain the remaining stages directly instead of reassigning a variable.
        CompletableFuture<String> pipeline = held
                .<String>thenCompose((String text) -> CompletableFuture.supplyAsync(() -> {
                    System.out.println(" I am capitalized");
                    return text.toUpperCase();
                }))
                .handleAsync((value, failure) -> {
                    System.out.println(" I am handled");
                    return "handled " + value;
                });

        System.out.println("Releasing hold");
        controller.release();
    }
}

View File

@ -159,6 +159,7 @@ public class NumassWorkbenchController implements Initializable, StagePaneHolder
buildContextPane(); buildContextPane();
context.getReport().addReportListener(new FXReportListener(logPane)); context.getReport().addReportListener(new FXReportListener(logPane));
// display plots iside workbench // display plots iside workbench
PlotsPlugin.buildFrom(context).setPlotHolderDelegate(this); PlotsPlugin.buildFrom(context).setPlotHolderDelegate(this);
} }

View File

@ -41,6 +41,7 @@ public class SetDirectionUtility {
return new File(context.io().getTmpDirectory(), FILE_NAME); return new File(context.io().getTmpDirectory(), FILE_NAME);
} }
@SuppressWarnings("unchecked")
public static synchronized void load(Context context) { public static synchronized void load(Context context) {
context.getLogger().info("Loading set direction utility"); context.getLogger().info("Loading set direction utility");
File file = cacheFile(context); File file = cacheFile(context);