Made actions completely immutable.

author Alexander Nozik 2016-12-10 15:26:04 +03:00
parent 0547285f4c
commit 0f981818dc
21 changed files with 106 additions and 83 deletions
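
Every action diff below follows the same pattern: the mutable withContext(...) binding is gone, and the Context is instead passed explicitly into execute(...) and into the context-dependent helpers (report, getReport, buildActionOutput), so the action object itself holds no mutable state. A minimal sketch of a OneToOneAction written against the new signatures; only the method shapes visible in this commit are used, while the Table import path, the class name and the body are illustrative assumptions:

import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.meta.Laminate;
import hep.dataforge.tables.Table; // assumed package: the Table import is not shown in this diff

import java.io.OutputStream;

// Sketch only: mirrors the post-commit signatures, not an actual file from this commit.
public class ExampleAction extends OneToOneAction<Table, Table> {

    @Override
    protected Table execute(Context context, String name, Table input, Laminate meta) {
        // The context arrives as an argument instead of being stored on the action,
        // so one action instance can be reused across contexts and threads.
        report(context, name, "Processing {}", name);

        // Context-dependent helpers now take the context per call as well.
        OutputStream out = buildActionOutput(context, name);

        return input; // a real action would build and return a new Table here
    }
}

Because nothing is bound to the instance, callers no longer need withContext(...) before run(...), which is exactly what the workspace task diffs at the end of this commit remove.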

View File

@@ -16,6 +16,7 @@
package hep.dataforge.plotfit;
import hep.dataforge.actions.OneToOneAction;
+import hep.dataforge.context.Context;
import hep.dataforge.description.NodeDef;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef;
@@ -44,11 +45,11 @@ import java.util.stream.StreamSupport;
public class PlotFitResultAction extends OneToOneAction<FitState, FitState> {
@Override
-protected FitState execute(String name, Laminate metaData, FitState input) {
+protected FitState execute(Context context, String name, FitState input, Laminate metaData) {
PointSource data = input.getDataSet();
if (!(input.getModel() instanceof XYModel)) {
-getReport(name).reportError("The fit model should be instance of XYModel for this action. Action failed!");
+getReport(context, name).reportError("The fit model should be instance of XYModel for this action. Action failed!");
return input;
}
XYModel model = (XYModel) input.getModel();
@@ -65,7 +66,7 @@ public class PlotFitResultAction extends OneToOneAction<FitState, FitState> {
Function<Double, Double> function = (x) -> model.getSpectrum().value(x, input.getParameters());
XYPlotFrame frame = (XYPlotFrame) PlotsPlugin
-.buildFrom(getContext()).buildPlotFrame(getName(), name,
+.buildFrom(context).buildPlotFrame(getName(), name,
metaData.getMeta("plot", Meta.empty()));
PlottableXYFunction fit = new PlottableXYFunction("fit");

View File

@@ -6,6 +6,7 @@
package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
+import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.meta.Laminate;
import hep.dataforge.meta.Meta;
@@ -26,7 +27,7 @@ import java.util.List;
public class AdjustErrorsAction extends OneToOneAction<Table, Table> {
@Override
-protected Table execute(String name, Laminate meta, Table input) {
+protected Table execute(Context context, String name, Table input, Laminate meta) {
List<DataPoint> points = new ArrayList<>();
for (DataPoint dp : input) {
points.add(evalPoint(meta, dp));

View File

@@ -16,6 +16,7 @@
package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
+import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef;
import hep.dataforge.exceptions.ContentException;
@@ -39,8 +40,8 @@ import java.io.PrintWriter;
public class DebunchAction extends OneToOneAction<RawNMFile, RawNMFile> {
@Override
-protected RawNMFile execute(String name, Laminate meta, RawNMFile source) throws ContentException {
-report(name, "File {} started", source.getName());
+protected RawNMFile execute(Context context, String name, RawNMFile source, Laminate meta) throws ContentException {
+report(context, name, "File {} started", source.getName());
int upper = meta.getInt("upperchanel", RawNMPoint.MAX_CHANEL);
int lower = meta.getInt("lowerchanel", 0);
@@ -55,7 +56,7 @@ public class DebunchAction extends OneToOneAction<RawNMFile, RawNMFile> {
if (cr < maxCR) {
DebunchReport report = new FrameAnalizer(rejectionprob, framelength, lower, upper).debunchPoint(point);
-report(name, "Debunching file '{}', point '{}': {} percent events {} percent time in bunches",
+report(context, name, "Debunching file '{}', point '{}': {} percent events {} percent time in bunches",
source.getName(), point.getUset(), report.eventsFiltred() * 100, report.timeFiltred() * 100);
point = report.getPoint();
}
@@ -63,9 +64,9 @@ public class DebunchAction extends OneToOneAction<RawNMFile, RawNMFile> {
}).forEach((point) -> {
res.putPoint(point);
});
-report(name, "File {} completed", source.getName());
+report(context, name, "File {} completed", source.getName());
-getReport(name).print(new PrintWriter(buildActionOutput(name)));
+getReport(context, name).print(new PrintWriter(buildActionOutput(context, name)));
// res.configure(source.meta());
return res;

View File

@@ -16,6 +16,7 @@
package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
+import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.exceptions.ContentException;
import hep.dataforge.io.ColumnedDataWriter;
@@ -45,8 +46,8 @@ public class FindBorderAction extends OneToOneAction<NumassData, Table> {
private UnivariateFunction normCorrection = e -> 1 + 13.265 * Math.exp(-e / 2343.4);
@Override
-protected Table execute(String name, Laminate meta, NumassData source) throws ContentException {
-report(name, "File {} started", source.getName());
+protected Table execute(Context context, String name, NumassData source, Laminate meta) throws ContentException {
+report(context, name, "File {} started", source.getName());
int upperBorder = meta.getInt("upper", 4094);
int lowerBorder = meta.getInt("lower", 0);
@@ -56,7 +57,7 @@ public class FindBorderAction extends OneToOneAction<NumassData, Table> {
if (substractReference > 0) {
referencePoint = source.getByUset(substractReference);
if (referencePoint == null) {
-report(name, "Reference point {} not found", substractReference);
+report(context, name, "Reference point {} not found", substractReference);
}
}
@@ -65,11 +66,11 @@ public class FindBorderAction extends OneToOneAction<NumassData, Table> {
fill(dataBuilder, source, lowerBorder, upperBorder, referencePoint);
Table bData = dataBuilder.build();
-OutputStream stream = buildActionOutput(name);
+OutputStream stream = buildActionOutput(context, name);
ColumnedDataWriter.writeDataSet(stream, bData, String.format("%s : lower = %d upper = %d", name, lowerBorder, upperBorder));
-report(name, "File {} completed", source.getName());
+report(context, name, "File {} completed", source.getName());
return bData;
}

View File

@@ -1,6 +1,7 @@
package inr.numass.actions;
import hep.dataforge.actions.ManyToOneAction;
+import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.meta.Meta;
import inr.numass.storage.NMPoint;
@@ -18,7 +19,7 @@ import java.util.stream.IntStream;
public class JoinNumassDataAction extends ManyToOneAction<NumassData, NumassData> {
@Override
-protected NumassData execute(String nodeName, Map<String, NumassData> input, Meta meta) {
+protected NumassData execute(Context context, String nodeName, Map<String, NumassData> input, Meta meta) {
throw new UnsupportedOperationException("not implemented");
}

View File

@@ -17,6 +17,7 @@ package inr.numass.actions;
import hep.dataforge.actions.GroupBuilder;
import hep.dataforge.actions.ManyToOneAction;
+import hep.dataforge.context.Context;
import hep.dataforge.data.DataNode;
import hep.dataforge.description.NodeDef;
import hep.dataforge.description.TypedActionDef;
@@ -39,11 +40,11 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
@Override
@SuppressWarnings("unchecked")
-protected List<DataNode<Table>> buildGroups(DataNode input, Meta actionMeta) {
-Meta meta = inputMeta(input.meta(), actionMeta);
+protected List<DataNode<Table>> buildGroups(Context context, DataNode input, Meta actionMeta) {
+Meta meta = inputMeta(context, input.meta(), actionMeta);
List<DataNode<Table>> groups;
if (meta.hasValue("grouping.byValue")) {
-groups = super.buildGroups(input, actionMeta);
+groups = super.buildGroups(context, input, actionMeta);
} else {
groups = GroupBuilder.byValue(MERGE_NAME, meta.getString(MERGE_NAME, "merge")).group(input);
}
@@ -51,14 +52,14 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
}
@Override
-protected Table execute(String nodeName, Map<String, Table> data, Meta meta) {
+protected Table execute(Context context, String nodeName, Map<String, Table> data, Meta meta) {
Table res = mergeDataSets(nodeName, data.values());
return new ListTable(res.getFormat(), TableTransform.sort(res, "Uset", true));
}
@Override
-protected void afterGroup(String groupName, Meta outputMeta, Table output) {
-OutputStream stream = buildActionOutput(groupName);
+protected void afterGroup(Context context, String groupName, Meta outputMeta, Table output) {
+OutputStream stream = buildActionOutput(context, groupName);
ColumnedDataWriter.writeDataSet(stream, output, outputMeta.toString());
}

View File

@@ -16,6 +16,7 @@
package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
+import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef;
import hep.dataforge.exceptions.ContentException;
@@ -51,13 +52,13 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
//FIXME remove from state
@Override
-protected Table execute(String name, Laminate meta, Table sourceData) throws ContentException {
+protected Table execute(Context context, String name, Table sourceData, Laminate meta) throws ContentException {
double monitor = meta.getDouble("monitorPoint", Double.NaN);
TreeMap<Instant, DataPoint> index = getMonitorIndex(monitor, sourceData);
if (index.isEmpty()) {
-getReport(name).reportError("No monitor points found");
+getReport(context, name).reportError("No monitor points found");
return sourceData;
}
double norm = 0;
@@ -129,7 +130,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
// }
Table data = new ListTable(dataList);
-OutputStream stream = buildActionOutput(name);
+OutputStream stream = buildActionOutput(context, name);
ColumnedDataWriter.writeDataSet(stream, data, head);
@@ -186,15 +187,15 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
}
@Override
-protected void afterAction(String name, Table res, Laminate meta) {
-printMonitorData(meta);
-super.afterAction(name, res, meta);
+protected void afterAction(Context context, String name, Table res, Laminate meta) {
+printMonitorData(context, meta);
+super.afterAction(context, name, res, meta);
}
-private void printMonitorData(Meta meta) {
+private void printMonitorData(Context context, Meta meta) {
if (!monitorPoints.isEmpty()) {
String monitorFileName = meta.getString("monitorFile", "monitor");
-OutputStream stream = buildActionOutput(monitorFileName);
+OutputStream stream = buildActionOutput(context, monitorFileName);
ListTable data = new ListTable(monitorPoints);
ColumnedDataWriter.writeDataSet(stream, TableTransform.sort(data, "Timestamp", true), "Monitor points", monitorNames);
}

View File

@@ -16,6 +16,7 @@
package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
+import hep.dataforge.context.Context;
import hep.dataforge.description.NodeDef;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef;
@@ -61,7 +62,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
}
@Override
-protected ListTable execute(String name, Laminate meta, NumassData dataFile) {
+protected ListTable execute(Context context, String name, NumassData dataFile, Laminate meta) {
// log.report("File %s started", dataFile.getName());
int upper = meta.getInt("upperWindow", RawNMPoint.MAX_CHANEL - 1);
@@ -136,7 +137,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
ListTable data = new ListTable(format, dataList);
-OutputStream stream = buildActionOutput(name);
+OutputStream stream = buildActionOutput(context, name);
ColumnedDataWriter.writeDataSet(stream, data, head);
// log.logString("File %s completed", dataFile.getName());

View File

@@ -16,6 +16,7 @@
package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
+import hep.dataforge.context.Context;
import hep.dataforge.data.binary.Binary;
import hep.dataforge.description.NodeDef;
import hep.dataforge.description.TypedActionDef;
@@ -41,19 +42,19 @@ import static inr.numass.NumassIO.getNumassData;
public class ReadNumassDataAction extends OneToOneAction<Binary, NMFile> {
@Override
-protected NMFile execute(String name, Laminate meta, Binary source) throws ContentException {
+protected NMFile execute(Context context, String name, Binary source, Laminate meta) throws ContentException {
// log.logString("File '%s' started", source.getName());
RawNMFile raw = getNumassData(source, meta);
if (meta.getBoolean("paw", false)) {
-raw.generatePAW(buildActionOutput(name + ".paw"));
+raw.generatePAW(buildActionOutput(context, name + ".paw"));
}
if (meta.getNodeNames(false).contains("debunch")) {
DebunchAction debunch = new DebunchAction();
Laminate laminate = new Laminate(meta.getMeta("debunch"))
-.setValueContext(getContext())
+.setValueContext(context)
.setDescriptor(debunch.getDescriptor());
-raw = debunch.execute(name, laminate, raw);
+raw = debunch.execute(context, name, raw, laminate);
}
NMFile result = new NMFile(raw);

View File

@@ -6,6 +6,7 @@
package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
+import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.meta.Laminate;
@@ -32,7 +33,7 @@ import java.util.stream.Collectors;
public class ShowEnergySpectrumAction extends OneToOneAction<NumassData, Table> {
@Override
-protected Table execute(String name, Laminate inputMeta, NumassData input) {
+protected Table execute(Context context, String name, NumassData input, Laminate inputMeta) {
int binning = inputMeta.getInt("binning", 20);
boolean normalize = inputMeta.getBoolean("normalize", true);
List<NMPoint> points = input.getNMPoints();
@@ -75,14 +76,14 @@ public class ShowEnergySpectrumAction extends OneToOneAction<NumassData, Table>
builder.row(mb.build());
});
-OutputStream out = buildActionOutput(name);
+OutputStream out = buildActionOutput(context, name);
Table table = builder.build();
ColumnedDataWriter.writeDataSet(out, table, inputMeta.toString());
if (inputMeta.hasMeta("plot") || inputMeta.getBoolean("plot", false)) {
XYPlotFrame frame = (XYPlotFrame) PlotsPlugin
-.buildFrom(getContext()).buildPlotFrame(getName(), name,
+.buildFrom(context).buildPlotFrame(getName(), name,
inputMeta.getMeta("plot", Meta.empty()));
fillDetectorData(valueMap).forEach(frame::add);

View File

@@ -16,6 +16,7 @@
package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
+import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.io.PrintFunction;
@@ -113,7 +114,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
}
@Override
-protected FitState execute(String name, Laminate meta, FitState input) {
+protected FitState execute(Context context, String name, FitState input, Laminate meta) {
ParamSet pars = input.getParameters();
if (!pars.names().contains(names)) {
LoggerFactory.getLogger(getClass()).error("Wrong input FitState. Must be loss spectrum fit.");
@@ -122,7 +123,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
UnivariateFunction scatterFunction;
boolean calculateRatio = false;
-XYPlotFrame frame = (XYPlotFrame) PlotsPlugin.buildFrom(getContext())
+XYPlotFrame frame = (XYPlotFrame) PlotsPlugin.buildFrom(context)
.buildPlotFrame(getName(), name + ".loss",
new MetaBuilder("plot")
.setValue("plotTitle", "Differential scattering crossection for " + name)
@@ -149,13 +150,13 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
if (calculateRatio) {
threshold = meta.getDouble("ionThreshold", 17);
ionRatio = calcultateIonRatio(pars, threshold);
-report(name, "The ionization ratio (using threshold {}) is {}", threshold, ionRatio);
-ionRatioError = calultateIonRatioError(name, input, threshold);
-report(name, "the ionization ration standard deviation (using threshold {}) is {}", threshold, ionRatioError);
+report(context, name, "The ionization ratio (using threshold {}) is {}", threshold, ionRatio);
+ionRatioError = calultateIonRatioError(context, name, input, threshold);
+report(context, name, "the ionization ration standard deviation (using threshold {}) is {}", threshold, ionRatioError);
}
if (meta.getBoolean("printResult", false)) {
-PrintWriter writer = new PrintWriter(new OutputStreamWriter(buildActionOutput(name), Charset.forName("UTF-8")));
+PrintWriter writer = new PrintWriter(new OutputStreamWriter(buildActionOutput(context, name), Charset.forName("UTF-8")));
// writer.println("*** FIT PARAMETERS ***");
input.print(writer);
// for (Param param : pars.getSubSet(names).getParams()) {
@@ -245,14 +246,14 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
return exProb / ionProb;
}
-public double calultateIonRatioError(String dataNeme, FitState state, double threshold) {
+public double calultateIonRatioError(Context context, String dataNeme, FitState state, double threshold) {
ParamSet parameters = state.getParameters().getSubSet("exPos", "ionPos", "exW", "ionW", "exIonRatio");
NamedMatrix covariance = state.getCovariance();
-return calultateIonRatioError(dataNeme, parameters, covariance, threshold);
+return calultateIonRatioError(context, dataNeme, parameters, covariance, threshold);
}
@SuppressWarnings("Unchecked")
-public double calultateIonRatioError(String name, NamedValueSet parameters, NamedMatrix covariance, double threshold) {
+public double calultateIonRatioError(Context context, String name, NamedValueSet parameters, NamedMatrix covariance, double threshold) {
int number = 10000;
double[] res = new GaussianParameterGenerator(parameters, covariance)
@@ -264,7 +265,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
Histogram hist = new Histogram(0.3, 0.5, 0.002);
hist.fill(res);
-XYPlotFrame frame = (XYPlotFrame) PlotsPlugin.buildFrom(getContext())
+XYPlotFrame frame = (XYPlotFrame) PlotsPlugin.buildFrom(context)
.buildPlotFrame(getName(), name + ".ionRatio",
new MetaBuilder("plot").setValue("plotTitle", "Ion ratio Distribution for " + name)
);

View File

@@ -16,6 +16,7 @@
package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
+import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.exceptions.ContentException;
import hep.dataforge.io.ColumnedDataWriter;
@@ -44,7 +45,7 @@ public class SlicingAction extends OneToOneAction<NMFile, NMFile> {
}
@Override
-protected NMFile execute(String name, Laminate meta, NMFile source) throws ContentException {
+protected NMFile execute(Context context, String name, NMFile source, Laminate meta) throws ContentException {
boolean normalize;
Map<String, Pair<Integer, Integer>> slicingConfig;
@@ -64,15 +65,15 @@ public class SlicingAction extends OneToOneAction<NMFile, NMFile> {
if (slicingConfig == null) {
throw new RuntimeException("Slice configuration not defined");
}
-report(name, "File {} started", source.getName());
+report(context, name, "File {} started", source.getName());
SlicedData sData = new SlicedData(source, slicingConfig, normalize);
-OutputStream stream = buildActionOutput(name);
+OutputStream stream = buildActionOutput(context, name);
ColumnedDataWriter.writeDataSet(stream, sData, null);
-report(name, "File {} completed", source.getName());
+report(context, name, "File {} completed", source.getName());
return source;
}

View File

@@ -6,6 +6,7 @@
package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
+import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.io.ColumnedDataReader;
import hep.dataforge.io.ColumnedDataWriter;
@@ -27,10 +28,10 @@ import java.util.Optional;
public class SubstractSpectrumAction extends OneToOneAction<Table, Table> {
@Override
-protected Table execute(String name, Laminate inputMeta, Table input) {
+protected Table execute(Context context, String name, Table input, Laminate inputMeta) {
try {
String referencePath = inputMeta. getString("file", "empty.dat");
-File referenceFile = getContext().io().getFile(referencePath);
+File referenceFile = context.io().getFile(referencePath);
Table referenceTable = new ColumnedDataReader(referenceFile).toTable();
ListTable.Builder builder = new ListTable.Builder(input.getFormat());
input.stream().forEach(point -> {
@@ -41,13 +42,13 @@ public class SubstractSpectrumAction extends OneToOneAction<Table, Table> {
pointBuilder.putValue("CR", Math.max(0, point.getDouble("CR") - referencePoint.get().getDouble("CR")));
pointBuilder.putValue("CRerr", Math.sqrt(Math.pow(point.getDouble("CRerr"), 2d) + Math.pow(referencePoint.get().getDouble("CRerr"), 2d)));
} else {
-report(name, "No reference point found for Uset = {}", point.getDouble("Uset"));
+report(context, name, "No reference point found for Uset = {}", point.getDouble("Uset"));
}
builder.row(pointBuilder.build());
});
Table res = builder.build();
-OutputStream stream = buildActionOutput(name);
+OutputStream stream = buildActionOutput(context, name);
ColumnedDataWriter.writeDataSet(stream, res, inputMeta.toString());
return res;
} catch (IOException ex) {

View File

@@ -17,6 +17,7 @@ package inr.numass.actions;
import hep.dataforge.actions.GroupBuilder;
import hep.dataforge.actions.ManyToOneAction;
+import hep.dataforge.context.Context;
import hep.dataforge.data.DataNode;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef;
@@ -42,11 +43,11 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
@Override
@SuppressWarnings("unchecked")
-protected List<DataNode<Table>> buildGroups(DataNode input, Meta actionMeta) {
-Meta meta = inputMeta(input.meta(), actionMeta);
+protected List<DataNode<Table>> buildGroups(Context context, DataNode input, Meta actionMeta) {
+Meta meta = inputMeta(context, input.meta(), actionMeta);
List<DataNode<Table>> groups;
if (meta.hasValue("grouping.byValue")) {
-groups = super.buildGroups(input, actionMeta);
+groups = super.buildGroups(context, input, actionMeta);
} else {
groups = GroupBuilder.byValue(SUMMARY_NAME, meta.getString(SUMMARY_NAME, "summary")).group(input);
}
@@ -54,7 +55,7 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
}
@Override
-protected Table execute(String nodeName, Map<String, FitState> input, Meta meta) {
+protected Table execute(Context context,String nodeName, Map<String, FitState> input, Meta meta) {
String[] parNames;
if (meta.hasValue("parnames")) {
parNames = meta.getStringArray("parnames");
@@ -109,11 +110,11 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
}
@Override
-protected void afterGroup(String groupName, Meta outputMeta, Table output) {
-OutputStream stream = buildActionOutput(groupName);
+protected void afterGroup(Context context, String groupName, Meta outputMeta, Table output) {
+OutputStream stream = buildActionOutput(context, groupName);
ColumnedDataWriter.writeDataSet(stream, output, groupName);
-super.afterGroup(groupName, outputMeta, output);
+super.afterGroup(context, groupName, outputMeta, output);
}
}
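
The ManyToOneAction overrides change the same way: buildGroups, the group-level execute and afterGroup all receive the Context explicitly. A short sketch under the same assumptions as above (Table import path assumed, class name and bodies illustrative):

import hep.dataforge.actions.ManyToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.meta.Meta;
import hep.dataforge.tables.Table; // assumed package, as in the earlier sketch

import java.util.Map;

// Sketch only: group-level hooks now take the Context as their first parameter.
public class ExampleSummaryAction extends ManyToOneAction<Table, Table> {

    @Override
    protected Table execute(Context context, String nodeName, Map<String, Table> input, Meta meta) {
        // A real implementation would merge the grouped tables; here we just pass one through.
        return input.values().iterator().next();
    }

    @Override
    protected void afterGroup(Context context, String groupName, Meta outputMeta, Table output) {
        // Any context-dependent output (buildActionOutput, reporting) is requested per call.
        super.afterGroup(context, groupName, outputMeta, output);
    }
}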

View File

@@ -12,6 +12,7 @@ import org.apache.commons.math3.random.RandomGenerator;
import java.util.ArrayList;
import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import static java.lang.Math.max;
@@ -29,6 +30,7 @@ public class PileUpSimulator {
private final List<NMEvent> registred = new ArrayList<>();
private Supplier<NMEvent> generator;
private double uSet = 0;
+private AtomicInteger doublePileup = new AtomicInteger(0);
public PileUpSimulator(double length, RandomGenerator rnd, Supplier<NMEvent> sup) {
this.rnd = rnd;
@@ -127,16 +129,21 @@ public class PileUpSimulator {
registred.add(next);
lastRegisteredTime = next.getTime();
pileupFlag = false;
-} else if (pileup(delay) && !pileupFlag) {
-//pileup event
-short newChannel = pileupChannel(delay, next.getChanel(), next.getChanel());
-NMEvent newEvent = new NMEvent(newChannel, next.getTime());
-//replace already registered event by event with new channel
-registred.remove(registred.size() - 1);
-registred.add(newEvent);
-pileup.add(newEvent);
-//do not change DAQ close time
-pileupFlag = true; // up the flag to avoid secondary pileup
+} else if (pileup(delay)) {
+if(pileupFlag){
+//increase double pileup stack
+doublePileup.incrementAndGet();
+} else {
+//pileup event
+short newChannel = pileupChannel(delay, next.getChanel(), next.getChanel());
+NMEvent newEvent = new NMEvent(newChannel, next.getTime());
+//replace already registered event by event with new channel
+registred.remove(registred.size() - 1);
+registred.add(newEvent);
+pileup.add(newEvent);
+//do not change DAQ close time
+pileupFlag = true; // up the flag to avoid secondary pileup
+}
} else {
// second event not registered, DAQ closed
pileupFlag = false;

View File

@@ -8,6 +8,7 @@ package inr.numass.workspace;
import hep.dataforge.actions.Action;
import hep.dataforge.actions.ManyToOneAction;
import hep.dataforge.computation.ProgressCallback;
+import hep.dataforge.context.Context;
import hep.dataforge.data.DataNode;
import hep.dataforge.data.DataSet;
import hep.dataforge.description.TypedActionDef;
@@ -33,10 +34,10 @@ public class NumassFitScanSummaryTask extends AbstractTask<Table> {
@Override
protected DataNode<Table> run(TaskModel model, ProgressCallback callback, DataNode<?> data) {
DataSet.Builder<Table> builder = DataSet.builder(Table.class);
-Action<FitState, Table> action = new FitSummaryAction().withContext(model.getWorkspace().getContext());
+Action<FitState, Table> action = new FitSummaryAction();
DataNode<FitState> input = data.getCheckedNode("fitscan", FitState.class);
input.nodeStream().filter(it -> it.dataSize(false) > 0).forEach(node ->
-builder.putData(node.getName(), action.run(node, model.meta()).getData()));
+builder.putData(node.getName(), action.run(model.getContext(), node, model.meta()).getData()));
return builder.build();
}
@@ -56,7 +57,7 @@ public class NumassFitScanSummaryTask extends AbstractTask<Table> {
private class FitSummaryAction extends ManyToOneAction<FitState, Table> {
@Override
-protected Table execute(String nodeName, Map<String, FitState> input, Meta meta) {
+protected Table execute(Context context, String nodeName, Map<String, FitState> input, Meta meta) {
ListTable.Builder builder = new ListTable.Builder("msterile2", "U2", "U2err", "U2limit", "E0", "trap");
input.forEach((key, fitRes) -> {
ParamSet pars = fitRes.getParameters();
@@ -80,7 +81,7 @@ public class NumassFitScanSummaryTask extends AbstractTask<Table> {
Table res = TableTransform.sort(builder.build(), "msterile2", true);
-OutputStream stream = buildActionOutput(nodeName);
+OutputStream stream = buildActionOutput(context, nodeName);
ColumnedDataWriter.writeDataSet(stream, res, "Sterile neutrino mass scan summary");

View File

@@ -41,7 +41,7 @@ public class NumassFitScanTask extends AbstractTask<FitState> {
} else {
scanValues = config.getValue("scan.values", Value.of("[2.5e5, 1e6, 2.25e6, 4e6, 6.25e6, 9e6]"));
}
-Action<Table, FitState> action = new FitAction().withContext(model.getContext());
+Action<Table, FitState> action = new FitAction();
DataTree.Builder<FitState> resultBuilder = DataTree.builder(FitState.class);
DataNode<Table> sourceNode = data.getCheckedNode("prepare", Table.class);
@@ -66,7 +66,7 @@ public class NumassFitScanTask extends AbstractTask<FitState> {
.filter(par -> par.getString("name") == scanParameter).forEach(par -> par.setValue("value", val));
}
// Data<Table> newData = new Data<Table>(data.getGoal(),data.type(),overrideMeta);
-DataNode node = action.run(DataNode.of("fit_" + i, d, Meta.empty()), overrideMeta);
+DataNode node = action.run(model.getContext(), DataNode.of("fit_" + i, d, Meta.empty()), overrideMeta);
resultBuilder.putData(d.getName() + ".fit_" + i, node.getData());
}
});

View File

@@ -37,7 +37,7 @@ public class NumassFitSummaryTask extends SingleActionTask<FitState, Table> {
@Override
protected Action<FitState, Table> getAction(TaskModel model) {
-return new SummaryAction().withContext(model.getContext());
+return new SummaryAction();
}
@Override

View File

@@ -52,7 +52,7 @@ public class NumassFitTask extends SingleActionTask<Table, FitState> {
@Override
protected Action getAction(TaskModel model) {
-Action action = new FitAction().withContext(model.getContext());
+Action action = new FitAction();
if (model.meta().getBoolean("fit.plot", false)) {
return ActionUtils.compose(action, new PlotFitResultAction());
} else {

View File

@@ -125,7 +125,7 @@ public class NumassPrepareTask extends AbstractTask<Table> {
}
private <T, R> DataNode<R> runAction(GenericAction<T, R> action, ProgressCallback callback, Context context, DataNode<T> data, Meta meta) {
-return action.withContext(context).run(data, meta);
+return action.run(context, data, meta);
}
@Override
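
On the calling side the workspace tasks stop configuring actions with withContext(...) and instead hand the context to run(...), as the runAction change above shows. A hedged before/after sketch of that helper (the GenericAction import path is an assumption, the rest follows the diff):

import hep.dataforge.actions.GenericAction; // assumed package for GenericAction
import hep.dataforge.context.Context;
import hep.dataforge.data.DataNode;
import hep.dataforge.meta.Meta;

public final class RunActionSketch {

    // Old style, removed by this commit: action.withContext(context).run(data, meta);
    // New style: the context is a plain argument of run(), so the action object stays immutable.
    static <T, R> DataNode<R> runAction(GenericAction<T, R> action, Context context,
                                        DataNode<T> data, Meta meta) {
        return action.run(context, data, meta);
    }

    private RunActionSketch() {
    }
}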

View File

@@ -2,6 +2,7 @@ package inr.numass.workspace;
import hep.dataforge.actions.Action;
import hep.dataforge.actions.OneToOneAction;
+import hep.dataforge.context.Context;
import hep.dataforge.data.DataNode;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.meta.Laminate;
@@ -45,7 +46,7 @@ public class NumassTableFilterTask extends SingleActionTask<Table, Table> {
@TypedActionDef(name = "filterTable", inputType = Table.class, outputType = Table.class)
private class FilterTableAction extends OneToOneAction<Table, Table> {
@Override
-protected Table execute(String name, Laminate inputMeta, Table input) {
+protected Table execute(Context context, String name, Table input, Laminate inputMeta) {
double uLo = inputMeta.getDouble("filter.from", 0);
double uHi = inputMeta.getDouble("filter.to", Double.POSITIVE_INFINITY);
getLogger(inputMeta).debug("Filtering finished");