Grind dsl refinement + task & workspace fixes

parent 2cfc74dea8
commit e1d0c50745
@@ -16,22 +16,21 @@
 package hep.dataforge.plotfit;
 
 import hep.dataforge.actions.OneToOneAction;
-import hep.dataforge.context.Context;
-import hep.dataforge.stat.fit.FitState;
-import hep.dataforge.stat.models.XYModel;
 import hep.dataforge.description.NodeDef;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.description.ValueDef;
 import hep.dataforge.exceptions.ContentException;
-import hep.dataforge.io.reports.Reportable;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.plots.PlotsPlugin;
 import hep.dataforge.plots.XYPlotFrame;
 import hep.dataforge.plots.data.PlottableData;
 import hep.dataforge.plots.data.PlottableXYFunction;
+import hep.dataforge.stat.fit.FitState;
+import hep.dataforge.stat.models.XYModel;
 import hep.dataforge.tables.PointSource;
 import hep.dataforge.tables.XYAdapter;
 
 import java.util.function.Function;
 import java.util.stream.StreamSupport;
 
@@ -45,11 +44,11 @@ import java.util.stream.StreamSupport;
 public class PlotFitResultAction extends OneToOneAction<FitState, FitState> {
 
     @Override
-    protected FitState execute(Reportable log, String name, Laminate metaData, FitState input) {
+    protected FitState execute(String name, Laminate metaData, FitState input) {
 
         PointSource data = input.getDataSet();
         if (!(input.getModel() instanceof XYModel)) {
-            log.reportError("The fit model should be instance of XYModel for this action. Action failed!");
+            getReport(name).reportError("The fit model should be instance of XYModel for this action. Action failed!");
             return input;
         }
         XYModel model = (XYModel) input.getModel();
@@ -7,13 +7,13 @@ package inr.numass.actions;
 
 import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.description.TypedActionDef;
-import hep.dataforge.io.reports.Reportable;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.ListTable;
 import hep.dataforge.tables.MapPoint;
 import hep.dataforge.tables.Table;
 
 import java.util.ArrayList;
 import java.util.List;
 
@@ -26,7 +26,7 @@ import java.util.List;
 public class AdjustErrorsAction extends OneToOneAction<Table, Table> {
 
     @Override
-    protected Table execute(Reportable log, String name, Laminate meta, Table input) {
+    protected Table execute(String name, Laminate meta, Table input) {
         List<DataPoint> points = new ArrayList<>();
         for (DataPoint dp : input) {
             points.add(evalPoint(meta, dp));
@@ -19,16 +19,15 @@ import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.description.ValueDef;
 import hep.dataforge.exceptions.ContentException;
-import hep.dataforge.io.reports.Reportable;
 import hep.dataforge.meta.Laminate;
-import inr.numass.storage.RawNMFile;
-import inr.numass.storage.RawNMPoint;
 import inr.numass.debunch.DebunchReport;
 import inr.numass.debunch.FrameAnalizer;
+import inr.numass.storage.RawNMFile;
+import inr.numass.storage.RawNMPoint;
 
 import java.io.PrintWriter;
 
 /**
- *
  * @author Darksnake
  */
 @TypedActionDef(name = "debunch", inputType = RawNMFile.class, outputType = RawNMFile.class)
@@ -40,8 +39,8 @@ import java.io.PrintWriter;
 public class DebunchAction extends OneToOneAction<RawNMFile, RawNMFile> {
 
     @Override
-    protected RawNMFile execute(Reportable log, String name, Laminate meta, RawNMFile source) throws ContentException {
-        log.report("File {} started", source.getName());
+    protected RawNMFile execute(String name, Laminate meta, RawNMFile source) throws ContentException {
+        report(name, "File {} started", source.getName());
 
         int upper = meta.getInt("upperchanel", RawNMPoint.MAX_CHANEL);
         int lower = meta.getInt("lowerchanel", 0);
@@ -56,7 +55,7 @@ public class DebunchAction extends OneToOneAction<RawNMFile, RawNMFile> {
             if (cr < maxCR) {
                 DebunchReport report = new FrameAnalizer(rejectionprob, framelength, lower, upper).debunchPoint(point);
 
-                log.report("Debunching file '{}', point '{}': {} percent events {} percent time in bunches",
+                report(name, "Debunching file '{}', point '{}': {} percent events {} percent time in bunches",
                         source.getName(), point.getUset(), report.eventsFiltred() * 100, report.timeFiltred() * 100);
                 point = report.getPoint();
             }
@@ -64,9 +63,9 @@ public class DebunchAction extends OneToOneAction<RawNMFile, RawNMFile> {
         }).forEach((point) -> {
             res.putPoint(point);
         });
-        log.report("File {} completed", source.getName());
+        report(name, "File {} completed", source.getName());
 
-        log.getReport().print(new PrintWriter(buildActionOutput(name)));
+        getReport(name).print(new PrintWriter(buildActionOutput(name)));
 
 //        res.configure(source.meta());
         return res;
@@ -16,11 +16,9 @@
 package inr.numass.actions;
 
 import hep.dataforge.actions.OneToOneAction;
-import hep.dataforge.context.Context;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.exceptions.ContentException;
 import hep.dataforge.io.ColumnedDataWriter;
-import hep.dataforge.io.reports.Reportable;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.tables.ListTable;
 import hep.dataforge.tables.MapPoint;
@@ -29,13 +27,13 @@ import hep.dataforge.values.Value;
 import inr.numass.storage.NMFile;
 import inr.numass.storage.NMPoint;
 import inr.numass.storage.NumassData;
+import org.apache.commons.math3.analysis.UnivariateFunction;
 
 import java.io.OutputStream;
 import java.util.HashMap;
 import java.util.Map;
-import org.apache.commons.math3.analysis.UnivariateFunction;
 
 /**
- *
  * @author Darksnake
  */
 @TypedActionDef(name = "findBorder", inputType = NMFile.class, outputType = Table.class)
@@ -47,8 +45,8 @@ public class FindBorderAction extends OneToOneAction<NumassData, Table> {
     private UnivariateFunction normCorrection = e -> 1 + 13.265 * Math.exp(-e / 2343.4);
 
     @Override
-    protected Table execute(Reportable log, String name, Laminate meta, NumassData source) throws ContentException {
-        log.report("File {} started", source.getName());
+    protected Table execute(String name, Laminate meta, NumassData source) throws ContentException {
+        report(name, "File {} started", source.getName());
 
         int upperBorder = meta.getInt("upper", 4094);
         int lowerBorder = meta.getInt("lower", 0);
@@ -58,7 +56,7 @@ public class FindBorderAction extends OneToOneAction<NumassData, Table> {
         if (substractReference > 0) {
             referencePoint = source.getByUset(substractReference);
             if (referencePoint == null) {
-                log.report("Reference point {} not found", substractReference);
+                report(name, "Reference point {} not found", substractReference);
             }
         }
 
@@ -71,7 +69,7 @@ public class FindBorderAction extends OneToOneAction<NumassData, Table> {
 
         ColumnedDataWriter.writeDataSet(stream, bData, String.format("%s : lower = %d upper = %d", name, lowerBorder, upperBorder));
 
-        log.report("File {} completed", source.getName());
+        report(name, "File {} completed", source.getName());
         return bData;
     }
 
@@ -21,7 +21,6 @@ import hep.dataforge.data.DataNode;
 import hep.dataforge.description.NodeDef;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.io.ColumnedDataWriter;
-import hep.dataforge.io.reports.Reportable;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.meta.MetaBuilder;
 import hep.dataforge.tables.*;
@@ -53,13 +52,13 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
     }
 
     @Override
-    protected Table execute(Reportable log, String nodeName, Map<String, Table> data, Meta meta) {
+    protected Table execute(String nodeName, Map<String, Table> data, Meta meta) {
         Table res = mergeDataSets(nodeName, data.values());
         return new ListTable(res.getFormat(), res.sort("Uset", true));
     }
 
     @Override
-    protected void afterGroup(Reportable log, String groupName, Meta outputMeta, Table output) {
+    protected void afterGroup(String groupName, Meta outputMeta, Table output) {
         OutputStream stream = buildActionOutput(groupName);
         ColumnedDataWriter.writeDataSet(stream, output, outputMeta.toString());
     }
@@ -20,7 +20,6 @@ import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.description.ValueDef;
 import hep.dataforge.exceptions.ContentException;
 import hep.dataforge.io.ColumnedDataWriter;
-import hep.dataforge.io.reports.Reportable;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.DataPoint;
@@ -56,13 +55,13 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
     //FIXME remove from state
 
     @Override
-    protected Table execute(Reportable log, String name, Laminate meta, Table sourceData) throws ContentException {
+    protected Table execute(String name, Laminate meta, Table sourceData) throws ContentException {
 
         double monitor = meta.getDouble("monitorPoint", Double.NaN);
 
         TreeMap<Instant, DataPoint> index = getMonitorIndex(monitor, sourceData);
         if (index.isEmpty()) {
-            log.reportError("No monitor points found");
+            getReport(name).reportError("No monitor points found");
             return sourceData;
         }
         double norm = 0;
@@ -6,9 +6,7 @@
 package inr.numass.actions;
 
 import hep.dataforge.actions.OneToOneAction;
-import hep.dataforge.context.Context;
 import hep.dataforge.description.TypedActionDef;
-import hep.dataforge.io.reports.Reportable;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
 import inr.numass.storage.NMPoint;
@@ -16,6 +14,7 @@ import inr.numass.storage.NumassData;
 import inr.numass.storage.RawNMPoint;
 import inr.numass.utils.PileUpSimulator;
 import inr.numass.utils.TritiumUtils;
 
 import java.time.Instant;
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
@@ -31,7 +30,7 @@ import java.util.Map;
 public class PileupSimulationAction extends OneToOneAction<NumassData, Map<String, NumassData>> {
 
     @Override
-    protected Map<String, NumassData> execute(Reportable log, String name, Laminate inputMeta, NumassData input) {
+    protected Map<String, NumassData> execute(String name, Laminate inputMeta, NumassData input) {
         int lowerChannel = inputMeta.getInt("lowerChannel", 1);
         int upperChannel = inputMeta.getInt("upperChannel", RawNMPoint.MAX_CHANEL - 1);
 
@@ -68,7 +68,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
     }
 
     @Override
-    protected ListTable execute(Reportable log, String name, Laminate meta, NumassData dataFile) {
+    protected ListTable execute(String name, Laminate meta, NumassData dataFile) {
 //        log.report("File %s started", dataFile.getName());
 
         int upper = meta.getInt("upperWindow", RawNMPoint.MAX_CHANEL - 1);
@@ -99,7 +99,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
             // count rate error after all corrections
             double crErr = TritiumUtils.countRateWithDeadTimeErr(point, a, b, deadTimeFunction.apply(point));
 
-            double correctionFactor = correction(log, point, meta);
+            double correctionFactor = correction(getReport(name), point, meta);
 
             cr = cr * correctionFactor;
             crErr = crErr * correctionFactor;
@@ -16,18 +16,17 @@
 package inr.numass.actions;
 
 import hep.dataforge.actions.OneToOneAction;
-import hep.dataforge.context.Context;
 import hep.dataforge.data.binary.Binary;
 import hep.dataforge.description.NodeDef;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.description.ValueDef;
 import hep.dataforge.exceptions.ContentException;
-import hep.dataforge.io.reports.Reportable;
 import hep.dataforge.meta.Laminate;
-import static inr.numass.NumassIO.getNumassData;
 import inr.numass.storage.NMFile;
 import inr.numass.storage.RawNMFile;
 
+import static inr.numass.NumassIO.getNumassData;
 
 /**
  *
  * @author Darksnake
@@ -42,7 +41,7 @@ import inr.numass.storage.RawNMFile;
 public class ReadNumassDataAction extends OneToOneAction<Binary, NMFile> {
 
     @Override
-    protected NMFile execute(Reportable log, String name, Laminate meta, Binary source) throws ContentException {
+    protected NMFile execute(String name, Laminate meta, Binary source) throws ContentException {
 //        log.logString("File '%s' started", source.getName());
         RawNMFile raw = getNumassData(source, meta);
         if (meta.getBoolean("paw", false)) {
@@ -54,7 +53,7 @@ public class ReadNumassDataAction extends OneToOneAction<Binary, NMFile> {
             Laminate laminate = new Laminate(meta.getNode("debunch"))
                     .setValueContext(getContext())
                     .setDescriptor(debunch.getDescriptor());
-            raw = debunch.execute(log, name, laminate, raw);
+            raw = debunch.execute(name, laminate, raw);
         }
 
         NMFile result = new NMFile(raw);
@@ -6,10 +6,8 @@
 package inr.numass.actions;
 
 import hep.dataforge.actions.OneToOneAction;
-import hep.dataforge.context.Context;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.io.ColumnedDataWriter;
-import hep.dataforge.io.reports.Reportable;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.meta.MetaBuilder;
@@ -17,21 +15,13 @@ import hep.dataforge.plots.PlotsPlugin;
 import hep.dataforge.plots.XYPlotFrame;
 import hep.dataforge.plots.XYPlottable;
 import hep.dataforge.plots.data.PlottableData;
-import hep.dataforge.tables.DataPoint;
-import hep.dataforge.tables.ListTable;
-import hep.dataforge.tables.MapPoint;
-import hep.dataforge.tables.Table;
-import hep.dataforge.tables.TableFormatBuilder;
-import hep.dataforge.tables.XYAdapter;
+import hep.dataforge.tables.*;
 import hep.dataforge.values.ValueType;
 import inr.numass.storage.NMPoint;
 import inr.numass.storage.NumassData;
 
 import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.stream.Collectors;
 
 /**
@@ -42,7 +32,7 @@ import java.util.stream.Collectors;
 public class ShowEnergySpectrumAction extends OneToOneAction<NumassData, Table> {
 
     @Override
-    protected Table execute(Reportable log, String name, Laminate inputMeta, NumassData input) {
+    protected Table execute(String name, Laminate inputMeta, NumassData input) {
         int binning = inputMeta.getInt("binning", 20);
         boolean normalize = inputMeta.getBoolean("normalize", true);
         List<NMPoint> points = input.getNMPoints();
@@ -19,7 +19,6 @@ import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.io.PrintFunction;
-import hep.dataforge.io.reports.Reportable;
 import hep.dataforge.maths.GridCalculator;
 import hep.dataforge.maths.NamedMatrix;
 import hep.dataforge.maths.integration.UnivariateIntegrator;
@@ -56,7 +55,6 @@ import java.nio.charset.Charset;
 import java.util.Arrays;
 
 /**
- *
  * @author darksnake
  */
 @TypedActionDef(name = "showLoss", inputType = FitState.class, outputType = FitState.class,
@@ -115,7 +113,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
     }
 
     @Override
-    protected FitState execute(Reportable log, String name, Laminate meta, FitState input) {
+    protected FitState execute(String name, Laminate meta, FitState input) {
         ParamSet pars = input.getParameters();
         if (!pars.names().contains(names)) {
             LoggerFactory.getLogger(getClass()).error("Wrong input FitState. Must be loss spectrum fit.");
@@ -127,7 +125,7 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
         XYPlotFrame frame = (XYPlotFrame) PlotsPlugin.buildFrom(getContext())
                 .buildPlotFrame(getName(), name + ".loss",
                         new MetaBuilder("plot")
                                 .setValue("plotTitle", "Differential scattering crossection for " + name)
                 );
         switch (input.getModel().meta().getString("name", "")) {
             case "scatter-variable":
@@ -151,9 +149,9 @@ public class ShowLossSpectrumAction extends OneToOneAction<FitState, FitState> {
         if (calculateRatio) {
             threshold = meta.getDouble("ionThreshold", 17);
             ionRatio = calcultateIonRatio(pars, threshold);
-            log.report("The ionization ratio (using threshold {}) is {}", threshold, ionRatio);
+            report(name, "The ionization ratio (using threshold {}) is {}", threshold, ionRatio);
             ionRatioError = calultateIonRatioError(name, input, threshold);
-            log.report("the ionization ration standard deviation (using threshold {}) is {}", threshold, ionRatioError);
+            report(name, "the ionization ration standard deviation (using threshold {}) is {}", threshold, ionRatioError);
         }
 
         if (meta.getBoolean("printResult", false)) {
@@ -16,23 +16,21 @@
 package inr.numass.actions;
 
 import hep.dataforge.actions.OneToOneAction;
-import hep.dataforge.context.Context;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.exceptions.ContentException;
 import hep.dataforge.io.ColumnedDataWriter;
-import hep.dataforge.io.reports.Reportable;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
 import inr.numass.storage.NMFile;
 import inr.numass.storage.RawNMPoint;
+import org.apache.commons.math3.util.Pair;
 
 import java.io.OutputStream;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import org.apache.commons.math3.util.Pair;
 
 /**
- *
  * @author Darksnake
 */
 @TypedActionDef(name = "slicing", inputType = NMFile.class, outputType = NMFile.class)
@@ -46,7 +44,7 @@ public class SlicingAction extends OneToOneAction<NMFile, NMFile> {
     }
 
     @Override
-    protected NMFile execute(Reportable log, String name, Laminate meta, NMFile source) throws ContentException {
+    protected NMFile execute(String name, Laminate meta, NMFile source) throws ContentException {
         boolean normalize;
         Map<String, Pair<Integer, Integer>> slicingConfig;
 
@@ -66,7 +64,7 @@ public class SlicingAction extends OneToOneAction<NMFile, NMFile> {
         if (slicingConfig == null) {
             throw new RuntimeException("Slice configuration not defined");
         }
-        log.report("File {} started", source.getName());
+        report(name, "File {} started", source.getName());
 
         SlicedData sData = new SlicedData(source, slicingConfig, normalize);
 
@@ -74,7 +72,7 @@ public class SlicingAction extends OneToOneAction<NMFile, NMFile> {
 
         ColumnedDataWriter.writeDataSet(stream, sData, null);
 
-        log.report("File {} completed", source.getName());
+        report(name, "File {} completed", source.getName());
 
         return source;
     }
@@ -9,26 +9,25 @@ import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.io.ColumnedDataReader;
 import hep.dataforge.io.ColumnedDataWriter;
-import hep.dataforge.io.reports.Reportable;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.ListTable;
 import hep.dataforge.tables.MapPoint;
 import hep.dataforge.tables.Table;
 
 import java.io.File;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.util.Optional;
 
 /**
- *
  * @author Alexander Nozik <altavir@gmail.com>
 */
 @TypedActionDef(name = "substractSpectrum", inputType = Table.class, outputType = Table.class, info = "Substract reference spectrum (background)")
 public class SubstractSpectrumAction extends OneToOneAction<Table, Table> {
 
     @Override
-    protected Table execute(Reportable log, String name, Laminate inputMeta, Table input) {
+    protected Table execute(String name, Laminate inputMeta, Table input) {
         try {
             String referencePath = inputMeta.getString("file", "empty.dat");
             File referenceFile = getContext().io().getFile(referencePath);
@@ -44,7 +43,7 @@ public class SubstractSpectrumAction extends OneToOneAction<Table, Table> {
                     pointBuilder.putValue("CR", Math.max(0, point.getDouble("CR") - referencePoint.get().getDouble("CR")));
                     pointBuilder.putValue("CRerr", Math.sqrt(Math.pow(point.getDouble("CRerr"), 2d) + Math.pow(referencePoint.get().getDouble("CRerr"), 2d)));
                 } else {
-                    log.report("No reference point found for Uset = {}", point.getDouble("Uset"));
+                    report(name, "No reference point found for Uset = {}", point.getDouble("Uset"));
                 }
                 builder.row(pointBuilder.build());
             });
@@ -18,17 +18,13 @@ package inr.numass.actions;
 import hep.dataforge.actions.GroupBuilder;
 import hep.dataforge.actions.ManyToOneAction;
 import hep.dataforge.data.DataNode;
-import hep.dataforge.stat.fit.FitState;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.io.ColumnedDataWriter;
-import hep.dataforge.io.reports.Reportable;
 import hep.dataforge.meta.Meta;
-import hep.dataforge.tables.DataPoint;
-import hep.dataforge.tables.ListTable;
-import hep.dataforge.tables.MapPoint;
-import hep.dataforge.tables.Table;
-import hep.dataforge.tables.TableFormat;
+import hep.dataforge.stat.fit.FitState;
+import hep.dataforge.tables.*;
 import hep.dataforge.values.Value;
 
 import java.io.OutputStream;
 import java.util.Arrays;
 import java.util.List;
@@ -57,7 +53,7 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
     }
 
     @Override
-    protected Table execute(Reportable log, String nodeName, Map<String, FitState> input, Meta meta) {
+    protected Table execute(String nodeName, Map<String, FitState> input, Meta meta) {
         String[] parNames = meta.getStringArray("parnames");
         String[] names = new String[2 * parNames.length + 2];
         names[0] = "file";
@@ -107,11 +103,11 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
     }
 
     @Override
-    protected void afterGroup(Reportable log, String groupName, Meta outputMeta, Table output) {
+    protected void afterGroup(String groupName, Meta outputMeta, Table output) {
         OutputStream stream = buildActionOutput(groupName);
         ColumnedDataWriter.writeDataSet(stream, output, groupName);
 
-        super.afterGroup(log, groupName, outputMeta, output);
+        super.afterGroup(groupName, outputMeta, output);
     }
 
 }
@@ -18,6 +18,7 @@ package inr.numass.models;
 import hep.dataforge.io.IOUtils;
 import hep.dataforge.tables.DataPoint;
 import hep.dataforge.tables.PointSource;
 
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.util.ArrayList;
@@ -43,10 +44,10 @@ public class FSS{
                 norm += p;
             }
             if(ps.isEmpty()) {
-                throw new Error("Error reading FSS FILE. No points.");
+                throw new RuntimeException("Error reading FSS FILE. No points.");
             }
         } catch (FileNotFoundException ex) {
-            throw new Error("Error reading FSS FILE. File not found.");
+            throw new RuntimeException("Error reading FSS FILE. File not found.");
         }
     }
 
@@ -15,22 +15,24 @@
 */
 package inr.numass.models;
 
-import hep.dataforge.stat.parametric.FunctionCaching;
 import hep.dataforge.maths.integration.GaussRuleIntegrator;
 import hep.dataforge.maths.integration.UnivariateIntegrator;
 import hep.dataforge.plots.XYPlotFrame;
 import hep.dataforge.plots.data.PlottableXYFunction;
+import hep.dataforge.stat.parametric.FunctionCaching;
 import hep.dataforge.values.NamedValueSet;
-import static java.lang.Math.exp;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 import org.apache.commons.math3.analysis.BivariateFunction;
 import org.apache.commons.math3.analysis.UnivariateFunction;
 import org.apache.commons.math3.exception.OutOfRangeException;
 import org.slf4j.LoggerFactory;
 
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
+import static java.lang.Math.exp;
 
 /**
  * Calculation of the scattering function of arbitrary order. The dependence
  * of the cross section on the electron energy is not taken into account.
@@ -46,7 +48,12 @@ public class LossCalculator {
 
     private static final LossCalculator instance = new LossCalculator();
     private static final UnivariateIntegrator integrator = new GaussRuleIntegrator(100);
+    private final Map<Integer, UnivariateFunction> cache = new HashMap<>();
+
+    private LossCalculator() {
+        cache.put(1, getSingleScatterFunction());
+//        cache.put(2, getDoubleScatterFunction());
+    }
 
     public static UnivariateFunction getSingleScatterFunction() {
         final double A1 = 0.204;
@@ -196,13 +203,6 @@
 
     }
 
-    private final Map<Integer, UnivariateFunction> cache = new HashMap<>();
-
-    private LossCalculator() {
-        cache.put(1, getSingleScatterFunction());
-//        cache.put(2, getDoubleScatterFunction());
-    }
-
     public List<Double> getGunLossProbabilities(double X) {
         List<Double> res = new ArrayList<>();
         double prob;
@@ -238,17 +238,19 @@
      * @param order
      * @return
      */
-    private synchronized UnivariateFunction getLoss(int order) {
+    private UnivariateFunction getLoss(int order) {
        if (order <= 0) {
            throw new IllegalArgumentException();
        }
        if (cache.containsKey(order)) {
            return cache.get(order);
        } else {
-            LoggerFactory.getLogger(getClass())
-                    .debug("Scatter cache of order {} not found. Updating", order);
-            cache.put(order, getNextLoss(getMargin(order), getLoss(order - 1)));
-            return cache.get(order);
+            synchronized (this) {
+                LoggerFactory.getLogger(getClass())
+                        .debug("Scatter cache of order {} not found. Updating", order);
+                cache.putIfAbsent(order, getNextLoss(getMargin(order), getLoss(order - 1)));
+                return cache.get(order);
+            }
        }
     }
 
@@ -325,7 +327,7 @@
         }
     }
 
-    public synchronized double getLossValue(int order, double Ei, double Ef) {
+    public double getLossValue(int order, double Ei, double Ef) {
         if (Ei - Ef < 5d) {
             return 0;
         } else if (Ei - Ef >= getMargin(order)) {
@@ -10,7 +10,7 @@ import java.io.File;
 public class GrindCaller {
 
     public static void main(String[] args) throws Exception {
-        JavaGrindLauncher.buildWorkspace(new File("D:\\Work\\Numass\\sterile2016\\workspace.groovy")).runTask("numass.prepare", "fill_2").computeAll();
+        JavaGrindLauncher.buildWorkspace(new File("D:\\Work\\Numass\\sterile2016\\workspace.groovy")).runTask("numass.fitsum", "fill_2").computeAll();
     }
 
 }
@@ -5,12 +5,14 @@
 */
 package inr.numass.tasks;
 
+import hep.dataforge.actions.Action;
 import hep.dataforge.actions.ManyToOneAction;
 import hep.dataforge.computation.WorkManager;
 import hep.dataforge.context.Context;
 import hep.dataforge.data.DataNode;
+import hep.dataforge.data.DataSet;
 import hep.dataforge.description.TypedActionDef;
-import hep.dataforge.io.reports.Reportable;
+import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.stat.fit.FitState;
 import hep.dataforge.stat.fit.ParamSet;
@@ -20,23 +22,27 @@ import hep.dataforge.workspace.GenericTask;
 import hep.dataforge.workspace.TaskModel;
 import hep.dataforge.workspace.TaskState;
 
+import java.io.OutputStream;
 import java.util.Map;
 
 /**
- *
  * @author Alexander Nozik
 */
 public class NumassFitScanSummaryTask extends GenericTask {
 
     @Override
     protected void transform(WorkManager.Callback callback, Context context, TaskState state, Meta config) {
-        state.finish(new FitSummaryAction().withContext(context).run((DataNode<FitState>) state.getData(), config));
+        DataSet.Builder<Table> builder = DataSet.builder(Table.class);
+        Action<FitState, Table> action = new FitSummaryAction().withContext(context);
+        state.getData().getNode("fitscan").get().nodeStream().forEach(node ->
+                builder.putData(node.getName(), action.run((DataNode<FitState>) node, config).getData()));
+        state.finish(builder.build());
     }
 
     @Override
     protected TaskModel transformModel(TaskModel model) {
         //Transmit meta as-is
-        model.dependsOn("numass.fitscan", model.meta());
+        model.dependsOn("numass.fitscan", model.meta(), "fitscan");
         return model;
     }
 
@@ -45,11 +51,11 @@ public class NumassFitScanSummaryTask extends GenericTask {
         return "numass.fitsum";
     }
 
-    @TypedActionDef(name = "fitSummary", inputType = FitState.class, outputType = Table.class)
+    @TypedActionDef(name = "sterileSummary", inputType = FitState.class, outputType = Table.class)
     private class FitSummaryAction extends ManyToOneAction<FitState, Table> {
 
         @Override
-        protected Table execute(Reportable log, String nodeName, Map<String, FitState> input, Meta meta) {
+        protected Table execute(String nodeName, Map<String, FitState> input, Meta meta) {
             ListTable.Builder builder = new ListTable.Builder("msterile2", "U2", "U2err", "E0", "trap");
             input.forEach((key, fitRes) -> {
                 ParamSet pars = fitRes.getParameters();
@@ -59,7 +65,14 @@ public class NumassFitScanSummaryTask extends GenericTask {
                         pars.getValue("E0"),
                         pars.getValue("trap"));
             });
-            return builder.build();
+            Table res = builder.build().sort("msterile2", true);
+
+
+            OutputStream stream = buildActionOutput(nodeName);
+
+            ColumnedDataWriter.writeDataSet(stream, res, "Sterile neutrino mass scan summary");
+
+            return res;
         }
 
     }
@@ -9,11 +9,9 @@ import hep.dataforge.actions.Action;
 import hep.dataforge.computation.WorkManager;
 import hep.dataforge.context.Context;
 import hep.dataforge.data.DataNode;
-import hep.dataforge.data.DataSet;
 import hep.dataforge.data.DataTree;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.meta.MetaBuilder;
-import hep.dataforge.meta.MetaUtils;
 import hep.dataforge.stat.fit.FitAction;
 import hep.dataforge.stat.fit.FitState;
 import hep.dataforge.tables.Table;
@@ -30,10 +28,10 @@ public class NumassFitScanTask extends GenericTask {
     @Override
     protected void transform(WorkManager.Callback callback, Context context, TaskState state, Meta config) {
         String scanParameter = config.getString("scanPar", "msterile2");
-        Value scanValues = config.getValue("scanValues", Value.of(new String[]{"0.5, 1, 1.5, 2, 2.5, 3"}));
-        Action action = new FitAction().withContext(context).withParentProcess(callback.workName());
-        DataTree.Builder resultBuilder = DataTree.builder(FitState.class);
-        DataNode<?> sourceNode = state.getData();
+        Value scanValues = config.getValue("scanValues", Value.of("[2.5e5, 1e6, 2.25e6, 4e6, 6.25e6, 9e6]"));
+        Action<Table, FitState> action = new FitAction().withContext(context).withParentProcess(callback.workName());
+        DataTree.Builder<FitState> resultBuilder = DataTree.builder(FitState.class);
+        DataNode<?> sourceNode = state.getData().getNode("prepare").get();
 
         if (config.hasNode("merge")) {
             //use merged data and ignore raw data
@@ -41,20 +39,20 @@
         }
 
         //do fit
 
+        Meta fitConfig = config.getMeta("fit");
         sourceNode.forEachDataWithType(Table.class, data -> {
-            DataNode res = scanValues.listValue().stream().parallel().map(val -> {
-                MetaBuilder overrideMeta = new MetaBuilder("override");
+            for (int i = 0; i < scanValues.listValue().size(); i++) {
+                Value val = scanValues.listValue().get(i);
+                MetaBuilder overrideMeta = new MetaBuilder(fitConfig);
                 overrideMeta.setValue("@resultName", String.format("%s[%s=%s]", data.getName(), scanParameter, val.stringValue()));
-                MetaBuilder paramMeta = MetaUtils.findNodeByValue(config, "params.param", data.getName(), scanParameter).getBuilder()
-                        .setValue("value", val);
-                overrideMeta.setNode("params.param", paramMeta);
-                return action.run(DataNode.of(data.getName(), data, overrideMeta), config);
-            }).collect(
-                    () -> DataSet.builder(FitState.class),
-                    (DataSet.Builder builder, DataNode node) -> builder.putData(node.getName(), node.getData()),
-                    (DataSet.Builder builder1, DataSet.Builder builder2) -> builder1.putAll(builder2.getDataMap())
-            ).build();
-            resultBuilder.putData(data.getName(), res.getData());
+                overrideMeta.getNodes("params.param").stream()
+                        .filter(par -> par.getString("name") == scanParameter).forEach(par -> par.setValue("value", val));
+//                Data<Table> newData = new Data<Table>(data.getGoal(),data.dataType(),overrideMeta);
+                DataNode node = action.run(DataNode.of("fit_" + i, data, Meta.empty()), overrideMeta);
+                resultBuilder.putData(data.getName() + ".fit_" + i, node.getData());
+            }
         });
 
 
@@ -64,7 +62,12 @@
     @Override
     protected TaskModel transformModel(TaskModel model) {
         //Transmit meta as-is
-        model.dependsOn("numass.prepare", model.meta());
+        MetaBuilder metaBuilder = new MetaBuilder(model.meta()).removeNode("fit");
+        if (model.meta().hasNode("filter")) {
+            model.dependsOn("numass.filter", metaBuilder.build(), "prepare");
+        } else {
+            model.dependsOn("numass.prepare", metaBuilder.build(), "prepare");
+        }
         return model;
     }
 
@@ -49,16 +49,16 @@ public class NumassPrepareTask extends GenericTask {
     @SuppressWarnings("unchecked")
     protected void transform(WorkManager.Callback callback, Context context, TaskState state, Meta config) {
         //acquiring initial data. Data node could not be empty
-        Meta dataMeta = Template.compileTemplate(config.getNode("data"), config);
+        Meta dataMeta = config.getNode("data");
         DataNode<NumassData> data = runAction(new ReadNumassStorageAction(), callback, context, DataNode.empty(), dataMeta);
         state.setData("data", data);
         //preparing table data
-        Meta prepareMeta = Template.compileTemplate(config.getNode("prepare"), config);
+        Meta prepareMeta = config.getNode("prepare");
         DataNode<Table> tables = runAction(new PrepareDataAction(), callback, context, data, prepareMeta);
         state.setData("prepare", tables);
 
         if (config.hasNode("monitor")) {
-            Meta monitorMeta = Template.compileTemplate(config.getNode("monitor"), config);
+            Meta monitorMeta = config.getNode("monitor");
             tables = runAction(new MonitorCorrectAction(), callback, context, tables, monitorMeta);
             state.setData("monitor", tables);
         }
@@ -79,7 +79,7 @@
         }
 
         if (config.hasNode("transform")) {
-            Meta filterMeta = Template.compileTemplate(config.getNode("transform"), config);
+            Meta filterMeta = config.getNode("transform");
             tables = runAction(new TransformTableAction(), callback, context, tables, filterMeta);
         }
 
@@ -0,0 +1,49 @@
+package inr.numass.tasks;
+
+import hep.dataforge.actions.OneToOneAction;
+import hep.dataforge.computation.WorkManager;
+import hep.dataforge.context.Context;
+import hep.dataforge.data.DataNode;
+import hep.dataforge.description.TypedActionDef;
+import hep.dataforge.meta.Laminate;
+import hep.dataforge.meta.Meta;
+import hep.dataforge.meta.MetaBuilder;
+import hep.dataforge.tables.Table;
+import hep.dataforge.workspace.GenericTask;
+import hep.dataforge.workspace.TaskModel;
+import hep.dataforge.workspace.TaskState;
+
+/**
+ * Created by darksnake on 13-Aug-16.
+ */
+public class NumassTableFilterTask extends GenericTask<Table> {
+
+    @Override
+    public String getName() {
+        return "numass.filter";
+    }
+
+    @Override
+    protected void transform(WorkManager.Callback callback, Context context, TaskState state, Meta config) {
+        DataNode<Table> sourceNode = (DataNode<Table>) state.getData().getNode("prepare").get();
+        state.finish(new FilterTableAction().withContext(context).run(sourceNode, config));
+    }
+
+    @Override
+    protected TaskModel transformModel(TaskModel model) {
+        super.transformModel(model);
+        MetaBuilder metaBuilder = new MetaBuilder(model.meta()).removeNode("filter");
+        model.dependsOn("numass.prepare", metaBuilder.build(), "prepare");
+        return model;
+    }
+
+    @TypedActionDef(name = "filterTable", inputType = Table.class, outputType = Table.class)
+    private class FilterTableAction extends OneToOneAction<Table, Table> {
+        @Override
+        protected Table execute(String name, Laminate inputMeta, Table input) {
+            double uLo = inputMeta.getDouble("filter.from", 0);
+            double uHi = inputMeta.getDouble("filter.to", Double.POSITIVE_INFINITY);
+            return input.filter("Uset", uLo, uHi);
+        }
+    }
+}