Plots and Numass update
parent af3dcfe986
commit 1ec62e795d
@@ -41,7 +41,7 @@ public class SpectrumDataAdapter extends XYAdapter {
     }

     public SpectrumDataAdapter(String xName, String yName, String yErrName, String measurementTime) {
-        super(new MetaBuilder(ValuesAdapter.DATA_ADAPTER_KEY)
+        super(new MetaBuilder(ValuesAdapter.ADAPTER_KEY)
                 .setValue(X_VALUE_KEY, xName)
                 .setValue(Y_VALUE_KEY, yName)
                 .setValue(Y_ERROR_KEY, yErrName)
@@ -51,7 +51,7 @@ public class SpectrumDataAdapter extends XYAdapter {
     }

     public SpectrumDataAdapter(String xName, String yName, String measurementTime) {
-        super(new MetaBuilder(ValuesAdapter.DATA_ADAPTER_KEY)
+        super(new MetaBuilder(ValuesAdapter.ADAPTER_KEY)
                 .setValue(X_VALUE_KEY, xName)
                 .setValue(Y_VALUE_KEY, yName)
                 .setValue(POINT_LENGTH_NAME, measurementTime)
@@ -55,7 +55,7 @@ public abstract class AbstractAnalyzer implements NumassAnalyzer {
         return res;
     }

-    protected Stream<NumassEvent> getAllEvents(NumassBlock block){
+    protected Stream<NumassEvent> getAllEvents(NumassBlock block) {
         if (block.getFrames().count() == 0) {
             return block.getEvents();
         } else if (getProcessor() == null) {
@@ -67,10 +67,11 @@ public abstract class AbstractAnalyzer implements NumassAnalyzer {

    /**
     * Get table format for summary table
     *
     * @param config
     * @return
     */
-    protected TableFormat getTableFormat(Meta config){
+    protected TableFormat getTableFormat(Meta config) {
        return new TableFormatBuilder()
                .addNumber(HV_KEY, X_VALUE_KEY)
                .addNumber(LENGTH_KEY)
@@ -84,11 +85,11 @@ public abstract class AbstractAnalyzer implements NumassAnalyzer {


    @Override
-    public Table analyze(NumassSet set, Meta config) {
+    public Table analyzeSet(NumassSet set, Meta config) {
        TableFormat format = getTableFormat(config);

        return new ListTable.Builder(format)
-                .rows(set.getPoints().map(point -> analyze(point, config)))
+                .rows(set.getPoints().map(point -> analyzePoint(point, config)))
                .build();
    }

@@ -4,7 +4,6 @@ import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Values;
 import inr.numass.data.api.NumassBlock;
-import inr.numass.data.api.NumassPoint;
 import inr.numass.data.api.SignalProcessor;
 import org.jetbrains.annotations.Nullable;

@@ -27,28 +26,17 @@ public class SimpleAnalyzer extends AbstractAnalyzer {
        int loChannel = config.getInt("window.lo", 0);
        int upChannel = config.getInt("window.up", Integer.MAX_VALUE);
        long count = getEvents(block, config).count();
-        double length = (double) block.getLength().toNanos()/1e9;
+        double length = (double) block.getLength().toNanos() / 1e9;
        double countRate = (double) count / length;
        double countRateError = Math.sqrt((double) count) / length;

-        if (block instanceof NumassPoint) {
-            return ValueMap.of(NAME_LIST_WITH_HV,
-                    ((NumassPoint) block).getVoltage(),
-                    length,
-                    count,
-                    countRate,
-                    countRateError,
-                    new Integer[]{loChannel, upChannel},
-                    block.getStartTime());
-        } else {
-            return ValueMap.of(NAME_LIST,
-                    length,
-                    count,
-                    countRate,
-                    countRateError,
-                    new Integer[]{loChannel, upChannel},
-                    block.getStartTime());
-        }
+        return ValueMap.of(NAME_LIST,
+                length,
+                count,
+                countRate,
+                countRateError,
+                new Integer[]{loChannel, upChannel},
+                block.getStartTime());
    }


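As background for the two rate lines above (an illustrative sketch, not part of the commit): the error follows from Poisson counting statistics, where the variance of the event count equals its mean, so for N counts collected over T seconds the rate is N/T with uncertainty sqrt(N)/T.

    // Illustrative sketch of the Poisson statistics behind SimpleAnalyzer's
    // count rate and its error; the numbers used in main() are made up.
    public final class RateEstimate {
        public final double rate;
        public final double rateError;

        public RateEstimate(long count, double lengthSeconds) {
            this.rate = count / lengthSeconds;                          // N / T
            this.rateError = Math.sqrt((double) count) / lengthSeconds; // sqrt(N) / T, since Var(N) = N
        }

        public static void main(String[] args) {
            RateEstimate r = new RateEstimate(10_000, 20.0);
            System.out.printf("rate = %.1f Hz +- %.1f Hz%n", r.rate, r.rateError);
        }
    }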
@@ -4,6 +4,7 @@ import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.TableFormat;
 import hep.dataforge.tables.TableFormatBuilder;
 import hep.dataforge.tables.ValueMap;
+import hep.dataforge.values.Value;
 import hep.dataforge.values.Values;
 import inr.numass.data.api.NumassBlock;
 import inr.numass.data.api.NumassEvent;
@@ -12,6 +13,8 @@ import inr.numass.data.api.SignalProcessor;
 import javafx.util.Pair;
 import org.jetbrains.annotations.Nullable;

+import java.util.HashMap;
+import java.util.Map;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.stream.Stream;
@@ -27,7 +30,7 @@ public class TimeAnalyzer extends AbstractAnalyzer {
    public static String T0_KEY = "t0";

    public static String[] NAME_LIST = {LENGTH_KEY, COUNT_KEY, COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, WINDOW_KEY, TIME_KEY, T0_KEY};
-    public static String[] NAME_LIST_WITH_HV = {HV_KEY, LENGTH_KEY, COUNT_KEY, COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, WINDOW_KEY, TIME_KEY, T0_KEY};
+//    public static String[] NAME_LIST_WITH_HV = {HV_KEY, LENGTH_KEY, COUNT_KEY, COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, WINDOW_KEY, TIME_KEY, T0_KEY};

    public TimeAnalyzer(@Nullable SignalProcessor processor) {
        super(processor);
@@ -38,6 +41,12 @@ public class TimeAnalyzer extends AbstractAnalyzer {

    @Override
    public Values analyze(NumassBlock block, Meta config) {
+        //In case points inside points
+        if (block instanceof NumassPoint) {
+            return analyzePoint((NumassPoint) block, config);
+        }
+
+
        int loChannel = config.getInt("window.lo", 0);
        int upChannel = config.getInt("window.up", Integer.MAX_VALUE);
        long t0 = getT0(block, config);
@@ -58,29 +67,61 @@ public class TimeAnalyzer extends AbstractAnalyzer {
        double length = totalT.get() / 1e9;
        long count = (long) (length * countRate);

-        if (block instanceof NumassPoint) {
-            return ValueMap.of(NAME_LIST_WITH_HV,
-                    ((NumassPoint) block).getVoltage(),
-                    length,
-                    count,
-                    countRate,
-                    countRateError,
-                    new Integer[]{loChannel, upChannel},
-                    block.getStartTime(),
-                    (double)t0 / 1000d
-            );
-        } else {
-            return ValueMap.of(NAME_LIST,
-                    length,
-                    count,
-                    countRate,
-                    countRateError,
-                    new Integer[]{loChannel, upChannel},
-                    block.getStartTime(),
-                    (double)t0 / 1000d
-            );
-        }
+        return ValueMap.of(NAME_LIST,
+                length,
+                count,
+                countRate,
+                countRateError,
+                new Integer[]{loChannel, upChannel},
+                block.getStartTime(),
+                (double) t0 / 1000d
+        );
+    }
+
+    @Override
+    public Values analyzePoint(NumassPoint point, Meta config) {
+        //Average count rates, do not sum events
+        Values res = point.getBlocks().map(it -> analyze(it, config)).reduce(null, this::combineBlockResults);
+
+        Map<String, Value> map = new HashMap<>(res.asMap());
+        map.put(HV_KEY, Value.of(point.getVoltage()));
+        return new ValueMap(map);
+    }
+
+    /**
+     * Combine two blocks from the same point into one
+     *
+     * @param v1
+     * @param v2
+     * @return
+     */
+    private Values combineBlockResults(Values v1, Values v2) {
+        if (v1 == null) {
+            return v2;
+        }
+        if (v2 == null) {
+            return v1;
+        }
+
+        double cr1 = v1.getDouble(COUNT_RATE_KEY);
+        double cr2 = v2.getDouble(COUNT_RATE_KEY);
+        double w1 = Math.pow(v1.getDouble(COUNT_RATE_ERROR_KEY), -2);
+        double w2 = Math.pow(v2.getDouble(COUNT_RATE_ERROR_KEY), -2);
+
+        double countRate = (cr1 * w1 + cr2 * w2) / (1d * w1 + 1d * w2);
+
+        double countRateErr = Math.sqrt(1d / (w1 + w2));
+
+
+        return ValueMap.of(NAME_LIST,
+                v1.getDouble(LENGTH_KEY) + v2.getDouble(LENGTH_KEY),
+                v1.getInt(COUNT_KEY) + v2.getInt(COUNT_KEY),
+                countRate,
+                countRateErr,
+                v1.getValue(WINDOW_KEY),
+                v1.getValue(TIME_KEY),
+                v1.getDouble(T0_KEY)
+        );
    }

    private long getT0(NumassBlock block, Meta config) {
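A note on the weighting in combineBlockResults (not stated in the diff itself): averaging the block count rates with weights equal to the inverse squared errors is the standard minimum-variance combination of independent estimates,

    \[
        w_i = \frac{1}{\sigma_i^{2}}, \qquad
        \bar{r} = \frac{w_1 r_1 + w_2 r_2}{w_1 + w_2}, \qquad
        \sigma_{\bar{r}} = \sqrt{\frac{1}{w_1 + w_2}},
    \]

which is exactly what the code computes as countRate and countRateErr, while lengths and counts are simply summed.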
@@ -2,13 +2,17 @@ package inr.numass.data.api;

 import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.*;
+import hep.dataforge.values.Value;
 import hep.dataforge.values.Values;

+import java.util.HashMap;
+import java.util.Map;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.stream.IntStream;
 import java.util.stream.Stream;

 import static hep.dataforge.tables.XYAdapter.*;
+import static inr.numass.data.api.NumassPoint.HV_KEY;

 /**
  * A general raw data analysis utility. Could have different implementations
@@ -33,7 +37,6 @@ public interface NumassAnalyzer {
    }


-
    String CHANNEL_KEY = "channel";
    String COUNT_KEY = "count";
    String LENGTH_KEY = "length";
@@ -49,6 +52,18 @@
     */
    Values analyze(NumassBlock block, Meta config);

+    /**
+     * Analysis result for point including hv information
+     * @param point
+     * @param config
+     * @return
+     */
+    default Values analyzePoint(NumassPoint point, Meta config) {
+        Map<String, Value> map = new HashMap<>(analyze(point, config).asMap());
+        map.put(HV_KEY, Value.of(point.getVoltage()));
+        return new ValueMap(map);
+    }
+
    /**
     * Return unsorted stream of events including events from frames
     *
@@ -64,7 +79,7 @@
     * @param config
     * @return
     */
-    Table analyze(NumassSet set, Meta config);
+    Table analyzeSet(NumassSet set, Meta config);

    /**
     * Calculate the energy spectrum for a given block. The s
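To show how the renamed methods fit together, here is a usage sketch (not part of the commit; the SmartAnalyzer and MetaBuilder import paths and the window values are assumptions, the method names follow the diff):

    import hep.dataforge.meta.Meta;
    import hep.dataforge.meta.MetaBuilder;
    import hep.dataforge.tables.Table;
    import hep.dataforge.values.Values;
    import inr.numass.data.analyzers.SmartAnalyzer;
    import inr.numass.data.api.*;

    // Sketch of the reworked NumassAnalyzer API after this commit.
    class AnalyzerUsageSketch {
        Table analyzeEverything(NumassBlock block, NumassPoint point, NumassSet set) {
            NumassAnalyzer analyzer = new SmartAnalyzer();
            // Example window; the keys follow the "window.lo"/"window.up" names used in the diff.
            Meta config = new MetaBuilder("analyzer")
                    .setValue("window.lo", 500)
                    .setValue("window.up", 1800);

            Values blockResult = analyzer.analyze(block, config);      // per-block values, no HV column
            Values pointResult = analyzer.analyzePoint(point, config); // same values plus the point HV
            return analyzer.analyzeSet(set, config);                   // one table row per point in the set
        }
    }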
@@ -5,6 +5,7 @@ import hep.dataforge.context.Global
 import hep.dataforge.data.DataSet
 import hep.dataforge.grind.Grind
 import hep.dataforge.grind.GrindShell
+import hep.dataforge.kodex.fx.plots.PlotManager
 import hep.dataforge.meta.Meta
 import inr.numass.NumassPlugin
 import inr.numass.actions.TimeAnalyzedAction
@@ -21,7 +22,7 @@ import inr.numass.data.storage.NumassStorageFactory


 Context ctx = Global.instance()
-ctx.pluginManager().load(FXPlotManager)
+ctx.pluginManager().load(PlotManager)
 ctx.pluginManager().load(NumassPlugin.class)

 new GrindShell(ctx).eval {
@@ -29,12 +30,15 @@ new GrindShell(ctx).eval {

    NumassStorage storage = NumassStorageFactory.buildLocal(rootDir);

-    Meta meta = Grind.buildMeta(binNum: 200) {
+    Meta meta = Grind.buildMeta(binNum: 200, plotHist: false) {
        window(lo: 500, up: 1800)
        plot(showErrors: false)
    }

-    def sets = (20..31).collect { "set_$it" }
+    def sets = ((2..14) + (22..31)).collect { "set_$it" }
+    //def sets = (2..14).collect { "set_$it" }
+    //def sets = (16..31).collect { "set_$it" }
+    //def sets = (20..28).collect { "set_$it" }

    def loaders = sets.collect { set ->
        storage.provide("loader::$set", NumassSet.class).orElse(null)
@@ -294,8 +294,8 @@ public class NumassPlugin extends BasicPlugin {
    }

    private XYAdapter getAdapter(Meta an) {
-        if (an.hasMeta(ValuesAdapter.DATA_ADAPTER_KEY)) {
-            return new XYAdapter(an.getMeta(ValuesAdapter.DATA_ADAPTER_KEY));
+        if (an.hasMeta(ValuesAdapter.ADAPTER_KEY)) {
+            return new XYAdapter(an.getMeta(ValuesAdapter.ADAPTER_KEY));
        } else {
            return new XYAdapter(NumassPoint.HV_KEY, NumassAnalyzer.COUNT_RATE_KEY, NumassAnalyzer.COUNT_RATE_ERROR_KEY);
        }
@@ -26,7 +26,7 @@ public class AnalyzeDataAction extends OneToOneAction<NumassSet, Table> {
    protected Table execute(Context context, String name, NumassSet input, Laminate inputMeta) {
        //TODO add processor here
        NumassAnalyzer analyzer = new SmartAnalyzer();
-        Table res = analyzer.analyze(input, inputMeta);
+        Table res = analyzer.analyzeSet(input, inputMeta);
        output(context, name, stream -> NumassUtils.writeSomething(stream, inputMeta, res));
        return res;
    }
@@ -42,8 +42,8 @@ public class TransformDataAction extends OneToOneAction<Table, Table> {

        List<Correction> corrections = new ArrayList<>();

-        meta.optMeta("corrections").ifPresent(corrs ->
-                MetaUtils.nodeStream(corrs)
+        meta.optMeta("corrections").ifPresent(cors ->
+                MetaUtils.nodeStream(cors)
                        .map(Pair::getValue)
                        .map(this::makeCorrection)
                        .forEach(corrections::add)
@@ -10,7 +10,7 @@ import hep.dataforge.meta.Laminate
 import hep.dataforge.plots.PlotPlugin
 import hep.dataforge.plots.data.DataPlot
 import hep.dataforge.tables.Table
-import hep.dataforge.tables.ValueMap
+import hep.dataforge.tables.XYAdapter
 import hep.dataforge.values.ValueType
 import inr.numass.data.analyzers.TimeAnalyzer
 import inr.numass.data.api.NumassAnalyzer
@@ -51,6 +51,9 @@ class TimeAnalyzedAction : OneToOneAction<NumassPoint, Table>() {
                "window.up" to upChannel
            }).getDouble("cr")

+            log.report("The expected count rate for 30 us delay is $trueCR")
+
+
            val binNum = inputMeta.getInt("binNum", 1000);
            val binSize = inputMeta.getDouble("binSize", 1.0 / trueCR * 10 / binNum * 1e6)

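As a quick check on the default binSize above (an illustration, not from the commit): with a rate trueCR the mean spacing between events is 1/trueCR seconds, so the default spreads binNum bins over ten such spacings and converts to microseconds,

    \[
        \Delta t_{\text{bin}} = \frac{1}{\mathrm{trueCR}} \cdot \frac{10}{\mathrm{binNum}} \cdot 10^{6}\ \mu\mathrm{s},
    \]

e.g. trueCR = 1000 Hz and binNum = 1000 give a 10 us default bin width.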
@@ -63,67 +66,69 @@ class TimeAnalyzedAction : OneToOneAction<NumassPoint, Table>() {
        //.histogram(input, loChannel, upChannel, binSize, binNum).asTable();
        log.report("Finished histogram calculation...");

-        val histPlot = pm.getPlotFrame(getName(), "histogram");
-
-        histPlot.configure {
-            node("xAxis") {
-                "axisTitle" to "delay"
-                "axisUnits" to "us"
-            }
-            node("yAxis") {
-                "type" to "log"
-            }
-        }
-
-        histPlot.add(DataPlot(name)
-                .configure {
-                    "showLine" to true
-                    "showSymbol" to false
-                    "showErrors" to false
-                    "connectionType" to "step"
-                    node("adapter") {
-                        "y.value" to "count"
-                    }
-                }.apply {
-                    configure(inputMeta.getMetaOrEmpty("histogram"))
-                }
-                .fillData(histogram)
-        )
-
-        log.report("The expected count rate for 30 us delay is $trueCR")
-
-        val statPlotPoints = (1..150).map { 1000 * it }.map { t ->
-            val result = analyzer.analyze(input, buildMeta {
-                "t0" to t
-                "window.lo" to loChannel
-                "window.up" to upChannel
-            })
-
-
-            val norm = if (inputMeta.getBoolean("normalize", true)) {
-                trueCR
-            } else {
-                1.0
-            }
-
-            ValueMap.ofMap(
-                    mapOf(
-                            "x" to t / 1000,
-                            "y" to result.getDouble("cr") / norm,
-                            "y.err" to result.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY) / norm
-                    )
-            );
-        }
-
-        pm.getPlotFrame(getName(), "stat-method").add(
-                DataPlot(name).configure {
-                    "showLine" to true
-                    "thickness" to 4
-                    "title" to "${name}_${input.voltage}"
-                }.apply {
-                    configure(inputMeta.getMetaOrEmpty("plot"))
-                }.fillData(statPlotPoints)
-        )
+        if (inputMeta.getBoolean("plotHist", true)) {
+
+            val histPlot = pm.getPlotFrame(getName(), "histogram");
+
+            histPlot.configure {
+                node("xAxis") {
+                    "axisTitle" to "delay"
+                    "axisUnits" to "us"
+                }
+                node("yAxis") {
+                    "type" to "log"
+                }
+            }
+
+            val histogramPlot = DataPlot(name)
+                    .configure {
+                        "showLine" to true
+                        "showSymbol" to false
+                        "showErrors" to false
+                        "connectionType" to "step"
+                        node("@adapter") {
+                            "y.value" to "count"
+                        }
+                    }.apply { configure(inputMeta.getMetaOrEmpty("histogram")) }
+                    .fillData(histogram)
+
+            histPlot.add(histogramPlot)
+        }
+
+        if(inputMeta.getBoolean("plotStat",true)) {
+
+            val statPlotPoints = (1..150).map { 1000 * it }.map { t ->
+                val result = analyzer.analyze(input, buildMeta {
+                    "t0" to t
+                    "window.lo" to loChannel
+                    "window.up" to upChannel
+                })
+
+
+                val norm = if (inputMeta.getBoolean("normalize", true)) {
+                    trueCR
+                } else {
+                    1.0
+                }
+
+                XYAdapter.DEFAULT_ADAPTER.buildXYDataPoint(
+                        t / 1000.0,
+                        result.getDouble("cr") / norm,
+                        result.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY) / norm
+                )
+            }
+
+            val statPlot = DataPlot(name).configure {
+                "showLine" to true
+                "thickness" to 4
+                "title" to "${name}_${input.voltage}"
+            }.apply {
+                configure(inputMeta.getMetaOrEmpty("plot"))
+            }.fillData(statPlotPoints)
+
+            pm.getPlotFrame(getName(), "stat-method").add(statPlot)
+        }
        return histogram;
    }
 }
@@ -372,7 +372,7 @@ class NumassLoaderView : View() {
 //                    point.startTime
 //            )
 //        }
-        val spectrumDataSet = analyzer.analyze(data, buildMeta {
+        val spectrumDataSet = analyzer.analyzeSet(data, buildMeta {
            "window.lo" to loChannel
            "window.up" to upChannel
        })