Column table implementation

commit c898422e5d (parent acae9aae4c)
@@ -0,0 +1,72 @@
+package inr.numass.data.analyzers;
+
+import hep.dataforge.meta.Meta;
+import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.Table;
+import hep.dataforge.tables.TableFormat;
+import hep.dataforge.tables.TableFormatBuilder;
+import inr.numass.data.api.*;
+import org.jetbrains.annotations.Nullable;
+
+import java.util.stream.Stream;
+
+import static hep.dataforge.tables.XYAdapter.*;
+import static inr.numass.data.api.NumassPoint.HV_KEY;
+
+/**
+ * Created by darksnake on 11.07.2017.
+ */
+public abstract class AbstractAnalyzer implements NumassAnalyzer {
+    public static String[] NAME_LIST = {"length", "count", COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, "window", "timestamp"};
+    public static String[] NAME_LIST_WITH_HV = {HV_KEY, "length", "count", COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, "window", "timestamp"};
+    @Nullable
+    private final SignalProcessor processor;
+
+    public AbstractAnalyzer(@Nullable SignalProcessor processor) {
+        this.processor = processor;
+    }
+
+    public AbstractAnalyzer() {
+        this.processor = null;
+    }
+
+    /**
+     * Return unsorted stream of events including events from frames
+     *
+     * @param block
+     * @return
+     */
+    public Stream<NumassEvent> getEventStream(NumassBlock block, Meta config) {
+        if (getProcessor() == null && block.getFrames().count() > 0) {
+            throw new IllegalArgumentException("Signal processor needed to analyze frames");
+        } else {
+            int loChannel = config.getInt("window.lo", 0);
+            int upChannel = config.getInt("window.up", Integer.MAX_VALUE);
+            return Stream.concat(block.getEvents(), block.getFrames().flatMap(getProcessor()::analyze))
+                    .filter(it -> it.getChanel() >= loChannel && it.getChanel() <= upChannel);
+        }
+    }
+
+
+    @Override
+    public Table analyze(NumassSet set, Meta config) {
+        TableFormat format = new TableFormatBuilder()
+                .addNumber(HV_KEY, X_VALUE_KEY)
+                .addNumber("length")
+                .addNumber("count")
+                .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
+                .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
+                .addColumn("window")
+                .addTime()
+                .build();
+
+        return new ListTable.Builder(format)
+                .rows(set.getPoints().map(point -> analyze(point, config)))
+                .build();
+    }
+
+    @Nullable
+    public SignalProcessor getProcessor() {
+        return processor;
+    }
+}
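A minimal usage sketch of the new window-filtered event stream (assumptions: analyzer is any concrete AbstractAnalyzer, block is an existing NumassBlock, and MetaBuilder is the standard DataForge meta builder):

    // hypothetical usage; "analyzer" and "block" are assumed to exist
    Meta config = new MetaBuilder("analyzer")
            .setValue("window.lo", 400)   // lower channel bound, default 0
            .setValue("window.up", 3100)  // upper channel bound, default Integer.MAX_VALUE
            .build();
    long count = analyzer.getEventStream(block, config).count();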
@@ -0,0 +1,25 @@
+package inr.numass.data.analyzers;
+
+import hep.dataforge.meta.Meta;
+import hep.dataforge.values.Values;
+import inr.numass.data.api.NumassBlock;
+import inr.numass.data.api.SignalProcessor;
+import org.jetbrains.annotations.Nullable;
+
+/**
+ * Block analyzer that can perform debunching
+ * Created by darksnake on 11.07.2017.
+ */
+public class DebunchAnalyzer extends AbstractAnalyzer {
+    public DebunchAnalyzer(@Nullable SignalProcessor processor) {
+        super(processor);
+    }
+
+    public DebunchAnalyzer() {
+    }
+
+    @Override
+    public Values analyze(NumassBlock block, Meta config) {
+        throw new UnsupportedOperationException("TODO");
+    }
+}
@@ -1,59 +1,31 @@
 package inr.numass.data.analyzers;
 
 import hep.dataforge.meta.Meta;
-import hep.dataforge.tables.*;
+import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Values;
-import inr.numass.data.api.*;
+import inr.numass.data.api.NumassBlock;
+import inr.numass.data.api.NumassPoint;
+import inr.numass.data.api.SignalProcessor;
 import org.jetbrains.annotations.Nullable;
 
-import java.util.NavigableMap;
-import java.util.TreeMap;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.stream.Stream;
-
-import static hep.dataforge.tables.XYAdapter.*;
-import static inr.numass.data.api.NumassPoint.HV_KEY;
-
 /**
  * A simple event counter
  * Created by darksnake on 07.07.2017.
  */
-public class SimpleAnalyzer implements NumassAnalyzer {
-    public static String[] NAME_LIST = {"length", "count", COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, "window", "timestamp"};
-    public static String[] NAME_LIST_WITH_HV = {HV_KEY, "length", "count", COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, "window", "timestamp"};
-
-    @Nullable
-    private final SignalProcessor processor;
-
+public class SimpleAnalyzer extends AbstractAnalyzer {
     public SimpleAnalyzer(@Nullable SignalProcessor processor) {
-        this.processor = processor;
+        super(processor);
     }
 
     public SimpleAnalyzer() {
-        this.processor = null;
     }
 
-    /**
-     * Return unsorted stream of events including events from frames
-     *
-     * @param block
-     * @return
-     */
-    protected Stream<NumassEvent> getEventStream(NumassBlock block) {
-        if (processor == null && block.getFrames().count() > 0) {
-            throw new IllegalArgumentException("Signal processor needed to analyze frames");
-        } else {
-            return Stream.concat(block.getEvents(), block.getFrames().flatMap(processor::analyze));
-        }
-    }
-
     @Override
     public Values analyze(NumassBlock block, Meta config) {
-        int loChannel = config.getInt("energy.lo", 0);
-        int upChannel = config.getInt("energy.up", Integer.MAX_VALUE);
-        long count = getEventStream(block)
-                .filter(it -> it.getChanel() >= loChannel && it.getChanel() <= upChannel)
-                .count();
+        int loChannel = config.getInt("window.lo", 0);
+        int upChannel = config.getInt("window.up", Integer.MAX_VALUE);
+        long count = getEventStream(block, config).count();
         double countRate = (double) count / block.getLength().toMillis() * 1000;
         double countRateError = Math.sqrt((double) count) / block.getLength().toMillis() * 1000;
@@ -84,50 +56,4 @@ public class SimpleAnalyzer implements NumassAnalyzer {
     }
 
 
-    @Override
-    public Table analyze(NumassSet set, Meta config) {
-        TableFormat format = new TableFormatBuilder()
-                .addNumber(HV_KEY, X_VALUE_KEY)
-                .addNumber("length")
-                .addNumber("count")
-                .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
-                .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
-                .addColumn("window")
-                .addTime()
-                .build();
-
-        return new ListTable.Builder(format)
-                .rows(set.getPoints().map(point -> analyze(point, config)))
-                .build();
-    }
-
-    @Override
-    public Table getSpectrum(NumassBlock block, Meta config) {
-        TableFormat format = new TableFormatBuilder()
-                .addNumber("channel", X_VALUE_KEY)
-                .addNumber("count")
-                .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
-                .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
-                .updateMeta(metaBuilder -> metaBuilder.setNode("config", config))
-                .build();
-        NavigableMap<Short, AtomicLong> map = new TreeMap<>();
-        getEventStream(block).forEach(event -> {
-            if (map.containsKey(event.getChanel())) {
-                map.get(event.getChanel()).incrementAndGet();
-            } else {
-                map.put(event.getChanel(), new AtomicLong(1));
-            }
-        });
-        return new ListTable.Builder(format)
-                .rows(map.entrySet().stream()
-                        .map(entry ->
-                                new ValueMap(format.namesAsArray(),
-                                        entry.getKey(),
-                                        entry.getValue(),
-                                        entry.getValue().get() / block.getLength().toMillis() * 1000,
-                                        Math.sqrt(entry.getValue().get()) / block.getLength().toMillis() * 1000
-                                )
-                        )
-                ).build();
-    }
-
 }
@@ -0,0 +1,42 @@
+package inr.numass.data.analyzers;
+
+import hep.dataforge.meta.Meta;
+import hep.dataforge.values.Values;
+import inr.numass.data.api.NumassBlock;
+import inr.numass.data.api.SignalProcessor;
+import org.jetbrains.annotations.Nullable;
+
+/**
+ * An analyzer dispatcher which uses different analyzer for different meta
+ * Created by darksnake on 11.07.2017.
+ */
+public class SmartAnalyzer extends AbstractAnalyzer {
+    private SimpleAnalyzer simpleAnalyzer = new SimpleAnalyzer();
+    private DebunchAnalyzer debunchAnalyzer = new DebunchAnalyzer();
+    private TimeAnalyzer timeAnalyzer = new TimeAnalyzer();
+
+    public SmartAnalyzer(@Nullable SignalProcessor processor) {
+        super(processor);
+        this.simpleAnalyzer = new SimpleAnalyzer(processor);
+        this.debunchAnalyzer = new DebunchAnalyzer(processor);
+        this.timeAnalyzer = new TimeAnalyzer(processor);
+    }
+
+    public SmartAnalyzer() {
+    }
+
+    @Override
+    public Values analyze(NumassBlock block, Meta config) {
+        //TODO do something more... smart... using information from point if block is point
+        switch (config.getString("type", "simple")) {
+            case "simple":
+                return simpleAnalyzer.analyze(block, config);
+            case "time":
+                return timeAnalyzer.analyze(block, config);
+            case "debunch":
+                return debunchAnalyzer.analyze(block, config);
+            default:
+                throw new IllegalArgumentException("Analyzer not found");
+        }
+    }
+}
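A dispatch sketch for the class above (same assumptions about block and MetaBuilder as in the earlier sketch); the "type" value selects the delegate and defaults to "simple":

    NumassAnalyzer analyzer = new SmartAnalyzer();
    Values result = analyzer.analyze(block, new MetaBuilder("analyzer")
            .setValue("type", "simple") // "time" and "debunch" delegates are still TODO stubs
            .build());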
@@ -0,0 +1,25 @@
+package inr.numass.data.analyzers;
+
+import hep.dataforge.meta.Meta;
+import hep.dataforge.values.Values;
+import inr.numass.data.api.NumassBlock;
+import inr.numass.data.api.SignalProcessor;
+import org.jetbrains.annotations.Nullable;
+
+/**
+ * Created by darksnake on 11.07.2017.
+ */
+public class TimeAnalyzer extends AbstractAnalyzer {
+
+    public TimeAnalyzer(@Nullable SignalProcessor processor) {
+        super(processor);
+    }
+
+    public TimeAnalyzer() {
+    }
+
+    @Override
+    public Values analyze(NumassBlock block, Meta config) {
+        throw new UnsupportedOperationException("TODO");
+    }
+}
@@ -1,14 +1,52 @@
 package inr.numass.data.api;
 
 import hep.dataforge.meta.Meta;
-import hep.dataforge.tables.Table;
+import hep.dataforge.tables.*;
 import hep.dataforge.values.Values;
+import inr.numass.data.analyzers.SmartAnalyzer;
+
+import java.util.NavigableMap;
+import java.util.TreeMap;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.stream.Stream;
+
+import static hep.dataforge.tables.XYAdapter.*;
 
 /**
  * A general raw data analysis utility. Could have different implementations
  * Created by darksnake on 06-Jul-17.
  */
 public interface NumassAnalyzer {
+
+    static Table getSpectrum(NumassBlock block, Meta config) {
+        TableFormat format = new TableFormatBuilder()
+                .addNumber("channel", X_VALUE_KEY)
+                .addNumber("count")
+                .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
+                .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
+                .updateMeta(metaBuilder -> metaBuilder.setNode("config", config))
+                .build();
+        NavigableMap<Short, AtomicLong> map = new TreeMap<>();
+        new SmartAnalyzer().getEventStream(block, config).forEach(event -> {
+            if (map.containsKey(event.getChanel())) {
+                map.get(event.getChanel()).incrementAndGet();
+            } else {
+                map.put(event.getChanel(), new AtomicLong(1));
+            }
+        });
+        return new ListTable.Builder(format)
+                .rows(map.entrySet().stream()
+                        .map(entry ->
+                                new ValueMap(format.namesAsArray(),
+                                        entry.getKey(),
+                                        entry.getValue(),
+                                        entry.getValue().get() / block.getLength().toMillis() * 1000,
+                                        Math.sqrt(entry.getValue().get()) / block.getLength().toMillis() * 1000
+                                )
+                        )
+                ).build();
+    }
+
     String COUNT_RATE_KEY = "cr";
     String COUNT_RATE_ERROR_KEY = "crErr";
@@ -21,21 +59,21 @@ public interface NumassAnalyzer {
      */
     Values analyze(NumassBlock block, Meta config);
 
+    /**
+     * Return unsorted stream of events including events from frames
+     *
+     * @param block
+     * @return
+     */
+    Stream<NumassEvent> getEventStream(NumassBlock block, Meta config);
+
     /**
      * Analyze the whole set. And return results as a table
+     *
      * @param set
      * @param config
      * @return
      */
     Table analyze(NumassSet set, Meta config);
 
-    /**
-     * Generate energy spectrum for the given block
-     * @param block
-     * @param config
-     * @return
-     */
-    Table getSpectrum(NumassBlock block, Meta config);
-
-
 }
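A usage sketch for the static spectrum helper above (block and config assumed as before). One caveat worth noting: entry.getValue().get() / block.getLength().toMillis() divides two longs, so the truncation happens before the * 1000 and per-channel rates below one count per millisecond collapse to zero; a (double) cast, as used in SimpleAnalyzer.analyze, would avoid that.

    // sketch only; "block" and "config" are assumed to exist
    Table spectrum = NumassAnalyzer.getSpectrum(block, config);
    spectrum.getRows().forEach(row ->
            System.out.println(row.getDouble(NumassAnalyzer.COUNT_RATE_KEY)));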
@@ -17,31 +17,27 @@ import java.util.stream.IntStream;
 import java.util.stream.Stream;
 
 /**
+ * Protobuf based numass point
  * Created by darksnake on 09.07.2017.
  */
 public class ProtoNumassPoint implements NumassPoint {
     private final Envelope envelope;
 
-    NumassProto.Point point;
-
     public ProtoNumassPoint(Envelope envelope) {
         this.envelope = envelope;
     }
 
     private NumassProto.Point getPoint() {
-        if (point == null) {
-            try (InputStream stream = envelope.getData().getStream()) {
-                point = NumassProto.Point.parseFrom(stream);
-            } catch (IOException ex) {
-                throw new RuntimeException("Failed to read point via protbuf");
-            }
-        }
-        return point;
+        try (InputStream stream = envelope.getData().getStream()) {
+            return NumassProto.Point.parseFrom(stream);
+        } catch (IOException ex) {
+            throw new RuntimeException("Failed to read point via protobuf");
+        }
     }
 
     @Override
     public Stream<NumassBlock> getBlocks() {
-        return point.getChannelsList().stream().flatMap(channel ->
+        return getPoint().getChannelsList().stream().flatMap(channel ->
                 channel.getBlocksList().stream().map(block -> new ProtoBlock((int) channel.getNum(), block))
         );
     }
@@ -8,7 +8,6 @@ package inr.numass.scripts
 
 import hep.dataforge.grind.GrindMetaBuilder
 import hep.dataforge.meta.Meta
-import inr.numass.actions.FindBorderAction
 import inr.numass.data.storage.NumassDataLoader
 
 File dataDir = new File("D:\\Work\\Numass\\data\\2016_04\\T2_data\\Fill_2_2\\set_6_e26d123e54010000")
@@ -0,0 +1,37 @@
+package inr.numass.actions;
+
+import hep.dataforge.actions.OneToOneAction;
+import hep.dataforge.context.Context;
+import hep.dataforge.description.TypedActionDef;
+import hep.dataforge.description.ValueDef;
+import hep.dataforge.io.ColumnedDataWriter;
+import hep.dataforge.meta.Laminate;
+import hep.dataforge.tables.Table;
+import inr.numass.data.analyzers.SmartAnalyzer;
+import inr.numass.data.api.NumassAnalyzer;
+import inr.numass.data.api.NumassSet;
+
+import java.io.OutputStream;
+
+import static hep.dataforge.values.ValueType.NUMBER;
+import static hep.dataforge.values.ValueType.STRING;
+
+/**
+ * The action performs the readout of data and collection of count rate into a table
+ * Created by darksnake on 11.07.2017.
+ */
+@TypedActionDef(name = "numass.analyze", inputType = NumassSet.class, outputType = Table.class)
+@ValueDef(name = "window.lo", type = {NUMBER, STRING}, def = "0", info = "Lower bound for window")
+@ValueDef(name = "window.up", type = {NUMBER, STRING}, def = "10000", info = "Upper bound for window")
+public class AnalyzeDataAction extends OneToOneAction<NumassSet, Table> {
+    @Override
+    protected Table execute(Context context, String name, NumassSet input, Laminate inputMeta) {
+        //TODO add processor here
+        NumassAnalyzer analyzer = new SmartAnalyzer();
+        Table res = analyzer.analyze(input, inputMeta);
+        OutputStream stream = buildActionOutput(context, name);
+
+        // NOTE: the committed line was "ColumnedDataWriter.writeTable(stream, data, head)",
+        // which references undefined variables; writing the result table under the action
+        // name is an assumed fix.
+        ColumnedDataWriter.writeTable(stream, res, name);
+        return res;
+    }
+}
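The two declared window bounds are read from the action meta; a hypothetical configuration sketch (the root node name is an assumption):

    Meta actionMeta = new MetaBuilder("numass.analyze")
            .setValue("window.lo", 500)   // declared default "0"
            .setValue("window.up", 3000)  // declared default "10000"
            .build();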
@@ -1,129 +0,0 @@
-/*
- * Copyright 2015 Alexander Nozik.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package inr.numass.actions;
-
-import hep.dataforge.actions.OneToOneAction;
-import hep.dataforge.context.Context;
-import hep.dataforge.description.TypedActionDef;
-import hep.dataforge.exceptions.ContentException;
-import hep.dataforge.io.ColumnedDataWriter;
-import hep.dataforge.meta.Laminate;
-import hep.dataforge.tables.ListTable;
-import hep.dataforge.tables.Table;
-import hep.dataforge.tables.ValueMap;
-import hep.dataforge.values.Value;
-import org.apache.commons.math3.analysis.UnivariateFunction;
-
-import java.io.OutputStream;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-/**
- * @author Darksnake
- */
-@TypedActionDef(name = "findBorder", inputType = NMFile.class, outputType = Table.class)
-public class FindBorderAction extends OneToOneAction<NumassData, Table> {
-
-    private final static String[] names = {"U", "80%", "90%", "95%", "99%"};
-    private final static double[] percents = {0.8, 0.9, 0.95, 0.99};
-
-    private UnivariateFunction normCorrection = e -> 1 + 13.265 * Math.exp(-e / 2343.4);
-
-    @Override
-    protected Table execute(Context context, String name, NumassData source, Laminate meta) throws ContentException {
-        report(context, name, "File {} started", source.getName());
-
-        int upperBorder = meta.getInt("upper", 4094);
-        int lowerBorder = meta.getInt("lower", 0);
-        double substractReference = meta.getDouble("reference", 0);
-
-        NumassPoint referencePoint = null;
-        if (substractReference > 0) {
-            referencePoint = source.getByVoltage(substractReference);
-            if (referencePoint == null) {
-                report(context, name, "Reference point {} not found", substractReference);
-            }
-        }
-
-        ListTable.Builder dataBuilder = new ListTable.Builder(names);
-
-        fill(dataBuilder, source, lowerBorder, upperBorder, referencePoint);
-        Table bData = dataBuilder.build();
-
-        OutputStream stream = buildActionOutput(context, name);
-
-        ColumnedDataWriter.writeTable(stream, bData, String.format("%s : lower = %d upper = %d", name, lowerBorder, upperBorder));
-
-        report(context, name, "File {} completed", source.getName());
-        return bData;
-    }
-
-    private double getNorm(Map<Double, Double> spectrum, int lower, int upper) {
-        double res = 0;
-        for (Map.Entry<Double, Double> entry : spectrum.entrySet()) {
-            if ((entry.getKey() >= lower) && (entry.getKey() <= upper)) {
-                res += entry.getValue();
-            }
-        }
-        return res;
-    }
-
-    private void fill(ListTable.Builder dataBuilder, NumassData file, int lower, int upper, NumassPoint reference) {
-        for (NumassPoint point : file) {
-            if ((reference != null) && (point.getVoltage() == reference.getVoltage())) {
-                continue;
-            }
-            // create the skeleton for the future point
-            HashMap<String, Value> map = new HashMap<>();
-            map.put(names[0], Value.of(point.getVoltage()));
-            Map<Double, Double> spectrum;
-            if (reference != null) {
-
-                Map<Double, Double> sp = point.getMap(0, true);
-                Map<Double, Double> referenceSpectrum = reference.getMap(0, true);
-
-                spectrum = sp.entrySet().stream()
-                        .collect(Collectors.toMap(entry -> entry.getKey(), entry -> Math.max(entry.getValue() - referenceSpectrum.get(entry.getKey()), 0)));
-            } else {
-                spectrum = point.getMap(0, true);
-            }
-            double norm = getNorm(spectrum, lower, upper) * normCorrection.value(point.getVoltage());
-            double counter = 0;
-            int chanel = upper;
-            while (chanel > lower) {
-                chanel--;
-                counter += spectrum.get((double) chanel);
-                for (int i = 0; i < percents.length; i++) {
-                    if (counter / norm > percents[i]) {
-                        if (!map.containsKey(names[i + 1])) {
-                            map.put(names[i + 1], Value.of(chanel));
-                        }
-                    }
-                }
-            }
-            for (String n : names) {
-                if (!map.containsKey(n)) {
-                    map.put(n, Value.of(lower));
-                }
-            }
-
-            dataBuilder.row(new ValueMap(map));
-        }
-    }
-
-}
@@ -1,37 +0,0 @@
-package inr.numass.actions;
-
-import hep.dataforge.actions.ManyToOneAction;
-import hep.dataforge.context.Context;
-import hep.dataforge.description.TypedActionDef;
-import hep.dataforge.meta.Laminate;
-
-import java.util.Collection;
-import java.util.Map;
-import java.util.stream.IntStream;
-
-/**
- * Created by darksnake on 04-Nov-16.
- */
-@TypedActionDef(name = "joinData", inputType = NumassData.class, outputType = NumassData.class,
-        info = "Join a number of numass data files into one single file via spectrum summing")
-public class JoinNumassDataAction extends ManyToOneAction<NumassData, NumassData> {
-
-    @Override
-    protected NumassData execute(Context context, String nodeName, Map<String, NumassData> input, Laminate meta) {
-        throw new UnsupportedOperationException("not implemented");
-    }
-
-    private NumassPoint joinPoint(Collection<NumassPoint> points) {
-        return points.stream().reduce((p1, p2) -> {
-            if (p1.getVoltage() != p2.getVoltage()) {
-                throw new RuntimeException("Can't sum points with different Uset");
-            }
-            return new NumassPointImpl(
-                    (p1.getVoltage() + p2.getVoltage()) / 2,
-                    p1.getStartTime(),
-                    p1.getLength() + p2.getLength(),
-                    IntStream.range(0, p1.getSpectrum().length).map(i -> p1.getSpectrum()[i] * p2.getSpectrum()[i]).toArray()
-            );
-        }).get();
-    }
-}
@@ -161,7 +161,7 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
             res.add(curPoint);
         });
 
-        return new ListTable(TableFormat.forNames(parnames), res);
+        return new ListTable(MetaTableFormat.forNames(parnames), res);
 
     }
 
@@ -25,14 +25,10 @@ import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.io.XMLMetaWriter;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
-import hep.dataforge.tables.ListTable;
-import hep.dataforge.tables.Table;
-import hep.dataforge.tables.TableFormat;
-import hep.dataforge.tables.ValueMap;
+import hep.dataforge.tables.*;
 import hep.dataforge.values.Values;
-import inr.numass.data.NumassPoint;
+import inr.numass.data.api.NumassPoint;
 import inr.numass.data.api.NumassSet;
-import inr.numass.data.storage.NumassDataLoader;
 import inr.numass.debunch.DebunchReport;
 import inr.numass.debunch.FrameAnalizer;
 import inr.numass.utils.ExpressionUtils;
@@ -77,7 +73,7 @@ public class PrepareDataAction extends OneToOneAction<NumassSet, Table> {
     protected ListTable execute(Context context, String name, NumassSet dataFile, Laminate meta) {
         // log.report("File %s started", dataFile.getName());
 
-        int upper = meta.getInt("upperWindow", RawNMPoint.MAX_CHANEL - 1);
+        int upper = meta.getInt("upperWindow", Integer.MAX_VALUE);
 
         List<Correction> corrections = new ArrayList<>();
         if (meta.hasValue("deadTime")) {
@@ -107,16 +103,16 @@ public class PrepareDataAction extends OneToOneAction<NumassSet, Table> {
             utransform = Function.identity();
         }
 
-        if (meta.hasMeta("debunch")) {
-            if (dataFile instanceof NumassDataLoader) {
-                dataFile = ((NumassDataLoader) dataFile).applyRawTransformation(raw -> debunch(context, raw, meta.getMeta("debunch")));
-            } else {
-                throw new RuntimeException("Debunch not available");
-            }
-        }
+//        if (meta.hasMeta("debunch")) {
+//            if (dataFile instanceof NumassDataLoader) {
+//                dataFile = ((NumassDataLoader) dataFile).applyRawTransformation(raw -> debunch(context, raw, meta.getMeta("debunch")));
+//            } else {
+//                throw new RuntimeException("Debunch not available");
+//            }
+//        }
 
         List<Values> dataList = new ArrayList<>();
-        for (NumassPoint point : dataFile) {
+        dataFile.getPoints().forEach(point -> {
 
             long total = point.getTotalCount();
             double uset = utransform.apply(point.getVoltage());
@@ -148,15 +144,15 @@ public class PrepareDataAction extends OneToOneAction<NumassSet, Table> {
             Instant timestamp = point.getStartTime();
 
             dataList.add(new ValueMap(parnames, new Object[]{uset, uread, time, total, wind, correctionFactor, cr, crErr, timestamp}));
-        }
+        });
 
         TableFormat format;
 
         if (!dataList.isEmpty()) {
             // generate the format automatically from the first row
-            format = TableFormat.forPoint(dataList.get(0));
+            format = MetaTableFormat.forPoint(dataList.get(0));
         } else {
-            format = TableFormat.forNames(parnames);
+            format = MetaTableFormat.forNames(parnames);
         }
 
         String head;
@@ -1,63 +0,0 @@
-/*
- * Copyright 2015 Alexander Nozik.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package inr.numass.actions;
-
-import hep.dataforge.actions.OneToOneAction;
-import hep.dataforge.context.Context;
-import hep.dataforge.data.binary.Binary;
-import hep.dataforge.description.NodeDef;
-import hep.dataforge.description.TypedActionDef;
-import hep.dataforge.description.ValueDef;
-import hep.dataforge.exceptions.ContentException;
-import hep.dataforge.meta.Laminate;
-
-import static hep.dataforge.values.ValueType.NUMBER;
-import static inr.numass.NumassIO.getNumassData;
-
-/**
- *
- * @author Darksnake
- */
-@TypedActionDef(name = "readData",
-        inputType = Binary.class, outputType = NMFile.class, info = "Read binary numass data file")
-@ValueDef(name = "fileName", info = "The name of the file. By default equals file name.")
-@ValueDef(name = "HVdev", info = "Divider for HV measurements. Should be set to 1.0 for numass data 2014",
-        def = "2.468555393226049", type = {NUMBER})
-@ValueDef(name = "noUset", info = "If 'true', then Uset = Uread")
-@NodeDef(name = "debunch", target = "class::inr.numass.actions.DebunchAction", info = "If given, governs debunching")
-public class ReadLegacyDataAction extends OneToOneAction<Binary, NMFile> {
-
-    @Override
-    protected NMFile execute(Context context, String name, Binary source, Laminate meta) throws ContentException {
-        // log.logString("File '%s' started", source.getName());
-        RawNMFile raw = getNumassData(source, meta);
-        // if (meta.getBoolean("paw", false)) {
-        //     raw.generatePAW(buildActionOutput(context, name + ".paw"));
-        // }
-
-        if (meta.hasMeta("debunch")) {
-            DebunchAction debunch = new DebunchAction();
-            Laminate laminate = new Laminate(meta.getMeta("debunch"))
-                    .setDescriptor(debunch.getDescriptor());
-            raw = debunch.execute(context, name, raw, laminate);
-        }
-
-        NMFile result = new NMFile(raw);
-
-        return result;
-    }
-
-}
@@ -19,6 +19,7 @@ import hep.dataforge.plots.data.XYPlottable;
 import hep.dataforge.tables.*;
 import hep.dataforge.values.ValueType;
 import hep.dataforge.values.Values;
+import inr.numass.data.api.NumassSet;
 
 import java.io.OutputStream;
 import java.util.*;
@@ -28,19 +29,14 @@ import java.util.stream.Collectors;
  *
  * @author Alexander Nozik
  */
-@TypedActionDef(inputType = NumassData.class, outputType = Table.class, name = "energySpectrum", info = "Generate output table and optionally plot for detector energy spectra")
-public class ShowEnergySpectrumAction extends OneToOneAction<NumassData, Table> {
+@TypedActionDef(inputType = NumassSet.class, outputType = Table.class, name = "energySpectrum", info = "Generate output table and optionally plot for detector energy spectra")
+public class ShowEnergySpectrumAction extends OneToOneAction<NumassSet, Table> {
 
     @Override
-    protected Table execute(Context context, String name, NumassData input, Laminate inputMeta) {
+    protected Table execute(Context context, String name, NumassSet input, Laminate inputMeta) {
         int binning = inputMeta.getInt("binning", 20);
         boolean normalize = inputMeta.getBoolean("normalize", true);
-        List<NumassPoint> points = input.getNMPoints();
-
-        if (points.isEmpty()) {
-            getLogger(inputMeta).error("Empty data");
-            return null;
-        }
-
         //build header
         List<String> names = new ArrayList<>();
@@ -34,9 +34,9 @@ public class SubstractSpectrumAction extends OneToOneAction<Table, Table> {
         File referenceFile = context.io().getFile(referencePath);
         Table referenceTable = new ColumnedDataReader(referenceFile).toTable();
         ListTable.Builder builder = new ListTable.Builder(input.getFormat());
-        input.stream().forEach(point -> {
+        input.getRows().forEach(point -> {
             ValueMap.Builder pointBuilder = new ValueMap.Builder(point);
-            Optional<Values> referencePoint = referenceTable.stream()
+            Optional<Values> referencePoint = referenceTable.getRows()
                     .filter(p -> Math.abs(p.getDouble("Uset") - point.getDouble("Uset")) < 0.1).findFirst();
             if (referencePoint.isPresent()) {
                 pointBuilder.putValue("CR", Math.max(0, point.getDouble("CR") - referencePoint.get().getDouble("CR")));
@@ -26,8 +26,8 @@ import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.stat.fit.FitState;
 import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.MetaTableFormat;
 import hep.dataforge.tables.Table;
-import hep.dataforge.tables.TableFormat;
 import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Value;
 import hep.dataforge.values.Values;
@@ -75,7 +75,7 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
         }
         names[names.length - 1] = "chi2";
 
-        ListTable.Builder res = new ListTable.Builder(TableFormat.forNames(names));
+        ListTable.Builder res = new ListTable.Builder(MetaTableFormat.forNames(names));
 
         double[] weights = new double[parNames.length];
         Arrays.fill(weights, 0);
@@ -0,0 +1,114 @@
+package inr.numass.actions;
+
+import hep.dataforge.actions.OneToOneAction;
+import hep.dataforge.context.Context;
+import hep.dataforge.description.ValueDef;
+import hep.dataforge.meta.Laminate;
+import hep.dataforge.meta.Meta;
+import hep.dataforge.tables.ListTable; // added here for the assumed completion of execute() below
+import hep.dataforge.tables.Table;
+import hep.dataforge.tables.ValueMap;  // added here for the assumed completion of execute() below
+import hep.dataforge.values.Values;
+import inr.numass.utils.ExpressionUtils;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.function.UnaryOperator;
+import java.util.stream.Collectors;
+
+import static hep.dataforge.values.ValueType.NUMBER;
+import static hep.dataforge.values.ValueType.STRING;
+import static inr.numass.utils.TritiumUtils.pointExpression;
+
+/**
+ * Apply corrections and transformations to analyzed data
+ * Created by darksnake on 11.07.2017.
+ */
+public class TransformDataAction extends OneToOneAction<Table, Table> {
+    @Override
+    protected Table execute(Context context, String name, Table input, Laminate meta) {
+        UnaryOperator<Values> transformation = UnaryOperator.identity();
+
+        List<Correction> corrections = new ArrayList<>();
+        if (meta.optMeta("correction").isPresent()) {
+            corrections.addAll(meta.getMetaList("correction").stream()
+                    .map((Function<Meta, Correction>) this::makeCorrection)
+                    .collect(Collectors.toList()));
+        }
+
+        if (meta.hasValue("correction")) {
+            final String correction = meta.getString("correction");
+            corrections.add((point) -> pointExpression(correction, point));
+        }
+
+        Function<Double, Double> utransform;
+        if (meta.hasValue("utransform")) {
+            String func = meta.getString("utransform");
+            utransform = u -> {
+                Map<String, Object> binding = new HashMap<>();
+                binding.put("U", u);
+                return ExpressionUtils.function(func, binding);
+            };
+        } else {
+            utransform = Function.identity();
+        }
+
+        // NOTE: as committed, the method ended here without a return statement and
+        // without using the corrections. The block below is an assumed completion:
+        // multiply the count rate by the product of all corrections and apply
+        // utransform to the "Uset" column.
+        ListTable.Builder builder = new ListTable.Builder(input.getFormat());
+        input.getRows().forEach(point -> {
+            double factor = corrections.stream()
+                    .mapToDouble(cor -> cor.corr(point))
+                    .reduce(1, (d1, d2) -> d1 * d2);
+            ValueMap.Builder pointBuilder = new ValueMap.Builder(point);
+            pointBuilder.putValue("Uset", utransform.apply(point.getDouble("Uset")));
+            pointBuilder.putValue("cr", point.getDouble("cr") * factor);
+            builder.row(pointBuilder.build());
+        });
+        return builder.build();
+    }
+
+    @ValueDef(name = "value", type = {NUMBER, STRING}, info = "Value or function to multiply count rate")
+    @ValueDef(name = "err", type = {NUMBER, STRING}, info = "error of the value")
+    private Correction makeCorrection(Meta corrMeta) {
+        final String expr = corrMeta.getString("value");
+        final String errExpr = corrMeta.getString("err", "");
+        return new Correction() {
+            @Override
+            public double corr(Values point) {
+                return pointExpression(expr, point);
+            }
+
+            @Override
+            public double corrErr(Values point) {
+                if (errExpr.isEmpty()) {
+                    return 0;
+                } else {
+                    return pointExpression(errExpr, point);
+                }
+            }
+        };
+    }
+
+    private interface Correction {
+        /**
+         * correction coefficient
+         *
+         * @param point
+         * @return
+         */
+        double corr(Values point);
+
+        /**
+         * correction coefficient uncertainty
+         *
+         * @param point
+         * @return
+         */
+        default double corrErr(Values point) {
+            return 0;
+        }
+
+        default double relativeErr(Values point) {
+            double corrErr = corrErr(point);
+            if (corrErr == 0) {
+                return 0;
+            } else {
+                return corrErr / corr(point);
+            }
+        }
+    }
+}
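A hypothetical "correction" node as consumed by makeCorrection above; the value and err expressions are evaluated per table row by pointExpression, so any column name (plus the T and U aliases) may appear in them:

    Meta correction = new MetaBuilder("correction")
            .setValue("value", "1 + 0.01*U") // multiplicative count-rate correction
            .setValue("err", "0.001")        // optional uncertainty of the factor
            .build();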
@@ -54,7 +54,7 @@ public class NamedSpectrumCaching extends AbstractParametricFunction {
         this.a = a;
         this.b = b;
         this.source = spectrum;
-        spectrumDerivCache = new HashMap<>(source.size());
+        spectrumDerivCache = new HashMap<>(source.getNames().size());
 //        spectrumDerivCache = new CacheElement[source.getDimension()];
     }
 
@@ -83,9 +83,9 @@ public class NumassSubstractEmptySourceTask extends AbstractTask<Table> {
 
     private Table subtract(Table merge, Table empty) {
         ListTable.Builder builder = new ListTable.Builder(merge.getFormat());
-        merge.stream().forEach(point -> {
+        merge.getRows().forEach(point -> {
             ValueMap.Builder pointBuilder = new ValueMap.Builder(point);
-            Optional<Values> referencePoint = empty.stream()
+            Optional<Values> referencePoint = empty.getRows()
                     .filter(p -> Math.abs(p.getDouble("Uset") - point.getDouble("Uset")) < 0.1).findFirst();
             if (referencePoint.isPresent()) {
                 pointBuilder.putValue("CR", Math.max(0, point.getDouble("CR") - referencePoint.get().getDouble("CR")));
@@ -73,7 +73,7 @@ public class NumassTableFilterTask extends SingleActionTask<Table, Table> {
         for (String field : dp.getNames()) {
             Value val = dp.getValue(field);
             Object obj;
-            switch (val.valueType()) {
+            switch (val.getType()) {
                 case BOOLEAN:
                     obj = val.booleanValue();
                     break;
@@ -15,6 +15,7 @@
  */
 package inr.numass.utils;
 
+import hep.dataforge.values.Values;
 import org.apache.commons.math3.analysis.UnivariateFunction;
 
 import java.util.HashMap;
@@ -68,12 +69,14 @@ public class TritiumUtils {
      * @param point
      * @return
      */
-    public static double pointExpression(String expression, NumassPoint point) {
+    public static double pointExpression(String expression, Values point) {
         Map<String, Object> exprParams = new HashMap<>();
-        exprParams.put("T", point.getLength());
-        exprParams.put("U", point.getVoltage());
-        exprParams.put("cr", ((double) point.getTotalCount()) / point.getLength());
-        exprParams.put("point", point);
+        // adding all point values to expression parameters
+        point.getNames().forEach(name -> exprParams.put(name, point.getValue(name).value()));
+        // adding aliases for commonly used parameters
+        exprParams.put("T", point.getDouble("length"));
+        exprParams.put("U", point.getDouble("voltage"));
+
         return ExpressionUtils.function(expression, exprParams);
     }
 }
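A usage sketch of the reworked binding (assuming row is a Values from an analyzed table carrying "length", "voltage" and "cr" columns): every column is exposed under its own name, with T and U as aliases.

    double factor = TritiumUtils.pointExpression("cr * T / 1000 + 0.01 * U", row);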
@@ -20,7 +20,7 @@ public class HandlerUtils {
         for (String state : states.getStateNames()) {
             Value val = states.getValue(state);
             String color;
-            switch (val.valueType()) {
+            switch (val.getType()) {
                 case NUMBER:
                     color = "blue";
                     break;