Revision of numass data architecture
parent 626cac31a4
commit acae9aae4c
File diff suppressed because it is too large
@@ -1,17 +1,11 @@
package inr.numass.data;

import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.Table;
import hep.dataforge.values.Values;
import hep.dataforge.meta.Meta;
import hep.dataforge.meta.MetaBuilder;
import inr.numass.data.api.NumassPoint;
import inr.numass.data.api.NumassSet;

import java.time.Instant;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
@@ -19,118 +13,139 @@ import java.util.stream.Stream;
 */
public class NumassDataUtils {

    public static Collection<NumassPoint> joinSpectra(Stream<NumassSet> spectra) {
        Map<Double, NumassPoint> map = new LinkedHashMap<>();
        spectra.forEach(datum -> {
            datum.forEach(point -> {
                double uset = point.getVoltage();
                if (map.containsKey(uset)) {
                    map.put(uset, join(point, map.get(uset)));
                } else {
                    map.put(uset, point);
                }
            });
        });
        return map.values();
    public static NumassSet join(String name, Collection<NumassSet> sets) {
        return new NumassSet() {
            @Override
            public Stream<NumassPoint> getPoints() {
                return sets.stream().flatMap(set -> getPoints());
            }

    /**
     * Spectral sum of two points
     *
     * @param first
     * @param second
     * @return
     */
    public static NumassPoint join(NumassPoint first, NumassPoint second) {
        if (first.getVoltage() != second.getVoltage()) {
            throw new RuntimeException("Voltage mismatch");
        }
        int[] newArray = new int[first.getSpectrum().length];
        Arrays.setAll(newArray, i -> first.getSpectrum()[i] + second.getSpectrum()[i]);
        return new NumassPointImpl(
                first.getVoltage(),
                Instant.EPOCH,
                first.getLength() + second.getLength(),
                newArray
        );
            @Override
            public Meta meta() {
                MetaBuilder metaBuilder = new MetaBuilder("meta");
                sets.forEach(set -> metaBuilder.putNode(set.getName(), set.meta()));
                return metaBuilder;
            }

    public static NumassPoint substractPoint(NumassPoint point, NumassPoint reference) {
        int[] array = new int[point.getSpectrum().length];
        Arrays.setAll(array, i -> Math.max(0, point.getSpectrum()[i] - reference.getSpectrum()[i]));
        return new NumassPointImpl(
                point.getVoltage(),
                point.getStartTime(),
                point.getLength(),
                array
        );
            @Override
            public String getName() {
                return name;
            }
        };
    }

    public static Collection<NumassPoint> substractReferencePoint(Collection<NumassPoint> points, double uset) {
        NumassPoint reference = points.stream().filter(it -> it.getVoltage() == uset).findFirst()
                .orElseThrow(() -> new RuntimeException("Reference point not found"));
        return points.stream().map(it -> substractPoint(it, reference)).collect(Collectors.toList());
    }

    /**
     * High voltage scale correction.
     *
     * @param data
     * @param beta
     * @return
     */
    public static Table setHVScale(ListTable data, double beta) {
        SpectrumDataAdapter reader = adapter();
        ListTable.Builder res = new ListTable.Builder(data.getFormat());
        for (Values dp : data) {
            double corrFactor = 1 + beta;
            res.row(reader.buildSpectrumDataPoint(reader.getX(dp).doubleValue() * corrFactor, reader.getCount(dp), reader.getTime(dp)));
        }
        return res.build();
    }

    public static SpectrumDataAdapter adapter() {
        return new SpectrumDataAdapter("Uset", "CR", "CRerr", "Time");
    }

    public static Table correctForDeadTime(ListTable data, double dtime) {
        return correctForDeadTime(data, adapter(), dtime);
    }

    /**
     * Dead time correction, with the dead time given in seconds.
     *
     * @param data
     * @param dtime
     * @return
     */
    public static Table correctForDeadTime(ListTable data, SpectrumDataAdapter adapter, double dtime) {
//        SpectrumDataAdapter adapter = adapter();
        ListTable.Builder res = new ListTable.Builder(data.getFormat());
        for (Values dp : data) {
            double corrFactor = 1 / (1 - dtime * adapter.getCount(dp) / adapter.getTime(dp));
            res.row(adapter.buildSpectrumDataPoint(adapter.getX(dp).doubleValue(), (long) (adapter.getCount(dp) * corrFactor), adapter.getTime(dp)));
        }
        return res.build();
    }

    public static double countRateWithDeadTime(NumassPoint p, int from, int to, double deadTime) {
        double wind = p.getCountInWindow(from, to) / p.getLength();
        double res;
        if (deadTime > 0) {
            double total = p.getTotalCount();
//            double time = p.getLength();
//            res = wind / (1 - total * deadTime / time);
            double timeRatio = deadTime / p.getLength();
            res = wind / total * (1d - Math.sqrt(1d - 4d * total * timeRatio)) / 2d / timeRatio;
        } else {
            res = wind;
        }
        return res;
    }

    public static double countRateWithDeadTimeErr(NumassPoint p, int from, int to, double deadTime) {
        return Math.sqrt(countRateWithDeadTime(p, from, to, deadTime) / p.getLength());
    }
//    public static Collection<NumassPoint> joinSpectra(Stream<NumassSet> spectra) {
//        Map<Double, NumassPoint> map = new LinkedHashMap<>();
//        spectra.forEach(datum -> {
//            datum.forEach(point -> {
//                double uset = point.getVoltage();
//                if (map.containsKey(uset)) {
//                    map.put(uset, join(point, map.get(uset)));
//                } else {
//                    map.put(uset, point);
//                }
//            });
//        });
//        return map.values();
//    }
//
//    /**
//     * Spectral sum of two points
//     *
//     * @param first
//     * @param second
//     * @return
//     */
//    public static NumassPoint join(NumassPoint first, NumassPoint second) {
//        if (first.getVoltage() != second.getVoltage()) {
//            throw new RuntimeException("Voltage mismatch");
//        }
//        int[] newArray = new int[first.getSpectrum().length];
//        Arrays.setAll(newArray, i -> first.getSpectrum()[i] + second.getSpectrum()[i]);
//        return new NumassPointImpl(
//                first.getVoltage(),
//                Instant.EPOCH,
//                first.getLength() + second.getLength(),
//                newArray
//        );
//    }
//
//    public static NumassPoint substractPoint(NumassPoint point, NumassPoint reference) {
//        int[] array = new int[point.getSpectrum().length];
//        Arrays.setAll(array, i -> Math.max(0, point.getSpectrum()[i] - reference.getSpectrum()[i]));
//        return new NumassPointImpl(
//                point.getVoltage(),
//                point.getStartTime(),
//                point.getLength(),
//                array
//        );
//    }
//
//    public static Collection<NumassPoint> substractReferencePoint(Collection<NumassPoint> points, double uset) {
//        NumassPoint reference = points.stream().filter(it -> it.getVoltage() == uset).findFirst()
//                .orElseThrow(() -> new RuntimeException("Reference point not found"));
//        return points.stream().map(it -> substractPoint(it, reference)).collect(Collectors.toList());
//    }
//
//
//    /**
//     * High voltage scale correction.
//     *
//     * @param data
//     * @param beta
//     * @return
//     */
//    public static Table setHVScale(ListTable data, double beta) {
//        SpectrumDataAdapter reader = adapter();
//        ListTable.Builder res = new ListTable.Builder(data.getFormat());
//        for (Values dp : data) {
//            double corrFactor = 1 + beta;
//            res.row(reader.buildSpectrumDataPoint(reader.getX(dp).doubleValue() * corrFactor, reader.getCount(dp), reader.getTime(dp)));
//        }
//        return res.build();
//    }
//
//    public static SpectrumDataAdapter adapter() {
//        return new SpectrumDataAdapter("Uset", "CR", "CRerr", "Time");
//    }
//
//    public static Table correctForDeadTime(ListTable data, double dtime) {
//        return correctForDeadTime(data, adapter(), dtime);
//    }
//
//    /**
//     * Dead time correction, with the dead time given in seconds.
//     *
//     * @param data
//     * @param dtime
//     * @return
//     */
//    public static Table correctForDeadTime(ListTable data, SpectrumDataAdapter adapter, double dtime) {
////        SpectrumDataAdapter adapter = adapter();
//        ListTable.Builder res = new ListTable.Builder(data.getFormat());
//        for (Values dp : data) {
//            double corrFactor = 1 / (1 - dtime * adapter.getCount(dp) / adapter.getTime(dp));
//            res.row(adapter.buildSpectrumDataPoint(adapter.getX(dp).doubleValue(), (long) (adapter.getCount(dp) * corrFactor), adapter.getTime(dp)));
//        }
//        return res.build();
//    }
//
//    public static double countRateWithDeadTime(NumassPoint p, int from, int to, double deadTime) {
//        double wind = p.getCountInWindow(from, to) / p.getLength();
//        double res;
//        if (deadTime > 0) {
//            double total = p.getTotalCount();
////            double time = p.getLength();
////            res = wind / (1 - total * deadTime / time);
//            double timeRatio = deadTime / p.getLength();
//            res = wind / total * (1d - Math.sqrt(1d - 4d * total * timeRatio)) / 2d / timeRatio;
//        } else {
//            res = wind;
//        }
//        return res;
//    }
//
//    public static double countRateWithDeadTimeErr(NumassPoint p, int from, int to, double deadTime) {
//        return Math.sqrt(countRateWithDeadTime(p, from, to, deadTime) / p.getLength());
//    }
}
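The quadratic expression in countRateWithDeadTime above appears to invert the approximate dead-time relation N ≈ n - n^2 * (tau / T) between the registered count N and the true count n for a point of length T and dead time tau, and then rescales the windowed rate by n / N. A standalone numerical sketch of that inversion (not part of the commit; names and numbers are illustrative):

// Sketch, not from the commit: invert N = n - n^2 * (deadTime / length)
// and scale the windowed rate by the resulting correction factor n / N.
public final class DeadTimeSketch {
    static double correctedRate(double windowRate, double totalCount, double length, double deadTime) {
        if (deadTime <= 0) {
            return windowRate;
        }
        double timeRatio = deadTime / length;
        // Smaller root of timeRatio * n^2 - n + totalCount = 0: the estimated true count.
        double trueCount = (1d - Math.sqrt(1d - 4d * totalCount * timeRatio)) / (2d * timeRatio);
        return windowRate * trueCount / totalCount;
    }

    public static void main(String[] args) {
        // 1e5 registered counts in 10 s with a 7 us dead time give roughly an 8 percent correction.
        System.out.println(correctedRate(1000.0, 1e5, 10.0, 7e-6));
    }
}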
@@ -1,58 +0,0 @@
package inr.numass.data;

import inr.numass.data.api.NumassPoint;
import org.jetbrains.annotations.NotNull;

import java.io.IOException;
import java.io.InputStream;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.IntStream;

/**
 * Created by darksnake on 13-Apr-17.
 */
public class PointBuilders {
    public static NumassPoint readProtoPoint(double u, Instant startTime, double pointLength, InputStream stream, Function<NumassProto.Point.Channel.Block.Event, Integer> peakFinder) throws IOException {
        NumassProto.Point point = NumassProto.Point.parseFrom(stream);
        NumassProto.Point.Channel ch = point.getChannels(0);
        int[] spectrum = count(ch.getBlocksList().stream()
                        .flatMapToInt(block -> IntStream.concat(
                                block.getPeaks().getAmplitudesList()
                                        .stream().mapToInt(Long::intValue),
                                block.getEventsList().stream()
                                        .mapToInt(peakFinder::apply)
                        )), 0
        );

        return new NumassPointImpl(u, startTime, pointLength, spectrum);
    }

    private static int[] calculateSpectrum(RawNMPoint point) {
        assert point.getEventsCount() > 0;
        return count(point.getEvents().stream().mapToInt(event -> event.getChanel()), RawNMPoint.MAX_CHANEL);
    }

    @NotNull
    public static NumassPoint readRawPoint(@NotNull RawNMPoint point) {
        return new NumassPointImpl(point.getUset(), point.getStartTime(), point.getLength(), calculateSpectrum(point));
    }

    private static int[] count(IntStream stream, int maxChannel) {
        List<AtomicInteger> list = new ArrayList<>();
        while (list.size() <= maxChannel) {
            list.add(new AtomicInteger(0));
        }

        stream.forEach(i -> {
            while (list.size() <= i) {
                list.add(new AtomicInteger(0));
            }
            list.get(i).incrementAndGet();
        });
        return list.stream().mapToInt(AtomicInteger::get).toArray();
    }
}
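The removed count() helper above grows a List<AtomicInteger> lazily as higher channels appear. An equivalent single-pass accumulation into a fixed-size histogram (a sketch, not part of the commit; it clamps channels above maxChannel instead of growing the storage) could look like:

// Sketch, not from the commit: per-channel counting into a fixed-size histogram.
static int[] countSketch(java.util.stream.IntStream channels, int maxChannel) {
    int[] spectrum = new int[maxChannel + 1];
    channels.forEach(channel -> spectrum[Math.min(channel, maxChannel)]++);
    return spectrum;
}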
@@ -1,14 +1,14 @@
package inr.numass.data.analyzers;

import hep.dataforge.meta.Meta;
import hep.dataforge.tables.Table;
import hep.dataforge.tables.TableFormat;
import hep.dataforge.tables.TableFormatBuilder;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.tables.*;
import hep.dataforge.values.Values;
import inr.numass.data.api.*;
import org.jetbrains.annotations.Nullable;

import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Stream;

import static hep.dataforge.tables.XYAdapter.*;
@@ -20,6 +20,7 @@ import static inr.numass.data.api.NumassPoint.HV_KEY;
 */
public class SimpleAnalyzer implements NumassAnalyzer {
    public static String[] NAME_LIST = {"length", "count", COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, "window", "timestamp"};
    public static String[] NAME_LIST_WITH_HV = {HV_KEY, "length", "count", COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, "window", "timestamp"};

    @Nullable
    private final SignalProcessor processor;
@@ -38,7 +39,7 @@ public class SimpleAnalyzer implements NumassAnalyzer {
     * @param block
     * @return
     */
    private Stream<NumassEvent> getEventStream(NumassBlock block) {
    protected Stream<NumassEvent> getEventStream(NumassBlock block) {
        if (processor == null && block.getFrames().count() > 0) {
            throw new IllegalArgumentException("Signal processor needed to analyze frames");
        } else {
@@ -48,7 +49,6 @@ public class SimpleAnalyzer implements NumassAnalyzer {

    @Override
    public Values analyze(NumassBlock block, Meta config) {

        int loChannel = config.getInt("energy.lo", 0);
        int upChannel = config.getInt("energy.up", Integer.MAX_VALUE);
        long count = getEventStream(block)
@@ -57,6 +57,19 @@ public class SimpleAnalyzer implements NumassAnalyzer {
        double countRate = (double) count / block.getLength().toMillis() * 1000;
        double countRateError = Math.sqrt((double) count) / block.getLength().toMillis() * 1000;

        if (block instanceof NumassPoint) {
            return new ValueMap(NAME_LIST_WITH_HV,
                    new Object[]{
                            ((NumassPoint) block).getVoltage(),
                            block.getLength().toNanos(),
                            count,
                            countRate,
                            countRateError,
                            new int[]{loChannel, upChannel},
                            block.getStartTime()
                    }
            );
        } else {
            return new ValueMap(NAME_LIST,
                    new Object[]{
                            block.getLength().toNanos(),
@@ -68,6 +81,8 @@ public class SimpleAnalyzer implements NumassAnalyzer {
                    }
            );
        }
    }


    @Override
    public Table analyze(NumassSet set, Meta config) {
@@ -81,6 +96,38 @@ public class SimpleAnalyzer implements NumassAnalyzer {
                .addTime()
                .build();

        return new ListTable.Builder(format)
                .rows(set.getPoints().map(point -> analyze(point, config)))
                .build();
    }

    @Override
    public Table getSpectrum(NumassBlock block, Meta config) {
        TableFormat format = new TableFormatBuilder()
                .addNumber("channel", X_VALUE_KEY)
                .addNumber("count")
                .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
                .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
                .updateMeta(metaBuilder -> metaBuilder.setNode("config", config))
                .build();
        NavigableMap<Short, AtomicLong> map = new TreeMap<>();
        getEventStream(block).forEach(event -> {
            if (map.containsKey(event.getChanel())) {
                map.get(event.getChanel()).incrementAndGet();
            } else {
                map.put(event.getChanel(), new AtomicLong(1));
            }
        });
        return new ListTable.Builder(format)
                .rows(map.entrySet().stream()
                        .map(entry ->
                                new ValueMap(format.namesAsArray(),
                                        entry.getKey(),
                                        entry.getValue(),
                                        entry.getValue().get() / block.getLength().toMillis() * 1000,
                                        Math.sqrt(entry.getValue().get()) / block.getLength().toMillis() * 1000
                                )
                        )
                ).build();
    }
}
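getSpectrum above accumulates per-channel counts with a containsKey/put check. An equivalent accumulation through Map.computeIfAbsent (a sketch, not part of the commit, reusing the same java.util types imported by the class) is slightly more compact:

// Sketch, not from the commit: the same per-channel counting via computeIfAbsent.
static NavigableMap<Short, AtomicLong> channelSpectrum(Stream<Short> channels) {
    NavigableMap<Short, AtomicLong> spectrum = new TreeMap<>();
    channels.forEach(channel -> spectrum.computeIfAbsent(channel, c -> new AtomicLong()).incrementAndGet());
    return spectrum;
}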
@@ -22,12 +22,20 @@ public interface NumassAnalyzer {
    Values analyze(NumassBlock block, Meta config);

    /**
     * Analyze the whole set
     * Analyze the whole set and return the results as a table
     * @param set
     * @param config
     * @return
     */
    Table analyze(NumassSet set, Meta config);

    /**
     * Generate the energy spectrum for the given block
     * @param block
     * @param config
     * @return
     */
    Table getSpectrum(NumassBlock block, Meta config);


}
@@ -17,7 +17,9 @@ import org.jetbrains.annotations.NotNull;

import java.time.Instant;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
@@ -69,6 +71,15 @@ public interface NumassSet extends Named, Metoid, Iterable<NumassPoint>, Provide
        return getPoints().filter(it -> it.getVoltage() == voltage).findFirst();
    }

    /**
     * List all points with the given voltage
     * @param voltage
     * @return
     */
    default List<NumassPoint> listPoints(double voltage) {
        return getPoints().filter(it -> it.getVoltage() == voltage).collect(Collectors.toList());
    }

    @Provides(NUMASS_POINT_PROVIDER_KEY)
    default Optional<NumassPoint> optPoint(String voltage) {
        return optPoint(Double.parseDouble(voltage));
@@ -10,8 +10,10 @@ import inr.numass.data.api.NumassPoint;

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.time.Duration;
import java.time.Instant;
import java.util.stream.IntStream;
import java.util.stream.Stream;

/**
@@ -39,40 +41,62 @@ public class ProtoNumassPoint implements NumassPoint {

    @Override
    public Stream<NumassBlock> getBlocks() {
        return null;
        return point.getChannelsList().stream().flatMap(channel ->
                channel.getBlocksList().stream().map(block -> new ProtoBlock((int) channel.getNum(), block))
        );
    }

    @Override
    public Meta meta() {
        return null;
        return envelope.meta();
    }

    public static Instant ofEpochNanos(long nanos) {
        long seconds = Math.floorDiv(nanos, (int) 1e9);
        int reminder = (int) (nanos % 1e9);
        return Instant.ofEpochSecond(seconds, reminder);
    }

    private class ProtoBlock implements NumassBlock {

        final int channel;
        final NumassProto.Point.Channel.Block block;

        private ProtoBlock(NumassProto.Point.Channel.Block block) {
        private ProtoBlock(int channel, NumassProto.Point.Channel.Block block) {
            this.channel = channel;
            this.block = block;
        }

        @Override
        public Instant getStartTime() {

            return ofEpochNanos(block.getTime());
        }

        @Override
        public Duration getLength() {
            return null;
            return Duration.ofNanos((long) (meta().getInt("b_size") / meta().getInt("sample_freq") * 1e9));
        }

        @Override
        public Stream<NumassEvent> getEvents() {
            return null;
            if (block.hasEvents()) {
                NumassProto.Point.Channel.Block.Events events = block.getEvents();
                return IntStream.range(0, events.getTimesCount()).mapToObj(i ->
                        new NumassEvent((short) events.getAmplitudes(i), events.getTimes(i))
                );
            } else {
                return Stream.empty();
            }
        }

        @Override
        public Stream<NumassFrame> getFrames() {
            return null;
            Duration tickSize = Duration.ofNanos((long) (1e9 / meta().getInt("sample_freq")));
            return block.getFramesList().stream().map(frame -> {
                Instant time = getStartTime().plusNanos(frame.getTime());
                ByteBuffer data = frame.getData().asReadOnlyByteBuffer();
                return new NumassFrame(time, tickSize, data.asShortBuffer());
            });
        }
    }
}
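ofEpochNanos splits an epoch-nanosecond timestamp into whole seconds and a nanosecond remainder before passing both to Instant.ofEpochSecond. A minimal equivalent in plain long arithmetic (a sketch, not part of the commit):

// Sketch, not from the commit: epoch nanoseconds to Instant via floorDiv/floorMod,
// which keeps the remainder non-negative even for timestamps before the epoch.
static java.time.Instant ofEpochNanosSketch(long nanos) {
    long seconds = Math.floorDiv(nanos, 1_000_000_000L);
    long remainder = Math.floorMod(nanos, 1_000_000_000L);
    return java.time.Instant.ofEpochSecond(seconds, remainder);
}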
@@ -3,28 +3,29 @@ syntax = "proto3";
package inr.numass.data;

message Point {
    // A single channel for multichannel detector readout
    message Channel {
        // A continuous measurement block
        message Block {
            // An unprocessed event
            // Raw data frame
            message Frame {
                uint64 time = 1; // time in nanoseconds from the beginning of the block
                bytes data = 2; // event frame array in int16 format
                // units: channels
                uint64 time = 1; // Time in nanos from the beginning of the block
                bytes data = 2; // Frame data as an array of int16 measured in arbitrary channels
            }
            // Processed events. They contain only the time and amplitude of the signals.
            // To save space during serialization, the amplitudes and times are stored in
            // separate arrays. An amplitude and a time with the same index
            // correspond to the same event.
            message Events {
                repeated uint64 times = 1; // time in nanoseconds from the beginning of the block
                repeated uint64 amplitudes = 2; // event amplitude in channels
                repeated uint64 times = 1; // Array of times in nanos from the beginning of the block
                repeated uint64 amplitudes = 2; // Array of amplitudes of events in channels
            }
            uint64 time = 1; // block start time in nanoseconds since the epoch
            repeated Frame frames = 2; // array of unprocessed events
            Events events = 3; // array of processed events
            uint64 time = 1; // Block start in epoch nanos
            repeated Frame frames = 2; // Frames array
            Events events = 3; // Events array
        }
        uint64 num = 1; // channel number
        repeated Block blocks = 2; // acquired blocks
        uint64 num = 1; // The number of the measuring channel
        repeated Block blocks = 2; // Blocks
    }
    repeated Channel channels = 1; // per-channel data array
    repeated Channel channels = 1; // Array of measuring channels
}
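The generated Java API used elsewhere in this commit (parseFrom, getChannelsList, getBlocksList, getEvents, getTimesCount, getNum) walks this structure roughly as in the following sketch (not part of the commit; it assumes the protobuf-generated NumassProto classes):

// Sketch, not from the commit: parse a Point and print per-channel event counts,
// using only accessors of the generated NumassProto classes that appear elsewhere in this diff.
static void printEventCounts(java.io.InputStream stream) throws java.io.IOException {
    NumassProto.Point point = NumassProto.Point.parseFrom(stream);
    for (NumassProto.Point.Channel channel : point.getChannelsList()) {
        long events = 0;
        for (NumassProto.Point.Channel.Block block : channel.getBlocksList()) {
            events += block.getEvents().getTimesCount();
        }
        System.out.println("channel " + channel.getNum() + ": " + events + " events");
    }
}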
@@ -1,72 +0,0 @@
/*
 * Copyright 2015 Alexander Nozik.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package inr.numass.actions;

import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef;
import hep.dataforge.exceptions.ContentException;
import hep.dataforge.meta.Laminate;
import inr.numass.debunch.DebunchReport;
import inr.numass.debunch.FrameAnalizer;

import java.io.PrintWriter;

import static hep.dataforge.values.ValueType.NUMBER;

/**
 * @author Darksnake
 */
@TypedActionDef(name = "debunch", inputType = RawNMFile.class, outputType = RawNMFile.class)
@ValueDef(name = "upperchanel", type = {NUMBER}, def = "4095", info = "An upper chanel for debuncing")
@ValueDef(name = "lowerchanel", type = {NUMBER}, def = "0", info = "A lower chanel for debuncing")
@ValueDef(name = "rejectprob", type = {NUMBER}, def = "1e-5", info = "Rejection probability")
@ValueDef(name = "framelength", type = {NUMBER}, def = "5", info = "Frame length in seconds")
@ValueDef(name = "maxcr", type = {NUMBER}, def = "100", info = "Maximum count rate for debunching")
public class DebunchAction extends OneToOneAction<RawNMFile, RawNMFile> {

    @Override
    protected RawNMFile execute(Context context, String name, RawNMFile source, Laminate meta) throws ContentException {
        report(context, name, "File {} started", source.getName());

        int upper = meta.getInt("upperchanel", RawNMPoint.MAX_CHANEL);
        int lower = meta.getInt("lowerchanel", 0);
        double rejectionprob = meta.getDouble("rejectprob", 1e-5);
        double framelength = meta.getDouble("framelength", 5);
        double maxCR = meta.getDouble("maxcr", 100d);

        RawNMFile res = new RawNMFile(source.getName(), source.getHead());
        source.getData().stream().map((point) -> {
            double cr = point.selectChanels(lower, upper).getCr();
            if (cr < maxCR) {
                DebunchReport report = new FrameAnalizer(rejectionprob, framelength, lower, upper).debunchPoint(point);

                report(context, name, "Debunching file '{}', point '{}': {} percent events {} percent time in bunches",
                        source.getName(), point.getUset(), report.eventsFiltred() * 100, report.timeFiltred() * 100);
                point = report.getPoint();
            }
            return point;
        }).forEach(res::putPoint);
        report(context, name, "File {} completed", source.getName());

        context.getChronicle(name).print(new PrintWriter(buildActionOutput(context, name)));

//        res.configure(source.meta());
        return res;
    }

}
@@ -31,7 +31,7 @@ import hep.dataforge.tables.TableFormat;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Values;
import inr.numass.data.NumassPoint;
import inr.numass.data.PointBuilders;
import inr.numass.data.api.NumassSet;
import inr.numass.data.storage.NumassDataLoader;
import inr.numass.debunch.DebunchReport;
import inr.numass.debunch.FrameAnalizer;
@@ -53,7 +53,7 @@ import static inr.numass.utils.TritiumUtils.pointExpression;
/**
 * @author Darksnake
 */
@TypedActionDef(name = "prepareData", inputType = NumassData.class, outputType = Table.class)
@TypedActionDef(name = "prepareData", inputType = NumassSet.class, outputType = Table.class)
@ValueDef(name = "lowerWindow", type = {NUMBER}, def = "0", info = "Base for the window lowerWindow bound")
@ValueDef(name = "lowerWindowSlope", type = {NUMBER}, def = "0", info = "Slope for the window lowerWindow bound")
@ValueDef(name = "upperWindow", type = {NUMBER}, info = "Upper bound for window")
@@ -62,7 +62,7 @@ import static inr.numass.utils.TritiumUtils.pointExpression;
        info = "An expression to correct count number depending on potential `U`, point length `T` and point itself as `point`")
@ValueDef(name = "utransform", info = "Expression for voltage transformation. Uses U as input")
@NodeDef(name = "correction", multiple = true, target = "method::inr.numass.actions.PrepareDataAction.makeCorrection")
public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
public class PrepareDataAction extends OneToOneAction<NumassSet, Table> {

    public static String[] parnames = {"Uset", "Uread", "Length", "Total", "Window", "Corr", "CR", "CRerr", "Timestamp"};

@@ -74,7 +74,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
    }

    @Override
    protected ListTable execute(Context context, String name, NumassData dataFile, Laminate meta) {
    protected ListTable execute(Context context, String name, NumassSet dataFile, Laminate meta) {
//        log.report("File %s started", dataFile.getName());

        int upper = meta.getInt("upperWindow", RawNMPoint.MAX_CHANEL - 1);
|
||||
*/
|
||||
package inr.numass.debunch;
|
||||
|
||||
import inr.numass.data.api.NumassBlock;
|
||||
import inr.numass.data.api.NumassEvent;
|
||||
|
||||
import java.util.List;
|
||||
@ -24,8 +25,8 @@ import java.util.List;
|
||||
* @author Darksnake
|
||||
*/
|
||||
public interface DebunchReport {
|
||||
RawNMPoint getInitialPoint();
|
||||
RawNMPoint getPoint();
|
||||
NumassBlock getInitialPoint();
|
||||
NumassBlock getPoint();
|
||||
|
||||
List<Frame> getBunches();
|
||||
List<NumassEvent> getBunchEvents();
|
||||
|
@ -15,39 +15,39 @@
|
||||
*/
|
||||
package inr.numass.debunch;
|
||||
|
||||
import inr.numass.data.api.NumassBlock;
|
||||
import inr.numass.data.api.NumassEvent;
|
||||
import inr.numass.data.api.NumassPoint;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Darksnake
|
||||
*/
|
||||
public class DebunchReportImpl implements DebunchReport {
|
||||
|
||||
private final List<Frame> bunches;
|
||||
private final RawNMPoint pointAfter;
|
||||
private final RawNMPoint pointBefore;
|
||||
private final NumassBlock pointAfter;
|
||||
private final NumassBlock pointBefore;
|
||||
|
||||
public DebunchReportImpl(RawNMPoint pointBefore, RawNMPoint pointAfter, List<Frame> bunches) {
|
||||
public DebunchReportImpl(NumassBlock pointBefore, NumassBlock pointAfter, List<Frame> bunches) {
|
||||
this.pointBefore = pointBefore;
|
||||
this.pointAfter = pointAfter;
|
||||
this.bunches = bunches;
|
||||
}
|
||||
|
||||
DebunchReportImpl(RawNMPoint pointBefore, DebunchData debunchData) {
|
||||
DebunchReportImpl(NumassBlock pointBefore, NumassBlock debunchData) {
|
||||
this.pointBefore = pointBefore;
|
||||
pointAfter = new RawNMPoint(pointBefore.getUset(),pointBefore.getUread(),
|
||||
pointAfter = new NumassPoint(pointBefore.getUset(), pointBefore.getUread(),
|
||||
debunchData.getDebunchedEvents(), debunchData.getDebunchedLength(), pointBefore.getStartTime());
|
||||
this.bunches = debunchData.getBunches();
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public double eventsFiltred() {
|
||||
return 1-(double)getPoint().getEventsCount()/getInitialPoint().getEventsCount();
|
||||
return 1 - (double) getPoint().getEvents().count() / getInitialPoint().getEvents().count();
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -65,18 +65,18 @@ public class DebunchReportImpl implements DebunchReport {
|
||||
}
|
||||
|
||||
@Override
|
||||
public RawNMPoint getInitialPoint() {
|
||||
public NumassBlock getInitialPoint() {
|
||||
return pointBefore;
|
||||
}
|
||||
|
||||
@Override
|
||||
public RawNMPoint getPoint() {
|
||||
public NumassBlock getPoint() {
|
||||
return pointAfter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double timeFiltred() {
|
||||
return 1-getPoint().getLength()/getInitialPoint().getLength();
|
||||
return 1d - getPoint().getLength().toNanos() / getInitialPoint().getLength().toNanos();
|
||||
}
|
||||
|
||||
|
||||
|
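The reworked timeFiltred divides two long nanosecond counts, so the quotient is truncated before the subtraction. A sketch of the same fraction computed in double precision (not part of the commit; names are illustrative):

// Sketch, not from the commit: fraction of time removed by debunching, in double precision.
static double timeFilteredFraction(java.time.Duration before, java.time.Duration after) {
    return 1d - (double) after.toNanos() / (double) before.toNanos();
}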
@@ -5,7 +5,6 @@
 */
package inr.numass.utils;

import inr.numass.data.PointBuilders;
import inr.numass.data.api.NumassEvent;
import org.apache.commons.math3.random.RandomGenerator;
