Revision of numass data architecture

darksnake 2017-07-10 16:44:37 +03:00
parent 626cac31a4
commit acae9aae4c
13 changed files with 448 additions and 456 deletions

File diff suppressed because it is too large.


@@ -1,17 +1,11 @@
 package inr.numass.data;

-import hep.dataforge.tables.ListTable;
-import hep.dataforge.tables.Table;
-import hep.dataforge.values.Values;
+import hep.dataforge.meta.Meta;
+import hep.dataforge.meta.MetaBuilder;
 import inr.numass.data.api.NumassPoint;
 import inr.numass.data.api.NumassSet;

-import java.time.Instant;
-import java.util.Arrays;
 import java.util.Collection;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.stream.Collectors;
 import java.util.stream.Stream;

 /**
@@ -19,118 +13,139 @@ import java.util.stream.Stream;
  */
 public class NumassDataUtils {

-    public static Collection<NumassPoint> joinSpectra(Stream<NumassSet> spectra) {
-        Map<Double, NumassPoint> map = new LinkedHashMap<>();
-        spectra.forEach(datum -> {
-            datum.forEach(point -> {
-                double uset = point.getVoltage();
-                if (map.containsKey(uset)) {
-                    map.put(uset, join(point, map.get(uset)));
-                } else {
-                    map.put(uset, point);
-                }
-            });
-        });
-        return map.values();
-    }
-
-    /**
-     * Spectral sum of two points
-     *
-     * @param first
-     * @param second
-     * @return
-     */
-    public static NumassPoint join(NumassPoint first, NumassPoint second) {
-        if (first.getVoltage() != second.getVoltage()) {
-            throw new RuntimeException("Voltage mismatch");
-        }
-        int[] newArray = new int[first.getSpectrum().length];
-        Arrays.setAll(newArray, i -> first.getSpectrum()[i] + second.getSpectrum()[i]);
-        return new NumassPointImpl(
-                first.getVoltage(),
-                Instant.EPOCH,
-                first.getLength() + second.getLength(),
-                newArray
-        );
-    }
-
-    public static NumassPoint substractPoint(NumassPoint point, NumassPoint reference) {
-        int[] array = new int[point.getSpectrum().length];
-        Arrays.setAll(array, i -> Math.max(0, point.getSpectrum()[i] - reference.getSpectrum()[i]));
-        return new NumassPointImpl(
-                point.getVoltage(),
-                point.getStartTime(),
-                point.getLength(),
-                array
-        );
-    }
-
-    public static Collection<NumassPoint> substractReferencePoint(Collection<NumassPoint> points, double uset) {
-        NumassPoint reference = points.stream().filter(it -> it.getVoltage() == uset).findFirst()
-                .orElseThrow(() -> new RuntimeException("Reference point not found"));
-        return points.stream().map(it -> substractPoint(it, reference)).collect(Collectors.toList());
-    }
-
-    /**
-     * High voltage scale correction
-     *
-     * @param data
-     * @param beta
-     * @return
-     */
-    public static Table setHVScale(ListTable data, double beta) {
-        SpectrumDataAdapter reader = adapter();
-        ListTable.Builder res = new ListTable.Builder(data.getFormat());
-        for (Values dp : data) {
-            double corrFactor = 1 + beta;
-            res.row(reader.buildSpectrumDataPoint(reader.getX(dp).doubleValue() * corrFactor, reader.getCount(dp), reader.getTime(dp)));
-        }
-        return res.build();
-    }
-
-    public static SpectrumDataAdapter adapter() {
-        return new SpectrumDataAdapter("Uset", "CR", "CRerr", "Time");
-    }
-
-    public static Table correctForDeadTime(ListTable data, double dtime) {
-        return correctForDeadTime(data, adapter(), dtime);
-    }
-
-    /**
-     * Dead time correction, in seconds
-     *
-     * @param data
-     * @param dtime
-     * @return
-     */
-    public static Table correctForDeadTime(ListTable data, SpectrumDataAdapter adapter, double dtime) {
-//        SpectrumDataAdapter adapter = adapter();
-        ListTable.Builder res = new ListTable.Builder(data.getFormat());
-        for (Values dp : data) {
-            double corrFactor = 1 / (1 - dtime * adapter.getCount(dp) / adapter.getTime(dp));
-            res.row(adapter.buildSpectrumDataPoint(adapter.getX(dp).doubleValue(), (long) (adapter.getCount(dp) * corrFactor), adapter.getTime(dp)));
-        }
-        return res.build();
-    }
-
-    public static double countRateWithDeadTime(NumassPoint p, int from, int to, double deadTime) {
-        double wind = p.getCountInWindow(from, to) / p.getLength();
-        double res;
-        if (deadTime > 0) {
-            double total = p.getTotalCount();
-//            double time = p.getLength();
-//            res = wind / (1 - total * deadTime / time);
-            double timeRatio = deadTime / p.getLength();
-            res = wind / total * (1d - Math.sqrt(1d - 4d * total * timeRatio)) / 2d / timeRatio;
-        } else {
-            res = wind;
-        }
-        return res;
-    }
-
-    public static double countRateWithDeadTimeErr(NumassPoint p, int from, int to, double deadTime) {
-        return Math.sqrt(countRateWithDeadTime(p, from, to, deadTime) / p.getLength());
-    }
+    public static NumassSet join(String name, Collection<NumassSet> sets) {
+        return new NumassSet() {
+            @Override
+            public Stream<NumassPoint> getPoints() {
+                return sets.stream().flatMap(set -> set.getPoints());
+            }
+
+            @Override
+            public Meta meta() {
+                MetaBuilder metaBuilder = new MetaBuilder("meta");
+                sets.forEach(set -> metaBuilder.putNode(set.getName(), set.meta()));
+                return metaBuilder;
+            }
+
+            @Override
+            public String getName() {
+                return name;
+            }
+        };
+    }
+
+//    public static Collection<NumassPoint> joinSpectra(Stream<NumassSet> spectra) {
+//        Map<Double, NumassPoint> map = new LinkedHashMap<>();
+//        spectra.forEach(datum -> {
+//            datum.forEach(point -> {
+//                double uset = point.getVoltage();
+//                if (map.containsKey(uset)) {
+//                    map.put(uset, join(point, map.get(uset)));
+//                } else {
+//                    map.put(uset, point);
+//                }
+//            });
+//        });
+//        return map.values();
+//    }
+//
+//    /**
+//     * Spectral sum of two points
+//     *
+//     * @param first
+//     * @param second
+//     * @return
+//     */
+//    public static NumassPoint join(NumassPoint first, NumassPoint second) {
+//        if (first.getVoltage() != second.getVoltage()) {
+//            throw new RuntimeException("Voltage mismatch");
+//        }
+//        int[] newArray = new int[first.getSpectrum().length];
+//        Arrays.setAll(newArray, i -> first.getSpectrum()[i] + second.getSpectrum()[i]);
+//        return new NumassPointImpl(
+//                first.getVoltage(),
+//                Instant.EPOCH,
+//                first.getLength() + second.getLength(),
+//                newArray
+//        );
+//    }
+//
+//    public static NumassPoint substractPoint(NumassPoint point, NumassPoint reference) {
+//        int[] array = new int[point.getSpectrum().length];
+//        Arrays.setAll(array, i -> Math.max(0, point.getSpectrum()[i] - reference.getSpectrum()[i]));
+//        return new NumassPointImpl(
+//                point.getVoltage(),
+//                point.getStartTime(),
+//                point.getLength(),
+//                array
+//        );
+//    }
+//
+//    public static Collection<NumassPoint> substractReferencePoint(Collection<NumassPoint> points, double uset) {
+//        NumassPoint reference = points.stream().filter(it -> it.getVoltage() == uset).findFirst()
+//                .orElseThrow(() -> new RuntimeException("Reference point not found"));
+//        return points.stream().map(it -> substractPoint(it, reference)).collect(Collectors.toList());
+//    }
+//
+//
+//    /**
+//     * High voltage scale correction
+//     *
+//     * @param data
+//     * @param beta
+//     * @return
+//     */
+//    public static Table setHVScale(ListTable data, double beta) {
+//        SpectrumDataAdapter reader = adapter();
+//        ListTable.Builder res = new ListTable.Builder(data.getFormat());
+//        for (Values dp : data) {
+//            double corrFactor = 1 + beta;
+//            res.row(reader.buildSpectrumDataPoint(reader.getX(dp).doubleValue() * corrFactor, reader.getCount(dp), reader.getTime(dp)));
+//        }
+//        return res.build();
+//    }
+//
+//    public static SpectrumDataAdapter adapter() {
+//        return new SpectrumDataAdapter("Uset", "CR", "CRerr", "Time");
+//    }
+//
+//    public static Table correctForDeadTime(ListTable data, double dtime) {
+//        return correctForDeadTime(data, adapter(), dtime);
+//    }
+//
+//    /**
+//     * Dead time correction, in seconds
+//     *
+//     * @param data
+//     * @param dtime
+//     * @return
+//     */
+//    public static Table correctForDeadTime(ListTable data, SpectrumDataAdapter adapter, double dtime) {
+////        SpectrumDataAdapter adapter = adapter();
+//        ListTable.Builder res = new ListTable.Builder(data.getFormat());
+//        for (Values dp : data) {
+//            double corrFactor = 1 / (1 - dtime * adapter.getCount(dp) / adapter.getTime(dp));
+//            res.row(adapter.buildSpectrumDataPoint(adapter.getX(dp).doubleValue(), (long) (adapter.getCount(dp) * corrFactor), adapter.getTime(dp)));
+//        }
+//        return res.build();
+//    }
+//
+//    public static double countRateWithDeadTime(NumassPoint p, int from, int to, double deadTime) {
+//        double wind = p.getCountInWindow(from, to) / p.getLength();
+//        double res;
+//        if (deadTime > 0) {
+//            double total = p.getTotalCount();
+////            double time = p.getLength();
+////            res = wind / (1 - total * deadTime / time);
+//            double timeRatio = deadTime / p.getLength();
+//            res = wind / total * (1d - Math.sqrt(1d - 4d * total * timeRatio)) / 2d / timeRatio;
+//        } else {
+//            res = wind;
+//        }
+//        return res;
+//    }
+//
+//    public static double countRateWithDeadTimeErr(NumassPoint p, int from, int to, double deadTime) {
+//        return Math.sqrt(countRateWithDeadTime(p, from, to, deadTime) / p.getLength());
+//    }
 }
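A minimal usage sketch of the new join API. The method wraps the member sets in a lazy view: it concatenates their point streams and collects their meta under one node per set name. The variable names and the "merged" label below are illustrative assumptions, not part of the commit:

// Sketch: merge two already loaded sets into one virtual NumassSet and walk its points.
static NumassSet mergeExample(NumassSet first, NumassSet second) {
    NumassSet merged = NumassDataUtils.join("merged", Arrays.asList(first, second));
    merged.getPoints().forEach(point -> System.out.println(point.getVoltage()));
    return merged;
}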
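A note on the quadratic dead-time expression kept (commented out) in countRateWithDeadTime. It can be read as inverting an assumed relation between the registered count and the true rate; this is an interpretation of the code, not a statement from the commit. With N = p.getTotalCount(), T = p.getLength(), \tau = deadTime and r = \tau/T (the code's timeRatio):

N = nT(1 - n\tau) \;\Rightarrow\; n^2\tau T - nT + N = 0 \;\Rightarrow\; n = \frac{1 - \sqrt{1 - 4N\tau/T}}{2\tau}

where the root is chosen so that n \to N/T as \tau \to 0. Rescaling the windowed rate wind = getCountInWindow(from, to)/T by the same factor n/(N/T) gives

\text{corrected} = \frac{\text{wind}}{N}\cdot\frac{1 - \sqrt{1 - 4Nr}}{2r},

which is exactly wind / total * (1d - Math.sqrt(1d - 4d * total * timeRatio)) / 2d / timeRatio.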


@@ -1,58 +0,0 @@
package inr.numass.data;
import inr.numass.data.api.NumassPoint;
import org.jetbrains.annotations.NotNull;
import java.io.IOException;
import java.io.InputStream;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.IntStream;
/**
* Created by darksnake on 13-Apr-17.
*/
public class PointBuilders {
public static NumassPoint readProtoPoint(double u, Instant startTime, double pointLength, InputStream stream, Function<NumassProto.Point.Channel.Block.Event, Integer> peakFinder) throws IOException {
NumassProto.Point point = NumassProto.Point.parseFrom(stream);
NumassProto.Point.Channel ch = point.getChannels(0);
int[] spectrum = count(ch.getBlocksList().stream()
.flatMapToInt(block -> IntStream.concat(
block.getPeaks().getAmplitudesList()
.stream().mapToInt(Long::intValue),
block.getEventsList().stream()
.mapToInt(peakFinder::apply)
)),0
);
return new NumassPointImpl(u, startTime, pointLength, spectrum);
}
private static int[] calculateSpectrum(RawNMPoint point) {
assert point.getEventsCount() > 0;
return count(point.getEvents().stream().mapToInt(event -> event.getChanel()),RawNMPoint.MAX_CHANEL);
}
@NotNull
public static NumassPoint readRawPoint(@NotNull RawNMPoint point) {
return new NumassPointImpl(point.getUset(), point.getStartTime(), point.getLength(), calculateSpectrum(point));
}
private static int[] count(IntStream stream, int maxChannel) {
List<AtomicInteger> list = new ArrayList<>();
while (list.size() <= maxChannel) {
list.add(new AtomicInteger(0));
}
stream.forEach(i -> {
while (list.size() <= i) {
list.add(new AtomicInteger(0));
}
list.get(i).incrementAndGet();
});
return list.stream().mapToInt(AtomicInteger::get).toArray();
}
}


@@ -1,14 +1,14 @@
 package inr.numass.data.analyzers;

 import hep.dataforge.meta.Meta;
-import hep.dataforge.tables.Table;
-import hep.dataforge.tables.TableFormat;
-import hep.dataforge.tables.TableFormatBuilder;
-import hep.dataforge.tables.ValueMap;
+import hep.dataforge.tables.*;
 import hep.dataforge.values.Values;
 import inr.numass.data.api.*;
 import org.jetbrains.annotations.Nullable;

+import java.util.NavigableMap;
+import java.util.TreeMap;
+import java.util.concurrent.atomic.AtomicLong;
 import java.util.stream.Stream;

 import static hep.dataforge.tables.XYAdapter.*;
@@ -20,6 +20,7 @@ import static inr.numass.data.api.NumassPoint.HV_KEY;
  */
 public class SimpleAnalyzer implements NumassAnalyzer {
     public static String[] NAME_LIST = {"length", "count", COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, "window", "timestamp"};
+    public static String[] NAME_LIST_WITH_HV = {HV_KEY, "length", "count", COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, "window", "timestamp"};

     @Nullable
     private final SignalProcessor processor;
@@ -38,7 +39,7 @@ public class SimpleAnalyzer implements NumassAnalyzer {
      * @param block
      * @return
      */
-    private Stream<NumassEvent> getEventStream(NumassBlock block) {
+    protected Stream<NumassEvent> getEventStream(NumassBlock block) {
         if (processor == null && block.getFrames().count() > 0) {
             throw new IllegalArgumentException("Signal processor needed to analyze frames");
         } else {
@@ -48,7 +49,6 @@ public class SimpleAnalyzer implements NumassAnalyzer {
     @Override
     public Values analyze(NumassBlock block, Meta config) {
         int loChannel = config.getInt("energy.lo", 0);
-
         int upChannel = config.getInt("energy.up", Integer.MAX_VALUE);
         long count = getEventStream(block)
@@ -57,6 +57,19 @@ public class SimpleAnalyzer implements NumassAnalyzer {
         double countRate = (double) count / block.getLength().toMillis() * 1000;
         double countRateError = Math.sqrt((double) count) / block.getLength().toMillis() * 1000;

+        if (block instanceof NumassPoint) {
+            return new ValueMap(NAME_LIST_WITH_HV,
+                    new Object[]{
+                            ((NumassPoint) block).getVoltage(),
+                            block.getLength().toNanos(),
+                            count,
+                            countRate,
+                            countRateError,
+                            new int[]{loChannel, upChannel},
+                            block.getStartTime()
+                    }
+            );
+        } else {
         return new ValueMap(NAME_LIST,
                 new Object[]{
                         block.getLength().toNanos(),
@@ -68,6 +81,8 @@ public class SimpleAnalyzer implements NumassAnalyzer {
                 }
         );
     }
+    }

     @Override
     public Table analyze(NumassSet set, Meta config) {
@@ -81,6 +96,38 @@ public class SimpleAnalyzer implements NumassAnalyzer {
                 .addTime()
                 .build();

+        return new ListTable.Builder(format)
+                .rows(set.getPoints().map(point -> analyze(point, config)))
+                .build();
+    }
+
+    @Override
+    public Table getSpectrum(NumassBlock block, Meta config) {
+        TableFormat format = new TableFormatBuilder()
+                .addNumber("channel", X_VALUE_KEY)
+                .addNumber("count")
+                .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
+                .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
+                .updateMeta(metaBuilder -> metaBuilder.setNode("config", config))
+                .build();
+
+        NavigableMap<Short, AtomicLong> map = new TreeMap<>();
+        getEventStream(block).forEach(event -> {
+            if (map.containsKey(event.getChanel())) {
+                map.get(event.getChanel()).incrementAndGet();
+            } else {
+                map.put(event.getChanel(), new AtomicLong(1));
+            }
+        });
+
+        return new ListTable.Builder(format)
+                .rows(map.entrySet().stream()
+                        .map(entry ->
+                                new ValueMap(format.namesAsArray(),
+                                        entry.getKey(),
+                                        entry.getValue(),
+                                        (double) entry.getValue().get() / block.getLength().toMillis() * 1000,
+                                        Math.sqrt(entry.getValue().get()) / block.getLength().toMillis() * 1000
+                                )
+                        )
+                ).build();
     }
 }
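A short sketch of how the extended analyzer API reads after this change. The SimpleAnalyzer constructor call is an assumption made for illustration; analyze and getSpectrum are the methods shown above:

// Sketch: per-point summary table for a whole set plus an amplitude spectrum for one point.
static void analyzeExample(NumassSet set, NumassPoint point, Meta config) {
    NumassAnalyzer analyzer = new SimpleAnalyzer(null);   // assumed: constructor takes the nullable SignalProcessor
    Table pointTable = analyzer.analyze(set, config);     // one row per point; HV column via NAME_LIST_WITH_HV
    Table spectrum = analyzer.getSpectrum(point, config); // rows of channel / count / count rate / count rate error
}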


@@ -22,12 +22,20 @@ public interface NumassAnalyzer {
     Values analyze(NumassBlock block, Meta config);

     /**
-     * Analyze the whole set
+     * Analyze the whole set and return the results as a table
      * @param set
      * @param config
      * @return
      */
     Table analyze(NumassSet set, Meta config);
+
+    /**
+     * Generate the energy spectrum for the given block
+     * @param block
+     * @param config
+     * @return
+     */
+    Table getSpectrum(NumassBlock block, Meta config);
 }


@@ -17,7 +17,9 @@ import org.jetbrains.annotations.NotNull;
 import java.time.Instant;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Optional;
+import java.util.stream.Collectors;
 import java.util.stream.Stream;

 /**
@@ -69,6 +71,15 @@ public interface NumassSet extends Named, Metoid, Iterable<NumassPoint>, Provider {
         return getPoints().filter(it -> it.getVoltage() == voltage).findFirst();
     }

+    /**
+     * List all points with given voltage
+     * @param voltage
+     * @return
+     */
+    default List<NumassPoint> listPoints(double voltage) {
+        return getPoints().filter(it -> it.getVoltage() == voltage).collect(Collectors.toList());
+    }
+
     @Provides(NUMASS_POINT_PROVIDER_KEY)
     default Optional<NumassPoint> optPoint(String voltage) {
         return optPoint(Double.parseDouble(voltage));
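A small illustration of the difference between optPoint and the new listPoints; the 14000.0 V value and the variable names are illustrative assumptions:

static void lookupExample(NumassSet set) {
    Optional<NumassPoint> first = set.optPoint(14000.0); // first point taken at this voltage, if any
    List<NumassPoint> all = set.listPoints(14000.0);     // every point taken at this voltage, e.g. repeated measurements
}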


@@ -10,8 +10,10 @@ import inr.numass.data.api.NumassPoint;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.ByteBuffer;
 import java.time.Duration;
 import java.time.Instant;
+import java.util.stream.IntStream;
 import java.util.stream.Stream;

 /**
@@ -39,40 +41,62 @@ public class ProtoNumassPoint implements NumassPoint {
     @Override
     public Stream<NumassBlock> getBlocks() {
-        return null;
+        return point.getChannelsList().stream().flatMap(channel ->
+                channel.getBlocksList().stream().map(block -> new ProtoBlock((int) channel.getNum(), block))
+        );
     }

     @Override
     public Meta meta() {
-        return null;
+        return envelope.meta();
+    }
+
+    public static Instant ofEpochNanos(long nanos) {
+        long seconds = Math.floorDiv(nanos, (int) 1e9);
+        int reminder = (int) (nanos % 1e9);
+        return Instant.ofEpochSecond(seconds, reminder);
     }

     private class ProtoBlock implements NumassBlock {
+        final int channel;
         final NumassProto.Point.Channel.Block block;

-        private ProtoBlock(NumassProto.Point.Channel.Block block) {
+        private ProtoBlock(int channel, NumassProto.Point.Channel.Block block) {
+            this.channel = channel;
             this.block = block;
         }

         @Override
         public Instant getStartTime() {
+            return ofEpochNanos(block.getTime());
         }

         @Override
         public Duration getLength() {
-            return null;
+            return Duration.ofNanos((long) (meta().getInt("b_size") / meta().getInt("sample_freq") * 1e9));
         }

         @Override
         public Stream<NumassEvent> getEvents() {
-            return null;
+            if (block.hasEvents()) {
+                NumassProto.Point.Channel.Block.Events events = block.getEvents();
+                return IntStream.range(0, events.getTimesCount()).mapToObj(i ->
+                        new NumassEvent((short) events.getAmplitudes(i), events.getTimes(i))
+                );
+            } else {
+                return Stream.empty();
+            }
         }

         @Override
         public Stream<NumassFrame> getFrames() {
-            return null;
+            Duration tickSize = Duration.ofNanos((long) (1e9 / meta().getInt("sample_freq")));
+            return block.getFramesList().stream().map(frame -> {
+                Instant time = getStartTime().plusNanos(frame.getTime());
+                ByteBuffer data = frame.getData().asReadOnlyByteBuffer();
+                return new NumassFrame(time, tickSize, data.asShortBuffer());
+            });
         }
     }
 }
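The added ofEpochNanos splits an epoch-nanosecond timestamp into whole seconds and a nanosecond remainder before calling Instant.ofEpochSecond. An equivalent sketch for non-negative timestamps, illustrative rather than part of the commit:

static Instant ofEpochNanosSketch(long nanos) {
    long seconds = nanos / 1_000_000_000L;    // whole seconds since 1970-01-01T00:00:00Z
    long remainder = nanos % 1_000_000_000L;  // nanoseconds left over within that second
    return Instant.ofEpochSecond(seconds, remainder);
}

For example, 1_499_699_077_123_456_789 ns maps to Instant.ofEpochSecond(1_499_699_077, 123_456_789).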


@@ -3,28 +3,29 @@ syntax = "proto3";
 package inr.numass.data;

 message Point {
+    // A single channel of a multichannel detector readout
     message Channel {
+        // A continuous measurement block
        message Block {
-            // Необработанное событие
+            // Raw data frame
            message Frame {
-                uint64 time = 1; // время в наносекундах от начала блока
-                bytes data = 2; // массив кадра события в формате int16
-                // ед. измерения - каналы
+                uint64 time = 1; // Time in nanos from the beginning of the block
+                bytes data = 2; // Frame data as an array of int16 measured in arbitrary channels
            }
            // Processed events: only the time and the amplitude of each signal are kept.
            // To save space on serialization, the amplitudes and the times are stored in
            // two separate arrays; an amplitude and a time with the same index belong to
            // the same event.
            message Events {
-                repeated uint64 times = 1; // время в наносекундах от начала блока
-                repeated uint64 amplitudes = 2; // амплитуда события в каналах
+                repeated uint64 times = 1; // Array of times in nanos from the beginning of the block
+                repeated uint64 amplitudes = 2; // Array of event amplitudes in channels
            }

-            uint64 time = 1; // время начала блока в наносекундах с начала эпохи
-            repeated Frame frames = 2; // массив необработанных событий
-            Events events = 3; // массив обработанных событий
+            uint64 time = 1; // Block start in epoch nanos
+            repeated Frame frames = 2; // Frames array
+            Events events = 3; // Events array
        }
-        uint64 num = 1; // номер канала
-        repeated Block blocks = 2; // набранные блоки
+        uint64 num = 1; // The number of the measuring channel
+        repeated Block blocks = 2; // Blocks
    }
-    repeated Channel channels = 1; // массив данных по каналам
+    repeated Channel channels = 1; // Array of measuring channels
 }
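To make the message layout concrete: a Point holds measuring channels, a channel holds blocks, and a block carries either raw Frames or packed Events whose times and amplitudes sit in two parallel arrays. A reading sketch built only from calls that already appear in PointBuilders and ProtoNumassPoint; the stream variable (an InputStream with one serialized Point) is assumed:

NumassProto.Point point = NumassProto.Point.parseFrom(stream);
for (NumassProto.Point.Channel channel : point.getChannelsList()) {
    for (NumassProto.Point.Channel.Block block : channel.getBlocksList()) {
        NumassProto.Point.Channel.Block.Events events = block.getEvents();
        for (int i = 0; i < events.getTimesCount(); i++) {
            // time in nanoseconds from the block start, amplitude in detector channels
            System.out.println(events.getTimes(i) + "\t" + events.getAmplitudes(i));
        }
    }
}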


@@ -1,72 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef;
import hep.dataforge.exceptions.ContentException;
import hep.dataforge.meta.Laminate;
import inr.numass.debunch.DebunchReport;
import inr.numass.debunch.FrameAnalizer;
import java.io.PrintWriter;
import static hep.dataforge.values.ValueType.NUMBER;
/**
* @author Darksnake
*/
@TypedActionDef(name = "debunch", inputType = RawNMFile.class, outputType = RawNMFile.class)
@ValueDef(name = "upperchanel", type = {NUMBER}, def = "4095", info = "An upper chanel for debuncing")
@ValueDef(name = "lowerchanel", type = {NUMBER}, def = "0", info = "A lower chanel for debuncing")
@ValueDef(name = "rejectprob", type = {NUMBER}, def = "1e-5", info = "Rejection probability")
@ValueDef(name = "framelength", type = {NUMBER}, def = "5", info = "Frame length in seconds")
@ValueDef(name = "maxcr", type = {NUMBER}, def = "100", info = "Maximum count rate for debunching")
public class DebunchAction extends OneToOneAction<RawNMFile, RawNMFile> {
@Override
protected RawNMFile execute(Context context, String name, RawNMFile source, Laminate meta) throws ContentException {
report(context, name, "File {} started", source.getName());
int upper = meta.getInt("upperchanel", RawNMPoint.MAX_CHANEL);
int lower = meta.getInt("lowerchanel", 0);
double rejectionprob = meta.getDouble("rejectprob", 1e-5);
double framelength = meta.getDouble("framelength", 5);
double maxCR = meta.getDouble("maxcr", 100d);
RawNMFile res = new RawNMFile(source.getName(), source.getHead());
source.getData().stream().map((point) -> {
double cr = point.selectChanels(lower, upper).getCr();
if (cr < maxCR) {
DebunchReport report = new FrameAnalizer(rejectionprob, framelength, lower, upper).debunchPoint(point);
report(context, name, "Debunching file '{}', point '{}': {} percent events {} percent time in bunches",
source.getName(), point.getUset(), report.eventsFiltred() * 100, report.timeFiltred() * 100);
point = report.getPoint();
}
return point;
}).forEach(res::putPoint);
report(context, name, "File {} completed", source.getName());
context.getChronicle(name).print(new PrintWriter(buildActionOutput(context, name)));
// res.configure(source.meta());
return res;
}
}


@@ -31,7 +31,7 @@ import hep.dataforge.tables.TableFormat;
 import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Values;
 import inr.numass.data.NumassPoint;
-import inr.numass.data.PointBuilders;
+import inr.numass.data.api.NumassSet;
 import inr.numass.data.storage.NumassDataLoader;
 import inr.numass.debunch.DebunchReport;
 import inr.numass.debunch.FrameAnalizer;
@@ -53,7 +53,7 @@ import static inr.numass.utils.TritiumUtils.pointExpression;
 /**
  * @author Darksnake
  */
-@TypedActionDef(name = "prepareData", inputType = NumassData.class, outputType = Table.class)
+@TypedActionDef(name = "prepareData", inputType = NumassSet.class, outputType = Table.class)
 @ValueDef(name = "lowerWindow", type = {NUMBER}, def = "0", info = "Base for the window lowerWindow bound")
 @ValueDef(name = "lowerWindowSlope", type = {NUMBER}, def = "0", info = "Slope for the window lowerWindow bound")
 @ValueDef(name = "upperWindow", type = {NUMBER}, info = "Upper bound for window")
@@ -62,7 +62,7 @@ import static inr.numass.utils.TritiumUtils.pointExpression;
         info = "An expression to correct count number depending on potential `U`, point length `T` and point itself as `point`")
 @ValueDef(name = "utransform", info = "Expression for voltage transformation. Uses U as input")
 @NodeDef(name = "correction", multiple = true, target = "method::inr.numass.actions.PrepareDataAction.makeCorrection")
-public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
+public class PrepareDataAction extends OneToOneAction<NumassSet, Table> {

     public static String[] parnames = {"Uset", "Uread", "Length", "Total", "Window", "Corr", "CR", "CRerr", "Timestamp"};
@@ -74,7 +74,7 @@ public class PrepareDataAction extends OneToOneAction<NumassData, Table> {
     }

     @Override
-    protected ListTable execute(Context context, String name, NumassData dataFile, Laminate meta) {
+    protected ListTable execute(Context context, String name, NumassSet dataFile, Laminate meta) {
 //        log.report("File %s started", dataFile.getName());
         int upper = meta.getInt("upperWindow", RawNMPoint.MAX_CHANEL - 1);


@@ -15,6 +15,7 @@
  */
 package inr.numass.debunch;

+import inr.numass.data.api.NumassBlock;
 import inr.numass.data.api.NumassEvent;

 import java.util.List;
@@ -24,8 +25,8 @@ import java.util.List;
  * @author Darksnake
  */
 public interface DebunchReport {
-    RawNMPoint getInitialPoint();
+    NumassBlock getInitialPoint();

-    RawNMPoint getPoint();
+    NumassBlock getPoint();

     List<Frame> getBunches();
     List<NumassEvent> getBunchEvents();


@@ -15,39 +15,39 @@
  */
 package inr.numass.debunch;

+import inr.numass.data.api.NumassBlock;
 import inr.numass.data.api.NumassEvent;
+import inr.numass.data.api.NumassPoint;

 import java.util.ArrayList;
 import java.util.List;

 /**
- *
  * @author Darksnake
  */
 public class DebunchReportImpl implements DebunchReport {
     private final List<Frame> bunches;
-    private final RawNMPoint pointAfter;
-    private final RawNMPoint pointBefore;
+    private final NumassBlock pointAfter;
+    private final NumassBlock pointBefore;

-    public DebunchReportImpl(RawNMPoint pointBefore, RawNMPoint pointAfter, List<Frame> bunches) {
+    public DebunchReportImpl(NumassBlock pointBefore, NumassBlock pointAfter, List<Frame> bunches) {
         this.pointBefore = pointBefore;
         this.pointAfter = pointAfter;
         this.bunches = bunches;
     }

-    DebunchReportImpl(RawNMPoint pointBefore, DebunchData debunchData) {
+    DebunchReportImpl(NumassBlock pointBefore, DebunchData debunchData) {
         this.pointBefore = pointBefore;
-        pointAfter = new RawNMPoint(pointBefore.getUset(),pointBefore.getUread(),
+        pointAfter = new NumassPoint(pointBefore.getUset(), pointBefore.getUread(),
                 debunchData.getDebunchedEvents(), debunchData.getDebunchedLength(), pointBefore.getStartTime());
         this.bunches = debunchData.getBunches();
     }

     @Override
     public double eventsFiltred() {
-        return 1-(double)getPoint().getEventsCount()/getInitialPoint().getEventsCount();
+        return 1 - (double) getPoint().getEvents().count() / getInitialPoint().getEvents().count();
     }

     @Override
@@ -65,18 +65,18 @@ public class DebunchReportImpl implements DebunchReport {
     }

     @Override
-    public RawNMPoint getInitialPoint() {
+    public NumassBlock getInitialPoint() {
         return pointBefore;
     }

     @Override
-    public RawNMPoint getPoint() {
+    public NumassBlock getPoint() {
         return pointAfter;
     }

     @Override
     public double timeFiltred() {
-        return 1-getPoint().getLength()/getInitialPoint().getLength();
+        return 1d - (double) getPoint().getLength().toNanos() / getInitialPoint().getLength().toNanos();
     }
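For a concrete reading of the two fractions: if debunching keeps 9,900 of 10,000 events and 99 of 100 seconds of live time, eventsFiltred() and timeFiltred() both evaluate to 1 - 0.99 = 0.01, i.e. about one percent of the events and of the time was attributed to bunches.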


@@ -5,7 +5,6 @@
  */
 package inr.numass.utils;

-import inr.numass.data.PointBuilders;
 import inr.numass.data.api.NumassEvent;
 import org.apache.commons.math3.random.RandomGenerator;