Numass underflow update

darksnake 2017-07-30 19:33:20 +03:00
parent 5ea87aae51
commit 31f1fa0a71
6 changed files with 41 additions and 26 deletions

View File

@@ -37,15 +37,12 @@ public abstract class AbstractAnalyzer implements NumassAnalyzer {
      * @return
      */
     public Stream<NumassEvent> getEventStream(NumassBlock block, Meta config) {
-        int loChannel = config.getInt("window.lo", 0);
-        int upChannel = config.getInt("window.up", Integer.MAX_VALUE);
         if (block.getFrames().count() == 0) {
-            return block.getEvents().filter(it -> it.getChanel() >= loChannel && it.getChanel() < upChannel);
+            return block.getEvents();
         } else if (getProcessor() == null) {
             throw new IllegalArgumentException("Signal processor needed to analyze frames");
         } else {
-            return Stream.concat(block.getEvents(), block.getFrames().flatMap(getProcessor()::analyze))
-                    .filter(it -> it.getChanel() >= loChannel && it.getChanel() <= upChannel);
+            return Stream.concat(block.getEvents(), block.getFrames().flatMap(getProcessor()::analyze));
         }
     }

View File

@@ -4,10 +4,13 @@ import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Values;
 import inr.numass.data.api.NumassBlock;
+import inr.numass.data.api.NumassEvent;
 import inr.numass.data.api.NumassPoint;
 import inr.numass.data.api.SignalProcessor;
 import org.jetbrains.annotations.Nullable;
 
+import java.util.stream.Stream;
+
 /**
  * A simple event counter
  * Created by darksnake on 07.07.2017.
@@ -21,11 +24,15 @@ public class SimpleAnalyzer extends AbstractAnalyzer {
     public SimpleAnalyzer() {
     }
 
+    public Stream<NumassEvent> getEventStream(NumassBlock block, int loChannel, int upChannel) {
+        return getEventStream(block, Meta.empty()).filter(it -> it.getChanel() >= loChannel && it.getChanel() < upChannel);
+    }
+
     @Override
     public Values analyze(NumassBlock block, Meta config) {
         int loChannel = config.getInt("window.lo", 0);
         int upChannel = config.getInt("window.up", Integer.MAX_VALUE);
-        long count = getEventStream(block, config).count();
+        long count = getEventStream(block, loChannel, upChannel).count();
         double countRate = (double) count / block.getLength().toMillis() * 1000;
         double countRateError = Math.sqrt((double) count) / block.getLength().toMillis() * 1000;

View File

@@ -7,11 +7,12 @@ import inr.numass.data.api.NumassBlock;
 import inr.numass.data.api.NumassEvent;
 import inr.numass.data.api.NumassPoint;
 import inr.numass.data.api.SignalProcessor;
+import javafx.util.Pair;
 import org.jetbrains.annotations.Nullable;
 
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
-import java.util.stream.LongStream;
+import java.util.stream.Stream;
 
 /**
  * An analyzer which uses time information from events
@@ -30,18 +31,20 @@ public class TimeAnalyzer extends AbstractAnalyzer {
     public Values analyze(NumassBlock block, Meta config) {
         int loChannel = config.getInt("window.lo", 0);
         int upChannel = config.getInt("window.up", Integer.MAX_VALUE);
         long t0 = config.getValue("t0").longValue();
 
         AtomicLong totalN = new AtomicLong(0);
         AtomicLong totalT = new AtomicLong(0);
 
-        timeChain(block, config).forEach(delay -> {
-            if (delay >= t0) {
+        extendedEventStream(block, config)
+                .filter(pair -> {
+                    short channel = pair.getKey().getChanel();
+                    return channel >= loChannel && channel < upChannel;
+                })
+                .forEach(pair -> {
                     totalN.incrementAndGet();
                     //TODO add progress listener here
-                totalT.addAndGet(delay);
-            }
-        });
+                    totalT.addAndGet(pair.getValue());
+                });
 
         double countRate = 1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000);//1e9 / (totalT.get() / totalN.get() - t0);
@@ -77,24 +80,32 @@ public class TimeAnalyzer extends AbstractAnalyzer {
      * @param config
      * @return
      */
-    public LongStream timeChain(NumassBlock block, Meta config) {
+    public Stream<Pair<NumassEvent, Long>> extendedEventStream(NumassBlock block, Meta config) {
+        long t0 = config.getValue("t0").longValue();
+
         AtomicReference<NumassEvent> lastEvent = new AtomicReference<>(null);
-        return getEventStream(block, config)
+
+        return super.getEventStream(block, config) //using super implementation
                 .sorted()
-                .mapToLong(event -> {
+                .map(event -> {
                     if (lastEvent.get() == null) {
                         lastEvent.set(event);
-                        return 0;
+                        return new Pair<>(event, 0L);
                     } else {
                         long res = event.getTimeOffset() - lastEvent.get().getTimeOffset();
                         if (res >= 0) {
                             lastEvent.set(event);
-                            return res;
+                            return new Pair<>(event, res);
                         } else {
                             lastEvent.set(null);
-                            return 0;
+                            return new Pair<>(event, 0L);
                         }
                     }
-                });
+                }).filter(pair -> pair.getValue() >= t0);
+    }
+
+    @Override
+    public Stream<NumassEvent> getEventStream(NumassBlock block, Meta config) {
+        return extendedEventStream(block,config).map(Pair::getKey);
     }
 }
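
For reference, a minimal usage sketch (not part of this commit) of the new pair-valued stream; the `block` and `config` variables and the enclosing class are assumed here, only `extendedEventStream` and the `javafx.util.Pair` element type come from the diff above:

    // Sketch only: assumes a NumassBlock `block` and a Meta `config` carrying
    // "window.lo", "window.up" and "t0", as analyze() above expects.
    TimeAnalyzer analyzer = new TimeAnalyzer();

    // Each element pairs an event with its delay to the previous time-ordered event;
    // delays below t0 have already been filtered out inside extendedEventStream.
    double meanDelay = analyzer.extendedEventStream(block, config)
            .mapToLong(Pair::getValue)  // unbox the Long delay of each pair
            .average()
            .orElse(0);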

View File

@@ -30,7 +30,7 @@ public interface NumassAnalyzer {
     static long countInWindow(Table spectrum, short loChannel, short upChannel) {
         return spectrum.getRows().filter(row -> {
             int channel = row.getInt(CHANNEL_KEY);
-            return channel > loChannel && channel < upChannel;
+            return channel >= loChannel && channel < upChannel;
         }).mapToLong(it -> it.getValue(COUNT_KEY).numberValue().longValue()).sum();
     }
@@ -59,7 +59,7 @@ public interface NumassAnalyzer {
         spectrum.getRows().filter(row -> {
             int c = row.getInt(CHANNEL_KEY);
-            return c >= binLo && c <= binUp;
+            return c >= binLo && c < binUp;
         }).forEach(row -> {
             count.addAndGet(row.getValue(COUNT_KEY, 0).longValue());
             countRate.accumulateAndGet(row.getDouble(COUNT_RATE_KEY, 0), (d1, d2) -> d1 + d2);

View File

@@ -20,7 +20,7 @@ class PointAnalyzer {
     static Histogram histogram(NumassBlock point, int loChannel = 0, int upChannel = 10000, double binSize = 0.5, int binNum = 500) {
         return UnivariateHistogram.buildUniform(0d, binSize * binNum, binSize)
-                .fill(analyzer.timeChain(point, Grind.buildMeta("window.lo": loChannel, "window.up": upChannel)).mapToDouble {it / 1000 as double})
+                .fill(analyzer.extendedEventStream(point, Grind.buildMeta("window.lo": loChannel, "window.up": upChannel)).mapToDouble {it.value / 1000 as double})
     }
 
     static Histogram histogram(DoubleStream stream, double binSize = 0.5, int binNum = 500) {

View File

@@ -44,7 +44,7 @@ new GrindShell(ctx).eval {
     def loChannel = 400;
     def upChannel = 800;
-    def chain = new TimeAnalyzer().timeChain(new MetaBlock(points), Grind.buildMeta("window.lo": loChannel, "window.up": upChannel))
+    def chain = new TimeAnalyzer().extendedEventStream(new MetaBlock(points), Grind.buildMeta("window.lo": loChannel, "window.up": upChannel))
     def histogram = PointAnalyzer.histogram(chain, 5e-6, 500).asTable();