New numass data structures. Almost finished. Not tested. Cleanup of unused scripts.

Alexander Nozik 2017-07-16 22:00:33 +03:00
parent c898422e5d
commit 0ffefe3a3b
43 changed files with 698 additions and 1849 deletions

View File

@@ -105,9 +105,9 @@ public class NumassDataUtils {
 //        return res.build();
 //    }
 //
-//    public static SpectrumDataAdapter adapter() {
-//        return new SpectrumDataAdapter("Uset", "CR", "CRerr", "Time");
-//    }
+    public static SpectrumDataAdapter adapter() {
+        return new SpectrumDataAdapter("Uset", "CR", "CRerr", "Time");
+    }
 //
 //    public static Table correctForDeadTime(ListTable data, double dtime) {
 //        return correctForDeadTime(data, adapter(), dtime);

View File

@@ -17,8 +17,8 @@ import static inr.numass.data.api.NumassPoint.HV_KEY;
  * Created by darksnake on 11.07.2017.
  */
 public abstract class AbstractAnalyzer implements NumassAnalyzer {
-    public static String[] NAME_LIST = {"length", "count", COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, "window", "timestamp"};
-    public static String[] NAME_LIST_WITH_HV = {HV_KEY, "length", "count", COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, "window", "timestamp"};
+    public static String[] NAME_LIST = {LENGTH_KEY, COUNT_KEY, COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, "window", "timestamp"};
+    public static String[] NAME_LIST_WITH_HV = {HV_KEY, LENGTH_KEY, COUNT_KEY, COUNT_RATE_KEY, COUNT_RATE_ERROR_KEY, "window", "timestamp"};
     @Nullable
     private final SignalProcessor processor;
@@ -52,8 +52,8 @@ public abstract class AbstractAnalyzer implements NumassAnalyzer {
     public Table analyze(NumassSet set, Meta config) {
         TableFormat format = new TableFormatBuilder()
                 .addNumber(HV_KEY, X_VALUE_KEY)
-                .addNumber("length")
-                .addNumber("count")
+                .addNumber(LENGTH_KEY)
+                .addNumber(COUNT_KEY)
                 .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
                 .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
                 .addColumn("window")

View File

@@ -27,6 +27,7 @@ public class SmartAnalyzer extends AbstractAnalyzer {
     @Override
     public Values analyze(NumassBlock block, Meta config) {
+        //TODO add result caching
         //TODO do something more... smart... using information from point if block is point
         switch (config.getString("type", "simple")) {
             case "simple":

View File

@@ -1,12 +1,21 @@
 package inr.numass.data.analyzers;
 import hep.dataforge.meta.Meta;
+import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Values;
 import inr.numass.data.api.NumassBlock;
+import inr.numass.data.api.NumassEvent;
+import inr.numass.data.api.NumassPoint;
 import inr.numass.data.api.SignalProcessor;
 import org.jetbrains.annotations.Nullable;
+import java.time.Duration;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.stream.DoubleStream;
 /**
+ * An analyzer which uses time information from events
  * Created by darksnake on 11.07.2017.
  */
 public class TimeAnalyzer extends AbstractAnalyzer {
@@ -20,6 +29,69 @@ public class TimeAnalyzer extends AbstractAnalyzer {
     @Override
     public Values analyze(NumassBlock block, Meta config) {
-        throw new UnsupportedOperationException("TODO");
+        int loChannel = config.getInt("window.lo", 0);
+        int upChannel = config.getInt("window.up", Integer.MAX_VALUE);
+        double t0 = config.getDouble("t0");
+        AtomicLong totalN = new AtomicLong(0);
+        AtomicReference<Double> totalT = new AtomicReference<>(0d);
+        timeChain(block, config).forEach(delay -> {
+            if (delay >= t0) {
+                totalN.incrementAndGet();
+                //TODO add progress listener here
+                totalT.accumulateAndGet(delay, (d1, d2) -> d1 + d2);
+            }
+        });
+        double countRate = 1d / (totalT.get() / totalN.get() - t0);
+        double countRateError = countRate / Math.sqrt(totalN.get());
+        long count = (long) (countRate * totalT.get());
+        double length = totalT.get();
+        if (block instanceof NumassPoint) {
+            return new ValueMap(NAME_LIST_WITH_HV,
+                    new Object[]{
+                            ((NumassPoint) block).getVoltage(),
+                            length,
+                            count,
+                            countRate,
+                            countRateError,
+                            new int[]{loChannel, upChannel},
+                            block.getStartTime()
+                    }
+            );
+        } else {
+            return new ValueMap(NAME_LIST,
+                    new Object[]{
+                            length,
+                            count,
+                            countRate,
+                            countRateError,
+                            new int[]{loChannel, upChannel},
+                            block.getStartTime()
+                    }
+            );
+        }
+    }
+    public DoubleStream timeChain(NumassBlock block, Meta config) {
+        AtomicReference<NumassEvent> lastEvent = new AtomicReference<>(null);
+        return getEventStream(block, config).mapToDouble(event -> {
+            if (lastEvent.get() == null) {
+                lastEvent.set(event);
+                return 0d;
+            } else {
+                double res = Duration.between(lastEvent.get().getTime(), event.getTime()).toNanos(); //event.getTimeOffset() - lastEvent.get().getTimeOffset();
+                if (res > 0) {
+                    lastEvent.set(event);
+                    return res;
+                } else {
+                    lastEvent.set(null);
+                    return 0d;
+                }
+            }
+        });
     }
 }
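Note on the estimator above: for Poisson-distributed events the inter-event delays are exponential, and by memorylessness the delays that pass the t0 cut average to t0 + 1/lambda. The new analyze implementation therefore inverts the sample mean of the N accepted delays:

    \lambda = \frac{1}{\overline{\Delta t} - t_0}, \qquad
    \sigma_\lambda \approx \frac{\lambda}{\sqrt{N}}, \qquad
    \overline{\Delta t} = \mathrm{totalT} / \mathrm{totalN}

timeChain resets its reference event on non-positive delays, so only delays between correctly ordered, subsequent events enter the sum.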

View File

@@ -0,0 +1,42 @@
+package inr.numass.data.api;
+import java.time.Duration;
+import java.time.Instant;
+import java.util.*;
+import java.util.stream.Stream;
+/**
+ * A block constructed from a set of other blocks. Internal blocks are not necessarily consecutive. Blocks are automatically sorted.
+ * Created by darksnake on 16.07.2017.
+ */
+public class MetaBlock implements NumassBlock {
+    private SortedSet<NumassBlock> blocks = new TreeSet<>(Comparator.comparing(NumassBlock::getStartTime));
+    public MetaBlock(NumassBlock... blocks) {
+        this.blocks.addAll(Arrays.asList(blocks));
+    }
+    public MetaBlock(Collection<NumassBlock> blocks) {
+        this.blocks.addAll(blocks);
+    }
+    @Override
+    public Instant getStartTime() {
+        return blocks.first().getStartTime();
+    }
+    @Override
+    public Duration getLength() {
+        return Duration.ofNanos(blocks.stream().mapToLong(block -> block.getLength().toNanos()).sum());
+    }
+    @Override
+    public Stream<NumassEvent> getEvents() {
+        return blocks.stream().flatMap(NumassBlock::getEvents);
+    }
+    @Override
+    public Stream<NumassFrame> getFrames() {
+        return blocks.stream().flatMap(NumassBlock::getFrames);
+    }
+}
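A minimal usage sketch for the new composite block. The syntheticBlock helper and the timestamps are illustrative only, assuming NumassBlock declares exactly the four methods MetaBlock overrides:

    import inr.numass.data.api.*;
    import java.time.Duration;
    import java.time.Instant;
    import java.util.stream.Stream;

    class MetaBlockExample {
        // a hypothetical one-second block with a single event 0.5 ms after its start
        static NumassBlock syntheticBlock(Instant start) {
            return new NumassBlock() {
                @Override public Instant getStartTime() { return start; }
                @Override public Duration getLength() { return Duration.ofSeconds(1); }
                @Override public Stream<NumassEvent> getEvents() {
                    return Stream.of(new NumassEvent((short) 100, start, 500_000L));
                }
                @Override public Stream<NumassFrame> getFrames() { return Stream.empty(); }
            };
        }

        public static void main(String[] args) {
            MetaBlock merged = new MetaBlock(
                    syntheticBlock(Instant.parse("2017-07-16T20:01:00Z")),
                    syntheticBlock(Instant.parse("2017-07-16T20:00:00Z")));
            System.out.println(merged.getStartTime());      // 2017-07-16T20:00:00Z: blocks are sorted by start time
            System.out.println(merged.getLength());         // PT2S: lengths are summed, gaps between blocks are not counted
            System.out.println(merged.getEvents().count()); // 2: event streams are concatenated
        }
    }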

View File

@@ -3,7 +3,6 @@ package inr.numass.data.api;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.*;
 import hep.dataforge.values.Values;
-import inr.numass.data.analyzers.SmartAnalyzer;
 import java.util.NavigableMap;
 import java.util.TreeMap;
@@ -18,37 +17,25 @@ import static hep.dataforge.tables.XYAdapter.*;
  */
 public interface NumassAnalyzer {
-    static Table getSpectrum(NumassBlock block, Meta config) {
-        TableFormat format = new TableFormatBuilder()
-                .addNumber("channel", X_VALUE_KEY)
-                .addNumber("count")
-                .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
-                .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
-                .updateMeta(metaBuilder -> metaBuilder.setNode("config", config))
-                .build();
-        NavigableMap<Short, AtomicLong> map = new TreeMap<>();
-        new SmartAnalyzer().getEventStream(block, config).forEach(event -> {
-            if (map.containsKey(event.getChanel())) {
-                map.get(event.getChanel()).incrementAndGet();
-            } else {
-                map.put(event.getChanel(), new AtomicLong(1));
-            }
-        });
-        return new ListTable.Builder(format)
-                .rows(map.entrySet().stream()
-                        .map(entry ->
-                                new ValueMap(format.namesAsArray(),
-                                        entry.getKey(),
-                                        entry.getValue(),
-                                        entry.getValue().get() / block.getLength().toMillis() * 1000,
-                                        Math.sqrt(entry.getValue().get()) / block.getLength().toMillis() * 1000
-                                )
-                        )
-                ).build();
+    /**
+     * Calculate number of counts in the given channel
+     * @param spectrum
+     * @param loChannel
+     * @param upChannel
+     * @return
+     */
+    static long countInWindow(Table spectrum, short loChannel, short upChannel) {
+        return spectrum.getRows().filter(row -> {
+            int channel = row.getInt(CHANNEL_KEY);
+            return channel > loChannel && channel < upChannel;
+        }).mapToLong(it -> it.getValue(COUNT_KEY).numberValue().longValue()).sum();
     }
+    String CHANNEL_KEY = "channel";
+    String COUNT_KEY = "count";
+    String LENGTH_KEY = "length";
     String COUNT_RATE_KEY = "cr";
-    String COUNT_RATE_ERROR_KEY = "crErr";
+    String COUNT_RATE_ERROR_KEY = "cr.err";
 /**
  * Perform analysis on block. The values for count rate, its error and point length in nanos must
@@ -76,4 +63,63 @@ public interface NumassAnalyzer {
  */
     Table analyze(NumassSet set, Meta config);
+    /**
+     * Calculate the energy spectrum for a given block. The s
+     *
+     * @param block
+     * @param config
+     * @return
+     */
+    default Table getSpectrum(NumassBlock block, Meta config) {
+        TableFormat format = new TableFormatBuilder()
+                .addNumber(CHANNEL_KEY, X_VALUE_KEY)
+                .addNumber(COUNT_KEY)
+                .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
+                .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
+                .updateMeta(metaBuilder -> metaBuilder.setNode("config", config))
+                .build();
+        NavigableMap<Short, AtomicLong> map = new TreeMap<>();
+        getEventStream(block, config).forEach(event -> {
+            if (map.containsKey(event.getChanel())) {
+                map.get(event.getChanel()).incrementAndGet();
+            } else {
+                map.put(event.getChanel(), new AtomicLong(1));
+            }
+        });
+        return new ListTable.Builder(format)
+                .rows(map.entrySet().stream()
+                        .map(entry ->
+                                new ValueMap(format.namesAsArray(),
+                                        entry.getKey(),
+                                        entry.getValue(),
+                                        entry.getValue().get() / block.getLength().toMillis() * 1000,
+                                        Math.sqrt(entry.getValue().get()) / block.getLength().toMillis() * 1000
+                                )
+                        )
+                ).build();
+    }
+    /**
+     * Get the approximate number of events in block. Not all analyzers support precise event counting
+     *
+     * @param block
+     * @param config
+     * @return
+     */
+    default long getCount(NumassBlock block, Meta config) {
+        return analyze(block, config).getValue(COUNT_KEY).numberValue().longValue();
+    }
+    /**
+     * Get approximate effective point length in nanos. It does not necessarily correspond to the real point length.
+     *
+     * @param block
+     * @param config
+     * @return
+     */
+    default long getLength(NumassBlock block, Meta config) {
+        return analyze(block, config).getValue(LENGTH_KEY).numberValue().longValue();
+    }
 }
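A hedged sketch of how the reworked interface composes; analyzer, block and the window bounds here are illustrative, and note that countInWindow treats both bounds as exclusive:

    NumassAnalyzer analyzer = new SmartAnalyzer();
    Meta config = Meta.empty();

    Table spectrum = analyzer.getSpectrum(block, config); // columns: channel, count, cr, cr.err
    long counts = NumassAnalyzer.countInWindow(spectrum, (short) 400, (short) 1800);
    double windowRate = (double) counts / block.getLength().toMillis() * 1000; // counts per second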

View File

@@ -21,19 +21,26 @@ import java.io.Serializable;
 import java.time.Instant;
 /**
- * A single numass event with given amplitude ant time.
+ * A single numass event with given amplitude and time.
  *
  * @author Darksnake
  */
 public class NumassEvent implements Comparable<NumassEvent>, Serializable {
     // channel
     protected final short chanel;
-    //time in nanoseconds
-    protected final long time;
+    //The time of the block start
+    protected final Instant blockTime;
+    //time in nanoseconds relative to block start
+    protected final long timeOffset;
-    public NumassEvent(short chanel, long time) {
+    public NumassEvent(short chanel, Instant blockTime, long offset) {
         this.chanel = chanel;
-        this.time = time;
+        this.blockTime = blockTime;
+        this.timeOffset = offset;
+    }
+    public NumassEvent(short chanel, long offset) {
+        this(chanel, Instant.EPOCH, offset);
     }
 /**
@@ -46,16 +53,20 @@ public class NumassEvent implements Comparable<NumassEvent>, Serializable {
     /**
      * @return the time
      */
-    public long getTime() {
-        return time;
+    public long getTimeOffset() {
+        return timeOffset;
     }
-    public Instant getAbsoluteTime(Instant offset) {
-        return offset.plusNanos(time);
+    public Instant getBlockTime() {
+        return blockTime;
+    }
+    public Instant getTime() {
+        return blockTime.plusNanos(timeOffset);
     }
     @Override
     public int compareTo(@NotNull NumassEvent o) {
-        return Long.compare(this.getTime(), o.getTime());
+        return this.getTime().compareTo(o.getTime());
     }
 }
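The timestamp is now split into a block start plus a nanosecond offset and reassembled on demand. A small sketch (values made up) of the invariants the new accessors keep:

    Instant blockStart = Instant.parse("2017-07-16T19:00:00Z");
    NumassEvent event = new NumassEvent((short) 1200, blockStart, 1_500L); // channel, block start, offset in ns

    assert event.getBlockTime().equals(blockStart);
    assert event.getTime().equals(blockStart.plusNanos(1_500L)); // absolute time, used by compareTo

    // The two-argument constructor keeps legacy call sites compiling: the block
    // start defaults to Instant.EPOCH, so getTime() degrades to "offset since epoch".
    NumassEvent legacy = new NumassEvent((short) 1200, 1_500L);
    assert legacy.getTime().equals(Instant.EPOCH.plusNanos(1_500L));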

View File

@@ -14,6 +14,11 @@ import java.util.stream.Stream;
 public class SimpleNumassPoint extends MetaHolder implements NumassPoint {
     private final List<NumassBlock> blocks;
+    /**
+     * Input blocks must be sorted
+     * @param voltage
+     * @param blocks
+     */
     public SimpleNumassPoint(double voltage, List<NumassBlock> blocks) {
         this.blocks = blocks;
         super.setMeta(new MetaBuilder("point").setValue(HV_KEY, voltage));

View File

@@ -112,7 +112,7 @@ public class ClassicNumassPoint {
                 short channel = (short) Short.toUnsignedInt(buffer.getShort());
                 long time = Integer.toUnsignedLong(buffer.getInt());
                 byte status = buffer.get(); // status is ignored
-                return new NumassEvent(channel, (long) (time * timeCoef));
+                return new NumassEvent(channel, startTime, (long) (time * timeCoef));
             } catch (IOException ex) {
                 LoggerFactory.getLogger(ClassicNumassPoint.this.getClass()).error("Unexpected IOException " +
                         "when reading block", ex);

View File

@@ -75,10 +75,11 @@ public class ProtoNumassPoint implements NumassPoint {
     @Override
     public Stream<NumassEvent> getEvents() {
+        Instant blockTime = getStartTime();
         if (block.hasEvents()) {
             NumassProto.Point.Channel.Block.Events events = block.getEvents();
             return IntStream.range(0, events.getTimesCount()).mapToObj(i ->
-                    new NumassEvent((short) events.getAmplitudes(i), events.getTimes(i))
+                    new NumassEvent((short) events.getAmplitudes(i), blockTime, events.getTimes(i))
             );
         } else {
             return Stream.empty();

View File

@@ -1,9 +1,11 @@
 package inr.numass.data
 import groovy.transform.CompileStatic
+import hep.dataforge.grind.Grind
 import hep.dataforge.maths.histogram.Histogram
 import hep.dataforge.maths.histogram.UnivariateHistogram
-import inr.numass.data.api.NumassEvent
+import inr.numass.data.analyzers.TimeAnalyzer
+import inr.numass.data.api.NumassBlock
 import java.util.stream.DoubleStream
@@ -13,67 +15,15 @@ import java.util.stream.DoubleStream
 @CompileStatic
 class PointAnalyzer {
-    static Result analyzePoint(RawNMPoint point, double t0 = 0, int loChannel = 0, int upChannel = 4000) {
-        int totalN = 0
-        double totalT = 0;
-        NumassEvent lastEvent = point.events[0];
-        for (int i = 1; i < point.events.size(); i++) {
-            NumassEvent event = point.events[i];
-            double t = event.time - lastEvent.time;
-            if (t < 0) {
-                lastEvent = event
-            } else if (t >= t0 && event.chanel <= upChannel && event.chanel >= loChannel) {
-                totalN++
-                totalT += t
-                lastEvent = event
-            }
-        }
-        double cr = 1d / (totalT / totalN - t0);
-        return new Result(cr: cr, crErr: cr / Math.sqrt(totalN), num: totalN, t0: t0, loChannel: loChannel, upChannel: upChannel)
-    }
-    static DoubleStream timeChain(int loChannel = 0, int upChannel = 4000, RawNMPoint... points) {
-        DoubleStream stream = DoubleStream.empty();
-        for (RawNMPoint point : points) {
-            List<Double> ts = new ArrayList<>();
-            NumassEvent lastEvent = point.events[0];
-            for (int i = 1; i < point.events.size(); i++) {
-                NumassEvent event = point.events[i];
-                double t = event.time - lastEvent.time;
-                if (t < 0) {
-                    lastEvent = event
-                } else if (t >= 0 && event.chanel <= upChannel && event.chanel >= loChannel) {
-                    ts << t
-                    lastEvent = event
-                }
-            }
-            stream = DoubleStream.concat(stream, ts.stream().mapToDouble { it });
-        }
-        return stream
-    }
-    /**
-     * Calculate the number of events in chain with delay and channel in given regions
-     * @param point
-     * @param t1
-     * @param t2
-     * @param loChannel
-     * @param upChannel
-     * @return
-     */
-    static long count(RawNMPoint point, double t1, double t2, int loChannel = 0, int upChannel = 4000) {
-        return timeChain(loChannel, upChannel, point).filter { it > t1 && it < t2 }.count();
-    }
-    static Histogram histogram(RawNMPoint point, int loChannel = 0, int upChannel = 4000, double binSize = 1e-6d, int binNum = 500) {
-        return UnivariateHistogram.buildUniform(0d, binSize*binNum, binSize).fill(timeChain(loChannel, upChannel, point))
+    static TimeAnalyzer analyzer = new TimeAnalyzer();
+    static Histogram histogram(NumassBlock point, int loChannel = 0, int upChannel = 4000, double binSize = 1e-6d, int binNum = 500) {
+        return UnivariateHistogram.buildUniform(0d, binSize * binNum, binSize)
+                .fill(analyzer.timeChain(point, Grind.buildMeta("window.lo": loChannel, "window.up": upChannel)))
     }
     static Histogram histogram(DoubleStream stream, double binSize = 1e-6d, int binNum = 500) {
-        return UnivariateHistogram.buildUniform(0d, binSize*binNum, binSize).fill(stream)
+        return UnivariateHistogram.buildUniform(0d, binSize * binNum, binSize).fill(stream)
     }
     static class Result {

View File

@@ -1,62 +0,0 @@
-/*
- * To change this license header, choose License Headers in Project Properties.
- * To change this template file, choose Tools | Templates
- * and open the template in the editor.
- */
-package inr.numass.scripts
-import hep.dataforge.io.ColumnedDataWriter
-import hep.dataforge.tables.ListTable
-import hep.dataforge.tables.TableFormatBuilder
-import hep.dataforge.tables.ValueMap
-NumassData.metaClass.findPoint { double u ->
-    delegate.getNMPoints().getWork { it.getVoltage() == u }.getMap(20, true)
-}
-Map<Double, Double> dif(NumassData data1, NumassData data2, double uset) {
-    Map<Double, Double> spectrum1 = data1.findPoint(uset);
-    Map<Double, Double> spectrum2 = data2.findPoint(uset);
-    Map<Double, Double> dif = new LinkedHashMap<>();
-    spectrum1.each { key, value -> dif.put(key, Math.max(spectrum1.get(key) - spectrum2.get(key), 0d)) }
-    return dif;
-}
-def buildSet(NumassData data1, NumassData data2, double... points) {
-    TableFormatBuilder builder = new TableFormatBuilder().addNumber("channel");
-    List<ValueMap> pointList = new ArrayList<>();
-    for (double point : points) {
-        builder.addNumber(Double.toString(point));
-        Map<Double, Double> dif = dif(data1, data2, point);
-        if (pointList.isEmpty()) {
-            for (Double channel : dif.keySet()) {
-                ValueMap p = new ValueMap();
-                p.putValue("channel", channel);
-                pointList.add(p);
-            }
-        }
-        for (ValueMap mp : pointList) {
-            double channel = mp.getValue("channel").doubleValue();
-            mp.putValue(Double.toString(point), dif.get(channel));
-        }
-    }
-    ListTable set = new ListTable(pointList, builder.build());
-}
-NumassData data1 = NMFile.readFile(new File("D:\\Work\\Numass\\transmission 2013\\STABIL04.DAT"));
-NumassData data2 = NMFile.readFile(new File("D:\\Work\\Numass\\transmission 2013\\DARK04.DAT"));
-double[] points = [14500, 15000, 15500, 16000, 18100, 18200, 18300]
-ColumnedDataWriter.writeTable(System.out, buildSet(data1, data2, points), "Detector spectrum substraction");

View File

@@ -1,20 +0,0 @@
-/*
- * To change this license header, choose License Headers in Project Properties.
- * To change this template file, choose Tools | Templates
- * and open the template in the editor.
- */
-package inr.numass.scripts
-import hep.dataforge.grind.GrindMetaBuilder
-import hep.dataforge.meta.Meta
-import inr.numass.data.storage.NumassDataLoader
-File dataDir = new File("D:\\Work\\Numass\\data\\2016_04\\T2_data\\Fill_2_2\\set_6_e26d123e54010000")
-if (!dataDir.exists()) {
-    println "dataDir directory does not exist"
-}
-Meta config = new GrindMetaBuilder().config(lower: 400, upper: 1800, reference: 18500)
-println config
-NumassData data = NumassDataLoader.fromLocalDir(null, dataDir)
-new FindBorderAction().simpleRun(data, config)

View File

@@ -8,10 +8,11 @@ package inr.numass.scripts
 import hep.dataforge.grind.Grind
 import hep.dataforge.values.Values
+import inr.numass.data.api.NumassPoint
 import inr.numass.data.storage.NumassDataLoader
 import inr.numass.utils.NMEventGeneratorWithPulser
+import inr.numass.utils.NumassUtils
 import inr.numass.utils.PileUpSimulator
-import inr.numass.utils.TritiumUtils
 import inr.numass.utils.UnderflowCorrection
 import org.apache.commons.math3.random.JDKRandomGenerator
@@ -52,7 +53,7 @@ List<NumassPoint> pileup = new ArrayList<>();
 lowerChannel = 400;
 upperChannel = 1800;
-PileUpSimulator buildSimulator(NumassPointImpl point, double cr, NumassPoint reference = null, boolean extrapolate = true, double scale = 1d) {
+PileUpSimulator buildSimulator(NumassPoint point, double cr, NumassPoint reference = null, boolean extrapolate = true, double scale = 1d) {
     def cfg = Grind.buildMeta(cr: cr) {
         pulser(mean: 3450, sigma: 86.45, freq: 66.43)
     }
@@ -61,7 +62,7 @@ PileUpSimulator buildSimulator(NumassPointImpl point, double cr, NumassPoint ref
     if (extrapolate) {
         double[] chanels = new double[RawNMPoint.MAX_CHANEL];
         double[] values = new double[RawNMPoint.MAX_CHANEL];
-        Values fitResult = new UnderflowCorrection().fitPoint(point, 400, 600, 1800, 20); numa
+        Values fitResult = new UnderflowCorrection().fitPoint(point, 400, 600, 1800, 20);
         def amp = fitResult.getDouble("amp")
         def sigma = fitResult.getDouble("expConst")
@@ -86,14 +87,14 @@ PileUpSimulator buildSimulator(NumassPointImpl point, double cr, NumassPoint ref
     return new PileUpSimulator(point.length * scale, rnd, generator).withUset(point.voltage).generate();
 }
-double adjustCountRate(PileUpSimulator simulator, NumassPointImpl point) {
+double adjustCountRate(PileUpSimulator simulator, NumassPoint point) {
     double generatedInChannel = simulator.generated().getCountInWindow(lowerChannel, upperChannel);
     double registeredInChannel = simulator.registered().getCountInWindow(lowerChannel, upperChannel);
     return (generatedInChannel / registeredInChannel) * (point.getCountInWindow(lowerChannel, upperChannel) / point.getLength());
 }
 data.forEach { point ->
-    double cr = TritiumUtils.countRateWithDeadTime(point, lowerChannel, upperChannel, 6.55e-6);
+    double cr = NumassUtils.countRateWithDeadTime(point, lowerChannel, upperChannel, 6.55e-6);
     PileUpSimulator simulator = buildSimulator(point, cr);

View File

@@ -66,7 +66,7 @@ FittingIOUtils.printSpectrum(Global.out(), spectrum, allPars, 14000, 18600.0, 40
 //
 //ListTable data = generator.generateData(DataModelUtils.getUniformSpectrumConfiguration(14000d, 18500, 2000, 90));
 //
-//data = TritiumUtils.correctForDeadTime(data, new SpectrumDataAdapter(), 1e-8);
+//data = NumassUtils.correctForDeadTime(data, new SpectrumDataAdapter(), 1e-8);
 ////        data = data.filter("X", Value.of(15510.0), Value.of(18610.0));
 ////        allPars.setParValue("X", 0.4);
 //FitState state = new FitState(data, model, allPars);

View File

@@ -7,6 +7,8 @@ import hep.dataforge.grind.helpers.PlotHelper
 import hep.dataforge.plots.fx.FXPlotManager
 import inr.numass.NumassPlugin
 import inr.numass.data.PointAnalyzer
+import inr.numass.data.analyzers.TimeAnalyzer
+import inr.numass.data.api.NumassPoint
 import inr.numass.data.storage.NumassDataLoader
 import inr.numass.data.storage.NumassStorage
 import inr.numass.data.storage.NumassStorageFactory
@@ -33,12 +35,12 @@ new GrindShell(ctx).eval {
     println "Found ${loaders.size()} loaders matching pattern"
     def hv = 16000.toString();
-    List<RawNMPoint> points = loaders.collect { loader -> loader.optRawPoint(hv).get() }
+    List<NumassPoint> points = loaders.collect { loader -> loader.optPoint(hv).get() }
     def loChannel = 400;
     def upChannel = 800;
-    def chain = PointAnalyzer.timeChain(loChannel, upChannel, points as RawNMPoint[])
+    def chain = new TimeAnalyzer().timeChain(loChannel, upChannel, points as NumassPoint[])
     def histogram = PointAnalyzer.histogram(chain, 5e-6, 500).asTable();

View File

@@ -15,15 +15,14 @@
  */
 package inr.numass;
-import hep.dataforge.data.FileDataFactory;
-import hep.dataforge.data.binary.Binary;
 import hep.dataforge.io.BasicIOManager;
-import hep.dataforge.meta.Meta;
 import hep.dataforge.names.Name;
-import org.apache.commons.io.FilenameUtils;
 import org.apache.commons.io.output.TeeOutputStream;
-import java.io.*;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -36,9 +35,9 @@ public class NumassIO extends BasicIOManager {
     public static final String NUMASS_OUTPUT_CONTEXT_KEY = "numass.outputDir";
-    public static RawNMFile readAsDat(Binary source, Meta config) throws IOException {
-        return new LegacyDataReader(source, config).read();
-    }
+//    public static RawNMFile readAsDat(Binary source, Meta config) throws IOException {
+//        return new LegacyDataReader(source, config).read();
+//    }
 //    private File getOutputDir() {
 //        String outputDirPath = getContext().getString(NUMASS_OUTPUT_CONTEXT_KEY, ".");
@@ -54,22 +53,23 @@ public class NumassIO extends BasicIOManager {
 //        return new NumassPawReader().readPaw(source, config.getString(FileDataFactory.FILE_NAME_KEY));
 //    }
-    public static RawNMFile getNumassData(Binary binary, Meta config) {
-        try {
-            RawNMFile dataFile;
-            String extension = FilenameUtils.getExtension(config.getString(FileDataFactory.FILE_NAME_KEY)).toLowerCase();
-            switch (extension) {
-                case "dat":
-                    dataFile = readAsDat(binary, config);
-                    break;
-                default:
-                    throw new RuntimeException("Wrong file format");
-            }
-            return dataFile;
-        } catch (IOException ex) {
-            throw new RuntimeException(ex);
-        }
-    }
+//    public static RawNMFile getNumassData(Binary binary, Meta config) {
+//        try {
+//            RawNMFile dataFile;
+//            String extension = FilenameUtils.getExtension(config.getString(FileDataFactory.FILE_NAME_KEY)).toLowerCase();
+//            switch (extension) {
+//                case "dat":
+//                    dataFile = readAsDat(binary, config);
+//                    break;
+//                default:
+//                    throw new RuntimeException("Wrong file format");
+//            }
+//            return dataFile;
+//        } catch (IOException ex) {
+//            throw new RuntimeException(ex);
+//        }
+//    }
     @Override
     public OutputStream out(Name stage, Name name) {

View File

@@ -80,16 +80,12 @@ public class NumassPlugin extends BasicPlugin {
         ActionManager actions = context.pluginManager().getOrLoad(ActionManager.class);
         actions.attach(context);
-        actions.putAction(PrepareDataAction.class);
-        actions.putAction(ReadLegacyDataAction.class);
         actions.putAction(MergeDataAction.class);
-        actions.putAction(FindBorderAction.class);
         actions.putAction(MonitorCorrectAction.class);
         actions.putAction(SummaryAction.class);
         actions.putAction(PlotDataAction.class);
         actions.putAction(PlotFitResultAction.class);
         actions.putAction(AdjustErrorsAction.class);
-        actions.putAction(ShowEnergySpectrumAction.class);
         actions.putAction(SubstractSpectrumAction.class);
         actions.putTask(NumassPrepareTask.class);

View File

@@ -4,14 +4,12 @@ import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.description.ValueDef;
-import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.tables.Table;
 import inr.numass.data.analyzers.SmartAnalyzer;
 import inr.numass.data.api.NumassAnalyzer;
 import inr.numass.data.api.NumassSet;
+import inr.numass.utils.NumassUtils;
-import java.io.OutputStream;
 import static hep.dataforge.values.ValueType.NUMBER;
 import static hep.dataforge.values.ValueType.STRING;
@@ -28,10 +26,8 @@ public class AnalyzeDataAction extends OneToOneAction<NumassSet, Table> {
     protected Table execute(Context context, String name, NumassSet input, Laminate inputMeta) {
         //TODO add processor here
         NumassAnalyzer analyzer = new SmartAnalyzer();
-        Table res = analyzer.analyze(input,inputMeta);
-        OutputStream stream = buildActionOutput(context, name);
-        ColumnedDataWriter.writeTable(stream, data, head);
+        Table res = analyzer.analyze(input, inputMeta);
+        output(context, name, stream -> NumassUtils.writeSomething(stream, inputMeta, res));
         return res;
     }
 }
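The output(context, name, stream -> NumassUtils.writeSomething(stream, meta, table)) call recurs in the actions below, replacing every buildActionOutput/ColumnedDataWriter pair. NumassUtils.writeSomething itself is not part of this diff; a plausible shape, judging from the meta headers the removed code used to write, would be (hypothetical sketch only):

    // not the actual implementation: NumassUtils.writeSomething is not shown in this commit
    public static void writeSomething(OutputStream stream, Meta meta, Table table) {
        PrintWriter writer = new PrintWriter(stream);
        writer.println(new XMLMetaWriter().writeString(meta)); // meta header, like the old `head` strings
        writer.flush();
        ColumnedDataWriter.writeTable(stream, table, "");      // table body in the old columned format
    }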

View File

@@ -21,13 +21,12 @@ import hep.dataforge.context.Context;
 import hep.dataforge.data.DataNode;
 import hep.dataforge.description.NodeDef;
 import hep.dataforge.description.TypedActionDef;
-import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.*;
 import hep.dataforge.values.Values;
+import inr.numass.utils.NumassUtils;
-import java.io.OutputStream;
 import java.util.*;
 /**
@@ -61,8 +60,7 @@ public class MergeDataAction extends ManyToOneAction<Table, Table> {
     @Override
     protected void afterGroup(Context context, String groupName, Meta outputMeta, Table output) {
-        OutputStream stream = buildActionOutput(context, groupName);
-        ColumnedDataWriter.writeTable(stream, output, outputMeta.toString());
+        output(context, groupName, stream -> NumassUtils.writeSomething(stream, outputMeta, output));
     }
 //    @Override
View File

@@ -20,20 +20,18 @@ import hep.dataforge.context.Context;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.description.ValueDef;
 import hep.dataforge.exceptions.ContentException;
-import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.ListTable;
 import hep.dataforge.tables.Table;
-import hep.dataforge.tables.TableTransform;
 import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Value;
 import hep.dataforge.values.Values;
+import inr.numass.utils.NumassUtils;
 import javafx.util.Pair;
 import org.apache.commons.math3.analysis.interpolation.SplineInterpolator;
 import org.apache.commons.math3.analysis.polynomials.PolynomialSplineFunction;
-import java.io.OutputStream;
 import java.time.Instant;
 import java.util.ArrayList;
 import java.util.List;
@@ -134,13 +132,11 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
 //        } else {
 //            format = DataFormat.of(parnames);
 //        }
-        Table data = new ListTable(dataList);
-        OutputStream stream = buildActionOutput(context, name);
-        ColumnedDataWriter.writeTable(stream, data, head);
-        return data;
+        Table res = new ListTable(dataList);
+        output(context, name, stream -> NumassUtils.writeSomething(stream, meta, res));
+        return res;
     }
     private Pair<Double, Double> getSplineCorrection(TreeMap<Instant, Values> index, Values dp, double norm) {
@@ -201,9 +197,10 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
     private void printMonitorData(Context context, Meta meta) {
         if (!monitorPoints.isEmpty()) {
             String monitorFileName = meta.getString("monitorFile", "monitor");
-            OutputStream stream = buildActionOutput(context, monitorFileName);
             ListTable data = new ListTable(monitorPoints);
-            ColumnedDataWriter.writeTable(stream, TableTransform.sort(data, "Timestamp", true), "Monitor points", monitorNames);
+            output(context, monitorFileName, stream -> NumassUtils.writeSomething(stream, meta, data));
+//            ColumnedDataWriter.writeTable(stream, TableTransform.sort(data, "Timestamp", true), "Monitor points", monitorNames);
         }
     }

View File

@@ -1,269 +0,0 @@
-/*
- * Copyright 2015 Alexander Nozik.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package inr.numass.actions;
-import hep.dataforge.actions.OneToOneAction;
-import hep.dataforge.context.Context;
-import hep.dataforge.description.NodeDef;
-import hep.dataforge.description.TypedActionDef;
-import hep.dataforge.description.ValueDef;
-import hep.dataforge.exceptions.ContentException;
-import hep.dataforge.io.ColumnedDataWriter;
-import hep.dataforge.io.XMLMetaWriter;
-import hep.dataforge.meta.Laminate;
-import hep.dataforge.meta.Meta;
-import hep.dataforge.tables.*;
-import hep.dataforge.values.Values;
-import inr.numass.data.api.NumassPoint;
-import inr.numass.data.api.NumassSet;
-import inr.numass.debunch.DebunchReport;
-import inr.numass.debunch.FrameAnalizer;
-import inr.numass.utils.ExpressionUtils;
-import java.io.OutputStream;
-import java.time.Instant;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.function.Function;
-import java.util.stream.Collectors;
-import static hep.dataforge.values.ValueType.NUMBER;
-import static hep.dataforge.values.ValueType.STRING;
-import static inr.numass.utils.TritiumUtils.pointExpression;
-/**
- * @author Darksnake
- */
-@TypedActionDef(name = "prepareData", inputType = NumassSet.class, outputType = Table.class)
-@ValueDef(name = "lowerWindow", type = {NUMBER}, def = "0", info = "Base for the window lowerWindow bound")
-@ValueDef(name = "lowerWindowSlope", type = {NUMBER}, def = "0", info = "Slope for the window lowerWindow bound")
-@ValueDef(name = "upperWindow", type = {NUMBER}, info = "Upper bound for window")
-@ValueDef(name = "deadTime", type = {NUMBER, STRING}, info = "Dead time in s. Could be an expression.")
-@ValueDef(name = "correction",
-        info = "An expression to correct count number depending on potential `U`, point length `T` and point itself as `point`")
-@ValueDef(name = "utransform", info = "Expression for voltage transformation. Uses U as input")
-@NodeDef(name = "correction", multiple = true, target = "method::inr.numass.actions.PrepareDataAction.makeCorrection")
-public class PrepareDataAction extends OneToOneAction<NumassSet, Table> {
-    public static String[] parnames = {"Uset", "Uread", "Length", "Total", "Window", "Corr", "CR", "CRerr", "Timestamp"};
-    private int getLowerBorder(Meta meta, double Uset) throws ContentException {
-        double b = meta.getDouble("lowerWindow", 0);
-        double a = meta.getDouble("lowerWindowSlope", 0);
-        return Math.max((int) (b + Uset * a), 0);
-    }
-    @Override
-    protected ListTable execute(Context context, String name, NumassSet dataFile, Laminate meta) {
-//        log.report("File %s started", dataFile.getName());
-        int upper = meta.getInt("upperWindow", Integer.MAX_VALUE);
-        List<Correction> corrections = new ArrayList<>();
-        if (meta.hasValue("deadTime")) {
-            corrections.add(new DeadTimeCorrection(meta.getString("deadTime")));
-        }
-        if (meta.optMeta("correction").isPresent()) {
-            corrections.addAll(meta.getMetaList("correction").stream()
-                    .map((Function<Meta, Correction>) this::makeCorrection)
-                    .collect(Collectors.toList()));
-        }
-        if (meta.hasValue("correction")) {
-            final String correction = meta.getString("correction");
-            corrections.add((point) -> pointExpression(correction, point));
-        }
-        Function<Double, Double> utransform;
-        if (meta.hasValue("utransform")) {
-            String func = meta.getString("utransform");
-            utransform = u -> {
-                Map<String, Object> binding = new HashMap<>();
-                binding.put("U", u);
-                return ExpressionUtils.function(func, binding);
-            };
-        } else {
-            utransform = Function.identity();
-        }
-//        if (meta.hasMeta("debunch")) {
-//            if (dataFile instanceof NumassDataLoader) {
-//                dataFile = ((NumassDataLoader) dataFile).applyRawTransformation(raw -> debunch(context, raw, meta.getMeta("debunch")));
-//            } else {
-//                throw new RuntimeException("Debunch not available");
-//            }
-//        }
-        List<Values> dataList = new ArrayList<>();
-        dataFile.getPoints().forEach(point -> {
-            long total = point.getTotalCount();
-            double uset = utransform.apply(point.getVoltage());
-            double uread = utransform.apply(point.getVoltage());
-            double time = point.getLength();
-            int a = getLowerBorder(meta, uset);
-            int b = Math.min(upper, RawNMPoint.MAX_CHANEL);
-            // count in window
-            long wind = point.getCountInWindow(a, b);
-            double correctionFactor = corrections.stream()
-                    .mapToDouble(cor -> cor.corr(point))
-                    .reduce((d1, d2) -> d1 * d2).orElse(1);
-            double relativeCorrectionError = Math.sqrt(
-                    corrections.stream()
-                            .mapToDouble(cor -> cor.relativeErr(point))
-                            .reduce((d1, d2) -> d1 * d1 + d2 * d2).orElse(0)
-            );
-            double cr = wind / point.getLength() * correctionFactor;
-            double crErr;
-            if (relativeCorrectionError == 0) {
-                crErr = Math.sqrt(wind) / point.getLength() * correctionFactor;
-            } else {
-                crErr = Math.sqrt(1d / wind + Math.pow(relativeCorrectionError, 2)) * cr;
-            }
-            Instant timestamp = point.getStartTime();
-            dataList.add(new ValueMap(parnames, new Object[]{uset, uread, time, total, wind, correctionFactor, cr, crErr, timestamp}));
-        });
-        TableFormat format;
-        if (!dataList.isEmpty()) {
-            //Generate an automatic format from the first row
-            format = MetaTableFormat.forPoint(dataList.get(0));
-        } else {
-            format = MetaTableFormat.forNames(parnames);
-        }
-        String head;
-        if (dataFile.meta() != null) {
-            head = dataFile.meta().toString();
-        } else {
-            head = dataFile.getName();
-        }
-        head = head + "\n" + new XMLMetaWriter().writeString(meta) + "\n";
-        ListTable data = new ListTable(format, dataList);
-        OutputStream stream = buildActionOutput(context, name);
-        ColumnedDataWriter.writeTable(stream, data, head);
-//        log.logString("File %s completed", dataFile.getName());
-        return data;
-    }
-    @ValueDef(name = "value", type = {NUMBER, STRING}, info = "Value or function to multiply count rate")
-    @ValueDef(name = "err", type = {NUMBER, STRING}, info = "error of the value")
-    private Correction makeCorrection(Meta corrMeta) {
-        final String expr = corrMeta.getString("value");
-        final String errExpr = corrMeta.getString("err", "");
-        return new Correction() {
-            @Override
-            public double corr(NumassPoint point) {
-                return pointExpression(expr, point);
-            }
-            @Override
-            public double corrErr(NumassPoint point) {
-                if (errExpr.isEmpty()) {
-                    return 0;
-                } else {
-                    return pointExpression(errExpr, point);
-                }
-            }
-        };
-    }
-    private NumassPoint debunch(Context context, RawNMPoint point, Meta meta) {
-        int upper = meta.getInt("upperchanel", RawNMPoint.MAX_CHANEL);
-        int lower = meta.getInt("lowerchanel", 0);
-        double rejectionprob = meta.getDouble("rejectprob", 1e-10);
-        double framelength = meta.getDouble("framelength", 1);
-        double maxCR = meta.getDouble("maxcr", 500d);
-        double cr = point.selectChanels(lower, upper).getCr();
-        if (cr < maxCR) {
-            DebunchReport report = new FrameAnalizer(rejectionprob, framelength, lower, upper).debunchPoint(point);
-            return PointBuilders.readRawPoint(report.getPoint());
-        } else {
-            return PointBuilders.readRawPoint(point);
-        }
-    }
-    private interface Correction {
-        /**
-         * correction coefficient
-         *
-         * @param point
-         * @return
-         */
-        double corr(NumassPoint point);
-        /**
-         * correction coefficient uncertainty
-         *
-         * @param point
-         * @return
-         */
-        default double corrErr(NumassPoint point) {
-            return 0;
-        }
-        default double relativeErr(NumassPoint point) {
-            double corrErr = corrErr(point);
-            if (corrErr == 0) {
-                return 0;
-            } else {
-                return corrErr / corr(point);
-            }
-        }
-    }
-    private class DeadTimeCorrection implements Correction {
-        private final Function<NumassPoint, Double> deadTimeFunction;
-        public DeadTimeCorrection(String expr) {
-            deadTimeFunction = point -> pointExpression(expr, point);
-        }
-        @Override
-        public double corr(NumassPoint point) {
-            double deadTime = deadTimeFunction.apply(point);
-            if (deadTime > 0) {
-                double factor = deadTime / point.getLength() * point.getTotalCount();
-//                double total = point.getTotalCount();
-//                double time = point.getLength();
-//                return 1d/(1d - factor);
-                return (1d - Math.sqrt(1d - 4d * factor)) / 2d / factor;
-            } else {
-                return 1d;
-            }
-        }
-    }
-}
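The dead-time factor in the removed DeadTimeCorrection is f = tau * totalCount / length, i.e. the measured rate m times the dead time tau. One model consistent with the returned expression is a measured rate suppressed as m = n(1 - n*tau) for true rate n; solving the quadratic and taking the physical root gives

    m = n(1 - n\tau) \;\Rightarrow\; \tau n^2 - n + m = 0 \;\Rightarrow\;
    n = \frac{1 - \sqrt{1 - 4m\tau}}{2\tau}, \qquad
    \frac{n}{m} = \frac{1 - \sqrt{1 - 4f}}{2f}, \quad f = m\tau

which is exactly the returned coefficient; the commented-out 1/(1 - f) line is the first-order (non-paralyzable) alternative.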

View File

@@ -1,115 +0,0 @@
-/*
- * To change this license header, choose License Headers in Project Properties.
- * To change this template file, choose Tools | Templates
- * and open the template in the editor.
- */
-package inr.numass.actions;
-import hep.dataforge.actions.OneToOneAction;
-import hep.dataforge.context.Context;
-import hep.dataforge.description.TypedActionDef;
-import hep.dataforge.io.ColumnedDataWriter;
-import hep.dataforge.meta.Laminate;
-import hep.dataforge.meta.Meta;
-import hep.dataforge.meta.MetaBuilder;
-import hep.dataforge.plots.PlotFrame;
-import hep.dataforge.plots.PlotUtils;
-import hep.dataforge.plots.data.PlottableData;
-import hep.dataforge.plots.data.XYPlottable;
-import hep.dataforge.tables.*;
-import hep.dataforge.values.ValueType;
-import hep.dataforge.values.Values;
-import inr.numass.data.api.NumassSet;
-import java.io.OutputStream;
-import java.util.*;
-import java.util.stream.Collectors;
-/**
- *
- * @author Alexander Nozik
- */
-@TypedActionDef(inputType = NumassSet.class, outputType = Table.class, name = "energySpectrum", info = "Generate output table and optionally plot for detector energy spectra")
-public class ShowEnergySpectrumAction extends OneToOneAction<NumassSet, Table> {
-    @Override
-    protected Table execute(Context context, String name, NumassSet input, Laminate inputMeta) {
-        int binning = inputMeta.getInt("binning", 20);
-        boolean normalize = inputMeta.getBoolean("normalize", true);
-        //build header
-        List<String> names = new ArrayList<>();
-        for (int i = 0; i < points.size(); i++) {
-            names.add(String.format("%d: %.2f", i, points.get(i).getVoltage()));
-        }
-        LinkedHashMap<String, Map<Double, Double>> valueMap = points.stream()
-                .collect(Collectors.toMap(
-                        p -> names.get(points.indexOf(p)),
-                        p -> p.getMap(binning, normalize),
-                        (v1, v2) -> v1,
-                        () -> new LinkedHashMap<>()
-                ));
-        Collection<Double> rows = valueMap.values().stream().findAny().get().keySet();
-        //Building table format
-        TableFormatBuilder formatBuilder = new TableFormatBuilder();
-        formatBuilder.addColumn("channel", ValueType.NUMBER);
-        names.stream().forEach((columnName) -> {
-            formatBuilder.addColumn(columnName, ValueType.NUMBER);
-        });
-        ListTable.Builder builder = new ListTable.Builder(formatBuilder.build());
-        rows.stream().forEachOrdered((Double channel) -> {
-            ValueMap.Builder mb = new ValueMap.Builder();
-            mb.putValue("channel", channel);
-            valueMap.entrySet().forEach((Map.Entry<String, Map<Double, Double>> entry) -> {
-                mb.putValue(entry.getKey(), entry.getValue().get(channel));
-            });
-            builder.row(mb.build());
-        });
-        OutputStream out = buildActionOutput(context, name);
-        Table table = builder.build();
-        ColumnedDataWriter.writeTable(out, table, inputMeta.toString());
-        if (inputMeta.hasMeta("plot") || inputMeta.getBoolean("plot", false)) {
-            PlotFrame frame = PlotUtils.getPlotManager(context)
-                    .buildPlotFrame(getName(), name,
-                            inputMeta.getMeta("plot", Meta.empty()));
-            fillDetectorData(valueMap).forEach(frame::add);
-        }
-        return table;
-    }
-    private List<XYPlottable> fillDetectorData(LinkedHashMap<String, Map<Double, Double>> map) {
-        List<XYPlottable> plottables = new ArrayList<>();
-        Meta plottableConfig = new MetaBuilder("plot")
-                .setValue("connectionType", "step")
-                .setValue("thickness", 2)
-                .setValue("showLine", true)
-                .setValue("showSymbol", false)
-                .setValue("showErrors", false)
-                .build();
-        int index = 0;
-        for (Map.Entry<String, Map<Double, Double>> entry : map.entrySet()) {
-            index++;
-            String seriesName = String.format("%d: %s", index, entry.getKey());
-            String[] nameList = {XYAdapter.X_VALUE_KEY, XYAdapter.Y_VALUE_KEY};
-            List<Values> data = entry.getValue().entrySet().stream()
-                    .map(e -> new ValueMap(nameList, e.getKey(), e.getValue()))
-                    .collect(Collectors.toList());
-            PlottableData datum = PlottableData.plot(seriesName, XYAdapter.DEFAULT_ADAPTER, data);
-            datum.configure(plottableConfig);
-            plottables.add(datum);
-        }
-        return plottables;
-    }
-}

View File

@@ -9,16 +9,15 @@ import hep.dataforge.actions.OneToOneAction;
 import hep.dataforge.context.Context;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.io.ColumnedDataReader;
-import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.tables.ListTable;
 import hep.dataforge.tables.Table;
 import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Values;
+import inr.numass.utils.NumassUtils;
 import java.io.File;
 import java.io.IOException;
-import java.io.OutputStream;
 import java.util.Optional;
 /**
@@ -48,8 +47,7 @@ public class SubstractSpectrumAction extends OneToOneAction<Table, Table> {
             });
             Table res = builder.build();
-            OutputStream stream = buildActionOutput(context, name);
-            ColumnedDataWriter.writeTable(stream, res, inputMeta.toString());
+            output(context, name, stream -> NumassUtils.writeSomething(stream, inputMeta, res));
             return res;
         } catch (IOException ex) {
             throw new RuntimeException("Could not read reference file", ex);

View File

@@ -21,7 +21,6 @@ import hep.dataforge.context.Context;
 import hep.dataforge.data.DataNode;
 import hep.dataforge.description.TypedActionDef;
 import hep.dataforge.description.ValueDef;
-import hep.dataforge.io.ColumnedDataWriter;
 import hep.dataforge.meta.Laminate;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.stat.fit.FitState;
@@ -31,8 +30,8 @@ import hep.dataforge.tables.Table;
 import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Value;
 import hep.dataforge.values.Values;
+import inr.numass.utils.NumassUtils;
-import java.io.OutputStream;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
@@ -116,9 +115,7 @@ public class SummaryAction extends ManyToOneAction<FitState, Table> {
     @Override
     protected void afterGroup(Context context, String groupName, Meta outputMeta, Table output) {
-        OutputStream stream = buildActionOutput(context, groupName);
-        ColumnedDataWriter.writeTable(stream, output, groupName);
+        output(context, groupName, stream -> NumassUtils.writeSomething(stream, outputMeta, output));
         super.afterGroup(context, groupName, outputMeta, output);
     }

View File

@ -2,37 +2,42 @@ package inr.numass.actions;
import hep.dataforge.actions.OneToOneAction; import hep.dataforge.actions.OneToOneAction;
import hep.dataforge.context.Context; import hep.dataforge.context.Context;
import hep.dataforge.description.NodeDef;
import hep.dataforge.description.TypedActionDef;
import hep.dataforge.description.ValueDef; import hep.dataforge.description.ValueDef;
import hep.dataforge.meta.Laminate; import hep.dataforge.meta.Laminate;
import hep.dataforge.meta.Meta; import hep.dataforge.meta.Meta;
import hep.dataforge.names.Named;
import hep.dataforge.tables.ColumnFormat;
import hep.dataforge.tables.ColumnTable;
import hep.dataforge.tables.ListColumn;
import hep.dataforge.tables.Table; import hep.dataforge.tables.Table;
import hep.dataforge.values.Values; import hep.dataforge.values.Values;
import inr.numass.utils.ExpressionUtils; import inr.numass.utils.NumassUtils;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.function.Function; import java.util.function.Function;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static hep.dataforge.values.ValueType.NUMBER; import static hep.dataforge.values.ValueType.NUMBER;
import static hep.dataforge.values.ValueType.STRING; import static hep.dataforge.values.ValueType.STRING;
import static inr.numass.utils.TritiumUtils.pointExpression; import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_ERROR_KEY;
import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY;
import static inr.numass.utils.NumassUtils.pointExpression;
/** /**
* Apply corrections and transformations to analyzed data * Apply corrections and transformations to analyzed data
* Created by darksnake on 11.07.2017. * Created by darksnake on 11.07.2017.
*/ */
@TypedActionDef(name = "numass.transform", inputType = Table.class, outputType = Table.class)
@ValueDef(name = "correction",
info = "An expression to correct count number depending on potential `U`, point length `T` and point itself as `point`")
@ValueDef(name = "utransform", info = "Expression for voltage transformation. Uses U as input")
@NodeDef(name = "correction", multiple = true, target = "method::inr.numass.actions.PrepareDataAction.makeCorrection")
public class TransformDataAction extends OneToOneAction<Table, Table> { public class TransformDataAction extends OneToOneAction<Table, Table> {
@Override @Override
protected Table execute(Context context, String name, Table input, Laminate meta) { protected Table execute(Context context, String name, Table input, Laminate meta) {
UnaryOperator<Values> transformation = UnaryOperator.identity();
List<Correction> corrections = new ArrayList<>(); List<Correction> corrections = new ArrayList<>();
if (meta.optMeta("correction").isPresent()) { if (meta.optMeta("correction").isPresent()) {
@ -43,21 +48,60 @@ public class TransformDataAction extends OneToOneAction<Table, Table> {
if (meta.hasValue("correction")) { if (meta.hasValue("correction")) {
final String correction = meta.getString("correction"); final String correction = meta.getString("correction");
corrections.add((point) -> pointExpression(correction, point)); corrections.add(point -> pointExpression(correction, point));
} }
Function<Double, Double> utransform;
if (meta.hasValue("utransform")) { ColumnTable table = ColumnTable.copy(input);
String func = meta.getString("utransform");
utransform = u -> { for (Correction correction : corrections) {
Map<String, Object> binding = new HashMap<>(); //adding correction columns
binding.put("U", u); if (!correction.isAnonimous()) {
return ExpressionUtils.function(func, binding); table = table.addColumn(ColumnFormat.build(correction.getName(), NUMBER),
}; correction::corr);
} else { if (correction.hasError()) {
utransform = Function.identity(); table = table.addColumn(ColumnFormat.build(correction.getName() + ".err", NUMBER),
correction::corrErr);
} }
} }
}
// adding original count rate and error columns
table = table.addColumn(new ListColumn(ColumnFormat.build(COUNT_RATE_KEY + ".orig", NUMBER), table.getColumn
(COUNT_RATE_KEY).stream()));
table = table.addColumn(new ListColumn(ColumnFormat.build(COUNT_RATE_ERROR_KEY + ".orig", NUMBER), table
.getColumn(COUNT_RATE_ERROR_KEY).stream()));
List<Double> cr = new ArrayList<>();
List<Double> crErr = new ArrayList<>();
table.getRows().forEach(point -> {
double correctionFactor = corrections.stream()
.mapToDouble(cor -> cor.corr(point))
.reduce((d1, d2) -> d1 * d2).orElse(1);
double relativeCorrectionError = Math.sqrt(
corrections.stream()
.mapToDouble(cor -> cor.relativeErr(point))
.reduce((d1, d2) -> d1 * d1 + d2 * d2).orElse(0)
);
double originalCR = point.getDouble(COUNT_RATE_KEY);
double originalCRErr = point.getDouble(COUNT_RATE_ERROR_KEY);
cr.add(originalCR * correctionFactor);
if (relativeCorrectionError == 0) {
crErr.add(originalCRErr * correctionFactor);
} else {
                crErr.add(Math.sqrt(Math.pow(originalCRErr / originalCR, 2d) + Math.pow(relativeCorrectionError, 2d))
                        * originalCR * correctionFactor);
}
});
//replacing cr column
Table res = table.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_KEY).getFormat(), cr.stream()))
.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_ERROR_KEY).getFormat(), crErr.stream()));
output(context, name, stream -> NumassUtils.writeSomething(stream, meta, res));
return res;
}
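For reference, the arithmetic applied above is simple: correction factors multiply, and independent relative errors combine in quadrature. A minimal standalone sketch under those assumptions (plain arrays instead of DataForge tables; the helper name is hypothetical):

    // Sketch only: combine multiplicative corrections and propagate
    // independent relative errors in quadrature. Assumes cr > 0.
    static double[] applyCorrections(double cr, double crErr, double[] factors, double[] relErrs) {
        double factor = 1;
        for (double f : factors) {
            factor *= f; // corrections are multiplicative
        }
        double relErr2 = (crErr / cr) * (crErr / cr); // original statistical error
        for (double e : relErrs) {
            relErr2 += e * e; // independent errors add in quadrature
        }
        double corrected = cr * factor;
        return new double[]{corrected, corrected * Math.sqrt(relErr2)};
    }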
@ValueDef(name = "value", type = {NUMBER, STRING}, info = "Value or function to multiply count rate") @ValueDef(name = "value", type = {NUMBER, STRING}, info = "Value or function to multiply count rate")
@ -66,6 +110,11 @@ public class TransformDataAction extends OneToOneAction<Table, Table> {
final String expr = corrMeta.getString("value"); final String expr = corrMeta.getString("value");
final String errExpr = corrMeta.getString("err", ""); final String errExpr = corrMeta.getString("err", "");
return new Correction() { return new Correction() {
@Override
public String getName() {
return corrMeta.getString("name", "");
}
@Override @Override
public double corr(Values point) { public double corr(Values point) {
return pointExpression(expr, point); return pointExpression(expr, point);
@ -79,10 +128,21 @@ public class TransformDataAction extends OneToOneAction<Table, Table> {
return pointExpression(errExpr, point); return pointExpression(errExpr, point);
} }
} }
@Override
public boolean hasError() {
return !errExpr.isEmpty();
}
}; };
} }
private interface Correction { private interface Correction extends Named {
@Override
default String getName() {
return "";
}
/** /**
* correction coefficient * correction coefficient
* *
@ -101,6 +161,10 @@ public class TransformDataAction extends OneToOneAction<Table, Table> {
return 0; return 0;
} }
default boolean hasError() {
return false;
}
default double relativeErr(Values point) { default double relativeErr(Values point) {
double corrErr = corrErr(point); double corrErr = corrErr(point);
if (corrErr == 0) { if (corrErr == 0) {

View File

@ -1,207 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.debunch;
import inr.numass.data.api.NumassEvent;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
 * Stores a sorted set of events, with the ability to cut out chunks and
 * splice the remaining ends together
*
* @author Darksnake
*/
class DebunchData {
/**
 * Removes events in the given time range from the list. The stored total
 * time is not adjusted here, so the count rate changes. Returns the
 * remaining events, with later ones shifted back by the frame length.
*
* @param from
* @param to
* @return
*/
private static List<NumassEvent> removeFrame(List<NumassEvent> events, Frame frame) {
List<NumassEvent> res = new ArrayList<>();
for (NumassEvent event : events) {
if (event.getTime() >= frame.getEnd()) {
res.add(new NumassEvent(event.getChanel(), event.getTime() - frame.length()));
} else if (event.getTime() <= frame.getBegin()) {
res.add(event);
}
}
return res;
}
private final List<Frame> bunches = new ArrayList<>();
private final List<NumassEvent> events;
private final double length;
public DebunchData(RawNMPoint point) {
events = point.getEvents();
Collections.sort(events, new EventComparator());
length = point.getLength();
}
public Frame countInFrame(double start, double length, int lowerChanel, int upperChanel) {
double end;
if (start + length < this.getLength()) {
end = start + length;
} else {
end = this.getLength();
}
ArrayList<NumassEvent> sum = new ArrayList<>();
int i = 0;
while ((i < this.size()) && (events.get(i).getTime() < start)) {
i++;
}
while ((i < this.size()) && (events.get(i).getTime() < end)) {
if ((events.get(i).getChanel() >= lowerChanel) && (events.get(i).getChanel() <= upperChanel)) {
sum.add(getEvents().get(i));
}
i++;
}
return new Frame(start, end, sum);
}
/**
 * Same as countInFrame, but does not copy the individual event times, only
 * the total count in the frame.
*
* @param start
* @param length
* @return
*/
public Frame countInFrameFast(double start, double length, int lowerChanel, int upperChanel) {
        //PENDING the slowest method
if (start > this.getLength()) {
throw new IllegalArgumentException();
}
double end;
if (start + length < this.getLength()) {
end = start + length;
} else {
end = this.getLength();
}
int sumCount = 0;
int i = 0;
while ((i < this.size()) && (events.get(i).getTime() < start)) {
i++;
}
while ((i < this.size()) && (events.get(i).getTime() < end)) {
if ((events.get(i).getChanel() >= lowerChanel) && (events.get(i).getChanel() <= upperChanel)) {
sumCount++;
}
i++;
}
return new Frame(start, end, sumCount);
}
public List<Frame> getBunches() {
return bunches;
}
/**
 * Returns the corrected count rate over the whole interval
*
* @return
*/
public double getCountRate() {
return this.size() / this.getLength();
}
/**
 * Slow method; call it as few times as possible
*
* @return
*/
public List<NumassEvent> getDebunchedEvents() {
List<NumassEvent> res = getEvents();
for (Frame frame : getBunches()) {
res = removeFrame(res, frame);
}
return res;
}
/**
 * Slow method; call it as few times as possible
*
* @return
*/
public double getDebunchedLength() {
double res = length;
for (Frame frame : getBunches()) {
res -= frame.length();
}
if (res > 0) {
return res;
} else {
throw new RuntimeException("Zero length point after debunching");
}
}
public List<NumassEvent> getEvents() {
return events;
}
/**
* @return the length
*/
public double getLength() {
return length;
}
public void setAsBunch(Frame bunch) {
        //FIXME add a check for frame overlap
this.bunches.add(bunch);
}
public void setAsBunch(double from, double to) {
assert to > from;
setAsBunch(countInFrame(from, to - from,0, RawNMPoint.MAX_CHANEL));
}
public long size() {
return this.getEvents().size();
}
private static class EventComparator implements Comparator<NumassEvent> {
@Override
public int compare(NumassEvent o1, NumassEvent o2) {
return (int) Math.signum(o1.getTime() - o2.getTime());
}
}
}

View File

@ -1,78 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.debunch;
import inr.numass.data.api.NumassEvent;
/**
*
* @author Darksnake
*/
public class DebunchEvent extends NumassEvent {
public static double getEventWeight(NumassEvent event) {
if (event instanceof DebunchEvent) {
return ((DebunchEvent) event).getWeight();
} else {
return 1;
}
}
private double shift = 0;
/**
 * In general, takes values from 0 (the event is completely discarded) to
 * 1 (the event is fully accepted)
*/
private double weight;
public DebunchEvent(NumassEvent event, double weight) {
super(event.getChanel(), event.getTime());
this.weight = weight;
}
protected DebunchEvent(double weight, double shift, short chanel, double time) {
super(chanel, time);
this.weight = weight;
this.shift = shift;
}
@Override
public DebunchEvent clone() {
return new DebunchEvent(weight, shift, chanel, time);
}
@Override
public double getTime() {
return super.getTime() + shift;
}
public double getWeight() {
return this.weight;
}
/**
* @param marker
*/
public void setWeight(int marker){
this.weight = marker;
}
public void shiftTime(double shift) {
this.shift += shift;
}
}

View File

@ -1,36 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.debunch;
import inr.numass.data.api.NumassBlock;
import inr.numass.data.api.NumassEvent;
import java.util.List;
/**
*
* @author Darksnake
*/
public interface DebunchReport {
NumassBlock getInitialPoint();
NumassBlock getPoint();
List<Frame> getBunches();
List<NumassEvent> getBunchEvents();
double eventsFiltred();
double timeFiltred();
}

View File

@ -1,83 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.debunch;
import inr.numass.data.api.NumassBlock;
import inr.numass.data.api.NumassEvent;
import inr.numass.data.api.NumassPoint;
import java.util.ArrayList;
import java.util.List;
/**
* @author Darksnake
*/
public class DebunchReportImpl implements DebunchReport {
private final List<Frame> bunches;
private final NumassBlock pointAfter;
private final NumassBlock pointBefore;
public DebunchReportImpl(NumassBlock pointBefore, NumassBlock pointAfter, List<Frame> bunches) {
this.pointBefore = pointBefore;
this.pointAfter = pointAfter;
this.bunches = bunches;
}
DebunchReportImpl(NumassBlock pointBefore, NumassBlock debunchData) {
this.pointBefore = pointBefore;
pointAfter = new NumassPoint(pointBefore.getUset(), pointBefore.getUread(),
debunchData.getDebunchedEvents(), debunchData.getDebunchedLength(), pointBefore.getStartTime());
this.bunches = debunchData.getBunches();
}
@Override
public double eventsFiltred() {
return 1 - (double) getPoint().getEvents().count() / getInitialPoint().getEvents().count();
}
@Override
public List<NumassEvent> getBunchEvents() {
List<NumassEvent> res = new ArrayList<>();
for (Frame interval : getBunches()) {
res.addAll(interval.getEvents());
}
return res;
}
@Override
public List<Frame> getBunches() {
return bunches;
}
@Override
public NumassBlock getInitialPoint() {
return pointBefore;
}
@Override
public NumassBlock getPoint() {
return pointAfter;
}
@Override
public double timeFiltred() {
return 1d - getPoint().getLength().toNanos() / getInitialPoint().getLength().toNanos();
}
}

View File

@ -1,24 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.debunch;
/**
*
* @author Darksnake
*/
public interface Debuncher {
DebunchReport debunchPoint(RawNMPoint point);
}

View File

@ -1,102 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.debunch;
import inr.numass.data.api.NumassEvent;
import org.apache.commons.math3.distribution.PoissonDistribution;
import java.util.ArrayList;
import java.util.List;
/**
*
* @author Darksnake
*/
public class Frame {
private final double begin;
private final double end;
private List<NumassEvent> events;
private final int eventsCount;
public Frame(double begin, double end, List<NumassEvent> events) {
assert end > begin;
this.begin = begin;
this.end = end;
this.events = events;
this.eventsCount = events.size();
}
/**
 * A shortened version to save memory
*
* @param begin
* @param end
* @param count
*/
public Frame(double begin, double end, int count) {
assert end > begin;
this.begin = begin;
this.end = end;
this.eventsCount = count;
}
public Frame cloneFast() {
return new Frame(begin, end, eventsCount);
}
public double getBegin() {
return begin;
}
public int getCount() {
if (this.events != null) {
return events.size();
} else {
return eventsCount;
}
}
public double getCountRate(){
return this.getCount() / this.length();
}
public double getCountRateError(){
return Math.sqrt(this.getCount()) / this.length();
}
public double getEnd() {
return end;
}
public List<NumassEvent> getEvents() {
if(events!=null)
return events;
else
return new ArrayList<>();
}
public double getProbability(double cr){
PoissonDistribution distr = new PoissonDistribution(cr * this.length());
return distr.probability(getCount());
}
public double length() {
assert this.end > this.begin;
return this.end - this.begin;
}
}

View File

@ -1,241 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.debunch;
import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.interpolation.LinearInterpolator;
import org.apache.commons.math3.util.FastMath;
/**
*
* @author Darksnake
*/
public class FrameAnalizer implements Debuncher {
private static final double POISSON_THRESHOLD = 1e-80;
static double getGaussianCRThreshold(double cr, double frameLength, double prob) {
double[] xs = {9, 8, 7, 6, 5, 4, 3, 2, 1};
double[] probs = {1.15e-19, 6.22e-16, 1.27e-12, 9.86e-10, 2.86e-7, 3.167e-5, 0.001349, 0.0227, 0.1586};
LinearInterpolator interpolator = new LinearInterpolator();
UnivariateFunction function = interpolator.interpolate(probs, xs);
double sigmas = function.value(prob);
return cr + sigmas * Math.sqrt(cr / frameLength);
}
//double frameShift;
double frameLength;
int lowerChanel = 0;
int numCircles = 1;
double rejectionProb;
int upperChanel = RawNMPoint.MAX_CHANEL;
public FrameAnalizer(double rejectionProb, double frameLength) {
this.rejectionProb = rejectionProb;
this.frameLength = frameLength;
}
public FrameAnalizer(double rejectionProb, double frameLength, int lower, int upper) {
assert upper > lower;
this.rejectionProb = rejectionProb;
this.frameLength = frameLength;
this.lowerChanel = lower;
this.upperChanel = upper;
}
public FrameAnalizer(double rejectionProb, double frameLength, int numCircles) {
this.rejectionProb = rejectionProb;
this.frameLength = frameLength;
this.numCircles = numCircles;
}
/**
 * A complete analogue of Sergey's program
*
* @param numCicles
* @param prob
* @param frameShift
* @param frameLength
* @return
*/
private DebunchReport cicledDebunch(RawNMPoint point, int numCicles, double prob, double frameShift, double frameLength) {
DebunchReport res = this.debunch(point, prob, frameShift, frameLength);
for (int i = 0; i < numCicles-1; i++) {
res = this.debunch(res, prob, frameShift, frameLength);
}
return res;
}
private DebunchReport debunch(DebunchReport res, double prob, double frameShift, double frameLength) {
return debunch(res.getPoint(), prob, frameShift, frameLength);
}
private DebunchReport debunch(RawNMPoint point, double prob, double frameShift, double frameLength) {
double cr = point.selectChanels(lowerChanel, upperChanel).getCr();
return debunch(point, cr, prob, frameShift, frameLength);
}
private DebunchReport debunch(RawNMPoint point, double averageCR, double prob, double frameShift, double frameLength) {
DebunchData data = new DebunchData(point);
double timeTotal = data.getLength();
// long countTotal = data.size();
double curPos = 0;
double baseThreshold = getCRThreshold(averageCR, frameLength, prob);
Frame workFrame;
        boolean bunchFlag = false;// the flag indicates whether we are currently inside a bunch
while (curPos < (data.getLength() - frameLength)) {
workFrame = data.countInFrameFast(curPos, frameLength,lowerChanel,upperChanel);
if (workFrame.getCountRate() > baseThreshold) {
/*
 * If the count rate in the frame exceeds the threshold, the frame is cut out
 * of the result and the carriage is shifted by one step, setting the flag.
 * If the flag is already set, only the last step is cut out to avoid double removal.
*/
if (bunchFlag) {
                    /* There may be a flaw here when two bunches are close but not adjacent. It would
                     * be better to remember the end of the last removed frame instead of the flag state
*/
workFrame = data.countInFrameFast(curPos + frameLength - frameShift, frameShift,lowerChanel,upperChanel);
}
data.setAsBunch(workFrame);
timeTotal -= workFrame.length();
if (timeTotal <= 0) {
throw new RuntimeException("Total time after cleaning is zero.");
}
bunchFlag = true;
} else {
/*
 * If there is no bunch, just shift the carriage to the next frame and clear the flag
*/
bunchFlag = false;
}
curPos += frameShift;
}
return new DebunchReportImpl(point, data);
}
@Override
public DebunchReport debunchPoint(RawNMPoint point) {
return cicledDebunch(point, numCircles, rejectionProb, frameLength/4, frameLength);
}
private double getCRThreshold(double cr, double frameLength, double prob) {
if (cr * frameLength > 20) {
return getGaussianCRThreshold(cr, frameLength, prob);
} else {
return getPoissonThreshold(cr * frameLength, prob) / frameLength;
}
}
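An illustrative use of the threshold selection above; the numbers are made up, not taken from real data:

    // At cr = 10 Hz and a 0.5 s frame, cr * frameLength = 5 <= 20, so the
    // Poisson branch is taken; at cr = 100 Hz the product is 50 and the
    // Gaussian approximation is used instead.
    double poissonThreshold = getCRThreshold(10d, 0.5, 1e-3);
    double gaussianThreshold = getCRThreshold(100d, 0.5, 1e-3);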
/**
 * Returns a set of intervals beginning at frameStarts[i]. Every frame start
 * should be inside the data region
*
* @param frameStarts
* @param frameLength
* @param fast
* @return
*/
private Frame[] getIntervals(DebunchData data, double[] frameStarts, double frameLength, boolean fast) {
Frame[] res = new Frame[frameStarts.length];
for (int i = 0; i < frameStarts.length; i++) {
if (fast) {
res[i] = data.countInFrameFast(frameStarts[i], frameLength,lowerChanel,upperChanel);
} else {
res[i] = data.countInFrame(frameStarts[i], frameLength,lowerChanel,upperChanel);
}
}
return res;
}
/**
 * Returns count rates in consecutive frames of length frameLength.
 * The last frame could be shorter than the others. This method could be used
 * for fast distribution calculation.
*
* @param frameLength
* @return
*/
private double[] getNonIntercectFramesCountRate(DebunchData data, double frameLength) {
double dataLength = data.getLength();
int maxFramesCount = (int) Math.ceil(dataLength / frameLength);
if (maxFramesCount < 2) {
throw new IllegalArgumentException("The frameLength is too large.");
}
double[] res = new double[maxFramesCount];
double frameBegin;
for (int i = 0; i < res.length; i++) {
frameBegin = i * frameLength;
res[i] = data.countInFrameFast(frameBegin, frameLength,lowerChanel,upperChanel).getCountRate();
}
return res;
}
private int getPoissonThreshold(double mean, double prob) {
/*
         * Find the point where the distribution effectively vanishes and the value of the cumulative density at that point.
*/
double pdf = FastMath.exp(-mean);
double cdf = pdf;
int k = 0;
while (pdf > POISSON_THRESHOLD) {
k++;
pdf *= mean / k;
cdf += pdf;
}
/*
         * Start accumulating the cumulative density in reverse order
*/
cdf = 1 - cdf;
if (pdf <= 0) {
            throw new Error();// make sure the value is not exactly zero here
}
while (cdf < prob) {
k--;
pdf *= k / mean;
cdf += pdf;
}
return k;
}
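For intuition, the method above walks the Poisson pmf via the recurrence p(k) = p(k-1) * mean / k with p(0) = exp(-mean), first out to the far tail and then backwards. A simpler forward-walking sketch of the same quantile idea, adequate for moderate means:

    // Sketch only: smallest k such that P(X <= k) >= prob for X ~ Poisson(mean).
    static int poissonQuantileSketch(double mean, double prob) {
        double pdf = Math.exp(-mean); // p(0)
        double cdf = pdf;
        int k = 0;
        while (cdf < prob) {
            k++;
            pdf *= mean / k; // p(k) = p(k-1) * mean / k
            cdf += pdf;
        }
        return k;
    }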
private Frame[] getUniformShiftedIntervals(DebunchData data, double frameShift, double frameLength, boolean fast) {
double dataLength = data.getLength();
int maxFramesCount = (int) Math.ceil(dataLength / frameShift);
double[] frameStarts = new double[maxFramesCount];
for (int i = 0; i < frameStarts.length; i++) {
frameStarts[i] = i * frameShift;
}
return getIntervals(data, frameStarts, frameLength, fast);
}
}

View File

@ -8,7 +8,7 @@ package inr.numass.models;
import hep.dataforge.stat.parametric.ParametricFunction; import hep.dataforge.stat.parametric.ParametricFunction;
import hep.dataforge.values.Values; import hep.dataforge.values.Values;
import inr.numass.utils.NumassIntegrator; import inr.numass.utils.NumassIntegrator;
import inr.numass.utils.TritiumUtils; import inr.numass.utils.NumassUtils;
import org.apache.commons.math3.analysis.UnivariateFunction; import org.apache.commons.math3.analysis.UnivariateFunction;
/** /**
@ -19,7 +19,7 @@ import org.apache.commons.math3.analysis.UnivariateFunction;
public class CustomNBkgSpectrum extends NBkgSpectrum { public class CustomNBkgSpectrum extends NBkgSpectrum {
public static CustomNBkgSpectrum tritiumBkgSpectrum(ParametricFunction source, double amplitude){ public static CustomNBkgSpectrum tritiumBkgSpectrum(ParametricFunction source, double amplitude){
UnivariateFunction differentialBkgFunction = TritiumUtils.tritiumBackgroundFunction(amplitude); UnivariateFunction differentialBkgFunction = NumassUtils.tritiumBackgroundFunction(amplitude);
UnivariateFunction integralBkgFunction = UnivariateFunction integralBkgFunction =
(x) -> NumassIntegrator.getDefaultIntegrator() (x) -> NumassIntegrator.getDefaultIntegrator()
.integrate(differentialBkgFunction, x, 18580d); .integrate(differentialBkgFunction, x, 18580d);

View File

@ -11,7 +11,6 @@ import hep.dataforge.context.Context;
import hep.dataforge.data.DataNode; import hep.dataforge.data.DataNode;
import hep.dataforge.data.DataSet; import hep.dataforge.data.DataSet;
import hep.dataforge.description.TypedActionDef; import hep.dataforge.description.TypedActionDef;
import hep.dataforge.io.ColumnedDataWriter;
import hep.dataforge.meta.Laminate; import hep.dataforge.meta.Laminate;
import hep.dataforge.stat.fit.FitResult; import hep.dataforge.stat.fit.FitResult;
import hep.dataforge.stat.fit.ParamSet; import hep.dataforge.stat.fit.ParamSet;
@ -21,9 +20,8 @@ import hep.dataforge.tables.Table;
import hep.dataforge.tables.TableTransform; import hep.dataforge.tables.TableTransform;
import hep.dataforge.workspace.AbstractTask; import hep.dataforge.workspace.AbstractTask;
import hep.dataforge.workspace.TaskModel; import hep.dataforge.workspace.TaskModel;
import inr.numass.utils.NumassUtils;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Map; import java.util.Map;
/** /**
@ -81,15 +79,7 @@ public class NumassFitScanSummaryTask extends AbstractTask<Table> {
pars.getValue("trap")); pars.getValue("trap"));
}); });
Table res = TableTransform.sort(builder.build(), "m", true); Table res = TableTransform.sort(builder.build(), "m", true);
output(context, nodeName, stream -> NumassUtils.writeSomething(stream,meta,res));
try (OutputStream stream = buildActionOutput(context, nodeName)) {
String head = "Sterile neutrino mass scan summary\n" + meta.toString();
ColumnedDataWriter.writeTable(stream, res, head);
} catch (IOException e) {
getLogger(meta).error("Failed to close output stream", e);
}
return res; return res;
} }

View File

@ -16,9 +16,11 @@ import hep.dataforge.meta.Template;
import hep.dataforge.tables.Table; import hep.dataforge.tables.Table;
import hep.dataforge.workspace.AbstractTask; import hep.dataforge.workspace.AbstractTask;
import hep.dataforge.workspace.TaskModel; import hep.dataforge.workspace.TaskModel;
import inr.numass.actions.AnalyzeDataAction;
import inr.numass.actions.MergeDataAction; import inr.numass.actions.MergeDataAction;
import inr.numass.actions.MonitorCorrectAction; import inr.numass.actions.MonitorCorrectAction;
import inr.numass.actions.PrepareDataAction; import inr.numass.actions.TransformDataAction;
import inr.numass.data.api.NumassSet;
/** /**
* Prepare data task * Prepare data task
@ -40,7 +42,7 @@ public class NumassPrepareTask extends AbstractTask<Table> {
DataFilter filter = new DataFilter().configure(config.getMeta("data")); DataFilter filter = new DataFilter().configure(config.getMeta("data"));
DataNode<NumassData> data = filter.filter(input.checked(NumassData.class)); DataNode<NumassSet> data = filter.filter(input.checked(NumassSet.class));
// Meta dataMeta = config.getMeta("data"); // Meta dataMeta = config.getMeta("data");
// URI storageUri = input.getCheckedData("dataRoot", URI.class).get(); // URI storageUri = input.getCheckedData("dataRoot", URI.class).get();
@ -49,7 +51,9 @@ public class NumassPrepareTask extends AbstractTask<Table> {
//preparing table data //preparing table data
Meta prepareMeta = config.getMeta("prepare"); Meta prepareMeta = config.getMeta("prepare");
DataNode<Table> tables = runAction(new PrepareDataAction(), context, data, prepareMeta); DataNode<Table> tables = runAction(new AnalyzeDataAction(), context, data, prepareMeta);
tables = runAction(new TransformDataAction(), context, tables, prepareMeta);
if (config.hasMeta("monitor")) { if (config.hasMeta("monitor")) {
Meta monitorMeta = config.getMeta("monitor"); Meta monitorMeta = config.getMeta("monitor");

View File

@ -1,204 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.utils;
import inr.numass.data.api.NumassEvent;
import org.apache.commons.math3.random.MersenneTwister;
import org.apache.commons.math3.random.RandomGenerator;
import org.apache.commons.math3.random.SynchronizedRandomGenerator;
import java.util.ArrayList;
/**
*
* @author Darksnake
*/
public class BunchGenerator {
private double bunchCr; // additional count rate in bunch
private double bunchDist;// average distance between bunches
private double bunchLength; // length of bunches
private double cr; // count rate of normal events
private ExpGenerator expGen;
// private ExponentialDistribution expGen;
public BunchGenerator(double cr, double bunchLength, double bunchDist, double bunchCr) {
this.cr = cr;
this.bunchLength = bunchLength;
this.bunchDist = bunchDist;
this.bunchCr = bunchCr;
expGen = new ExpGenerator(new SynchronizedRandomGenerator(new MersenneTwister()));
}
public BunchGenerator(double cr, double bunchLength, double bunchDist, double bunchCr, RandomGenerator gen) {
this.cr = cr;
this.bunchLength = bunchLength;
this.bunchDist = bunchDist;
this.bunchCr = bunchCr;
expGen = new ExpGenerator(gen);
}
public ArrayList<NumassEvent> generate(double dist, double length, double timeShift, boolean isBunch) {
ArrayList<NumassEvent> res = new ArrayList<>();
ArrayList<Double> events = generateEvents(dist, length);
for (Double event : events) {
if (event < length) {
res.add(new NumassEvent((short)0,event + timeShift));
// if (isBunch) {
// res.add(new DebunchEvent(event + timeShift, 10));
// } else {
// res.add(new DebunchEvent(event + timeShift));
// }
}
}
return res;
}
ArrayList<Double> generateEvents(double dist, double timeTotal) {
ArrayList<Double> res = new ArrayList<>();
double timeCount = 0;
double delta;
while (timeCount < timeTotal) {
delta = expGen.nextExp(dist);
timeCount += delta;
if (timeCount < timeTotal) {
res.add(timeCount);
}
}
return res;
}
/**
 * Creates a bunch with a triangular distribution
*
* @param dist
* @param timeTotal
* @return
*/
ArrayList<Double> generateEventsTriangle(double dist, double timeTotal) {
ArrayList<Double> res = new ArrayList<>();
double timeCount = 0;
double delta;
while (timeCount < timeTotal) {
delta = expGen.nextExp(dist * timeTotal / (timeTotal - timeCount));
timeCount += delta;
if (timeCount < timeTotal) {
res.add(timeCount);
}
}
return res;
}
public ArrayList<NumassEvent> generateNormalEvents(double measurementTime) {
return generate(1 / cr, measurementTime, 0, false);
}
public ArrayList<NumassEvent> generateTriangle(double dist, double length, double timeShift, boolean isBunch) {
ArrayList<NumassEvent> res = new ArrayList<>();
ArrayList<Double> events = generateEventsTriangle(dist, length);
for (Double event : events) {
if (event < length) {
res.add(new NumassEvent((short)0,event + timeShift));
// if (isBunch) {
// res.add(new DebunchEvent(event + timeShift, 10));
// } else {
// res.add(new DebunchEvent(event + timeShift));
// }
}
}
return res;
}
/**
*
* @param measurementTime - total measurement time
* @return
*/
public RawNMPoint generateWithBunches(double measurementTime) {
ArrayList<NumassEvent> res = generateNormalEvents(measurementTime);
ArrayList<Double> bunchList = generateEvents(bunchDist, measurementTime);
for (Double bunchPos : bunchList) {
res.addAll(generate(1 / bunchCr, bunchLength, bunchPos, true));
}
return new RawNMPoint(0, res, measurementTime);
}
/**
*
* @param measurementTime - total measurement time
* @return
*/
public RawNMPoint generateWithRandomBunches(double measurementTime) {
ArrayList<NumassEvent> res = generateNormalEvents(measurementTime);
ArrayList<Double> bunchList = generateEvents(bunchDist, measurementTime);
for (Double bunchPos : bunchList) {
double l = expGen.nextSafeGaussian(bunchLength, bunchLength / 3);
double lambda = expGen.nextSafeGaussian(1 / bunchCr, 1 / bunchCr / 3);
res.addAll(generate(lambda, l, bunchPos, true));
}
return new RawNMPoint(0, res, measurementTime);
}
public RawNMPoint generateWithTriangleBunches(double measurementTime) {
ArrayList<NumassEvent> res = generateNormalEvents(measurementTime);
ArrayList<Double> bunchList = generateEvents(bunchDist, measurementTime);
for (Double bunchPos : bunchList) {
res.addAll(generateTriangle(1 / bunchCr, bunchLength, bunchPos, true));
}
return new RawNMPoint(0, res, measurementTime);
}
public void setSeed(int seed) {
this.expGen.setSeed(seed);
}
private static class ExpGenerator {
private final RandomGenerator generator;
public ExpGenerator(RandomGenerator generator) {
this.generator = generator;
}
public ExpGenerator(RandomGenerator generator, int seed) {
this.generator = generator;
this.generator.setSeed(seed);
}
void setSeed(int seed) {
generator.setSeed(seed);
}
double nextUniform() {
return generator.nextDouble();
}
double nextExp(double mean) {
double rand = this.nextUniform();
return -mean * Math.log(1 - rand);
}
double nextSafeGaussian(double mean, double sigma) {
double res = -1;
while (res <= 0) {
res = mean + generator.nextGaussian() * sigma;
}
return res;
}
}
}

View File

@ -16,23 +16,27 @@
package inr.numass.utils; package inr.numass.utils;
import hep.dataforge.meta.Meta; import hep.dataforge.meta.Meta;
import hep.dataforge.tables.Table;
import inr.numass.data.api.NumassBlock;
import inr.numass.data.api.NumassEvent; import inr.numass.data.api.NumassEvent;
import inr.numass.data.api.SimpleBlock;
import org.apache.commons.math3.distribution.EnumeratedRealDistribution; import org.apache.commons.math3.distribution.EnumeratedRealDistribution;
import org.apache.commons.math3.distribution.RealDistribution; import org.apache.commons.math3.distribution.RealDistribution;
import org.apache.commons.math3.random.EmpiricalDistribution;
import org.apache.commons.math3.random.RandomGenerator; import org.apache.commons.math3.random.RandomGenerator;
import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.function.Supplier; import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY;
/** /**
* A generator for Numass events with given energy spectrum * A generator for Numass events with given energy spectrum
* *
* @author Darksnake * @author Darksnake
*/ */
public class NMEventGenerator implements Supplier<NumassEvent> { public class NMEventGenerator {
protected final RandomGenerator rnd; protected final RandomGenerator rnd;
protected double cr; protected double cr;
@ -49,91 +53,119 @@ public class NMEventGenerator implements Supplier<NumassEvent> {
this.rnd = rnd; this.rnd = rnd;
} }
public void loadSpectrum(RawNMPoint point, int minChanel, int maxChanel) { public void setSpectrum(Table spectrum) {
List<Short> shorts = new ArrayList<>();
point.getEvents().stream()
.filter((event) -> ((event.getChanel() > minChanel) && (event.getChanel() < maxChanel)))
.forEach((event) -> shorts.add(event.getChanel()));
double[] doubles = new double[shorts.size()];
for (int i = 0; i < shorts.size(); i++) {
doubles[i] = shorts.get(i);
}
EmpiricalDistribution d = new EmpiricalDistribution();
d.load(doubles);
distribution = d;
}
public void loadSpectrum(Map<Double, Double> spectrum) {
double[] chanels = new double[spectrum.size()]; double[] chanels = new double[spectrum.size()];
double[] values = new double[spectrum.size()]; double[] values = new double[spectrum.size()];
int i = 0; for (int i = 0; i < spectrum.size(); i++) {
for (Map.Entry<Double, Double> entry : spectrum.entrySet()) { chanels[i] = spectrum.get("channel", i).doubleValue();
chanels[i] = entry.getKey(); values[i] = spectrum.get(COUNT_RATE_KEY, i).doubleValue();
values[i] = entry.getValue();
} }
distribution = new EnumeratedRealDistribution(chanels, values); distribution = new EnumeratedRealDistribution(chanels, values);
} }
public void loadSpectrum(double[] channels, double[] values) { // public void loadSpectrum(RawNMPoint point, int minChanel, int maxChanel) {
distribution = new EnumeratedRealDistribution(channels, values); // List<Short> shorts = new ArrayList<>();
// point.getEvents().stream()
// .filter((event) -> ((event.getChanel() > minChanel) && (event.getChanel() < maxChanel)))
// .forEach((event) -> shorts.add(event.getChanel()));
// double[] doubles = new double[shorts.size()];
//
// for (int i = 0; i < shorts.size(); i++) {
// doubles[i] = shorts.get(i);
// }
//
// EmpiricalDistribution d = new EmpiricalDistribution();
// d.load(doubles);
//
// distribution = d;
// }
//
// public void loadSpectrum(Map<Double, Double> spectrum) {
// double[] chanels = new double[spectrum.size()];
// double[] values = new double[spectrum.size()];
// int i = 0;
// for (Map.Entry<Double, Double> entry : spectrum.entrySet()) {
// chanels[i] = entry.getKey();
// values[i] = entry.getValue();
// }
// distribution = new EnumeratedRealDistribution(chanels, values);
// }
//
// public void loadSpectrum(double[] channels, double[] values) {
// distribution = new EnumeratedRealDistribution(channels, values);
// }
//
// public void loadSpectrum(NumassPoint point) {
// double[] chanels = new double[RawNMPoint.MAX_CHANEL];
// double[] values = new double[RawNMPoint.MAX_CHANEL];
// for (int i = 0; i < RawNMPoint.MAX_CHANEL; i++) {
// chanels[i] = i;
// values[i] = point.getCount(i);
// }
// distribution = new EnumeratedRealDistribution(chanels, values);
// }
//
// public void loadSpectrum(NumassPoint point, NumassPoint reference) {
// double[] chanels = new double[RawNMPoint.MAX_CHANEL];
// double[] values = new double[RawNMPoint.MAX_CHANEL];
// for (int i = 0; i < RawNMPoint.MAX_CHANEL; i++) {
// chanels[i] = i;
// values[i] = Math.max(0, point.getCount(i) - reference.getCount(i));
// }
// distribution = new EnumeratedRealDistribution(chanels, values);
// }
//
// /**
// * @param point
// * @param reference
// * @param lower lower channel for spectrum generation
// * @param upper upper channel for spectrum generation
// */
// public void loadSpectrum(NumassPoint point, NumassPoint reference, int lower, int upper) {
// double[] chanels = new double[RawNMPoint.MAX_CHANEL];
// double[] values = new double[RawNMPoint.MAX_CHANEL];
// for (int i = lower; i < upper; i++) {
// chanels[i] = i;
// values[i] = Math.max(0, point.getCount(i) - (reference == null ? 0 : reference.getCount(i)));
// }
// distribution = new EnumeratedRealDistribution(chanels, values);
// }
protected short generateChannel() {
if (distribution != null) {
return (short) distribution.sample();
} else {
return 1600;
}
} }
public void loadSpectrum(NumassPoint point) { protected long generateDeltaTime() {
double[] chanels = new double[RawNMPoint.MAX_CHANEL]; return (long) (nextExpDecay(1d / cr) * 1e9);
double[] values = new double[RawNMPoint.MAX_CHANEL];
for (int i = 0; i < RawNMPoint.MAX_CHANEL; i++) {
chanels[i] = i;
values[i] = point.getCount(i);
}
distribution = new EnumeratedRealDistribution(chanels, values);
}
public void loadSpectrum(NumassPoint point, NumassPoint reference) {
double[] chanels = new double[RawNMPoint.MAX_CHANEL];
double[] values = new double[RawNMPoint.MAX_CHANEL];
for (int i = 0; i < RawNMPoint.MAX_CHANEL; i++) {
chanels[i] = i;
values[i] = Math.max(0, point.getCount(i) - reference.getCount(i));
}
distribution = new EnumeratedRealDistribution(chanels, values);
}
/**
* @param point
* @param reference
* @param lower lower channel for spectrum generation
* @param upper upper channel for spectrum generation
*/
public void loadSpectrum(NumassPoint point, NumassPoint reference, int lower, int upper) {
double[] chanels = new double[RawNMPoint.MAX_CHANEL];
double[] values = new double[RawNMPoint.MAX_CHANEL];
for (int i = lower; i < upper; i++) {
chanels[i] = i;
values[i] = Math.max(0, point.getCount(i) - (reference == null ? 0 : reference.getCount(i)));
}
distribution = new EnumeratedRealDistribution(chanels, values);
} }
protected NumassEvent nextEvent(NumassEvent prev) { protected NumassEvent nextEvent(NumassEvent prev) {
short chanel; if (prev == null) {
return new NumassEvent(generateChannel(), Instant.EPOCH, 0);
if (distribution != null) {
chanel = (short) distribution.sample();
} else { } else {
chanel = 1600; return new NumassEvent(generateChannel(), prev.getBlockTime(), prev.getTimeOffset() + generateDeltaTime());
} }
return new NumassEvent(chanel, (prev == null ? 0 : prev.getTime()) + nextExpDecay(1d / cr));
} }
@Override // @Override
public synchronized NumassEvent get() { // public synchronized NumassEvent get() {
return prevEvent = nextEvent(prevEvent); // return prevEvent = nextEvent(prevEvent);
// }
    public NumassBlock generateBlock(Instant start, long length) {
List<NumassEvent> events = new ArrayList<>();
NumassEvent event = nextEvent(null);
while (event.getTimeOffset() < length) {
events.add(event);
event = nextEvent(event);
}
        return new SimpleBlock(start, Duration.ofNanos(length), events);
} }
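A hedged usage sketch of the new generator API; the random source and numbers are illustrative (JDKRandomGenerator is the commons-math3 implementation):

    // Generate one second (1e9 ns) of synthetic events at 100 Hz starting at the epoch.
    NMEventGenerator generator = new NMEventGenerator(new JDKRandomGenerator(), 100.0);
    NumassBlock block = generator.generateBlock(Instant.EPOCH, (long) 1e9);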
private double nextExpDecay(double mean) { private double nextExpDecay(double mean) {

View File

@ -25,14 +25,13 @@ public class NMEventGeneratorWithPulser extends NMEventGenerator {
pulserEvent = generatePulserEvent(); pulserEvent = generatePulserEvent();
} }
@Override
public synchronized NumassEvent get() { public synchronized NumassEvent get() {
//expected next event //expected next event
if (nextEvent == null) { if (nextEvent == null) {
nextEvent = nextEvent(prevEvent); nextEvent = nextEvent(prevEvent);
} }
//if pulser event is first, then leave next event as is and return pulser event //if pulser event is first, then leave next event as is and return pulser event
if (pulserEvent.getTime() < nextEvent.getTime()) { if (pulserEvent.getTimeOffset() < nextEvent.getTimeOffset()) {
NumassEvent res = pulserEvent; NumassEvent res = pulserEvent;
pulserEvent = generatePulserEvent(); pulserEvent = generatePulserEvent();
return res; return res;
@ -48,10 +47,10 @@ public class NMEventGeneratorWithPulser extends NMEventGenerator {
short channel = (short) pulserChanelDistribution.sample(); short channel = (short) pulserChanelDistribution.sample();
double time; double time;
if (pulserEvent == null) { if (pulserEvent == null) {
time = rnd.nextDouble() * pulserDist; time = rnd.nextDouble() * pulserDist * 1e9;
} else { } else {
time = pulserEvent.getTime() + pulserDist; time = pulserEvent.getTimeOffset() + pulserDist*1e9;
} }
return new NumassEvent(channel, time); return new NumassEvent(channel, (long) time);
} }
} }

View File

@ -0,0 +1,157 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.utils;
import hep.dataforge.io.envelopes.DefaultEnvelopeWriter;
import hep.dataforge.io.envelopes.EnvelopeBuilder;
import hep.dataforge.io.markup.Markedup;
import hep.dataforge.io.markup.SimpleMarkupRenderer;
import hep.dataforge.meta.Meta;
import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.Table;
import hep.dataforge.tables.TableFormat;
import hep.dataforge.tables.TableFormatBuilder;
import hep.dataforge.values.Values;
import org.apache.commons.math3.analysis.UnivariateFunction;
import java.io.IOException;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import static hep.dataforge.tables.XYAdapter.X_VALUE_KEY;
import static hep.dataforge.tables.XYAdapter.Y_VALUE_KEY;
import static inr.numass.data.api.NumassAnalyzer.*;
import static java.lang.Math.*;
/**
* @author Darksnake
*/
public class NumassUtils {
/**
 * Integral beta-spectrum background with the given amplitude (total background
 * count rate)
*
* @param amplitude
* @return
*/
public static UnivariateFunction tritiumBackgroundFunction(double amplitude) {
return (e) -> {
            /* pure beta spectrum */
double e0 = 18575d;
double D = e0 - e;//E0-E
if (D <= 0) {
return 0;
}
return amplitude * factor(e) * D * D;
};
}
private static double factor(double E) {
double me = 0.511006E6;
double Etot = E + me;
double pe = sqrt(E * (E + 2d * me));
double ve = pe / Etot;
double yfactor = 2d * 2d * 1d / 137.039 * Math.PI;
double y = yfactor / ve;
double Fn = y / abs(1d - exp(-y));
double Fermi = Fn * (1.002037 - 0.001427 * ve);
double res = Fermi * pe * Etot;
return res * 1E-23;
}
/**
 * Evaluate a Groovy expression using a numass point as the parameter
*
* @param expression
* @param point
* @return
*/
public static double pointExpression(String expression, Values point) {
Map<String, Object> exprParams = new HashMap<>();
//Adding all point values to expression parameters
point.getNames().forEach(name -> exprParams.put(name, point.getValue(name).value()));
//Adding aliases for commonly used parameters
exprParams.put("T", point.getDouble("length"));
exprParams.put("U", point.getDouble("voltage"));
return ExpressionUtils.function(expression, exprParams);
}
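An illustrative call; the expression itself is made up, and `T` and `U` are the aliases defined above:

    // Scale by a hypothetical factor depending on voltage and acquisition time.
    double factor = NumassUtils.pointExpression("1.0 + 1e-6 * U * Math.sqrt(T)", point);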
/**
* Write an envelope wrapping given data to given stream
*
* @param stream
* @param meta
* @param dataWriter
* @throws IOException
*/
public static void writeEnvelope(OutputStream stream, Meta meta, Consumer<OutputStream> dataWriter) {
//TODO replace by text envelope when it is ready
try {
new DefaultEnvelopeWriter().write(
stream,
new EnvelopeBuilder()
.setMeta(meta)
.setData(dataWriter)
.build()
);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public static void writeSomething(OutputStream stream, Meta meta, Markedup something) {
writeEnvelope(stream, meta, out -> new SimpleMarkupRenderer(out).render(something.markup(meta)));
}
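A hedged usage sketch, assuming java.nio.file imports; the file name and meta are illustrative:

    // Write a result table wrapped in an envelope to a local file.
    try (OutputStream out = Files.newOutputStream(Paths.get("transform_result.df"))) {
        NumassUtils.writeSomething(out, meta, res);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }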
/**
* Apply window and binning to a spectrum
*
* @param lo
* @param up
* @param binSize
* @return
*/
public static Table spectrumWithBinning(Table spectrum, int lo, int up, int binSize) {
TableFormat format = new TableFormatBuilder()
.addNumber(CHANNEL_KEY, X_VALUE_KEY)
.addNumber(COUNT_KEY, Y_VALUE_KEY)
.addNumber(COUNT_RATE_KEY)
.addNumber("binSize");
ListTable.Builder builder = new ListTable.Builder(format);
        for (int chan = lo; chan < up - binSize; chan += binSize) {
            AtomicLong count = new AtomicLong(0);
            AtomicReference<Double> countRate = new AtomicReference<>(0d);

            int bin = Math.min(binSize, up - chan);
            int binStart = chan; // effectively final copy for capture in the lambda

            spectrum.getRows().filter(row -> {
                int c = row.getInt(CHANNEL_KEY);
                return c >= binStart && c < binStart + bin; // select this bin only, not the whole window
            }).forEach(row -> {
                count.addAndGet(row.getValue(COUNT_KEY).numberValue().longValue());
                countRate.accumulateAndGet(row.getDouble(COUNT_RATE_KEY), (d1, d2) -> d1 + d2);
            });
            builder.row((double) chan + (double) bin / 2d, count.get(), countRate.get(), bin);
}
return builder.build();
}
}
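A short usage sketch; the window borders and bin size are illustrative:

    // Rebin an amplitude spectrum into 20-channel bins before plotting or fitting.
    Table binned = NumassUtils.spectrumWithBinning(spectrum, 400, 3100, 20);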

View File

@ -5,13 +5,16 @@
*/ */
package inr.numass.utils; package inr.numass.utils;
import inr.numass.data.api.NumassBlock;
import inr.numass.data.api.NumassEvent; import inr.numass.data.api.NumassEvent;
import inr.numass.data.api.SimpleBlock;
import org.apache.commons.math3.random.RandomGenerator; import org.apache.commons.math3.random.RandomGenerator;
import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import static java.lang.Math.max; import static java.lang.Math.max;
@ -20,47 +23,43 @@ import static java.lang.Math.max;
*/ */
public class PileUpSimulator { public class PileUpSimulator {
private final static double us = 1e-6;//microsecond private final static double us = 1e-6;//microsecond
private final double pointLength; private final long pointLength;
private final RandomGenerator rnd; private final RandomGenerator rnd;
private final List<NumassEvent> generated = new ArrayList<>(); private final List<NumassEvent> generated = new ArrayList<>();
private final List<NumassEvent> pileup = new ArrayList<>(); private final List<NumassEvent> pileup = new ArrayList<>();
private final List<NumassEvent> registered = new ArrayList<>(); private final List<NumassEvent> registered = new ArrayList<>();
private Supplier<NumassEvent> generator; private NMEventGenerator generator;
private double uSet = 0; private double uSet = 0;
private AtomicInteger doublePileup = new AtomicInteger(0); private AtomicInteger doublePileup = new AtomicInteger(0);
public PileUpSimulator(double length, RandomGenerator rnd, Supplier<NumassEvent> sup) {
this.rnd = rnd;
generator = sup;//new NMEventGenerator(countRate, rnd);
this.pointLength = length;
}
public PileUpSimulator(double length, RandomGenerator rnd, double countRate) { public PileUpSimulator(long length, RandomGenerator rnd, double countRate) {
this.rnd = rnd; this.rnd = rnd;
generator = new NMEventGenerator(rnd, countRate); generator = new NMEventGenerator(rnd, countRate);
this.pointLength = length; this.pointLength = length;
} }
public PileUpSimulator withGenerator(Supplier<NumassEvent> sup){ public PileUpSimulator(long pointLength, NMEventGenerator generator) {
this.generator = sup; this.pointLength = pointLength;
return this; this.generator = generator;
this.rnd = generator.rnd;
} }
public PileUpSimulator withUset(double uset){ public PileUpSimulator withUset(double uset) {
this.uSet = uset; this.uSet = uset;
return this; return this;
} }
public NumassPoint generated() { public NumassBlock generated() {
return PointBuilders.readRawPoint(new RawNMPoint(uSet, generated, pointLength)); return new SimpleBlock(Instant.EPOCH, Duration.ofNanos(pointLength), generated);
} }
public NumassPoint registered() { public NumassBlock registered() {
return PointBuilders.readRawPoint(new RawNMPoint(uSet, registered, pointLength)); return new SimpleBlock(Instant.EPOCH, Duration.ofNanos(pointLength), registered);
} }
public NumassPoint pileup() { public NumassBlock pileup() {
return PointBuilders.readRawPoint(new RawNMPoint(uSet, pileup, pointLength)); return new SimpleBlock(Instant.EPOCH, Duration.ofNanos(pointLength), pileup);
} }
/** /**
@ -108,32 +107,32 @@ public class PileUpSimulator {
} }
public synchronized PileUpSimulator generate() { public synchronized PileUpSimulator generate() {
NumassEvent next; NumassEvent next = null;
double lastRegisteredTime = 0; // Time of DAQ closing double lastRegisteredTime = 0; // Time of DAQ closing
//flag that shows that previous event was pileup //flag that shows that previous event was pileup
boolean pileupFlag = false; boolean pileupFlag = false;
while (true) { while (true) {
next = generator.get(); next = generator.nextEvent(next);
if (next.getTime() > pointLength) { if (next.getTimeOffset() > pointLength) {
break; break;
} }
generated.add(next); generated.add(next);
//not counting double pileups //not counting double pileups
if (generated.size() > 1) { if (generated.size() > 1) {
                double delay = (next.getTime() - lastRegisteredTime) / us; //time between events in microseconds                double delay = (next.getTimeOffset() - lastRegisteredTime) / 1000d; //time between events in microseconds (offsets are in nanoseconds)
if (nextEventRegistered(next.getChanel(), delay)) { if (nextEventRegistered(next.getChanel(), delay)) {
//just register new event //just register new event
registered.add(next); registered.add(next);
lastRegisteredTime = next.getTime(); lastRegisteredTime = next.getTimeOffset();
pileupFlag = false; pileupFlag = false;
} else if (pileup(delay)) { } else if (pileup(delay)) {
if(pileupFlag){ if (pileupFlag) {
//increase double pileup stack //increase double pileup stack
doublePileup.incrementAndGet(); doublePileup.incrementAndGet();
} else { } else {
//pileup event //pileup event
short newChannel = pileupChannel(delay, next.getChanel(), next.getChanel()); short newChannel = pileupChannel(delay, next.getChanel(), next.getChanel());
NumassEvent newEvent = new NumassEvent(newChannel, next.getTime()); NumassEvent newEvent = new NumassEvent(newChannel, next.getBlockTime(), next.getTimeOffset());
//replace already registered event by event with new channel //replace already registered event by event with new channel
registered.remove(registered.size() - 1); registered.remove(registered.size() - 1);
registered.add(newEvent); registered.add(newEvent);
@ -148,7 +147,7 @@ public class PileUpSimulator {
} else { } else {
//register first event //register first event
registered.add(next); registered.add(next);
lastRegisteredTime = next.getTime(); lastRegisteredTime = next.getTimeOffset();
} }
} }
return this; return this;
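A hedged end-to-end sketch of the simulator; the rate, length, and voltage are illustrative, and times are in nanoseconds in the new API:

    // Simulate a 10-second point at 1 kHz and count events lost to pile-up.
    PileUpSimulator simulator = new PileUpSimulator((long) 10e9, new JDKRandomGenerator(), 1000.0)
            .withUset(14000.0)
            .generate();
    long lost = simulator.generated().getEvents().count()
            - simulator.registered().getEvents().count();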

View File

@ -1,82 +0,0 @@
/*
* Copyright 2015 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.utils;
import hep.dataforge.values.Values;
import org.apache.commons.math3.analysis.UnivariateFunction;
import java.util.HashMap;
import java.util.Map;
import static java.lang.Math.*;
/**
* @author Darksnake
*/
public class TritiumUtils {
/**
 * Integral beta-spectrum background with the given amplitude (total background
 * count rate)
*
* @param amplitude
* @return
*/
public static UnivariateFunction tritiumBackgroundFunction(double amplitude) {
return (e) -> {
            /* pure beta spectrum */
double e0 = 18575d;
double D = e0 - e;//E0-E
if (D <= 0) {
return 0;
}
return amplitude * factor(e) * D * D;
};
}
private static double factor(double E) {
double me = 0.511006E6;
double Etot = E + me;
double pe = sqrt(E * (E + 2d * me));
double ve = pe / Etot;
double yfactor = 2d * 2d * 1d / 137.039 * Math.PI;
double y = yfactor / ve;
double Fn = y / abs(1d - exp(-y));
double Fermi = Fn * (1.002037 - 0.001427 * ve);
double res = Fermi * pe * Etot;
return res * 1E-23;
}
/**
 * Evaluate a Groovy expression using a numass point as the parameter
*
* @param expression
* @param point
* @return
*/
public static double pointExpression(String expression, Values point) {
Map<String, Object> exprParams = new HashMap<>();
//Adding all point values to expression parameters
point.getNames().forEach(name-> exprParams.put(name,point.getValue(name).value()));
//Adding aliases for commonly used parameters
exprParams.put("T", point.getDouble("length"));
exprParams.put("U", point.getDouble("voltage"));
return ExpressionUtils.function(expression, exprParams);
}
}

View File

@ -5,10 +5,13 @@
*/ */
package inr.numass.utils; package inr.numass.utils;
import hep.dataforge.meta.Meta;
import hep.dataforge.tables.ListTable; import hep.dataforge.tables.ListTable;
import hep.dataforge.tables.Table; import hep.dataforge.tables.Table;
import hep.dataforge.tables.ValueMap; import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Values; import hep.dataforge.values.Values;
import inr.numass.data.api.NumassAnalyzer;
import inr.numass.data.api.NumassPoint;
import org.apache.commons.math3.analysis.ParametricUnivariateFunction; import org.apache.commons.math3.analysis.ParametricUnivariateFunction;
import org.apache.commons.math3.exception.DimensionMismatchException; import org.apache.commons.math3.exception.DimensionMismatchException;
import org.apache.commons.math3.fitting.SimpleCurveFitter; import org.apache.commons.math3.fitting.SimpleCurveFitter;
@ -17,6 +20,9 @@ import org.apache.commons.math3.fitting.WeightedObservedPoint;
import java.util.List; import java.util.List;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static inr.numass.data.api.NumassAnalyzer.CHANNEL_KEY;
import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY;
/** /**
* A class to calculate underflow correction * A class to calculate underflow correction
* *
@ -24,6 +30,9 @@ import java.util.stream.Collectors;
*/ */
public class UnderflowCorrection { public class UnderflowCorrection {
private NumassAnalyzer analyzer;
private static String[] pointNames = {"U", "amp", "expConst", "correction"}; private static String[] pointNames = {"U", "amp", "expConst", "correction"};
// private final static int CUTOFF = -200; // private final static int CUTOFF = -200;
@ -62,8 +71,14 @@ public class UnderflowCorrection {
// } // }
public Values fitPoint(NumassPoint point, int xLow, int xHigh, int upper, int binning) { public Values fitPoint(NumassPoint point, int xLow, int xHigh, int upper, int binning) {
double norm = ((double) point.getCountInWindow(xLow, upper)) / point.getLength(); Table spectrum = analyzer.getSpectrum(point, Meta.empty());
double[] fitRes = getUnderflowExpParameters(point, xLow, xHigh, binning);
double norm = spectrum.getRows().filter(row -> {
int channel = row.getInt(CHANNEL_KEY);
return channel > xLow && channel < upper;
        }).mapToDouble(it -> it.getValue(COUNT_RATE_KEY).numberValue().doubleValue()).sum();
double[] fitRes = getUnderflowExpParameters(spectrum, xLow, xHigh, binning);
double a = fitRes[0]; double a = fitRes[0];
double sigma = fitRes[1]; double sigma = fitRes[1];
@ -73,7 +88,7 @@ public class UnderflowCorrection {
public Table fitAllPoints(Iterable<NumassPoint> data, int xLow, int xHigh, int upper, int binning) { public Table fitAllPoints(Iterable<NumassPoint> data, int xLow, int xHigh, int upper, int binning) {
ListTable.Builder builder = new ListTable.Builder(pointNames); ListTable.Builder builder = new ListTable.Builder(pointNames);
for (NumassPoint point : data) { for (NumassPoint point : data) {
builder.row(fitPoint(point,xLow,xHigh,upper,binning)); builder.row(fitPoint(point, xLow, xHigh, upper, binning));
} }
return builder.build(); return builder.build();
} }
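A hedged usage sketch; the window parameters are illustrative, `points` is a hypothetical list of already-loaded NumassPoint objects, and the analyzer field is assumed to be initialized:

    // Extrapolation window (300, 400), upper bound at channel 3100, 20-channel bins.
    Table corrections = new UnderflowCorrection().fitAllPoints(points, 300, 400, 3100, 20);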
@ -82,23 +97,22 @@ public class UnderflowCorrection {
* Calculate underflow exponent parameters using (xLow, xHigh) window for * Calculate underflow exponent parameters using (xLow, xHigh) window for
* extrapolation * extrapolation
* *
* @param point
* @param xLow * @param xLow
* @param xHigh * @param xHigh
* @return * @return
*/ */
private double[] getUnderflowExpParameters(NumassPoint point, int xLow, int xHigh, int binning) { private double[] getUnderflowExpParameters(Table spectrum, int xLow, int xHigh, int binning) {
try { try {
if (xHigh <= xLow) { if (xHigh <= xLow) {
throw new IllegalArgumentException("Wrong borders for underflow calculation"); throw new IllegalArgumentException("Wrong borders for underflow calculation");
} }
List<WeightedObservedPoint> points = point.getMap(binning, false) Table binned = NumassUtils.spectrumWithBinning(spectrum, xLow, xHigh, binning);
.entrySet().stream()
.filter(entry -> entry.getKey() >= xLow && entry.getKey() <= xHigh) List<WeightedObservedPoint> points = binned.getRows()
.map(p -> new WeightedObservedPoint( .map(p -> new WeightedObservedPoint(
1d,//1d / p.getValue() , //weight 1d,//1d / p.getValue() , //weight
p.getKey(), // x p.getDouble(CHANNEL_KEY), // x
p.getValue() / binning / point.getLength()) //y p.getDouble(COUNT_RATE_KEY) / binning ) //y
) )
.collect(Collectors.toList()); .collect(Collectors.toList());
SimpleCurveFitter fitter = SimpleCurveFitter.create(new ExponentFunction(), new double[]{1d, 200d}); SimpleCurveFitter fitter = SimpleCurveFitter.create(new ExponentFunction(), new double[]{1d, 200d});