Revising numass scripts

darksnake 2017-07-29 11:24:13 +03:00
parent c7369b6b34
commit bc1b79784d
5 changed files with 87 additions and 53 deletions

View File: NumassDataUtils.java

@@ -6,10 +6,12 @@ import hep.dataforge.tables.ListTable;
 import hep.dataforge.tables.Table;
 import hep.dataforge.tables.TableFormat;
 import hep.dataforge.tables.TableFormatBuilder;
+import hep.dataforge.values.Values;
 import inr.numass.data.api.NumassPoint;
 import inr.numass.data.api.NumassSet;
 
 import java.util.Collection;
+import java.util.Optional;
 import java.util.stream.Stream;
 
 import static hep.dataforge.tables.XYAdapter.*;
@@ -56,8 +58,17 @@ public class NumassDataUtils {
                 .build();
         ListTable.Builder builder = new ListTable.Builder(format);
         for (int i = 0; i < sp1.size(); i++) {
-            double value = sp1.getDouble(COUNT_RATE_KEY, i) - sp2.getDouble(COUNT_RATE_KEY, i);
-            double error = Math.sqrt(Math.pow(sp1.getDouble(COUNT_RATE_ERROR_KEY, i), 2d) + Math.pow(sp2.getDouble(COUNT_RATE_ERROR_KEY, i), 2d));
+            Values row1 = sp1.getRow(i);
+            Optional<Values> row2 = sp2.getRows().filter(row -> row.getDouble(CHANNEL_KEY) == row1.getDouble(CHANNEL_KEY)).findFirst();
+            double value1 = row1.getDouble(COUNT_RATE_KEY);
+            double value2 = row2.map(it -> it.getDouble(COUNT_RATE_KEY)).orElse(0d);
+            double error1 = row1.getDouble(COUNT_RATE_ERROR_KEY);
+            double error2 = row2.map(it -> it.getDouble(COUNT_RATE_ERROR_KEY)).orElse(0d);
+            double value = value1 - value2;
+            double error = Math.sqrt(error1 * error1 + error2 * error2);
             builder.row(sp1.get(CHANNEL_KEY, i).intValue(), value, error);
         }
         return builder.build();
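
The rewritten loop pairs rows by CHANNEL_KEY instead of assuming identical row ordering, and combines the two count-rate errors in quadrature: error = sqrt(error1^2 + error2^2). A minimal stand-alone sketch of the same arithmetic, using plain channel-keyed maps as a hypothetical stand-in for the dataforge Table API:

import java.util.Map;
import java.util.TreeMap;

final class SubtractSketch {
    // spectra as channel -> {countRate, countRateError}
    static Map<Integer, double[]> subtract(Map<Integer, double[]> sp1, Map<Integer, double[]> sp2) {
        Map<Integer, double[]> result = new TreeMap<>();
        sp1.forEach((channel, v1) -> {
            // channels missing from the subtracted spectrum contribute 0, matching orElse(0d) above
            double[] v2 = sp2.getOrDefault(channel, new double[]{0d, 0d});
            double value = v1[0] - v2[0];
            // independent statistical errors add in quadrature
            double error = Math.sqrt(v1[1] * v1[1] + v2[1] * v2[1]);
            result.put(channel, new double[]{value, error});
        });
        return result;
    }
}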

View File: NumassAnalyzer.java

@@ -2,12 +2,12 @@ package inr.numass.data.api;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.*;
+import hep.dataforge.values.Value;
 import hep.dataforge.values.Values;
 
-import java.util.NavigableMap;
-import java.util.TreeMap;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
+import java.util.stream.IntStream;
 import java.util.stream.Stream;
 
 import static hep.dataforge.tables.XYAdapter.*;
@@ -17,9 +17,11 @@ import static hep.dataforge.tables.XYAdapter.*;
  * Created by darksnake on 06-Jul-17.
  */
 public interface NumassAnalyzer {
+    short MAX_CHANNEL = 10000;
 
     /**
      * Calculate number of counts in the given channel
+     *
      * @param spectrum
      * @param loChannel
      * @param upChannel
@@ -35,19 +37,20 @@ public interface NumassAnalyzer {
    /**
     * Apply window and binning to a spectrum
     *
-     * @param lo
-     * @param up
     * @param binSize
     * @return
     */
-    static Table spectrumWithBinning(Table spectrum, int lo, int up, int binSize) {
+    static Table spectrumWithBinning(Table spectrum, int binSize) {
        TableFormat format = new TableFormatBuilder()
                .addNumber(CHANNEL_KEY, X_VALUE_KEY)
                .addNumber(COUNT_KEY, Y_VALUE_KEY)
                .addNumber(COUNT_RATE_KEY)
                .addNumber("binSize");
        ListTable.Builder builder = new ListTable.Builder(format);
-        for (int chan = lo; chan < up - binSize; chan += binSize) {
+        int loChannel = spectrum.getColumn(CHANNEL_KEY).stream().mapToInt(Value::intValue).min().orElse(0);
+        int upChannel = spectrum.getColumn(CHANNEL_KEY).stream().mapToInt(Value::intValue).max().orElse(1);
+        for (int chan = loChannel; chan < upChannel - binSize; chan += binSize) {
            AtomicLong count = new AtomicLong(0);
            AtomicReference<Double> countRate = new AtomicReference<>(0d);
@@ -61,7 +64,7 @@ public interface NumassAnalyzer {
                count.addAndGet(row.getValue(COUNT_KEY).numberValue().longValue());
                countRate.accumulateAndGet(row.getDouble(COUNT_RATE_KEY), (d1, d2) -> d1 + d2);
            });
-            int bin = Math.min(binSize, up - chan);
+            int bin = Math.min(binSize, upChannel - chan);
            builder.row((double) chan + (double) bin / 2d, count.get(), countRate.get(), bin);
        }
        return builder.build();
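
spectrumWithBinning now infers the channel window from the data itself (min and max of the CHANNEL_KEY column) instead of taking lo and up arguments, so the call sites in UnderflowCorrection and NumassLoaderView below drop those parameters. A sketch of the same binning loop over a plain sorted channel-to-count map; the map layout is an assumption made only to keep the example free of the Table API:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.NavigableMap;

final class BinningSketch {
    // spectrum: channel -> count, sorted by channel; assumes a non-empty spectrum
    static Map<Double, Long> bin(NavigableMap<Integer, Long> spectrum, int binSize) {
        // channel bounds inferred from the data, as in the patch
        int loChannel = spectrum.firstKey();
        int upChannel = spectrum.lastKey();
        Map<Double, Long> binned = new LinkedHashMap<>();
        for (int chan = loChannel; chan < upChannel - binSize; chan += binSize) {
            // bin width, clamped to the remaining channel range as in the patch
            int bin = Math.min(binSize, upChannel - chan);
            long count = spectrum.subMap(chan, true, chan + bin, false)
                    .values().stream().mapToLong(Long::longValue).sum();
            binned.put(chan + bin / 2d, count); // key each bin by its center channel
        }
        return binned;
    }
}
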
@@ -114,24 +117,32 @@ public interface NumassAnalyzer {
                .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
                .updateMeta(metaBuilder -> metaBuilder.setNode("config", config))
                .build();
-        NavigableMap<Short, AtomicLong> map = new TreeMap<>();
+        //optimized for fastest computation
+        //TODO requires additional performance optimization
+        AtomicLong[] spectrum = new AtomicLong[MAX_CHANNEL];
        getEventStream(block, config).forEach(event -> {
-            if (map.containsKey(event.getChanel())) {
-                map.get(event.getChanel()).incrementAndGet();
+            if (spectrum[event.getChanel()] == null) {
+                spectrum[event.getChanel()] = new AtomicLong(1);
            } else {
-                map.put(event.getChanel(), new AtomicLong(1));
+                spectrum[event.getChanel()].incrementAndGet();
            }
        });
+        double seconds = (double) block.getLength().toMillis() / 1000d;
        return new ListTable.Builder(format)
-                .rows(map.entrySet().stream()
-                        .map(entry ->
-                                new ValueMap(format.namesAsArray(),
-                                        entry.getKey(),
-                                        entry.getValue(),
-                                        (double) entry.getValue().get() / block.getLength().toMillis() * 1000d,
-                                        Math.sqrt(entry.getValue().get()) / block.getLength().toMillis() * 1000d
-                                )
-                        )
+                .rows(IntStream.range(0, MAX_CHANNEL)
+                        .filter(i -> spectrum[i] != null)
+                        .mapToObj(i -> {
+                            long value = spectrum[i].get();
+                            return new ValueMap(
+                                    format.namesAsArray(),
+                                    i,
+                                    value,
+                                    (double) value / seconds,
+                                    Math.sqrt(value) / seconds
+                            );
+                        })
                ).build();
    }
@@ -156,6 +167,4 @@ public interface NumassAnalyzer {
    default long getLength(NumassBlock block, Meta config) {
        return analyze(block, config).getValue(LENGTH_KEY).numberValue().longValue();
    }
 }
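
The getSpectrum rewrite above trades the TreeMap<Short, AtomicLong> for a flat AtomicLong array indexed by channel and derives the rate columns from Poisson statistics: rate = N / t and rateError = sqrt(N) / t, with t taken from block.getLength(). A compact sketch of that accumulation, with the event stream reduced to a bare IntStream of channel numbers (a simplification of the NumassBlock event API):

import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.IntStream;

final class HistogramSketch {
    static final int MAX_CHANNEL = 10000;

    // returns {channel, count, countRate, countRateError} for each non-empty channel
    static double[][] spectrum(IntStream channels, double seconds) {
        AtomicLong[] counts = new AtomicLong[MAX_CHANNEL];
        // as in the patch, the null check plus assignment is not atomic,
        // so this assumes a sequential (non-parallel) event stream
        channels.forEach(channel -> {
            if (counts[channel] == null) {
                counts[channel] = new AtomicLong(1);
            } else {
                counts[channel].incrementAndGet();
            }
        });
        return IntStream.range(0, MAX_CHANNEL)
                .filter(i -> counts[i] != null)
                .mapToObj(i -> {
                    long count = counts[i].get();
                    // Poisson statistics: rate = N / t, error = sqrt(N) / t
                    return new double[]{i, count, count / seconds, Math.sqrt(count) / seconds};
                })
                .toArray(double[][]::new);
    }
}

For example, spectrum(IntStream.of(5, 5, 7), 2d) yields rows for channel 5 (count 2, rate 1.0) and channel 7 (count 1, rate 0.5).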

View File

@@ -54,40 +54,28 @@ Map spectra = allPoints.collectEntries {
     return [(point.voltage): analyzer.getSpectrum(point, analyzerMeta)]
 }
 
+def refereceVoltage = 18600d
+int binning = 20
+
 //subtracting reference point
-def refereceVoltage = 18600
-def referencePoint = spectra[refereceVoltage]
-if (referencePoint) {
+if (refereceVoltage) {
+    def referencePoint = spectra[refereceVoltage]
     spectra = spectra.findAll { it.key != refereceVoltage }.collectEntries {
         return [(it.key): NumassDataUtils.subtractSpectrum(it.getValue() as Table, referencePoint as Table)]
     }
 }
 
-//println "Empty files:"
-//Collection<NMPoint> emptySpectra = NumassDataUtils.joinSpectra(
-//        StorageUtils.loaderStream(storage).filter{ it.key.matches("Empty.*")}.map {
-//            println it.key
-//            it.value
-//        }
-//)
-//emptySpectra = NumassDataUtils.substractReferencePoint(emptySpectra,18600);
-//
-//data = data.collect { point ->
-//    NMPoint ref = emptySpectra.find { it.u == point.u }
-//    if (ref) {
-//        println "substracting tritium background for point ${point.u}"
-//        NumassDataUtils.substractPoint(point, ref)
-//    } else {
-//        println "point ${point.u} skipped"
-//        point
-//    }
-//}
+//Apply binning
+if (binning) {
+    spectra = spectra.collectEntries {
+        [(it.key): NumassAnalyzer.spectrumWithBinning(it.value as Table, binning)];
+    }
+}
 
 //printing selected points
-def printPoint = { Map<Double, Table> points, int binning = 20, normalize = true ->
+def printPoint = { Map<Double, Table> points ->
     print "channel"
     points.each { print "\t${it.key}" }
     println()
@@ -113,3 +101,23 @@ println()
 //Table t = new UnderflowCorrection().fitAllPoints(data, 350, 550, 3100, 20);
 //ColumnedDataWriter.writeTable(System.out, t, "underflow parameters")
+
+//println "Empty files:"
+//Collection<NMPoint> emptySpectra = NumassDataUtils.joinSpectra(
+//        StorageUtils.loaderStream(storage).filter{ it.key.matches("Empty.*")}.map {
+//            println it.key
+//            it.value
+//        }
+//)
+//emptySpectra = NumassDataUtils.substractReferencePoint(emptySpectra,18600);
+//
+//data = data.collect { point ->
+//    NMPoint ref = emptySpectra.find { it.u == point.u }
+//    if (ref) {
+//        println "substracting tritium background for point ${point.u}"
+//        NumassDataUtils.substractPoint(point, ref)
+//    } else {
+//        println "point ${point.u} skipped"
+//        point
+//    }
+//}
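
Taken together, the script now picks a reference voltage, subtracts the reference spectrum from every other point, and bins the result. A hedged Java rendering of that flow, modeling spectra as channel-to-rate maps; the Table plumbing and the error column are left out for brevity:

import java.util.Map;
import java.util.TreeMap;

final class ScriptFlowSketch {
    static Map<Double, Map<Integer, Double>> process(Map<Double, Map<Integer, Double>> spectra,
                                                     double referenceVoltage, int binSize) {
        Map<Integer, Double> reference = spectra.get(referenceVoltage);
        if (reference == null) {
            return spectra; // no reference point found: leave the spectra untouched
        }
        Map<Double, Map<Integer, Double>> result = new TreeMap<>();
        spectra.forEach((voltage, spectrum) -> {
            if (voltage == referenceVoltage) {
                return; // the reference point itself is dropped, as in findAll above
            }
            // channel-by-channel subtraction; channels absent from the reference count as 0
            Map<Integer, Double> subtracted = new TreeMap<>();
            spectrum.forEach((channel, rate) ->
                    subtracted.put(channel, rate - reference.getOrDefault(channel, 0d)));
            result.put(voltage, bin(subtracted, binSize));
        });
        return result;
    }

    // sum rates over consecutive binSize-wide channel windows, keyed by window start
    static Map<Integer, Double> bin(Map<Integer, Double> spectrum, int binSize) {
        Map<Integer, Double> binned = new TreeMap<>();
        spectrum.forEach((channel, rate) ->
                binned.merge(channel / binSize * binSize, rate, Double::sum));
        return binned;
    }
}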

View File: UnderflowCorrection.java

@@ -8,6 +8,7 @@ package inr.numass.utils;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.ListTable;
 import hep.dataforge.tables.Table;
+import hep.dataforge.tables.TableTransform;
 import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Values;
 import inr.numass.data.api.NumassAnalyzer;
@@ -106,13 +107,18 @@ public class UnderflowCorrection {
        if (xHigh <= xLow) {
            throw new IllegalArgumentException("Wrong borders for underflow calculation");
        }
-        Table binned = NumassAnalyzer.spectrumWithBinning(spectrum, xLow, xHigh, binning);
+        Table binned = TableTransform.filter(
+                NumassAnalyzer.spectrumWithBinning(spectrum, binning),
+                CHANNEL_KEY,
+                xLow,
+                xHigh
+        );
 
        List<WeightedObservedPoint> points = binned.getRows()
                .map(p -> new WeightedObservedPoint(
                        1d,//1d / p.getValue() , //weight
                        p.getDouble(CHANNEL_KEY), // x
-                        p.getDouble(COUNT_RATE_KEY) / binning ) //y
+                        p.getDouble(COUNT_RATE_KEY) / binning) //y
                )
                .collect(Collectors.toList());
 
        SimpleCurveFitter fitter = SimpleCurveFitter.create(new ExponentFunction(), new double[]{1d, 200d});
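
The fit itself runs on Apache Commons Math (SimpleCurveFitter, WeightedObservedPoint) with start values {1, 200}. ExponentFunction is project code whose exact form is not shown here, so the self-contained sketch below substitutes a hypothetical a * exp(x / b) model, chosen only to make the example runnable:

import java.util.ArrayList;
import java.util.List;
import org.apache.commons.math3.analysis.ParametricUnivariateFunction;
import org.apache.commons.math3.fitting.SimpleCurveFitter;
import org.apache.commons.math3.fitting.WeightedObservedPoint;

final class UnderflowFitSketch {
    static double[] fitExponent(double[] channels, double[] rates) {
        // hypothetical exponential model: f(x; a, b) = a * exp(x / b)
        ParametricUnivariateFunction model = new ParametricUnivariateFunction() {
            @Override
            public double value(double x, double... p) {
                return p[0] * Math.exp(x / p[1]);
            }

            @Override
            public double[] gradient(double x, double... p) {
                double e = Math.exp(x / p[1]);
                // partial derivatives with respect to a and b
                return new double[]{e, -p[0] * x * e / (p[1] * p[1])};
            }
        };
        List<WeightedObservedPoint> points = new ArrayList<>();
        for (int i = 0; i < channels.length; i++) {
            // unit weights, matching the 1d weight in the patch
            points.add(new WeightedObservedPoint(1d, channels[i], rates[i]));
        }
        // same start values as the patch; fit(...) returns the optimized {a, b}
        return SimpleCurveFitter.create(model, new double[]{1d, 200d}).fit(points);
    }
}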

View File: NumassLoaderView.kt

@@ -324,7 +324,7 @@ class NumassLoaderView : View() {
            PlottableData.plot(
                    seriesName,
                    XYAdapter(NumassAnalyzer.CHANNEL_KEY, valueAxis),
-                    NumassAnalyzer.spectrumWithBinning(getSpectrum(point), 0, 4000, binning)
+                    NumassAnalyzer.spectrumWithBinning(getSpectrum(point), binning)
            ).apply {
                configure(plottableConfig)
            }.also {