Numass underflow

darksnake 2017-08-06 18:00:29 +03:00
parent 99baa4e3eb
commit 99f40132c6
5 changed files with 100 additions and 48 deletions

View File: NumassDataUtils.java

@@ -6,12 +6,15 @@ import hep.dataforge.tables.ListTable;
 import hep.dataforge.tables.Table;
 import hep.dataforge.tables.TableFormat;
 import hep.dataforge.tables.TableFormatBuilder;
+import hep.dataforge.values.Value;
 import hep.dataforge.values.Values;
 import inr.numass.data.api.NumassPoint;
 import inr.numass.data.api.NumassSet;

 import java.util.Collection;
 import java.util.Map;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
@@ -79,6 +82,65 @@ public class NumassDataUtils {
         return builder.build();
     }

+    /**
+     * Apply a window and binning to a spectrum. Empty bins are filled with zeroes.
+     *
+     * @param binSize   the bin width in channels
+     * @param loChannel the lower window border, auto-defined from the data if negative
+     * @param upChannel the upper window border, auto-defined from the data if negative
+     * @return the binned spectrum
+     */
+    public static Table spectrumWithBinning(Table spectrum, int binSize, int loChannel, int upChannel) {
+        TableFormat format = new TableFormatBuilder()
+                .addNumber(CHANNEL_KEY, X_VALUE_KEY)
+                .addNumber(COUNT_KEY, Y_VALUE_KEY)
+                .addNumber(COUNT_RATE_KEY)
+                .addNumber(COUNT_RATE_ERROR_KEY)
+                .addNumber("binSize");
+        ListTable.Builder builder = new ListTable.Builder(format);
+        if (loChannel < 0) {
+            loChannel = spectrum.getColumn(CHANNEL_KEY).stream().mapToInt(Value::intValue).min().orElse(0);
+        }
+        if (upChannel < 0) {
+            upChannel = spectrum.getColumn(CHANNEL_KEY).stream().mapToInt(Value::intValue).max().orElse(1);
+        }
+        for (int chan = loChannel; chan < upChannel - binSize; chan += binSize) {
+            AtomicLong count = new AtomicLong(0);
+            AtomicReference<Double> countRate = new AtomicReference<>(0d);
+            AtomicReference<Double> countRateDispersion = new AtomicReference<>(0d);
+            int binLo = chan;
+            int binUp = chan + binSize;
+            spectrum.getRows().filter(row -> {
+                int c = row.getInt(CHANNEL_KEY);
+                return c >= binLo && c < binUp;
+            }).forEach(row -> {
+                count.addAndGet(row.getValue(COUNT_KEY, 0).longValue());
+                countRate.accumulateAndGet(row.getDouble(COUNT_RATE_KEY, 0), (d1, d2) -> d1 + d2);
+                countRateDispersion.accumulateAndGet(row.getDouble(COUNT_RATE_ERROR_KEY, 0), (d1, d2) -> d1 + d2);
+            });
+            int bin = Math.min(binSize, upChannel - chan);
+            builder.row((double) chan + (double) bin / 2d, count.get(), countRate.get(), Math.sqrt(countRateDispersion.get()), bin);
+        }
+        return builder.build();
+    }
+
+    /**
+     * The same as above, but with auto-defined window borders.
+     *
+     * @param spectrum the spectrum to bin
+     * @param binSize  the bin width in channels
+     * @return the binned spectrum
+     */
+    public static Table spectrumWithBinning(Table spectrum, int binSize) {
+        return spectrumWithBinning(spectrum, binSize, -1, -1);
+    }
+
 //    public static Collection<NumassPoint> joinSpectra(Stream<NumassSet> spectra) {
 //        Map<Double, NumassPoint> map = new LinkedHashMap<>();
 //        spectra.forEach(datum -> {

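A minimal usage sketch of the relocated helper, assuming a spectrum table with the channel, count and count-rate columns used above; the BinningSketch class and the loadSpectrum loader are hypothetical scaffolding, not part of the commit.

import hep.dataforge.tables.Table;
import inr.numass.data.NumassDataUtils;

public class BinningSketch {
    public static void main(String[] args) {
        Table spectrum = loadSpectrum(); // hypothetical loader; any table with channel/count/count-rate columns works

        // Sum counts and count rates over 20-channel bins inside the [400, 3100) window;
        // each output row holds the bin center, the summed count and count rate,
        // the square root of the accumulated rate errors, and the bin width.
        Table windowed = NumassDataUtils.spectrumWithBinning(spectrum, 20, 400, 3100);

        // Negative borders fall back to the smallest and largest channel present in the table.
        Table full = NumassDataUtils.spectrumWithBinning(spectrum, 20);
    }

    private static Table loadSpectrum() {
        throw new UnsupportedOperationException("supply a real spectrum table here");
    }
}

The explicit-border overload is what the script below uses to cut every spectrum to the same 400-3100 channel window before subtraction.
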
View File: NumassAnalyzer.java

@@ -2,11 +2,9 @@ package inr.numass.data.api;

 import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.*;
-import hep.dataforge.values.Value;
 import hep.dataforge.values.Values;

 import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.atomic.AtomicReference;
 import java.util.stream.IntStream;
 import java.util.stream.Stream;
@@ -34,44 +32,7 @@ public interface NumassAnalyzer {
         }).mapToLong(it -> it.getValue(COUNT_KEY).numberValue().longValue()).sum();
     }

-    /**
-     * Apply window and binning to a spectrum
-     *
-     * @param binSize
-     * @return
-     */
-    static Table spectrumWithBinning(Table spectrum, int binSize) {
-        TableFormat format = new TableFormatBuilder()
-                .addNumber(CHANNEL_KEY, X_VALUE_KEY)
-                .addNumber(COUNT_KEY, Y_VALUE_KEY)
-                .addNumber(COUNT_RATE_KEY)
-                .addNumber(COUNT_RATE_ERROR_KEY)
-                .addNumber("binSize");
-        ListTable.Builder builder = new ListTable.Builder(format);
-        int loChannel = spectrum.getColumn(CHANNEL_KEY).stream().mapToInt(Value::intValue).min().orElse(0);
-        int upChannel = spectrum.getColumn(CHANNEL_KEY).stream().mapToInt(Value::intValue).max().orElse(1);
-        for (int chan = loChannel; chan < upChannel - binSize; chan += binSize) {
-            AtomicLong count = new AtomicLong(0);
-            AtomicReference<Double> countRate = new AtomicReference<>(0d);
-            AtomicReference<Double> countRateDispersion = new AtomicReference<>(0d);
-            int binLo = chan;
-            int binUp = chan + binSize;
-            spectrum.getRows().filter(row -> {
-                int c = row.getInt(CHANNEL_KEY);
-                return c >= binLo && c < binUp;
-            }).forEach(row -> {
-                count.addAndGet(row.getValue(COUNT_KEY, 0).longValue());
-                countRate.accumulateAndGet(row.getDouble(COUNT_RATE_KEY, 0), (d1, d2) -> d1 + d2);
-                countRateDispersion.accumulateAndGet(row.getDouble(COUNT_RATE_ERROR_KEY, 0), (d1, d2) -> d1 + d2);
-            });
-            int bin = Math.min(binSize, upChannel - chan);
-            builder.row((double) chan + (double) bin / 2d, count.get(), countRate.get(), Math.sqrt(countRateDispersion.get()), bin);
-        }
-        return builder.build();
-    }
-
     String CHANNEL_KEY = "channel";
     String COUNT_KEY = "count";

View File

@@ -8,10 +8,10 @@ import hep.dataforge.grind.GrindShell
 import hep.dataforge.io.ColumnedDataWriter
 import hep.dataforge.meta.Meta
 import hep.dataforge.plots.fx.FXPlotManager
+import hep.dataforge.tables.ColumnTable
 import hep.dataforge.tables.Table
 import inr.numass.NumassPlugin
 import inr.numass.data.NumassDataUtils
-import inr.numass.data.api.NumassAnalyzer

 import static hep.dataforge.grind.Grind.buildMeta
@@ -22,7 +22,7 @@ ctx.pluginManager().load(CachePlugin.class)

 Meta meta = buildMeta {
     data(dir: "D:\\Work\\Numass\\data\\2017_05\\Fill_2", mask: "set_.{1,3}")
-    generate(t0: 3e4, sort: true)
+    generate(t0: 3e4, sort: false)
 }

 def shell = new GrindShell(ctx);
@@ -30,10 +30,38 @@ def shell = new GrindShell(ctx);

 DataNode<Table> spectra = UnderflowUtils.getSpectraMap(shell, meta);

 shell.eval {
-    Table p17100 = NumassAnalyzer.spectrumWithBinning(spectra.optData("17100").get().get(), 20);
-    Table p17000 = NumassAnalyzer.spectrumWithBinning(spectra.optData("17000").get().get(), 20);
-    Table subtract = NumassDataUtils.subtractSpectrum(p17100, p17000);
-    ColumnedDataWriter.writeTable(System.out, subtract, "Response function")
+    def columns = [:];
+    Map<Integer, Table> binned = [:]
+
+    (14500..17500).step(500).each {
+        Table up = binned.computeIfAbsent(it) { key ->
+            NumassDataUtils.spectrumWithBinning(spectra.optData(key as String).get().get(), 20, 400, 3100);
+        }
+        Table lo = binned.computeIfAbsent(it - 500) { key ->
+            NumassDataUtils.spectrumWithBinning(spectra.optData(key as String).get().get(), 20, 400, 3100);
+        }
+        columns << [channel: up.channel]
+        columns << [(it as String): NumassDataUtils.subtractSpectrum(lo, up).getColumn("cr")]
+    }
+
+    ColumnedDataWriter.writeTable(System.out, ColumnTable.of(columns), "Response function")
+
+//    println()
+//    println()
+//
+//    columns.clear()
+//
+//    binned.each { key, table ->
+//        columns << [channel: table.channel]
+//        columns << [(key as String): table.cr]
+//    }
+//
+//    ColumnedDataWriter.writeTable(System.out,
+//            ColumnTable.of(columns),
+//            "Spectra")
 }

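The new loop pairs each voltage point with the one 500 V below it and keeps the count-rate column of the difference. A rough Java rendition of that pairing, assuming subtractSpectrum(a, b) yields the channel-wise difference a - b and that the binned map is already filled for every point from 14000 to 17500; the ResponsePairing class is illustrative, not part of the commit.

import java.util.LinkedHashMap;
import java.util.Map;

import hep.dataforge.tables.Table;
import inr.numass.data.NumassDataUtils;

public class ResponsePairing {
    /**
     * For each voltage u from 14500 to 17500 in 500 V steps, subtract the
     * spectrum at u from the one at u - 500, mirroring the Groovy loop above.
     */
    public static Map<Integer, Table> pairwiseDifferences(Map<Integer, Table> binned) {
        Map<Integer, Table> result = new LinkedHashMap<>();
        for (int u = 14500; u <= 17500; u += 500) {
            Table up = binned.get(u);       // spectrum at the higher retarding voltage
            Table lo = binned.get(u - 500); // spectrum 500 V below
            result.put(u, NumassDataUtils.subtractSpectrum(lo, up));
        }
        return result;
    }
}
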
View File: UnderflowFitter.groovy

@@ -6,7 +6,7 @@ import hep.dataforge.tables.Table
 import hep.dataforge.tables.TableTransform
 import hep.dataforge.tables.ValueMap
 import hep.dataforge.values.Values
-import inr.numass.data.api.NumassAnalyzer
+import inr.numass.data.NumassDataUtils
 import org.apache.commons.math3.analysis.ParametricUnivariateFunction
 import org.apache.commons.math3.exception.DimensionMismatchException
 import org.apache.commons.math3.fitting.SimpleCurveFitter
@@ -56,7 +56,7 @@ class UnderflowFitter {
             throw new IllegalArgumentException("Wrong borders for underflow calculation");
         }
         Table binned = TableTransform.filter(
-                NumassAnalyzer.spectrumWithBinning(spectrum, binning),
+                NumassDataUtils.spectrumWithBinning(spectrum, binning),
                 CHANNEL_KEY,
                 xLow,
                 xHigh

View File: UnderflowCorrection.java

@@ -11,6 +11,7 @@ import hep.dataforge.tables.Table;
 import hep.dataforge.tables.TableTransform;
 import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Values;
+import inr.numass.data.NumassDataUtils;
 import inr.numass.data.api.NumassAnalyzer;
 import inr.numass.data.api.NumassPoint;
 import org.apache.commons.math3.analysis.ParametricUnivariateFunction;
@@ -109,7 +110,7 @@ public class UnderflowCorrection {
             throw new IllegalArgumentException("Wrong borders for underflow calculation");
         }
         Table binned = TableTransform.filter(
-                NumassAnalyzer.spectrumWithBinning(spectrum, binning),
+                NumassDataUtils.spectrumWithBinning(spectrum, binning),
                 CHANNEL_KEY,
                 xLow,
                 xHigh
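
In both fitters only the owner of spectrumWithBinning changes; the preparation step itself is unchanged: bin the spectrum, then cut it to the [xLow, xHigh] channel window. A sketch of that step, assuming TableTransform.filter(table, key, lo, hi) keeps the rows whose key column falls inside the bounds; the FitWindowSketch wrapper is illustrative, not part of the commit.

import hep.dataforge.tables.Table;
import hep.dataforge.tables.TableTransform;
import inr.numass.data.NumassDataUtils;

public class FitWindowSketch {
    // Channel column name, as defined in NumassAnalyzer.
    private static final String CHANNEL_KEY = "channel";

    /**
     * Bin the spectrum and restrict it to the fit window, as done before the
     * underflow fit in UnderflowFitter and UnderflowCorrection.
     */
    public static Table prepare(Table spectrum, int binning, int xLow, int xHigh) {
        return TableTransform.filter(
                NumassDataUtils.spectrumWithBinning(spectrum, binning),
                CHANNEL_KEY,
                xLow,
                xHigh
        );
    }
}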