Revising numass scripts

parent c7369b6b34
commit bc1b79784d
@@ -6,10 +6,12 @@ import hep.dataforge.tables.ListTable;
 import hep.dataforge.tables.Table;
 import hep.dataforge.tables.TableFormat;
 import hep.dataforge.tables.TableFormatBuilder;
+import hep.dataforge.values.Values;
 import inr.numass.data.api.NumassPoint;
 import inr.numass.data.api.NumassSet;
 
 import java.util.Collection;
+import java.util.Optional;
 import java.util.stream.Stream;
 
 import static hep.dataforge.tables.XYAdapter.*;
@@ -56,8 +58,17 @@ public class NumassDataUtils {
                 .build();
         ListTable.Builder builder = new ListTable.Builder(format);
         for (int i = 0; i < sp1.size(); i++) {
-            double value = sp1.getDouble(COUNT_RATE_KEY, i) - sp2.getDouble(COUNT_RATE_KEY, i);
-            double error = Math.sqrt(Math.pow(sp1.getDouble(COUNT_RATE_ERROR_KEY, i), 2d) + Math.pow(sp2.getDouble(COUNT_RATE_ERROR_KEY, i), 2d));
+            Values row1 = sp1.getRow(i);
+            Optional<Values> row2 = sp2.getRows().filter(row -> row.getDouble(CHANNEL_KEY) == row1.getDouble(CHANNEL_KEY)).findFirst();
+
+            double value1 = row1.getDouble(COUNT_RATE_KEY);
+            double value2 = row2.map(it -> it.getDouble(COUNT_RATE_KEY)).orElse(0d);
+
+            double error1 = row1.getDouble(COUNT_RATE_ERROR_KEY);
+            double error2 = row2.map(it -> it.getDouble(COUNT_RATE_ERROR_KEY)).orElse(0d);
+
+            double value = value1 - value2;
+            double error = Math.sqrt(error1 * error1 + error2 * error2);
             builder.row(sp1.get(CHANNEL_KEY, i).intValue(), value, error);
         }
         return builder.build();
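Note: subtractSpectrum now matches rows by CHANNEL_KEY instead of assuming both tables share the same row ordering, and a channel missing from the reference spectrum contributes zero. The arithmetic itself is unchanged; a minimal standalone sketch of the rule (helper name hypothetical, not part of this commit):

    // count rates subtract directly; their errors combine in quadrature
    static double[] subtractWithError(double rate1, double error1, double rate2, double error2) {
        double value = rate1 - rate2;
        double error = Math.sqrt(error1 * error1 + error2 * error2);
        return new double[]{value, error};
    }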
@@ -2,12 +2,12 @@ package inr.numass.data.api;
 
 import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.*;
+import hep.dataforge.values.Value;
 import hep.dataforge.values.Values;
 
-import java.util.NavigableMap;
-import java.util.TreeMap;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
+import java.util.stream.IntStream;
 import java.util.stream.Stream;
 
 import static hep.dataforge.tables.XYAdapter.*;
@@ -17,9 +17,11 @@ import static hep.dataforge.tables.XYAdapter.*;
  * Created by darksnake on 06-Jul-17.
  */
 public interface NumassAnalyzer {
+    short MAX_CHANNEL = 10000;
+
     /**
      * Calculate number of counts in the given channel
      *
      * @param spectrum
      * @param loChannel
      * @param upChannel
@@ -35,19 +37,20 @@ public interface NumassAnalyzer {
     /**
      * Apply window and binning to a spectrum
      *
-     * @param lo
-     * @param up
      * @param binSize
      * @return
      */
-    static Table spectrumWithBinning(Table spectrum, int lo, int up, int binSize) {
+    static Table spectrumWithBinning(Table spectrum, int binSize) {
         TableFormat format = new TableFormatBuilder()
                 .addNumber(CHANNEL_KEY, X_VALUE_KEY)
                 .addNumber(COUNT_KEY, Y_VALUE_KEY)
                 .addNumber(COUNT_RATE_KEY)
                 .addNumber("binSize");
         ListTable.Builder builder = new ListTable.Builder(format);
-        for (int chan = lo; chan < up - binSize; chan += binSize) {
+        int loChannel = spectrum.getColumn(CHANNEL_KEY).stream().mapToInt(Value::intValue).min().orElse(0);
+        int upChannel = spectrum.getColumn(CHANNEL_KEY).stream().mapToInt(Value::intValue).max().orElse(1);
+
+        for (int chan = loChannel; chan < upChannel - binSize; chan += binSize) {
             AtomicLong count = new AtomicLong(0);
             AtomicReference<Double> countRate = new AtomicReference<>(0d);
 
@@ -61,7 +64,7 @@ public interface NumassAnalyzer {
                 count.addAndGet(row.getValue(COUNT_KEY).numberValue().longValue());
                 countRate.accumulateAndGet(row.getDouble(COUNT_RATE_KEY), (d1, d2) -> d1 + d2);
             });
-            int bin = Math.min(binSize, up - chan);
+            int bin = Math.min(binSize, upChannel - chan);
             builder.row((double) chan + (double) bin / 2d, count.get(), countRate.get(), bin);
         }
         return builder.build();
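Note: spectrumWithBinning no longer takes a (lo, up) window; the loop bounds are now inferred from the min and max of the spectrum's own channel column. Callers that still need a window (UnderflowCorrection below, and previously the Kotlin view with its explicit 0..4000 range) cut by channel separately. A sketch of the replacement pattern, mirroring the TableTransform.filter call later in this commit (lo and up are illustrative):

    // bin the whole spectrum first, then cut to the [lo, up] channel window
    Table binned = TableTransform.filter(
            NumassAnalyzer.spectrumWithBinning(spectrum, binSize),
            CHANNEL_KEY, lo, up
    );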
@@ -114,24 +117,32 @@ public interface NumassAnalyzer {
                 .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
                 .updateMeta(metaBuilder -> metaBuilder.setNode("config", config))
                 .build();
-        NavigableMap<Short, AtomicLong> map = new TreeMap<>();
+
+        //optimized for fastest computation
+        //TODO requires additional performance optimization
+        AtomicLong[] spectrum = new AtomicLong[MAX_CHANNEL];
         getEventStream(block, config).forEach(event -> {
-            if (map.containsKey(event.getChanel())) {
-                map.get(event.getChanel()).incrementAndGet();
+            if (spectrum[event.getChanel()] == null) {
+                spectrum[event.getChanel()] = new AtomicLong(1);
             } else {
-                map.put(event.getChanel(), new AtomicLong(1));
+                spectrum[event.getChanel()].incrementAndGet();
             }
         });
+
+        double seconds = (double) block.getLength().toMillis() / 1000d;
         return new ListTable.Builder(format)
-                .rows(map.entrySet().stream()
-                        .map(entry ->
-                                new ValueMap(format.namesAsArray(),
-                                        entry.getKey(),
-                                        entry.getValue(),
-                                        (double) entry.getValue().get() / block.getLength().toMillis() * 1000d,
-                                        Math.sqrt(entry.getValue().get()) / block.getLength().toMillis() * 1000d
-                                )
-                        )
+                .rows(IntStream.range(0, MAX_CHANNEL)
+                        .filter(i -> spectrum[i] != null)
+                        .mapToObj(i -> {
+                            long value = spectrum[i].get();
+                            return new ValueMap(
+                                    format.namesAsArray(),
+                                    i,
+                                    value,
+                                    (double) value / seconds,
+                                    Math.sqrt(value) / seconds
+                            );
+                        })
                 ).build();
     }
 
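Note: the per-channel histogram switches from a NavigableMap keyed by channel to a flat AtomicLong[MAX_CHANNEL] indexed by channel, with counters created lazily on first hit, and the block length in seconds is computed once instead of per entry. Each output row carries the count N, the rate N/t, and a sqrt(N)/t error (the usual Poisson estimate). A standalone sketch of that conversion (helper name hypothetical):

    // rate = N / t, error = sqrt(N) / t, with t the block length in seconds
    static double[] countRateWithError(long counts, double seconds) {
        return new double[]{counts / seconds, Math.sqrt(counts) / seconds};
    }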
@@ -156,6 +167,4 @@ public interface NumassAnalyzer {
     default long getLength(NumassBlock block, Meta config) {
         return analyze(block, config).getValue(LENGTH_KEY).numberValue().longValue();
     }
-
-
 }
@@ -54,40 +54,28 @@ Map spectra = allPoints.collectEntries {
     return [(point.voltage): analyzer.getSpectrum(point, analyzerMeta)]
 }
 
-def refereceVoltage = 18600d
+int binning = 20
+
+//subtracting reference point
+def refereceVoltage = 18600
 if (refereceVoltage) {
     def referencePoint = spectra[refereceVoltage]
-
     if (referencePoint) {
         spectra = spectra.findAll { it.key != refereceVoltage }.collectEntries {
             return [(it.key): NumassDataUtils.subtractSpectrum(it.getValue() as Table, referencePoint as Table)]
         }
     }
 }
 
-//println "Empty files:"
-//Collection<NMPoint> emptySpectra = NumassDataUtils.joinSpectra(
-//        StorageUtils.loaderStream(storage).filter{ it.key.matches("Empty.*")}.map {
-//            println it.key
-//            it.value
-//        }
-//)
-
-//emptySpectra = NumassDataUtils.substractReferencePoint(emptySpectra,18600);
-//
-//data = data.collect { point ->
-//    NMPoint ref = emptySpectra.find { it.u == point.u }
-//    if (ref) {
-//        println "substracting tritium background for point ${point.u}"
-//        NumassDataUtils.substractPoint(point, ref)
-//    } else {
-//        println "point ${point.u} skipped"
-//        point
-//    }
-//}
+//Apply binning
+if (binning) {
+    spectra = spectra.collectEntries {
+        [(it.key): NumassAnalyzer.spectrumWithBinning(it.value as Table, binning)];
+    }
+}
 
 //printing selected points
-def printPoint = { Map<Double, Table> points, int binning = 20, normalize = true ->
+def printPoint = { Map<Double, Table> points ->
     print "channel"
     points.each { print "\t${it.key}" }
     println()
@@ -113,3 +101,23 @@ println()
 //Table t = new UnderflowCorrection().fitAllPoints(data, 350, 550, 3100, 20);
 //ColumnedDataWriter.writeTable(System.out, t, "underflow parameters")
 
+//println "Empty files:"
+//Collection<NMPoint> emptySpectra = NumassDataUtils.joinSpectra(
+//        StorageUtils.loaderStream(storage).filter{ it.key.matches("Empty.*")}.map {
+//            println it.key
+//            it.value
+//        }
+//)
+
+//emptySpectra = NumassDataUtils.substractReferencePoint(emptySpectra,18600);
+//
+//data = data.collect { point ->
+//    NMPoint ref = emptySpectra.find { it.u == point.u }
+//    if (ref) {
+//        println "substracting tritium background for point ${point.u}"
+//        NumassDataUtils.substractPoint(point, ref)
+//    } else {
+//        println "point ${point.u} skipped"
+//        point
+//    }
+//}
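Note: the script now reads as a three-step pipeline: collect one spectrum per set voltage, subtract the 18600 V reference point, then apply 20-channel binning; the old commented-out tritium-background block is moved, still commented out, to the end of the file. A rough Java rendering of the same flow, assuming the APIs changed in this commit (helper name and map layout are illustrative):

    // subtract the reference spectrum from every point, then bin each result
    static void subtractAndBin(Map<Double, Table> spectra, double referenceVoltage, int binning) {
        Table reference = spectra.remove(referenceVoltage);
        if (reference != null) {
            spectra.replaceAll((u, table) -> NumassDataUtils.subtractSpectrum(table, reference));
        }
        spectra.replaceAll((u, table) -> NumassAnalyzer.spectrumWithBinning(table, binning));
    }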
@@ -8,6 +8,7 @@ package inr.numass.utils;
 import hep.dataforge.meta.Meta;
 import hep.dataforge.tables.ListTable;
 import hep.dataforge.tables.Table;
+import hep.dataforge.tables.TableTransform;
 import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Values;
 import inr.numass.data.api.NumassAnalyzer;
@@ -106,7 +107,12 @@ public class UnderflowCorrection {
         if (xHigh <= xLow) {
             throw new IllegalArgumentException("Wrong borders for underflow calculation");
         }
-        Table binned = NumassAnalyzer.spectrumWithBinning(spectrum, xLow, xHigh, binning);
+        Table binned = TableTransform.filter(
+                NumassAnalyzer.spectrumWithBinning(spectrum, binning),
+                CHANNEL_KEY,
+                xLow,
+                xHigh
+        );
 
         List<WeightedObservedPoint> points = binned.getRows()
                 .map(p -> new WeightedObservedPoint(
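Note: with the window gone from spectrumWithBinning, the underflow fit now bins the full spectrum once and then cuts it to [xLow, xHigh] with TableTransform.filter on CHANNEL_KEY, so the border guard above still rejects empty windows. For context, the commented-out line in the script above hints at how this class is driven (the numeric arguments are not explained anywhere in this commit):

    Table underflowParameters = new UnderflowCorrection().fitAllPoints(data, 350, 550, 3100, 20);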
@@ -324,7 +324,7 @@ class NumassLoaderView : View() {
                 PlottableData.plot(
                         seriesName,
                         XYAdapter(NumassAnalyzer.CHANNEL_KEY, valueAxis),
-                        NumassAnalyzer.spectrumWithBinning(getSpectrum(point), 0, 4000, binning)
+                        NumassAnalyzer.spectrumWithBinning(getSpectrum(point), binning)
                 ).apply {
                     configure(plottableConfig)
                 }.also {