Revising numass scripts

darksnake 2017-07-28 22:29:32 +03:00
parent c84db97e7e
commit c7369b6b34
3 changed files with 88 additions and 38 deletions

File: NumassDataUtils.java (package inr.numass.data)

@@ -2,12 +2,19 @@ package inr.numass.data;

 import hep.dataforge.meta.Meta;
 import hep.dataforge.meta.MetaBuilder;
+import hep.dataforge.tables.ListTable;
+import hep.dataforge.tables.Table;
+import hep.dataforge.tables.TableFormat;
+import hep.dataforge.tables.TableFormatBuilder;
 import inr.numass.data.api.NumassPoint;
 import inr.numass.data.api.NumassSet;

 import java.util.Collection;
 import java.util.stream.Stream;

+import static hep.dataforge.tables.XYAdapter.*;
+import static inr.numass.data.api.NumassAnalyzer.*;
+
 /**
  * Created by darksnake on 30-Jan-17.
  */
@@ -34,6 +41,28 @@ public class NumassDataUtils {
         };
     }

+    /**
+     * Subtract reference spectrum.
+     *
+     * @param sp1
+     * @param sp2
+     * @return
+     */
+    public static Table subtractSpectrum(Table sp1, Table sp2) {
+        TableFormat format = new TableFormatBuilder()
+                .addNumber(CHANNEL_KEY, X_VALUE_KEY)
+                .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
+                .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
+                .build();
+
+        ListTable.Builder builder = new ListTable.Builder(format);
+        for (int i = 0; i < sp1.size(); i++) {
+            double value = sp1.getDouble(COUNT_RATE_KEY, i) - sp2.getDouble(COUNT_RATE_KEY, i);
+            double error = Math.sqrt(Math.pow(sp1.getDouble(COUNT_RATE_ERROR_KEY, i), 2d)
+                    + Math.pow(sp2.getDouble(COUNT_RATE_ERROR_KEY, i), 2d));
+            builder.row(sp1.get(CHANNEL_KEY, i).intValue(), value, error);
+        }
+        return builder.build();
+    }
+
 //    public static Collection<NumassPoint> joinSpectra(Stream<NumassSet> spectra) {
 //        Map<Double, NumassPoint> map = new LinkedHashMap<>();
 //        spectra.forEach(datum -> {
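The new subtractSpectrum assumes both tables share the same channel binning and row order: it subtracts the count rates channel by channel and combines the two independent statistical errors in quadrature, error = sqrt(e1^2 + e2^2). A standalone Groovy check of that arithmetic for a single channel (all numbers are invented):

    // Quadrature error propagation as in subtractSpectrum, for one channel:
    def r1 = 120.0, e1 = 3.0   // hypothetical count rate and error in sp1
    def r2 = 95.0, e2 = 2.0    // the same channel of the reference spectrum sp2
    def value = r1 - r2
    def error = Math.sqrt(e1**2 + e2**2)
    println "value = $value, error = ${error.round(2)}"   // value = 25.0, error = 3.61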

File: Groovy script (package inr.numass.scripts)

@@ -6,34 +6,64 @@
 package inr.numass.scripts

-import hep.dataforge.io.ColumnedDataWriter
+import hep.dataforge.grind.Grind
+import hep.dataforge.meta.Meta
 import hep.dataforge.storage.commons.StorageUtils
 import hep.dataforge.tables.Table
 import inr.numass.data.NumassDataUtils
+import inr.numass.data.analyzers.TimeAnalyzer
+import inr.numass.data.api.NumassAnalyzer
 import inr.numass.data.api.NumassPoint
-import inr.numass.data.api.NumassSet
+import inr.numass.data.api.SimpleNumassPoint
 import inr.numass.data.storage.NumassStorage
 import inr.numass.data.storage.NumassStorageFactory
-import inr.numass.utils.UnderflowCorrection
+
+import java.util.stream.Collectors
+
+import static inr.numass.data.api.NumassAnalyzer.CHANNEL_KEY
+import static inr.numass.data.api.NumassAnalyzer.COUNT_RATE_KEY

+//Defining root directory
 //File rootDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_1")
 //File rootDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_2_wide")
 //File rootDir = new File("D:\\Work\\Numass\\data\\2017_01\\Fill_2_wide")
-File rootDir = new File("D:\\Work\\Numass\\data\\2017_05\\Fill_1")
+File rootDir = new File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")

+//creating storage instance
 NumassStorage storage = NumassStorageFactory.buildLocal(rootDir);

-NumassSet data = NumassDataUtils.join(
-        "data",
-        StorageUtils.loaderStream(storage)
-                .filter { it.key.matches("set_.{1,3}") }
-                .map {
-                    println "loading ${it.key}"
-                    it.value
-                }.collect { (NumassSet) it }
-)
-
-data = NumassDataUtils.substractReferencePoint(data, 18600d);
+//Reading points
+Map<Double, List<NumassPoint>> allPoints = StorageUtils
+        .loaderStream(storage)
+        .filter { it.key.matches("set_.{1,3}") }
+        .map {
+            println "loading ${it.key}"
+            it.value
+        }
+        .flatMap { it.points }
+        .collect(Collectors.groupingBy { it.voltage })
+
+Meta analyzerMeta = Grind.buildMeta(t0: 3e4)
+NumassAnalyzer analyzer = new TimeAnalyzer()
+
+//creating spectra
+Map spectra = allPoints.collectEntries {
+    def point = new SimpleNumassPoint(it.key, it.value)
+    println "generating spectrum for ${point.voltage}"
+    return [(point.voltage): analyzer.getSpectrum(point, analyzerMeta)]
+}
+
+//subtracting reference point
+def referenceVoltage = 18600d
+def referencePoint = spectra[referenceVoltage]
+if (referencePoint) {
+    spectra = spectra.findAll { it.key != referenceVoltage }.collectEntries {
+        return [(it.key): NumassDataUtils.subtractSpectrum(it.getValue() as Table, referencePoint as Table)]
+    }
+}

 //println "Empty files:"
 //Collection<NMPoint> emptySpectra = NumassDataUtils.joinSpectra(
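The rewritten pipeline skips the intermediate NumassSet entirely: every matching set is flattened into its points, the points are grouped by HV voltage, each group is wrapped into a SimpleNumassPoint, and a TimeAnalyzer produces one spectrum per voltage (t0: 3e4 is presumably a time cut for the analyzer; its exact semantics are not visible in this diff). A minimal Groovy sketch of the grouping step alone, with plain doubles standing in for NumassPoint objects:

    import java.util.stream.Collectors

    // Two hypothetical "sets", each listing the voltages of its points
    def sets = [[14000.0d, 18600.0d], [14000.0d, 18600.0d]]
    Map<Double, List<Double>> byVoltage = sets.stream()
            .flatMap { it.stream() }
            .collect(Collectors.groupingBy { it })
    // e.g. [14000.0: [14000.0, 14000.0], 18600.0: [18600.0, 18600.0]] (map order not guaranteed)
    println byVoltage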
@@ -56,39 +86,30 @@ data = NumassDataUtils.substractReferencePoint(data, 18600d);
 //    }
 //}

-def printPoint(Iterable<NumassPoint> data, List<Double> us, int binning = 20, normalize = true) {
-    List<NumassPoint> points = data.findAll { it.voltage in us }.sort { it.voltage }
-
-    Map spectra = points.first().getMap(binning, normalize).collectEntries { key, value ->
-        [key, [value]]
-    };
-
+//printing selected points
+def printPoint = { Map<Double, Table> points, int binning = 20, normalize = true ->
     print "channel"
-    points.eachWithIndex { it, index ->
-        print "\t${it.voltage}"
-        it.getMap(binning, normalize).each { k, v ->
-            spectra[k].add(v)
-        }
-    }
-
+    points.each { print "\t${it.key}" }
     println()
-
-    spectra.each { key, value ->
-        print key
-        value.each {
-            print "\t${it}"
+    def firstPoint = points.values().first()
+    (0..<firstPoint.size()).each { i ->
+        print firstPoint.get(CHANNEL_KEY, i).intValue()
+        points.values().each {
+            print "\t${it.get(COUNT_RATE_KEY, i).doubleValue()}"
         }
         println()
     }
-    println()
 }

 println "\n# spectra\n"

 //printPoint(data, [16200d, 16400d, 16800d, 17000d, 17200d, 17700d])
-printPoint(data, [14000d, 14500d, 15000d, 15500d, 16500d])
+printPoint(spectra.findAll { it.key in [16200d, 16400d, 16800d, 17000d, 17200d, 17700d] })

 println()

-Table t = new UnderflowCorrection().fitAllPoints(data, 350, 550, 3100, 20);
-ColumnedDataWriter.writeTable(System.out, t, "underflow parameters")
+//Table t = new UnderflowCorrection().fitAllPoints(data, 350, 550, 3100, 20);
+//ColumnedDataWriter.writeTable(System.out, t, "underflow parameters")
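printPoint now consumes the prepared Map<Double, Table> directly and emits a tab-separated matrix: a channel column followed by one count-rate column per voltage (the binning and normalize parameters remain in the signature but are no longer used in the body). A toy Groovy sketch reproducing just that layout, with literal numbers instead of dataforge tables:

    // Tab-separated layout produced by printPoint (all numbers invented):
    def spectra = [16200.0d: [1.10, 0.95], 16400.0d: [1.05, 0.90]]  // voltage -> rate per channel
    print "channel"
    spectra.each { print "\t${it.key}" }
    println()
    (0..<2).each { i ->
        print i
        spectra.values().each { print "\t${it[i]}" }
        println()
    }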

File: NumassUtils.java (package inr.numass.utils)

@@ -15,8 +15,8 @@
  */
 package inr.numass.utils;

-import hep.dataforge.io.envelopes.DefaultEnvelopeWriter;
 import hep.dataforge.io.envelopes.EnvelopeBuilder;
+import hep.dataforge.io.envelopes.TaglessEnvelopeType;
 import hep.dataforge.io.markup.Markedup;
 import hep.dataforge.io.markup.SimpleMarkupRenderer;
 import hep.dataforge.meta.Meta;

@@ -99,7 +99,7 @@ public class NumassUtils {
     public static void writeEnvelope(OutputStream stream, Meta meta, Consumer<OutputStream> dataWriter) {
         //TODO replace by text envelope when it is ready
         try {
-            new DefaultEnvelopeWriter().write(
+            TaglessEnvelopeType.instance.getWriter().write(
                     stream,
                     new EnvelopeBuilder()
                             .setMeta(meta)
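The only functional change here is that writeEnvelope now obtains its writer from TaglessEnvelopeType.instance instead of constructing a DefaultEnvelopeWriter directly, so the on-disk format follows the tagless envelope type. A hedged Groovy sketch of a possible call site (the output file name and meta content are invented; it assumes Grind.buildMeta and closure-to-Consumer coercion behave as elsewhere in this codebase):

    import hep.dataforge.grind.Grind
    import inr.numass.utils.NumassUtils
    import java.util.function.Consumer

    def meta = Grind.buildMeta(type: "spectrum")   // hypothetical meta content
    new File("out.df").withOutputStream { stream ->
        NumassUtils.writeEnvelope(stream, meta, { OutputStream out ->
            out.write("payload".bytes)             // hypothetical payload writer
        } as Consumer)
    }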