New numass data structures. Almost finished. Not tested. Cleanup of unused scripts.

Alexander Nozik 2017-07-16 22:24:32 +03:00
parent 0ffefe3a3b
commit 8ce3654757
3 changed files with 19 additions and 11 deletions

View File

@@ -10,6 +10,8 @@ import hep.dataforge.io.ColumnedDataWriter
import hep.dataforge.storage.commons.StorageUtils
import hep.dataforge.tables.Table
import inr.numass.data.NumassDataUtils
+import inr.numass.data.api.NumassPoint
+import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassStorage
import inr.numass.data.storage.NumassStorageFactory
import inr.numass.utils.UnderflowCorrection
@@ -21,13 +23,14 @@ File rootDir = new File("D:\\Work\\Numass\\data\\2017_05\\Fill_1")
NumassStorage storage = NumassStorageFactory.buildLocal(rootDir);
-Collection<NumassPointImpl> data = NumassDataUtils.joinSpectra(
+NumassSet data = NumassDataUtils.join(
"data",
StorageUtils.loaderStream(storage)
.filter { it.key.matches("set_.{1,3}") }
.map {
println "loading ${it.key}"
it.value
-}.map { (NumassData) it }
+}.collect { (NumassSet) it }
)
data = NumassDataUtils.substractReferencePoint(data, 18600d);
@@ -53,8 +56,8 @@ data = NumassDataUtils.substractReferencePoint(data, 18600d);
// }
//}
-def printPoint(Iterable<NumassPointImpl> data, List<Double> us, int binning = 20, normalize = true) {
-List<NumassPointImpl> points = data.findAll { it.voltage in us }.sort { it.voltage }
+def printPoint(Iterable<NumassPoint> data, List<Double> us, int binning = 20, normalize = true) {
+List<NumassPoint> points = data.findAll { it.voltage in us }.sort { it.voltage }
Map spectra = points.first().getMap(binning, normalize).collectEntries { key, value ->
[key, [value]]
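
For reference, the data-loading fragment of this script reads roughly as follows after the commit. This is a sketch assembled from the added and context lines above (not a tested build); the storage path and the set_ pattern are the ones the script already uses, and only calls visible in the diff are assumed.

// Sketch (untested): post-commit form of the data-loading fragment
import hep.dataforge.storage.commons.StorageUtils
import inr.numass.data.NumassDataUtils
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassStorage
import inr.numass.data.storage.NumassStorageFactory

File rootDir = new File("D:\\Work\\Numass\\data\\2017_05\\Fill_1")

// Open the local storage and join all matching "set_*" loaders into one NumassSet
NumassStorage storage = NumassStorageFactory.buildLocal(rootDir)
NumassSet data = NumassDataUtils.join(
        "data",
        StorageUtils.loaderStream(storage)
                .filter { it.key.matches("set_.{1,3}") }
                .map {
                    println "loading ${it.key}"
                    it.value
                }.collect { (NumassSet) it }
)

// Subtract the 18600 V reference point, as in the hunk above
data = NumassDataUtils.substractReferencePoint(data, 18600d)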

View File

@@ -2,12 +2,14 @@ package inr.numass.scripts.times
import hep.dataforge.context.Context
import hep.dataforge.context.Global
+import hep.dataforge.grind.Grind
import hep.dataforge.grind.GrindShell
import hep.dataforge.grind.helpers.PlotHelper
import hep.dataforge.plots.fx.FXPlotManager
import inr.numass.NumassPlugin
import inr.numass.data.PointAnalyzer
import inr.numass.data.analyzers.TimeAnalyzer
+import inr.numass.data.api.MetaBlock
import inr.numass.data.api.NumassPoint
import inr.numass.data.storage.NumassDataLoader
import inr.numass.data.storage.NumassStorage
@@ -30,19 +32,21 @@ new GrindShell(ctx).eval {
def pattern = "set_.{1,2}"
-List<NumassDataLoader> loaders = storage.loaders().findAll{it.name.matches(pattern)}.collect{it as NumassDataLoader}
+List<NumassDataLoader> loaders = storage.loaders().findAll { it.name.matches(pattern) }.collect {
+it as NumassDataLoader
+}
println "Found ${loaders.size()} loaders matching pattern"
def hv = 16000.toString();
-List<NumassPoint> points = loaders.collect { loader -> loader.optPoint(hv).get()}
+List<NumassPoint> points = loaders.collect { loader -> loader.optPoint(hv).get() }
def loChannel = 400;
def upChannel = 800;
-def chain = new TimeAnalyzer().timeChain(loChannel,upChannel, points as NumassPoint[])
+def chain = new TimeAnalyzer().timeChain(new MetaBlock(points), Grind.buildMeta("window.lo": loChannel, "window.up": upChannel))
-def histogram = PointAnalyzer.histogram(chain, 5e-6,500).asTable();
+def histogram = PointAnalyzer.histogram(chain, 5e-6, 500).asTable();
println "finished histogram calculation..."
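
Likewise, a sketch of the time-analysis fragment after the commit, assembled from the added and context lines above: the point selection is unchanged, while timeChain now takes a MetaBlock of points plus a Grind-built meta carrying the window bounds instead of the bare channel arguments. The fragment runs inside the GrindShell eval block shown in the hunk header, and storage is opened earlier in the script (not shown here); only calls visible in the diff are assumed, and the sketch is untested.

// Sketch (untested): post-commit form of the time-analysis fragment
import hep.dataforge.grind.Grind
import inr.numass.data.PointAnalyzer
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.MetaBlock
import inr.numass.data.api.NumassPoint
import inr.numass.data.storage.NumassDataLoader

// 'storage' is opened earlier in the script (not shown in these hunks)
def pattern = "set_.{1,2}"
List<NumassDataLoader> loaders = storage.loaders().findAll { it.name.matches(pattern) }.collect {
    it as NumassDataLoader
}
println "Found ${loaders.size()} loaders matching pattern"

// Take the same HV point from every set
def hv = 16000.toString()
List<NumassPoint> points = loaders.collect { loader -> loader.optPoint(hv).get() }

def loChannel = 400
def upChannel = 800

// New signature: the points are wrapped in a MetaBlock and the channel window is passed as meta
def chain = new TimeAnalyzer().timeChain(new MetaBlock(points), Grind.buildMeta("window.lo": loChannel, "window.up": upChannel))
def histogram = PointAnalyzer.histogram(chain, 5e-6, 500).asTable()
println "finished histogram calculation..."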

View File

@@ -13,6 +13,7 @@ import hep.dataforge.server.storage.StorageRatpackHandler;
import hep.dataforge.storage.api.ObjectLoader;
import hep.dataforge.storage.api.PointLoader;
import hep.dataforge.storage.api.Storage;
+import inr.numass.data.api.NumassSet;
import org.slf4j.LoggerFactory;
import ratpack.handling.Context;
@@ -45,7 +46,7 @@ public class NumassStorageHandler extends StorageRatpackHandler {
@Override
@SuppressWarnings("unchecked")
protected void renderObjects(Context ctx, ObjectLoader<?> loader) {
-if (loader instanceof NumassData) {
+if (loader instanceof NumassSet) {
} else if (NumassRun.RUN_NOTES.equals(loader.getName())) {
try {