Time analyzer action for numass

This commit is contained in:
Alexander Nozik 2017-09-21 20:13:05 +03:00
parent 9eaa316cc8
commit 0a58e4c351
10 changed files with 173 additions and 81 deletions

View File

@ -1,8 +1,13 @@
plugins {
id "org.jetbrains.kotlin.jvm" version "1.1.4-3" apply false
}
allprojects{
apply plugin: 'idea' apply plugin: 'idea'
}
subprojects { subprojects {
apply plugin: 'java' apply plugin: 'java'
apply plugin: 'idea'
group = 'inr.numass' group = 'inr.numass'
version = '1.0.0' version = '1.0.0'

View File

@ -10,6 +10,7 @@ import hep.dataforge.values.Value;
import hep.dataforge.values.Values; import hep.dataforge.values.Values;
import inr.numass.data.api.NumassPoint; import inr.numass.data.api.NumassPoint;
import inr.numass.data.api.NumassSet; import inr.numass.data.api.NumassSet;
import org.jetbrains.annotations.NotNull;
import java.util.Collection; import java.util.Collection;
import java.util.Map; import java.util.Map;
@ -141,6 +142,12 @@ public class NumassDataUtils {
return spectrumWithBinning(spectrum, binSize, -1, -1); return spectrumWithBinning(spectrum, binSize, -1, -1);
} }
@NotNull
public static SpectrumDataAdapter adapter() {
return new SpectrumDataAdapter("Uset", "CR", "CRerr", "Time");
}
// public static Collection<NumassPoint> joinSpectra(Stream<NumassSet> spectra) { // public static Collection<NumassPoint> joinSpectra(Stream<NumassSet> spectra) {
// Map<Double, NumassPoint> map = new LinkedHashMap<>(); // Map<Double, NumassPoint> map = new LinkedHashMap<>();
// spectra.forEach(datum -> { // spectra.forEach(datum -> {
@ -212,9 +219,6 @@ public class NumassDataUtils {
// return res.builder(); // return res.builder();
// } // }
// //
public static SpectrumDataAdapter adapter() {
return new SpectrumDataAdapter("Uset", "CR", "CRerr", "Time");
}
// //
// public static Table correctForDeadTime(ListTable data, double dtime) { // public static Table correctForDeadTime(ListTable data, double dtime) {
// return correctForDeadTime(data, adapter(), dtime); // return correctForDeadTime(data, adapter(), dtime);

View File

@ -5,7 +5,6 @@
*/ */
package inr.numass.data.api; package inr.numass.data.api;
import hep.dataforge.data.Data;
import hep.dataforge.meta.Metoid; import hep.dataforge.meta.Metoid;
import hep.dataforge.names.Named; import hep.dataforge.names.Named;
import hep.dataforge.providers.Provider; import hep.dataforge.providers.Provider;
@ -73,6 +72,7 @@ public interface NumassSet extends Named, Metoid, Iterable<NumassPoint>, Provide
/** /**
* List all points with given voltage * List all points with given voltage
*
* @param voltage * @param voltage
* @return * @return
*/ */
@ -95,7 +95,7 @@ public interface NumassSet extends Named, Metoid, Iterable<NumassPoint>, Provide
return getPoints().map(it -> Double.toString(it.getVoltage())); return getPoints().map(it -> Double.toString(it.getVoltage()));
} }
default Data<Table> getHvData() { default Optional<Table> getHvData() {
return Data.buildStatic(null); return Optional.empty();
} }
} }

View File

@ -15,7 +15,6 @@
*/ */
package inr.numass.data.storage; package inr.numass.data.storage;
import hep.dataforge.data.Data;
import hep.dataforge.exceptions.StorageException; import hep.dataforge.exceptions.StorageException;
import hep.dataforge.io.ColumnedDataReader; import hep.dataforge.io.ColumnedDataReader;
import hep.dataforge.io.envelopes.Envelope; import hep.dataforge.io.envelopes.Envelope;
@ -158,28 +157,21 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
} }
@Override @Override
public Data<Table> getHvData() { public Optional<Table> getHvData() {
Envelope hvEnvelope = getHVEnvelope(); return getHVEnvelope().map(hvEnvelope -> {
if (hvEnvelope == null) {
return Data.buildStatic(null);
} else {
return Data.generate(Table.class, hvEnvelope.meta(), () -> {
try { try {
return new ColumnedDataReader(hvEnvelope.getData().getStream(), "timestamp", "block", "value").toTable(); return new ColumnedDataReader(hvEnvelope.getData().getStream(), "timestamp", "block", "value").toTable();
} catch (IOException ex) { } catch (IOException ex) {
LoggerFactory.getLogger(getClass()).error("Failed to load HV data from file", ex); LoggerFactory.getLogger(getClass()).error("Failed to load HV data from file", ex);
return null; return null;
} }
});
} }
);
} }
private Envelope getHVEnvelope() { private Optional<Envelope> getHVEnvelope() {
if (getItems().containsKey(HV_FRAGMENT_NAME)) { return Optional.ofNullable(getItems().get(HV_FRAGMENT_NAME)).map(Supplier::get);
return getItems().get(HV_FRAGMENT_NAME).get();
} else {
return null;
}
} }
private Stream<Envelope> getPointEnvelopes() { private Stream<Envelope> getPointEnvelopes() {

View File

@ -1,5 +1,9 @@
apply plugin: 'groovy' plugins {
apply plugin: 'application' id 'groovy'
id 'application'
}
apply plugin: 'kotlin'
//if (!hasProperty('mainClass')) { //if (!hasProperty('mainClass')) {
// ext.mainClass = 'inr.numass.LaunchGrindShell' // ext.mainClass = 'inr.numass.LaunchGrindShell'
@ -8,22 +12,26 @@ mainClassName = 'inr.numass.LaunchGrindShell'
description = "Main numass project" description = "Main numass project"
compileKotlin {
kotlinOptions {
jvmTarget = "1.8"
}
}
compileTestKotlin {
kotlinOptions {
jvmTarget = "1.8"
}
}
dependencies { dependencies {
compile group: 'commons-cli', name: 'commons-cli', version: '1.+' compile group: 'commons-cli', name: 'commons-cli', version: '1.+'
compile group: 'commons-io', name: 'commons-io', version: '2.+' compile group: 'commons-io', name: 'commons-io', version: '2.+'
compile project(':numass-core') compile project(':numass-core')
compile "hep.dataforge:dataforge-minuit" //project(':dataforge-stat:dataforge-minuit') compile "hep.dataforge:dataforge-minuit" //project(':dataforge-stat:dataforge-minuit')
compile "hep.dataforge:grind-terminal" //project(':dataforge-grind:grind-terminal') compile "hep.dataforge:grind-terminal" //project(':dataforge-grind:grind-terminal')
compile "hep.dataforge:kmath"
} }
//task listActions(dependsOn: classes, type: JavaExec) {
// main "inr.numass.LaunchGrindShell"
// args "-lc"
// classpath = sourceSets.main.runtimeClasspath
// description "print a list of available actions as via -lc command line parameter"
// group "numass"
//}
task repl(dependsOn: classes, type: JavaExec) { task repl(dependsOn: classes, type: JavaExec) {
group "numass" group "numass"
main 'inr.numass.LaunchGrindShell' main 'inr.numass.LaunchGrindShell'
@ -58,25 +66,3 @@ task underflow(dependsOn: classes, type: JavaExec) {
classpath = sourceSets.main.runtimeClasspath classpath = sourceSets.main.runtimeClasspath
} }
/*
<library>
<CLASSES>
<root url="file://$MODULE_DIR$/../../../../dataforge/dataforge-plots/plots-jfc/libs" />
</CLASSES>
<JAVADOC />
<SOURCES />
<jarDirectory url="file://$MODULE_DIR$/../../../../dataforge/dataforge-plots/plots-jfc/libs" recursive="false" />
</library>
*/
idea {
module {
iml {
withXml {
def node = it.asNode()
def libNode = node.appendNode("library")
libNode.appendNode("CLASSES").appendNode("root", url: "file://\$MODULE_DIR\$/../../../../dataforge/dataforge-plots/plots-jfc/libs")
libNode.appendNode("jarDirectory", url: "file://\$MODULE_DIR\$/../../../../dataforge/dataforge-plots/plots-jfc/libs", recursive: "false")
}
}
}
}

View File

@ -16,14 +16,15 @@ import java.util.stream.LongStream
@CompileStatic @CompileStatic
class PointAnalyzer { class PointAnalyzer {
static TimeAnalyzer analyzer = new TimeAnalyzer(); static final TimeAnalyzer analyzer = new TimeAnalyzer();
static Histogram histogram(NumassBlock point, int loChannel = 0, int upChannel = 10000, double binSize = 0.5, int binNum = 500) { static Histogram histogram(NumassBlock point, int loChannel = 0, int upChannel = 10000, double binSize = 0.5, int binNum = 500) {
return UnivariateHistogram.buildUniform(0d, binSize * binNum, binSize) return UnivariateHistogram.buildUniform(0d, binSize * binNum, binSize)
.fill(analyzer.getEventsWithDelay(point, Grind.buildMeta("window.lo": loChannel, "window.up": upChannel)) .fill(
.mapToDouble { analyzer
it.value / 1000 as double .getEventsWithDelay(point, Grind.buildMeta("window.lo": loChannel, "window.up": upChannel))
}) .mapToDouble { it.value / 1000 as double }
)
} }
static Histogram histogram(LongStream stream, double binSize = 0.5, int binNum = 500) { static Histogram histogram(LongStream stream, double binSize = 0.5, int binNum = 500) {

View File

@ -0,0 +1,101 @@
package inr.numass.actions
import hep.dataforge.actions.OneToOneAction
import hep.dataforge.context.Context
import hep.dataforge.description.TypedActionDef
import hep.dataforge.kodex.buildMeta
import hep.dataforge.kodex.configure
import hep.dataforge.maths.histogram.UnivariateHistogram
import hep.dataforge.meta.Laminate
import hep.dataforge.plots.PlotManager
import hep.dataforge.plots.data.PlottableData
import hep.dataforge.tables.Table
import hep.dataforge.tables.ValueMap
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.api.NumassPoint
/**
 * Plot time analysis graphics.
 *
 * One-to-one action: takes a single [NumassPoint], analyzes the time intervals
 * (delays) between its events with a [TimeAnalyzer], and produces two plot
 * frames via the context's [PlotManager]:
 *  - "histogram": the distribution of inter-event delays;
 *  - "stat-method": the inferred count rate as a function of the dead-time
 *    cutoff t0, scanned from 1 to 150 (units presumably us -- TODO confirm).
 * The delay histogram is also returned as the action's result [Table].
 */
@TypedActionDef(name = "timeSpectrum", inputType = NumassPoint::class, outputType = Table::class)
class TimeAnalyzedAction : OneToOneAction<NumassPoint, Table>() {
// Shared analyzer instance reused across executions.
// NOTE(review): assumes TimeAnalyzer is stateless/thread-safe -- confirm.
private val analyzer = TimeAnalyzer();
override fun execute(context: Context, name: String, input: NumassPoint, inputMeta: Laminate): Table {
val log = getLog(context, name);
// Amplitude window (channel numbers) for accepted events.
val loChannel = inputMeta.getInt("window.lo", 500);
val upChannel = inputMeta.getInt("window.up", 10000);
val pm = context.getFeature(PlotManager::class.java);
//TODO use meta parameters
// Reference ("true") count rate, computed with a fixed t0 = 30e3
// (30 us if t0 is in ns -- TODO confirm units) dead-time cutoff.
val trueCR = analyzer.analyze(input, buildMeta {
"t0" to 30e3
"window.lo" to loChannel
"window.up" to upChannel
}).getDouble("cr")
val binNum = inputMeta.getInt("binNum", 1000);
// Default bin size makes the full histogram span ~10 mean inter-event
// periods (10 / trueCR) unless overridden by "binSize" in meta.
val binSize = inputMeta.getDouble("binSize", 1.0 / trueCR * 10 / binNum)
// Histogram of inter-event delays; raw delay values are divided by 1000
// (presumably ns -> us conversion -- TODO confirm).
val histogram = UnivariateHistogram.buildUniform(0.0, binSize * binNum, binSize)
.fill(analyzer
.getEventsWithDelay(input, inputMeta)
.mapToDouble { it.value / 1000.0 }
).asTable()
//.histogram(input, loChannel, upChannel, binSize, binNum).asTable();
log.report("Finished histogram calculation...");
val histPlot = pm.getPlotFrame(getName(), "histogram");
// NOTE(review): "xAxis" is configured twice below; the second block
// ("type" to "log") looks like it may have been intended for "yAxis"
// (log-scale counts on a delay histogram) -- confirm before relying on it.
histPlot.configure {
node("xAxis") {
"axisTitle" to "delay"
"axisUnits" to "us"
}
node("xAxis") {
"type" to "log"
}
}
// Step-style line plot of the delay distribution, keyed by the data name;
// the adapter maps the histogram's "count" column onto the y value.
histPlot.add(PlottableData(name)
.configure {
"showLine" to true
"showSymbol" to false
"showErrors" to false
"connectionType" to "step"
node("adapter") {
"y.value" to "count"
}
}.fillData(histogram)
)
log.report("The expected count rate for 30 us delay is $trueCR")
// Scan the dead-time cutoff t0 over 1000, 2000, ..., 150000 and re-run the
// analysis for each; x is plotted as t0 / 1000 (same unit conversion as
// the histogram above), y is the resulting count rate with its error.
val statPlotPoints = (1..150).map { 1000 * it }.map { t0 ->
val result = analyzer.analyze(input, buildMeta {
"t0" to t0
"window.lo" to loChannel
"window.up" to upChannel
})
ValueMap.ofMap(
mapOf(
"x" to t0 / 1000,
"y" to result.getDouble("cr"),
"y.err" to result.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
)
);
}
// Count-rate-vs-cutoff curve on a separate frame.
pm.getPlotFrame(getName(), "stat-method").add(
PlottableData(name).configure {
"showLine" to true
"thickness" to 4
}.fillData(statPlotPoints)
)
// The histogram table is the action output; the plots are side effects.
return histogram;
}
}

View File

@ -1,8 +1,9 @@
plugins{ plugins{
id "org.jetbrains.kotlin.jvm" version '1.1.4'
id "application" id "application"
} }
apply plugin: 'kotlin'
repositories { repositories {
mavenCentral() mavenCentral()
} }

View File

@ -1,10 +1,10 @@
package inr.numass.viewer package inr.numass.viewer
import hep.dataforge.data.Data
import hep.dataforge.meta.Meta import hep.dataforge.meta.Meta
import hep.dataforge.tables.Table import hep.dataforge.tables.Table
import inr.numass.data.api.NumassPoint import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet import inr.numass.data.api.NumassSet
import java.util.*
import java.util.stream.Collectors import java.util.stream.Collectors
import java.util.stream.Stream import java.util.stream.Stream
@ -16,7 +16,7 @@ class NumassDataCache(val data: NumassSet): NumassSet {
private val cachedDescription: String by lazy { data.description } private val cachedDescription: String by lazy { data.description }
private val cachedMeta: Meta by lazy { data.meta } private val cachedMeta: Meta by lazy { data.meta }
private val cachedPoints: List<NumassPoint> by lazy { data.points.collect(Collectors.toList()) } private val cachedPoints: List<NumassPoint> by lazy { data.points.collect(Collectors.toList()) }
private val hv: Table by lazy { data.hvData.get() } private val hv: Optional<Table> by lazy { data.hvData }
override fun getPoints(): Stream<NumassPoint> { override fun getPoints(): Stream<NumassPoint> {
@ -35,7 +35,7 @@ class NumassDataCache(val data: NumassSet): NumassSet {
return data.name; return data.name;
} }
override fun getHvData(): Data<Table> { override fun getHvData(): Optional<Table> {
return Data.buildStatic(hv); return hv;
} }
} }

View File

@ -241,8 +241,9 @@ class NumassLoaderView : View() {
private fun updateHV(data: NumassSet) { private fun updateHV(data: NumassSet) {
hvPlotData.forEach { it.clear() } hvPlotData.forEach { it.clear() }
runAsync { runAsync {
data.hvData.get() data.hvData
} ui { } ui { hvData ->
hvData.ifPresent {
for (dp in it) { for (dp in it) {
val block = dp.getString("block", "default") val block = dp.getString("block", "default")
if (!hvPlotData.has(block)) { if (!hvPlotData.has(block)) {
@ -252,6 +253,7 @@ class NumassLoaderView : View() {
} }
hvPlot.plot.addAll(hvPlotData) hvPlot.plot.addAll(hvPlotData)
} }
}
} }