Time analyzer action for numass

This commit is contained in:
Alexander Nozik 2017-09-21 20:13:05 +03:00
parent 9eaa316cc8
commit 0a58e4c351
10 changed files with 173 additions and 81 deletions

View File

@ -1,8 +1,13 @@
apply plugin: 'idea'
plugins {
id "org.jetbrains.kotlin.jvm" version "1.1.4-3" apply false
}
allprojects{
apply plugin: 'idea'
}
subprojects {
apply plugin: 'java'
apply plugin: 'idea'
group = 'inr.numass'
version = '1.0.0'

View File

@ -10,6 +10,7 @@ import hep.dataforge.values.Value;
import hep.dataforge.values.Values;
import inr.numass.data.api.NumassPoint;
import inr.numass.data.api.NumassSet;
import org.jetbrains.annotations.NotNull;
import java.util.Collection;
import java.util.Map;
@ -141,6 +142,12 @@ public class NumassDataUtils {
return spectrumWithBinning(spectrum, binSize, -1, -1);
}
@NotNull
public static SpectrumDataAdapter adapter() {
    // Standard spectrum table columns: set voltage, count rate, count-rate error, acquisition time.
    SpectrumDataAdapter spectrumAdapter = new SpectrumDataAdapter("Uset", "CR", "CRerr", "Time");
    return spectrumAdapter;
}
// public static Collection<NumassPoint> joinSpectra(Stream<NumassSet> spectra) {
// Map<Double, NumassPoint> map = new LinkedHashMap<>();
// spectra.forEach(datum -> {
@ -212,9 +219,6 @@ public class NumassDataUtils {
// return res.builder();
// }
//
// NOTE(review): an identical adapter() factory also appears earlier in this file —
// confirm only one definition survives, or the class will not compile.
// Column names: set voltage ("Uset"), count rate ("CR"), its error ("CRerr"), acquisition time ("Time").
public static SpectrumDataAdapter adapter() {
return new SpectrumDataAdapter("Uset", "CR", "CRerr", "Time");
}
//
// public static Table correctForDeadTime(ListTable data, double dtime) {
// return correctForDeadTime(data, adapter(), dtime);

View File

@ -5,7 +5,6 @@
*/
package inr.numass.data.api;
import hep.dataforge.data.Data;
import hep.dataforge.meta.Metoid;
import hep.dataforge.names.Named;
import hep.dataforge.providers.Provider;
@ -73,10 +72,11 @@ public interface NumassSet extends Named, Metoid, Iterable<NumassPoint>, Provide
/**
* List all points with given voltage
*
* @param voltage
* @return
*/
default List<NumassPoint> listPoints(double voltage){
default List<NumassPoint> listPoints(double voltage) {
return getPoints().filter(it -> it.getVoltage() == voltage).collect(Collectors.toList());
}
@ -95,7 +95,7 @@ public interface NumassSet extends Named, Metoid, Iterable<NumassPoint>, Provide
return getPoints().map(it -> Double.toString(it.getVoltage()));
}
default Data<Table> getHvData() {
return Data.buildStatic(null);
default Optional<Table> getHvData() {
return Optional.empty();
}
}

View File

@ -15,7 +15,6 @@
*/
package inr.numass.data.storage;
import hep.dataforge.data.Data;
import hep.dataforge.exceptions.StorageException;
import hep.dataforge.io.ColumnedDataReader;
import hep.dataforge.io.envelopes.Envelope;
@ -158,28 +157,21 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
}
@Override
public Data<Table> getHvData() {
Envelope hvEnvelope = getHVEnvelope();
if (hvEnvelope == null) {
return Data.buildStatic(null);
} else {
return Data.generate(Table.class, hvEnvelope.meta(), () -> {
try {
return new ColumnedDataReader(hvEnvelope.getData().getStream(), "timestamp", "block", "value").toTable();
} catch (IOException ex) {
LoggerFactory.getLogger(getClass()).error("Failed to load HV data from file", ex);
return null;
public Optional<Table> getHvData() {
return getHVEnvelope().map(hvEnvelope -> {
try {
return new ColumnedDataReader(hvEnvelope.getData().getStream(), "timestamp", "block", "value").toTable();
} catch (IOException ex) {
LoggerFactory.getLogger(getClass()).error("Failed to load HV data from file", ex);
return null;
}
}
});
}
);
}
private Envelope getHVEnvelope() {
if (getItems().containsKey(HV_FRAGMENT_NAME)) {
return getItems().get(HV_FRAGMENT_NAME).get();
} else {
return null;
}
/**
 * Looks up the high-voltage data fragment among this loader's items.
 *
 * @return the HV envelope, or {@link Optional#empty()} when no fragment named
 *         {@code HV_FRAGMENT_NAME} is present.
 */
private Optional<Envelope> getHVEnvelope() {
// Absent key -> empty Optional; otherwise resolve the lazily-supplied envelope.
return Optional.ofNullable(getItems().get(HV_FRAGMENT_NAME)).map(Supplier::get);
}
private Stream<Envelope> getPointEnvelopes() {
@ -222,7 +214,7 @@ public class NumassDataLoader extends AbstractLoader implements ObjectLoader<Env
@Override
public Instant getStartTime() {
return meta.optValue("start_time").map(Value::timeValue).orElseGet(()->NumassSet.super.getStartTime());
return meta.optValue("start_time").map(Value::timeValue).orElseGet(() -> NumassSet.super.getStartTime());
}
@Override

View File

@ -1,5 +1,9 @@
apply plugin: 'groovy'
apply plugin: 'application'
plugins {
id 'groovy'
id 'application'
}
apply plugin: 'kotlin'
//if (!hasProperty('mainClass')) {
// ext.mainClass = 'inr.numass.LaunchGrindShell'
@ -8,22 +12,26 @@ mainClassName = 'inr.numass.LaunchGrindShell'
description = "Main numass project"
// Emit JVM 1.8 bytecode for both main and test Kotlin compilation,
// matching the Java toolchain used by the rest of the build.
compileKotlin {
kotlinOptions {
jvmTarget = "1.8"
}
}
compileTestKotlin {
kotlinOptions {
jvmTarget = "1.8"
}
}
dependencies {
// Command-line parsing and common I/O utilities (open-ended minor versions).
compile group: 'commons-cli', name: 'commons-cli', version: '1.+'
compile group: 'commons-io', name: 'commons-io', version: '2.+'
// Core numass data model from this multi-project build.
compile project(':numass-core')
// dataforge artifacts resolved from the repository; the commented project(...) forms
// are the equivalent composite-build references.
compile "hep.dataforge:dataforge-minuit" //project(':dataforge-stat:dataforge-minuit')
compile "hep.dataforge:grind-terminal" //project(':dataforge-grind:grind-terminal')
compile "hep.dataforge:kmath"
}
//task listActions(dependsOn: classes, type: JavaExec) {
// main "inr.numass.LaunchGrindShell"
// args "-lc"
// classpath = sourceSets.main.runtimeClasspath
// description "print a list of available actions as via -lc command line parameter"
// group "numass"
//}
task repl(dependsOn: classes, type: JavaExec) {
group "numass"
main 'inr.numass.LaunchGrindShell'
@ -58,25 +66,3 @@ task underflow(dependsOn: classes, type: JavaExec) {
classpath = sourceSets.main.runtimeClasspath
}
/*
<library>
<CLASSES>
<root url="file://$MODULE_DIR$/../../../../dataforge/dataforge-plots/plots-jfc/libs" />
</CLASSES>
<JAVADOC />
<SOURCES />
<jarDirectory url="file://$MODULE_DIR$/../../../../dataforge/dataforge-plots/plots-jfc/libs" recursive="false" />
</library>
*/
// Customize the generated IntelliJ .iml: register the plots-jfc jar directory as a
// module library (replaces the hand-written <library> XML kept in the comment above).
idea {
module {
iml {
withXml {
def node = it.asNode()
def libNode = node.appendNode("library")
// CLASSES root and non-recursive jarDirectory both point at the plots-jfc libs folder.
libNode.appendNode("CLASSES").appendNode("root", url: "file://\$MODULE_DIR\$/../../../../dataforge/dataforge-plots/plots-jfc/libs")
libNode.appendNode("jarDirectory", url: "file://\$MODULE_DIR\$/../../../../dataforge/dataforge-plots/plots-jfc/libs", recursive: "false")
}
}
}
}

View File

@ -16,14 +16,15 @@ import java.util.stream.LongStream
@CompileStatic
class PointAnalyzer {
static TimeAnalyzer analyzer = new TimeAnalyzer();
static final TimeAnalyzer analyzer = new TimeAnalyzer();
static Histogram histogram(NumassBlock point, int loChannel = 0, int upChannel = 10000, double binSize = 0.5, int binNum = 500) {
return UnivariateHistogram.buildUniform(0d, binSize * binNum, binSize)
.fill(analyzer.getEventsWithDelay(point, Grind.buildMeta("window.lo": loChannel, "window.up": upChannel))
.mapToDouble {
it.value / 1000 as double
})
.fill(
analyzer
.getEventsWithDelay(point, Grind.buildMeta("window.lo": loChannel, "window.up": upChannel))
.mapToDouble { it.value / 1000 as double }
)
}
static Histogram histogram(LongStream stream, double binSize = 0.5, int binNum = 500) {

View File

@ -0,0 +1,101 @@
package inr.numass.actions
import hep.dataforge.actions.OneToOneAction
import hep.dataforge.context.Context
import hep.dataforge.description.TypedActionDef
import hep.dataforge.kodex.buildMeta
import hep.dataforge.kodex.configure
import hep.dataforge.maths.histogram.UnivariateHistogram
import hep.dataforge.meta.Laminate
import hep.dataforge.plots.PlotManager
import hep.dataforge.plots.data.PlottableData
import hep.dataforge.tables.Table
import hep.dataforge.tables.ValueMap
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassAnalyzer
import inr.numass.data.api.NumassPoint
/**
 * Plot time analysis graphics.
 *
 * One-to-one action that, for each [NumassPoint]:
 *  1. estimates a reference count rate using a fixed t0 = 30 us dead-time cut;
 *  2. builds a histogram of inter-event delays and renders it to the "histogram" frame;
 *  3. scans t0 from 1 us to 150 us and plots count rate vs t0 on the "stat-method" frame.
 *
 * Returns the delay histogram as a [Table].
 */
@TypedActionDef(name = "timeSpectrum", inputType = NumassPoint::class, outputType = Table::class)
class TimeAnalyzedAction : OneToOneAction<NumassPoint, Table>() {
// Shared across invocations; assumed stateless/thread-safe — TODO confirm TimeAnalyzer contract
private val analyzer = TimeAnalyzer();
override fun execute(context: Context, name: String, input: NumassPoint, inputMeta: Laminate): Table {
val log = getLog(context, name);
// Amplitude window (channel numbers) used for event selection throughout.
val loChannel = inputMeta.getInt("window.lo", 500);
val upChannel = inputMeta.getInt("window.up", 10000);
val pm = context.getFeature(PlotManager::class.java);
//TODO use meta parameters
// Reference "true" count rate with a 30e3 (presumably ns, i.e. 30 us — confirm units) t0 cut.
val trueCR = analyzer.analyze(input, buildMeta {
"t0" to 30e3
"window.lo" to loChannel
"window.up" to upChannel
}).getDouble("cr")
val binNum = inputMeta.getInt("binNum", 1000);
// Default bin size spreads ~10 mean inter-event intervals over the whole histogram range.
val binSize = inputMeta.getDouble("binSize", 1.0 / trueCR * 10 / binNum)
// Histogram of delays between events; delay values divided by 1000 (ns -> us, presumably).
val histogram = UnivariateHistogram.buildUniform(0.0, binSize * binNum, binSize)
.fill(analyzer
.getEventsWithDelay(input, inputMeta)
.mapToDouble { it.value / 1000.0 }
).asTable()
//.histogram(input, loChannel, upChannel, binSize, binNum).asTable();
log.report("Finished histogram calculation...");
val histPlot = pm.getPlotFrame(getName(), "histogram");
// NOTE(review): node("xAxis") is configured twice; the second block ("type" to "log")
// may have been intended for "yAxis" — confirm intended axis.
histPlot.configure {
node("xAxis") {
"axisTitle" to "delay"
"axisUnits" to "us"
}
node("xAxis") {
"type" to "log"
}
}
// Step-style line plot of the delay histogram, keyed by the data point name.
histPlot.add(PlottableData(name)
.configure {
"showLine" to true
"showSymbol" to false
"showErrors" to false
"connectionType" to "step"
node("adapter") {
"y.value" to "count"
}
}.fillData(histogram)
)
log.report("The expected count rate for 30 us delay is $trueCR")
// Scan t0 over 1..150 us (values supplied as 1000 * i, same unit as "t0" above)
// and collect count rate with its error for each cut.
val statPlotPoints = (1..150).map { 1000 * it }.map { t0 ->
val result = analyzer.analyze(input, buildMeta {
"t0" to t0
"window.lo" to loChannel
"window.up" to upChannel
})
// x is t0 converted back to us (integer division is exact here since t0 is a multiple of 1000).
ValueMap.ofMap(
mapOf(
"x" to t0 / 1000,
"y" to result.getDouble("cr"),
"y.err" to result.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
)
);
}
pm.getPlotFrame(getName(), "stat-method").add(
PlottableData(name).configure {
"showLine" to true
"thickness" to 4
}.fillData(statPlotPoints)
)
// The action's tabular output is the delay histogram itself.
return histogram;
}
}

View File

@ -1,8 +1,9 @@
plugins{
id "org.jetbrains.kotlin.jvm" version '1.1.4'
id "application"
}
apply plugin: 'kotlin'
repositories {
mavenCentral()
}

View File

@ -1,10 +1,10 @@
package inr.numass.viewer
import hep.dataforge.data.Data
import hep.dataforge.meta.Meta
import hep.dataforge.tables.Table
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import java.util.*
import java.util.stream.Collectors
import java.util.stream.Stream
@ -12,11 +12,11 @@ import java.util.stream.Stream
* Cached numass data
* Created by darksnake on 23-Jun-17.
*/
class NumassDataCache(val data: NumassSet): NumassSet {
class NumassDataCache(val data: NumassSet) : NumassSet {
private val cachedDescription: String by lazy { data.description }
private val cachedMeta: Meta by lazy { data.meta }
private val cachedPoints: List<NumassPoint> by lazy { data.points.collect(Collectors.toList()) }
private val hv: Table by lazy { data.hvData.get() }
private val hv: Optional<Table> by lazy { data.hvData }
override fun getPoints(): Stream<NumassPoint> {
@ -35,7 +35,7 @@ class NumassDataCache(val data: NumassSet): NumassSet {
return data.name;
}
override fun getHvData(): Data<Table> {
return Data.buildStatic(hv);
override fun getHvData(): Optional<Table> {
return hv;
}
}

View File

@ -241,16 +241,18 @@ class NumassLoaderView : View() {
private fun updateHV(data: NumassSet) {
hvPlotData.forEach { it.clear() }
runAsync {
data.hvData.get()
} ui {
for (dp in it) {
val block = dp.getString("block", "default")
if (!hvPlotData.has(block)) {
hvPlotData.add(TimePlottable(block))
data.hvData
} ui { hvData ->
hvData.ifPresent {
for (dp in it) {
val block = dp.getString("block", "default")
if (!hvPlotData.has(block)) {
hvPlotData.add(TimePlottable(block))
}
hvPlotData.get(block).put(dp.getValue("timestamp").timeValue(), dp.getValue("value"))
}
hvPlotData.get(block).put(dp.getValue("timestamp").timeValue(), dp.getValue("value"))
hvPlot.plot.addAll(hvPlotData)
}
hvPlot.plot.addAll(hvPlotData)
}
}