Fooling around with build + a few additions for numass

This commit is contained in:
Alexander Nozik 2019-02-11 17:03:45 +03:00
parent b25c1dd0eb
commit 84998bce6d
13 changed files with 230 additions and 96 deletions

View File

@@ -9,6 +9,11 @@ buildscript {
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
}
}
//plugins{
// id 'org.openjfx.javafxplugin' version '0.0.7' apply false
//}
allprojects {
apply plugin: 'idea'
apply plugin: 'java'

View File

@@ -20,7 +20,8 @@ class NumassFrame(
/**
* The buffered signal shape in ticks
*/
val signal: ShortBuffer) {
val signal: ShortBuffer
) {
val length: Duration
get() = tickSize.multipliedBy(signal.capacity().toLong())
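As a quick illustration of the length computation above (the numbers are hypothetical, not taken from the data format): with java.time.Duration, which multipliedBy() implies, a frame of 1024 samples at a 320 ns tick has

import java.time.Duration

val tickSize = Duration.ofNanos(320)                  // hypothetical digitizer tick
val samples = 1024                                    // hypothetical frame size
val length = tickSize.multipliedBy(samples.toLong())
println(length)                                       // PT0.00032768S, i.e. 327.68 µs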

View File

@@ -7,5 +7,5 @@ import java.util.stream.Stream
* Created by darksnake on 07.07.2017.
*/
interface SignalProcessor {
fun analyze(frame: NumassFrame): Stream<NumassEvent>
fun analyze(parent: NumassBlock, frame: NumassFrame): Stream<NumassEvent>
}
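The signature now receives the owning block as well as the frame, so an implementation can attach the events it produces to their parent. A minimal sketch of an implementation against the new signature, assuming the NumassEvent(amplitude, timeOffset, parent) constructor used by ChernovProcessor elsewhere in this commit and a hypothetical fixed tick length in nanoseconds:

import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassEvent
import inr.numass.data.api.NumassFrame
import inr.numass.data.api.SignalProcessor
import java.util.stream.Stream

class SimpleThresholdProcessor(
    private val threshold: Short,
    private val tickNanos: Long // hypothetical tick length used to convert a sample index to a time offset
) : SignalProcessor {
    override fun analyze(parent: NumassBlock, frame: NumassFrame): Stream<NumassEvent> {
        val buffer = frame.signal.duplicate().also { it.rewind() } // leave the original buffer untouched
        val events = ArrayList<NumassEvent>()
        var index = 0
        while (buffer.hasRemaining()) {
            val value = buffer.get()
            if (value > threshold) {
                events.add(NumassEvent(value, index * tickNanos, parent))
            }
            index++
        }
        return events.stream()
    }
}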

View File

@@ -0,0 +1,19 @@
plugins {
idea
kotlin("jvm")
}
repositories {
mavenLocal()
mavenCentral()
}
dependencies {
compile(kotlin("stdlib-jdk8"))
compile(project(":numass-core:numass-data-api"))
// https://mvnrepository.com/artifact/org.apache.commons/commons-collections4
compile(group = "org.apache.commons", name = "commons-collections4", version = "4.3")
}
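With the matching include added to settings.gradle at the end of this commit, other modules can depend on the new processors the same way this script references numass-data-api, e.g. (same Kotlin DSL conventions, sketch only):

dependencies {
    compile(project(":numass-core:numass-signal-processing"))
}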

View File

@@ -0,0 +1,66 @@
package inr.numass.data
import hep.dataforge.meta.Meta
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassEvent
import inr.numass.data.api.NumassFrame
import inr.numass.data.api.SignalProcessor
import org.apache.commons.collections4.queue.CircularFifoQueue
import java.nio.ShortBuffer
import java.util.stream.Stream
import kotlin.streams.asStream
private fun ShortBuffer.clone(): ShortBuffer {
val clone = ShortBuffer.allocate(capacity())
rewind() // copy from the beginning
clone.put(this)
rewind()
clone.flip()
return clone
}
class ChernovProcessor(val meta: Meta) : SignalProcessor {
val threshold = meta.getValue("threshold").number.toShort()
val signalRange: IntRange = TODO()
val signal: (Double) -> Double = { TODO() }
val tickSize: Int = TODO()
private fun CircularFifoQueue<Short>.findMax(): Pair<Double, Double> {
TODO()
}
override fun analyze(parent: NumassBlock, frame: NumassFrame): Stream<NumassEvent> {
return sequence<NumassEvent> {
val events = HashMap<Double, Double>()
val buffer = frame.signal.clone()
val ringBuffer = CircularFifoQueue<Short>(5)
while (buffer.remaining() > 0) {
ringBuffer.add(buffer.get())
val lastValue = ringBuffer[1] ?: -1
val currentValue = ringBuffer[0]
if (lastValue > threshold && currentValue < lastValue) {
// Found a bend (the signal stopped rising), evaluate the event
ringBuffer.add(buffer.get()) // take another step to have 5 points
//TODO check end of frame
val (pos, amp) = ringBuffer.findMax()
val event = NumassEvent(amp.toShort(), pos.toLong() * tickSize, parent)
yield(event)
// subtract the event from the buffer copy
for (x in signalRange) {
//TODO check all roundings
val position = buffer.position() - x.toShort()
val oldValue = buffer.get(position)
val newValue = oldValue - amp * signal(x.toDouble())
buffer.put(position, newValue.toShort())
}
}
}
}.asStream()
}
}
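ChernovProcessor is still a stub: the pulse shape, its range, the tick size and findMax() are all TODO(). A self-contained sketch of the intended peak search, with those gaps filled by explicit assumptions (a plain DoubleArray instead of a NumassFrame, a three-point local-maximum test instead of findMax(), and a hypothetical Gaussian-like pulse shape), could look roughly like this:

import kotlin.math.exp

/**
 * Illustration only: find peaks above a threshold, record their position and amplitude,
 * and subtract a scaled pulse shape from a working copy so that overlapping pulses
 * remain visible to the rest of the scan, mirroring the buffer.put() loop above.
 */
fun findPeaks(
    data: DoubleArray,
    threshold: Double,
    shape: (Double) -> Double = { x -> exp(-x * x / 8.0) }, // hypothetical normalized pulse shape in ticks
    shapeRange: IntRange = -5..5
): List<Pair<Int, Double>> {
    val buffer = data.copyOf()
    val peaks = mutableListOf<Pair<Int, Double>>()
    for (i in 1 until buffer.size - 1) {
        val value = buffer[i]
        // a local maximum above the threshold marks an event candidate
        if (value > threshold && value >= buffer[i - 1] && value > buffer[i + 1]) {
            peaks += i to value
            // subtract the scaled shape around the peak
            for (dx in shapeRange) {
                val idx = i + dx
                if (idx in buffer.indices) buffer[idx] -= value * shape(dx.toDouble())
            }
        }
    }
    return peaks
}

The real implementation would additionally convert the peak position into an event time via the tick size, as the NumassEvent construction above does.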

View File

@@ -41,7 +41,7 @@ class SmartAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proce
"simple" -> simpleAnalyzer
"time" -> timeAnalyzer
"debunch" -> debunchAnalyzer
else -> throw IllegalArgumentException("Analyzer not found")
else -> throw IllegalArgumentException("Analyzer ${config.getString("type")} not found")
}
} else {
if (config.hasValue("t0") || config.hasMeta("t0")) {
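For reference, the type dispatched on here comes from the analyzer configuration node, so with the extended message a misspelled analyzer is now reported by name. A hypothetical configuration selecting the time analyzer, written with the buildMeta DSL used elsewhere in this commit (values are illustrative):

import hep.dataforge.meta.buildMeta

val analyzerMeta = buildMeta {
    "type" to "time" // one of "simple", "time", "debunch"
    "t0" to 15000    // illustrative dead-time cut, see the t0 branch checked just above
}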

View File

@@ -1,33 +0,0 @@
syntax = "proto3";
package inr.numass.data;
message Point {
// A single channel for multichannel detector readout
message Channel {
//A continuous measurement block
message Block {
// Raw data frame
message Frame {
uint64 time = 1; // Time in nanos from the beginning of the block
bytes data = 2; // Frame data as an array of int16 measured in arbitrary channels
}
// Event block obtained directly from the device or from frame analysis
// In order to save space, times and amplitudes are in separate arrays.
// Amplitude and time with the same index correspond to the same event
message Events {
repeated uint64 times = 1; // Array of time in nanos from the beginning of the block
repeated uint64 amplitudes = 2; // Array of amplitudes of events in channels
}
uint64 time = 1; // Block start in epoch nanos
repeated Frame frames = 2; // Frames array
Events events = 3; // Events array
uint64 length = 4; // block size in nanos. If missing, take from meta.
uint64 bin_size = 5; // tick size in nanos. Obsolete, to be removed
}
uint64 id = 1; // The number of measuring channel
repeated Block blocks = 2; // Blocks
}
repeated Channel channels = 1; // Array of measuring channels
}
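The Events message removed here stores times and amplitudes as two parallel arrays to save space, with matching indices describing one event. Pairing them back into API objects, assuming the NumassEvent(amplitude, timeOffset, parent) constructor used by ChernovProcessor above, is a plain index walk:

import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassEvent

// times and amplitudes are the parallel arrays of one Events message; entry i of each describes the same event
fun eventsFromArrays(times: LongArray, amplitudes: LongArray, parent: NumassBlock): List<NumassEvent> {
    require(times.size == amplitudes.size) { "times and amplitudes must have the same length" }
    return times.indices.map { i -> NumassEvent(amplitudes[i].toShort(), times[i], parent) }
}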

View File

@@ -5,6 +5,12 @@ plugins {
apply plugin: 'kotlin'
//apply plugin: 'org.openjfx.javafxplugin'
//
//javafx{
// modules = [ 'javafx.controls']
//}
//if (!hasProperty('mainClass')) {
// ext.mainClass = 'inr.numass.LaunchGrindShell'
//}

View File

@@ -72,7 +72,8 @@ class NumassPlugin : BasicPlugin() {
plotFitTask,
histogramTask,
fitScanTask,
sliceTask
sliceTask,
subThresholdTask
)
@Provides(Task.TASK_TARGET)

View File

@@ -10,6 +10,7 @@ import hep.dataforge.nullable
import hep.dataforge.storage.Storage
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.Table
import hep.dataforge.toList
import hep.dataforge.values.ValueMap
import hep.dataforge.values.Values
import inr.numass.data.analyzers.*
@@ -25,7 +26,9 @@ import org.apache.commons.math3.analysis.ParametricUnivariateFunction
import org.apache.commons.math3.exception.DimensionMismatchException
import org.apache.commons.math3.fitting.SimpleCurveFitter
import org.apache.commons.math3.fitting.WeightedObservedPoint
import org.slf4j.LoggerFactory
import java.util.stream.Collectors
import java.util.stream.StreamSupport
object Threshold {
@@ -35,13 +38,13 @@ object Threshold {
//creating storage instance
val storage = NumassDirectory.read(context, meta.getString("data.dir")) as Storage
fun Storage.loaders(): Sequence<NumassDataLoader>{
fun Storage.loaders(): Sequence<NumassDataLoader> {
return sequence<NumassDataLoader> {
print("Reading ${this@loaders.fullName}")
runBlocking { this@loaders.children }.forEach {
if(it is NumassDataLoader){
if (it is NumassDataLoader) {
yield(it)
} else if (it is Storage){
} else if (it is Storage) {
yieldAll(it.loaders())
}
}
@@ -94,7 +97,8 @@ object Threshold {
WeightedObservedPoint(
1.0,//1d / p.getValue() , //weight
it.getDouble(CHANNEL_KEY), // x
it.getDouble(COUNT_RATE_KEY) / binning) //y
it.getDouble(COUNT_RATE_KEY) / binning
) //y
}
.collect(Collectors.toList())
}
@@ -132,7 +136,6 @@ object Threshold {
}
/**
* Exponential function $a e^{\frac{x}{\sigma}}$
*/
@@ -223,23 +226,33 @@ object Threshold {
}
fun calculateSubThreshold(set: NumassSet, config: Meta, analyzer: NumassAnalyzer = SmartAnalyzer()): Table {
val reference = config.optNumber("reference").nullable?.let{
val reference = config.optNumber("reference").nullable?.let {
set.getPoints(it.toDouble()).firstOrNull() ?: error("Reference point not found")
}?.let {
println("Using reference point ${it.voltage}")
analyzer.getAmplitudeSpectrum(it,config)
analyzer.getAmplitudeSpectrum(it, config)
}
return ListTable.Builder().apply {
set.forEach{ point ->
val spectrum = analyzer.getAmplitudeSpectrum(point,config).let {
if(reference == null){
StreamSupport.stream(set.spliterator(), true).map { point ->
LoggerFactory.getLogger(Threshold.javaClass).info("Starting point ${point.voltage}")
val spectrum = analyzer.getAmplitudeSpectrum(point, config).let {
if (reference == null) {
it
} else{
subtractAmplitudeSpectrum(it,reference)
} else {
subtractAmplitudeSpectrum(it, reference)
}
}
row(calculateSubThreshold(spectrum,point.voltage,config))
LoggerFactory.getLogger(Threshold.javaClass).info("Calculating threshold ${point.voltage}")
try {
calculateSubThreshold(spectrum, point.voltage, config)
} catch (ex: Exception) {
LoggerFactory.getLogger(Threshold.javaClass).error("Failed to fit point ${point.voltage}", ex)
null
}
}.toList().filterNotNull().forEach {
println(it.toString())
row(it)
}
}.build()
}
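The exponential function $a e^{\frac{x}{\sigma}}$ from the doc comment above is fitted with the commons-math classes imported at the top of this file. A minimal sketch of such a parametric function and its use with SimpleCurveFitter (the start values are illustrative, not the ones used here):

import org.apache.commons.math3.analysis.ParametricUnivariateFunction
import org.apache.commons.math3.fitting.SimpleCurveFitter
import org.apache.commons.math3.fitting.WeightedObservedPoint
import kotlin.math.exp

// a * exp(x / sigma) with parameters ordered as [a, sigma]
object ExpFunction : ParametricUnivariateFunction {
    override fun value(x: Double, vararg parameters: Double): Double {
        val (a, sigma) = parameters
        return a * exp(x / sigma)
    }

    override fun gradient(x: Double, vararg parameters: Double): DoubleArray {
        val (a, sigma) = parameters
        val e = exp(x / sigma)
        // partial derivatives with respect to a and sigma
        return doubleArrayOf(e, -a * x * e / (sigma * sigma))
    }
}

// returns the fitted [a, sigma]
fun fitExp(points: List<WeightedObservedPoint>, aStart: Double, sigmaStart: Double): DoubleArray =
    SimpleCurveFitter.create(ExpFunction, doubleArrayOf(aStart, sigmaStart)).fit(points)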

View File

@@ -4,8 +4,6 @@ import hep.dataforge.configure
import hep.dataforge.data.*
import hep.dataforge.io.output.stream
import hep.dataforge.io.render
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaUtils
import hep.dataforge.meta.buildMeta
import hep.dataforge.nullable
import hep.dataforge.plots.data.DataPlot
@@ -86,11 +84,13 @@ val analyzeTask = task("analyze") {
info = "Count the number of events for each voltage and produce a table with the results"
}
model { meta ->
dependsOn(selectTask, meta);
configure(MetaUtils.optEither(meta, "analyzer", "prepare").orElse(Meta.empty()))
dependsOn(selectTask, meta)
configure {
"analyzer" to meta.getMetaOrEmpty("analyzer")
}
}
pipe<NumassSet, Table> { set ->
SmartAnalyzer().analyzeSet(set, meta).also { res ->
SmartAnalyzer().analyzeSet(set, meta.getMeta("analyzer")).also { res ->
val outputMeta = meta.builder.putNode("data", set.meta)
context.output.render(res, stage = "numass.analyze", name = name, meta = outputMeta)
}
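With this change the analyzer settings are read from a dedicated "analyzer" node instead of the old analyzer/prepare fallback. A hypothetical task configuration reflecting the new layout, using builder calls that appear elsewhere in this file (keys from this diff, values illustrative):

val analyzeConfig = buildMeta {
    putNode("analyzer", buildMeta {
        "type" to "time" // dispatched by SmartAnalyzer
        "t0" to 15000    // illustrative
    })
}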
@@ -299,7 +299,7 @@ val histogramTask = task("histogram") {
value(
"binning",
types = listOf(ValueType.NUMBER),
defaultValue = 20,
defaultValue = 16,
info = "The binning of resulting histogram"
)
value(
@@ -381,7 +381,7 @@ val histogramTask = task("histogram") {
data.toSortedMap().forEach { name, set ->
putNode("data", buildMeta {
"name" to name
set.meta.useMeta("iteration_info"){"iteration" to it}
set.meta.useMeta("iteration_info") { "iteration" to it }
})
}
}

View File

@@ -0,0 +1,55 @@
package inr.numass.tasks
import hep.dataforge.io.render
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.output.plotFrame
import hep.dataforge.plots.plotData
import hep.dataforge.tables.Adapters
import hep.dataforge.tables.Table
import hep.dataforge.tables.filter
import hep.dataforge.useMeta
import hep.dataforge.values.ValueType
import hep.dataforge.workspace.tasks.task
import inr.numass.data.NumassDataUtils
import inr.numass.data.api.NumassSet
import inr.numass.subthreshold.Threshold
val subThresholdTask = task("threshold") {
descriptor {
value("plot", types = listOf(ValueType.BOOLEAN), defaultValue = false, info = "Show threshold correction plot")
value(
"binning",
types = listOf(ValueType.NUMBER),
defaultValue = 16,
info = "The binning used for fit"
)
info = "Calculate sub threshold correction"
}
model { meta ->
dependsOn(selectTask, meta)
configure(meta.getMetaOrEmpty("threshold"))
configure {
meta.useMeta("analyzer") { putNode(it) }
setValue("@target", meta.getString("@target", meta.name))
}
}
join<NumassSet, Table> { data ->
val sum = NumassDataUtils.join(name, data.values)
val correctionTable = Threshold.calculateSubThreshold(sum, meta).filter {
it.getDouble("correction") in (1.0..1.2)
}
if (meta.getBoolean("plot", false)) {
context.plotFrame("$name.plot", stage = "numass.threshold") {
plots.setType<DataPlot>()
plotData("${name}_cor", correctionTable, Adapters.buildXYAdapter("U", "correction"))
plotData("${name}_a", correctionTable, Adapters.buildXYAdapter("U", "a"))
plotData("${name}_beta", correctionTable, Adapters.buildXYAdapter("U", "beta"))
}
}
context.output.render(correctionTable, "numass.correction", name, meta = meta)
return@join correctionTable
}
}
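A hypothetical input meta for this task, using the keys read in the descriptor and model blocks above (values are illustrative; the filter above then keeps rows with a correction between 1.0 and 1.2):

import hep.dataforge.meta.buildMeta

val thresholdConfig = buildMeta {
    putNode("threshold", buildMeta {
        "plot" to true  // draw the correction plots
        "binning" to 16 // binning used for the fit
    })
    putNode("analyzer", buildMeta {
        "type" to "time" // forwarded to the analyzer by the model block above
    })
}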

View File

@@ -15,6 +15,7 @@ include ":numass-core"
include 'numass-core:numass-data-api'
include 'numass-core:numass-data-proto'
include 'numass-core:numass-signal-processing'
//include ":numass-server"
//