Fooling around with build + few additions for numass

parent b25c1dd0eb
commit 84998bce6d
@@ -9,6 +9,11 @@ buildscript {
         classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
     }
 }
+
+//plugins{
+//    id 'org.openjfx.javafxplugin' version '0.0.7' apply false
+//}
+
 allprojects {
     apply plugin: 'idea'
     apply plugin: 'java'
@@ -9,18 +9,19 @@ import java.time.Instant
  * Created by darksnake on 06-Jul-17.
  */
 class NumassFrame(
         /**
          * The absolute start time of the frame
          */
         val time: Instant,
         /**
          * The time interval per tick
          */
         val tickSize: Duration,
         /**
          * The buffered signal shape in ticks
          */
-        val signal: ShortBuffer) {
+        val signal: ShortBuffer
+) {
 
     val length: Duration
         get() = tickSize.multipliedBy(signal.capacity().toLong())
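For orientation, a minimal usage sketch of the reformatted constructor (illustration only, not part of the commit; the 8 ns tick and the 1024-sample buffer are assumed values):

import inr.numass.data.api.NumassFrame
import java.nio.ShortBuffer
import java.time.Duration
import java.time.Instant

fun main() {
    // Hypothetical frame: 1024 ticks of 8 ns each (both values assumed)
    val frame = NumassFrame(
            time = Instant.now(),
            tickSize = Duration.ofNanos(8),
            signal = ShortBuffer.allocate(1024)
    )
    // length = tickSize * capacity = 8 ns * 1024 ticks = 8.192 microseconds
    println(frame.length) // prints PT0.000008192S
}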
@@ -7,5 +7,5 @@ import java.util.stream.Stream
  * Created by darksnake on 07.07.2017.
  */
 interface SignalProcessor {
-    fun analyze(frame: NumassFrame): Stream<NumassEvent>
+    fun analyze(parent: NumassBlock, frame: NumassFrame): Stream<NumassEvent>
 }
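The extra parent parameter threads the owning NumassBlock through to implementations, so produced events can point back to the block that owns the frame; the ChernovProcessor added later in this commit passes it straight into the NumassEvent constructor. A trivial implementer sketch under that assumption (illustration only, not part of the commit):

import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassEvent
import inr.numass.data.api.NumassFrame
import inr.numass.data.api.SignalProcessor
import java.util.stream.Stream

// Hypothetical processor: emit a single event at the frame start whose amplitude
// is the first sample, attached to the parent block. Assumes the
// NumassEvent(amplitude, timeOffset, owner) constructor seen in ChernovProcessor.
class FirstSampleProcessor : SignalProcessor {
    override fun analyze(parent: NumassBlock, frame: NumassFrame): Stream<NumassEvent> {
        val amplitude = frame.signal.get(0)
        return Stream.of(NumassEvent(amplitude, 0L, parent))
    }
}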
numass-core/numass-signal-processing/build.gradle.kts (new file, 19 lines)
@@ -0,0 +1,19 @@
+plugins {
+    idea
+    kotlin("jvm")
+}
+
+
+repositories {
+    mavenLocal()
+    mavenCentral()
+}
+
+dependencies {
+    compile(kotlin("stdlib-jdk8"))
+    compile(project(":numass-core:numass-data-api"))
+
+    // https://mvnrepository.com/artifact/org.apache.commons/commons-collections4
+    compile(group = "org.apache.commons", name = "commons-collections4", version = "4.3")
+
+}
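One note on the new script: the compile configuration used here is deprecated in current Gradle releases (and removed in Gradle 7) in favour of implementation/api. If the module is ever moved to a newer Gradle, the dependency block would likely read roughly as follows (a sketch, not part of the commit):

dependencies {
    implementation(kotlin("stdlib-jdk8"))
    implementation(project(":numass-core:numass-data-api"))

    // https://mvnrepository.com/artifact/org.apache.commons/commons-collections4
    implementation(group = "org.apache.commons", name = "commons-collections4", version = "4.3")
}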
@@ -0,0 +1,66 @@
+package inr.numass.data
+
+import hep.dataforge.meta.Meta
+import inr.numass.data.api.NumassBlock
+import inr.numass.data.api.NumassEvent
+import inr.numass.data.api.NumassFrame
+import inr.numass.data.api.SignalProcessor
+import org.apache.commons.collections4.queue.CircularFifoQueue
+import java.nio.ShortBuffer
+import java.util.stream.Stream
+import kotlin.streams.asStream
+
+
+private fun ShortBuffer.clone(): ShortBuffer {
+    val clone = ShortBuffer.allocate(capacity())
+    rewind() //copy from the beginning
+    clone.put(this)
+    rewind()
+    clone.flip()
+    return clone
+}
+
+
+class ChernovProcessor(val meta: Meta) : SignalProcessor {
+    val threshold = meta.getValue("threshold").number.toShort()
+    val signalRange: IntRange = TODO()
+    val signal: (Double) -> Double = { TODO() }
+    val tickSize: Int = TODO()
+
+    private fun CircularFifoQueue<Short>.findMax(): Pair<Double, Double> {
+        TODO()
+    }
+
+    override fun analyze(parent: NumassBlock, frame: NumassFrame): Stream<NumassEvent> {
+        return sequence<NumassEvent> {
+            val events = HashMap<Double, Double>()
+            val buffer = frame.signal.clone()
+
+            val ringBuffer = CircularFifoQueue<Short>(5)
+            while (buffer.remaining() > 0) {
+                ringBuffer.add(buffer.get())
+                val lastValue = ringBuffer[1] ?: -1
+                val currentValue = ringBuffer[0]
+                if (lastValue > threshold && currentValue < lastValue) {
+                    //Found bending, evaluating event
+
+                    ringBuffer.add(buffer.get()) //do another step to have 5-points
+                    //TODO check end of frame
+                    val (pos, amp) = ringBuffer.findMax()
+                    val event = NumassEvent(amp.toShort(), pos.toLong() * tickSize, parent)
+                    yield(event)
+
+                    //subtracting event from buffer copy
+                    for (x in signalRange) {
+                        //TODO check all roundings
+                        val position = buffer.position() - x.toShort()
+                        val oldValue = buffer.get(position)
+                        val newValue = oldValue - amp * signal(x.toDouble())
+                        buffer.put(position, newValue.toShort())
+                    }
+                }
+            }
+        }.asStream()
+    }
+}
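The peak search above stubs out findMax, which has to return the position and amplitude of the maximum inside the 5-sample ring buffer. One way the stub could be filled in is a three-point parabolic interpolation around the largest sample; the sketch below is an assumption-laden illustration, not part of the commit:

import org.apache.commons.collections4.queue.CircularFifoQueue

// Sketch: find the largest sample in the window, then refine position and
// amplitude with a parabola through it and its two neighbours.
// Returns (position inside the window in ticks, interpolated amplitude).
private fun CircularFifoQueue<Short>.findMax(): Pair<Double, Double> {
    val values = map { it.toDouble() }
    require(values.isNotEmpty()) { "empty window" }
    var iMax = 0
    for (i in values.indices) {
        if (values[i] > values[iMax]) iMax = i
    }
    if (iMax == 0 || iMax == values.size - 1) {
        // peak sits on the window edge: no interpolation possible
        return iMax.toDouble() to values[iMax]
    }
    val left = values[iMax - 1]
    val center = values[iMax]
    val right = values[iMax + 1]
    val denominator = left - 2 * center + right
    if (denominator == 0.0) return iMax.toDouble() to center
    val shift = 0.5 * (left - right) / denominator          // in (-0.5, 0.5) for a real peak
    val amplitude = center - 0.25 * (left - right) * shift  // parabola value at the vertex
    return (iMax + shift) to amplitude
}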
@@ -41,7 +41,7 @@ class SmartAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proce
             "simple" -> simpleAnalyzer
             "time" -> timeAnalyzer
             "debunch" -> debunchAnalyzer
-            else -> throw IllegalArgumentException("Analyzer not found")
+            else -> throw IllegalArgumentException("Analyzer ${config.getString("type")} not found")
         }
     } else {
         if (config.hasValue("t0") || config.hasMeta("t0")) {
@@ -1,33 +0,0 @@
-syntax = "proto3";
-
-package inr.numass.data;
-
-message Point {
-    // A single channel for multichannel detector readout
-    message Channel {
-        //A continuous measurement block
-        message Block {
-            // Raw data frame
-            message Frame {
-                uint64 time = 1; // Time in nanos from the beginning of the block
-                bytes data = 2; // Frame data as an array of int16 mesured in arbitrary channels
-            }
-            // Event block obtained directly from device of from frame analysis
-            // In order to save space, times and amplitudes are in separate arrays.
-            // Amplitude and time with the same index correspond to the same event
-            message Events {
-                repeated uint64 times = 1; // Array of time in nanos from the beginning of the block
-                repeated uint64 amplitudes = 2; // Array of amplitudes of events in channels
-            }
-
-            uint64 time = 1; // Block start in epoch nanos
-            repeated Frame frames = 2; // Frames array
-            Events events = 3; // Events array
-            uint64 length = 4; // block size in nanos. If missing, take from meta.
-            uint64 bin_size = 5; // tick size in nanos. Obsolete, to be removed
-        }
-        uint64 id = 1; // The number of measuring channel
-        repeated Block blocks = 2; // Blocks
-    }
-    repeated Channel channels = 1; // Array of measuring channels
-}
@@ -5,6 +5,12 @@ plugins {
 
 apply plugin: 'kotlin'
 
+//apply plugin: 'org.openjfx.javafxplugin'
+//
+//javafx{
+//    modules = [ 'javafx.controls']
+//}
+
 //if (!hasProperty('mainClass')) {
 //    ext.mainClass = 'inr.numass.LaunchGrindShell'
 //}
@@ -72,7 +72,8 @@ class NumassPlugin : BasicPlugin() {
             plotFitTask,
             histogramTask,
             fitScanTask,
-            sliceTask
+            sliceTask,
+            subThresholdTask
     )
 
     @Provides(Task.TASK_TARGET)
@@ -10,6 +10,7 @@ import hep.dataforge.nullable
 import hep.dataforge.storage.Storage
 import hep.dataforge.tables.ListTable
 import hep.dataforge.tables.Table
+import hep.dataforge.toList
 import hep.dataforge.values.ValueMap
 import hep.dataforge.values.Values
 import inr.numass.data.analyzers.*
@@ -25,7 +26,9 @@ import org.apache.commons.math3.analysis.ParametricUnivariateFunction
 import org.apache.commons.math3.exception.DimensionMismatchException
 import org.apache.commons.math3.fitting.SimpleCurveFitter
 import org.apache.commons.math3.fitting.WeightedObservedPoint
+import org.slf4j.LoggerFactory
 import java.util.stream.Collectors
+import java.util.stream.StreamSupport
 
 
 object Threshold {
@@ -35,13 +38,13 @@ object Threshold {
         //creating storage instance
         val storage = NumassDirectory.read(context, meta.getString("data.dir")) as Storage
 
-        fun Storage.loaders(): Sequence<NumassDataLoader>{
+        fun Storage.loaders(): Sequence<NumassDataLoader> {
             return sequence<NumassDataLoader> {
                 print("Reading ${this@loaders.fullName}")
                 runBlocking { this@loaders.children }.forEach {
-                    if(it is NumassDataLoader){
+                    if (it is NumassDataLoader) {
                         yield(it)
-                    } else if (it is Storage){
+                    } else if (it is Storage) {
                         yieldAll(it.loaders())
                     }
                 }
@@ -51,19 +54,19 @@ object Threshold {
         //Reading points
         //Free operation. No reading done
         val sets = storage.loaders()
                 .filter { it.fullName.toString().matches(meta.getString("data.mask").toRegex()) }
 
         val analyzer = TimeAnalyzer();
 
         val data = DataSet.edit(NumassPoint::class).also { dataBuilder ->
             sets.sortedBy { it.startTime }
                     .flatMap { set -> set.points.asSequence() }
                     .groupBy { it.voltage }
                     .forEach { key, value ->
                         val point = SimpleNumassPoint(value, key)
                         val name = key.toInt().toString()
                         dataBuilder.putStatic(name, point, buildMeta("meta", "voltage" to key));
                     }
         }.build()
 
         return data.pipe(context, meta) {
@@ -90,13 +93,14 @@ object Threshold {
 //            )
 
         return binned.rows
                 .map {
                     WeightedObservedPoint(
                             1.0,//1d / p.getValue() , //weight
                             it.getDouble(CHANNEL_KEY), // x
-                            it.getDouble(COUNT_RATE_KEY) / binning) //y
+                            it.getDouble(COUNT_RATE_KEY) / binning
+                    ) //y
                 }
                 .collect(Collectors.toList())
     }
 
     private fun norm(spectrum: Table, xLow: Int, upper: Int): Double {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Exponential function $a e^{\frac{x}{\sigma}}$
|
* Exponential function $a e^{\frac{x}{\sigma}}$
|
||||||
*/
|
*/
|
||||||
@@ -149,10 +152,10 @@ object Threshold {
         val norm = norm(spectrum, xLow, upper)
 
         return ValueMap.ofPairs(
                 "U" to voltage,
                 "a" to a,
                 "sigma" to sigma,
                 "correction" to a * sigma * Math.exp(xLow / sigma) / norm + 1.0
         )
     }
 
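For reference, the "correction" expression in this hunk matches the integral of the fitted exponential tail below the threshold channel x_low, normalised to the observed counts N = norm(spectrum, xLow, upper); a reconstruction from the code, not an addition to it:

f(x) = a\, e^{x/\sigma}, \qquad
\int_{-\infty}^{x_{\mathrm{low}}} a\, e^{x/\sigma}\, \mathrm{d}x = a\,\sigma\, e^{x_{\mathrm{low}}/\sigma}, \qquad
\text{correction} = 1 + \frac{a\,\sigma\, e^{x_{\mathrm{low}}/\sigma}}{N}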
@@ -173,14 +176,14 @@ object Threshold {
         val delta = shift ?: parameters[2]
         return if (parameters.size > 2) {
             doubleArrayOf(
                     Math.pow(x - delta, beta),
                     a * Math.pow(x - delta, beta) * Math.log(x - delta),
                     -a * beta * Math.pow(x - delta, beta - 1)
             )
         } else {
             doubleArrayOf(
                     Math.pow(x - delta, beta),
                     a * Math.pow(x - delta, beta) * Math.log(x - delta)
             )
         }
     }
@@ -206,11 +209,11 @@ object Threshold {
         val norm = norm(spectrum, xLow, upper)
 
         return ValueMap.ofPairs(
                 "U" to voltage,
                 "a" to a,
                 "beta" to beta,
                 "delta" to delta,
                 "correction" to a / (beta + 1) * Math.pow(xLow - delta, beta + 1.0) / norm + 1.0
         )
     }
 
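Likewise, the derivative triple in the previous hunk and the "correction" here follow from the power-law model of the sub-threshold spectrum (again reconstructed from the code, with the same normalisation N):

f(x) = a\,(x-\delta)^{\beta}, \qquad
\frac{\partial f}{\partial a} = (x-\delta)^{\beta}, \quad
\frac{\partial f}{\partial \beta} = a\,(x-\delta)^{\beta}\ln(x-\delta), \quad
\frac{\partial f}{\partial \delta} = -a\,\beta\,(x-\delta)^{\beta-1},

\int_{\delta}^{x_{\mathrm{low}}} a\,(x-\delta)^{\beta}\, \mathrm{d}x
  = \frac{a\,(x_{\mathrm{low}}-\delta)^{\beta+1}}{\beta+1}, \qquad
\text{correction} = 1 + \frac{a\,(x_{\mathrm{low}}-\delta)^{\beta+1}}{(\beta+1)\,N}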
@@ -223,23 +226,33 @@ object Threshold {
     }
 
     fun calculateSubThreshold(set: NumassSet, config: Meta, analyzer: NumassAnalyzer = SmartAnalyzer()): Table {
-        val reference = config.optNumber("reference").nullable?.let{
+        val reference = config.optNumber("reference").nullable?.let {
             set.getPoints(it.toDouble()).firstOrNull() ?: error("Reference point not found")
         }?.let {
             println("Using reference point ${it.voltage}")
-            analyzer.getAmplitudeSpectrum(it,config)
+            analyzer.getAmplitudeSpectrum(it, config)
         }
 
         return ListTable.Builder().apply {
-            set.forEach{ point ->
-                val spectrum = analyzer.getAmplitudeSpectrum(point,config).let {
-                    if(reference == null){
+            StreamSupport.stream(set.spliterator(), true).map { point ->
+                LoggerFactory.getLogger(Threshold.javaClass).info("Starting point ${point.voltage}")
+                val spectrum = analyzer.getAmplitudeSpectrum(point, config).let {
+                    if (reference == null) {
                         it
-                    } else{
-                        subtractAmplitudeSpectrum(it,reference)
+                    } else {
+                        subtractAmplitudeSpectrum(it, reference)
                     }
                 }
-                row(calculateSubThreshold(spectrum,point.voltage,config))
+                LoggerFactory.getLogger(Threshold.javaClass).info("Calculating threshold ${point.voltage}")
+                try {
+                    calculateSubThreshold(spectrum, point.voltage, config)
+                } catch (ex: Exception) {
+                    LoggerFactory.getLogger(Threshold.javaClass).error("Failed to fit point ${point.voltage}", ex)
+                    null
+                }
+            }.toList().filterNotNull().forEach {
+                println(it.toString())
+                row(it)
             }
         }.build()
     }
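The rewritten loop fans the points out over a parallel stream and tolerates failed fits by mapping them to null, collecting, and filtering, instead of letting one bad point abort the whole table. The same pattern in isolation, using only standard-library types (a sketch, not the project API):

import java.util.stream.Collectors

// Sketch: apply `block` to every element in parallel; elements whose processing
// throws are dropped (after logging, in real code) instead of failing the run.
fun <T, R : Any> Collection<T>.mapParallelTolerant(block: (T) -> R): List<R> =
    parallelStream()
        .map {
            try {
                block(it)
            } catch (ex: Exception) {
                null
            }
        }
        .collect(Collectors.toList())
        .filterNotNull()

In the hunk itself the stream is built with StreamSupport.stream(set.spliterator(), true), presumably because NumassSet exposes an Iterable/Spliterator rather than a Collection.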
@@ -4,8 +4,6 @@ import hep.dataforge.configure
 import hep.dataforge.data.*
 import hep.dataforge.io.output.stream
 import hep.dataforge.io.render
-import hep.dataforge.meta.Meta
-import hep.dataforge.meta.MetaUtils
 import hep.dataforge.meta.buildMeta
 import hep.dataforge.nullable
 import hep.dataforge.plots.data.DataPlot
@@ -86,11 +84,13 @@ val analyzeTask = task("analyze") {
         info = "Count the number of events for each voltage and produce a table with the results"
     }
     model { meta ->
-        dependsOn(selectTask, meta);
-        configure(MetaUtils.optEither(meta, "analyzer", "prepare").orElse(Meta.empty()))
+        dependsOn(selectTask, meta)
+        configure {
+            "analyzer" to meta.getMetaOrEmpty("analyzer")
+        }
     }
     pipe<NumassSet, Table> { set ->
-        SmartAnalyzer().analyzeSet(set, meta).also { res ->
+        SmartAnalyzer().analyzeSet(set, meta.getMeta("analyzer")).also { res ->
             val outputMeta = meta.builder.putNode("data", set.meta)
             context.output.render(res, stage = "numass.analyze", name = name, meta = outputMeta)
         }
@@ -299,7 +299,7 @@ val histogramTask = task("histogram") {
             value(
                     "binning",
                     types = listOf(ValueType.NUMBER),
-                    defaultValue = 20,
+                    defaultValue = 16,
                     info = "The binning of resulting histogram"
             )
             value(
@@ -381,7 +381,7 @@ val histogramTask = task("histogram") {
         data.toSortedMap().forEach { name, set ->
             putNode("data", buildMeta {
                 "name" to name
-                set.meta.useMeta("iteration_info"){"iteration" to it}
+                set.meta.useMeta("iteration_info") { "iteration" to it }
             })
         }
     }
@@ -0,0 +1,55 @@
+package inr.numass.tasks
+
+import hep.dataforge.io.render
+import hep.dataforge.plots.data.DataPlot
+import hep.dataforge.plots.output.plotFrame
+import hep.dataforge.plots.plotData
+import hep.dataforge.tables.Adapters
+import hep.dataforge.tables.Table
+import hep.dataforge.tables.filter
+import hep.dataforge.useMeta
+import hep.dataforge.values.ValueType
+import hep.dataforge.workspace.tasks.task
+import inr.numass.data.NumassDataUtils
+import inr.numass.data.api.NumassSet
+import inr.numass.subthreshold.Threshold
+
+val subThresholdTask = task("threshold") {
+    descriptor {
+        value("plot", types = listOf(ValueType.BOOLEAN), defaultValue = false, info = "Show threshold correction plot")
+        value(
+                "binning",
+                types = listOf(ValueType.NUMBER),
+                defaultValue = 16,
+                info = "The binning used for fit"
+        )
+        info = "Calculate sub threshold correction"
+    }
+    model { meta ->
+        dependsOn(selectTask, meta)
+        configure(meta.getMetaOrEmpty("threshold"))
+        configure {
+            meta.useMeta("analyzer") { putNode(it) }
+            setValue("@target", meta.getString("@target", meta.name))
+        }
+    }
+    join<NumassSet, Table> { data ->
+        val sum = NumassDataUtils.join(name, data.values)
+
+        val correctionTable = Threshold.calculateSubThreshold(sum, meta).filter {
+            it.getDouble("correction") in (1.0..1.2)
+        }
+
+        if (meta.getBoolean("plot", false)) {
+            context.plotFrame("$name.plot", stage = "numass.threshold") {
+                plots.setType<DataPlot>()
+                plotData("${name}_cor", correctionTable, Adapters.buildXYAdapter("U", "correction"))
+                plotData("${name}_a", correctionTable, Adapters.buildXYAdapter("U", "a"))
+                plotData("${name}_beta", correctionTable, Adapters.buildXYAdapter("U", "beta"))
+            }
+        }
+
+        context.output.render(correctionTable, "numass.correction", name, meta = meta)
+        return@join correctionTable
+    }
+}
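For completeness, a hedged sketch of the metadata the new task reads, written with the buildMeta DSL that appears elsewhere in this commit; the key names (plot, binning, reference) come from the descriptor above and from Threshold.calculateSubThreshold, while the concrete values are assumptions:

import hep.dataforge.meta.buildMeta

// Illustration only: a configuration block for the "threshold" task.
val thresholdConfig = buildMeta {
    "plot" to true        // draw the correction/a/beta plots
    "binning" to 16       // binning used for the fit (the descriptor default)
    "reference" to 18600  // optional reference voltage for spectrum subtraction (assumed value)
}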
@@ -15,6 +15,7 @@ include ":numass-core"
 
 include 'numass-core:numass-data-api'
 include 'numass-core:numass-data-proto'
+include 'numass-core:numass-signal-processing'
 
 //include ":numass-server"
 //