Fixed error in numass file read

Alexander Nozik 2019-02-27 15:29:38 +03:00
parent 5b170da7d4
commit 28b7969709
17 changed files with 258 additions and 129 deletions

View File

@@ -1,22 +1,15 @@
 package inr.numass.data.api

 import hep.dataforge.meta.Meta
-import hep.dataforge.meta.MetaBuilder
 import hep.dataforge.meta.MetaHolder
+import hep.dataforge.meta.buildMeta

 /**
  * A simple static implementation of NumassPoint
  * Created by darksnake on 08.07.2017.
  */
-class SimpleNumassPoint(override val blocks: List<NumassBlock>, meta: Meta, override val isSequential: Boolean = true) : MetaHolder(meta), NumassPoint {
-    /**
-     * Input blocks must be sorted
-     * @param voltage
-     * @param blocks
-     */
-    constructor(blocks: Collection<NumassBlock>, voltage: Double) :
-            this(blocks.sortedBy { it.startTime }, MetaBuilder("point").setValue(NumassPoint.HV_KEY, voltage))
+class SimpleNumassPoint(override val blocks: List<NumassBlock>, meta: Meta, override val isSequential: Boolean = true) :
+    MetaHolder(meta), NumassPoint {

     init {
         if (blocks.isEmpty()) {
@@ -24,4 +17,14 @@ class SimpleNumassPoint(override val blocks: List<NumassBlock>, meta: Meta, over
         }
     }

+    companion object {
+        fun build(blocks: Collection<NumassBlock>, voltage: Double? = null, index: Int? = null): SimpleNumassPoint {
+            val meta = buildMeta("point") {
+                NumassPoint.HV_KEY to voltage
+                NumassPoint.INDEX_KEY to index
+            }
+            return SimpleNumassPoint(blocks.sortedBy { it.startTime }, meta.build())
+        }
+    }
 }
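The secondary constructor above is replaced by a companion build factory that sorts blocks by start time and stores the optional voltage and index in the point meta. A minimal usage sketch (the wrapper function and values are hypothetical, only the build call itself comes from this commit):

import inr.numass.data.api.NumassBlock
import inr.numass.data.api.SimpleNumassPoint

// Sketch only: `blocks` may come from any NumassBlock source.
fun examplePoint(blocks: Collection<NumassBlock>): SimpleNumassPoint =
    SimpleNumassPoint.build(blocks, voltage = 14000.0, index = 1)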

View File

@@ -21,9 +21,7 @@ import hep.dataforge.meta.Meta
 import hep.dataforge.meta.MetaBuilder
 import inr.numass.data.api.*
 import inr.numass.data.storage.ClassicNumassPoint
-import java.util.stream.Collectors
 import kotlin.streams.asSequence
-import kotlin.streams.toList

 /**
@@ -35,10 +33,8 @@ object NumassDataUtils {
            override suspend fun getHvData() = TODO()

            override val points: List<NumassPoint> by lazy {
-               val points = sets.stream().flatMap<NumassPoint> { it.points.stream() }
-                       .collect(Collectors.groupingBy<NumassPoint, Double> { it.voltage })
-               points.entries.stream().map { entry -> SimpleNumassPoint(entry.value, entry.key) }
-                       .toList()
+               val points = sets.flatMap { it.points }.groupBy { it.voltage }
+               return@lazy points.entries.map { entry -> SimpleNumassPoint.build(entry.value, entry.key) }
            }

            override val meta: Meta by lazy {
@@ -51,6 +47,30 @@ object NumassDataUtils {
            }
        }

+   fun joinByIndex(setName: String, sets: Collection<NumassSet>): NumassSet {
+       return object : NumassSet {
+           override suspend fun getHvData() = TODO()
+
+           override val points: List<NumassPoint> by lazy {
+               val points = sets.flatMap { it.points }.groupBy { it.index }
+               return@lazy points.map { (index, points) ->
+                   val voltage = points.first().voltage
+                   if (!points.all { it.voltage == voltage }) error("Not all points with same index have same voltage")
+                   SimpleNumassPoint.build(points, voltage, index)
+               }
+           }
+
+           override val meta: Meta by lazy {
+               val metaBuilder = MetaBuilder()
+               sets.forEach { set -> metaBuilder.putNode(set.name, set.meta) }
+               metaBuilder
+           }
+
+           override val name = setName
+       }
+   }
+
    fun adapter(): SpectrumAdapter {
        return SpectrumAdapter("Uset", "CR", "CRerr", "Time")
    }
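join now groups points from several sets by voltage using plain Kotlin collections, while the new joinByIndex groups them by point index and fails if points sharing an index disagree on voltage. A hedged usage sketch (loading of the sets and the NumassDataUtils import path are assumptions, not part of this commit):

import inr.numass.data.NumassDataUtils // import path assumed
import inr.numass.data.api.NumassSet

// Sketch only: `sets` is any collection of NumassSet loaded elsewhere.
fun joinExamples(sets: Collection<NumassSet>) {
    val byVoltage = NumassDataUtils.join("sum", sets)      // one merged point per HV value
    val byIndex = NumassDataUtils.joinByIndex("sum", sets) // one merged point per point index
    println("${byVoltage.points.size} / ${byIndex.points.size} merged points")
}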

View File

@@ -22,6 +22,7 @@ import hep.dataforge.tables.ListTable
 import hep.dataforge.tables.Table
 import hep.dataforge.tables.TableFormat
 import hep.dataforge.tables.TableFormatBuilder
+import hep.dataforge.toList
 import inr.numass.data.api.NumassBlock
 import inr.numass.data.api.NumassEvent
 import inr.numass.data.api.NumassPoint.Companion.HV_KEY
@@ -43,7 +44,7 @@ abstract class AbstractAnalyzer @JvmOverloads constructor(private val processor:
     * @param block
     * @return
     */
-    override fun getEvents(block: NumassBlock, meta: Meta): Stream<NumassEvent> {
+    override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
        val loChannel = meta.getInt("window.lo", 0)
        val upChannel = meta.getInt("window.up", Integer.MAX_VALUE)
//        if (meta.getBoolean("sort", false)) {
@@ -51,7 +52,7 @@ abstract class AbstractAnalyzer @JvmOverloads constructor(private val processor:
//        }
        return getAllEvents(block).filter { event ->
            event.amplitude.toInt() in loChannel..(upChannel - 1)
-       }
+       }.toList()
    }

    protected fun getAllEvents(block: NumassBlock): Stream<NumassEvent> {

View File

@@ -28,7 +28,6 @@ import java.util.*
 import java.util.concurrent.atomic.AtomicLong
 import java.util.concurrent.atomic.AtomicReference
 import java.util.stream.IntStream
-import java.util.stream.Stream
 import kotlin.streams.asSequence

 /**
@@ -67,7 +66,7 @@ interface NumassAnalyzer {
     * @param block
     * @return
     */
-    fun getEvents(block: NumassBlock, meta: Meta = Meta.empty()): Stream<NumassEvent>
+    fun getEvents(block: NumassBlock, meta: Meta = Meta.empty()): List<NumassEvent>

    /**
     * Analyze the whole set. And return results as a table
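With getEvents now returning List&lt;NumassEvent&gt; instead of a Java Stream, callers can reuse the result and apply ordinary Kotlin collection operations. A sketch of the calling side (the wrapper function is hypothetical; the NumassAnalyzer import path is an assumption):

import hep.dataforge.meta.Meta
import inr.numass.data.analyzers.NumassAnalyzer // package assumed
import inr.numass.data.api.NumassBlock

// Sketch only: analyzer, block and meta are supplied by the caller.
fun summarize(analyzer: NumassAnalyzer, block: NumassBlock, meta: Meta) {
    val events = analyzer.getEvents(block, meta)            // a List now, safe to iterate twice
    val mean = events.map { it.amplitude.toInt() }.average()
    println("${events.size} events in window, mean amplitude $mean")
}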

View File

@@ -24,7 +24,6 @@ import hep.dataforge.values.Values
 import inr.numass.data.api.NumassBlock
 import inr.numass.data.api.NumassEvent
 import inr.numass.data.api.SignalProcessor
-import java.util.stream.Stream

 /**
  * An analyzer dispatcher which uses different analyzer for different meta
@@ -59,7 +58,7 @@ class SmartAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proce
        return ValueMap(map)
    }

-    override fun getEvents(block: NumassBlock, meta: Meta): Stream<NumassEvent> {
+    override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
        return getAnalyzer(meta).getEvents(block, meta)
    }

View File

@@ -32,10 +32,8 @@ import inr.numass.data.api.*
 import inr.numass.data.api.NumassPoint.Companion.HV_KEY
 import java.util.*
 import java.util.concurrent.atomic.AtomicLong
-import java.util.stream.Stream
 import kotlin.math.sqrt
 import kotlin.streams.asSequence
-import kotlin.streams.asStream

 /**
@@ -43,8 +41,17 @@ import kotlin.streams.asStream
  * Created by darksnake on 11.07.2017.
  */
 @ValueDefs(
-    ValueDef(key = "separateParallelBlocks", type = [ValueType.BOOLEAN], info = "If true, then parallel blocks will be forced to be evaluated separately"),
-    ValueDef(key = "chunkSize", type = [ValueType.NUMBER], def = "-1", info = "The number of events in chunk to split the chain into. If negative, no chunks are used")
+    ValueDef(
+        key = "separateParallelBlocks",
+        type = [ValueType.BOOLEAN],
+        info = "If true, then parallel blocks will be forced to be evaluated separately"
+    ),
+    ValueDef(
+        key = "chunkSize",
+        type = [ValueType.NUMBER],
+        def = "-1",
+        info = "The number of events in chunk to split the chain into. If negative, no chunks are used"
+    )
 )
 class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(processor) {
@@ -60,8 +67,8 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
        val chunkSize = config.getInt("chunkSize", -1)

-       val count = block.events.count()
-       val length = block.length.toMillis().toDouble() / 1e3
+       val count = super.getEvents(block, config).count()
+       val length = block.length.toNanos().toDouble() / 1e9

        val res = when {
            count < 1000 -> ValueMap.ofPairs(
@@ -78,6 +85,7 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
        }

        return ValueMap.Builder(res)
+               .putValue("blockLength", length)
                .putValue(NumassAnalyzer.WINDOW_KEY, arrayOf(loChannel, upChannel))
                .putValue(NumassAnalyzer.TIME_KEY, block.startTime)
                .putValue(T0_KEY, t0.toDouble() / 1000.0)
@@ -99,7 +107,8 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
            error("Zero number of intervals")
        }

-       val countRate = 1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000)//1e9 / (totalT.get() / totalN.get() - t0);
+       val countRate =
+               1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000)//1e9 / (totalT.get() / totalN.get() - t0);
        val countRateError = countRate / Math.sqrt(totalN.get().toDouble())
        val length = totalT.get() / 1e9
        val count = (length * countRate).toLong()
@@ -159,7 +168,10 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
            )
            GEOMETRIC -> {
                val mean = Math.exp(sumByDouble { Math.log(it.getDouble(COUNT_RATE_KEY)) } / size)
-               val variance = Math.pow(mean / size, 2.0) * sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY) / it.getDouble(COUNT_RATE_KEY), 2.0) }
+               val variance = Math.pow(
+                   mean / size,
+                   2.0
+               ) * sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY) / it.getDouble(COUNT_RATE_KEY), 2.0) }
                Pair(mean, variance)
            }
        }
@@ -174,7 +186,11 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
    @ValueDefs(
        ValueDef(key = "t0", type = arrayOf(ValueType.NUMBER), info = "Constant t0 cut"),
-       ValueDef(key = "t0.crFraction", type = arrayOf(ValueType.NUMBER), info = "The relative fraction of events that should be removed by time cut"),
+       ValueDef(
+           key = "t0.crFraction",
+           type = arrayOf(ValueType.NUMBER),
+           info = "The relative fraction of events that should be removed by time cut"
+       ),
        ValueDef(key = "t0.min", type = arrayOf(ValueType.NUMBER), def = "0", info = "Minimal t0")
    )
    private fun getT0(block: NumassBlock, meta: Meta): Int {
@@ -211,12 +227,11 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
     */
    fun getEventsWithDelay(block: NumassBlock, config: Meta): Sequence<Pair<NumassEvent, Long>> {
        val inverted = config.getBoolean("inverted", true)
-       val events: Stream<NumassEvent> = super.getEvents(block, config).let { stream ->
+       val events = super.getEvents(block, config).toMutableList()
        if (block is ParentBlock && !block.isSequential) {
-           stream.sorted(compareBy { it.timeOffset })
-       } else {
-           stream
-       }
+           //sort in place if needed
+           events.sortBy { it.timeOffset }
        }

        return events.asSequence().zipWithNext { prev, next ->
@@ -236,9 +251,9 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
     * @param meta
     * @return
     */
-    override fun getEvents(block: NumassBlock, meta: Meta): Stream<NumassEvent> {
+    override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
        val t0 = getT0(block, meta).toLong()
-       return getEventsWithDelay(block, meta).filter { pair -> pair.second >= t0 }.asStream().map { it.first }
+       return getEventsWithDelay(block, meta).filter { pair -> pair.second >= t0 }.map { it.first }.toList()
    }

    public override fun getTableFormat(config: Meta): TableFormat {
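TimeAnalyzer.getEvents now applies the t0 cut to the event/delay pairs from getEventsWithDelay and returns a plain list; the cut itself is just a filter on the inter-event delay. A minimal sketch of that idea (the standalone helper is hypothetical; delays assumed to be in nanoseconds as above):

import inr.numass.data.api.NumassEvent

// Sketch of the t0 cut: keep an event only if the delay to its neighbour is at least t0 (ns).
fun applyT0Cut(pairs: Sequence<Pair<NumassEvent, Long>>, t0: Long): List<NumassEvent> =
    pairs.filter { (_, delay) -> delay >= t0 }.map { it.first }.toList()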

View File

@@ -15,7 +15,7 @@ import java.nio.file.Path
 import java.time.Duration
 import java.time.Instant
 import java.util.stream.Stream
-import kotlin.streams.asStream
+import java.util.stream.StreamSupport

 /**
  * Created by darksnake on 08.07.2017.
@@ -48,10 +48,11 @@ class ClassicNumassPoint(private val envelope: Envelope) : NumassPoint {
    //TODO split blocks using meta
    private inner class ClassicBlock(
        override val startTime: Instant,
-       override val length: Duration) : NumassBlock, Iterable<NumassEvent> {
+       override val length: Duration
+   ) : NumassBlock, Iterable<NumassEvent> {

        override val events: Stream<NumassEvent>
-           get() = this.asSequence().asStream()
+           get() = StreamSupport.stream(this.spliterator(), false)

        override fun iterator(): Iterator<NumassEvent> {
            val timeCoef = envelope.meta.getDouble("time_coeff", 50.0)
@@ -78,7 +79,8 @@ class ClassicNumassPoint(private val envelope: Envelope) : NumassPoint {
                    }
                }
            } catch (e: IOException) {
-               LoggerFactory.getLogger(this@ClassicNumassPoint.javaClass).error("Unexpected IOException when reading block", e)
+               LoggerFactory.getLogger(this@ClassicNumassPoint.javaClass)
+                   .error("Unexpected IOException when reading block", e)
                return false
            }
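The events stream of ClassicBlock is now built straight from its iterator via StreamSupport rather than through asSequence().asStream(). The same pattern in isolation, as a small extension usable for any Iterable (the extension itself is illustrative, not from this commit):

import java.util.stream.Stream
import java.util.stream.StreamSupport

// Wrap any Iterable as a sequential java.util.stream.Stream, as ClassicBlock does above.
fun <T> Iterable<T>.toStream(): Stream<T> = StreamSupport.stream(this.spliterator(), false)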

View File

@@ -32,7 +32,7 @@ fun main(args: Array<String>) {
    }

-   val point = SimpleNumassPoint(blocks, 10000.0)
+   val point = SimpleNumassPoint.build(blocks, 10000.0)

    val meta = buildMeta {
        "t0" to 1e7

View File

@@ -25,10 +25,9 @@ import inr.numass.data.api.NumassEvent
 import inr.numass.data.api.NumassSet
 import inr.numass.data.storage.NumassDirectory
 import org.apache.commons.math3.stat.correlation.PearsonsCorrelation
-import java.util.stream.Stream

-private fun correlation(sequence: Stream<NumassEvent>): Double {
+private fun correlation(sequence: List<NumassEvent>): Double {
    val amplitudes: MutableList<Double> = ArrayList()
    val times: MutableList<Double> = ArrayList()
    sequence.forEach {

View File

@@ -55,7 +55,7 @@ fun main(args: Array<String>) {
            if (!points.isEmpty()) {
                putStatic(
                    "point_${hv.toInt()}",
-                   SimpleNumassPoint(points, hv)
+                   SimpleNumassPoint.build(points, hv)
                )
            }
        }

View File

@@ -62,10 +62,7 @@ fun main(args: Array<String>) {
                it.voltage == hv && it.channel == 0
            }.toList()
            if (!points.isEmpty()) {
-               putStatic(
-                   "point_${hv.toInt()}",
-                   SimpleNumassPoint(points, hv)
-               )
+               putStatic("point_${hv.toInt()}", SimpleNumassPoint.build(points, hv))
            }
        }
    }.build()

View File

@@ -39,7 +39,7 @@ fun main(args: Array<String>) {
                .generateBlock(start.plusNanos(it * length), length)
        }
    }.join(Global) { blocks ->
-       SimpleNumassPoint(blocks, 12000.0)
+       SimpleNumassPoint.build(blocks, 12000.0)
    }.get()

View File

@@ -59,7 +59,7 @@ fun main(args: Array<String>) {
                .generateBlock(start.plusNanos(it * length), length)
        }
    }.join(Global) { blocks ->
-       SimpleNumassPoint(blocks, 18000.0)
+       SimpleNumassPoint.build(blocks, 18000.0)
    }.get()

View File

@@ -0,0 +1,90 @@
+package inr.numass.scripts.timeanalysis
+
+import hep.dataforge.buildContext
+import hep.dataforge.fx.output.FXOutputManager
+import hep.dataforge.meta.buildMeta
+import hep.dataforge.plots.jfreechart.JFreeChartPlugin
+import inr.numass.NumassPlugin
+import inr.numass.data.analyzers.TimeAnalyzer
+import inr.numass.data.api.NumassSet
+import inr.numass.data.storage.NumassDirectory
+
+fun main(args: Array<String>) {
+    val context = buildContext("NUMASS", NumassPlugin::class.java, JFreeChartPlugin::class.java) {
+        output = FXOutputManager()
+        rootDir = "D:\\Work\\Numass\\sterile2017_05"
+        dataDir = "D:\\Work\\Numass\\data\\2017_05"
+    }
+
+    val storage = NumassDirectory.read(context, "Fill_3")!!
+    val loader = storage.provide("set_10", NumassSet::class.java).get()
+    val point = loader.getPoints(18050.00).first()
+
+    val analyzer = TimeAnalyzer()
+
+    val meta = buildMeta("analyzer") {
+        "t0" to 3000
+        "inverted" to false
+        //"chunkSize" to 5000
+        //"mean" to TimeAnalyzer.AveragingMethod.ARITHMETIC
+    }
+
+    println(analyzer.analyze(point, meta))
+    println(analyzer.getEventsWithDelay(point.firstBlock, meta).count())
+
+    println(point.events.count())
+    println(point.firstBlock.events.count())
+
+//    val time = point.events.asSequence().zipWithNext().map { (p, n) ->
+//        n.timeOffset - p.timeOffset
+//    }.filter { it > 0 }.sum()
+
+    val time = analyzer.getEventsWithDelay(point.firstBlock, meta).map { it.second }.filter { it > 0 }.sum()
+
+//    val totalN = AtomicLong(0)
+//    val totalT = AtomicLong(0)
+//
+//    analyzer.getEventsWithDelay(point.firstBlock, meta).filter { pair -> pair.second >= 3000 }
+//        .forEach { pair ->
+//            totalN.incrementAndGet()
+//            //TODO add progress listener here
+//            totalT.addAndGet(pair.second)
+//        }
+//
+//    val time = totalT.get()
+
+    println(time / 1e9)
+
+//    val cr = 80.0
+//    val length = 5e9.toLong()
+//    val num = 6
+//    val dt = 6.5
+//
+//    val start = Instant.now()
+//
+//    val generator = SynchronizedRandomGenerator(JDKRandomGenerator(2223))
+//
+//    repeat(100) {
+//
+//        val point = (1..num).map {
+//            Global.generate {
+//                NumassGenerator
+//                    .generateEvents(cr, rnd = generator)
+////                    .withDeadTime { (dt * 1000).toLong() }
+//                    .generateBlock(start.plusNanos(it * length), length)
+//            }
+//        }.join(Global) { blocks ->
+//            SimpleNumassPoint.build(blocks, 12000.0)
+//        }.get()
+//
+//        println(analyzer.analyze(point, meta))
+//
+//    }
+}

View File

@@ -63,7 +63,7 @@ object Threshold {
            .flatMap { set -> set.points.asSequence() }
            .groupBy { it.voltage }
            .forEach { key, value ->
-               val point = SimpleNumassPoint(value, key)
+               val point = SimpleNumassPoint.build(value, key)
                val name = key.toInt().toString()
                dataBuilder.putStatic(name, point, buildMeta("meta", "voltage" to key));
            }
@@ -139,7 +139,7 @@
    /**
     * Exponential function $a e^{\frac{x}{\sigma}}$
     */
-    private fun exponential(spectrum: Table, voltage: Double, config: Meta): Values {
+    private fun exponential(point: NumassPoint, spectrum: Table, config: Meta): Values {
        val xLow: Int = config.getInt("xLow", 400)
        val xHigh: Int = config.getInt("xHigh", 700)
        val upper: Int = config.getInt("upper", 3100)
@@ -152,7 +152,8 @@
        val norm = norm(spectrum, xLow, upper)

        return ValueMap.ofPairs(
-               "U" to voltage,
+               "index" to point.index,
+               "U" to point.voltage,
                "a" to a,
                "sigma" to sigma,
                "correction" to a * sigma * Math.exp(xLow / sigma) / norm + 1.0
@@ -192,7 +193,7 @@
    /**
     * Power function $a (x-\delta)^{\beta}
     */
-    private fun power(spectrum: Table, voltage: Double, config: Meta): Values {
+    private fun power(point: NumassPoint, spectrum: Table, config: Meta): Values {
        val xLow: Int = config.getInt("xLow", 400)
        val xHigh: Int = config.getInt("xHigh", 700)
        val upper: Int = config.getInt("upper", 3100)
@@ -209,7 +210,8 @@
        val norm = norm(spectrum, xLow, upper)

        return ValueMap.ofPairs(
-               "U" to voltage,
+               "index" to point.index,
+               "U" to point.voltage,
                "a" to a,
                "beta" to beta,
                "delta" to delta,
@@ -217,10 +219,10 @@
        )
    }

-    fun calculateSubThreshold(spectrum: Table, voltage: Double, config: Meta): Values {
+    fun calculateSubThreshold(point: NumassPoint, spectrum: Table, config: Meta): Values {
        return when (config.getString("method", "exp")) {
-           "exp" -> exponential(spectrum, voltage, config)
-           "pow" -> power(spectrum, voltage, config)
+           "exp" -> exponential(point, spectrum, config)
+           "pow" -> power(point, spectrum, config)
            else -> throw RuntimeException("Unknown sub threshold calculation method")
        }
    }
@@ -244,8 +246,8 @@
                }
            }
            LoggerFactory.getLogger(Threshold.javaClass).info("Calculating threshold ${point.voltage}")
-           try {
-               calculateSubThreshold(spectrum, point.voltage, config)
+           return@map try {
+               calculateSubThreshold(point, spectrum, config)
            } catch (ex: Exception) {
                LoggerFactory.getLogger(Threshold.javaClass).error("Failed to fit point ${point.voltage}", ex)
                null
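calculateSubThreshold now takes the NumassPoint itself, so the fit result carries both the point index and the voltage ("index" and "U"). A hedged call sketch (the wrapper function, the spectrum and config, and the Threshold import path are all assumptions made for illustration):

import hep.dataforge.meta.Meta
import hep.dataforge.tables.Table
import hep.dataforge.values.Values
import inr.numass.data.api.NumassPoint
import inr.numass.subthreshold.Threshold // package assumed

// Sketch only: the spectrum table and fit config come from elsewhere in the task.
fun fitPoint(point: NumassPoint, spectrum: Table, config: Meta): Values =
    Threshold.calculateSubThreshold(point, spectrum, config) // result now includes "index" and "U"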

View File

@@ -107,6 +107,7 @@ val monitorTableTask = task("monitor") {
        configure(meta.getMetaOrEmpty("monitor"))
        configure {
            meta.useMeta("analyzer") { putNode(it) }
+           setValue("@target", meta.getString("@target", meta.name))
        }
    }
    join<NumassSet, Table> { data ->
@@ -130,10 +131,11 @@
                "yAxis.title" to "Count rate"
                "yAxis.units" to "Hz"
            }
            ((context.output["numass.monitor", name] as? PlotOutput)?.frame as? JFreeChartFrame)?.addSetMarkers(data.values)
        }

-       context.output["numass.monitor", name].render(NumassUtils.wrap(res, meta))
+       context.output.render(res, stage = "numass.monitor", name = name, meta = meta)

        return@join res;
    }
@@ -157,7 +159,7 @@ val mergeEmptyTask = task("empty") {
            .removeNode("data")
            .removeNode("empty")
            .setNode("data", meta.getMeta("empty"))
-           .setValue("merge.$MERGE_NAME", meta.getString("merge.$MERGE_NAME", "") + "_empty");
+           .setValue("merge.$MERGE_NAME", meta.getString("merge.$MERGE_NAME", "") + "_empty")
        dependsOn(mergeTask, newMeta)
    }
    transform<Table> { data ->

View File

@@ -34,7 +34,7 @@ val subThresholdTask = task("threshold") {
        }
    }
    join<NumassSet, Table> { data ->
-       val sum = NumassDataUtils.join(name, data.values)
+       val sum = NumassDataUtils.joinByIndex(name, data.values)

        val correctionTable = Threshold.calculateSubThreshold(sum, meta).filter {
            it.getDouble("correction") in (1.0..1.2)