Fixed error in numass file read
This commit is contained in:
parent 5b170da7d4
commit 28b7969709
@@ -1,22 +1,15 @@
package inr.numass.data.api

import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.meta.MetaHolder
import hep.dataforge.meta.buildMeta

/**
 * A simple static implementation of NumassPoint
 * Created by darksnake on 08.07.2017.
 */
class SimpleNumassPoint(override val blocks: List<NumassBlock>, meta: Meta, override val isSequential: Boolean = true) : MetaHolder(meta), NumassPoint {

    /**
     * Input blocks must be sorted
     * @param voltage
     * @param blocks
     */
    constructor(blocks: Collection<NumassBlock>, voltage: Double) :
            this(blocks.sortedBy { it.startTime }, MetaBuilder("point").setValue(NumassPoint.HV_KEY, voltage))

class SimpleNumassPoint(override val blocks: List<NumassBlock>, meta: Meta, override val isSequential: Boolean = true) :
        MetaHolder(meta), NumassPoint {

    init {
        if (blocks.isEmpty()) {
@@ -24,4 +17,14 @@ class SimpleNumassPoint(override val blocks: List<NumassBlock>, meta: Meta, over
        }
    }

    companion object {
        fun build(blocks: Collection<NumassBlock>, voltage: Double? = null, index: Int? = null): SimpleNumassPoint {
            val meta = buildMeta("point") {
                NumassPoint.HV_KEY to voltage
                NumassPoint.INDEX_KEY to index
            }
            return SimpleNumassPoint(blocks.sortedBy { it.startTime }, meta.build())
        }
    }
}
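The voltage-only secondary constructor is replaced by the build() factory above. A minimal usage sketch (the helper name pointFromBlocks and the assumption that the blocks were already read from storage are illustrative, not part of the commit):

    import inr.numass.data.api.NumassBlock
    import inr.numass.data.api.SimpleNumassPoint

    // Blocks are sorted by start time inside build(); voltage and index are optional
    // and end up in the point meta under NumassPoint.HV_KEY / NumassPoint.INDEX_KEY.
    fun pointFromBlocks(blocks: List<NumassBlock>, voltage: Double, index: Int): SimpleNumassPoint =
        SimpleNumassPoint.build(blocks, voltage = voltage, index = index)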
@@ -21,9 +21,7 @@ import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import inr.numass.data.api.*
import inr.numass.data.storage.ClassicNumassPoint
import java.util.stream.Collectors
import kotlin.streams.asSequence
import kotlin.streams.toList

/**
@@ -35,10 +33,8 @@ object NumassDataUtils {
        override suspend fun getHvData() = TODO()

        override val points: List<NumassPoint> by lazy {
            val points = sets.stream().flatMap<NumassPoint> { it.points.stream() }
                    .collect(Collectors.groupingBy<NumassPoint, Double> { it.voltage })
            points.entries.stream().map { entry -> SimpleNumassPoint(entry.value, entry.key) }
                    .toList()
            val points = sets.flatMap { it.points }.groupBy { it.voltage }
            return@lazy points.entries.map { entry -> SimpleNumassPoint.build(entry.value, entry.key) }
        }

        override val meta: Meta by lazy {
@@ -51,6 +47,30 @@
        }
    }

    fun joinByIndex(setName: String, sets: Collection<NumassSet>): NumassSet {
        return object : NumassSet {
            override suspend fun getHvData() = TODO()

            override val points: List<NumassPoint> by lazy {
                val points = sets.flatMap { it.points }.groupBy { it.index }
                return@lazy points.map { (index, points) ->
                    val voltage = points.first().voltage
                    if (!points.all { it.voltage == voltage }) error("Not all points with same index have same voltage")
                    SimpleNumassPoint.build(points, voltage, index)
                }
            }

            override val meta: Meta by lazy {
                val metaBuilder = MetaBuilder()
                sets.forEach { set -> metaBuilder.putNode(set.name, set.meta) }
                metaBuilder
            }

            override val name = setName
        }
    }

    fun adapter(): SpectrumAdapter {
        return SpectrumAdapter("Uset", "CR", "CRerr", "Time")
    }
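A short usage sketch for the two combinators (the import path of NumassDataUtils and the set name "sum" are assumed for illustration): join groups the points of several sets by HV value, while the new joinByIndex groups them by point index and fails if points sharing an index disagree on voltage.

    import inr.numass.data.NumassDataUtils
    import inr.numass.data.api.NumassSet

    fun mergeSets(sets: Collection<NumassSet>): Pair<NumassSet, NumassSet> {
        val byVoltage = NumassDataUtils.join("sum", sets)       // one combined point per voltage
        val byIndex = NumassDataUtils.joinByIndex("sum", sets)  // one combined point per point index
        return byVoltage to byIndex
    }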
@@ -22,6 +22,7 @@ import hep.dataforge.tables.ListTable
import hep.dataforge.tables.Table
import hep.dataforge.tables.TableFormat
import hep.dataforge.tables.TableFormatBuilder
import hep.dataforge.toList
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassEvent
import inr.numass.data.api.NumassPoint.Companion.HV_KEY
@@ -43,7 +44,7 @@ abstract class AbstractAnalyzer @JvmOverloads constructor(private val processor:
     * @param block
     * @return
     */
    override fun getEvents(block: NumassBlock, meta: Meta): Stream<NumassEvent> {
    override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
        val loChannel = meta.getInt("window.lo", 0)
        val upChannel = meta.getInt("window.up", Integer.MAX_VALUE)
//        if (meta.getBoolean("sort", false)) {
@@ -51,7 +52,7 @@ abstract class AbstractAnalyzer @JvmOverloads constructor(private val processor:
//        }
        return getAllEvents(block).filter { event ->
            event.amplitude.toInt() in loChannel..(upChannel - 1)
        }
        }.toList()
    }

    protected fun getAllEvents(block: NumassBlock): Stream<NumassEvent> {
@@ -28,7 +28,6 @@ import java.util.*
import java.util.concurrent.atomic.AtomicLong
import java.util.concurrent.atomic.AtomicReference
import java.util.stream.IntStream
import java.util.stream.Stream
import kotlin.streams.asSequence

/**
@@ -67,7 +66,7 @@ interface NumassAnalyzer {
     * @param block
     * @return
     */
    fun getEvents(block: NumassBlock, meta: Meta = Meta.empty()): Stream<NumassEvent>
    fun getEvents(block: NumassBlock, meta: Meta = Meta.empty()): List<NumassEvent>

    /**
     * Analyze the whole set. And return results as a table
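Since getEvents now returns a List rather than a java.util.stream.Stream, callers can use ordinary Kotlin collection operations. A minimal sketch (the mean-amplitude helper is illustrative; TimeAnalyzer and NumassBlock come from the modules touched by this commit):

    import hep.dataforge.meta.Meta
    import inr.numass.data.analyzers.TimeAnalyzer
    import inr.numass.data.api.NumassBlock

    fun meanAmplitude(block: NumassBlock, meta: Meta = Meta.empty()): Double {
        val events = TimeAnalyzer().getEvents(block, meta)   // List<NumassEvent> after this change
        return events.map { it.amplitude.toDouble() }.average()
    }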
@@ -24,7 +24,6 @@ import hep.dataforge.values.Values
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassEvent
import inr.numass.data.api.SignalProcessor
import java.util.stream.Stream

/**
 * An analyzer dispatcher which uses different analyzer for different meta
@@ -59,7 +58,7 @@ class SmartAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proce
        return ValueMap(map)
    }

    override fun getEvents(block: NumassBlock, meta: Meta): Stream<NumassEvent> {
    override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
        return getAnalyzer(meta).getEvents(block, meta)
    }
@@ -32,10 +32,8 @@ import inr.numass.data.api.*
import inr.numass.data.api.NumassPoint.Companion.HV_KEY
import java.util.*
import java.util.concurrent.atomic.AtomicLong
import java.util.stream.Stream
import kotlin.math.sqrt
import kotlin.streams.asSequence
import kotlin.streams.asStream


/**
@@ -43,8 +41,17 @@ import kotlin.streams.asStream
 * Created by darksnake on 11.07.2017.
 */
@ValueDefs(
        ValueDef(key = "separateParallelBlocks", type = [ValueType.BOOLEAN], info = "If true, then parallel blocks will be forced to be evaluated separately"),
        ValueDef(key = "chunkSize", type = [ValueType.NUMBER], def = "-1", info = "The number of events in chunk to split the chain into. If negative, no chunks are used")
    ValueDef(
        key = "separateParallelBlocks",
        type = [ValueType.BOOLEAN],
        info = "If true, then parallel blocks will be forced to be evaluated separately"
    ),
    ValueDef(
        key = "chunkSize",
        type = [ValueType.NUMBER],
        def = "-1",
        info = "The number of events in chunk to split the chain into. If negative, no chunks are used"
    )
)
class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(processor) {
@@ -60,8 +67,8 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces

        val chunkSize = config.getInt("chunkSize", -1)

        val count = block.events.count()
        val length = block.length.toMillis().toDouble() / 1e3
        val count = super.getEvents(block, config).count()
        val length = block.length.toNanos().toDouble() / 1e9

        val res = when {
            count < 1000 -> ValueMap.ofPairs(
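Two behavioural points in this hunk: the event count is now taken from super.getEvents, so it respects the amplitude window from the config, and the block length is derived from nanoseconds instead of truncated milliseconds. Both expressions yield seconds; the nanosecond form simply keeps sub-millisecond precision, e.g.:

    import java.time.Duration

    fun main() {
        val length = Duration.ofNanos(1_234_567_890)
        println(length.toMillis().toDouble() / 1e3) // 1.234       (millisecond truncation)
        println(length.toNanos().toDouble() / 1e9)  // 1.23456789  (full precision)
    }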
@@ -78,6 +85,7 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
        }

        return ValueMap.Builder(res)
                .putValue("blockLength", length)
                .putValue(NumassAnalyzer.WINDOW_KEY, arrayOf(loChannel, upChannel))
                .putValue(NumassAnalyzer.TIME_KEY, block.startTime)
                .putValue(T0_KEY, t0.toDouble() / 1000.0)
@@ -99,7 +107,8 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
            error("Zero number of intervals")
        }

        val countRate = 1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000)//1e9 / (totalT.get() / totalN.get() - t0);
        val countRate =
            1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000)//1e9 / (totalT.get() / totalN.get() - t0);
        val countRateError = countRate / Math.sqrt(totalN.get().toDouble())
        val length = totalT.get() / 1e9
        val count = (length * countRate).toLong()
@@ -159,7 +168,10 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
            )
            GEOMETRIC -> {
                val mean = Math.exp(sumByDouble { Math.log(it.getDouble(COUNT_RATE_KEY)) } / size)
                val variance = Math.pow(mean / size, 2.0) * sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY) / it.getDouble(COUNT_RATE_KEY), 2.0) }
                val variance = Math.pow(
                    mean / size,
                    2.0
                ) * sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY) / it.getDouble(COUNT_RATE_KEY), 2.0) }
                Pair(mean, variance)
            }
        }
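For reference, the GEOMETRIC branch matches standard error propagation for the geometric mean of the per-chunk count rates (this is a reading of the code above, not text from the commit):

$\bar r = \exp\Big(\frac{1}{n}\sum_{i=1}^{n}\ln r_i\Big), \qquad \frac{\partial \bar r}{\partial r_i} = \frac{\bar r}{n\,r_i} \;\Rightarrow\; \sigma_{\bar r}^2 \approx \sum_{i=1}^{n}\Big(\frac{\partial \bar r}{\partial r_i}\Big)^2 \sigma_i^2 = \Big(\frac{\bar r}{n}\Big)^2 \sum_{i=1}^{n}\Big(\frac{\sigma_i}{r_i}\Big)^2$

where $r_i$ is the chunk COUNT_RATE_KEY and $\sigma_i$ its COUNT_RATE_ERROR_KEY, corresponding to mean and variance in the code.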
@@ -174,7 +186,11 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces

    @ValueDefs(
            ValueDef(key = "t0", type = arrayOf(ValueType.NUMBER), info = "Constant t0 cut"),
            ValueDef(key = "t0.crFraction", type = arrayOf(ValueType.NUMBER), info = "The relative fraction of events that should be removed by time cut"),
        ValueDef(
            key = "t0.crFraction",
            type = arrayOf(ValueType.NUMBER),
            info = "The relative fraction of events that should be removed by time cut"
        ),
        ValueDef(key = "t0.min", type = arrayOf(ValueType.NUMBER), def = "0", info = "Minimal t0")
    )
    private fun getT0(block: NumassBlock, meta: Meta): Int {
@@ -211,12 +227,11 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
     */
    fun getEventsWithDelay(block: NumassBlock, config: Meta): Sequence<Pair<NumassEvent, Long>> {
        val inverted = config.getBoolean("inverted", true)
        val events: Stream<NumassEvent> = super.getEvents(block, config).let { stream ->
        val events = super.getEvents(block, config).toMutableList()

        if (block is ParentBlock && !block.isSequential) {
            stream.sorted(compareBy { it.timeOffset })
        } else {
            stream
        }
            //sort in place if needed
            events.sortBy { it.timeOffset }
        }

        return events.asSequence().zipWithNext { prev, next ->
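The delay computation itself boils down to sorting by timeOffset where needed and pairing neighbours with zipWithNext. A stripped-down sketch of that pattern (it ignores the inverted flag and always sorts, unlike the method above):

    import inr.numass.data.api.NumassEvent

    // Pair each event with the delay to the next one, in the units of timeOffset.
    fun delays(events: List<NumassEvent>): Sequence<Pair<NumassEvent, Long>> =
        events.sortedBy { it.timeOffset }
            .asSequence()
            .zipWithNext { prev, next -> prev to (next.timeOffset - prev.timeOffset) }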
@@ -236,9 +251,9 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
     * @param meta
     * @return
     */
    override fun getEvents(block: NumassBlock, meta: Meta): Stream<NumassEvent> {
    override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
        val t0 = getT0(block, meta).toLong()
        return getEventsWithDelay(block, meta).filter { pair -> pair.second >= t0 }.asStream().map { it.first }
        return getEventsWithDelay(block, meta).filter { pair -> pair.second >= t0 }.map { it.first }.toList()
    }

    public override fun getTableFormat(config: Meta): TableFormat {
@@ -15,7 +15,7 @@ import java.nio.file.Path
import java.time.Duration
import java.time.Instant
import java.util.stream.Stream
import kotlin.streams.asStream
import java.util.stream.StreamSupport

/**
 * Created by darksnake on 08.07.2017.
@@ -48,10 +48,11 @@ class ClassicNumassPoint(private val envelope: Envelope) : NumassPoint {
    //TODO split blocks using meta
    private inner class ClassicBlock(
            override val startTime: Instant,
            override val length: Duration) : NumassBlock, Iterable<NumassEvent> {
            override val length: Duration
    ) : NumassBlock, Iterable<NumassEvent> {

        override val events: Stream<NumassEvent>
            get() = this.asSequence().asStream()
            get() = StreamSupport.stream(this.spliterator(), false)

        override fun iterator(): Iterator<NumassEvent> {
            val timeCoef = envelope.meta.getDouble("time_coeff", 50.0)
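events is now built by wrapping the block's own spliterator instead of going through a Kotlin sequence. The general Iterable-to-Stream pattern, sketched here outside the numass classes:

    import java.util.stream.Stream
    import java.util.stream.StreamSupport

    // Convert any Iterable into a sequential java.util.stream.Stream without an
    // intermediate Kotlin Sequence; ClassicBlock.events uses exactly this call.
    fun <T> Iterable<T>.toJavaStream(): Stream<T> =
        StreamSupport.stream(this.spliterator(), false)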
@@ -78,7 +79,8 @@ class ClassicNumassPoint(private val envelope: Envelope) : NumassPoint {
                    }
                }
            } catch (e: IOException) {
                LoggerFactory.getLogger(this@ClassicNumassPoint.javaClass).error("Unexpected IOException when reading block", e)
                LoggerFactory.getLogger(this@ClassicNumassPoint.javaClass)
                    .error("Unexpected IOException when reading block", e)
                return false
            }
@@ -32,7 +32,7 @@ fun main(args: Array<String>) {
    }


    val point = SimpleNumassPoint(blocks, 10000.0)
    val point = SimpleNumassPoint.build(blocks, 10000.0)

    val meta = buildMeta {
        "t0" to 1e7
@@ -25,10 +25,9 @@ import inr.numass.data.api.NumassEvent
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassDirectory
import org.apache.commons.math3.stat.correlation.PearsonsCorrelation
import java.util.stream.Stream


private fun correlation(sequence: Stream<NumassEvent>): Double {
private fun correlation(sequence: List<NumassEvent>): Double {
    val amplitudes: MutableList<Double> = ArrayList()
    val times: MutableList<Double> = ArrayList()
    sequence.forEach {
@@ -55,7 +55,7 @@ fun main(args: Array<String>) {
            if (!points.isEmpty()) {
                putStatic(
                    "point_${hv.toInt()}",
                    SimpleNumassPoint(points, hv)
                    SimpleNumassPoint.build(points, hv)
                )
            }
        }
@@ -62,10 +62,7 @@ fun main(args: Array<String>) {
                it.voltage == hv && it.channel == 0
            }.toList()
            if (!points.isEmpty()) {
                putStatic(
                    "point_${hv.toInt()}",
                    SimpleNumassPoint(points, hv)
                )
                putStatic("point_${hv.toInt()}", SimpleNumassPoint.build(points, hv))
            }
        }
    }.build()
@@ -39,7 +39,7 @@ fun main(args: Array<String>) {
                .generateBlock(start.plusNanos(it * length), length)
        }
    }.join(Global) { blocks ->
        SimpleNumassPoint(blocks, 12000.0)
        SimpleNumassPoint.build(blocks, 12000.0)
    }.get()

@@ -59,7 +59,7 @@ fun main(args: Array<String>) {
                .generateBlock(start.plusNanos(it * length), length)
        }
    }.join(Global) { blocks ->
        SimpleNumassPoint(blocks, 18000.0)
        SimpleNumassPoint.build(blocks, 18000.0)
    }.get()

@@ -0,0 +1,90 @@
package inr.numass.scripts.timeanalysis

import hep.dataforge.buildContext
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.meta.buildMeta
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import inr.numass.NumassPlugin
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassDirectory


fun main(args: Array<String>) {
    val context = buildContext("NUMASS", NumassPlugin::class.java, JFreeChartPlugin::class.java) {
        output = FXOutputManager()
        rootDir = "D:\\Work\\Numass\\sterile2017_05"
        dataDir = "D:\\Work\\Numass\\data\\2017_05"
    }

    val storage = NumassDirectory.read(context, "Fill_3")!!

    val loader = storage.provide("set_10", NumassSet::class.java).get()

    val point = loader.getPoints(18050.00).first()

    val analyzer = TimeAnalyzer()

    val meta = buildMeta("analyzer") {
        "t0" to 3000
        "inverted" to false
        //"chunkSize" to 5000
        //"mean" to TimeAnalyzer.AveragingMethod.ARITHMETIC
    }

    println(analyzer.analyze(point, meta))

    println(analyzer.getEventsWithDelay(point.firstBlock, meta ).count())
    println(point.events.count())
    println(point.firstBlock.events.count())

//    val time = point.events.asSequence().zipWithNext().map { (p, n) ->
//        n.timeOffset - p.timeOffset
//    }.filter { it > 0 }.sum()

    val time = analyzer.getEventsWithDelay(point.firstBlock, meta ).map { it.second }.filter { it > 0 }.sum()



//    val totalN = AtomicLong(0)
//    val totalT = AtomicLong(0)
//
//    analyzer.getEventsWithDelay(point.firstBlock, meta ).filter { pair -> pair.second >= 3000 }
//            .forEach { pair ->
//                totalN.incrementAndGet()
//                //TODO add progress listener here
//                totalT.addAndGet(pair.second)
//            }
//
//    val time = totalT.get()

    println(time / 1e9)

//
//    val cr = 80.0
//    val length = 5e9.toLong()
//    val num = 6
//    val dt = 6.5
//
//    val start = Instant.now()
//
//    val generator = SynchronizedRandomGenerator(JDKRandomGenerator(2223))
//
//    repeat(100) {
//
//        val point = (1..num).map {
//            Global.generate {
//                NumassGenerator
//                        .generateEvents(cr , rnd = generator)
////                        .withDeadTime { (dt * 1000).toLong() }
//                        .generateBlock(start.plusNanos(it * length), length)
//            }
//        }.join(Global) { blocks ->
//            SimpleNumassPoint.build(blocks, 12000.0)
//        }.get()
//
//
//        println(analyzer.analyze(point, meta))
//
//    }
}
@@ -63,7 +63,7 @@ object Threshold {
                .flatMap { set -> set.points.asSequence() }
                .groupBy { it.voltage }
                .forEach { key, value ->
                    val point = SimpleNumassPoint(value, key)
                    val point = SimpleNumassPoint.build(value, key)
                    val name = key.toInt().toString()
                    dataBuilder.putStatic(name, point, buildMeta("meta", "voltage" to key));
                }
@@ -139,7 +139,7 @@ object Threshold {
    /**
     * Exponential function $a e^{\frac{x}{\sigma}}$
     */
    private fun exponential(spectrum: Table, voltage: Double, config: Meta): Values {
    private fun exponential(point: NumassPoint, spectrum: Table, config: Meta): Values {
        val xLow: Int = config.getInt("xLow", 400)
        val xHigh: Int = config.getInt("xHigh", 700)
        val upper: Int = config.getInt("upper", 3100)
@@ -152,7 +152,8 @@ object Threshold {
        val norm = norm(spectrum, xLow, upper)

        return ValueMap.ofPairs(
            "U" to voltage,
            "index" to point.index,
            "U" to point.voltage,
            "a" to a,
            "sigma" to sigma,
            "correction" to a * sigma * Math.exp(xLow / sigma) / norm + 1.0
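The correction value appears to be the extrapolated fraction of counts hidden below the threshold: integrating the fitted tail $a e^{x/\sigma}$ up to $x_{\mathrm{low}}$ gives $\int_{-\infty}^{x_{\mathrm{low}}} a e^{x/\sigma}\,dx = a\sigma e^{x_{\mathrm{low}}/\sigma}$, and dividing by norm (the counts between $x_{\mathrm{low}}$ and upper) and adding one yields the multiplicative factor, correction $= a\sigma e^{x_{\mathrm{low}}/\sigma}/\mathrm{norm} + 1$ (an interpretation of the expression above, not a statement from the commit).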
@@ -192,7 +193,7 @@ object Threshold {
    /**
     * Power function $a (x-\delta)^{\beta}
     */
    private fun power(spectrum: Table, voltage: Double, config: Meta): Values {
    private fun power(point: NumassPoint, spectrum: Table, config: Meta): Values {
        val xLow: Int = config.getInt("xLow", 400)
        val xHigh: Int = config.getInt("xHigh", 700)
        val upper: Int = config.getInt("upper", 3100)
@@ -209,7 +210,8 @@ object Threshold {
        val norm = norm(spectrum, xLow, upper)

        return ValueMap.ofPairs(
            "U" to voltage,
            "index" to point.index,
            "U" to point.voltage,
            "a" to a,
            "beta" to beta,
            "delta" to delta,
@@ -217,10 +219,10 @@ object Threshold {
        )
    }

    fun calculateSubThreshold(spectrum: Table, voltage: Double, config: Meta): Values {
    fun calculateSubThreshold(point: NumassPoint, spectrum: Table, config: Meta): Values {
        return when (config.getString("method", "exp")) {
            "exp" -> exponential(spectrum, voltage, config)
            "pow" -> power(spectrum, voltage, config)
            "exp" -> exponential(point, spectrum, config)
            "pow" -> power(point, spectrum, config)
            else -> throw RuntimeException("Unknown sub threshold calculation method")
        }
    }
@@ -244,8 +246,8 @@ object Threshold {
            }
        }
        LoggerFactory.getLogger(Threshold.javaClass).info("Calculating threshold ${point.voltage}")
        try {
            calculateSubThreshold(spectrum, point.voltage, config)
        return@map try {
            calculateSubThreshold(point, spectrum, config)
        } catch (ex: Exception) {
            LoggerFactory.getLogger(Threshold.javaClass).error("Failed to fit point ${point.voltage}", ex)
            null
@@ -107,6 +107,7 @@ val monitorTableTask = task("monitor") {
        configure(meta.getMetaOrEmpty("monitor"))
        configure {
            meta.useMeta("analyzer") { putNode(it) }
            setValue("@target", meta.getString("@target", meta.name))
        }
    }
    join<NumassSet, Table> { data ->
@@ -130,10 +131,11 @@
            "yAxis.title" to "Count rate"
            "yAxis.units" to "Hz"
        }

        ((context.output["numass.monitor", name] as? PlotOutput)?.frame as? JFreeChartFrame)?.addSetMarkers(data.values)
    }

    context.output["numass.monitor", name].render(NumassUtils.wrap(res, meta))
    context.output.render(res, stage = "numass.monitor", name = name, meta = meta)

    return@join res;
}
@@ -157,7 +159,7 @@ val mergeEmptyTask = task("empty") {
                .removeNode("data")
                .removeNode("empty")
                .setNode("data", meta.getMeta("empty"))
                .setValue("merge.$MERGE_NAME", meta.getString("merge.$MERGE_NAME", "") + "_empty");
                .setValue("merge.$MERGE_NAME", meta.getString("merge.$MERGE_NAME", "") + "_empty")
        dependsOn(mergeTask, newMeta)
    }
    transform<Table> { data ->
@@ -34,7 +34,7 @@ val subThresholdTask = task("threshold") {
        }
    }
    join<NumassSet, Table> { data ->
        val sum = NumassDataUtils.join(name, data.values)
        val sum = NumassDataUtils.joinByIndex(name, data.values)

        val correctionTable = Threshold.calculateSubThreshold(sum, meta).filter {
            it.getDouble("correction") in (1.0..1.2)