Separated Input and Output management. Input moved to context

Alexander Nozik 2018-05-20 10:27:43 +03:00
parent a489d33548
commit 86f7af61d3
3 changed files with 40 additions and 17 deletions


@@ -22,14 +22,12 @@ import hep.dataforge.meta.Meta
import hep.dataforge.tables.Adapters.*
import hep.dataforge.tables.TableFormat
import hep.dataforge.tables.TableFormatBuilder
-import hep.dataforge.values.Value
-import hep.dataforge.values.ValueMap
-import hep.dataforge.values.ValueType
-import hep.dataforge.values.Values
+import hep.dataforge.values.*
import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_KEY
import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_ERROR_KEY
import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_KEY
import inr.numass.data.analyzers.NumassAnalyzer.Companion.LENGTH_KEY
+import inr.numass.data.analyzers.TimeAnalyzer.AveragingMethod.*
import inr.numass.data.api.*
import inr.numass.data.api.NumassPoint.Companion.HV_KEY
import java.util.*
@@ -65,7 +63,7 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
val res = getEventsWithDelay(block, config).asSequence().chunked(chunkSize) {
analyzeSequence(it.asSequence(), t0)
-}.toList().average()
+}.toList().mean(config.getEnum("mean", WEIGHTED))
return ValueMap.Builder(res)
.putValue(NumassAnalyzer.WINDOW_KEY, arrayOf(loChannel, upChannel))
@@ -104,7 +102,7 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
val res = point.blocks.stream()
.map { it -> analyze(it, config) }
.toList()
-.average()
+.mean(config.getEnum("mean", WEIGHTED))
val map = HashMap(res.asMap())
if (point is NumassPoint) {
@@ -113,16 +111,36 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
return ValueMap(map)
}
+enum class AveragingMethod {
+ARITHMETIC,
+WEIGHTED,
+GEOMETRIC
+}
/**
* Combine multiple blocks from the same point into one
*
* @return
*/
-private fun List<Values>.average(): Values {
+private fun List<Values>.mean(method: AveragingMethod): Values {
val totalTime = sumByDouble { it.getDouble(LENGTH_KEY) }
-val countRate = sumByDouble { it.getDouble(COUNT_RATE_KEY) * it.getDouble(LENGTH_KEY) } / totalTime
-val countRateDispersion = sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY) * it.getDouble(LENGTH_KEY) / totalTime, 2.0) }
+val (countRate, countRateDispersion) = when (method) {
+ARITHMETIC -> Pair(
+sumByDouble { it.getDouble(COUNT_RATE_KEY) } / size,
+sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY), 2.0) } / size / size
+)
+WEIGHTED -> Pair(
+sumByDouble { it.getDouble(COUNT_RATE_KEY) * it.getDouble(LENGTH_KEY) } / totalTime,
+sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY) * it.getDouble(LENGTH_KEY) / totalTime, 2.0) }
+)
+GEOMETRIC -> {
+val mean = Math.exp(sumByDouble { Math.log(it.getDouble(COUNT_RATE_KEY)) } / size)
+val variance = Math.pow(mean / size, 2.0) * sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY) / it.getDouble(COUNT_RATE_KEY), 2.0) }
+Pair(mean, variance)
+}
+}
return ValueMap.Builder(first())
.putValue(LENGTH_KEY, totalTime)
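
Note on the new averaging modes: ARITHMETIC weights all blocks equally, WEIGHTED weights each block's count rate by its length, and GEOMETRIC averages the logarithms of the rates with first-order propagation of the relative errors. Below is a minimal standalone sketch of the same arithmetic, assuming plain doubles instead of dataforge Values; the Block class and combine function are illustrative stand-ins and are not part of this commit.

```kotlin
import kotlin.math.exp
import kotlin.math.ln
import kotlin.math.pow
import kotlin.math.sqrt

// Hypothetical stand-in for one analyzed block: count rate, its error and block length.
data class Block(val countRate: Double, val countRateError: Double, val length: Double)

enum class AveragingMethod { ARITHMETIC, WEIGHTED, GEOMETRIC }

/** Combine per-block rates into a single rate and error, mirroring the logic above. */
fun combine(blocks: List<Block>, method: AveragingMethod): Pair<Double, Double> {
    val totalTime = blocks.sumOf { it.length }
    val (rate, dispersion) = when (method) {
        // Equal weights: mean of rates, variance of the mean = sum(sigma_i^2) / n^2.
        AveragingMethod.ARITHMETIC -> Pair(
            blocks.sumOf { it.countRate } / blocks.size,
            blocks.sumOf { it.countRateError.pow(2) } / blocks.size / blocks.size
        )
        // Weights proportional to block length (live time).
        AveragingMethod.WEIGHTED -> Pair(
            blocks.sumOf { it.countRate * it.length } / totalTime,
            blocks.sumOf { (it.countRateError * it.length / totalTime).pow(2) }
        )
        // Geometric mean with first-order propagation of relative errors.
        AveragingMethod.GEOMETRIC -> {
            val mean = exp(blocks.sumOf { ln(it.countRate) } / blocks.size)
            val variance = (mean / blocks.size).pow(2) *
                    blocks.sumOf { (it.countRateError / it.countRate).pow(2) }
            Pair(mean, variance)
        }
    }
    return Pair(rate, sqrt(dispersion))
}

fun main() {
    val blocks = listOf(Block(10.0, 0.3, 100.0), Block(12.0, 0.4, 50.0))
    for (m in AveragingMethod.values()) {
        val (rate, error) = combine(blocks, m)
        println("$m: rate = $rate +/- $error")
    }
}
```

For a single block all three modes give the same rate and error; they only diverge when several blocks with different lengths or rates are combined.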


@@ -9,11 +9,13 @@ import hep.dataforge.kodex.join
import hep.dataforge.maths.chain.MarkovChain
import inr.numass.NumassPlugin
import inr.numass.actions.TimeAnalyzerAction
+import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.OrphanNumassEvent
import inr.numass.data.api.SimpleNumassPoint
import inr.numass.data.generateBlock
import org.apache.commons.math3.random.JDKRandomGenerator
import org.apache.commons.math3.random.RandomGenerator
+import java.lang.Math.exp
import java.time.Instant
fun main(args: Array<String>) {
@@ -42,8 +44,8 @@ fun main(args: Array<String>) {
val point = (1..num).map {
Global.generate {
MarkovChain(OrphanNumassEvent(1000, 0)) { event ->
-//val deltaT = rnd.nextDeltaTime(cr * exp(- event.timeOffset / 1e11))
-val deltaT = rnd.nextDeltaTime(cr)
+val deltaT = rnd.nextDeltaTime(cr * exp(- event.timeOffset.toDouble() / 5e10))
+//val deltaT = rnd.nextDeltaTime(cr)
OrphanNumassEvent(1000, event.timeOffset + deltaT)
}.generateBlock(start.plusNanos(it * length), length)
}
@@ -57,6 +59,7 @@ fun main(args: Array<String>) {
"binNum" to 200
"t0Step" to 200
"chunkSize" to 5000
"mean" to TimeAnalyzer.AveragingMethod.ARITHMETIC
}
TimeAnalyzerAction().simpleRun(point, meta);
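
Note on the generator change: the count rate now decays exponentially with the event time offset (time constant 5e10 ns, about 50 s) instead of staying constant. A standalone sketch of the same idea, assuming nextDeltaTime draws an exponentially distributed waiting time for the given rate (reimplemented here with kotlin.random; the rate, decay constant and block length below are illustrative values, not taken from the test):

```kotlin
import kotlin.math.exp
import kotlin.math.ln
import kotlin.random.Random

/** Exponential waiting time (in nanoseconds) for a Poisson process with rate cr events/s. */
fun Random.nextDeltaTimeNanos(cr: Double): Long =
    (-ln(1.0 - nextDouble()) / cr * 1e9).toLong()

fun main() {
    val rnd = Random(2223)
    val cr = 30e3                    // illustrative initial count rate, events per second
    val tau = 5e10                   // decay constant in nanoseconds, as in the hunk above
    val lengthNanos = 5e9.toLong()   // illustrative block length: 5 s

    var timeOffset = 0L
    var count = 0
    while (timeOffset < lengthNanos) {
        // The rate is re-evaluated at the current offset, so later events arrive less often.
        val rate = cr * exp(-timeOffset.toDouble() / tau)
        timeOffset += rnd.nextDeltaTimeNanos(rate)
        count++
    }
    println("Generated $count events in ${lengthNanos / 1e9} s")
}
```

Re-evaluating the rate at each step stretches the later inter-event gaps, which is what produces the slowly falling count rate in the generated block.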


@@ -8,6 +8,8 @@ import hep.dataforge.description.ValueDef
import hep.dataforge.description.ValueDefs
import hep.dataforge.fx.plots.FXPlotManager
import hep.dataforge.fx.plots.plus
+import hep.dataforge.io.output.Output.Companion.BINARY_MODE
+import hep.dataforge.io.output.stream
import hep.dataforge.kodex.buildMeta
import hep.dataforge.kodex.configure
import hep.dataforge.kodex.task
@@ -73,8 +75,8 @@ val monitorTableTask = task("monitor") {
model { meta ->
dependsOn(selectTask, meta)
configure(meta.getMetaOrEmpty("monitor"))
-configure{
-meta.useMeta("analyzer"){putNode(it)}
+configure {
+meta.useMeta("analyzer") { putNode(it) }
}
}
join<NumassSet, Table> { data ->
@@ -105,7 +107,7 @@ val monitorTableTask = task("monitor") {
//add set markers
addSetMarkers(frame, data.values)
}
-context.output.get(name, stage = "numass.monitor", type = "dfp").render(PlotFrame.Wrapper().wrap(frame))
+context.output.get(name, "numass.monitor", BINARY_MODE).render(PlotFrame.Wrapper().wrap(frame))
}
}
@@ -221,7 +223,7 @@ val fitTask = task("fit") {
configure(meta.getMeta("fit"))
}
pipe<Table, FitResult> { data ->
-context.output.stream(name, "numass.fit").use { out ->
+context.output[name, "numass.fit"].stream.use { out ->
val writer = PrintWriter(out)
writer.printf("%n*** META ***%n")
writer.println(meta.toString())
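
Note on the output change in the last hunk: a dedicated stream(name, stage) helper is replaced by indexed access to an output followed by its stream property, while the report itself is still written through a PrintWriter. A minimal sketch of that report-writing pattern, with a file OutputStream standing in for the dataforge output (the dataforge Output API itself is not modeled here):

```kotlin
import java.io.PrintWriter
import java.nio.file.Files
import java.nio.file.Paths

fun main() {
    // Stand-in for the stream obtained from the output registry: any OutputStream works.
    val out = Files.newOutputStream(Paths.get("fit-report.txt"))
    out.use { stream ->
        // PrintWriter gives printf-style formatting; flush before the stream is closed.
        val writer = PrintWriter(stream)
        writer.printf("%n*** META ***%n")
        writer.println("analyzer = TimeAnalyzer, mean = WEIGHTED")
        writer.flush()
    }
    println("Report written to fit-report.txt")
}
```

Flushing the writer inside the use block ensures the formatted text reaches the underlying stream before it is closed.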