minor update

This commit is contained in:
Alexander Nozik 2018-06-10 22:29:01 +03:00
parent 1c3ce3ae2b
commit 0d4f6e3d7e
9 changed files with 253 additions and 167 deletions

View File

@@ -46,13 +46,12 @@ abstract class AbstractAnalyzer @JvmOverloads constructor(private val processor:
     override fun getEvents(block: NumassBlock, meta: Meta): Stream<NumassEvent> {
         val loChannel = meta.getInt("window.lo", 0)
         val upChannel = meta.getInt("window.up", Integer.MAX_VALUE)
-        var res = getAllEvents(block).filter { event ->
+//        if (meta.getBoolean("sort", false)) {
+//            res = res.sorted(compareBy { it.timeOffset })
+//        }
+        return getAllEvents(block).filter { event ->
             event.amplitude.toInt() in loChannel..(upChannel - 1)
         }
-        if (meta.getBoolean("sort", false)) {
-            res = res.sorted(compareBy { it.timeOffset })
-        }
-        return res
     }

     protected fun getAllEvents(block: NumassBlock): Stream<NumassEvent> {
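A minimal usage sketch (not part of the commit) of how the amplitude window is supplied to the simplified getEvents: the meta keys match the ones read above; TimeAnalyzer is used only as a convenient concrete analyzer (assuming it inherits getEvents from AbstractAnalyzer), and countInWindow together with the window boundaries is hypothetical.

import hep.dataforge.kodex.buildMeta
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassBlock

// Count the events of a block that fall into an amplitude window; the key names
// mirror the ones read in getEvents above, the boundaries are arbitrary examples.
fun countInWindow(block: NumassBlock, lo: Int, up: Int): Long {
    val meta = buildMeta {
        "window.lo" to lo
        "window.up" to up
    }
    return TimeAnalyzer().getEvents(block, meta).count()
}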

View File

@@ -22,7 +22,7 @@ import kotlin.streams.asStream
  * Plot time analysis graphics
  */
 @ValueDefs(
-        ValueDef(key = "normalize", type = arrayOf(ValueType.BOOLEAN), def = "true", info = "Normalize t0 dependencies"),
+        ValueDef(key = "normalize", type = arrayOf(ValueType.BOOLEAN), def = "false", info = "Normalize t0 dependencies"),
         ValueDef(key = "t0", type = arrayOf(ValueType.NUMBER), def = "30e3", info = "The default t0 in nanoseconds"),
         ValueDef(key = "window.lo", type = arrayOf(ValueType.NUMBER), def = "0", info = "Lower boundary for amplitude window"),
         ValueDef(key = "window.up", type = arrayOf(ValueType.NUMBER), def = "10000", info = "Upper boundary for amplitude window"),
@@ -40,13 +40,15 @@ class TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>() {
     override fun execute(context: Context, name: String, input: NumassPoint, inputMeta: Laminate): Table {
         val log = getLog(context, name);
-        val initialEstimate = analyzer.analyze(input, inputMeta)
-        val trueCR = initialEstimate.getDouble("cr")
-        log.report("The expected count rate for ${initialEstimate.getDouble(T0_KEY)} us delay is $trueCR")
+        val analyzerMeta = inputMeta.getMetaOrEmpty("analyzer")
+        val initialEstimate = analyzer.analyze(input, analyzerMeta)
+        val cr = initialEstimate.getDouble("cr")
+        log.report("The expected count rate for ${initialEstimate.getDouble(T0_KEY)} us delay is $cr")
         val binNum = inputMeta.getInt("binNum", 1000);
-        val binSize = inputMeta.getDouble("binSize", 1.0 / trueCR * 10 / binNum * 1e6)
+        val binSize = inputMeta.getDouble("binSize", 1.0 / cr * 10 / binNum * 1e6)
         val histogram = UnivariateHistogram.buildUniform(0.0, binSize * binNum, binSize)
                 .fill(analyzer
@@ -72,7 +74,7 @@ class TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>() {
         val functionPlot = XYFunctionPlot.plot(name + "_theory", 0.0, binSize * binNum) {
-            trueCR / 1e6 * initialEstimate.getInt(NumassAnalyzer.COUNT_KEY) * binSize * Math.exp(-it * trueCR / 1e6)
+            cr / 1e6 * initialEstimate.getInt(NumassAnalyzer.COUNT_KEY) * binSize * Math.exp(-it * cr / 1e6)
         }
         context.plot("histogram", name, listOf(histogramPlot, functionPlot)) {
@@ -106,15 +108,19 @@ class TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>() {
             }
         }
-        (1..100).map { inputMeta.getDouble("t0Step", 1000.0) * it }.map { t ->
-            val result = analyzer.analyze(input, inputMeta.builder.setValue("t0", t))
+        val minT0 = inputMeta.getDouble("t0.min", 0.0)
+        val maxT0 = inputMeta.getDouble("t0.max", 1e9 / cr)
+        val steps = inputMeta.getInt("t0.steps", 100)
-        val norm = if (inputMeta.getBoolean("normalize", true)) {
-            trueCR
+        val norm = if (inputMeta.getBoolean("normalize", false)) {
+            cr
         } else {
             1.0
         }
+        (0..steps).map { minT0 + (maxT0 - minT0) / steps * it }.map { t ->
+            val result = analyzer.analyze(input, analyzerMeta.builder.setValue("t0", t))
             if (Thread.currentThread().isInterrupted) {
                 throw InterruptedException()
             }
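Taken together, the action now reads its own options from the top level of the meta and forwards the "analyzer" node unchanged to analyzer.analyze. A configuration sketch with the key names taken from the code above; the values and the timeAnalysisMeta name are illustrative only.

import hep.dataforge.kodex.buildMeta

val timeAnalysisMeta = buildMeta {
    "analyzer" to {            // passed as-is to analyzer.analyze(...)
        "t0" to 3000           // default t0 in nanoseconds (see the ValueDef above)
    }
    "binNum" to 200            // number of histogram bins
    "t0.min" to 0.0            // start of the t0 scan, ns
    "t0.max" to 1e4            // end of the t0 scan, ns; defaults to 1e9 / cr
    "t0.steps" to 100          // number of scan points
    "normalize" to false       // normalization of the t0 dependence (see the ValueDef info)
}
// TimeAnalyzerAction().simpleRun(point, timeAnalysisMeta)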

View File

@@ -1,115 +0,0 @@
package inr.numass.data

import hep.dataforge.maths.chain.Chain
import hep.dataforge.maths.chain.MarkovChain
import hep.dataforge.maths.chain.StatefulChain
import hep.dataforge.stat.defaultGenerator
import hep.dataforge.tables.Table
import inr.numass.data.analyzers.NumassAnalyzer.Companion.CHANNEL_KEY
import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_KEY
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.OrphanNumassEvent
import inr.numass.data.api.SimpleBlock
import kotlinx.coroutines.experimental.channels.takeWhile
import kotlinx.coroutines.experimental.channels.toList
import org.apache.commons.math3.distribution.EnumeratedRealDistribution
import org.apache.commons.math3.random.RandomGenerator
import java.time.Duration
import java.time.Instant

private fun RandomGenerator.nextExp(mean: Double): Double {
    return -mean * Math.log(1 - nextDouble())
}

private fun RandomGenerator.nextDeltaTime(cr: Double): Long {
    return (nextExp(1.0 / cr) * 1e9).toLong()
}

suspend fun Chain<OrphanNumassEvent>.generateBlock(start: Instant, length: Long): NumassBlock {
    return SimpleBlock.produce(start, Duration.ofNanos(length)) {
        channel.takeWhile { it.timeOffset < length }.toList()
    }
}

internal val defaultAmplitudeGenerator: RandomGenerator.(OrphanNumassEvent?, Long) -> Short = { _, _ -> ((nextDouble() + 2.0) * 100).toShort() }
/**
 * Generate an event chain with a fixed count rate
 * @param cr count rate in Hz
 * @param rnd random number generator
 * @param amp amplitude generator for the chain. The receiver is the random generator, the first argument is the previous event
 * and the second argument is the delay (in nanoseconds) since that event. The result is the amplitude in channels.
 */
fun generateEvents(
        cr: Double,
        rnd: RandomGenerator = defaultGenerator,
        amp: RandomGenerator.(OrphanNumassEvent?, Long) -> Short = defaultAmplitudeGenerator): Chain<OrphanNumassEvent> {
    return MarkovChain(OrphanNumassEvent(rnd.amp(null, 0), 0)) { event ->
        val deltaT = rnd.nextDeltaTime(cr)
        OrphanNumassEvent(rnd.amp(event, deltaT), event.timeOffset + deltaT)
    }
}
/**
 * Generate a chain using provided spectrum for amplitudes
 */
fun generateEvents(
        cr: Double,
        rnd: RandomGenerator = defaultGenerator,
        spectrum: Table): Chain<OrphanNumassEvent> {
    val channels = DoubleArray(spectrum.size())
    val values = DoubleArray(spectrum.size())
    for (i in 0 until spectrum.size()) {
        channels[i] = spectrum.get(CHANNEL_KEY, i).double
        values[i] = spectrum.get(COUNT_RATE_KEY, i).double
    }
    val distribution = EnumeratedRealDistribution(channels, values)
    return generateEvents(cr, rnd) { _, _ -> distribution.sample().toShort() }
}
private data class BunchState(var bunchStart: Long = 0, var bunchEnd: Long = 0)

/**
 * The chain of bunched events
 * @param cr count rate of events inside a bunch
 * @param bunchRate number of bunches per second
 * @param bunchLength the length of a bunch in seconds
 */
fun buildBunchChain(
        cr: Double,
        bunchRate: Double,
        bunchLength: Double,
        rnd: RandomGenerator = defaultGenerator,
        amp: RandomGenerator.(OrphanNumassEvent?, Long) -> Short = defaultAmplitudeGenerator
): Chain<OrphanNumassEvent> {
    return StatefulChain(
            BunchState(0, 0),
            OrphanNumassEvent(rnd.amp(null, 0), 0)) { event ->
        if (event.timeOffset >= bunchEnd) {
            bunchStart = bunchEnd + rnd.nextDeltaTime(bunchRate)
            bunchEnd = bunchStart + (bunchLength * 1e9).toLong()
            OrphanNumassEvent(rnd.amp(null, 0), bunchStart)
        } else {
            val deltaT = rnd.nextDeltaTime(cr)
            OrphanNumassEvent(rnd.amp(event, deltaT), event.timeOffset + deltaT)
        }
    }
}
private class MergingState(private val chains: List<Chain<OrphanNumassEvent>>) {
    suspend fun poll(): OrphanNumassEvent {
        val next = chains.minBy { it.value.timeOffset } ?: chains.first()
        val res = next.value
        next.next()
        return res
    }
}

fun mergeEventChains(vararg chains: Chain<OrphanNumassEvent>): Chain<OrphanNumassEvent> {
    return StatefulChain(MergingState(listOf(*chains)), OrphanNumassEvent(0, 0)) {
        poll()
    }
}

View File

@@ -0,0 +1,141 @@
package inr.numass.data

import hep.dataforge.maths.chain.Chain
import hep.dataforge.maths.chain.MarkovChain
import hep.dataforge.maths.chain.StatefulChain
import hep.dataforge.stat.defaultGenerator
import hep.dataforge.tables.Table
import inr.numass.data.analyzers.NumassAnalyzer.Companion.CHANNEL_KEY
import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_KEY
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.OrphanNumassEvent
import inr.numass.data.api.SimpleBlock
import kotlinx.coroutines.experimental.channels.asReceiveChannel
import kotlinx.coroutines.experimental.channels.takeWhile
import kotlinx.coroutines.experimental.channels.toList
import org.apache.commons.math3.distribution.EnumeratedRealDistribution
import org.apache.commons.math3.random.RandomGenerator
import java.time.Duration
import java.time.Instant

private fun RandomGenerator.nextExp(mean: Double): Double {
    return -mean * Math.log(1 - nextDouble())
}

private fun RandomGenerator.nextDeltaTime(cr: Double): Long {
    return (nextExp(1.0 / cr) * 1e9).toLong()
}

suspend fun Sequence<OrphanNumassEvent>.generateBlock(start: Instant, length: Long): NumassBlock {
    return SimpleBlock.produce(start, Duration.ofNanos(length)) {
        asReceiveChannel().takeWhile { it.timeOffset < length }.toList()
    }
}
private class MergingState(private val chains: List<Chain<OrphanNumassEvent>>) {
    suspend fun poll(): OrphanNumassEvent {
        val next = chains.minBy { it.value.timeOffset } ?: chains.first()
        val res = next.value
        next.next()
        return res
    }
}

/**
 * Merge event chains in ascending time order
 */
fun List<Chain<OrphanNumassEvent>>.merge(): Chain<OrphanNumassEvent> {
    return StatefulChain(MergingState(this), OrphanNumassEvent(0, 0)) {
        poll()
    }
}
/**
 * Apply dead time based on the event that caused it. The returned dead time is in nanoseconds.
 */
fun Chain<OrphanNumassEvent>.withDeadTime(deadTime: (OrphanNumassEvent) -> Long): Chain<OrphanNumassEvent> {
    return MarkovChain(this.value) {
        val start = this.value
        val dt = deadTime(start)
        do {
            val next = next()
        } while (next.timeOffset - start.timeOffset < dt)
        this.value
    }
}
object NumassGenerator {

    val defaultAmplitudeGenerator: RandomGenerator.(OrphanNumassEvent?, Long) -> Short = { _, _ -> ((nextDouble() + 2.0) * 100).toShort() }

    /**
     * Generate an event chain with a fixed count rate
     * @param cr count rate in Hz
     * @param rnd random number generator
     * @param amp amplitude generator for the chain. The receiver is the random generator, the first argument is the previous event
     * and the second argument is the delay (in nanoseconds) since that event. The result is the amplitude in channels.
     */
    fun generateEvents(
            cr: Double,
            rnd: RandomGenerator = defaultGenerator,
            amp: RandomGenerator.(OrphanNumassEvent?, Long) -> Short = defaultAmplitudeGenerator): Chain<OrphanNumassEvent> {
        return MarkovChain(OrphanNumassEvent(rnd.amp(null, 0), 0)) { event ->
            val deltaT = rnd.nextDeltaTime(cr)
            OrphanNumassEvent(rnd.amp(event, deltaT), event.timeOffset + deltaT)
        }
    }

    fun mergeEventChains(vararg chains: Chain<OrphanNumassEvent>): Chain<OrphanNumassEvent> {
        return listOf(*chains).merge()
    }
    private data class BunchState(var bunchStart: Long = 0, var bunchEnd: Long = 0)

    /**
     * The chain of bunched events
     * @param cr count rate of events inside a bunch
     * @param bunchRate number of bunches per second
     * @param bunchLength the length of a bunch in seconds
     */
    fun generateBunches(
            cr: Double,
            bunchRate: Double,
            bunchLength: Double,
            rnd: RandomGenerator = defaultGenerator,
            amp: RandomGenerator.(OrphanNumassEvent?, Long) -> Short = defaultAmplitudeGenerator
    ): Chain<OrphanNumassEvent> {
        return StatefulChain(
                BunchState(0, 0),
                OrphanNumassEvent(rnd.amp(null, 0), 0)) { event ->
            if (event.timeOffset >= bunchEnd) {
                bunchStart = bunchEnd + rnd.nextDeltaTime(bunchRate)
                bunchEnd = bunchStart + (bunchLength * 1e9).toLong()
                OrphanNumassEvent(rnd.amp(null, 0), bunchStart)
            } else {
                val deltaT = rnd.nextDeltaTime(cr)
                OrphanNumassEvent(rnd.amp(event, deltaT), event.timeOffset + deltaT)
            }
        }
    }
    /**
     * Generate a chain using provided spectrum for amplitudes
     */
    fun generateEvents(
            cr: Double,
            rnd: RandomGenerator = defaultGenerator,
            spectrum: Table): Chain<OrphanNumassEvent> {
        val channels = DoubleArray(spectrum.size())
        val values = DoubleArray(spectrum.size())
        for (i in 0 until spectrum.size()) {
            channels[i] = spectrum.get(CHANNEL_KEY, i).double
            values[i] = spectrum.get(COUNT_RATE_KEY, i).double
        }
        val distribution = EnumeratedRealDistribution(channels, values)
        return generateEvents(cr, rnd) { _, _ -> distribution.sample().toShort() }
    }
}
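A usage sketch (not from the commit) mirroring the scripts changed below: a regular Poisson chain is mixed with rare bunches, merged in time order, thinned by a fixed dead time and materialized as a one-second block. The sampleBlock function and all numeric values are made up for illustration.

import inr.numass.data.NumassGenerator
import inr.numass.data.api.NumassBlock
import inr.numass.data.generateBlock
import inr.numass.data.withDeadTime
import kotlinx.coroutines.experimental.runBlocking
import java.time.Instant

fun sampleBlock(): NumassBlock = runBlocking {
    val regular = NumassGenerator.generateEvents(10.0)                 // 10 Hz Poisson chain
    val bunches = NumassGenerator.generateBunches(100.0, 0.1, 0.05)    // 100 Hz inside 50 ms bunches, 0.1 bunch/s
    NumassGenerator.mergeEventChains(regular, bunches)
            .withDeadTime { (6.5 * 1000).toLong() }                    // 6.5 us dead time, in nanoseconds
            .generateBlock(Instant.now(), 1_000_000_000L)              // one-second block
}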

View File

@@ -50,7 +50,7 @@ class PileUpSimulator {
     constructor(length: Long, rnd: RandomGenerator, countRate: Double) {
         this.rnd = rnd
-        generator = generateEvents(countRate, rnd)
+        generator = NumassGenerator.generateEvents(countRate, rnd)
         this.pointLength = length
     }

View File

@@ -2,11 +2,9 @@ package inr.numass.scripts
 import hep.dataforge.kodex.buildMeta
 import inr.numass.actions.TimeAnalyzerAction
+import inr.numass.data.NumassGenerator
 import inr.numass.data.api.SimpleNumassPoint
-import inr.numass.data.buildBunchChain
 import inr.numass.data.generateBlock
-import inr.numass.data.generateEvents
-import inr.numass.data.mergeEventChains
 import kotlinx.coroutines.experimental.channels.produce
 import kotlinx.coroutines.experimental.channels.toList
 import kotlinx.coroutines.experimental.runBlocking
@@ -21,10 +19,10 @@ fun main(args: Array<String>) {
     val blockchannel = produce {
         (1..num).forEach {
-            val regularChain = generateEvents(cr)
-            val bunchChain = buildBunchChain(40.0, 0.01, 5.0)
+            val regularChain = NumassGenerator.generateEvents(cr)
+            val bunchChain = NumassGenerator.generateBunches(40.0, 0.01, 5.0)
-            send(mergeEventChains(regularChain, bunchChain).generateBlock(start.plusNanos(it * length), length))
+            send(NumassGenerator.mergeEventChains(regularChain, bunchChain).generateBlock(start.plusNanos(it * length), length))
         }
     }

View File

@@ -64,6 +64,8 @@ fun main(args: Array<String>) {
     val histogram = SimpleHistogram(arrayOf(0.0, 0.0), arrayOf(20.0, 100.0))
     histogram.fill(
             TimeAnalyzer().getEventsWithDelay(point, meta)
+                    .filter { it.second < 10000 }
+                    .filter { it.first.channel == 0 }
                    .map { arrayOf(it.first.amplitude.toDouble(), it.second.toDouble()/1e3) }
                    .asStream()
     )

View File

@@ -6,17 +6,14 @@ import hep.dataforge.kodex.buildMeta
 import hep.dataforge.kodex.coroutineContext
 import hep.dataforge.kodex.generate
 import hep.dataforge.kodex.join
-import hep.dataforge.maths.chain.MarkovChain
 import hep.dataforge.plots.jfreechart.JFreeChartPlugin
 import inr.numass.NumassPlugin
 import inr.numass.actions.TimeAnalyzerAction
+import inr.numass.data.NumassGenerator
 import inr.numass.data.analyzers.TimeAnalyzer
-import inr.numass.data.api.OrphanNumassEvent
 import inr.numass.data.api.SimpleNumassPoint
 import inr.numass.data.generateBlock
-import org.apache.commons.math3.random.JDKRandomGenerator
-import org.apache.commons.math3.random.RandomGenerator
-import java.lang.Math.exp
+import inr.numass.data.withDeadTime
 import java.time.Instant

 fun main(args: Array<String>) {
@@ -29,27 +26,11 @@ fun main(args: Array<String>) {
     val num = 2
     val dt = 6.5
-    val rnd = JDKRandomGenerator()
-    fun RandomGenerator.nextExp(mean: Double): Double {
-        return -mean * Math.log(1 - nextDouble())
-    }
-    fun RandomGenerator.nextDeltaTime(cr: Double): Long {
-        return (nextExp(1.0 / cr) * 1e9).toLong()
-    }
     val start = Instant.now()
     val point = (1..num).map {
         Global.generate {
-            MarkovChain(OrphanNumassEvent(1000, 0)) { event ->
-                val deltaT = rnd.nextDeltaTime(cr * exp(- event.timeOffset.toDouble() / 5e10))
-                //val deltaT = rnd.nextDeltaTime(cr)
-                OrphanNumassEvent(1000, event.timeOffset + deltaT)
-            }.generateBlock(start.plusNanos(it * length), length)
+            NumassGenerator.generateEvents(cr).withDeadTime { (dt*1000).toLong() }.generateBlock(start.plusNanos(it * length), length)
         }
     }.join(Global.coroutineContext) { blocks ->
         SimpleNumassPoint(blocks, 12000.0)
@@ -57,12 +38,14 @@ fun main(args: Array<String>) {
     val meta = buildMeta {
         "analyzer" to {
             "t0" to 3000
-            "binNum" to 200
-            "t0Step" to 200
+            "chunkSize" to 5000
+            "mean" to TimeAnalyzer.AveragingMethod.ARITHMETIC
         }
+        "binNum" to 200
+        "t0.max" to 1e4
     }
     TimeAnalyzerAction().simpleRun(point, meta);
 }
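Since withDeadTime implements a non-extending dead time (the next accepted event must arrive at least dt nanoseconds after the previously accepted one), the count rate this script should recover is roughly cr / (1 + cr * tau). A small helper for cross-checking the simulation; expectedRate and the sample numbers are illustrative, not part of the commit.

// Expected observed rate for a Poisson source of rate cr (Hz) behind a
// non-extending dead time tau, given in nanoseconds as withDeadTime expects.
fun expectedRate(cr: Double, tauNanos: Long): Double =
        cr / (1.0 + cr * tauNanos * 1e-9)

// e.g. expectedRate(150e3, 6500) is about 76 kHz for a 150 kHz source and 6.5 us dead time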

View File

@@ -0,0 +1,72 @@
/*
 * Copyright 2018 Alexander Nozik.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package inr.numass.scripts.timeanalysis

import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.kodex.buildMeta
import hep.dataforge.kodex.coroutineContext
import hep.dataforge.kodex.generate
import hep.dataforge.kodex.join
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import inr.numass.NumassPlugin
import inr.numass.actions.TimeAnalyzerAction
import inr.numass.data.NumassGenerator
import inr.numass.data.api.SimpleNumassPoint
import inr.numass.data.generateBlock
import inr.numass.data.withDeadTime
import java.time.Instant

fun main(args: Array<String>) {
    Global.output = FXOutputManager()
    JFreeChartPlugin().startGlobal()
    NumassPlugin().startGlobal()

    val cr = 3.0
    val length = (30000 * 1e9).toLong()
    val num = 1
    val dt = 6.5

    val start = Instant.now()

    val point = (1..num).map {
        Global.generate {
            val events = NumassGenerator
                    .generateEvents(cr)
            val bunches = NumassGenerator
                    .generateBunches(6.0, 0.001, 5.0)
            val discharges = NumassGenerator
                    .generateBunches(50.0, 0.001, 0.1)
            NumassGenerator.mergeEventChains(events, bunches, discharges).withDeadTime { (dt * 1000).toLong() }.generateBlock(start.plusNanos(it * length), length)
        }
    }.join(Global.coroutineContext) { blocks ->
        SimpleNumassPoint(blocks, 18000.0)
    }.get()

    val meta = buildMeta {
        "analyzer" to {
            "t0" to 30000
        }
        "binNum" to 200
    }

    TimeAnalyzerAction().simpleRun(point, meta);
}
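For orientation, the average rate of each simulated component can be read off the generator parameters, assuming bunchRate is in bunches per second, bunchLength in seconds and cr in Hz (as the NumassGenerator KDoc suggests). The averageBunchRate helper below is illustrative, not part of the commit.

// Average contribution of a bunched component (rough estimate, valid while bunchRate * bunchLength << 1).
fun averageBunchRate(cr: Double, bunchRate: Double, bunchLength: Double): Double =
        cr * bunchRate * bunchLength

// bunches:    averageBunchRate(6.0, 0.001, 5.0)  ~ 0.03 Hz
// discharges: averageBunchRate(50.0, 0.001, 0.1) ~ 0.005 Hz
// both are small next to the regular 3 Hz chain, so they show up as rare correlated clusters
// on top of an essentially exponential delay spectrum.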