Fix jupyter and analyzers

Alexander Nozik 2021-12-18 23:08:03 +03:00
parent d124f376d0
commit d1ddf89c6e
46 changed files with 693 additions and 896 deletions

View File

@@ -12,14 +12,18 @@ val tablesVersion: String by rootProject.extra
kotlin.sourceSets {
commonMain {
dependencies {
api(project(":numass-data-model"))
api(projects.numass.numassDataModel)
api("space.kscience:dataforge-io:$dataforgeVersion")
api("space.kscience:tables-kt:$tablesVersion")
api("space.kscience:kmath-histograms:$kmathVersion")
api("space.kscience:kmath-complex:$kmathVersion")
api("space.kscience:kmath-stat:$kmathVersion")
api("space.kscience:kmath-histograms:$kmathVersion")
}
}
}
kscience {
useAtomic()
}

View File

@@ -1,112 +0,0 @@
/*
* Copyright 2017 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ru.inr.mass.data.analysis
import kotlinx.coroutines.flow.*
import ru.inr.mass.data.api.NumassBlock
import ru.inr.mass.data.api.NumassEvent
import ru.inr.mass.data.api.NumassSet
import ru.inr.mass.data.api.SignalProcessor
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.get
import space.kscience.dataforge.meta.int
import space.kscience.dataforge.values.Value
import space.kscience.tables.RowTable
import space.kscience.tables.Table
/**
* Created by darksnake on 11.07.2017.
*/
public abstract class AbstractAnalyzer(
private val processor: SignalProcessor? = null,
) : NumassAnalyzer {
/**
* Return an unsorted stream of events, including events extracted from frames.
* In theory, events after processing could be unsorted due to a mixture of frames and events.
* In practice, a block usually contains either frames or events, but not both.
*
* @param block
* @return
*/
override fun getEvents(block: NumassBlock, meta: Meta): Flow<NumassEvent> {
val range = meta.getRange()
return getAllEvents(block).filter { event ->
event.amplitude.toInt() in range
}
}
protected fun Meta.getRange(): IntRange {
val loChannel = get("window.lo")?.int ?: 0
val upChannel = get("window.up")?.int ?: Int.MAX_VALUE
return loChannel until upChannel
}
protected fun getAllEvents(block: NumassBlock): Flow<NumassEvent> {
return when {
block.framesCount == 0L -> block.events
processor == null -> throw IllegalArgumentException("Signal processor needed to analyze frames")
else -> flow {
emitAll(block.events)
emitAll(block.frames.flatMapConcat { processor.analyze(it) })
}
}
}
// /**
// * Get table format for summary table
// *
// * @param config
// * @return
// */
// protected open fun getTableFormat(config: Meta): ValueTableHeader {
// return TableFormatBuilder()
// .addNumber(HV_KEY, X_VALUE_KEY)
// .addNumber(NumassAnalyzer.LENGTH_KEY)
// .addNumber(NumassAnalyzer.COUNT_KEY)
// .addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
// .addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
// .addColumn(NumassAnalyzer.WINDOW_KEY)
// .addTime()
// .build()
// }
override suspend fun analyzeSet(set: NumassSet, config: Meta): Table<Value> = RowTable(
NumassAnalyzer.length,
NumassAnalyzer.count,
NumassAnalyzer.cr,
NumassAnalyzer.crError,
// NumassAnalyzer.window,
// NumassAnalyzer.timestamp
) {
set.points.forEach { point ->
analyzeParent(point, config)
}
}
public companion object {
// public val NAME_LIST: List<String> = listOf(
// NumassAnalyzer.LENGTH_KEY,
// NumassAnalyzer.COUNT_KEY,
// NumassAnalyzer.COUNT_RATE_KEY,
// NumassAnalyzer.COUNT_RATE_ERROR_KEY,
// NumassAnalyzer.WINDOW_KEY,
// NumassAnalyzer.TIME_KEY
// )
}
}

View File

@@ -1,35 +0,0 @@
/*
* Copyright 2017 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ru.inr.mass.data.analysis
import ru.inr.mass.data.api.NumassBlock
import ru.inr.mass.data.api.SignalProcessor
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
/**
* Block analyzer that can perform debunching
* Created by darksnake on 11.07.2017.
*/
public class DebunchAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(processor) {
override suspend fun analyze(block: NumassBlock, config: Meta): NumassAnalyzerResult {
TODO()
}
override val descriptor: MetaDescriptor? = null
}

View File

@@ -0,0 +1,70 @@
package ru.inr.mass.data.analysis
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.launch
import ru.inr.mass.data.api.NumassBlock
import space.kscience.kmath.histogram.LongCounter
import kotlin.math.min
public class NumassAmplitudeSpectrum(public val amplitudes: Map<UShort, ULong>) {
public val minChannel: UShort by lazy { amplitudes.keys.minOf { it } }
public val maxChannel: UShort by lazy { amplitudes.keys.maxOf { it } }
public fun binned(binSize: UInt, range: UIntRange = minChannel..maxChannel): Map<UIntRange, Double> {
val keys = sequence {
var left = range.first
do {
// bins are inclusive, so the next bin must start one channel above the previous one
// to avoid counting boundary channels twice
val right = min(left + binSize - 1U, range.last)
yield(left..right)
left = right + 1U
} while (right < range.last)
}
return keys.associateWith { bin -> amplitudes.filter { it.key in bin }.values.sum().toDouble() }
}
}
/**
* Build an amplitude spectrum with a bin size of 1.0, counted from 0.0. Some bins could be missing
*/
public suspend fun NumassBlock.amplitudeSpectrum(
extractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY,
): NumassAmplitudeSpectrum {
val map = HashMap<UShort, LongCounter>()
extractor.extract(this).collect { event ->
map.getOrPut(event.amplitude) { LongCounter() }.add(1L)
}
return NumassAmplitudeSpectrum(map.mapValues { it.value.value.toULong() })
}
/**
* Collect events from a collection of blocks in parallel
*/
public suspend fun Collection<NumassBlock>.amplitudeSpectrum(
extractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY,
): NumassAmplitudeSpectrum {
val hist = List(UShort.MAX_VALUE.toInt()) {
LongCounter()
}
coroutineScope {
forEach { block ->
launch {
extractor.extract(block).collect { event ->
hist[event.amplitude.toInt()].add(1L)
}
}
}
}
val map = hist.mapIndexedNotNull { index, counter ->
if (counter.value == 0L) {
null
} else {
index.toUShort() to counter.value.toULong()
}
}.toMap()
return NumassAmplitudeSpectrum(map)
}
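// Usage sketch for the two spectrum builders above. The `block` argument and the
// 16-channel bin width are illustrative, not part of the API:
fun printBinnedSpectrum(block: NumassBlock) = kotlinx.coroutines.runBlocking {
    val spectrum = block.amplitudeSpectrum(NumassEventExtractor.EVENTS_ONLY)
    spectrum.binned(16U).forEach { (bin, count) ->
        println("${bin.first}..${bin.last}: $count")
    }
}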

View File

@@ -17,42 +17,46 @@
package ru.inr.mass.data.analysis
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.collect
import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.MAX_CHANNEL
import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.channel
import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.count
import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.cr
import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.crError
import kotlinx.coroutines.flow.filter
import ru.inr.mass.data.api.*
import ru.inr.mass.data.api.NumassPoint.Companion.HV_KEY
import ru.inr.mass.data.api.NumassPoint.Companion.LENGTH_KEY
import space.kscience.dataforge.meta.*
import space.kscience.dataforge.meta.descriptors.Described
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
import space.kscience.dataforge.values.*
import space.kscience.kmath.histogram.Counter
import space.kscience.kmath.histogram.LongCounter
import space.kscience.tables.*
import kotlin.math.max
import kotlin.math.min
import kotlin.math.pow
import kotlin.math.sqrt
import space.kscience.dataforge.values.ListValue
import space.kscience.dataforge.values.Value
import space.kscience.dataforge.values.ValueType
import space.kscience.dataforge.values.int
import space.kscience.tables.ColumnHeader
import space.kscience.tables.MetaRow
import space.kscience.tables.RowTable
import space.kscience.tables.Table
import kotlin.properties.ReadWriteProperty
public fun MutableMetaProvider.uIntRange(
default: UIntRange = 0U..Int.MAX_VALUE.toUInt(),
key: Name? = null,
): ReadWriteProperty<Any?, UIntRange> = value(
key,
reader = { value ->
val (l, r) = value?.list ?: return@value default
l.int.toUInt()..r.int.toUInt()
},
writer = { range ->
ListValue(range.first.toInt(), range.last.toInt())
}
)
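// The delegate above serializes a UIntRange as a two-element list value, e.g.
// `window: [400, 1800]`. A round-trip sketch (the `Window` schema is hypothetical):
private class Window : Scheme() {
    var window: UIntRange by uIntRange()
    companion object : SchemeSpec<Window>(::Window)
}
// Window { window = 400U..1800U }.meta["window"]?.value == ListValue(400, 1800)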
public class NumassAnalyzerResult : Scheme() {
public var count: Long? by long(NumassAnalyzer.count.name.asName())
public var countRate: Double? by double(NumassAnalyzer.cr.name.asName())
public var countRateError: Double? by double(NumassAnalyzer.crError.name.asName())
public var length: Long? by long(NumassAnalyzer.length.name.asName())
public var count: Long by long(0L, NumassAnalyzer.count.name.asName())
public var countRate: Double by double(0.0, NumassAnalyzer.cr.name.asName())
public var countRateError: Double by double(0.0, NumassAnalyzer.crError.name.asName())
public var length: Double by double(0.0, NumassAnalyzer.length.name.asName())
public var voltage: Double? by double(HV_KEY.asName())
public var window: UIntRange?
get() = meta["window"]?.value?.list?.let {
it[0].int.toUInt().rangeTo(it[1].int.toUInt())
}
set(value) {
meta["window"] = value?.let { ListValue(it.first.toInt(), it.first.toInt()) }
}
public var parameters: NumassAnalyzerParameters by spec(NumassAnalyzerParameters)
public companion object : SchemeSpec<NumassAnalyzerResult>(::NumassAnalyzerResult)
}
@@ -62,7 +66,9 @@ public class NumassAnalyzerResult : Scheme() {
* A general raw data analysis utility. Could have different implementations
* Created by darksnake on 06-Jul-17.
*/
public interface NumassAnalyzer : Described {
public abstract class NumassAnalyzer {
public abstract val extractor: NumassEventExtractor
/**
* Perform analysis on block. The values for count rate, its error and point length in nanos must
@@ -71,67 +77,36 @@ public interface NumassAnalyzer : Described {
* @param block
* @return
*/
public suspend fun analyze(block: NumassBlock, config: Meta = Meta.EMPTY): NumassAnalyzerResult
protected abstract suspend fun analyzeInternal(
block: NumassBlock,
parameters: NumassAnalyzerParameters,
): NumassAnalyzerResult
/**
* Analysis result for point including hv information
* @param point
* @param config
* @param parameters
* @return
*/
public suspend fun analyzeParent(point: ParentBlock, config: Meta = Meta.EMPTY): NumassAnalyzerResult {
// //Add properties to config
// val newConfig = config.builder.apply {
// if (point is NumassPoint) {
// setValue("voltage", point.voltage)
// setValue("index", point.index)
// }
// setValue("channel", point.channel)
// }
val res = analyze(point, config)
public suspend fun analyze(
point: ParentBlock,
parameters: NumassAnalyzerParameters = NumassAnalyzerParameters.empty(),
): NumassAnalyzerResult {
val res = analyzeInternal(point, parameters)
if (point is NumassPoint) {
res.voltage = point.voltage
}
res.parameters = parameters
return res
}
/**
* Return unsorted stream of events including events from frames
*
* @param block
* @return
*/
public fun getEvents(block: NumassBlock, meta: Meta = Meta.EMPTY): Flow<NumassEvent>
/**
* Analyze the whole set and return the results as a table
*
* @param set
* @param config
* @return
*/
public suspend fun analyzeSet(set: NumassSet, config: Meta): Table<Value>
/**
* Get the approximate number of events in the block. Not all analyzers support precise event counting
*
* @param block
* @param config
* @return
*/
public suspend fun getCount(block: NumassBlock, config: Meta): Long =
analyze(block, config).getValue(count.name)?.long ?: 0L
/**
* Get the approximate effective point length in nanos. It does not necessarily correspond to the real point length.
*
* @param block
* @param config
* @return
*/
public suspend fun getLength(block: NumassBlock, config: Meta = Meta.EMPTY): Long =
analyze(block, config).getValue(LENGTH_KEY)?.long ?: 0L
protected suspend fun NumassBlock.flowFilteredEvents(
parameters: NumassAnalyzerParameters,
): Flow<NumassEvent> {
val window = parameters.window
return extractor.extract(this).filter { it.amplitude in window }
}
public companion object {
@@ -149,120 +124,143 @@ public interface NumassAnalyzer : Described {
}
}
public suspend fun NumassAnalyzer.getAmplitudeSpectrum(
block: NumassBlock,
range: UIntRange = 0U..MAX_CHANNEL,
config: Meta = Meta.EMPTY,
): Table<Value> {
val seconds = block.getLength().inWholeMilliseconds.toDouble() / 1000.0
return getEvents(block, config).getAmplitudeSpectrum(seconds, range)
}
/**
* Calculate number of counts in the given channel
* Analyze the whole set and return the results as a table
*
* @param spectrum
* @param loChannel
* @param upChannel
* @return
*/
internal fun Table<Value>.countInWindow(loChannel: Short, upChannel: Short): Long = rows.filter { row ->
row[channel]?.int in loChannel until upChannel
}.sumOf { it[count]?.long ?: 0L }
/**
* Calculate the amplitude spectrum for a given block. The s
*
* @param this@getAmplitudeSpectrum
* @param length length in seconds, used for count rate calculation
* @param set
* @param config
* @return
*/
private suspend fun Flow<NumassEvent>.getAmplitudeSpectrum(
length: Double,
range: UIntRange = 0U..MAX_CHANNEL,
): Table<Value> {
//optimized for fastest computation
val spectrum: MutableMap<UInt, LongCounter> = HashMap()
collect { event ->
val channel = event.amplitude
spectrum.getOrPut(channel.toUInt()) {
LongCounter()
}.add(1L)
}
return RowTable<Value>(channel, count, cr, crError) {
range.forEach { ch ->
val countValue: Long = spectrum[ch]?.value ?: 0L
valueRow(
channel to ch,
count to countValue,
cr to (countValue.toDouble() / length),
crError to sqrt(countValue.toDouble()) / length
)
}
public suspend fun NumassAnalyzer.analyzeSet(
set: NumassSet,
config: NumassAnalyzerParameters = NumassAnalyzerParameters.empty(),
): Table<Value> = RowTable(
NumassAnalyzer.length,
NumassAnalyzer.count,
NumassAnalyzer.cr,
NumassAnalyzer.crError,
// NumassAnalyzer.window,
// NumassAnalyzer.timestamp
) {
set.points.forEach { point ->
addRow(MetaRow(analyze(point, config).meta))
}
}
/**
* Apply window and binning to a spectrum. Empty bins are filled with zeroes
*/
private fun Table<Value>.withBinning(
binSize: UInt, range: UIntRange = 0U..MAX_CHANNEL,
): Table<Value> = RowTable<Value>(channel, count, cr, crError) {
// var chan = loChannel
// ?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.int }.min().orElse(0)
//
// val top = upChannel
// ?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.int }.max().orElse(1)
val binSizeColumn = newColumn<Value>("binSize")
var chan = range.first
while (chan < range.last - binSize) {
val counter = LongCounter()
val countRateCounter = Counter.real()
val countRateDispersionCounter = Counter.real()
val binLo = chan
val binUp = chan + binSize
rows.filter { row ->
(row[channel]?.int ?: 0U) in binLo until binUp
}.forEach { row ->
counter.add(row[count]?.long ?: 0L)
countRateCounter.add(row[cr]?.double ?: 0.0)
countRateDispersionCounter.add(row[crError]?.double?.pow(2.0) ?: 0.0)
}
val bin = min(binSize, range.last - chan)
valueRow(
channel to (chan.toDouble() + bin.toDouble() / 2.0),
count to counter.value,
cr to countRateCounter.value,
crError to sqrt(countRateDispersionCounter.value),
binSizeColumn to bin
)
chan += binSize
}
}
/**
* Subtract reference spectrum.
*/
private fun subtractAmplitudeSpectrum(
sp1: Table<Value>, sp2: Table<Value>,
): Table<Value> = RowTable<Value>(channel, cr, crError) {
sp1.rows.forEach { row1 ->
val channelValue = row1[channel]?.double
val row2 = sp2.rows.find { it[channel]?.double == channelValue } ?: MapRow(emptyMap())
val value = max((row1[cr]?.double ?: 0.0) - (row2[cr]?.double ?: 0.0), 0.0)
val error1 = row1[crError]?.double ?: 0.0
val error2 = row2[crError]?.double ?: 0.0
val error = sqrt(error1 * error1 + error2 * error2)
valueRow(channel to channelValue, cr to value, crError to error)
}
}
//public suspend fun NumassAnalyzer.getAmplitudeSpectrum(
// block: NumassBlock,
// range: UIntRange = 0U..MAX_CHANNEL,
// config: Meta = Meta.EMPTY,
//): Table<Value> {
// val seconds = block.getLength().inWholeMilliseconds.toDouble() / 1000.0
// return getEvents(block, config).getAmplitudeSpectrum(seconds, range)
//}
//
///**
// * Calculate number of counts in the given channel
// *
// * @param spectrum
// * @param loChannel
// * @param upChannel
// * @return
// */
//internal fun Table<Value>.countInWindow(loChannel: Short, upChannel: Short): Long = rows.filter { row ->
// row[channel]?.int in loChannel until upChannel
//}.sumOf { it[count]?.long ?: 0L }
//
///**
// * Calculate the amplitude spectrum for a given block. The s
// *
// * @param this@getAmplitudeSpectrum
// * @param length length in seconds, used for count rate calculation
// * @param config
// * @return
// */
//private suspend fun Flow<NumassEvent>.getAmplitudeSpectrum(
// length: Double,
// range: UIntRange = 0U..MAX_CHANNEL,
//): Table<Value> {
//
// //optimized for fastest computation
// val spectrum: MutableMap<UInt, LongCounter> = HashMap()
// collect { event ->
// val channel = event.amplitude
// spectrum.getOrPut(channel.toUInt()) {
// LongCounter()
// }.add(1L)
// }
//
// return RowTable<Value>(channel, count, cr, crError) {
// range.forEach { ch ->
// val countValue: Long = spectrum[ch]?.value ?: 0L
// valueRow(
// channel to ch,
// count to countValue,
// cr to (countValue.toDouble() / length),
// crError to sqrt(countValue.toDouble()) / length
// )
// }
// }
//}
//
///**
// * Apply window and binning to a spectrum. Empty bins are filled with zeroes
// */
//private fun Table<Value>.withBinning(
// binSize: UInt, range: UIntRange = 0U..MAX_CHANNEL,
//): Table<Value> = RowTable<Value>(channel, count, cr, crError) {
//// var chan = loChannel
//// ?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.int }.min().orElse(0)
////
//// val top = upChannel
//// ?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.int }.max().orElse(1)
//
// val binSizeColumn = newColumn<Value>("binSize")
//
// var chan = range.first
//
// while (chan < range.last - binSize) {
// val counter = LongCounter()
// val countRateCounter = Counter.real()
// val countRateDispersionCounter = Counter.real()
//
// val binLo = chan
// val binUp = chan + binSize
//
// rows.filter { row ->
// (row[channel]?.int ?: 0U) in binLo until binUp
// }.forEach { row ->
// counter.add(row[count]?.long ?: 0L)
// countRateCounter.add(row[cr]?.double ?: 0.0)
// countRateDispersionCounter.add(row[crError]?.double?.pow(2.0) ?: 0.0)
// }
// val bin = min(binSize, range.last - chan)
//
// valueRow(
// channel to (chan.toDouble() + bin.toDouble() / 2.0),
// count to counter.value,
// cr to countRateCounter.value,
// crError to sqrt(countRateDispersionCounter.value),
// binSizeColumn to bin
// )
// chan += binSize
// }
//}
//
///**
// * Subtract reference spectrum.
// */
//private fun subtractAmplitudeSpectrum(
// sp1: Table<Value>, sp2: Table<Value>,
//): Table<Value> = RowTable<Value>(channel, cr, crError) {
// sp1.rows.forEach { row1 ->
// val channelValue = row1[channel]?.double
// val row2 = sp2.rows.find { it[channel]?.double == channelValue } ?: MapRow(emptyMap())
//
// val value = max((row1[cr]?.double ?: 0.0) - (row2[cr]?.double ?: 0.0), 0.0)
// val error1 = row1[crError]?.double ?: 0.0
// val error2 = row2[crError]?.double ?: 0.0
// val error = sqrt(error1 * error1 + error2 * error2)
// valueRow(channel to channelValue, cr to value, crError to error)
// }
//}

View File

@@ -0,0 +1,46 @@
package ru.inr.mass.data.analysis
import space.kscience.dataforge.meta.*
public class TimeAnalyzerParameters : Scheme() {
public enum class AveragingMethod {
ARITHMETIC,
WEIGHTED,
GEOMETRIC
}
public var value: Int? by int()
/**
* The relative fraction of events that should be removed by time cut
*/
public var crFraction by double()
public var min by double(0.0)
public var crMin by double(0.0)
/**
* The number of events in chunk to split the chain into. If null, no chunks are used
*/
public var chunkSize: Int? by int()
public var inverted: Boolean by boolean(true)
public var sortEvents: Boolean by boolean(false)
/**
* Chunk averaging method
*/
public var averagingMethod: AveragingMethod by enum(AveragingMethod.WEIGHTED)
public companion object : SchemeSpec<TimeAnalyzerParameters>(::TimeAnalyzerParameters)
}
public class NumassAnalyzerParameters : Scheme() {
public var deadTime: Double by double(0.0)
public var window: UIntRange by uIntRange()
public var t0: TimeAnalyzerParameters by spec(TimeAnalyzerParameters)
public companion object : SchemeSpec<NumassAnalyzerParameters>(::NumassAnalyzerParameters)
}
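// A configuration sketch for the schemes above; the numbers are illustrative, not defaults:
val exampleParameters = NumassAnalyzerParameters {
    deadTime = 7e-6
    window = 400U..1800U
    t0.crFraction = 0.1 // remove roughly 10% of events by the time cut
    t0.averagingMethod = TimeAnalyzerParameters.AveragingMethod.WEIGHTED
}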

View File

@@ -0,0 +1,22 @@
package ru.inr.mass.data.analysis
import kotlinx.coroutines.flow.Flow
import ru.inr.mass.data.api.NumassBlock
import ru.inr.mass.data.api.NumassEvent
public fun interface NumassEventExtractor {
public suspend fun extract(block: NumassBlock): Flow<NumassEvent>
public companion object {
/**
* A default event extractor that ignores frames
*/
public val EVENTS_ONLY: NumassEventExtractor = NumassEventExtractor { it.events }
}
}
//public fun NumassEventExtractor.filter(
// condition: NumassBlock.(NumassEvent) -> Boolean,
//): NumassEventExtractor = NumassEventExtractor { block ->
// extract(block).filter { block.condition(it) }
//}
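// A sketch of a custom extractor in the spirit of the commented-out `filter` helper
// above (uses kotlinx.coroutines.flow.filter; the 500U amplitude threshold is arbitrary):
val highAmplitudeOnly = NumassEventExtractor { block ->
    block.events.filter { it.amplitude > 500U }
}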

View File

@@ -11,11 +11,12 @@ package ru.inr.mass.data.analysis
//import ru.inr.mass.data.api.NumassBlock
//import ru.inr.mass.data.api.OrphanNumassEvent
//import ru.inr.mass.data.api.SimpleBlock
//import space.kscience.dataforge.tables.Table
//import space.kscience.kmath.chains.Chain
//import space.kscience.kmath.chains.MarkovChain
//import space.kscience.kmath.chains.StatefulChain
//import space.kscience.kmath.stat.RandomGenerator
//import space.kscience.tables.Table
//import kotlin.math.ln
//import kotlin.time.Duration.Companion.nanoseconds
//
//private fun RandomGenerator.nextExp(mean: Double): Double {

View File

@@ -14,43 +14,29 @@
* limitations under the License.
*/
package inr.numass.data.analyzers
package ru.inr.mass.data.analysis
import kotlinx.coroutines.flow.count
import ru.inr.mass.data.analysis.AbstractAnalyzer
import ru.inr.mass.data.analysis.NumassAnalyzerResult
import ru.inr.mass.data.api.NumassBlock
import ru.inr.mass.data.api.SignalProcessor
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.meta.descriptors.value
import space.kscience.dataforge.meta.double
import space.kscience.dataforge.meta.get
import space.kscience.dataforge.meta.int
import space.kscience.dataforge.values.ValueType
import kotlin.math.sqrt
/**
* A simple event counter
* Created by darksnake on 07.07.2017.
*/
public class SimpleAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(processor) {
public class SimpleAnalyzer(
override val extractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY,
) : NumassAnalyzer() {
override val descriptor: MetaDescriptor = MetaDescriptor {
value("deadTime", ValueType.NUMBER) {
info = "Dead time in nanoseconds for correction"
default(0.0)
}
}
override suspend fun analyzeInternal(
block: NumassBlock,
parameters: NumassAnalyzerParameters
): NumassAnalyzerResult {
override suspend fun analyze(block: NumassBlock, config: Meta): NumassAnalyzerResult {
val loChannel = config["window.lo"]?.int ?: 0
val upChannel = config["window.up"]?.int ?: Int.MAX_VALUE
val count: Int = getEvents(block, config).count()
val count: Int = block.flowFilteredEvents(parameters).count()
val length: Double = block.getLength().inWholeNanoseconds.toDouble() / 1e9
val deadTime = config["deadTime"]?.double ?: 0.0
val deadTime = parameters.deadTime
val countRate = if (deadTime > 0) {
val mu = count.toDouble() / length
@@ -61,11 +47,10 @@ public class SimpleAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyz
val countRateError = sqrt(count.toDouble()) / length
return NumassAnalyzerResult {
this.length = length.toLong()
this.length = length
this.count = count.toLong()
this.countRate = countRate
this.countRateError = countRateError
this.window = loChannel.toUInt().rangeTo(upChannel.toUInt())
//TODO NumassAnalyzer.timestamp to block.startTime
}
}
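// End-to-end sketch for SimpleAnalyzer (the `point` argument and window are hypothetical):
fun countRateOf(point: ru.inr.mass.data.api.NumassPoint): Double = kotlinx.coroutines.runBlocking {
    SimpleAnalyzer().analyze(point, NumassAnalyzerParameters {
        window = 400U..1800U
    }).countRate
}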

View File

@@ -1,104 +0,0 @@
///*
// * Copyright 2017 Alexander Nozik.
// *
// * Licensed under the Apache License, Version 2.0 (the "License");
// * you may not use this file except in compliance with the License.
// * You may obtain a copy of the License at
// *
// * http://www.apache.org/licenses/LICENSE-2.0
// *
// * Unless required by applicable law or agreed to in writing, software
// * distributed under the License is distributed on an "AS IS" BASIS,
// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// * See the License for the specific language governing permissions and
// * limitations under the License.
// */
//
package ru.inr.mass.data.analysis
import inr.numass.data.analyzers.SimpleAnalyzer
import kotlinx.coroutines.flow.Flow
import ru.inr.mass.data.api.NumassBlock
import ru.inr.mass.data.api.NumassEvent
import ru.inr.mass.data.api.NumassSet
import ru.inr.mass.data.api.SignalProcessor
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.meta.get
import space.kscience.dataforge.meta.string
import space.kscience.dataforge.values.Value
import space.kscience.dataforge.values.asValue
import space.kscience.dataforge.values.setValue
import space.kscience.tables.Table
/**
* An analyzer dispatcher which uses different analyzer for different meta
* Created by darksnake on 11.07.2017.
*/
public open class SmartAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(processor) {
private val simpleAnalyzer = SimpleAnalyzer(processor)
private val debunchAnalyzer = DebunchAnalyzer(processor)
private val timeAnalyzer: NumassAnalyzer = TODO() // TimeAnalyzer(processor)
override val descriptor: MetaDescriptor? = null
private fun getAnalyzer(config: Meta): NumassAnalyzer = when (val type = config["type"]?.string) {
null -> if (config["t0"] != null) {
timeAnalyzer
} else {
simpleAnalyzer
}
"simple" -> simpleAnalyzer
"time" -> timeAnalyzer
"debunch" -> debunchAnalyzer
else -> throw IllegalArgumentException("Analyzer $type not found")
}
override suspend fun analyze(block: NumassBlock, config: Meta): NumassAnalyzerResult {
val analyzer = getAnalyzer(config)
val res = analyzer.analyze(block, config)
return NumassAnalyzerResult.read(res.meta).apply {
setValue(T0_KEY, 0.0.asValue())
}
}
override fun getEvents(block: NumassBlock, meta: Meta): Flow<NumassEvent> =
getAnalyzer(meta).getEvents(block, meta)
override suspend fun analyzeSet(set: NumassSet, config: Meta): Table<Value> {
return getAnalyzer(config).analyzeSet(set, config)
// fun Value.computeExpression(point: NumassPoint): Int {
// return when {
// this.type == ValueType.NUMBER -> this.int
// this.type == ValueType.STRING -> {
// val exprParams = HashMap<String, Any>()
//
// exprParams["U"] = point.voltage
//
// ExpressionUtils.function(this.string, exprParams).toInt()
// }
// else -> error("Can't interpret $type as expression or number")
// }
// }
//
// val lo = config.getValue("window.lo", 0)
// val up = config.getValue("window.up", Int.MAX_VALUE)
//
// val format = getTableFormat(config)
//
// return ListTable.Builder(format)
// .rows(set.points.map { point ->
// val newConfig = config.builder.apply {
// setValue("window.lo", lo.computeExpression(point))
// setValue("window.up", up.computeExpression(point))
// }
// analyzeParent(point, newConfig)
// })
// .build()
}
public companion object : SmartAnalyzer() {
public const val T0_KEY: String = "t0"
}
}

View File

@@ -15,286 +15,161 @@
// */
//
package ru.inr.mass.data.analysis
//
//import hep.dataforge.description.ValueDef
//import hep.dataforge.description.ValueDefs
//import hep.dataforge.meta.Meta
//import hep.dataforge.tables.Adapters.*
//import hep.dataforge.tables.TableFormat
//import hep.dataforge.tables.TableFormatBuilder
//import hep.dataforge.values.*
//import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.COUNT_KEY
//import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.COUNT_RATE_ERROR_KEY
//import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.COUNT_RATE_KEY
//import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.LENGTH_KEY
//import ru.inr.mass.data.analysis.TimeAnalyzer.AveragingMethod.*
//import inr.numass.data.api.*
//import inr.numass.data.api.NumassPoint.Companion.HV_KEY
//import ru.inr.mass.data.api.NumassBlock
//import ru.inr.mass.data.api.SignalProcessor
//import space.kscience.dataforge.values.ValueType
//import java.util.*
//import java.util.concurrent.atomic.AtomicLong
//import kotlin.collections.List
//import kotlin.collections.asSequence
//import kotlin.collections.count
//import kotlin.collections.first
//import kotlin.collections.map
//import kotlin.collections.set
//import kotlin.collections.sortBy
//import kotlin.collections.sumBy
//import kotlin.collections.sumByDouble
//import kotlin.collections.toMutableList
//import kotlin.math.*
//import kotlin.streams.asSequence
//
//
///**
// * An analyzer which uses time information from events
// * Created by darksnake on 11.07.2017.
// */
//@ValueDefs(
// ValueDef(
// key = "separateParallelBlocks",
// type = [ValueType.BOOLEAN],
// info = "If true, then parallel blocks will be forced to be evaluated separately"
// ),
// ValueDef(
// key = "chunkSize",
// type = [ValueType.NUMBER],
// def = "-1",
// info = "The number of events in chunk to split the chain into. If negative, no chunks are used"
// )
//)
//open class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(processor) {
//
// override fun analyze(block: NumassBlock, config: Meta): Values {
// //In case points inside points
// if (block is ParentBlock && (block.isSequential || config.getBoolean("separateParallelBlocks", false))) {
// return analyzeParent(block, config)
// }
//
// val t0 = getT0(block, config).toLong()
//
// val chunkSize = config.getInt("chunkSize", -1)
//
// val count = super.getEvents(block, config).count()
// val length = block.length.toNanos().toDouble() / 1e9
//
// val res = when {
// count < 1000 -> ValueMap.ofPairs(
// LENGTH_KEY to length,
// COUNT_KEY to count,
// COUNT_RATE_KEY to count.toDouble() / length,
// COUNT_RATE_ERROR_KEY to sqrt(count.toDouble()) / length
// )
// chunkSize > 0 -> getEventsWithDelay(block, config)
// .chunked(chunkSize) { analyzeSequence(it.asSequence(), t0) }
// .toList()
// .mean(config.getEnum("mean", WEIGHTED))
// else -> analyzeSequence(getEventsWithDelay(block, config), t0)
// }
//
// return ValueMap.Builder(res)
// .putValue("blockLength", length)
// .putValue(NumassAnalyzer.WINDOW_KEY, config.getRange())
// .putValue(NumassAnalyzer.TIME_KEY, block.startTime)
// .putValue(T0_KEY, t0.toDouble() / 1000.0)
// .build()
// }
//
//
// private fun analyzeSequence(sequence: Sequence<Pair<NumassEvent, Long>>, t0: Long): Values {
// val totalN = AtomicLong(0)
// val totalT = AtomicLong(0)
// sequence.filter { pair -> pair.second >= t0 }
// .forEach { pair ->
// totalN.incrementAndGet()
// //TODO add progress listener here
// totalT.addAndGet(pair.second)
// }
//
// if (totalN.toInt() == 0) {
// error("Zero number of intervals")
// }
//
// val countRate =
// 1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000)//1e9 / (totalT.get() / totalN.get() - t0);
// val countRateError = countRate / sqrt(totalN.get().toDouble())
// val length = totalT.get() / 1e9
// val count = (length * countRate).toLong()
//
// return ValueMap.ofPairs(
// LENGTH_KEY to length,
// COUNT_KEY to count,
// COUNT_RATE_KEY to countRate,
// COUNT_RATE_ERROR_KEY to countRateError
// )
//
// }
//
// override fun analyzeParent(point: ParentBlock, config: Meta): Values {
// //Average count rates, do not sum events
// val res = point.blocks.map { it -> analyze(it, config) }
//
// val map = HashMap(res.mean(config.getEnum("mean", WEIGHTED)).asMap())
// if (point is NumassPoint) {
// map[HV_KEY] = Value.of(point.voltage)
// }
// return ValueMap(map)
// }
//
// enum class AveragingMethod {
// ARITHMETIC,
// WEIGHTED,
// GEOMETRIC
// }
//
// /**
// * Combine multiple blocks from the same point into one
// *
// * @return
// */
// private fun List<Values>.mean(method: AveragingMethod): Values {
//
// if (this.isEmpty()) {
// return ValueMap.Builder()
// .putValue(LENGTH_KEY, 0)
// .putValue(COUNT_KEY, 0)
// .putValue(COUNT_RATE_KEY, 0)
// .putValue(COUNT_RATE_ERROR_KEY, 0)
// .build()
// }
//
// val totalTime = sumByDouble { it.getDouble(LENGTH_KEY) }
//
// val (countRate, countRateDispersion) = when (method) {
// ARITHMETIC -> Pair(
// sumByDouble { it.getDouble(COUNT_RATE_KEY) } / size,
// sumByDouble { it.getDouble(COUNT_RATE_ERROR_KEY).pow(2.0) } / size / size
// )
// WEIGHTED -> Pair(
// sumByDouble { it.getDouble(COUNT_RATE_KEY) * it.getDouble(LENGTH_KEY) } / totalTime,
// sumByDouble { (it.getDouble(COUNT_RATE_ERROR_KEY) * it.getDouble(LENGTH_KEY) / totalTime).pow(2.0) }
// )
// GEOMETRIC -> {
// val mean = exp(sumByDouble { ln(it.getDouble(COUNT_RATE_KEY)) } / size)
// val variance = (mean / size).pow(2.0) * sumByDouble {
// (it.getDouble(COUNT_RATE_ERROR_KEY) / it.getDouble(
// COUNT_RATE_KEY
// )).pow(2.0)
// }
// Pair(mean, variance)
// }
// }
//
// return ValueMap.Builder(first())
// .putValue(LENGTH_KEY, totalTime)
// .putValue(COUNT_KEY, sumBy { it.getInt(COUNT_KEY) })
// .putValue(COUNT_RATE_KEY, countRate)
// .putValue(COUNT_RATE_ERROR_KEY, sqrt(countRateDispersion))
// .build()
// }
//
// @ValueDefs(
// ValueDef(key = "t0", type = arrayOf(ValueType.NUMBER), info = "Constant t0 cut"),
// ValueDef(
// key = "t0.crFraction",
// type = arrayOf(ValueType.NUMBER),
// info = "The relative fraction of events that should be removed by time cut"
// ),
// ValueDef(key = "t0.min", type = arrayOf(ValueType.NUMBER), def = "0", info = "Minimal t0")
// )
// protected fun getT0(block: NumassBlock, meta: Meta): Int {
// return if (meta.hasValue("t0")) {
// meta.getInt("t0")
// } else if (meta.hasMeta("t0")) {
// val fraction = meta.getDouble("t0.crFraction")
// val cr = estimateCountRate(block)
// if (cr < meta.getDouble("t0.minCR", 0.0)) {
// 0
// } else {
// max(-1e9 / cr * ln(1.0 - fraction), meta.getDouble("t0.min", 0.0)).toInt()
// }
// } else {
// 0
// }
//
// }
//
// private fun estimateCountRate(block: NumassBlock): Double {
// return block.events.count().toDouble() / block.length.toMillis() * 1000
// }
//
// fun zipEvents(block: NumassBlock, config: Meta): Sequence<Pair<NumassEvent, NumassEvent>> {
// return getAllEvents(block).asSequence().zipWithNext()
// }
//
// /**
// * The chain of event with delays in nanos
// *
// * @param block
// * @param config
// * @return
// */
// fun getEventsWithDelay(block: NumassBlock, config: Meta): Sequence<Pair<NumassEvent, Long>> {
// val inverted = config.getBoolean("inverted", true)
// //range is included in super.getEvents
// val events = super.getEvents(block, config).toMutableList()
//
// if (config.getBoolean("sortEvents", false) || (block is ParentBlock && !block.isSequential)) {
// //sort in place if needed
// events.sortBy { it.timeOffset }
// }
//
// return events.asSequence().zipWithNext { prev, next ->
// val delay = max(next.timeOffset - prev.timeOffset, 0)
// if (inverted) {
// Pair(next, delay)
// } else {
// Pair(prev, delay)
// }
// }
// }
//
// /**
// * The filtered stream of events
// *
// * @param block
// * @param meta
// * @return
// */
// override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
// val t0 = getT0(block, meta).toLong()
// return getEventsWithDelay(block, meta)
// .filter { pair -> pair.second >= t0 }
// .map { it.first }.toList()
// }
//
// public override fun getTableFormat(config: Meta): TableFormat {
// return TableFormatBuilder()
// .addNumber(HV_KEY, X_VALUE_KEY)
// .addNumber(LENGTH_KEY)
// .addNumber(COUNT_KEY)
// .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
// .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
// .addColumn(NumassAnalyzer.WINDOW_KEY)
// .addTime()
// .addNumber(T0_KEY)
// .build()
// }
//
// companion object {
// const val T0_KEY = "t0"
//
// val NAME_LIST = arrayOf(
// LENGTH_KEY,
// COUNT_KEY,
// COUNT_RATE_KEY,
// COUNT_RATE_ERROR_KEY,
// NumassAnalyzer.WINDOW_KEY,
// NumassAnalyzer.TIME_KEY,
// T0_KEY
// )
// }
//}
import kotlinx.coroutines.flow.*
import ru.inr.mass.data.analysis.TimeAnalyzerParameters.AveragingMethod
import ru.inr.mass.data.api.NumassBlock
import ru.inr.mass.data.api.NumassEvent
import ru.inr.mass.data.api.ParentBlock
import space.kscience.kmath.streaming.asFlow
import space.kscience.kmath.streaming.chunked
import space.kscience.kmath.structures.Buffer
import kotlin.math.*
/**
* An analyzer which uses time information from events
* Created by darksnake on 11.07.2017.
*/
public open class TimeAnalyzer(override val extractor: NumassEventExtractor) : NumassAnalyzer() {
override suspend fun analyzeInternal(
block: NumassBlock,
parameters: NumassAnalyzerParameters,
): NumassAnalyzerResult {
//Parallel processing and merging of parent blocks
if (block is ParentBlock) {
val res = block.flowBlocks().map { analyzeInternal(it, parameters) }.toList()
return res.combineResults(parameters.t0.averagingMethod)
}
val t0 = getT0(block, parameters.t0).toLong()
return when (val chunkSize = parameters.t0.chunkSize) {
null -> block.flowFilteredEvents(parameters)
.byPairs(parameters.t0.inverted)
.analyze(t0)
// // chunk is larger than the number of events
// chunkSize > count -> NumassAnalyzerResult {
// this.length = length
// this.count = count
// this.countRate = count.toDouble() / length
// this.countRateError = sqrt(count.toDouble()) / length
// }
else -> block.flowFilteredEvents(parameters)
.byPairs(parameters.t0.inverted)
.chunked(chunkSize, Buffer.Companion::auto)
.map { it.asFlow().analyze(t0) }
.toList()
.combineResults(parameters.t0.averagingMethod)
}
}
/**
* Analyze given flow of events + delays
*/
private suspend fun Flow<Pair<NumassEvent, Long>>.analyze(t0: Long): NumassAnalyzerResult {
var totalN = 0L
var totalT = 0L
filter { pair -> pair.second >= t0 }.collect { pair ->
totalN++
//TODO add progress listener here
totalT += pair.second
}
if (totalN == 0L) {
error("Zero number of intervals")
}
val countRate = 1e6 * totalN / (totalT / 1000 - t0 * totalN / 1000)
val countRateError = countRate / sqrt(totalN.toDouble())
val length = totalT / 1e9
val count = (length * countRate).toLong()
return NumassAnalyzerResult {
this.length = totalT / 1e9
this.count = count
this.countRate = countRate
this.countRateError = countRateError
}
}
/**
* Combine multiple blocks from the same point into one
*
* @return
*/
private fun List<NumassAnalyzerResult>.combineResults(method: AveragingMethod): NumassAnalyzerResult {
if (this.isEmpty()) {
return NumassAnalyzerResult.empty()
}
val totalTime = sumOf { it.length }
val (countRate, countRateDispersion) = when (method) {
AveragingMethod.ARITHMETIC -> Pair(
sumOf { it.countRate } / size,
sumOf { it.countRateError.pow(2.0) } / size / size
)
AveragingMethod.WEIGHTED -> Pair(
sumOf { it.countRate * it.length } / totalTime,
sumOf { (it.countRateError * it.length / totalTime).pow(2.0) }
)
AveragingMethod.GEOMETRIC -> {
val mean = exp(sumOf { ln(it.countRate) } / size)
val variance = (mean / size).pow(2.0) * sumOf {
(it.countRateError / it.countRate).pow(2.0)
}
Pair(mean, variance)
}
}
return NumassAnalyzerResult {
length = totalTime
count = sumOf { it.count }
this.countRate = countRate
this.countRateError = sqrt(countRateDispersion)
}
}
/**
* Compute actual t0
*/
private suspend fun getT0(block: NumassBlock, parameters: TimeAnalyzerParameters): Int {
parameters.value?.let { return it }
parameters.crFraction?.let { fraction ->
val cr = block.events.count().toDouble() / block.getLength().inWholeMilliseconds * 1000
// the computed cut must be returned, not silently discarded
return if (cr < parameters.crMin) {
0
} else {
max(-1e9 / cr * ln(1.0 - fraction), parameters.min).toInt()
}
}
return 0
}
/**
* Pair each event with an adjacent delay: the delay before the event when inverted = true, or the delay after it when inverted = false
*/
private fun Flow<NumassEvent>.byPairs(inverted: Boolean = true): Flow<Pair<NumassEvent, Long>> = flow {
var prev: NumassEvent? = null
collect { next ->
// the first event only seeds the previous-event buffer; afterwards each
// consecutive pair produces exactly one delay
val p = prev
if (p != null) {
val delay = next.timeOffset - p.timeOffset
if (delay < 0) error("Events are not ordered!")
if (inverted) {
emit(Pair(next, delay))
} else {
emit(Pair(p, delay))
}
}
prev = next
}
}
}
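// Usage sketch (hypothetical `point` and parameter values): a time cut removing
// roughly 10% of events, evaluated in chunks of 10000 delay pairs:
fun timeAnalyze(point: ParentBlock): NumassAnalyzerResult = kotlinx.coroutines.runBlocking {
    TimeAnalyzer(NumassEventExtractor.EVENTS_ONLY).analyze(point, NumassAnalyzerParameters {
        window = 0U..4000U
        t0.crFraction = 0.1
        t0.chunkSize = 10000
    })
}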

View File

@@ -1,5 +0,0 @@
package ru.inr.mass.data.analysis
import space.kscience.dataforge.values.Value
public typealias Values = Map<String, Value>

View File

@@ -0,0 +1,37 @@
package ru.inr.mass.data.analysis
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.flow.transform
import kotlinx.coroutines.runBlocking
import ru.inr.mass.data.api.NumassBlock
import ru.inr.mass.data.api.getTime
import space.kscience.kmath.histogram.UnivariateHistogram
import kotlin.math.max
public fun <T, R> Flow<T>.zipWithNext(block: (l: T, r: T) -> R): Flow<R> {
var current: T? = null
return transform { r ->
current?.let { l ->
emit(block(l, r))
}
current = r
}
}
public fun NumassBlock.timeHistogram(
binSize: Double,
extractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY,
): UnivariateHistogram = UnivariateHistogram.uniform(binSize) {
runBlocking {
extractor.extract(this@timeHistogram).zipWithNext { l, r ->
if (l.owner == r.owner) {
max((r.getTime() - l.getTime()).inWholeMicroseconds, 0L)
} else {
0
}
}.collect {
putValue(it.toDouble())
}
}
}
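// Inspection sketch for the histogram above: with binSize = 1e3 the bins are 1 ms wide,
// since delays are recorded in whole microseconds. `center` comes from
// space.kscience.kmath.histogram.center; the `block` argument is hypothetical:
fun printDelayHistogram(block: NumassBlock) {
    block.timeHistogram(1e3).bins.sortedBy { it.domain.center }.forEach { bin ->
        println("${bin.domain.center} us: ${bin.value}")
    }
}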

View File

@@ -3,14 +3,13 @@ package ru.inr.mass.data.proto
import kotlinx.coroutines.flow.toList
import kotlinx.coroutines.runBlocking
import org.junit.jupiter.api.Test
import ru.inr.mass.data.api.NumassPoint
import ru.inr.mass.data.api.NumassSet
import ru.inr.mass.data.api.ParentBlock
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.meta.get
import space.kscience.dataforge.meta.string
import space.kscience.dataforge.values.ListValue
import java.nio.file.Path
import kotlin.test.Ignore
import kotlin.test.assertEquals
class TestNumassDirectory {
@@ -31,10 +30,10 @@ class TestNumassDirectory {
}
@Test
@Ignore
fun testTQDCRead() = runBlocking {
val pointPath = Path.of("C:\\Users\\altavir\\Desktop\\p20211122173034(20s).dat")
val point: NumassPoint = context.readNumassPointFile(pointPath)!!
val pointPath = Path.of("src/test/resources", "testData/tqdc")
val set: NumassSet = context.readNumassDirectory(pointPath)
val point = set.first { it.voltage == 16000.0 }
point.getChannels().forEach { (channel, block) ->
println("$channel: $block")
if (block is ParentBlock) {

View File

@@ -13,11 +13,14 @@ val plotlyVersion: String by rootProject.extra
val kmathVersion: String by rootProject.extra
dependencies {
implementation(project(":numass-data-proto"))
implementation(project(":numass-model"))
implementation(project(":numass-analysis"))
implementation(projects.numassDataProto)
implementation(projects.numassModel)
implementation(projects.numassAnalysis)
implementation("space.kscience:dataforge-workspace:$dataforgeVersion")
implementation("space.kscience:plotlykt-core:$plotlyVersion")
implementation("space.kscience:kmath-histograms:$kmathVersion")
implementation("space.kscience:kmath-for-real:$kmathVersion")
implementation("space.kscience:plotlykt-jupyter:$plotlyVersion")
implementation("space.kscience:kmath-jupyter:$kmathVersion")
}
kscience {
jupyterLibrary("ru.inr.mass.notebook.NumassJupyter")
}

View File

@@ -0,0 +1,40 @@
package ru.inr.mass.notebook
import org.jetbrains.kotlinx.jupyter.api.HTML
import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration
import ru.inr.mass.data.api.NumassBlock
import ru.inr.mass.data.api.NumassSet
import ru.inr.mass.workspace.Numass
import ru.inr.mass.workspace.numassSet
import space.kscience.plotly.Plotly
internal class NumassJupyter : JupyterIntegration() {
override fun Builder.onLoaded() {
repositories(
"https://repo.kotlin.link"
)
import(
"ru.inr.mass.models.*",
"ru.inr.mass.data.analysis.*",
"ru.inr.mass.workspace.*",
"ru.inr.mass.data.api.*",
"ru.inr.mass.data.proto.*",
"space.kscience.dataforge.data.*",
"kotlinx.coroutines.*",
"kotlinx.coroutines.flow.*",
)
import<Numass>()
render<NumassBlock> {
}
render<NumassSet> { numassSet ->
HTML(Plotly.numassSet(numassSet).render(), true)
}
}
}
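// Once the published artifact is on the notebook classpath, the Kotlin Jupyter kernel
// discovers this integration automatically; a notebook cell then needs only something
// like the following (coordinates are hypothetical):
//
// @file:DependsOn("ru.inr.mass:numass-workspace:<version>")
// Numass.readNumassDirectory("path/to/set") // rendered via Plotly.numassSet by the renderer above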

View File

@@ -1,6 +1,8 @@
package ru.inr.mass.workspace
package ru.inr.mass.scripts
import ru.inr.mass.data.proto.NumassDirectorySet
import ru.inr.mass.workspace.Numass.readNumassRepository
import ru.inr.mass.workspace.numassSet
import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.data.await
import space.kscience.dataforge.data.getData
@@ -11,6 +13,6 @@ suspend fun main() {
val repo: DataTree<NumassDirectorySet> = readNumassRepository("D:\\Work\\Numass\\data\\2018_04")
//val dataPath = Path.of("D:\\Work\\Numass\\data\\2018_04\\Adiabacity_19\\set_4\\")
//val testSet = NUMASS.context.readNumassDirectory(dataPath)
val testSet = repo.getData("Adiabacity_19.set_4")?.await() ?: error("Not found")
Plotly.numassDirectory(testSet).makeFile()
val testSet = repo.getData("Adiabacity_19.set_3")?.await() ?: error("Not found")
Plotly.numassSet(testSet).makeFile()
}

View File

@@ -2,7 +2,7 @@ package ru.inr.mass.scripts
import kotlinx.coroutines.flow.collect
import ru.inr.mass.data.proto.NumassDirectorySet
import ru.inr.mass.workspace.readNumassRepository
import ru.inr.mass.workspace.Numass.readNumassRepository
import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.data.filter
import space.kscience.dataforge.meta.string

View File

@@ -1,13 +1,10 @@
package ru.inr.mass.scripts
import kotlinx.coroutines.flow.toList
import kotlinx.html.code
import kotlinx.html.h2
import kotlinx.html.p
import kotlinx.serialization.json.Json
import ru.inr.mass.workspace.readNumassDirectory
import space.kscience.dataforge.io.JsonMetaFormat
import space.kscience.dataforge.io.toString
import ru.inr.mass.workspace.Numass.readNumassDirectory
import space.kscience.dataforge.meta.MetaSerializer
import space.kscience.plotly.*
@@ -33,13 +30,13 @@ suspend fun main() {
h2 { +"Analysis" }
plot {
histogram {
name="max"
name = "max"
x.numbers = frames.map { frame -> frame.signal.maxOf { it.toUShort().toInt() - Short.MAX_VALUE } }
}
histogram {
name="max-min"
xbins{
name = "max-min"
xbins {
size = 2.0
}
x.numbers = frames.map { frame ->

View File

@@ -0,0 +1,47 @@
package ru.inr.mass.workspace
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.runBlocking
import kotlinx.coroutines.withContext
import ru.inr.mass.data.api.NumassSet
import ru.inr.mass.data.proto.NumassDirectorySet
import ru.inr.mass.data.proto.readNumassDirectory
import space.kscience.dataforge.data.*
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.NameToken
import java.nio.file.Files
import java.nio.file.Path
import kotlin.io.path.ExperimentalPathApi
import kotlin.io.path.exists
import kotlin.io.path.isDirectory
import kotlin.io.path.relativeTo
import kotlin.streams.toList
object Numass {
fun readNumassDirectory(path: String): NumassDirectorySet = NUMASS.context.readNumassDirectory(path)
@OptIn(ExperimentalPathApi::class)
fun readNumassRepository(path: Path): DataTree<NumassDirectorySet> = runBlocking {
ActiveDataTree {
@Suppress("BlockingMethodInNonBlockingContext")
withContext(Dispatchers.IO) {
Files.walk(path).filter {
it.isDirectory() && it.resolve("meta").exists()
}.toList().forEach { childPath ->
val name = Name(childPath.relativeTo(path).map { segment ->
NameToken(segment.fileName.toString())
})
val value = NUMASS.context.readNumassDirectory(childPath)
static(name, value, value.meta)
}
}
//TODO add file watcher
}
}
fun readNumassRepository(path: String): DataTree<NumassDirectorySet> = readNumassRepository(Path.of(path))
}
operator fun DataSet<NumassSet>.get(name: String): NumassSet? = runBlocking {
getData(Name.parse(name))?.await()
}
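// Usage sketch for the facade and accessor above (paths and set names are illustrative):
val exampleRepo = Numass.readNumassRepository("D:\\Work\\Numass\\data\\2018_04")
val exampleSet: NumassSet = exampleRepo["Adiabacity_19.set_3"] ?: error("Set not found")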

View File

@@ -1,57 +0,0 @@
@file:Suppress("EXPERIMENTAL_API_USAGE")
package ru.inr.mass.workspace
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.runBlocking
import ru.inr.mass.data.api.NumassPoint
import space.kscience.dataforge.context.logger
import space.kscience.dataforge.context.warn
import space.kscience.kmath.histogram.UnivariateHistogram
import space.kscience.kmath.histogram.center
import space.kscience.kmath.histogram.put
import space.kscience.kmath.misc.UnstableKMathAPI
import space.kscience.kmath.structures.DoubleBuffer
import space.kscience.kmath.structures.asBuffer
/**
* Build an amplitude spectrum with a bin size of 1.0, counted from 0.0. Some bins could be missing
*/
fun NumassPoint.spectrum(): UnivariateHistogram = UnivariateHistogram.uniform(1.0) {
runBlocking {
events.collect {
putValue(it.amplitude.toDouble())
}
}
}
operator fun UnivariateHistogram.component1(): DoubleBuffer = bins.map { it.domain.center }.toDoubleArray().asBuffer()
operator fun UnivariateHistogram.component2(): DoubleBuffer = bins.map { it.value }.toDoubleArray().asBuffer()
fun Collection<NumassPoint>.spectrum(): UnivariateHistogram {
if (distinctBy { it.voltage }.size != 1) {
NUMASS.logger.warn { "Spectrum is generated from points with different hv value: $this" }
}
return UnivariateHistogram.uniform(1.0) {
runBlocking {
this@spectrum.forEach { point ->
point.events.collect { put(it.amplitude.toDouble()) }
}
}
}
}
/**
* Re-shape the spectrum with an increased bin size and a new range. Throws an error if the new bin size is smaller than the old one.
*/
@OptIn(UnstableKMathAPI::class)
fun UnivariateHistogram.reShape(
binSize: Int,
channelRange: IntRange,
): UnivariateHistogram = UnivariateHistogram.uniform(binSize.toDouble()) {
this@reShape.bins.filter { it.domain.center.toInt() in channelRange }.forEach { bin ->
if (bin.domain.volume() > binSize.toDouble()) error("Can't reShape the spectrum with increased binning")
putValue(bin.domain.center, bin.value)
}
}

View File

@@ -1,39 +0,0 @@
package ru.inr.mass.workspace
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
import ru.inr.mass.data.proto.NumassDirectorySet
import ru.inr.mass.data.proto.readNumassDirectory
import space.kscience.dataforge.data.ActiveDataTree
import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.data.static
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.NameToken
import java.nio.file.Files
import java.nio.file.Path
import kotlin.io.path.ExperimentalPathApi
import kotlin.io.path.exists
import kotlin.io.path.isDirectory
import kotlin.io.path.relativeTo
import kotlin.streams.toList
fun readNumassDirectory(path: String): NumassDirectorySet = NUMASS.context.readNumassDirectory(path)
@OptIn(ExperimentalPathApi::class)
suspend fun readNumassRepository(path: Path): DataTree<NumassDirectorySet> = ActiveDataTree {
@Suppress("BlockingMethodInNonBlockingContext")
withContext(Dispatchers.IO) {
Files.walk(path).filter {
it.isDirectory() && it.resolve("meta").exists()
}.toList().forEach { childPath ->
val name = Name(childPath.relativeTo(path).map { segment ->
NameToken(segment.fileName.toString())
})
val value = NUMASS.context.readNumassDirectory(childPath)
static(name, value, value.meta)
}
}
//TODO add file watcher
}
suspend fun readNumassRepository(path: String): DataTree<NumassDirectorySet> = readNumassRepository(Path.of(path))

View File

@@ -1,8 +1,13 @@
package ru.inr.mass.workspace
import kotlinx.coroutines.runBlocking
import kotlinx.html.h1
import kotlinx.html.h2
import ru.inr.mass.data.api.NumassPoint
import ru.inr.mass.data.analysis.NumassAmplitudeSpectrum
import ru.inr.mass.data.analysis.NumassEventExtractor
import ru.inr.mass.data.analysis.amplitudeSpectrum
import ru.inr.mass.data.analysis.timeHistogram
import ru.inr.mass.data.api.NumassSet
import ru.inr.mass.data.proto.HVData
import ru.inr.mass.data.proto.NumassDirectorySet
import space.kscience.dataforge.values.asValue
@ -14,29 +19,29 @@ import space.kscience.kmath.operations.asIterable
import space.kscience.kmath.structures.Buffer
import space.kscience.kmath.structures.DoubleBuffer
import space.kscience.plotly.*
import space.kscience.plotly.models.Scatter
import space.kscience.plotly.models.Trace
import space.kscience.plotly.models.TraceValues
import space.kscience.plotly.models.*
/**
* Plot a kmath histogram
*/
@OptIn(UnstableKMathAPI::class)
fun Plot.histogram(histogram: UnivariateHistogram, block: Scatter.() -> Unit): Trace = scatter {
fun Plot.histogram(histogram: UnivariateHistogram, block: Scatter.() -> Unit = {}): Trace = scatter {
x.numbers = histogram.bins.map { it.domain.center }
y.numbers = histogram.bins.map { it.value }
line.shape = LineShape.hv
block()
}
fun Plot.amplitudeSpectrum(
point: NumassPoint,
binSize: Int = 20,
range: IntRange = 0..2000,
name: String = point.toString(),
fun Plot.histogram(
spectrum: NumassAmplitudeSpectrum,
binSize: UInt = 20U,
block: Scatter.() -> Unit = {},
): Trace = scatter {
histogram(point.spectrum().reShape(binSize, range)) {
this.name = name
}
val binned = spectrum.binned(binSize)
x.numbers = binned.keys.map { (it.first + it.last).toDouble() / 2.0 }
y.numbers = binned.values
line.shape = LineShape.hv
block()
}
/**
@@ -47,20 +52,37 @@ fun Plot.hvData(data: HVData): Trace = scatter {
y.numbers = data.map { it.value }
}
fun Plotly.numassDirectory(set: NumassDirectorySet, binSize: Int = 20, range: IntRange = 0..2000): PlotlyPage =
fun Plotly.numassSet(
set: NumassSet,
amplitudeBinSize: UInt = 20U,
eventExtractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY,
): PlotlyPage =
Plotly.page {
h1 {
+"Numass point set ${set.path}"
+"Numass point set ${ShapeType.path}"
}
h2 {
+"Amplitude spectrum"
}
plot {
runBlocking {
set.points.sortedBy { it.index }.forEach {
amplitudeSpectrum(it, binSize, range)
histogram(it.amplitudeSpectrum(eventExtractor), amplitudeBinSize)
}
}
}
h2 {
+"Time spectra"
}
plot {
set.points.sortedBy { it.index }.forEach {
histogram(it.timeHistogram(1e3))
}
layout.yaxis.type = AxisType.log
}
if (set is NumassDirectorySet) {
set.getHvData()?.let { entries ->
h2 {
+"HV"
@@ -70,6 +92,7 @@ fun Plotly.numassDirectory(set: NumassDirectorySet, binSize: Int = 20, range: In
}
}
}
}
/**
* Add a number buffer accessor for Plotly trace values