diff --git a/numass-workspace/numass.json b/notebooks/numass.json similarity index 100% rename from numass-workspace/numass.json rename to notebooks/numass.json diff --git a/numass-analysis/build.gradle.kts b/numass-analysis/build.gradle.kts index d86ddf8..14516f4 100644 --- a/numass-analysis/build.gradle.kts +++ b/numass-analysis/build.gradle.kts @@ -12,14 +12,18 @@ val tablesVersion: String by rootProject.extra kotlin.sourceSets { commonMain { dependencies { - api(project(":numass-data-model")) + api(projects.numass.numassDataModel) api("space.kscience:dataforge-io:$dataforgeVersion") api("space.kscience:tables-kt:$tablesVersion") + api("space.kscience:kmath-histograms:$kmathVersion") api("space.kscience:kmath-complex:$kmathVersion") api("space.kscience:kmath-stat:$kmathVersion") - api("space.kscience:kmath-histograms:$kmathVersion") } } } +kscience{ + useAtomic() +} + diff --git a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/AbstractAnalyzer.kt b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/AbstractAnalyzer.kt deleted file mode 100644 index b5f5e7b..0000000 --- a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/AbstractAnalyzer.kt +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright 2017 Alexander Nozik. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package ru.inr.mass.data.analysis - -import kotlinx.coroutines.flow.* -import ru.inr.mass.data.api.NumassBlock -import ru.inr.mass.data.api.NumassEvent -import ru.inr.mass.data.api.NumassSet -import ru.inr.mass.data.api.SignalProcessor -import space.kscience.dataforge.meta.Meta -import space.kscience.dataforge.meta.get -import space.kscience.dataforge.meta.int -import space.kscience.dataforge.values.Value -import space.kscience.tables.RowTable -import space.kscience.tables.Table - -/** - * Created by darksnake on 11.07.2017. - */ -public abstract class AbstractAnalyzer( - private val processor: SignalProcessor? = null, -) : NumassAnalyzer { - - /** - * Return unsorted stream of events including events from frames. - * In theory, events after processing could be unsorted due to mixture of frames and events. - * In practice usually block have either frame or events, but not both. 
- * - * @param block - * @return - */ - override fun getEvents(block: NumassBlock, meta: Meta): Flow { - val range = meta.getRange() - return getAllEvents(block).filter { event -> - event.amplitude.toInt() in range - } - } - - protected fun Meta.getRange(): IntRange { - val loChannel = get("window.lo")?.int ?: 0 - val upChannel = get("window.up")?.int ?: Int.MAX_VALUE - return loChannel until upChannel - } - - protected fun getAllEvents(block: NumassBlock): Flow { - return when { - block.framesCount == 0L -> block.events - processor == null -> throw IllegalArgumentException("Signal processor needed to analyze frames") - else -> flow { - emitAll(block.events) - emitAll(block.frames.flatMapConcat { processor.analyze(it) }) - } - } - } - -// /** -// * Get table format for summary table -// * -// * @param config -// * @return -// */ -// protected open fun getTableFormat(config: Meta): ValueTableHeader { -// return TableFormatBuilder() -// .addNumber(HV_KEY, X_VALUE_KEY) -// .addNumber(NumassAnalyzer.LENGTH_KEY) -// .addNumber(NumassAnalyzer.COUNT_KEY) -// .addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY) -// .addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY) -// .addColumn(NumassAnalyzer.WINDOW_KEY) -// .addTime() -// .build() -// } - - override suspend fun analyzeSet(set: NumassSet, config: Meta): Table = RowTable( - NumassAnalyzer.length, - NumassAnalyzer.count, - NumassAnalyzer.cr, - NumassAnalyzer.crError, -// NumassAnalyzer.window, -// NumassAnalyzer.timestamp - ) { - - set.points.forEach { point -> - analyzeParent(point, config) - } - } - - public companion object { -// public val NAME_LIST: List = listOf( -// NumassAnalyzer.LENGTH_KEY, -// NumassAnalyzer.COUNT_KEY, -// NumassAnalyzer.COUNT_RATE_KEY, -// NumassAnalyzer.COUNT_RATE_ERROR_KEY, -// NumassAnalyzer.WINDOW_KEY, -// NumassAnalyzer.TIME_KEY -// ) - } -} diff --git a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/DebunchAnalyzer.kt b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/DebunchAnalyzer.kt deleted file mode 100644 index 69e82a7..0000000 --- a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/DebunchAnalyzer.kt +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2017 Alexander Nozik. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package ru.inr.mass.data.analysis - -import ru.inr.mass.data.api.NumassBlock -import ru.inr.mass.data.api.SignalProcessor -import space.kscience.dataforge.meta.Meta -import space.kscience.dataforge.meta.descriptors.MetaDescriptor - -/** - * Block analyzer that can perform debunching - * Created by darksnake on 11.07.2017. - */ -public class DebunchAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(processor) { - - override suspend fun analyze(block: NumassBlock, config: Meta): NumassAnalyzerResult { - TODO() - } - - override val descriptor: MetaDescriptor? 
= null -} diff --git a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassAmplitudeSpectrum.kt b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassAmplitudeSpectrum.kt new file mode 100644 index 0000000..9c452ff --- /dev/null +++ b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassAmplitudeSpectrum.kt @@ -0,0 +1,70 @@ +package ru.inr.mass.data.analysis + +import kotlinx.coroutines.coroutineScope +import kotlinx.coroutines.flow.collect +import kotlinx.coroutines.launch +import ru.inr.mass.data.api.NumassBlock +import space.kscience.kmath.histogram.LongCounter +import kotlin.math.min + +public class NumassAmplitudeSpectrum(public val amplitudes: Map) { + + public val minChannel: UShort by lazy { amplitudes.keys.minOf { it } } + public val maxChannel: UShort by lazy { amplitudes.keys.maxOf { it } } + + public fun binned(binSize: UInt, range: UIntRange = minChannel..maxChannel): Map { + val keys = sequence { + var left = range.first + do { + val right = min(left + binSize, range.last) + yield(left..right) + left = right + } while (right < range.last) + } + + return keys.associateWith { bin -> amplitudes.filter { it.key in bin }.values.sum().toDouble() } + } +} + +/** + * Build an amplitude spectrum with bin of 1.0 counted from 0.0. Some bins could be missing + */ +public suspend fun NumassBlock.amplitudeSpectrum( + extractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY, +): NumassAmplitudeSpectrum { + val map = HashMap() + extractor.extract(this).collect { event -> + map.getOrPut(event.amplitude) { LongCounter() }.add(1L) + } + return NumassAmplitudeSpectrum(map.mapValues { it.value.value.toULong() }) +} + +/** + * Collect events from block in parallel + */ +public suspend fun Collection.amplitudeSpectrum( + extractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY, +): NumassAmplitudeSpectrum { + val hist = List(UShort.MAX_VALUE.toInt()) { + LongCounter() + } + coroutineScope { + forEach { block -> + launch { + extractor.extract(block).collect { event -> + hist[event.amplitude.toInt()].add(1L) + } + } + } + } + + val map = hist.mapIndexedNotNull { index, counter -> + if (counter.value == 0L) { + null + } else { + index.toUShort() to counter.value.toULong() + } + }.toMap() + + return NumassAmplitudeSpectrum(map) +} \ No newline at end of file diff --git a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassAnalyzer.kt b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassAnalyzer.kt index 4dfb833..b7a5a9a 100644 --- a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassAnalyzer.kt +++ b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassAnalyzer.kt @@ -17,42 +17,46 @@ package ru.inr.mass.data.analysis import kotlinx.coroutines.flow.Flow -import kotlinx.coroutines.flow.collect -import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.MAX_CHANNEL -import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.channel -import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.count -import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.cr -import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.crError +import kotlinx.coroutines.flow.filter import ru.inr.mass.data.api.* import ru.inr.mass.data.api.NumassPoint.Companion.HV_KEY -import ru.inr.mass.data.api.NumassPoint.Companion.LENGTH_KEY import space.kscience.dataforge.meta.* -import space.kscience.dataforge.meta.descriptors.Described +import space.kscience.dataforge.names.Name import 
space.kscience.dataforge.names.asName -import space.kscience.dataforge.values.* -import space.kscience.kmath.histogram.Counter -import space.kscience.kmath.histogram.LongCounter -import space.kscience.tables.* -import kotlin.math.max -import kotlin.math.min -import kotlin.math.pow -import kotlin.math.sqrt +import space.kscience.dataforge.values.ListValue +import space.kscience.dataforge.values.Value +import space.kscience.dataforge.values.ValueType +import space.kscience.dataforge.values.int +import space.kscience.tables.ColumnHeader +import space.kscience.tables.MetaRow +import space.kscience.tables.RowTable +import space.kscience.tables.Table +import kotlin.properties.ReadWriteProperty + + +public fun MutableMetaProvider.uIntRange( + default: UIntRange = 0U..Int.MAX_VALUE.toUInt(), + key: Name? = null, +): ReadWriteProperty = value( + key, + reader = { value -> + val (l, r) = value?.list ?: return@value default + l.int.toUInt()..r.int.toUInt() + }, + writer = { range -> + ListValue(range.first.toInt(), range.last.toInt()) + } +) public class NumassAnalyzerResult : Scheme() { - public var count: Long? by long(NumassAnalyzer.count.name.asName()) - public var countRate: Double? by double(NumassAnalyzer.cr.name.asName()) - public var countRateError: Double? by double(NumassAnalyzer.crError.name.asName()) - public var length: Long? by long(NumassAnalyzer.length.name.asName()) + public var count: Long by long(0L, NumassAnalyzer.count.name.asName()) + public var countRate: Double by double(0.0, NumassAnalyzer.cr.name.asName()) + public var countRateError: Double by double(0.0, NumassAnalyzer.crError.name.asName()) + public var length: Double by double(0.0, NumassAnalyzer.length.name.asName()) public var voltage: Double? by double(HV_KEY.asName()) - public var window: UIntRange? - get() = meta["window"]?.value?.list?.let { - it[0].int.toUInt().rangeTo(it[1].int.toUInt()) - } - set(value) { - meta["window"] = value?.let { ListValue(it.first.toInt(), it.first.toInt()) } - } + public var parameters: NumassAnalyzerParameters by spec(NumassAnalyzerParameters) public companion object : SchemeSpec(::NumassAnalyzerResult) } @@ -62,7 +66,9 @@ public class NumassAnalyzerResult : Scheme() { * A general raw data analysis utility. Could have different implementations * Created by darksnake on 06-Jul-17. */ -public interface NumassAnalyzer : Described { +public abstract class NumassAnalyzer { + + public abstract val extractor: NumassEventExtractor /** * Perform analysis on block. 
The values for count rate, its error and point length in nanos must @@ -71,67 +77,36 @@ public interface NumassAnalyzer : Described { * @param block * @return */ - public suspend fun analyze(block: NumassBlock, config: Meta = Meta.EMPTY): NumassAnalyzerResult + protected abstract suspend fun analyzeInternal( + block: NumassBlock, + parameters: NumassAnalyzerParameters, + ): NumassAnalyzerResult /** * Analysis result for point including hv information * @param point - * @param config + * @param parameters * @return */ - public suspend fun analyzeParent(point: ParentBlock, config: Meta = Meta.EMPTY): NumassAnalyzerResult { -// //Add properties to config -// val newConfig = config.builder.apply { -// if (point is NumassPoint) { -// setValue("voltage", point.voltage) -// setValue("index", point.index) -// } -// setValue("channel", point.channel) -// } - val res = analyze(point, config) + public suspend fun analyze( + point: ParentBlock, + parameters: NumassAnalyzerParameters = NumassAnalyzerParameters.empty(), + ): NumassAnalyzerResult { + val res = analyzeInternal(point, parameters) if (point is NumassPoint) { res.voltage = point.voltage } + res.parameters = parameters return res } - /** - * Return unsorted stream of events including events from frames - * - * @param block - * @return - */ - public fun getEvents(block: NumassBlock, meta: Meta = Meta.EMPTY): Flow - - /** - * Analyze the whole set. And return results as a table - * - * @param set - * @param config - * @return - */ - public suspend fun analyzeSet(set: NumassSet, config: Meta): Table - - /** - * Get the approximate number of events in block. Not all analyzers support precise event counting - * - * @param block - * @param config - * @return - */ - public suspend fun getCount(block: NumassBlock, config: Meta): Long = - analyze(block, config).getValue(count.name)?.long ?: 0L - - /** - * Get approximate effective point length in nanos. It is not necessary corresponds to real point length. - * - * @param block - * @param config - * @return - */ - public suspend fun getLength(block: NumassBlock, config: Meta = Meta.EMPTY): Long = - analyze(block, config).getValue(LENGTH_KEY)?.long ?: 0L + protected suspend fun NumassBlock.flowFilteredEvents( + parameters: NumassAnalyzerParameters, + ): Flow { + val window = parameters.window + return extractor.extract(this).filter { it.amplitude in window } + } public companion object { @@ -149,120 +124,143 @@ public interface NumassAnalyzer : Described { } } -public suspend fun NumassAnalyzer.getAmplitudeSpectrum( - block: NumassBlock, - range: UIntRange = 0U..MAX_CHANNEL, - config: Meta = Meta.EMPTY, -): Table { - val seconds = block.getLength().inWholeMilliseconds.toDouble() / 1000.0 - return getEvents(block, config).getAmplitudeSpectrum(seconds, range) -} - /** - * Calculate number of counts in the given channel + * Analyze the whole set. And return results as a table * - * @param spectrum - * @param loChannel - * @param upChannel - * @return - */ -internal fun Table.countInWindow(loChannel: Short, upChannel: Short): Long = rows.filter { row -> - row[channel]?.int in loChannel until upChannel -}.sumOf { it[count]?.long ?: 0L } - -/** - * Calculate the amplitude spectrum for a given block. 
The s - * - * @param this@getAmplitudeSpectrum - * @param length length in seconds, used for count rate calculation + * @param set * @param config * @return */ -private suspend fun Flow.getAmplitudeSpectrum( - length: Double, - range: UIntRange = 0U..MAX_CHANNEL, -): Table { - - //optimized for fastest computation - val spectrum: MutableMap = HashMap() - collect { event -> - val channel = event.amplitude - spectrum.getOrPut(channel.toUInt()) { - LongCounter() - }.add(1L) - } - - return RowTable(channel, count, cr, crError) { - range.forEach { ch -> - val countValue: Long = spectrum[ch]?.value ?: 0L - valueRow( - channel to ch, - count to countValue, - cr to (countValue.toDouble() / length), - crError to sqrt(countValue.toDouble()) / length - ) - } +public suspend fun NumassAnalyzer.analyzeSet( + set: NumassSet, + config: NumassAnalyzerParameters = NumassAnalyzerParameters.empty(), +): Table = RowTable( + NumassAnalyzer.length, + NumassAnalyzer.count, + NumassAnalyzer.cr, + NumassAnalyzer.crError, +// NumassAnalyzer.window, +// NumassAnalyzer.timestamp +) { + set.points.forEach { point -> + addRow(MetaRow(analyze(point, config).meta)) } } - -/** - * Apply window and binning to a spectrum. Empty bins are filled with zeroes - */ -private fun Table.withBinning( - binSize: UInt, range: UIntRange = 0U..MAX_CHANNEL, -): Table = RowTable(channel, count, cr, crError) { -// var chan = loChannel -// ?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.int }.min().orElse(0) // -// val top = upChannel -// ?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.int }.max().orElse(1) - - val binSizeColumn = newColumn("binSize") - - var chan = range.first - - while (chan < range.last - binSize) { - val counter = LongCounter() - val countRateCounter = Counter.real() - val countRateDispersionCounter = Counter.real() - - val binLo = chan - val binUp = chan + binSize - - rows.filter { row -> - (row[channel]?.int ?: 0U) in binLo until binUp - }.forEach { row -> - counter.add(row[count]?.long ?: 0L) - countRateCounter.add(row[cr]?.double ?: 0.0) - countRateDispersionCounter.add(row[crError]?.double?.pow(2.0) ?: 0.0) - } - val bin = min(binSize, range.last - chan) - - valueRow( - channel to (chan.toDouble() + bin.toDouble() / 2.0), - count to counter.value, - cr to countRateCounter.value, - crError to sqrt(countRateDispersionCounter.value), - binSizeColumn to bin - ) - chan += binSize - } -} - -/** - * Subtract reference spectrum. 
- */ -private fun subtractAmplitudeSpectrum( - sp1: Table, sp2: Table, -): Table = RowTable(channel, cr, crError) { - sp1.rows.forEach { row1 -> - val channelValue = row1[channel]?.double - val row2 = sp2.rows.find { it[channel]?.double == channelValue } ?: MapRow(emptyMap()) - - val value = max((row1[cr]?.double ?: 0.0) - (row2[cr]?.double ?: 0.0), 0.0) - val error1 = row1[crError]?.double ?: 0.0 - val error2 = row2[crError]?.double ?: 0.0 - val error = sqrt(error1 * error1 + error2 * error2) - valueRow(channel to channelValue, cr to value, crError to error) - } -} \ No newline at end of file +//public suspend fun NumassAnalyzer.getAmplitudeSpectrum( +// block: NumassBlock, +// range: UIntRange = 0U..MAX_CHANNEL, +// config: Meta = Meta.EMPTY, +//): Table { +// val seconds = block.getLength().inWholeMilliseconds.toDouble() / 1000.0 +// return getEvents(block, config).getAmplitudeSpectrum(seconds, range) +//} +// +///** +// * Calculate number of counts in the given channel +// * +// * @param spectrum +// * @param loChannel +// * @param upChannel +// * @return +// */ +//internal fun Table.countInWindow(loChannel: Short, upChannel: Short): Long = rows.filter { row -> +// row[channel]?.int in loChannel until upChannel +//}.sumOf { it[count]?.long ?: 0L } +// +///** +// * Calculate the amplitude spectrum for a given block. The s +// * +// * @param this@getAmplitudeSpectrum +// * @param length length in seconds, used for count rate calculation +// * @param config +// * @return +// */ +//private suspend fun Flow.getAmplitudeSpectrum( +// length: Double, +// range: UIntRange = 0U..MAX_CHANNEL, +//): Table { +// +// //optimized for fastest computation +// val spectrum: MutableMap = HashMap() +// collect { event -> +// val channel = event.amplitude +// spectrum.getOrPut(channel.toUInt()) { +// LongCounter() +// }.add(1L) +// } +// +// return RowTable(channel, count, cr, crError) { +// range.forEach { ch -> +// val countValue: Long = spectrum[ch]?.value ?: 0L +// valueRow( +// channel to ch, +// count to countValue, +// cr to (countValue.toDouble() / length), +// crError to sqrt(countValue.toDouble()) / length +// ) +// } +// } +//} +// +///** +// * Apply window and binning to a spectrum. 
Empty bins are filled with zeroes +// */ +//private fun Table.withBinning( +// binSize: UInt, range: UIntRange = 0U..MAX_CHANNEL, +//): Table = RowTable(channel, count, cr, crError) { +//// var chan = loChannel +//// ?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.int }.min().orElse(0) +//// +//// val top = upChannel +//// ?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.int }.max().orElse(1) +// +// val binSizeColumn = newColumn("binSize") +// +// var chan = range.first +// +// while (chan < range.last - binSize) { +// val counter = LongCounter() +// val countRateCounter = Counter.real() +// val countRateDispersionCounter = Counter.real() +// +// val binLo = chan +// val binUp = chan + binSize +// +// rows.filter { row -> +// (row[channel]?.int ?: 0U) in binLo until binUp +// }.forEach { row -> +// counter.add(row[count]?.long ?: 0L) +// countRateCounter.add(row[cr]?.double ?: 0.0) +// countRateDispersionCounter.add(row[crError]?.double?.pow(2.0) ?: 0.0) +// } +// val bin = min(binSize, range.last - chan) +// +// valueRow( +// channel to (chan.toDouble() + bin.toDouble() / 2.0), +// count to counter.value, +// cr to countRateCounter.value, +// crError to sqrt(countRateDispersionCounter.value), +// binSizeColumn to bin +// ) +// chan += binSize +// } +//} +// +///** +// * Subtract reference spectrum. +// */ +//private fun subtractAmplitudeSpectrum( +// sp1: Table, sp2: Table, +//): Table = RowTable(channel, cr, crError) { +// sp1.rows.forEach { row1 -> +// val channelValue = row1[channel]?.double +// val row2 = sp2.rows.find { it[channel]?.double == channelValue } ?: MapRow(emptyMap()) +// +// val value = max((row1[cr]?.double ?: 0.0) - (row2[cr]?.double ?: 0.0), 0.0) +// val error1 = row1[crError]?.double ?: 0.0 +// val error2 = row2[crError]?.double ?: 0.0 +// val error = sqrt(error1 * error1 + error2 * error2) +// valueRow(channel to channelValue, cr to value, crError to error) +// } +//} \ No newline at end of file diff --git a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassAnalyzerParameters.kt b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassAnalyzerParameters.kt new file mode 100644 index 0000000..264f337 --- /dev/null +++ b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassAnalyzerParameters.kt @@ -0,0 +1,46 @@ +package ru.inr.mass.data.analysis + +import space.kscience.dataforge.meta.* + +public class TimeAnalyzerParameters : Scheme() { + + public enum class AveragingMethod { + ARITHMETIC, + WEIGHTED, + GEOMETRIC + } + + public var value: Int? by int() + + /** + * The relative fraction of events that should be removed by time cut + */ + public var crFraction by double() + public var min by double(0.0) + public var crMin by double(0.0) + + /** + * The number of events in chunk to split the chain into. If null, no chunks are used + */ + public var chunkSize: Int? 
by int() + + public var inverted: Boolean by boolean(true) + public var sortEvents: Boolean by boolean(false) + + /** + * Chunk averaging method + */ + public var averagingMethod: AveragingMethod by enum(AveragingMethod.WEIGHTED) + + public companion object : SchemeSpec(::TimeAnalyzerParameters) +} + +public class NumassAnalyzerParameters : Scheme() { + public var deadTime: Double by double(0.0) + public var window: UIntRange by uIntRange() + + public var t0: TimeAnalyzerParameters by spec(TimeAnalyzerParameters) + + + public companion object : SchemeSpec(::NumassAnalyzerParameters) +} \ No newline at end of file diff --git a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassEventExtractor.kt b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassEventExtractor.kt new file mode 100644 index 0000000..ee6d7c4 --- /dev/null +++ b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassEventExtractor.kt @@ -0,0 +1,22 @@ +package ru.inr.mass.data.analysis + +import kotlinx.coroutines.flow.Flow +import ru.inr.mass.data.api.NumassBlock +import ru.inr.mass.data.api.NumassEvent + +public fun interface NumassEventExtractor { + public suspend fun extract(block: NumassBlock): Flow + + public companion object { + /** + * A default event extractor that ignores frames + */ + public val EVENTS_ONLY: NumassEventExtractor = NumassEventExtractor { it.events } + } +} + +//public fun NumassEventExtractor.filter( +// condition: NumassBlock.(NumassEvent) -> Boolean, +//): NumassEventExtractor = NumassEventExtractor { block -> +// extract(block).filter { block.condition(it) } +//} \ No newline at end of file diff --git a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassGenerator.kt b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassGenerator.kt index f3936aa..161a59b 100644 --- a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassGenerator.kt +++ b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/NumassGenerator.kt @@ -11,11 +11,12 @@ package ru.inr.mass.data.analysis //import ru.inr.mass.data.api.NumassBlock //import ru.inr.mass.data.api.OrphanNumassEvent //import ru.inr.mass.data.api.SimpleBlock -//import space.kscience.dataforge.tables.Table //import space.kscience.kmath.chains.Chain //import space.kscience.kmath.chains.MarkovChain //import space.kscience.kmath.chains.StatefulChain //import space.kscience.kmath.stat.RandomGenerator +//import space.kscience.tables.Table +//import kotlin.math.ln //import kotlin.time.Duration.Companion.nanoseconds // //private fun RandomGenerator.nextExp(mean: Double): Double { diff --git a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/SimpleAnalyzer.kt b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/SimpleAnalyzer.kt index 2b2af92..b6afbe5 100644 --- a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/SimpleAnalyzer.kt +++ b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/SimpleAnalyzer.kt @@ -14,43 +14,29 @@ * limitations under the License. 
*/ -package inr.numass.data.analyzers +package ru.inr.mass.data.analysis import kotlinx.coroutines.flow.count -import ru.inr.mass.data.analysis.AbstractAnalyzer -import ru.inr.mass.data.analysis.NumassAnalyzerResult import ru.inr.mass.data.api.NumassBlock -import ru.inr.mass.data.api.SignalProcessor -import space.kscience.dataforge.meta.Meta -import space.kscience.dataforge.meta.descriptors.MetaDescriptor -import space.kscience.dataforge.meta.descriptors.value -import space.kscience.dataforge.meta.double -import space.kscience.dataforge.meta.get -import space.kscience.dataforge.meta.int -import space.kscience.dataforge.values.ValueType import kotlin.math.sqrt /** * A simple event counter * Created by darksnake on 07.07.2017. */ -public class SimpleAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(processor) { +public class SimpleAnalyzer( + override val extractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY, +) : NumassAnalyzer() { - override val descriptor: MetaDescriptor = MetaDescriptor { - value("deadTime", ValueType.NUMBER) { - info = "Dead time in nanoseconds for correction" - default(0.0) - } - } + override suspend fun analyzeInternal( + block: NumassBlock, + parameters: NumassAnalyzerParameters + ): NumassAnalyzerResult { - override suspend fun analyze(block: NumassBlock, config: Meta): NumassAnalyzerResult { - val loChannel = config["window.lo"]?.int ?: 0 - val upChannel = config["window.up"]?.int ?: Int.MAX_VALUE - - val count: Int = getEvents(block, config).count() + val count: Int = block.flowFilteredEvents(parameters).count() val length: Double = block.getLength().inWholeNanoseconds.toDouble() / 1e9 - val deadTime = config["deadTime"]?.double ?: 0.0 + val deadTime = parameters.deadTime val countRate = if (deadTime > 0) { val mu = count.toDouble() / length @@ -61,11 +47,10 @@ public class SimpleAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyz val countRateError = sqrt(count.toDouble()) / length return NumassAnalyzerResult { - this.length = length.toLong() + this.length = length this.count = count.toLong() this.countRate = countRate this.countRateError = countRateError - this.window = loChannel.toUInt().rangeTo(upChannel.toUInt()) //TODO NumassAnalyzer.timestamp to block.startTime } } diff --git a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/SmartAnalyzer.kt b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/SmartAnalyzer.kt deleted file mode 100644 index 67412d0..0000000 --- a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/SmartAnalyzer.kt +++ /dev/null @@ -1,104 +0,0 @@ -///* -// * Copyright 2017 Alexander Nozik. -// * -// * Licensed under the Apache License, Version 2.0 (the "License"); -// * you may not use this file except in compliance with the License. -// * You may obtain a copy of the License at -// * -// * http://www.apache.org/licenses/LICENSE-2.0 -// * -// * Unless required by applicable law or agreed to in writing, software -// * distributed under the License is distributed on an "AS IS" BASIS, -// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// * See the License for the specific language governing permissions and -// * limitations under the License. 
-// */ -// -package ru.inr.mass.data.analysis - -import inr.numass.data.analyzers.SimpleAnalyzer -import kotlinx.coroutines.flow.Flow -import ru.inr.mass.data.api.NumassBlock -import ru.inr.mass.data.api.NumassEvent -import ru.inr.mass.data.api.NumassSet -import ru.inr.mass.data.api.SignalProcessor -import space.kscience.dataforge.meta.Meta -import space.kscience.dataforge.meta.descriptors.MetaDescriptor -import space.kscience.dataforge.meta.get -import space.kscience.dataforge.meta.string -import space.kscience.dataforge.values.Value -import space.kscience.dataforge.values.asValue -import space.kscience.dataforge.values.setValue -import space.kscience.tables.Table - -/** - * An analyzer dispatcher which uses different analyzer for different meta - * Created by darksnake on 11.07.2017. - */ -public open class SmartAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(processor) { - private val simpleAnalyzer = SimpleAnalyzer(processor) - private val debunchAnalyzer = DebunchAnalyzer(processor) - private val timeAnalyzer: NumassAnalyzer = TODO()// TimeAnalyzer(processor) - - override val descriptor: MetaDescriptor? = null - - private fun getAnalyzer(config: Meta): NumassAnalyzer = when (val type = config["type"]?.string) { - null -> if (config["t0"] != null) { - timeAnalyzer - } else { - simpleAnalyzer - } - "simple" -> simpleAnalyzer - "time" -> timeAnalyzer - "debunch" -> debunchAnalyzer - else -> throw IllegalArgumentException("Analyzer $type not found") - } - - override suspend fun analyze(block: NumassBlock, config: Meta): NumassAnalyzerResult { - val analyzer = getAnalyzer(config) - val res = analyzer.analyze(block, config) - return NumassAnalyzerResult.read(res.meta).apply { - setValue(T0_KEY, 0.0.asValue()) - } - } - - override fun getEvents(block: NumassBlock, meta: Meta): Flow = - getAnalyzer(meta).getEvents(block, meta) - - - override suspend fun analyzeSet(set: NumassSet, config: Meta): Table { - return getAnalyzer(config).analyzeSet(set, config) -// fun Value.computeExpression(point: NumassPoint): Int { -// return when { -// this.type == ValueType.NUMBER -> this.int -// this.type == ValueType.STRING -> { -// val exprParams = HashMap() -// -// exprParams["U"] = point.voltage -// -// ExpressionUtils.function(this.string, exprParams).toInt() -// } -// else -> error("Can't interpret $type as expression or number") -// } -// } -// -// val lo = config.getValue("window.lo", 0) -// val up = config.getValue("window.up", Int.MAX_VALUE) -// -// val format = getTableFormat(config) -// -// return ListTable.Builder(format) -// .rows(set.points.map { point -> -// val newConfig = config.builder.apply { -// setValue("window.lo", lo.computeExpression(point)) -// setValue("window.up", up.computeExpression(point)) -// } -// analyzeParent(point, newConfig) -// }) -// .build() - } - - public companion object : SmartAnalyzer() { - public const val T0_KEY: String = "t0" - } -} \ No newline at end of file diff --git a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/TimeAnalyzer.kt b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/TimeAnalyzer.kt index 7ac8b4f..8930c2d 100644 --- a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/TimeAnalyzer.kt +++ b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/TimeAnalyzer.kt @@ -15,286 +15,161 @@ // */ // package ru.inr.mass.data.analysis -// -//import hep.dataforge.description.ValueDef -//import hep.dataforge.description.ValueDefs -//import hep.dataforge.meta.Meta -//import 
hep.dataforge.tables.Adapters.* -//import hep.dataforge.tables.TableFormat -//import hep.dataforge.tables.TableFormatBuilder -//import hep.dataforge.values.* -//import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.COUNT_KEY -//import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.COUNT_RATE_ERROR_KEY -//import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.COUNT_RATE_KEY -//import ru.inr.mass.data.analysis.NumassAnalyzer.Companion.LENGTH_KEY -//import ru.inr.mass.data.analysis.TimeAnalyzer.AveragingMethod.* -//import inr.numass.data.api.* -//import inr.numass.data.api.NumassPoint.Companion.HV_KEY -//import ru.inr.mass.data.api.NumassBlock -//import ru.inr.mass.data.api.SignalProcessor -//import space.kscience.dataforge.values.ValueType -//import java.util.* -//import java.util.concurrent.atomic.AtomicLong -//import kotlin.collections.List -//import kotlin.collections.asSequence -//import kotlin.collections.count -//import kotlin.collections.first -//import kotlin.collections.map -//import kotlin.collections.set -//import kotlin.collections.sortBy -//import kotlin.collections.sumBy -//import kotlin.collections.sumByDouble -//import kotlin.collections.toMutableList -//import kotlin.math.* -//import kotlin.streams.asSequence -// -// -///** -// * An analyzer which uses time information from events -// * Created by darksnake on 11.07.2017. -// */ -//@ValueDefs( -// ValueDef( -// key = "separateParallelBlocks", -// type = [ValueType.BOOLEAN], -// info = "If true, then parallel blocks will be forced to be evaluated separately" -// ), -// ValueDef( -// key = "chunkSize", -// type = [ValueType.NUMBER], -// def = "-1", -// info = "The number of events in chunk to split the chain into. If negative, no chunks are used" -// ) -//) -//open class TimeAnalyzer(processor: SignalProcessor? 
= null) : AbstractAnalyzer(processor) { -// -// override fun analyze(block: NumassBlock, config: Meta): Values { -// //In case points inside points -// if (block is ParentBlock && (block.isSequential || config.getBoolean("separateParallelBlocks", false))) { -// return analyzeParent(block, config) -// } -// -// val t0 = getT0(block, config).toLong() -// -// val chunkSize = config.getInt("chunkSize", -1) -// -// val count = super.getEvents(block, config).count() -// val length = block.length.toNanos().toDouble() / 1e9 -// -// val res = when { -// count < 1000 -> ValueMap.ofPairs( -// LENGTH_KEY to length, -// COUNT_KEY to count, -// COUNT_RATE_KEY to count.toDouble() / length, -// COUNT_RATE_ERROR_KEY to sqrt(count.toDouble()) / length -// ) -// chunkSize > 0 -> getEventsWithDelay(block, config) -// .chunked(chunkSize) { analyzeSequence(it.asSequence(), t0) } -// .toList() -// .mean(config.getEnum("mean", WEIGHTED)) -// else -> analyzeSequence(getEventsWithDelay(block, config), t0) -// } -// -// return ValueMap.Builder(res) -// .putValue("blockLength", length) -// .putValue(NumassAnalyzer.WINDOW_KEY, config.getRange()) -// .putValue(NumassAnalyzer.TIME_KEY, block.startTime) -// .putValue(T0_KEY, t0.toDouble() / 1000.0) -// .build() -// } -// -// -// private fun analyzeSequence(sequence: Sequence>, t0: Long): Values { -// val totalN = AtomicLong(0) -// val totalT = AtomicLong(0) -// sequence.filter { pair -> pair.second >= t0 } -// .forEach { pair -> -// totalN.incrementAndGet() -// //TODO add progress listener here -// totalT.addAndGet(pair.second) -// } -// -// if (totalN.toInt() == 0) { -// error("Zero number of intervals") -// } -// -// val countRate = -// 1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000)//1e9 / (totalT.get() / totalN.get() - t0); -// val countRateError = countRate / sqrt(totalN.get().toDouble()) -// val length = totalT.get() / 1e9 -// val count = (length * countRate).toLong() -// -// return ValueMap.ofPairs( -// LENGTH_KEY to length, -// COUNT_KEY to count, -// COUNT_RATE_KEY to countRate, -// COUNT_RATE_ERROR_KEY to countRateError -// ) -// -// } -// -// override fun analyzeParent(point: ParentBlock, config: Meta): Values { -// //Average count rates, do not sum events -// val res = point.blocks.map { it -> analyze(it, config) } -// -// val map = HashMap(res.mean(config.getEnum("mean", WEIGHTED)).asMap()) -// if (point is NumassPoint) { -// map[HV_KEY] = Value.of(point.voltage) -// } -// return ValueMap(map) -// } -// -// enum class AveragingMethod { -// ARITHMETIC, -// WEIGHTED, -// GEOMETRIC -// } -// -// /** -// * Combine multiple blocks from the same point into one -// * -// * @return -// */ -// private fun List.mean(method: AveragingMethod): Values { -// -// if (this.isEmpty()) { -// return ValueMap.Builder() -// .putValue(LENGTH_KEY, 0) -// .putValue(COUNT_KEY, 0) -// .putValue(COUNT_RATE_KEY, 0) -// .putValue(COUNT_RATE_ERROR_KEY, 0) -// .build() -// } -// -// val totalTime = sumByDouble { it.getDouble(LENGTH_KEY) } -// -// val (countRate, countRateDispersion) = when (method) { -// ARITHMETIC -> Pair( -// sumByDouble { it.getDouble(COUNT_RATE_KEY) } / size, -// sumByDouble { it.getDouble(COUNT_RATE_ERROR_KEY).pow(2.0) } / size / size -// ) -// WEIGHTED -> Pair( -// sumByDouble { it.getDouble(COUNT_RATE_KEY) * it.getDouble(LENGTH_KEY) } / totalTime, -// sumByDouble { (it.getDouble(COUNT_RATE_ERROR_KEY) * it.getDouble(LENGTH_KEY) / totalTime).pow(2.0) } -// ) -// GEOMETRIC -> { -// val mean = exp(sumByDouble { ln(it.getDouble(COUNT_RATE_KEY)) } / 
size) -// val variance = (mean / size).pow(2.0) * sumByDouble { -// (it.getDouble(COUNT_RATE_ERROR_KEY) / it.getDouble( -// COUNT_RATE_KEY -// )).pow(2.0) -// } -// Pair(mean, variance) -// } -// } -// -// return ValueMap.Builder(first()) -// .putValue(LENGTH_KEY, totalTime) -// .putValue(COUNT_KEY, sumBy { it.getInt(COUNT_KEY) }) -// .putValue(COUNT_RATE_KEY, countRate) -// .putValue(COUNT_RATE_ERROR_KEY, sqrt(countRateDispersion)) -// .build() -// } -// -// @ValueDefs( -// ValueDef(key = "t0", type = arrayOf(ValueType.NUMBER), info = "Constant t0 cut"), -// ValueDef( -// key = "t0.crFraction", -// type = arrayOf(ValueType.NUMBER), -// info = "The relative fraction of events that should be removed by time cut" -// ), -// ValueDef(key = "t0.min", type = arrayOf(ValueType.NUMBER), def = "0", info = "Minimal t0") -// ) -// protected fun getT0(block: NumassBlock, meta: Meta): Int { -// return if (meta.hasValue("t0")) { -// meta.getInt("t0") -// } else if (meta.hasMeta("t0")) { -// val fraction = meta.getDouble("t0.crFraction") -// val cr = estimateCountRate(block) -// if (cr < meta.getDouble("t0.minCR", 0.0)) { -// 0 -// } else { -// max(-1e9 / cr * ln(1.0 - fraction), meta.getDouble("t0.min", 0.0)).toInt() -// } -// } else { -// 0 -// } -// -// } -// -// private fun estimateCountRate(block: NumassBlock): Double { -// return block.events.count().toDouble() / block.length.toMillis() * 1000 -// } -// -// fun zipEvents(block: NumassBlock, config: Meta): Sequence> { -// return getAllEvents(block).asSequence().zipWithNext() -// } -// -// /** -// * The chain of event with delays in nanos -// * -// * @param block -// * @param config -// * @return -// */ -// fun getEventsWithDelay(block: NumassBlock, config: Meta): Sequence> { -// val inverted = config.getBoolean("inverted", true) -// //range is included in super.getEvents -// val events = super.getEvents(block, config).toMutableList() -// -// if (config.getBoolean("sortEvents", false) || (block is ParentBlock && !block.isSequential)) { -// //sort in place if needed -// events.sortBy { it.timeOffset } -// } -// -// return events.asSequence().zipWithNext { prev, next -> -// val delay = max(next.timeOffset - prev.timeOffset, 0) -// if (inverted) { -// Pair(next, delay) -// } else { -// Pair(prev, delay) -// } -// } -// } -// -// /** -// * The filtered stream of events -// * -// * @param block -// * @param meta -// * @return -// */ -// override fun getEvents(block: NumassBlock, meta: Meta): List { -// val t0 = getT0(block, meta).toLong() -// return getEventsWithDelay(block, meta) -// .filter { pair -> pair.second >= t0 } -// .map { it.first }.toList() -// } -// -// public override fun getTableFormat(config: Meta): TableFormat { -// return TableFormatBuilder() -// .addNumber(HV_KEY, X_VALUE_KEY) -// .addNumber(LENGTH_KEY) -// .addNumber(COUNT_KEY) -// .addNumber(COUNT_RATE_KEY, Y_VALUE_KEY) -// .addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY) -// .addColumn(NumassAnalyzer.WINDOW_KEY) -// .addTime() -// .addNumber(T0_KEY) -// .build() -// } -// -// companion object { -// const val T0_KEY = "t0" -// -// val NAME_LIST = arrayOf( -// LENGTH_KEY, -// COUNT_KEY, -// COUNT_RATE_KEY, -// COUNT_RATE_ERROR_KEY, -// NumassAnalyzer.WINDOW_KEY, -// NumassAnalyzer.TIME_KEY, -// T0_KEY -// ) -// } -//} + +import kotlinx.coroutines.flow.* +import ru.inr.mass.data.analysis.TimeAnalyzerParameters.AveragingMethod +import ru.inr.mass.data.api.NumassBlock +import ru.inr.mass.data.api.NumassEvent +import ru.inr.mass.data.api.ParentBlock +import 
space.kscience.kmath.streaming.asFlow +import space.kscience.kmath.streaming.chunked +import space.kscience.kmath.structures.Buffer +import kotlin.math.* + + +/** + * An analyzer which uses time information from events + * Created by darksnake on 11.07.2017. + */ +public open class TimeAnalyzer(override val extractor: NumassEventExtractor) : NumassAnalyzer() { + + override suspend fun analyzeInternal( + block: NumassBlock, + parameters: NumassAnalyzerParameters, + ): NumassAnalyzerResult { + //Parallel processing and merging of parent blocks + if (block is ParentBlock) { + val res = block.flowBlocks().map { analyzeInternal(it, parameters) }.toList() + return res.combineResults(parameters.t0.averagingMethod) + } + + val t0 = getT0(block, parameters.t0).toLong() + + return when (val chunkSize = parameters.t0.chunkSize) { + null -> block.flowFilteredEvents(parameters) + .byPairs(parameters.t0.inverted) + .analyze(t0) + // // chunk is larger than a number of event + // chunkSize > count -> NumassAnalyzerResult { + // this.length = length + // this.count = count + // this.countRate = count.toDouble() / length + // this.countRateError = sqrt(count.toDouble()) / length + // } + else -> block.flowFilteredEvents(parameters) + .byPairs(parameters.t0.inverted) + .chunked(chunkSize, Buffer.Companion::auto) + .map { it.asFlow().analyze(t0) } + .toList() + .combineResults(parameters.t0.averagingMethod) + } + + } + + + /** + * Analyze given flow of events + delays + */ + private suspend fun Flow>.analyze(t0: Long): NumassAnalyzerResult { + var totalN = 0L + var totalT = 0L + filter { pair -> pair.second >= t0 }.collect { pair -> + totalN++ + //TODO add progress listener here + totalT+= pair.second + } + + if (totalN == 0L) { + error("Zero number of intervals") + } + + val countRate = 1e6 * totalN / (totalT / 1000 - t0 * totalN / 1000) + val countRateError = countRate / sqrt(totalN.toDouble()) + val length = totalT / 1e9 + val count = (length * countRate).toLong() + + return NumassAnalyzerResult { + this.length = totalT / 1e9 + this.count = count + this.countRate = countRate + this.countRateError = countRateError + } + } + + /** + * Combine multiple blocks from the same point into one + * + * @return + */ + private fun List.combineResults(method: AveragingMethod): NumassAnalyzerResult { + + if (this.isEmpty()) { + return NumassAnalyzerResult.empty() + } + + val totalTime = sumOf { it.length } + + val (countRate, countRateDispersion) = when (method) { + AveragingMethod.ARITHMETIC -> Pair( + sumOf { it.countRate } / size, + sumOf { it.countRateError.pow(2.0) } / size / size + ) + AveragingMethod.WEIGHTED -> Pair( + sumOf { it.countRate * it.length } / totalTime, + sumOf { (it.countRateError * it.length / totalTime).pow(2.0) } + ) + AveragingMethod.GEOMETRIC -> { + val mean = exp(sumOf { ln(it.countRate) } / size) + val variance = (mean / size).pow(2.0) * sumOf { + (it.countRateError / it.countRate).pow(2.0) + } + Pair(mean, variance) + } + } + + return NumassAnalyzerResult { + length = totalTime + count = sumOf { it.count } + this.countRate = countRate + this.countRateError = sqrt(countRateDispersion) + } + } + + /** + * Compute actual t0 + */ + private suspend fun getT0(block: NumassBlock, parameters: TimeAnalyzerParameters): Int { + parameters.value?.let { return it } + parameters.crFraction?.let { fraction -> + val cr = block.events.count().toDouble() / block.getLength().inWholeMilliseconds * 1000 + if (cr < parameters.crMin) { + 0 + } else { + max(-1e9 / cr * ln(1.0 - fraction), parameters.min).toInt() 
+            }
+        }
+        return 0
+    }
+
+    /**
+     * Add a delay after (inverted = false) or before (inverted = true) event to each event
+     */
+    private suspend fun Flow<NumassEvent>.byPairs(inverted: Boolean = true): Flow<Pair<NumassEvent, Long>> = flow {
+        var prev: NumassEvent? = null
+        var next: NumassEvent? = null
+        collect { value ->
+            prev = next
+            next = value
+            if (prev != null && next != null) {
+                val delay = next!!.timeOffset - prev!!.timeOffset
+                if (delay < 0) error("Events are not ordered!")
+                if (inverted) {
+                    emit(Pair(next!!, delay))
+                } else {
+                    emit(Pair(prev!!, delay))
+                }
+            }
+        }
+    }
+
+}
diff --git a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/Values.kt b/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/Values.kt
deleted file mode 100644
index 9529c62..0000000
--- a/numass-analysis/src/commonMain/kotlin/ru/inr/mass/data/analysis/Values.kt
+++ /dev/null
@@ -1,5 +0,0 @@
-package ru.inr.mass.data.analysis
-
-import space.kscience.dataforge.values.Value
-
-public typealias Values = Map<String, Value>
\ No newline at end of file
diff --git a/numass-analysis/src/jvmMain/kotlin/ru/inr/mass/data/analysis/timeHistogram.kt b/numass-analysis/src/jvmMain/kotlin/ru/inr/mass/data/analysis/timeHistogram.kt
new file mode 100644
index 0000000..03061d2
--- /dev/null
+++ b/numass-analysis/src/jvmMain/kotlin/ru/inr/mass/data/analysis/timeHistogram.kt
@@ -0,0 +1,37 @@
+package ru.inr.mass.data.analysis
+
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.flow.collect
+import kotlinx.coroutines.flow.transform
+import kotlinx.coroutines.runBlocking
+import ru.inr.mass.data.api.NumassBlock
+import ru.inr.mass.data.api.getTime
+import space.kscience.kmath.histogram.UnivariateHistogram
+import kotlin.math.max
+
+public fun <T, R> Flow<T>.zipWithNext(block: (l: T, r: T) -> R): Flow<R> {
+    var current: T? = null
+    return transform { r ->
+        current?.let { l ->
+            emit(block(l, r))
+        }
+        current = r
+    }
+}
+
+public fun NumassBlock.timeHistogram(
+    binSize: Double,
+    extractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY,
+): UnivariateHistogram = UnivariateHistogram.uniform(binSize) {
+    runBlocking {
+        extractor.extract(this@timeHistogram).zipWithNext { l, r ->
+            if (l.owner == r.owner) {
+                max((r.getTime() - l.getTime()).inWholeMicroseconds, 0L)
+            } else {
+                0
+            }
+        }.collect {
+            putValue(it.toDouble())
+        }
+    }
+}
diff --git a/numass-data-proto/src/test/kotlin/ru/inr/mass/data/proto/TestNumassDirectory.kt b/numass-data-proto/src/test/kotlin/ru/inr/mass/data/proto/TestNumassDirectory.kt
index 876ef21..5448158 100644
--- a/numass-data-proto/src/test/kotlin/ru/inr/mass/data/proto/TestNumassDirectory.kt
+++ b/numass-data-proto/src/test/kotlin/ru/inr/mass/data/proto/TestNumassDirectory.kt
@@ -3,14 +3,13 @@ package ru.inr.mass.data.proto
 import kotlinx.coroutines.flow.toList
 import kotlinx.coroutines.runBlocking
 import org.junit.jupiter.api.Test
-import ru.inr.mass.data.api.NumassPoint
+import ru.inr.mass.data.api.NumassSet
 import ru.inr.mass.data.api.ParentBlock
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.string
 import space.kscience.dataforge.values.ListValue
 import java.nio.file.Path
-import kotlin.test.Ignore
 import kotlin.test.assertEquals
 
 class TestNumassDirectory {
@@ -31,10 +30,10 @@
     }
 
     @Test
-    @Ignore
     fun testTQDCRead() = runBlocking {
-        val pointPath = Path.of("C:\\Users\\altavir\\Desktop\\p20211122173034(20s).dat")
-        val point: NumassPoint = context.readNumassPointFile(pointPath)!!
+ val pointPath = Path.of("src/test/resources", "testData/tqdc") + val set: NumassSet = context.readNumassDirectory(pointPath) + val point = set.first { it.voltage == 16000.0 } point.getChannels().forEach { (channel, block) -> println("$channel: $block") if(block is ParentBlock){ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/meta b/numass-data-proto/src/test/resources/testData/tqdc/meta new file mode 100644 index 0000000..38af918 Binary files /dev/null and b/numass-data-proto/src/test/resources/testData/tqdc/meta differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p0.df b/numass-data-proto/src/test/resources/testData/tqdc/p0.df deleted file mode 100644 index ccb33f1..0000000 Binary files a/numass-data-proto/src/test/resources/testData/tqdc/p0.df and /dev/null differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p1(30s)(HV1=18600) b/numass-data-proto/src/test/resources/testData/tqdc/p1(30s)(HV1=18600) new file mode 100644 index 0000000..33b695e Binary files /dev/null and b/numass-data-proto/src/test/resources/testData/tqdc/p1(30s)(HV1=18600) differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p1.df b/numass-data-proto/src/test/resources/testData/tqdc/p1.df deleted file mode 100644 index f11d36e..0000000 Binary files a/numass-data-proto/src/test/resources/testData/tqdc/p1.df and /dev/null differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p2(30s)(HV1=18550) b/numass-data-proto/src/test/resources/testData/tqdc/p2(30s)(HV1=18550) new file mode 100644 index 0000000..59b3c78 Binary files /dev/null and b/numass-data-proto/src/test/resources/testData/tqdc/p2(30s)(HV1=18550) differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p2.df b/numass-data-proto/src/test/resources/testData/tqdc/p2.df deleted file mode 100644 index f71fbd1..0000000 Binary files a/numass-data-proto/src/test/resources/testData/tqdc/p2.df and /dev/null differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p3(30s)(HV1=18500) b/numass-data-proto/src/test/resources/testData/tqdc/p3(30s)(HV1=18500) new file mode 100644 index 0000000..f8d1626 Binary files /dev/null and b/numass-data-proto/src/test/resources/testData/tqdc/p3(30s)(HV1=18500) differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p3.df b/numass-data-proto/src/test/resources/testData/tqdc/p3.df deleted file mode 100644 index fd7ad6c..0000000 Binary files a/numass-data-proto/src/test/resources/testData/tqdc/p3.df and /dev/null differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p4(30s)(HV1=18450) b/numass-data-proto/src/test/resources/testData/tqdc/p4(30s)(HV1=18450) new file mode 100644 index 0000000..e86b8a7 Binary files /dev/null and b/numass-data-proto/src/test/resources/testData/tqdc/p4(30s)(HV1=18450) differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p4.df b/numass-data-proto/src/test/resources/testData/tqdc/p4.df deleted file mode 100644 index a536407..0000000 Binary files a/numass-data-proto/src/test/resources/testData/tqdc/p4.df and /dev/null differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p5(30s)(HV1=18400) b/numass-data-proto/src/test/resources/testData/tqdc/p5(30s)(HV1=18400) new file mode 100644 index 0000000..e996c29 Binary files /dev/null and b/numass-data-proto/src/test/resources/testData/tqdc/p5(30s)(HV1=18400) differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p5.df b/numass-data-proto/src/test/resources/testData/tqdc/p5.df 
deleted file mode 100644 index 7520233..0000000 Binary files a/numass-data-proto/src/test/resources/testData/tqdc/p5.df and /dev/null differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p6(30s)(HV1=18350) b/numass-data-proto/src/test/resources/testData/tqdc/p6(30s)(HV1=18350) new file mode 100644 index 0000000..dc73e80 Binary files /dev/null and b/numass-data-proto/src/test/resources/testData/tqdc/p6(30s)(HV1=18350) differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p6.df b/numass-data-proto/src/test/resources/testData/tqdc/p6.df deleted file mode 100644 index 1fdb9a7..0000000 Binary files a/numass-data-proto/src/test/resources/testData/tqdc/p6.df and /dev/null differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p7(30s)(HV1=18300) b/numass-data-proto/src/test/resources/testData/tqdc/p7(30s)(HV1=18300) new file mode 100644 index 0000000..18d2ae9 Binary files /dev/null and b/numass-data-proto/src/test/resources/testData/tqdc/p7(30s)(HV1=18300) differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p7.df b/numass-data-proto/src/test/resources/testData/tqdc/p7.df deleted file mode 100644 index 28852ae..0000000 Binary files a/numass-data-proto/src/test/resources/testData/tqdc/p7.df and /dev/null differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p8(30s)(HV1=18250) b/numass-data-proto/src/test/resources/testData/tqdc/p8(30s)(HV1=18250) new file mode 100644 index 0000000..ef7c383 Binary files /dev/null and b/numass-data-proto/src/test/resources/testData/tqdc/p8(30s)(HV1=18250) differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p8.df b/numass-data-proto/src/test/resources/testData/tqdc/p8.df deleted file mode 100644 index e5350d0..0000000 Binary files a/numass-data-proto/src/test/resources/testData/tqdc/p8.df and /dev/null differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p9(30s)(HV1=18200) b/numass-data-proto/src/test/resources/testData/tqdc/p9(30s)(HV1=18200) new file mode 100644 index 0000000..0433885 Binary files /dev/null and b/numass-data-proto/src/test/resources/testData/tqdc/p9(30s)(HV1=18200) differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/p9.df b/numass-data-proto/src/test/resources/testData/tqdc/p9.df deleted file mode 100644 index 4d902be..0000000 Binary files a/numass-data-proto/src/test/resources/testData/tqdc/p9.df and /dev/null differ diff --git a/numass-data-proto/src/test/resources/testData/tqdc/scenario b/numass-data-proto/src/test/resources/testData/tqdc/scenario new file mode 100644 index 0000000..e69de29 diff --git a/numass-data-proto/src/test/resources/testData/tqdc/voltage b/numass-data-proto/src/test/resources/testData/tqdc/voltage new file mode 100644 index 0000000..f0ac61a Binary files /dev/null and b/numass-data-proto/src/test/resources/testData/tqdc/voltage differ diff --git a/numass-workspace/build.gradle.kts b/numass-workspace/build.gradle.kts index b5ab348..0d46591 100644 --- a/numass-workspace/build.gradle.kts +++ b/numass-workspace/build.gradle.kts @@ -13,11 +13,14 @@ val plotlyVersion: String by rootProject.extra val kmathVersion: String by rootProject.extra dependencies { - implementation(project(":numass-data-proto")) - implementation(project(":numass-model")) - implementation(project(":numass-analysis")) + implementation(projects.numassDataProto) + implementation(projects.numassModel) + implementation(projects.numassAnalysis) implementation("space.kscience:dataforge-workspace:$dataforgeVersion") - 
implementation("space.kscience:plotlykt-core:$plotlyVersion") - implementation("space.kscience:kmath-histograms:$kmathVersion") - implementation("space.kscience:kmath-for-real:$kmathVersion") + implementation("space.kscience:plotlykt-jupyter:$plotlyVersion") + implementation("space.kscience:kmath-jupyter:$kmathVersion") +} + +kscience{ + jupyterLibrary("ru.inr.mass.notebook.NumassJupyter") } \ No newline at end of file diff --git a/numass-workspace/src/main/kotlin/ru/inr/mass/notebook/NumassJupyter.kt b/numass-workspace/src/main/kotlin/ru/inr/mass/notebook/NumassJupyter.kt new file mode 100644 index 0000000..9d06756 --- /dev/null +++ b/numass-workspace/src/main/kotlin/ru/inr/mass/notebook/NumassJupyter.kt @@ -0,0 +1,40 @@ +package ru.inr.mass.notebook + + +import org.jetbrains.kotlinx.jupyter.api.HTML +import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration +import ru.inr.mass.data.api.NumassBlock +import ru.inr.mass.data.api.NumassSet +import ru.inr.mass.workspace.Numass +import ru.inr.mass.workspace.numassSet +import space.kscience.plotly.Plotly + +internal class NumassJupyter : JupyterIntegration() { + override fun Builder.onLoaded() { + repositories( + "https://repo.kotlin.link" + ) + + import( + "ru.inr.mass.models.*", + "ru.inr.mass.data.analysis.*", + "ru.inr.mass.workspace.*", + "ru.inr.mass.data.api.*", + "ru.inr.mass.data.proto.*", + "space.kscience.dataforge.data.*", + "kotlinx.coroutines.*", + "kotlinx.coroutines.flow.*", + ) + + import() + + + render { + + } + + render { numassSet -> + HTML(Plotly.numassSet(numassSet).render(), true) + } + } +} \ No newline at end of file diff --git a/numass-workspace/src/main/kotlin/ru/inr/mass/scripts/demo.kt b/numass-workspace/src/main/kotlin/ru/inr/mass/scripts/demo.kt index 76efb9f..f247f88 100644 --- a/numass-workspace/src/main/kotlin/ru/inr/mass/scripts/demo.kt +++ b/numass-workspace/src/main/kotlin/ru/inr/mass/scripts/demo.kt @@ -1,6 +1,8 @@ -package ru.inr.mass.workspace +package ru.inr.mass.scripts import ru.inr.mass.data.proto.NumassDirectorySet +import ru.inr.mass.workspace.Numass.readNumassRepository +import ru.inr.mass.workspace.numassSet import space.kscience.dataforge.data.DataTree import space.kscience.dataforge.data.await import space.kscience.dataforge.data.getData @@ -11,6 +13,6 @@ suspend fun main() { val repo: DataTree = readNumassRepository("D:\\Work\\Numass\\data\\2018_04") //val dataPath = Path.of("D:\\Work\\Numass\\data\\2018_04\\Adiabacity_19\\set_4\\") //val testSet = NUMASS.context.readNumassDirectory(dataPath) - val testSet = repo.getData("Adiabacity_19.set_4")?.await() ?: error("Not found") - Plotly.numassDirectory(testSet).makeFile() + val testSet = repo.getData("Adiabacity_19.set_3")?.await() ?: error("Not found") + Plotly.numassSet(testSet).makeFile() } \ No newline at end of file diff --git a/numass-workspace/src/main/kotlin/ru/inr/mass/scripts/run_2020_12.kt b/numass-workspace/src/main/kotlin/ru/inr/mass/scripts/run_2020_12.kt index 0c5e533..e62c5e7 100644 --- a/numass-workspace/src/main/kotlin/ru/inr/mass/scripts/run_2020_12.kt +++ b/numass-workspace/src/main/kotlin/ru/inr/mass/scripts/run_2020_12.kt @@ -2,7 +2,7 @@ package ru.inr.mass.scripts import kotlinx.coroutines.flow.collect import ru.inr.mass.data.proto.NumassDirectorySet -import ru.inr.mass.workspace.readNumassRepository +import ru.inr.mass.workspace.Numass.readNumassRepository import space.kscience.dataforge.data.DataTree import space.kscience.dataforge.data.filter import space.kscience.dataforge.meta.string diff --git 
a/numass-workspace/src/main/kotlin/ru/inr/mass/scripts/run_2021_11.kt b/numass-workspace/src/main/kotlin/ru/inr/mass/scripts/run_2021_11.kt index f202f94..6ad4038 100644 --- a/numass-workspace/src/main/kotlin/ru/inr/mass/scripts/run_2021_11.kt +++ b/numass-workspace/src/main/kotlin/ru/inr/mass/scripts/run_2021_11.kt @@ -1,13 +1,10 @@ package ru.inr.mass.scripts import kotlinx.coroutines.flow.toList -import kotlinx.html.code import kotlinx.html.h2 import kotlinx.html.p import kotlinx.serialization.json.Json -import ru.inr.mass.workspace.readNumassDirectory -import space.kscience.dataforge.io.JsonMetaFormat -import space.kscience.dataforge.io.toString +import ru.inr.mass.workspace.Numass.readNumassDirectory import space.kscience.dataforge.meta.MetaSerializer import space.kscience.plotly.* @@ -33,18 +30,18 @@ suspend fun main() { h2 { +"Analysis" } plot { histogram { - name="max" - x.numbers = frames.map { frame -> frame.signal.maxOf { it.toUShort().toInt() - Short.MAX_VALUE } } + name = "max" + x.numbers = frames.map { frame -> frame.signal.maxOf { it.toUShort().toInt() - Short.MAX_VALUE } } } histogram { - name="max-min" - xbins{ + name = "max-min" + xbins { size = 2.0 } x.numbers = frames.map { frame -> - frame.signal.maxOf { it.toUShort().toInt() - Short.MAX_VALUE } - - frame.signal.minOf { it.toUShort().toInt() - Short.MAX_VALUE } + frame.signal.maxOf { it.toUShort().toInt() - Short.MAX_VALUE } - + frame.signal.minOf { it.toUShort().toInt() - Short.MAX_VALUE } } } } diff --git a/numass-workspace/src/main/kotlin/ru/inr/mass/workspace/Numass.kt b/numass-workspace/src/main/kotlin/ru/inr/mass/workspace/Numass.kt new file mode 100644 index 0000000..72be471 --- /dev/null +++ b/numass-workspace/src/main/kotlin/ru/inr/mass/workspace/Numass.kt @@ -0,0 +1,47 @@ +package ru.inr.mass.workspace + +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.runBlocking +import kotlinx.coroutines.withContext +import ru.inr.mass.data.api.NumassSet +import ru.inr.mass.data.proto.NumassDirectorySet +import ru.inr.mass.data.proto.readNumassDirectory +import space.kscience.dataforge.data.* +import space.kscience.dataforge.names.Name +import space.kscience.dataforge.names.NameToken +import java.nio.file.Files +import java.nio.file.Path +import kotlin.io.path.ExperimentalPathApi +import kotlin.io.path.exists +import kotlin.io.path.isDirectory +import kotlin.io.path.relativeTo +import kotlin.streams.toList + +object Numass { + fun readNumassDirectory(path: String): NumassDirectorySet = NUMASS.context.readNumassDirectory(path) + + @OptIn(ExperimentalPathApi::class) + fun readNumassRepository(path: Path): DataTree<NumassDirectorySet> = runBlocking { + ActiveDataTree { + @Suppress("BlockingMethodInNonBlockingContext") + withContext(Dispatchers.IO) { + Files.walk(path).filter { + it.isDirectory() && it.resolve("meta").exists() + }.toList().forEach { childPath -> + val name = Name(childPath.relativeTo(path).map { segment -> + NameToken(segment.fileName.toString()) + }) + val value = NUMASS.context.readNumassDirectory(childPath) + static(name, value, value.meta) + } + } + //TODO add file watcher + } + } + + fun readNumassRepository(path: String): DataTree<NumassDirectorySet> = readNumassRepository(Path.of(path)) +} + +operator fun DataSet<NumassSet>.get(name: String): NumassSet? 
= runBlocking { + getData(Name.parse(name))?.await() +} diff --git a/numass-workspace/src/main/kotlin/ru/inr/mass/workspace/amplitudeSpectrum.kt b/numass-workspace/src/main/kotlin/ru/inr/mass/workspace/amplitudeSpectrum.kt deleted file mode 100644 index c0abcf3..0000000 --- a/numass-workspace/src/main/kotlin/ru/inr/mass/workspace/amplitudeSpectrum.kt +++ /dev/null @@ -1,57 +0,0 @@ -@file:Suppress("EXPERIMENTAL_API_USAGE") -package ru.inr.mass.workspace - -import kotlinx.coroutines.flow.collect -import kotlinx.coroutines.runBlocking -import ru.inr.mass.data.api.NumassPoint -import space.kscience.dataforge.context.logger -import space.kscience.dataforge.context.warn -import space.kscience.kmath.histogram.UnivariateHistogram -import space.kscience.kmath.histogram.center -import space.kscience.kmath.histogram.put -import space.kscience.kmath.misc.UnstableKMathAPI -import space.kscience.kmath.structures.DoubleBuffer -import space.kscience.kmath.structures.asBuffer - - -/** - * Build an amplitude spectrum with bin of 1.0 counted from 0.0. Some bins could be missing - */ -fun NumassPoint.spectrum(): UnivariateHistogram = UnivariateHistogram.uniform(1.0) { - runBlocking { - events.collect { - putValue(it.amplitude.toDouble()) - } - } -} - -operator fun UnivariateHistogram.component1(): DoubleBuffer = bins.map { it.domain.center }.toDoubleArray().asBuffer() -operator fun UnivariateHistogram.component2(): DoubleBuffer = bins.map { it.value }.toDoubleArray().asBuffer() - -fun Collection.spectrum(): UnivariateHistogram { - if (distinctBy { it.voltage }.size != 1) { - NUMASS.logger.warn { "Spectrum is generated from points with different hv value: $this" } - } - - return UnivariateHistogram.uniform(1.0) { - runBlocking { - this@spectrum.forEach { point -> - point.events.collect { put(it.amplitude.toDouble()) } - } - } - } -} - -/** - * Re-shape the spectrum with the increased bin size and range. Throws a error if new bin is smaller than before. 
- */ -@OptIn(UnstableKMathAPI::class) -fun UnivariateHistogram.reShape( - binSize: Int, - channelRange: IntRange, -): UnivariateHistogram = UnivariateHistogram.uniform(binSize.toDouble()) { - this@reShape.bins.filter { it.domain.center.toInt() in channelRange }.forEach { bin -> - if (bin.domain.volume() > binSize.toDouble()) error("Can't reShape the spectrum with increased binning") - putValue(bin.domain.center, bin.value) - } -} \ No newline at end of file diff --git a/numass-workspace/src/main/kotlin/ru/inr/mass/workspace/files.kt b/numass-workspace/src/main/kotlin/ru/inr/mass/workspace/files.kt deleted file mode 100644 index dfb02a9..0000000 --- a/numass-workspace/src/main/kotlin/ru/inr/mass/workspace/files.kt +++ /dev/null @@ -1,39 +0,0 @@ -package ru.inr.mass.workspace - -import kotlinx.coroutines.Dispatchers -import kotlinx.coroutines.withContext -import ru.inr.mass.data.proto.NumassDirectorySet -import ru.inr.mass.data.proto.readNumassDirectory -import space.kscience.dataforge.data.ActiveDataTree -import space.kscience.dataforge.data.DataTree -import space.kscience.dataforge.data.static -import space.kscience.dataforge.names.Name -import space.kscience.dataforge.names.NameToken -import java.nio.file.Files -import java.nio.file.Path -import kotlin.io.path.ExperimentalPathApi -import kotlin.io.path.exists -import kotlin.io.path.isDirectory -import kotlin.io.path.relativeTo -import kotlin.streams.toList - -fun readNumassDirectory(path: String): NumassDirectorySet = NUMASS.context.readNumassDirectory(path) - -@OptIn(ExperimentalPathApi::class) -suspend fun readNumassRepository(path: Path): DataTree = ActiveDataTree { - @Suppress("BlockingMethodInNonBlockingContext") - withContext(Dispatchers.IO) { - Files.walk(path).filter { - it.isDirectory() && it.resolve("meta").exists() - }.toList().forEach { childPath -> - val name = Name(childPath.relativeTo(path).map { segment -> - NameToken(segment.fileName.toString()) - }) - val value = NUMASS.context.readNumassDirectory(childPath) - static(name, value, value.meta) - } - } - //TODO add file watcher -} - -suspend fun readNumassRepository(path: String): DataTree = readNumassRepository(Path.of(path)) diff --git a/numass-workspace/src/main/kotlin/ru/inr/mass/workspace/plots.kt b/numass-workspace/src/main/kotlin/ru/inr/mass/workspace/plots.kt index 5ff2565..e8f261f 100644 --- a/numass-workspace/src/main/kotlin/ru/inr/mass/workspace/plots.kt +++ b/numass-workspace/src/main/kotlin/ru/inr/mass/workspace/plots.kt @@ -1,8 +1,13 @@ package ru.inr.mass.workspace +import kotlinx.coroutines.runBlocking import kotlinx.html.h1 import kotlinx.html.h2 -import ru.inr.mass.data.api.NumassPoint +import ru.inr.mass.data.analysis.NumassAmplitudeSpectrum +import ru.inr.mass.data.analysis.NumassEventExtractor +import ru.inr.mass.data.analysis.amplitudeSpectrum +import ru.inr.mass.data.analysis.timeHistogram +import ru.inr.mass.data.api.NumassSet import ru.inr.mass.data.proto.HVData import ru.inr.mass.data.proto.NumassDirectorySet import space.kscience.dataforge.values.asValue @@ -14,29 +19,29 @@ import space.kscience.kmath.operations.asIterable import space.kscience.kmath.structures.Buffer import space.kscience.kmath.structures.DoubleBuffer import space.kscience.plotly.* -import space.kscience.plotly.models.Scatter -import space.kscience.plotly.models.Trace -import space.kscience.plotly.models.TraceValues +import space.kscience.plotly.models.* /** * Plot a kmath histogram */ @OptIn(UnstableKMathAPI::class) -fun Plot.histogram(histogram: UnivariateHistogram, block: 
Scatter.() -> Unit): Trace = scatter { +fun Plot.histogram(histogram: UnivariateHistogram, block: Scatter.() -> Unit = {}): Trace = scatter { x.numbers = histogram.bins.map { it.domain.center } y.numbers = histogram.bins.map { it.value } + line.shape = LineShape.hv block() } -fun Plot.amplitudeSpectrum( - point: NumassPoint, - binSize: Int = 20, - range: IntRange = 0..2000, - name: String = point.toString(), +fun Plot.histogram( + spectrum: NumassAmplitudeSpectrum, + binSize: UInt = 20U, + block: Scatter.() -> Unit = {}, ): Trace = scatter { - histogram(point.spectrum().reShape(binSize, range)) { - this.name = name - } + val binned = spectrum.binned(binSize) + x.numbers = binned.keys.map { (it.first + it.last).toDouble() / 2.0 } + y.numbers = binned.values + line.shape = LineShape.hv + block() } /** @@ -47,26 +52,44 @@ fun Plot.hvData(data: HVData): Trace = scatter { y.numbers = data.map { it.value } } -fun Plotly.numassDirectory(set: NumassDirectorySet, binSize: Int = 20, range: IntRange = 0..2000): PlotlyPage = +fun Plotly.numassSet( + set: NumassSet, + amplitudeBinSize: UInt = 20U, + eventExtractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY, +): PlotlyPage = Plotly.page { h1 { - +"Numass point set ${set.path}" + +"Numass point set ${ShapeType.path}" } h2 { +"Amplitude spectrum" } plot { - set.points.sortedBy { it.index }.forEach { - amplitudeSpectrum(it, binSize, range) + runBlocking { + set.points.sortedBy { it.index }.forEach { + histogram(it.amplitudeSpectrum(eventExtractor), amplitudeBinSize) + } } } - set.getHvData()?.let { entries -> - h2 { - +"HV" + h2 { + +"Time spectra" + } + plot { + set.points.sortedBy { it.index }.forEach { + histogram(it.timeHistogram(1e3)) } - plot { - hvData(entries) + layout.yaxis.type = AxisType.log + + } + if (set is NumassDirectorySet) { + set.getHvData()?.let { entries -> + h2 { + +"HV" + } + plot { + hvData(entries) + } } } }
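
For reference, a minimal usage sketch of the API introduced in this patch: Numass.readNumassRepository and the DataSet.get operator from Numass.kt combined with Plotly.numassSet from plots.kt. This is an untested sketch, not part of the patch itself; the repository path and set name are the ones used in demo.kt, and any local Numass repository would do.

import ru.inr.mass.workspace.Numass.readNumassRepository
import ru.inr.mass.workspace.get
import ru.inr.mass.workspace.numassSet
import space.kscience.plotly.*

fun main() {
    // every directory containing a "meta" file is loaded as a NumassDirectorySet
    val repo = readNumassRepository("D:\\Work\\Numass\\data\\2018_04")
    // the indexing operator parses the dot-separated name and awaits the data item
    val set = repo["Adiabacity_19.set_3"] ?: error("Set not found")
    // build the summary page (amplitude spectra, time spectra, HV) and write it to an HTML file
    Plotly.numassSet(set).makeFile()
}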
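
Likewise, the new Plot.histogram overload for NumassAmplitudeSpectrum can be used directly for a single directory set. A hedged sketch, assuming a local set path (taken from the commented-out line in demo.kt), an arbitrary 50-channel bin, and voltage-based trace names:

import kotlinx.coroutines.runBlocking
import ru.inr.mass.data.analysis.NumassEventExtractor
import ru.inr.mass.data.analysis.amplitudeSpectrum
import ru.inr.mass.workspace.Numass.readNumassDirectory
import ru.inr.mass.workspace.histogram
import space.kscience.plotly.*

fun main() {
    val set = readNumassDirectory("D:\\Work\\Numass\\data\\2018_04\\Adiabacity_19\\set_4\\")
    Plotly.page {
        plot {
            runBlocking {
                // one binned amplitude-spectrum trace per point, as in Plotly.numassSet above
                set.points.sortedBy { it.index }.forEach { point ->
                    histogram(point.amplitudeSpectrum(NumassEventExtractor.EVENTS_ONLY), binSize = 50U) {
                        name = point.voltage.toString()
                    }
                }
            }
        }
    }.makeFile()
}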
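
Finally, the NumassJupyter integration registered through kscience { jupyterLibrary(...) } is intended for Kotlin Jupyter notebooks. A hypothetical notebook cell could look like the lines below; the %use descriptor name and the data path are assumptions, and the rendering of the last expression relies on the NumassSet render hook defined in NumassJupyter.

// %use numass              // descriptor name is an assumption; depends on how the library descriptor is published
val repo = Numass.readNumassRepository("/data/2018_04")   // hypothetical path
repo["Adiabacity_19.set_3"] // last expression of the cell is rendered as the Plotly.numassSet page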