remove notebook outputs

Fix channel readout
Alexander Nozik 2021-12-21 18:49:21 +03:00
parent 7d053d4fa9
commit 917b043b21
No known key found for this signature in database
GPG Key ID: F7FCF2DD25C71357
19 changed files with 172 additions and 162 deletions

View File

@@ -9,7 +9,7 @@ allprojects {
 }
 group = "ru.inr.mass"
-version = "0.1.0"
+version = "0.1.1"
 }
 val dataforgeVersion by extra("0.5.2")
@@ -21,3 +21,7 @@ ksciencePublish{
 github("numass")
 space("https://maven.pkg.jetbrains.space/mipt-npm/p/numass/maven")
 }
+apiValidation {
+validationDisabled = true
+}

View File

@@ -9,7 +9,7 @@
 "source": [
 "@file:Repository(\"https://repo.kotlin.link\")\n",
 "@file:Repository(\"*mavenLocal\")\n",
-"@file:DependsOn(\"ru.inr.mass:numass-workspace:0.1.0\")"
+"@file:DependsOn(\"ru.inr.mass:numass-workspace:0.1.1\")"
 ]
 },
 {
@@ -21,7 +21,7 @@
 },
 "outputs": [],
 "source": [
-"val repo: DataTree<NumassDirectorySet> = Numass.readNumassRepository(\"D:\\\\Work\\\\Numass\\\\data\\\\test\")\n",
+"val repo: DataTree<NumassDirectorySet> = Numass.readRepository(\"D:\\\\Work\\\\Numass\\\\data\\\\test\")\n",
 "repo"
 ]
 },
@@ -32,8 +32,18 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"val numassSet = repo[\"set_7\"]\n",
-"numassSet"
+"val numassSet = repo[\"set_7\"]!!\n",
+"Plotly.plotNumassSet(numassSet, amplitudeBinSize = 8U, eventExtractor = NumassEventExtractor.TQDC)"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"id": "568f62e4-86ec-4e06-835a-ef41ca7b4fb9",
+"metadata": {},
+"outputs": [],
+"source": [
+"//numassSet.meta"
 ]
 },
 {
@@ -43,7 +53,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"val point = numassSet.points.first{it.voltage == 14000.0}"
+"val point = numassSet.points.first { it.voltage == 14200.0}"
 ]
 },
 {
@@ -59,61 +69,17 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"id": "50df6925-82f5-4330-a1c2-3e43fb9cd17d",
+"id": "ac448607-3e7a-4849-9cab-0187e15a7238",
 "metadata": {},
 "outputs": [],
 "source": [
-"Plotly.plotNumassBlock(point, eventExtractor = NumassEventExtractor.TQDC)"
+"Plotly.plotNumassBlock(point, amplitudeBinSize = 8U, eventExtractor = NumassEventExtractor.TQDC)"
 ]
 },
 {
 "cell_type": "code",
 "execution_count": null,
-"id": "196f94ca-0439-4190-bda7-8d692c37b2db",
-"metadata": {
-"tags": []
-},
-"outputs": [],
-"source": [
-"val frames = point.listFrames()\n",
-"Plotly.page {\n",
-" p { +\"${frames.size} frames\" }\n",
-" h2 { +\"Random frames\" }\n",
-" plot {\n",
-" val random = kotlin.random.Random(1234)\n",
-"\n",
-" repeat(10) {\n",
-" val frame = frames.random(random)\n",
-" scatter {\n",
-" y.numbers = frame.signal.map { (it.toUShort().toInt() - Short.MAX_VALUE).toShort() }\n",
-" }\n",
-" }\n",
-" }\n",
-" h2 { +\"Analysis\" }\n",
-" plot {\n",
-" histogram {\n",
-" name = \"max\"\n",
-" x.numbers = frames.map { frame -> frame.signal.maxOf { (it.toUShort().toInt() - Short.MAX_VALUE).toShort() } }\n",
-" }\n",
-"\n",
-" histogram {\n",
-" name = \"max-min\"\n",
-" xbins {\n",
-" size = 2.0\n",
-" }\n",
-" x.numbers = frames.map { frame ->\n",
-" frame.signal.maxOf { it.toUShort().toInt() - Short.MAX_VALUE } -\n",
-" frame.signal.minOf { it.toUShort().toInt() - Short.MAX_VALUE }\n",
-" }\n",
-" }\n",
-" }\n",
-"}"
-]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"id": "5320d9d5-eae3-469b-a1f2-5d33d3db286c",
+"id": "c00f251f-5858-43b9-bcf2-a48efcbee3c9",
 "metadata": {},
 "outputs": [],
 "source": []

View File

@@ -7,18 +7,18 @@ import ru.inr.mass.data.api.NumassBlock
 import space.kscience.kmath.histogram.LongCounter
 import kotlin.math.min
-public class NumassAmplitudeSpectrum(public val amplitudes: Map<UShort, ULong>) {
-public val minChannel: UShort by lazy { amplitudes.keys.minOf { it } }
-public val maxChannel: UShort by lazy { amplitudes.keys.maxOf { it } }
-public val channels: UIntRange by lazy { minChannel..maxChannel }
-public fun binned(binSize: UInt, range: UIntRange = channels): Map<UIntRange, Double> {
+public class NumassAmplitudeSpectrum(public val amplitudes: Map<Short, ULong>) {
+public val minChannel: Short by lazy { amplitudes.keys.minOf { it } }
+public val maxChannel: Short by lazy { amplitudes.keys.maxOf { it } }
+public val channels: IntRange by lazy { minChannel..maxChannel }
+public fun binned(binSize: UInt, range: IntRange = channels): Map<IntRange, Double> {
 val keys = sequence {
 var left = range.first
 do {
-val right = min(left + binSize, range.last)
+val right = min(left + binSize.toInt(), range.last)
 yield(left..right)
 left = right
 } while (right < range.last)
@@ -27,7 +27,7 @@ public class NumassAmplitudeSpectrum(public val amplitudes: Map<UShort, ULong>)
 return keys.associateWith { bin -> amplitudes.filter { it.key in bin }.values.sum().toDouble() }
 }
-public fun sum(range: UIntRange = channels): ULong =
+public fun sum(range: IntRange = channels): ULong =
 amplitudes.filter { it.key in range }.values.sum()
 }
@@ -37,7 +37,7 @@ public class NumassAmplitudeSpectrum(public val amplitudes: Map<UShort, ULong>)
 public suspend fun NumassBlock.amplitudeSpectrum(
 extractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY,
 ): NumassAmplitudeSpectrum {
-val map = HashMap<UShort, LongCounter>()
+val map = HashMap<Short, LongCounter>()
 extractor.extract(this).collect { event ->
 map.getOrPut(event.amplitude) { LongCounter() }.add(1L)
 }
@@ -67,7 +67,7 @@ public suspend fun Collection<NumassBlock>.amplitudeSpectrum(
 if (counter.value == 0L) {
 null
 } else {
-index.toUShort() to counter.value.toULong()
+index.toShort() to counter.value.toULong()
 }
 }.toMap()
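The spectrum keys switch from UShort to signed Short here, so binning now works over an IntRange. A minimal usage sketch, assuming a NumassBlock obtained elsewhere; the bin size of 8 channels is illustrative, not taken from this commit:

import ru.inr.mass.data.analysis.NumassEventExtractor
import ru.inr.mass.data.analysis.amplitudeSpectrum
import ru.inr.mass.data.api.NumassBlock

// Build an amplitude spectrum from a block and print 8-channel-wide bins over the full signed range.
suspend fun printBinnedSpectrum(block: NumassBlock) {
    val spectrum = block.amplitudeSpectrum(NumassEventExtractor.TQDC)
    println("channels: ${spectrum.minChannel}..${spectrum.maxChannel}")
    spectrum.binned(binSize = 8U).forEach { (bin, counts) ->
        println("$bin -> $counts")
    }
}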

View File

@@ -105,7 +105,7 @@ public abstract class NumassAnalyzer {
 parameters: NumassAnalyzerParameters,
 ): Flow<NumassEvent> {
 val window = parameters.window
-return extractor.extract(this).filter { it.amplitude in window }
+return extractor.extract(this).filter { it.amplitude.toUInt() in window }
 }
 public companion object {
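Since event amplitudes are now signed Shorts, the filter converts each amplitude before the range check. An illustrative restatement of that predicate, assuming the analysis window is a range of UInt channels (the exact type of parameters.window is not shown in this commit):

// Hypothetical helper mirroring the filter above; the UIntRange type is an assumption.
fun passesWindow(amplitude: Short, window: UIntRange): Boolean =
    amplitude.toUInt() in window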

View File

@@ -13,25 +13,25 @@ public fun interface NumassEventExtractor {
 * A default event extractor that ignores frames
 */
 public val EVENTS_ONLY: NumassEventExtractor = NumassEventExtractor { it.events }
 public val TQDC: NumassEventExtractor = NumassEventExtractor { block ->
 block.frames.map { frame ->
-var max = 0
-var min = 0
+var max = Short.MIN_VALUE
+var min = Short.MAX_VALUE
 var indexOfMax = 0
-frame.signal.forEachIndexed { index, sh ->
-val corrected = sh.toUShort().toInt() - Short.MAX_VALUE
-if (corrected >= max) {
-max = corrected
+frame.signal.forEachIndexed { index, sh: Short ->
+if (sh >= max) {
+max = sh
 indexOfMax = index
 }
-if (corrected <= min) {
-min = corrected
+if (sh <= min) {
+min = sh
 }
 }
 NumassEvent(
-(max - min).toShort().toUShort(),
+(max - min).toShort(),
 frame.timeOffset + frame.tickSize.inWholeNanoseconds * indexOfMax,
 block
 )
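With this change the TQDC extractor works directly on the signed frame samples: the event amplitude is the peak-to-peak value of the frame signal, and the event time is tied to the sample index of the maximum. A minimal restatement of that amplitude definition, assuming the frame signal is exposed as a ShortArray as the surrounding code suggests:

// Peak-to-peak amplitude of one frame signal, as the reworked TQDC extractor computes it.
fun peakToPeak(signal: ShortArray): Short {
    var max = Short.MIN_VALUE
    var min = Short.MAX_VALUE
    for (sample in signal) {
        if (sample > max) max = sample
        if (sample < min) min = sample
    }
    return (max - min).toShort()
}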

View File

@@ -39,7 +39,7 @@ public open class TimeAnalyzer(override val extractor: NumassEventExtractor) : N
 ): NumassAnalyzerResult {
 //Parallel processing and merging of parent blocks
 if (block is ParentBlock) {
-val res = block.flowBlocks().map { analyzeInternal(it, parameters) }.toList()
+val res = block.blocks.map { analyzeInternal(it, parameters) }.toList()
 return res.combineResults(parameters.t0.averagingMethod)
 }

View File

@@ -8,7 +8,7 @@ import kotlin.time.DurationUnit
 public interface ParentBlock : NumassBlock {
-public fun flowBlocks(): Flow<NumassBlock>
+public val blocks: List<NumassBlock>
 /**
 * If true, the sub-blocks a considered to be sequential, if not, the sub-blocks are parallel
@@ -20,9 +20,7 @@ public interface ParentBlock : NumassBlock {
 * A block constructed from a set of other blocks. Internal blocks are not necessary subsequent. Blocks are automatically sorted.
 * Created by darksnake on 16.07.2017.
 */
-public open class MetaBlock(protected val blocks: List<NumassBlock>) : ParentBlock {
-override fun flowBlocks(): Flow<NumassBlock> = blocks.asFlow()
+public class MetaBlock(override val blocks: List<NumassBlock>) : ParentBlock {
 override val startTime: Instant
 get() = blocks.first().startTime
@@ -36,7 +34,7 @@ public open class MetaBlock(protected val blocks: List<NumassBlock>) : ParentBlo
 }
 override val frames: Flow<NumassFrame>
-get() = blocks.sortedBy { it.startTime }.asFlow().flatMapConcat { it.frames }
+get() = blocks.sortedBy { it.startTime }.asFlow().flatMapMerge { it.frames }
 override val eventsCount: Long
 get() = blocks.sumOf { it.eventsCount }

View File

@@ -25,10 +25,10 @@ import kotlinx.datetime.plus
 import kotlin.time.Duration
 public open class OrphanNumassEvent(
-public val amplitude: UShort,
+public val amplitude: Short,
 public val timeOffset: Long,
 ) : Comparable<OrphanNumassEvent> {
-public operator fun component1(): UShort = amplitude
+public operator fun component1(): Short = amplitude
 public operator fun component2(): Long = timeOffset
 override fun compareTo(other: OrphanNumassEvent): Int {
@@ -46,7 +46,7 @@ public open class OrphanNumassEvent(
 *
 */
 public class NumassEvent(
-amplitude: UShort,
+amplitude: Short,
 timeOffset: Long,
 public val owner: NumassBlock,
 ) : OrphanNumassEvent(amplitude, timeOffset)

View File

@@ -17,7 +17,9 @@
 package ru.inr.mass.data.api
 import kotlinx.coroutines.FlowPreview
-import kotlinx.coroutines.flow.*
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.flow.asFlow
+import kotlinx.coroutines.flow.flatMapConcat
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.double
 import space.kscience.dataforge.meta.get
@@ -34,18 +36,6 @@ public interface NumassPoint : ParentBlock {
 public val meta: Meta
-/**
-* Distinct map of channel number to corresponding grouping block
-*/
-public suspend fun getChannels(): Map<Int, NumassBlock> =
-flowBlocks().toList().groupBy { it.channel }.mapValues { entry ->
-if (entry.value.size == 1) {
-entry.value.first()
-} else {
-MetaBlock(entry.value)
-}
-}
 /**
 * Get the voltage setting for the point
 */
@@ -59,22 +49,22 @@ public interface NumassPoint : ParentBlock {
 /**
 * Get the length key of meta or calculate length as a sum of block lengths. The latter could be a bit slow
 */
-override suspend fun getLength(): Duration = flowBlocks().filter { it.channel == 0 }.toList()
+override suspend fun getLength(): Duration = blocks.filter { it.channel == 0 }.toList()
 .sumOf { it.getLength().toLong(DurationUnit.NANOSECONDS) }.nanoseconds
 /**
 * Get all events it all blocks as a single sequence
 * Some performance analysis of different stream concatenation approaches is given here: https://www.techempower.com/blog/2016/10/19/efficient-multiple-stream-concatenation-in-java/
 */
-override val events: Flow<NumassEvent> get() = flowBlocks().flatMapConcat { it.events }
+override val events: Flow<NumassEvent> get() = blocks.asFlow().flatMapConcat { it.events }
 /**
 * Get all frames in all blocks as a single sequence
 */
-override val frames: Flow<NumassFrame> get() = flowBlocks().flatMapConcat { it.frames }
-public suspend fun isSequential(): Boolean = getChannels().size == 1
+override val frames: Flow<NumassFrame> get() = blocks.asFlow().flatMapConcat { it.frames }
+public suspend fun isSequential(): Boolean = channels.size == 1
 override fun toString(): String
@@ -89,6 +79,18 @@ public interface NumassPoint : ParentBlock {
 }
 }
+/**
+* Distinct map of channel number to corresponding grouping block
+*/
+public val NumassPoint.channels: Map<Int, NumassBlock>
+get() = blocks.groupBy { it.channel }.mapValues { entry ->
+if (entry.value.size == 1) {
+entry.value.first()
+} else {
+MetaBlock(entry.value)
+}
+}
 public val NumassPoint.title: String get() = "p$index(HV=$voltage)"
 /**
@@ -96,4 +98,4 @@ public val NumassPoint.title: String get() = "p$index(HV=$voltage)"
 *
 */
 public suspend fun NumassPoint.getFirstBlock(): NumassBlock =
-flowBlocks().firstOrNull() ?: throw RuntimeException("The point is empty")
+blocks.firstOrNull() ?: throw RuntimeException("The point is empty")
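The suspend member getChannels() is replaced by the channels extension property built on the new blocks list. A short usage sketch, assuming a NumassPoint obtained elsewhere:

import ru.inr.mass.data.api.NumassPoint
import ru.inr.mass.data.api.channels

// Group the point's blocks by channel and report the event count per channel.
fun describeChannels(point: NumassPoint) {
    point.channels.forEach { (channel, block) ->
        println("channel $channel: ${block.eventsCount} events")
    }
}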

View File

@@ -1,11 +0,0 @@
-package ru.inr.mass.data.api
-import kotlinx.coroutines.flow.Flow
-/**
-* An ancestor to numass frame analyzers
-* Created by darksnake on 07.07.2017.
-*/
-public interface SignalProcessor {
-public fun analyze(frame: NumassFrame): Flow<NumassEvent>
-}

View File

@@ -22,7 +22,10 @@ import kotlinx.coroutines.runBlocking
 import kotlinx.datetime.Instant
 import okio.ByteString
 import org.slf4j.LoggerFactory
-import ru.inr.mass.data.api.*
+import ru.inr.mass.data.api.NumassBlock
+import ru.inr.mass.data.api.NumassEvent
+import ru.inr.mass.data.api.NumassFrame
+import ru.inr.mass.data.api.NumassPoint
 import space.kscience.dataforge.io.Envelope
 import space.kscience.dataforge.meta.*
 import java.io.ByteArrayInputStream
@@ -34,6 +37,11 @@ import kotlin.time.Duration
 import kotlin.time.Duration.Companion.milliseconds
 import kotlin.time.Duration.Companion.nanoseconds
+public enum class FrameType {
+DEFAULT,
+TQDC2021
+}
 /**
 * Protobuf based numass point
 * Created by Alexander Nozik on 09.07.2017.
@@ -45,23 +53,25 @@ internal class ProtoNumassPoint(
 val point: Point by lazy(protoBuilder)
-override fun flowBlocks(): Flow<ProtoNumassBlock> {
-val frameByteOrder = if (meta["tqdc"] != null) {
-ByteOrder.LITTLE_ENDIAN
-} else {
-ByteOrder.BIG_ENDIAN
-}
-return point.channels.flatMap { channel ->
-channel.blocks
-.map { block -> ProtoNumassBlock(channel.id.toInt(), block, this, frameByteOrder) }
-.sortedBy { it.startTime }
-}.asFlow()
-}
-override suspend fun getChannels(): Map<Int, NumassBlock> =
-point.channels.groupBy { it.id.toInt() }.mapValues { entry ->
-MetaBlock(entry.value.flatMap { it.blocks }.map { ProtoNumassBlock(entry.key, it, this) })
+override val blocks: List<ProtoNumassBlock>
+get() {
+val frameByteOrder = if (meta["tqdc"] != null) {
+ByteOrder.LITTLE_ENDIAN
+} else {
+ByteOrder.BIG_ENDIAN
+}
+val frameType = if (meta["tqdc"] != null) {
+FrameType.TQDC2021
+} else {
+FrameType.DEFAULT
+}
+return point.channels.flatMap { channel ->
+channel.blocks
+.map { block -> ProtoNumassBlock(channel.id.toInt(), block, this, frameType) }
+.sortedBy { it.startTime }
+}
 }
 override val voltage: Double get() = meta["external_meta.HV1_value"].double ?: super.voltage
@@ -142,7 +152,7 @@ public class ProtoNumassBlock(
 override val channel: Int,
 private val block: Point.Channel.Block,
 private val parent: NumassPoint? = null,
-private val frameByteOrder: ByteOrder = ByteOrder.BIG_ENDIAN,
+private val frameType: FrameType = FrameType.DEFAULT,
 ) : NumassBlock {
 override val startTime: Instant
@@ -178,7 +188,7 @@
 }
 amplitudes.zip(times) { amp, time ->
-NumassEvent(amp.toUShort(), time, this)
+NumassEvent(amp.toShort(), time, this)
 }.asFlow()
 } else {
@@ -187,12 +197,20 @@
 private fun ByteString.toShortArray(): ShortArray {
 val shortBuffer = asByteBuffer().apply {
-order(frameByteOrder)
+when (frameType) {
+FrameType.DEFAULT -> order(ByteOrder.BIG_ENDIAN)
+FrameType.TQDC2021 -> order(ByteOrder.LITTLE_ENDIAN)
+}
 }.asShortBuffer()
-return if (shortBuffer.hasArray()) {
-shortBuffer.array()
-} else {
-ShortArray(shortBuffer.limit()) { shortBuffer.get(it) }
+return when (frameType) {
+FrameType.DEFAULT -> if (shortBuffer.hasArray()) {
+shortBuffer.array()
+} else {
+ShortArray(shortBuffer.limit()) { shortBuffer.get(it) }
+}
+FrameType.TQDC2021 -> ShortArray(shortBuffer.limit()){
+(shortBuffer.get(it).toUShort().toInt() - Short.MAX_VALUE).toShort()
+}
 }
 }
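For TQDC2021 frames the raw samples are little-endian and effectively unsigned, so the block above re-centers each value around zero by subtracting Short.MAX_VALUE. An illustrative restatement of that per-sample conversion:

// Re-center a raw TQDC2021 sample: read it as unsigned and subtract Short.MAX_VALUE,
// so e.g. a raw 0x7FFF maps to 0 and a raw 0x8000 (32768 unsigned) maps to 1.
fun convertTqdc2021Sample(raw: Short): Short =
    (raw.toUShort().toInt() - Short.MAX_VALUE).toShort()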

View File

@@ -1,10 +1,10 @@
 package ru.inr.mass.data.proto
-import kotlinx.coroutines.flow.toList
 import kotlinx.coroutines.runBlocking
 import org.junit.jupiter.api.Test
 import ru.inr.mass.data.api.NumassSet
 import ru.inr.mass.data.api.ParentBlock
+import ru.inr.mass.data.api.channels
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.string
@@ -25,7 +25,7 @@ class TestNumassDirectory {
 assertEquals(ListValue.EMPTY, testSet.meta["comments"]?.value)
 assertEquals(31, testSet.points.size)
 val point22 = testSet.points.find { it.index == 22 }!!
-point22.flowBlocks()
+point22.blocks
 assertEquals("2018-04-13T21:56:09", point22.meta["end_time"].string)
 }
@@ -34,10 +34,10 @@
 val pointPath = Path.of("src/test/resources", "testData/tqdc")
 val set: NumassSet = context.readNumassDirectory(pointPath)
 val point = set.first { it.voltage == 18200.0 }
-point.getChannels().forEach { (channel, block) ->
+point.channels.forEach { (channel, block) ->
 println("$channel: $block")
 if(block is ParentBlock){
-block.flowBlocks().toList().forEach{
+block.blocks.toList().forEach{
 println("\t${it.channel}:${it.eventsCount}")
 }
 }

View File

@@ -1,7 +1,7 @@
 package ru.inr.mass.scripts
 import ru.inr.mass.data.proto.NumassDirectorySet
-import ru.inr.mass.workspace.Numass.readNumassRepository
+import ru.inr.mass.workspace.Numass.readRepository
 import ru.inr.mass.workspace.plotNumassSet
 import space.kscience.dataforge.data.DataTree
 import space.kscience.dataforge.data.await
@@ -10,9 +10,8 @@ import space.kscience.plotly.Plotly
 import space.kscience.plotly.makeFile
 suspend fun main() {
-val repo: DataTree<NumassDirectorySet> = readNumassRepository("D:\\Work\\Numass\\data\\2018_04")
-//val dataPath = Path.of("D:\\Work\\Numass\\data\\2018_04\\Adiabacity_19\\set_4\\")
-//val testSet = NUMASS.context.readNumassDirectory(dataPath)
+val repo: DataTree<NumassDirectorySet> = readRepository("D:\\Work\\Numass\\data\\2018_04")
 val testSet = repo.getData("Adiabacity_19.set_3")?.await() ?: error("Not found")
 Plotly.plotNumassSet(testSet).makeFile()
 }

View File

@@ -2,13 +2,13 @@ package ru.inr.mass.scripts
 import kotlinx.coroutines.flow.collect
 import ru.inr.mass.data.proto.NumassDirectorySet
-import ru.inr.mass.workspace.Numass.readNumassRepository
+import ru.inr.mass.workspace.Numass.readRepository
 import space.kscience.dataforge.data.DataTree
 import space.kscience.dataforge.data.filter
 import space.kscience.dataforge.meta.string
 suspend fun main() {
-val repo: DataTree<NumassDirectorySet> = readNumassRepository("D:\\Work\\Numass\\data\\2018_04")
+val repo: DataTree<NumassDirectorySet> = readRepository("D:\\Work\\Numass\\data\\2018_04")
 val filtered = repo.filter { _, data ->
 val operator by data.meta.string()
 operator?.startsWith("Vas") ?: false

View File

@@ -3,17 +3,34 @@ package ru.inr.mass.scripts
 import kotlinx.html.h2
 import kotlinx.html.p
 import kotlinx.serialization.json.Json
-import ru.inr.mass.workspace.Numass.readNumassDirectory
+import ru.inr.mass.data.api.NumassFrame
+import ru.inr.mass.workspace.Numass.readDirectory
 import ru.inr.mass.workspace.listFrames
 import space.kscience.dataforge.meta.MetaSerializer
 import space.kscience.plotly.*
+fun NumassFrame.tqdcAmplitude(): Short {
+var max = Short.MIN_VALUE
+var min = Short.MAX_VALUE
+signal.forEach { sh: Short ->
+if (sh >= max) {
+max = sh
+}
+if (sh <= min) {
+min = sh
+}
+}
+return (max - min).toShort()
+}
 suspend fun main() {
 //val repo: DataTree<NumassDirectorySet> = readNumassRepository("D:\\Work\\numass-data\\")
-val directory = readNumassDirectory("D:\\Work\\Numass\\data\\test\\set_7")
+val directory = readDirectory("D:\\Work\\Numass\\data\\test\\set_7")
 val point = directory.points.first()
-val frames = point.listFrames()
+val frames: List<NumassFrame> = point.listFrames()
 Plotly.page {
 p { +"${frames.size} frames" }
 h2 { +"Random frames" }
@@ -23,7 +40,7 @@ suspend fun main() {
 repeat(10) {
 val frame = frames.random(random)
 scatter {
-y.numbers = frame.signal.map { (it.toUShort().toInt() - Short.MAX_VALUE).toShort() }
+y.numbers = frame.signal.toList()
 }
 }
 }
@@ -31,7 +48,7 @@
 plot {
 histogram {
 name = "max"
-x.numbers = frames.map { frame -> frame.signal.maxOf { (it.toUShort().toInt() - Short.MAX_VALUE).toShort() } }
+x.numbers = frames.map { frame -> frame.signal.maxOrNull() ?: 0 }
 }
 histogram {
@@ -39,13 +56,28 @@
 xbins {
 size = 2.0
 }
-x.numbers = frames.map { frame ->
-frame.signal.maxOf { it.toUShort().toInt() - Short.MAX_VALUE } -
-frame.signal.minOf { it.toUShort().toInt() - Short.MAX_VALUE }
-}
+x.numbers = frames.map { it.tqdcAmplitude() }
 }
 }
 h2 { +"Meta" }
 p { +Json.encodeToString(MetaSerializer, point.meta) }
 }.makeFile()
+// val point = Numass.readPoint("D:\\Work\\Numass\\data\\test\\set_7\\p0(30s)(HV1=14000)")
+//
+// Plotly.plot {
+// histogram {
+// xbins.size = 2
+// x.numbers = point.frames.map { it.tqdcAmplitude() }.toList()
+// }
+//
+// histogram {
+// x.numbers = point.flowBlocks().flatMapMerge { it.frames.map { it.tqdcAmplitude() } }.toList()
+// }
+//
+// histogram {
+// x.numbers = point.getChannels().values.flatMap { it.listFrames().map { it.tqdcAmplitude() } }
+// }
+// }.makeFile()
 }

View File

@@ -21,7 +21,7 @@ import kotlin.math.pow
 import kotlin.system.measureTimeMillis
 fun main() {
-val spectrum = SterileNeutrinoSpectrum(fss = FSS.default).withNBkg()
+val spectrum: NBkgSpectrum = SterileNeutrinoSpectrum(fss = FSS.default).withNBkg()
 val args: Map<Symbol, Double> = mapOf(
 norm to 8e5,

View File

@@ -5,9 +5,11 @@ import kotlinx.coroutines.flow.toList
 import kotlinx.coroutines.runBlocking
 import kotlinx.coroutines.withContext
 import ru.inr.mass.data.api.NumassBlock
+import ru.inr.mass.data.api.NumassPoint
 import ru.inr.mass.data.api.NumassSet
 import ru.inr.mass.data.proto.NumassDirectorySet
 import ru.inr.mass.data.proto.readNumassDirectory
+import ru.inr.mass.data.proto.readNumassPointFile
 import space.kscience.dataforge.data.*
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.NameToken
@@ -20,10 +22,10 @@ import kotlin.io.path.relativeTo
 import kotlin.streams.toList
 object Numass {
-fun readNumassDirectory(path: String): NumassDirectorySet = NUMASS.context.readNumassDirectory(path)
+fun readDirectory(path: String): NumassDirectorySet = NUMASS.context.readNumassDirectory(path)
 @OptIn(ExperimentalPathApi::class)
-fun readNumassRepository(path: Path): DataTree<NumassDirectorySet> = runBlocking {
+fun readRepository(path: Path): DataTree<NumassDirectorySet> = runBlocking {
 ActiveDataTree {
 @Suppress("BlockingMethodInNonBlockingContext")
 withContext(Dispatchers.IO) {
@@ -41,7 +43,10 @@
 }
 }
-fun readNumassRepository(path: String): DataTree<NumassDirectorySet> = readNumassRepository(Path.of(path))
+fun readRepository(path: String): DataTree<NumassDirectorySet> = readRepository(Path.of(path))
+fun readPoint(path: String): NumassPoint = NUMASS.context.readNumassPointFile(path)
+?: error("Can't read numass point at $path")
 }
 operator fun DataSet<NumassSet>.get(name: String): NumassSet? = runBlocking {
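The helpers are renamed from readNumassRepository/readNumassDirectory to readRepository/readDirectory, and a readPoint helper is added. A minimal usage sketch; the paths reuse those appearing elsewhere in this commit and stand in for a local data layout:

import ru.inr.mass.data.api.NumassPoint
import ru.inr.mass.data.proto.NumassDirectorySet
import ru.inr.mass.workspace.Numass
import space.kscience.dataforge.data.DataTree

fun loadExamples() {
    // Whole repository as a data tree of directory sets.
    val repo: DataTree<NumassDirectorySet> = Numass.readRepository("D:\\Work\\Numass\\data\\test")
    // A single acquisition directory.
    val set: NumassDirectorySet = Numass.readDirectory("D:\\Work\\Numass\\data\\test\\set_7")
    // A single point file.
    val point: NumassPoint = Numass.readPoint("D:\\Work\\Numass\\data\\test\\set_7\\p0(30s)(HV1=14000)")
    println("${set.points.size} points in set_7, first point HV = ${point.voltage}")
}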

View File

@@ -7,10 +7,7 @@ import ru.inr.mass.data.analysis.NumassAmplitudeSpectrum
 import ru.inr.mass.data.analysis.NumassEventExtractor
 import ru.inr.mass.data.analysis.amplitudeSpectrum
 import ru.inr.mass.data.analysis.timeHistogram
-import ru.inr.mass.data.api.NumassBlock
-import ru.inr.mass.data.api.NumassPoint
-import ru.inr.mass.data.api.NumassSet
-import ru.inr.mass.data.api.title
+import ru.inr.mass.data.api.*
 import ru.inr.mass.data.proto.HVData
 import ru.inr.mass.data.proto.NumassDirectorySet
 import space.kscience.dataforge.values.asValue
@@ -66,7 +63,7 @@ fun Plotly.plotNumassBlock(
 plot {
 runBlocking {
 if (splitChannels && block is NumassPoint) {
-block.getChannels().forEach { (channel, channelBlock) ->
+block.channels.forEach { (channel, channelBlock) ->
 val spectrum = channelBlock.amplitudeSpectrum(eventExtractor)
 histogram(spectrum, amplitudeBinSize) {
 name = block.title + "[$channel]"
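The plotting helpers now resolve per-channel sub-blocks through the channels extension. A minimal sketch tying the pieces together; the parameters mirror the notebook cell above and the makeFile() call mirrors the script usage, so chaining them on this overload is an assumption:

import ru.inr.mass.data.analysis.NumassEventExtractor
import ru.inr.mass.workspace.Numass
import ru.inr.mass.workspace.plotNumassSet
import space.kscience.plotly.Plotly
import space.kscience.plotly.makeFile

fun main() {
    // Read one acquisition directory and render its amplitude spectra with the TQDC extractor.
    val set = Numass.readDirectory("D:\\Work\\Numass\\data\\test\\set_7")
    Plotly.plotNumassSet(set, amplitudeBinSize = 8U, eventExtractor = NumassEventExtractor.TQDC).makeFile()
}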