Merge branch 'feature/data-viewer' into dev

# Conflicts:
#	build.gradle.kts
#	gradle.properties
#	gradle/wrapper/gradle-wrapper.properties
#	kotlin-js-store/yarn.lock
#	numass-analysis/build.gradle.kts
#	numass-data-proto/build.gradle.kts
#	numass-data-server/build.gradle.kts
#	numass-workspace/build.gradle.kts
#	numass-workspace/src/main/kotlin/ru/inr/mass/workspace/Numass.kt
#	numass-workspace/src/main/kotlin/ru/inr/mass/workspace/NumassPlugin.kt
#	settings.gradle.kts
Author: Alexander Nozik, 2023-01-24 15:34:40 +03:00
Commit: 26f5e7461c
48 changed files with 3532 additions and 2388 deletions

.gitignore

@@ -5,5 +5,6 @@ out/
 .gradle
 build/
 /notebooks/.ipynb_checkpoints
+/kotlin-js-store/
 !gradle-wrapper.jar


@@ -8,16 +8,18 @@ allprojects {
     repositories {
         mavenLocal()
         maven("https://repo.kotlin.link")
+        maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
     }

     group = "ru.inr.mass"
-    version = "0.1.2"
+    version = "0.1.3"
 }

-val dataforgeVersion by extra("0.5.3-dev-4")
-val tablesVersion: String by extra("0.1.2")
-val kmathVersion by extra("0.3.0-dev-17")
-val plotlyVersion: String by extra("0.5.0")
+val dataforgeVersion by extra("0.6.0-dev-15")
+val tablesVersion: String by extra("0.2.0-dev-3")
+val kmathVersion by extra("0.3.1-dev-6")
+val visionForgeVersion: String by rootProject.extra("0.3.0-dev-6")

 ksciencePublish {


@@ -13,3 +13,4 @@ org.gradle.parallel=true
 org.gradle.jvmargs=-XX:MaxMetaspaceSize=1G
 toolsVersion=0.13.3-kotlin-1.7.20
+compose.version=1.2.1

File diff suppressed because it is too large


@@ -9,7 +9,7 @@
    "source": [
     "@file:Repository(\"https://repo.kotlin.link\")\n",
     "@file:Repository(\"*mavenLocal\")\n",
-    "@file:DependsOn(\"ru.inr.mass:numass-workspace:0.1.1\")"
+    "@file:DependsOn(\"ru.inr.mass:numass-workspace:0.1.3\")"
    ]
   },
   {


@@ -1,9 +1,9 @@
 package ru.inr.mass.data.analysis

 import kotlinx.coroutines.coroutineScope
-import kotlinx.coroutines.flow.collect
 import kotlinx.coroutines.launch
 import ru.inr.mass.data.api.NumassBlock
+import ru.inr.mass.data.api.NumassEvent
 import space.kscience.kmath.histogram.LongCounter
 import kotlin.math.min
@@ -44,6 +44,25 @@ public suspend fun NumassBlock.amplitudeSpectrum(
     return NumassAmplitudeSpectrum(map.mapValues { it.value.value.toULong() })
 }

+public suspend fun NumassBlock.energySpectrum(
+    extractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY,
+    calibration: (NumassEvent) -> Double,
+): Map<Double, Long> {
+    val map = HashMap<Double, LongCounter>()
+    extractor.extract(this).collect { event ->
+        map.getOrPut(calibration(event)) { LongCounter() }.add(1L)
+    }
+    return map.mapValues { it.value.value }
+}
+
+public suspend fun NumassBlock.eventsCount(extractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY): Long {
+    var counter: Long = 0L
+    extractor.extract(this).collect {
+        counter++
+    }
+    return counter
+}
+
 /**
  * Collect events from block in parallel
  */
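
A minimal usage sketch of the new energySpectrum/eventsCount extensions, assuming a NumassBlock obtained elsewhere; the linear channel-to-energy calibration is purely illustrative:

import kotlinx.coroutines.runBlocking
import ru.inr.mass.data.analysis.energySpectrum
import ru.inr.mass.data.analysis.eventsCount
import ru.inr.mass.data.api.NumassBlock

fun printSpectrum(block: NumassBlock) = runBlocking {
    // hypothetical calibration: energy as a linear function of the amplitude channel
    val spectrum = block.energySpectrum { event -> 0.01 * event.amplitude.toInt() + 0.5 }
    println("total events: ${block.eventsCount()}")
    spectrum.entries.sortedBy { it.key }.forEach { (energy, count) ->
        println("$energy -> $count")
    }
}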


@@ -23,10 +23,6 @@ import ru.inr.mass.data.api.NumassPoint.Companion.HV_KEY
 import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
-import space.kscience.dataforge.values.ListValue
-import space.kscience.dataforge.values.Value
-import space.kscience.dataforge.values.ValueType
-import space.kscience.dataforge.values.int
 import space.kscience.tables.ColumnHeader
 import space.kscience.tables.MetaRow
 import space.kscience.tables.RowTable


@@ -2,6 +2,7 @@ package ru.inr.mass.data.analysis

 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.map
+import kotlinx.coroutines.flow.mapNotNull
 import ru.inr.mass.data.api.NumassBlock
 import ru.inr.mass.data.api.NumassEvent
@@ -37,6 +38,41 @@ public fun interface NumassEventExtractor {
                 )
             }
         }
+
+        public val TQDC_V2: NumassEventExtractor = NumassEventExtractor { block ->
+            block.frames.mapNotNull { frame ->
+                var max = Short.MIN_VALUE
+                var min = Short.MAX_VALUE
+                var indexOfMax = 0
+
+                // Taking first 8 points as a baseline
+                val baseline = frame.signal.take(8).average()
+
+                frame.signal.forEachIndexed { index, sh: Short ->
+                    if (sh >= max) {
+                        max = sh
+                        indexOfMax = index
+                    }
+                    if (sh <= min) {
+                        min = sh
+                    }
+                }
+
+                /*
+                 * Filtering large negative splashes
+                 */
+                if (baseline - min < 300) {
+                    NumassEvent(
+                        (max - baseline).toInt().toShort(),
+                        frame.timeOffset + frame.tickSize.inWholeNanoseconds * indexOfMax,
+                        block
+                    )
+                } else {
+                    null
+                }
+            }
+        }
     }
 }
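
A short sketch of driving the new frame-based extractor directly, assuming `block` comes from a TQDC point; the amplitude threshold is illustrative:

import kotlinx.coroutines.flow.count
import ru.inr.mass.data.analysis.NumassEventExtractor
import ru.inr.mass.data.api.NumassBlock

// counts events reconstructed from raw frames whose amplitude passes an illustrative cut
suspend fun countTqdcEvents(block: NumassBlock): Int =
    NumassEventExtractor.TQDC_V2.extract(block).count { it.amplitude > 100 }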


@@ -31,7 +31,9 @@ import kotlin.math.*
  * An analyzer which uses time information from events
  * Created by darksnake on 11.07.2017.
  */
-public open class TimeAnalyzer(override val extractor: NumassEventExtractor) : NumassAnalyzer() {
+public open class TimeAnalyzer(
+    override val extractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY,
+) : NumassAnalyzer() {

     override suspend fun analyzeInternal(
         block: NumassBlock,
@@ -76,7 +78,7 @@ public open class TimeAnalyzer(
         filter { pair -> pair.second >= t0 }.collect { pair ->
             totalN++
             //TODO add progress listener here
-            totalT+= pair.second
+            totalT += pair.second
         }

         if (totalN == 0L) {
@@ -114,10 +116,12 @@ public open class TimeAnalyzer(
                 sumOf { it.countRate } / size,
                 sumOf { it.countRateError.pow(2.0) } / size / size
             )
+
             AveragingMethod.WEIGHTED -> Pair(
                 sumOf { it.countRate * it.length } / totalTime,
                 sumOf { (it.countRateError * it.length / totalTime).pow(2.0) }
             )
+
             AveragingMethod.GEOMETRIC -> {
                 val mean = exp(sumOf { ln(it.countRate) } / size)
                 val variance = (mean / size).pow(2.0) * sumOf {


@@ -1,12 +1,14 @@
 package ru.inr.mass.data.analysis

 import kotlinx.coroutines.flow.Flow
-import kotlinx.coroutines.flow.collect
 import kotlinx.coroutines.flow.transform
 import kotlinx.coroutines.runBlocking
 import ru.inr.mass.data.api.NumassBlock
 import ru.inr.mass.data.api.getTime
-import space.kscience.kmath.histogram.UnivariateHistogram
+import space.kscience.kmath.histogram.Histogram
+import space.kscience.kmath.histogram.UniformHistogram1D
+import space.kscience.kmath.histogram.uniform1D
+import space.kscience.kmath.operations.DoubleField
 import kotlin.math.max
 import kotlin.time.DurationUnit
@@ -23,7 +25,7 @@ public fun <T, R> Flow<T>.zipWithNext(block: (l: T, r: T) -> R): Flow<R> {
 public fun NumassBlock.timeHistogram(
     binSize: Double,
     extractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY,
-): UnivariateHistogram = UnivariateHistogram.uniform(binSize) {
+): UniformHistogram1D<Double> = Histogram.uniform1D(DoubleField, binSize).produce {
     runBlocking {
         extractor.extract(this@timeHistogram).zipWithNext { l, r ->
             if(l.owner == r.owner) {


@@ -6,6 +6,7 @@
 package ru.inr.mass.data.api

 import kotlinx.datetime.Instant
+import ru.inr.mass.data.proto.HVData
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.long
@@ -40,6 +41,8 @@ public interface NumassSet : Iterable<NumassPoint>, Provider {
     override val defaultTarget: String get() = NUMASS_POINT_TARGET

+    public val hvData: HVData?
+
     override fun content(target: String): Map<Name, Any> = if (target == NUMASS_POINT_TARGET) {
         points.associateBy { NameToken("point", it.voltage.toString()).asName() }
     } else {


@@ -1,6 +1,6 @@
 plugins {
     id("space.kscience.gradle.jvm")
-    id("com.squareup.wire") version "4.4.3"
+    id("com.squareup.wire") version "4.4.1"
     `maven-publish`
 }
@@ -9,9 +9,15 @@ val dataforgeVersion: String by rootProject.extra
 dependencies {
     api(project(":numass-data-model"))
     api("space.kscience:dataforge-io:$dataforgeVersion")
+//    api(npmlibs.ktor.io)
 }

-wire{
-    kotlin{}
+wire {
+    kotlin {
+        out = "src/gen/kotlin"
+    }
+}
+
+sourceSets.main {
+    kotlin.srcDir("src/gen/kotlin")
 }


@@ -0,0 +1,659 @@
// Code generated by Wire protocol buffer compiler, do not edit.
// Source: ru.inr.mass.data.proto.Point in numass-proto.proto
package ru.inr.mass.`data`.proto
import com.squareup.wire.*
import com.squareup.wire.Syntax.PROTO_3
import com.squareup.wire.internal.immutableCopyOf
import com.squareup.wire.internal.redactElements
import okio.ByteString
public class Point(
channels: List<Channel> = emptyList(),
unknownFields: ByteString = ByteString.EMPTY,
) : Message<Point, Nothing>(ADAPTER, unknownFields) {
/**
* Array of measuring channels
*/
@field:WireField(
tag = 1,
adapter = "ru.inr.mass.data.proto.Point${'$'}Channel#ADAPTER",
label = WireField.Label.REPEATED,
)
public val channels: List<Channel> = immutableCopyOf("channels", channels)
@Deprecated(
message = "Shouldn't be used in Kotlin",
level = DeprecationLevel.HIDDEN,
)
public override fun newBuilder(): Nothing = throw
AssertionError("Builders are deprecated and only available in a javaInterop build; see https://square.github.io/wire/wire_compiler/#kotlin")
public override fun equals(other: Any?): Boolean {
if (other === this) return true
if (other !is Point) return false
if (unknownFields != other.unknownFields) return false
if (channels != other.channels) return false
return true
}
public override fun hashCode(): Int {
var result = super.hashCode
if (result == 0) {
result = unknownFields.hashCode()
result = result * 37 + channels.hashCode()
super.hashCode = result
}
return result
}
public override fun toString(): String {
val result = mutableListOf<String>()
if (channels.isNotEmpty()) result += """channels=$channels"""
return result.joinToString(prefix = "Point{", separator = ", ", postfix = "}")
}
public fun copy(channels: List<Channel> = this.channels, unknownFields: ByteString =
this.unknownFields): Point = Point(channels, unknownFields)
public companion object {
@JvmField
public val ADAPTER: ProtoAdapter<Point> = object : ProtoAdapter<Point>(
FieldEncoding.LENGTH_DELIMITED,
Point::class,
"type.googleapis.com/ru.inr.mass.data.proto.Point",
PROTO_3,
null,
"numass-proto.proto"
) {
public override fun encodedSize(`value`: Point): Int {
var size = value.unknownFields.size
size += Channel.ADAPTER.asRepeated().encodedSizeWithTag(1, value.channels)
return size
}
public override fun encode(writer: ProtoWriter, `value`: Point): Unit {
Channel.ADAPTER.asRepeated().encodeWithTag(writer, 1, value.channels)
writer.writeBytes(value.unknownFields)
}
public override fun encode(writer: ReverseProtoWriter, `value`: Point): Unit {
writer.writeBytes(value.unknownFields)
Channel.ADAPTER.asRepeated().encodeWithTag(writer, 1, value.channels)
}
public override fun decode(reader: ProtoReader): Point {
val channels = mutableListOf<Channel>()
val unknownFields = reader.forEachTag { tag ->
when (tag) {
1 -> channels.add(Channel.ADAPTER.decode(reader))
else -> reader.readUnknownField(tag)
}
}
return Point(
channels = channels,
unknownFields = unknownFields
)
}
public override fun redact(`value`: Point): Point = value.copy(
channels = value.channels.redactElements(Channel.ADAPTER),
unknownFields = ByteString.EMPTY
)
}
private const val serialVersionUID: Long = 0L
}
/**
* A single channel for multichannel detector readout
*/
public class Channel(
/**
* The number of measuring channel
*/
@field:WireField(
tag = 1,
adapter = "com.squareup.wire.ProtoAdapter#UINT64",
label = WireField.Label.OMIT_IDENTITY,
)
public val id: Long = 0L,
blocks: List<Block> = emptyList(),
unknownFields: ByteString = ByteString.EMPTY,
) : Message<Channel, Nothing>(ADAPTER, unknownFields) {
/**
* Blocks
*/
@field:WireField(
tag = 2,
adapter = "ru.inr.mass.data.proto.Point${'$'}Channel${'$'}Block#ADAPTER",
label = WireField.Label.REPEATED,
)
public val blocks: List<Block> = immutableCopyOf("blocks", blocks)
@Deprecated(
message = "Shouldn't be used in Kotlin",
level = DeprecationLevel.HIDDEN,
)
public override fun newBuilder(): Nothing = throw
AssertionError("Builders are deprecated and only available in a javaInterop build; see https://square.github.io/wire/wire_compiler/#kotlin")
public override fun equals(other: Any?): Boolean {
if (other === this) return true
if (other !is Channel) return false
if (unknownFields != other.unknownFields) return false
if (id != other.id) return false
if (blocks != other.blocks) return false
return true
}
public override fun hashCode(): Int {
var result = super.hashCode
if (result == 0) {
result = unknownFields.hashCode()
result = result * 37 + id.hashCode()
result = result * 37 + blocks.hashCode()
super.hashCode = result
}
return result
}
public override fun toString(): String {
val result = mutableListOf<String>()
result += """id=$id"""
if (blocks.isNotEmpty()) result += """blocks=$blocks"""
return result.joinToString(prefix = "Channel{", separator = ", ", postfix = "}")
}
public fun copy(
id: Long = this.id,
blocks: List<Block> = this.blocks,
unknownFields: ByteString = this.unknownFields,
): Channel = Channel(id, blocks, unknownFields)
public companion object {
@JvmField
public val ADAPTER: ProtoAdapter<Channel> = object : ProtoAdapter<Channel>(
FieldEncoding.LENGTH_DELIMITED,
Channel::class,
"type.googleapis.com/ru.inr.mass.data.proto.Point.Channel",
PROTO_3,
null,
"numass-proto.proto"
) {
public override fun encodedSize(`value`: Channel): Int {
var size = value.unknownFields.size
if (value.id != 0L) size += ProtoAdapter.UINT64.encodedSizeWithTag(1, value.id)
size += Block.ADAPTER.asRepeated().encodedSizeWithTag(2, value.blocks)
return size
}
public override fun encode(writer: ProtoWriter, `value`: Channel): Unit {
if (value.id != 0L) ProtoAdapter.UINT64.encodeWithTag(writer, 1, value.id)
Block.ADAPTER.asRepeated().encodeWithTag(writer, 2, value.blocks)
writer.writeBytes(value.unknownFields)
}
public override fun encode(writer: ReverseProtoWriter, `value`: Channel): Unit {
writer.writeBytes(value.unknownFields)
Block.ADAPTER.asRepeated().encodeWithTag(writer, 2, value.blocks)
if (value.id != 0L) ProtoAdapter.UINT64.encodeWithTag(writer, 1, value.id)
}
public override fun decode(reader: ProtoReader): Channel {
var id: Long = 0L
val blocks = mutableListOf<Block>()
val unknownFields = reader.forEachTag { tag ->
when (tag) {
1 -> id = ProtoAdapter.UINT64.decode(reader)
2 -> blocks.add(Block.ADAPTER.decode(reader))
else -> reader.readUnknownField(tag)
}
}
return Channel(
id = id,
blocks = blocks,
unknownFields = unknownFields
)
}
public override fun redact(`value`: Channel): Channel = value.copy(
blocks = value.blocks.redactElements(Block.ADAPTER),
unknownFields = ByteString.EMPTY
)
}
private const val serialVersionUID: Long = 0L
}
/**
* A continuous measurement block
*/
public class Block(
/**
* Block start in epoch nanos
*/
@field:WireField(
tag = 1,
adapter = "com.squareup.wire.ProtoAdapter#UINT64",
label = WireField.Label.OMIT_IDENTITY,
)
public val time: Long = 0L,
frames: List<Frame> = emptyList(),
/**
* Events array
*/
@field:WireField(
tag = 3,
adapter = "ru.inr.mass.data.proto.Point${'$'}Channel${'$'}Block${'$'}Events#ADAPTER",
label = WireField.Label.OMIT_IDENTITY,
)
public val events: Events? = null,
/**
* block size in nanos. If missing, take from meta.
*/
@field:WireField(
tag = 4,
adapter = "com.squareup.wire.ProtoAdapter#UINT64",
label = WireField.Label.OMIT_IDENTITY,
)
public val length: Long = 0L,
/**
* tick size in nanos. Obsolete, to be removed
*/
@field:WireField(
tag = 5,
adapter = "com.squareup.wire.ProtoAdapter#UINT64",
label = WireField.Label.OMIT_IDENTITY,
jsonName = "binSize",
)
public val bin_size: Long = 0L,
unknownFields: ByteString = ByteString.EMPTY,
) : Message<Block, Nothing>(ADAPTER, unknownFields) {
/**
* Frames array
*/
@field:WireField(
tag = 2,
adapter = "ru.inr.mass.data.proto.Point${'$'}Channel${'$'}Block${'$'}Frame#ADAPTER",
label = WireField.Label.REPEATED,
)
public val frames: List<Frame> = immutableCopyOf("frames", frames)
@Deprecated(
message = "Shouldn't be used in Kotlin",
level = DeprecationLevel.HIDDEN,
)
public override fun newBuilder(): Nothing = throw
AssertionError("Builders are deprecated and only available in a javaInterop build; see https://square.github.io/wire/wire_compiler/#kotlin")
public override fun equals(other: Any?): Boolean {
if (other === this) return true
if (other !is Block) return false
if (unknownFields != other.unknownFields) return false
if (time != other.time) return false
if (frames != other.frames) return false
if (events != other.events) return false
if (length != other.length) return false
if (bin_size != other.bin_size) return false
return true
}
public override fun hashCode(): Int {
var result = super.hashCode
if (result == 0) {
result = unknownFields.hashCode()
result = result * 37 + time.hashCode()
result = result * 37 + frames.hashCode()
result = result * 37 + (events?.hashCode() ?: 0)
result = result * 37 + length.hashCode()
result = result * 37 + bin_size.hashCode()
super.hashCode = result
}
return result
}
public override fun toString(): String {
val result = mutableListOf<String>()
result += """time=$time"""
if (frames.isNotEmpty()) result += """frames=$frames"""
if (events != null) result += """events=$events"""
result += """length=$length"""
result += """bin_size=$bin_size"""
return result.joinToString(prefix = "Block{", separator = ", ", postfix = "}")
}
public fun copy(
time: Long = this.time,
frames: List<Frame> = this.frames,
events: Events? = this.events,
length: Long = this.length,
bin_size: Long = this.bin_size,
unknownFields: ByteString = this.unknownFields,
): Block = Block(time, frames, events, length, bin_size, unknownFields)
public companion object {
@JvmField
public val ADAPTER: ProtoAdapter<Block> = object : ProtoAdapter<Block>(
FieldEncoding.LENGTH_DELIMITED,
Block::class,
"type.googleapis.com/ru.inr.mass.data.proto.Point.Channel.Block",
PROTO_3,
null,
"numass-proto.proto"
) {
public override fun encodedSize(`value`: Block): Int {
var size = value.unknownFields.size
if (value.time != 0L) size += ProtoAdapter.UINT64.encodedSizeWithTag(1, value.time)
size += Frame.ADAPTER.asRepeated().encodedSizeWithTag(2, value.frames)
if (value.events != null) size += Events.ADAPTER.encodedSizeWithTag(3, value.events)
if (value.length != 0L) size += ProtoAdapter.UINT64.encodedSizeWithTag(4, value.length)
if (value.bin_size != 0L) size += ProtoAdapter.UINT64.encodedSizeWithTag(5,
value.bin_size)
return size
}
public override fun encode(writer: ProtoWriter, `value`: Block): Unit {
if (value.time != 0L) ProtoAdapter.UINT64.encodeWithTag(writer, 1, value.time)
Frame.ADAPTER.asRepeated().encodeWithTag(writer, 2, value.frames)
if (value.events != null) Events.ADAPTER.encodeWithTag(writer, 3, value.events)
if (value.length != 0L) ProtoAdapter.UINT64.encodeWithTag(writer, 4, value.length)
if (value.bin_size != 0L) ProtoAdapter.UINT64.encodeWithTag(writer, 5, value.bin_size)
writer.writeBytes(value.unknownFields)
}
public override fun encode(writer: ReverseProtoWriter, `value`: Block): Unit {
writer.writeBytes(value.unknownFields)
if (value.bin_size != 0L) ProtoAdapter.UINT64.encodeWithTag(writer, 5, value.bin_size)
if (value.length != 0L) ProtoAdapter.UINT64.encodeWithTag(writer, 4, value.length)
if (value.events != null) Events.ADAPTER.encodeWithTag(writer, 3, value.events)
Frame.ADAPTER.asRepeated().encodeWithTag(writer, 2, value.frames)
if (value.time != 0L) ProtoAdapter.UINT64.encodeWithTag(writer, 1, value.time)
}
public override fun decode(reader: ProtoReader): Block {
var time: Long = 0L
val frames = mutableListOf<Frame>()
var events: Events? = null
var length: Long = 0L
var bin_size: Long = 0L
val unknownFields = reader.forEachTag { tag ->
when (tag) {
1 -> time = ProtoAdapter.UINT64.decode(reader)
2 -> frames.add(Frame.ADAPTER.decode(reader))
3 -> events = Events.ADAPTER.decode(reader)
4 -> length = ProtoAdapter.UINT64.decode(reader)
5 -> bin_size = ProtoAdapter.UINT64.decode(reader)
else -> reader.readUnknownField(tag)
}
}
return Block(
time = time,
frames = frames,
events = events,
length = length,
bin_size = bin_size,
unknownFields = unknownFields
)
}
public override fun redact(`value`: Block): Block = value.copy(
frames = value.frames.redactElements(Frame.ADAPTER),
events = value.events?.let(Events.ADAPTER::redact),
unknownFields = ByteString.EMPTY
)
}
private const val serialVersionUID: Long = 0L
}
/**
* Raw data frame
*/
public class Frame(
/**
* Time in nanos from the beginning of the block
*/
@field:WireField(
tag = 1,
adapter = "com.squareup.wire.ProtoAdapter#UINT64",
label = WireField.Label.OMIT_IDENTITY,
)
public val time: Long = 0L,
/**
* Frame data as an array of int16 measured in arbitrary channels
*/
@field:WireField(
tag = 2,
adapter = "com.squareup.wire.ProtoAdapter#BYTES",
label = WireField.Label.OMIT_IDENTITY,
declaredName = "data",
)
public val data_: ByteString = ByteString.EMPTY,
unknownFields: ByteString = ByteString.EMPTY,
) : Message<Frame, Nothing>(ADAPTER, unknownFields) {
@Deprecated(
message = "Shouldn't be used in Kotlin",
level = DeprecationLevel.HIDDEN,
)
public override fun newBuilder(): Nothing = throw
AssertionError("Builders are deprecated and only available in a javaInterop build; see https://square.github.io/wire/wire_compiler/#kotlin")
public override fun equals(other: Any?): Boolean {
if (other === this) return true
if (other !is Frame) return false
if (unknownFields != other.unknownFields) return false
if (time != other.time) return false
if (data_ != other.data_) return false
return true
}
public override fun hashCode(): Int {
var result = super.hashCode
if (result == 0) {
result = unknownFields.hashCode()
result = result * 37 + time.hashCode()
result = result * 37 + data_.hashCode()
super.hashCode = result
}
return result
}
public override fun toString(): String {
val result = mutableListOf<String>()
result += """time=$time"""
result += """data_=$data_"""
return result.joinToString(prefix = "Frame{", separator = ", ", postfix = "}")
}
public fun copy(
time: Long = this.time,
data_: ByteString = this.data_,
unknownFields: ByteString = this.unknownFields,
): Frame = Frame(time, data_, unknownFields)
public companion object {
@JvmField
public val ADAPTER: ProtoAdapter<Frame> = object : ProtoAdapter<Frame>(
FieldEncoding.LENGTH_DELIMITED,
Frame::class,
"type.googleapis.com/ru.inr.mass.data.proto.Point.Channel.Block.Frame",
PROTO_3,
null,
"numass-proto.proto"
) {
public override fun encodedSize(`value`: Frame): Int {
var size = value.unknownFields.size
if (value.time != 0L) size += ProtoAdapter.UINT64.encodedSizeWithTag(1, value.time)
if (value.data_ != ByteString.EMPTY) size += ProtoAdapter.BYTES.encodedSizeWithTag(2,
value.data_)
return size
}
public override fun encode(writer: ProtoWriter, `value`: Frame): Unit {
if (value.time != 0L) ProtoAdapter.UINT64.encodeWithTag(writer, 1, value.time)
if (value.data_ != ByteString.EMPTY) ProtoAdapter.BYTES.encodeWithTag(writer, 2,
value.data_)
writer.writeBytes(value.unknownFields)
}
public override fun encode(writer: ReverseProtoWriter, `value`: Frame): Unit {
writer.writeBytes(value.unknownFields)
if (value.data_ != ByteString.EMPTY) ProtoAdapter.BYTES.encodeWithTag(writer, 2,
value.data_)
if (value.time != 0L) ProtoAdapter.UINT64.encodeWithTag(writer, 1, value.time)
}
public override fun decode(reader: ProtoReader): Frame {
var time: Long = 0L
var data_: ByteString = ByteString.EMPTY
val unknownFields = reader.forEachTag { tag ->
when (tag) {
1 -> time = ProtoAdapter.UINT64.decode(reader)
2 -> data_ = ProtoAdapter.BYTES.decode(reader)
else -> reader.readUnknownField(tag)
}
}
return Frame(
time = time,
data_ = data_,
unknownFields = unknownFields
)
}
public override fun redact(`value`: Frame): Frame = value.copy(
unknownFields = ByteString.EMPTY
)
}
private const val serialVersionUID: Long = 0L
}
}
/**
* Event block obtained directly from the device or from frame analysis.
* In order to save space, times and amplitudes are in separate arrays.
* Amplitude and time with the same index correspond to the same event
*/
public class Events(
times: List<Long> = emptyList(),
amplitudes: List<Long> = emptyList(),
unknownFields: ByteString = ByteString.EMPTY,
) : Message<Events, Nothing>(ADAPTER, unknownFields) {
/**
* Array of time in nanos from the beginning of the block
*/
@field:WireField(
tag = 1,
adapter = "com.squareup.wire.ProtoAdapter#UINT64",
label = WireField.Label.PACKED,
)
public val times: List<Long> = immutableCopyOf("times", times)
/**
* Array of amplitudes of events in channels
*/
@field:WireField(
tag = 2,
adapter = "com.squareup.wire.ProtoAdapter#UINT64",
label = WireField.Label.PACKED,
)
public val amplitudes: List<Long> = immutableCopyOf("amplitudes", amplitudes)
@Deprecated(
message = "Shouldn't be used in Kotlin",
level = DeprecationLevel.HIDDEN,
)
public override fun newBuilder(): Nothing = throw
AssertionError("Builders are deprecated and only available in a javaInterop build; see https://square.github.io/wire/wire_compiler/#kotlin")
public override fun equals(other: Any?): Boolean {
if (other === this) return true
if (other !is Events) return false
if (unknownFields != other.unknownFields) return false
if (times != other.times) return false
if (amplitudes != other.amplitudes) return false
return true
}
public override fun hashCode(): Int {
var result = super.hashCode
if (result == 0) {
result = unknownFields.hashCode()
result = result * 37 + times.hashCode()
result = result * 37 + amplitudes.hashCode()
super.hashCode = result
}
return result
}
public override fun toString(): String {
val result = mutableListOf<String>()
if (times.isNotEmpty()) result += """times=$times"""
if (amplitudes.isNotEmpty()) result += """amplitudes=$amplitudes"""
return result.joinToString(prefix = "Events{", separator = ", ", postfix = "}")
}
public fun copy(
times: List<Long> = this.times,
amplitudes: List<Long> = this.amplitudes,
unknownFields: ByteString = this.unknownFields,
): Events = Events(times, amplitudes, unknownFields)
public companion object {
@JvmField
public val ADAPTER: ProtoAdapter<Events> = object : ProtoAdapter<Events>(
FieldEncoding.LENGTH_DELIMITED,
Events::class,
"type.googleapis.com/ru.inr.mass.data.proto.Point.Channel.Block.Events",
PROTO_3,
null,
"numass-proto.proto"
) {
public override fun encodedSize(`value`: Events): Int {
var size = value.unknownFields.size
size += ProtoAdapter.UINT64.asPacked().encodedSizeWithTag(1, value.times)
size += ProtoAdapter.UINT64.asPacked().encodedSizeWithTag(2, value.amplitudes)
return size
}
public override fun encode(writer: ProtoWriter, `value`: Events): Unit {
ProtoAdapter.UINT64.asPacked().encodeWithTag(writer, 1, value.times)
ProtoAdapter.UINT64.asPacked().encodeWithTag(writer, 2, value.amplitudes)
writer.writeBytes(value.unknownFields)
}
public override fun encode(writer: ReverseProtoWriter, `value`: Events): Unit {
writer.writeBytes(value.unknownFields)
ProtoAdapter.UINT64.asPacked().encodeWithTag(writer, 2, value.amplitudes)
ProtoAdapter.UINT64.asPacked().encodeWithTag(writer, 1, value.times)
}
public override fun decode(reader: ProtoReader): Events {
val times = mutableListOf<Long>()
val amplitudes = mutableListOf<Long>()
val unknownFields = reader.forEachTag { tag ->
when (tag) {
1 -> times.add(ProtoAdapter.UINT64.decode(reader))
2 -> amplitudes.add(ProtoAdapter.UINT64.decode(reader))
else -> reader.readUnknownField(tag)
}
}
return Events(
times = times,
amplitudes = amplitudes,
unknownFields = unknownFields
)
}
public override fun redact(`value`: Events): Events = value.copy(
unknownFields = ByteString.EMPTY
)
}
private const val serialVersionUID: Long = 0L
}
}
}
}
}


@@ -4,10 +4,7 @@ import ru.inr.mass.data.api.NumassPoint
 import ru.inr.mass.data.api.NumassSet
 import ru.inr.mass.data.api.NumassSet.Companion.NUMASS_HV_TARGET
 import ru.inr.mass.data.api.readEnvelope
-import space.kscience.dataforge.context.Context
-import space.kscience.dataforge.context.error
-import space.kscience.dataforge.context.logger
-import space.kscience.dataforge.context.warn
+import space.kscience.dataforge.context.*
 import space.kscience.dataforge.io.io
 import space.kscience.dataforge.io.readEnvelopeFile
 import space.kscience.dataforge.meta.Meta
@@ -16,13 +13,16 @@ import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
 import java.nio.file.Files
 import java.nio.file.Path
+import java.util.stream.Collectors
 import kotlin.io.path.*
-import kotlin.streams.toList

 public class NumassDirectorySet internal constructor(
-    public val context: Context,
+    public val numassProto: NumassProtoPlugin,
     public val path: Path,
-) : NumassSet {
+) : NumassSet, ContextAware {
+
+    override val context: Context
+        get() = numassProto.context

     @OptIn(DFExperimental::class)
     override val meta: Meta
@@ -42,27 +42,24 @@ public class NumassDirectorySet internal constructor(
             it.fileName.name.startsWith("p")
         }.map { pointPath ->
             try {
-                context.readNumassPointFile(pointPath)
+                numassProto.readNumassPointFile(pointPath)
             } catch (e: Exception) {
                 context.logger.error(e) { "Error reading Numass point file $pointPath" }
                 null
             }
-        }.toList().filterNotNull()
+        }.collect(Collectors.toList()).filterNotNull()

     @OptIn(DFExperimental::class)
-    public fun getHvData(): HVData? {
-        val hvFile = path / "voltage"
-        return if (hvFile.exists()) {
+    override val hvData: HVData?
+        get() = (path / "voltage").takeIf { it.exists() }?.let { hvFile ->
             val envelope = context.io.readEnvelopeFile(hvFile)
             HVData.readEnvelope(envelope)
-        } else {
-            null
-        }
-    }
+        }

     override fun content(target: String): Map<Name, Any> = if (target == NUMASS_HV_TARGET) {
-        val hvData = getHvData()
+        val hvData = hvData
         if (hvData != null) {
             mapOf("hv".asName() to hvData)
         } else {
@@ -72,20 +69,3 @@ public class NumassDirectorySet internal constructor(
     public companion object
 }
-
-@OptIn(DFExperimental::class)
-public fun Context.readNumassPointFile(path: Path): NumassPoint? {
-    val envelope = io.readEnvelopeFile(path)
-    return ProtoNumassPoint.fromEnvelope(envelope)
-}
-
-public fun Context.readNumassPointFile(path: String): NumassPoint? = readNumassPointFile(Path.of(path))
-
-@OptIn(ExperimentalPathApi::class)
-public fun Context.readNumassDirectory(path: Path): NumassDirectorySet {
-    if (!path.exists()) error("Path $path does not exist")
-    if (!path.isDirectory()) error("The path $path is not a directory")
-    return NumassDirectorySet(this, path)
-}
-
-public fun Context.readNumassDirectory(path: String): NumassDirectorySet = readNumassDirectory(Path.of(path))


@@ -1,22 +1,34 @@
 package ru.inr.mass.data.proto

+import ru.inr.mass.data.api.NumassPoint
 import space.kscience.dataforge.context.AbstractPlugin
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.PluginFactory
 import space.kscience.dataforge.context.PluginTag
+import space.kscience.dataforge.data.DataSource
+import space.kscience.dataforge.data.DataTree
+import space.kscience.dataforge.data.static
 import space.kscience.dataforge.io.EnvelopeFormatFactory
 import space.kscience.dataforge.io.IOPlugin
+import space.kscience.dataforge.io.readEnvelopeFile
 import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.NameToken
+import java.nio.file.Files
+import java.nio.file.Path
+import kotlin.io.path.exists
+import kotlin.io.path.isDirectory
+import kotlin.io.path.relativeTo
 import kotlin.reflect.KClass

 public class NumassProtoPlugin : AbstractPlugin() {
     public val io: IOPlugin by require(IOPlugin)
     override val tag: PluginTag get() = Companion.tag

-    override fun content(target: String): Map<Name, Any> = if(target == EnvelopeFormatFactory.ENVELOPE_FORMAT_TYPE){
+    override fun content(target: String): Map<Name, Any> = if (target == EnvelopeFormatFactory.ENVELOPE_FORMAT_TYPE) {
         mapOf(TaggedNumassEnvelopeFormat.name to TaggedNumassEnvelopeFormat)
-    } else{
+    } else {
         super.content(target)
     }
@@ -26,3 +38,39 @@ public class NumassProtoPlugin : AbstractPlugin() {
         override val type: KClass<out NumassProtoPlugin> = NumassProtoPlugin::class
     }
 }
+
+@OptIn(DFExperimental::class)
+public fun NumassProtoPlugin.readNumassPointFile(path: Path): NumassPoint? {
+    val envelope = io.readEnvelopeFile(path)
+    return ProtoNumassPoint.fromEnvelope(envelope)
+}
+
+public fun NumassProtoPlugin.readNumassPointFile(path: String): NumassPoint? = readNumassPointFile(Path.of(path))
+
+public fun NumassProtoPlugin.readNumassDirectory(path: Path): NumassDirectorySet {
+    if (!path.exists()) error("Path $path does not exist")
+    if (!path.isDirectory()) error("The path $path is not a directory")
+    return NumassDirectorySet(this, path)
+}
+
+public fun NumassProtoPlugin.readNumassDirectory(path: String): NumassDirectorySet = readNumassDirectory(Path.of(path))
+
+public suspend fun NumassProtoPlugin.readRepository(path: Path): DataTree<NumassDirectorySet> = DataSource {
+    Files.walk(path).filter {
+        it.isDirectory() && it.resolve("meta").exists()
+    }.forEach { childPath ->
+        val name = Name(childPath.relativeTo(path).map { segment ->
+            NameToken(segment.fileName.toString())
+        })
+        val value = readNumassDirectory(childPath)
+        static(name, value, value.meta)
+    }
+    //TODO add file watcher
+}
+
+public suspend fun NumassProtoPlugin.readRepository(path: String): DataTree<NumassDirectorySet> =
+    readRepository(Path.of(path))
+
+public fun NumassProtoPlugin.readPoint(path: String): NumassPoint = readNumassPointFile(path)
+    ?: error("Can't read numass point at $path")
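
A minimal sketch of the relocated reader API, mirroring the test setup further below; the data path is illustrative:

import ru.inr.mass.data.proto.NumassProtoPlugin
import ru.inr.mass.data.proto.readNumassDirectory
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.fetch

fun main() {
    val context = Context("numass") {
        plugin(NumassProtoPlugin)
    }
    // readers now hang off the plugin instance instead of a bare Context
    val numassProto = context.fetch(NumassProtoPlugin)
    val set = numassProto.readNumassDirectory("/data/numass/set_1")
    set.points.forEach { point ->
        println("point ${point.index}: HV = ${point.voltage}")
    }
}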


@@ -20,6 +20,9 @@ import io.ktor.utils.io.core.readBytes
 import kotlinx.coroutines.flow.*
 import kotlinx.coroutines.runBlocking
 import kotlinx.datetime.Instant
+import kotlinx.datetime.LocalDateTime
+import kotlinx.datetime.TimeZone
+import kotlinx.datetime.toInstant
 import okio.ByteString
 import org.slf4j.LoggerFactory
 import ru.inr.mass.data.api.NumassBlock
@@ -79,9 +82,17 @@ internal class ProtoNumassPoint(
     override val index: Int get() = meta["external_meta.point_index"].int ?: super.index

     override val startTime: Instant
-        get() = meta["start_time"].long?.let {
-            Instant.fromEpochMilliseconds(it)
-        } ?: Instant.DISTANT_PAST
+        get() {
+            val startTimeValue = meta["start_time"]?.value
+            return when {
+                startTimeValue == null -> Instant.DISTANT_PAST
+                startTimeValue.type == ValueType.STRING -> LocalDateTime.parse(startTimeValue.string)
+                    .toInstant(TimeZone.UTC)
+                //TODO fix time zones!!!
+                startTimeValue.type == ValueType.NUMBER -> Instant.fromEpochMilliseconds(startTimeValue.long)
+                else -> error("Can't decode start time")
+            }
+        }

     override suspend fun getLength(): Duration = meta["acquisition_time"].double?.let {
         (it * 1000).milliseconds
@@ -103,7 +114,7 @@ internal class ProtoNumassPoint(
     override fun toString(): String = "ProtoNumassPoint(index = ${index}, hv = $voltage)"

-    public companion object {
+    companion object {

         /**
          * Get valid data stream utilizing compression if it is present
@@ -129,6 +140,7 @@ internal class ProtoNumassPoint(
                 inflater.end()
                 ByteArrayInputStream(unzippeddata).use(block)
             }
+
             else -> {
                 data?.read {
                     block(asInputStream())
@@ -136,7 +148,7 @@ internal class ProtoNumassPoint(
             }
         }

-        public fun fromEnvelope(envelope: Envelope): ProtoNumassPoint? {
+        fun fromEnvelope(envelope: Envelope): ProtoNumassPoint? {
             if (envelope.data == null) return null
             return ProtoNumassPoint(envelope.meta) {
                 envelope.useData {
@@ -167,6 +179,7 @@ public class ProtoNumassBlock(
         block.length > 0 -> block.length.nanoseconds
         parent?.meta?.get("acquisition_time") != null ->
             (parent.meta["acquisition_time"].double ?: (0.0 * 1000)).milliseconds
+
         else -> {
             LoggerFactory.getLogger(javaClass)
                 .error("No length information on block. Trying to infer from first and last events")
@@ -208,7 +221,8 @@ public class ProtoNumassBlock(
         } else {
             ShortArray(shortBuffer.limit()) { shortBuffer.get(it) }
         }
-        FrameType.TQDC2021 -> ShortArray(shortBuffer.limit()){
+
+        FrameType.TQDC2021 -> ShortArray(shortBuffer.limit()) {
             (shortBuffer.get(it).toUShort().toInt() - Short.MAX_VALUE).toShort()
         }
     }
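
An illustrative check of the two start_time encodings handled above (an ISO local date-time string interpreted as UTC, and epoch milliseconds); the concrete values are made up for the example:

import kotlinx.datetime.Instant
import kotlinx.datetime.LocalDateTime
import kotlinx.datetime.TimeZone
import kotlinx.datetime.toInstant

fun main() {
    // string form, as handled by the ValueType.STRING branch
    val fromString: Instant = LocalDateTime.parse("2018-04-13T22:01:46").toInstant(TimeZone.UTC)
    // numeric form, as handled by the ValueType.NUMBER branch
    val fromMillis: Instant = Instant.fromEpochMilliseconds(1523656906000)
    println(fromString == fromMillis) // both decode to the same instant
}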


@@ -24,13 +24,13 @@ import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.string
 import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.plus
+import space.kscience.dataforge.names.parseAsName
 import java.util.*

 public class TaggedNumassEnvelopeFormat(private val io: IOPlugin) : EnvelopeFormat {

-    private fun Tag.toBinary() = Binary(24) {
+    private fun Tag.toBinary() = Binary {
         writeRawString(START_SEQUENCE)
         writeRawString("DFNU")
         writeShort(metaFormatKey)
@@ -71,7 +71,7 @@ public class TaggedNumassEnvelopeFormat(private val io: IOPlugin) : EnvelopeFormat {
         val metaFormat = io.resolveMetaFormat(tag.metaFormatKey)
             ?: error("Meta format with key ${tag.metaFormatKey} not found")

-        val meta: Meta = metaFormat.readObject(input.readBinary(tag.metaSize.toInt()))
+        val meta: Meta = metaFormat.readObjectFrom(input.readBinary(tag.metaSize.toInt()))

         val data = input.readBinary(tag.dataSize.toInt())
@@ -88,10 +88,10 @@ public class TaggedNumassEnvelopeFormat(private val io: IOPlugin) : EnvelopeFormat {
             ?: error("Meta format with key ${tag.metaFormatKey} not found")
         }

-        val meta: Meta = metaFormat.readObject(input.readBinary(tag.metaSize.toInt()))
+        val meta: Meta = metaFormat.readObjectFrom(input.readBinary(tag.metaSize.toInt()))

-        return PartialEnvelope(meta, 30u + tag.metaSize, tag.dataSize)
+        return PartialEnvelope(meta, 30 + tag.metaSize.toInt(), tag.dataSize)
     }

     private data class Tag(
@@ -100,15 +100,15 @@ public class TaggedNumassEnvelopeFormat(private val io: IOPlugin) : EnvelopeFormat {
         val dataSize: ULong,
     )

-    override fun toMeta(): Meta = Meta {
-        IOFormat.NAME_KEY put name.toString()
-    }
+//    override fun toMeta(): Meta = Meta {
+//        NAME_KEY put name.toString()
+//    }

     public companion object : EnvelopeFormatFactory {
         private const val START_SEQUENCE = "#!"
         private const val END_SEQUENCE = "!#\r\n"

-        override val name: Name = super.name + "numass"
+        override val name: Name = "envelope.numass".parseAsName()

         override fun build(context: Context, meta: Meta): EnvelopeFormat {
             val io = context.io


@@ -6,9 +6,10 @@ import ru.inr.mass.data.api.NumassSet
 import ru.inr.mass.data.api.ParentBlock
 import ru.inr.mass.data.api.channels
 import space.kscience.dataforge.context.Context
+import space.kscience.dataforge.context.fetch
+import space.kscience.dataforge.meta.ListValue
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.string
-import space.kscience.dataforge.values.ListValue
 import java.nio.file.Path
 import kotlin.test.assertEquals
@@ -16,11 +17,12 @@ class TestNumassDirectory {
     val context = Context("numass-test") {
         plugin(NumassProtoPlugin)
     }
+    val numassProto = context.fetch(NumassProtoPlugin)

     @Test
     fun testDanteRead() {
         val dataPath = Path.of("src/test/resources", "testData/dante")
-        val testSet = context.readNumassDirectory(dataPath)
+        val testSet = numassProto.readNumassDirectory(dataPath)
         assertEquals("2018-04-13T22:01:46", testSet.meta["end_time"].string)
         assertEquals(ListValue.EMPTY, testSet.meta["comments"]?.value)
         assertEquals(31, testSet.points.size)
@@ -32,12 +34,12 @@ class TestNumassDirectory {
     @Test
     fun testTQDCRead() = runBlocking {
         val pointPath = Path.of("src/test/resources", "testData/tqdc")
-        val set: NumassSet = context.readNumassDirectory(pointPath)
+        val set: NumassSet = numassProto.readNumassDirectory(pointPath)
         val point = set.first { it.voltage == 18200.0 }
         point.channels.forEach { (channel, block) ->
             println("$channel: $block")
-            if(block is ParentBlock){
-                block.blocks.toList().forEach{
+            if (block is ParentBlock) {
+                block.blocks.toList().forEach {
                     println("\t${it.channel}:${it.eventsCount}")
                 }
             }


@@ -1,13 +1,15 @@
 plugins {
-    kotlin("multiplatform")
-    id("space.kscience.gradle.common")
+    id("space.kscience.gradle.mpp")
+    id("org.jetbrains.compose")
     `maven-publish`
 }

-val visionForgeVersion = "0.2.0-dev-24"
+val visionForgeVersion: String by rootProject.extra
+val production: Boolean by rootProject.extra(true)

 kotlin {
-    js{
+    js(IR) {
         browser {
             webpackTask {
                 this.outputFileName = "js/numass-web.js"
@@ -16,36 +18,51 @@ kotlin {
         binaries.executable()
     }

-    afterEvaluate {
-        val jsBrowserDistribution by tasks.getting
-        tasks.getByName<ProcessResources>("jvmProcessResources") {
-            dependsOn(jsBrowserDistribution)
-            afterEvaluate {
-                from(jsBrowserDistribution)
-            }
-        }
-    }
-
     sourceSets {
         commonMain {
             dependencies {
                 implementation(project(":numass-data-model"))
+                implementation("space.kscience:visionforge-core:$visionForgeVersion")
                 implementation("space.kscience:visionforge-plotly:$visionForgeVersion")
             }
         }
         jvmMain {
             dependencies {
                 implementation(project(":numass-data-proto"))
-                implementation("space.kscience:visionforge-server:$visionForgeVersion")
+                implementation("io.ktor:ktor-server-cio:2.1.3")
+                implementation("io.ktor:ktor-server-html-builder-jvm:2.1.3")
+                implementation("space.kscience:visionforge-plotly:$visionForgeVersion")
             }
         }
+        jsMain {
+            dependencies {
+                implementation(compose.web.core)
+            }
+        }
     }
 }

-kscience{
+afterEvaluate {
+    val distributionTask = if (production) {
+        tasks.getByName("jsBrowserDistribution")
+    } else {
+        tasks.getByName("jsBrowserDevelopmentExecutableDistribution")
+    }
+
+    tasks.getByName<ProcessResources>("jvmProcessResources") {
+        dependsOn(distributionTask)
+        from(distributionTask)
+        include("**/*.js")
+        if (production) {
+            include("**/*.map")
+        }
+    }
+}
+
+kscience {
     useSerialization {
         json()
     }
+    withContextReceivers()
 }


@@ -8,32 +8,27 @@ import space.kscience.dataforge.context.PluginFactory
 import space.kscience.dataforge.context.PluginTag
 import space.kscience.dataforge.meta.Meta
 import space.kscience.visionforge.Vision
-import space.kscience.visionforge.VisionBase
-import space.kscience.visionforge.VisionGroupBase
 import space.kscience.visionforge.VisionPlugin
-import space.kscience.visionforge.plotly.PlotlyPlugin
 import kotlin.reflect.KClass

-public class NumassCommonPlugin(meta: Meta) : VisionPlugin(meta) {
+public class NumassCommonPlugin(meta: Meta = Meta.EMPTY) : VisionPlugin(meta) {
     override val tag: PluginTag get() = Companion.tag
-    public val plotlyPlugin: PlotlyPlugin by require(PlotlyPlugin)

     override val visionSerializersModule: SerializersModule get() = numassSerializersModule

     public companion object : PluginFactory<NumassCommonPlugin> {
         override val tag: PluginTag = PluginTag("numass.common", "ru.inr.mass")
         override val type: KClass<NumassCommonPlugin> = NumassCommonPlugin::class
-        override fun invoke(meta: Meta, context: Context): NumassCommonPlugin = NumassCommonPlugin()
+        override fun build(context: Context, meta: Meta): NumassCommonPlugin = NumassCommonPlugin()

-        private val numassSerializersModule = SerializersModule {
+        internal val numassSerializersModule = SerializersModule {
             polymorphic(Vision::class) {
-                subclass(VisionBase.serializer())
-                subclass(VisionGroupBase.serializer())
                 subclass(VisionOfNumassHv.serializer())
                 subclass(VisionOfNumassPoint.serializer())
-                subclass(VisionOfNumassHv.serializer())
                 subclass(VisionOfNumassSet.serializer())
+                subclass(VisionOfNumassSetRef.serializer())
+                subclass(VisionOfNumassRepository.serializer())
             }
         }
     }
 }


@@ -1,24 +1,20 @@
 package ru.inr.mass.data.server

-import kotlinx.coroutines.flow.collect
 import kotlinx.serialization.Serializable
 import ru.inr.mass.data.api.NumassBlock
 import ru.inr.mass.data.api.NumassPoint
-import ru.inr.mass.data.api.NumassSet
-import ru.inr.mass.data.api.NumassSet.Companion.NUMASS_HV_TARGET
+import ru.inr.mass.data.api.channels
 import ru.inr.mass.data.proto.HVData
 import ru.inr.mass.data.proto.HVEntry
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.names.NameToken
-import space.kscience.dataforge.provider.top
-import space.kscience.visionforge.VisionBase
-import space.kscience.visionforge.VisionGroupBase
+import space.kscience.visionforge.AbstractVision

-public typealias SimpleAmplitudeSpectrum = Map<UShort, UInt>
+public typealias SimpleAmplitudeSpectrum = Map<Short, UInt>

-private suspend fun NumassBlock.simpleAmplitudeSpectrum(): SimpleAmplitudeSpectrum {
-    val res = mutableMapOf<UShort, UInt>()
+internal suspend fun NumassBlock.simpleAmplitudeSpectrum(): SimpleAmplitudeSpectrum {
+    val res = mutableMapOf<Short, UInt>()
     events.collect {
         res[it.amplitude] = (res[it.amplitude] ?: 0U) + 1U
     }
@@ -31,36 +27,21 @@ public class VisionOfNumassPoint(
     public val index: Int,
     public val voltage: Double,
     public val spectra: Map<String, SimpleAmplitudeSpectrum>,
-) : VisionBase()
+) : AbstractVision()

 public suspend fun NumassPoint.toVision(): VisionOfNumassPoint = VisionOfNumassPoint(
     meta,
     index,
     voltage,
-    getChannels().entries.associate { (k, v) ->
+    channels.entries.associate { (k, v) ->
         k.toString() to v.simpleAmplitudeSpectrum()
     }
 )

 @Serializable
-public class VisionOfNumassHv(public val hv: HVData) : VisionBase(), Iterable<HVEntry> {
+public class VisionOfNumassHv(public val hv: HVData) : AbstractVision(), Iterable<HVEntry> {
     override fun iterator(): Iterator<HVEntry> = hv.iterator()
 }

 private val VisionOfNumassPoint.token: NameToken get() = NameToken("point", index.toString())
-
-@Serializable
-public class VisionOfNumassSet(public val points: List<VisionOfNumassPoint>) : VisionBase() {
-//    init {
-//        points.forEach {
-//            //childrenInternal[it.token] = it
-//        }
-//
-//    }
-}
-
-public suspend fun NumassSet.toVision(): VisionOfNumassSet = VisionOfNumassSet(points.map { it.toVision() }).apply {
-    this@toVision.top<HVData>(NUMASS_HV_TARGET).forEach { (key, hv) ->
-//        set(key, VisionOfNumassHv(hv))
-    }
-}


@@ -0,0 +1,35 @@
package ru.inr.mass.data.server
import kotlinx.serialization.Serializable
import ru.inr.mass.data.api.NumassSet
import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.data.DataTreeItem
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.NameToken
import space.kscience.dataforge.names.plus
import space.kscience.visionforge.AbstractVision
import space.kscience.visionforge.AbstractVisionGroup
@Serializable
public class VisionOfNumassRepository : AbstractVisionGroup() {
override fun createGroup(): VisionOfNumassRepository = VisionOfNumassRepository()
}
@Serializable
public class VisionOfNumassSetRef(
override val name: Name,
) : Named, AbstractVision()
public suspend fun VisionOfNumassRepository(
repoName: Name,
tree: DataTree<NumassSet>,
): VisionOfNumassRepository = VisionOfNumassRepository().apply {
tree.items.forEach { (key: NameToken, value) ->
children[key] = when (value) {
is DataTreeItem.Leaf -> VisionOfNumassSetRef(repoName + key)
is DataTreeItem.Node -> VisionOfNumassRepository(repoName + key, value.tree)
}
}
//TODO listen to changes
}


@@ -0,0 +1,38 @@
package ru.inr.mass.data.server
import kotlinx.serialization.Serializable
import ru.inr.mass.data.api.NumassSet
import ru.inr.mass.data.proto.HVData
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.plus
import space.kscience.visionforge.AbstractVision
@Serializable
public data class PointRef(
override val name: Name,
public val pointMeta: Meta,
public val index: Int,
public val voltage: Double,
) : Named
@Serializable
public class VisionOfNumassSet(
public val meta: Meta,
public val points: List<PointRef>,
public val hvData: HVData? = null,
) : AbstractVision()
public fun VisionOfNumassSet(setName: Name, set: NumassSet): VisionOfNumassSet = VisionOfNumassSet(
set.meta,
set.points.map { point ->
PointRef(
setName + point.index.toString(),
point.meta,
point.index,
point.voltage
)
},
set.hvData
)
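
A small sketch of wrapping an already loaded set into the new vision; the set name is illustrative:

import ru.inr.mass.data.api.NumassSet
import ru.inr.mass.data.server.VisionOfNumassSet
import space.kscience.dataforge.names.asName

// builds point references plus the optional HV data from an already loaded set
fun NumassSet.toSetVision(): VisionOfNumassSet = VisionOfNumassSet("set_1".asName(), this)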


@@ -8,17 +8,22 @@ import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.PluginFactory
 import space.kscience.dataforge.context.PluginTag
 import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.asName
 import space.kscience.plotly.models.LineShape
 import space.kscience.plotly.models.ScatterMode
-import space.kscience.plotly.plot
+import space.kscience.plotly.plotElement
 import space.kscience.plotly.scatter
 import space.kscience.visionforge.ElementVisionRenderer
 import space.kscience.visionforge.Vision
+import space.kscience.visionforge.VisionClient
+import space.kscience.visionforge.plotly.PlotlyPlugin
 import kotlin.reflect.KClass

 public class NumassJsPlugin : AbstractPlugin(), ElementVisionRenderer {
+    public val client: VisionClient by require(VisionClient)
     public val numassCommon: NumassCommonPlugin by require(NumassCommonPlugin)
-    private val plotly = numassCommon.plotlyPlugin
+    public val plotly: PlotlyPlugin by require(PlotlyPlugin)

     override val tag: PluginTag get() = Companion.tag
@@ -27,7 +32,12 @@ public class NumassJsPlugin : AbstractPlugin(), ElementVisionRenderer {
         else -> ElementVisionRenderer.ZERO_RATING
     }

-    override fun render(element: Element, vision: Vision, meta: Meta) {
+    override fun content(target: String): Map<Name, Any> = when (target) {
+        ElementVisionRenderer.TYPE -> mapOf("numass".asName() to this)
+        else -> super.content(target)
+    }
+
+    override fun render(element: Element, name: Name, vision: Vision, meta: Meta) {
         when (vision) {
             is VisionOfNumassHv -> element.append {
                 h1 { +"HV" }
@@ -47,22 +57,25 @@ public class NumassJsPlugin : AbstractPlugin(), ElementVisionRenderer {
                 }
             }
+
             is VisionOfNumassPoint -> element.append {
-                h1{ +"Point"}
-                plot {
+                h1 { +"Point" }
+                plotElement {
                     vision.spectra.forEach { (channel, spectrum) ->
+                        val pairs = spectrum.entries.sortedBy { it.key }
                         scatter {
-                            name = channel
+                            this.name = channel
                             mode = ScatterMode.lines
                             line {
                                 shape = LineShape.hv
                             }
-                            x.numbers = spectrum.keys.map { it.toInt() }
-                            y.numbers = spectrum.values.map { it.toInt() }
+                            x.numbers = pairs.map { it.key.toInt() }
+                            y.numbers = pairs.map { it.value.toInt() }
                         }
                     }
                 }
             }
+
             is VisionOfNumassSet -> {}
         }
     }
@@ -71,6 +84,8 @@ public class NumassJsPlugin : AbstractPlugin(), ElementVisionRenderer {
     public companion object : PluginFactory<NumassJsPlugin> {
         override val tag: PluginTag = PluginTag("numass.js", "ru.inr.mass")
         override val type: KClass<NumassJsPlugin> = NumassJsPlugin::class
-        override fun invoke(meta: Meta, context: Context): NumassJsPlugin = NumassJsPlugin()
+
+        override fun build(context: Context, meta: Meta): NumassJsPlugin = NumassJsPlugin()
     }
 }


@ -0,0 +1,27 @@
package ru.inr.mass.data.server
import org.jetbrains.compose.web.dom.Div
import org.jetbrains.compose.web.renderComposable
import org.w3c.dom.Document
import space.kscience.dataforge.context.Context
import space.kscience.visionforge.Application
public class NumassViewerApplication : Application {
private val context = Context("NumassViewer") {
plugin(NumassJsPlugin)
}
override fun start(document: Document, state: Map<String, Any>) {
renderComposable(rootElementId = "application") {
Div({ classes("container") }) {
Div({ classes("row") })
Div({ classes("col-md-3") }) {
}
Div({ classes("col-md-9") }) {
}
}
}
}
}


@ -1,53 +0,0 @@
package ru.inr.mass.data.server
import kotlinx.coroutines.runBlocking
import kotlinx.html.div
import kotlinx.html.h1
import ru.inr.mass.data.api.NumassPoint
import ru.inr.mass.data.proto.NumassProtoPlugin
import ru.inr.mass.data.proto.readNumassPointFile
import space.kscience.dataforge.context.Context
import space.kscience.visionforge.three.server.close
import space.kscience.visionforge.three.server.serve
import space.kscience.visionforge.three.server.show
import space.kscience.visionforge.visionManager
import java.nio.file.Path
public fun main() {
val context = Context("Numass") {
plugin(NumassProtoPlugin)
plugin(NumassCommonPlugin)
}
val pointPath = Path.of("C:\\Users\\altavir\\Desktop\\p20211122173034(20s).dat")
val point: NumassPoint = context.readNumassPointFile(pointPath)!!
val visionOfNumass = runBlocking {
point.toVision()
}
val server = context.visionManager.serve {
//use client library
useNumassWeb()
//use css
//useCss("css/styles.css")
page {
div("flex-column") {
h1 { +"Satellite detector demo" }
//vision(visionOfNumass)
}
}
}
server.show()
println("Enter 'exit' to close server")
while (readLine() != "exit") {
//
}
server.close()
}


@ -0,0 +1,142 @@
package ru.inr.mass.data.server
import io.ktor.http.ContentType
import io.ktor.http.HttpStatusCode
import io.ktor.server.application.Application
import io.ktor.server.application.call
import io.ktor.server.cio.CIO
import io.ktor.server.engine.embeddedServer
import io.ktor.server.html.respondHtml
import io.ktor.server.http.content.resources
import io.ktor.server.response.respondText
import io.ktor.server.routing.get
import io.ktor.server.routing.routing
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.runBlocking
import kotlinx.coroutines.withContext
import kotlinx.html.*
import ru.inr.mass.data.api.NumassPoint
import ru.inr.mass.data.proto.NumassDirectorySet
import ru.inr.mass.data.proto.NumassProtoPlugin
import ru.inr.mass.data.proto.readRepository
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.fetch
import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.data.await
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.NameToken
import space.kscience.dataforge.names.cutLast
import space.kscience.dataforge.names.lastOrNull
public fun main() {
val port = 7777
val host = "localhost"
embeddedServer(CIO, port, host, module = Application::numassModule).start()
}
public fun Application.numassModule(repositoryName: String = "D:\\Work\\Numass\\data\\test") {
val context = Context("Numass") {
plugin(NumassProtoPlugin)
plugin(NumassCommonPlugin)
}
val numassProto = context.fetch(NumassProtoPlugin)
val numassCommon = context.fetch(NumassCommonPlugin)
val visionManager = numassCommon.visionManager
val repository: DataTree<NumassDirectorySet> = runBlocking { numassProto.readRepository(repositoryName) }
routing {
resources()
get("/") {
call.respondHtml {
head {
meta { charset = "utf-8" }
meta {
name = "viewport"
content = "width=device-width, initial-scale=1"
}
title("Numass Data Viewer")
link {
href = "https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/css/bootstrap.min.css"
rel = "stylesheet"
attributes["integrity"] =
"sha384-rbsA2VBKQhggwzxH7pPCaAqO46MgnOM80zW1RWuH61DGLwZJEdK2Kadq2F9CUG65"
attributes["crossorigin"] = "anonymous"
}
}
body {
div {
id = "application"
}
script {
src = "js/numass-web.js"
}
script {
src = "https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/js/bootstrap.bundle.min.js"
integrity = "sha384-kenU1KFdBIe4zVF0s0G1M5b4hcpxyD9F7jL+jjXkk+Q2h455rYXK/7HAuoJl+0I4"
attributes["crossorigin"] = "anonymous"
}
}
}
}
get("/repository") {
call.respondText(ContentType.Application.Json) {
visionManager.encodeToString(VisionOfNumassRepository(Name.EMPTY, repository))
}
}
get("/sets/{name...}") {
val setName: Name? = call.parameters.getAll("name")
?.map { NameToken.parse(it) }?.let(::Name)
if (setName == null) {
call.respondText(status = HttpStatusCode.BadRequest) { "Set name is empty" }
return@get
}
val set: NumassDirectorySet? = withContext(Dispatchers.IO) { repository[setName]?.await() }
if (set == null) {
call.respondText(status = HttpStatusCode.NotFound) { "A set with name $setName not found in the repository" }
return@get
}
call.respondText(ContentType.Application.Json) {
visionManager.encodeToString(VisionOfNumassSet(setName, set))
}
}
get("/points/{name...}") {
val fullName: Name? = call.parameters.getAll("name")
?.map { NameToken.parse(it) }?.let(::Name)
if (fullName == null) {
call.respondText(status = HttpStatusCode.BadRequest) { "Point name is empty" }
return@get
}
val setName = fullName.cutLast()
val set: NumassDirectorySet? = withContext(Dispatchers.IO) { repository[setName]?.await() }
if (set == null) {
call.respondText(status = HttpStatusCode.NotFound) { "A set with name $setName not found in the repository" }
return@get
}
val pointIndex: Int? = fullName.lastOrNull()?.body?.toIntOrNull()
val point: NumassPoint? = set.points.find { it.index == pointIndex }
if (point == null) {
call.respondText(status = HttpStatusCode.NotFound) { "A point with name $setName/$pointIndex not found in the repository" }
return@get
}
call.respondText(ContentType.Application.Json) {
visionManager.encodeToString(point.toVision())
}
}
}
//serveVisionData(VisionRoute("/visions", visionManager))
}


@ -4,29 +4,26 @@ import space.kscience.dataforge.context.Context
import space.kscience.dataforge.misc.DFExperimental import space.kscience.dataforge.misc.DFExperimental
import space.kscience.visionforge.html.HtmlVisionFragment import space.kscience.visionforge.html.HtmlVisionFragment
import space.kscience.visionforge.html.ResourceLocation import space.kscience.visionforge.html.ResourceLocation
import space.kscience.visionforge.html.page import space.kscience.visionforge.html.VisionPage
import space.kscience.visionforge.html.scriptHeader import space.kscience.visionforge.html.importScriptHeader
import space.kscience.visionforge.makeFile import space.kscience.visionforge.makeFile
import space.kscience.visionforge.three.server.VisionServer
import space.kscience.visionforge.three.server.useScript
import java.awt.Desktop import java.awt.Desktop
import java.nio.file.Path import java.nio.file.Path
public fun VisionServer.useNumassWeb(): Unit {
useScript("js/numass-web.js")
}
@DFExperimental @DFExperimental
public fun Context.makeNumassWebFile( public fun Context.makeNumassWebFile(
content: HtmlVisionFragment,
path: Path? = null, path: Path? = null,
title: String = "VisionForge Numass page", title: String = "VisionForge Numass page",
resourceLocation: ResourceLocation = ResourceLocation.SYSTEM, resourceLocation: ResourceLocation = ResourceLocation.SYSTEM,
show: Boolean = true, show: Boolean = true,
content: HtmlVisionFragment,
): Unit { ): Unit {
val actualPath = page(title, content = content).makeFile(path) { actualPath -> val actualPath = VisionPage(this, content = content).makeFile(path) { actualPath: Path ->
mapOf("numassWeb" to scriptHeader("js/numass-web.js", resourceLocation, actualPath)) mapOf(
"title" to VisionPage.title(title),
"numassWeb" to VisionPage.importScriptHeader("js/numass-web.js", resourceLocation, actualPath)
)
} }
if (show) Desktop.getDesktop().browse(actualPath.toFile().toURI()) if (show) Desktop.getDesktop().browse(actualPath.toFile().toURI())
} }


@ -0,0 +1,28 @@
package ru.inr.mass.data.server
import ru.inr.mass.data.api.NumassPoint
import ru.inr.mass.data.proto.NumassProtoPlugin
import ru.inr.mass.data.proto.readNumassPointFile
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.fetch
import java.nio.file.Path
@Suppress("OPT_IN_USAGE")
public suspend fun main() {
val context = Context("Numass") {
plugin(NumassProtoPlugin)
plugin(NumassCommonPlugin)
}
val numassProto = context.fetch(NumassProtoPlugin)
val pointPath = Path.of("D:\\Work\\Numass\\data\\test\\set_7\\p120(30s)(HV1=13300)")
val point: NumassPoint = numassProto.readNumassPointFile(pointPath)!!
val visionOfNumass: VisionOfNumassPoint = point.toVision()
context.makeNumassWebFile {
vision { visionOfNumass }
}
}


@ -7,8 +7,8 @@ package ru.inr.mass.models
import space.kscience.kmath.expressions.Symbol import space.kscience.kmath.expressions.Symbol
import space.kscience.kmath.expressions.symbol import space.kscience.kmath.expressions.symbol
import space.kscience.kmath.functions.Function1D
import space.kscience.kmath.functions.PiecewisePolynomial import space.kscience.kmath.functions.PiecewisePolynomial
import space.kscience.kmath.functions.UnivariateFunction
import space.kscience.kmath.functions.asFunction import space.kscience.kmath.functions.asFunction
import space.kscience.kmath.integration.* import space.kscience.kmath.integration.*
import space.kscience.kmath.operations.DoubleField import space.kscience.kmath.operations.DoubleField
@ -84,7 +84,7 @@ public class NumassTransmission(
public val trap: Symbol by symbol public val trap: Symbol by symbol
public val thickness: Symbol by symbol public val thickness: Symbol by symbol
private val cache = HashMap<Int, UnivariateFunction<Double>>() private val cache = HashMap<Int, Function1D<Double>>()
private const val ION_POTENTIAL = 15.4//eV private const val ION_POTENTIAL = 15.4//eV
@ -130,7 +130,7 @@ public class NumassTransmission(
return exp(-x) return exp(-x)
} }
private fun getCachedSpectrum(order: Int): UnivariateFunction<Double> { private fun getCachedSpectrum(order: Int): Function1D<Double> {
return when { return when {
order <= 0 -> error("Non-positive loss cache order") order <= 0 -> error("Non-positive loss cache order")
order == 1 -> singleScatterFunction order == 1 -> singleScatterFunction
@ -147,7 +147,7 @@ public class NumassTransmission(
* @param order * @param order
* @return * @return
*/ */
private fun getLoss(order: Int): UnivariateFunction<Double> = getCachedSpectrum(order) private fun getLoss(order: Int): Function1D<Double> = getCachedSpectrum(order)
private fun getLossProbDerivs(x: Double): List<Double> { private fun getLossProbDerivs(x: Double): List<Double> {
val res = ArrayList<Double>() val res = ArrayList<Double>()
@ -260,7 +260,7 @@ public class NumassTransmission(
* @return * @return
*/ */
@Synchronized @Synchronized
private fun getNextLoss(margin: Double, loss: UnivariateFunction<Double>): PiecewisePolynomial<Double> { private fun getNextLoss(margin: Double, loss: Function1D<Double>): PiecewisePolynomial<Double> {
val res = { x: Double -> val res = { x: Double ->
DoubleField.simpsonIntegrator.integrate(5.0..margin, IntegrandMaxCalls(200)) { y -> DoubleField.simpsonIntegrator.integrate(5.0..margin, IntegrandMaxCalls(200)) { y ->
loss(x - y) * singleScatterFunction(y) loss(x - y) * singleScatterFunction(y)
@ -324,7 +324,7 @@ public class NumassTransmission(
private val w1 = 1.85 private val w1 = 1.85
private val w2 = 12.5 private val w2 = 12.5
public val singleScatterFunction: UnivariateFunction<Double> = { eps: Double -> public val singleScatterFunction: Function1D<Double> = { eps: Double ->
when { when {
eps <= 0 -> 0.0 eps <= 0 -> 0.0
eps <= b -> { eps <= b -> {


@ -1,15 +1,15 @@
package ru.inr.mass.models package ru.inr.mass.models
import space.kscience.kmath.functions.Function1D
import space.kscience.kmath.functions.PiecewisePolynomial import space.kscience.kmath.functions.PiecewisePolynomial
import space.kscience.kmath.functions.UnivariateFunction
import space.kscience.kmath.interpolation.SplineInterpolator import space.kscience.kmath.interpolation.SplineInterpolator
import space.kscience.kmath.interpolation.interpolatePolynomials import space.kscience.kmath.interpolation.interpolatePolynomials
import space.kscience.kmath.operations.DoubleField import space.kscience.kmath.operations.DoubleField
import space.kscience.kmath.structures.DoubleBuffer import space.kscience.kmath.structures.DoubleBuffer
import kotlin.math.abs import kotlin.math.abs
public fun UnivariateFunction<Double>.cache( public fun Function1D<Double>.cache(
range: ClosedFloatingPointRange<Double>, range: ClosedFloatingPointRange<Double>,
numCachePoints: Int, numCachePoints: Int,
): PiecewisePolynomial<Double> { ): PiecewisePolynomial<Double> {


@ -1,6 +1,6 @@
plugins { plugins {
id("space.kscience.gradle.jvm") id("space.kscience.gradle.jvm")
id("com.github.johnrengelman.shadow") version "7.1.1" id("com.github.johnrengelman.shadow") version "7.1.2"
`maven-publish` `maven-publish`
} }
@ -9,7 +9,7 @@ kotlin {
} }
val dataforgeVersion: String by rootProject.extra val dataforgeVersion: String by rootProject.extra
val plotlyVersion: String by rootProject.extra("0.5.0") val visionForgeVersion: String by rootProject.extra
val kmathVersion: String by rootProject.extra val kmathVersion: String by rootProject.extra
val tablesVersion: String by rootProject.extra val tablesVersion: String by rootProject.extra
@ -18,12 +18,10 @@ dependencies {
implementation(projects.numassModel) implementation(projects.numassModel)
implementation(projects.numassAnalysis) implementation(projects.numassAnalysis)
implementation("space.kscience:dataforge-workspace:$dataforgeVersion") implementation("space.kscience:dataforge-workspace:$dataforgeVersion")
implementation("space.kscience:plotlykt-jupyter:$plotlyVersion")
implementation("space.kscience:kmath-jupyter:$kmathVersion") implementation("space.kscience:kmath-jupyter:$kmathVersion")
implementation("space.kscience:tables-kt:$tablesVersion") implementation("space.kscience:tables-kt:$tablesVersion")
// implementation(platform("com.google.cloud:libraries-bom:23.0.0")) implementation("space.kscience:visionforge-plotly:$visionForgeVersion")
// implementation("com.google.cloud:google-cloud-nio:0.123.10")
// implementation("com.google.auth:google-auth-library-oauth2-http:1.3.0")
} }
kscience{ kscience{


@ -5,6 +5,7 @@ import kotlinx.coroutines.runBlocking
import kotlinx.html.* import kotlinx.html.*
import kotlinx.html.stream.createHTML import kotlinx.html.stream.createHTML
import org.jetbrains.kotlinx.jupyter.api.HTML import org.jetbrains.kotlinx.jupyter.api.HTML
import org.jetbrains.kotlinx.jupyter.api.declare
import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration
import ru.inr.mass.data.api.NumassBlock import ru.inr.mass.data.api.NumassBlock
import ru.inr.mass.data.api.NumassFrame import ru.inr.mass.data.api.NumassFrame
@ -23,7 +24,8 @@ import space.kscience.plotly.toPage
internal class NumassJupyter : JupyterIntegration() { internal class NumassJupyter : JupyterIntegration() {
override fun Builder.onLoaded() { override fun Builder.onLoaded() {
repositories( repositories(
"https://repo.kotlin.link" "https://repo.kotlin.link",
"https://maven.pkg.jetbrains.space/spc/p/sci/dev"
) )
import( import(
@ -39,6 +41,9 @@ internal class NumassJupyter : JupyterIntegration() {
import<Numass>() import<Numass>()
onLoaded {
declare("Numass" to Numass, "workspace" to Numass.workspace)
}
render<NumassBlock> { render<NumassBlock> {
HTML(Plotly.plotNumassBlock(it).toPage().render()) HTML(Plotly.plotNumassBlock(it).toPage().render())
@ -60,7 +65,7 @@ internal class NumassJupyter : JupyterIntegration() {
} }
render<DataTree<NumassDirectorySet>> { tree -> render<DataTree<NumassDirectorySet>> { tree ->
HTML(createHTML().div { numassTree(tree)}) HTML(createHTML().div { numassTree(tree) })
} }
} }
} }
@ -68,7 +73,7 @@ internal class NumassJupyter : JupyterIntegration() {
private fun FlowContent.numassTree(tree: DataTree<NumassDirectorySet>) { private fun FlowContent.numassTree(tree: DataTree<NumassDirectorySet>) {
ul { ul {
runBlocking { runBlocking {
tree.items().forEach { (token, treeItem) -> tree.items.forEach { (token, treeItem) ->
li { li {
p { +token.toString() } p { +token.toString() }
when (treeItem) { when (treeItem) {


@ -5,13 +5,13 @@ import ru.inr.mass.workspace.Numass.readRepository
import ru.inr.mass.workspace.plotNumassSet import ru.inr.mass.workspace.plotNumassSet
import space.kscience.dataforge.data.DataTree import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.data.await import space.kscience.dataforge.data.await
import space.kscience.dataforge.data.getData import space.kscience.dataforge.data.get
import space.kscience.plotly.Plotly import space.kscience.plotly.Plotly
import space.kscience.plotly.makeFile import space.kscience.plotly.makeFile
suspend fun main() { suspend fun main() {
val repo: DataTree<NumassDirectorySet> = readRepository("D:\\Work\\Numass\\data\\2018_04") val repo: DataTree<NumassDirectorySet> = readRepository("D:\\Work\\Numass\\data\\2018_04")
val testSet = repo.getData("Adiabacity_19.set_3")?.await() ?: error("Not found") val testSet = repo["Adiabacity_19.set_3"]?.await() ?: error("Not found")
Plotly.plotNumassSet(testSet).makeFile() Plotly.plotNumassSet(testSet).makeFile()
} }


@ -0,0 +1,40 @@
package ru.inr.mass.scripts
import ru.inr.mass.data.api.NumassBlock
import ru.inr.mass.data.api.channels
import ru.inr.mass.workspace.Numass
import ru.inr.mass.workspace.listFrames
import space.kscience.dataforge.io.write
import space.kscience.dataforge.io.writeUtf8String
import java.nio.file.Files
import kotlin.io.path.createDirectories
import kotlin.io.path.writeText
fun main() {
val point = Numass.readPoint("D:\\Work\\Numass\\data\\test\\set_7\\p59(30s)(HV1=14000)")
val channels: Map<Int, NumassBlock> = point.channels
//Initialize and create target directory
val targetDir = Files.createTempDirectory("numass_p101(30s)(HV1=14150)")
targetDir.createDirectories()
//dumping meta
targetDir.resolve("meta").writeText(point.meta.toString())
val pointTime = point.startTime
channels.forEach { (key, block) ->
targetDir.resolve("channel-$key.csv").write {
block.listFrames().forEach { frame ->
// val frameTime = pointTime.plus(frame.timeOffset, DateTimeUnit.NANOSECOND)
// writeUtf8String("$frameTime,")
writeUtf8String("${frame.timeOffset},")
val line = frame.signal.joinToString(",", postfix = "\n")
writeUtf8String(line)
}
}
}
println("Exported to $targetDir")
}
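// Each channel-<key>.csv row written above starts with the frame time offset followed by the raw
// signal samples joined by commas, e.g. (values are purely illustrative): 1234567,512,515,530,621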


@ -1,8 +1,10 @@
package ru.inr.mass.scripts package ru.inr.mass.scripts
import ru.inr.mass.data.analysis.NumassEventExtractor import ru.inr.mass.data.analysis.NumassEventExtractor
import ru.inr.mass.data.analysis.amplitudeSpectrum import ru.inr.mass.data.analysis.energySpectrum
import ru.inr.mass.data.api.NumassEvent
import ru.inr.mass.data.api.NumassPoint import ru.inr.mass.data.api.NumassPoint
import ru.inr.mass.data.api.channel
import ru.inr.mass.data.proto.NumassDirectorySet import ru.inr.mass.data.proto.NumassDirectorySet
import ru.inr.mass.models.* import ru.inr.mass.models.*
import ru.inr.mass.workspace.Numass import ru.inr.mass.workspace.Numass
@ -38,6 +40,30 @@ fun Spectrum.convolve(range: ClosedRange<Double>, function: (Double) -> Double):
}.value }.value
} }
/**
* E = A * ADC + B
* Channel A B
* 0 0.01453 1.3
* 2 0.01494 -4.332
* 3 0.01542 -5.183
* 4 0.01573 -2.115
* 5 0.0152 -3.808
* 6 0.0155 -3.015
* 7 0.01517 -0.5429
*/
val calibration: (NumassEvent) -> Double = {
when (it.channel) {
0 -> 0.01453 * it.amplitude + 1.3
2 -> 0.01494 * it.amplitude - 4.332
3 -> 0.01542 * it.amplitude - 5.183
4 -> 0.01573 * it.amplitude - 2.115
5 -> 0.0152 * it.amplitude - 3.808
6 -> 0.0155 * it.amplitude - 3.015
7 -> 0.01517 * it.amplitude - 0.5429
else -> error("Unrecognized channel ${it.channel}")
} * 1000.0
}
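// A hypothetical spot check of the linear calibration above; the ADC amplitude 1200 is an assumed
// example value, not taken from real data. For channel 4, E = (0.01573 * ADC - 2.115) keV.
fun calibrationSpotCheck() {
    val amplitude = 1200
    val energyEv = (0.01573 * amplitude - 2.115) * 1000.0
    println("channel 4, ADC $amplitude -> ${energyEv / 1000.0} keV") // ~16.76 keV
}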
private val neutrinoSpectrum = NumassBeta.withFixedX(0.0) private val neutrinoSpectrum = NumassBeta.withFixedX(0.0)
private val args: Map<Symbol, Double> = mapOf( private val args: Map<Symbol, Double> = mapOf(
@ -59,26 +85,27 @@ suspend fun main() {
val hv = 16900.0 val hv = 16900.0
//select point number 2 (U = 16900 V) from each directory //select point number 2 (U = 16900 V) from each directory
val points: Map<NameToken, NumassPoint?> = repo.items().mapValues { val points: Map<NameToken, NumassPoint?> = repo.items.mapValues {
val directory = it.value.data?.await() val directory = it.value.data?.await()
val point = directory?.points?.find { point -> point.voltage == hv } val point = directory?.points?.find { point -> point.voltage == hv }
point point
} }
val spectrum: Map<Short, ULong> = points.values.first()!! val spectrum: Map<Double, Long> = points.values.first()!!
.amplitudeSpectrum(NumassEventExtractor.TQDC) .energySpectrum(NumassEventExtractor.TQDC, calibration)
.amplitudes.toSortedMap() .filter { it.key > 9000.0 }
.toSortedMap()
//the channel of spectrum peak position // //the channel of spectrum peak position
val argmax = spectrum.maxByOrNull { it.value }!!.key // val argmax = spectrum.maxByOrNull { it.value }!!.key
//
// convert channel to energy // // convert channel to energy
fun Short.toEnergy(): Double = toDouble() / argmax * gunEnergy // fun Short.toEnergy(): Double = toDouble() / argmax * gunEnergy
val norm = spectrum.values.sum().toDouble() val norm = spectrum.values.sum().toDouble()
val interpolated: PiecewisePolynomial<Double> = LinearInterpolator(DoubleField).interpolatePolynomials( val interpolated: PiecewisePolynomial<Double> = LinearInterpolator(DoubleField).interpolatePolynomials(
spectrum.keys.map { it.toEnergy() - gunEnergy }.asBuffer(), spectrum.keys.map { it - gunEnergy }.asBuffer(),
spectrum.values.map { it.toDouble() / norm }.asBuffer() spectrum.values.map { it.toDouble() / norm }.asBuffer()
) )
@ -93,7 +120,7 @@ suspend fun main() {
Plotly.plot { Plotly.plot {
scatter { scatter {
name = "gun" name = "gun"
x.numbers = spectrum.keys.map { it.toEnergy() } x.numbers = spectrum.keys
y.numbers = spectrum.values.map { it.toDouble() / norm } y.numbers = spectrum.values.map { it.toDouble() / norm }
} }
@ -101,15 +128,17 @@ suspend fun main() {
name = "convoluted" name = "convoluted"
x.buffer = 0.0..19000.0 step 100.0 x.buffer = 0.0..19000.0 step 100.0
y.numbers = x.doubles.map { model(it, args) } y.numbers = x.doubles.map { model(it, args) }
y.numbers = y.doubles.map { it / y.doubles.maxOrNull()!! } val yNorm = y.doubles.maxOrNull()!!
y.numbers = y.doubles.map { it / yNorm }
} }
scatter { scatter {
name = "tritium" name = "tritium"
val tritiumSpectrum = tritiumData.amplitudeSpectrum(NumassEventExtractor.TQDC).amplitudes.toSortedMap() val tritiumSpectrum = tritiumData.energySpectrum(NumassEventExtractor.TQDC, calibration).toSortedMap()
x.numbers = tritiumSpectrum.keys.map { it.toEnergy() } x.numbers = tritiumSpectrum.keys
y.numbers = tritiumSpectrum.values.map { it.toDouble()} y.numbers = tritiumSpectrum.values.map { it.toDouble() }
y.numbers = y.doubles.map { it / y.doubles.maxOrNull()!! } val yNorm = y.doubles.maxOrNull()!!
y.numbers = y.doubles.map { it / yNorm }
} }
}.makeFile() }.makeFile()
} }


@ -0,0 +1,63 @@
package ru.inr.mass.scripts
import ru.inr.mass.workspace.buffer
import space.kscience.kmath.functions.asFunction
import space.kscience.kmath.integration.integrate
import space.kscience.kmath.integration.splineIntegrator
import space.kscience.kmath.integration.value
import space.kscience.kmath.interpolation.interpolatePolynomials
import space.kscience.kmath.interpolation.splineInterpolator
import space.kscience.kmath.operations.DoubleField
import space.kscience.kmath.real.step
import space.kscience.plotly.Plotly
import space.kscience.plotly.layout
import space.kscience.plotly.makeFile
import space.kscience.plotly.models.AxisType
import space.kscience.plotly.scatter
import kotlin.math.PI
import kotlin.math.exp
import kotlin.math.pow
import kotlin.math.sqrt
fun main() {
val backScatteringSpectrum: List<Pair<Double, Double>> = {}.javaClass
.getResource("/simulation/Gun19_E_back_scatt.dat")!!.readText()
.lineSequence().drop(2).mapNotNull {
if (it.isBlank()) return@mapNotNull null
val (e, p) = it.split('\t')
Pair(e.toDouble(), p.toDouble())
}.toList()
val interpolated = DoubleField.splineInterpolator
.interpolatePolynomials(backScatteringSpectrum)
.asFunction(DoubleField, 0.0)
val sigma = 0.3
val detectorResolution: (Double) -> Double = { x ->
1.0 / sqrt(2 * PI) / sigma * exp(-(x / sigma).pow(2) / 2.0)
}
val convoluted: (Double) -> Double = { x ->
DoubleField.splineIntegrator.integrate(-2.0..2.0) { y ->
detectorResolution(y) * interpolated(x - y)
}.value
}
Plotly.plot {
// scatter {
// name = "simulation"
// x.numbers = backScatteringSpectrum.map { 19.0 - it.first }
// y.numbers = backScatteringSpectrum.map { it.second }
// }
scatter {
name = "smeared"
x.buffer = 0.0..20.0 step 0.1
y.numbers = x.doubles.map { convoluted(19.0 - it) * 0.14 / 0.01 + 0.86 * detectorResolution(it - 19.0) }
println(y.doubles.sum() * 0.1) // Norm check
}
layout {
yaxis.type = AxisType.log
}
}.makeFile()
}
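// A hypothetical normalization check, not part of the original script: the Gaussian resolution used
// above should integrate to roughly 1 over a few sigma, which is what the printed norm check relies on.
fun resolutionNormCheck() {
    val sigma = 0.3
    val gauss: (Double) -> Double = { x -> 1.0 / sqrt(2 * PI) / sigma * exp(-(x / sigma).pow(2) / 2.0) }
    val norm = DoubleField.splineIntegrator.integrate(-2.0..2.0) { y -> gauss(y) }.value
    println("Gaussian norm over +/- 2.0: $norm") // expected to be close to 1.0
}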


@ -1,20 +1,22 @@
package ru.inr.mass.scripts package ru.inr.mass.scripts
import kotlinx.coroutines.flow.collect
import ru.inr.mass.data.proto.NumassDirectorySet import ru.inr.mass.data.proto.NumassDirectorySet
import ru.inr.mass.workspace.Numass.readRepository import ru.inr.mass.workspace.Numass.readRepository
import space.kscience.dataforge.data.DataSource
import space.kscience.dataforge.data.DataTree import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.data.filter import space.kscience.dataforge.data.filter
import space.kscience.dataforge.data.forEach
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.string import space.kscience.dataforge.meta.string
suspend fun main() { suspend fun main() {
val repo: DataTree<NumassDirectorySet> = readRepository("D:\\Work\\Numass\\data\\2018_04") val repo: DataTree<NumassDirectorySet> = readRepository("D:\\Work\\Numass\\data\\2018_04")
val filtered = repo.filter { _, data -> val filtered: DataSource<NumassDirectorySet> = repo.filter { _, meta: Meta ->
val operator by data.meta.string() val operator by meta.string()
operator?.startsWith("Vas") ?: false operator?.startsWith("Vas") ?: false
} }
filtered.flowData().collect { filtered.forEach {
println(it) println(it)
} }
} }


@ -1,50 +1,59 @@
package ru.inr.mass.scripts package ru.inr.mass.scripts
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.map import kotlinx.coroutines.flow.map
import kotlinx.coroutines.flow.toList import kotlinx.coroutines.flow.toList
import kotlinx.coroutines.runBlocking import kotlinx.coroutines.runBlocking
import kotlinx.html.code
import kotlinx.html.h2 import kotlinx.html.h2
import kotlinx.html.p import kotlinx.html.p
import kotlinx.html.unsafe
import kotlinx.serialization.json.Json import kotlinx.serialization.json.Json
import ru.inr.mass.data.analysis.NumassEventExtractor
import ru.inr.mass.data.analysis.amplitudeSpectrum
import ru.inr.mass.data.api.NumassFrame import ru.inr.mass.data.api.NumassFrame
import ru.inr.mass.data.api.channels
import ru.inr.mass.workspace.Numass.readDirectory import ru.inr.mass.workspace.Numass.readDirectory
import ru.inr.mass.workspace.Numass.readPoint
import ru.inr.mass.workspace.listFrames import ru.inr.mass.workspace.listFrames
import space.kscience.dataforge.meta.MetaSerializer import space.kscience.dataforge.meta.MetaSerializer
import space.kscience.plotly.* import space.kscience.plotly.*
fun NumassFrame.tqdcAmplitude(): Short { //fun NumassFrame.tqdcAmplitude(): Short {
var max = Short.MIN_VALUE // var max = Short.MIN_VALUE
var min = Short.MAX_VALUE // var min = Short.MAX_VALUE
//
// signal.forEach { sh: Short ->
// if (sh >= max) {
// max = sh
// }
// if (sh <= min) {
// min = sh
// }
// }
//
// return (max - min).toShort()
//}
signal.forEach { sh: Short -> //fun Flow<NumassFrame>.tqdcAmplitudes(): List<Short> = runBlocking {
if (sh >= max) { // map { it.tqdcAmplitude() }.toList()
max = sh //}
}
if (sh <= min) {
min = sh
}
}
return (max - min).toShort() val IntRange.center: Double get() = (endInclusive + start).toDouble() / 2.0
}
fun Flow<NumassFrame>.tqdcAmplitudes(): List<Short> = runBlocking {
map { it.tqdcAmplitude() }.toList()
}
suspend fun main() { suspend fun main() {
//val repo: DataTree<NumassDirectorySet> = readNumassRepository("D:\\Work\\numass-data\\") //val repo: DataTree<NumassDirectorySet> = readNumassRepository("D:\\Work\\numass-data\\")
val directory = readDirectory("D:\\Work\\Numass\\data\\test\\set_7") //val directory = readDirectory("D:\\Work\\Numass\\data\\2021_11\\Tritium_2\\set_11\\")
val point = directory.points.first() val point = readPoint("D:\\Work\\Numass\\data\\2021_11\\Tritium_2\\set_11\\p0(30s)(HV1=14000)")
val channel = point.channels[4]!!
val frames: List<NumassFrame> = point.listFrames() val binning = 16U
val frames: List<NumassFrame> = channel.listFrames()
Plotly.page { Plotly.page {
p { +"${frames.size} frames" } p { +"${frames.size} frames" }
h2 { +"Random frames" } h2 { +"Random frames" }
plot { plot {
val random = kotlin.random.Random(1234) val random = kotlin.random.Random(1234)
repeat(10) { repeat(10) {
val frame = frames.random(random) val frame = frames.random(random)
scatter { scatter {
@ -54,21 +63,46 @@ suspend fun main() {
} }
h2 { +"Analysis" } h2 { +"Analysis" }
plot { plot {
histogram { scatter {
name = "max" name = "max"
x.numbers = frames.map { frame -> frame.signal.maxOrNull() ?: 0 } val spectrum = runBlocking {
channel.amplitudeSpectrum(NumassEventExtractor.EVENTS_ONLY)
}.binned(binning)
x.numbers = spectrum.keys.map { it.center }
y.numbers = spectrum.values.map { it }
} }
histogram { scatter {
name = "max-min" name = "max-min"
val spectrum = runBlocking {
channel.amplitudeSpectrum(NumassEventExtractor.TQDC)
}.binned(binning)
x.numbers = spectrum.keys.map { it.center }
y.numbers = spectrum.values.map { it}
}
scatter {
name = "max-baseline + filter"
val spectrum = runBlocking {
channel.amplitudeSpectrum(NumassEventExtractor.TQDC_V2)
}.binned(binning)
x.numbers = spectrum.keys.map { it.center }
y.numbers = spectrum.values.map { it }
}
histogram {
name = "events"
xbins { xbins {
size = 2.0 size = 2.0
} }
x.numbers = frames.map { it.tqdcAmplitude() } x.numbers = runBlocking { point.events.map { it.amplitude.toInt() }.toList() }
} }
} }
h2 { +"Meta" } h2 { +"Meta" }
p { +Json.encodeToString(MetaSerializer, point.meta) } code {
unsafe {
+Json { prettyPrint = true }.encodeToString(MetaSerializer, point.meta)
}
}
}.makeFile() }.makeFile()
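// The binned(...) extension used above comes from the analysis module. A minimal sketch of an
// equivalent helper over a plain amplitude -> count map; the receiver and return types are assumptions.
fun Map<Short, ULong>.binnedByAmplitude(binSize: UInt): Map<IntRange, ULong> {
    val width = binSize.toInt()
    return entries.groupBy { it.key.toInt() / width }
        .entries.associate { (binIndex, bucket) ->
            val start = binIndex * width
            (start until start + width) to bucket.fold(0UL) { acc, entry -> acc + entry.value }
        }
}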


@ -0,0 +1,22 @@
package ru.inr.mass.scripts
import ru.inr.mass.data.api.NumassPoint
import ru.inr.mass.workspace.Numass
import space.kscience.plotly.Plotly
import space.kscience.plotly.makeFile
import space.kscience.plotly.scatter
fun main() {
val directory = Numass.readDirectory("D:\\Work\\Numass\\data\\test\\set_7\\")
val monitorPoints: List<NumassPoint> = directory.filter { it.voltage == 14000.0 }.sortedBy { it.startTime }
Plotly.plot {
scatter {
x.numbers = monitorPoints.map {
it.startTime.toEpochMilliseconds()
}
y.numbers = monitorPoints.map { it.framesCount }
}
}.makeFile()
}


@ -3,51 +3,41 @@ package ru.inr.mass.workspace
import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.flow.toList import kotlinx.coroutines.flow.toList
import kotlinx.coroutines.runBlocking import kotlinx.coroutines.runBlocking
import kotlinx.coroutines.withContext
import ru.inr.mass.data.api.NumassBlock import ru.inr.mass.data.api.NumassBlock
import ru.inr.mass.data.api.NumassPoint import ru.inr.mass.data.api.NumassPoint
import ru.inr.mass.data.api.NumassSet import ru.inr.mass.data.proto.*
import ru.inr.mass.data.proto.NumassDirectorySet import space.kscience.dataforge.context.fetch
import ru.inr.mass.data.proto.readNumassDirectory import space.kscience.dataforge.data.DataTree
import ru.inr.mass.data.proto.readNumassPointFile import space.kscience.dataforge.workspace.Workspace
import space.kscience.dataforge.data.*
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.NameToken
import java.nio.file.Files
import java.nio.file.Path import java.nio.file.Path
import kotlin.io.path.exists
import kotlin.io.path.isDirectory
import kotlin.io.path.relativeTo
object Numass { object Numass {
fun readDirectory(path: String): NumassDirectorySet = NUMASS.context.readNumassDirectory(path) val workspace = Workspace {
context {
fun readRepository(path: Path): DataTree<NumassDirectorySet> = runBlocking { plugin(NumassWorkspacePlugin)
ActiveDataTree {
@Suppress("BlockingMethodInNonBlockingContext")
withContext(Dispatchers.IO) {
Files.walk(path).filter {
it.isDirectory() && it.resolve("meta").exists()
}.toList().forEach { childPath ->
val name = Name(childPath.relativeTo(path).map { segment ->
NameToken(segment.fileName.toString())
})
val value = NUMASS.context.readNumassDirectory(childPath)
static(name, value, value.meta)
}
}
//TODO add file watcher
} }
} }
fun readRepository(path: String): DataTree<NumassDirectorySet> = readRepository(Path.of(path)) val context get() = workspace.context
fun readPoint(path: String): NumassPoint = NUMASS.context.readNumassPointFile(path) val numassProto by lazy { context.fetch(NumassProtoPlugin) }
?: error("Can't read numass point at $path")
} fun readPoint(path: Path): NumassPoint =
numassProto.readNumassPointFile(path) ?: error("Can't read numass point at $path")
fun readPoint(path: String): NumassPoint =
numassProto.readNumassPointFile(path) ?: error("Can't read numass point at $path")
fun readDirectory(path: Path): NumassDirectorySet = numassProto.readNumassDirectory(path)
fun readDirectory(path: String): NumassDirectorySet = numassProto.readNumassDirectory(path)
fun readRepository(path: Path): DataTree<NumassDirectorySet> =
runBlocking(Dispatchers.IO) { numassProto.readRepository(path) }
fun readRepository(path: String): DataTree<NumassDirectorySet> =
runBlocking(Dispatchers.IO) { numassProto.readRepository(path) }
operator fun DataSet<NumassSet>.get(name: String): NumassSet? = runBlocking {
getData(Name.parse(name))?.await()
} }
fun NumassBlock.listFrames() = runBlocking { frames.toList() } fun NumassBlock.listFrames() = runBlocking { frames.toList() }
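// Hypothetical usage of the Numass facade defined above (the path is an example, not real data):
// val set = Numass.readDirectory("D:\\Work\\Numass\\data\\test\\set_7")
// val firstPoint = set.points.first()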


@ -1,119 +0,0 @@
package ru.inr.mass.workspace
import ru.inr.mass.data.proto.NumassProtoPlugin
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.PluginFactory
import space.kscience.dataforge.context.PluginTag
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.values.Value
import space.kscience.dataforge.workspace.TaskReference
import space.kscience.dataforge.workspace.WorkspacePlugin
import space.kscience.dataforge.workspace.task
import space.kscience.tables.Table
import kotlin.reflect.KClass
class NumassPlugin : WorkspacePlugin() {
override val tag: PluginTag get() = Companion.tag
val numassProtoPlugin by require(NumassProtoPlugin)
// val select by task<NumassSet>(
// descriptor = MetaDescriptor {
// info = "Select data from workspace data pool"
// value("forward", ValueType.BOOLEAN) {
// info = "Select only forward or only backward sets"
// }
// }
// ) {
// val forward = meta["forward"]?.boolean
// val filtered = workspace.data.select<NumassSet> { _, meta ->
// when (forward) {
// true -> meta["iteration_info.reverse"]?.boolean?.not() ?: false
// false -> meta["iteration_info.reverse"]?.boolean ?: false
// else -> true
// }
// }
//
// emit(Name.EMPTY, filtered)
// }
//
// val analyze by task<Table<Value>>(
// MetaDescriptor {
// info = "Count the number of events for each voltage and produce a table with the results"
// }
// ) {
// pipeFrom(select) { set, name, meta ->
// val res = SmartAnalyzer.analyzeSet(set, meta["analyzer"] ?: Meta.EMPTY)
// val outputMeta = meta.toMutableMeta().apply {
// "data" put set.meta
// }
// // context.output.render(res, stage = "numass.analyze", name = name, meta = outputMeta)
// res
// }
// }
val monitorTableTask: TaskReference<Table<Value>> by task {
// descriptor {
// value("showPlot", types = listOf(ValueType.BOOLEAN), info = "Show plot after complete")
// value("monitorPoint", types = listOf(ValueType.NUMBER), info = "The voltage for monitor point")
// }
// model { meta ->
// dependsOn(selectTask, meta)
//// if (meta.getBoolean("monitor.correctForThreshold", false)) {
//// dependsOn(subThresholdTask, meta, "threshold")
//// }
// configure(meta.getMetaOrEmpty("monitor"))
// configure {
// meta.useMeta("analyzer") { putNode(it) }
// setValue("@target", meta.getString("@target", meta.name))
// }
// }
// join<NumassSet, Table> { data ->
// val monitorVoltage = meta.getDouble("monitorPoint", 16000.0);
// val analyzer = SmartAnalyzer()
// val analyzerMeta = meta.getMetaOrEmpty("analyzer")
//
// //val thresholdCorrection = da
// //TODO add separator labels
// val res = ListTable.Builder("timestamp", "count", "cr", "crErr", "index", "set")
// .rows(
// data.values.stream().flatMap { set ->
// set.points.stream()
// .filter { it.voltage == monitorVoltage }
// .parallel()
// .map { point ->
// analyzer.analyzeParent(point, analyzerMeta).edit {
// "index" to point.index
// "set" to set.name
// }
// }
// }
//
// ).build()
//
// if (meta.getBoolean("showPlot", true)) {
// val plot = DataPlot.plot(name, res, Adapters.buildXYAdapter("timestamp", "cr", "crErr"))
// context.plot(plot, name, "numass.monitor") {
// "xAxis.title" to "time"
// "xAxis.type" to "time"
// "yAxis.title" to "Count rate"
// "yAxis.units" to "Hz"
// }
//
// ((context.output["numass.monitor", name] as? PlotOutput)?.frame as? JFreeChartFrame)?.addSetMarkers(data.values)
// }
//
// context.output.render(res, stage = "numass.monitor", name = name, meta = meta)
//
// return@join res;
// }
}
companion object : PluginFactory<NumassPlugin> {
override val tag: PluginTag = PluginTag("numass", "ru.mipt.npm")
override val type: KClass<out NumassPlugin> = NumassPlugin::class
override fun build(context: Context, meta: Meta): NumassPlugin = NumassPlugin()
}
}


@ -0,0 +1,134 @@
package ru.inr.mass.workspace
import ru.inr.mass.data.analysis.NumassAnalyzerParameters
import ru.inr.mass.data.analysis.NumassEventExtractor
import ru.inr.mass.data.analysis.TimeAnalyzer
import ru.inr.mass.data.analysis.analyzeSet
import ru.inr.mass.data.api.NumassSet
import ru.inr.mass.data.proto.NumassProtoPlugin
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.PluginFactory
import space.kscience.dataforge.context.PluginTag
import space.kscience.dataforge.data.filterByType
import space.kscience.dataforge.meta.*
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.meta.descriptors.value
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.workspace.WorkspacePlugin
import space.kscience.dataforge.workspace.pipeFrom
import space.kscience.dataforge.workspace.task
import space.kscience.tables.Table
import kotlin.reflect.KClass
class NumassWorkspacePlugin : WorkspacePlugin() {
override val tag: PluginTag get() = Companion.tag
val numassProtoPlugin by require(NumassProtoPlugin)
val selectSets by task<NumassSet>(
descriptor = MetaDescriptor {
info = "Select data from workspace data pool"
value("forward", ValueType.BOOLEAN) {
info = "Select only forward or only backward sets"
}
}
) {
val forward = meta["forward"]?.boolean
val filtered = workspace.data.filterByType<NumassSet> { _, meta ->
when (forward) {
true -> meta["iteration_info.reverse"]?.boolean?.not() ?: false
false -> meta["iteration_info.reverse"]?.boolean ?: false
else -> true
}
}
node(Name.EMPTY, filtered)
}
val analyzeSets by task<Table<Value>>(
MetaDescriptor {
info = "Count the number of events for each voltage and produce a table with the results"
}
) {
pipeFrom(selectSets) { set, name, meta ->
val res = TimeAnalyzer(NumassEventExtractor.EVENTS_ONLY).analyzeSet(
set,
NumassAnalyzerParameters.read(meta["analyzer"] ?: Meta.EMPTY)
)
val outputMeta = meta.toMutableMeta().apply {
"data" put set.meta
}
// context.output.render(res, stage = "numass.analyze", name = name, meta = outputMeta)
res
}
}
//
// val monitorTableTask: TaskReference<Table<Value>> by task(
// MetaDescriptor {
// value("showPlot", type = ValueType.BOOLEAN) {
// info = "Show plot after complete"
// }
// value("monitorPoint", type = ValueType.NUMBER) {
// info = "The voltage for monitor point"
// }
// }
// ) {
// val data = from(selectSets)
//// model { meta ->
//// dependsOn(selectTask, meta)
////// if (meta.getBoolean("monitor.correctForThreshold", false)) {
////// dependsOn(subThresholdTask, meta, "threshold")
////// }
//// configure(meta.getMetaOrEmpty("monitor"))
//// configure {
//// meta.useMeta("analyzer") { putNode(it) }
//// setValue("@target", meta.getString("@target", meta.name))
//// }
//// }
//
// val monitorVoltage = meta["monitorPoint"].double ?: 16000.0
// val analyzer = TimeAnalyzer()
// val analyzerMeta = meta["analyzer"]
//
// //val thresholdCorrection = da
// //TODO add separator labels
// val res = ListTable.Builder("timestamp", "count", "cr", "crErr", "index", "set")
// .rows(
// data.values.stream().flatMap { set ->
// set.points.stream()
// .filter { it.voltage == monitorVoltage }
// .parallel()
// .map { point ->
// analyzer.analyzeParent(point, analyzerMeta).edit {
// "index" to point.index
// "set" to set.name
// }
// }
// }
//
// ).build()
//
// if (meta["showPlot"].boolean ?: true) {
// val plot = DataPlot.plot(name, res, Adapters.buildXYAdapter("timestamp", "cr", "crErr"))
// context.plot(plot, name, "numass.monitor") {
// "xAxis.title" to "time"
// "xAxis.type" to "time"
// "yAxis.title" to "Count rate"
// "yAxis.units" to "Hz"
// }
//
// ((context.output["numass.monitor", name] as? PlotOutput)?.frame as? JFreeChartFrame)?.addSetMarkers(data.values)
// }
//
// context.output.render(res, stage = "numass.monitor", name = name, meta = meta)
//
// data(Name.EMPTY, res)
// }
companion object : PluginFactory<NumassWorkspacePlugin> {
override val tag: PluginTag = PluginTag("numass", "ru.mipt.npm")
override val type: KClass<out NumassWorkspacePlugin> = NumassWorkspacePlugin::class
override fun build(context: Context, meta: Meta): NumassWorkspacePlugin = NumassWorkspacePlugin()
}
}


@ -10,10 +10,10 @@ import ru.inr.mass.data.analysis.timeHistogram
import ru.inr.mass.data.api.* import ru.inr.mass.data.api.*
import ru.inr.mass.data.proto.HVData import ru.inr.mass.data.proto.HVData
import ru.inr.mass.data.proto.NumassDirectorySet import ru.inr.mass.data.proto.NumassDirectorySet
import space.kscience.dataforge.values.asValue import space.kscience.dataforge.meta.asValue
import space.kscience.dataforge.values.double import space.kscience.dataforge.meta.double
import space.kscience.kmath.histogram.UnivariateHistogram import space.kscience.kmath.domains.center
import space.kscience.kmath.histogram.center import space.kscience.kmath.histogram.Histogram1D
import space.kscience.kmath.misc.UnstableKMathAPI import space.kscience.kmath.misc.UnstableKMathAPI
import space.kscience.kmath.operations.asIterable import space.kscience.kmath.operations.asIterable
import space.kscience.kmath.structures.Buffer import space.kscience.kmath.structures.Buffer
@ -26,9 +26,9 @@ import kotlin.time.DurationUnit
* Plot a kmath histogram * Plot a kmath histogram
*/ */
@OptIn(UnstableKMathAPI::class) @OptIn(UnstableKMathAPI::class)
fun Plot.histogram(histogram: UnivariateHistogram, block: Scatter.() -> Unit = {}): Trace = scatter { fun Plot.histogram(histogram: Histogram1D<Double, Double>, block: Scatter.() -> Unit = {}): Trace = scatter {
x.numbers = histogram.bins.map { it.domain.center } x.numbers = histogram.bins.map { it.domain.center }
y.numbers = histogram.bins.map { it.value } y.numbers = histogram.bins.map { it.binValue }
line.shape = LineShape.hv line.shape = LineShape.hv
block() block()
} }
@ -58,7 +58,7 @@ fun Plotly.plotNumassBlock(
block: NumassBlock, block: NumassBlock,
amplitudeBinSize: UInt = 20U, amplitudeBinSize: UInt = 20U,
eventExtractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY, eventExtractor: NumassEventExtractor = NumassEventExtractor.EVENTS_ONLY,
splitChannels: Boolean = true splitChannels: Boolean = true,
): PlotlyFragment = Plotly.fragment { ): PlotlyFragment = Plotly.fragment {
plot { plot {
runBlocking { runBlocking {
@ -105,11 +105,11 @@ fun Plotly.plotNumassSet(
h2 { +"Time spectra" } h2 { +"Time spectra" }
plot { plot {
spectra.forEach { (point,spectrum) -> spectra.forEach { (point, spectrum) ->
val countRate = runBlocking { val countRate = runBlocking {
spectrum.sum().toDouble() / point.getLength().toDouble(DurationUnit.SECONDS) spectrum.sum().toDouble() / point.getLength().toDouble(DurationUnit.SECONDS)
} }
val binSize = 1.0 / countRate / 10.0 val binSize = 1.0 / countRate / 10.0
histogram(point.timeHistogram(binSize)) { histogram(point.timeHistogram(binSize)) {
name = point.title name = point.title
} }
@ -128,7 +128,7 @@ fun Plotly.plotNumassSet(
} }
if (set is NumassDirectorySet) { if (set is NumassDirectorySet) {
set.getHvData()?.let { entries -> set.hvData?.let { entries ->
h2 { +"HV" } h2 { +"HV" }
plot { plot {
hvData(entries) hvData(entries)


@ -1,10 +0,0 @@
package ru.inr.mass.workspace
import ru.inr.mass.data.proto.NumassProtoPlugin
import space.kscience.dataforge.workspace.Workspace
val NUMASS = Workspace {
context{
plugin(NumassProtoPlugin)
}
}

File diff suppressed because it is too large


@ -15,6 +15,7 @@ pluginManagement {
} }
plugins { plugins {
id("org.jetbrains.compose").version(extra["compose.version"] as String)
id("space.kscience.gradle.project") version toolsVersion id("space.kscience.gradle.project") version toolsVersion
id("space.kscience.gradle.mpp") version toolsVersion id("space.kscience.gradle.mpp") version toolsVersion
id("space.kscience.gradle.jvm") version toolsVersion id("space.kscience.gradle.jvm") version toolsVersion
@ -43,7 +44,7 @@ include(
":numass-data-model", ":numass-data-model",
":numass-analysis", ":numass-analysis",
":numass-data-proto", ":numass-data-proto",
//":numass-data-server", ":numass-data-server",
":numass-workspace", ":numass-workspace",
":numass-model", ":numass-model",
//":numass-detector-client" //":numass-detector-client"