Data conversion and monitor plot

Alexander Nozik 2022-04-09 16:54:44 +03:00
parent 51bc07ad40
commit 03bf4f2d96
6 changed files with 95 additions and 9 deletions

View File

@@ -9,11 +9,11 @@ allprojects {
     }
     group = "ru.inr.mass"
-    version = "0.1.2"
+    version = "0.1.3"
 }
 val dataforgeVersion by extra("0.5.2")
-val tablesVersion: String by extra("0.1.2")
+val tablesVersion: String by extra("0.1.5")
 val kmathVersion by extra("0.3.0-dev-18")
 val plotlyVersion: String by extra("0.5.0")
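
Note: the bumps above only change extra properties in the root build script (tables-kt 0.1.2 -> 0.1.5, project version 0.1.2 -> 0.1.3). As a hedged illustration of how such extra properties are typically consumed, here is a hypothetical module build-script fragment; the module name, artifact coordinates and dependency configuration are assumptions and are not part of this commit.

// numass-workspace/build.gradle.kts (hypothetical fragment, for illustration only)
// Root-level extra properties are picked up via Gradle's extra delegate.
val tablesVersion: String by rootProject.extra
val plotlyVersion: String by rootProject.extra

dependencies {
    // Artifact coordinates are assumptions, not taken from this commit.
    api("space.kscience:tables-kt:$tablesVersion")
    api("space.kscience:plotlykt-core:$plotlyVersion")
}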

View File

@@ -12,4 +12,4 @@ org.gradle.configureondemand=true
 org.gradle.parallel=true
 org.gradle.jvmargs=-XX:MaxMetaspaceSize=1G
-toolsVersion=0.11.1-kotlin-1.6.10
+toolsVersion=0.11.2-kotlin-1.6.10

View File

@@ -20,6 +20,9 @@ import io.ktor.utils.io.core.readBytes
 import kotlinx.coroutines.flow.*
 import kotlinx.coroutines.runBlocking
 import kotlinx.datetime.Instant
+import kotlinx.datetime.LocalDateTime
+import kotlinx.datetime.TimeZone
+import kotlinx.datetime.toInstant
 import okio.ByteString
 import org.slf4j.LoggerFactory
 import ru.inr.mass.data.api.NumassBlock
@@ -28,6 +31,9 @@ import ru.inr.mass.data.api.NumassFrame
 import ru.inr.mass.data.api.NumassPoint
 import space.kscience.dataforge.io.Envelope
 import space.kscience.dataforge.meta.*
+import space.kscience.dataforge.values.ValueType
+import space.kscience.dataforge.values.long
+import space.kscience.dataforge.values.string
 import java.io.ByteArrayInputStream
 import java.io.ByteArrayOutputStream
 import java.io.InputStream
@@ -79,9 +85,16 @@ internal class ProtoNumassPoint(
     override val index: Int get() = meta["external_meta.point_index"].int ?: super.index
 
     override val startTime: Instant
-        get() = meta["start_time"].long?.let {
-            Instant.fromEpochMilliseconds(it)
-        } ?: Instant.DISTANT_PAST
+        get() {
+            val startTimeValue = meta["start_time"]?.value
+            return when {
+                startTimeValue == null -> Instant.DISTANT_PAST
+                startTimeValue.type == ValueType.STRING -> LocalDateTime.parse(startTimeValue.string).toInstant(TimeZone.UTC)
+                //TODO fix time zones!!!
+                startTimeValue.type == ValueType.NUMBER -> Instant.fromEpochMilliseconds(startTimeValue.long)
+                else -> error("Can't decode start time")
+            }
+        }
 
     override suspend fun getLength(): Duration = meta["acquisition_time"].double?.let {
         (it * 1000).milliseconds
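
Note: the new startTime getter accepts both the legacy numeric start_time (epoch milliseconds) and a string value parsed as an ISO local date-time, currently pinned to UTC (see the TODO). The following is a minimal standalone sketch of the same decoding logic, assuming kotlinx-datetime on the classpath; decodeStartTime is a hypothetical helper for illustration and is not part of the commit.

import kotlinx.datetime.Instant
import kotlinx.datetime.LocalDateTime
import kotlinx.datetime.TimeZone
import kotlinx.datetime.toInstant

// Hypothetical helper mirroring the new getter: a string is parsed as an ISO
// local date-time and pinned to UTC, a number is treated as epoch milliseconds,
// and a missing value falls back to DISTANT_PAST.
fun decodeStartTime(raw: Any?): Instant = when (raw) {
    null -> Instant.DISTANT_PAST
    is String -> LocalDateTime.parse(raw).toInstant(TimeZone.UTC)
    is Number -> Instant.fromEpochMilliseconds(raw.toLong())
    else -> error("Can't decode start time")
}

fun main() {
    println(decodeStartTime("2022-04-09T16:54:44")) // string branch
    println(decodeStartTime(1649516084000L))        // epoch-milliseconds branch
    println(decodeStartTime(null))                  // DISTANT_PAST fallback
}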

View File

@@ -0,0 +1,35 @@
+package ru.inr.mass.scripts
+
+import ru.inr.mass.data.api.NumassBlock
+import ru.inr.mass.data.api.channels
+import ru.inr.mass.workspace.Numass
+import ru.inr.mass.workspace.listFrames
+import space.kscience.dataforge.io.write
+import space.kscience.dataforge.io.writeUtf8String
+import java.nio.file.Files
+import kotlin.io.path.createDirectories
+import kotlin.io.path.writeText
+
+fun main() {
+    val point = Numass.readPoint("D:\\Work\\numass-data\\set_3\\p101(30s)(HV1=14150)")
+    val channels: Map<Int, NumassBlock> = point.channels
+
+    //Initialize and create target directory
+    val targetDir = Files.createTempDirectory("numass_p101(30s)(HV1=14150)")
+    targetDir.createDirectories()
+
+    //dumping meta
+    targetDir.resolve("meta").writeText(point.meta.toString())
+
+    channels.forEach { (key, block) ->
+        targetDir.resolve("channel-$key.csv").write {
+            block.listFrames().forEach { frame ->
+                val line = frame.signal.joinToString(", ", postfix = "\n")
+                writeUtf8String(line)
+            }
+        }
+    }
+    println("Exported to $targetDir")
+}
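
Note: the new script dumps each detector channel to channel-<n>.csv, one frame per line as comma-separated sample values, plus a plain-text meta file. A minimal sketch of reading one exported channel back is shown below; readChannelCsv is a hypothetical helper (not part of the commit), and it assumes the sample values fit into Short.

import java.nio.file.Files
import java.nio.file.Path

// Hypothetical reader for the dump produced above: one ShortArray per frame.
fun readChannelCsv(path: Path): List<ShortArray> =
    Files.readAllLines(path)
        .filter { it.isNotBlank() }
        .map { line -> line.split(",").map { it.trim().toShort() }.toShortArray() }

fun main() {
    // Substitute the temporary directory printed by the dump script.
    val frames = readChannelCsv(Path.of("channel-0.csv"))
    println("Read ${frames.size} frames; first frame has ${frames.firstOrNull()?.size ?: 0} samples")
}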

View File

@@ -1,13 +1,18 @@
 package ru.inr.mass.scripts
 
+import kotlinx.coroutines.flow.map
+import kotlinx.coroutines.flow.toList
+import kotlinx.coroutines.runBlocking
+import kotlinx.html.code
 import kotlinx.html.h2
 import kotlinx.html.p
+import kotlinx.html.unsafe
 import kotlinx.serialization.json.Json
 import ru.inr.mass.data.analysis.NumassEventExtractor
 import ru.inr.mass.data.analysis.amplitudeSpectrum
 import ru.inr.mass.data.api.NumassFrame
 import ru.inr.mass.data.api.channels
 import ru.inr.mass.workspace.Numass.readDirectory
 import ru.inr.mass.workspace.Numass.readPoint
 import ru.inr.mass.workspace.listFrames
 import space.kscience.dataforge.meta.MetaSerializer
@@ -84,9 +89,20 @@ suspend fun main() {
             x.numbers = spectrum.keys.map { it.center }
             y.numbers = spectrum.values.map { it }
         }
+        histogram {
+            name = "events"
+            xbins {
+                size = 2.0
+            }
+            x.numbers = runBlocking { point.events.map { it.amplitude.toInt() }.toList() }
+        }
     }
 
     h2 { +"Meta" }
-    p { +Json.encodeToString(MetaSerializer, point.meta) }
+    code {
+        unsafe {
+            +Json { prettyPrint = true }.encodeToString(MetaSerializer, point.meta)
+        }
+    }
 }.makeFile()
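
Note: the added histogram trace bins raw event amplitudes on the fly (xbins.size = 2.0 gives two-channel-wide bins) next to the precomputed amplitude spectrum, and the point meta is now pretty-printed inside a code element. Below is a self-contained sketch of the same histogram idea, assuming the plotlykt histogram DSL used above; the amplitude data here is synthetic, for illustration only.

import space.kscience.plotly.Plotly
import space.kscience.plotly.histogram
import space.kscience.plotly.makeFile

fun main() {
    // Synthetic amplitudes standing in for point.events; the real script feeds the event flow.
    val fakeAmplitudes = List(10_000) { 50 + (it * 37) % 400 }
    Plotly.plot {
        histogram {
            name = "events"
            xbins {
                size = 2.0 // bin width of two channels, as in the commit
            }
            x.numbers = fakeAmplitudes
        }
    }.makeFile()
}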

View File

@@ -0,0 +1,22 @@
+package ru.inr.mass.scripts
+
+import ru.inr.mass.data.api.NumassPoint
+import ru.inr.mass.workspace.Numass
+import space.kscience.plotly.Plotly
+import space.kscience.plotly.makeFile
+import space.kscience.plotly.scatter
+
+fun main() {
+    val directory = Numass.readDirectory("D:\\Work\\numass-data\\set_3\\")
+    val monitorPoints: List<NumassPoint> = directory.filter { it.voltage == 14000.0 }.sortedBy { it.startTime }
+
+    Plotly.plot {
+        scatter {
+            x.numbers = monitorPoints.map {
+                it.startTime.toEpochMilliseconds()
+            }
+            y.numbers = monitorPoints.map { it.framesCount }
+        }
+    }.makeFile()
+}
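
Note: the monitor plot above selects the 14000 V monitor points and puts raw epoch milliseconds on the x axis. The sketch below is a small variation, not part of the commit, under the same assumed Numass API: it plots elapsed seconds relative to the first monitor point, which gives a more readable time axis.

import ru.inr.mass.data.api.NumassPoint
import ru.inr.mass.workspace.Numass
import space.kscience.plotly.Plotly
import space.kscience.plotly.makeFile
import space.kscience.plotly.scatter

fun main() {
    val directory = Numass.readDirectory("D:\\Work\\numass-data\\set_3\\")
    val monitorPoints: List<NumassPoint> =
        directory.filter { it.voltage == 14000.0 }.sortedBy { it.startTime }
    val t0 = monitorPoints.first().startTime
    Plotly.plot {
        scatter {
            // Elapsed seconds since the first monitor point instead of raw epoch milliseconds.
            x.numbers = monitorPoints.map { (it.startTime - t0).inWholeSeconds }
            y.numbers = monitorPoints.map { it.framesCount }
        }
    }.makeFile()
}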