Data conversion and monitor plot
Commit 03bf4f2d96 (parent 51bc07ad40)
@@ -9,11 +9,11 @@ allprojects {
     }
 
     group = "ru.inr.mass"
-    version = "0.1.2"
+    version = "0.1.3"
 }
 
 val dataforgeVersion by extra("0.5.2")
-val tablesVersion: String by extra("0.1.2")
+val tablesVersion: String by extra("0.1.5")
 val kmathVersion by extra("0.3.0-dev-18")
 val plotlyVersion: String by extra("0.5.0")
 
@@ -12,4 +12,4 @@ org.gradle.configureondemand=true
 org.gradle.parallel=true
 org.gradle.jvmargs=-XX:MaxMetaspaceSize=1G
 
-toolsVersion=0.11.1-kotlin-1.6.10
+toolsVersion=0.11.2-kotlin-1.6.10
@@ -20,6 +20,9 @@ import io.ktor.utils.io.core.readBytes
 import kotlinx.coroutines.flow.*
 import kotlinx.coroutines.runBlocking
 import kotlinx.datetime.Instant
+import kotlinx.datetime.LocalDateTime
+import kotlinx.datetime.TimeZone
+import kotlinx.datetime.toInstant
 import okio.ByteString
 import org.slf4j.LoggerFactory
 import ru.inr.mass.data.api.NumassBlock
@@ -28,6 +31,9 @@ import ru.inr.mass.data.api.NumassFrame
 import ru.inr.mass.data.api.NumassPoint
 import space.kscience.dataforge.io.Envelope
 import space.kscience.dataforge.meta.*
+import space.kscience.dataforge.values.ValueType
+import space.kscience.dataforge.values.long
+import space.kscience.dataforge.values.string
 import java.io.ByteArrayInputStream
 import java.io.ByteArrayOutputStream
 import java.io.InputStream
@@ -79,9 +85,16 @@ internal class ProtoNumassPoint(
     override val index: Int get() = meta["external_meta.point_index"].int ?: super.index
 
     override val startTime: Instant
-        get() = meta["start_time"].long?.let {
-            Instant.fromEpochMilliseconds(it)
-        } ?: Instant.DISTANT_PAST
+        get() {
+            val startTimeValue = meta["start_time"]?.value
+            return when {
+                startTimeValue == null -> Instant.DISTANT_PAST
+                startTimeValue.type == ValueType.STRING -> LocalDateTime.parse(startTimeValue.string).toInstant(TimeZone.UTC)
+                //TODO fix time zones!!!
+                startTimeValue.type == ValueType.NUMBER -> Instant.fromEpochMilliseconds(startTimeValue.long)
+                else -> error("Can't decode start time")
+            }
+        }
 
     override suspend fun getLength(): Duration = meta["acquisition_time"].double?.let {
         (it * 1000).milliseconds
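Note: the reworked startTime getter above now accepts either a numeric epoch-millisecond value or an ISO-8601 date-time string from the point meta. A minimal standalone sketch of the same decoding idea in plain kotlinx-datetime, without the DataForge Meta wrapper (the helper name decodeStartTime and the sample values are illustrative, not part of this commit):

    import kotlinx.datetime.Instant
    import kotlinx.datetime.LocalDateTime
    import kotlinx.datetime.TimeZone
    import kotlinx.datetime.toInstant

    // Numbers are treated as epoch milliseconds, strings as ISO-8601 local
    // date-times interpreted in UTC, mirroring the when branches above.
    fun decodeStartTime(raw: Any?): Instant = when (raw) {
        null -> Instant.DISTANT_PAST
        is Number -> Instant.fromEpochMilliseconds(raw.toLong())
        is String -> LocalDateTime.parse(raw).toInstant(TimeZone.UTC)
        else -> error("Can't decode start time")
    }

    fun main() {
        println(decodeStartTime(1_600_000_000_000L))    // 2020-09-13T12:26:40Z
        println(decodeStartTime("2020-09-13T12:26:40")) // same instant, assuming UTC
    }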
@@ -0,0 +1,35 @@
+package ru.inr.mass.scripts
+
+import ru.inr.mass.data.api.NumassBlock
+import ru.inr.mass.data.api.channels
+import ru.inr.mass.workspace.Numass
+import ru.inr.mass.workspace.listFrames
+import space.kscience.dataforge.io.write
+import space.kscience.dataforge.io.writeUtf8String
+import java.nio.file.Files
+import kotlin.io.path.createDirectories
+import kotlin.io.path.writeText
+
+fun main() {
+
+    val point = Numass.readPoint("D:\\Work\\numass-data\\set_3\\p101(30s)(HV1=14150)")
+    val channels: Map<Int, NumassBlock> = point.channels
+
+    //Initialize and create target directory
+    val targetDir = Files.createTempDirectory("numass_p101(30s)(HV1=14150)")
+    targetDir.createDirectories()
+
+    //dumping meta
+    targetDir.resolve("meta").writeText(point.meta.toString())
+
+    channels.forEach { (key, block) ->
+        targetDir.resolve("channel-$key.csv").write {
+            block.listFrames().forEach { frame ->
+                val line = frame.signal.joinToString(", ", postfix = "\n")
+                writeUtf8String(line)
+            }
+        }
+    }
+
+    println("Exported to $targetDir")
+}
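Note: the new dump script writes the point meta plus one channel-$key.csv per hardware channel into a temporary directory; each CSV line is one frame, its signal samples joined with ", ". A hedged sketch of reading such a file back (the path is illustrative, the real directory is printed by the script):

    import java.nio.file.Files
    import java.nio.file.Paths

    fun main() {
        // Illustrative path; substitute the directory printed by the dump script.
        val path = Paths.get("channel-0.csv")
        // Each non-blank line is one frame: comma-separated signal samples.
        val frames: List<List<Int>> = Files.readAllLines(path)
            .filter { it.isNotBlank() }
            .map { line -> line.split(",").map { it.trim().toInt() } }
        println("Read ${frames.size} frames; first frame has ${frames.firstOrNull()?.size ?: 0} samples")
    }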
@@ -1,13 +1,18 @@
 package ru.inr.mass.scripts
 
+import kotlinx.coroutines.flow.map
+import kotlinx.coroutines.flow.toList
 import kotlinx.coroutines.runBlocking
+import kotlinx.html.code
 import kotlinx.html.h2
 import kotlinx.html.p
+import kotlinx.html.unsafe
 import kotlinx.serialization.json.Json
 import ru.inr.mass.data.analysis.NumassEventExtractor
 import ru.inr.mass.data.analysis.amplitudeSpectrum
 import ru.inr.mass.data.api.NumassFrame
 import ru.inr.mass.data.api.channels
+import ru.inr.mass.workspace.Numass.readDirectory
 import ru.inr.mass.workspace.Numass.readPoint
 import ru.inr.mass.workspace.listFrames
 import space.kscience.dataforge.meta.MetaSerializer
@@ -73,7 +78,7 @@ suspend fun main() {
                 channel.amplitudeSpectrum(NumassEventExtractor.TQDC)
             }.binned(binning)
             x.numbers = spectrum.keys.map { it.center }
-            y.numbers = spectrum.values.map { it }
+            y.numbers = spectrum.values.map { it}
         }
 
         scatter {
@@ -84,9 +89,20 @@ suspend fun main() {
             x.numbers = spectrum.keys.map { it.center }
             y.numbers = spectrum.values.map { it }
         }
+        histogram {
+            name = "events"
+            xbins {
+                size = 2.0
+            }
+            x.numbers = runBlocking { point.events.map { it.amplitude.toInt() }.toList() }
+        }
     }
     h2 { +"Meta" }
-    p { +Json.encodeToString(MetaSerializer, point.meta) }
+    code {
+        unsafe {
+            +Json { prettyPrint = true }.encodeToString(MetaSerializer, point.meta)
+        }
+    }
 }.makeFile()
 
 
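Note: besides the binned amplitude spectra, the page now adds a raw Plotly histogram of event amplitudes with a fixed bin size of 2, and the point meta is rendered as pretty-printed JSON inside a code block instead of a plain paragraph. A self-contained sketch of the histogram trace with synthetic amplitudes standing in for real point events (assuming the plotlykt 0.5.0 DSL used elsewhere in this commit):

    import space.kscience.plotly.*
    import kotlin.random.Random

    fun main() {
        // Synthetic stand-in for the point.events amplitude values.
        val amplitudes = List(10_000) { Random.nextInt(0, 400) }
        Plotly.plot {
            histogram {
                name = "events"
                xbins {
                    size = 2.0 // same bin size as in the commit
                }
                x.numbers = amplitudes
            }
        }.makeFile()
    }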
@@ -0,0 +1,22 @@
+package ru.inr.mass.scripts
+
+import ru.inr.mass.data.api.NumassPoint
+import ru.inr.mass.workspace.Numass
+import space.kscience.plotly.Plotly
+import space.kscience.plotly.makeFile
+import space.kscience.plotly.scatter
+
+fun main() {
+    val directory = Numass.readDirectory("D:\\Work\\numass-data\\set_3\\")
+
+    val monitorPoints: List<NumassPoint> = directory.filter { it.voltage == 14000.0 }.sortedBy { it.startTime }
+
+    Plotly.plot {
+        scatter {
+            x.numbers = monitorPoints.map {
+                it.startTime.toEpochMilliseconds()
+            }
+            y.numbers = monitorPoints.map { it.framesCount }
+        }
+    }.makeFile()
+}
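Note: the monitor plot script treats points taken at 14000 V as monitor points and plots their frame count against start time, which gives a quick look at count stability over the set. A hedged console variant of the same check, using only the entry points already shown in this commit (the directory path is illustrative):

    import ru.inr.mass.data.api.NumassPoint
    import ru.inr.mass.workspace.Numass

    fun main() {
        val directory = Numass.readDirectory("D:\\Work\\numass-data\\set_3\\")
        val monitorPoints: List<NumassPoint> = directory
            .filter { it.voltage == 14000.0 }
            .sortedBy { it.startTime }

        // One line per monitor point: start time and number of recorded frames.
        monitorPoints.forEach { point ->
            println("${point.startTime}\t${point.framesCount}")
        }
    }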