Viewer update
commit 73dd8cb4eb
parent 7e676681b7
@@ -199,9 +199,11 @@ fun Table.withBinning(binSize: Int, loChannel: Int? = null, upChannel: Int? = nu
             .addNumber("binSize")
     val builder = ListTable.Builder(format)

-    var chan = loChannel ?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.intValue() }.min().orElse(0)
+    var chan = loChannel
+            ?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.intValue() }.min().orElse(0)

-    val top = upChannel ?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.intValue() }.max().orElse(1)
+    val top = upChannel
+            ?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.intValue() }.max().orElse(1)

     while (chan < top - binSize) {
         val count = AtomicLong(0)
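Usage sketch for the reformatted helper (assumes a spectrum Table with a NumassAnalyzer.CHANNEL_KEY column; the bin size and channel bounds below are made-up example values):

    import hep.dataforge.tables.Table
    import inr.numass.data.analyzers.withBinning

    // withBinning(binSize, loChannel, upChannel): the bounds are optional and default
    // to the table's own minimum and maximum channel, as computed in the hunk above
    fun rebin(spectrum: Table): Table =
            spectrum.withBinning(16)           // bin the full channel range
    // spectrum.withBinning(16, 400, 3100)     // or restrict to an explicit channel window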
@@ -62,7 +62,7 @@ interface NumassBlock : Metoid {

 typealias OrphanNumassEvent = Pair<Short, Long>

-inline fun OrphanNumassEvent.adopt(parent: NumassBlock): NumassEvent {
+fun OrphanNumassEvent.adopt(parent: NumassBlock): NumassEvent {
     return NumassEvent(this.first, this.second, parent)
 }
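Usage sketch for adopt (a sketch only: the import paths assume these declarations live in inr.numass.data.api, the amplitude/offset literals are made up, and the amp/timeOffset property names are taken from the script added later in this diff):

    import inr.numass.data.api.NumassBlock
    import inr.numass.data.api.NumassEvent
    import inr.numass.data.api.OrphanNumassEvent
    import inr.numass.data.api.adopt

    fun demoAdopt(block: NumassBlock) {
        // an orphan event is just (amplitude, time offset in ns) until a block adopts it
        val orphan: OrphanNumassEvent = 120.toShort() to 4_500L
        val event: NumassEvent = orphan.adopt(block)
        println("amp=${event.amp}, offset=${event.timeOffset}")
    }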
@@ -1,6 +1,9 @@
 package inr.numass.data.api

+import hep.dataforge.io.envelopes.Envelope
 import hep.dataforge.meta.Metoid
+import inr.numass.data.storage.ClassicNumassPoint
+import inr.numass.data.storage.ProtoNumassPoint
 import java.time.Duration
 import java.time.Instant
 import java.util.stream.Stream
@@ -81,5 +84,13 @@ interface NumassPoint : Metoid, NumassBlock {
         const val LENGTH_KEY = "length"
         const val HV_KEY = "voltage"
         const val INDEX_KEY = "index"
+
+        fun read(envelope: Envelope): NumassPoint {
+            return if (envelope.dataType?.startsWith("numass.point.classic") ?: envelope.meta.hasValue("split")) {
+                ClassicNumassPoint(envelope)
+            } else {
+                ProtoNumassPoint(envelope)
+            }
+        }
     }
 }
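The new read factory dispatches on the envelope's declared data type, falling back to the presence of a "split" meta value, so callers can stay agnostic of the on-disk format. A minimal sketch, assuming an Envelope already obtained from storage:

    import hep.dataforge.io.envelopes.Envelope
    import inr.numass.data.api.NumassPoint

    // ClassicNumassPoint for "numass.point.classic" envelopes, ProtoNumassPoint otherwise
    fun loadPoint(envelope: Envelope): NumassPoint = NumassPoint.read(envelope)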
@@ -22,6 +22,12 @@ import java.util.stream.StreamSupport
  */
 class ClassicNumassPoint(private val envelope: Envelope) : NumassPoint {

+    override val meta: Meta = envelope.meta
+
+    override val voltage: Double = meta.getDouble("external_meta.HV1_value", super.voltage)
+
+    override val index: Int = meta.getInt("external_meta.point_index", super.index)
+
     override val blocks: Stream<NumassBlock>
         get() {
             val length: Long = if (envelope.meta.hasValue("external_meta.acquisition_time")) {
@@ -36,15 +42,9 @@ class ClassicNumassPoint(private val envelope: Envelope) : NumassPoint {
         get() = if (meta.hasValue("start_time")) {
             meta.getValue("start_time").timeValue()
         } else {
-            Instant.EPOCH
+            super.startTime
         }

-    override val meta: Meta = envelope.meta
-
-    override val voltage: Double = meta.getDouble("external_meta.HV1_value", 0.0)
-
-    override val index: Int = meta.getInt("external_meta.point_index", -1)
-

     //TODO split blocks using meta
     private inner class ClassicBlock(
@@ -89,7 +89,9 @@ class NumassDataLoader(

     override val points: Stream<NumassPoint>
         get() {
-            return pointEnvelopes.map { ClassicNumassPoint(it) }
+            return pointEnvelopes.map {
+                NumassPoint.read(it)
+            }
         }

     override fun pull(fragmentName: String): Envelope {
@@ -135,7 +135,7 @@ class NumassStorage : FileStorage {
             val files = ArrayList<NumassSet>()
             Files.list(dataDir).forEach { file ->
                 if (Files.isRegularFile(file) && file.fileName.toString().toLowerCase().endsWith(".dat")) {
-                    val name = file.fileName.toString()
+                    //val name = file.fileName.toString()
                     try {
                         files.add(NumassDatFile(file, Meta.empty()))
                     } catch (ex: Exception) {
@@ -4,6 +4,8 @@ import hep.dataforge.context.Context
 import hep.dataforge.context.Global
 import hep.dataforge.io.envelopes.Envelope
 import hep.dataforge.kodex.buildMeta
+import hep.dataforge.kodex.toList
+import hep.dataforge.meta.Laminate
 import hep.dataforge.meta.Meta
 import inr.numass.data.NumassProto
 import inr.numass.data.api.NumassBlock
@@ -12,6 +14,7 @@ import inr.numass.data.api.NumassFrame
 import inr.numass.data.api.NumassPoint
 import inr.numass.data.dataStream
 import inr.numass.data.legacy.NumassFileEnvelope
+import org.slf4j.LoggerFactory
 import java.io.IOException
 import java.nio.file.Path
 import java.time.Duration
@@ -39,13 +42,25 @@ class ProtoNumassPoint(private val envelope: Envelope) : NumassPoint {
         get() = point.channelsList.stream()
                 .flatMap { channel ->
                     channel.blocksList.stream()
-                            .map { block -> ProtoBlock(channel.id.toInt(), block) }
+                            .map { block -> ProtoBlock(channel.id.toInt(), block, this) }
                             .sorted(Comparator.comparing<ProtoBlock, Instant> { it.startTime })
                 }

+    override val meta: Meta = envelope.meta
+
+    override val voltage: Double = meta.getDouble("external_meta.HV1_value", super.voltage)
+
+    override val index: Int = meta.getInt("external_meta.point_index", super.index)
+
+    override val startTime: Instant
+        get() = if (meta.hasValue("start_time")) {
+            meta.getValue("start_time").timeValue()
+        } else {
+            super.startTime
+        }
+
     companion object {
         fun readFile(path: Path): ProtoNumassPoint {
             return ProtoNumassPoint(NumassFileEnvelope.open(path, true))
@@ -63,25 +78,27 @@ class ProtoNumassPoint(private val envelope: Envelope) : NumassPoint {
     }
 }

-class ProtoBlock(val channel: Int, private val block: NumassProto.Point.Channel.Block) : NumassBlock {
+class ProtoBlock(val channel: Int, private val block: NumassProto.Point.Channel.Block, parent: NumassBlock? = null) : NumassBlock {
     override val meta: Meta by lazy {
-        buildMeta {
+        val blockMeta = buildMeta {
             "channel" to channel
         }
+        return@lazy parent?.let { Laminate(blockMeta, parent.meta) } ?: blockMeta
     }

     override val startTime: Instant
         get() = ProtoNumassPoint.ofEpochNanos(block.time)

-    override val length: Duration
-        get() = if (meta.hasMeta("params")) {
-            Duration.ofNanos((meta.getDouble("params.b_size") / meta.getDouble("params.sample_freq") * 1e9).toLong())
-        } else if (meta.hasValue("length")) {
-            Duration.ofNanos(meta.getValue("length").longValue())
-        } else {
-            Duration.ZERO
-        }
+    override val length: Duration = when {
+        block.length > 0 -> Duration.ofNanos(block.length)
+        meta.hasValue("acquisition_time") -> Duration.ofMillis((meta.getDouble("acquisition_time") * 1000).toLong())
+        else -> {
+            LoggerFactory.getLogger(javaClass).error("No length information on block. Trying to infer from first and last events")
+            val times = events.map { it.timeOffset }.toList()
+            val nanos = (times.max()!! - times.min()!!)
+            Duration.ofNanos(nanos)
+        }
+    }

     override val events: Stream<NumassEvent>
         get() = if (block.hasEvents()) {
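With the optional parent, a ProtoBlock's meta becomes a Laminate that layers the block-level "channel" entry over the owning point's meta, so point-level values stay visible from the block. A sketch of the layering, assuming dataforge's Laminate resolves keys layer by layer and is itself a Meta:

    import hep.dataforge.kodex.buildMeta
    import hep.dataforge.meta.Laminate
    import hep.dataforge.meta.Meta

    fun blockMetaFor(channel: Int, pointMeta: Meta): Meta {
        val blockMeta = buildMeta { "channel" to channel }
        // block-level values win; anything missing falls back to the parent point's meta
        return Laminate(blockMeta, pointMeta)
    }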
@@ -94,13 +111,7 @@ class ProtoBlock(val channel: Int, private val block: NumassProto.Point.Channel.

     override val frames: Stream<NumassFrame>
         get() {
-            val tickSize = if (meta.hasMeta("params")) {
-                Duration.ofNanos((1e9 / meta.getInt("params.sample_freq")).toLong())
-            } else if (meta.hasValue("tick_length")) {
-                Duration.ofNanos(meta.getInt("tick_length").toLong())
-            } else {
-                Duration.ofNanos(1)
-            }
+            val tickSize = Duration.ofNanos(block.binSize)
             return block.framesList.stream().map { frame ->
                 val time = startTime.plusNanos(frame.time)
                 val data = frame.data.asReadOnlyByteBuffer()
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2018 Alexander Nozik.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package inr.numass.scripts.tristan
+
+import hep.dataforge.context.Global
+import hep.dataforge.kodex.toList
+import inr.numass.data.api.NumassPoint
+import inr.numass.data.channel
+import inr.numass.data.storage.NumassDataLoader
+import inr.numass.data.storage.NumassStorageFactory
+
+fun main(args: Array<String>) {
+    val storage = NumassStorageFactory.buildLocal(Global, "D:\\Work\\Numass\\data\\2018_04\\Adiabacity_19\\", true, false)
+    val set = storage.optLoader("set_4").get() as NumassDataLoader
+    set.points.forEach { point ->
+        if (point.voltage == 18700.0) {
+            println("${point.index}:")
+            point.blocks.forEach {
+                println("\t${it.channel}: events: ${it.events.count()}, time: ${it.length}")
+            }
+        }
+    }
+
+    val point: NumassPoint = set.points.filter { it.index == 18 }.findFirst().get()
+    (0..99).forEach { bin ->
+        val times = point.events.filter { it.amp > 0 }.map { it.timeOffset }.toList()
+        val count = times.filter { it > bin.toDouble() / 10 * 1e9 && it < (bin + 1).toDouble() / 10 * 1e9 }.count()
+        println("${bin.toDouble() / 10.0}: $count")
+    }
+}
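The second loop in the new script bins event time offsets (nanoseconds) into 0.1 s windows: bin b counts events with b * 0.1 s < timeOffset < (b + 1) * 0.1 s. The same arithmetic as a standalone helper (hypothetical function name):

    // time offsets are in nanoseconds, so one bin spans binWidthSeconds * 1e9 ns
    fun binIndex(timeOffsetNs: Long, binWidthSeconds: Double = 0.1): Int =
            (timeOffsetNs / (binWidthSeconds * 1e9)).toInt()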
@@ -12,7 +12,7 @@ if (!hasProperty('mainClass')) {

 mainClassName = mainClass

-version = "0.5.0"
+version = "0.5.1 - SNAPSHOT"

 description = "The viewer for numass data"

@@ -6,8 +6,11 @@ import hep.dataforge.fx.runGoal
 import hep.dataforge.fx.ui
 import hep.dataforge.goals.Goal
 import hep.dataforge.kodex.configure
+import hep.dataforge.kodex.toList
 import hep.dataforge.meta.Meta
 import hep.dataforge.plots.PlotFrame
+import hep.dataforge.plots.PlotGroup
+import hep.dataforge.plots.Plottable
 import hep.dataforge.plots.data.DataPlot
 import hep.dataforge.plots.jfreechart.JFreeChartFrame
 import hep.dataforge.tables.Adapters
@@ -15,7 +18,10 @@ import hep.dataforge.tables.Table
 import inr.numass.data.analyzers.NumassAnalyzer
 import inr.numass.data.analyzers.SimpleAnalyzer
+import inr.numass.data.analyzers.withBinning
+import inr.numass.data.api.MetaBlock
+import inr.numass.data.api.NumassBlock
 import inr.numass.data.api.NumassPoint
 import inr.numass.data.channel
 import javafx.beans.Observable
 import javafx.beans.binding.DoubleBinding
 import javafx.beans.property.SimpleBooleanProperty
@@ -30,7 +36,7 @@ import java.util.concurrent.ConcurrentHashMap

 class AmplitudeView(
         private val analyzer: NumassAnalyzer = SimpleAnalyzer(),
-        private val cache: MutableMap<NumassPoint, Table> = ConcurrentHashMap()
+        private val cache: MutableMap<NumassBlock, Table> = ConcurrentHashMap()
 ) : View(title = "Numass amplitude spectrum plot", icon = ImageView(dfIcon)) {

     private val frame: PlotFrame = JFreeChartFrame().configure {
@@ -45,9 +51,17 @@ class AmplitudeView(
             "units" to "Hz"
         }
+        "legend.showComponent" to false
+    }.apply {
+        plots.configure {
+            "connectionType" to "step"
+            "thickness" to 2
+            "showLine" to true
+            "showSymbol" to false
+            "showErrors" to false
+        }.setType(DataPlot::class)
     }

-    val binningProperty = SimpleObjectProperty<Int>(20)
+    val binningProperty = SimpleObjectProperty(20)
     var binning by binningProperty

     val normalizeProperty = SimpleBooleanProperty(true)
@@ -68,7 +82,7 @@ class AmplitudeView(
     }

     private val data: ObservableMap<String, NumassPoint> = FXCollections.observableHashMap()
-    private val plots: ObservableMap<String, Goal<DataPlot>> = FXCollections.observableHashMap()
+    private val plots: ObservableMap<String, Goal<Plottable>> = FXCollections.observableHashMap()

     val isEmpty = booleanBinding(data) { data.isEmpty() }

@@ -83,7 +97,6 @@ class AmplitudeView(

     }

-
     init {
         data.addListener { _: Observable ->
             invalidate()
@@ -106,7 +119,7 @@ class AmplitudeView(
     /**
     * Calculate or get spectrum from the immutable
     */
-    private suspend fun getSpectrum(point: NumassPoint): Table {
+    private fun getSpectrum(point: NumassBlock): Table {
        return cache.computeIfAbsent(point) { analyzer.getAmplitudeSpectrum(point, Meta.empty()) }
    }

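The cache is now keyed by NumassBlock instead of NumassPoint, so per-channel blocks and whole points can share one memoised spectrum map. A sketch of the memoisation under that assumption (getAmplitudeSpectrum may need its own import if it is an extension function):

    import hep.dataforge.meta.Meta
    import hep.dataforge.tables.Table
    import inr.numass.data.analyzers.NumassAnalyzer
    import inr.numass.data.analyzers.SimpleAnalyzer
    import inr.numass.data.api.NumassBlock
    import java.util.concurrent.ConcurrentHashMap

    val spectra: MutableMap<NumassBlock, Table> = ConcurrentHashMap()

    // compute a block's amplitude spectrum once and reuse it afterwards
    fun spectrumOf(block: NumassBlock, analyzer: NumassAnalyzer = SimpleAnalyzer()): Table =
            spectra.computeIfAbsent(block) { analyzer.getAmplitudeSpectrum(it, Meta.empty()) }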
@@ -114,33 +127,56 @@ class AmplitudeView(
      * Put or replace current plot with name `key`
      */
     fun add(key: String, point: NumassPoint) {
-        data.put(key, point)
+        data[key] = point
     }

     fun addAll(data: Map<String, NumassPoint>) {
         this.data.putAll(data);
     }

+    /**
+     * Distinct map of channel number to corresponding grouping block
+     */
+    private fun NumassPoint.getChannels(): Map<Int, NumassBlock> {
+        return blocks.toList().groupBy { it.channel ?: 0 }.mapValues { entry ->
+            if (entry.value.size == 1) {
+                entry.value.first()
+            } else {
+                MetaBlock(entry.value)
+            }
+        }
+    }
+
     private fun invalidate() {
         data.forEach { key, point ->
             plots.computeIfAbsent(key) {
-                runGoal("loadAmplitudeSpectrum_$key") {
+                runGoal<Plottable>("loadAmplitudeSpectrum_$key") {
                     val valueAxis = if (normalize) {
                         NumassAnalyzer.COUNT_RATE_KEY
                     } else {
                         NumassAnalyzer.COUNT_KEY
                     }
+                    val adapter = Adapters.buildXYAdapter(NumassAnalyzer.CHANNEL_KEY, valueAxis)
+
+                    val channels = point.getChannels()
+
+                    return@runGoal if (channels.size == 1) {
                     DataPlot.plot(
                             key,
-                            Adapters.buildXYAdapter(NumassAnalyzer.CHANNEL_KEY, valueAxis),
+                            adapter,
                             getSpectrum(point).withBinning(binning)
-                    ).configure {
-                        "connectionType" to "step"
-                        "thickness" to 2
-                        "showLine" to true
-                        "showSymbol" to false
-                        "showErrors" to false
-                        "JFreeChart.cache" to true
-                    }
+                    )
+                    } else {
+                        val group = PlotGroup.typed<DataPlot>(key)
+                        channels.forEach { key, block ->
+                            val plot = DataPlot.plot(
+                                    key.toString(),
+                                    adapter,
+                                    getSpectrum(block).withBinning(binning)
+                            )
+                            group.add(plot)
+                        }
+                        group
+                    }
                 } ui { plot ->
                     frame.add(plot)
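getChannels() groups a point's blocks by detector channel and wraps multi-block channels in a MetaBlock, so every channel ends up as a single plottable spectrum. The same rule as a standalone helper, assuming the numass API above (blocks is a java Stream; toList and channel come from the kodex and numass helpers imported in this file):

    import hep.dataforge.kodex.toList
    import inr.numass.data.api.MetaBlock
    import inr.numass.data.api.NumassBlock
    import inr.numass.data.api.NumassPoint
    import inr.numass.data.channel

    fun channelsOf(point: NumassPoint): Map<Int, NumassBlock> =
            point.blocks.toList().groupBy { it.channel ?: 0 }
                    .mapValues { (_, blocks) -> if (blocks.size == 1) blocks.first() else MetaBlock(blocks) }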
@@ -9,6 +9,7 @@ import hep.dataforge.storage.api.Loader
 import hep.dataforge.storage.api.Storage
 import hep.dataforge.storage.api.TableLoader
 import hep.dataforge.storage.commons.StorageManager
+import hep.dataforge.tables.Table
 import inr.numass.NumassProperties
 import inr.numass.data.api.NumassPoint
 import inr.numass.data.api.NumassSet
@@ -29,6 +30,7 @@ import org.controlsfx.control.StatusBar
 import tornadofx.*
 import java.io.File
 import java.net.URI
+import java.util.concurrent.ConcurrentHashMap
 import kotlin.streams.toList

 class StorageView(private val context: Context = Global) : View(title = "Numass storage", icon = ImageView(dfIcon)) {
@@ -43,8 +45,10 @@ class StorageView(private val context: Context = Global) : View(title = "Numass

     private val statusBar = StatusBar();

-    private val ampView: AmplitudeView by inject();
-    private val spectrumView: SpectrumView by inject();
+    private val cache: MutableMap<NumassPoint, Table> = ConcurrentHashMap()
+
+    private val ampView: AmplitudeView by inject(params = mapOf("cache" to cache));
+    private val spectrumView: SpectrumView by inject(params = mapOf("cache" to cache));
     private val hvView: HVView by inject();
     private val scView: SlowControlView by inject();

@@ -122,7 +126,7 @@ class StorageView(private val context: Context = Global) : View(title = "Numass
                 isSelected = false
                 LogFragment().apply {
                     addLogHandler(context.logger)
-                    bindWindow(this@togglebutton, selectedProperty())
+                    bindWindow(this@togglebutton)
                 }
             }
         }
@@ -263,15 +267,6 @@ class StorageView(private val context: Context = Global) : View(title = "Numass
             else -> throw IllegalArgumentException("Unknown content type: ${content::class.java}");
         }

-
-//    private fun getSetName(value: NumassSet): String {
-//        return if (value is NumassDataLoader) {
-//            value.path
-//        } else {
-//            value.name
-//        }
-//    }
-
     private fun loadDirectory(path: URI) {
         statusBar.text = "Loading storage: $path"
         runGoal("loadDirectory[$path]") {
@@ -289,22 +284,4 @@ class StorageView(private val context: Context = Global) : View(title = "Numass
             }
         }

-//    fun setRootStorage(root: Storage) {
-//        runGoal("loadStorage[${root.name}]") {
-//            title = "Fill data to UI (" + root.name + ")"
-//            progress = -1.0
-//            runLater { statusBar.progress = -1.0 }
-//
-//            message = "Loading numass storage tree..."
-//
-//            runLater {
-//                storage = root
-//            }
-//
-//            // callback.setProgress(1, 1);
-//            runLater { statusBar.progress = 0.0 }
-//            message = "Numass storage tree loaded."
-//            progress = 1.0
-//        }
-//    }
 }