Added time spectra for viewer

parent 2e312047cf
commit 4264d51a61
@@ -23,6 +23,8 @@ import hep.dataforge.storage.StorageElement
import hep.dataforge.storage.files.FileStorage
import hep.dataforge.storage.files.FileStorageElement
import inr.numass.NumassEnvelopeType
+import kotlinx.coroutines.experimental.runBlocking
+import java.io.File
import java.nio.file.Files
import java.nio.file.Path

@@ -44,6 +46,10 @@ class NumassDirectory : FileStorage.Directory() {
    companion object {
        val INSTANCE = NumassDirectory()
        const val NUMASS_DIRECTORY_TYPE = "inr.numass.storage.directory"
+
+        fun read(context: Context, path: String): FileStorageElement{
+            return runBlocking { INSTANCE.read(context, File(path).toPath())!!}
+        }
    }
}

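For context: the new companion helper above wraps what is presumably a suspending FileStorage read in runBlocking, giving scripts a plain blocking entry point. A minimal before/after sketch of the call-site change, mirroring the script hunks further down in this commit (same arguments as there, nothing new assumed):

    // old, factory-based construction (removed by this commit)
    // val storage = NumassStorageFactory.buildLocal(context, "Fill_4", true, false)

    // new, directory-based reader introduced above; blocks until the storage element is read
    val storage = NumassDirectory.read(context, "Fill_4")
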
@@ -13,6 +13,7 @@ import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.api.SimpleNumassPoint
import inr.numass.data.channel
+import inr.numass.data.storage.NumassDirectory

fun main(args: Array<String>) {

@@ -22,7 +23,7 @@ fun main(args: Array<String>) {
        dataDir = "D:\\Work\\Numass\\data\\2018_04"
    }

-   val storage = NumassStorageFactory.buildLocal(context, "Fill_4", true, false);
+   val storage = NumassDirectory.read(context, "Fill_4");

    val meta = buildMeta {
        "t0" to 3000

@@ -23,6 +23,7 @@ import inr.numass.NumassPlugin
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassSet
+import inr.numass.data.storage.NumassDirectory
import kotlin.streams.asStream

fun main(args: Array<String>) {

@@ -31,7 +32,7 @@ fun main(args: Array<String>) {
        dataDir = "D:\\Work\\Numass\\data\\2018_04"
    }

-   val storage = NumassStorageFactory.buildLocal(context, "Fill_4", true, false);
+   val storage = NumassDirectory.read(context, "Fill_4");

    val meta = buildMeta {
        "t0" to 3000

@@ -12,7 +12,7 @@ if (!hasProperty('mainClass')) {

mainClassName = mainClass

-version = "0.5.4"
+version = "0.5.5"

description = "The viewer for numass data"

@@ -20,12 +22,14 @@ import tornadofx.*
class StorageView(val storage: Storage) : View(title = "Numass storage", icon = dfIconView) {

    private val ampView: AmplitudeView by inject()
+   private val timeView: TimeView by inject()
    private val spectrumView: SpectrumView by inject()
    private val hvView: HVView by inject()
    private val scView: SlowControlView by inject()

    init {
        ampView.clear()
+       timeView.clear()
        spectrumView.clear()
        hvView.clear()
        scView.clear()

@@ -49,8 +51,10 @@ class StorageView(val storage: Storage) : View(title = "Numass storage", icon =
            is CachedPoint -> {
                if (selected) {
                    ampView[id] = content
+                   timeView[id] = content
                } else {
                    ampView.remove(id)
+                   timeView.remove(id)
                }
            }
            is CachedSet -> {

@@ -149,6 +153,11 @@ class StorageView(val storage: Storage) : View(title = "Numass storage", icon =
                isClosable = false
                //visibleWhen(ampView.isEmpty.not())
            }
+           tab("Time spectra") {
+               content = timeView.root
+               isClosable = false
+               //visibleWhen(ampView.isEmpty.not())
+           }
            tab("HV") {
                content = hvView.root
                isClosable = false

numass-viewer/src/main/kotlin/inr/numass/viewer/TimeView.kt (new file, 174 lines)
@@ -0,0 +1,174 @@
package inr.numass.viewer

import hep.dataforge.configure
import hep.dataforge.fx.dfIcon
import hep.dataforge.fx.except
import hep.dataforge.fx.plots.PlotContainer
import hep.dataforge.fx.runGoal
import hep.dataforge.fx.ui
import hep.dataforge.goals.Goal
import hep.dataforge.meta.Meta
import hep.dataforge.names.Name
import hep.dataforge.plots.PlotFrame
import hep.dataforge.plots.Plottable
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.tables.Adapters
import hep.dataforge.values.ValueMap
import inr.numass.data.analyzers.TimeAnalyzer
import javafx.beans.Observable
import javafx.beans.binding.DoubleBinding
import javafx.collections.FXCollections
import javafx.collections.ObservableMap
import javafx.scene.image.ImageView
import tornadofx.*

class TimeView : View(title = "Numass time spectrum plot", icon = ImageView(dfIcon)) {

    private val frame: PlotFrame = JFreeChartFrame().configure {
        "title" to "Time plot"
        node("xAxis") {
            "title" to "delay"
            "units" to "us"

        }
        node("yAxis") {
            "title" to "number of events"
            "type" to "log"
        }
    }.apply {
        plots.configure {
            "connectionType" to "step"
            "thickness" to 2
            "showLine" to true
            "showSymbol" to false
            "showErrors" to false
        }.setType<DataPlot>()
    }

    // val stepProperty = SimpleDoubleProperty()
    // var step by stepProperty
    //
    // private val container = PlotContainer(frame).apply {
    //     val binningSelector: ChoiceBox<Int> = ChoiceBox(FXCollections.observableArrayList(1, 5, 10, 20, 50)).apply {
    //         minWidth = 0.0
    //         selectionModel.selectLast()
    //         stepProperty.bind(this.selectionModel.selectedItemProperty())
    //     }
    //     addToSideBar(0, binningSelector)
    // }

    private val container = PlotContainer(frame)

    private val data: ObservableMap<String, CachedPoint> = FXCollections.observableHashMap()
    private val plots: ObservableMap<String, Goal<Plottable>> = FXCollections.observableHashMap()

    val isEmpty = booleanBinding(data) { isEmpty() }

    private val progress = object : DoubleBinding() {
        init {
            bind(plots)
        }

        override fun computeValue(): Double {
            return plots.values.count { it.isDone }.toDouble() / data.size;
        }

    }

    init {
        data.addListener { _: Observable ->
            invalidate()
        }
    }

    override val root = borderpane {
        center = container.root
    }

    /**
     * Put or replace current plot with name `key`
     */
    operator fun set(key: String, point: CachedPoint) {
        data[key] = point
    }

    fun addAll(data: Map<String, CachedPoint>) {
        this.data.putAll(data);
    }

    private val analyzer = TimeAnalyzer();


    private fun invalidate() {
        data.forEach { key, point ->
            plots.getOrPut(key) {
                runGoal<Plottable>("loadAmplitudeSpectrum_$key") {

                    val initialEstimate = analyzer.analyze(point)
                    val cr = initialEstimate.getDouble("cr")

                    val binNum = 200 //inputMeta.getInt("binNum", 1000);
                    val binSize = 1.0 / cr * 10 / binNum * 1e6 //inputMeta.getDouble("binSize", 1.0 / cr * 10 / binNum * 1e6)

                    val histogram = analyzer.getEventsWithDelay(point, Meta.empty())
                            .map { it.second.toDouble() / 1000.0 }
                            .groupBy { Math.floor(it / binSize) }
                            .toSortedMap()
                            .map {
                                ValueMap.ofPairs("x" to it.key, "count" to it.value.count())
                            }

                    DataPlot(key, adapter = Adapters.buildXYAdapter("x", "count"))
                            .configure {
                                "showLine" to true
                                "showSymbol" to false
                                "showErrors" to false
                                "connectionType" to "step"
                            }.fillData(histogram)

                } ui { plot ->
                    frame.add(plot)
                    progress.invalidate()
                } except {
                    progress.invalidate()
                }
            }
            plots.keys.filter { !data.containsKey(it) }.forEach { remove(it) }
        }
    }

    fun clear() {
        data.clear()
        plots.values.forEach {
            it.cancel()
        }
        plots.clear()
        invalidate()
    }

    /**
     * Remove the plot and cancel loading task if it is in progress.
     */
    fun remove(name: String) {
        frame.plots.remove(Name.ofSingle(name))
        plots[name]?.cancel()
        plots.remove(name)
        data.remove(name)
        progress.invalidate()
    }

    /**
     * Set frame content to the given map. All keys not in the map are removed.
     */
    fun setAll(map: Map<String, CachedPoint>) {
        plots.clear();
        //Remove obsolete keys
        data.keys.filter { !map.containsKey(it) }.forEach {
            remove(it)
        }
        this.addAll(map);
    }

}
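For context on the binning in invalidate() above: cr is the count-rate estimate returned by TimeAnalyzer.analyze, so the histogram covers roughly ten mean inter-event delays split into binNum = 200 bins, and binSize comes out in microseconds (matching the "us" x-axis label). A short worked sketch of that arithmetic, with a hypothetical count rate and assuming the raw delays from getEventsWithDelay are nanoseconds (hence the division by 1000.0):

    // Worked example of the TimeView bin width, using a hypothetical cr of 5000 events/s
    val cr = 5000.0                             // count-rate estimate from TimeAnalyzer.analyze
    val binNum = 200                            // bin count hard-coded in invalidate()
    val rangeSeconds = 1.0 / cr * 10            // ~10 mean inter-event delays = 0.002 s
    val binSize = rangeSeconds / binNum * 1e6   // bin width in microseconds = 10.0 us
    // each delay (converted to microseconds) is then bucketed by Math.floor(delay / binSize)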