Switch to ZGC. Centralize data storage

parent 78ff8d4f6e
commit d683170a73
@@ -82,7 +82,7 @@ object NumassDataUtils {
}

fun read(envelope: Envelope): NumassPoint =
if (envelope.meta.hasMeta("dpp_params") || envelope.meta.hasMeta("tqdc")) {
if (envelope.meta.hasMeta("dpp_params") || envelope.meta.hasMeta("channels") || envelope.meta.hasMeta("tqdc")) {
ProtoNumassPoint.fromEnvelope(envelope)
} else {
ClassicNumassPoint(envelope)
@@ -30,6 +30,7 @@ dependencies {
}

val addJvmArgs = listOf(
"-XX:+UseZGC",
"--add-exports=javafx.graphics/com.sun.glass.ui=ALL-UNNAMED",
"--add-opens=javafx.graphics/com.sun.javafx.css=ALL-UNNAMED",
"--add-opens=javafx.graphics/com.sun.javafx.scene=ALL-UNNAMED",
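This hunk adds `-XX:+UseZGC` to the shared JVM argument list, switching the viewer to the low-pause ZGC collector. The rest of the build script is not shown in this diff, so the following is only a minimal sketch of how such a list is typically wired into a Gradle Kotlin DSL build (assumes the standard `application` plugin; names other than `addJvmArgs` are hypothetical):

```kotlin
// build.gradle.kts (sketch) — pass the same args to `run` and to the packaged launcher
plugins {
    application
}

val addJvmArgs = listOf(
    "-XX:+UseZGC", // enable ZGC instead of the default collector
    "--add-exports=javafx.graphics/com.sun.glass.ui=ALL-UNNAMED",
    "--add-opens=javafx.graphics/com.sun.javafx.css=ALL-UNNAMED",
    "--add-opens=javafx.graphics/com.sun.javafx.scene=ALL-UNNAMED",
)

application {
    applicationDefaultJvmArgs = addJvmArgs // used by the generated start scripts
}

tasks.withType<JavaExec> {
    jvmArgs(addJvmArgs) // used by `gradle run`
}
```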
@@ -2,35 +2,30 @@ package inr.numass.viewer

import hep.dataforge.configure
import hep.dataforge.fx.dfIcon
import hep.dataforge.fx.except
import hep.dataforge.fx.plots.PlotContainer
import hep.dataforge.fx.runGoal
import hep.dataforge.fx.ui
import hep.dataforge.goals.Goal
import hep.dataforge.names.Name
import hep.dataforge.plots.PlotGroup
import hep.dataforge.plots.Plottable
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.tables.Adapters
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.withBinning
import inr.numass.data.api.NumassPoint
import javafx.beans.Observable
import javafx.beans.binding.DoubleBinding
import javafx.beans.property.SimpleBooleanProperty
import javafx.beans.property.SimpleObjectProperty
import javafx.collections.FXCollections
import javafx.collections.MapChangeListener
import javafx.collections.ObservableMap
import javafx.scene.control.CheckBox
import javafx.scene.control.ChoiceBox
import javafx.scene.image.ImageView
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.*
import kotlinx.coroutines.javafx.JavaFx
import tornadofx.*

class AmplitudeView : View(title = "Numass amplitude spectrum plot", icon = ImageView(dfIcon)) {

private val pointCache by inject<PointCache>()
private val dataController by inject<DataController>()
private val data get() = dataController.points

private val frame = JFreeChartFrame().configure {
"title" to "Detector response plot"
@@ -74,129 +69,96 @@ class AmplitudeView : View(title = "Numass amplitude spectrum plot", icon = Imag
addToSideBar(0, binningSelector, normalizeSwitch)
}

private val data: ObservableMap<String, NumassPoint> = FXCollections.observableHashMap()
private val plots: ObservableMap<String, Goal<Plottable>> = FXCollections.observableHashMap()
private val plotJobs: ObservableMap<String, Job> = FXCollections.observableHashMap()

val isEmpty = booleanBinding(data) { isEmpty() }

private val progress = object : DoubleBinding() {
init {
bind(plots)
}

override fun computeValue(): Double {
return plots.values.count { it.isDone }.toDouble() / data.size;
bind(plotJobs)
}

override fun computeValue(): Double = plotJobs.values.count { it.isCompleted }.toDouble() / plotJobs.size
}

init {
data.addListener { _: Observable ->
invalidate()
}
data.addListener(MapChangeListener { change ->
val key = change.key
if (change.wasAdded()) {
replotOne(key, change.valueAdded)
} else if (change.wasRemoved()) {
plotJobs[key]?.cancel()
plotJobs.remove(key)
frame.plots.remove(Name.ofSingle(key))
progress.invalidate()
}
})

binningProperty.onChange {
frame.plots.clear()
plots.clear()
invalidate()
replot()
}

normalizeProperty.onChange {
frame.plots.clear()
plots.clear()
invalidate()
replot()
}

container.progressProperty.bind(progress)
}

override val root = borderpane {
center = container.root
}
private fun replotOne(key: String, point: DataController.CachedPoint) {
plotJobs[key]?.cancel()
plotJobs[key] = app.context.launch {
val valueAxis = if (normalize) {
NumassAnalyzer.COUNT_RATE_KEY
} else {
NumassAnalyzer.COUNT_KEY
}
val adapter = Adapters.buildXYAdapter(NumassAnalyzer.CHANNEL_KEY, valueAxis)

/**
* Put or replace current plot with name `key`
*/
operator fun set(key: String, point: NumassPoint) {
data[key] = point
}
val channels = point.channelSpectra.await()

fun addAll(data: Map<String, NumassPoint>) {
this.data.putAll(data);
}

private fun invalidate() {
data.forEach { (key, point) ->
plots.getOrPut(key) {
runGoal<Plottable>(app.context, "loadAmplitudeSpectrum_$key", Dispatchers.IO) {
val valueAxis = if (normalize) {
NumassAnalyzer.COUNT_RATE_KEY
} else {
NumassAnalyzer.COUNT_KEY
}
val adapter = Adapters.buildXYAdapter(NumassAnalyzer.CHANNEL_KEY, valueAxis)

val channels = pointCache.getChannelSpectra(key, point)

return@runGoal if (channels.size == 1) {
DataPlot.plot(
key,
channels.values.first().withBinning(binning),
adapter
)
} else {
val group = PlotGroup.typed<DataPlot>(key)
channels.forEach { key, spectrum ->
val plot = DataPlot.plot(
key.toString(),
spectrum.withBinning(binning),
adapter
)
group.add(plot)
}
group
}
} ui { plot ->
frame.add(plot)
progress.invalidate()
} except {
val plot = if (channels.size == 1) {
DataPlot.plot(
key,
channels.values.first().withBinning(binning),
adapter
)
} else {
val group = PlotGroup.typed<DataPlot>(key)
channels.forEach { (key, spectrum) ->
val plot = DataPlot.plot(
key.toString(),
spectrum.withBinning(binning),
adapter
)
group.add(plot)
}
group
}
ensureActive()
withContext(Dispatchers.JavaFx) {
frame.add(plot)
}
}.apply {
invokeOnCompletion {
runLater {
progress.invalidate()
}
}
plots.keys.filter { !data.containsKey(it) }.forEach { remove(it) }
}
}

fun clear() {
data.clear()
plots.values.forEach {
it.cancel()
private fun replot() {
frame.plots.clear()
plotJobs.forEach { (_, job) -> job.cancel() }
plotJobs.clear()

data.forEach { (key, point) ->
replotOne(key, point)
}
plots.clear()
invalidate()
}

/**
* Remove the plot and cancel loading task if it is in progress.
*/
fun remove(name: String) {
frame.plots.remove(Name.ofSingle(name))
plots[name]?.cancel()
plots.remove(name)
data.remove(name)
progress.invalidate()
override val root = borderpane {
center = container.root
}

/**
* Set frame content to the given map. All keys not in the map are removed.
*/
fun setAll(map: Map<String, NumassPoint>) {
plots.clear();
//Remove obsolete keys
data.keys.filter { !map.containsKey(it) }.forEach {
remove(it)
}
this.addAll(map);
}

}
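The rewritten AmplitudeView replaces the old Goal-per-plot bookkeeping with one coroutine Job per plotted point: changing the binning or normalization cancels every running job and relaunches the plots from the cached data. The core cancel-and-relaunch pattern, stripped of the plotting specifics, can be sketched with plain kotlinx.coroutines (names here are illustrative, not from the project):

```kotlin
import kotlinx.coroutines.*

// Sketch of the Job-per-key pattern used by replotOne()/replot():
// starting a new computation for a key first cancels any stale one.
class PlotJobRegistry(private val scope: CoroutineScope) {
    private val jobs = mutableMapOf<String, Job>()

    fun replot(key: String, compute: suspend () -> Unit) {
        jobs[key]?.cancel()                      // drop the outdated computation for this key
        jobs[key] = scope.launch(Dispatchers.Default) {
            compute()                            // heavy work stays off the UI thread
        }
    }

    fun clear() {
        jobs.values.forEach { it.cancel() }      // mirror of replot(): cancel everything at once
        jobs.clear()
    }
}

fun main() = runBlocking {
    val registry = PlotJobRegistry(this)
    registry.replot("point_14000") { delay(100); println("plotted point_14000") }
    // rebinning: the second call cancels the first before it finishes
    registry.replot("point_14000") { delay(100); println("plotted point_14000 (rebinned)") }
    delay(300)
    registry.clear()
}
```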
@@ -0,0 +1,115 @@
package inr.numass.viewer

import hep.dataforge.meta.Meta
import hep.dataforge.storage.tables.TableLoader
import hep.dataforge.tables.Adapters
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.Table
import hep.dataforge.tables.TableFormatBuilder
import hep.dataforge.utils.Misc
import hep.dataforge.values.ValueMap
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import javafx.collections.FXCollections
import javafx.collections.ObservableMap
import kotlinx.coroutines.Deferred
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.async
import tornadofx.*
import kotlin.math.floor

class DataController : Controller() {
    private val context = app.context

    val analyzer = TimeAnalyzer()

    inner class CachedPoint(point: NumassPoint) {
        val length = point.length

        val voltage = point.voltage

        val meta = point.meta

        val channelSpectra: Deferred<Map<Int, Table>> = context.async(Dispatchers.IO) {
            point.channels.mapValues { (_, value) -> analyzer.getAmplitudeSpectrum(value) }
        }

        val spectrum: Deferred<Table> = context.async(Dispatchers.IO) {
            analyzer.getAmplitudeSpectrum(point)
        }

        val timeSpectrum: Deferred<Table> = context.async(Dispatchers.IO) {
            val cr = spectrum.await().sumOf {
                it.getValue(NumassAnalyzer.COUNT_KEY).int
            }.toDouble() / point.length.toMillis() * 1000

            val binNum = 200
            //inputMeta.getInt("binNum", 1000);
            val binSize = 1.0 / cr * 10 / binNum * 1e6
            //inputMeta.getDouble("binSize", 1.0 / cr * 10 / binNum * 1e6)

            val format = TableFormatBuilder()
                .addNumber("x", Adapters.X_VALUE_KEY)
                .addNumber(NumassAnalyzer.COUNT_KEY, Adapters.Y_VALUE_KEY)
                .build()

            ListTable.Builder(format).rows(
                analyzer.getEventsWithDelay(point, Meta.empty())
                    .map { it.second.toDouble() / 1000.0 }
                    .groupBy { floor(it / binSize) }
                    .toSortedMap()
                    .map {
                        ValueMap.ofPairs("x" to it.key, "count" to it.value.count())
                    }
            ).build()
        }
    }

    private val cache = Misc.getLRUCache<String, CachedPoint>(400)

    fun getCachedPoint(id: String, point: NumassPoint): CachedPoint = cache.getOrPut(id) { CachedPoint(point) }

    fun getSpectrumAsync(id: String, point: NumassPoint): Deferred<Table> =
        getCachedPoint(id, point).spectrum

    suspend fun getChannelSpectra(id: String, point: NumassPoint): Map<Int, Table> =
        getCachedPoint(id, point).channelSpectra.await()

    val sets: ObservableMap<String, NumassSet> = FXCollections.observableHashMap()
    val points: ObservableMap<String, CachedPoint> = FXCollections.observableHashMap()
    val sc: ObservableMap<String, TableLoader> = FXCollections.observableHashMap()

    fun clear() {
        cache.clear()
        sets.clear()
        points.clear()
        sc.clear()
    }

    fun addPoint(id: String, point: NumassPoint) {
        points[id] = getCachedPoint(id, point)
    }

    fun addSet(id: String, set: NumassSet) {
        sets[id] = set
    }

    fun addSc(id: String, set: TableLoader) {
        sc[id] = set
    }

    fun remove(id: String) {
        points.remove(id)
        sets.remove(id)
        sc.remove(id)
    }

    fun addAllPoints(points: Map<String, NumassPoint>) {
        TODO()
    }
}
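DataController is the "centralized data storage" of the commit: every per-point computation lives in a CachedPoint whose spectra are lazily computed Deferred values, held in a 400-entry LRU cache, so all views awaiting the same point share one computation. A hypothetical call site (using only the members declared above) would look like:

```kotlin
// Hypothetical usage sketch: the first call for a given id starts the async work,
// later calls (from any view) reuse the same CachedPoint and its Deferred results.
suspend fun showPoint(controller: DataController, id: String, point: NumassPoint) {
    val cached = controller.getCachedPoint(id, point)        // LRU-cached wrapper
    val channels = controller.getChannelSpectra(id, point)   // per-channel amplitude spectra
    cached.spectrum.await()                                   // combined amplitude spectrum
    println("Point $id: ${channels.size} channels, spectrum ready = ${cached.spectrum.isCompleted}")
}
```

For the time spectrum, the bin width follows the formula above: with, say, cr ≈ 1e5 events/s and binNum = 200, binSize = 1.0 / 1e5 * 10 / 200 * 1e6 = 0.5 in the same microsecond units as the delays being histogrammed.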
@@ -11,9 +11,7 @@ import hep.dataforge.plots.data.TimePlot
import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.tables.Adapters
import inr.numass.data.api.NumassSet
import javafx.collections.FXCollections
import javafx.collections.MapChangeListener
import javafx.collections.ObservableMap
import javafx.scene.image.ImageView
import kotlinx.coroutines.Dispatchers
import tornadofx.*
@@ -24,6 +22,9 @@ import tornadofx.*
*/
class HVView : View(title = "High voltage time plot", icon = ImageView(dfIcon)) {

private val dataController by inject<DataController>()
private val data get() = dataController.sets

private val frame = JFreeChartFrame().configure {
"xAxis.title" to "time"
"xAxis.type" to "time"
@@ -44,7 +45,6 @@ class HVView : View(title = "High voltage time plot", icon = ImageView(dfIcon))
center = PlotContainer(frame).root
}

private val data: ObservableMap<String, NumassSet> = FXCollections.observableHashMap()
val isEmpty = booleanBinding(data) { data.isEmpty() }

init {
@@ -71,18 +71,4 @@ class HVView : View(title = "High voltage time plot", icon = ImageView(dfIcon))
}
}

operator fun set(id: String, set: NumassSet) {
data[id] = set
}

fun remove(id: String) {
data.remove(id);
}

fun clear() {
data.clear()
}

}
@@ -29,7 +29,7 @@ import java.nio.file.Path

class MainView : View(title = "Numass viewer", icon = dfIconView) {

private val pointCache by inject<PointCache>()
private val dataController by inject<DataController>()

val storageView by inject<StorageView>()

@@ -42,7 +42,7 @@ class MainView : View(title = "Numass viewer", icon = dfIconView) {
private var path: Path by pathProperty

private val contentViewProperty = SimpleObjectProperty<UIComponent>()
var contentView: UIComponent? by contentViewProperty
private var contentView: UIComponent? by contentViewProperty

override val root = borderpane {
prefHeight = 600.0
@@ -137,11 +137,13 @@ class MainView : View(title = "Numass viewer", icon = dfIconView) {
}
}

private val spectrumView by inject<SpectrumView>()

private suspend fun load(path: Path) {
runLater {
contentView = null
}
pointCache.clear()
dataController.clear()
if (Files.isDirectory(path)) {
if (Files.exists(path.resolve(NumassDataLoader.META_FRAGMENT_NAME))) {
//build set view
@@ -150,10 +152,9 @@ class MainView : View(title = "Numass viewer", icon = dfIconView) {
message = "Building numass set..."
NumassDataLoader(app.context, null, path.fileName.toString(), path)
} ui { loader: NumassDataLoader ->
contentView = SpectrumView().apply {
clear()
set(loader.name, loader)
}
contentView = spectrumView
dataController.addSet(loader.name, loader)

} except {
alert(
type = Alert.AlertType.ERROR,
@@ -191,7 +192,7 @@ class MainView : View(title = "Numass viewer", icon = dfIconView) {
val point = NumassDataUtils.read(it)
runLater {
contentView = AmplitudeView().apply {
set(path.fileName.toString(), point)
dataController.addPoint(path.fileName.toString(), point)
}
}
}
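MainView.load() follows the `runGoal { … } ui { … } except { … }` shape used throughout the viewer: heavy loading runs on Dispatchers.IO, the result is handed to the JavaFX thread, and failures surface as an alert. The same shape expressed with plain coroutines, as a generic sketch (the helper name and signature are hypothetical, not the dataforge-fx API):

```kotlin
import javafx.application.Platform
import kotlinx.coroutines.*

// Sketch of the load-then-show shape of MainView.load(): IO work off the UI thread,
// UI mutation marshalled back to the JavaFX thread, errors reported instead of dropped.
fun <T> CoroutineScope.loadThenShow(
    load: suspend () -> T,
    show: (T) -> Unit,
    onError: (Throwable) -> Unit,
): Job = launch(Dispatchers.IO) {
    try {
        val result = load()
        Platform.runLater { show(result) }
    } catch (ex: CancellationException) {
        throw ex                          // never swallow cancellation
    } catch (ex: Exception) {
        Platform.runLater { onError(ex) }
    }
}
```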
@@ -1,64 +0,0 @@
/*
 * Copyright 2018 Alexander Nozik.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package inr.numass.viewer

import hep.dataforge.tables.Table
import hep.dataforge.utils.Misc
import inr.numass.data.analyzers.SimpleAnalyzer
import inr.numass.data.api.NumassPoint
import kotlinx.coroutines.Deferred
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.async
import tornadofx.*

private val analyzer = SimpleAnalyzer()

class PointCache : Controller() {
    private val context = app.context

    inner class CachedPoint(point: NumassPoint) {
        val length = point.length

        val voltage = point.voltage

        val meta = point.meta

        val channelSpectra: Deferred<Map<Int, Table>> = context.async(Dispatchers.IO) {
            point.channels.mapValues { (_, value) -> analyzer.getAmplitudeSpectrum(value) }
        }

        val spectrum: Deferred<Table> = context.async(Dispatchers.IO) {
            analyzer.getAmplitudeSpectrum(point)
        }
    }

    private val cache = Misc.getLRUCache<String, CachedPoint>(400)

    fun getCachedPoint(id: String, point: NumassPoint): CachedPoint = cache.getOrPut(id) { CachedPoint(point) }

    fun getSpectrumAsync(id: String, point: NumassPoint): Deferred<Table> =
        getCachedPoint(id, point).spectrum

    suspend fun getChannelSpectra(id: String, point: NumassPoint): Map<Int, Table> =
        getCachedPoint(id, point).channelSpectra.await()

    fun clear() {
        cache.clear()
    }
}
@@ -9,7 +9,7 @@ import tornadofx.*
import tornadofx.controlsfx.borders
import tornadofx.controlsfx.toGlyph

class PointInfoView(val cachedPoint: PointCache.CachedPoint) : MetaViewer(cachedPoint.meta) {
class PointInfoView(val cachedPoint: DataController.CachedPoint) : MetaViewer(cachedPoint.meta) {
val countProperty = SimpleIntegerProperty(0)
var count by countProperty

@@ -11,10 +11,7 @@ import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.storage.tables.TableLoader
import hep.dataforge.storage.tables.asTable
import hep.dataforge.tables.Adapters
import hep.dataforge.tables.Table
import javafx.collections.FXCollections
import javafx.collections.MapChangeListener
import javafx.collections.ObservableMap
import javafx.scene.image.ImageView
import kotlinx.coroutines.Dispatchers
import tornadofx.*
@@ -24,6 +21,9 @@ import tornadofx.*
*/
class SlowControlView : View(title = "Numass slow control view", icon = ImageView(dfIcon)) {

private val dataController by inject<DataController>()
private val data get() = dataController.sc

private val plot = JFreeChartFrame().configure {
"xAxis.type" to "time"
"yAxis.type" to "log"
@@ -33,7 +33,6 @@ class SlowControlView : View(title = "Numass slow control view", icon = ImageVie
center = PlotContainer(plot).root
}

val data: ObservableMap<String, TableLoader> = FXCollections.observableHashMap();
val isEmpty = booleanBinding(data) {
data.isEmpty()
}
@@ -45,7 +44,7 @@ class SlowControlView : View(title = "Numass slow control view", icon = ImageVie
}
if (change.wasAdded()) {
runGoal(app.context, "loadTable[${change.key}]", Dispatchers.IO) {
val plotData = getData(change.valueAdded)
val plotData = change.valueAdded.asTable().await()
val names = plotData.format.namesAsArray().filter { it != "timestamp" }

val group = PlotGroup(change.key)
@@ -68,21 +67,4 @@ class SlowControlView : View(title = "Numass slow control view", icon = ImageVie
}
}

private suspend fun getData(loader: TableLoader): Table {
//TODO add query
return loader.asTable().await()
}

operator fun set(id: String, loader: TableLoader) {
this.data[id] = loader
}

fun remove(id: String) {
this.data.remove(id)
}

fun clear() {
data.clear()
}

}
@@ -10,9 +10,7 @@ import hep.dataforge.tables.Adapters
import inr.numass.data.analyzers.countInWindow
import inr.numass.data.api.NumassSet
import javafx.beans.property.SimpleIntegerProperty
import javafx.collections.FXCollections
import javafx.collections.MapChangeListener
import javafx.collections.ObservableMap
import javafx.geometry.Insets
import javafx.geometry.Orientation
import javafx.scene.image.ImageView
@@ -33,7 +31,8 @@ import kotlin.math.sqrt
*/
class SpectrumView : View(title = "Numass spectrum plot", icon = ImageView(dfIcon)) {

private val pointCache by inject<PointCache>()
private val dataController by inject<DataController>()
private val data get() = dataController.sets

private val frame = JFreeChartFrame().configure {
"xAxis.title" to "U"
@@ -44,7 +43,6 @@ class SpectrumView : View(title = "Numass spectrum plot", icon = ImageView(dfIco
}
private val container = PlotContainer(frame)

private val loChannelProperty = SimpleIntegerProperty(500).apply {
addListener { _ -> updateView() }
}
@@ -55,9 +53,7 @@ class SpectrumView : View(title = "Numass spectrum plot", icon = ImageView(dfIco
}
private var upChannel by upChannelProperty

private val data: ObservableMap<String, NumassSet> = FXCollections.observableHashMap()
val isEmpty = booleanBinding(data) { data.isEmpty() }
private val isEmpty = booleanBinding(data) { data.isEmpty() }

override val root = borderpane {
top {
@@ -126,7 +122,7 @@ class SpectrumView : View(title = "Numass spectrum plot", icon = ImageView(dfIco

app.context.launch {
val points = set.points.map {
pointCache.getCachedPoint("$name/${it.voltage}[${it.index}]", it)
dataController.getCachedPoint("$name/${it.voltage}[${it.index}]", it)
}.map { cachedPoint ->
val count = cachedPoint.spectrum.await().countInWindow(loChannel.toShort(), upChannel.toShort())
val seconds = cachedPoint.length.toMillis() / 1000.0
@@ -146,16 +142,4 @@ class SpectrumView : View(title = "Numass spectrum plot", icon = ImageView(dfIco
}
}
}

operator fun set(key: String, value: NumassSet) {
data[key] = value
}

fun remove(key: String) {
data.remove(key)
}

fun clear() {
data.clear()
}
}
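Each point of the integral spectrum is reduced to a count rate: events inside the [loChannel, upChannel) amplitude window divided by the acquisition time of the point, with the usual Poisson error estimate. A small standalone sketch of that reduction (the helper and data class are hypothetical, mirroring the values used in SpectrumView):

```kotlin
import kotlin.math.sqrt

// Hypothetical helper mirroring the per-point reduction in SpectrumView:
// window count -> count rate with Poisson error sqrt(N)/t.
data class RatePoint(val u: Double, val rate: Double, val rateErr: Double)

fun ratePoint(voltage: Double, countInWindow: Long, lengthMillis: Long): RatePoint {
    val seconds = lengthMillis / 1000.0
    val rate = countInWindow / seconds
    val err = sqrt(countInWindow.toDouble()) / seconds
    return RatePoint(voltage, rate, err)
}

fun main() {
    // e.g. 12_000 counts in a 30 s point at U = 14 kV -> 400 ± 3.65 counts/s
    println(ratePoint(14000.0, 12_000, 30_000))
}
```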
@@ -25,7 +25,7 @@ class StorageView : View(title = "Numass storage", icon = dfIconView) {
val storageProperty = SimpleObjectProperty<Storage>()
val storage by storageProperty

private val pointCache by inject<PointCache>()
private val dataController by inject<DataController>()

private val ampView: AmplitudeView by inject()
private val timeView: TimeView by inject()
@@ -35,14 +35,8 @@ class StorageView : View(title = "Numass storage", icon = dfIconView) {

// private var watcher: WatchService? = null

fun clear() {
//watcher?.close()
ampView.clear()
timeView.clear()
spectrumView.clear()
hvView.clear()
scView.clear()
dataController.clear()
}

private inner class Container(val id: String, val content: Any) {
@@ -51,7 +45,7 @@ class StorageView : View(title = "Numass storage", icon = dfIconView) {

val infoView: UIComponent by lazy {
when (content) {
is NumassPoint -> PointInfoView(pointCache.getCachedPoint(id, content))
is NumassPoint -> PointInfoView(dataController.getCachedPoint(id, content))
is Metoid -> MetaViewer(content.meta, title = "Meta view: $id")
else -> MetaViewer(Meta.empty(), title = "Meta view: $id")
}
@@ -64,27 +58,23 @@ class StorageView : View(title = "Numass storage", icon = dfIconView) {
when (content) {
is NumassPoint -> {
if (selected) {
ampView[id] = content
timeView[id] = content
dataController.addPoint(id, content)
} else {
ampView.remove(id)
timeView.remove(id)
dataController.remove(id)
}
}
is NumassSet -> {
if (selected) {
spectrumView[id] = content
hvView[id] = content
dataController.addSet(id, content)
} else {
spectrumView.remove(id)
hvView.remove(id)
dataController.remove(id)
}
}
is TableLoader -> {
if (selected) {
scView[id] = content
dataController.addSc(id, content)
} else {
scView.remove(id)
dataController.remove(id)
}
}
}
@@ -2,30 +2,28 @@ package inr.numass.viewer

import hep.dataforge.configure
import hep.dataforge.fx.dfIcon
import hep.dataforge.fx.except
import hep.dataforge.fx.plots.PlotContainer
import hep.dataforge.fx.runGoal
import hep.dataforge.fx.ui
import hep.dataforge.goals.Goal
import hep.dataforge.meta.Meta
import hep.dataforge.names.Name
import hep.dataforge.plots.Plottable
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.tables.Adapters
import hep.dataforge.values.ValueMap
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassPoint
import javafx.beans.Observable
import hep.dataforge.tables.Table
import javafx.beans.binding.DoubleBinding
import javafx.collections.FXCollections
import javafx.collections.MapChangeListener
import javafx.collections.ObservableMap
import javafx.scene.image.ImageView
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.Job
import kotlinx.coroutines.javafx.JavaFx
import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext
import tornadofx.*

class TimeView : View(title = "Numass time spectrum plot", icon = ImageView(dfIcon)) {

private val dataController by inject<DataController>()

private val frame = JFreeChartFrame().configure {
"title" to "Time plot"
node("xAxis") {
@@ -47,128 +45,75 @@ class TimeView : View(title = "Numass time spectrum plot", icon = ImageView(dfIc
}.setType<DataPlot>()
}

// val stepProperty = SimpleDoubleProperty()
// var step by stepProperty
//
// private val container = PlotContainer(frame).apply {
// val binningSelector: ChoiceBox<Int> = ChoiceBox(FXCollections.observableArrayList(1, 5, 10, 20, 50)).apply {
// minWidth = 0.0
// selectionModel.selectLast()
// stepProperty.bind(this.selectionModel.selectedItemProperty())
// }
// addToSideBar(0, binningSelector)
// }

private val container = PlotContainer(frame)

private val data: ObservableMap<String, NumassPoint> = FXCollections.observableHashMap()
private val plots: ObservableMap<String, Goal<Plottable>> = FXCollections.observableHashMap()
//private val data: ObservableMap<String, NumassPoint> = FXCollections.observableHashMap()
private val data get() = dataController.points
private val plotJobs: ObservableMap<String, Job> = FXCollections.observableHashMap()

val isEmpty = booleanBinding(data) { isEmpty() }

private val progress = object : DoubleBinding() {
init {
bind(plots)
bind(plotJobs)
}

override fun computeValue(): Double {
return plots.values.count { it.isDone }.toDouble() / data.size;
}
override fun computeValue(): Double = plotJobs.values.count { it.isCompleted }.toDouble() / data.size

}

init {
data.addListener { _: Observable ->
invalidate()
}
data.addListener(MapChangeListener { change ->
val key = change.key
if (change.wasAdded()) {
replotOne(key, change.valueAdded)
} else if (change.wasRemoved()) {
plotJobs[key]?.cancel()
plotJobs.remove(key)
frame.plots.remove(Name.ofSingle(key))
progress.invalidate()
}
})

}

override val root = borderpane {
center = container.root
}

/**
* Put or replace current plot with name `key`
*/
operator fun set(key: String, point: NumassPoint) {
data[key] = point
}
private fun replotOne(key: String, point: DataController.CachedPoint) {
plotJobs[key]?.cancel()
plotJobs[key] = app.context.launch {
try {
val histogram: Table = point.timeSpectrum.await()

fun addAll(data: Map<String, NumassPoint>) {
this.data.putAll(data);
}

private val analyzer = TimeAnalyzer();

private fun invalidate() {
data.forEach { key, point ->
plots.getOrPut(key) {
runGoal<Plottable>(app.context, "loadAmplitudeSpectrum_$key", Dispatchers.IO) {

val initialEstimate = analyzer.analyze(point)
val cr = initialEstimate.getDouble("cr")

val binNum = 200//inputMeta.getInt("binNum", 1000);
val binSize = 1.0 / cr * 10 / binNum * 1e6//inputMeta.getDouble("binSize", 1.0 / cr * 10 / binNum * 1e6)

val histogram = analyzer.getEventsWithDelay(point, Meta.empty())
.map { it.second.toDouble() / 1000.0 }
.groupBy { Math.floor(it / binSize) }
.toSortedMap()
.map {
ValueMap.ofPairs("x" to it.key, "count" to it.value.count())
}

DataPlot(key, adapter = Adapters.buildXYAdapter("x", "count"))
.configure {
"showLine" to true
"showSymbol" to false
"showErrors" to false
"connectionType" to "step"
}.fillData(histogram)

} ui { plot ->
val plot = DataPlot(key, adapter = Adapters.buildXYAdapter("x", "count"))
.configure {
"showLine" to true
"showSymbol" to false
"showErrors" to false
"connectionType" to "step"
}.fillData(histogram)
withContext(Dispatchers.JavaFx) {
frame.add(plot)
progress.invalidate()
} except {
}
} finally {
withContext(Dispatchers.JavaFx) {
progress.invalidate()
}
}
plots.keys.filter { !data.containsKey(it) }.forEach { remove(it) }
}
}

fun clear() {
data.clear()
plots.values.forEach {
it.cancel()
}
plots.clear()
invalidate()
}

/**
* Remove the plot and cancel loading task if it is in progress.
*/
fun remove(name: String) {
frame.plots.remove(Name.ofSingle(name))
plots[name]?.cancel()
plots.remove(name)
data.remove(name)
progress.invalidate()
}
private fun replot() {
frame.plots.clear()
plotJobs.forEach { (_, job) -> job.cancel() }
plotJobs.clear()

/**
* Set frame content to the given map. All keys not in the map are removed.
*/
fun setAll(map: Map<String, NumassPoint>) {
plots.clear();
//Remove obsolete keys
data.keys.filter { !map.containsKey(it) }.forEach {
remove(it)
data.forEach { (key, point) ->
replotOne(key, point)
}
this.addAll(map);
}

}
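Both AmplitudeView and TimeView now report progress through a JavaFX DoubleBinding that recomputes the fraction of completed plot jobs whenever the observable job map changes. A minimal self-contained sketch of that pattern (the guard against an empty map is an addition for illustration, since dividing by a zero-sized map would yield NaN):

```kotlin
import javafx.beans.binding.DoubleBinding
import javafx.collections.ObservableMap
import kotlinx.coroutines.Job

// Sketch of the progress binding used in the views: bind to the observable job map
// and report the completed-jobs fraction each time the map is invalidated.
fun progressBinding(plotJobs: ObservableMap<String, Job>): DoubleBinding =
    object : DoubleBinding() {
        init {
            bind(plotJobs) // re-evaluate whenever jobs are added, replaced, or removed
        }

        override fun computeValue(): Double =
            if (plotJobs.isEmpty()) 1.0 // avoid 0/0 when nothing is being plotted
            else plotJobs.values.count { it.isCompleted }.toDouble() / plotJobs.size
    }
```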
@@ -1,68 +0,0 @@
package inr.numass.viewer.test

import hep.dataforge.context.Global
import hep.dataforge.fx.dfIcon
import hep.dataforge.nullable
import hep.dataforge.tables.Table
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassDirectory
import inr.numass.viewer.AmplitudeView
import inr.numass.viewer.HVView
import inr.numass.viewer.SpectrumView
import javafx.application.Application
import javafx.scene.image.ImageView
import kotlinx.coroutines.launch
import tornadofx.*
import java.io.File
import java.util.concurrent.ConcurrentHashMap

class ViewerComponentsTestApp : App(ViewerComponentsTest::class)

class ViewerComponentsTest : View(title = "Numass viewer test", icon = ImageView(dfIcon)) {

    //val rootDir = File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")

    //val set: NumassSet = NumassStorageFactory.buildLocal(rootDir).provide("loader::set_8", NumassSet::class.java).orElseThrow { RuntimeException("err") }

    private val cache: MutableMap<NumassPoint, Table> = ConcurrentHashMap()
    val context = Global

    val amp: AmplitudeView by inject(params = mapOf("cache" to cache))//= AmplitudeView(immutable = immutable)
    val sp: SpectrumView by inject(params = mapOf("cache" to cache))
    val hv: HVView by inject()

    override val root = borderpane {
        top {
            button("Click me!") {
                action {
                    context.launch {
                        val set: NumassSet = NumassDirectory.INSTANCE.read(Global, File("D:\\Work\\Numass\\data\\2017_05\\Fill_2").toPath())
                            ?.provide("loader::set_2", NumassSet::class.java).nullable
                            ?: kotlin.error("Error")
                        update(set)
                    }
                }
            }
        }
        center {
            tabpane {
                tab("amplitude", amp.root)
                tab("spectrum", sp.root)
                tab("hv", hv.root)
            }
        }
    }

    fun update(set: NumassSet) {
        amp.setAll(set.points.filter { it.voltage != 16000.0 }.associateBy { "point_${it.voltage}" })
        sp["test"] = set
        hv[set.name] = set
    }
}

fun main(args: Array<String>) {
    Application.launch(ViewerComponentsTestApp::class.java, *args)
}