Update numass viewer point caching.
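Replace the viewer's global PointCache with per-instance CachedPoint/CachedSet wrappers: NumassSet.points is now a List<NumassPoint> instead of a Stream, hvData becomes a lazily started Deferred<Table?>, and amplitude spectra are computed at most once per point via lazy coroutines.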

Alexander Nozik 2018-04-24 15:20:50 +03:00
parent 09a651b556
commit 42a69d1334
19 changed files with 188 additions and 156 deletions

View File

@ -25,7 +25,7 @@ allprojects {
repositories {
jcenter()
mavenCentral()
maven { url = "https://jitpack.io" }
//maven { url = "https://jitpack.io" }
maven { url = "http://dl.bintray.com/kotlin/ktor" }
maven { url = "https://dl.bintray.com/kotlin/kotlinx" }
}

View File

@ -3,15 +3,17 @@ package inr.numass.data
import hep.dataforge.io.envelopes.Envelope
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.tables.Table
import inr.numass.data.api.*
import inr.numass.data.storage.ProtoBlock
import kotlinx.coroutines.experimental.Deferred
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.io.InputStream
import java.util.stream.Collectors
import java.util.stream.Stream
import java.util.zip.Inflater
import kotlin.streams.asSequence
import kotlin.streams.toList
/**
@ -20,10 +22,14 @@ import kotlin.streams.asSequence
object NumassDataUtils {
fun join(setName: String, sets: Collection<NumassSet>): NumassSet {
return object : NumassSet {
override val points: Stream<out NumassPoint> by lazy {
val points = sets.stream().flatMap<NumassPoint> { it.points }
override val hvData: Deferred<Table?>
get() = TODO("Join hv tables")
override val points: List<NumassPoint> by lazy {
val points = sets.stream().flatMap<NumassPoint> { it.points.stream() }
.collect(Collectors.groupingBy<NumassPoint, Double> { it.voltage })
points.entries.stream().map { entry -> SimpleNumassPoint(entry.value, entry.key) }
.toList()
}
override val meta: Meta by lazy {

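A minimal usage sketch for the join helper (the two input sets and the result name are illustrative; note that in this commit the joined set's hvData is still a TODO):

import inr.numass.data.NumassDataUtils
import inr.numass.data.api.NumassSet

// points with equal voltage from both sets are grouped into one SimpleNumassPoint
fun merge(a: NumassSet, b: NumassSet): NumassSet =
        NumassDataUtils.join("joined", listOf(a, b))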
View File

@ -95,11 +95,11 @@ interface NumassAnalyzer {
* @param config
* @return
*/
fun getLength(block: NumassBlock, config: Meta): Long {
fun getLength(block: NumassBlock, config: Meta = Meta.empty()): Long {
return analyze(block, config).getValue(LENGTH_KEY).getNumber().toLong()
}
fun getAmplitudeSpectrum(block: NumassBlock, config: Meta): Table {
fun getAmplitudeSpectrum(block: NumassBlock, config: Meta = Meta.empty()): Table {
val seconds = block.length.toMillis().toDouble() / 1000.0
return getAmplitudeSpectrum(getEvents(block, config).asSequence(), seconds, config)
}

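With the new default arguments the analysis configuration can be omitted; a small sketch (the SimpleAnalyzer instance and the block argument are illustrative):

import inr.numass.data.analyzers.SimpleAnalyzer
import inr.numass.data.api.NumassBlock

fun quickLook(block: NumassBlock) {
    val analyzer = SimpleAnalyzer()
    // config now defaults to Meta.empty(), so no explicit configuration is needed
    val length = analyzer.getLength(block)
    val spectrum = analyzer.getAmplitudeSpectrum(block)
    println("length = $length, spectrum: $spectrum")
}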
View File

@ -2,6 +2,8 @@ package inr.numass.data.api
import hep.dataforge.io.envelopes.Envelope
import hep.dataforge.meta.Metoid
import hep.dataforge.providers.Provider
import hep.dataforge.providers.Provides
import inr.numass.data.channel
import inr.numass.data.storage.ClassicNumassPoint
import inr.numass.data.storage.ProtoNumassPoint
@ -12,11 +14,40 @@ import java.util.stream.Stream
/**
* Created by darksnake on 06-Jul-17.
*/
interface NumassPoint : Metoid, NumassBlock {
interface NumassPoint : Metoid, NumassBlock, Provider {
val blocks: List<NumassBlock>
/**
* Provides the block with the given index (starting from 0)
*/
@Provides(NUMASS_BLOCK_TARGET)
operator fun get(index: Int): NumassBlock? {
return blocks[index]
}
/**
* Provides all blocks in the given channel, merged into a single block
*/
@Provides(NUMASS_CHANNEL_TARGET)
fun channel(index: Int): NumassBlock? {
return channels[index]
}
/**
* Distinct map of channel number to corresponding grouping block
*/
val channels: Map<Int, NumassBlock>
get() = blocks.toList().groupBy { it.channel }.mapValues { entry ->
if (entry.value.size == 1) {
entry.value.first()
} else {
MetaBlock(entry.value)
}
}
/**
* Get the voltage setting for the point
*
@ -77,6 +108,8 @@ interface NumassPoint : Metoid, NumassBlock {
get() = blocks.stream().flatMap { it.frames }
companion object {
const val NUMASS_BLOCK_TARGET = "block"
const val NUMASS_CHANNEL_TARGET = "channel"
const val START_TIME_KEY = "start"
const val LENGTH_KEY = "length"

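A sketch of how the new index- and channel-based access might be used (the point value and the zero index are illustrative):

import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassPoint

fun describe(point: NumassPoint) {
    // blocks are addressed by their position inside the point
    val first: NumassBlock? = point[0]
    println("first block present: ${first != null}")
    // channels groups blocks by channel number, merging multi-block channels into a MetaBlock
    point.channels.forEach { (channel, block) ->
        println("channel $channel: length ${block.length}")
    }
}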
View File

@ -6,15 +6,15 @@
package inr.numass.data.api
import hep.dataforge.Named
import hep.dataforge.kodex.toList
import hep.dataforge.kodex.optional
import hep.dataforge.meta.Metoid
import hep.dataforge.providers.Provider
import hep.dataforge.providers.Provides
import hep.dataforge.providers.ProvidesNames
import hep.dataforge.tables.Table
import kotlinx.coroutines.experimental.Deferred
import java.time.Instant
import java.util.*
import java.util.stream.Stream
/**
* A single set of numass points, previously called a file.
@ -23,7 +23,7 @@ import java.util.stream.Stream
*/
interface NumassSet : Named, Metoid, Iterable<NumassPoint>, Provider {
val points: Stream<out NumassPoint>
val points: List<NumassPoint>
/**
* Get the first point if it exists. Throw runtime exception otherwise.
@ -31,7 +31,7 @@ interface NumassSet : Named, Metoid, Iterable<NumassPoint>, Provider {
* @return
*/
val firstPoint: NumassPoint
get() = points.findFirst().orElseThrow { RuntimeException("The set is empty") }
get() = points.firstOrNull() ?: throw RuntimeException("The set is empty")
/**
* Get the starting time from meta or from first point
@ -39,14 +39,9 @@ interface NumassSet : Named, Metoid, Iterable<NumassPoint>, Provider {
* @return
*/
val startTime: Instant
get() = meta.optValue(NumassPoint.START_TIME_KEY).map<Instant>{ it.getTime() }.orElseGet { firstPoint.startTime }
get() = meta.optValue(NumassPoint.START_TIME_KEY).map<Instant> { it.time }.orElseGet { firstPoint.startTime }
val hvData: Optional<Table>
get() = Optional.empty()
// default String getDescription() {
// return getMeta().getString(DESCRIPTION_KEY, "");
// }
val hvData: Deferred<Table?>
override fun iterator(): Iterator<NumassPoint> {
return points.iterator()
@ -59,7 +54,7 @@ interface NumassSet : Named, Metoid, Iterable<NumassPoint>, Provider {
* @return
*/
fun optPoint(voltage: Double): Optional<out NumassPoint> {
return points.filter { it -> it.voltage == voltage }.findFirst()
return points.firstOrNull { it -> it.voltage == voltage }.optional
}
/**
@ -82,7 +77,7 @@ interface NumassSet : Named, Metoid, Iterable<NumassPoint>, Provider {
}
@ProvidesNames(NUMASS_POINT_PROVIDER_KEY)
fun listPoints(): Stream<String> {
fun listPoints(): List<String> {
return points.map { it -> java.lang.Double.toString(it.voltage) }
}

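Consumers now work with plain lists and await the HV table; a hedged sketch (the runBlocking wrapper is only for illustration):

import inr.numass.data.api.NumassSet
import kotlinx.coroutines.experimental.runBlocking

fun summarize(set: NumassSet) = runBlocking {
    // points is now an eagerly available List rather than a Stream
    val voltages = set.points.map { it.voltage }
    // hvData is a lazily started Deferred and may resolve to null when no HV data exists
    val hv = set.hvData.await()
    println("${set.name}: ${voltages.size} points, HV table loaded: ${hv != null}")
}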
View File

@ -2,8 +2,11 @@ package inr.numass.data.legacy
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.tables.Table
import inr.numass.data.api.*
import inr.numass.data.api.NumassPoint.Companion.HV_KEY
import kotlinx.coroutines.experimental.CompletableDeferred
import kotlinx.coroutines.experimental.Deferred
import org.apache.commons.io.FilenameUtils
import java.io.IOException
import java.nio.ByteBuffer
@ -17,20 +20,21 @@ import java.time.LocalDateTime
import java.time.ZoneOffset
import java.time.format.DateTimeFormatter
import java.util.*
import java.util.stream.Stream
/**
* Created by darksnake on 08.07.2017.
*/
class NumassDatFile @Throws(IOException::class)
constructor(override val name: String, private val path: Path, meta: Meta) : NumassSet {
override val hvData: Deferred<Table?> = CompletableDeferred(null)
override val meta: Meta
private val hVdev: Double
get() = meta.getDouble("dat.hvDev", 2.468555393226049)
//TODO check point start
override val points: Stream<NumassPoint>
override val points: List<NumassPoint>
get() = try {
Files.newByteChannel(path, READ).use { channel ->
var lab: Int
@ -39,7 +43,7 @@ constructor(override val name: String, private val path: Path, meta: Meta) : Num
points.add(readPoint(channel))
lab = readBlock(channel, 1).get().toInt()
} while (lab != 0xff)
return points.stream()
return points
}
} catch (ex: IOException) {
throw RuntimeException(ex)

View File

@ -19,6 +19,7 @@ import hep.dataforge.context.Context
import hep.dataforge.exceptions.StorageException
import hep.dataforge.io.ColumnedDataReader
import hep.dataforge.io.envelopes.Envelope
import hep.dataforge.kodex.toList
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.providers.Provider
@ -31,6 +32,9 @@ import hep.dataforge.tables.Table
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.legacy.NumassFileEnvelope
import kotlinx.coroutines.experimental.CoroutineStart
import kotlinx.coroutines.experimental.Deferred
import kotlinx.coroutines.experimental.async
import org.slf4j.LoggerFactory
import java.io.IOException
import java.nio.file.Files
@ -56,8 +60,8 @@ class NumassDataLoader(
override val meta: Meta = items[META_FRAGMENT_NAME]?.get()?.meta ?: Meta.empty()
private val hvEnvelope: Optional<Envelope>
get() = Optional.ofNullable(items[HV_FRAGMENT_NAME]).map { it.get() }
private val hvEnvelope: Envelope?
get() = items[HV_FRAGMENT_NAME]?.get()
private val pointEnvelopes: Stream<Envelope>
get() = items.entries.stream()
@ -77,22 +81,24 @@ class NumassDataLoader(
return items.keys
}
override val hvData: Optional<Table>
get() = hvEnvelope.map { hvEnvelope ->
try {
ColumnedDataReader(hvEnvelope.data.stream, "timestamp", "block", "value").toTable()
} catch (ex: IOException) {
LoggerFactory.getLogger(javaClass).error("Failed to load HV data from file", ex)
null
override val hvData: Deferred<Table?>
get() = async(start = CoroutineStart.LAZY) {
hvEnvelope?.let { hvEnvelope ->
try {
ColumnedDataReader(hvEnvelope.data.stream, "timestamp", "block", "value").toTable()
} catch (ex: IOException) {
LoggerFactory.getLogger(javaClass).error("Failed to load HV data from file", ex)
null
}
}
}
override val points: Stream<NumassPoint>
override val points: List<NumassPoint>
get() {
return pointEnvelopes.map {
NumassPoint.read(it)
}
}.toList()
}
override fun pull(fragmentName: String): Envelope {
@ -203,5 +209,6 @@ class NumassDataLoader(
*/
val HV_FRAGMENT_NAME = "voltage"
}
}

View File

@ -6,17 +6,13 @@ import hep.dataforge.io.envelopes.Envelope
import hep.dataforge.kodex.toList
import hep.dataforge.meta.Meta
import inr.numass.data.NumassProto
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassEvent
import inr.numass.data.api.NumassFrame
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.*
import inr.numass.data.dataStream
import inr.numass.data.legacy.NumassFileEnvelope
import org.slf4j.LoggerFactory
import java.nio.file.Path
import java.time.Duration
import java.time.Instant
import java.util.*
import java.util.stream.IntStream
import java.util.stream.Stream
@ -24,18 +20,23 @@ import java.util.stream.Stream
* Protobuf based numass point
* Created by darksnake on 09.07.2017.
*/
class ProtoNumassPoint(override val meta: Meta, protoBuilder: () -> NumassProto.Point) : NumassPoint {
class ProtoNumassPoint(override val meta: Meta, val protoBuilder: () -> NumassProto.Point) : NumassPoint {
val proto: NumassProto.Point by lazy(protoBuilder)
val proto: NumassProto.Point
get() = protoBuilder()
override val blocks: List<NumassBlock> by lazy {
proto.channelsList.stream()
override val blocks: List<NumassBlock>
get() = proto.channelsList
.flatMap { channel ->
channel.blocksList.stream()
channel.blocksList
.map { block -> ProtoBlock(channel.id.toInt(), block, this) }
.sorted(Comparator.comparing<ProtoBlock, Instant> { it.startTime })
}.toList()
}
.sortedBy { it.startTime }
}
override val channels: Map<Int, NumassBlock>
get() = proto.channelsList.groupBy { it.id.toInt() }.mapValues { entry ->
MetaBlock(entry.value.flatMap { it.blocksList }.map { ProtoBlock(entry.key, it, this) })
}
override val voltage: Double = meta.getDouble("external_meta.HV1_value", super.voltage)
@ -62,7 +63,7 @@ class ProtoNumassPoint(override val meta: Meta, protoBuilder: () -> NumassProto.
}
fun fromEnvelope(envelope: Envelope): ProtoNumassPoint {
return ProtoNumassPoint(envelope.meta){
return ProtoNumassPoint(envelope.meta) {
envelope.dataStream.use {
NumassProto.Point.parseFrom(it)
}

View File

@ -12,10 +12,9 @@ message Point {
uint64 time = 1; // Time in nanos from the beginning of the block
bytes data = 2; // Frame data as an array of int16 measured in arbitrary channels
}
// Processed events. They contain only the time and amplitude of each signal.
// To save space during serialization, the amplitudes and times are stored in
// separate arrays. An amplitude and a time that share the same index
// correspond to a single event
// Event block obtained directly from the device or from frame analysis
// In order to save space, times and amplitudes are in separate arrays.
// Amplitude and time with the same index correspond to the same event
message Events {
repeated uint64 times = 1; // Array of time in nanos from the beginning of the block
repeated uint64 amplitudes = 2; // Array of amplitudes of events in channels
@ -24,8 +23,8 @@ message Point {
uint64 time = 1; // Block start in epoch nanos
repeated Frame frames = 2; // Frames array
Events events = 3; // Events array
uint64 length = 4; // block size in nanos
uint64 bin_size = 5; // tick size in nanos
uint64 length = 4; // block size in nanos. If missing, take from meta.
uint64 bin_size = 5; // tick size in nanos. Obsolete, to be removed
}
uint64 id = 1; // The number of measuring channel
repeated Block blocks = 2; // Blocks

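For reference, re-pairing the parallel arrays into events could look like the following sketch (accessor names follow standard protobuf-generated Java conventions and are assumptions, not part of this commit):

import inr.numass.data.NumassProto

// times[i] and amplitudes[i] with the same index describe one event
fun eventPairs(events: NumassProto.Point.Events): List<Pair<Long, Long>> =
        (0 until events.timesCount).map { i ->
            events.getTimes(i) to events.getAmplitudes(i)
        }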
View File

@ -6,7 +6,6 @@ import hep.dataforge.fx.runGoal
import hep.dataforge.fx.ui
import hep.dataforge.goals.Goal
import hep.dataforge.kodex.configure
import hep.dataforge.kodex.toList
import hep.dataforge.plots.PlotFrame
import hep.dataforge.plots.PlotGroup
import hep.dataforge.plots.Plottable
@ -15,10 +14,6 @@ import hep.dataforge.plots.jfreechart.JFreeChartFrame
import hep.dataforge.tables.Adapters
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.withBinning
import inr.numass.data.api.MetaBlock
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassPoint
import inr.numass.data.channel
import javafx.beans.Observable
import javafx.beans.binding.DoubleBinding
import javafx.beans.property.SimpleBooleanProperty
@ -74,7 +69,7 @@ class AmplitudeView : View(title = "Numass amplitude spectrum plot", icon = Imag
addToSideBar(0, binningSelector, normalizeSwitch)
}
private val data: ObservableMap<String, NumassPoint> = FXCollections.observableHashMap()
private val data: ObservableMap<String, CachedPoint> = FXCollections.observableHashMap()
private val plots: ObservableMap<String, Goal<Plottable>> = FXCollections.observableHashMap()
val isEmpty = booleanBinding(data) { data.isEmpty() }
@ -112,27 +107,14 @@ class AmplitudeView : View(title = "Numass amplitude spectrum plot", icon = Imag
/**
* Put or replace current plot with name `key`
*/
fun add(key: String, point: NumassPoint) {
fun add(key: String, point: CachedPoint) {
data[key] = point
}
fun addAll(data: Map<String, NumassPoint>) {
fun addAll(data: Map<String, CachedPoint>) {
this.data.putAll(data);
}
/**
* Distinct map of channel number to corresponding grouping block
*/
private fun NumassPoint.getChannels(): Map<Int, NumassBlock> {
return blocks.toList().groupBy { it.channel }.mapValues { entry ->
if (entry.value.size == 1) {
entry.value.first()
} else {
MetaBlock(entry.value)
}
}
}
private fun invalidate() {
data.forEach { key, point ->
plots.computeIfAbsent(key) {
@ -144,21 +126,21 @@ class AmplitudeView : View(title = "Numass amplitude spectrum plot", icon = Imag
}
val adapter = Adapters.buildXYAdapter(NumassAnalyzer.CHANNEL_KEY, valueAxis)
val channels = point.getChannels()
val channels = point.channelSpectra.await()
return@runGoal if (channels.size == 1) {
DataPlot.plot(
key,
adapter,
PointCache[point].withBinning(binning)
channels.values.first().withBinning(binning)
)
} else {
val group = PlotGroup.typed<DataPlot>(key)
channels.forEach { key, block ->
channels.forEach { key, spectrum ->
val plot = DataPlot.plot(
key.toString(),
adapter,
PointCache[block].withBinning(binning)
spectrum.withBinning(binning)
)
group.add(plot)
}
@ -192,7 +174,7 @@ class AmplitudeView : View(title = "Numass amplitude spectrum plot", icon = Imag
/**
* Set frame content to the given map. All keys not in the map are removed.
*/
fun setAll(map: Map<String, NumassPoint>) {
fun setAll(map: Map<String, CachedPoint>) {
plots.clear();
//Remove obsolete keys
data.keys.filter { !map.containsKey(it) }.forEach {

View File

@ -0,0 +1,51 @@
/*
* Copyright 2018 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package inr.numass.viewer
import hep.dataforge.meta.Meta
import hep.dataforge.tables.Table
import inr.numass.data.analyzers.SimpleAnalyzer
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import kotlinx.coroutines.experimental.CoroutineStart
import kotlinx.coroutines.experimental.Deferred
import kotlinx.coroutines.experimental.async
private val analyzer = SimpleAnalyzer()
class CachedPoint(point: NumassPoint) : NumassPoint by point {
private val lazyBlocks: () -> List<NumassBlock> = { point.blocks }
override val blocks: List<NumassBlock>
get() = lazyBlocks()
override val meta: Meta = point.meta
val channelSpectra: Deferred<Map<Int, Table>> = async(start = CoroutineStart.LAZY) {
return@async point.channels.mapValues { (_, value) -> analyzer.getAmplitudeSpectrum(value) }
}
val spectrum: Deferred<Table> = async(start = CoroutineStart.LAZY) {
analyzer.getAmplitudeSpectrum(point)
}
}
class CachedSet(set: NumassSet) : NumassSet by set {
override val points: List<CachedPoint> = set.points.map { CachedPoint(it) }
}

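A minimal usage sketch for the new cache wrappers (the point argument and the runBlocking wrapper are illustrative):

import inr.numass.data.api.NumassPoint
import inr.numass.viewer.CachedPoint
import kotlinx.coroutines.experimental.runBlocking

fun showSpectra(point: NumassPoint) = runBlocking {
    val cached = CachedPoint(point)
    // both deferreds start lazily and are computed at most once per wrapper
    val total = cached.spectrum.await()
    val perChannel = cached.channelSpectra.await()
    println("channels: ${perChannel.keys}, total spectrum: $total")
}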
View File

@ -53,9 +53,9 @@ class HVView : View(title = "High voltage time plot", icon = ImageView(dfIcon))
if (change.wasAdded()) {
runLater { container.progress = -1.0 }
runGoal("hvData[${change.key}]") {
change.valueAdded.hvData
change.valueAdded.hvData.await()
} ui { hvData ->
hvData.ifPresent {
hvData?.let {
for (dp in it) {
val plot: TimePlot = frame[change.key] as TimePlot? ?: TimePlot(change.key).apply { frame.add(this) }
plot.put(dp.getValue("timestamp").getTime(), dp.getValue("value"))

View File

@ -144,7 +144,7 @@ class MainView(val context: Context = Global.getContext("viewer")) : View(title
NumassDataLoader.fromDir(context, path)
} ui {
contentView = SpectrumView().apply {
add(it.name, it)
set(it.name, CachedSet(it))
}
infoView = MetaViewer(it.meta)
} except {
@ -195,9 +195,9 @@ class MainView(val context: Context = Global.getContext("viewer")) : View(title
val point = NumassPoint.read(it)
runLater {
contentView = AmplitudeView().apply {
add(path.toString(), point)
add(path.toString(), CachedPoint(point))
}
infoView = PointInfoView(point)
infoView = PointInfoView(CachedPoint(point))
}
} else {
alert(

View File

@ -1,26 +0,0 @@
package inr.numass.viewer
import hep.dataforge.meta.Meta
import hep.dataforge.tables.Table
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import java.util.*
import java.util.stream.Collectors
import java.util.stream.Stream
/**
* Cached numass data
* Created by darksnake on 23-Jun-17.
*/
class NumassDataCache(private val data: NumassSet) : NumassSet {
//private val cachedDescription: String by lazy { data.description }
override val meta: Meta by lazy { data.meta }
private val cachedPoints: List<NumassPoint> by lazy { data.points.collect(Collectors.toList()) }
override val hvData: Optional<Table> by lazy { data.hvData }
override val points: Stream<NumassPoint>
get() = cachedPoints.stream()
override val name: String = data.name
}

View File

@ -2,13 +2,15 @@ package inr.numass.viewer
import hep.dataforge.fx.meta.MetaViewer
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.api.NumassPoint
import kotlinx.coroutines.experimental.runBlocking
import tornadofx.*
import tornadofx.controlsfx.borders
class PointInfoView(val point: NumassPoint) : MetaViewer(point.meta) {
class PointInfoView(val point: CachedPoint) : MetaViewer(point.meta) {
private val count: Int by lazy {
PointCache[point].sumBy { it.getValue(NumassAnalyzer.COUNT_KEY).int }
runBlocking {
point.spectrum.await().sumBy { it.getValue(NumassAnalyzer.COUNT_KEY).int }
}
}
override val root = super.root.apply {

View File

@ -22,7 +22,6 @@ import javafx.util.converter.NumberStringConverter
import org.controlsfx.control.RangeSlider
import tornadofx.*
import java.util.concurrent.atomic.AtomicInteger
import java.util.stream.Collectors
/**
* View for energy spectrum
@ -52,7 +51,7 @@ class SpectrumView : View(title = "Numass spectrum plot", icon = ImageView(dfIco
private var upChannel by upChannelProperty
private val data: ObservableMap<String, NumassSet> = FXCollections.observableHashMap();
private val data: ObservableMap<String, CachedSet> = FXCollections.observableHashMap();
val isEmpty = booleanBinding(data) { data.isEmpty() }
override val root = borderpane {
@ -113,24 +112,24 @@ class SpectrumView : View(title = "Numass spectrum plot", icon = ImageView(dfIco
private fun updateView() {
runLater { container.progress = 0.0 }
val progress = AtomicInteger(0)
val totalProgress = data.values.stream().mapToLong() { it.points.count() }.sum()
val totalProgress = data.values.stream().mapToInt { it.points.size }.sum()
data.forEach { name, set ->
val plot: DataPlot = frame[name] as DataPlot? ?: DataPlot(name).apply { frame.add(this) }
runGoal("spectrumData[$name]") {
set.points.map { point ->
val count = PointCache[point].countInWindow(loChannel.toShort(), upChannel.toShort());
val count = point.spectrum.await().countInWindow(loChannel.toShort(), upChannel.toShort());
val seconds = point.length.toMillis() / 1000.0;
runLater {
container.progress = progress.incrementAndGet().toDouble() / totalProgress
container.progress = progress.incrementAndGet().toDouble() / totalProgress
}
Adapters.buildXYDataPoint(
point.voltage,
(count / seconds),
Math.sqrt(count.toDouble()) / seconds
)
}.collect(Collectors.toList())
}
} ui { points ->
plot.fillData(points)
container.progress = 1.0
@ -139,8 +138,8 @@ class SpectrumView : View(title = "Numass spectrum plot", icon = ImageView(dfIco
}
}
fun add(key: String, value: NumassSet) {
data[key] = NumassDataCache(value)
operator fun set(key: String, value: CachedSet) {
data[key] = value
}
fun remove(key: String) {

View File

@ -14,7 +14,6 @@ import javafx.beans.property.SimpleBooleanProperty
import javafx.scene.control.ContextMenu
import javafx.scene.control.TreeItem
import tornadofx.*
import kotlin.streams.toList
class StorageView(val storage: Storage) : View(title = "Numass storage", icon = dfIconView) {
@ -29,7 +28,7 @@ class StorageView(val storage: Storage) : View(title = "Numass storage", icon =
val infoView: UIComponent? by lazy {
when (content) {
is NumassPoint -> PointInfoView(content)
is CachedPoint -> PointInfoView(content)
is Metoid -> MetaViewer(content.meta, title = "Meta view: $id")
else -> null
}
@ -38,16 +37,16 @@ class StorageView(val storage: Storage) : View(title = "Numass storage", icon =
init {
checkedProperty.onChange { selected ->
when (content) {
is NumassPoint -> {
is CachedPoint -> {
if (selected) {
ampView.add(id, content)
} else {
ampView.remove(id)
}
}
is NumassSet -> {
is CachedSet -> {
if (selected) {
spectrumView.add(id, content)
spectrumView.set(id, content)
hvView.add(id, content)
} else {
spectrumView.remove(id)
@ -68,8 +67,8 @@ class StorageView(val storage: Storage) : View(title = "Numass storage", icon =
val children: List<Container>? by lazy {
when (content) {
is Storage -> (content.shelves().sorted() + content.loaders().sorted()).map { buildContainer(it, this) }
is NumassSet -> content.points
.sorted(compareBy { it.index })
is CachedSet -> content.points
.sortedBy { it.index }
.map { buildContainer(it, this) }
.toList()
else -> null
@ -179,10 +178,10 @@ class StorageView(val storage: Storage) : View(title = "Numass storage", icon =
} else {
content.name
}
Container(id, content)
Container(id, content as? CachedSet ?: CachedSet(content))
}
is NumassPoint -> {
Container("${parent.id}/${content.voltage}[${content.index}]", content)
Container("${parent.id}/${content.voltage}[${content.index}]", content as? CachedPoint ?: CachedPoint(content))
}
is Loader -> {
Container(content.path.toString(), content);

View File

@ -4,11 +4,6 @@ import ch.qos.logback.classic.Level
import ch.qos.logback.classic.Logger
import hep.dataforge.context.Global
import hep.dataforge.fx.dfIcon
import hep.dataforge.meta.Meta
import hep.dataforge.tables.Table
import hep.dataforge.utils.Misc
import inr.numass.data.analyzers.SimpleAnalyzer
import inr.numass.data.api.NumassBlock
import javafx.stage.Stage
import org.slf4j.LoggerFactory
import tornadofx.*
@ -17,7 +12,7 @@ import tornadofx.*
* Created by darksnake on 14-Apr-17.
*/
class Viewer : App(MainView::class) {
init{
init {
(LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME) as Logger).level = Level.INFO
}
@ -30,16 +25,4 @@ class Viewer : App(MainView::class) {
super.stop()
Global.terminate();
}
}
/**
* Global point cache
*/
object PointCache{
private val analyzer = SimpleAnalyzer()
private val cache: MutableMap<NumassBlock, Table> = Misc.getLRUCache(1000)
operator fun get(point: NumassBlock): Table {
return cache.computeIfAbsent(point) { analyzer.getAmplitudeSpectrum(point, Meta.empty()) }
}
}

View File

@ -6,14 +6,11 @@ import hep.dataforge.tables.Table
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassStorageFactory
import inr.numass.viewer.AmplitudeView
import inr.numass.viewer.HVView
import inr.numass.viewer.SpectrumView
import inr.numass.viewer.*
import javafx.application.Application
import javafx.scene.image.ImageView
import tornadofx.*
import java.util.concurrent.ConcurrentHashMap
import java.util.stream.Collectors
class ViewerComponentsTestApp : App(ViewerComponentsTest::class)
@ -53,8 +50,8 @@ class ViewerComponentsTest : View(title = "Numass viewer test", icon = ImageView
}
fun update(set: NumassSet) {
amp.setAll(set.points.filter { it.voltage != 16000.0 }.collect(Collectors.toMap({ "point_${it.voltage}" }, { it })));
sp.add("test", set);
amp.setAll(set.points.filter { it.voltage != 16000.0 }.associateBy({ "point_${it.voltage}" }) { CachedPoint(it) });
sp.set("test", CachedSet(set));
hv.add(set.name, set)
}
}