Fix for color picker in plot config

This commit is contained in:
Alexander Nozik 2018-03-21 17:02:57 +03:00
parent caf8897194
commit bfb73dd300
16 changed files with 164 additions and 4668 deletions

View File

@ -24,7 +24,9 @@ import hep.dataforge.control.connections.Roles
import hep.dataforge.control.devices.Device
import hep.dataforge.control.devices.PortSensor
import hep.dataforge.control.measurements.AbstractMeasurement
import hep.dataforge.control.ports.GenericPortController
import hep.dataforge.control.ports.Port
import hep.dataforge.control.ports.PortFactory
import hep.dataforge.description.ValueDef
import hep.dataforge.exceptions.ControlException
import hep.dataforge.exceptions.MeasurementException
@ -76,11 +78,12 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
private val averagingDuration: Duration = Duration.parse(meta.getString("averagingDuration", "PT30S"))
@Throws(ControlException::class)
override fun init() {
super.init()
connection.weakOnError(this::notifyError)
onResponse("FilamentStatus"){
onResponse("FilamentStatus") {
val status = it[0, 2]
updateLogicalState("filamentOn", status == "ON")
updateLogicalState("filamentStatus", status)
@ -92,45 +95,22 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
* Add reaction on specific response
*/
private fun onResponse(command: String, action: (MspResponse) -> Unit) {
connection.weakOnPhrase({it.startsWith(command)}){
connection.weakOnPhrase({ it.startsWith(command) }) {
action(MspResponse(it))
}
}
/*
override fun acceptPhrase(message: String) {
dispatchEvent(
EventBuilder
.make("msp")
.setMetaValue("response", message.trim { it <= ' ' }).build()
)
val response = MspResponse(message)
when (response.commandName) {
// all possible async messages
"FilamentStatus" -> {
val status = response[0, 2]
updateLogicalState("filamentOn", status == "ON")
updateLogicalState("filamentStatus", status)
}
}
if (measurementDelegate != null) {
measurementDelegate!!.accept(response)
}
override fun connect(meta: Meta): GenericPortController {
val portName = meta.getString("name")
logger.info("Connecting to port {}", portName)
val port: Port = PortFactory.build(meta)
return GenericPortController(context, port, "\r\r")
}
override fun error(errorMessage: String?, error: Throwable?) {
notifyError(errorMessage, error)
}
*/
override fun buildPort(portName: String?): Port = super.buildPort(portName).apply { setDelimiter("\r\r") }
@Throws(ControlException::class)
override fun shutdown() {
super.stopMeasurement(true)
super.stopMeasurement()
if (connected) {
setFilamentOn(false)
setConnected(false)
@ -151,6 +131,10 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
}
}
override fun setMeasurement(oldMeta: Meta?, newMeta: Meta) {
TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
}
@Throws(ControlException::class)
override fun computeState(stateName: String): Any = when (stateName) {
"connected" -> false
@ -190,7 +174,7 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
if (response.isOK) {
sensorName = response[2, 1]
} else {
notifyError(response.errorDescription(), null)
notifyError(response.errorDescription, null)
return false
}
//PENDING определить в конфиге номер прибора
@ -200,7 +184,7 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
updateLogicalState("selected", true)
// selected = true;
} else {
notifyError(response.errorDescription(), null)
notifyError(response.errorDescription, null)
return false
}
@ -210,7 +194,7 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
// invalidateState("controlled");
updateLogicalState("controlled", true)
} else {
notifyError(response.errorDescription(), null)
notifyError(response.errorDescription, null)
return false
}
// connected = true;
@ -266,7 +250,7 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
@Throws(PortException::class)
private fun commandAndWait(commandName: String, vararg parameters: Any): MspResponse {
send(buildCommand(commandName, *parameters))
val response = connection.waitFor(timeout) { str: String -> str.trim { it <= ' ' }.startsWith(commandName) }
val response = connection.waitFor(TIMEOUT) { str: String -> str.trim { it <= ' ' }.startsWith(commandName) }
return MspResponse(response)
}
@ -276,7 +260,7 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
if (response.isOK) {
updateLogicalState("filament", response[1, 1])
} else {
logger.error("Failed to set filament with error: {}", response.errorDescription())
logger.error("Failed to set filament with error: {}", response.errorDescription)
}
}
@ -341,9 +325,10 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
}
}
fun errorDescription(): String? {
val errorDescription: String
get() {
return if (isOK) {
null
throw RuntimeException("Not a error")
} else {
get(2, 1)
}
@ -425,12 +410,13 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
}
@Synchronized override fun result(result: Values, time: Instant) {
@Synchronized
override fun result(result: Values, time: Instant) {
super.result(result, time)
helper.push(result)
}
internal fun error(errorMessage: String?, error: Throwable?) {
private fun error(errorMessage: String?, error: Throwable?) {
if (error == null) {
error(MeasurementException(errorMessage))
} else {
@ -467,7 +453,7 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
}
companion object {
val MSP_DEVICE_TYPE = "numass.msp"
const val MSP_DEVICE_TYPE = "numass.msp"
private val TIMEOUT = Duration.ofMillis(200)
}

File diff suppressed because it is too large Load Diff

View File

@ -6,12 +6,15 @@ import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import inr.numass.data.api.*
import inr.numass.data.storage.ProtoBlock
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.io.InputStream
import java.util.stream.Collectors
import java.util.stream.Stream
import java.util.zip.ZipInputStream
import java.util.zip.Inflater
import kotlin.streams.asSequence
/**
* Created by darksnake on 30-Jan-17.
*/
@ -46,7 +49,18 @@ object NumassDataUtils {
*/
val Envelope.dataStream: InputStream
get() = if (this.meta.getString("compression", "none") == "zlib") {
ZipInputStream(this.data.stream)
//TODO move to new type of data
val inflatter = Inflater()
inflatter.setInput(data.buffer.array())
val bos = ByteArrayOutputStream()
val buffer = ByteArray(8192)
while (!inflatter.finished()) {
val size = inflatter.inflate(buffer)
bos.write(buffer, 0, size)
}
val unzippeddata = bos.toByteArray()
inflatter.end()
ByteArrayInputStream(unzippeddata)
} else {
this.data.stream
}
@ -59,11 +73,13 @@ val NumassBlock.channel: Int?
}
fun NumassBlock.transformChain(transform: (NumassEvent, NumassEvent) -> Pair<Short, Long>): NumassBlock {
fun NumassBlock.transformChain(transform: (NumassEvent, NumassEvent) -> Pair<Short, Long>?): NumassBlock {
return SimpleBlock(this.startTime, this.length, this.meta) { owner ->
this.events.asSequence()
.sortedBy { it.timeOffset }
.zipWithNext(transform).map { NumassEvent(it.first, it.second, owner) }.asIterable()
.zipWithNext(transform)
.filterNotNull()
.map { NumassEvent(it.first, it.second, owner) }.asIterable()
}
}
@ -77,8 +93,7 @@ fun NumassBlock.filterChain(condition: (NumassEvent, NumassEvent) -> Boolean): N
fun NumassBlock.filter(condition: (NumassEvent) -> Boolean): NumassBlock {
return SimpleBlock(this.startTime, this.length, this.meta) { owner ->
this.events.asSequence()
.filter(condition).asIterable()
this.events.asSequence().filter(condition).asIterable()
}
}

View File

@ -119,11 +119,6 @@ interface NumassAnalyzer {
val AMPLITUDE_ADAPTER: ValuesAdapter = Adapters.buildXYAdapter(CHANNEL_KEY, COUNT_RATE_KEY)
// val MAX_CHANNEL = 10000
fun Table.withBinning(binSize: Int, loChannel: Int? = null, upChannel: Int? = null): Table {
return spectrumWithBinning(this,binSize, loChannel, upChannel)
}
}
}
@ -135,8 +130,8 @@ interface NumassAnalyzer {
* @param upChannel
* @return
*/
fun countInWindow(spectrum: Table, loChannel: Short, upChannel: Short): Long {
return spectrum.rows.filter { row ->
fun Table.countInWindow(loChannel: Short, upChannel: Short): Long {
return this.rows.filter { row ->
row.getInt(NumassAnalyzer.CHANNEL_KEY) in loChannel..(upChannel - 1)
}.mapToLong { it -> it.getValue(NumassAnalyzer.COUNT_KEY).numberValue().toLong() }.sum()
}
@ -195,7 +190,7 @@ fun getAmplitudeSpectrum(events: Sequence<NumassEvent>, length: Double, config:
* @return
*/
@JvmOverloads
fun spectrumWithBinning(spectrum: Table, binSize: Int, loChannel: Int? = null, upChannel: Int? = null): Table {
fun Table.withBinning(binSize: Int, loChannel: Int? = null, upChannel: Int? = null): Table {
val format = TableFormatBuilder()
.addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_KEY, Y_VALUE_KEY)
@ -204,9 +199,9 @@ fun spectrumWithBinning(spectrum: Table, binSize: Int, loChannel: Int? = null, u
.addNumber("binSize")
val builder = ListTable.Builder(format)
var chan = loChannel ?: spectrum.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.intValue() }.min().orElse(0)
var chan = loChannel ?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.intValue() }.min().orElse(0)
val top = upChannel ?: spectrum.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.intValue() }.max().orElse(1)
val top = upChannel ?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.intValue() }.max().orElse(1)
while (chan < top - binSize) {
val count = AtomicLong(0)
@ -216,7 +211,7 @@ fun spectrumWithBinning(spectrum: Table, binSize: Int, loChannel: Int? = null, u
val binLo = chan
val binUp = chan + binSize
spectrum.rows.filter { row ->
this.rows.filter { row ->
row.getInt(NumassAnalyzer.CHANNEL_KEY) in binLo..(binUp - 1)
}.forEach { row ->
count.addAndGet(row.getValue(NumassAnalyzer.COUNT_KEY, 0).longValue())

View File

@ -28,21 +28,23 @@ class ProtoNumassPoint(private val envelope: Envelope) : NumassPoint {
private val point: NumassProto.Point
get() = try {
envelope.dataStream.use { stream -> return NumassProto.Point.parseFrom(stream) }
envelope.dataStream.use {
NumassProto.Point.parseFrom(it)
}
} catch (ex: IOException) {
throw RuntimeException("Failed to read point via protobuf")
throw RuntimeException("Failed to read point via protobuf", ex)
}
override val blocks: Stream<NumassBlock>
get() = point.channelsList.stream()
.flatMap { channel ->
channel.blocksList.stream()
.map { block -> ProtoBlock(channel.num.toInt(), block) }
.map { block -> ProtoBlock(channel.id.toInt(), block) }
.sorted(Comparator.comparing<ProtoBlock, Instant> { it.startTime })
}
override val meta: Meta =envelope.meta
override val meta: Meta = envelope.meta
companion object {
fun readFile(path: Path): ProtoNumassPoint {
@ -63,7 +65,7 @@ class ProtoNumassPoint(private val envelope: Envelope) : NumassPoint {
class ProtoBlock(val channel: Int, private val block: NumassProto.Point.Channel.Block) : NumassBlock {
override val meta: Meta by lazy {
buildMeta{
buildMeta {
"channel" to channel
}
}
@ -72,7 +74,13 @@ class ProtoBlock(val channel: Int, private val block: NumassProto.Point.Channel.
get() = ProtoNumassPoint.ofEpochNanos(block.time)
override val length: Duration
get() = Duration.ofNanos((meta.getDouble("params.b_size") / meta.getDouble("params.sample_freq") * 1e9).toLong())
get() = if (meta.hasMeta("params")) {
Duration.ofNanos((meta.getDouble("params.b_size") / meta.getDouble("params.sample_freq") * 1e9).toLong())
} else if (meta.hasValue("length")) {
Duration.ofNanos(meta.getValue("length").longValue())
} else {
Duration.ZERO
}
override val events: Stream<NumassEvent>
@ -86,7 +94,13 @@ class ProtoBlock(val channel: Int, private val block: NumassProto.Point.Channel.
override val frames: Stream<NumassFrame>
get() {
val tickSize = Duration.ofNanos((1e9 / meta.getInt("params.sample_freq")).toLong())
val tickSize = if (meta.hasMeta("params")) {
Duration.ofNanos((1e9 / meta.getInt("params.sample_freq")).toLong())
} else if (meta.hasValue("tick_length")) {
Duration.ofNanos(meta.getInt("tick_length").toLong())
} else {
Duration.ofNanos(1)
}
return block.framesList.stream().map { frame ->
val time = startTime.plusNanos(frame.time)
val data = frame.data.asReadOnlyByteBuffer()

View File

@ -20,11 +20,14 @@ message Point {
repeated uint64 times = 1; // Array of time in nanos from the beginning of the block
repeated uint64 amplitudes = 2; // Array of amplitudes of events in channels
}
uint64 time = 1; // Block start in epoch nanos
repeated Frame frames = 2; // Frames array
Events events = 3; // Events array
uint64 length = 4; // block size in nanos
uint64 bin_size = 5; // tick size in nanos
}
uint64 num = 1; // The number of measuring channel
uint64 id = 1; // The number of measuring channel
repeated Block blocks = 2; // Blocks
}
repeated Channel channels = 1; // Array of measuring channels

View File

@ -77,7 +77,7 @@ shell.eval {
DataPlot.plot(
it.key as String,
adapter,
NumassAnalyzerKt.spectrumWithBinning(it.value as Table, binning)
NumassAnalyzerKt.withBinning(it.value as Table, binning)
)
)
}

View File

@ -12,6 +12,7 @@ import hep.dataforge.tables.TableTransform;
import hep.dataforge.tables.ValueMap;
import hep.dataforge.values.Values;
import inr.numass.data.analyzers.NumassAnalyzer;
import inr.numass.data.analyzers.NumassAnalyzerKt;
import inr.numass.data.api.NumassPoint;
import org.apache.commons.math3.analysis.ParametricUnivariateFunction;
import org.apache.commons.math3.exception.DimensionMismatchException;
@ -23,7 +24,6 @@ import java.util.stream.Collectors;
import static inr.numass.data.analyzers.NumassAnalyzer.CHANNEL_KEY;
import static inr.numass.data.analyzers.NumassAnalyzer.COUNT_RATE_KEY;
import static inr.numass.data.analyzers.NumassAnalyzerKt.spectrumWithBinning;
/**
* A class to calculate underflow correction
@ -110,7 +110,7 @@ public class UnderflowCorrection {
throw new IllegalArgumentException("Wrong borders for underflow calculation");
}
Table binned = TableTransform.filter(
spectrumWithBinning(spectrum, binning),
NumassAnalyzerKt.withBinning(spectrum, binning),
CHANNEL_KEY,
xLow,
xHigh

View File

@ -22,19 +22,24 @@ import hep.dataforge.fx.plots.FXPlotManager
import hep.dataforge.kodex.KMetaBuilder
import hep.dataforge.kodex.buildMeta
import hep.dataforge.kodex.configure
import hep.dataforge.kodex.nullable
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.tables.Adapters
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.analyzers.withBinning
import inr.numass.data.api.NumassBlock
fun NumassBlock.plotAmplitudeSpectrum(plotName: String = "spectrum", frameName: String = "", context: Context = Global, metaAction: KMetaBuilder.() -> Unit) {
fun NumassBlock.plotAmplitudeSpectrum(plotName: String = "spectrum", frameName: String = "", context: Context = Global, metaAction: KMetaBuilder.() -> Unit = {}) {
val meta = buildMeta("meta", metaAction)
val plotManager = context.load(FXPlotManager::class)
val data = SmartAnalyzer().getAmplitudeSpectrum(this, meta.getMetaOrEmpty("spectrum"))
val binning = meta.getInt("binning", 20)
val lo = meta.optNumber("window.lo").nullable?.toInt()
val up = meta.optNumber("window.up").nullable?.toInt()
val data = SmartAnalyzer().getAmplitudeSpectrum(this, meta.getMetaOrEmpty("spectrum")).withBinning(binning, lo, up)
plotManager.display(name = frameName) {
val valueAxis = if (meta.getBoolean("normalize",true)) {
val valueAxis = if (meta.getBoolean("normalize", false)) {
NumassAnalyzer.COUNT_RATE_KEY
} else {
NumassAnalyzer.COUNT_KEY
@ -51,6 +56,7 @@ fun NumassBlock.plotAmplitudeSpectrum(plotName: String = "spectrum", frameName:
"showErrors" to false
"JFreeChart.cache" to true
}
plot.configure(meta)
add(plot)
}
}

View File

@ -27,6 +27,7 @@ import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.analyzers.subtractAmplitudeSpectrum
import inr.numass.data.analyzers.withBinning
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassStorageFactory

View File

@ -24,8 +24,8 @@ import hep.dataforge.plots.data.DataPlot
import inr.numass.NumassPlugin
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.NumassAnalyzer.Companion.AMPLITUDE_ADAPTER
import inr.numass.data.analyzers.NumassAnalyzer.Companion.withBinning
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.analyzers.withBinning
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassStorageFactory

View File

@ -23,8 +23,9 @@ import hep.dataforge.kodex.buildMeta
import hep.dataforge.plots.data.DataPlot
import inr.numass.NumassPlugin
import inr.numass.data.analyzers.NumassAnalyzer.Companion.AMPLITUDE_ADAPTER
import inr.numass.data.analyzers.NumassAnalyzer.Companion.withBinning
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.analyzers.withBinning
import inr.numass.data.storage.ProtoNumassPoint
import java.nio.file.Paths

View File

@ -1,11 +1,62 @@
package inr.numass.scripts.tristan
import inr.numass.data.channel
import inr.numass.data.plotAmplitudeSpectrum
import inr.numass.data.storage.ProtoNumassPoint
import inr.numass.data.transformChain
import java.io.File
fun main(args: Array<String>) {
val file = File("D:\\Work\\Numass\\data\\TRISTAN_11_2017\\df\\gun_16_19.df").toPath()
val point = ProtoNumassPoint.readFile(file)
point.plotAmplitudeSpectrum()
val file = ProtoNumassPoint.readFile("D:\\Work\\Numass\\data\\TRISTAN_11_2017\\df\\gun_16_19.df ")
val filtered = file.filter { it.channel == 4 }
point.blocks.filter { it.channel == 0 }.findFirst().ifPresent {
it.plotAmplitudeSpectrum(plotName = "0") {
"title" to "pixel 0"
"binning" to 50
}
}
point.blocks.filter { it.channel == 4 }.findFirst().ifPresent {
it.plotAmplitudeSpectrum(plotName = "4") {
"title" to "pixel 4"
"binning" to 50
}
println("Number of events for pixel 4 is ${it.events.count()}")
}
listOf(0, 20, 50, 100, 200).forEach { window ->
point.transformChain { first, second ->
val dt = second.timeOffset - first.timeOffset
if (second.channel == 4 && first.channel == 0 && dt > window && dt < 1000) {
Pair((first.amp + second.amp).toShort(), second.timeOffset)
} else {
null
}
}.also {
println("Number of events for $window is ${it.events.count()}")
}.plotAmplitudeSpectrum(plotName = "filtered.before.$window") {
"binning" to 50
}
}
listOf(0, 20, 50, 100, 200).forEach { window ->
point.transformChain { first, second ->
val dt = second.timeOffset - first.timeOffset
if (second.channel == 0 && first.channel == 4 && dt > window && dt < 1000) {
Pair((first.amp + second.amp).toShort(), second.timeOffset)
} else {
null
}
}.also {
println("Number of events for $window is ${it.events.count()}")
}.plotAmplitudeSpectrum(plotName = "filtered.after.$window") {
"binning" to 50
}
}
}

View File

@ -10,13 +10,12 @@ import hep.dataforge.meta.Meta
import hep.dataforge.storage.commons.StorageUtils
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.Table
import hep.dataforge.tables.TableTransform
import hep.dataforge.tables.ValueMap
import hep.dataforge.values.Values
import inr.numass.data.analyzers.NumassAnalyzer.Companion.CHANNEL_KEY
import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_KEY
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.analyzers.spectrumWithBinning
import inr.numass.data.analyzers.withBinning
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.api.SimpleNumassPoint
@ -78,12 +77,13 @@ object Threshold {
if (xHigh <= xLow) {
throw IllegalArgumentException("Wrong borders for underflow calculation");
}
val binned = TableTransform.filter(
spectrumWithBinning(spectrum, binning),
CHANNEL_KEY,
xLow,
xHigh
)
val binned = spectrum.withBinning(binning, xLow, xHigh)
// val binned = TableTransform.filter(
// spectrum.withBinning(binning),
// CHANNEL_KEY,
// xLow,
// xHigh
// )
return binned.rows
.map {

View File

@ -14,7 +14,7 @@ import hep.dataforge.tables.Adapters
import hep.dataforge.tables.Table
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.SimpleAnalyzer
import inr.numass.data.analyzers.spectrumWithBinning
import inr.numass.data.analyzers.withBinning
import inr.numass.data.api.NumassPoint
import javafx.beans.Observable
import javafx.beans.binding.DoubleBinding
@ -133,7 +133,7 @@ class AmplitudeView(
DataPlot.plot(
key,
Adapters.buildXYAdapter(NumassAnalyzer.CHANNEL_KEY, valueAxis),
spectrumWithBinning(getSpectrum(point), binning)
getSpectrum(point).withBinning(binning)
).configure {
"connectionType" to "step"
"thickness" to 2

View File

@ -173,7 +173,7 @@ class SpectrumView(
runGoal("spectrumData[$name]") {
set.points.map { point ->
val count = countInWindow(getSpectrum(point), loChannel.toShort(), upChannel.toShort());
val count = getSpectrum(point).countInWindow(loChannel.toShort(), upChannel.toShort());
val seconds = point.length.toMillis() / 1000.0;
runLater {
container.progress = progress.incrementAndGet().toDouble() / totalProgress