Fix for color picker in plot config
commit bfb73dd300
parent caf8897194
@@ -24,7 +24,9 @@ import hep.dataforge.control.connections.Roles
 import hep.dataforge.control.devices.Device
 import hep.dataforge.control.devices.PortSensor
 import hep.dataforge.control.measurements.AbstractMeasurement
+import hep.dataforge.control.ports.GenericPortController
 import hep.dataforge.control.ports.Port
+import hep.dataforge.control.ports.PortFactory
 import hep.dataforge.description.ValueDef
 import hep.dataforge.exceptions.ControlException
 import hep.dataforge.exceptions.MeasurementException
@@ -76,11 +78,12 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
 
     private val averagingDuration: Duration = Duration.parse(meta.getString("averagingDuration", "PT30S"))
 
+
     @Throws(ControlException::class)
     override fun init() {
         super.init()
         connection.weakOnError(this::notifyError)
-        onResponse("FilamentStatus"){
+        onResponse("FilamentStatus") {
             val status = it[0, 2]
             updateLogicalState("filamentOn", status == "ON")
             updateLogicalState("filamentStatus", status)
@@ -92,45 +95,22 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
      * Add reaction on specific response
      */
     private fun onResponse(command: String, action: (MspResponse) -> Unit) {
-        connection.weakOnPhrase({it.startsWith(command)}){
+        connection.weakOnPhrase({ it.startsWith(command) }) {
             action(MspResponse(it))
         }
     }
 
-    /*
-    override fun acceptPhrase(message: String) {
-        dispatchEvent(
-                EventBuilder
-                        .make("msp")
-                        .setMetaValue("response", message.trim { it <= ' ' }).build()
-        )
-        val response = MspResponse(message)
-
-        when (response.commandName) {
-        // all possible async messages
-            "FilamentStatus" -> {
-                val status = response[0, 2]
-                updateLogicalState("filamentOn", status == "ON")
-                updateLogicalState("filamentStatus", status)
-            }
-        }
-        if (measurementDelegate != null) {
-            measurementDelegate!!.accept(response)
-        }
+    override fun connect(meta: Meta): GenericPortController {
+        val portName = meta.getString("name")
+        logger.info("Connecting to port {}", portName)
+        val port: Port = PortFactory.build(meta)
+        return GenericPortController(context, port, "\r\r")
     }
 
-    override fun error(errorMessage: String?, error: Throwable?) {
-        notifyError(errorMessage, error)
-    }
-
-    */
-
-    override fun buildPort(portName: String?): Port = super.buildPort(portName).apply { setDelimiter("\r\r") }
-
     @Throws(ControlException::class)
     override fun shutdown() {
-        super.stopMeasurement(true)
+        super.stopMeasurement()
         if (connected) {
             setFilamentOn(false)
             setConnected(false)
@@ -151,6 +131,10 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
         }
     }
 
+    override fun setMeasurement(oldMeta: Meta?, newMeta: Meta) {
+        TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
+    }
+
     @Throws(ControlException::class)
     override fun computeState(stateName: String): Any = when (stateName) {
         "connected" -> false
@@ -190,7 +174,7 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
         if (response.isOK) {
             sensorName = response[2, 1]
         } else {
-            notifyError(response.errorDescription(), null)
+            notifyError(response.errorDescription, null)
             return false
         }
         //PENDING определеить в конфиге номер прибора
@@ -200,7 +184,7 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
             updateLogicalState("selected", true)
             // selected = true;
         } else {
-            notifyError(response.errorDescription(), null)
+            notifyError(response.errorDescription, null)
             return false
         }
 
@@ -210,7 +194,7 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
             // invalidateState("controlled");
             updateLogicalState("controlled", true)
         } else {
-            notifyError(response.errorDescription(), null)
+            notifyError(response.errorDescription, null)
             return false
         }
         // connected = true;
@@ -266,7 +250,7 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
     @Throws(PortException::class)
     private fun commandAndWait(commandName: String, vararg parameters: Any): MspResponse {
         send(buildCommand(commandName, *parameters))
-        val response = connection.waitFor(timeout) { str: String -> str.trim { it <= ' ' }.startsWith(commandName) }
+        val response = connection.waitFor(TIMEOUT) { str: String -> str.trim { it <= ' ' }.startsWith(commandName) }
         return MspResponse(response)
     }
 
@@ -276,7 +260,7 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
         if (response.isOK) {
             updateLogicalState("filament", response[1, 1])
         } else {
-            logger.error("Failed to set filament with error: {}", response.errorDescription())
+            logger.error("Failed to set filament with error: {}", response.errorDescription)
         }
     }
 
@@ -341,9 +325,10 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
             }
         }
 
-        fun errorDescription(): String? {
+        val errorDescription: String
+            get() {
                 return if (isOK) {
-                    null
+                    throw RuntimeException("Not a error")
                 } else {
                     get(2, 1)
                 }
@@ -425,12 +410,13 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
 
         }
 
-        @Synchronized override fun result(result: Values, time: Instant) {
+        @Synchronized
+        override fun result(result: Values, time: Instant) {
             super.result(result, time)
             helper.push(result)
         }
 
-        internal fun error(errorMessage: String?, error: Throwable?) {
+        private fun error(errorMessage: String?, error: Throwable?) {
             if (error == null) {
                 error(MeasurementException(errorMessage))
             } else {
@@ -467,7 +453,7 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
     }
 
     companion object {
-        val MSP_DEVICE_TYPE = "numass.msp"
+        const val MSP_DEVICE_TYPE = "numass.msp"
 
         private val TIMEOUT = Duration.ofMillis(200)
     }
File diff suppressed because it is too large
@@ -6,12 +6,15 @@ import hep.dataforge.meta.Meta
 import hep.dataforge.meta.MetaBuilder
 import inr.numass.data.api.*
 import inr.numass.data.storage.ProtoBlock
+import java.io.ByteArrayInputStream
+import java.io.ByteArrayOutputStream
 import java.io.InputStream
 import java.util.stream.Collectors
 import java.util.stream.Stream
-import java.util.zip.ZipInputStream
+import java.util.zip.Inflater
 import kotlin.streams.asSequence
 
 
 /**
  * Created by darksnake on 30-Jan-17.
  */
@@ -46,7 +49,18 @@ object NumassDataUtils {
  */
 val Envelope.dataStream: InputStream
     get() = if (this.meta.getString("compression", "none") == "zlib") {
-        ZipInputStream(this.data.stream)
+        //TODO move to new type of data
+        val inflatter = Inflater()
+        inflatter.setInput(data.buffer.array())
+        val bos = ByteArrayOutputStream()
+        val buffer = ByteArray(8192)
+        while (!inflatter.finished()) {
+            val size = inflatter.inflate(buffer)
+            bos.write(buffer, 0, size)
+        }
+        val unzippeddata = bos.toByteArray()
+        inflatter.end()
+        ByteArrayInputStream(unzippeddata)
     } else {
         this.data.stream
    }
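
The replacement above treats the "zlib"-compressed envelope payload as a raw deflate buffer and inflates it in chunks with java.util.zip.Inflater, rather than wrapping it in a ZipInputStream (which expects a ZIP archive, not a bare zlib stream). A minimal, self-contained sketch of the same Inflater loop as an in-memory round trip; every name in it is illustrative and not taken from the repository:

import java.io.ByteArrayOutputStream
import java.util.zip.Deflater
import java.util.zip.Inflater

fun main() {
    val original = "numass envelope payload".toByteArray()

    // compress to a zlib buffer (stand-in for the stored envelope data)
    val deflater = Deflater().apply { setInput(original); finish() }
    val compressed = ByteArray(original.size + 64)
    val compressedSize = deflater.deflate(compressed)
    deflater.end()

    // inflate in fixed-size chunks until the whole buffer is consumed
    val inflater = Inflater()
    inflater.setInput(compressed, 0, compressedSize)
    val bos = ByteArrayOutputStream()
    val buffer = ByteArray(8192)
    while (!inflater.finished()) {
        val size = inflater.inflate(buffer)
        bos.write(buffer, 0, size)
    }
    inflater.end()

    check(bos.toByteArray().contentEquals(original)) // round trip succeeded
}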
@@ -59,11 +73,13 @@ val NumassBlock.channel: Int?
 }
 
 
-fun NumassBlock.transformChain(transform: (NumassEvent, NumassEvent) -> Pair<Short, Long>): NumassBlock {
+fun NumassBlock.transformChain(transform: (NumassEvent, NumassEvent) -> Pair<Short, Long>?): NumassBlock {
     return SimpleBlock(this.startTime, this.length, this.meta) { owner ->
         this.events.asSequence()
                 .sortedBy { it.timeOffset }
-                .zipWithNext(transform).map { NumassEvent(it.first, it.second, owner) }.asIterable()
+                .zipWithNext(transform)
+                .filterNotNull()
+                .map { NumassEvent(it.first, it.second, owner) }.asIterable()
     }
 }
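
The transform lambda may now return null for event pairs that should be dropped; the combination of zipWithNext and filterNotNull from the Kotlin standard library does the filtering. A toy sketch of the same pattern on plain numbers, illustrative only and using no repository types:

fun main() {
    val offsets = listOf(0L, 120L, 130L, 900L, 905L)

    // pair up neighbouring values, keep only pairs closer than 50 apart
    val coincidences = offsets.asSequence()
            .zipWithNext { first, second -> if (second - first < 50) first to second else null }
            .filterNotNull()
            .toList()

    println(coincidences) // [(120, 130), (900, 905)]
}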
@@ -77,8 +93,7 @@ fun NumassBlock.filterChain(condition: (NumassEvent, NumassEvent) -> Boolean): N
 
 fun NumassBlock.filter(condition: (NumassEvent) -> Boolean): NumassBlock {
     return SimpleBlock(this.startTime, this.length, this.meta) { owner ->
-        this.events.asSequence()
-                .filter(condition).asIterable()
+        this.events.asSequence().filter(condition).asIterable()
     }
 }
 
@@ -119,11 +119,6 @@ interface NumassAnalyzer {
         val AMPLITUDE_ADAPTER: ValuesAdapter = Adapters.buildXYAdapter(CHANNEL_KEY, COUNT_RATE_KEY)
 
         // val MAX_CHANNEL = 10000
-
-        fun Table.withBinning(binSize: Int, loChannel: Int? = null, upChannel: Int? = null): Table {
-            return spectrumWithBinning(this,binSize, loChannel, upChannel)
-        }
-
     }
 }
 
@@ -135,8 +130,8 @@ interface NumassAnalyzer {
  * @param upChannel
  * @return
  */
-fun countInWindow(spectrum: Table, loChannel: Short, upChannel: Short): Long {
-    return spectrum.rows.filter { row ->
+fun Table.countInWindow(loChannel: Short, upChannel: Short): Long {
+    return this.rows.filter { row ->
         row.getInt(NumassAnalyzer.CHANNEL_KEY) in loChannel..(upChannel - 1)
     }.mapToLong { it -> it.getValue(NumassAnalyzer.COUNT_KEY).numberValue().toLong() }.sum()
 }
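
countInWindow (and, further down, withBinning) turns from a free helper into an extension function on Table, so call sites read value-first, e.g. spectrum.countInWindow(lo, up). A toy sketch of the same free-function-to-extension move on a stand-in type; Spectrum and its fields are hypothetical and not the dataforge Table API:

data class Spectrum(val counts: Map<Int, Long>)

// before: fun countInWindow(spectrum: Spectrum, loChannel: Int, upChannel: Int): Long
fun Spectrum.countInWindow(loChannel: Int, upChannel: Int): Long =
        counts.filterKeys { it in loChannel until upChannel }.values.sum()

fun main() {
    val spectrum = Spectrum(mapOf(1 to 10L, 2 to 20L, 5 to 5L))
    println(spectrum.countInWindow(1, 3)) // 30
}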
@@ -195,7 +190,7 @@ fun getAmplitudeSpectrum(events: Sequence<NumassEvent>, length: Double, config:
  * @return
  */
 @JvmOverloads
-fun spectrumWithBinning(spectrum: Table, binSize: Int, loChannel: Int? = null, upChannel: Int? = null): Table {
+fun Table.withBinning(binSize: Int, loChannel: Int? = null, upChannel: Int? = null): Table {
     val format = TableFormatBuilder()
             .addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
             .addNumber(NumassAnalyzer.COUNT_KEY, Y_VALUE_KEY)
@@ -204,9 +199,9 @@ fun spectrumWithBinning(spectrum: Table, binSize: Int, loChannel: Int? = null, u
             .addNumber("binSize")
     val builder = ListTable.Builder(format)
 
-    var chan = loChannel ?: spectrum.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.intValue() }.min().orElse(0)
+    var chan = loChannel ?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.intValue() }.min().orElse(0)
 
-    val top = upChannel ?: spectrum.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.intValue() }.max().orElse(1)
+    val top = upChannel ?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.intValue() }.max().orElse(1)
 
     while (chan < top - binSize) {
         val count = AtomicLong(0)
@@ -216,7 +211,7 @@ fun spectrumWithBinning(spectrum: Table, binSize: Int, loChannel: Int? = null, u
         val binLo = chan
         val binUp = chan + binSize
 
-        spectrum.rows.filter { row ->
+        this.rows.filter { row ->
             row.getInt(NumassAnalyzer.CHANNEL_KEY) in binLo..(binUp - 1)
         }.forEach { row ->
             count.addAndGet(row.getValue(NumassAnalyzer.COUNT_KEY, 0).longValue())
@@ -28,21 +28,23 @@ class ProtoNumassPoint(private val envelope: Envelope) : NumassPoint {
 
     private val point: NumassProto.Point
         get() = try {
-            envelope.dataStream.use { stream -> return NumassProto.Point.parseFrom(stream) }
+            envelope.dataStream.use {
+                NumassProto.Point.parseFrom(it)
+            }
         } catch (ex: IOException) {
-            throw RuntimeException("Failed to read point via protobuf")
+            throw RuntimeException("Failed to read point via protobuf", ex)
         }
 
     override val blocks: Stream<NumassBlock>
         get() = point.channelsList.stream()
                 .flatMap { channel ->
                     channel.blocksList.stream()
-                            .map { block -> ProtoBlock(channel.num.toInt(), block) }
+                            .map { block -> ProtoBlock(channel.id.toInt(), block) }
                            .sorted(Comparator.comparing<ProtoBlock, Instant> { it.startTime })
                 }
 
 
-    override val meta: Meta =envelope.meta
+    override val meta: Meta = envelope.meta
 
     companion object {
         fun readFile(path: Path): ProtoNumassPoint {
@@ -63,7 +65,7 @@ class ProtoNumassPoint(private val envelope: Envelope) : NumassPoint {
 
 class ProtoBlock(val channel: Int, private val block: NumassProto.Point.Channel.Block) : NumassBlock {
     override val meta: Meta by lazy {
-        buildMeta{
+        buildMeta {
             "channel" to channel
         }
     }
@@ -72,7 +74,13 @@ class ProtoBlock(val channel: Int, private val block: NumassProto.Point.Channel.
         get() = ProtoNumassPoint.ofEpochNanos(block.time)
 
     override val length: Duration
-        get() = Duration.ofNanos((meta.getDouble("params.b_size") / meta.getDouble("params.sample_freq") * 1e9).toLong())
+        get() = if (meta.hasMeta("params")) {
+            Duration.ofNanos((meta.getDouble("params.b_size") / meta.getDouble("params.sample_freq") * 1e9).toLong())
+        } else if (meta.hasValue("length")) {
+            Duration.ofNanos(meta.getValue("length").longValue())
+        } else {
+            Duration.ZERO
+        }
 
 
     override val events: Stream<NumassEvent>
@@ -86,7 +94,13 @@ class ProtoBlock(val channel: Int, private val block: NumassProto.Point.Channel.
 
     override val frames: Stream<NumassFrame>
         get() {
-            val tickSize = Duration.ofNanos((1e9 / meta.getInt("params.sample_freq")).toLong())
+            val tickSize = if (meta.hasMeta("params")) {
+                Duration.ofNanos((1e9 / meta.getInt("params.sample_freq")).toLong())
+            } else if (meta.hasValue("tick_length")) {
+                Duration.ofNanos(meta.getInt("tick_length").toLong())
+            } else {
+                Duration.ofNanos(1)
+            }
             return block.framesList.stream().map { frame ->
                 val time = startTime.plusNanos(frame.time)
                 val data = frame.data.asReadOnlyByteBuffer()
@@ -20,11 +20,14 @@ message Point {
                 repeated uint64 times = 1; // Array of time in nanos from the beginning of the block
                 repeated uint64 amplitudes = 2; // Array of amplitudes of events in channels
             }
 
             uint64 time = 1; // Block start in epoch nanos
             repeated Frame frames = 2; // Frames array
             Events events = 3; // Events array
+            uint64 length = 4; // block size in nanos
+            uint64 bin_size = 5; // tick size in nanos
         }
-        uint64 num = 1; // The number of measuring channel
+        uint64 id = 1; // The number of measuring channel
         repeated Block blocks = 2; // Blocks
     }
     repeated Channel channels = 1; // Array of measuring channels
@@ -77,7 +77,7 @@ shell.eval {
                 DataPlot.plot(
                         it.key as String,
                         adapter,
-                        NumassAnalyzerKt.spectrumWithBinning(it.value as Table, binning)
+                        NumassAnalyzerKt.withBinning(it.value as Table, binning)
                 )
         )
     }
@@ -12,6 +12,7 @@ import hep.dataforge.tables.TableTransform;
 import hep.dataforge.tables.ValueMap;
 import hep.dataforge.values.Values;
 import inr.numass.data.analyzers.NumassAnalyzer;
+import inr.numass.data.analyzers.NumassAnalyzerKt;
 import inr.numass.data.api.NumassPoint;
 import org.apache.commons.math3.analysis.ParametricUnivariateFunction;
 import org.apache.commons.math3.exception.DimensionMismatchException;
@@ -23,7 +24,6 @@ import java.util.stream.Collectors;
 
 import static inr.numass.data.analyzers.NumassAnalyzer.CHANNEL_KEY;
 import static inr.numass.data.analyzers.NumassAnalyzer.COUNT_RATE_KEY;
-import static inr.numass.data.analyzers.NumassAnalyzerKt.spectrumWithBinning;
 
 /**
  * A class to calculate underflow correction
@@ -110,7 +110,7 @@ public class UnderflowCorrection {
             throw new IllegalArgumentException("Wrong borders for underflow calculation");
         }
         Table binned = TableTransform.filter(
-                spectrumWithBinning(spectrum, binning),
+                NumassAnalyzerKt.withBinning(spectrum, binning),
                 CHANNEL_KEY,
                 xLow,
                 xHigh
@@ -22,19 +22,24 @@ import hep.dataforge.fx.plots.FXPlotManager
 import hep.dataforge.kodex.KMetaBuilder
 import hep.dataforge.kodex.buildMeta
 import hep.dataforge.kodex.configure
+import hep.dataforge.kodex.nullable
 import hep.dataforge.plots.data.DataPlot
 import hep.dataforge.tables.Adapters
 import inr.numass.data.analyzers.NumassAnalyzer
 import inr.numass.data.analyzers.SmartAnalyzer
+import inr.numass.data.analyzers.withBinning
 import inr.numass.data.api.NumassBlock
 
 
-fun NumassBlock.plotAmplitudeSpectrum(plotName: String = "spectrum", frameName: String = "", context: Context = Global, metaAction: KMetaBuilder.() -> Unit) {
+fun NumassBlock.plotAmplitudeSpectrum(plotName: String = "spectrum", frameName: String = "", context: Context = Global, metaAction: KMetaBuilder.() -> Unit = {}) {
     val meta = buildMeta("meta", metaAction)
     val plotManager = context.load(FXPlotManager::class)
-    val data = SmartAnalyzer().getAmplitudeSpectrum(this, meta.getMetaOrEmpty("spectrum"))
+    val binning = meta.getInt("binning", 20)
+    val lo = meta.optNumber("window.lo").nullable?.toInt()
+    val up = meta.optNumber("window.up").nullable?.toInt()
+    val data = SmartAnalyzer().getAmplitudeSpectrum(this, meta.getMetaOrEmpty("spectrum")).withBinning(binning, lo, up)
     plotManager.display(name = frameName) {
-        val valueAxis = if (meta.getBoolean("normalize",true)) {
+        val valueAxis = if (meta.getBoolean("normalize", false)) {
             NumassAnalyzer.COUNT_RATE_KEY
         } else {
             NumassAnalyzer.COUNT_KEY
@@ -51,6 +56,7 @@ fun NumassBlock.plotAmplitudeSpectrum(plotName: String = "spectrum", frameName:
             "showErrors" to false
             "JFreeChart.cache" to true
         }
+        plot.configure(meta)
         add(plot)
     }
 }
@@ -27,6 +27,7 @@ import inr.numass.data.NumassDataUtils
 import inr.numass.data.analyzers.NumassAnalyzer
 import inr.numass.data.analyzers.SmartAnalyzer
 import inr.numass.data.analyzers.subtractAmplitudeSpectrum
+import inr.numass.data.analyzers.withBinning
 import inr.numass.data.api.NumassSet
 import inr.numass.data.storage.NumassStorageFactory
 
@@ -24,8 +24,8 @@ import hep.dataforge.plots.data.DataPlot
 import inr.numass.NumassPlugin
 import inr.numass.data.NumassDataUtils
 import inr.numass.data.analyzers.NumassAnalyzer.Companion.AMPLITUDE_ADAPTER
-import inr.numass.data.analyzers.NumassAnalyzer.Companion.withBinning
 import inr.numass.data.analyzers.SmartAnalyzer
+import inr.numass.data.analyzers.withBinning
 import inr.numass.data.api.NumassSet
 import inr.numass.data.storage.NumassStorageFactory
 
@@ -23,8 +23,9 @@ import hep.dataforge.kodex.buildMeta
 import hep.dataforge.plots.data.DataPlot
 import inr.numass.NumassPlugin
 import inr.numass.data.analyzers.NumassAnalyzer.Companion.AMPLITUDE_ADAPTER
-import inr.numass.data.analyzers.NumassAnalyzer.Companion.withBinning
 import inr.numass.data.analyzers.SmartAnalyzer
+import inr.numass.data.analyzers.withBinning
 import inr.numass.data.storage.ProtoNumassPoint
 import java.nio.file.Paths
 
@@ -1,11 +1,62 @@
 package inr.numass.scripts.tristan
 
+import inr.numass.data.channel
+import inr.numass.data.plotAmplitudeSpectrum
 import inr.numass.data.storage.ProtoNumassPoint
+import inr.numass.data.transformChain
+import java.io.File
 
 fun main(args: Array<String>) {
-    val file = ProtoNumassPoint.readFile("D:\\Work\\Numass\\data\\TRISTAN_11_2017\\df\\gun_16_19.df ")
-    val filtered = file.filter { it.channel == 4 }
+    val file = File("D:\\Work\\Numass\\data\\TRISTAN_11_2017\\df\\gun_16_19.df").toPath()
+    val point = ProtoNumassPoint.readFile(file)
+    point.plotAmplitudeSpectrum()
+
+    point.blocks.filter { it.channel == 0 }.findFirst().ifPresent {
+        it.plotAmplitudeSpectrum(plotName = "0") {
+            "title" to "pixel 0"
+            "binning" to 50
+        }
+    }
+
+    point.blocks.filter { it.channel == 4 }.findFirst().ifPresent {
+        it.plotAmplitudeSpectrum(plotName = "4") {
+            "title" to "pixel 4"
+            "binning" to 50
+        }
+        println("Number of events for pixel 4 is ${it.events.count()}")
+    }
+
+    listOf(0, 20, 50, 100, 200).forEach { window ->
+
+        point.transformChain { first, second ->
+            val dt = second.timeOffset - first.timeOffset
+            if (second.channel == 4 && first.channel == 0 && dt > window && dt < 1000) {
+                Pair((first.amp + second.amp).toShort(), second.timeOffset)
+            } else {
+                null
+            }
+        }.also {
+            println("Number of events for $window is ${it.events.count()}")
+        }.plotAmplitudeSpectrum(plotName = "filtered.before.$window") {
+            "binning" to 50
+        }
+
+    }
+
+    listOf(0, 20, 50, 100, 200).forEach { window ->
+
+        point.transformChain { first, second ->
+            val dt = second.timeOffset - first.timeOffset
+            if (second.channel == 0 && first.channel == 4 && dt > window && dt < 1000) {
+                Pair((first.amp + second.amp).toShort(), second.timeOffset)
+            } else {
+                null
+            }
+        }.also {
+            println("Number of events for $window is ${it.events.count()}")
+        }.plotAmplitudeSpectrum(plotName = "filtered.after.$window") {
+            "binning" to 50
+        }
+
+    }
 }
@@ -10,13 +10,12 @@ import hep.dataforge.meta.Meta
 import hep.dataforge.storage.commons.StorageUtils
 import hep.dataforge.tables.ListTable
 import hep.dataforge.tables.Table
-import hep.dataforge.tables.TableTransform
 import hep.dataforge.tables.ValueMap
 import hep.dataforge.values.Values
 import inr.numass.data.analyzers.NumassAnalyzer.Companion.CHANNEL_KEY
 import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_KEY
 import inr.numass.data.analyzers.TimeAnalyzer
-import inr.numass.data.analyzers.spectrumWithBinning
+import inr.numass.data.analyzers.withBinning
 import inr.numass.data.api.NumassPoint
 import inr.numass.data.api.NumassSet
 import inr.numass.data.api.SimpleNumassPoint
@@ -78,12 +77,13 @@ object Threshold {
         if (xHigh <= xLow) {
             throw IllegalArgumentException("Wrong borders for underflow calculation");
         }
-        val binned = TableTransform.filter(
-                spectrumWithBinning(spectrum, binning),
-                CHANNEL_KEY,
-                xLow,
-                xHigh
-        )
+        val binned = spectrum.withBinning(binning, xLow, xHigh)
+//        val binned = TableTransform.filter(
+//                spectrum.withBinning(binning),
+//                CHANNEL_KEY,
+//                xLow,
+//                xHigh
+//        )
 
         return binned.rows
                 .map {
@@ -14,7 +14,7 @@ import hep.dataforge.tables.Adapters
 import hep.dataforge.tables.Table
 import inr.numass.data.analyzers.NumassAnalyzer
 import inr.numass.data.analyzers.SimpleAnalyzer
-import inr.numass.data.analyzers.spectrumWithBinning
+import inr.numass.data.analyzers.withBinning
 import inr.numass.data.api.NumassPoint
 import javafx.beans.Observable
 import javafx.beans.binding.DoubleBinding
@@ -133,7 +133,7 @@ class AmplitudeView(
             DataPlot.plot(
                     key,
                     Adapters.buildXYAdapter(NumassAnalyzer.CHANNEL_KEY, valueAxis),
-                    spectrumWithBinning(getSpectrum(point), binning)
+                    getSpectrum(point).withBinning(binning)
             ).configure {
                 "connectionType" to "step"
                 "thickness" to 2
@@ -173,7 +173,7 @@ class SpectrumView(
 
         runGoal("spectrumData[$name]") {
             set.points.map { point ->
-                val count = countInWindow(getSpectrum(point), loChannel.toShort(), upChannel.toShort());
+                val count = getSpectrum(point).countInWindow(loChannel.toShort(), upChannel.toShort());
                 val seconds = point.length.toMillis() / 1000.0;
                 runLater {
                     container.progress = progress.incrementAndGet().toDouble() / totalProgress