Building function servers
parent ae97c0be68
commit caf8897194
@@ -11,7 +11,7 @@ class ServerApp : App(BoardView::class) {


     override fun start(stage: Stage) {
-        getConfig(this@ServerApp).ifPresent {
+        getConfig(this@ServerApp)?.let {
             controller.configure(it)
         }

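The hunk above replaces an Optional-style ifPresent callback with Kotlin's safe-call plus let. A minimal sketch of the idiom, assuming the configuration lookup now returns a nullable Meta? rather than Optional<Meta> (the helper name is illustrative, not from the repository):

    fun applyConfig(lookup: () -> Meta?, apply: (Meta) -> Unit) {
        // Safe-call + let runs the block only when the lookup produced a value,
        // mirroring Optional.ifPresent for a nullable result.
        lookup()?.let { apply(it) }
    }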
@@ -49,7 +49,7 @@ internal fun createChannel(meta: Meta): PKT8Channel {

 /**
  * Created by darksnake on 28-Sep-16.
  */
-class PKT8Channel(private val _meta: Meta, private val func: (Double) -> Double) : Named, Metoid {
+class PKT8Channel(override val meta: Meta, private val func: (Double) -> Double) : Named, Metoid {

     private val _name: String by meta.stringValue()

@@ -57,10 +57,6 @@ class PKT8Channel(private val _meta: Meta, private val func: (Double) -> Double)
         return _name
     }

-    override fun getMeta(): Meta {
-        return _meta
-    }
-
     fun description(): String {
         return meta.getString("description", "")
     }

@@ -85,7 +85,7 @@ class PKT8Device(context: Context, meta: Meta) : PortSensor(context, meta) {

     val abuf: String by stringState(ABUF)

-    private val duration = Duration.parse(getMeta().getString("averagingDuration", "PT30S"))
+    private val duration = Duration.parse(meta.getString("averagingDuration", "PT30S"))

     private fun buildLoader(connection: StorageConnection): TableLoader {
         val storage = connection.storage

@@ -176,7 +176,7 @@ class PKT8Display : DeviceDisplay<PKT8Device>(), PKT8ValueListener {
     }

     init {
-        if (device.getMeta().hasMeta("plotConfig")) {
+        if (device.meta.hasMeta("plotConfig")) {
             with(plotFrame.plots) {
                 //configure(device.meta().getMeta("plotConfig"))
                 TimePlot.setMaxItems(this, 1000)

@@ -204,7 +204,7 @@ class PKT8Display : DeviceDisplay<PKT8Device>(), PKT8ValueListener {
         } else {
             device.channels.values.find { it.name == channelName }?.let {
                 TimePlot(it.name).apply {
-                    configure(it.getMeta())
+                    configure(it.meta)
                     plotFrame.add(this)
                 }
             }

@@ -74,7 +74,7 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
     val isFilamentOn: Boolean
         get() = getState("filamentOn").booleanValue()

-    private val averagingDuration: Duration = Duration.parse(getMeta().getString("averagingDuration", "PT30S"))
+    private val averagingDuration: Duration = Duration.parse(meta.getString("averagingDuration", "PT30S"))

     @Throws(ControlException::class)
     override fun init() {

@@ -140,7 +140,7 @@ class MspDevice(context: Context, meta: Meta) : PortSensor(context, meta) {

     @Throws(MeasurementException::class)
     override fun createMeasurement(): PeakJumpMeasurement {
-        val measurementMeta = getMeta().getMeta("peakJump")
+        val measurementMeta = meta.getMeta("peakJump")
         val s = measurementMeta.getString("type", "peakJump")
         if (s == "peakJump") {
             val measurement = PeakJumpMeasurement(measurementMeta)

@@ -71,7 +71,7 @@ class MspDisplay() : DeviceDisplay<MspDevice>(), DeviceListener, NamedValueListe


     inner class MspView : View("Numass mass-spectrometer measurement") {
-        private val plotFrameMeta: Meta = device.getMeta().getMeta("plotConfig", device.meta)
+        private val plotFrameMeta: Meta = device.meta.getMeta("plotConfig", device.meta)

         private val plotFrame: PlotFrame by lazy {
             val basePlotConfig = MetaBuilder("plotFrame")

@@ -68,7 +68,7 @@ class VacCollectorDisplay : DeviceDisplay<VacCollectorDevice>() {
     private val plottables = TimePlottableGroup().apply {
         viewList.forEach {
             val plot = TimePlot(it.getTitle(), it.device.name)
-            plot.configure(it.device.getMeta())
+            plot.configure(it.device.meta)
             add(plot)
         }
         setValue("thickness", 3)

@@ -6,7 +6,6 @@
 package inr.numass.control.readvac

 import hep.dataforge.control.devices.Device
-import hep.dataforge.control.devices.PortSensor.CONNECTED_STATE
 import hep.dataforge.control.devices.Sensor
 import hep.dataforge.control.measurements.Measurement
 import hep.dataforge.control.measurements.MeasurementListener

@@ -78,7 +77,7 @@ open class VacDisplay : DeviceDisplay<Sensor>(), MeasurementListener {
     }

     fun getTitle(): String{
-        return device.getMeta().getString("title", device.name);
+        return device.meta.getString("title", device.name);
     }

     inner class VacView : View("Numass vacuumeter ${getTitle()}") {

@@ -123,7 +122,7 @@ open class VacDisplay : DeviceDisplay<Sensor>(), MeasurementListener {
                 prefHeight = 60.0
                 alignment = Pos.CENTER_RIGHT
                 textProperty().bind(valueProperty)
-                device.getMeta().optValue("color").ifPresent { colorValue -> textFill = Color.valueOf(colorValue.stringValue()) }
+                device.meta.optValue("color").ifPresent { colorValue -> textFill = Color.valueOf(colorValue.stringValue()) }
                 style {
                     fontSize = 24.pt
                     fontWeight = FontWeight.BOLD

@@ -136,7 +135,7 @@ open class VacDisplay : DeviceDisplay<Sensor>(), MeasurementListener {
                 prefHeight = 60.0
                 prefWidth = 75.0
                 alignment = Pos.CENTER_LEFT
-                text = device.getMeta().getString("units", "mbar")
+                text = device.meta.getString("units", "mbar")
                 style {
                     fontSize = 24.pt
                 }

@@ -2,8 +2,8 @@ package inr.numass

 import hep.dataforge.io.envelopes.*
 import hep.dataforge.values.Value
-import inr.numass.data.legacy.NumassFileEnvelope.LEGACY_END_SEQUENCE
-import inr.numass.data.legacy.NumassFileEnvelope.LEGACY_START_SEQUENCE
+import inr.numass.data.legacy.NumassFileEnvelope.Companion.LEGACY_END_SEQUENCE
+import inr.numass.data.legacy.NumassFileEnvelope.Companion.LEGACY_START_SEQUENCE
 import org.slf4j.LoggerFactory
 import java.io.IOException
 import java.nio.ByteBuffer

@@ -1,17 +1,16 @@
 package inr.numass.data

 import hep.dataforge.io.envelopes.Envelope
+import hep.dataforge.kodex.nullable
 import hep.dataforge.meta.Meta
 import hep.dataforge.meta.MetaBuilder
-import inr.numass.data.api.NumassBlock
-import inr.numass.data.api.NumassPoint
-import inr.numass.data.api.NumassSet
-import inr.numass.data.api.SimpleNumassPoint
+import inr.numass.data.api.*
 import inr.numass.data.storage.ProtoBlock
 import java.io.InputStream
 import java.util.stream.Collectors
 import java.util.stream.Stream
 import java.util.zip.ZipInputStream
+import kotlin.streams.asSequence

 /**
  * Created by darksnake on 30-Jan-17.

@@ -19,16 +18,16 @@ import java.util.zip.ZipInputStream
 object NumassDataUtils {
     fun join(name: String, sets: Collection<NumassSet>): NumassSet {
         return object : NumassSet {
-            override fun getPoints(): Stream<NumassPoint> {
+            override val points: Stream<out NumassPoint> by lazy {
                 val points = sets.stream().flatMap<NumassPoint> { it.points }
                         .collect(Collectors.groupingBy<NumassPoint, Double> { it.voltage })
-                return points.entries.stream().map { entry -> SimpleNumassPoint(entry.key, entry.value) }
+                points.entries.stream().map { entry -> SimpleNumassPoint(entry.key, entry.value) }
             }

-            override fun getMeta(): Meta {
+            override val meta: Meta by lazy {
                 val metaBuilder = MetaBuilder()
                 sets.forEach { set -> metaBuilder.putNode(set.name, set.meta) }
-                return metaBuilder
+                metaBuilder
             }

             override fun getName(): String {

@@ -52,9 +51,41 @@ val Envelope.dataStream: InputStream
         this.data.stream
     }

-val NumassBlock.channel: Int
+val NumassBlock.channel: Int?
     get() = if (this is ProtoBlock) {
         this.channel
     } else {
-        0
+        this.meta.optValue("channel").map { it.intValue() }.nullable
     }

+
+fun NumassBlock.transformChain(transform: (NumassEvent, NumassEvent) -> Pair<Short, Long>): NumassBlock {
+    return SimpleBlock(this.startTime, this.length, this.meta) { owner ->
+        this.events.asSequence()
+                .sortedBy { it.timeOffset }
+                .zipWithNext(transform).map { NumassEvent(it.first, it.second, owner) }.asIterable()
+    }
+}
+
+fun NumassBlock.filterChain(condition: (NumassEvent, NumassEvent) -> Boolean): NumassBlock {
+    return SimpleBlock(this.startTime, this.length, this.meta) { owner ->
+        this.events.asSequence()
+                .sortedBy { it.timeOffset }
+                .zipWithNext().filter { condition.invoke(it.first, it.second) }.map { it.second }.asIterable()
+    }
+}
+
+fun NumassBlock.filter(condition: (NumassEvent) -> Boolean): NumassBlock {
+    return SimpleBlock(this.startTime, this.length, this.meta) { owner ->
+        this.events.asSequence()
+                .filter(condition).asIterable()
+    }
+}
+
+fun NumassBlock.transform(transform: (NumassEvent) -> OrphanNumassEvent): NumassBlock {
+    return SimpleBlock(this.startTime, this.length, this.meta) { owner ->
+        this.events.asSequence()
+                .map { transform(it).adopt(owner) }
+                .asIterable()
+    }
+}
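The new NumassBlock extension functions above derive new blocks lazily from a sorted event sequence. A hedged usage sketch; the amplitude threshold and the pair-time condition are illustrative values, not taken from the commit:

    // Assumes a NumassBlock instance and the extensions above are in scope.
    fun cleanBlock(block: NumassBlock): NumassBlock {
        // Keep only events above an illustrative amplitude threshold.
        val selected = block.filter { it.amp > 400 }
        // Then drop the trailing event of any pair separated by less than 1000 ns.
        return selected.filterChain { previous, next -> next.timeOffset - previous.timeOffset > 1000 }
    }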
@@ -88,7 +88,7 @@ class TimeAnalyzer @JvmOverloads constructor(private val processor: SignalProces
                 .reduce(null) { v1, v2 -> this.combineBlockResults(v1, v2) }

         val map = HashMap(res.asMap())
-        map.put(HV_KEY, Value.of(point.voltage))
+        map[HV_KEY] = Value.of(point.voltage)
         return ValueMap(map)
     }


@@ -1,5 +1,6 @@
 package inr.numass.data.api

+import hep.dataforge.meta.Meta
 import java.time.Duration
 import java.time.Instant
 import java.util.*

@@ -9,9 +10,14 @@ import java.util.stream.Stream
  * A block constructed from a set of other blocks. Internal blocks are not necessary subsequent. Blocks are automatically sorted.
  * Created by darksnake on 16.07.2017.
  */
-class MetaBlock : NumassBlock {
+class MetaBlock(blocks: Collection<NumassBlock>, override val meta: Meta = Meta.empty()) : NumassBlock {

     private val blocks = TreeSet(Comparator.comparing<NumassBlock, Instant>{ it.startTime })

+    init{
+        this.blocks.addAll(blocks)
+    }
+
     override val startTime: Instant
         get() = blocks.first().startTime

@@ -28,11 +34,5 @@ class MetaBlock : NumassBlock {
             .sorted(Comparator.comparing<NumassBlock, Instant>{ it.startTime })
             .flatMap{ it.frames }

-    constructor(vararg blocks: NumassBlock) {
-        this.blocks.addAll(Arrays.asList(*blocks))
-    }
-
-    constructor(blocks: Collection<NumassBlock>) {
-        this.blocks.addAll(blocks)
-    }
 }
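MetaBlock now receives its constituent blocks (and an optional Meta) through the primary constructor instead of the removed secondary constructors. A minimal sketch of the new call site, assuming only what the hunks above show:

    // Combine several blocks into one sorted composite; meta defaults to Meta.empty().
    fun combine(parts: Collection<NumassBlock>): NumassBlock = MetaBlock(parts)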
@@ -1,12 +1,33 @@
 package inr.numass.data.api

+import hep.dataforge.meta.Meta
+import hep.dataforge.meta.Metoid
+import inr.numass.data.channel
+import kotlinx.coroutines.experimental.runBlocking
+import java.io.Serializable
 import java.time.Duration
 import java.time.Instant
 import java.util.stream.Stream


-typealias NumassChannel = Int
+/**
+ * A single numass event with given amplitude and time.
+ *
+ * @author Darksnake
+ * @property amp the amplitude of the event
+ * @property timeOffset time in nanoseconds relative to block start
+ * @property owner an owner block for this event
+ *
+ */
+class NumassEvent(val amp: Short, val timeOffset: Long, val owner: NumassBlock) : Serializable {
+
+    val channel: Int?
+        get() = owner.channel
+
+    val time: Instant
+        get() = owner.startTime.plusNanos(timeOffset)
+
+}
+
+
 /**

@@ -15,13 +36,7 @@ typealias NumassChannel = Int
  *
  * Created by darksnake on 06-Jul-17.
  */
-interface NumassBlock {
+interface NumassBlock : Metoid {

-    /**
-     * A channel
-     */
-    val channel: NumassChannel
-        get() = DEFAULT_CHANNEL
-
     /**
      * The absolute start time of the block

@@ -42,8 +57,37 @@ interface NumassBlock {
      * Stream of frames. Could be empty
      */
     val frames: Stream<NumassFrame>
+}

-    companion object {
-        val DEFAULT_CHANNEL: NumassChannel = -1
-    }
+typealias OrphanNumassEvent = Pair<Short, Long>
+
+inline fun OrphanNumassEvent.adopt(parent: NumassBlock): NumassEvent {
+    return NumassEvent(this.first, this.second, parent)
+}
+
+val OrphanNumassEvent.timeOffset: Long
+    get() = this.second
+
+val OrphanNumassEvent.amp: Short
+    get() = this.first
+
+
+/**
+ * A simple in-memory implementation of block of events. No frames are allowed
+ * Created by darksnake on 08.07.2017.
+ */
+class SimpleBlock(
+        override val startTime: Instant,
+        override val length: Duration,
+        override val meta: Meta = Meta.empty(),
+        producer: suspend (NumassBlock) -> Iterable<NumassEvent>) : NumassBlock, Serializable {
+
+    private val eventList = runBlocking { producer(this@SimpleBlock).toList() }
+
+    override val frames: Stream<NumassFrame> = Stream.empty()
+
+    override val events: Stream<NumassEvent>
+        get() = eventList.stream()
+
 }
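An OrphanNumassEvent is just an (amplitude, time-offset) pair; adopt attaches it to a block, and SimpleBlock now builds its event list from a producer lambda. A hedged sketch of the pattern, with arbitrary illustrative amplitudes and timestamps:

    fun sampleBlock(): NumassBlock {
        val orphans: List<OrphanNumassEvent> = listOf(Pair(1200.toShort(), 0L), Pair(900.toShort(), 5_000L))
        // meta falls back to Meta.empty(); the trailing lambda is the event producer.
        return SimpleBlock(Instant.now(), Duration.ofSeconds(1)) { owner ->
            orphans.map { it.adopt(owner) }
        }
    }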
@@ -1,38 +0,0 @@
-/*
- * Copyright 2015 Alexander Nozik.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package inr.numass.data.api
-
-import java.io.Serializable
-import java.time.Instant
-
-/**
- * A single numass event with given amplitude and time.
- *
- * @author Darksnake
- * @property amp the amplitude of the event
- * @property blockTime
- * @property timeOffset time in nanoseconds relative to block start
- *
- */
-class NumassEvent(val amp: Short, val timeOffset: Long, val block: NumassBlock? = null) : Serializable {
-
-    val channel: NumassChannel?
-        get() = block?.channel
-
-    val time: Instant
-        get() = (block?.startTime ?: Instant.EPOCH).plusNanos(timeOffset)
-
-}

@@ -23,7 +23,7 @@ import java.util.stream.Stream
 */
 interface NumassSet : Named, Metoid, Iterable<NumassPoint>, Provider {

-    val points: Stream<NumassPoint>
+    val points: Stream<out NumassPoint>

     /**
      * Get the first point if it exists. Throw runtime exception otherwise.

@@ -58,7 +58,7 @@ interface NumassSet : Named, Metoid, Iterable<NumassPoint>, Provider {
      * @param voltage
      * @return
      */
-    fun optPoint(voltage: Double): Optional<NumassPoint> {
+    fun optPoint(voltage: Double): Optional<out NumassPoint> {
         return points.filter { it -> it.voltage == voltage }.findFirst()
     }

@@ -73,7 +73,7 @@ interface NumassSet : Named, Metoid, Iterable<NumassPoint>, Provider {
     }

     @Provides(NUMASS_POINT_PROVIDER_KEY)
-    fun optPoint(voltage: String): Optional<NumassPoint> {
+    fun optPoint(voltage: String): Optional<out NumassPoint> {
         return optPoint(java.lang.Double.parseDouble(voltage))
     }

@@ -1,19 +0,0 @@
-package inr.numass.data.api
-
-import java.io.Serializable
-import java.time.Duration
-import java.time.Instant
-import java.util.stream.Stream
-
-/**
- * A simple in-memory implementation of block of events. No frames are allowed
- * Created by darksnake on 08.07.2017.
- */
-class SimpleBlock(override val startTime: Instant, override val length: Duration, private val eventList: List<NumassEvent>) : NumassBlock, Serializable {
-
-    override val frames: Stream<NumassFrame> = Stream.empty()
-
-    override val events: Stream<NumassEvent>
-        get() = eventList.stream()
-
-}

@@ -24,14 +24,13 @@ import java.util.stream.Stream
 */
 class NumassDatFile @Throws(IOException::class)
 constructor(private val name: String, private val path: Path, meta: Meta) : NumassSet {
-    private val meta: Meta
+    override val meta: Meta

     private val hVdev: Double
-        get() = getMeta().getDouble("dat.hvDev", 2.468555393226049)
+        get() = meta.getDouble("dat.hvDev", 2.468555393226049)

-    override//int lab = readBlock(channel,1).get();
     //TODO check point start
-    val points: Stream<NumassPoint>
+    override val points: Stream<NumassPoint>
         get() = try {
             Files.newByteChannel(path, READ).use { channel ->
                 var lab: Int

@@ -58,16 +57,12 @@ constructor(private val name: String, private val path: Path, meta: Meta) : Numa
                     .build()
         }

-    override fun getMeta(): Meta {
-        return meta
-    }
-
     override fun getName(): String {
         return name
     }

     private fun hasUset(): Boolean {
-        return getMeta().getBoolean("dat.uSet", true)
+        return meta.getBoolean("dat.uSet", true)
     }

     @Throws(IOException::class)

@@ -127,10 +122,10 @@ constructor(private val name: String, private val path: Path, meta: Meta) : Numa

         if (phoneFlag) {
             timeDiv /= 20.0
-            length = (length*20).toShort()
+            length = (length * 20).toShort()
         }

-        val events = ArrayList<NumassEvent>()
+        val events = ArrayList<Pair<Short, Long>>()
         var lab = readBlock(channel, 1).get().toInt()

         while (lab == 0xBF) {

@@ -161,13 +156,15 @@ constructor(private val name: String, private val path: Path, meta: Meta) : Numa
         val uRead = ending.getInt(39)

         val uSet: Double
-        if (!this.hasUset()) {
-            uSet = uRead.toDouble() / 10.0 / hVdev
+        uSet = if (!this.hasUset()) {
+            uRead.toDouble() / 10.0 / hVdev
         } else {
-            uSet = voltage / 10.0
+            voltage / 10.0
         }

-        val block = SimpleBlock(absoluteTime.toInstant(ZoneOffset.UTC), Duration.ofSeconds(length.toLong()), events)
+        val block = SimpleBlock(absoluteTime.toInstant(ZoneOffset.UTC), Duration.ofSeconds(length.toLong())) { parent ->
+            events.map { it.adopt(parent) }
+        }

         val pointMeta = MetaBuilder("point")
                 .setValue(HV_KEY, uSet)

@@ -192,7 +189,7 @@ constructor(private val name: String, private val path: Path, meta: Meta) : Numa
     }

     @Throws(IOException::class)
-    private fun readEvent(channel: SeekableByteChannel, b: Int, timeDiv: Double): NumassEvent {
+    private fun readEvent(channel: SeekableByteChannel, b: Int, timeDiv: Double): OrphanNumassEvent {
         val chanel: Short
         val time: Long

@@ -219,7 +216,7 @@ constructor(private val name: String, private val path: Path, meta: Meta) : Numa
             else -> throw IOException("Event head expected")
         }

-        return NumassEvent(chanel, (time / timeDiv).toLong())
+        return Pair(chanel, (time / timeDiv).toLong())
     }

     @Throws(IOException::class)

@@ -22,55 +22,38 @@ import java.util.stream.StreamSupport
 */
 class ClassicNumassPoint(private val envelope: Envelope) : NumassPoint {

-    override fun getBlocks(): Stream<NumassBlock> {
-        // double u = envelope.meta().getDouble("external_meta.HV1_value", 0);
-        val length: Long
-        if (envelope.meta.hasValue("external_meta.acquisition_time")) {
-            length = envelope.meta.getValue("external_meta.acquisition_time").longValue()
-        } else {
-            length = envelope.meta.getValue("acquisition_time").longValue()
-        }
-        return Stream.of(ClassicBlock(startTime, Duration.ofSeconds(length)))
-    }
+    override val blocks: Stream<NumassBlock>
+        get() {
+            val length: Long = if (envelope.meta.hasValue("external_meta.acquisition_time")) {
+                envelope.meta.getValue("external_meta.acquisition_time").longValue()
+            } else {
+                envelope.meta.getValue("acquisition_time").longValue()
+            }
+            return Stream.of(ClassicBlock(startTime, Duration.ofSeconds(length)))
+        }

-    override fun getStartTime(): Instant {
-        return if (meta.hasValue("start_time")) {
+    override val startTime: Instant
+        get() = if (meta.hasValue("start_time")) {
             meta.getValue("start_time").timeValue()
         } else {
             Instant.EPOCH
         }
-    }

-    override fun getVoltage(): Double {
-        return meta.getDouble("external_meta.HV1_value", 0.0)
-    }
+    override val meta: Meta = envelope.meta

-    override fun getIndex(): Int {
-        return meta.getInt("external_meta.point_index", -1)
-    }
+    override val voltage: Double = meta.getDouble("external_meta.HV1_value", 0.0)

-    override fun getMeta(): Meta {
-        return envelope.meta
-    }
+    override val index: Int = meta.getInt("external_meta.point_index", -1)

     //TODO split blocks using meta
-    private inner class ClassicBlock
-    // private final long blockOffset;
-    (private val startTime: Instant, private val length: Duration)// this.blockOffset = blockOffset;
-        : NumassBlock, Iterable<NumassEvent> {
+    private inner class ClassicBlock(
+            override val startTime: Instant,
+            override val length: Duration,
+            override val meta: Meta = Meta.empty()) : NumassBlock, Iterable<NumassEvent> {

-        override fun getStartTime(): Instant {
-            return startTime
-        }
-
-        override fun getLength(): Duration {
-            return length
-        }
-
-        override fun getEvents(): Stream<NumassEvent> {
-            return StreamSupport.stream(this.spliterator(), false)
-        }
+        override val events: Stream<NumassEvent>
+            get() = StreamSupport.stream(this.spliterator(), false)

         override fun iterator(): Iterator<NumassEvent> {
             val timeCoef = envelope.meta.getDouble("time_coeff", 50.0)

@@ -84,16 +67,16 @@ class ClassicNumassPoint(private val envelope: Envelope) : NumassPoint {

             override fun hasNext(): Boolean {
                 try {
-                    if (buffer.hasRemaining()) {
-                        return true
+                    return if (buffer.hasRemaining()) {
+                        true
                     } else {
                         buffer.flip()
                         val num = channel.read(buffer)
                         if (num > 0) {
                             buffer.flip()
-                            return true
+                            true
                         } else {
-                            return false
+                            false
                         }
                     }
                 } catch (e: IOException) {

@@ -104,10 +87,10 @@ class ClassicNumassPoint(private val envelope: Envelope) : NumassPoint {
             }

             override fun next(): NumassEvent {
-                val channel = java.lang.Short.toUnsignedInt(buffer.short).toShort()
+                val amp = java.lang.Short.toUnsignedInt(buffer.short).toShort()
                 val time = Integer.toUnsignedLong(buffer.int)
                 val status = buffer.get() // status is ignored
-                return NumassEvent(channel, startTime, (time * timeCoef).toLong())
+                return NumassEvent(amp, (time * timeCoef).toLong(), this@ClassicBlock)
             }
         }
     } catch (ex: IOException) {

@@ -117,9 +100,7 @@ class ClassicNumassPoint(private val envelope: Envelope) : NumassPoint {
         }


-        override fun getFrames(): Stream<NumassFrame> {
-            return Stream.empty()
-        }
+        override val frames: Stream<NumassFrame> = Stream.empty()
     }

     companion object {

@@ -53,6 +53,8 @@ class NumassDataLoader(
         override var isReadOnly: Boolean = true
 ) : AbstractLoader(storage, name, meta), ObjectLoader<Envelope>, NumassSet, Provider {

+    override val meta: Meta = items[META_FRAGMENT_NAME]?.get()?.meta ?: Meta.empty()
+
     private val hvEnvelope: Optional<Envelope>
         get() = Optional.ofNullable(items[HV_FRAGMENT_NAME]).map { it.get() }

@@ -74,13 +76,8 @@ class NumassDataLoader(
         return items.keys
     }

-    override fun getMeta(): Meta {
-        return items[META_FRAGMENT_NAME]?.get()?.meta ?: Meta.empty()
-    }
-
-    override fun getHvData(): Optional<Table> {
-        return hvEnvelope.map { hvEnvelope ->
+    override val hvData: Optional<Table>
+        get() = hvEnvelope.map { hvEnvelope ->
             try {
                 ColumnedDataReader(hvEnvelope.data.stream, "timestamp", "block", "value").toTable()
             } catch (ex: IOException) {

@@ -89,11 +86,11 @@ class NumassDataLoader(
             }
         }

-    }
-
-    override fun getPoints(): Stream<NumassPoint> {
-        return pointEnvelopes.map { ClassicNumassPoint(it) }
-    }
+    override val points: Stream<NumassPoint>
+        get() {
+            return pointEnvelopes.map { ClassicNumassPoint(it) }
+        }

     override fun pull(fragmentName: String): Envelope {
         //PENDING read data to memory?

@@ -111,9 +108,8 @@ class NumassDataLoader(
         throw TODO("Not supported yet.")
     }

-    override fun getStartTime(): Instant {
-        return meta.optValue("start_time").map<Instant> { it.timeValue() }.orElseGet { super.startTime }
-    }
+    override val startTime: Instant = meta.optValue("start_time").map<Instant> { it.timeValue() }.orElseGet { super.startTime }

     override val isOpen: Boolean
         get() = true

@@ -161,7 +157,7 @@ class NumassDataLoader(
                             || fileName.startsWith(POINT_FRAGMENT_NAME))
                 }.forEach { file ->
                     try {
-                        items[FileStorage.entryName(file)] = Supplier{ NumassFileEnvelope.open(file, true) }
+                        items[FileStorage.entryName(file)] = Supplier { NumassFileEnvelope.open(file, true) }
                     } catch (ex: Exception) {
                         LoggerFactory.getLogger(NumassDataLoader::class.java)
                                 .error("Can't load numass data directory " + FileStorage.entryName(directory), ex)

@@ -3,6 +3,7 @@ package inr.numass.data.storage
 import hep.dataforge.context.Context
 import hep.dataforge.context.Global
 import hep.dataforge.io.envelopes.Envelope
+import hep.dataforge.kodex.buildMeta
 import hep.dataforge.meta.Meta
 import inr.numass.data.NumassProto
 import inr.numass.data.api.NumassBlock

@@ -36,14 +37,12 @@ class ProtoNumassPoint(private val envelope: Envelope) : NumassPoint {
         get() = point.channelsList.stream()
                 .flatMap { channel ->
                     channel.blocksList.stream()
-                            .map { block -> ProtoBlock(channel.num.toInt(), block, meta) }
+                            .map { block -> ProtoBlock(channel.num.toInt(), block) }
                             .sorted(Comparator.comparing<ProtoBlock, Instant> { it.startTime })
                 }


-    override fun getMeta(): Meta {
-        return envelope.meta
-    }
+    override val meta: Meta = envelope.meta

     companion object {
         fun readFile(path: Path): ProtoNumassPoint {

@@ -62,7 +61,12 @@ class ProtoNumassPoint(private val envelope: Envelope) : NumassPoint {
     }
 }

-class ProtoBlock(override val channel: Int, private val block: NumassProto.Point.Channel.Block, private val meta: Meta) : NumassBlock {
+class ProtoBlock(val channel: Int, private val block: NumassProto.Point.Channel.Block) : NumassBlock {
+    override val meta: Meta by lazy {
+        buildMeta {
+            "channel" to channel
+        }
+    }

     override val startTime: Instant
         get() = ProtoNumassPoint.ofEpochNanos(block.time)
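With this change ProtoBlock publishes its channel number through its lazily built Meta ("channel" node), and the NumassBlock.channel extension earlier in the commit reads it back from meta for non-proto blocks. A hedged one-line sketch of the read side, using only code shown in this commit:

    // ProtoBlock answers from its stored field; other blocks fall back to meta, or null if absent.
    fun channelOf(block: NumassBlock): Int? = block.channel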
@ -1,155 +1,157 @@
|
|||||||
/*
|
///*
|
||||||
* To change this license header, choose License Headers in Project Properties.
|
// * To change this license header, choose License Headers in Project Properties.
|
||||||
* To change this template file, choose Tools | Templates
|
// * To change this template file, choose Tools | Templates
|
||||||
* and open the template in the editor.
|
// * and open the template in the editor.
|
||||||
*/
|
// */
|
||||||
|
|
||||||
package inr.numass.scripts
|
|
||||||
|
|
||||||
import hep.dataforge.grind.Grind
|
|
||||||
import hep.dataforge.values.Values
|
|
||||||
import inr.numass.NumassUtils
|
|
||||||
import inr.numass.data.api.NumassPoint
|
|
||||||
import inr.numass.data.storage.NumassDataLoader
|
|
||||||
import inr.numass.utils.NMEventGeneratorWithPulser
|
|
||||||
import inr.numass.utils.PileUpSimulator
|
|
||||||
import inr.numass.utils.UnderflowCorrection
|
|
||||||
import org.apache.commons.math3.random.JDKRandomGenerator
|
|
||||||
|
|
||||||
rnd = new JDKRandomGenerator();
|
|
||||||
|
|
||||||
////Loading data
|
|
||||||
File dataDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_1\\set_28")
|
|
||||||
//File dataDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_2_wide\\set_7")
|
|
||||||
if (!dataDir.exists()) {
|
|
||||||
println "dataDir directory does not exist"
|
|
||||||
}
|
|
||||||
def data = NumassDataLoader.fromLocalDir(null, dataDir).getNMPoints()
|
|
||||||
|
|
||||||
//File rootDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_1")
|
|
||||||
////File rootDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_2_wide")
|
|
||||||
////File rootDir = new File("D:\\Work\\Numass\\data\\2017_01\\Fill_2_wide")
|
|
||||||
//
|
//
|
||||||
//NumassStorage storage = NumassStorage.buildLocalNumassRoot(rootDir, true);
|
//package inr.numass.scripts
|
||||||
//
|
//
|
||||||
//Collection<NMPoint> data = NumassDataUtils.joinSpectra(
|
//import hep.dataforge.grind.Grind
|
||||||
// StorageUtils.loaderStream(storage)
|
//import hep.dataforge.values.Values
|
||||||
// .filter { it.key.matches("set_3.") }
|
//import inr.numass.NumassUtils
|
||||||
// .map {
|
//import inr.numass.data.api.NumassPoint
|
||||||
// println "loading ${it.key}"
|
//import inr.numass.data.storage.NumassDataLoader
|
||||||
// it.value
|
//
|
||||||
|
//import inr.numass.data.PileUpSimulator
|
||||||
|
//import inr.numass.utils.UnderflowCorrection
|
||||||
|
//import org.apache.commons.math3.random.JDKRandomGenerator
|
||||||
|
//
|
||||||
|
//rnd = new JDKRandomGenerator();
|
||||||
|
//
|
||||||
|
//////Loading data
|
||||||
|
//File dataDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_1\\set_28")
|
||||||
|
////File dataDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_2_wide\\set_7")
|
||||||
|
//if (!dataDir.exists()) {
|
||||||
|
// println "dataDir directory does not exist"
|
||||||
|
//}
|
||||||
|
//def data = NumassDataLoader.fromLocalDir(null, dataDir).getNMPoints()
|
||||||
|
//
|
||||||
|
////File rootDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_1")
|
||||||
|
//////File rootDir = new File("D:\\Work\\Numass\\data\\2016_10\\Fill_2_wide")
|
||||||
|
//////File rootDir = new File("D:\\Work\\Numass\\data\\2017_01\\Fill_2_wide")
|
||||||
|
////
|
||||||
|
////NumassStorage storage = NumassStorage.buildLocalNumassRoot(rootDir, true);
|
||||||
|
////
|
||||||
|
////Collection<NMPoint> data = NumassDataUtils.joinSpectra(
|
||||||
|
//// StorageUtils.loaderStream(storage)
|
||||||
|
//// .filter { it.key.matches("set_3.") }
|
||||||
|
//// .map {
|
||||||
|
//// println "loading ${it.key}"
|
||||||
|
//// it.value
|
||||||
|
//// }
|
||||||
|
////)
|
||||||
|
//
|
||||||
|
////Simulation process
|
||||||
|
//Map<String, List<NumassPoint>> res = [:]
|
||||||
|
//
|
||||||
|
//List<NumassPoint> generated = new ArrayList<>();
|
||||||
|
//List<NumassPoint> registered = new ArrayList<>();
|
||||||
|
//List<NumassPoint> firstIteration = new ArrayList<>();
|
||||||
|
//List<NumassPoint> secondIteration = new ArrayList<>();
|
||||||
|
//List<NumassPoint> pileup = new ArrayList<>();
|
||||||
|
//
|
||||||
|
//lowerChannel = 400;
|
||||||
|
//upperChannel = 1800;
|
||||||
|
//
|
||||||
|
//PileUpSimulator buildSimulator(NumassPoint point, double cr, NumassPoint reference = null, boolean extrapolate = true, double scale = 1d) {
|
||||||
|
// def cfg = Grind.buildMeta(cr: cr) {
|
||||||
|
// pulser(mean: 3450, sigma: 86.45, freq: 66.43)
|
||||||
|
// }
|
||||||
|
// //NMEventGeneratorWithPulser generator = new NMEventGeneratorWithPulser(rnd, cfg)
|
||||||
|
//
|
||||||
|
// def generator = b
|
||||||
|
//
|
||||||
|
// if (extrapolate) {
|
||||||
|
// double[] chanels = new double[RawNMPoint.MAX_CHANEL];
|
||||||
|
// double[] values = new double[RawNMPoint.MAX_CHANEL];
|
||||||
|
// Values fitResult = new UnderflowCorrection().fitPoint(point, 400, 600, 1800, 20);
|
||||||
|
//
|
||||||
|
// def amp = fitResult.getDouble("amp")
|
||||||
|
// def sigma = fitResult.getDouble("expConst")
|
||||||
|
// if (sigma > 0) {
|
||||||
|
//
|
||||||
|
// for (int i = 0; i < upperChannel; i++) {
|
||||||
|
// chanels[i] = i;
|
||||||
|
// if (i < lowerChannel) {
|
||||||
|
// values[i] = point.getLength()*amp * Math.exp((i as double) / sigma)
|
||||||
|
// } else {
|
||||||
|
// values[i] = Math.max(0, point.getCount(i) - (reference == null ? 0 : reference.getCount(i)) as int);
|
||||||
|
// }
|
||||||
// }
|
// }
|
||||||
//)
|
// generator.loadSpectrum(chanels, values)
|
||||||
|
// } else {
|
||||||
//Simulation process
|
// generator.loadSpectrum(point, reference, lowerChannel, upperChannel);
|
||||||
Map<String, List<NumassPoint>> res = [:]
|
// }
|
||||||
|
// } else {
|
||||||
List<NumassPoint> generated = new ArrayList<>();
|
// generator.loadSpectrum(point, reference, lowerChannel, upperChannel);
|
||||||
List<NumassPoint> registered = new ArrayList<>();
|
// }
|
||||||
List<NumassPoint> firstIteration = new ArrayList<>();
|
//
|
||||||
List<NumassPoint> secondIteration = new ArrayList<>();
|
// return new PileUpSimulator(point.length * scale, rnd, generator).withUset(point.voltage).generate();
|
||||||
List<NumassPoint> pileup = new ArrayList<>();
|
//}
|
||||||
|
//
|
||||||
lowerChannel = 400;
|
//double adjustCountRate(PileUpSimulator simulator, NumassPoint point) {
|
||||||
upperChannel = 1800;
|
// double generatedInChannel = simulator.generated().getCountInWindow(lowerChannel, upperChannel);
|
||||||
|
// double registeredInChannel = simulator.registered().getCountInWindow(lowerChannel, upperChannel);
|
||||||
PileUpSimulator buildSimulator(NumassPoint point, double cr, NumassPoint reference = null, boolean extrapolate = true, double scale = 1d) {
|
// return (generatedInChannel / registeredInChannel) * (point.getCountInWindow(lowerChannel, upperChannel) / point.getLength());
|
||||||
def cfg = Grind.buildMeta(cr: cr) {
|
//}
|
||||||
pulser(mean: 3450, sigma: 86.45, freq: 66.43)
|
//
|
||||||
}
|
//data.forEach { point ->
|
||||||
NMEventGeneratorWithPulser generator = new NMEventGeneratorWithPulser(rnd, cfg)
|
// double cr = NumassUtils.countRateWithDeadTime(point, lowerChannel, upperChannel, 6.55e-6);
|
||||||
|
//
|
||||||
if (extrapolate) {
|
// PileUpSimulator simulator = buildSimulator(point, cr);
|
||||||
double[] chanels = new double[RawNMPoint.MAX_CHANEL];
|
//
|
||||||
double[] values = new double[RawNMPoint.MAX_CHANEL];
|
// //second iteration to exclude pileup overlap
|
||||||
Values fitResult = new UnderflowCorrection().fitPoint(point, 400, 600, 1800, 20);
|
// NumassPoint pileupPoint = simulator.pileup();
|
||||||
|
// firstIteration.add(simulator.registered());
|
||||||
def amp = fitResult.getDouble("amp")
|
//
|
||||||
def sigma = fitResult.getDouble("expConst")
|
// //updating count rate
|
||||||
if (sigma > 0) {
|
// cr = adjustCountRate(simulator, point);
|
||||||
|
// simulator = buildSimulator(point, cr, pileupPoint);
|
||||||
for (int i = 0; i < upperChannel; i++) {
|
//
|
||||||
chanels[i] = i;
|
// pileupPoint = simulator.pileup();
|
||||||
if (i < lowerChannel) {
|
// secondIteration.add(simulator.registered());
|
||||||
values[i] = point.getLength()*amp * Math.exp((i as double) / sigma)
|
//
|
||||||
} else {
|
// cr = adjustCountRate(simulator, point);
|
||||||
values[i] = Math.max(0, point.getCount(i) - (reference == null ? 0 : reference.getCount(i)) as int);
|
// simulator = buildSimulator(point, cr, pileupPoint);
|
||||||
}
|
//
|
||||||
}
|
// generated.add(simulator.generated());
|
||||||
generator.loadSpectrum(chanels, values)
|
// registered.add(simulator.registered());
|
||||||
} else {
|
// pileup.add(simulator.pileup());
|
||||||
generator.loadSpectrum(point, reference, lowerChannel, upperChannel);
|
//}
|
||||||
}
|
//res.put("original", data);
|
||||||
} else {
|
//res.put("generated", generated);
|
||||||
generator.loadSpectrum(point, reference, lowerChannel, upperChannel);
|
//res.put("registered", registered);
|
||||||
}
|
//// res.put("firstIteration", new SimulatedPoint("firstIteration", firstIteration));
|
||||||
|
//// res.put("secondIteration", new SimulatedPoint("secondIteration", secondIteration));
|
||||||
return new PileUpSimulator(point.length * scale, rnd, generator).withUset(point.voltage).generate();
|
//res.put("pileup", pileup);
|
||||||
}
|
//
|
||||||
|
//def keys = res.keySet();
|
||||||
double adjustCountRate(PileUpSimulator simulator, NumassPoint point) {
|
//
|
||||||
double generatedInChannel = simulator.generated().getCountInWindow(lowerChannel, upperChannel);
|
////print spectra for selected point
|
||||||
double registeredInChannel = simulator.registered().getCountInWindow(lowerChannel, upperChannel);
|
//double u = 16500d;
|
||||||
return (generatedInChannel / registeredInChannel) * (point.getCountInWindow(lowerChannel, upperChannel) / point.getLength());
|
//
|
||||||
}
|
//List<Map> points = res.values().collect { it.find { it.voltage == u }.getMap(20, true) }
|
||||||
|
//
|
||||||
data.forEach { point ->
|
//println "\n Spectrum example for U = ${u}\n"
|
||||||
double cr = NumassUtils.countRateWithDeadTime(point, lowerChannel, upperChannel, 6.55e-6);
|
//
|
||||||
|
//print "channel\t"
|
||||||
PileUpSimulator simulator = buildSimulator(point, cr);
|
//println keys.join("\t")
|
||||||
|
//
|
||||||
//second iteration to exclude pileup overlap
|
//points.first().keySet().each {
|
||||||
NumassPoint pileupPoint = simulator.pileup();
|
// print "${it}\t"
|
||||||
firstIteration.add(simulator.registered());
|
// println points.collect { map -> map[it] }.join("\t")
|
||||||
|
//}
|
||||||
//updating count rate
|
//
|
||||||
cr = adjustCountRate(simulator, point);
|
////printing count rate in window
|
||||||
simulator = buildSimulator(point, cr, pileupPoint);
|
//print "U\tLength\t"
|
||||||
|
//print keys.collect { it + "[total]" }.join("\t") + "\t"
|
||||||
pileupPoint = simulator.pileup();
|
//print keys.collect { it + "[pulse]" }.join("\t") + "\t"
|
||||||
secondIteration.add(simulator.registered());
|
//println keys.join("\t")
|
||||||
|
//
|
||||||
cr = adjustCountRate(simulator, point);
|
//for (int i = 0; i < data.size(); i++) {
|
||||||
simulator = buildSimulator(point, cr, pileupPoint);
|
// print "${data.get(i).getVoltage()}\t"
|
||||||
|
// print "${data.get(i).getLength()}\t"
|
||||||
generated.add(simulator.generated());
|
// print keys.collect { res[it].get(i).getTotalCount() }.join("\t") + "\t"
|
||||||
registered.add(simulator.registered());
|
// print keys.collect { res[it].get(i).getCountInWindow(3100, 3800) }.join("\t") + "\t"
|
||||||
pileup.add(simulator.pileup());
|
// println keys.collect { res[it].get(i).getCountInWindow(400, 3100) }.join("\t")
|
||||||
}
|
//}
|
||||||
res.put("original", data);
|
|
||||||
res.put("generated", generated);
|
|
||||||
res.put("registered", registered);
|
|
||||||
// res.put("firstIteration", new SimulatedPoint("firstIteration", firstIteration));
|
|
||||||
// res.put("secondIteration", new SimulatedPoint("secondIteration", secondIteration));
|
|
||||||
res.put("pileup", pileup);
|
|
||||||
|
|
||||||
def keys = res.keySet();
|
|
||||||
|
|
||||||
//print spectra for selected point
|
|
||||||
double u = 16500d;
|
|
||||||
|
|
||||||
List<Map> points = res.values().collect { it.find { it.voltage == u }.getMap(20, true) }
|
|
||||||
|
|
||||||
println "\n Spectrum example for U = ${u}\n"
|
|
||||||
|
|
||||||
print "channel\t"
|
|
||||||
println keys.join("\t")
|
|
||||||
|
|
||||||
points.first().keySet().each {
|
|
||||||
print "${it}\t"
|
|
||||||
println points.collect { map -> map[it] }.join("\t")
|
|
||||||
}
|
|
||||||
|
|
||||||
//printing count rate in window
|
|
||||||
print "U\tLength\t"
|
|
||||||
print keys.collect { it + "[total]" }.join("\t") + "\t"
|
|
||||||
print keys.collect { it + "[pulse]" }.join("\t") + "\t"
|
|
||||||
println keys.join("\t")
|
|
||||||
|
|
||||||
for (int i = 0; i < data.size(); i++) {
|
|
||||||
print "${data.get(i).getVoltage()}\t"
|
|
||||||
print "${data.get(i).getLength()}\t"
|
|
||||||
print keys.collect { res[it].get(i).getTotalCount() }.join("\t") + "\t"
|
|
||||||
print keys.collect { res[it].get(i).getCountInWindow(3100, 3800) }.join("\t") + "\t"
|
|
||||||
println keys.collect { res[it].get(i).getCountInWindow(400, 3100) }.join("\t")
|
|
||||||
}
|
|
||||||
|
@ -32,7 +32,7 @@ new GrindShell(ctx).eval {
|
|||||||
|
|
||||||
|
|
||||||
def blocks = (1..num).collect {
|
def blocks = (1..num).collect {
|
||||||
def chain = GeneratorKt.buildSimpleEventChain(cr, new JDKRandomGenerator(),{10000})
|
def chain = GeneratorKt.generateEvents(cr, new JDKRandomGenerator(),{10000})
|
||||||
GeneratorKt.generateBlock(Instant.now().plusNanos(it * length), length, chain)
|
GeneratorKt.generateBlock(Instant.now().plusNanos(it * length), length, chain)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -200,7 +200,7 @@ public class MonitorCorrectAction extends OneToOneAction<Table, Table> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private boolean isMonitorPoint(double monitor, Values point) {
|
private boolean isMonitorPoint(double monitor, Values point) {
|
||||||
return point.getValue(NumassPoint.Companion.getHV_KEY()).doubleValue() == monitor;
|
return point.getValue(NumassPoint.HV_KEY).doubleValue() == monitor;
|
||||||
}
|
}
|
||||||
|
|
||||||
private Instant getTime(Values point) {
|
private Instant getTime(Values point) {
|
||||||
|
@ -1,176 +0,0 @@
|
|||||||
/*
|
|
||||||
* Copyright 2015 Alexander Nozik.
|
|
||||||
*
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
package inr.numass.utils;
|
|
||||||
|
|
||||||
import hep.dataforge.meta.Meta;
|
|
||||||
import hep.dataforge.tables.Table;
|
|
||||||
import inr.numass.data.api.NumassBlock;
|
|
||||||
import inr.numass.data.api.NumassEvent;
|
|
||||||
import inr.numass.data.api.SimpleBlock;
|
|
||||||
import org.apache.commons.math3.distribution.EnumeratedRealDistribution;
|
|
||||||
import org.apache.commons.math3.distribution.RealDistribution;
|
|
||||||
import org.apache.commons.math3.random.RandomGenerator;
|
|
||||||
|
|
||||||
import java.time.Duration;
|
|
||||||
import java.time.Instant;
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
import static inr.numass.data.analyzers.NumassAnalyzer.COUNT_RATE_KEY;
|
|
||||||
|
|
||||||
/**
|
|
||||||
 * A generator for Numass events with given energy spectrum
 *
 * @author Darksnake
 */
@Deprecated
public class NMEventGenerator {

    protected final RandomGenerator rnd;
    protected double cr;
    protected RealDistribution distribution;
    protected NumassEvent prevEvent;

    public NMEventGenerator(RandomGenerator rnd, double cr) {
        this.cr = cr;
        this.rnd = rnd;
    }

    public NMEventGenerator(RandomGenerator rnd, Meta meta) {
        this.cr = meta.getDouble("cr");
        this.rnd = rnd;
    }

    public void setSpectrum(Table spectrum) {
        double[] chanels = new double[spectrum.size()];
        double[] values = new double[spectrum.size()];
        for (int i = 0; i < spectrum.size(); i++) {
            chanels[i] = spectrum.get("channel", i).doubleValue();
            values[i] = spectrum.get(COUNT_RATE_KEY, i).doubleValue();
        }
        distribution = new EnumeratedRealDistribution(chanels, values);
    }

//    public void loadSpectrum(RawNMPoint point, int minChanel, int maxChanel) {
//        List<Short> shorts = new ArrayList<>();
//        point.getEvents().stream()
//                .filter((event) -> ((event.getAmp() > minChanel) && (event.getAmp() < maxChanel)))
//                .forEach((event) -> shorts.add(event.getAmp()));
//        double[] doubles = new double[shorts.size()];
//
//        for (int i = 0; i < shorts.size(); i++) {
//            doubles[i] = shorts.get(i);
//        }
//
//        EmpiricalDistribution d = new EmpiricalDistribution();
//        d.load(doubles);
//
//        distribution = d;
//    }
//
//    public void loadSpectrum(Map<Double, Double> spectrum) {
//        double[] chanels = new double[spectrum.size()];
//        double[] values = new double[spectrum.size()];
//        int i = 0;
//        for (Map.Entry<Double, Double> entry : spectrum.entrySet()) {
//            chanels[i] = entry.getKey();
//            values[i] = entry.getValue();
//        }
//        distribution = new EnumeratedRealDistribution(chanels, values);
//    }
//
//    public void loadSpectrum(double[] channels, double[] values) {
//        distribution = new EnumeratedRealDistribution(channels, values);
//    }
//
//    public void loadSpectrum(NumassPoint point) {
//        double[] chanels = new double[RawNMPoint.MAX_CHANEL];
//        double[] values = new double[RawNMPoint.MAX_CHANEL];
//        for (int i = 0; i < RawNMPoint.MAX_CHANEL; i++) {
//            chanels[i] = i;
//            values[i] = point.getCount(i);
//        }
//        distribution = new EnumeratedRealDistribution(chanels, values);
//    }
//
//    public void loadSpectrum(NumassPoint point, NumassPoint reference) {
//        double[] chanels = new double[RawNMPoint.MAX_CHANEL];
//        double[] values = new double[RawNMPoint.MAX_CHANEL];
//        for (int i = 0; i < RawNMPoint.MAX_CHANEL; i++) {
//            chanels[i] = i;
//            values[i] = Math.max(0, point.getCount(i) - reference.getCount(i));
//        }
//        distribution = new EnumeratedRealDistribution(chanels, values);
//    }
//
//    /**
//     * @param point
//     * @param reference
//     * @param lower lower channel for spectrum generation
//     * @param upper upper channel for spectrum generation
//     */
//    public void loadSpectrum(NumassPoint point, NumassPoint reference, int lower, int upper) {
//        double[] chanels = new double[RawNMPoint.MAX_CHANEL];
//        double[] values = new double[RawNMPoint.MAX_CHANEL];
//        for (int i = lower; i < upper; i++) {
//            chanels[i] = i;
//            values[i] = Math.max(0, point.getCount(i) - (reference == null ? 0 : reference.getCount(i)));
//        }
//        distribution = new EnumeratedRealDistribution(chanels, values);
//    }

    protected short generateChannel() {
        if (distribution != null) {
            return (short) distribution.sample();
        } else {
            return 1600;
        }
    }

    protected long generateDeltaTime() {
        return (long) (nextExpDecay(1d / cr) * 1e9);
    }

    protected NumassEvent nextEvent(NumassEvent prev) {
        if (prev == null) {
            return new NumassEvent(generateChannel(), Instant.EPOCH, 0);
        } else {
            return new NumassEvent(generateChannel(), prev.getBlockTime(), prev.getTimeOffset() + generateDeltaTime());
        }
    }

//    @Override
//    public synchronized NumassEvent get() {
//        return prevEvent = nextEvent(prevEvent);
//    }

    public NumassBlock generateBlock(Instant stsrt, long length) {
        List<NumassEvent> events = new ArrayList<>();
        NumassEvent event = nextEvent(null);
        while (event.getTimeOffset() < length) {
            events.add(event);
            event = nextEvent(event);
        }
        return new SimpleBlock(stsrt, Duration.ofNanos(length), events);
    }

    private double nextExpDecay(double mean) {
        return -mean * Math.log(1 - rnd.nextDouble());
    }

}
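The delay sampling above (nextExpDecay feeding generateDeltaTime) is plain inverse-transform sampling of an exponential distribution: for a Poisson process with count rate cr, the waiting time between events is t = -ln(1 - u) / cr with u uniform on [0, 1). A minimal standalone Kotlin sketch of the same idea, not part of this commit (seed and count rate are arbitrary placeholders):

import org.apache.commons.math3.random.JDKRandomGenerator

fun main() {
    val rnd = JDKRandomGenerator().apply { setSeed(42) }
    val cr = 10.0 // count rate in Hz
    // inverse-transform sampling: exponential waiting times with mean 1/cr seconds
    val deltas = DoubleArray(100_000) { -Math.log(1 - rnd.nextDouble()) / cr }
    println("mean waiting time = ${deltas.average()} s (expected ${1 / cr} s)")
}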
@ -1,56 +0,0 @@
package inr.numass.utils;

import hep.dataforge.meta.Meta;
import inr.numass.data.api.NumassEvent;
import org.apache.commons.math3.distribution.NormalDistribution;
import org.apache.commons.math3.distribution.RealDistribution;
import org.apache.commons.math3.random.RandomGenerator;

/**
 * Created by darksnake on 25-Nov-16.
 */
public class NMEventGeneratorWithPulser extends NMEventGenerator {
    private RealDistribution pulserChanelDistribution;
    private double pulserDist;
    private NumassEvent pulserEvent;
    private NumassEvent nextEvent;

    public NMEventGeneratorWithPulser(RandomGenerator rnd, Meta meta) {
        super(rnd, meta);
        pulserChanelDistribution = new NormalDistribution(
                meta.getDouble("pulser.mean", 3450),
                meta.getDouble("pulser.sigma", 86.45)
        );
        pulserDist = 1d / meta.getDouble("pulser.freq", 66.43);
        pulserEvent = generatePulserEvent();
    }

    public synchronized NumassEvent get() {
        //expected next event
        if (nextEvent == null) {
            nextEvent = nextEvent(prevEvent);
        }
        //if pulser event is first, then leave next event as is and return pulser event
        if (pulserEvent.getTimeOffset() < nextEvent.getTimeOffset()) {
            NumassEvent res = pulserEvent;
            pulserEvent = generatePulserEvent();
            return res;
        } else {
            //else return saved next event and generate next one
            prevEvent = nextEvent;
            nextEvent = nextEvent(prevEvent);
            return prevEvent;
        }
    }

    private NumassEvent generatePulserEvent() {
        short channel = (short) pulserChanelDistribution.sample();
        double time;
        if (pulserEvent == null) {
            time = rnd.nextDouble() * pulserDist * 1e9;
        } else {
            time = pulserEvent.getTimeOffset() + pulserDist * 1e9;
        }
        return new NumassEvent(channel, (long) time);
    }
}
@ -1,156 +0,0 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package inr.numass.utils;

import inr.numass.data.api.NumassBlock;
import inr.numass.data.api.NumassEvent;
import inr.numass.data.api.SimpleBlock;
import org.apache.commons.math3.random.RandomGenerator;

import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

import static java.lang.Math.max;

/**
 * @author <a href="mailto:altavir@gmail.com">Alexander Nozik</a>
 */
public class PileUpSimulator {

    private final static double us = 1e-6;//microsecond
    private final long pointLength;
    private final RandomGenerator rnd;
    private final List<NumassEvent> generated = new ArrayList<>();
    private final List<NumassEvent> pileup = new ArrayList<>();
    private final List<NumassEvent> registered = new ArrayList<>();
    private NMEventGenerator generator;
    private double uSet = 0;
    private AtomicInteger doublePileup = new AtomicInteger(0);

    public PileUpSimulator(long length, RandomGenerator rnd, double countRate) {
        this.rnd = rnd;
        generator = new NMEventGenerator(rnd, countRate);
        this.pointLength = length;
    }

    public PileUpSimulator(long pointLength, NMEventGenerator generator) {
        this.pointLength = pointLength;
        this.generator = generator;
        this.rnd = generator.rnd;
    }

    public PileUpSimulator withUset(double uset) {
        this.uSet = uset;
        return this;
    }

    public NumassBlock generated() {
        return new SimpleBlock(Instant.EPOCH, Duration.ofNanos(pointLength), generated);
    }

    public NumassBlock registered() {
        return new SimpleBlock(Instant.EPOCH, Duration.ofNanos(pointLength), registered);
    }

    public NumassBlock pileup() {
        return new SimpleBlock(Instant.EPOCH, Duration.ofNanos(pointLength), pileup);
    }

    /**
     * The amplitude for pileup event
     *
     * @param x
     * @return
     */
    private short pileupChannel(double x, short prevChanel, short nextChanel) {
        assert x > 0;
        //empirical formula for the pileup channel
        double coef = max(0, 0.99078 + 0.05098 * x - 0.45775 * x * x + 0.10962 * x * x * x);
        if (coef < 0 || coef > 1) {
            throw new Error();
        }

        return (short) (prevChanel + coef * nextChanel);
    }

    /**
     * pileup probability
     *
     * @param delay
     * @return
     */
    private boolean pileup(double delay) {
        double prob = 1d / (1d + Math.pow(delay / (2.5 + 0.2), 42.96));
        return random(prob);
    }

    /**
     * Probability for next event to register
     *
     * @param delay
     * @return
     */
    private boolean nextEventRegistered(short prevChanel, double delay) {
        double average = 6.76102 - 4.31897E-4 * prevChanel + 7.88429E-8 * prevChanel * prevChanel + 0.2;
        double prob = 1d - 1d / (1d + Math.pow(delay / average, 75.91));
        return random(prob);
    }

    private boolean random(double prob) {
        return rnd.nextDouble() <= prob;
    }

    public synchronized PileUpSimulator generate() {
        NumassEvent next = null;
        double lastRegisteredTime = 0; // Time of DAQ closing
        //flag that shows that previous event was pileup
        boolean pileupFlag = false;
        while (true) {
            next = generator.nextEvent(next);
            if (next.getTimeOffset() > pointLength) {
                break;
            }
            generated.add(next);
            //not counting double pileups
            if (generated.size() > 1) {
                double delay = (next.getTimeOffset() - lastRegisteredTime) / us; //time between events in microseconds
                if (nextEventRegistered(next.getAmp(), delay)) {
                    //just register new event
                    registered.add(next);
                    lastRegisteredTime = next.getTimeOffset();
                    pileupFlag = false;
                } else if (pileup(delay)) {
                    if (pileupFlag) {
                        //increase double pileup stack
                        doublePileup.incrementAndGet();
                    } else {
                        //pileup event
                        short newChannel = pileupChannel(delay, next.getAmp(), next.getAmp());
                        NumassEvent newEvent = new NumassEvent(newChannel, next.getBlockTime(), next.getTimeOffset());
                        //replace already registered event by event with new channel
                        registered.remove(registered.size() - 1);
                        registered.add(newEvent);
                        pileup.add(newEvent);
                        //do not change DAQ close time
                        pileupFlag = true; // up the flag to avoid secondary pileup
                    }
                } else {
                    // second event not registered, DAQ closed
                    pileupFlag = false;
                }
            } else {
                //register first event
                registered.add(next);
                lastRegisteredTime = next.getTimeOffset();
            }
        }
        return this;
    }

}
@ -101,7 +101,7 @@ object NumassUtils {
     }

     fun wrap(obj: Markedup, meta: Meta = Meta.empty()): EnvelopeBuilder {
-        return wrap(obj,meta){SimpleMarkupRenderer(this).render(it.markup(meta))}
+        return wrap(obj, meta) { SimpleMarkupRenderer(this).render(it.markup(meta)) }
     }

@ -280,7 +280,7 @@ fun FitResult.display(context: Context, stage: String = "fit") {

     val func = { x: Double -> model.spectrum.value(x, parameters) }

-    val fit = XYFunctionPlot("fit",func)
+    val fit = XYFunctionPlot("fit", func)
     fit.density = 100
     // ensuring all data points are calculated explicitly
     data.rows.map { dp -> Adapters.getXValue(adapter, dp).doubleValue() }.sorted().forEach { fit.calculateIn(it) }
@ -297,7 +297,7 @@ fun FitResult.display(context: Context, stage: String = "fit") {
     data.rows.forEach {
         val x = Adapters.getXValue(adapter, it).doubleValue()
         val y = Adapters.getYValue(adapter, it).doubleValue()
-        val err = Adapters.optYError(adapter,it).orElse(1.0)
+        val err = Adapters.optYError(adapter, it).orElse(1.0)
         residual += Adapters.buildXYDataPoint(x, (y - func(x)) / err, 1.0)
     }

@ -40,7 +40,7 @@ class MergeDataAction : ManyToOneAction<Table, Table>() {
     private val parnames = arrayOf(NumassPoint.HV_KEY, NumassPoint.LENGTH_KEY, NumassAnalyzer.COUNT_KEY, NumassAnalyzer.COUNT_RATE_KEY, NumassAnalyzer.COUNT_RATE_ERROR_KEY)

     override fun buildGroups(context: Context, input: DataNode<Table>, actionMeta: Meta): List<DataNode<Table>> {
-        val meta = inputMeta(context, input.getMeta(), actionMeta)
+        val meta = inputMeta(context, input.meta, actionMeta)
         val groups: List<DataNode<Table>>
         if (meta.hasValue("grouping.byValue")) {
             groups = super.buildGroups(context, input, actionMeta)

@ -40,7 +40,7 @@ import java.util.*
 class SummaryAction : ManyToOneAction<FitState, Table>() {

     protected override fun buildGroups(context: Context, input: DataNode<FitState>, actionMeta: Meta): List<DataNode<FitState>> {
-        val meta = inputMeta(context, input.getMeta(), actionMeta)
+        val meta = inputMeta(context, input.meta, actionMeta)
         val groups: List<DataNode<FitState>>
         if (meta.hasValue("grouping.byValue")) {
             groups = super.buildGroups(context, input, actionMeta)

@ -4,12 +4,14 @@ import hep.dataforge.maths.chain.Chain
 import hep.dataforge.maths.chain.MarkovChain
 import hep.dataforge.maths.chain.StatefulChain
 import hep.dataforge.stat.defaultGenerator
-import inr.numass.data.api.NumassBlock
-import inr.numass.data.api.NumassEvent
-import inr.numass.data.api.SimpleBlock
+import hep.dataforge.tables.Table
+import inr.numass.data.analyzers.NumassAnalyzer.Companion.CHANNEL_KEY
+import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_KEY
+import inr.numass.data.api.*
+import kotlinx.coroutines.experimental.channels.map
 import kotlinx.coroutines.experimental.channels.takeWhile
 import kotlinx.coroutines.experimental.channels.toList
-import kotlinx.coroutines.experimental.runBlocking
+import org.apache.commons.math3.distribution.EnumeratedRealDistribution
 import org.apache.commons.math3.random.RandomGenerator
 import java.time.Duration
 import java.time.Instant
@ -22,24 +24,50 @@ private fun RandomGenerator.nextDeltaTime(cr: Double): Long {
     return (nextExp(1.0 / cr) * 1e9).toLong()
 }

-fun generateBlock(start: Instant, length: Long, chain: Chain<NumassEvent>): NumassBlock {
-    val events = runBlocking { chain.channel.takeWhile { it.timeOffset < length }.toList()}
-    return SimpleBlock(start, Duration.ofNanos(length), events)
+fun generateBlock(start: Instant, length: Long, chain: Chain<OrphanNumassEvent>): NumassBlock {
+    return SimpleBlock(start, Duration.ofNanos(length)) { parent ->
+        chain.channel.map { it.adopt(parent) }.takeWhile { it.timeOffset < length }.toList()
+    }
 }

-internal val defaultAmplitudeGenerator: RandomGenerator.(NumassEvent?, Long) -> Short = { _, _ -> ((nextDouble() + 2.0) * 100).toShort() }
+internal val defaultAmplitudeGenerator: RandomGenerator.(OrphanNumassEvent?, Long) -> Short = { _, _ -> ((nextDouble() + 2.0) * 100).toShort() }

-fun buildSimpleEventChain(
+/**
+ * Generate an event chain with fixed count rate
+ * @param cr = count rate in Hz
+ * @param rnd = random number generator
+ * @param amp amplitude generator for the chain. The receiver is rng, first argument is the previous event and second argument
+ * is the delay between the next event. The result is the amplitude in channels
+ */
+fun generateEvents(
     cr: Double,
     rnd: RandomGenerator = defaultGenerator,
-    amp: RandomGenerator.(NumassEvent?, Long) -> Short = defaultAmplitudeGenerator): Chain<NumassEvent> {
-    return MarkovChain(NumassEvent(rnd.amp(null, 0), Instant.now(), 0)) { event ->
+    amp: RandomGenerator.(OrphanNumassEvent?, Long) -> Short = defaultAmplitudeGenerator): Chain<OrphanNumassEvent> {
+    return MarkovChain(OrphanNumassEvent(rnd.amp(null, 0), 0)) { event ->
         val deltaT = rnd.nextDeltaTime(cr)
-        NumassEvent(rnd.amp(event, deltaT), event.blockTime, event.timeOffset + deltaT)
+        OrphanNumassEvent(rnd.amp(event, deltaT), event.timeOffset + deltaT)
     }
 }

+/**
+ * Generate a chain using provided spectrum for amplitudes
+ */
+fun generateEvents(
+    cr: Double,
+    rnd: RandomGenerator = defaultGenerator,
+    spectrum: Table): Chain<OrphanNumassEvent> {
+
+    val channels = DoubleArray(spectrum.size())
+    val values = DoubleArray(spectrum.size())
+    for (i in 0 until spectrum.size()) {
+        channels[i] = spectrum.get(CHANNEL_KEY, i).doubleValue()
+        values[i] = spectrum.get(COUNT_RATE_KEY, i).doubleValue()
+    }
+    val distribution = EnumeratedRealDistribution(channels, values)
+
+    return generateEvents(cr, rnd) { _, _ -> distribution.sample().toShort() }
+}
+
 private data class BunchState(var bunchStart: Long = 0, var bunchEnd: Long = 0)

 /**
@ -53,24 +81,24 @@ fun buildBunchChain(
     bunchRate: Double,
     bunchLength: Double,
     rnd: RandomGenerator = defaultGenerator,
-    amp: RandomGenerator.(NumassEvent?, Long) -> Short = defaultAmplitudeGenerator
-): Chain<NumassEvent> {
+    amp: RandomGenerator.(OrphanNumassEvent?, Long) -> Short = defaultAmplitudeGenerator
+): Chain<OrphanNumassEvent> {
     return StatefulChain(
         BunchState(0, 0),
-        NumassEvent(rnd.amp(null, 0), Instant.now(), 0)) { event ->
+        OrphanNumassEvent(rnd.amp(null, 0), 0)) { event ->
         if (event.timeOffset >= bunchEnd) {
             bunchStart = bunchEnd + (rnd.nextDeltaTime(bunchRate)).toLong()
-            bunchEnd = bunchStart + (bunchLength*1e9).toLong()
-            NumassEvent(rnd.amp(null, 0), Instant.EPOCH, bunchStart)
+            bunchEnd = bunchStart + (bunchLength * 1e9).toLong()
+            OrphanNumassEvent(rnd.amp(null, 0), bunchStart)
         } else {
             val deltaT = rnd.nextDeltaTime(cr)
-            NumassEvent(rnd.amp(event, deltaT), event.blockTime, event.timeOffset + deltaT)
+            OrphanNumassEvent(rnd.amp(event, deltaT), event.timeOffset + deltaT)
         }
     }
 }

-private class MergingState(private val chains: List<Chain<NumassEvent>>) {
-    suspend fun poll(): NumassEvent {
+private class MergingState(private val chains: List<Chain<OrphanNumassEvent>>) {
+    suspend fun poll(): OrphanNumassEvent {
         val next = chains.minBy { it.value.timeOffset } ?: chains.first()
         val res = next.value
         next.next()
@ -79,8 +107,8 @@ private class MergingState(private val chains: List<Chain<NumassEvent>>) {

 }

-fun mergeEventChains(vararg chains: Chain<NumassEvent>): Chain<NumassEvent> {
-    return StatefulChain(MergingState(listOf(*chains)),NumassEvent(0, Instant.now(), 0)){
+fun mergeEventChains(vararg chains: Chain<OrphanNumassEvent>): Chain<OrphanNumassEvent> {
+    return StatefulChain(MergingState(listOf(*chains)), OrphanNumassEvent(0, 0)) {
         poll()
     }
 }
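An illustrative sketch, not part of this commit, of how the reworked chain API above might be driven end to end. It assumes generateEvents and generateBlock exactly as declared in the hunk; the count rate and block length are placeholders.

import inr.numass.data.generateBlock
import inr.numass.data.generateEvents
import java.time.Instant

fun main() {
    val cr = 50.0                              // count rate in Hz
    val chain = generateEvents(cr)             // Chain<OrphanNumassEvent> with default amplitudes
    val lengthNanos = 1_000_000_000L           // one second; time offsets are in nanoseconds
    val block = generateBlock(Instant.now(), lengthNanos, chain)
    println("generated block: $block")
}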
numass-main/src/main/kotlin/inr/numass/data/PileUpSimulator.kt (new file, 172 lines)
@ -0,0 +1,172 @@
/*
 * Copyright 2018 Alexander Nozik.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package inr.numass.data

import hep.dataforge.maths.chain.Chain
import inr.numass.data.api.*
import kotlinx.coroutines.experimental.runBlocking
import org.apache.commons.math3.random.RandomGenerator
import java.lang.Math.max
import java.time.Duration
import java.time.Instant
import java.util.*
import java.util.concurrent.atomic.AtomicInteger

/**
 * @author [Alexander Nozik](mailto:altavir@gmail.com)
 */
class PileUpSimulator {

    private val pointLength: Long
    private val rnd: RandomGenerator
    private val generated = ArrayList<OrphanNumassEvent>()
    private val pileup = ArrayList<OrphanNumassEvent>()
    private val registered = ArrayList<OrphanNumassEvent>()
    private var generator: Chain<OrphanNumassEvent>
    //private double uSet = 0;
    private val doublePileup = AtomicInteger(0)

    constructor(length: Long, rnd: RandomGenerator, countRate: Double) {
        this.rnd = rnd
        generator = generateEvents(countRate, rnd)
        this.pointLength = length
    }

    constructor(pointLength: Long, rnd: RandomGenerator, generator: Chain<OrphanNumassEvent>) {
        this.rnd = rnd
        this.pointLength = pointLength
        this.generator = generator
    }

    //    fun withUset(uset: Double): PileUpSimulator {
    //        this.uSet = uset
    //        return this
    //    }

    fun generated(): NumassBlock {
        return SimpleBlock(Instant.EPOCH, Duration.ofNanos(pointLength)) { parent -> generated.map { it.adopt(parent) } }
    }

    fun registered(): NumassBlock {
        return SimpleBlock(Instant.EPOCH, Duration.ofNanos(pointLength)) { parent -> registered.map { it.adopt(parent) } }
    }

    fun pileup(): NumassBlock {
        return SimpleBlock(Instant.EPOCH, Duration.ofNanos(pointLength)) { parent -> pileup.map { it.adopt(parent) } }
    }

    /**
     * The amplitude for pileup event
     *
     * @param x
     * @return
     */
    private fun pileupChannel(x: Double, prevChanel: Short, nextChanel: Short): Short {
        assert(x > 0)
        //empirical formula for the pileup channel
        val coef = max(0.0, 0.99078 + 0.05098 * x - 0.45775 * x * x + 0.10962 * x * x * x)
        if (coef < 0 || coef > 1) {
            throw Error()
        }

        return (prevChanel + coef * nextChanel).toShort()
    }

    /**
     * pileup probability
     *
     * @param delay
     * @return
     */
    private fun pileup(delay: Double): Boolean {
        val prob = 1.0 / (1.0 + Math.pow(delay / (2.5 + 0.2), 42.96))
        return random(prob)
    }

    /**
     * Probability for next event to register
     *
     * @param delay
     * @return
     */
    private fun nextEventRegistered(prevChanel: Short, delay: Double): Boolean {
        val average = 6.76102 - 4.31897E-4 * prevChanel + 7.88429E-8 * prevChanel.toDouble() * prevChanel.toDouble() + 0.2
        val prob = 1.0 - 1.0 / (1.0 + Math.pow(delay / average, 75.91))
        return random(prob)
    }

    private fun random(prob: Double): Boolean {
        return rnd.nextDouble() <= prob
    }

    @Synchronized
    fun generate() {
        var next: OrphanNumassEvent
        var lastRegisteredTime = 0.0 // Time of DAQ closing
        //flag that shows that previous event was pileup
        var pileupFlag = false
        runBlocking {
            next = generator.next()
            while (next.timeOffset <= pointLength) {
                generated.add(next)
                //not counting double pileups
                if (generated.size > 1) {
                    val delay = (next.timeOffset - lastRegisteredTime) / us //time between events in microseconds
                    if (nextEventRegistered(next.amp, delay)) {
                        //just register new event
                        registered.add(next)
                        lastRegisteredTime = next.timeOffset.toDouble()
                        pileupFlag = false
                    } else if (pileup(delay)) {
                        if (pileupFlag) {
                            //increase double pileup stack
                            doublePileup.incrementAndGet()
                        } else {
                            //pileup event
                            val newChannel = pileupChannel(delay, next.amp, next.amp)
                            val newEvent = OrphanNumassEvent(newChannel, next.timeOffset)
                            //replace already registered event by event with new channel
                            registered.removeAt(registered.size - 1)
                            registered.add(newEvent)
                            pileup.add(newEvent)
                            //do not change DAQ close time
                            pileupFlag = true // up the flag to avoid secondary pileup
                        }
                    } else {
                        // second event not registered, DAQ closed
                        pileupFlag = false
                    }
                } else {
                    //register first event
                    registered.add(next)
                    lastRegisteredTime = next.timeOffset.toDouble()
                }
                next = generator.next()
            }
        }
    }

    companion object {
        private const val us = 1e-6//microsecond
    }

}
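A hypothetical call site, not part of this commit, for the new Kotlin PileUpSimulator. It uses only the constructor and accessors visible above; the seed, point length and count rate are placeholders.

import inr.numass.data.PileUpSimulator
import org.apache.commons.math3.random.JDKRandomGenerator

fun main() {
    val rnd = JDKRandomGenerator().apply { setSeed(1234) }
    val simulator = PileUpSimulator(1_000_000_000L, rnd, 1000.0) // 1 s point, 1 kHz count rate
    simulator.generate()
    val clean = simulator.generated()    // everything that was generated
    val seen = simulator.registered()    // what the simulated DAQ would actually record
    val piled = simulator.pileup()       // registered events that are pile-up artifacts
    println("blocks built: $clean, $seen, $piled")
}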
numass-main/src/main/kotlin/inr/numass/data/Visualization.kt (new file, 56 lines)
@ -0,0 +1,56 @@
/*
 * Copyright 2018 Alexander Nozik.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package inr.numass.data

import hep.dataforge.context.Context
import hep.dataforge.context.Global
import hep.dataforge.fx.plots.FXPlotManager
import hep.dataforge.kodex.KMetaBuilder
import hep.dataforge.kodex.buildMeta
import hep.dataforge.kodex.configure
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.tables.Adapters
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.api.NumassBlock

fun NumassBlock.plotAmplitudeSpectrum(plotName: String = "spectrum", frameName: String = "", context: Context = Global, metaAction: KMetaBuilder.() -> Unit) {
    val meta = buildMeta("meta", metaAction)
    val plotManager = context.load(FXPlotManager::class)
    val data = SmartAnalyzer().getAmplitudeSpectrum(this, meta.getMetaOrEmpty("spectrum"))
    plotManager.display(name = frameName) {
        val valueAxis = if (meta.getBoolean("normalize", true)) {
            NumassAnalyzer.COUNT_RATE_KEY
        } else {
            NumassAnalyzer.COUNT_KEY
        }
        val plot = DataPlot.plot(
                plotName,
                Adapters.buildXYAdapter(NumassAnalyzer.CHANNEL_KEY, valueAxis),
                data
        ).configure {
            "connectionType" to "step"
            "thickness" to 2
            "showLine" to true
            "showSymbol" to false
            "showErrors" to false
            "JFreeChart.cache" to true
        }
        add(plot)
    }
}
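A hypothetical usage sketch, not part of this commit, for the new extension function. Here `block` stands for any NumassBlock (for instance one produced by the PileUpSimulator above), the plot and frame names are placeholders, and only the "normalize" flag that plotAmplitudeSpectrum itself reads is set; the builder infix syntax is assumed to match the configure block shown above.

import inr.numass.data.api.NumassBlock
import inr.numass.data.plotAmplitudeSpectrum

fun show(block: NumassBlock) {
    // render raw counts instead of count rate in the spectrum frame
    block.plotAmplitudeSpectrum(plotName = "registered", frameName = "pileup-test") {
        "normalize" to false
    }
}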
@ -5,8 +5,8 @@ import hep.dataforge.kodex.buildMeta
 import inr.numass.actions.TimeAnalyzerAction
 import inr.numass.data.api.SimpleNumassPoint
 import inr.numass.data.buildBunchChain
-import inr.numass.data.buildSimpleEventChain
 import inr.numass.data.generateBlock
+import inr.numass.data.generateEvents
 import inr.numass.data.mergeEventChains
 import java.time.Instant

@ -19,7 +19,7 @@ fun main(args: Array<String>) {
     val num = 60;

     val blocks = (1..num).map {
-        val regularChain = buildSimpleEventChain(cr)
+        val regularChain = generateEvents(cr)
         val bunchChain = buildBunchChain(40.0, 0.01, 5.0)

         val generator = mergeEventChains(regularChain, bunchChain)

@ -1,34 +1,11 @@
 package inr.numass.scripts.tristan

-import hep.dataforge.meta.Meta
-import hep.dataforge.tables.Table
-import hep.dataforge.values.Values
-import inr.numass.data.analyzers.NumassAnalyzer
-import inr.numass.data.api.NumassBlock
-import inr.numass.data.api.NumassEvent
-import inr.numass.data.api.NumassSet
 import inr.numass.data.storage.ProtoNumassPoint
-import java.util.stream.Stream

 fun main(args: Array<String>) {
-    val analyzer = object : NumassAnalyzer{
-        override fun analyze(block: NumassBlock, config: Meta): Values {
-            TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
-        }
-
-        override fun getEvents(block: NumassBlock, meta: Meta): Stream<NumassEvent> {
-            TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
-        }
-
-        override fun analyzeSet(set: NumassSet, config: Meta): Table {
-            TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
-        }
-
-    }
-
-
     val file = ProtoNumassPoint.readFile("D:\\Work\\Numass\\data\\TRISTAN_11_2017\\df\\gun_16_19.df ")
-    val events = Sequence { file.events.iterator() }.sortedBy { it.time }
+    val filtered = file.filter { it.channel == 4 }


 }
@ -30,7 +30,9 @@ class NumassFitScanSummaryTask : AbstractTask<Table>() {
         val builder = DataSet.builder(Table::class.java)
         val action = FitSummaryAction()
         val input = data.checked(FitResult::class.java)
-        input.nodeStream().filter { it -> it.getSize(false) > 0 }.forEach { node -> builder.putData(node.name, action.run(model.context, node, model.getMeta()).data) }
+        input.nodeStream()
+                .filter { it -> it.getSize(false) > 0 }
+                .forEach { node -> builder.putData(node.name, action.run(model.context, node, model.meta).data) }
         return builder.build()
     }

@ -25,7 +25,7 @@ class NumassFitScanTask : AbstractTask<FitResult>() {


     override fun run(model: TaskModel, data: DataNode<*>): DataNode<FitResult> {
-        val config = model.getMeta()
+        val config = model.meta
         val scanParameter = config.getString("parameter", "msterile2")

         val scanValues: Value = if (config.hasValue("masses")) {
@ -57,7 +57,8 @@ class NumassFitScanTask : AbstractTask<FitResult>() {
                 overrideMeta.setValue("params.$scanParameter.value", `val`)
             } else {
                 overrideMeta.getMetaList("params.param").stream()
-                        .filter { par -> par.getString("name") == scanParameter }.forEach { par -> par.setValue("value", `val`) }
+                        .filter { par -> par.getString("name") == scanParameter }
+                        .forEach { it.setValue("value", `val`) }
             }
             // Data<Table> newData = new Data<Table>(data.getGoal(),data.type(),overrideMeta);
             val node = action.run(model.context, DataNode.of(resultName, table, Meta.empty()), overrideMeta)
@ -42,7 +42,7 @@ class NumassFitSummaryTask : SingleActionTask<FitState, Table>() {
     }

     override fun transformMeta(model: TaskModel): Meta {
-        return model.getMeta().getMeta("summary")
+        return model.meta.getMeta("summary")
     }

     override fun buildModel(model: TaskModel.Builder, meta: Meta) {
@ -14,28 +14,20 @@ import java.util.stream.Stream
  */
 class NumassDataCache(val data: NumassSet) : NumassSet {
     //private val cachedDescription: String by lazy { data.description }
-    private val cachedMeta: Meta by lazy { data.meta }
+    override val meta: Meta by lazy { data.meta }
     private val cachedPoints: List<NumassPoint> by lazy { data.points.collect(Collectors.toList()) }
-    private val hv: Optional<Table> by lazy { data.hvData }
+    override val hvData: Optional<Table> by lazy { data.hvData }


-    override fun getPoints(): Stream<NumassPoint> {
-        return cachedPoints.stream();
-    }
+    override val points: Stream<NumassPoint>
+        get() = cachedPoints.stream()

     //    override fun getDescription(): String {
     //        return cachedDescription
     //    }

-    override fun getMeta(): Meta {
-        return cachedMeta
-    }
-
     override fun getName(): String {
         return data.name;
     }
-
-    override fun getHvData(): Optional<Table> {
-        return hv;
-    }
 }