Compare commits

...

10 Commits

77 changed files with 1866 additions and 681 deletions

View File

@ -1,5 +1,5 @@
buildscript {
ext.kotlin_version = "1.3.10"
ext.kotlin_version = "1.3.50"
repositories {
jcenter()
}
@ -9,10 +9,15 @@ buildscript {
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
}
}
plugins{
id 'org.openjfx.javafxplugin' version '0.0.8' apply false
}
allprojects {
apply plugin: 'idea'
apply plugin: 'java'
apply plugin: "kotlin"
apply plugin: "org.jetbrains.kotlin.jvm"
group = 'inr.numass'
version = '1.0.0'
@ -33,8 +38,6 @@ allprojects {
testImplementation group: 'junit', name: 'junit', version: '4.+'
}
compileKotlin {
kotlinOptions {
jvmTarget = "1.8"

Binary file not shown.

View File

@ -1,6 +1,5 @@
#Fri Jun 16 20:54:50 MSK 2017
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.0-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-3.5.0-bin.zip

57
gradlew vendored
View File

@ -1,5 +1,21 @@
#!/usr/bin/env sh
#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
##
## Gradle start up script for UN*X
@ -28,16 +44,16 @@ APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn ( ) {
warn () {
echo "$*"
}
die ( ) {
die () {
echo
echo "$*"
echo
@ -109,8 +125,8 @@ if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
@ -138,35 +154,30 @@ if $cygwin ; then
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
i=`expr $i + 1`
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
0) set -- ;;
1) set -- "$args0" ;;
2) set -- "$args0" "$args1" ;;
3) set -- "$args0" "$args1" "$args2" ;;
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save ( ) {
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
APP_ARGS=`save "$@"`
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
exec "$JAVACMD" "$@"

18
gradlew.bat vendored
View File

@ -1,3 +1,19 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@ -14,7 +30,7 @@ set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

View File

@ -1,6 +1,12 @@
allprojects {
apply plugin: "kotlin"
// apply plugin: 'org.openjfx.javafxplugin'
//
// javafx {
// modules = [ 'javafx.controls' ]
// }
compileKotlin {
kotlinOptions {
jvmTarget = "1.8"

View File

@ -18,12 +18,9 @@ import java.util.function.Supplier
/**
* @author Alexander Nozik
*/
class PKT8VirtualPort(private val portName: String, meta: Meta) : VirtualPort(meta), Metoid {
class PKT8VirtualPort(override val name: String, meta: Meta) : VirtualPort(meta), Metoid {
private val generator = Random()
override val name: String = portName
@Synchronized override fun evaluateRequest(request: String) {
when (request) {
"s" -> {
@ -59,6 +56,11 @@ class PKT8VirtualPort(private val portName: String, meta: Meta) : VirtualPort(me
}
}
override fun toMeta(): Meta {
TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
}
@Throws(Exception::class)
override fun close() {
cancelByTag("measurement")

View File

@ -1,26 +1,15 @@
/*
* Copyright 2018 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
//plugins {
// id 'application'
// id 'org.openjfx.javafxplugin' version '0.0.5'
//}
//
//javafx {
// modules = [ 'javafx.controls' ]
//}
plugins {
id 'application'
id 'org.openjfx.javafxplugin' version '0.0.5'
}
javafx {
modules = [ 'javafx.controls' ]
}
version = "0.1.0"

View File

@ -9,11 +9,9 @@ import java.text.DecimalFormat
import java.time.Duration
//@ValueDef(name = "timeout", type = [(ValueType.NUMBER)], def = "400", info = "A timeout for port response")
class LambdaPortController(context: Context, port: Port) : GenericPortController(context, port, "\r") {
class LambdaPortController(context: Context, port: Port, val timeout : Duration = Duration.ofMillis(200)) : GenericPortController(context, port, "\r") {
private var currentAddress: Int = -1;
private val timeout: Duration = port.meta.optString("timeout").map<Duration> { Duration.parse(it) }.orElse(Duration.ofMillis(200))
fun setAddress(address: Int) {
if(currentAddress!= address) {
val response = sendAndWait("ADR $address\r", timeout) { true }.trim()

View File

@ -27,7 +27,6 @@ import java.util.*
* @author Alexander Nozik
*/
class VirtualLambdaPort(meta: Meta) : VirtualPort(meta) {
var currentAddress = -1
private set
@ -135,6 +134,11 @@ class VirtualLambdaPort(meta: Meta) : VirtualPort(meta) {
val voltage get() = current * resistance
}
override fun toMeta(): Meta {
TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
}
companion object {
private val latency = Duration.ofMillis(50)
}

View File

@ -122,6 +122,7 @@ abstract class DeviceDisplayFX<D : Device> : Component(), Connection {
* Default display shows only board pane and nothing else
*/
class DefaultDisplay : DeviceDisplayFX<Device>() {
override fun buildView(device: Device): UIComponent? = null
}

View File

@ -1,6 +1,6 @@
apply plugin: 'application'
version = "0.5.0"
version = "0.6.0"
if (!hasProperty('mainClass')) {
ext.mainClass = 'inr.numass.control.readvac.ReadVac'

View File

@ -27,10 +27,7 @@ class MKSBaratronDevice(context: Context, meta: Meta) : PortSensor(context, meta
var channel by valueState("channel").intDelegate
override val type: String
get() {
return meta.getString("type", "numass.vac.baratron")
}
override val type: String get() = meta.getString("type", "numass.vac.baratron")
override fun buildConnection(meta: Meta): GenericPortController {
val port: Port = PortFactory.build(meta)

View File

@ -22,7 +22,11 @@ class ReadVac : NumassControlApplication<VacCollectorDevice>() {
}
override fun getDeviceMeta(config: Meta): Meta {
return MetaUtils.findNode(config,"device"){it.getString("name") == "numass.vac"}.orElseThrow{RuntimeException("Vacuum measurement configuration not found")}
return MetaUtils.findNode(config, "device") {
it.getString("type") == "numass:vac"
}.orElseThrow {
RuntimeException("Vacuum measurement configuration not found")
}
}
}

View File

@ -0,0 +1,66 @@
package inr.numass.control.readvac
import hep.dataforge.context.Context
import hep.dataforge.control.devices.PortSensor
import hep.dataforge.control.ports.GenericPortController
import hep.dataforge.control.ports.Port
import hep.dataforge.control.ports.PortFactory
import hep.dataforge.meta.Meta
import inr.numass.control.DeviceView
//@ValueDef(key = "address")
@DeviceView(VacDisplay::class)
//@StateDef(value = ValueDef(key = "address", type = [ValueType.STRING], def = "001"))
class ThyroContVacDevice(context: Context, meta: Meta) : PortSensor(context, meta) {
//val address by valueState("address").stringDelegate
val address = "001"
override val type: String get() = meta.getString("type", "numass.vac.thyrocont")
override fun buildConnection(meta: Meta): GenericPortController {
val port: Port = PortFactory.build(meta)
logger.info("Connecting to port {}", port.name)
return GenericPortController(context, port) { it.endsWith("\r") }
}
private fun String.checksum(): Char = (sumBy { it.toInt() } % 64 + 64).toChar()
private fun wrap(str: String): String = buildString {
append(str)
append(str.checksum())
append('\r')
}
override fun startMeasurement(oldMeta: Meta?, newMeta: Meta) {
measurement {
val request = wrap("0010MV00")
val answer = sendAndWait(request)
if (answer.isEmpty()) {
updateState(CONNECTED_STATE, false)
notifyError("No connection")
} else {
updateState(CONNECTED_STATE, true)
}
try {
val address = answer.substring(0..2)
//if wrong answer
if (address != this.address) {
logger.warn("Expected response for address ${this.address}, bur received for $address")
notifyError("Wrong response address")
return@measurement
}
val dataSize = answer.substring(6..7).toInt()
val data = answer.substring(8, 8 + dataSize).toDouble()
if (data <= 0) {
notifyError("Non positive")
} else {
notifyResult(data)
}
} catch (ex: Exception) {
logger.error("Parsing error", ex)
notifyError("Parse error")
}
}
}
}

View File

@ -48,16 +48,17 @@ class VacCollectorDevice(context: Context, meta: Meta, val sensors: Collection<S
private val helper = StorageHelper(this, this::buildLoader)
private val collector = object : DeviceListener {
override fun notifyStateChanged(device: Device, name: String, state: Any) {
if (name == MEASUREMENT_RESULT_STATE) {
collector.put(device.name, (state as Meta).getValue("value"))
}
}
val averagingDuration: Duration = Duration.parse(meta.getString("averagingDuration", "PT30S"))
private val collector = RegularPointCollector(averagingDuration) {
notifyResult(it)
}
override fun notifyStateChanged(device: Device, name: String, state: Any) {
if (name == MEASUREMENT_RESULT_STATE) {
collector.put(device.name, (state as Meta).getValue(RESULT_VALUE))
}
}
}
@ -69,10 +70,11 @@ class VacCollectorDevice(context: Context, meta: Meta, val sensors: Collection<S
override fun init() {
super.init()
for (s in sensors) {
s.init()
s.connect(collector, Roles.DEVICE_LISTENER_ROLE)
}
super.init()
}
override val type: String

View File

@ -5,13 +5,13 @@
*/
package inr.numass.control.readvac
import hep.dataforge.connections.Connection
import hep.dataforge.control.connections.Roles
import hep.dataforge.control.devices.Device
import hep.dataforge.control.devices.DeviceListener
import hep.dataforge.control.devices.Sensor
import hep.dataforge.control.measurements.Measurement
import hep.dataforge.control.measurements.MeasurementListener
import hep.dataforge.fx.bindWindow
import hep.dataforge.fx.fragments.LogFragment
import hep.dataforge.meta.Meta
import hep.dataforge.plots.PlotGroup
import hep.dataforge.plots.data.TimePlot
import hep.dataforge.values.Value
@ -24,7 +24,6 @@ import javafx.geometry.Orientation
import javafx.scene.control.ScrollPane
import javafx.scene.layout.Priority
import tornadofx.*
import java.time.Instant
/**
* A view controller for Vac collector
@ -35,16 +34,13 @@ class VacCollectorDisplay : DeviceDisplayFX<VacCollectorDevice>() {
private val table = FXCollections.observableHashMap<String, Double>()
private val sensorConnection = object : MeasurementListener, Connection {
override fun onMeasurementResult(measurement: Measurement<*>, result: Any, time: Instant?) {
if (result is Double) {
table.put(measurement.device.name, result);
private val sensorConnection = object : DeviceListener {
override fun notifyStateChanged(device: Device, name: String, state: Any) {
if (name == Sensor.MEASUREMENT_RESULT_STATE) {
table[device.name] = (state as Meta).getDouble(Sensor.RESULT_VALUE)
}
}
override fun onMeasurementFailed(measurement: Measurement<*>?, exception: Throwable?) {
}
}
private val viewList = FXCollections.observableArrayList<VacDisplay>();
@ -58,7 +54,7 @@ class VacCollectorDisplay : DeviceDisplayFX<VacCollectorDevice>() {
device.sensors.forEach { sensor ->
val view = VacDisplay()
sensor.connect(view, Roles.VIEW_ROLE, Roles.DEVICE_LISTENER_ROLE)
sensor.connect(sensorConnection, Roles.MEASUREMENT_LISTENER_ROLE);
sensor.connect(sensorConnection, Roles.DEVICE_LISTENER_ROLE)
viewList.add(view)
}
}

View File

@ -19,6 +19,7 @@ class VacDeviceFactory : DeviceFactory {
"CM32" -> CM32Device(context, sensorConfig)
"meradat" -> MeradatVacDevice(context, sensorConfig)
"baratron" -> MKSBaratronDevice(context, sensorConfig)
"ThyroCont" -> ThyroContVacDevice(context,sensorConfig)
// VIRTUAL_SENSOR_TYPE -> VirtualDevice.randomDoubleSensor(context, sensorConfig)
else -> throw RuntimeException("Unknown vacuum sensor type")
}

View File

@ -0,0 +1,20 @@
package inr.numass.control.readvac
import hep.dataforge.context.Global
import hep.dataforge.meta.buildMeta
import kotlinx.coroutines.delay
suspend fun main() {
val meta = buildMeta {
"name" to "PSP"
"port" to "tcp::192.168.111.32:4001"
"sensorType" to "ThyroCont"
}
val device = ThyroContVacDevice(Global, meta)
device.measure()
device.connected.set(true)
delay(400)
println(device.result)
device.connected.set(false)
}

View File

@ -8,5 +8,6 @@
<sensor name="Px" color="black" port="tcp::192.168.111.33:4003" sensorType="meradat" address="1"/>
<sensor name="Baratron" color="cyan" port="tcp::192.168.111.33:4004" sensorType="baratron"/>
<sensor name="Collector" color="magenta" port="tcp::192.168.111.33:4003" sensorType="meradat" address="2"/>
<sensor name="PSP" port="tcp::192.168.111.32:4001" sensorType="ThyroCont" address="001"/>
</device>
</config>

View File

@ -26,6 +26,9 @@ class MetaBlock(override val blocks: List<NumassBlock>) : ParentBlock {
override val length: Duration
get() = Duration.ofNanos(blocks.stream().mapToLong { block -> block.length.toNanos() }.sum())
/**
* A stream of events, sorted by block time but not sorted by event time
*/
override val events: Stream<NumassEvent>
get() = blocks.sortedBy { it.startTime }.stream().flatMap { it.events }

View File

@ -28,6 +28,12 @@ open class OrphanNumassEvent(val amplitude: Short, val timeOffset: Long) : Seria
override fun compareTo(other: OrphanNumassEvent): Int {
return this.timeOffset.compareTo(other.timeOffset)
}
override fun toString(): String {
return "[$amplitude, $timeOffset]"
}
}
/**

View File

@ -9,18 +9,19 @@ import java.time.Instant
* Created by darksnake on 06-Jul-17.
*/
class NumassFrame(
/**
* The absolute start time of the frame
*/
val time: Instant,
/**
* The time interval per tick
*/
val tickSize: Duration,
/**
* The buffered signal shape in ticks
*/
val signal: ShortBuffer) {
/**
* The absolute start time of the frame
*/
val time: Instant,
/**
* The time interval per tick
*/
val tickSize: Duration,
/**
* The buffered signal shape in ticks
*/
val signal: ShortBuffer
) {
val length: Duration
get() = tickSize.multipliedBy(signal.capacity().toLong())

View File

@ -7,5 +7,5 @@ import java.util.stream.Stream
* Created by darksnake on 07.07.2017.
*/
interface SignalProcessor {
fun analyze(frame: NumassFrame): Stream<NumassEvent>
fun process(parent: NumassBlock, frame: NumassFrame): Stream<NumassEvent>
}

View File

@ -1,27 +1,30 @@
package inr.numass.data.api
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.meta.MetaHolder
import hep.dataforge.meta.buildMeta
/**
* A simple static implementation of NumassPoint
* Created by darksnake on 08.07.2017.
*/
class SimpleNumassPoint(override val blocks: List<NumassBlock>, meta: Meta, override val isSequential: Boolean = true) : MetaHolder(meta), NumassPoint {
/**
* Input blocks must be sorted
* @param voltage
* @param blocks
*/
constructor(blocks: Collection<NumassBlock>, voltage: Double) :
this(blocks.sortedBy { it.startTime }, MetaBuilder("point").setValue(NumassPoint.HV_KEY, voltage))
class SimpleNumassPoint(override val blocks: List<NumassBlock>, meta: Meta, override val isSequential: Boolean = true) :
MetaHolder(meta), NumassPoint {
init {
if(blocks.isEmpty()){
if (blocks.isEmpty()) {
throw IllegalArgumentException("No blocks in collection")
}
}
companion object {
fun build(blocks: Collection<NumassBlock>, voltage: Double? = null, index: Int? = null): SimpleNumassPoint {
val meta = buildMeta("point") {
NumassPoint.HV_KEY to voltage
NumassPoint.INDEX_KEY to index
}
return SimpleNumassPoint(blocks.sortedBy { it.startTime }, meta.build())
}
}
}

View File

@ -1,4 +1,3 @@
import com.google.protobuf.gradle.GenerateProtoTask
import com.google.protobuf.gradle.protobuf
import com.google.protobuf.gradle.protoc
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
@ -6,7 +5,7 @@ import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
plugins {
idea
kotlin("jvm")
id("com.google.protobuf") version "0.8.7"
id("com.google.protobuf") version "0.8.8"
}
@ -26,13 +25,13 @@ tasks.withType<KotlinCompile> {
dependsOn(":numass-core:numass-data-proto:generateProto")
}
sourceSets{
create("proto"){
proto {
srcDir("src/main/proto")
}
}
}
//sourceSets {
// create("proto") {
// proto {
// srcDir("src/main/proto")
// }
// }
//}
protobuf {
// Configure the protoc executable

View File

@ -29,7 +29,6 @@ import java.time.Instant
import java.util.stream.IntStream
import java.util.stream.Stream
import java.util.zip.Inflater
import kotlin.streams.toList
/**
* Protobuf based numass point
@ -42,11 +41,11 @@ class ProtoNumassPoint(override val meta: Meta, val protoBuilder: () -> NumassPr
override val blocks: List<NumassBlock>
get() = proto.channelsList
.flatMap { channel ->
channel.blocksList
.map { block -> ProtoBlock(channel.id.toInt(), block, this) }
.sortedBy { it.startTime }
}
.flatMap { channel ->
channel.blocksList
.map { block -> ProtoBlock(channel.id.toInt(), block, this) }
.sortedBy { it.startTime }
}
override val channels: Map<Int, NumassBlock>
get() = proto.channelsList.groupBy { it.id.toInt() }.mapValues { entry ->
@ -127,7 +126,11 @@ class ProtoNumassPoint(override val meta: Meta, val protoBuilder: () -> NumassPr
}
}
class ProtoBlock(override val channel: Int, private val block: NumassProto.Point.Channel.Block, val parent: NumassPoint? = null) : NumassBlock {
class ProtoBlock(
override val channel: Int,
private val block: NumassProto.Point.Channel.Block,
val parent: NumassPoint? = null
) : NumassBlock {
override val startTime: Instant
get() = ProtoNumassPoint.ofEpochNanos(block.time)
@ -136,11 +139,15 @@ class ProtoBlock(override val channel: Int, private val block: NumassProto.Point
block.length > 0 -> Duration.ofNanos(block.length)
parent?.meta?.hasValue("acquisition_time") ?: false ->
Duration.ofMillis((parent!!.meta.getDouble("acquisition_time") * 1000).toLong())
parent?.meta?.hasValue("params.b_size") ?: false ->
Duration.ofNanos((parent!!.meta.getDouble("params.b_size") * 320).toLong())
else -> {
LoggerFactory.getLogger(javaClass).error("No length information on block. Trying to infer from first and last events")
val times = events.map { it.timeOffset }.toList()
val nanos = (times.max()!! - times.min()!!)
Duration.ofNanos(nanos)
error("No length information on block")
// LoggerFactory.getLogger(javaClass).warn("No length information on block. Trying to infer from first and last events")
// val times = events.map { it.timeOffset }.toList()
// val nanos = (times.max()!! - times.min()!!)
// Duration.ofNanos(nanos)
// Duration.ofMillis(380)
}
}
@ -148,9 +155,11 @@ class ProtoBlock(override val channel: Int, private val block: NumassProto.Point
get() = if (block.hasEvents()) {
val events = block.events
if (events.timesCount != events.amplitudesCount) {
LoggerFactory.getLogger(javaClass).error("The block is broken. Number of times is ${events.timesCount} and number of amplitudes is ${events.amplitudesCount}")
LoggerFactory.getLogger(javaClass)
.error("The block is broken. Number of times is ${events.timesCount} and number of amplitudes is ${events.amplitudesCount}")
}
IntStream.range(0, events.timesCount).mapToObj { i -> NumassEvent(events.getAmplitudes(i).toShort(), events.getTimes(i), this) }
IntStream.range(0, events.timesCount)
.mapToObj { i -> NumassEvent(events.getAmplitudes(i).toShort(), events.getTimes(i), this) }
} else {
Stream.empty()
}

View File

@ -0,0 +1,20 @@
plugins {
idea
kotlin("jvm")
}
repositories {
mavenLocal()
mavenCentral()
}
dependencies {
compile(kotlin("stdlib-jdk8"))
compile("hep.dataforge:dataforge-maths")
compile(project(":numass-core:numass-data-api"))
// https://mvnrepository.com/artifact/org.apache.commons/commons-collections4
compile(group = "org.apache.commons", name = "commons-collections4", version = "4.3")
}

View File

@ -0,0 +1,100 @@
package inr.numass.data
import inr.numass.data.api.*
import org.apache.commons.collections4.queue.CircularFifoQueue
import org.apache.commons.math3.fitting.PolynomialCurveFitter
import org.apache.commons.math3.fitting.WeightedObservedPoint
import org.slf4j.LoggerFactory
import java.nio.ShortBuffer
import java.util.stream.Stream
import kotlin.streams.asStream
private fun ShortBuffer.clone(): ShortBuffer {
val clone = ShortBuffer.allocate(capacity())
rewind()//copy from the beginning
clone.put(this)
rewind()
clone.flip()
return clone
}
class ChernovProcessor(
val threshold: Short,
val signalRange: IntRange,
val tickSize: Int = 320,
val signal: (Double) -> Double
) : SignalProcessor {
private val fitter = PolynomialCurveFitter.create(2)
private val signalMax = signal(0.0)
/**
* position an amplitude of peak relative to buffer end (negative)
*/
private fun CircularFifoQueue<Short>.findMax(): Pair<Double, Double> {
val data = this.mapIndexed { index, value ->
WeightedObservedPoint(
1.0,
index.toDouble() - size + 1, // final point in zero
value.toDouble()
)
}
val (c, b, a) = fitter.fit(data)
if (a > 0) error("Minimum!")
val x = -b / 2 / a
val y = -(b * b - 4 * a * c) / 4 / a
return x to y
}
fun processBuffer(buffer: ShortBuffer): Sequence<OrphanNumassEvent> {
val ringBuffer = CircularFifoQueue<Short>(5)
fun roll() {
ringBuffer.add(buffer.get())
}
return sequence<OrphanNumassEvent> {
while (buffer.remaining() > 1) {
roll()
if (ringBuffer.isAtFullCapacity) {
if (ringBuffer.all { it > threshold && it <= ringBuffer[2] }) {
//Found bending, evaluating event
//TODO check end of frame
try {
val (pos, amp) = ringBuffer.findMax()
val timeInTicks = (pos + buffer.position() - 1)
val event = OrphanNumassEvent(amp.toShort(), (timeInTicks * tickSize).toLong())
yield(event)
//subtracting event from buffer copy
for (x in (signalRange.first + timeInTicks.toInt())..(signalRange.endInclusive + timeInTicks.toInt())) {
//TODO check all roundings
if (x >= 0 && x < buffer.limit()) {
val oldValue = buffer.get(x)
val newValue = oldValue - amp * signal(x - timeInTicks) / signalMax
buffer.put(x, newValue.toShort())
}
}
println(buffer.array().joinToString())
} catch (ex: Exception) {
LoggerFactory.getLogger(javaClass).error("Something went wrong", ex)
}
roll()
}
}
}
}
}
override fun process(parent: NumassBlock, frame: NumassFrame): Stream<NumassEvent> {
val buffer = frame.signal.clone()
return processBuffer(buffer).map { it.adopt(parent) }.asStream()
}
}

View File

@ -0,0 +1,26 @@
package inr.numass.data
import org.apache.commons.math3.analysis.function.Gaussian
import org.junit.Assert.assertTrue
import org.junit.Test
import java.nio.ShortBuffer
class ChernovProcessorTest {
val gaussian = Gaussian(1000.0, 0.0, 3.0)
val processor = ChernovProcessor(10, -12..12, tickSize = 100) { gaussian.value(it) }
val events = mapOf<Double, Double>(10.0 to 1.0, 16.0 to 0.5)
val buffer = ShortArray(40) { i ->
events.entries.sumByDouble { (pos, amp) -> amp * gaussian.value(pos - i.toDouble()) }.toShort()
}
@Test
fun testPeaks() {
println(buffer.joinToString())
val peaks = processor.processBuffer(ShortBuffer.wrap(buffer)).toList()
assertTrue(peaks.isNotEmpty())
println(peaks.joinToString())
}
}

View File

@ -21,9 +21,8 @@ import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import inr.numass.data.api.*
import inr.numass.data.storage.ClassicNumassPoint
import java.util.stream.Collectors
import org.slf4j.LoggerFactory
import kotlin.streams.asSequence
import kotlin.streams.toList
/**
@ -35,10 +34,8 @@ object NumassDataUtils {
override suspend fun getHvData() = TODO()
override val points: List<NumassPoint> by lazy {
val points = sets.stream().flatMap<NumassPoint> { it.points.stream() }
.collect(Collectors.groupingBy<NumassPoint, Double> { it.voltage })
points.entries.stream().map { entry -> SimpleNumassPoint(entry.value, entry.key) }
.toList()
val points = sets.flatMap { it.points }.groupBy { it.voltage }
return@lazy points.entries.map { entry -> SimpleNumassPoint.build(entry.value, entry.key) }
}
override val meta: Meta by lazy {
@ -51,6 +48,35 @@ object NumassDataUtils {
}
}
fun joinByIndex(setName: String, sets: Collection<NumassSet>): NumassSet {
return object : NumassSet {
override suspend fun getHvData() = TODO()
override val points: List<NumassPoint> by lazy {
val points = sets.flatMap { it.points }.groupBy { it.index }
return@lazy points.mapNotNull { (index, points) ->
val voltage = points.first().voltage
if (!points.all { it.voltage == voltage }) {
LoggerFactory.getLogger(javaClass)
.warn("Not all points with index $index have voltage $voltage")
null
} else {
SimpleNumassPoint.build(points, voltage, index)
}
}
}
override val meta: Meta by lazy {
val metaBuilder = MetaBuilder()
sets.forEach { set -> metaBuilder.putNode(set.name, set.meta) }
metaBuilder
}
override val name = setName
}
}
fun adapter(): SpectrumAdapter {
return SpectrumAdapter("Uset", "CR", "CRerr", "Time")
}
@ -67,18 +93,18 @@ object NumassDataUtils {
suspend fun NumassBlock.transformChain(transform: (NumassEvent, NumassEvent) -> Pair<Short, Long>?): NumassBlock {
return SimpleBlock.produce(this.startTime, this.length) {
this.events.asSequence()
.sortedBy { it.timeOffset }
.zipWithNext(transform)
.filterNotNull()
.map { OrphanNumassEvent(it.first, it.second) }.asIterable()
.sortedBy { it.timeOffset }
.zipWithNext(transform)
.filterNotNull()
.map { OrphanNumassEvent(it.first, it.second) }.asIterable()
}
}
suspend fun NumassBlock.filterChain(condition: (NumassEvent, NumassEvent) -> Boolean): NumassBlock {
return SimpleBlock.produce(this.startTime, this.length) {
this.events.asSequence()
.sortedBy { it.timeOffset }
.zipWithNext().filter { condition.invoke(it.first, it.second) }.map { it.second }.asIterable()
.sortedBy { it.timeOffset }
.zipWithNext().filter { condition.invoke(it.first, it.second) }.map { it.second }.asIterable()
}
}
@ -91,7 +117,7 @@ suspend fun NumassBlock.filter(condition: (NumassEvent) -> Boolean): NumassBlock
suspend fun NumassBlock.transform(transform: (NumassEvent) -> OrphanNumassEvent): NumassBlock {
return SimpleBlock.produce(this.startTime, this.length) {
this.events.asSequence()
.map { transform(it) }
.asIterable()
.map { transform(it) }
.asIterable()
}
}

View File

@ -22,18 +22,19 @@ import hep.dataforge.tables.ListTable
import hep.dataforge.tables.Table
import hep.dataforge.tables.TableFormat
import hep.dataforge.tables.TableFormatBuilder
import hep.dataforge.toList
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassEvent
import inr.numass.data.api.NumassPoint.Companion.HV_KEY
import inr.numass.data.api.NumassSet
import inr.numass.data.api.SignalProcessor
import java.lang.IllegalArgumentException
import java.util.stream.Stream
/**
* Created by darksnake on 11.07.2017.
*/
abstract class AbstractAnalyzer @JvmOverloads constructor(private val processor: SignalProcessor? = null) : NumassAnalyzer {
abstract class AbstractAnalyzer @JvmOverloads constructor(private val processor: SignalProcessor? = null) :
NumassAnalyzer {
/**
* Return unsorted stream of events including events from frames.
@ -43,22 +44,24 @@ abstract class AbstractAnalyzer @JvmOverloads constructor(private val processor:
* @param block
* @return
*/
override fun getEvents(block: NumassBlock, meta: Meta): Stream<NumassEvent> {
val loChannel = meta.getInt("window.lo", 0)
val upChannel = meta.getInt("window.up", Integer.MAX_VALUE)
// if (meta.getBoolean("sort", false)) {
// res = res.sorted(compareBy { it.timeOffset })
// }
override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
val range = meta.getRange()
return getAllEvents(block).filter { event ->
event.amplitude.toInt() in loChannel..(upChannel - 1)
}
event.amplitude.toInt() in range
}.toList()
}
protected fun Meta.getRange(): IntRange {
val loChannel = getInt("window.lo", 0)
val upChannel = getInt("window.up", Integer.MAX_VALUE)
return loChannel until upChannel
}
protected fun getAllEvents(block: NumassBlock): Stream<NumassEvent> {
return when {
block.frames.count() == 0L -> block.events
processor == null -> throw IllegalArgumentException("Signal processor needed to analyze frames")
else -> Stream.concat(block.events, block.frames.flatMap { processor.analyze(it) })
else -> Stream.concat(block.events, block.frames.flatMap { processor.process(block, it) })
}
}
@ -70,14 +73,14 @@ abstract class AbstractAnalyzer @JvmOverloads constructor(private val processor:
*/
protected open fun getTableFormat(config: Meta): TableFormat {
return TableFormatBuilder()
.addNumber(HV_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.LENGTH_KEY)
.addNumber(NumassAnalyzer.COUNT_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
.addColumn(NumassAnalyzer.WINDOW_KEY)
.addTime()
.build()
.addNumber(HV_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.LENGTH_KEY)
.addNumber(NumassAnalyzer.COUNT_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
.addColumn(NumassAnalyzer.WINDOW_KEY)
.addTime()
.build()
}
@ -85,18 +88,18 @@ abstract class AbstractAnalyzer @JvmOverloads constructor(private val processor:
val format = getTableFormat(config)
return ListTable.Builder(format)
.rows(set.points.map { point -> analyzeParent(point, config) })
.build()
.rows(set.points.map { point -> analyzeParent(point, config) })
.build()
}
companion object {
val NAME_LIST = arrayOf(
NumassAnalyzer.LENGTH_KEY,
NumassAnalyzer.COUNT_KEY,
NumassAnalyzer.COUNT_RATE_KEY,
NumassAnalyzer.COUNT_RATE_ERROR_KEY,
NumassAnalyzer.WINDOW_KEY,
NumassAnalyzer.TIME_KEY
NumassAnalyzer.LENGTH_KEY,
NumassAnalyzer.COUNT_KEY,
NumassAnalyzer.COUNT_RATE_KEY,
NumassAnalyzer.COUNT_RATE_ERROR_KEY,
NumassAnalyzer.WINDOW_KEY,
NumassAnalyzer.TIME_KEY
)
}
}

View File

@ -28,7 +28,6 @@ import java.util.*
import java.util.concurrent.atomic.AtomicLong
import java.util.concurrent.atomic.AtomicReference
import java.util.stream.IntStream
import java.util.stream.Stream
import kotlin.streams.asSequence
/**
@ -53,10 +52,19 @@ interface NumassAnalyzer {
* @return
*/
fun analyzeParent(point: ParentBlock, config: Meta = Meta.empty()): Values {
// //Add properties to config
// val newConfig = config.builder.apply {
// if (point is NumassPoint) {
// setValue("voltage", point.voltage)
// setValue("index", point.index)
// }
// setValue("channel", point.channel)
// }
val map = HashMap(analyze(point, config).asMap())
if(point is NumassPoint) {
if (point is NumassPoint) {
map[HV_KEY] = Value.of(point.voltage)
}
return ValueMap(map)
}
@ -66,7 +74,7 @@ interface NumassAnalyzer {
* @param block
* @return
*/
fun getEvents(block: NumassBlock, meta: Meta = Meta.empty()): Stream<NumassEvent>
fun getEvents(block: NumassBlock, meta: Meta = Meta.empty()): List<NumassEvent>
/**
* Analyze the whole set. And return results as a table
@ -101,7 +109,7 @@ interface NumassAnalyzer {
fun getAmplitudeSpectrum(block: NumassBlock, config: Meta = Meta.empty()): Table {
val seconds = block.length.toMillis().toDouble() / 1000.0
return getAmplitudeSpectrum(getEvents(block, config).asSequence(), seconds, config)
return getEvents(block, config).asSequence().getAmplitudeSpectrum(seconds, config)
}
companion object {
@ -114,8 +122,6 @@ interface NumassAnalyzer {
const val WINDOW_KEY = "window"
const val TIME_KEY = "timestamp"
val DEFAULT_ANALYZER: NumassAnalyzer = SmartAnalyzer()
val AMPLITUDE_ADAPTER: ValuesAdapter = Adapters.buildXYAdapter(CHANNEL_KEY, COUNT_RATE_KEY)
// val MAX_CHANNEL = 10000
@ -139,23 +145,26 @@ fun Table.countInWindow(loChannel: Short, upChannel: Short): Long {
/**
* Calculate the amplitude spectrum for a given block. The s
*
* @param events
* @param this@getAmplitudeSpectrum
* @param length length in seconds, used for count rate calculation
* @param config
* @return
*/
fun getAmplitudeSpectrum(events: Sequence<NumassEvent>, length: Double, config: Meta = Meta.empty()): Table {
fun Sequence<NumassEvent>.getAmplitudeSpectrum(
length: Double,
config: Meta = Meta.empty()
): Table {
val format = TableFormatBuilder()
.addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
.updateMeta { metaBuilder -> metaBuilder.setNode("config", config) }
.build()
.addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
.updateMeta { metaBuilder -> metaBuilder.setNode("config", config) }
.build()
//optimized for fastest computation
val spectrum: MutableMap<Int, AtomicLong> = HashMap()
events.forEach { event ->
forEach { event ->
val channel = event.amplitude.toInt()
spectrum.getOrPut(channel) {
AtomicLong(0)
@ -163,22 +172,22 @@ fun getAmplitudeSpectrum(events: Sequence<NumassEvent>, length: Double, config:
}
val minChannel = config.getInt("window.lo") { spectrum.keys.min()?:0 }
val maxChannel = config.getInt("window.up") { spectrum.keys.max()?: 4096 }
val minChannel = config.getInt("window.lo") { spectrum.keys.min() ?: 0 }
val maxChannel = config.getInt("window.up") { spectrum.keys.max() ?: 4096 }
return ListTable.Builder(format)
.rows(IntStream.range(minChannel, maxChannel)
.mapToObj { i ->
val value = spectrum[i]?.get() ?: 0
ValueMap.of(
format.namesAsArray(),
i,
value,
value.toDouble() / length,
Math.sqrt(value.toDouble()) / length
)
}
).build()
.rows(IntStream.range(minChannel, maxChannel)
.mapToObj { i ->
val value = spectrum[i]?.get() ?: 0
ValueMap.of(
format.namesAsArray(),
i,
value,
value.toDouble() / length,
Math.sqrt(value.toDouble()) / length
)
}
).build()
}
/**
@ -192,18 +201,18 @@ fun getAmplitudeSpectrum(events: Sequence<NumassEvent>, length: Double, config:
@JvmOverloads
fun Table.withBinning(binSize: Int, loChannel: Int? = null, upChannel: Int? = null): Table {
val format = TableFormatBuilder()
.addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_KEY, Y_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
.addNumber("binSize")
.addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_KEY, Y_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
.addNumber("binSize")
val builder = ListTable.Builder(format)
var chan = loChannel
?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.int }.min().orElse(0)
?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.int }.min().orElse(0)
val top = upChannel
?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.int }.max().orElse(1)
?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.int }.max().orElse(1)
while (chan < top - binSize) {
val count = AtomicLong(0)
@ -218,10 +227,21 @@ fun Table.withBinning(binSize: Int, loChannel: Int? = null, upChannel: Int? = nu
}.forEach { row ->
count.addAndGet(row.getValue(NumassAnalyzer.COUNT_KEY, 0).long)
countRate.accumulateAndGet(row.getDouble(NumassAnalyzer.COUNT_RATE_KEY, 0.0)) { d1, d2 -> d1 + d2 }
countRateDispersion.accumulateAndGet(Math.pow(row.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY, 0.0), 2.0)) { d1, d2 -> d1 + d2 }
countRateDispersion.accumulateAndGet(
Math.pow(
row.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY, 0.0),
2.0
)
) { d1, d2 -> d1 + d2 }
}
val bin = Math.min(binSize, top - chan)
builder.row(chan.toDouble() + bin.toDouble() / 2.0, count.get(), countRate.get(), Math.sqrt(countRateDispersion.get()), bin)
builder.row(
chan.toDouble() + bin.toDouble() / 2.0,
count.get(),
countRate.get(),
Math.sqrt(countRateDispersion.get()),
bin
)
chan += binSize
}
return builder.build()
@ -236,26 +256,24 @@ fun Table.withBinning(binSize: Int, loChannel: Int? = null, upChannel: Int? = nu
*/
fun subtractAmplitudeSpectrum(sp1: Table, sp2: Table): Table {
val format = TableFormatBuilder()
.addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
.build()
.addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
.build()
val builder = ListTable.Builder(format)
sp1.forEach { row1 ->
val channel = row1.getDouble(NumassAnalyzer.CHANNEL_KEY)
val row2 = sp2.rows.asSequence().find { it.getDouble(NumassAnalyzer.CHANNEL_KEY) == channel } //t2[channel]
if (row2 == null) {
throw RuntimeException("Reference for channel $channel not found");
val row2 = sp2.rows.asSequence().find { it.getDouble(NumassAnalyzer.CHANNEL_KEY) == channel }
?: ValueMap.ofPairs(NumassAnalyzer.COUNT_RATE_KEY to 0.0, NumassAnalyzer.COUNT_RATE_ERROR_KEY to 0.0)
} else {
val value = Math.max(row1.getDouble(NumassAnalyzer.COUNT_RATE_KEY) - row2.getDouble(NumassAnalyzer.COUNT_RATE_KEY), 0.0)
val error1 = row1.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
val error2 = row2.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
val error = Math.sqrt(error1 * error1 + error2 * error2)
builder.row(channel, value, error)
}
val value =
Math.max(row1.getDouble(NumassAnalyzer.COUNT_RATE_KEY) - row2.getDouble(NumassAnalyzer.COUNT_RATE_KEY), 0.0)
val error1 = row1.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
val error2 = row2.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
val error = Math.sqrt(error1 * error1 + error2 * error2)
builder.row(channel, value, error)
}
return builder.build()
}

View File

@ -32,10 +32,18 @@ import inr.numass.data.api.*
import inr.numass.data.api.NumassPoint.Companion.HV_KEY
import java.util.*
import java.util.concurrent.atomic.AtomicLong
import java.util.stream.Stream
import kotlin.math.sqrt
import kotlin.collections.List
import kotlin.collections.asSequence
import kotlin.collections.count
import kotlin.collections.first
import kotlin.collections.map
import kotlin.collections.set
import kotlin.collections.sortBy
import kotlin.collections.sumBy
import kotlin.collections.sumByDouble
import kotlin.collections.toMutableList
import kotlin.math.*
import kotlin.streams.asSequence
import kotlin.streams.asStream
/**
@ -43,10 +51,19 @@ import kotlin.streams.asStream
* Created by darksnake on 11.07.2017.
*/
@ValueDefs(
ValueDef(key = "separateParallelBlocks", type = [ValueType.BOOLEAN], info = "If true, then parallel blocks will be forced to be evaluated separately"),
ValueDef(key = "chunkSize", type = [ValueType.NUMBER], def = "-1", info = "The number of events in chunk to split the chain into. If negative, no chunks are used")
ValueDef(
key = "separateParallelBlocks",
type = [ValueType.BOOLEAN],
info = "If true, then parallel blocks will be forced to be evaluated separately"
),
ValueDef(
key = "chunkSize",
type = [ValueType.NUMBER],
def = "-1",
info = "The number of events in chunk to split the chain into. If negative, no chunks are used"
)
)
class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(processor) {
open class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(processor) {
override fun analyze(block: NumassBlock, config: Meta): Values {
//In case points inside points
@ -54,27 +71,33 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
return analyzeParent(block, config)
}
val loChannel = config.getInt("window.lo", 0)
val upChannel = config.getInt("window.up", Integer.MAX_VALUE)
val t0 = getT0(block, config).toLong()
val chunkSize = config.getInt("chunkSize", -1)
val res = if (chunkSize > 0) {
getEventsWithDelay(block, config)
.chunked(chunkSize) { analyzeSequence(it.asSequence(), t0) }
.toList()
.mean(config.getEnum("mean", WEIGHTED))
} else {
analyzeSequence(getEventsWithDelay(block, config), t0)
val count = super.getEvents(block, config).count()
val length = block.length.toNanos().toDouble() / 1e9
val res = when {
count < 1000 -> ValueMap.ofPairs(
LENGTH_KEY to length,
COUNT_KEY to count,
COUNT_RATE_KEY to count.toDouble() / length,
COUNT_RATE_ERROR_KEY to sqrt(count.toDouble()) / length
)
chunkSize > 0 -> getEventsWithDelay(block, config)
.chunked(chunkSize) { analyzeSequence(it.asSequence(), t0) }
.toList()
.mean(config.getEnum("mean", WEIGHTED))
else -> analyzeSequence(getEventsWithDelay(block, config), t0)
}
return ValueMap.Builder(res)
.putValue(NumassAnalyzer.WINDOW_KEY, arrayOf(loChannel, upChannel))
.putValue(NumassAnalyzer.TIME_KEY, block.startTime)
.putValue(T0_KEY, t0.toDouble() / 1000.0)
.build()
.putValue("blockLength", length)
.putValue(NumassAnalyzer.WINDOW_KEY, config.getRange())
.putValue(NumassAnalyzer.TIME_KEY, block.startTime)
.putValue(T0_KEY, t0.toDouble() / 1000.0)
.build()
}
@ -82,22 +105,27 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
val totalN = AtomicLong(0)
val totalT = AtomicLong(0)
sequence.filter { pair -> pair.second >= t0 }
.forEach { pair ->
totalN.incrementAndGet()
//TODO add progress listener here
totalT.addAndGet(pair.second)
}
.forEach { pair ->
totalN.incrementAndGet()
//TODO add progress listener here
totalT.addAndGet(pair.second)
}
val countRate = 1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000)//1e9 / (totalT.get() / totalN.get() - t0);
val countRateError = countRate / Math.sqrt(totalN.get().toDouble())
if (totalN.toInt() == 0) {
error("Zero number of intervals")
}
val countRate =
1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000)//1e9 / (totalT.get() / totalN.get() - t0);
val countRateError = countRate / sqrt(totalN.get().toDouble())
val length = totalT.get() / 1e9
val count = (length * countRate).toLong()
return ValueMap.ofPairs(
NumassAnalyzer.LENGTH_KEY to length,
NumassAnalyzer.COUNT_KEY to count,
NumassAnalyzer.COUNT_RATE_KEY to countRate,
NumassAnalyzer.COUNT_RATE_ERROR_KEY to countRateError
LENGTH_KEY to length,
COUNT_KEY to count,
COUNT_RATE_KEY to countRate,
COUNT_RATE_ERROR_KEY to countRateError
)
}
@ -128,45 +156,53 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
if (this.isEmpty()) {
return ValueMap.Builder()
.putValue(LENGTH_KEY, 0)
.putValue(COUNT_KEY, 0)
.putValue(COUNT_RATE_KEY, 0)
.putValue(COUNT_RATE_ERROR_KEY, 0)
.build()
.putValue(LENGTH_KEY, 0)
.putValue(COUNT_KEY, 0)
.putValue(COUNT_RATE_KEY, 0)
.putValue(COUNT_RATE_ERROR_KEY, 0)
.build()
}
val totalTime = sumByDouble { it.getDouble(LENGTH_KEY) }
val (countRate, countRateDispersion) = when (method) {
ARITHMETIC -> Pair(
sumByDouble { it.getDouble(COUNT_RATE_KEY) } / size,
sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY), 2.0) } / size / size
sumByDouble { it.getDouble(COUNT_RATE_KEY) } / size,
sumByDouble { it.getDouble(COUNT_RATE_ERROR_KEY).pow(2.0) } / size / size
)
WEIGHTED -> Pair(
sumByDouble { it.getDouble(COUNT_RATE_KEY) * it.getDouble(LENGTH_KEY) } / totalTime,
sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY) * it.getDouble(LENGTH_KEY) / totalTime, 2.0) }
sumByDouble { it.getDouble(COUNT_RATE_KEY) * it.getDouble(LENGTH_KEY) } / totalTime,
sumByDouble { (it.getDouble(COUNT_RATE_ERROR_KEY) * it.getDouble(LENGTH_KEY) / totalTime).pow(2.0) }
)
GEOMETRIC -> {
val mean = Math.exp(sumByDouble { Math.log(it.getDouble(COUNT_RATE_KEY)) } / size)
val variance = Math.pow(mean / size, 2.0) * sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY) / it.getDouble(COUNT_RATE_KEY), 2.0) }
val mean = exp(sumByDouble { ln(it.getDouble(COUNT_RATE_KEY)) } / size)
val variance = (mean / size).pow(2.0) * sumByDouble {
(it.getDouble(COUNT_RATE_ERROR_KEY) / it.getDouble(
COUNT_RATE_KEY
)).pow(2.0)
}
Pair(mean, variance)
}
}
return ValueMap.Builder(first())
.putValue(LENGTH_KEY, totalTime)
.putValue(COUNT_KEY, sumBy { it.getInt(COUNT_KEY) })
.putValue(COUNT_RATE_KEY, countRate)
.putValue(COUNT_RATE_ERROR_KEY, sqrt(countRateDispersion))
.build()
.putValue(LENGTH_KEY, totalTime)
.putValue(COUNT_KEY, sumBy { it.getInt(COUNT_KEY) })
.putValue(COUNT_RATE_KEY, countRate)
.putValue(COUNT_RATE_ERROR_KEY, sqrt(countRateDispersion))
.build()
}
@ValueDefs(
ValueDef(key = "t0", type = arrayOf(ValueType.NUMBER), info = "Constant t0 cut"),
ValueDef(key = "t0.crFraction", type = arrayOf(ValueType.NUMBER), info = "The relative fraction of events that should be removed by time cut"),
ValueDef(key = "t0.min", type = arrayOf(ValueType.NUMBER), def = "0", info = "Minimal t0")
ValueDef(key = "t0", type = arrayOf(ValueType.NUMBER), info = "Constant t0 cut"),
ValueDef(
key = "t0.crFraction",
type = arrayOf(ValueType.NUMBER),
info = "The relative fraction of events that should be removed by time cut"
),
ValueDef(key = "t0.min", type = arrayOf(ValueType.NUMBER), def = "0", info = "Minimal t0")
)
private fun getT0(block: NumassBlock, meta: Meta): Int {
protected fun getT0(block: NumassBlock, meta: Meta): Int {
return if (meta.hasValue("t0")) {
meta.getInt("t0")
} else if (meta.hasMeta("t0")) {
@ -175,7 +211,7 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
if (cr < meta.getDouble("t0.minCR", 0.0)) {
0
} else {
Math.max(-1e9 / cr * Math.log(1.0 - fraction), meta.getDouble("t0.min", 0.0)).toInt()
max(-1e9 / cr * ln(1.0 - fraction), meta.getDouble("t0.min", 0.0)).toInt()
}
} else {
0
@ -200,16 +236,16 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
*/
fun getEventsWithDelay(block: NumassBlock, config: Meta): Sequence<Pair<NumassEvent, Long>> {
val inverted = config.getBoolean("inverted", true)
val events: Stream<NumassEvent> = super.getEvents(block, config).let { stream ->
if (block is ParentBlock && !block.isSequential) {
stream.sorted(compareBy { it.timeOffset })
} else {
stream
}
//range is included in super.getEvents
val events = super.getEvents(block, config).toMutableList()
if (config.getBoolean("sortEvents", false) || (block is ParentBlock && !block.isSequential)) {
//sort in place if needed
events.sortBy { it.timeOffset }
}
return events.asSequence().zipWithNext { prev, next ->
val delay = Math.max(next.timeOffset - prev.timeOffset, 0)
val delay = max(next.timeOffset - prev.timeOffset, 0)
if (inverted) {
Pair(next, delay)
} else {
@ -225,35 +261,37 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
* @param meta
* @return
*/
override fun getEvents(block: NumassBlock, meta: Meta): Stream<NumassEvent> {
override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
val t0 = getT0(block, meta).toLong()
return getEventsWithDelay(block, meta).filter { pair -> pair.second >= t0 }.asStream().map { it.first }
return getEventsWithDelay(block, meta)
.filter { pair -> pair.second >= t0 }
.map { it.first }.toList()
}
public override fun getTableFormat(config: Meta): TableFormat {
return TableFormatBuilder()
.addNumber(HV_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.LENGTH_KEY)
.addNumber(NumassAnalyzer.COUNT_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
.addColumn(NumassAnalyzer.WINDOW_KEY)
.addTime()
.addNumber(T0_KEY)
.build()
.addNumber(HV_KEY, X_VALUE_KEY)
.addNumber(LENGTH_KEY)
.addNumber(COUNT_KEY)
.addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
.addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
.addColumn(NumassAnalyzer.WINDOW_KEY)
.addTime()
.addNumber(T0_KEY)
.build()
}
companion object {
const val T0_KEY = "t0"
val NAME_LIST = arrayOf(
NumassAnalyzer.LENGTH_KEY,
NumassAnalyzer.COUNT_KEY,
NumassAnalyzer.COUNT_RATE_KEY,
NumassAnalyzer.COUNT_RATE_ERROR_KEY,
NumassAnalyzer.WINDOW_KEY,
NumassAnalyzer.TIME_KEY,
T0_KEY
LENGTH_KEY,
COUNT_KEY,
COUNT_RATE_KEY,
COUNT_RATE_ERROR_KEY,
NumassAnalyzer.WINDOW_KEY,
NumassAnalyzer.TIME_KEY,
T0_KEY
)
}
}

View File

@ -15,7 +15,7 @@ import java.nio.file.Path
import java.time.Duration
import java.time.Instant
import java.util.stream.Stream
import kotlin.streams.asStream
import java.util.stream.StreamSupport
/**
* Created by darksnake on 08.07.2017.
@ -47,11 +47,12 @@ class ClassicNumassPoint(private val envelope: Envelope) : NumassPoint {
//TODO split blocks using meta
private inner class ClassicBlock(
override val startTime: Instant,
override val length: Duration) : NumassBlock, Iterable<NumassEvent> {
override val startTime: Instant,
override val length: Duration
) : NumassBlock, Iterable<NumassEvent> {
override val events: Stream<NumassEvent>
get() = this.asSequence().asStream()
get() = StreamSupport.stream(this.spliterator(), false)
override fun iterator(): Iterator<NumassEvent> {
val timeCoef = envelope.meta.getDouble("time_coeff", 50.0)
@ -78,7 +79,8 @@ class ClassicNumassPoint(private val envelope: Envelope) : NumassPoint {
}
}
} catch (e: IOException) {
LoggerFactory.getLogger(this@ClassicNumassPoint.javaClass).error("Unexpected IOException when reading block", e)
LoggerFactory.getLogger(this@ClassicNumassPoint.javaClass)
.error("Unexpected IOException when reading block", e)
return false
}

View File

@ -35,11 +35,11 @@ class NumassDataFactory : DataFactory<NumassSet>(NumassSet::class.java) {
override fun fill(builder: DataNodeEditor<NumassSet>, context: Context, meta: Meta) {
runBlocking {
val storage = NumassDirectory.read(context,meta.getString("path")) as Storage
val storage = NumassDirectory.read(context, meta.getString("path")) as Storage
storage.sequence().forEach { pair ->
val value = pair.second
if (value is NumassSet) {
builder.putStatic(pair.first.unescaped, value)
builder.putStatic(pair.first.unescaped, value, value.meta)
}
}
}

View File

@ -44,10 +44,10 @@ import kotlin.streams.toList
* @author darksnake
*/
class NumassDataLoader(
override val context: Context,
override val parent: StorageElement?,
override val name: String,
override val path: Path
override val context: Context,
override val parent: StorageElement?,
override val name: String,
override val path: Path
) : Loader<NumassPoint>, NumassSet, Provider, FileStorageElement {
override val type: KClass<NumassPoint> = NumassPoint::class
@ -63,26 +63,24 @@ class NumassDataLoader(
}
override suspend fun getHvData(): Table? {
val hvEnvelope = path.resolve(HV_FRAGMENT_NAME)?.let {
val hvEnvelope = path.resolve(HV_FRAGMENT_NAME).let {
NumassEnvelopeType.infer(it)?.reader?.read(it) ?: error("Can't read hv file")
}
return hvEnvelope?.let {
try {
ColumnedDataReader(it.data.stream, "timestamp", "block", "value").toTable()
} catch (ex: IOException) {
LoggerFactory.getLogger(javaClass).error("Failed to load HV data from file", ex)
null
}
return try {
ColumnedDataReader(hvEnvelope.data.stream, "timestamp", "block", "value").toTable()
} catch (ex: IOException) {
LoggerFactory.getLogger(javaClass).error("Failed to load HV data from file", ex)
null
}
}
private val pointEnvelopes: List<Envelope> by lazy {
Files.list(path)
.filter { it.fileName.toString().startsWith(POINT_FRAGMENT_NAME) }
.map {
NumassEnvelopeType.infer(it)?.reader?.read(it) ?: error("Can't read point file")
}.toList()
.filter { it.fileName.toString().startsWith(POINT_FRAGMENT_NAME) }
.map {
NumassEnvelopeType.infer(it)?.reader?.read(it) ?: error("Can't read point file")
}.toList()
}
val isReversed: Boolean
@ -189,4 +187,8 @@ class NumassDataLoader(
}
fun Context.readNumassSet(path:Path):NumassDataLoader{
return NumassDataLoader(this,null,path.fileName.toString(),path)
}

View File

@ -1,33 +0,0 @@
syntax = "proto3";
package inr.numass.data;
message Point {
// A single channel for multichannel detector readout
message Channel {
//A continuous measurement block
message Block {
// Raw data frame
message Frame {
uint64 time = 1; // Time in nanos from the beginning of the block
bytes data = 2; // Frame data as an array of int16 mesured in arbitrary channels
}
// Event block obtained directly from device of from frame analysis
// In order to save space, times and amplitudes are in separate arrays.
// Amplitude and time with the same index correspond to the same event
message Events {
repeated uint64 times = 1; // Array of time in nanos from the beginning of the block
repeated uint64 amplitudes = 2; // Array of amplitudes of events in channels
}
uint64 time = 1; // Block start in epoch nanos
repeated Frame frames = 2; // Frames array
Events events = 3; // Events array
uint64 length = 4; // block size in nanos. If missing, take from meta.
uint64 bin_size = 5; // tick size in nanos. Obsolete, to be removed
}
uint64 id = 1; // The number of measuring channel
repeated Block blocks = 2; // Blocks
}
repeated Channel channels = 1; // Array of measuring channels
}

View File

@ -5,6 +5,12 @@ plugins {
apply plugin: 'kotlin'
//apply plugin: 'org.openjfx.javafxplugin'
//
//javafx {
// modules = [ 'javafx.controls' ]
//}
//if (!hasProperty('mainClass')) {
// ext.mainClass = 'inr.numass.LaunchGrindShell'
//}
@ -30,6 +36,8 @@ dependencies {
compile group: 'commons-cli', name: 'commons-cli', version: '1.+'
compile group: 'commons-io', name: 'commons-io', version: '2.+'
compile project(':numass-core')
compile project(':numass-core:numass-signal-processing')
compileOnly "org.jetbrains.kotlin:kotlin-main-kts:1.3.21"
compile "hep.dataforge:dataforge-minuit" //project(':dataforge-stat:dataforge-minuit')
compile "hep.dataforge:grind-terminal" //project(':dataforge-grind:grind-terminal')
compile "hep.dataforge:dataforge-gui"

View File

@ -8,6 +8,7 @@ import hep.dataforge.grind.workspace.GrindWorkspace
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import hep.dataforge.workspace.FileBasedWorkspace
import hep.dataforge.workspace.Workspace
import groovy.cli.commons.CliBuilder
/**
* Created by darksnake on 29-Aug-16.

View File

@ -41,11 +41,11 @@ import org.apache.commons.math3.util.FastMath
* @author Alexander Nozik
*/
@PluginDef(
group = "inr.numass",
name = "numass",
dependsOn = ["hep.dataforge:functions", "hep.dataforge:MINUIT", "hep.dataforge:actions"],
support = false,
info = "Numass data analysis tools"
group = "inr.numass",
name = "numass",
dependsOn = ["hep.dataforge:functions", "hep.dataforge:MINUIT", "hep.dataforge:actions"],
support = false,
info = "Numass data analysis tools"
)
class NumassPlugin : BasicPlugin() {
@ -58,20 +58,22 @@ class NumassPlugin : BasicPlugin() {
}
private val tasks = listOf(
NumassFitScanSummaryTask,
NumassFitSummaryTask,
selectTask,
analyzeTask,
mergeTask,
mergeEmptyTask,
monitorTableTask,
subtractEmptyTask,
transformTask,
filterTask,
fitTask,
plotFitTask,
histogramTask,
fitScanTask
NumassFitScanSummaryTask,
NumassFitSummaryTask,
selectTask,
analyzeTask,
mergeTask,
mergeEmptyTask,
monitorTableTask,
subtractEmptyTask,
transformTask,
filterTask,
fitTask,
plotFitTask,
histogramTask,
fitScanTask,
sliceTask,
subThresholdTask
)
@Provides(Task.TASK_TARGET)
@ -109,7 +111,10 @@ class NumassPlugin : BasicPlugin() {
BivariateFunction { E: Double, U: Double ->
val D = E - U
val factor = 7.33 - E / 1000.0 / 3.0
return@BivariateFunction 1.0 - (3.05346E-7 * D - 5.45738E-10 * Math.pow(D, 2.0) - 6.36105E-14 * Math.pow(D, 3.0)) * factor
return@BivariateFunction 1.0 - (3.05346E-7 * D - 5.45738E-10 * Math.pow(
D,
2.0
) - 6.36105E-14 * Math.pow(D, 3.0)) * factor
}
}
}
@ -263,11 +268,19 @@ class NumassPlugin : BasicPlugin() {
val transmissionFile = an.getString("transFile")
return TransmissionInterpolator
.fromFile(context, transmissionFile, transXName, transYName, nSmooth, w, stitchBorder)
.fromFile(context, transmissionFile, transXName, transYName, nSmooth, w, stitchBorder)
} else if (an.hasMeta("transBuildAction")) {
val transBuild = an.getMeta("transBuildAction")
try {
return TransmissionInterpolator.fromAction(context, transBuild, transXName, transYName, nSmooth, w, stitchBorder)
return TransmissionInterpolator.fromAction(
context,
transBuild,
transXName,
transYName,
nSmooth,
w,
stitchBorder
)
} catch (ex: InterruptedException) {
throw RuntimeException("Transmission builder failed")
}
@ -281,7 +294,11 @@ class NumassPlugin : BasicPlugin() {
return if (an.hasMeta(ValuesAdapter.ADAPTER_KEY)) {
Adapters.buildAdapter(an.getMeta(ValuesAdapter.ADAPTER_KEY))
} else {
Adapters.buildXYAdapter(NumassPoint.HV_KEY, NumassAnalyzer.COUNT_RATE_KEY, NumassAnalyzer.COUNT_RATE_ERROR_KEY)
Adapters.buildXYAdapter(
NumassPoint.HV_KEY,
NumassAnalyzer.COUNT_RATE_KEY,
NumassAnalyzer.COUNT_RATE_ERROR_KEY
)
}
}
@ -303,7 +320,13 @@ class NumassPlugin : BasicPlugin() {
* @return
*/
@JvmOverloads
fun displayChart(title: String, context: Context = Global, width: Double = 800.0, height: Double = 600.0, meta: Meta = Meta.empty()): JFreeChartFrame {
fun displayChart(
title: String,
context: Context = Global,
width: Double = 800.0,
height: Double = 600.0,
meta: Meta = Meta.empty()
): JFreeChartFrame {
val frame = JFreeChartFrame()
frame.configure(meta)
frame.configureValue("title", title)

View File

@ -48,6 +48,7 @@ import java.awt.Font
import java.io.IOException
import java.io.OutputStream
import java.lang.Math.*
import java.time.Instant
import java.util.*
/**
@ -108,11 +109,11 @@ object NumassUtils {
fun writeEnvelope(stream: OutputStream, meta: Meta, dataWriter: (OutputStream) -> Unit) {
try {
TaglessEnvelopeType.INSTANCE.writer.write(
stream,
EnvelopeBuilder()
.meta(meta)
.data(dataWriter)
.build()
stream,
EnvelopeBuilder()
.meta(meta)
.data(dataWriter)
.build()
)
stream.flush()
} catch (e: IOException) {
@ -148,10 +149,10 @@ object NumassUtils {
builder.name = set.name
set.points.forEach { point ->
val pointMeta = MetaBuilder("point")
.putValue("voltage", point.voltage)
.putValue("index", point.meta.getInt("external_meta.point_index", -1))
.putValue("run", point.meta.getString("external_meta.session", ""))
.putValue("group", point.meta.getString("external_meta.group", ""))
.putValue("voltage", point.voltage)
.putValue("index", point.meta.getInt("external_meta.point_index", -1))
.putValue("run", point.meta.getString("external_meta.session", ""))
.putValue("group", point.meta.getString("external_meta.group", ""))
val pointName = "point_" + point.meta.getInt("external_meta.point_index", point.hashCode())
builder.putData(pointName, point, pointMeta)
}
@ -176,8 +177,8 @@ object NumassUtils {
fun getFSS(context: Context, meta: Meta): FSS? {
return if (meta.getBoolean("useFSS", true)) {
val fssBinary: Binary? = meta.optString("fssFile")
.map { fssFile -> context.getFile(fssFile).binary }
.orElse(context.getResource("data/FS.txt"))
.map { fssFile -> context.getFile(fssFile).binary }
.orElse(context.getResource("data/FS.txt"))
fssBinary?.let { FSS(it.stream) } ?: throw RuntimeException("Could not load FSS file")
} else {
null
@ -189,16 +190,17 @@ fun getFSS(context: Context, meta: Meta): FSS? {
* Evaluate groovy expression using numass point as parameter
*
* @param expression
* @param point
* @param values
* @return
*/
fun pointExpression(expression: String, point: Values): Double {
fun pointExpression(expression: String, values: Values): Double {
val exprParams = HashMap<String, Any>()
//Adding all point values to expression parameters
point.names.forEach { name -> exprParams[name] = point.getValue(name).value }
values.names.forEach { name -> exprParams[name] = values.getValue(name).value }
//Adding aliases for commonly used parameters
exprParams["T"] = point.getDouble("length")
exprParams["U"] = point.getDouble("voltage")
exprParams["T"] = values.getDouble("length")
exprParams["U"] = values.getDouble("voltage")
exprParams["time"] = values.optTime("timestamp").orElse(Instant.EPOCH).epochSecond
return ExpressionUtils.function(expression, exprParams)
}
@ -212,8 +214,8 @@ fun JFreeChartFrame.addSetMarkers(sets: Collection<NumassSet>) {
sets.stream().forEach { set ->
val start = set.startTime;
val stop = set.meta.optValue("end_time").map { it.time }
.orElse(start.plusSeconds(300))
.minusSeconds(60)
.orElse(start.plusSeconds(300))
.minusSeconds(60)
val marker = IntervalMarker(start.toEpochMilli().toDouble(), stop.toEpochMilli().toDouble(), paint)
marker.label = set.name
marker.labelFont = Font("Verdana", Font.BOLD, 20);
@ -230,15 +232,25 @@ fun subtractSpectrum(merge: Table, empty: Table, logger: Logger? = null): Table
merge.rows.forEach { point ->
val pointBuilder = ValueMap.Builder(point)
val referencePoint = empty.rows
.filter { p -> Math.abs(p.getDouble(NumassPoint.HV_KEY) - point.getDouble(NumassPoint.HV_KEY)) < 0.1 }.findFirst()
.filter { p -> Math.abs(p.getDouble(NumassPoint.HV_KEY) - point.getDouble(NumassPoint.HV_KEY)) < 0.1 }
.findFirst()
if (referencePoint.isPresent) {
pointBuilder.putValue(
NumassAnalyzer.COUNT_RATE_KEY,
Math.max(0.0, point.getDouble(NumassAnalyzer.COUNT_RATE_KEY) - referencePoint.get().getDouble(NumassAnalyzer.COUNT_RATE_KEY))
NumassAnalyzer.COUNT_RATE_KEY,
Math.max(
0.0,
point.getDouble(NumassAnalyzer.COUNT_RATE_KEY) - referencePoint.get().getDouble(NumassAnalyzer.COUNT_RATE_KEY)
)
)
pointBuilder.putValue(
NumassAnalyzer.COUNT_RATE_ERROR_KEY,
Math.sqrt(Math.pow(point.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY), 2.0) + Math.pow(referencePoint.get().getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY), 2.0)))
NumassAnalyzer.COUNT_RATE_ERROR_KEY,
Math.sqrt(
Math.pow(
point.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY),
2.0
) + Math.pow(referencePoint.get().getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY), 2.0)
)
)
} else {
logger?.warn("No reference point found for voltage = {}", point.getDouble(NumassPoint.HV_KEY))
}

View File

@ -12,6 +12,7 @@ import hep.dataforge.values.ValueType.STRING
import inr.numass.NumassUtils
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.api.NumassSet
import inr.numass.data.analyzers.SmartAnalyzer
/**
* The action performs the readout of data and collection of count rate into a table
@ -25,7 +26,7 @@ import inr.numass.data.api.NumassSet
object AnalyzeDataAction : OneToOneAction<NumassSet, Table>("numass.analyze", NumassSet::class.java, Table::class.java) {
override fun execute(context: Context, name: String, input: NumassSet, inputMeta: Laminate): Table {
//TODO add processor here
val analyzer = NumassAnalyzer.DEFAULT_ANALYZER
val analyzer: NumassAnalyzer = SmartAnalyzer()
val res = analyzer.analyzeSet(input, inputMeta)
render(context, name, NumassUtils.wrap(res, inputMeta))

View File

@ -45,12 +45,11 @@ object MergeDataAction : ManyToOneAction<Table, Table>("numass.merge", Table::cl
override fun buildGroups(context: Context, input: DataNode<Table>, actionMeta: Meta): List<DataNode<Table>> {
val meta = inputMeta(context, input.meta, actionMeta)
val groups: List<DataNode<Table>> = if (meta.hasValue("grouping.byValue")) {
return if (meta.hasValue("grouping.byValue")) {
super.buildGroups(context, input, actionMeta)
} else {
GroupBuilder.byValue(MERGE_NAME, meta.getString(MERGE_NAME, input.name)).group(input)
}
return groups
}
override fun execute(context: Context, nodeName: String, data: Map<String, Table>, meta: Laminate): Table {

View File

@ -52,7 +52,7 @@ object TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>("timeSpectrum",Nu
val histogram = UnivariateHistogram.buildUniform(0.0, binSize * binNum, binSize)
.fill(analyzer
.getEventsWithDelay(input, inputMeta)
.getEventsWithDelay(input, analyzerMeta)
.asStream()
.mapToDouble { it.second.toDouble() / 1000.0 }
).asTable()
@ -128,6 +128,8 @@ object TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>("timeSpectrum",Nu
val minT0 = inputMeta.getDouble("t0.min", 0.0)
val maxT0 = inputMeta.getDouble("t0.max", 1e9 / cr)
val steps = inputMeta.getInt("t0.steps", 100)
val t0Step = inputMeta.getDouble("t0.step", (maxT0-minT0)/(steps - 1))
val norm = if (inputMeta.getBoolean("normalize", false)) {
cr
@ -135,7 +137,7 @@ object TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>("timeSpectrum",Nu
1.0
}
(0..steps).map { minT0 + (maxT0 - minT0) / steps * it }.map { t ->
(0..steps).map { minT0 + t0Step * it }.map { t ->
val result = analyzer.analyze(input, analyzerMeta.builder.setValue("t0", t))
if (Thread.currentThread().isInterrupted) {

View File

@ -11,17 +11,17 @@ import hep.dataforge.isAnonymous
import hep.dataforge.meta.Laminate
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaUtils
import hep.dataforge.tables.ColumnFormat
import hep.dataforge.tables.ColumnTable
import hep.dataforge.tables.ListColumn
import hep.dataforge.tables.Table
import hep.dataforge.tables.*
import hep.dataforge.values.ValueType.NUMBER
import hep.dataforge.values.ValueType.STRING
import hep.dataforge.values.Values
import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_ERROR_KEY
import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_KEY
import inr.numass.data.analyzers.NumassAnalyzer.Companion.TIME_KEY
import inr.numass.pointExpression
import java.util.*
import kotlin.math.pow
import kotlin.math.sqrt
/**
* Apply corrections and transformations to analyzed data
@ -29,10 +29,17 @@ import java.util.*
*/
@TypedActionDef(name = "numass.transform", inputType = Table::class, outputType = Table::class)
@ValueDefs(
ValueDef(key = "correction", info = "An expression to correct count number depending on potential `U`, point length `T` and point itself as `point`"),
ValueDef(key = "utransform", info = "Expression for voltage transformation. Uses U as input")
ValueDef(
key = "correction",
info = "An expression to correct count number depending on potential `U`, point length `T` and point itself as `point`"
),
ValueDef(key = "utransform", info = "Expression for voltage transformation. Uses U as input")
)
@NodeDef(
key = "correction",
multiple = true,
descriptor = "method::inr.numass.actions.TransformDataAction.makeCorrection"
)
@NodeDef(key = "correction", multiple = true, descriptor = "method::inr.numass.actions.TransformDataAction.makeCorrection")
object TransformDataAction : OneToOneAction<Table, Table>("numass.transform", Table::class.java, Table::class.java) {
override fun execute(context: Context, name: String, input: Table, meta: Laminate): Table {
@ -43,9 +50,10 @@ object TransformDataAction : OneToOneAction<Table, Table>("numass.transform", Ta
meta.optMeta("corrections").ifPresent { cors ->
MetaUtils.nodeStream(cors)
.map<Meta> { it.second }
.map<Correction> { this.makeCorrection(it) }
.forEach { corrections.add(it) }
.filter { it.first.length == 1 }
.map<Meta> { it.second }
.map { makeCorrection(it) }
.forEach { corrections.add(it) }
}
if (meta.hasValue("correction")) {
@ -64,28 +72,39 @@ object TransformDataAction : OneToOneAction<Table, Table>("numass.transform", Ta
if (!correction.isAnonymous) {
table = table.buildColumn(ColumnFormat.build(correction.name, NUMBER)) { correction.corr(this) }
if (correction.hasError()) {
table = table.buildColumn(ColumnFormat.build(correction.name + ".err", NUMBER)) { correction.corrErr(this) }
table = table.buildColumn(ColumnFormat.build(correction.name + ".err", NUMBER)) {
correction.corrErr(this)
}
}
}
}
// adding original count rate and error columns
table = table.addColumn(ListColumn(ColumnFormat.build("$COUNT_RATE_KEY.orig", NUMBER), table.getColumn(COUNT_RATE_KEY).stream()))
table = table.addColumn(ListColumn(ColumnFormat.build("$COUNT_RATE_ERROR_KEY.orig", NUMBER), table
.getColumn(COUNT_RATE_ERROR_KEY).stream()))
table = table.addColumn(
ListColumn(
ColumnFormat.build("$COUNT_RATE_KEY.orig", NUMBER),
table.getColumn(COUNT_RATE_KEY).stream()
)
)
table = table.addColumn(
ListColumn(
ColumnFormat.build("$COUNT_RATE_ERROR_KEY.orig", NUMBER), table
.getColumn(COUNT_RATE_ERROR_KEY).stream()
)
)
val cr = ArrayList<Double>()
val crErr = ArrayList<Double>()
table.rows.forEach { point ->
val correctionFactor = corrections.stream()
.mapToDouble { cor -> cor.corr(point) }
.reduce { d1, d2 -> d1 * d2 }.orElse(1.0)
.mapToDouble { cor -> cor.corr(point) }
.reduce { d1, d2 -> d1 * d2 }.orElse(1.0)
val relativeCorrectionError = Math.sqrt(
corrections.stream()
.mapToDouble { cor -> cor.relativeErr(point) }
.reduce { d1, d2 -> d1 * d1 + d2 * d2 }.orElse(0.0)
corrections.stream()
.mapToDouble { cor -> cor.relativeErr(point) }
.reduce { d1, d2 -> d1 * d1 + d2 * d2 }.orElse(0.0)
)
val originalCR = point.getDouble(COUNT_RATE_KEY)
val originalCRErr = point.getDouble(COUNT_RATE_ERROR_KEY)
@ -93,13 +112,15 @@ object TransformDataAction : OneToOneAction<Table, Table>("numass.transform", Ta
if (relativeCorrectionError == 0.0) {
crErr.add(originalCRErr * correctionFactor)
} else {
crErr.add(Math.sqrt(Math.pow(originalCRErr / originalCR, 2.0) + Math.pow(relativeCorrectionError, 2.0)) * originalCR)
crErr.add(sqrt((originalCRErr / originalCR).pow(2.0) + relativeCorrectionError.pow(2.0)) * originalCR)
}
}
//replacing cr column
val res = table.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_KEY).format, cr.stream()))
.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_ERROR_KEY).format, crErr.stream()))
.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_ERROR_KEY).format, crErr.stream()))
.sort(TIME_KEY)
context.output[this@TransformDataAction.name, name].render(res, meta)
return res
@ -107,34 +128,23 @@ object TransformDataAction : OneToOneAction<Table, Table>("numass.transform", Ta
@ValueDefs(
ValueDef(key = "value", type = arrayOf(NUMBER, STRING), info = "Value or function to multiply count rate"),
ValueDef(key = "err", type = arrayOf(NUMBER, STRING), info = "error of the value")
ValueDef(key = "value", type = arrayOf(NUMBER, STRING), info = "Value or function to multiply count rate"),
ValueDef(key = "err", type = arrayOf(NUMBER, STRING), info = "error of the value")
)
private fun makeCorrection(corrMeta: Meta): Correction {
val expr = corrMeta.getString("value")
val errExpr = corrMeta.getString("err", "")
return object : Correction {
override val name = corrMeta.getString("name", corrMeta.name)
override fun corr(point: Values): Double {
return pointExpression(expr, point)
}
override fun corrErr(point: Values): Double {
return if (errExpr.isEmpty()) {
0.0
} else {
pointExpression(errExpr, point)
}
}
override fun hasError(): Boolean {
return !errExpr.isEmpty()
}
val name = corrMeta.getString("name", corrMeta.name)
return if (corrMeta.hasMeta("table")) {
val x = corrMeta.getValue("table.u").list.map { it.double }
val corr = corrMeta.getValue("table.corr").list.map { it.double }
TableCorrection(name, x, corr)
} else {
val expr = corrMeta.getString("value")
val errExpr = corrMeta.getString("err", "")
ExpressionCorrection(name, expr, errExpr)
}
}
private interface Correction : Named {
interface Correction : Named {
/**
* correction coefficient
@ -168,4 +178,44 @@ object TransformDataAction : OneToOneAction<Table, Table>("numass.transform", Ta
}
}
/**
 * A [Correction] whose factor (and optional error) are obtained by evaluating
 * string expressions against each data point via [pointExpression].
 *
 * @param name    the correction name used for the generated table column
 * @param expr    expression for the multiplicative correction factor
 * @param errExpr expression for the correction error; empty string means "no error"
 */
class ExpressionCorrection(override val name: String, val expr: String, val errExpr: String) : Correction {
    /** Correction factor: the `value` expression evaluated on [point]. */
    override fun corr(point: Values): Double = pointExpression(expr, point)

    /** Correction error: the `err` expression evaluated on [point], or 0.0 when none is configured. */
    override fun corrErr(point: Values): Double =
        if (errExpr.isEmpty()) 0.0 else pointExpression(errExpr, point)

    /** An error estimate exists only when a non-empty `err` expression was supplied. */
    override fun hasError(): Boolean = errExpr.isNotEmpty()
}
/**
 * A [Correction] whose factor is looked up in a table of (x, y) pairs, where x is
 * compared against the point's "voltage" value.
 *
 * NOTE(review): [yErr] is accepted but currently unused — the error overrides below
 * are commented out, so the interface defaults apply. Also note the commented
 * `hasError() = yErr.isNullOrEmpty()` looks inverted: an error should exist when
 * yErr is NOT null/empty — confirm before re-enabling.
 *
 * @param name the correction name used for the generated table column
 * @param x    table abscissa values (voltages), assumed ascending — TODO confirm
 * @param y    correction factors corresponding to [x]
 * @param yErr optional errors for [y]; currently ignored (see note above)
 */
class TableCorrection(
override val name: String,
val x: List<Double>,
val y: List<Double>,
val yErr: List<Double>? = null
) : Correction {
// Step-function lookup: returns the y of the first table node strictly above the
// point's voltage, or the last y when the voltage exceeds the whole table.
override fun corr(point: Values): Double {
val voltage = point.getDouble("voltage")
val index = x.indexOfFirst { it > voltage }
//TODO add interpolation
return if (index < 0) {
y.last()
} else {
y[index]
}
}
//
// override fun corrErr(point: Values): Double = 0.0
//
// override fun hasError(): Boolean = yErr.isNullOrEmpty()
}
}

View File

@ -18,10 +18,11 @@ package inr.numass.data
import hep.dataforge.configure
import hep.dataforge.context.Context
import hep.dataforge.context.Global
import hep.dataforge.meta.KMetaBuilder
import hep.dataforge.meta.buildMeta
import hep.dataforge.nullable
import hep.dataforge.plots.PlotFrame
import hep.dataforge.plots.PlotGroup
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.output.plotFrame
import hep.dataforge.tables.Adapters
@ -30,20 +31,24 @@ import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.analyzers.withBinning
import inr.numass.data.api.NumassBlock
fun NumassBlock.plotAmplitudeSpectrum(plotName: String = "spectrum", frameName: String = "", context: Context = Global, metaAction: KMetaBuilder.() -> Unit = {}) {
val meta = buildMeta("meta", metaAction)
fun PlotGroup.plotAmplitudeSpectrum(
numassBlock: NumassBlock,
plotName: String = "spectrum",
analyzer: NumassAnalyzer = SmartAnalyzer(),
metaBuilder: KMetaBuilder.() -> Unit = {}
) {
val meta = buildMeta("meta", metaBuilder)
val binning = meta.getInt("binning", 20)
val lo = meta.optNumber("window.lo").nullable?.toInt()
val up = meta.optNumber("window.up").nullable?.toInt()
val data = SmartAnalyzer().getAmplitudeSpectrum(this, meta.getMetaOrEmpty("spectrum")).withBinning(binning, lo, up)
context.plotFrame(plotName) {
val data = analyzer.getAmplitudeSpectrum(numassBlock, meta).withBinning(binning, lo, up)
apply {
val valueAxis = if (meta.getBoolean("normalize", false)) {
NumassAnalyzer.COUNT_RATE_KEY
} else {
NumassAnalyzer.COUNT_KEY
}
plots.configure {
configure {
"connectionType" to "step"
"thickness" to 2
"showLine" to true
@ -52,11 +57,31 @@ fun NumassBlock.plotAmplitudeSpectrum(plotName: String = "spectrum", frameName:
}.setType<DataPlot>()
val plot = DataPlot.plot(
plotName,
data,
Adapters.buildXYAdapter(NumassAnalyzer.CHANNEL_KEY, valueAxis)
plotName,
data,
Adapters.buildXYAdapter(NumassAnalyzer.CHANNEL_KEY, valueAxis)
)
plot.configure(meta)
add(plot)
}
}
/**
 * Convenience overload: plot the amplitude spectrum of [numassBlock] into this
 * frame by delegating to the [PlotGroup] extension on the frame's `plots`.
 */
fun PlotFrame.plotAmplitudeSpectrum(
numassBlock: NumassBlock,
plotName: String = "spectrum",
analyzer: NumassAnalyzer = SmartAnalyzer(),
metaBuilder: KMetaBuilder.() -> Unit = {}
) = plots.plotAmplitudeSpectrum(numassBlock, plotName, analyzer, metaBuilder)
/**
 * Convenience overload: create (or reuse) the output frame [frameName] in this
 * context and plot the amplitude spectrum of [numassBlock] into it.
 * By default the frame name follows the plot name.
 */
fun Context.plotAmplitudeSpectrum(
numassBlock: NumassBlock,
plotName: String = "spectrum",
frameName: String = plotName,
analyzer: NumassAnalyzer = SmartAnalyzer(),
metaAction: KMetaBuilder.() -> Unit = {}
) {
plotFrame(frameName) {
plotAmplitudeSpectrum(numassBlock, plotName, analyzer, metaAction)
}
}

View File

@ -17,14 +17,16 @@
package inr.numass.data.analyzers
import hep.dataforge.meta.Meta
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.Table
import hep.dataforge.tables.TableFormat
import hep.dataforge.values.Value
import hep.dataforge.values.ValueMap
import hep.dataforge.values.ValueType
import hep.dataforge.values.Values
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassEvent
import inr.numass.data.api.SignalProcessor
import java.util.stream.Stream
import inr.numass.data.ChernovProcessor
import inr.numass.data.api.*
import inr.numass.utils.ExpressionUtils
/**
* An analyzer dispatcher which uses different analyzer for different meta
@ -41,7 +43,7 @@ class SmartAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proce
"simple" -> simpleAnalyzer
"time" -> timeAnalyzer
"debunch" -> debunchAnalyzer
else -> throw IllegalArgumentException("Analyzer not found")
else -> throw IllegalArgumentException("Analyzer ${config.getString("type")} not found")
}
} else {
if (config.hasValue("t0") || config.hasMeta("t0")) {
@ -59,7 +61,7 @@ class SmartAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proce
return ValueMap(map)
}
override fun getEvents(block: NumassBlock, meta: Meta): Stream<NumassEvent> {
override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
return getAnalyzer(meta).getEvents(block, meta)
}
@ -69,4 +71,33 @@ class SmartAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proce
} else super.getTableFormat(config)
}
/**
 * Analyze every point of [set], allowing the `window.lo`/`window.up` bounds in
 * [config] to be either plain numbers or string expressions of the point
 * voltage `U`, so the analysis window can track the HV setting per point.
 */
override fun analyzeSet(set: NumassSet, config: Meta): Table {
// Interpret this value as a window bound: a NUMBER is used as-is; a STRING is
// evaluated as an expression with the point voltage bound to the variable `U`.
fun Value.computeExpression(point: NumassPoint): Int {
return when {
this.type == ValueType.NUMBER -> this.int
this.type == ValueType.STRING -> {
val exprParams = HashMap<String, Any>()
exprParams["U"] = point.voltage
ExpressionUtils.function(this.string, exprParams).toInt()
}
else -> error("Can't interpret $type as expression or number")
}
}
// Raw window bounds; may be numeric constants or U-dependent expressions.
val lo = config.getValue("window.lo",0)
val up = config.getValue("window.up", Int.MAX_VALUE)
val format = getTableFormat(config)
// Re-evaluate the window for each point so expression bounds follow the voltage.
return ListTable.Builder(format)
.rows(set.points.map { point ->
val newConfig = config.builder.apply{
setValue("window.lo", lo.computeExpression(point))
setValue("window.up", up.computeExpression(point))
}
analyzeParent(point, newConfig)
})
.build()
}
}

View File

@ -32,7 +32,7 @@ fun main(args: Array<String>) {
}
val point = SimpleNumassPoint(blocks, 10000.0)
val point = SimpleNumassPoint.build(blocks, 10000.0)
val meta = buildMeta {
"t0" to 1e7

View File

@ -25,10 +25,9 @@ import inr.numass.data.api.NumassEvent
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassDirectory
import org.apache.commons.math3.stat.correlation.PearsonsCorrelation
import java.util.stream.Stream
private fun correlation(sequence: Stream<NumassEvent>): Double {
private fun correlation(sequence: List<NumassEvent>): Double {
val amplitudes: MutableList<Double> = ArrayList()
val times: MutableList<Double> = ArrayList()
sequence.forEach {

View File

@ -108,7 +108,7 @@ fun main(args: Array<String>) {
.filter { pair -> pair.second <= t0 }
.map { it.first }
val pileupSpectrum = getAmplitudeSpectrum(sequence, point.length.toMillis().toDouble() / 1000.0).withBinning(20)
val pileupSpectrum = sequence.getAmplitudeSpectrum(point.length.toMillis().toDouble() / 1000.0).withBinning(20)
group.add(DataPlot.plot("pileup", pileupSpectrum, AMPLITUDE_ADAPTER))

View File

@ -110,7 +110,7 @@ fun main(args: Array<String>) {
.filter { pair -> pair.second <= t0 }
.map { it.first }
val pileupSpectrum = getAmplitudeSpectrum(sequence, point.length.toMillis().toDouble() / 1000.0).withBinning(20)
val pileupSpectrum = sequence.getAmplitudeSpectrum(point.length.toMillis().toDouble() / 1000.0).withBinning(20)
group.add(DataPlot.plot("pileup", pileupSpectrum, AMPLITUDE_ADAPTER))

View File

@ -23,7 +23,7 @@ fun main(args: Array<String>) {
}
//val rootDir = File("D:\\Work\\Numass\\data\\2017_05\\Fill_2")
val storage = NumassDirectory.read(context, "Fill_3_events") ?: error("Storage not found")
val storage = NumassDirectory.read(context, "Fill_3") ?: error("Storage not found")
val analyzer = SmartAnalyzer()
@ -34,7 +34,7 @@ fun main(args: Array<String>) {
// "window.up" to 1600
}
val set = storage.provide("set_58", NumassSet::class.java).nullable ?: error("Set does not exist")
val set = storage.provide("set_4", NumassSet::class.java).nullable ?: error("Set does not exist")
val frame = displayChart("slices").apply {
plots.setType<DataPlot>()

View File

@ -14,7 +14,7 @@ fun main(args: Array<String>) {
//val meta = workspace.getTarget("group_3")
val result = workspace.runTask("scansum", "group_3").first().get()
val result = workspace.runTask("fit", "group_5").first().get()
println("Complete!")
}

View File

@ -0,0 +1,20 @@
package inr.numass.scripts.analysis
import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.workspace.FileBasedWorkspace
import java.io.File
// Script: run the "fit" task for target "group_5" from a Groovy workspace file,
// with result caching disabled, routing output through the JavaFX output manager.
fun main(args: Array<String>) {
// Route all output (plots, logs, tables) to the JavaFX output manager.
FXOutputManager().startGlobal()
val configPath = File("D:\\Work\\Numass\\sterile2017_05_frames\\workspace.groovy").toPath()
val workspace = FileBasedWorkspace.build(Global, configPath)
// Disable task caching so the fit is recomputed from scratch on each run.
workspace.context.setValue("cache.enabled", false)
//val meta = workspace.getTarget("group_3")
// first().get() blocks until the first datum of the task result is computed.
val result = workspace.runTask("fit", "group_5").first().get()
println("Complete!")
}

View File

@ -0,0 +1,57 @@
package inr.numass.scripts.models
import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.meta.buildMeta
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import hep.dataforge.stat.fit.ParamSet
import hep.dataforge.step
import inr.numass.NumassPlugin
import inr.numass.displayChart
import inr.numass.models.NBkgSpectrum
import inr.numass.models.sterile.SterileNeutrinoSpectrum
// Script: compare two sterile-neutrino spectra that differ only in the
// resolution tail parameter `resolution.A`, and plot both curves plus their
// relative difference on one chart.
fun main(args: Array<String>) {
NumassPlugin().startGlobal()
JFreeChartPlugin().startGlobal()
Global.output = FXOutputManager()
// Fit-parameter set shared by both spectra (values/errors as configured below).
val params = ParamSet().apply {
setPar("N", 8e5, 6.0, 0.0, Double.POSITIVE_INFINITY)
setPar("bkg", 2.0, 0.03)
setPar("E0", 18575.0, 1.0)
setPar("mnu2", 0.0, 1.0)
setParValue("msterile2", (1000 * 1000).toDouble())
setPar("U2", 0.0, 1e-3)
setPar("X", 0.0, 0.01)
setPar("trap", 1.0, 0.01)
}
// Spectrum with a non-zero resolution tail parameter.
val meta1 = buildMeta {
"resolution.A" to 8.3e-5
}
val spectrum1 = NBkgSpectrum(SterileNeutrinoSpectrum(Global, meta1))
// Reference spectrum with the tail parameter switched off.
val meta2 = buildMeta {
"resolution.A" to 0
}
val spectrum2 = NBkgSpectrum(SterileNeutrinoSpectrum(Global, meta2))
displayChart("compare").apply {
// Evaluate both spectra on a 100 V grid over the working range.
val x = (14000.0..18600.0).step(100.0).toList()
val y1 = x.map { spectrum1.value(it, params) }
+DataPlot.plot("simple", x.toDoubleArray(), y1.toDoubleArray())
val y2 = x.map { spectrum2.value(it, params) }
+DataPlot.plot("normal", x.toDoubleArray(), y2.toDoubleArray())
// Relative deviation of the first curve from the second.
val dif = x.mapIndexed{ index, _ -> 1 - y1[index]/y2[index] }
+DataPlot.plot("dif", x.toDoubleArray(), dif.toDoubleArray())
}
}

View File

@ -29,7 +29,6 @@ import hep.dataforge.plots.plotData
import hep.dataforge.storage.files.FileStorage
import hep.dataforge.tables.Adapters
import hep.dataforge.tables.filter
import hep.dataforge.tables.sort
import inr.numass.NumassPlugin
import inr.numass.data.NumassDataUtils
import inr.numass.data.api.NumassSet
@ -37,46 +36,47 @@ import inr.numass.data.storage.NumassDirectory
import inr.numass.displayChart
import inr.numass.subthreshold.Threshold
fun main(args: Array<String>) {
fun main() {
val context = buildContext("NUMASS", NumassPlugin::class.java, JFreeChartPlugin::class.java) {
rootDir = "D:\\Work\\Numass\\sterile\\2017_05"
dataDir = "D:\\Work\\Numass\\data\\2017_05"
rootDir = "D:\\Work\\Numass\\sterile\\2017_05_frames"
dataDir = "D:\\Work\\Numass\\data\\2017_05_frames"
output = FXOutputManager() + DirectoryOutput()
}
val storage = NumassDirectory.read(context, "Fill_2") as? FileStorage ?: error("Storage not found")
val storage = NumassDirectory.read(context, "Fill_3") as? FileStorage ?: error("Storage not found")
val meta = buildMeta {
"delta" to -150
"delta" to -300
"method" to "pow"
"t0" to 15e3
// "window.lo" to 400
// "window.up" to 1600
"xLow" to 450
"xHigh" to 700
"upper" to 3100
"binning" to 20
"xLow" to 1000
"xHigh" to 1300
"upper" to 6000
"binning" to 32
//"reference" to 18600
}
val frame = displayChart("correction").apply {
plots.setType<DataPlot>()
}
val sets = (1..18).map { "set_$it" }.map { setName ->
val sets = (1..14).map { "set_$it" }.mapNotNull { setName ->
storage.provide(setName, NumassSet::class.java).nullable
}.filterNotNull()
}
val name = "fill_2[1-18]"
val name = "fill_3[1-14]"
val sum = NumassDataUtils.join(name, sets)
val correctionTable = Threshold.calculateSubThreshold(sum, meta).filter {
it.getDouble("correction") in (1.0..1.2)
}.sort("voltage")
}
frame.plotData("${name}_cor", correctionTable, Adapters.buildXYAdapter("U", "correction"))
frame.plotData("${name}_a", correctionTable, Adapters.buildXYAdapter("U", "a"))
frame.plotData("${name}_beta", correctionTable, Adapters.buildXYAdapter("U", "beta"))
context.output.render(correctionTable,"numass.correction", "fill_2[1-18]")
context.output.render(correctionTable,"numass.correction", name)
}

View File

@ -35,19 +35,24 @@ import inr.numass.subthreshold.Threshold
fun main(args: Array<String>) {
val context = buildContext("NUMASS", NumassPlugin::class.java, JFreeChartPlugin::class.java) {
rootDir = "D:\\Work\\Numass\\sterile\\2017_05"
dataDir = "D:\\Work\\Numass\\data\\2017_05"
rootDir = "D:\\Work\\Numass\\sterile\\2017_05_frames"
dataDir = "D:\\Work\\Numass\\data\\2017_05_frames"
output = FXOutputManager() + DirectoryOutput()
}
val storage = NumassDirectory.read(context, "Fill_2") as? FileStorage ?: error("Storage not found")
val storage = NumassDirectory.read(context, "Fill_3") as? FileStorage ?: error("Storage not found")
val meta = buildMeta {
"delta" to -200
"delta" to -300
"method" to "pow"
"t0" to 15e3
"window.lo" to 400
"window.up" to 1600
// "window.lo" to 400
// "window.up" to 1600
"xLow" to 1000
"xHigh" to 1300
"upper" to 6000
"binning" to 32
//"reference" to 18600
}
val frame = displayChart("correction").apply {

View File

@ -0,0 +1,71 @@
/*
* Copyright 2018 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import hep.dataforge.buildContext
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.io.DirectoryOutput
import hep.dataforge.io.plus
import hep.dataforge.meta.buildMeta
import hep.dataforge.nullable
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import hep.dataforge.plots.plotData
import hep.dataforge.storage.files.FileStorage
import hep.dataforge.tables.Adapters
import inr.numass.NumassPlugin
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassDirectory
import inr.numass.displayChart
import inr.numass.subthreshold.Threshold
// Script: compute sub-threshold correction tables for several sets of storage
// "Fill_3" and plot correction-vs-U for each set on a common chart.
fun main(){
val context = buildContext("NUMASS", NumassPlugin::class.java, JFreeChartPlugin::class.java) {
rootDir = "D:\\Work\\Numass\\sterile\\2017_05"
dataDir = "D:\\Work\\Numass\\data\\2017_05"
output = FXOutputManager() + DirectoryOutput()
}
val storage = NumassDirectory.read(context, "Fill_3") as? FileStorage ?: error("Storage not found")
// Parameters for Threshold.calculateSubThreshold (fit method, window, binning).
val meta = buildMeta {
"delta" to -200
"method" to "pow"
"t0" to 15e3
// "window.lo" to 400
// "window.up" to 1600
"xLow" to 450
"xHigh" to 700
"upper" to 3000
"binning" to 20
//"reference" to 18600
}
val frame = displayChart("correction").apply {
plots.setType<DataPlot>()
}
listOf("set_2", "set_3", "set_4", "set_5").forEach { setName ->
val set = storage.provide(setName, NumassSet::class.java).nullable ?: error("Set does not exist")
// Keep only physically sensible correction factors (between 1.0 and 1.2).
val correctionTable = Threshold.calculateSubThreshold(set, meta).filter {
it.getDouble("correction") in (1.0..1.2)
}
frame.plotData(setName, correctionTable, Adapters.buildXYAdapter("U", "correction"))
}
}

View File

@ -0,0 +1,72 @@
package inr.numass.scripts.timeanalysis
import hep.dataforge.buildContext
import hep.dataforge.data.DataSet
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.meta.buildMeta
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import inr.numass.NumassPlugin
import inr.numass.actions.TimeAnalyzerAction
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.api.SimpleNumassPoint
import inr.numass.data.storage.NumassDirectory
// Script: run TimeAnalyzerAction on points of "set_9" grouped by HV value,
// displaying results interactively; the task is cancelled on Enter.
fun main(args: Array<String>) {
val context = buildContext("NUMASS", NumassPlugin::class.java, JFreeChartPlugin::class.java) {
output = FXOutputManager()
rootDir = "D:\\Work\\Numass\\sterile2018_04"
dataDir = "D:\\Work\\Numass\\data\\2018_04"
}
val storage = NumassDirectory.read(context, "Fill_3")!!
// Configuration of the time-spectrum action and the underlying analyzer.
val meta = buildMeta {
"binNum" to 200
//"chunkSize" to 10000
// "mean" to TimeAnalyzer.AveragingMethod.ARITHMETIC
//"separateParallelBlocks" to true
"t0" to {
"step" to 320
}
"analyzer" to {
"t0" to 16000
"window" to {
"lo" to 450
"up" to 1900
}
}
//"plot.showErrors" to false
}
val loader = storage.provide("set_9",NumassSet::class.java).get()
// HV values to process; each HV becomes one merged point in the data node.
val hvs = listOf(14000.0)//, 15000d, 15200d, 15400d, 15600d, 15800d]
//listOf(18500.0, 18600.0, 18995.0, 19000.0)
val data = DataSet.edit(NumassPoint::class).apply {
hvs.forEach { hv ->
val points = loader.points.filter {
it.voltage == hv
}.toList()
if (!points.isEmpty()) {
putStatic(
"point_${hv.toInt()}",
SimpleNumassPoint.build(points, hv)
)
}
}
}.build()
val result = TimeAnalyzerAction.run(context, data, meta);
// Start the computation, then wait for user input before cancelling it.
result.nodeGoal().run()
readLine()
println("Canceling task")
result.nodeGoal().cancel()
}

View File

@ -8,7 +8,6 @@ import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import inr.numass.NumassPlugin
import inr.numass.actions.TimeAnalyzerAction
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.api.SimpleNumassPoint
@ -18,28 +17,33 @@ fun main(args: Array<String>) {
val context = buildContext("NUMASS", NumassPlugin::class.java, JFreeChartPlugin::class.java) {
output = FXOutputManager()
rootDir = "D:\\Work\\Numass\\sterile2017_05"
dataDir = "D:\\Work\\Numass\\data\\2017_05"
rootDir = "D:\\Work\\Numass\\sterile2017_05_frames"
dataDir = "D:\\Work\\Numass\\data\\2017_05_frames"
}
val storage = NumassDirectory.read(context, "Fill_2")!!
val storage = NumassDirectory.read(context, "Fill_3")!!
val meta = buildMeta {
"t0" to 3000
"binNum" to 200
"t0Step" to 100
"chunkSize" to 3000
"mean" to TimeAnalyzer.AveragingMethod.ARITHMETIC
//"chunkSize" to 10000
// "mean" to TimeAnalyzer.AveragingMethod.ARITHMETIC
//"separateParallelBlocks" to true
"window" to {
"lo" to 0
"up" to 4000
"t0" to {
"step" to 320
}
"analyzer" to {
"t0" to 16000
"window" to {
"lo" to 1500
"up" to 7000
}
}
//"plot.showErrors" to false
}
//def sets = ((2..14) + (22..31)).collect { "set_$it" }
val sets = (2..12).map { "set_$it" }
val sets = (11..11).map { "set_$it" }
//def sets = (16..31).collect { "set_$it" }
//def sets = (20..28).collect { "set_$it" }
@ -49,7 +53,7 @@ fun main(args: Array<String>) {
val all = NumassDataUtils.join("sum", loaders)
val hvs = listOf(14500.0)//, 15000d, 15200d, 15400d, 15600d, 15800d]
val hvs = listOf(14000.0)//, 15000d, 15200d, 15400d, 15600d, 15800d]
//listOf(18500.0, 18600.0, 18995.0, 19000.0)
val data = DataSet.edit(NumassPoint::class).apply {
@ -58,10 +62,7 @@ fun main(args: Array<String>) {
it.voltage == hv && it.channel == 0
}.toList()
if (!points.isEmpty()) {
putStatic(
"point_${hv.toInt()}",
SimpleNumassPoint(points, hv)
)
putStatic("point_${hv.toInt()}", SimpleNumassPoint.build(points, hv))
}
}
}.build()

View File

@ -39,7 +39,7 @@ fun main(args: Array<String>) {
.generateBlock(start.plusNanos(it * length), length)
}
}.join(Global) { blocks ->
SimpleNumassPoint(blocks, 12000.0)
SimpleNumassPoint.build(blocks, 12000.0)
}.get()

View File

@ -59,7 +59,7 @@ fun main(args: Array<String>) {
.generateBlock(start.plusNanos(it * length), length)
}
}.join(Global) { blocks ->
SimpleNumassPoint(blocks, 18000.0)
SimpleNumassPoint.build(blocks, 18000.0)
}.get()

View File

@ -0,0 +1,90 @@
package inr.numass.scripts.timeanalysis
import hep.dataforge.buildContext
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.meta.buildMeta
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import inr.numass.NumassPlugin
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassSet
import inr.numass.data.storage.NumassDirectory
// Debug script: analyze one point of "set_10" at 18050 V with TimeAnalyzer and
// print event counts plus the summed positive inter-event delays of the first
// block. Large commented regions below are earlier experiments kept for reference.
fun main(args: Array<String>) {
val context = buildContext("NUMASS", NumassPlugin::class.java, JFreeChartPlugin::class.java) {
output = FXOutputManager()
rootDir = "D:\\Work\\Numass\\sterile2017_05"
dataDir = "D:\\Work\\Numass\\data\\2017_05"
}
val storage = NumassDirectory.read(context, "Fill_3")!!
val loader = storage.provide("set_10", NumassSet::class.java).get()
val point = loader.getPoints(18050.00).first()
val analyzer = TimeAnalyzer()
val meta = buildMeta("analyzer") {
"t0" to 3000
"inverted" to false
//"chunkSize" to 5000
//"mean" to TimeAnalyzer.AveragingMethod.ARITHMETIC
}
println(analyzer.analyze(point, meta))
println(analyzer.getEventsWithDelay(point.firstBlock, meta ).count())
println(point.events.count())
println(point.firstBlock.events.count())
// val time = point.events.asSequence().zipWithNext().map { (p, n) ->
// n.timeOffset - p.timeOffset
// }.filter { it > 0 }.sum()
// Sum of positive inter-event delays (second element of each pair), in ns.
val time = analyzer.getEventsWithDelay(point.firstBlock, meta ).map { it.second }.filter { it > 0 }.sum()
// val totalN = AtomicLong(0)
// val totalT = AtomicLong(0)
//
// analyzer.getEventsWithDelay(point.firstBlock, meta ).filter { pair -> pair.second >= 3000 }
// .forEach { pair ->
// totalN.incrementAndGet()
// //TODO add progress listener here
// totalT.addAndGet(pair.second)
// }
//
// val time = totalT.get()
// Print the total delay time converted from nanoseconds to seconds.
println(time / 1e9)
//
// val cr = 80.0
// val length = 5e9.toLong()
// val num = 6
// val dt = 6.5
//
// val start = Instant.now()
//
// val generator = SynchronizedRandomGenerator(JDKRandomGenerator(2223))
//
// repeat(100) {
//
// val point = (1..num).map {
// Global.generate {
// NumassGenerator
// .generateEvents(cr , rnd = generator)
//// .withDeadTime { (dt * 1000).toLong() }
// .generateBlock(start.plusNanos(it * length), length)
// }
// }.join(Global) { blocks ->
// SimpleNumassPoint.build(blocks, 12000.0)
// }.get()
//
//
// println(analyzer.analyze(point, meta))
//
// }
}

View File

@ -1,5 +1,8 @@
package inr.numass.scripts.tristan
import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import inr.numass.data.ProtoNumassPoint
import inr.numass.data.plotAmplitudeSpectrum
import inr.numass.data.transformChain
@ -7,19 +10,22 @@ import kotlinx.coroutines.runBlocking
import java.io.File
fun main(args: Array<String>) {
val file = File("D:\\Work\\Numass\\data\\TRISTAN_11_2017\\df\\gun_16_19.df").toPath()
Global.output = FXOutputManager()
JFreeChartPlugin().startGlobal()
val file = File("D:\\Work\\Numass\\data\\2018_04\\Fill_3\\set_4\\p129(30s)(HV1=13000)").toPath()
val point = ProtoNumassPoint.readFile(file)
point.plotAmplitudeSpectrum()
Global.plotAmplitudeSpectrum(point)
point.blocks.firstOrNull { it.channel == 0 }?.let {
it.plotAmplitudeSpectrum(plotName = "0") {
Global.plotAmplitudeSpectrum(it, plotName = "0") {
"title" to "pixel 0"
"binning" to 50
}
}
point.blocks.firstOrNull { it.channel == 4 }?.let {
it.plotAmplitudeSpectrum(plotName = "4") {
Global.plotAmplitudeSpectrum(it, plotName = "4") {
"title" to "pixel 4"
"binning" to 50
}
@ -29,7 +35,7 @@ fun main(args: Array<String>) {
runBlocking {
listOf(0, 20, 50, 100, 200).forEach { window ->
point.transformChain { first, second ->
Global.plotAmplitudeSpectrum(point.transformChain { first, second ->
val dt = second.timeOffset - first.timeOffset
if (second.channel == 4 && first.channel == 0 && dt > window && dt < 1000) {
Pair((first.amplitude + second.amplitude).toShort(), second.timeOffset)
@ -38,7 +44,7 @@ fun main(args: Array<String>) {
}
}.also {
println("Number of events for $window is ${it.events.count()}")
}.plotAmplitudeSpectrum(plotName = "filtered.before.$window") {
}, plotName = "filtered.before.$window") {
"binning" to 50
}
@ -46,7 +52,7 @@ fun main(args: Array<String>) {
listOf(0, 20, 50, 100, 200).forEach { window ->
point.transformChain { first, second ->
Global.plotAmplitudeSpectrum(point.transformChain { first, second ->
val dt = second.timeOffset - first.timeOffset
if (second.channel == 0 && first.channel == 4 && dt > window && dt < 1000) {
Pair((first.amplitude + second.amplitude).toShort(), second.timeOffset)
@ -55,11 +61,13 @@ fun main(args: Array<String>) {
}
}.also {
println("Number of events for $window is ${it.events.count()}")
}.plotAmplitudeSpectrum(plotName = "filtered.after.$window") {
}, plotName = "filtered.after.$window") {
"binning" to 50
}
}
}
readLine()
}

View File

@ -0,0 +1,51 @@
package inr.numass.scripts.tristan
import hep.dataforge.meta.Meta
import hep.dataforge.meta.value
import hep.dataforge.useValue
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassEvent
/**
 * A [TimeAnalyzer] variant that merges pile-up events: consecutive events closer
 * than `summTime` nanoseconds are summed into a single event carrying the combined
 * amplitude and the timestamp of the first event in the chain.
 */
object TristanAnalyzer : TimeAnalyzer() {
    /**
     * Returns the event list of [block] after pile-up summation.
     *
     * Meta keys:
     *  - `summTime` (int, default 200): delay threshold in ns below which events are summed;
     *  - `allowedChannels` (value list, optional): if present, only events from these channels survive;
     *  - plus whatever [getT0] reads to derive the acceptance threshold t0.
     *
     * NOTE(review): events with `summTime <= time <= t0` match no branch and are
     * silently dropped (the "else ignore" comment); confirm this dead-band is intended.
     */
    override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
        val t0 = getT0(block, meta).toLong()
        val summTime = meta.getInt("summTime", 200) //time limit in nanos for event summation
        var sequence = sequence {
            var last: NumassEvent? = null // previous accepted/seen event; null only before the first event
            var amp: Int = 0              // running amplitude sum of the current pile-up chain
            getEventsWithDelay(block, meta).forEach { (event, time) ->
                when {
                    last == null -> {
                        // First event of the block: remember it.
                        // NOTE(review): `amp` is NOT seeded here, so for the very first
                        // chain the `amp != 0` pile-up test below behaves differently
                        // from later chains — verify this is deliberate.
                        last = event
                    }
                    time < 0 -> error("Can't be") // getEventsWithDelay is expected to yield non-negative delays
                    time < summTime -> {
                        //add to amplitude
                        // NOTE(review): `last` is not advanced here, so the merged event
                        // keeps the timestamp of the chain's first event — TODO confirm.
                        amp += event.amplitude
                    }
                    time > t0 -> {
                        //accept new event and reset summator
                        if (amp != 0) {
                            //construct new event with pileup
                            yield(NumassEvent(amp.toShort(), last!!.timeOffset, last!!.owner))
                        } else {
                            //yield event without changes if there is no pileup
                            yield(last!!)
                        }
                        last = event
                        amp = event.amplitude.toInt() // start a new chain from this event's amplitude
                    }
                    //else ignore event
                }
            }
        }
        // Optional channel filter: keep only events whose channel is listed.
        meta.useValue("allowedChannels"){
            val list = it.list.map { it.int }
            sequence = sequence.filter { it.channel in list }
        }
        // NOTE(review): the last pending `last` event is never yielded after the loop ends,
        // so the final event (or chain) of each block is dropped — verify intent.
        return sequence.toList()
    }
}

View File

@ -1,5 +1,8 @@
package inr.numass.scripts.tristan
import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import inr.numass.data.ProtoNumassPoint
import inr.numass.data.api.MetaBlock
import inr.numass.data.api.NumassBlock
@ -18,11 +21,17 @@ private fun NumassPoint.getChannels(): Map<Int, NumassBlock> {
}
}
fun main(args: Array<String>) {
val file = File("D:\\Work\\Numass\\data\\17kV\\processed.df").toPath()
fun main() {
val file = File("D:\\Work\\Numass\\data\\2018_04\\Fill_3\\set_4\\p129(30s)(HV1=13000)").toPath()
val point = ProtoNumassPoint.readFile(file)
println(point.meta)
point.getChannels().forEach{ num, block ->
block.plotAmplitudeSpectrum(plotName = num.toString())
Global.output = FXOutputManager()
JFreeChartPlugin().startGlobal()
point.getChannels().forEach{ (num, block) ->
Global.plotAmplitudeSpectrum(numassBlock = block, plotName = num.toString())
}
readLine()
}

View File

@ -0,0 +1,41 @@
package inr.numass.scripts.tristan
import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.fx.plots.group
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import hep.dataforge.plots.output.plotFrame
import inr.numass.data.plotAmplitudeSpectrum
import inr.numass.data.storage.readNumassSet
import java.io.File
/**
 * Compares amplitude spectra produced by [TristanAnalyzer] with pile-up summation
 * enabled ("cut", summTime = 200 ns) and disabled ("uncut", summTime = 0) for a
 * fixed list of HV points of one set, plotting both in a shared frame.
 */
fun main() {
    Global.output = FXOutputManager()
    JFreeChartPlugin().startGlobal()

    val setPath = File("D:\\Work\\Numass\\data\\2018_04\\Fill_3\\set_36").toPath()
    val dataSet = Global.readNumassSet(setPath)

    // HV values of the points to compare.
    val voltages = listOf(12000.0, 13000.0, 14000.0, 14900.0)

    Global.plotFrame("compare") {
        for (hv in voltages) {
            val point = dataSet.optPoint(hv).get()
            group("${dataSet.name}/p${point.index}[${point.voltage}]") {
                // Spectrum with event summation active.
                plotAmplitudeSpectrum(point, "cut", analyzer = TristanAnalyzer) {
                    // "t0" to 3e3
                    "summTime" to 200
                    "sortEvents" to true
                    "inverted" to false
                }
                // Reference spectrum: summation window of zero disables merging.
                plotAmplitudeSpectrum(point, "uncut", analyzer = TristanAnalyzer) {
                    "summTime" to 0
                    "sortEvents" to true
                    "inverted" to false
                }
            }
        }
    }
    // Keep the JavaFX windows alive until the user presses Enter.
    readLine()
}

View File

@ -0,0 +1,35 @@
package inr.numass.scripts.tristan
import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.fx.plots.group
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import hep.dataforge.plots.output.plotFrame
import inr.numass.data.plotAmplitudeSpectrum
import inr.numass.data.storage.readNumassSet
import java.io.File
/**
 * Plots, for several HV points of one set, the amplitude spectrum with a
 * t0 = 3000 ns dead-time cut ("cut") next to the raw spectrum ("uncut").
 */
fun main() {
    Global.output = FXOutputManager()
    JFreeChartPlugin().startGlobal()

    val setPath = File("D:\\Work\\Numass\\data\\2018_04\\Fill_3\\set_36").toPath()
    val dataSet = Global.readNumassSet(setPath)

    // HV values of the points to compare.
    val voltages = listOf(12000.0, 13000.0, 14000.0, 14900.0)

    Global.plotFrame("compare") {
        for (hv in voltages) {
            val point = dataSet.optPoint(hv).get()
            group("${dataSet.name}/p${point.index}[${point.voltage}]") {
                // Spectrum with the dead-time cut applied.
                plotAmplitudeSpectrum(point, "cut") {
                    "t0" to 3e3
                    "sortEvents" to true
                }
                // Raw spectrum for reference.
                plotAmplitudeSpectrum(point, "uncut")
            }
        }
    }
    // Keep the JavaFX windows alive until the user presses Enter.
    readLine()
}

View File

@ -0,0 +1,36 @@
package inr.numass.scripts.tristan
import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.fx.plots.group
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import hep.dataforge.plots.output.plotFrame
import inr.numass.data.plotAmplitudeSpectrum
import inr.numass.data.storage.readNumassSet
import java.io.File
/**
 * Like the set-wide comparison, but restricted to channel 0 of each point:
 * plots the channel-0 amplitude spectrum with a t0 = 3000 ns cut ("cut")
 * against the raw channel-0 spectrum ("uncut").
 */
fun main() {
    Global.output = FXOutputManager()
    JFreeChartPlugin().startGlobal()

    val setPath = File("D:\\Work\\Numass\\data\\2018_04\\Fill_3\\set_36").toPath()
    val dataSet = Global.readNumassSet(setPath)

    // HV values of the points to compare.
    val voltages = listOf(12000.0, 13000.0, 14000.0, 14900.0)

    Global.plotFrame("compare") {
        for (hv in voltages) {
            val point = dataSet.optPoint(hv).get()
            // Channel 0 is assumed to exist for every selected point.
            val block = point.channel(0)!!
            group("${dataSet.name}/p${point.index}[${point.voltage}]") {
                // Channel-0 spectrum with the dead-time cut applied.
                plotAmplitudeSpectrum(block, "cut") {
                    "t0" to 3e3
                    "sortEvents" to true
                }
                // Raw channel-0 spectrum for reference.
                plotAmplitudeSpectrum(block, "uncut")
            }
        }
    }
    // Keep the JavaFX windows alive until the user presses Enter.
    readLine()
}

View File

@ -0,0 +1,2 @@
package inr.numass.scripts.utils

View File

@ -6,17 +6,16 @@ import hep.dataforge.data.DataNode
import hep.dataforge.data.DataSet
import hep.dataforge.meta.Meta
import hep.dataforge.meta.buildMeta
import hep.dataforge.nullable
import hep.dataforge.storage.Storage
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.Table
import hep.dataforge.toList
import hep.dataforge.values.ValueMap
import hep.dataforge.values.Values
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.*
import inr.numass.data.analyzers.NumassAnalyzer.Companion.CHANNEL_KEY
import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_KEY
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.analyzers.withBinning
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.api.SimpleNumassPoint
@ -27,7 +26,9 @@ import org.apache.commons.math3.analysis.ParametricUnivariateFunction
import org.apache.commons.math3.exception.DimensionMismatchException
import org.apache.commons.math3.fitting.SimpleCurveFitter
import org.apache.commons.math3.fitting.WeightedObservedPoint
import org.slf4j.LoggerFactory
import java.util.stream.Collectors
import java.util.stream.StreamSupport
object Threshold {
@ -37,13 +38,13 @@ object Threshold {
//creating storage instance
val storage = NumassDirectory.read(context, meta.getString("data.dir")) as Storage
fun Storage.loaders(): Sequence<NumassDataLoader>{
fun Storage.loaders(): Sequence<NumassDataLoader> {
return sequence<NumassDataLoader> {
print("Reading ${this@loaders.fullName}")
runBlocking { this@loaders.children }.forEach {
if(it is NumassDataLoader){
if (it is NumassDataLoader) {
yield(it)
} else if (it is Storage){
} else if (it is Storage) {
yieldAll(it.loaders())
}
}
@ -53,19 +54,19 @@ object Threshold {
//Reading points
//Free operation. No reading done
val sets = storage.loaders()
.filter { it.fullName.toString().matches(meta.getString("data.mask").toRegex()) }
.filter { it.fullName.toString().matches(meta.getString("data.mask").toRegex()) }
val analyzer = TimeAnalyzer();
val data = DataSet.edit(NumassPoint::class).also { dataBuilder ->
sets.sortedBy { it.startTime }
.flatMap { set -> set.points.asSequence() }
.groupBy { it.voltage }
.forEach { key, value ->
val point = SimpleNumassPoint(value, key)
val name = key.toInt().toString()
dataBuilder.putStatic(name, point, buildMeta("meta", "voltage" to key));
}
.flatMap { set -> set.points.asSequence() }
.groupBy { it.voltage }
.forEach { key, value ->
val point = SimpleNumassPoint.build(value, key)
val name = key.toInt().toString()
dataBuilder.putStatic(name, point, buildMeta("meta", "voltage" to key));
}
}.build()
return data.pipe(context, meta) {
@ -92,13 +93,14 @@ object Threshold {
// )
return binned.rows
.map {
WeightedObservedPoint(
1.0,//1d / p.getValue() , //weight
it.getDouble(CHANNEL_KEY), // x
it.getDouble(COUNT_RATE_KEY) / binning) //y
}
.collect(Collectors.toList())
.map {
WeightedObservedPoint(
1.0,//1d / p.getValue() , //weight
it.getDouble(CHANNEL_KEY), // x
it.getDouble(COUNT_RATE_KEY) / binning
) //y
}
.collect(Collectors.toList())
}
private fun norm(spectrum: Table, xLow: Int, upper: Int): Double {
@ -134,11 +136,10 @@ object Threshold {
}
/**
* Exponential function $a e^{\frac{x}{\sigma}}$
*/
private fun exponential(spectrum: Table, voltage: Double, config: Meta): Values {
private fun exponential(point: NumassPoint, spectrum: Table, config: Meta): Values {
val xLow: Int = config.getInt("xLow", 400)
val xHigh: Int = config.getInt("xHigh", 700)
val upper: Int = config.getInt("upper", 3100)
@ -151,10 +152,11 @@ object Threshold {
val norm = norm(spectrum, xLow, upper)
return ValueMap.ofPairs(
"U" to voltage,
"a" to a,
"sigma" to sigma,
"correction" to a * sigma * Math.exp(xLow / sigma) / norm + 1.0
"index" to point.index,
"U" to point.voltage,
"a" to a,
"sigma" to sigma,
"correction" to a * sigma * Math.exp(xLow / sigma) / norm + 1.0
)
}
@ -175,14 +177,14 @@ object Threshold {
val delta = shift ?: parameters[2]
return if (parameters.size > 2) {
doubleArrayOf(
Math.pow(x - delta, beta),
a * Math.pow(x - delta, beta) * Math.log(x - delta),
-a * beta * Math.pow(x - delta, beta - 1)
Math.pow(x - delta, beta),
a * Math.pow(x - delta, beta) * Math.log(x - delta),
-a * beta * Math.pow(x - delta, beta - 1)
)
} else {
doubleArrayOf(
Math.pow(x - delta, beta),
a * Math.pow(x - delta, beta) * Math.log(x - delta)
Math.pow(x - delta, beta),
a * Math.pow(x - delta, beta) * Math.log(x - delta)
)
}
}
@ -191,7 +193,7 @@ object Threshold {
/**
* Power function $a (x-\delta)^{\beta}
*/
private fun power(spectrum: Table, voltage: Double, config: Meta): Values {
private fun power(point: NumassPoint, spectrum: Table, config: Meta): Values {
val xLow: Int = config.getInt("xLow", 400)
val xHigh: Int = config.getInt("xHigh", 700)
val upper: Int = config.getInt("upper", 3100)
@ -208,27 +210,51 @@ object Threshold {
val norm = norm(spectrum, xLow, upper)
return ValueMap.ofPairs(
"U" to voltage,
"a" to a,
"beta" to beta,
"delta" to delta,
"correction" to a / (beta + 1) * Math.pow(xLow - delta, beta + 1.0) / norm + 1.0
"index" to point.index,
"U" to point.voltage,
"a" to a,
"beta" to beta,
"delta" to delta,
"correction" to a / (beta + 1) * Math.pow(xLow - delta, beta + 1.0) / norm + 1.0
)
}
fun calculateSubThreshold(spectrum: Table, voltage: Double, config: Meta): Values {
fun calculateSubThreshold(point: NumassPoint, spectrum: Table, config: Meta): Values {
return when (config.getString("method", "exp")) {
"exp" -> exponential(spectrum, voltage, config)
"pow" -> power(spectrum, voltage, config)
"exp" -> exponential(point, spectrum, config)
"pow" -> power(point, spectrum, config)
else -> throw RuntimeException("Unknown sub threshold calculation method")
}
}
fun calculateSubThreshold(set: NumassSet, config: Meta, analyzer: NumassAnalyzer = SmartAnalyzer()): Table {
val reference = config.optNumber("reference").nullable?.let {
set.getPoints(it.toDouble()).firstOrNull() ?: error("Reference point not found")
}?.let {
println("Using reference point ${it.voltage}")
analyzer.getAmplitudeSpectrum(it, config)
}
return ListTable.Builder().apply {
set.forEach{
val spectrum = analyzer.getAmplitudeSpectrum(it,config)
row(calculateSubThreshold(spectrum,it.voltage,config))
StreamSupport.stream(set.spliterator(), true).map { point ->
LoggerFactory.getLogger(Threshold.javaClass).info("Starting point ${point.voltage}")
val spectrum = analyzer.getAmplitudeSpectrum(point, config).let {
if (reference == null) {
it
} else {
subtractAmplitudeSpectrum(it, reference)
}
}
LoggerFactory.getLogger(Threshold.javaClass).info("Calculating threshold ${point.voltage}")
return@map try {
calculateSubThreshold(point, spectrum, config)
} catch (ex: Exception) {
LoggerFactory.getLogger(Threshold.javaClass).error("Failed to fit point ${point.voltage}", ex)
null
}
}.toList().filterNotNull().forEach {
println(it.toString())
row(it)
}
}.build()
}

View File

@ -1,14 +1,9 @@
package inr.numass.tasks
import hep.dataforge.configure
import hep.dataforge.data.CustomDataFilter
import hep.dataforge.data.DataSet
import hep.dataforge.data.DataTree
import hep.dataforge.data.DataUtils
import hep.dataforge.data.*
import hep.dataforge.io.output.stream
import hep.dataforge.io.render
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaUtils
import hep.dataforge.meta.buildMeta
import hep.dataforge.nullable
import hep.dataforge.plots.data.DataPlot
@ -22,19 +17,21 @@ import hep.dataforge.stat.fit.FitResult
import hep.dataforge.stat.models.XYModel
import hep.dataforge.tables.*
import hep.dataforge.useMeta
import hep.dataforge.useValue
import hep.dataforge.values.ValueType
import hep.dataforge.values.Values
import hep.dataforge.values.asValue
import hep.dataforge.values.edit
import hep.dataforge.workspace.tasks.task
import inr.numass.NumassUtils
import inr.numass.actions.MergeDataAction
import inr.numass.actions.MergeDataAction.MERGE_NAME
import inr.numass.actions.TransformDataAction
import inr.numass.addSetMarkers
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.NumassAnalyzer.Companion.CHANNEL_KEY
import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_KEY
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.analyzers.countInWindow
import inr.numass.data.api.MetaBlock
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
@ -49,10 +46,17 @@ import java.util.stream.StreamSupport
import kotlin.collections.HashMap
import kotlin.collections.set
private val filterForward = DataFilter.byMetaValue("iteration_info.reverse") {
!(it?.boolean ?: false)
}
private val filterReverse = DataFilter.byMetaValue("iteration_info.reverse") {
it?.boolean ?: false
}
val selectTask = task("select") {
descriptor {
info = "Select data from initial data pool"
value("forward", types = listOf(ValueType.BOOLEAN), info = "Select only forward or only backward sets")
}
model { meta ->
data("*")
@ -60,9 +64,18 @@ val selectTask = task("select") {
}
transform<NumassSet> { data ->
logger.info("Starting selection from data node with size ${data.size}")
CustomDataFilter(meta).filter(data.checked(NumassSet::class.java)).also {
logger.info("Selected ${it.size} elements")
var res = data.checked(NumassSet::class.java).filter(CustomDataFilter(meta))
meta.useValue("forward") {
res = if (it.boolean) {
res.filter(filterForward)
} else {
res.filter(filterReverse)
}
}
logger.info("Selected ${res.size} elements")
res
}
}
@ -71,41 +84,57 @@ val analyzeTask = task("analyze") {
info = "Count the number of events for each voltage and produce a table with the results"
}
model { meta ->
dependsOn(selectTask, meta);
configure(MetaUtils.optEither(meta, "analyzer", "prepare").orElse(Meta.empty()))
dependsOn(selectTask, meta)
configure {
"analyzer" to meta.getMetaOrEmpty("analyzer")
}
}
pipe<NumassSet, Table> { set ->
SmartAnalyzer().analyzeSet(set, meta).also { res ->
val outputMeta = meta.builder.putNode("data", set.meta)
context.output.render(res, stage = "numass.analyze", name = name, meta = outputMeta)
}
val res = SmartAnalyzer().analyzeSet(set, meta.getMeta("analyzer"))
val outputMeta = meta.builder.putNode("data", set.meta)
context.output.render(res, stage = "numass.analyze", name = name, meta = outputMeta)
return@pipe res
}
}
val monitorTableTask = task("monitor") {
descriptor {
value("showPlot", types = listOf(ValueType.BOOLEAN), info = "Show plot after complete")
value("monitorVoltage", types = listOf(ValueType.NUMBER), info = "The voltage for monitor point")
value("monitorPoint", types = listOf(ValueType.NUMBER), info = "The voltage for monitor point")
}
model { meta ->
dependsOn(selectTask, meta)
// if (meta.getBoolean("monitor.correctForThreshold", false)) {
// dependsOn(subThresholdTask, meta, "threshold")
// }
configure(meta.getMetaOrEmpty("monitor"))
configure {
meta.useMeta("analyzer") { putNode(it) }
setValue("@target", meta.getString("@target", meta.name))
}
}
join<NumassSet, Table> { data ->
val monitorVoltage = meta.getDouble("monitorVoltage", 16000.0);
val monitorVoltage = meta.getDouble("monitorPoint", 16000.0);
val analyzer = SmartAnalyzer()
val analyzerMeta = meta.getMetaOrEmpty("analyzer")
//val thresholdCorrection = da
//TODO add separator labels
val res = ListTable.Builder("timestamp", "count", "cr", "crErr")
.rows(
data.values.stream().parallel()
.flatMap { it.points.stream() }
.filter { it.voltage == monitorVoltage }
.map { it -> analyzer.analyzeParent(it, analyzerMeta) }
).build()
val res = ListTable.Builder("timestamp", "count", "cr", "crErr", "index", "set")
.rows(
data.values.stream().flatMap { set ->
set.points.stream()
.filter { it.voltage == monitorVoltage }
.parallel()
.map { point ->
analyzer.analyzeParent(point, analyzerMeta).edit {
"index" to point.index
"set" to set.name
}
}
}
).build()
if (meta.getBoolean("showPlot", true)) {
val plot = DataPlot.plot(name, res, Adapters.buildXYAdapter("timestamp", "cr", "crErr"))
@ -115,10 +144,11 @@ val monitorTableTask = task("monitor") {
"yAxis.title" to "Count rate"
"yAxis.units" to "Hz"
}
((context.output["numass.monitor", name] as? PlotOutput)?.frame as? JFreeChartFrame)?.addSetMarkers(data.values)
}
context.output["numass.monitor", name].render(NumassUtils.wrap(res, meta))
context.output.render(res, stage = "numass.monitor", name = name, meta = meta)
return@join res;
}
@ -126,7 +156,7 @@ val monitorTableTask = task("monitor") {
val mergeTask = task("merge") {
model { meta ->
dependsOn(analyzeTask, meta)
dependsOn(transformTask, meta)
configure(meta.getMetaOrEmpty("merge"))
}
action(MergeDataAction)
@ -139,10 +169,10 @@ val mergeEmptyTask = task("empty") {
}
//replace data node by "empty" node
val newMeta = meta.builder
.removeNode("data")
.removeNode("empty")
.setNode("data", meta.getMeta("empty"))
.setValue("merge.$MERGE_NAME", meta.getString("merge.$MERGE_NAME", "") + "_empty");
.removeNode("data")
.removeNode("empty")
.setNode("data", meta.getMeta("empty"))
.setValue("merge.$MERGE_NAME", meta.getString("merge.$MERGE_NAME", "") + "_empty")
dependsOn(mergeTask, newMeta)
}
transform<Table> { data ->
@ -158,13 +188,18 @@ val mergeEmptyTask = task("empty") {
val subtractEmptyTask = task("dif") {
model { meta ->
dependsOn(mergeTask, meta, "data")
dependsOn(mergeEmptyTask, meta, "empty")
if (meta.hasMeta("empty")) {
dependsOn(mergeEmptyTask, meta, "empty")
}
}
transform<Table> { data ->
//ignore if there is no empty data
if (!meta.hasMeta("empty")) return@transform data
val builder = DataTree.edit(Table::class)
val rootNode = data.getCheckedNode("data", Table::class.java)
val empty = data.getCheckedNode("empty", Table::class.java).data
?: throw RuntimeException("No empty data found")
?: throw RuntimeException("No empty data found")
rootNode.visit(Table::class.java) { input ->
val resMeta = buildMeta {
@ -185,34 +220,36 @@ val subtractEmptyTask = task("dif") {
val transformTask = task("transform") {
model { meta ->
if (meta.hasMeta("merge")) {
if (meta.hasMeta("empty")) {
dependsOn(subtractEmptyTask, meta)
} else {
dependsOn(mergeTask, meta);
}
} else {
dependsOn(analyzeTask, meta);
}
dependsOn(analyzeTask, meta)
}
action<Table, Table>(TransformDataAction)
action(TransformDataAction)
}
val filterTask = task("filter") {
model { meta ->
dependsOn(transformTask, meta)
if (meta.hasMeta("merge")) {
dependsOn(subtractEmptyTask, meta)
} else {
dependsOn(analyzeTask, meta)
}
configure(meta.getMetaOrEmpty("filter"))
}
pipe<Table, Table> { data ->
if (meta.hasValue("from") || meta.hasValue("to")) {
if(meta.isEmpty) return@pipe data
val result = if (meta.hasValue("from") || meta.hasValue("to")) {
val uLo = meta.getDouble("from", 0.0)
val uHi = meta.getDouble("to", java.lang.Double.POSITIVE_INFINITY)
this.log.report("Filtering finished")
Tables.filter(data, NumassPoint.HV_KEY, uLo, uHi)
} else if (meta.hasValue("condition")) {
Tables.filter(data, Predicate { ExpressionUtils.condition(meta.getString("condition"), it.unbox()) })
} else {
throw RuntimeException("No filtering condition specified")
}
context.output.render(result, name = this.name, stage = "numass.filter")
return@pipe result
}
}
@ -230,16 +267,16 @@ val fitTask = task("fit") {
writer.flush()
FitHelper(context).fit(data, meta)
.setListenerStream(out)
.report(log)
.run()
.also {
if (meta.getBoolean("printLog", true)) {
writer.println()
log.entries.forEach { entry -> writer.println(entry.toString()) }
writer.println()
}
.setListenerStream(out)
.report(log)
.run()
.also {
if (meta.getBoolean("printLog", true)) {
writer.println()
log.entries.forEach { entry -> writer.println(entry.toString()) }
writer.println()
}
}
}
}
}
@ -262,7 +299,7 @@ val plotFitTask = task("plotFit") {
// ensuring all data points are calculated explicitly
StreamSupport.stream<Values>(data.spliterator(), false)
.map { dp -> Adapters.getXValue(adapter, dp).double }.sorted().forEach { fit.calculateIn(it) }
.map { dp -> Adapters.getXValue(adapter, dp).double }.sorted().forEach { fit.calculateIn(it) }
val dataPlot = DataPlot.plot("data", data, adapter)
@ -275,9 +312,24 @@ val plotFitTask = task("plotFit") {
val histogramTask = task("histogram") {
descriptor {
value("plot", types = listOf(ValueType.BOOLEAN), defaultValue = false, info = "Show plot of the spectra")
value("points", multiple = true, types = listOf(ValueType.NUMBER), info = "The list of point voltages to build histogram")
value("binning", types = listOf(ValueType.NUMBER), defaultValue = 20, info = "The binning of resulting histogram")
value("normalize", types = listOf(ValueType.BOOLEAN), defaultValue = true, info = "If true reports the count rate in each bin, otherwise total count")
value(
"points",
multiple = true,
types = listOf(ValueType.NUMBER),
info = "The list of point voltages to build histogram"
)
value(
"binning",
types = listOf(ValueType.NUMBER),
defaultValue = 16,
info = "The binning of resulting histogram"
)
value(
"normalize",
types = listOf(ValueType.BOOLEAN),
defaultValue = true,
info = "If true reports the count rate in each bin, otherwise total count"
)
info = "Combine amplitude spectra from multiple sets, but with the same U"
}
model { meta ->
@ -299,41 +351,41 @@ val histogramTask = task("histogram") {
//Fill values to table
data.flatMap { it.value.points }
.filter { points == null || points.contains(it.voltage) }
.groupBy { it.voltage }
.mapValues {
analyzer.getAmplitudeSpectrum(MetaBlock(it.value))
}
.forEach { u, spectrum ->
log.report("Aggregating data from U = $u")
spectrum.forEach {
val channel = it[CHANNEL_KEY].int
val count = it[COUNT_KEY].long
aggregator.getOrPut(channel) { HashMap() }
.getOrPut(u) { AtomicLong() }
.addAndGet(count)
}
names.add("U$u")
.filter { points == null || points.contains(it.voltage) }
.groupBy { it.voltage }
.mapValues { (_, value) ->
analyzer.getAmplitudeSpectrum(MetaBlock(value), meta.getMetaOrEmpty("analyzer"))
}
.forEach { (u, spectrum) ->
log.report("Aggregating data from U = $u")
spectrum.forEach {
val channel = it[CHANNEL_KEY].int
val count = it[COUNT_KEY].long
aggregator.getOrPut(channel) { HashMap() }
.getOrPut(u) { AtomicLong() }
.addAndGet(count)
}
names.add("U$u")
}
val times: Map<Double, Double> = data.flatMap { it.value.points }
.filter { points == null || points.contains(it.voltage) }
.groupBy { it.voltage }
.mapValues {
it.value.sumByDouble { it.length.toMillis().toDouble() / 1000 }
}
.filter { points == null || points.contains(it.voltage) }
.groupBy { it.voltage }
.mapValues {
it.value.sumByDouble { it.length.toMillis().toDouble() / 1000 }
}
val normalize = meta.getBoolean("normalize", true)
log.report("Combining spectra")
val format = MetaTableFormat.forNames(names)
val table = buildTable(format) {
aggregator.forEach { channel, counters ->
aggregator.forEach { (channel, counters) ->
val values: MutableMap<String, Any> = HashMap()
values[NumassAnalyzer.CHANNEL_KEY] = channel
counters.forEach { u, counter ->
values[CHANNEL_KEY] = channel
counters.forEach { (u, counter) ->
if (normalize) {
values["U$u"] = counter.get().toDouble() / times[u]!!
values["U$u"] = counter.get().toDouble() / times.getValue(u)
} else {
values["U$u"] = counter.get()
}
@ -343,10 +395,18 @@ val histogramTask = task("histogram") {
}
row(values)
}
}.sumByStep(NumassAnalyzer.CHANNEL_KEY, meta.getDouble("binning", 20.0)) //apply binning
}.sumByStep(CHANNEL_KEY, meta.getDouble("binning", 16.0)) //apply binning
// send raw table to the output
context.output.render(table, stage = "numass.histogram", name = name, meta = meta)
context.output.render(table, stage = "numass.histogram", name = name) {
update(meta)
data.toSortedMap().forEach { (name, set) ->
putNode("data", buildMeta {
"name" to name
set.meta.useMeta("iteration_info") { "iteration" to it }
})
}
}
if (meta.getBoolean("plot", false)) {
context.plotFrame("$name.plot", stage = "numass.histogram") {
@ -363,6 +423,57 @@ val histogramTask = task("histogram") {
}
}
return@join table
}
}
val sliceTask = task("slice") {
model { meta ->
dependsOn(selectTask, meta)
configure(meta.getMetaOrEmpty("slice"))
configure {
meta.useMeta("analyzer") { putNode(it) }
setValue("@target", meta.getString("@target", meta.name))
}
}
join<NumassSet, Table> { data ->
val analyzer = SmartAnalyzer()
val slices = HashMap<String, IntRange>()
val formatBuilder = TableFormatBuilder()
formatBuilder.addColumn("set", ValueType.STRING)
formatBuilder.addColumn("time", ValueType.TIME)
meta.getMetaList("range").forEach {
val range = IntRange(it.getInt("from"), it.getInt("to"))
val name = it.getString("name", range.toString())
slices[name] = range
formatBuilder.addColumn(name, ValueType.NUMBER)
}
val table = buildTable(formatBuilder.build()) {
data.forEach { (setName, set) ->
val point = set.find {
it.index == meta.getInt("index", -1) ||
it.voltage == meta.getDouble("voltage", -1.0)
}
if (point != null) {
val amplitudeSpectrum = analyzer.getAmplitudeSpectrum(point, meta.getMetaOrEmpty("analyzer"))
val map = HashMap<String, Any>()
map["set"] = setName
map["time"] = point.startTime
slices.mapValuesTo(map) { (_, range) ->
amplitudeSpectrum.countInWindow(
range.start.toShort(),
range.endInclusive.toShort()
)
}
row(map)
}
}
}
// send raw table to the output
context.output.render(table, stage = "numass.table", name = name, meta = meta)
return@join table
}
@ -371,21 +482,20 @@ val histogramTask = task("histogram") {
val fitScanTask = task("fitscan") {
model { meta ->
dependsOn(filterTask, meta)
configure{
configure {
setNode(meta.getMetaOrEmpty("scan"))
setNode(meta.getMeta("fit"))
}
}
splitAction<Table, FitResult> {
val scanMeta = meta.getMeta("scan")
val scanValues = if (scanMeta.hasValue("masses")) {
scanMeta.getValue("masses").list.map { it -> Math.pow(it.double * 1000, 2.0).asValue() }
val scanValues = if (meta.hasValue("scan.masses")) {
meta.getValue("scan.masses").list.map { it -> Math.pow(it.double * 1000, 2.0).asValue() }
} else {
scanMeta.getValue("values", listOf(2.5e5, 1e6, 2.25e6, 4e6, 6.25e6, 9e6)).list
meta.getValue("scan.values", listOf(2.5e5, 1e6, 2.25e6, 4e6, 6.25e6, 9e6)).list
}
val scanParameter = scanMeta.getString("parameter", "msterile2")
val scanParameter = meta.getString("parameter", "msterile2")
scanValues.forEach { scanValue ->
val resultName = String.format("%s[%s=%s]", this.name, scanParameter, scanValue.string)
val fitMeta = meta.getMeta("fit").builder.apply {
@ -394,8 +504,8 @@ val fitScanTask = task("fitscan") {
setValue("params.$scanParameter.value", scanValue)
} else {
getMetaList("params.param").stream()
.filter { par -> par.getString("name") == scanParameter }
.forEach { it.setValue("value", it) }
.filter { par -> par.getString("name") == scanParameter }
.forEach { it.setValue("value", it) }
}
}
@ -408,16 +518,16 @@ val fitScanTask = task("fitscan") {
writer.flush()
FitHelper(context).fit(data, fitMeta)
.setListenerStream(out)
.report(log)
.run()
.also {
if (fitMeta.getBoolean("printLog", true)) {
writer.println()
log.entries.forEach { entry -> writer.println(entry.toString()) }
writer.println()
}
.setListenerStream(out)
.report(log)
.run()
.also {
if (fitMeta.getBoolean("printLog", true)) {
writer.println()
log.entries.forEach { entry -> writer.println(entry.toString()) }
writer.println()
}
}
}
}
}

View File

@ -0,0 +1,55 @@
package inr.numass.tasks
import hep.dataforge.io.render
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.output.plotFrame
import hep.dataforge.plots.plotData
import hep.dataforge.tables.Adapters
import hep.dataforge.tables.Table
import hep.dataforge.tables.filter
import hep.dataforge.useMeta
import hep.dataforge.values.ValueType
import hep.dataforge.workspace.tasks.task
import inr.numass.data.NumassDataUtils
import inr.numass.data.api.NumassSet
import inr.numass.subthreshold.Threshold
/**
 * Workspace task "threshold": computes the sub-threshold efficiency correction
 * for a group of [NumassSet]s and returns it as a [Table].
 *
 * Pipeline (visible here): join the selected sets by point index, delegate the
 * actual correction fit to [Threshold.calculateSubThreshold], keep only rows
 * whose "correction" column lies in [1.0, 1.2], optionally plot the result,
 * and render the table to the "numass.correction" output.
 */
val subThresholdTask = task("threshold") {
    descriptor {
        // "plot": when true, the join step below draws the correction curves.
        value("plot", types = listOf(ValueType.BOOLEAN), defaultValue = false, info = "Show threshold correction plot")
        // "binning": declared here but not read directly in this body;
        // presumably consumed from meta by Threshold.calculateSubThreshold — TODO confirm.
        value(
            "binning",
            types = listOf(ValueType.NUMBER),
            defaultValue = 16,
            info = "The binning used for fit"
        )
        info = "Calculate sub threshold correction"
    }
    model { meta ->
        // Upstream data comes from the data-selection task.
        dependsOn(selectTask, meta)
        // Task configuration is taken from the "threshold" sub-node of the workspace meta.
        configure(meta.getMetaOrEmpty("threshold"))
        configure {
            // Propagate the analyzer configuration, if one is present, into this task's meta.
            meta.useMeta("analyzer") { putNode(it) }
            // Fall back to the meta's own name when no explicit "@target" is given.
            setValue("@target", meta.getString("@target", meta.name))
        }
    }
    join<NumassSet, Table> { data ->
        // Merge all input sets point-by-point (by index) into a single summed set.
        val sum = NumassDataUtils.joinByIndex(name, data.values)
        // Fit the sub-threshold region and discard rows with implausible corrections;
        // the accepted window (1.0..1.2) is hard-coded here.
        val correctionTable = Threshold.calculateSubThreshold(sum, meta).filter {
            it.getDouble("correction") in (1.0..1.2)
        }
        if (meta.getBoolean("plot", false)) {
            // Plot correction and the fit parameters "a" and "beta" against voltage U.
            context.plotFrame("$name.plot", stage = "numass.threshold") {
                plots.setType<DataPlot>()
                plotData("${name}_cor", correctionTable, Adapters.buildXYAdapter("U", "correction"))
                plotData("${name}_a", correctionTable, Adapters.buildXYAdapter("U", "a"))
                plotData("${name}_beta", correctionTable, Adapters.buildXYAdapter("U", "beta"))
            }
        }
        // Persist the filtered table regardless of whether it was plotted.
        context.output.render(correctionTable, "numass.correction", name, meta = meta)
        return@join correctionTable
    }
}

View File

@ -5,6 +5,11 @@ repositories {
mavenCentral()
}
//apply plugin: 'org.openjfx.javafxplugin'
//
//javafx {
// modules = [ 'javafx.controls' ]
//}
if (!hasProperty('mainClass')) {
ext.mainClass = 'inr.numass.viewer.Viewer'//"inr.numass.viewer.test.TestApp"

View File

@ -58,7 +58,7 @@ class AmplitudeView : View(title = "Numass amplitude spectrum plot", icon = Imag
private val container = PlotContainer(frame).apply {
val binningSelector: ChoiceBox<Int> = ChoiceBox(FXCollections.observableArrayList(1, 2, 5, 10, 20, 50)).apply {
val binningSelector: ChoiceBox<Int> = ChoiceBox(FXCollections.observableArrayList(1, 2, 8, 16, 32, 50)).apply {
minWidth = 0.0
selectionModel.selectLast()
binningProperty.bind(this.selectionModel.selectedItemProperty())

View File

@ -15,6 +15,7 @@ include ":numass-core"
include 'numass-core:numass-data-api'
include 'numass-core:numass-data-proto'
include 'numass-core:numass-signal-processing'
//include ":numass-server"
//