Major TRISTAN analysis update

This commit is contained in:
Alexander Nozik 2019-07-22 16:36:49 +03:00
parent 28b7969709
commit ee18f68928
23 changed files with 600 additions and 236 deletions

View File

@ -26,6 +26,9 @@ class MetaBlock(override val blocks: List<NumassBlock>) : ParentBlock {
override val length: Duration
get() = Duration.ofNanos(blocks.stream().mapToLong { block -> block.length.toNanos() }.sum())
/**
* A stream of events, sorted by block time but not sorted by event time
*/
override val events: Stream<NumassEvent>
get() = blocks.sortedBy { it.startTime }.stream().flatMap { it.events }

View File

@ -1,4 +1,3 @@
import com.google.protobuf.gradle.GenerateProtoTask
import com.google.protobuf.gradle.protobuf
import com.google.protobuf.gradle.protoc
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
@ -6,7 +5,7 @@ import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
plugins {
idea
kotlin("jvm")
id("com.google.protobuf") version "0.8.7"
id("com.google.protobuf") version "0.8.8"
}
@ -26,13 +25,13 @@ tasks.withType<KotlinCompile> {
dependsOn(":numass-core:numass-data-proto:generateProto")
}
sourceSets{
create("proto"){
proto {
srcDir("src/main/proto")
}
}
}
//sourceSets {
// create("proto") {
// proto {
// srcDir("src/main/proto")
// }
// }
//}
protobuf {
// Configure the protoc executable

View File

@ -21,6 +21,7 @@ import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import inr.numass.data.api.*
import inr.numass.data.storage.ClassicNumassPoint
import org.slf4j.LoggerFactory
import kotlin.streams.asSequence
@ -53,10 +54,15 @@ object NumassDataUtils {
override val points: List<NumassPoint> by lazy {
val points = sets.flatMap { it.points }.groupBy { it.index }
return@lazy points.map { (index, points) ->
return@lazy points.mapNotNull { (index, points) ->
val voltage = points.first().voltage
if (!points.all { it.voltage == voltage }) error("Not all points with same index have same voltage")
SimpleNumassPoint.build(points, voltage, index)
if (!points.all { it.voltage == voltage }) {
LoggerFactory.getLogger(javaClass)
.warn("Not all points with index $index have voltage $voltage")
null
} else {
SimpleNumassPoint.build(points, voltage, index)
}
}
}

View File

@ -45,16 +45,18 @@ abstract class AbstractAnalyzer @JvmOverloads constructor(private val processor:
* @return
*/
override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
val loChannel = meta.getInt("window.lo", 0)
val upChannel = meta.getInt("window.up", Integer.MAX_VALUE)
// if (meta.getBoolean("sort", false)) {
// res = res.sorted(compareBy { it.timeOffset })
// }
val range = meta.getRange()
return getAllEvents(block).filter { event ->
event.amplitude.toInt() in loChannel..(upChannel - 1)
event.amplitude.toInt() in range
}.toList()
}
protected fun Meta.getRange(): IntRange {
val loChannel = getInt("window.lo", 0)
val upChannel = getInt("window.up", Integer.MAX_VALUE)
return loChannel until upChannel
}
protected fun getAllEvents(block: NumassBlock): Stream<NumassEvent> {
return when {
block.frames.count() == 0L -> block.events

View File

@ -52,6 +52,14 @@ interface NumassAnalyzer {
* @return
*/
fun analyzeParent(point: ParentBlock, config: Meta = Meta.empty()): Values {
// //Add properties to config
// val newConfig = config.builder.apply {
// if (point is NumassPoint) {
// setValue("voltage", point.voltage)
// setValue("index", point.index)
// }
// setValue("channel", point.channel)
// }
val map = HashMap(analyze(point, config).asMap())
if (point is NumassPoint) {
map[HV_KEY] = Value.of(point.voltage)
@ -101,7 +109,7 @@ interface NumassAnalyzer {
fun getAmplitudeSpectrum(block: NumassBlock, config: Meta = Meta.empty()): Table {
val seconds = block.length.toMillis().toDouble() / 1000.0
return getAmplitudeSpectrum(getEvents(block, config).asSequence(), seconds, config)
return getEvents(block, config).asSequence().getAmplitudeSpectrum(seconds, config)
}
companion object {
@ -114,8 +122,6 @@ interface NumassAnalyzer {
const val WINDOW_KEY = "window"
const val TIME_KEY = "timestamp"
val DEFAULT_ANALYZER: NumassAnalyzer = SmartAnalyzer()
val AMPLITUDE_ADAPTER: ValuesAdapter = Adapters.buildXYAdapter(CHANNEL_KEY, COUNT_RATE_KEY)
// val MAX_CHANNEL = 10000
@ -139,23 +145,26 @@ fun Table.countInWindow(loChannel: Short, upChannel: Short): Long {
/**
* Calculate the amplitude spectrum for a given block.
*
* @param events
* @receiver the sequence of events to build the spectrum from
* @param length length in seconds, used for count rate calculation
* @param config
* @return
*/
fun getAmplitudeSpectrum(events: Sequence<NumassEvent>, length: Double, config: Meta = Meta.empty()): Table {
fun Sequence<NumassEvent>.getAmplitudeSpectrum(
length: Double,
config: Meta = Meta.empty()
): Table {
val format = TableFormatBuilder()
.addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
.updateMeta { metaBuilder -> metaBuilder.setNode("config", config) }
.build()
.addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
.updateMeta { metaBuilder -> metaBuilder.setNode("config", config) }
.build()
//optimized for fastest computation
val spectrum: MutableMap<Int, AtomicLong> = HashMap()
events.forEach { event ->
forEach { event ->
val channel = event.amplitude.toInt()
spectrum.getOrPut(channel) {
AtomicLong(0)
@ -167,18 +176,18 @@ fun getAmplitudeSpectrum(events: Sequence<NumassEvent>, length: Double, config:
val maxChannel = config.getInt("window.up") { spectrum.keys.max() ?: 4096 }
return ListTable.Builder(format)
.rows(IntStream.range(minChannel, maxChannel)
.mapToObj { i ->
val value = spectrum[i]?.get() ?: 0
ValueMap.of(
format.namesAsArray(),
i,
value,
value.toDouble() / length,
Math.sqrt(value.toDouble()) / length
)
}
).build()
.rows(IntStream.range(minChannel, maxChannel)
.mapToObj { i ->
val value = spectrum[i]?.get() ?: 0
ValueMap.of(
format.namesAsArray(),
i,
value,
value.toDouble() / length,
Math.sqrt(value.toDouble()) / length
)
}
).build()
}
/**
@ -192,18 +201,18 @@ fun getAmplitudeSpectrum(events: Sequence<NumassEvent>, length: Double, config:
@JvmOverloads
fun Table.withBinning(binSize: Int, loChannel: Int? = null, upChannel: Int? = null): Table {
val format = TableFormatBuilder()
.addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_KEY, Y_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
.addNumber("binSize")
.addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_KEY, Y_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
.addNumber("binSize")
val builder = ListTable.Builder(format)
var chan = loChannel
?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.int }.min().orElse(0)
?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.int }.min().orElse(0)
val top = upChannel
?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.int }.max().orElse(1)
?: this.getColumn(NumassAnalyzer.CHANNEL_KEY).stream().mapToInt { it.int }.max().orElse(1)
while (chan < top - binSize) {
val count = AtomicLong(0)
@ -218,10 +227,21 @@ fun Table.withBinning(binSize: Int, loChannel: Int? = null, upChannel: Int? = nu
}.forEach { row ->
count.addAndGet(row.getValue(NumassAnalyzer.COUNT_KEY, 0).long)
countRate.accumulateAndGet(row.getDouble(NumassAnalyzer.COUNT_RATE_KEY, 0.0)) { d1, d2 -> d1 + d2 }
countRateDispersion.accumulateAndGet(Math.pow(row.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY, 0.0), 2.0)) { d1, d2 -> d1 + d2 }
countRateDispersion.accumulateAndGet(
Math.pow(
row.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY, 0.0),
2.0
)
) { d1, d2 -> d1 + d2 }
}
val bin = Math.min(binSize, top - chan)
builder.row(chan.toDouble() + bin.toDouble() / 2.0, count.get(), countRate.get(), Math.sqrt(countRateDispersion.get()), bin)
builder.row(
chan.toDouble() + bin.toDouble() / 2.0,
count.get(),
countRate.get(),
Math.sqrt(countRateDispersion.get()),
bin
)
chan += binSize
}
return builder.build()
@ -236,19 +256,20 @@ fun Table.withBinning(binSize: Int, loChannel: Int? = null, upChannel: Int? = nu
*/
fun subtractAmplitudeSpectrum(sp1: Table, sp2: Table): Table {
val format = TableFormatBuilder()
.addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
.build()
.addNumber(NumassAnalyzer.CHANNEL_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
.build()
val builder = ListTable.Builder(format)
sp1.forEach { row1 ->
val channel = row1.getDouble(NumassAnalyzer.CHANNEL_KEY)
val row2 = sp2.rows.asSequence().find { it.getDouble(NumassAnalyzer.CHANNEL_KEY) == channel }
?: ValueMap.ofPairs(NumassAnalyzer.COUNT_RATE_KEY to 0.0, NumassAnalyzer.COUNT_RATE_ERROR_KEY to 0.0)
?: ValueMap.ofPairs(NumassAnalyzer.COUNT_RATE_KEY to 0.0, NumassAnalyzer.COUNT_RATE_ERROR_KEY to 0.0)
val value = Math.max(row1.getDouble(NumassAnalyzer.COUNT_RATE_KEY) - row2.getDouble(NumassAnalyzer.COUNT_RATE_KEY), 0.0)
val value =
Math.max(row1.getDouble(NumassAnalyzer.COUNT_RATE_KEY) - row2.getDouble(NumassAnalyzer.COUNT_RATE_KEY), 0.0)
val error1 = row1.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
val error2 = row2.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
val error = Math.sqrt(error1 * error1 + error2 * error2)

View File

@ -32,7 +32,17 @@ import inr.numass.data.api.*
import inr.numass.data.api.NumassPoint.Companion.HV_KEY
import java.util.*
import java.util.concurrent.atomic.AtomicLong
import kotlin.math.sqrt
import kotlin.collections.List
import kotlin.collections.asSequence
import kotlin.collections.count
import kotlin.collections.first
import kotlin.collections.map
import kotlin.collections.set
import kotlin.collections.sortBy
import kotlin.collections.sumBy
import kotlin.collections.sumByDouble
import kotlin.collections.toMutableList
import kotlin.math.*
import kotlin.streams.asSequence
@ -53,7 +63,7 @@ import kotlin.streams.asSequence
info = "The number of events in chunk to split the chain into. If negative, no chunks are used"
)
)
class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(processor) {
open class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(processor) {
override fun analyze(block: NumassBlock, config: Meta): Values {
//In case points inside points
@ -61,8 +71,6 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
return analyzeParent(block, config)
}
val loChannel = config.getInt("window.lo", 0)
val upChannel = config.getInt("window.up", Integer.MAX_VALUE)
val t0 = getT0(block, config).toLong()
val chunkSize = config.getInt("chunkSize", -1)
@ -72,10 +80,10 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
val res = when {
count < 1000 -> ValueMap.ofPairs(
NumassAnalyzer.LENGTH_KEY to length,
NumassAnalyzer.COUNT_KEY to count,
NumassAnalyzer.COUNT_RATE_KEY to count.toDouble() / length,
NumassAnalyzer.COUNT_RATE_ERROR_KEY to sqrt(count.toDouble()) / length
LENGTH_KEY to length,
COUNT_KEY to count,
COUNT_RATE_KEY to count.toDouble() / length,
COUNT_RATE_ERROR_KEY to sqrt(count.toDouble()) / length
)
chunkSize > 0 -> getEventsWithDelay(block, config)
.chunked(chunkSize) { analyzeSequence(it.asSequence(), t0) }
@ -86,7 +94,7 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
return ValueMap.Builder(res)
.putValue("blockLength", length)
.putValue(NumassAnalyzer.WINDOW_KEY, arrayOf(loChannel, upChannel))
.putValue(NumassAnalyzer.WINDOW_KEY, config.getRange())
.putValue(NumassAnalyzer.TIME_KEY, block.startTime)
.putValue(T0_KEY, t0.toDouble() / 1000.0)
.build()
@ -109,15 +117,15 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
val countRate =
1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000)//1e9 / (totalT.get() / totalN.get() - t0);
val countRateError = countRate / Math.sqrt(totalN.get().toDouble())
val countRateError = countRate / sqrt(totalN.get().toDouble())
val length = totalT.get() / 1e9
val count = (length * countRate).toLong()
return ValueMap.ofPairs(
NumassAnalyzer.LENGTH_KEY to length,
NumassAnalyzer.COUNT_KEY to count,
NumassAnalyzer.COUNT_RATE_KEY to countRate,
NumassAnalyzer.COUNT_RATE_ERROR_KEY to countRateError
LENGTH_KEY to length,
COUNT_KEY to count,
COUNT_RATE_KEY to countRate,
COUNT_RATE_ERROR_KEY to countRateError
)
}
@ -160,18 +168,19 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
val (countRate, countRateDispersion) = when (method) {
ARITHMETIC -> Pair(
sumByDouble { it.getDouble(COUNT_RATE_KEY) } / size,
sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY), 2.0) } / size / size
sumByDouble { it.getDouble(COUNT_RATE_ERROR_KEY).pow(2.0) } / size / size
)
WEIGHTED -> Pair(
sumByDouble { it.getDouble(COUNT_RATE_KEY) * it.getDouble(LENGTH_KEY) } / totalTime,
sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY) * it.getDouble(LENGTH_KEY) / totalTime, 2.0) }
sumByDouble { (it.getDouble(COUNT_RATE_ERROR_KEY) * it.getDouble(LENGTH_KEY) / totalTime).pow(2.0) }
)
GEOMETRIC -> {
val mean = Math.exp(sumByDouble { Math.log(it.getDouble(COUNT_RATE_KEY)) } / size)
val variance = Math.pow(
mean / size,
2.0
) * sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY) / it.getDouble(COUNT_RATE_KEY), 2.0) }
val mean = exp(sumByDouble { ln(it.getDouble(COUNT_RATE_KEY)) } / size)
val variance = (mean / size).pow(2.0) * sumByDouble {
(it.getDouble(COUNT_RATE_ERROR_KEY) / it.getDouble(
COUNT_RATE_KEY
)).pow(2.0)
}
Pair(mean, variance)
}
}
@ -193,7 +202,7 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
),
ValueDef(key = "t0.min", type = arrayOf(ValueType.NUMBER), def = "0", info = "Minimal t0")
)
private fun getT0(block: NumassBlock, meta: Meta): Int {
protected fun getT0(block: NumassBlock, meta: Meta): Int {
return if (meta.hasValue("t0")) {
meta.getInt("t0")
} else if (meta.hasMeta("t0")) {
@ -202,7 +211,7 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
if (cr < meta.getDouble("t0.minCR", 0.0)) {
0
} else {
Math.max(-1e9 / cr * Math.log(1.0 - fraction), meta.getDouble("t0.min", 0.0)).toInt()
max(-1e9 / cr * ln(1.0 - fraction), meta.getDouble("t0.min", 0.0)).toInt()
}
} else {
0
@ -227,15 +236,16 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
*/
fun getEventsWithDelay(block: NumassBlock, config: Meta): Sequence<Pair<NumassEvent, Long>> {
val inverted = config.getBoolean("inverted", true)
//range is included in super.getEvents
val events = super.getEvents(block, config).toMutableList()
if (block is ParentBlock && !block.isSequential) {
if (config.getBoolean("sortEvents", false) || (block is ParentBlock && !block.isSequential)) {
//sort in place if needed
events.sortBy { it.timeOffset }
}
return events.asSequence().zipWithNext { prev, next ->
val delay = Math.max(next.timeOffset - prev.timeOffset, 0)
val delay = max(next.timeOffset - prev.timeOffset, 0)
if (inverted) {
Pair(next, delay)
} else {
@ -253,16 +263,18 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
*/
override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
val t0 = getT0(block, meta).toLong()
return getEventsWithDelay(block, meta).filter { pair -> pair.second >= t0 }.map { it.first }.toList()
return getEventsWithDelay(block, meta)
.filter { pair -> pair.second >= t0 }
.map { it.first }.toList()
}
public override fun getTableFormat(config: Meta): TableFormat {
return TableFormatBuilder()
.addNumber(HV_KEY, X_VALUE_KEY)
.addNumber(NumassAnalyzer.LENGTH_KEY)
.addNumber(NumassAnalyzer.COUNT_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
.addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
.addNumber(LENGTH_KEY)
.addNumber(COUNT_KEY)
.addNumber(COUNT_RATE_KEY, Y_VALUE_KEY)
.addNumber(COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
.addColumn(NumassAnalyzer.WINDOW_KEY)
.addTime()
.addNumber(T0_KEY)
@ -273,10 +285,10 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
const val T0_KEY = "t0"
val NAME_LIST = arrayOf(
NumassAnalyzer.LENGTH_KEY,
NumassAnalyzer.COUNT_KEY,
NumassAnalyzer.COUNT_RATE_KEY,
NumassAnalyzer.COUNT_RATE_ERROR_KEY,
LENGTH_KEY,
COUNT_KEY,
COUNT_RATE_KEY,
COUNT_RATE_ERROR_KEY,
NumassAnalyzer.WINDOW_KEY,
NumassAnalyzer.TIME_KEY,
T0_KEY

View File

@ -44,10 +44,10 @@ import kotlin.streams.toList
* @author darksnake
*/
class NumassDataLoader(
override val context: Context,
override val parent: StorageElement?,
override val name: String,
override val path: Path
override val context: Context,
override val parent: StorageElement?,
override val name: String,
override val path: Path
) : Loader<NumassPoint>, NumassSet, Provider, FileStorageElement {
override val type: KClass<NumassPoint> = NumassPoint::class
@ -63,26 +63,24 @@ class NumassDataLoader(
}
override suspend fun getHvData(): Table? {
val hvEnvelope = path.resolve(HV_FRAGMENT_NAME)?.let {
val hvEnvelope = path.resolve(HV_FRAGMENT_NAME).let {
NumassEnvelopeType.infer(it)?.reader?.read(it) ?: error("Can't read hv file")
}
return hvEnvelope?.let {
try {
ColumnedDataReader(it.data.stream, "timestamp", "block", "value").toTable()
} catch (ex: IOException) {
LoggerFactory.getLogger(javaClass).error("Failed to load HV data from file", ex)
null
}
return try {
ColumnedDataReader(hvEnvelope.data.stream, "timestamp", "block", "value").toTable()
} catch (ex: IOException) {
LoggerFactory.getLogger(javaClass).error("Failed to load HV data from file", ex)
null
}
}
private val pointEnvelopes: List<Envelope> by lazy {
Files.list(path)
.filter { it.fileName.toString().startsWith(POINT_FRAGMENT_NAME) }
.map {
NumassEnvelopeType.infer(it)?.reader?.read(it) ?: error("Can't read point file")
}.toList()
.filter { it.fileName.toString().startsWith(POINT_FRAGMENT_NAME) }
.map {
NumassEnvelopeType.infer(it)?.reader?.read(it) ?: error("Can't read point file")
}.toList()
}
val isReversed: Boolean
@ -189,4 +187,8 @@ class NumassDataLoader(
}
fun Context.readNumassSet(path:Path):NumassDataLoader{
return NumassDataLoader(this,null,path.fileName.toString(),path)
}

View File

@ -36,6 +36,7 @@ dependencies {
compile group: 'commons-cli', name: 'commons-cli', version: '1.+'
compile group: 'commons-io', name: 'commons-io', version: '2.+'
compile project(':numass-core')
compile project(':numass-core:numass-signal-processing')
compileOnly "org.jetbrains.kotlin:kotlin-main-kts:1.3.21"
compile "hep.dataforge:dataforge-minuit" //project(':dataforge-stat:dataforge-minuit')
compile "hep.dataforge:grind-terminal" //project(':dataforge-grind:grind-terminal')

View File

@ -8,6 +8,7 @@ import hep.dataforge.grind.workspace.GrindWorkspace
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import hep.dataforge.workspace.FileBasedWorkspace
import hep.dataforge.workspace.Workspace
import groovy.cli.commons.CliBuilder
/**
* Created by darksnake on 29-Aug-16.

View File

@ -48,6 +48,7 @@ import java.awt.Font
import java.io.IOException
import java.io.OutputStream
import java.lang.Math.*
import java.time.Instant
import java.util.*
/**
@ -108,11 +109,11 @@ object NumassUtils {
fun writeEnvelope(stream: OutputStream, meta: Meta, dataWriter: (OutputStream) -> Unit) {
try {
TaglessEnvelopeType.INSTANCE.writer.write(
stream,
EnvelopeBuilder()
.meta(meta)
.data(dataWriter)
.build()
stream,
EnvelopeBuilder()
.meta(meta)
.data(dataWriter)
.build()
)
stream.flush()
} catch (e: IOException) {
@ -148,10 +149,10 @@ object NumassUtils {
builder.name = set.name
set.points.forEach { point ->
val pointMeta = MetaBuilder("point")
.putValue("voltage", point.voltage)
.putValue("index", point.meta.getInt("external_meta.point_index", -1))
.putValue("run", point.meta.getString("external_meta.session", ""))
.putValue("group", point.meta.getString("external_meta.group", ""))
.putValue("voltage", point.voltage)
.putValue("index", point.meta.getInt("external_meta.point_index", -1))
.putValue("run", point.meta.getString("external_meta.session", ""))
.putValue("group", point.meta.getString("external_meta.group", ""))
val pointName = "point_" + point.meta.getInt("external_meta.point_index", point.hashCode())
builder.putData(pointName, point, pointMeta)
}
@ -176,8 +177,8 @@ object NumassUtils {
fun getFSS(context: Context, meta: Meta): FSS? {
return if (meta.getBoolean("useFSS", true)) {
val fssBinary: Binary? = meta.optString("fssFile")
.map { fssFile -> context.getFile(fssFile).binary }
.orElse(context.getResource("data/FS.txt"))
.map { fssFile -> context.getFile(fssFile).binary }
.orElse(context.getResource("data/FS.txt"))
fssBinary?.let { FSS(it.stream) } ?: throw RuntimeException("Could not load FSS file")
} else {
null
@ -189,16 +190,17 @@ fun getFSS(context: Context, meta: Meta): FSS? {
* Evaluate groovy expression using numass point as parameter
*
* @param expression
* @param point
* @param values
* @return
*/
fun pointExpression(expression: String, point: Values): Double {
fun pointExpression(expression: String, values: Values): Double {
val exprParams = HashMap<String, Any>()
//Adding all point values to expression parameters
point.names.forEach { name -> exprParams[name] = point.getValue(name).value }
values.names.forEach { name -> exprParams[name] = values.getValue(name).value }
//Adding aliases for commonly used parameters
exprParams["T"] = point.getDouble("length")
exprParams["U"] = point.getDouble("voltage")
exprParams["T"] = values.getDouble("length")
exprParams["U"] = values.getDouble("voltage")
exprParams["time"] = values.optTime("timestamp").orElse(Instant.EPOCH).epochSecond
return ExpressionUtils.function(expression, exprParams)
}
@ -212,8 +214,8 @@ fun JFreeChartFrame.addSetMarkers(sets: Collection<NumassSet>) {
sets.stream().forEach { set ->
val start = set.startTime;
val stop = set.meta.optValue("end_time").map { it.time }
.orElse(start.plusSeconds(300))
.minusSeconds(60)
.orElse(start.plusSeconds(300))
.minusSeconds(60)
val marker = IntervalMarker(start.toEpochMilli().toDouble(), stop.toEpochMilli().toDouble(), paint)
marker.label = set.name
marker.labelFont = Font("Verdana", Font.BOLD, 20);
@ -230,15 +232,25 @@ fun subtractSpectrum(merge: Table, empty: Table, logger: Logger? = null): Table
merge.rows.forEach { point ->
val pointBuilder = ValueMap.Builder(point)
val referencePoint = empty.rows
.filter { p -> Math.abs(p.getDouble(NumassPoint.HV_KEY) - point.getDouble(NumassPoint.HV_KEY)) < 0.1 }.findFirst()
.filter { p -> Math.abs(p.getDouble(NumassPoint.HV_KEY) - point.getDouble(NumassPoint.HV_KEY)) < 0.1 }
.findFirst()
if (referencePoint.isPresent) {
pointBuilder.putValue(
NumassAnalyzer.COUNT_RATE_KEY,
Math.max(0.0, point.getDouble(NumassAnalyzer.COUNT_RATE_KEY) - referencePoint.get().getDouble(NumassAnalyzer.COUNT_RATE_KEY))
NumassAnalyzer.COUNT_RATE_KEY,
Math.max(
0.0,
point.getDouble(NumassAnalyzer.COUNT_RATE_KEY) - referencePoint.get().getDouble(NumassAnalyzer.COUNT_RATE_KEY)
)
)
pointBuilder.putValue(
NumassAnalyzer.COUNT_RATE_ERROR_KEY,
Math.sqrt(Math.pow(point.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY), 2.0) + Math.pow(referencePoint.get().getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY), 2.0)))
NumassAnalyzer.COUNT_RATE_ERROR_KEY,
Math.sqrt(
Math.pow(
point.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY),
2.0
) + Math.pow(referencePoint.get().getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY), 2.0)
)
)
} else {
logger?.warn("No reference point found for voltage = {}", point.getDouble(NumassPoint.HV_KEY))
}

View File

@ -12,6 +12,7 @@ import hep.dataforge.values.ValueType.STRING
import inr.numass.NumassUtils
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.api.NumassSet
import inr.numass.data.analyzers.SmartAnalyzer
/**
* The action performs the readout of data and collection of count rate into a table
@ -25,7 +26,7 @@ import inr.numass.data.api.NumassSet
object AnalyzeDataAction : OneToOneAction<NumassSet, Table>("numass.analyze", NumassSet::class.java, Table::class.java) {
override fun execute(context: Context, name: String, input: NumassSet, inputMeta: Laminate): Table {
//TODO add processor here
val analyzer = NumassAnalyzer.DEFAULT_ANALYZER
val analyzer: NumassAnalyzer = SmartAnalyzer()
val res = analyzer.analyzeSet(input, inputMeta)
render(context, name, NumassUtils.wrap(res, inputMeta))

View File

@ -22,6 +22,8 @@ import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_ERROR_KEY
import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_KEY
import inr.numass.pointExpression
import java.util.*
import kotlin.math.pow
import kotlin.math.sqrt
/**
* Apply corrections and transformations to analyzed data
@ -29,10 +31,17 @@ import java.util.*
*/
@TypedActionDef(name = "numass.transform", inputType = Table::class, outputType = Table::class)
@ValueDefs(
ValueDef(key = "correction", info = "An expression to correct count number depending on potential `U`, point length `T` and point itself as `point`"),
ValueDef(key = "utransform", info = "Expression for voltage transformation. Uses U as input")
ValueDef(
key = "correction",
info = "An expression to correct count number depending on potential `U`, point length `T` and point itself as `point`"
),
ValueDef(key = "utransform", info = "Expression for voltage transformation. Uses U as input")
)
@NodeDef(
key = "correction",
multiple = true,
descriptor = "method::inr.numass.actions.TransformDataAction.makeCorrection"
)
@NodeDef(key = "correction", multiple = true, descriptor = "method::inr.numass.actions.TransformDataAction.makeCorrection")
object TransformDataAction : OneToOneAction<Table, Table>("numass.transform", Table::class.java, Table::class.java) {
override fun execute(context: Context, name: String, input: Table, meta: Laminate): Table {
@ -43,9 +52,10 @@ object TransformDataAction : OneToOneAction<Table, Table>("numass.transform", Ta
meta.optMeta("corrections").ifPresent { cors ->
MetaUtils.nodeStream(cors)
.map<Meta> { it.second }
.map<Correction> { this.makeCorrection(it) }
.forEach { corrections.add(it) }
.filter { it.first.length == 1 }
.map<Meta> { it.second }
.map { makeCorrection(it) }
.forEach { corrections.add(it) }
}
if (meta.hasValue("correction")) {
@ -64,28 +74,39 @@ object TransformDataAction : OneToOneAction<Table, Table>("numass.transform", Ta
if (!correction.isAnonymous) {
table = table.buildColumn(ColumnFormat.build(correction.name, NUMBER)) { correction.corr(this) }
if (correction.hasError()) {
table = table.buildColumn(ColumnFormat.build(correction.name + ".err", NUMBER)) { correction.corrErr(this) }
table = table.buildColumn(ColumnFormat.build(correction.name + ".err", NUMBER)) {
correction.corrErr(this)
}
}
}
}
// adding original count rate and error columns
table = table.addColumn(ListColumn(ColumnFormat.build("$COUNT_RATE_KEY.orig", NUMBER), table.getColumn(COUNT_RATE_KEY).stream()))
table = table.addColumn(ListColumn(ColumnFormat.build("$COUNT_RATE_ERROR_KEY.orig", NUMBER), table
.getColumn(COUNT_RATE_ERROR_KEY).stream()))
table = table.addColumn(
ListColumn(
ColumnFormat.build("$COUNT_RATE_KEY.orig", NUMBER),
table.getColumn(COUNT_RATE_KEY).stream()
)
)
table = table.addColumn(
ListColumn(
ColumnFormat.build("$COUNT_RATE_ERROR_KEY.orig", NUMBER), table
.getColumn(COUNT_RATE_ERROR_KEY).stream()
)
)
val cr = ArrayList<Double>()
val crErr = ArrayList<Double>()
table.rows.forEach { point ->
val correctionFactor = corrections.stream()
.mapToDouble { cor -> cor.corr(point) }
.reduce { d1, d2 -> d1 * d2 }.orElse(1.0)
.mapToDouble { cor -> cor.corr(point) }
.reduce { d1, d2 -> d1 * d2 }.orElse(1.0)
val relativeCorrectionError = Math.sqrt(
corrections.stream()
.mapToDouble { cor -> cor.relativeErr(point) }
.reduce { d1, d2 -> d1 * d1 + d2 * d2 }.orElse(0.0)
corrections.stream()
.mapToDouble { cor -> cor.relativeErr(point) }
.reduce { d1, d2 -> d1 * d1 + d2 * d2 }.orElse(0.0)
)
val originalCR = point.getDouble(COUNT_RATE_KEY)
val originalCRErr = point.getDouble(COUNT_RATE_ERROR_KEY)
@ -93,13 +114,13 @@ object TransformDataAction : OneToOneAction<Table, Table>("numass.transform", Ta
if (relativeCorrectionError == 0.0) {
crErr.add(originalCRErr * correctionFactor)
} else {
crErr.add(Math.sqrt(Math.pow(originalCRErr / originalCR, 2.0) + Math.pow(relativeCorrectionError, 2.0)) * originalCR)
crErr.add(sqrt((originalCRErr / originalCR).pow(2.0) + relativeCorrectionError.pow(2.0)) * originalCR)
}
}
//replacing cr column
val res = table.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_KEY).format, cr.stream()))
.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_ERROR_KEY).format, crErr.stream()))
.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_ERROR_KEY).format, crErr.stream()))
context.output[this@TransformDataAction.name, name].render(res, meta)
return res
@ -107,34 +128,23 @@ object TransformDataAction : OneToOneAction<Table, Table>("numass.transform", Ta
@ValueDefs(
ValueDef(key = "value", type = arrayOf(NUMBER, STRING), info = "Value or function to multiply count rate"),
ValueDef(key = "err", type = arrayOf(NUMBER, STRING), info = "error of the value")
ValueDef(key = "value", type = arrayOf(NUMBER, STRING), info = "Value or function to multiply count rate"),
ValueDef(key = "err", type = arrayOf(NUMBER, STRING), info = "error of the value")
)
private fun makeCorrection(corrMeta: Meta): Correction {
val expr = corrMeta.getString("value")
val errExpr = corrMeta.getString("err", "")
return object : Correction {
override val name = corrMeta.getString("name", corrMeta.name)
override fun corr(point: Values): Double {
return pointExpression(expr, point)
}
override fun corrErr(point: Values): Double {
return if (errExpr.isEmpty()) {
0.0
} else {
pointExpression(errExpr, point)
}
}
override fun hasError(): Boolean {
return !errExpr.isEmpty()
}
val name = corrMeta.getString("name", corrMeta.name)
return if (corrMeta.hasMeta("table")) {
val x = corrMeta.getValue("table.u").list.map { it.double }
val corr = corrMeta.getValue("table.corr").list.map { it.double }
TableCorrection(name, x, corr)
} else {
val expr = corrMeta.getString("value")
val errExpr = corrMeta.getString("err", "")
ExpressionCorrection(name, expr, errExpr)
}
}
private interface Correction : Named {
interface Correction : Named {
/**
* correction coefficient
@ -168,4 +178,46 @@ object TransformDataAction : OneToOneAction<Table, Table>("numass.transform", Ta
}
}
class ExpressionCorrection(override val name: String, val expr: String, val errExpr: String) : Correction {
override fun corr(point: Values): Double {
return pointExpression(expr, point)
}
override fun corrErr(point: Values): Double {
return if (errExpr.isEmpty()) {
0.0
} else {
pointExpression(errExpr, point)
}
}
override fun hasError(): Boolean {
return errExpr.isNotEmpty()
}
}
class TableCorrection(
override val name: String,
val x: List<Double>,
val y: List<Double>,
val yErr: List<Double>? = null
) : Correction {
override fun corr(point: Values): Double {
val voltage = point.getDouble("voltage")
val index = x.indexOfFirst { it > voltage }
//TODO add interpolation
return if (index < 0) {
y.last()
} else {
y[index]
}
}
//
// override fun corrErr(point: Values): Double {
// return super.corrErr(point)
// }
//
// override fun hasError(): Boolean = yErr.isNullOrEmpty()
}
}

View File

@ -18,10 +18,11 @@ package inr.numass.data
import hep.dataforge.configure
import hep.dataforge.context.Context
import hep.dataforge.context.Global
import hep.dataforge.meta.KMetaBuilder
import hep.dataforge.meta.buildMeta
import hep.dataforge.nullable
import hep.dataforge.plots.PlotFrame
import hep.dataforge.plots.PlotGroup
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.output.plotFrame
import hep.dataforge.tables.Adapters
@ -30,20 +31,24 @@ import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.analyzers.withBinning
import inr.numass.data.api.NumassBlock
fun NumassBlock.plotAmplitudeSpectrum(plotName: String = "spectrum", frameName: String = "", context: Context = Global, metaAction: KMetaBuilder.() -> Unit = {}) {
val meta = buildMeta("meta", metaAction)
fun PlotGroup.plotAmplitudeSpectrum(
numassBlock: NumassBlock,
plotName: String = "spectrum",
analyzer: NumassAnalyzer = SmartAnalyzer(),
metaBuilder: KMetaBuilder.() -> Unit = {}
) {
val meta = buildMeta("meta", metaBuilder)
val binning = meta.getInt("binning", 20)
val lo = meta.optNumber("window.lo").nullable?.toInt()
val up = meta.optNumber("window.up").nullable?.toInt()
val data = SmartAnalyzer().getAmplitudeSpectrum(this, meta.getMetaOrEmpty("spectrum")).withBinning(binning, lo, up)
context.plotFrame(plotName) {
val data = analyzer.getAmplitudeSpectrum(numassBlock, meta).withBinning(binning, lo, up)
apply {
val valueAxis = if (meta.getBoolean("normalize", false)) {
NumassAnalyzer.COUNT_RATE_KEY
} else {
NumassAnalyzer.COUNT_KEY
}
plots.configure {
configure {
"connectionType" to "step"
"thickness" to 2
"showLine" to true
@ -52,11 +57,31 @@ fun NumassBlock.plotAmplitudeSpectrum(plotName: String = "spectrum", frameName:
}.setType<DataPlot>()
val plot = DataPlot.plot(
plotName,
data,
Adapters.buildXYAdapter(NumassAnalyzer.CHANNEL_KEY, valueAxis)
plotName,
data,
Adapters.buildXYAdapter(NumassAnalyzer.CHANNEL_KEY, valueAxis)
)
plot.configure(meta)
add(plot)
}
}
}
fun PlotFrame.plotAmplitudeSpectrum(
numassBlock: NumassBlock,
plotName: String = "spectrum",
analyzer: NumassAnalyzer = SmartAnalyzer(),
metaBuilder: KMetaBuilder.() -> Unit = {}
) = plots.plotAmplitudeSpectrum(numassBlock, plotName, analyzer, metaBuilder)
fun Context.plotAmplitudeSpectrum(
numassBlock: NumassBlock,
plotName: String = "spectrum",
frameName: String = plotName,
analyzer: NumassAnalyzer = SmartAnalyzer(),
metaAction: KMetaBuilder.() -> Unit = {}
) {
plotFrame(frameName) {
plotAmplitudeSpectrum(numassBlock, plotName, analyzer, metaAction)
}
}

View File

@ -17,13 +17,16 @@
package inr.numass.data.analyzers
import hep.dataforge.meta.Meta
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.Table
import hep.dataforge.tables.TableFormat
import hep.dataforge.values.Value
import hep.dataforge.values.ValueMap
import hep.dataforge.values.ValueType
import hep.dataforge.values.Values
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassEvent
import inr.numass.data.api.SignalProcessor
import inr.numass.data.ChernovProcessor
import inr.numass.data.api.*
import inr.numass.utils.ExpressionUtils
/**
* An analyzer dispatcher which uses different analyzer for different meta
@ -68,4 +71,33 @@ class SmartAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proce
} else super.getTableFormat(config)
}
}
/**
 * Analyzes every point of the set, recomputing the channel window for each point:
 * a window bound ("window.lo"/"window.up") may be either a plain number or a
 * string expression of the point voltage (bound to the variable "U").
 */
override fun analyzeSet(set: NumassSet, config: Meta): Table {
    // Resolve a single window bound for a concrete point.
    fun resolveBound(bound: Value, point: NumassPoint): Int = when (bound.type) {
        ValueType.NUMBER -> bound.int
        ValueType.STRING -> {
            val exprParams = HashMap<String, Any>()
            exprParams["U"] = point.voltage
            ExpressionUtils.function(bound.string, exprParams).toInt()
        }
        else -> error("Can't interpret ${bound.type} as expression or number")
    }

    val loBound = config.getValue("window.lo", 0)
    val upBound = config.getValue("window.up", Int.MAX_VALUE)

    // Analyze each point with its own per-voltage window configuration.
    val rows = set.points.map { point ->
        val pointConfig = config.builder.apply {
            setValue("window.lo", resolveBound(loBound, point))
            setValue("window.up", resolveBound(upBound, point))
        }
        analyzeParent(point, pointConfig)
    }

    return ListTable.Builder(getTableFormat(config)).rows(rows).build()
}
}

View File

@ -108,7 +108,7 @@ fun main(args: Array<String>) {
.filter { pair -> pair.second <= t0 }
.map { it.first }
val pileupSpectrum = getAmplitudeSpectrum(sequence, point.length.toMillis().toDouble() / 1000.0).withBinning(20)
val pileupSpectrum = sequence.getAmplitudeSpectrum(point.length.toMillis().toDouble() / 1000.0).withBinning(20)
group.add(DataPlot.plot("pileup", pileupSpectrum, AMPLITUDE_ADAPTER))

View File

@ -110,7 +110,7 @@ fun main(args: Array<String>) {
.filter { pair -> pair.second <= t0 }
.map { it.first }
val pileupSpectrum = getAmplitudeSpectrum(sequence, point.length.toMillis().toDouble() / 1000.0).withBinning(20)
val pileupSpectrum = sequence.getAmplitudeSpectrum(point.length.toMillis().toDouble() / 1000.0).withBinning(20)
group.add(DataPlot.plot("pileup", pileupSpectrum, AMPLITUDE_ADAPTER))

View File

@ -1,5 +1,8 @@
package inr.numass.scripts.tristan
import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import inr.numass.data.ProtoNumassPoint
import inr.numass.data.plotAmplitudeSpectrum
import inr.numass.data.transformChain
@ -7,19 +10,22 @@ import kotlinx.coroutines.runBlocking
import java.io.File
fun main(args: Array<String>) {
val file = File("D:\\Work\\Numass\\data\\TRISTAN_11_2017\\df\\gun_16_19.df").toPath()
Global.output = FXOutputManager()
JFreeChartPlugin().startGlobal()
val file = File("D:\\Work\\Numass\\data\\2018_04\\Fill_3\\set_4\\p129(30s)(HV1=13000)").toPath()
val point = ProtoNumassPoint.readFile(file)
point.plotAmplitudeSpectrum()
Global.plotAmplitudeSpectrum(point)
point.blocks.firstOrNull { it.channel == 0 }?.let {
it.plotAmplitudeSpectrum(plotName = "0") {
Global.plotAmplitudeSpectrum(it, plotName = "0") {
"title" to "pixel 0"
"binning" to 50
}
}
point.blocks.firstOrNull { it.channel == 4 }?.let {
it.plotAmplitudeSpectrum(plotName = "4") {
Global.plotAmplitudeSpectrum(it, plotName = "4") {
"title" to "pixel 4"
"binning" to 50
}
@ -29,7 +35,7 @@ fun main(args: Array<String>) {
runBlocking {
listOf(0, 20, 50, 100, 200).forEach { window ->
point.transformChain { first, second ->
Global.plotAmplitudeSpectrum(point.transformChain { first, second ->
val dt = second.timeOffset - first.timeOffset
if (second.channel == 4 && first.channel == 0 && dt > window && dt < 1000) {
Pair((first.amplitude + second.amplitude).toShort(), second.timeOffset)
@ -38,7 +44,7 @@ fun main(args: Array<String>) {
}
}.also {
println("Number of events for $window is ${it.events.count()}")
}.plotAmplitudeSpectrum(plotName = "filtered.before.$window") {
}, plotName = "filtered.before.$window") {
"binning" to 50
}
@ -46,7 +52,7 @@ fun main(args: Array<String>) {
listOf(0, 20, 50, 100, 200).forEach { window ->
point.transformChain { first, second ->
Global.plotAmplitudeSpectrum(point.transformChain { first, second ->
val dt = second.timeOffset - first.timeOffset
if (second.channel == 0 && first.channel == 4 && dt > window && dt < 1000) {
Pair((first.amplitude + second.amplitude).toShort(), second.timeOffset)
@ -55,11 +61,13 @@ fun main(args: Array<String>) {
}
}.also {
println("Number of events for $window is ${it.events.count()}")
}.plotAmplitudeSpectrum(plotName = "filtered.after.$window") {
}, plotName = "filtered.after.$window") {
"binning" to 50
}
}
}
readLine()
}

View File

@ -0,0 +1,51 @@
package inr.numass.scripts.tristan
import hep.dataforge.meta.Meta
import hep.dataforge.meta.value
import hep.dataforge.useValue
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.NumassEvent
object TristanAnalyzer : TimeAnalyzer() {
    /**
     * Returns the block events with pile-up summation: consecutive events closer
     * than `summTime` nanoseconds are merged into a single event whose amplitude
     * is the sum of the merged amplitudes (timestamp of the first event is kept).
     * Events whose delay falls in `[summTime, t0]` are dropped — presumably
     * dead-time rejection; confirm against the analysis meta.
     *
     * Meta keys:
     *  * `summTime` (default 200) — summation window in nanoseconds;
     *  * `allowedChannels` — optional list of channel numbers to keep.
     */
    override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
        val t0 = getT0(block, meta).toLong()
        val summTime = meta.getInt("summTime", 200) //time limit in nanos for event summation
        var sequence = sequence {
            var last: NumassEvent? = null
            var amp: Int = 0
            getEventsWithDelay(block, meta).forEach { (event, time) ->
                when {
                    last == null -> {
                        // First event: start the accumulator from its amplitude.
                        // (Previously amp stayed 0 here, so the first event's
                        // amplitude was lost if the next event piled up.)
                        last = event
                        amp = event.amplitude.toInt()
                    }
                    time < 0 -> error("Can't be")
                    time < summTime -> {
                        //add to amplitude
                        amp += event.amplitude
                    }
                    time > t0 -> {
                        //accept new event and reset summator
                        if (amp != 0) {
                            //construct new event with pileup
                            yield(NumassEvent(amp.toShort(), last!!.timeOffset, last!!.owner))
                        } else {
                            //yield event without changes if there is no pileup
                            yield(last!!)
                        }
                        last = event
                        amp = event.amplitude.toInt()
                    }
                    //else ignore event
                }
            }
            // NOTE(review): the last pending event is never yielded — confirm
            // whether dropping the trailing event of each block is intended.
        }
        meta.useValue("allowedChannels") { channelsValue ->
            val allowed = channelsValue.list.map { it.int }
            sequence = sequence.filter { it.channel in allowed }
        }
        return sequence.toList()
    }
}

View File

@ -1,5 +1,8 @@
package inr.numass.scripts.tristan
import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import inr.numass.data.ProtoNumassPoint
import inr.numass.data.api.MetaBlock
import inr.numass.data.api.NumassBlock
@ -18,11 +21,17 @@ private fun NumassPoint.getChannels(): Map<Int, NumassBlock> {
}
}
fun main(args: Array<String>) {
val file = File("D:\\Work\\Numass\\data\\17kV\\processed.df").toPath()
fun main() {
    val pointPath = File("D:\\Work\\Numass\\data\\2018_04\\Fill_3\\set_4\\p129(30s)(HV1=13000)").toPath()
    val point = ProtoNumassPoint.readFile(pointPath)
    println(point.meta)

    // JavaFX output with JFreeChart rendering for the global context.
    Global.output = FXOutputManager()
    JFreeChartPlugin().startGlobal()

    // One amplitude-spectrum frame per hardware channel.
    for ((channel, block) in point.getChannels()) {
        Global.plotAmplitudeSpectrum(numassBlock = block, plotName = channel.toString())
    }
    // Keep the JVM alive until the plots are inspected.
    readLine()
}

View File

@ -0,0 +1,41 @@
package inr.numass.scripts.tristan
import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.fx.plots.group
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import hep.dataforge.plots.output.plotFrame
import inr.numass.data.plotAmplitudeSpectrum
import inr.numass.data.storage.readNumassSet
import java.io.File
fun main() {
    // JavaFX output with JFreeChart rendering for the global context.
    Global.output = FXOutputManager()
    JFreeChartPlugin().startGlobal()

    val setPath = File("D:\\Work\\Numass\\data\\2018_04\\Fill_3\\set_36").toPath()
    val set = Global.readNumassSet(setPath)

    val voltages = listOf(12000.0, 13000.0, 14000.0, 14900.0)

    // Compare summed ("cut") vs non-summed ("uncut") spectra for selected points.
    Global.plotFrame("compare") {
        for (voltage in voltages) {
            val point = set.optPoint(voltage).get()
            group("${set.name}/p${point.index}[${point.voltage}]") {
                plotAmplitudeSpectrum(point, "cut", analyzer = TristanAnalyzer) {
//                    "t0" to 3e3
                    "summTime" to 200
                    "sortEvents" to true
                    "inverted" to false
                }
                plotAmplitudeSpectrum(point, "uncut", analyzer = TristanAnalyzer) {
                    "summTime" to 0
                    "sortEvents" to true
                    "inverted" to false
                }
            }
        }
    }
    // Keep the JVM alive until the plots are inspected.
    readLine()
}

View File

@ -0,0 +1,35 @@
package inr.numass.scripts.tristan
import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.fx.plots.group
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import hep.dataforge.plots.output.plotFrame
import inr.numass.data.plotAmplitudeSpectrum
import inr.numass.data.storage.readNumassSet
import java.io.File
fun main() {
    // JavaFX output with JFreeChart rendering for the global context.
    Global.output = FXOutputManager()
    JFreeChartPlugin().startGlobal()

    val setPath = File("D:\\Work\\Numass\\data\\2018_04\\Fill_3\\set_36").toPath()
    val set = Global.readNumassSet(setPath)

    val voltages = listOf(12000.0, 13000.0, 14000.0, 14900.0)

    // Compare t0-cut vs uncut spectra for selected points.
    Global.plotFrame("compare") {
        for (voltage in voltages) {
            val point = set.optPoint(voltage).get()
            group("${set.name}/p${point.index}[${point.voltage}]") {
                plotAmplitudeSpectrum(point, "cut") {
                    "t0" to 3e3
                    "sortEvents" to true
                }
                plotAmplitudeSpectrum(point, "uncut")
            }
        }
    }
    // Keep the JVM alive until the plots are inspected.
    readLine()
}

View File

@ -0,0 +1,36 @@
package inr.numass.scripts.tristan
import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.fx.plots.group
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import hep.dataforge.plots.output.plotFrame
import inr.numass.data.plotAmplitudeSpectrum
import inr.numass.data.storage.readNumassSet
import java.io.File
fun main() {
    // JavaFX output with JFreeChart rendering for the global context.
    Global.output = FXOutputManager()
    JFreeChartPlugin().startGlobal()

    val setPath = File("D:\\Work\\Numass\\data\\2018_04\\Fill_3\\set_36").toPath()
    val set = Global.readNumassSet(setPath)

    val voltages = listOf(12000.0, 13000.0, 14000.0, 14900.0)

    // Compare t0-cut vs uncut spectra for channel 0 of selected points.
    Global.plotFrame("compare") {
        for (voltage in voltages) {
            val point = set.optPoint(voltage).get()
            val channelBlock = point.channel(0)!! // script assumes channel 0 exists
            group("${set.name}/p${point.index}[${point.voltage}]") {
                plotAmplitudeSpectrum(channelBlock, "cut") {
                    "t0" to 3e3
                    "sortEvents" to true
                }
                plotAmplitudeSpectrum(channelBlock, "uncut")
            }
        }
    }
    // Keep the JVM alive until the plots are inspected.
    readLine()
}

View File

@ -18,9 +18,7 @@ import hep.dataforge.stat.models.XYModel
import hep.dataforge.tables.*
import hep.dataforge.useMeta
import hep.dataforge.useValue
import hep.dataforge.values.ValueType
import hep.dataforge.values.Values
import hep.dataforge.values.asValue
import hep.dataforge.values.*
import hep.dataforge.workspace.tasks.task
import inr.numass.NumassUtils
import inr.numass.actions.MergeDataAction
@ -90,20 +88,23 @@ val analyzeTask = task("analyze") {
}
}
pipe<NumassSet, Table> { set ->
SmartAnalyzer().analyzeSet(set, meta.getMeta("analyzer")).also { res ->
val outputMeta = meta.builder.putNode("data", set.meta)
context.output.render(res, stage = "numass.analyze", name = name, meta = outputMeta)
}
val res = SmartAnalyzer().analyzeSet(set, meta.getMeta("analyzer"))
val outputMeta = meta.builder.putNode("data", set.meta)
context.output.render(res, stage = "numass.analyze", name = name, meta = outputMeta)
return@pipe res
}
}
val monitorTableTask = task("monitor") {
descriptor {
value("showPlot", types = listOf(ValueType.BOOLEAN), info = "Show plot after complete")
value("monitorVoltage", types = listOf(ValueType.NUMBER), info = "The voltage for monitor point")
value("monitorPoint", types = listOf(ValueType.NUMBER), info = "The voltage for monitor point")
}
model { meta ->
dependsOn(selectTask, meta)
// if (meta.getBoolean("monitor.correctForThreshold", false)) {
// dependsOn(subThresholdTask, meta, "threshold")
// }
configure(meta.getMetaOrEmpty("monitor"))
configure {
meta.useMeta("analyzer") { putNode(it) }
@ -111,16 +112,26 @@ val monitorTableTask = task("monitor") {
}
}
join<NumassSet, Table> { data ->
val monitorVoltage = meta.getDouble("monitorVoltage", 16000.0);
val monitorVoltage = meta.getDouble("monitorPoint", 16000.0);
val analyzer = SmartAnalyzer()
val analyzerMeta = meta.getMetaOrEmpty("analyzer")
//val thresholdCorrection = da
//TODO add separator labels
val res = ListTable.Builder("timestamp", "count", "cr", "crErr")
val res = ListTable.Builder("timestamp", "count", "cr", "crErr", "index", "set")
.rows(
data.values.stream().parallel()
.flatMap { it.points.stream() }
.filter { it.voltage == monitorVoltage }
.map { it -> analyzer.analyzeParent(it, analyzerMeta) }
data.values.stream().flatMap { set ->
set.points.stream()
.filter { it.voltage == monitorVoltage }
.parallel()
.map { point ->
analyzer.analyzeParent(point, analyzerMeta).edit {
"index" to point.index
"set" to set.name
}
}
}
).build()
if (meta.getBoolean("showPlot", true)) {
@ -143,7 +154,7 @@ val monitorTableTask = task("monitor") {
val mergeTask = task("merge") {
model { meta ->
dependsOn(analyzeTask, meta)
dependsOn(transformTask, meta)
configure(meta.getMetaOrEmpty("merge"))
}
action(MergeDataAction)
@ -175,9 +186,14 @@ val mergeEmptyTask = task("empty") {
val subtractEmptyTask = task("dif") {
model { meta ->
dependsOn(mergeTask, meta, "data")
dependsOn(mergeEmptyTask, meta, "empty")
if (meta.hasMeta("empty")) {
dependsOn(mergeEmptyTask, meta, "empty")
}
}
transform<Table> { data ->
//ignore if there is no empty data
if (!meta.hasMeta("empty")) return@transform data
val builder = DataTree.edit(Table::class)
val rootNode = data.getCheckedNode("data", Table::class.java)
val empty = data.getCheckedNode("empty", Table::class.java).data
@ -202,24 +218,23 @@ val subtractEmptyTask = task("dif") {
val transformTask = task("transform") {
model { meta ->
if (meta.hasMeta("merge")) {
if (meta.hasMeta("empty")) {
dependsOn(subtractEmptyTask, meta)
} else {
dependsOn(mergeTask, meta);
}
} else {
dependsOn(analyzeTask, meta);
}
dependsOn(analyzeTask, meta)
}
action<Table, Table>(TransformDataAction)
action(TransformDataAction)
}
val filterTask = task("filter") {
model { meta ->
dependsOn(transformTask, meta)
if (meta.hasMeta("merge")) {
dependsOn(subtractEmptyTask, meta)
} else {
dependsOn(analyzeTask, meta)
}
}
pipe<Table, Table> { data ->
if (!meta.hasMeta("filter")) return@pipe data
if (meta.hasValue("from") || meta.hasValue("to")) {
val uLo = meta.getDouble("from", 0.0)
val uHi = meta.getDouble("to", java.lang.Double.POSITIVE_INFINITY)
@ -333,10 +348,10 @@ val histogramTask = task("histogram") {
data.flatMap { it.value.points }
.filter { points == null || points.contains(it.voltage) }
.groupBy { it.voltage }
.mapValues {
analyzer.getAmplitudeSpectrum(MetaBlock(it.value), meta.getMetaOrEmpty("analyzer"))
.mapValues { (_, value) ->
analyzer.getAmplitudeSpectrum(MetaBlock(value), meta.getMetaOrEmpty("analyzer"))
}
.forEach { u, spectrum ->
.forEach { (u, spectrum) ->
log.report("Aggregating data from U = $u")
spectrum.forEach {
val channel = it[CHANNEL_KEY].int
@ -360,10 +375,10 @@ val histogramTask = task("histogram") {
log.report("Combining spectra")
val format = MetaTableFormat.forNames(names)
val table = buildTable(format) {
aggregator.forEach { channel, counters ->
aggregator.forEach { (channel, counters) ->
val values: MutableMap<String, Any> = HashMap()
values[NumassAnalyzer.CHANNEL_KEY] = channel
counters.forEach { u, counter ->
values[CHANNEL_KEY] = channel
counters.forEach { (u, counter) ->
if (normalize) {
values["U$u"] = counter.get().toDouble() / times.getValue(u)
} else {
@ -375,12 +390,12 @@ val histogramTask = task("histogram") {
}
row(values)
}
}.sumByStep(NumassAnalyzer.CHANNEL_KEY, meta.getDouble("binning", 16.0)) //apply binning
}.sumByStep(CHANNEL_KEY, meta.getDouble("binning", 16.0)) //apply binning
// send raw table to the output
context.output.render(table, stage = "numass.histogram", name = name) {
update(meta)
data.toSortedMap().forEach { name, set ->
data.toSortedMap().forEach { (name, set) ->
putNode("data", buildMeta {
"name" to name
set.meta.useMeta("iteration_info") { "iteration" to it }
@ -430,7 +445,7 @@ val sliceTask = task("slice") {
}
val table = buildTable(formatBuilder.build()) {
data.forEach { setName, set ->
data.forEach { (setName, set) ->
val point = set.find {
it.index == meta.getInt("index", -1) ||
it.voltage == meta.getDouble("voltage", -1.0)