Fixed error in numass file read
parent 5b170da7d4
commit 28b7969709
@@ -1,27 +1,30 @@
 package inr.numass.data.api
 
 import hep.dataforge.meta.Meta
-import hep.dataforge.meta.MetaBuilder
 import hep.dataforge.meta.MetaHolder
+import hep.dataforge.meta.buildMeta
 
 /**
  * A simple static implementation of NumassPoint
  * Created by darksnake on 08.07.2017.
  */
-class SimpleNumassPoint(override val blocks: List<NumassBlock>, meta: Meta, override val isSequential: Boolean = true) : MetaHolder(meta), NumassPoint {
-    /**
-     * Input blocks must be sorted
-     * @param voltage
-     * @param blocks
-     */
-    constructor(blocks: Collection<NumassBlock>, voltage: Double) :
-            this(blocks.sortedBy { it.startTime }, MetaBuilder("point").setValue(NumassPoint.HV_KEY, voltage))
+class SimpleNumassPoint(override val blocks: List<NumassBlock>, meta: Meta, override val isSequential: Boolean = true) :
+    MetaHolder(meta), NumassPoint {
 
     init {
-        if(blocks.isEmpty()){
+        if (blocks.isEmpty()) {
             throw IllegalArgumentException("No blocks in collection")
         }
     }
 
+    companion object {
+        fun build(blocks: Collection<NumassBlock>, voltage: Double? = null, index: Int? = null): SimpleNumassPoint {
+            val meta = buildMeta("point") {
+                NumassPoint.HV_KEY to voltage
+                NumassPoint.INDEX_KEY to index
+            }
+            return SimpleNumassPoint(blocks.sortedBy { it.startTime }, meta.build())
+        }
+    }
 
 }
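The hunk above replaces the voltage-only secondary constructor of SimpleNumassPoint with a companion `build` factory that sorts the blocks by start time and stores the optional voltage and index in the point meta. A minimal usage sketch (illustrative only, not part of the commit; `someBlocks` is a hypothetical collection of `NumassBlock` obtained elsewhere):

```kotlin
import inr.numass.data.api.NumassBlock
import inr.numass.data.api.SimpleNumassPoint

// Hypothetical helper: wrap existing blocks into a point taken at a given HV setting.
fun makePoint(someBlocks: Collection<NumassBlock>): SimpleNumassPoint =
    // voltage and index are optional; build() sorts the blocks by startTime
    SimpleNumassPoint.build(someBlocks, voltage = 14000.0, index = 3)
```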
@@ -21,9 +21,7 @@ import hep.dataforge.meta.Meta
 import hep.dataforge.meta.MetaBuilder
 import inr.numass.data.api.*
 import inr.numass.data.storage.ClassicNumassPoint
-import java.util.stream.Collectors
 import kotlin.streams.asSequence
-import kotlin.streams.toList
 
 
 /**
@@ -35,10 +33,8 @@ object NumassDataUtils {
             override suspend fun getHvData() = TODO()
 
             override val points: List<NumassPoint> by lazy {
-                val points = sets.stream().flatMap<NumassPoint> { it.points.stream() }
-                        .collect(Collectors.groupingBy<NumassPoint, Double> { it.voltage })
-                points.entries.stream().map { entry -> SimpleNumassPoint(entry.value, entry.key) }
-                        .toList()
+                val points = sets.flatMap { it.points }.groupBy { it.voltage }
+                return@lazy points.entries.map { entry -> SimpleNumassPoint.build(entry.value, entry.key) }
             }
 
             override val meta: Meta by lazy {
@@ -51,6 +47,30 @@ object NumassDataUtils {
         }
     }
 
+    fun joinByIndex(setName: String, sets: Collection<NumassSet>): NumassSet {
+        return object : NumassSet {
+            override suspend fun getHvData() = TODO()
+
+            override val points: List<NumassPoint> by lazy {
+                val points = sets.flatMap { it.points }.groupBy { it.index }
+                return@lazy points.map { (index, points) ->
+                    val voltage = points.first().voltage
+                    if (!points.all { it.voltage == voltage }) error("Not all points with same index have same voltage")
+                    SimpleNumassPoint.build(points, voltage, index)
+                }
+            }
+
+            override val meta: Meta by lazy {
+                val metaBuilder = MetaBuilder()
+                sets.forEach { set -> metaBuilder.putNode(set.name, set.meta) }
+                metaBuilder
+            }
+
+            override val name = setName
+        }
+    }
+
+
     fun adapter(): SpectrumAdapter {
         return SpectrumAdapter("Uset", "CR", "CRerr", "Time")
     }
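The new `joinByIndex` helper complements `join`: instead of grouping points by voltage, it groups them by point index and checks that all points sharing an index have the same voltage. A usage sketch (illustrative only; `setA` and `setB` are placeholder `NumassSet` instances, and the import path of `NumassDataUtils` is assumed since the diff does not show it):

```kotlin
import inr.numass.data.NumassDataUtils   // package assumed, not shown in this diff
import inr.numass.data.api.NumassSet

fun mergeByIndex(setA: NumassSet, setB: NumassSet): NumassSet =
    // points with the same index across both sets are combined into one SimpleNumassPoint
    NumassDataUtils.joinByIndex("joined", listOf(setA, setB))
```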
@@ -67,18 +87,18 @@ object NumassDataUtils {
 suspend fun NumassBlock.transformChain(transform: (NumassEvent, NumassEvent) -> Pair<Short, Long>?): NumassBlock {
     return SimpleBlock.produce(this.startTime, this.length) {
         this.events.asSequence()
             .sortedBy { it.timeOffset }
             .zipWithNext(transform)
             .filterNotNull()
             .map { OrphanNumassEvent(it.first, it.second) }.asIterable()
     }
 }
 
 suspend fun NumassBlock.filterChain(condition: (NumassEvent, NumassEvent) -> Boolean): NumassBlock {
     return SimpleBlock.produce(this.startTime, this.length) {
         this.events.asSequence()
             .sortedBy { it.timeOffset }
             .zipWithNext().filter { condition.invoke(it.first, it.second) }.map { it.second }.asIterable()
     }
 }
 
@@ -91,7 +111,7 @@ suspend fun NumassBlock.filter(condition: (NumassEvent) -> Boolean): NumassBlock
 suspend fun NumassBlock.transform(transform: (NumassEvent) -> OrphanNumassEvent): NumassBlock {
     return SimpleBlock.produce(this.startTime, this.length) {
         this.events.asSequence()
             .map { transform(it) }
             .asIterable()
     }
 }
@@ -22,6 +22,7 @@ import hep.dataforge.tables.ListTable
 import hep.dataforge.tables.Table
 import hep.dataforge.tables.TableFormat
 import hep.dataforge.tables.TableFormatBuilder
+import hep.dataforge.toList
 import inr.numass.data.api.NumassBlock
 import inr.numass.data.api.NumassEvent
 import inr.numass.data.api.NumassPoint.Companion.HV_KEY
@@ -43,7 +44,7 @@ abstract class AbstractAnalyzer @JvmOverloads constructor(private val processor:
      * @param block
      * @return
      */
-    override fun getEvents(block: NumassBlock, meta: Meta): Stream<NumassEvent> {
+    override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
         val loChannel = meta.getInt("window.lo", 0)
         val upChannel = meta.getInt("window.up", Integer.MAX_VALUE)
         // if (meta.getBoolean("sort", false)) {
@@ -51,7 +52,7 @@ abstract class AbstractAnalyzer @JvmOverloads constructor(private val processor:
         // }
         return getAllEvents(block).filter { event ->
             event.amplitude.toInt() in loChannel..(upChannel - 1)
-        }
+        }.toList()
     }
 
     protected fun getAllEvents(block: NumassBlock): Stream<NumassEvent> {
@@ -28,7 +28,6 @@ import java.util.*
 import java.util.concurrent.atomic.AtomicLong
 import java.util.concurrent.atomic.AtomicReference
 import java.util.stream.IntStream
-import java.util.stream.Stream
 import kotlin.streams.asSequence
 
 /**
@@ -67,7 +66,7 @@ interface NumassAnalyzer {
      * @param block
      * @return
      */
-    fun getEvents(block: NumassBlock, meta: Meta = Meta.empty()): Stream<NumassEvent>
+    fun getEvents(block: NumassBlock, meta: Meta = Meta.empty()): List<NumassEvent>
 
     /**
      * Analyze the whole set. And return results as a table
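Since `getEvents` now returns a `List<NumassEvent>` rather than a `java.util.stream.Stream`, callers get a collection that can be traversed more than once and used with ordinary Kotlin collection operators. A small caller-side sketch (illustrative only; the import path of `NumassAnalyzer` is assumed):

```kotlin
import hep.dataforge.meta.Meta
import inr.numass.data.analyzers.NumassAnalyzer   // package assumed
import inr.numass.data.api.NumassBlock

fun summarize(analyzer: NumassAnalyzer, block: NumassBlock) {
    val events = analyzer.getEvents(block, Meta.empty())
    // a List can be iterated repeatedly, unlike a single-use Stream
    println("events: ${events.size}")
    println("above channel 500: ${events.count { it.amplitude.toInt() > 500 }}")
}
```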
@@ -24,7 +24,6 @@ import hep.dataforge.values.Values
 import inr.numass.data.api.NumassBlock
 import inr.numass.data.api.NumassEvent
 import inr.numass.data.api.SignalProcessor
-import java.util.stream.Stream
 
 /**
  * An analyzer dispatcher which uses different analyzer for different meta
@@ -59,7 +58,7 @@ class SmartAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proce
         return ValueMap(map)
     }
 
-    override fun getEvents(block: NumassBlock, meta: Meta): Stream<NumassEvent> {
+    override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
         return getAnalyzer(meta).getEvents(block, meta)
     }
 
@@ -32,10 +32,8 @@ import inr.numass.data.api.*
 import inr.numass.data.api.NumassPoint.Companion.HV_KEY
 import java.util.*
 import java.util.concurrent.atomic.AtomicLong
-import java.util.stream.Stream
 import kotlin.math.sqrt
 import kotlin.streams.asSequence
-import kotlin.streams.asStream
 
 
 /**
@@ -43,8 +41,17 @@ import kotlin.streams.asStream
  * Created by darksnake on 11.07.2017.
  */
 @ValueDefs(
-    ValueDef(key = "separateParallelBlocks", type = [ValueType.BOOLEAN], info = "If true, then parallel blocks will be forced to be evaluated separately"),
-    ValueDef(key = "chunkSize", type = [ValueType.NUMBER], def = "-1", info = "The number of events in chunk to split the chain into. If negative, no chunks are used")
+    ValueDef(
+        key = "separateParallelBlocks",
+        type = [ValueType.BOOLEAN],
+        info = "If true, then parallel blocks will be forced to be evaluated separately"
+    ),
+    ValueDef(
+        key = "chunkSize",
+        type = [ValueType.NUMBER],
+        def = "-1",
+        info = "The number of events in chunk to split the chain into. If negative, no chunks are used"
+    )
 )
 class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(processor) {
 
@@ -60,28 +67,29 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
 
         val chunkSize = config.getInt("chunkSize", -1)
 
-        val count = block.events.count()
-        val length = block.length.toMillis().toDouble() / 1e3
+        val count = super.getEvents(block, config).count()
+        val length = block.length.toNanos().toDouble() / 1e9
 
         val res = when {
             count < 1000 -> ValueMap.ofPairs(
                 NumassAnalyzer.LENGTH_KEY to length,
                 NumassAnalyzer.COUNT_KEY to count,
                 NumassAnalyzer.COUNT_RATE_KEY to count.toDouble() / length,
                 NumassAnalyzer.COUNT_RATE_ERROR_KEY to sqrt(count.toDouble()) / length
             )
             chunkSize > 0 -> getEventsWithDelay(block, config)
                 .chunked(chunkSize) { analyzeSequence(it.asSequence(), t0) }
                 .toList()
                 .mean(config.getEnum("mean", WEIGHTED))
             else -> analyzeSequence(getEventsWithDelay(block, config), t0)
         }
 
         return ValueMap.Builder(res)
+            .putValue("blockLength", length)
             .putValue(NumassAnalyzer.WINDOW_KEY, arrayOf(loChannel, upChannel))
             .putValue(NumassAnalyzer.TIME_KEY, block.startTime)
             .putValue(T0_KEY, t0.toDouble() / 1000.0)
             .build()
     }
 
 
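The block length is now derived from nanoseconds (`toNanos() / 1e9`) instead of milliseconds (`toMillis() / 1e3`); both yield seconds, but the nanosecond route keeps sub-millisecond precision. A self-contained comparison of the two conversions (illustrative only, not part of the commit):

```kotlin
import java.time.Duration

fun main() {
    // a length of ~2.5 s plus a fraction of a millisecond
    val length = Duration.ofNanos(2_500_000_000L + 123_456L)
    val fine = length.toNanos().toDouble() / 1e9    // keeps the sub-millisecond part
    val coarse = length.toMillis().toDouble() / 1e3 // truncates below one millisecond
    println("$fine vs $coarse")
}
```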
@@ -89,26 +97,27 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
         val totalN = AtomicLong(0)
         val totalT = AtomicLong(0)
         sequence.filter { pair -> pair.second >= t0 }
             .forEach { pair ->
                 totalN.incrementAndGet()
                 //TODO add progress listener here
                 totalT.addAndGet(pair.second)
             }
 
         if (totalN.toInt() == 0) {
             error("Zero number of intervals")
         }
 
-        val countRate = 1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000)//1e9 / (totalT.get() / totalN.get() - t0);
+        val countRate =
+            1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000)//1e9 / (totalT.get() / totalN.get() - t0);
         val countRateError = countRate / Math.sqrt(totalN.get().toDouble())
         val length = totalT.get() / 1e9
         val count = (length * countRate).toLong()
 
         return ValueMap.ofPairs(
             NumassAnalyzer.LENGTH_KEY to length,
             NumassAnalyzer.COUNT_KEY to count,
             NumassAnalyzer.COUNT_RATE_KEY to countRate,
             NumassAnalyzer.COUNT_RATE_ERROR_KEY to countRateError
         )
 
     }
@@ -139,43 +148,50 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
 
         if (this.isEmpty()) {
             return ValueMap.Builder()
                 .putValue(LENGTH_KEY, 0)
                 .putValue(COUNT_KEY, 0)
                 .putValue(COUNT_RATE_KEY, 0)
                 .putValue(COUNT_RATE_ERROR_KEY, 0)
                 .build()
         }
 
         val totalTime = sumByDouble { it.getDouble(LENGTH_KEY) }
 
         val (countRate, countRateDispersion) = when (method) {
             ARITHMETIC -> Pair(
                 sumByDouble { it.getDouble(COUNT_RATE_KEY) } / size,
                 sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY), 2.0) } / size / size
             )
             WEIGHTED -> Pair(
                 sumByDouble { it.getDouble(COUNT_RATE_KEY) * it.getDouble(LENGTH_KEY) } / totalTime,
                 sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY) * it.getDouble(LENGTH_KEY) / totalTime, 2.0) }
             )
             GEOMETRIC -> {
                 val mean = Math.exp(sumByDouble { Math.log(it.getDouble(COUNT_RATE_KEY)) } / size)
-                val variance = Math.pow(mean / size, 2.0) * sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY) / it.getDouble(COUNT_RATE_KEY), 2.0) }
+                val variance = Math.pow(
+                    mean / size,
+                    2.0
+                ) * sumByDouble { Math.pow(it.getDouble(COUNT_RATE_ERROR_KEY) / it.getDouble(COUNT_RATE_KEY), 2.0) }
                 Pair(mean, variance)
             }
         }
 
         return ValueMap.Builder(first())
             .putValue(LENGTH_KEY, totalTime)
             .putValue(COUNT_KEY, sumBy { it.getInt(COUNT_KEY) })
             .putValue(COUNT_RATE_KEY, countRate)
             .putValue(COUNT_RATE_ERROR_KEY, sqrt(countRateDispersion))
             .build()
     }
 
     @ValueDefs(
         ValueDef(key = "t0", type = arrayOf(ValueType.NUMBER), info = "Constant t0 cut"),
-        ValueDef(key = "t0.crFraction", type = arrayOf(ValueType.NUMBER), info = "The relative fraction of events that should be removed by time cut"),
-        ValueDef(key = "t0.min", type = arrayOf(ValueType.NUMBER), def = "0", info = "Minimal t0")
+        ValueDef(
+            key = "t0.crFraction",
+            type = arrayOf(ValueType.NUMBER),
+            info = "The relative fraction of events that should be removed by time cut"
+        ),
+        ValueDef(key = "t0.min", type = arrayOf(ValueType.NUMBER), def = "0", info = "Minimal t0")
     )
     private fun getT0(block: NumassBlock, meta: Meta): Int {
         return if (meta.hasValue("t0")) {
@@ -211,12 +227,11 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
      */
     fun getEventsWithDelay(block: NumassBlock, config: Meta): Sequence<Pair<NumassEvent, Long>> {
         val inverted = config.getBoolean("inverted", true)
-        val events: Stream<NumassEvent> = super.getEvents(block, config).let { stream ->
-            if (block is ParentBlock && !block.isSequential) {
-                stream.sorted(compareBy { it.timeOffset })
-            } else {
-                stream
-            }
-        }
+        val events = super.getEvents(block, config).toMutableList()
+
+        if (block is ParentBlock && !block.isSequential) {
+            //sort in place if needed
+            events.sortBy { it.timeOffset }
+        }
 
         return events.asSequence().zipWithNext { prev, next ->
@@ -236,35 +251,35 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
      * @param meta
      * @return
      */
-    override fun getEvents(block: NumassBlock, meta: Meta): Stream<NumassEvent> {
+    override fun getEvents(block: NumassBlock, meta: Meta): List<NumassEvent> {
         val t0 = getT0(block, meta).toLong()
-        return getEventsWithDelay(block, meta).filter { pair -> pair.second >= t0 }.asStream().map { it.first }
+        return getEventsWithDelay(block, meta).filter { pair -> pair.second >= t0 }.map { it.first }.toList()
     }
 
     public override fun getTableFormat(config: Meta): TableFormat {
         return TableFormatBuilder()
             .addNumber(HV_KEY, X_VALUE_KEY)
             .addNumber(NumassAnalyzer.LENGTH_KEY)
             .addNumber(NumassAnalyzer.COUNT_KEY)
             .addNumber(NumassAnalyzer.COUNT_RATE_KEY, Y_VALUE_KEY)
             .addNumber(NumassAnalyzer.COUNT_RATE_ERROR_KEY, Y_ERROR_KEY)
             .addColumn(NumassAnalyzer.WINDOW_KEY)
             .addTime()
             .addNumber(T0_KEY)
             .build()
     }
 
     companion object {
         const val T0_KEY = "t0"
 
         val NAME_LIST = arrayOf(
             NumassAnalyzer.LENGTH_KEY,
             NumassAnalyzer.COUNT_KEY,
             NumassAnalyzer.COUNT_RATE_KEY,
             NumassAnalyzer.COUNT_RATE_ERROR_KEY,
             NumassAnalyzer.WINDOW_KEY,
             NumassAnalyzer.TIME_KEY,
             T0_KEY
         )
     }
 }
@@ -15,7 +15,7 @@ import java.nio.file.Path
 import java.time.Duration
 import java.time.Instant
 import java.util.stream.Stream
-import kotlin.streams.asStream
+import java.util.stream.StreamSupport
 
 /**
  * Created by darksnake on 08.07.2017.
@@ -47,11 +47,12 @@ class ClassicNumassPoint(private val envelope: Envelope) : NumassPoint {
 
     //TODO split blocks using meta
     private inner class ClassicBlock(
         override val startTime: Instant,
-        override val length: Duration) : NumassBlock, Iterable<NumassEvent> {
+        override val length: Duration
+    ) : NumassBlock, Iterable<NumassEvent> {
 
         override val events: Stream<NumassEvent>
-            get() = this.asSequence().asStream()
+            get() = StreamSupport.stream(this.spliterator(), false)
 
         override fun iterator(): Iterator<NumassEvent> {
             val timeCoef = envelope.meta.getDouble("time_coeff", 50.0)
@@ -78,7 +79,8 @@ class ClassicNumassPoint(private val envelope: Envelope) : NumassPoint {
                 }
             }
         } catch (e: IOException) {
-            LoggerFactory.getLogger(this@ClassicNumassPoint.javaClass).error("Unexpected IOException when reading block", e)
+            LoggerFactory.getLogger(this@ClassicNumassPoint.javaClass)
+                .error("Unexpected IOException when reading block", e)
             return false
         }
 
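In `ClassicNumassPoint`, the `events` stream is now produced with `StreamSupport.stream(spliterator(), false)` over the block's own iterator instead of going through `asSequence().asStream()`. A generic, self-contained illustration of this pattern for any `Iterable` (not numass-specific, not part of the commit):

```kotlin
import java.util.stream.Stream
import java.util.stream.StreamSupport

class Numbers(private val values: List<Int>) : Iterable<Int> {
    override fun iterator(): Iterator<Int> = values.iterator()

    // each access builds a new sequential Stream from the Iterable's spliterator
    val stream: Stream<Int>
        get() = StreamSupport.stream(this.spliterator(), false)
}

fun main() {
    val numbers = Numbers(listOf(1, 2, 3))
    println(numbers.stream.count()) // 3
    println(numbers.stream.count()) // 3 again: a fresh stream per call
}
```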
@@ -32,7 +32,7 @@ fun main(args: Array<String>) {
     }
 
 
-    val point = SimpleNumassPoint(blocks, 10000.0)
+    val point = SimpleNumassPoint.build(blocks, 10000.0)
 
     val meta = buildMeta {
         "t0" to 1e7
@@ -25,10 +25,9 @@ import inr.numass.data.api.NumassEvent
 import inr.numass.data.api.NumassSet
 import inr.numass.data.storage.NumassDirectory
 import org.apache.commons.math3.stat.correlation.PearsonsCorrelation
-import java.util.stream.Stream
 
 
-private fun correlation(sequence: Stream<NumassEvent>): Double {
+private fun correlation(sequence: List<NumassEvent>): Double {
     val amplitudes: MutableList<Double> = ArrayList()
     val times: MutableList<Double> = ArrayList()
     sequence.forEach {
@@ -55,7 +55,7 @@ fun main(args: Array<String>) {
         if (!points.isEmpty()) {
             putStatic(
                 "point_${hv.toInt()}",
-                SimpleNumassPoint(points, hv)
+                SimpleNumassPoint.build(points, hv)
             )
         }
     }
@@ -26,7 +26,7 @@ fun main(args: Array<String>) {
     val meta = buildMeta {
         "binNum" to 200
         //"chunkSize" to 10000
         // "mean" to TimeAnalyzer.AveragingMethod.ARITHMETIC
         //"separateParallelBlocks" to true
         "t0" to {
             "step" to 320
@@ -62,10 +62,7 @@ fun main(args: Array<String>) {
             it.voltage == hv && it.channel == 0
         }.toList()
         if (!points.isEmpty()) {
-            putStatic(
-                "point_${hv.toInt()}",
-                SimpleNumassPoint(points, hv)
-            )
+            putStatic("point_${hv.toInt()}", SimpleNumassPoint.build(points, hv))
         }
     }
 }.build()
@@ -39,7 +39,7 @@ fun main(args: Array<String>) {
                 .generateBlock(start.plusNanos(it * length), length)
         }
     }.join(Global) { blocks ->
-        SimpleNumassPoint(blocks, 12000.0)
+        SimpleNumassPoint.build(blocks, 12000.0)
     }.get()
 
 
@@ -59,7 +59,7 @@ fun main(args: Array<String>) {
                 .generateBlock(start.plusNanos(it * length), length)
         }
     }.join(Global) { blocks ->
-        SimpleNumassPoint(blocks, 18000.0)
+        SimpleNumassPoint.build(blocks, 18000.0)
     }.get()
 
 
@@ -0,0 +1,90 @@
+package inr.numass.scripts.timeanalysis
+
+import hep.dataforge.buildContext
+import hep.dataforge.fx.output.FXOutputManager
+import hep.dataforge.meta.buildMeta
+import hep.dataforge.plots.jfreechart.JFreeChartPlugin
+import inr.numass.NumassPlugin
+import inr.numass.data.analyzers.TimeAnalyzer
+import inr.numass.data.api.NumassSet
+import inr.numass.data.storage.NumassDirectory
+
+
+fun main(args: Array<String>) {
+    val context = buildContext("NUMASS", NumassPlugin::class.java, JFreeChartPlugin::class.java) {
+        output = FXOutputManager()
+        rootDir = "D:\\Work\\Numass\\sterile2017_05"
+        dataDir = "D:\\Work\\Numass\\data\\2017_05"
+    }
+
+    val storage = NumassDirectory.read(context, "Fill_3")!!
+
+    val loader = storage.provide("set_10", NumassSet::class.java).get()
+
+    val point = loader.getPoints(18050.00).first()
+
+    val analyzer = TimeAnalyzer()
+
+    val meta = buildMeta("analyzer") {
+        "t0" to 3000
+        "inverted" to false
+        //"chunkSize" to 5000
+        //"mean" to TimeAnalyzer.AveragingMethod.ARITHMETIC
+    }
+
+    println(analyzer.analyze(point, meta))
+
+    println(analyzer.getEventsWithDelay(point.firstBlock, meta ).count())
+    println(point.events.count())
+    println(point.firstBlock.events.count())
+
+//    val time = point.events.asSequence().zipWithNext().map { (p, n) ->
+//        n.timeOffset - p.timeOffset
+//    }.filter { it > 0 }.sum()
+
+    val time = analyzer.getEventsWithDelay(point.firstBlock, meta ).map { it.second }.filter { it > 0 }.sum()
+
+
+
+//    val totalN = AtomicLong(0)
+//    val totalT = AtomicLong(0)
+//
+//    analyzer.getEventsWithDelay(point.firstBlock, meta ).filter { pair -> pair.second >= 3000 }
+//            .forEach { pair ->
+//                totalN.incrementAndGet()
+//                //TODO add progress listener here
+//                totalT.addAndGet(pair.second)
+//            }
+//
+//    val time = totalT.get()
+
+    println(time / 1e9)
+
+//
+//    val cr = 80.0
+//    val length = 5e9.toLong()
+//    val num = 6
+//    val dt = 6.5
+//
+//    val start = Instant.now()
+//
+//    val generator = SynchronizedRandomGenerator(JDKRandomGenerator(2223))
+//
+//    repeat(100) {
+//
+//        val point = (1..num).map {
+//            Global.generate {
+//                NumassGenerator
+//                        .generateEvents(cr , rnd = generator)
+////                        .withDeadTime { (dt * 1000).toLong() }
+//                        .generateBlock(start.plusNanos(it * length), length)
+//            }
+//        }.join(Global) { blocks ->
+//            SimpleNumassPoint.build(blocks, 12000.0)
+//        }.get()
+//
+//
+//        println(analyzer.analyze(point, meta))
+//
+//    }
+}
@@ -63,7 +63,7 @@ object Threshold {
                 .flatMap { set -> set.points.asSequence() }
                 .groupBy { it.voltage }
                 .forEach { key, value ->
-                    val point = SimpleNumassPoint(value, key)
+                    val point = SimpleNumassPoint.build(value, key)
                     val name = key.toInt().toString()
                     dataBuilder.putStatic(name, point, buildMeta("meta", "voltage" to key));
                 }
@@ -139,7 +139,7 @@ object Threshold {
     /**
      * Exponential function $a e^{\frac{x}{\sigma}}$
      */
-    private fun exponential(spectrum: Table, voltage: Double, config: Meta): Values {
+    private fun exponential(point: NumassPoint, spectrum: Table, config: Meta): Values {
         val xLow: Int = config.getInt("xLow", 400)
         val xHigh: Int = config.getInt("xHigh", 700)
         val upper: Int = config.getInt("upper", 3100)
@@ -152,7 +152,8 @@ object Threshold {
         val norm = norm(spectrum, xLow, upper)
 
         return ValueMap.ofPairs(
-            "U" to voltage,
+            "index" to point.index,
+            "U" to point.voltage,
             "a" to a,
             "sigma" to sigma,
             "correction" to a * sigma * Math.exp(xLow / sigma) / norm + 1.0
@@ -192,7 +193,7 @@ object Threshold {
     /**
      * Power function $a (x-\delta)^{\beta}
      */
-    private fun power(spectrum: Table, voltage: Double, config: Meta): Values {
+    private fun power(point: NumassPoint, spectrum: Table, config: Meta): Values {
         val xLow: Int = config.getInt("xLow", 400)
         val xHigh: Int = config.getInt("xHigh", 700)
         val upper: Int = config.getInt("upper", 3100)
@@ -209,7 +210,8 @@ object Threshold {
         val norm = norm(spectrum, xLow, upper)
 
         return ValueMap.ofPairs(
-            "U" to voltage,
+            "index" to point.index,
+            "U" to point.voltage,
             "a" to a,
             "beta" to beta,
             "delta" to delta,
@@ -217,10 +219,10 @@ object Threshold {
         )
     }
 
-    fun calculateSubThreshold(spectrum: Table, voltage: Double, config: Meta): Values {
+    fun calculateSubThreshold(point: NumassPoint, spectrum: Table, config: Meta): Values {
         return when (config.getString("method", "exp")) {
-            "exp" -> exponential(spectrum, voltage, config)
-            "pow" -> power(spectrum, voltage, config)
+            "exp" -> exponential(point, spectrum, config)
+            "pow" -> power(point, spectrum, config)
             else -> throw RuntimeException("Unknown sub threshold calculation method")
         }
     }
@@ -244,8 +246,8 @@ object Threshold {
             }
         }
         LoggerFactory.getLogger(Threshold.javaClass).info("Calculating threshold ${point.voltage}")
-        try {
-            calculateSubThreshold(spectrum, point.voltage, config)
+        return@map try {
+            calculateSubThreshold(point, spectrum, config)
         } catch (ex: Exception) {
             LoggerFactory.getLogger(Threshold.javaClass).error("Failed to fit point ${point.voltage}", ex)
             null
@@ -107,6 +107,7 @@ val monitorTableTask = task("monitor") {
         configure(meta.getMetaOrEmpty("monitor"))
         configure {
             meta.useMeta("analyzer") { putNode(it) }
+            setValue("@target", meta.getString("@target", meta.name))
         }
     }
     join<NumassSet, Table> { data ->
@@ -130,10 +131,11 @@ val monitorTableTask = task("monitor") {
                 "yAxis.title" to "Count rate"
                 "yAxis.units" to "Hz"
             }
 
             ((context.output["numass.monitor", name] as? PlotOutput)?.frame as? JFreeChartFrame)?.addSetMarkers(data.values)
         }
 
-        context.output["numass.monitor", name].render(NumassUtils.wrap(res, meta))
+        context.output.render(res, stage = "numass.monitor", name = name, meta = meta)
+
        return@join res;
     }
@@ -157,7 +159,7 @@ val mergeEmptyTask = task("empty") {
             .removeNode("data")
             .removeNode("empty")
             .setNode("data", meta.getMeta("empty"))
-            .setValue("merge.$MERGE_NAME", meta.getString("merge.$MERGE_NAME", "") + "_empty");
+            .setValue("merge.$MERGE_NAME", meta.getString("merge.$MERGE_NAME", "") + "_empty")
         dependsOn(mergeTask, newMeta)
     }
     transform<Table> { data ->
@@ -34,7 +34,7 @@ val subThresholdTask = task("threshold") {
         }
     }
     join<NumassSet, Table> { data ->
-        val sum = NumassDataUtils.join(name, data.values)
+        val sum = NumassDataUtils.joinByIndex(name, data.values)
 
         val correctionTable = Threshold.calculateSubThreshold(sum, meta).filter {
             it.getDouble("correction") in (1.0..1.2)