Build and bug fixes
parent 93c43d9ebd
commit 3e1e78c45a
@@ -18,12 +18,9 @@ import java.util.function.Supplier
/**
 * @author Alexander Nozik
 */
class PKT8VirtualPort(private val portName: String, meta: Meta) : VirtualPort(meta), Metoid {

class PKT8VirtualPort(override val name: String, meta: Meta) : VirtualPort(meta), Metoid {
    private val generator = Random()

    override val name: String = portName

    @Synchronized override fun evaluateRequest(request: String) {
        when (request) {
            "s" -> {
@@ -59,6 +56,11 @@ class PKT8VirtualPort(private val portName: String, meta: Meta) : VirtualPort(me
            }
        }

    override fun toMeta(): Meta {
        TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
    }


    @Throws(Exception::class)
    override fun close() {
        cancelByTag("measurement")
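Note: the PKT8VirtualPort change above folds the overriding name property into the primary constructor. A minimal stdlib-only sketch of that pattern (the Port base class here is illustrative, not the dataforge one):

abstract class Port {
    abstract val name: String
}

// Before: a constructor parameter plus a separate overriding property.
class VerbosePort(private val portName: String) : Port() {
    override val name: String = portName
}

// After: the override is declared directly in the primary constructor.
class CompactPort(override val name: String) : Port()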
@@ -1,26 +1,15 @@
/*
 * Copyright 2018 Alexander Nozik.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
//
//plugins {
//    id 'application'
//    id 'org.openjfx.javafxplugin' version '0.0.5'
//}
//
//javafx {
//    modules = [ 'javafx.controls' ]
//}

plugins {
    id 'application'
    id 'org.openjfx.javafxplugin' version '0.0.5'
}

javafx {
    modules = [ 'javafx.controls' ]
}

version = "0.1.0"
@@ -9,11 +9,9 @@ import java.text.DecimalFormat
import java.time.Duration

//@ValueDef(name = "timeout", type = [(ValueType.NUMBER)], def = "400", info = "A timeout for port response")
class LambdaPortController(context: Context, port: Port) : GenericPortController(context, port, "\r") {
class LambdaPortController(context: Context, port: Port, val timeout : Duration = Duration.ofMillis(200)) : GenericPortController(context, port, "\r") {
    private var currentAddress: Int = -1;

    private val timeout: Duration = port.meta.optString("timeout").map<Duration> { Duration.parse(it) }.orElse(Duration.ofMillis(200))

    fun setAddress(address: Int) {
        if(currentAddress!= address) {
            val response = sendAndWait("ADR $address\r", timeout) { true }.trim()
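Note: the LambdaPortController change above replaces the meta-driven timeout lookup with an ordinary constructor parameter carrying a default. A minimal sketch of the pattern (ControllerExample is illustrative):

import java.time.Duration

// A default argument makes the common case zero-configuration while still
// letting callers override the timeout explicitly.
class ControllerExample(val timeout: Duration = Duration.ofMillis(200)) {
    fun describe() = "timeout = ${timeout.toMillis()} ms"
}

fun main() {
    println(ControllerExample().describe())                       // default: 200 ms
    println(ControllerExample(Duration.ofMillis(400)).describe()) // explicit override
}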
@@ -27,7 +27,6 @@ import java.util.*
 * @author Alexander Nozik
 */
class VirtualLambdaPort(meta: Meta) : VirtualPort(meta) {

    var currentAddress = -1
        private set

@@ -135,6 +134,11 @@ class VirtualLambdaPort(meta: Meta) : VirtualPort(meta) {
        val voltage get() = current * resistance
    }

    override fun toMeta(): Meta {
        TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
    }


    companion object {
        private val latency = Duration.ofMillis(50)
    }
@@ -29,7 +29,6 @@ import java.time.Instant
import java.util.stream.IntStream
import java.util.stream.Stream
import java.util.zip.Inflater
import kotlin.streams.toList

/**
 * Protobuf based numass point
@@ -42,11 +41,11 @@ class ProtoNumassPoint(override val meta: Meta, val protoBuilder: () -> NumassPr

    override val blocks: List<NumassBlock>
        get() = proto.channelsList
            .flatMap { channel ->
                channel.blocksList
                    .map { block -> ProtoBlock(channel.id.toInt(), block, this) }
                    .sortedBy { it.startTime }
            }
                .flatMap { channel ->
                    channel.blocksList
                        .map { block -> ProtoBlock(channel.id.toInt(), block, this) }
                        .sortedBy { it.startTime }
                }

    override val channels: Map<Int, NumassBlock>
        get() = proto.channelsList.groupBy { it.id.toInt() }.mapValues { entry ->
@@ -127,7 +126,11 @@ class ProtoNumassPoint(override val meta: Meta, val protoBuilder: () -> NumassPr
    }
}

class ProtoBlock(override val channel: Int, private val block: NumassProto.Point.Channel.Block, val parent: NumassPoint? = null) : NumassBlock {
class ProtoBlock(
    override val channel: Int,
    private val block: NumassProto.Point.Channel.Block,
    val parent: NumassPoint? = null
) : NumassBlock {

    override val startTime: Instant
        get() = ProtoNumassPoint.ofEpochNanos(block.time)
@@ -136,11 +139,15 @@ class ProtoBlock(override val channel: Int, private val block: NumassProto.Point
            block.length > 0 -> Duration.ofNanos(block.length)
            parent?.meta?.hasValue("acquisition_time") ?: false ->
                Duration.ofMillis((parent!!.meta.getDouble("acquisition_time") * 1000).toLong())
            parent?.meta?.hasValue("params.b_size") ?: false ->
                Duration.ofNanos((parent!!.meta.getDouble("params.b_size") * 320).toLong())
            else -> {
                LoggerFactory.getLogger(javaClass).error("No length information on block. Trying to infer from first and last events")
                val times = events.map { it.timeOffset }.toList()
                val nanos = (times.max()!! - times.min()!!)
                Duration.ofNanos(nanos)
                error("No length information on block")
                // LoggerFactory.getLogger(javaClass).warn("No length information on block. Trying to infer from first and last events")
                // val times = events.map { it.timeOffset }.toList()
                // val nanos = (times.max()!! - times.min()!!)
                // Duration.ofNanos(nanos)
                // Duration.ofMillis(380)
            }
        }
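Note: the duration fallback above now infers a block's length from the spread of its event timestamps instead of failing outright. A stdlib-only sketch of that inference (inferBlockLength and the plain list of nanosecond offsets are illustrative):

import java.time.Duration

// Infers the duration covered by a block from its earliest and latest event
// offsets (in nanoseconds), mirroring the max - min logic in the hunk above.
fun inferBlockLength(eventOffsetsNanos: List<Long>): Duration {
    require(eventOffsetsNanos.isNotEmpty()) { "Cannot infer the length of an empty block" }
    val nanos = eventOffsetsNanos.maxOrNull()!! - eventOffsetsNanos.minOrNull()!!
    return Duration.ofNanos(nanos)
}

fun main() {
    println(inferBlockLength(listOf(120L, 900_000_000L, 450_000L))) // PT0.89999988S
}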
@@ -148,9 +155,11 @@ class ProtoBlock(override val channel: Int, private val block: NumassProto.Point
        get() = if (block.hasEvents()) {
            val events = block.events
            if (events.timesCount != events.amplitudesCount) {
                LoggerFactory.getLogger(javaClass).error("The block is broken. Number of times is ${events.timesCount} and number of amplitudes is ${events.amplitudesCount}")
                LoggerFactory.getLogger(javaClass)
                    .error("The block is broken. Number of times is ${events.timesCount} and number of amplitudes is ${events.amplitudesCount}")
            }
            IntStream.range(0, events.timesCount).mapToObj { i -> NumassEvent(events.getAmplitudes(i).toShort(), events.getTimes(i), this) }
            IntStream.range(0, events.timesCount)
                .mapToObj { i -> NumassEvent(events.getAmplitudes(i).toShort(), events.getTimes(i), this) }
        } else {
            Stream.empty()
        }
@@ -54,9 +54,10 @@ interface NumassAnalyzer {
     */
    fun analyzeParent(point: ParentBlock, config: Meta = Meta.empty()): Values {
        val map = HashMap(analyze(point, config).asMap())
        if(point is NumassPoint) {
        if (point is NumassPoint) {
            map[HV_KEY] = Value.of(point.voltage)
        }

        return ValueMap(map)
    }

@@ -163,8 +164,8 @@ fun getAmplitudeSpectrum(events: Sequence<NumassEvent>, length: Double, config:
    }


    val minChannel = config.getInt("window.lo") { spectrum.keys.min()?:0 }
    val maxChannel = config.getInt("window.up") { spectrum.keys.max()?: 4096 }
    val minChannel = config.getInt("window.lo") { spectrum.keys.min() ?: 0 }
    val maxChannel = config.getInt("window.up") { spectrum.keys.max() ?: 4096 }

    return ListTable.Builder(format)
        .rows(IntStream.range(minChannel, maxChannel)
@@ -245,17 +246,14 @@ fun subtractAmplitudeSpectrum(sp1: Table, sp2: Table): Table {

    sp1.forEach { row1 ->
        val channel = row1.getDouble(NumassAnalyzer.CHANNEL_KEY)
        val row2 = sp2.rows.asSequence().find { it.getDouble(NumassAnalyzer.CHANNEL_KEY) == channel } //t2[channel]
        if (row2 == null) {
            throw RuntimeException("Reference for channel $channel not found");
        val row2 = sp2.rows.asSequence().find { it.getDouble(NumassAnalyzer.CHANNEL_KEY) == channel }
            ?: ValueMap.ofPairs(NumassAnalyzer.COUNT_RATE_KEY to 0.0, NumassAnalyzer.COUNT_RATE_ERROR_KEY to 0.0)

        } else {
            val value = Math.max(row1.getDouble(NumassAnalyzer.COUNT_RATE_KEY) - row2.getDouble(NumassAnalyzer.COUNT_RATE_KEY), 0.0)
            val error1 = row1.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
            val error2 = row2.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
            val error = Math.sqrt(error1 * error1 + error2 * error2)
            builder.row(channel, value, error)
        }
        val value = Math.max(row1.getDouble(NumassAnalyzer.COUNT_RATE_KEY) - row2.getDouble(NumassAnalyzer.COUNT_RATE_KEY), 0.0)
        val error1 = row1.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
        val error2 = row2.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
        val error = Math.sqrt(error1 * error1 + error2 * error2)
        builder.row(channel, value, error)
    }
    return builder.build()
}
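Note: two things change above. A missing reference channel now falls back to a zero-rate row instead of throwing, and the subtraction then applies uniformly: rates subtract with a floor at zero, and the independent errors combine in quadrature. A minimal sketch of the per-channel arithmetic (the Bin type is illustrative):

import kotlin.math.max
import kotlin.math.sqrt

data class Bin(val rate: Double, val error: Double)

// Subtracts a reference bin from a signal bin; a missing reference behaves as
// Bin(0.0, 0.0), matching the fallback in the hunk above.
fun subtract(signal: Bin, reference: Bin?): Bin {
    val ref = reference ?: Bin(0.0, 0.0)
    val rate = max(signal.rate - ref.rate, 0.0)
    val error = sqrt(signal.error * signal.error + ref.error * ref.error)
    return Bin(rate, error)
}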
@@ -54,20 +54,27 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
            return analyzeParent(block, config)
        }


        val loChannel = config.getInt("window.lo", 0)
        val upChannel = config.getInt("window.up", Integer.MAX_VALUE)
        val t0 = getT0(block, config).toLong()

        val chunkSize = config.getInt("chunkSize", -1)

        val res = if (chunkSize > 0) {
            getEventsWithDelay(block, config)
        val count = block.events.count()
        val length = block.length.toMillis().toDouble() / 1e3

        val res = when {
            count < 1000 -> ValueMap.ofPairs(
                NumassAnalyzer.LENGTH_KEY to length,
                NumassAnalyzer.COUNT_KEY to count,
                NumassAnalyzer.COUNT_RATE_KEY to count.toDouble() / length,
                NumassAnalyzer.COUNT_RATE_ERROR_KEY to sqrt(count.toDouble()) / length
            )
            chunkSize > 0 -> getEventsWithDelay(block, config)
                .chunked(chunkSize) { analyzeSequence(it.asSequence(), t0) }
                .toList()
                .mean(config.getEnum("mean", WEIGHTED))
        } else {
            analyzeSequence(getEventsWithDelay(block, config), t0)
            else -> analyzeSequence(getEventsWithDelay(block, config), t0)
        }

        return ValueMap.Builder(res)
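Note: the analyzer above gains a low-statistics branch; blocks with fewer than 1000 events get a direct Poisson estimate instead of the chunked time-interval analysis. A stdlib-only sketch of that estimate (CountStat stands in for the ValueMap built here):

import kotlin.math.sqrt

data class CountStat(val length: Double, val count: Int, val rate: Double, val rateError: Double)

// Direct count-rate estimate: rate = count / length, with the Poisson error
// sqrt(count) / length, as in the count < 1000 branch above.
fun poissonEstimate(count: Int, lengthSeconds: Double): CountStat =
    CountStat(lengthSeconds, count, count / lengthSeconds, sqrt(count.toDouble()) / lengthSeconds)

fun main() {
    println(poissonEstimate(400, 20.0)) // rate = 20.0 Hz, error = 1.0 Hz
}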
@@ -88,6 +95,10 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
            totalT.addAndGet(pair.second)
        }

        if (totalN.toInt() == 0) {
            error("Zero number of intervals")
        }

        val countRate = 1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000)//1e9 / (totalT.get() / totalN.get() - t0);
        val countRateError = countRate / Math.sqrt(totalN.get().toDouble())
        val length = totalT.get() / 1e9

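Note: the count-rate expression above divides the event count by the summed interval time minus one dead-time window t0 per event; assuming totalT and t0 are in nanoseconds, the 1e6 and /1000 factors just convert to seconds. A sketch under that unit assumption (correctedCountRate is illustrative):

import kotlin.math.sqrt

// Events per second of live time, where each of the totalN intervals is
// shortened by the dead time t0; the error is the rate / sqrt(N) (Poisson).
fun correctedCountRate(totalN: Long, totalTNanos: Long, t0Nanos: Long): Pair<Double, Double> {
    val liveSeconds = (totalTNanos - t0Nanos * totalN) / 1e9
    val rate = totalN / liveSeconds
    return rate to rate / sqrt(totalN.toDouble())
}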
@@ -52,7 +52,7 @@ object TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>("timeSpectrum",Nu

    val histogram = UnivariateHistogram.buildUniform(0.0, binSize * binNum, binSize)
        .fill(analyzer
            .getEventsWithDelay(input, inputMeta)
            .getEventsWithDelay(input, analyzerMeta)
            .asStream()
            .mapToDouble { it.second.toDouble() / 1000.0 }
        ).asTable()
@@ -128,6 +128,8 @@ object TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>("timeSpectrum",Nu
    val minT0 = inputMeta.getDouble("t0.min", 0.0)
    val maxT0 = inputMeta.getDouble("t0.max", 1e9 / cr)
    val steps = inputMeta.getInt("t0.steps", 100)
    val t0Step = inputMeta.getDouble("t0.step", (maxT0-minT0)/(steps - 1))


    val norm = if (inputMeta.getBoolean("normalize", false)) {
        cr
@@ -135,7 +137,7 @@ object TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>("timeSpectrum",Nu
        1.0
    }

    (0..steps).map { minT0 + (maxT0 - minT0) / steps * it }.map { t ->
    (0..steps).map { minT0 + t0Step * it }.map { t ->
        val result = analyzer.analyze(input, analyzerMeta.builder.setValue("t0", t))

        if (Thread.currentThread().isInterrupted) {
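Note: the t0 scan above switches from recomputing the increment inline to an explicit, configurable t0Step that defaults to an even division of [minT0, maxT0]. A sketch of the resulting grid (t0Grid is illustrative):

// Builds the scan grid: steps + 1 points starting at minT0, spaced by step.
// With the default step of (maxT0 - minT0) / (steps - 1) the last point
// slightly overshoots maxT0, matching the (0..steps) loop in the hunk above.
fun t0Grid(minT0: Double, maxT0: Double, steps: Int, step: Double = (maxT0 - minT0) / (steps - 1)): List<Double> =
    (0..steps).map { minT0 + step * it }

fun main() {
    println(t0Grid(0.0, 10.0, 5)) // [0.0, 2.5, 5.0, 7.5, 10.0, 12.5]
}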
@@ -14,7 +14,7 @@ fun main(args: Array<String>) {

    //val meta = workspace.getTarget("group_3")

    val result = workspace.runTask("scansum", "group_3").first().get()
    val result = workspace.runTask("fit", "group_5").first().get()
    println("Complete!")

}
@@ -0,0 +1,20 @@
package inr.numass.scripts.analysis

import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.workspace.FileBasedWorkspace
import java.io.File

fun main(args: Array<String>) {
    FXOutputManager().startGlobal()

    val configPath = File("D:\\Work\\Numass\\sterile2017_05_frames\\workspace.groovy").toPath()
    val workspace = FileBasedWorkspace.build(Global, configPath)
    workspace.context.setValue("cache.enabled", false)

    //val meta = workspace.getTarget("group_3")

    val result = workspace.runTask("fit", "group_5").first().get()
    println("Complete!")

}
@@ -0,0 +1,57 @@
package inr.numass.scripts.models

import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.meta.buildMeta
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import hep.dataforge.stat.fit.ParamSet
import hep.dataforge.step
import inr.numass.NumassPlugin
import inr.numass.displayChart
import inr.numass.models.NBkgSpectrum
import inr.numass.models.sterile.SterileNeutrinoSpectrum

fun main(args: Array<String>) {
    NumassPlugin().startGlobal()
    JFreeChartPlugin().startGlobal()
    Global.output = FXOutputManager()



    val params = ParamSet().apply {
        setPar("N", 8e5, 6.0, 0.0, Double.POSITIVE_INFINITY)
        setPar("bkg", 2.0, 0.03)
        setPar("E0", 18575.0, 1.0)
        setPar("mnu2", 0.0, 1.0)
        setParValue("msterile2", (1000 * 1000).toDouble())
        setPar("U2", 0.0, 1e-3)
        setPar("X", 0.0, 0.01)
        setPar("trap", 1.0, 0.01)
    }




    val meta1 = buildMeta {
        "resolution.A" to 8.3e-5
    }
    val spectrum1 = NBkgSpectrum(SterileNeutrinoSpectrum(Global, meta1))

    val meta2 = buildMeta {
        "resolution.A" to 0
    }
    val spectrum2 = NBkgSpectrum(SterileNeutrinoSpectrum(Global, meta2))

    displayChart("compare").apply {
        val x = (14000.0..18600.0).step(100.0).toList()
        val y1 = x.map { spectrum1.value(it, params) }
        +DataPlot.plot("simple", x.toDoubleArray(), y1.toDoubleArray())
        val y2 = x.map { spectrum2.value(it, params) }
        +DataPlot.plot("normal", x.toDoubleArray(), y2.toDoubleArray())
        val dif = x.mapIndexed{ index, _ -> 1 - y1[index]/y2[index] }
        +DataPlot.plot("dif", x.toDoubleArray(), dif.toDoubleArray())
    }


}
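Note: the comparison script above evaluates two spectra on a 100 V grid and plots their relative deviation 1 - y1/y2. A stdlib-only sketch of those two steps (doubleRange stands in for the dataforge step extension; both functions are illustrative):

// Closed floating-point grid from start to end (inclusive) with the given step.
fun doubleRange(start: Double, end: Double, step: Double): List<Double> =
    generateSequence(start) { it + step }.takeWhile { it <= end }.toList()

// Pointwise relative deviation of spectrum y1 from reference spectrum y2.
fun relativeDifference(y1: List<Double>, y2: List<Double>): List<Double> =
    y1.indices.map { 1 - y1[it] / y2[it] }

fun main() {
    println(doubleRange(14000.0, 14300.0, 100.0)) // [14000.0, 14100.0, 14200.0, 14300.0]
    println(relativeDifference(listOf(9.0, 8.0), listOf(10.0, 10.0))) // ~[0.1, 0.2]
}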
@@ -29,7 +29,6 @@ import hep.dataforge.plots.plotData
import hep.dataforge.storage.files.FileStorage
import hep.dataforge.tables.Adapters
import hep.dataforge.tables.filter
import hep.dataforge.tables.sort
import inr.numass.NumassPlugin
import inr.numass.data.NumassDataUtils
import inr.numass.data.api.NumassSet
@@ -39,44 +38,45 @@ import inr.numass.subthreshold.Threshold

fun main(args: Array<String>) {
    val context = buildContext("NUMASS", NumassPlugin::class.java, JFreeChartPlugin::class.java) {
        rootDir = "D:\\Work\\Numass\\sterile\\2017_05"
        dataDir = "D:\\Work\\Numass\\data\\2017_05"
        rootDir = "D:\\Work\\Numass\\sterile\\2017_05_frames"
        dataDir = "D:\\Work\\Numass\\data\\2017_05_frames"
        output = FXOutputManager() + DirectoryOutput()
    }

    val storage = NumassDirectory.read(context, "Fill_2") as? FileStorage ?: error("Storage not found")
    val storage = NumassDirectory.read(context, "Fill_3") as? FileStorage ?: error("Storage not found")

    val meta = buildMeta {
        "delta" to -150
        "delta" to -300
        "method" to "pow"
        "t0" to 15e3
        // "window.lo" to 400
        // "window.up" to 1600
        "xLow" to 450
        "xHigh" to 700
        "upper" to 3100
        "xLow" to 1000
        "xHigh" to 1300
        "upper" to 6000
        "binning" to 20
        //"reference" to 18600
    }

    val frame = displayChart("correction").apply {
        plots.setType<DataPlot>()
    }

    val sets = (1..18).map { "set_$it" }.map { setName ->
    val sets = (1..14).map { "set_$it" }.mapNotNull { setName ->
        storage.provide(setName, NumassSet::class.java).nullable
    }.filterNotNull()
    }

    val name = "fill_2[1-18]"
    val name = "fill_3[1-14]"

    val sum = NumassDataUtils.join(name, sets)

    val correctionTable = Threshold.calculateSubThreshold(sum, meta).filter {
        it.getDouble("correction") in (1.0..1.2)
    }.sort("voltage")
    }

    frame.plotData("${name}_cor", correctionTable, Adapters.buildXYAdapter("U", "correction"))
    frame.plotData("${name}_a", correctionTable, Adapters.buildXYAdapter("U", "a"))
    frame.plotData("${name}_beta", correctionTable, Adapters.buildXYAdapter("U", "beta"))

    context.output.render(correctionTable,"numass.correction", "fill_2[1-18]")
    context.output.render(correctionTable,"numass.correction", name)
}
@@ -8,7 +8,6 @@ import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import inr.numass.NumassPlugin
import inr.numass.actions.TimeAnalyzerAction
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.api.SimpleNumassPoint
@@ -18,28 +17,33 @@ fun main(args: Array<String>) {

    val context = buildContext("NUMASS", NumassPlugin::class.java, JFreeChartPlugin::class.java) {
        output = FXOutputManager()
        rootDir = "D:\\Work\\Numass\\sterile2017_05"
        dataDir = "D:\\Work\\Numass\\data\\2017_05"
        rootDir = "D:\\Work\\Numass\\sterile2017_05_frames"
        dataDir = "D:\\Work\\Numass\\data\\2017_05_frames"
    }

    val storage = NumassDirectory.read(context, "Fill_2")!!
    val storage = NumassDirectory.read(context, "Fill_3")!!

    val meta = buildMeta {
        "t0" to 3000
        "binNum" to 200
        "t0Step" to 100
        "chunkSize" to 3000
        "mean" to TimeAnalyzer.AveragingMethod.ARITHMETIC
        //"chunkSize" to 10000
        // "mean" to TimeAnalyzer.AveragingMethod.ARITHMETIC
        //"separateParallelBlocks" to true
        "window" to {
            "lo" to 0
            "up" to 4000
        "t0" to {
            "step" to 320
        }
        "analyzer" to {
            "t0" to 16000
            "window" to {
                "lo" to 1500
                "up" to 7000
            }
        }

        //"plot.showErrors" to false
    }

    //def sets = ((2..14) + (22..31)).collect { "set_$it" }
    val sets = (2..12).map { "set_$it" }
    val sets = (11..11).map { "set_$it" }
    //def sets = (16..31).collect { "set_$it" }
    //def sets = (20..28).collect { "set_$it" }

@@ -49,7 +53,7 @@ fun main(args: Array<String>) {

    val all = NumassDataUtils.join("sum", loaders)

    val hvs = listOf(14500.0)//, 15000d, 15200d, 15400d, 15600d, 15800d]
    val hvs = listOf(14000.0)//, 15000d, 15200d, 15400d, 15600d, 15800d]
    //listOf(18500.0, 18600.0, 18995.0, 19000.0)

    val data = DataSet.edit(NumassPoint::class).apply {
@@ -0,0 +1,2 @@
package inr.numass.scripts.utils

@@ -6,17 +6,15 @@ import hep.dataforge.data.DataNode
import hep.dataforge.data.DataSet
import hep.dataforge.meta.Meta
import hep.dataforge.meta.buildMeta
import hep.dataforge.nullable
import hep.dataforge.storage.Storage
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.Table
import hep.dataforge.values.ValueMap
import hep.dataforge.values.Values
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.*
import inr.numass.data.analyzers.NumassAnalyzer.Companion.CHANNEL_KEY
import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_KEY
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.analyzers.withBinning
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.api.SimpleNumassPoint
@@ -225,10 +223,23 @@ object Threshold {
    }

    fun calculateSubThreshold(set: NumassSet, config: Meta, analyzer: NumassAnalyzer = SmartAnalyzer()): Table {
        val reference = config.optNumber("reference").nullable?.let{
            set.getPoints(it.toDouble()).firstOrNull() ?: error("Reference point not found")
        }?.let {
            println("Using reference point ${it.voltage}")
            analyzer.getAmplitudeSpectrum(it,config)
        }

        return ListTable.Builder().apply {
            set.forEach{
                val spectrum = analyzer.getAmplitudeSpectrum(it,config)
                row(calculateSubThreshold(spectrum,it.voltage,config))
            set.forEach{ point ->
                val spectrum = analyzer.getAmplitudeSpectrum(point,config).let {
                    if(reference == null){
                        it
                    } else{
                        subtractAmplitudeSpectrum(it,reference)
                    }
                }
                row(calculateSubThreshold(spectrum,point.voltage,config))
            }
        }.build()
    }

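Note: calculateSubThreshold above now optionally subtracts a reference spectrum from every point's spectrum before the sub-threshold fit; with no reference configured, the spectrum passes through unchanged. A minimal sketch of that branch (the Spectrum alias is illustrative):

typealias Spectrum = Map<Int, Double>

// Applies an optional reference: channels present in the reference are
// subtracted, absent ones are treated as zero, mirroring the let block above.
fun applyReference(spectrum: Spectrum, reference: Spectrum?): Spectrum =
    if (reference == null) spectrum
    else spectrum.mapValues { (channel, rate) -> rate - (reference[channel] ?: 0.0) }

fun main() {
    println(applyReference(mapOf(1 to 5.0, 2 to 3.0), mapOf(1 to 1.0))) // {1=4.0, 2=3.0}
}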
@@ -100,12 +100,12 @@ val monitorTableTask = task("monitor") {
    val analyzerMeta = meta.getMetaOrEmpty("analyzer")
    //TODO add separator labels
    val res = ListTable.Builder("timestamp", "count", "cr", "crErr")
        .rows(
            data.values.stream().parallel()
                .flatMap { it.points.stream() }
                .filter { it.voltage == monitorVoltage }
                .map { it -> analyzer.analyzeParent(it, analyzerMeta) }
        ).build()
            .rows(
                data.values.stream().parallel()
                    .flatMap { it.points.stream() }
                    .filter { it.voltage == monitorVoltage }
                    .map { it -> analyzer.analyzeParent(it, analyzerMeta) }
            ).build()

    if (meta.getBoolean("showPlot", true)) {
        val plot = DataPlot.plot(name, res, Adapters.buildXYAdapter("timestamp", "cr", "crErr"))
@@ -139,10 +139,10 @@ val mergeEmptyTask = task("empty") {
        }
        //replace data node by "empty" node
        val newMeta = meta.builder
            .removeNode("data")
            .removeNode("empty")
            .setNode("data", meta.getMeta("empty"))
            .setValue("merge.$MERGE_NAME", meta.getString("merge.$MERGE_NAME", "") + "_empty");
                .removeNode("data")
                .removeNode("empty")
                .setNode("data", meta.getMeta("empty"))
                .setValue("merge.$MERGE_NAME", meta.getString("merge.$MERGE_NAME", "") + "_empty");
        dependsOn(mergeTask, newMeta)
    }
    transform<Table> { data ->
@@ -164,7 +164,7 @@ val subtractEmptyTask = task("dif") {
        val builder = DataTree.edit(Table::class)
        val rootNode = data.getCheckedNode("data", Table::class.java)
        val empty = data.getCheckedNode("empty", Table::class.java).data
            ?: throw RuntimeException("No empty data found")
                ?: throw RuntimeException("No empty data found")

        rootNode.visit(Table::class.java) { input ->
            val resMeta = buildMeta {
@@ -230,16 +230,16 @@ val fitTask = task("fit") {
            writer.flush()

            FitHelper(context).fit(data, meta)
                .setListenerStream(out)
                .report(log)
                .run()
                .also {
                    if (meta.getBoolean("printLog", true)) {
                        writer.println()
                        log.entries.forEach { entry -> writer.println(entry.toString()) }
                        writer.println()
                    }
                    .setListenerStream(out)
                    .report(log)
                    .run()
                    .also {
                        if (meta.getBoolean("printLog", true)) {
                            writer.println()
                            log.entries.forEach { entry -> writer.println(entry.toString()) }
                            writer.println()
                        }
                    }
            }
        }
    }
@@ -262,7 +262,7 @@ val plotFitTask = task("plotFit") {

        // ensuring all data points are calculated explicitly
        StreamSupport.stream<Values>(data.spliterator(), false)
            .map { dp -> Adapters.getXValue(adapter, dp).double }.sorted().forEach { fit.calculateIn(it) }
                .map { dp -> Adapters.getXValue(adapter, dp).double }.sorted().forEach { fit.calculateIn(it) }

        val dataPlot = DataPlot.plot("data", data, adapter)

@@ -275,9 +275,24 @@ val plotFitTask = task("plotFit") {
val histogramTask = task("histogram") {
    descriptor {
        value("plot", types = listOf(ValueType.BOOLEAN), defaultValue = false, info = "Show plot of the spectra")
        value("points", multiple = true, types = listOf(ValueType.NUMBER), info = "The list of point voltages to build histogram")
        value("binning", types = listOf(ValueType.NUMBER), defaultValue = 20, info = "The binning of resulting histogram")
        value("normalize", types = listOf(ValueType.BOOLEAN), defaultValue = true, info = "If true reports the count rate in each bin, otherwise total count")
        value(
            "points",
            multiple = true,
            types = listOf(ValueType.NUMBER),
            info = "The list of point voltages to build histogram"
        )
        value(
            "binning",
            types = listOf(ValueType.NUMBER),
            defaultValue = 20,
            info = "The binning of resulting histogram"
        )
        value(
            "normalize",
            types = listOf(ValueType.BOOLEAN),
            defaultValue = true,
            info = "If true reports the count rate in each bin, otherwise total count"
        )
        info = "Combine amplitude spectra from multiple sets, but with the same U"
    }
    model { meta ->
@@ -299,29 +314,29 @@ val histogramTask = task("histogram") {

        //Fill values to table
        data.flatMap { it.value.points }
            .filter { points == null || points.contains(it.voltage) }
            .groupBy { it.voltage }
            .mapValues {
                analyzer.getAmplitudeSpectrum(MetaBlock(it.value))
            }
            .forEach { u, spectrum ->
                log.report("Aggregating data from U = $u")
                spectrum.forEach {
                    val channel = it[CHANNEL_KEY].int
                    val count = it[COUNT_KEY].long
                    aggregator.getOrPut(channel) { HashMap() }
                        .getOrPut(u) { AtomicLong() }
                        .addAndGet(count)
                }
                names.add("U$u")
                .filter { points == null || points.contains(it.voltage) }
                .groupBy { it.voltage }
                .mapValues {
                    analyzer.getAmplitudeSpectrum(MetaBlock(it.value))
                }
                .forEach { u, spectrum ->
                    log.report("Aggregating data from U = $u")
                    spectrum.forEach {
                        val channel = it[CHANNEL_KEY].int
                        val count = it[COUNT_KEY].long
                        aggregator.getOrPut(channel) { HashMap() }
                            .getOrPut(u) { AtomicLong() }
                            .addAndGet(count)
                    }
                    names.add("U$u")
                }

        val times: Map<Double, Double> = data.flatMap { it.value.points }
            .filter { points == null || points.contains(it.voltage) }
            .groupBy { it.voltage }
            .mapValues {
                it.value.sumByDouble { it.length.toMillis().toDouble() / 1000 }
            }
                .filter { points == null || points.contains(it.voltage) }
                .groupBy { it.voltage }
                .mapValues {
                    it.value.sumByDouble { it.length.toMillis().toDouble() / 1000 }
                }

        val normalize = meta.getBoolean("normalize", true)

@@ -371,21 +386,20 @@
val fitScanTask = task("fitscan") {
    model { meta ->
        dependsOn(filterTask, meta)
        configure{
        configure {
            setNode(meta.getMetaOrEmpty("scan"))
            setNode(meta.getMeta("fit"))
        }
    }

    splitAction<Table, FitResult> {
        val scanMeta = meta.getMeta("scan")
        val scanValues = if (scanMeta.hasValue("masses")) {
            scanMeta.getValue("masses").list.map { it -> Math.pow(it.double * 1000, 2.0).asValue() }
        val scanValues = if (meta.hasValue("scan.masses")) {
            meta.getValue("scan.masses").list.map { it -> Math.pow(it.double * 1000, 2.0).asValue() }
        } else {
            scanMeta.getValue("values", listOf(2.5e5, 1e6, 2.25e6, 4e6, 6.25e6, 9e6)).list
            meta.getValue("scan.values", listOf(2.5e5, 1e6, 2.25e6, 4e6, 6.25e6, 9e6)).list
        }

        val scanParameter = scanMeta.getString("parameter", "msterile2")
        val scanParameter = meta.getString("parameter", "msterile2")
        scanValues.forEach { scanValue ->
            val resultName = String.format("%s[%s=%s]", this.name, scanParameter, scanValue.string)
            val fitMeta = meta.getMeta("fit").builder.apply {
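Note: the fit scan above now reads its inputs from dotted paths on the task meta (scan.masses, scan.values) instead of a nested scan node; when masses are given they are converted to msterile2 scan values. A sketch of that conversion, under the assumption that masses are listed in keV and the parameter is the squared mass in eV² (massesToScanValues is illustrative):

import kotlin.math.pow

// Converts sterile-mass candidates to squared-mass scan values, mirroring
// Math.pow(it.double * 1000, 2.0) in the hunk above (keV -> eV, then squared).
fun massesToScanValues(massesKeV: List<Double>): List<Double> =
    massesKeV.map { (it * 1000).pow(2) }

fun main() {
    println(massesToScanValues(listOf(1.0, 1.5))) // [1000000.0, 2250000.0]
}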
@@ -394,8 +408,8 @@ val fitScanTask = task("fitscan") {
                setValue("params.$scanParameter.value", scanValue)
            } else {
                getMetaList("params.param").stream()
                    .filter { par -> par.getString("name") == scanParameter }
                    .forEach { it.setValue("value", it) }
                        .filter { par -> par.getString("name") == scanParameter }
                        .forEach { it.setValue("value", it) }
            }
        }

@@ -408,16 +422,16 @@ val fitScanTask = task("fitscan") {
            writer.flush()

            FitHelper(context).fit(data, fitMeta)
                .setListenerStream(out)
                .report(log)
                .run()
                .also {
                    if (fitMeta.getBoolean("printLog", true)) {
                        writer.println()
                        log.entries.forEach { entry -> writer.println(entry.toString()) }
                        writer.println()
                    }
                    .setListenerStream(out)
                    .report(log)
                    .run()
                    .also {
                        if (fitMeta.getBoolean("printLog", true)) {
                            writer.println()
                            log.entries.forEach { entry -> writer.println(entry.toString()) }
                            writer.println()
                        }
                    }
            }
        }
    }