Build and bug fixes

Alexander Nozik 2019-01-09 19:10:47 +03:00
parent 93c43d9ebd
commit 3e1e78c45a
16 changed files with 277 additions and 156 deletions

View File

@@ -18,12 +18,9 @@ import java.util.function.Supplier
/**
* @author Alexander Nozik
*/
class PKT8VirtualPort(private val portName: String, meta: Meta) : VirtualPort(meta), Metoid {
class PKT8VirtualPort(override val name: String, meta: Meta) : VirtualPort(meta), Metoid {
private val generator = Random()
override val name: String = portName
@Synchronized override fun evaluateRequest(request: String) {
when (request) {
"s" -> {
@@ -59,6 +56,11 @@ class PKT8VirtualPort(private val portName: String, meta: Meta) : VirtualPort(me
}
}
override fun toMeta(): Meta {
TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
}
@Throws(Exception::class)
override fun close() {
cancelByTag("measurement")

View File

@@ -1,26 +1,15 @@
/*
* Copyright 2018 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
//plugins {
// id 'application'
// id 'org.openjfx.javafxplugin' version '0.0.5'
//}
//
//javafx {
// modules = [ 'javafx.controls' ]
//}
plugins {
id 'application'
id 'org.openjfx.javafxplugin' version '0.0.5'
}
javafx {
modules = [ 'javafx.controls' ]
}
version = "0.1.0"

View File

@@ -9,11 +9,9 @@ import java.text.DecimalFormat
import java.time.Duration
//@ValueDef(name = "timeout", type = [(ValueType.NUMBER)], def = "400", info = "A timeout for port response")
class LambdaPortController(context: Context, port: Port) : GenericPortController(context, port, "\r") {
class LambdaPortController(context: Context, port: Port, val timeout : Duration = Duration.ofMillis(200)) : GenericPortController(context, port, "\r") {
private var currentAddress: Int = -1;
private val timeout: Duration = port.meta.optString("timeout").map<Duration> { Duration.parse(it) }.orElse(Duration.ofMillis(200))
fun setAddress(address: Int) {
if (currentAddress != address) {
val response = sendAndWait("ADR $address\r", timeout) { true }.trim()
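The timeout is now a constructor parameter with a 200 ms default instead of being parsed from the port meta inside the class. A minimal caller-side sketch of the new signature (illustrative only, not part of this commit; it reuses the meta lookup from the removed line and assumes the surrounding DataForge imports):

fun buildLambdaController(context: Context, port: Port): LambdaPortController {
    // Resolve the timeout the way the old constructor did, then pass it in explicitly.
    val timeout = port.meta.optString("timeout")
        .map<Duration> { Duration.parse(it) }
        .orElse(Duration.ofMillis(200))
    return LambdaPortController(context, port, timeout)
}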

View File

@@ -27,7 +27,6 @@ import java.util.*
* @author Alexander Nozik
*/
class VirtualLambdaPort(meta: Meta) : VirtualPort(meta) {
var currentAddress = -1
private set
@@ -135,6 +134,11 @@ class VirtualLambdaPort(meta: Meta) : VirtualPort(meta) {
val voltage get() = current * resistance
}
override fun toMeta(): Meta {
TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
}
companion object {
private val latency = Duration.ofMillis(50)
}

View File

@@ -29,7 +29,6 @@ import java.time.Instant
import java.util.stream.IntStream
import java.util.stream.Stream
import java.util.zip.Inflater
import kotlin.streams.toList
/**
* Protobuf based numass point
@@ -127,7 +126,11 @@ class ProtoNumassPoint(override val meta: Meta, val protoBuilder: () -> NumassPr
}
}
class ProtoBlock(override val channel: Int, private val block: NumassProto.Point.Channel.Block, val parent: NumassPoint? = null) : NumassBlock {
class ProtoBlock(
override val channel: Int,
private val block: NumassProto.Point.Channel.Block,
val parent: NumassPoint? = null
) : NumassBlock {
override val startTime: Instant
get() = ProtoNumassPoint.ofEpochNanos(block.time)
@@ -136,11 +139,15 @@ class ProtoBlock(override val channel: Int, private val block: NumassProto.Point
block.length > 0 -> Duration.ofNanos(block.length)
parent?.meta?.hasValue("acquisition_time") ?: false ->
Duration.ofMillis((parent!!.meta.getDouble("acquisition_time") * 1000).toLong())
parent?.meta?.hasValue("params.b_size") ?: false ->
Duration.ofNanos((parent!!.meta.getDouble("params.b_size") * 320).toLong())
else -> {
LoggerFactory.getLogger(javaClass).error("No length information on block. Trying to infer from first and last events")
val times = events.map { it.timeOffset }.toList()
val nanos = (times.max()!! - times.min()!!)
Duration.ofNanos(nanos)
error("No length information on block")
// LoggerFactory.getLogger(javaClass).warn("No length information on block. Trying to infer from first and last events")
// val times = events.map { it.timeOffset }.toList()
// val nanos = (times.max()!! - times.min()!!)
// Duration.ofNanos(nanos)
// Duration.ofMillis(380)
}
}
@@ -148,9 +155,11 @@ class ProtoBlock(override val channel: Int, private val block: NumassProto.Point
get() = if (block.hasEvents()) {
val events = block.events
if (events.timesCount != events.amplitudesCount) {
LoggerFactory.getLogger(javaClass).error("The block is broken. Number of times is ${events.timesCount} and number of amplitudes is ${events.amplitudesCount}")
LoggerFactory.getLogger(javaClass)
.error("The block is broken. Number of times is ${events.timesCount} and number of amplitudes is ${events.amplitudesCount}")
}
IntStream.range(0, events.timesCount).mapToObj { i -> NumassEvent(events.getAmplitudes(i).toShort(), events.getTimes(i), this) }
IntStream.range(0, events.timesCount)
.mapToObj { i -> NumassEvent(events.getAmplitudes(i).toShort(), events.getTimes(i), this) }
} else {
Stream.empty()
}
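The block length lookup is now strict: when neither the block itself nor the parent point's meta provides timing information, the code fails instead of inferring a duration from the first and last events. A standalone sketch of the same resolution chain, using hypothetical field names rather than the project's actual types:

import java.time.Duration

data class BlockTiming(val lengthNanos: Long, val acquisitionTimeSec: Double?, val bSize: Double?)

fun blockDuration(timing: BlockTiming): Duration = when {
    timing.lengthNanos > 0 -> Duration.ofNanos(timing.lengthNanos)
    timing.acquisitionTimeSec != null -> Duration.ofMillis((timing.acquisitionTimeSec * 1000).toLong())
    timing.bSize != null -> Duration.ofNanos((timing.bSize * 320).toLong()) // 320 ns per sample, as in the diff above
    else -> error("No length information on block")
}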

View File

@@ -57,6 +57,7 @@ interface NumassAnalyzer {
if (point is NumassPoint) {
map[HV_KEY] = Value.of(point.voltage)
}
return ValueMap(map)
}
@@ -245,17 +246,14 @@ fun subtractAmplitudeSpectrum(sp1: Table, sp2: Table): Table {
sp1.forEach { row1 ->
val channel = row1.getDouble(NumassAnalyzer.CHANNEL_KEY)
val row2 = sp2.rows.asSequence().find { it.getDouble(NumassAnalyzer.CHANNEL_KEY) == channel } //t2[channel]
if (row2 == null) {
throw RuntimeException("Reference for channel $channel not found");
val row2 = sp2.rows.asSequence().find { it.getDouble(NumassAnalyzer.CHANNEL_KEY) == channel }
?: ValueMap.ofPairs(NumassAnalyzer.COUNT_RATE_KEY to 0.0, NumassAnalyzer.COUNT_RATE_ERROR_KEY to 0.0)
} else {
val value = Math.max(row1.getDouble(NumassAnalyzer.COUNT_RATE_KEY) - row2.getDouble(NumassAnalyzer.COUNT_RATE_KEY), 0.0)
val error1 = row1.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
val error2 = row2.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
val error = Math.sqrt(error1 * error1 + error2 * error2)
builder.row(channel, value, error)
}
}
return builder.build()
}
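In subtractAmplitudeSpectrum a missing reference channel no longer throws; the reference row defaults to zero count rate with zero error, the difference is clamped at zero, and the errors are combined in quadrature. A self-contained sketch of the per-channel arithmetic, with a toy row type standing in for the project's Table/Values API:

import kotlin.math.max
import kotlin.math.sqrt

data class SpectrumRow(val channel: Double, val countRate: Double, val countRateError: Double)

fun subtractRow(row1: SpectrumRow, row2: SpectrumRow?): SpectrumRow {
    // A channel missing from the reference spectrum counts as zero rate and zero error.
    val ref = row2 ?: SpectrumRow(row1.channel, 0.0, 0.0)
    val value = max(row1.countRate - ref.countRate, 0.0)
    val error = sqrt(row1.countRateError * row1.countRateError + ref.countRateError * ref.countRateError)
    return SpectrumRow(row1.channel, value, error)
}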

View File

@@ -54,20 +54,27 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
return analyzeParent(block, config)
}
val loChannel = config.getInt("window.lo", 0)
val upChannel = config.getInt("window.up", Integer.MAX_VALUE)
val t0 = getT0(block, config).toLong()
val chunkSize = config.getInt("chunkSize", -1)
val res = if (chunkSize > 0) {
getEventsWithDelay(block, config)
val count = block.events.count()
val length = block.length.toMillis().toDouble() / 1e3
val res = when {
count < 1000 -> ValueMap.ofPairs(
NumassAnalyzer.LENGTH_KEY to length,
NumassAnalyzer.COUNT_KEY to count,
NumassAnalyzer.COUNT_RATE_KEY to count.toDouble() / length,
NumassAnalyzer.COUNT_RATE_ERROR_KEY to sqrt(count.toDouble()) / length
)
chunkSize > 0 -> getEventsWithDelay(block, config)
.chunked(chunkSize) { analyzeSequence(it.asSequence(), t0) }
.toList()
.mean(config.getEnum("mean", WEIGHTED))
} else {
analyzeSequence(getEventsWithDelay(block, config), t0)
else -> analyzeSequence(getEventsWithDelay(block, config), t0)
}
return ValueMap.Builder(res)
@@ -88,6 +95,10 @@ class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(proces
totalT.addAndGet(pair.second)
}
if (totalN.toInt() == 0) {
error("Zero number of intervals")
}
val countRate = 1e6 * totalN.get() / (totalT.get() / 1000 - t0 * totalN.get() / 1000)//1e9 / (totalT.get() / totalN.get() - t0);
val countRateError = countRate / Math.sqrt(totalN.get().toDouble())
val length = totalT.get() / 1e9
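The rate computed above is a dead-time style correction: for N intervals with total duration T and per-interval cutoff t0, the rate is N / (T − N·t0) and its statistical error is rate / √N; the new guard makes the N = 0 case fail explicitly instead of dividing by zero. A standalone sketch of the same arithmetic, assuming (as the surrounding code suggests) nanosecond inputs; it is algebraically identical to the expression in the diff:

import kotlin.math.sqrt

fun countRateWithError(totalN: Long, totalTNanos: Long, t0Nanos: Long): Pair<Double, Double> {
    require(totalN > 0) { "Zero number of intervals" }
    val rate = 1e9 * totalN / (totalTNanos - t0Nanos * totalN).toDouble() // events per second
    val error = rate / sqrt(totalN.toDouble())
    return rate to error
}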

View File

@@ -52,7 +52,7 @@ object TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>("timeSpectrum",Nu
val histogram = UnivariateHistogram.buildUniform(0.0, binSize * binNum, binSize)
.fill(analyzer
.getEventsWithDelay(input, inputMeta)
.getEventsWithDelay(input, analyzerMeta)
.asStream()
.mapToDouble { it.second.toDouble() / 1000.0 }
).asTable()
@@ -128,6 +128,8 @@ object TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>("timeSpectrum",Nu
val minT0 = inputMeta.getDouble("t0.min", 0.0)
val maxT0 = inputMeta.getDouble("t0.max", 1e9 / cr)
val steps = inputMeta.getInt("t0.steps", 100)
val t0Step = inputMeta.getDouble("t0.step", (maxT0-minT0)/(steps - 1))
val norm = if (inputMeta.getBoolean("normalize", false)) {
cr
@@ -135,7 +137,7 @@ object TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>("timeSpectrum",Nu
1.0
}
(0..steps).map { minT0 + (maxT0 - minT0) / steps * it }.map { t ->
(0..steps).map { minT0 + t0Step * it }.map { t ->
val result = analyzer.analyze(input, analyzerMeta.builder.setValue("t0", t))
if (Thread.currentThread().isInterrupted) {

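The t0 scan now reads an explicit step from meta ("t0.step", defaulting to (maxT0 − minT0) / (steps − 1)) and builds the grid as minT0 + t0Step · i. A minimal sketch of that grid construction (hypothetical helper, not the project's API):

fun t0Grid(minT0: Double, step: Double, steps: Int): List<Double> =
    (0..steps).map { minT0 + step * it }

// e.g. t0Grid(minT0, (maxT0 - minT0) / (steps - 1), steps) reproduces the default behaviour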
View File

@@ -14,7 +14,7 @@ fun main(args: Array<String>) {
//val meta = workspace.getTarget("group_3")
val result = workspace.runTask("scansum", "group_3").first().get()
val result = workspace.runTask("fit", "group_5").first().get()
println("Complete!")
}

View File

@@ -0,0 +1,20 @@
package inr.numass.scripts.analysis
import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.workspace.FileBasedWorkspace
import java.io.File
fun main(args: Array<String>) {
FXOutputManager().startGlobal()
val configPath = File("D:\\Work\\Numass\\sterile2017_05_frames\\workspace.groovy").toPath()
val workspace = FileBasedWorkspace.build(Global, configPath)
workspace.context.setValue("cache.enabled", false)
//val meta = workspace.getTarget("group_3")
val result = workspace.runTask("fit", "group_5").first().get()
println("Complete!")
}

View File

@@ -0,0 +1,57 @@
package inr.numass.scripts.models
import hep.dataforge.context.Global
import hep.dataforge.fx.output.FXOutputManager
import hep.dataforge.meta.buildMeta
import hep.dataforge.plots.data.DataPlot
import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import hep.dataforge.stat.fit.ParamSet
import hep.dataforge.step
import inr.numass.NumassPlugin
import inr.numass.displayChart
import inr.numass.models.NBkgSpectrum
import inr.numass.models.sterile.SterileNeutrinoSpectrum
fun main(args: Array<String>) {
NumassPlugin().startGlobal()
JFreeChartPlugin().startGlobal()
Global.output = FXOutputManager()
val params = ParamSet().apply {
setPar("N", 8e5, 6.0, 0.0, Double.POSITIVE_INFINITY)
setPar("bkg", 2.0, 0.03)
setPar("E0", 18575.0, 1.0)
setPar("mnu2", 0.0, 1.0)
setParValue("msterile2", (1000 * 1000).toDouble())
setPar("U2", 0.0, 1e-3)
setPar("X", 0.0, 0.01)
setPar("trap", 1.0, 0.01)
}
val meta1 = buildMeta {
"resolution.A" to 8.3e-5
}
val spectrum1 = NBkgSpectrum(SterileNeutrinoSpectrum(Global, meta1))
val meta2 = buildMeta {
"resolution.A" to 0
}
val spectrum2 = NBkgSpectrum(SterileNeutrinoSpectrum(Global, meta2))
displayChart("compare").apply {
val x = (14000.0..18600.0).step(100.0).toList()
val y1 = x.map { spectrum1.value(it, params) }
+DataPlot.plot("simple", x.toDoubleArray(), y1.toDoubleArray())
val y2 = x.map { spectrum2.value(it, params) }
+DataPlot.plot("normal", x.toDoubleArray(), y2.toDoubleArray())
val dif = x.mapIndexed{ index, _ -> 1 - y1[index]/y2[index] }
+DataPlot.plot("dif", x.toDoubleArray(), dif.toDoubleArray())
}
}

View File

@@ -29,7 +29,6 @@ import hep.dataforge.plots.plotData
import hep.dataforge.storage.files.FileStorage
import hep.dataforge.tables.Adapters
import hep.dataforge.tables.filter
import hep.dataforge.tables.sort
import inr.numass.NumassPlugin
import inr.numass.data.NumassDataUtils
import inr.numass.data.api.NumassSet
@@ -39,44 +38,45 @@ import inr.numass.subthreshold.Threshold
fun main(args: Array<String>) {
val context = buildContext("NUMASS", NumassPlugin::class.java, JFreeChartPlugin::class.java) {
rootDir = "D:\\Work\\Numass\\sterile\\2017_05"
dataDir = "D:\\Work\\Numass\\data\\2017_05"
rootDir = "D:\\Work\\Numass\\sterile\\2017_05_frames"
dataDir = "D:\\Work\\Numass\\data\\2017_05_frames"
output = FXOutputManager() + DirectoryOutput()
}
val storage = NumassDirectory.read(context, "Fill_2") as? FileStorage ?: error("Storage not found")
val storage = NumassDirectory.read(context, "Fill_3") as? FileStorage ?: error("Storage not found")
val meta = buildMeta {
"delta" to -150
"delta" to -300
"method" to "pow"
"t0" to 15e3
// "window.lo" to 400
// "window.up" to 1600
"xLow" to 450
"xHigh" to 700
"upper" to 3100
"xLow" to 1000
"xHigh" to 1300
"upper" to 6000
"binning" to 20
//"reference" to 18600
}
val frame = displayChart("correction").apply {
plots.setType<DataPlot>()
}
val sets = (1..18).map { "set_$it" }.map { setName ->
val sets = (1..14).map { "set_$it" }.mapNotNull { setName ->
storage.provide(setName, NumassSet::class.java).nullable
}.filterNotNull()
}
val name = "fill_2[1-18]"
val name = "fill_3[1-14]"
val sum = NumassDataUtils.join(name, sets)
val correctionTable = Threshold.calculateSubThreshold(sum, meta).filter {
it.getDouble("correction") in (1.0..1.2)
}.sort("voltage")
}
frame.plotData("${name}_cor", correctionTable, Adapters.buildXYAdapter("U", "correction"))
frame.plotData("${name}_a", correctionTable, Adapters.buildXYAdapter("U", "a"))
frame.plotData("${name}_beta", correctionTable, Adapters.buildXYAdapter("U", "beta"))
context.output.render(correctionTable,"numass.correction", "fill_2[1-18]")
context.output.render(correctionTable,"numass.correction", name)
}

View File

@@ -8,7 +8,6 @@ import hep.dataforge.plots.jfreechart.JFreeChartPlugin
import inr.numass.NumassPlugin
import inr.numass.actions.TimeAnalyzerAction
import inr.numass.data.NumassDataUtils
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.api.SimpleNumassPoint
@@ -18,28 +17,33 @@ fun main(args: Array<String>) {
val context = buildContext("NUMASS", NumassPlugin::class.java, JFreeChartPlugin::class.java) {
output = FXOutputManager()
rootDir = "D:\\Work\\Numass\\sterile2017_05"
dataDir = "D:\\Work\\Numass\\data\\2017_05"
rootDir = "D:\\Work\\Numass\\sterile2017_05_frames"
dataDir = "D:\\Work\\Numass\\data\\2017_05_frames"
}
val storage = NumassDirectory.read(context, "Fill_2")!!
val storage = NumassDirectory.read(context, "Fill_3")!!
val meta = buildMeta {
"t0" to 3000
"binNum" to 200
"t0Step" to 100
"chunkSize" to 3000
"mean" to TimeAnalyzer.AveragingMethod.ARITHMETIC
//"chunkSize" to 10000
// "mean" to TimeAnalyzer.AveragingMethod.ARITHMETIC
//"separateParallelBlocks" to true
"window" to {
"lo" to 0
"up" to 4000
"t0" to {
"step" to 320
}
"analyzer" to {
"t0" to 16000
"window" to {
"lo" to 1500
"up" to 7000
}
}
//"plot.showErrors" to false
}
//def sets = ((2..14) + (22..31)).collect { "set_$it" }
val sets = (2..12).map { "set_$it" }
val sets = (11..11).map { "set_$it" }
//def sets = (16..31).collect { "set_$it" }
//def sets = (20..28).collect { "set_$it" }
@@ -49,7 +53,7 @@ fun main(args: Array<String>) {
val all = NumassDataUtils.join("sum", loaders)
val hvs = listOf(14500.0)//, 15000d, 15200d, 15400d, 15600d, 15800d]
val hvs = listOf(14000.0)//, 15000d, 15200d, 15400d, 15600d, 15800d]
//listOf(18500.0, 18600.0, 18995.0, 19000.0)
val data = DataSet.edit(NumassPoint::class).apply {

View File

@@ -0,0 +1,2 @@
package inr.numass.scripts.utils

View File

@@ -6,17 +6,15 @@ import hep.dataforge.data.DataNode
import hep.dataforge.data.DataSet
import hep.dataforge.meta.Meta
import hep.dataforge.meta.buildMeta
import hep.dataforge.nullable
import hep.dataforge.storage.Storage
import hep.dataforge.tables.ListTable
import hep.dataforge.tables.Table
import hep.dataforge.values.ValueMap
import hep.dataforge.values.Values
import inr.numass.data.analyzers.NumassAnalyzer
import inr.numass.data.analyzers.*
import inr.numass.data.analyzers.NumassAnalyzer.Companion.CHANNEL_KEY
import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_KEY
import inr.numass.data.analyzers.SmartAnalyzer
import inr.numass.data.analyzers.TimeAnalyzer
import inr.numass.data.analyzers.withBinning
import inr.numass.data.api.NumassPoint
import inr.numass.data.api.NumassSet
import inr.numass.data.api.SimpleNumassPoint
@@ -225,10 +223,23 @@ object Threshold {
}
fun calculateSubThreshold(set: NumassSet, config: Meta, analyzer: NumassAnalyzer = SmartAnalyzer()): Table {
val reference = config.optNumber("reference").nullable?.let{
set.getPoints(it.toDouble()).firstOrNull() ?: error("Reference point not found")
}?.let {
println("Using reference point ${it.voltage}")
analyzer.getAmplitudeSpectrum(it,config)
}
return ListTable.Builder().apply {
set.forEach{
val spectrum = analyzer.getAmplitudeSpectrum(it,config)
row(calculateSubThreshold(spectrum,it.voltage,config))
set.forEach{ point ->
val spectrum = analyzer.getAmplitudeSpectrum(point,config).let {
if(reference == null){
it
} else{
subtractAmplitudeSpectrum(it,reference)
}
}
row(calculateSubThreshold(spectrum,point.voltage,config))
}
}.build()
}

View File

@@ -275,9 +275,24 @@ val plotFitTask = task("plotFit") {
val histogramTask = task("histogram") {
descriptor {
value("plot", types = listOf(ValueType.BOOLEAN), defaultValue = false, info = "Show plot of the spectra")
value("points", multiple = true, types = listOf(ValueType.NUMBER), info = "The list of point voltages to build histogram")
value("binning", types = listOf(ValueType.NUMBER), defaultValue = 20, info = "The binning of resulting histogram")
value("normalize", types = listOf(ValueType.BOOLEAN), defaultValue = true, info = "If true reports the count rate in each bin, otherwise total count")
value(
"points",
multiple = true,
types = listOf(ValueType.NUMBER),
info = "The list of point voltages to build histogram"
)
value(
"binning",
types = listOf(ValueType.NUMBER),
defaultValue = 20,
info = "The binning of resulting histogram"
)
value(
"normalize",
types = listOf(ValueType.BOOLEAN),
defaultValue = true,
info = "If true reports the count rate in each bin, otherwise total count"
)
info = "Combine amplitude spectra from multiple sets, but with the same U"
}
model { meta ->
@@ -378,14 +393,13 @@ val fitScanTask = task("fitscan") {
}
splitAction<Table, FitResult> {
val scanMeta = meta.getMeta("scan")
val scanValues = if (scanMeta.hasValue("masses")) {
scanMeta.getValue("masses").list.map { it -> Math.pow(it.double * 1000, 2.0).asValue() }
val scanValues = if (meta.hasValue("scan.masses")) {
meta.getValue("scan.masses").list.map { it -> Math.pow(it.double * 1000, 2.0).asValue() }
} else {
scanMeta.getValue("values", listOf(2.5e5, 1e6, 2.25e6, 4e6, 6.25e6, 9e6)).list
meta.getValue("scan.values", listOf(2.5e5, 1e6, 2.25e6, 4e6, 6.25e6, 9e6)).list
}
val scanParameter = scanMeta.getString("parameter", "msterile2")
val scanParameter = meta.getString("parameter", "msterile2")
scanValues.forEach { scanValue ->
val resultName = String.format("%s[%s=%s]", this.name, scanParameter, scanValue.string)
val fitMeta = meta.getMeta("fit").builder.apply {