A lot of fixes: move subthreshold fitting into a Threshold object, add a power-law fit method, and drive the linearity-bound scans from meta
parent 939d15272d
commit 13a3376433
@@ -22,7 +22,7 @@ import hep.dataforge.tables.Table
 import hep.dataforge.tables.TableTransform
 import inr.numass.NumassPlugin
 import inr.numass.data.analyzers.NumassAnalyzerKt
-import inr.numass.subthreshold.SubFitKt
+import inr.numass.subthreshold.Threshold
 import javafx.application.Platform
 
 import static hep.dataforge.grind.Grind.buildMeta
@@ -37,14 +37,19 @@ ctx.getPluginManager().load(CachePlugin)
 Meta meta = buildMeta(t0: 3e4) {
     data(dir: "D:\\Work\\Numass\\data\\2017_11\\Fill_2", mask: "set_3.")
     subtract(reference: 18500)
-    fit(xLow: 400, xHigh: 600, upper: 3000, binning: 20)
+    fit(xLow: 400, xHigh: 600, upper: 3000, binning: 20, method: "pow")
+    scan(
+            hi: [500, 600, 700, 800, 900],
+            lo: [350, 400, 450],
+            def: 700
+    )
     window(lo: 300, up: 3000)
 }
 
 
 def shell = new GrindShell(ctx);
 
-DataNode<Table> spectra = SubFitKt.getSpectraMap(ctx, meta).computeAll();
+DataNode<Table> spectra = Threshold.INSTANCE.getSpectraMap(ctx, meta).computeAll();
 
 shell.eval {
 
@@ -86,22 +91,19 @@ shell.eval {
 
     showPoints(spectraMap.findAll { it.key in [14100d, 14200d, 14300d, 14400d, 14800d, 15000d, 15200d, 15700d] })
 
-    [500, 550, 600, 650, 700].each { xHigh ->
+    meta["scan.hi"].each { xHigh ->
         println "Calculate correction for upper linearity bound: ${xHigh}"
         Table correctionTable = TableTransform.filter(
-                SubFitKt.fitAllPoints(
+                Threshold.INSTANCE.calculateSubThreshold(
                         spectraMap,
-                        meta["fit.xLow"] as int,
-                        xHigh,
-                        meta["fit.upper"] as int,
-                        meta["fit.binning"] as int
+                        meta.getMeta("fit").builder.setValue("xHigh", xHigh)
                 ),
                 "correction",
                 0,
                 2
         )
 
-        if (xHigh == 600) {
+        if (xHigh == meta["scan.def"]) {
             ColumnedDataWriter.writeTable(System.out, correctionTable, "underflow parameters")
         }
 
@@ -116,15 +118,12 @@ shell.eval {
     }
 
 
-    [350, 400, 450].each { xLow ->
+    meta["scan.lo"].each { xLow ->
         println "Calculate correction for lower linearity bound: ${xLow}"
         Table correctionTable = TableTransform.filter(
-                SubFitKt.fitAllPoints(
+                Threshold.INSTANCE.calculateSubThreshold(
                         spectraMap,
-                        xLow,
-                        meta["fit.xHigh"] as int,
-                        meta["fit.upper"] as int,
-                        meta["fit.binning"] as int
+                        meta.getMeta("fit").builder.setValue("xLow", xLow)
                 ),
                 "correction",
                 0,
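For reference, the equivalent scan can be driven from Kotlin. This is a minimal sketch, not code from this commit: it assumes, as the Groovy above does, that the MetaBuilder returned by `builder` is usable wherever a Meta is expected, and that Value.listValue()/intValue() are available for unpacking the scan list.

import hep.dataforge.meta.Meta
import hep.dataforge.tables.Table
import inr.numass.subthreshold.Threshold

// Hypothetical Kotlin rendering of the upper-bound scan in the Groovy script above.
fun scanUpperBound(spectraMap: Map<Double, Table>, meta: Meta): List<Table> =
        meta.getValue("scan.hi").listValue().map { hi ->
            // copy the fit config and override a single parameter per scan step
            val fitConfig = meta.getMeta("fit").builder.setValue("xHigh", hi.intValue())
            Threshold.calculateSubThreshold(spectraMap, fitConfig)
        }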
@@ -38,7 +38,7 @@ import java.util.*
 @NodeDef(name = "grouping", info = "The definition of grouping rule for this merge", from = "method::hep.dataforge.actions.GroupBuilder.byMeta")
 class MergeDataAction : ManyToOneAction<Table, Table>() {
 
-    val parnames = arrayOf(NumassPoint.HV_KEY, NumassPoint.LENGTH_KEY, NumassAnalyzer.COUNT_KEY, NumassAnalyzer.COUNT_RATE_KEY, NumassAnalyzer.COUNT_RATE_ERROR_KEY)
+    private val parnames = arrayOf(NumassPoint.HV_KEY, NumassPoint.LENGTH_KEY, NumassAnalyzer.COUNT_KEY, NumassAnalyzer.COUNT_RATE_KEY, NumassAnalyzer.COUNT_RATE_ERROR_KEY)
 
     override fun buildGroups(context: Context, input: DataNode<Table>, actionMeta: Meta): List<DataNode<Table>> {
         val meta = inputMeta(context, input.getMeta(), actionMeta)
@@ -80,15 +80,13 @@ class MergeDataAction : ManyToOneAction<Table, Table>() {
 
         val cr = (cr1 * t1 + cr2 * t2) / (t1 + t2)
 
-        val err1 = dp1.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)!!
-        val err2 = dp2.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)!!
+        val err1 = dp1.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
+        val err2 = dp2.getDouble(NumassAnalyzer.COUNT_RATE_ERROR_KEY)
 
         // absolute errors add in quadrature
         val crErr = Math.sqrt(err1 * err1 * t1 * t1 + err2 * err2 * t2 * t2) / time
 
-        val map = ValueMap.of(parnames, voltage, time, total, cr, crErr).builder()
-
-        return map.build()
+        return ValueMap.of(parnames, voltage, time, total, cr, crErr)
     }
 
     private fun mergeDataSets(ds: Collection<Table>): Table {
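The merge arithmetic in this hunk is plain quadrature error propagation. A standalone sketch of the same expressions, with no dataforge types:

// The merged count rate is the live-time weighted mean of the two points;
// the absolute errors (err * t) add in quadrature and are divided by total time.
fun mergeRates(cr1: Double, err1: Double, t1: Double,
               cr2: Double, err2: Double, t2: Double): Pair<Double, Double> {
    val time = t1 + t2
    val cr = (cr1 * t1 + cr2 * t2) / time
    val crErr = Math.sqrt(err1 * err1 * t1 * t1 + err2 * err2 * t2 * t2) / time
    return cr to crErr
}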
@@ -1,149 +0,0 @@
-package inr.numass.subthreshold
-
-import hep.dataforge.context.Context
-import hep.dataforge.data.DataNode
-import hep.dataforge.data.DataSet
-import hep.dataforge.kodex.buildMeta
-import hep.dataforge.kodex.pipe
-import hep.dataforge.kodex.toList
-import hep.dataforge.meta.Meta
-import hep.dataforge.storage.commons.StorageUtils
-import hep.dataforge.tables.ListTable
-import hep.dataforge.tables.Table
-import hep.dataforge.tables.TableTransform
-import hep.dataforge.tables.ValueMap
-import hep.dataforge.values.Values
-import inr.numass.data.analyzers.NumassAnalyzer.Companion.CHANNEL_KEY
-import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_KEY
-import inr.numass.data.analyzers.TimeAnalyzer
-import inr.numass.data.analyzers.spectrumWithBinning
-import inr.numass.data.api.NumassPoint
-import inr.numass.data.api.NumassSet
-import inr.numass.data.api.SimpleNumassPoint
-import inr.numass.data.storage.NumassStorageFactory
-import org.apache.commons.math3.analysis.ParametricUnivariateFunction
-import org.apache.commons.math3.exception.DimensionMismatchException
-import org.apache.commons.math3.fitting.SimpleCurveFitter
-import org.apache.commons.math3.fitting.WeightedObservedPoint
-import java.util.stream.Collectors
-
-internal fun getSpectraMap(context: Context, meta: Meta): DataNode<Table> {
-
-    //creating storage instance
-    val storage = NumassStorageFactory.buildLocal(context, meta.getString("data.dir"), true, false);
-
-    //Reading points
-    //Free operation. No reading done
-    val sets = StorageUtils
-            .loaderStream(storage)
-            .filter { it.fullName.toString().matches(meta.getString("data.mask").toRegex()) }
-            .map {
-                println("loading ${it.fullName}")
-                it as NumassSet
-            }.collect(Collectors.toList());
-
-    val analyzer = TimeAnalyzer();
-
-    val data = DataSet.builder(NumassPoint::class.java).also { dataBuilder ->
-        sets.sortedBy { it.startTime }
-                .flatMap { set -> set.points.toList() }
-                .groupBy { it.voltage }
-                .forEach { key, value ->
-                    val point = SimpleNumassPoint(key, value)
-                    val name = key.toInt().toString()
-                    dataBuilder.putStatic(name, point, buildMeta("meta", "voltage" to key));
-                }
-    }.build()
-
-    return data.pipe(context, meta) {
-        result { input ->
-            analyzer.getAmplitudeSpectrum(input, this.meta)
-        }
-    }
-//    val id = buildMeta {
-//        +meta.getMeta("data")
-//        +meta.getMeta("analyze")
-//    }
-//    return context.getFeature(CachePlugin::class.java).cacheNode("subThreshold", id, spectra)
-}
-
-private val pointNames = arrayOf("U", "amp", "expConst", "correction");
-
-/**
- * Exponential function for fitting
- */
-private class ExponentFunction : ParametricUnivariateFunction {
-    override fun value(x: Double, vararg parameters: Double): Double {
-        if (parameters.size != 2) {
-            throw DimensionMismatchException(parameters.size, 2);
-        }
-        val a = parameters[0];
-        val sigma = parameters[1];
-        //return a * (Math.exp(x / sigma) - 1);
-        return a * Math.exp(x / sigma);
-    }
-
-    override fun gradient(x: Double, vararg parameters: Double): DoubleArray {
-        if (parameters.size != 2) {
-            throw DimensionMismatchException(parameters.size, 2);
-        }
-        val a = parameters[0];
-        val sigma = parameters[1];
-        return doubleArrayOf(Math.exp(x / sigma), -a * x / sigma / sigma * Math.exp(x / sigma))
-    }
-
-}
-
-
-/**
- * Calculate underflow exponent parameters using (xLow, xHigh) window for
- * extrapolation
- *
- * @param xLow
- * @param xHigh
- * @return
- */
-private fun getUnderflowExpParameters(spectrum: Table, xLow: Int, xHigh: Int, binning: Int): Pair<Double, Double> {
-    try {
-        if (xHigh <= xLow) {
-            throw IllegalArgumentException("Wrong borders for underflow calculation");
-        }
-        val binned = TableTransform.filter(
-                spectrumWithBinning(spectrum, binning),
-                CHANNEL_KEY,
-                xLow,
-                xHigh
-        );
-
-        val points = binned.rows
-                .map {
-                    WeightedObservedPoint(
-                            1.0,//1d / p.getValue() , //weight
-                            it.getDouble(CHANNEL_KEY), // x
-                            it.getDouble(COUNT_RATE_KEY) / binning) //y
-                }
-                .collect(Collectors.toList());
-        val fitter = SimpleCurveFitter.create(ExponentFunction(), doubleArrayOf(1.0, 200.0))
-        val res = fitter.fit(points)
-        return Pair(res[0], res[1])
-    } catch (ex: Exception) {
-        return Pair(0.0, 0.0);
-    }
-}
-
-
-internal fun fitPoint(voltage: Double, spectrum: Table, xLow: Int, xHigh: Int, upper: Int, binning: Int): Values {
-    val norm = spectrum.rows.filter { row ->
-        row.getInt(CHANNEL_KEY) in (xLow + 1)..(upper - 1);
-    }.mapToDouble { it.getValue(COUNT_RATE_KEY).doubleValue() }.sum();
-
-    val (a, sigma) = getUnderflowExpParameters(spectrum, xLow, xHigh, binning);
-
-    return ValueMap.of(pointNames, voltage, a, sigma, a * sigma * Math.exp(xLow / sigma) / norm + 1.0);
-}
-
-internal fun fitAllPoints(data: Map<Double, Table>, xLow: Int, xHigh: Int, upper: Int, binning: Int): Table {
-    return ListTable.Builder(*pointNames).apply {
-        data.forEach { voltage, spectrum -> row(fitPoint(voltage, spectrum, xLow, xHigh, upper, binning)) }
-    }.build()
-}
numass-main/src/main/kotlin/inr/numass/subthreshold/Threshold.kt (new file, 226 lines)
@@ -0,0 +1,226 @@
+package inr.numass.subthreshold
+
+import hep.dataforge.context.Context
+import hep.dataforge.data.DataNode
+import hep.dataforge.data.DataSet
+import hep.dataforge.kodex.buildMeta
+import hep.dataforge.kodex.pipe
+import hep.dataforge.kodex.toList
+import hep.dataforge.meta.Meta
+import hep.dataforge.storage.commons.StorageUtils
+import hep.dataforge.tables.ListTable
+import hep.dataforge.tables.Table
+import hep.dataforge.tables.TableTransform
+import hep.dataforge.tables.ValueMap
+import hep.dataforge.values.Values
+import inr.numass.data.analyzers.NumassAnalyzer.Companion.CHANNEL_KEY
+import inr.numass.data.analyzers.NumassAnalyzer.Companion.COUNT_RATE_KEY
+import inr.numass.data.analyzers.TimeAnalyzer
+import inr.numass.data.analyzers.spectrumWithBinning
+import inr.numass.data.api.NumassPoint
+import inr.numass.data.api.NumassSet
+import inr.numass.data.api.SimpleNumassPoint
+import inr.numass.data.storage.NumassStorageFactory
+import org.apache.commons.math3.analysis.ParametricUnivariateFunction
+import org.apache.commons.math3.exception.DimensionMismatchException
+import org.apache.commons.math3.fitting.SimpleCurveFitter
+import org.apache.commons.math3.fitting.WeightedObservedPoint
+import java.util.stream.Collectors
+
+
+object Threshold {
+
+    fun getSpectraMap(context: Context, meta: Meta): DataNode<Table> {
+
+        //creating storage instance
+        val storage = NumassStorageFactory.buildLocal(context, meta.getString("data.dir"), true, false);
+
+        //Reading points
+        //Free operation. No reading done
+        val sets = StorageUtils
+                .loaderStream(storage)
+                .filter { it.fullName.toString().matches(meta.getString("data.mask").toRegex()) }
+                .map {
+                    println("loading ${it.fullName}")
+                    it as NumassSet
+                }.collect(Collectors.toList());
+
+        val analyzer = TimeAnalyzer();
+
+        val data = DataSet.builder(NumassPoint::class.java).also { dataBuilder ->
+            sets.sortedBy { it.startTime }
+                    .flatMap { set -> set.points.toList() }
+                    .groupBy { it.voltage }
+                    .forEach { key, value ->
+                        val point = SimpleNumassPoint(key, value)
+                        val name = key.toInt().toString()
+                        dataBuilder.putStatic(name, point, buildMeta("meta", "voltage" to key));
+                    }
+        }.build()
+
+        return data.pipe(context, meta) {
+            result { input ->
+                analyzer.getAmplitudeSpectrum(input, this.meta)
+            }
+        }
+//        val id = buildMeta {
+//            +meta.getMeta("data")
+//            +meta.getMeta("analyze")
+//        }
+//        return context.getFeature(CachePlugin::class.java).cacheNode("subThreshold", id, spectra)
+    }
+
+
+    /**
+     * Prepare the list of weighted points for fitter
+     */
+    private fun preparePoints(spectrum: Table, xLow: Int, xHigh: Int, binning: Int): List<WeightedObservedPoint> {
+        if (xHigh <= xLow) {
+            throw IllegalArgumentException("Wrong borders for underflow calculation");
+        }
+        val binned = TableTransform.filter(
+                spectrumWithBinning(spectrum, binning),
+                CHANNEL_KEY,
+                xLow,
+                xHigh
+        )
+
+        return binned.rows
+                .map {
+                    WeightedObservedPoint(
+                            1.0,//1d / p.getValue() , //weight
+                            it.getDouble(CHANNEL_KEY), // x
+                            it.getDouble(COUNT_RATE_KEY) / binning) //y
+                }
+                .collect(Collectors.toList())
+    }
+
+    private fun norm(spectrum: Table, xLow: Int, upper: Int): Double {
+        return spectrum.rows.filter { row ->
+            row.getInt(CHANNEL_KEY) in (xLow + 1)..(upper - 1)
+        }.mapToDouble { it.getValue(COUNT_RATE_KEY).doubleValue() }.sum()
+    }
+
+    private val expPointNames = arrayOf("U", "amp", "expConst", "correction");
+
+    /**
+     * Exponential function for fitting
+     */
+    private class ExponentFunction : ParametricUnivariateFunction {
+        override fun value(x: Double, vararg parameters: Double): Double {
+            if (parameters.size != 2) {
+                throw DimensionMismatchException(parameters.size, 2);
+            }
+            val a = parameters[0];
+            val sigma = parameters[1];
+            //return a * (Math.exp(x / sigma) - 1);
+            return a * Math.exp(x / sigma);
+        }
+
+        override fun gradient(x: Double, vararg parameters: Double): DoubleArray {
+            if (parameters.size != 2) {
+                throw DimensionMismatchException(parameters.size, 2);
+            }
+            val a = parameters[0];
+            val sigma = parameters[1];
+            return doubleArrayOf(Math.exp(x / sigma), -a * x / sigma / sigma * Math.exp(x / sigma))
+        }
+    }
+
+
+    /**
+     * Exponential function $a e^{\frac{x}{\sigma}}$
+     */
+    private fun exponential(spectrum: Table, voltage: Double, config: Meta): Values {
+        val xLow: Int = config.getInt("xLow", 400)
+        val xHigh: Int = config.getInt("xHigh", 700)
+        val upper: Int = config.getInt("upper", 3100)
+        val binning: Int = config.getInt("binning", 20)
+
+
+        val fitter = SimpleCurveFitter.create(ExponentFunction(), doubleArrayOf(1.0, 200.0))
+        val (a, sigma) = fitter.fit(preparePoints(spectrum, xLow, xHigh, binning))
+
+        val norm = norm(spectrum, xLow, upper)
+
+        return ValueMap.ofPairs(
+                "U" to voltage,
+                "a" to a,
+                "sigma" to sigma,
+                "correction" to a * sigma * Math.exp(xLow / sigma) / norm + 1.0
+        )
+    }
+
+
+    private class PowerFunction(val shift: Double? = null) : ParametricUnivariateFunction {
+
+        override fun value(x: Double, vararg parameters: Double): Double {
+            val a = parameters[0]
+            val beta = parameters[1]
+            val delta = shift ?: parameters[2]
+
+            return a * Math.pow(x - delta, beta)
+        }
+
+        override fun gradient(x: Double, vararg parameters: Double): DoubleArray {
+            val a = parameters[0]
+            val beta = parameters[1]
+            val delta = shift ?: parameters[2]
+            return if (parameters.size > 2) {
+                doubleArrayOf(
+                        Math.pow(x - delta, beta),
+                        a * Math.pow(x - delta, beta) * Math.log(x - delta),
+                        -a * beta * Math.pow(x - delta, beta - 1)
+                )
+            } else {
+                doubleArrayOf(
+                        Math.pow(x - delta, beta),
+                        a * Math.pow(x - delta, beta) * Math.log(x - delta)
+                )
+            }
+        }
+    }
+
+    /**
+     * Power function $a (x-\delta)^{\beta}$
+     */
+    private fun power(spectrum: Table, voltage: Double, config: Meta): Values {
+        val xLow: Int = config.getInt("xLow", 400)
+        val xHigh: Int = config.getInt("xHigh", 700)
+        val upper: Int = config.getInt("upper", 3100)
+        val binning: Int = config.getInt("binning", 20)
+
+        //val fitter = SimpleCurveFitter.create(PowerFunction(), doubleArrayOf(1e-2, 1.5, 0.0))
+        //val (a, beta, delta) = fitter.fit(preparePoints(spectrum, xLow, xHigh, binning))
+
+        val delta = config.getDouble("delta", 0.0)
+        val fitter = SimpleCurveFitter.create(PowerFunction(delta), doubleArrayOf(1e-2, 1.5))
+        val (a, beta) = fitter.fit(preparePoints(spectrum, xLow, xHigh, binning))
+
+
+        val norm = norm(spectrum, xLow, upper)
+
+        return ValueMap.ofPairs(
+                "U" to voltage,
+                "a" to a,
+                "beta" to beta,
+                "delta" to delta,
+                "correction" to a / (beta + 1) * Math.pow(xLow - delta, beta + 1.0) / norm + 1.0
+        )
+    }
+
+    fun calculateSubThreshold(spectrum: Table, voltage: Double, config: Meta): Values {
+        return when (config.getString("method", "exp")) {
+            "exp" -> exponential(spectrum, voltage, config)
+            "pow" -> power(spectrum, voltage, config)
+            else -> throw RuntimeException("Unknown sub threshold calculation method")
+        }
+    }
+
+    fun calculateSubThreshold(spectra: Map<Double, Table>, config: Meta): Table {
+        return ListTable.Builder().apply {
+            spectra.forEach { voltage, spectrum -> row(calculateSubThreshold(spectrum, voltage, config)) }
+        }.build()
+    }
+
+}
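The two "correction" expressions in Threshold.kt are closed-form underflow integrals of the fitted model, normalized by the measured count sum and shifted by 1: for the exponential, the integral of a*e^(x/sigma) over (-inf, xLow] is a*sigma*e^(xLow/sigma); for the power law, the integral of a*(x-delta)^beta over [delta, xLow] is a/(beta+1)*(xLow-delta)^(beta+1). A quick numeric sanity check in plain Kotlin; the parameter values below are arbitrary illustrations, not fitted values:

import kotlin.math.exp
import kotlin.math.pow

fun main() {
    val a = 0.05
    val sigma = 200.0
    val xLow = 400

    // exponential model: closed form vs a left Riemann sum over (-inf, xLow]
    // (the tail below xLow - 4000 is negligible for these parameters)
    val closedExp = a * sigma * exp(xLow / sigma)
    val numericExp = (0 until 400_000).sumOf { i -> a * exp((xLow - i * 0.01) / sigma) * 0.01 }
    println("exp underflow: closed=$closedExp numeric=$numericExp")

    val beta = 1.5
    val delta = 0.0

    // power model: closed form vs a left Riemann sum over [delta, xLow]
    val closedPow = a / (beta + 1) * (xLow - delta).pow(beta + 1)
    val numericPow = (0 until 400_000).sumOf { i ->
        val x = delta + i * 0.001
        a * (x - delta).pow(beta) * 0.001
    }
    println("pow underflow: closed=$closedPow numeric=$numericPow")
}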