forked from kscience/kmath
Fixed QOW optimization

parent dfd6e0a949
commit 8d33d6beab
@@ -14,6 +14,7 @@ import space.kscience.kmath.expressions.Symbol
 import space.kscience.kmath.expressions.binding
 import space.kscience.kmath.expressions.symbol
 import space.kscience.kmath.optimization.QowOptimizer
+import space.kscience.kmath.optimization.chiSquaredOrNull
 import space.kscience.kmath.optimization.fitWith
 import space.kscience.kmath.optimization.resultPoint
 import space.kscience.kmath.real.map
@@ -50,7 +51,7 @@ suspend fun main() {

     //Perform an operation on each x value (much more effective, than numpy)
     val y = x.map { it ->
-        val value = it.pow(2) + it + 100
+        val value = it.pow(2) + it + 1
         value + chain.next() * sqrt(value)
     }
     // this will also work, but less effective:
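
The change above lowers the constant term of the synthetic model from 100 to 1. For orientation, here is a minimal standalone sketch (plain JVM Kotlin, not the kmath example; the grid and the seed are assumptions) of the data this produces: y(x) = x^2 + x + 1 plus Gaussian noise scaled by sqrt(y), a Poisson-like spread, so sqrt(y) is the natural per-point error and the chi-squared against the true curve is about 1 per point.

    import java.util.Random
    import kotlin.math.pow
    import kotlin.math.sqrt

    fun main() {
        val rng = Random(123L)                          // assumed seed, for reproducibility
        val x = (1..100).map { it / 10.0 }              // assumed grid; the real one is not shown in this diff
        val y = x.map { xi ->
            val value = xi.pow(2) + xi + 1.0            // true model, as in the fixed example
            value + rng.nextGaussian() * sqrt(value)    // Poisson-like spread: sigma = sqrt(value)
        }
        // Chi-squared of the data against the true curve, using yErr = sqrt(value):
        // roughly 1 per point. After fitting a, b, c the example divides by (x.size - 3) instead.
        val chi2 = x.zip(y).sumOf { (xi, yi) ->
            val mu = xi.pow(2) + xi + 1.0
            ((yi - mu) / sqrt(mu)).pow(2)
        }
        println("chi2 per point ≈ ${chi2 / x.size}")
    }
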
@@ -63,7 +64,7 @@ suspend fun main() {
     val result = XYErrorColumnarData.of(x, y, yErr).fitWith(
         QowOptimizer,
         DSProcessor,
-        mapOf(a to 1.0, b to 1.2, c to 99.0)
+        mapOf(a to 0.9, b to 1.2, c to 2.0)
     ) { arg ->
         //bind variables to autodiff context
         val a by binding
@@ -96,9 +97,9 @@ suspend fun main() {
         h3 {
             +"Fit result: ${result.resultPoint}"
         }
-//        h3 {
-//            +"Chi2/dof = ${result.resultValue / (x.size - 3)}"
-//        }
+        h3 {
+            +"Chi2/dof = ${result.chiSquaredOrNull!! / (x.size - 3)}"
+        }
     }

     page.makeFile()
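
Note that the new reporting line uses chiSquaredOrNull!! (the property, added further down in this commit, returns null while the fit has no result point), so it would throw for an incomplete fit. A null-safe variant of the reporting could look like this hypothetical helper (plain Kotlin, not part of the example):

    // Format the reduced chi-squared, falling back to a message when the fit is incomplete.
    fun chi2Report(chi2OrNull: Double?, points: Int, fittedParameters: Int): String =
        chi2OrNull?.let { "Chi2/dof = ${it / (points - fittedParameters)}" }
            ?: "Chi2 is not available: the fit did not complete"

In the example it would be called as chi2Report(result.chiSquaredOrNull, x.size, 3), since three parameters (a, b, c) are fitted.
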
@@ -50,7 +50,7 @@ public object QowOptimizer : Optimizer<Double, XYFit> {
         */
        val dispersion: Point<Double> by lazy {
            DoubleBuffer(problem.data.size) { d ->
-               problem.weight(d).invoke(parameters)
+               1.0/problem.weight(d).invoke(parameters)
            }
        }

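Elsewhere in this commit (the logLikelihood comment below) sqrt(weight) stands in for 1/sigma, i.e. weight = 1/sigma^2, so the per-point dispersion sigma^2 is the reciprocal of the weight, which is what the changed line now computes. A tiny illustration of that relation (plain Kotlin, names and values are assumptions, not kmath API):

    import kotlin.math.pow

    fun main() {
        val sigma = 2.0
        val weight = 1.0 / sigma.pow(2)  // what a 1/sigma^2 point weight evaluates to
        val dispersion = 1.0 / weight    // the quantity the fixed line computes
        println(dispersion)              // prints 4.0, i.e. sigma^2
    }
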
@@ -7,6 +7,7 @@
 package space.kscience.kmath.optimization

 import space.kscience.kmath.data.XYColumnarData
+import space.kscience.kmath.data.indices
 import space.kscience.kmath.expressions.*
 import space.kscience.kmath.misc.FeatureSet
 import space.kscience.kmath.misc.Loggable
@@ -81,6 +82,7 @@ public class XYFit(
     override val features: FeatureSet<OptimizationFeature>,
     internal val pointToCurveDistance: PointToCurveDistance = PointToCurveDistance.byY,
     internal val pointWeight: PointWeight = PointWeight.byYSigma,
+    public val xSymbol: Symbol = Symbol.x,
 ) : OptimizationProblem<Double> {
     public fun distance(index: Int): DifferentiableExpression<Double> = pointToCurveDistance.distance(this, index)

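The added xSymbol property records which symbol of the model expression plays the role of the x coordinate (Symbol.x by default). A model is then evaluated at a data point by substituting that symbol next to the parameter values, as the chiSquaredOrNull code added below does; a sketch with a hypothetical helper name:

    import space.kscience.kmath.expressions.DifferentiableExpression
    import space.kscience.kmath.expressions.Symbol

    // Hypothetical helper: evaluate a model at x by adding the x symbol to the parameter map.
    fun predictAt(
        model: DifferentiableExpression<Double>,
        parameters: Map<Symbol, Double>,
        xSymbol: Symbol,
        x: Double,
    ): Double = model.invoke(parameters + (xSymbol to x))
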
@@ -119,7 +121,26 @@ public suspend fun <I : Any, A> XYColumnarData<Double, Double, Double>.fitWith(
         modelExpression,
         actualFeatures,
         pointToCurveDistance,
-        pointWeight
+        pointWeight,
+        xSymbol
     )
     return optimizer.optimize(problem)
 }
+
+/**
+ * Compute chi squared value for completed fit. Return null for incomplete fit
+ */
+public val XYFit.chiSquaredOrNull: Double? get() {
+    val result = resultPointOrNull ?: return null
+
+    return data.indices.sumOf { index ->
+
+        val x = data.x[index]
+        val y = data.y[index]
+        val yErr = data[Symbol.yError]?.get(index) ?: 1.0
+
+        val mu = model.invoke(result + (xSymbol to x))
+
+        ((y - mu) / yErr).pow(2)
+    }
+}
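
The summand ((y - mu) / yErr).pow(2) added here is the squared residual normalised by the quoted error; with the weight convention used elsewhere in this commit (weight = 1/yErr^2) it equals weight * (y - mu)^2. A two-line numeric check with arbitrary assumed values:

    import kotlin.math.pow

    fun main() {
        val y = 4.2
        val mu = 3.9
        val yErr = 0.5
        val weight = 1.0 / yErr.pow(2)             // the 1/sigma^2 convention used in this commit
        val asResidual = ((y - mu) / yErr).pow(2)  // form used by chiSquaredOrNull
        val asWeighted = weight * (y - mu).pow(2)  // same quantity through the point weight
        println("$asResidual == $asWeighted")
    }
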
@@ -26,7 +26,7 @@ internal fun XYFit.logLikelihood(): DifferentiableExpression<Double> = object :
     data.indices.sumOf { index ->
         val d = distance(index)(arguments)
         val weight = weight(index)(arguments)
-        val weightDerivative = weight(index)(arguments)
+        val weightDerivative = weight(index).derivative(symbols)(arguments)

        // -1 / (sqrt(2 PI) * sigma) + 2 (x-mu)/ 2 sigma^2 * d mu/ d theta - (x-mu)^2 / 2 * d w/ d theta
        return@sumOf -oneOver2Pi * sqrt(weight) + //offset derivative
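
The fix replaces the weight value with the derivative of the weight expression in the parameter symbols, which is what the Gaussian log-likelihood gradient actually needs: for a single point, lnL = 0.5*ln(w) - 0.5*w*(y - mu)^2 up to a constant, and d(lnL)/d(theta) contains dw/dtheta terms. A standalone numeric sanity check of that derivative (plain Kotlin; the toy model and values are assumptions, not kmath code):

    import kotlin.math.abs
    import kotlin.math.ln
    import kotlin.math.pow

    fun main() {
        val y = 3.0

        // toy model mu(theta) and weight w(theta) = 1/sigma(theta)^2, both parameter dependent
        fun mu(theta: Double) = theta * theta + theta
        fun w(theta: Double) = 1.0 / (1.0 + theta * theta)
        // per-point Gaussian log-likelihood, up to an additive constant
        fun lnL(theta: Double) = 0.5 * ln(w(theta)) - 0.5 * w(theta) * (y - mu(theta)).pow(2)

        val theta = 0.7
        // analytic derivative: dw/(2w) + w*(y - mu)*dmu - 0.5*(y - mu)^2*dw
        val dMu = 2 * theta + 1
        val dW = -2 * theta / (1.0 + theta * theta).pow(2)
        val analytic = dW / (2 * w(theta)) + w(theta) * (y - mu(theta)) * dMu -
                0.5 * (y - mu(theta)).pow(2) * dW

        // central finite difference as a cross-check
        val h = 1e-6
        val numeric = (lnL(theta + h) - lnL(theta - h)) / (2 * h)
        println("analytic=$analytic numeric=$numeric diff=${abs(analytic - numeric)}")
    }

The analytic and numeric values should agree to roughly 1e-9, confirming that the dw/dtheta term (and hence the derivative of the weight expression, not its value) belongs in the sum.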