forked from kscience/kmath

Separate object for fitting. Chi-squared

commit 30132964dd (parent 57781678e5)
@@ -11,6 +11,7 @@
 - A separate `Symbol` entity, which is used for global unbound symbols.
 - A `Symbol` indexing scope.
 - Basic optimization API for Commons-math.
+- Chi squared optimization for array-like data in CM

 ### Changed
 - Package changed from `scientifik` to `kscience.kmath`.
@@ -0,0 +1,103 @@
package kscience.kmath.commons.optimization

import kscience.kmath.commons.expressions.DerivativeStructureExpression
import kscience.kmath.commons.expressions.DerivativeStructureField
import kscience.kmath.expressions.DifferentiableExpression
import kscience.kmath.expressions.Expression
import kscience.kmath.expressions.StringSymbol
import kscience.kmath.expressions.Symbol
import kscience.kmath.structures.Buffer
import kscience.kmath.structures.indices
import org.apache.commons.math3.analysis.differentiation.DerivativeStructure
import org.apache.commons.math3.optim.nonlinear.scalar.GoalType
import kotlin.math.pow

public object CMFit {

    /**
     * Generate a chi squared expression from given x-y-sigma data and a model represented by an expression.
     * Does not provide derivatives.
     * TODO move to core/separate module
     */
    public fun chiSquaredExpression(
        x: Buffer<Double>,
        y: Buffer<Double>,
        yErr: Buffer<Double>,
        model: Expression<Double>,
        xSymbol: Symbol = StringSymbol("x"),
    ): Expression<Double> {
        require(x.size == y.size) { "X and y buffers should be of the same size" }
        require(y.size == yErr.size) { "Y and yErr buffers should be of the same size" }

        return Expression { arguments ->
            x.indices.sumByDouble {
                val xValue = x[it]
                val yValue = y[it]
                val yErrValue = yErr[it]
                val modifiedArgs = arguments + (xSymbol to xValue)
                val modelValue = model(modifiedArgs)
                ((yValue - modelValue) / yErrValue).pow(2) / 2
            }
        }
    }

    /**
     * Generate a chi squared expression from given x-y-sigma data and an inline model. Provides automatic differentiation.
     */
    public fun chiSquaredExpression(
        x: Buffer<Double>,
        y: Buffer<Double>,
        yErr: Buffer<Double>,
        model: DerivativeStructureField.(x: DerivativeStructure) -> DerivativeStructure,
    ): DerivativeStructureExpression {
        require(x.size == y.size) { "X and y buffers should be of the same size" }
        require(y.size == yErr.size) { "Y and yErr buffers should be of the same size" }

        return DerivativeStructureExpression {
            var sum = zero

            x.indices.forEach {
                val xValue = x[it]
                val yValue = y[it]
                val yErrValue = yErr[it]
                val modelValue = model(const(xValue))
                sum += ((yValue - modelValue) / yErrValue).pow(2) / 2
            }

            sum
        }
    }
}

/**
 * Optimize expression without derivatives
 */
public fun Expression<Double>.optimize(
    vararg symbols: Symbol,
    configuration: CMOptimizationProblem.() -> Unit,
): OptimizationResult<Double> {
    require(symbols.isNotEmpty()) { "Must provide a list of symbols for optimization" }
    val problem = CMOptimizationProblem(symbols.toList()).apply(configuration)
    problem.expression(this)
    return problem.optimize()
}

/**
 * Optimize differentiable expression
 */
public fun DifferentiableExpression<Double>.optimize(
    vararg symbols: Symbol,
    configuration: CMOptimizationProblem.() -> Unit,
): OptimizationResult<Double> {
    require(symbols.isNotEmpty()) { "Must provide a list of symbols for optimization" }
    val problem = CMOptimizationProblem(symbols.toList()).apply(configuration)
    problem.diffExpression(this)
    return problem.optimize()
}

/**
 * Minimize a differentiable expression, forcing the minimization goal explicitly
 */
public fun DifferentiableExpression<Double>.minimize(
    vararg symbols: Symbol,
    configuration: CMOptimizationProblem.() -> Unit,
): OptimizationResult<Double> {
    require(symbols.isNotEmpty()) { "Must provide a list of symbols for optimization" }
    val problem = CMOptimizationProblem(symbols.toList()).apply(configuration)
    problem.diffExpression(this)
    problem.goal(GoalType.MINIMIZE)
    return problem.optimize()
}
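For orientation (not part of the commit), here is a minimal sketch of how the derivative-free path above fits together. `CMFit.chiSquaredExpression`, the `optimize` extension, `initialGuess`, and `goal` are taken from this diff; the `RealBuffer` factory, the sample numbers, and printing the raw `OptimizationResult` are assumptions made only for illustration.

import kscience.kmath.commons.optimization.CMFit
import kscience.kmath.commons.optimization.optimize
import kscience.kmath.expressions.Expression
import kscience.kmath.expressions.StringSymbol
import kscience.kmath.structures.RealBuffer // assumed Buffer<Double> factory; substitute whatever is available
import org.apache.commons.math3.optim.nonlinear.scalar.GoalType

fun main() {
    // Free parameters of the model y = a * x + b, plus the data symbol bound by chiSquaredExpression.
    val a = StringSymbol("a")
    val b = StringSymbol("b")
    val xSymbol = StringSymbol("x")

    // Illustrative data points with constant errors (assumed values).
    val x = RealBuffer(1.0, 2.0, 3.0)
    val y = RealBuffer(2.1, 3.9, 6.2)
    val yErr = RealBuffer(0.1, 0.1, 0.1)

    // Plain Expression model, mirroring the Expression { ... } construction used inside chiSquaredExpression.
    val model: Expression<Double> = Expression { args ->
        args.getValue(a) * args.getValue(xSymbol) + args.getValue(b)
    }

    // Build the chi-squared objective and minimize it; no derivatives are available on this path.
    val chi2 = CMFit.chiSquaredExpression(x, y, yErr, model, xSymbol)

    val result = chi2.optimize(a, b) {
        initialGuess(a to 1.0, b to 0.0)
        goal(GoalType.MINIMIZE)
    }

    println(result)
}

The differentiable overload of `chiSquaredExpression` plus `minimize` follows the same shape, except that the model is written against `DerivativeStructureField`, so the resulting expression carries derivatives and the gradient-based optimizer can be used by default.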
@@ -1,32 +0,0 @@
package kscience.kmath.commons.optimization

import kscience.kmath.expressions.DifferentiableExpression
import kscience.kmath.expressions.Expression
import kscience.kmath.expressions.Symbol

/**
 * Optimize expression without derivatives
 */
public fun Expression<Double>.optimize(
    vararg symbols: Symbol,
    configuration: CMOptimizationProblem.() -> Unit,
): OptimizationResult<Double> {
    require(symbols.isNotEmpty()) { "Must provide a list of symbols for optimization" }
    val problem = CMOptimizationProblem(symbols.toList()).apply(configuration)
    problem.expression(this)
    return problem.optimize()
}

/**
 * Optimize differentiable expression
 */
public fun DifferentiableExpression<Double>.optimize(
    vararg symbols: Symbol,
    configuration: CMOptimizationProblem.() -> Unit,
): OptimizationResult<Double> {
    require(symbols.isNotEmpty()) { "Must provide a list of symbols for optimization" }
    val problem = CMOptimizationProblem(symbols.toList()).apply(configuration)
    problem.diffExpression(this)
    return problem.optimize()
}
@@ -13,7 +13,7 @@ internal class OptimizeTest {
     }

     @Test
-    fun testOptimization() {
+    fun testGradientOptimization() {
         val result = normal.optimize(x, y) {
             initialGuess(x to 1.0, y to 1.0)
             // No need to select an optimizer: the gradient optimizer is used by default because gradients are provided by the function.
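The renamed test exercises `optimize`, relying on the default goal; the commit also adds an explicit `minimize` entry point that sets `GoalType.MINIMIZE` on the problem. A hedged sketch of how the same fixture might call it, assuming `normal`, `x`, and `y` are the differentiable expression and symbols already defined in `OptimizeTest`:

// Same fixture as testGradientOptimization, routed through the new minimize extension,
// which configures GoalType.MINIMIZE before running the optimizer.
val minimized = normal.minimize(x, y) {
    initialGuess(x to 1.0, y to 1.0)
}
println(minimized)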