0.3.1-dev-11 #510

Merged
altavir merged 80 commits from dev into master 2023-04-05 18:46:36 +03:00
10 changed files with 173 additions and 69 deletions
Showing only changes of commit 7f4f4c7703

View File

@@ -2,6 +2,8 @@
 ## [Unreleased]
 ### Added
+- `NamedMatrix` - matrix with symbol-based indexing
+- `Expression` with default arguments
 - Type-aliases for numbers like `Float64`
 - 2D optimal trajectory computation in a separate module `kmath-trajectory`
 - Autodiff for generic algebra elements in core!

View File

@@ -15,7 +15,7 @@ allprojects {
 }
 group = "space.kscience"
-version = "0.3.1-dev-8"
+version = "0.3.1-dev-9"
 }
 subprojects {

View File

@@ -10,7 +10,6 @@ import kotlinx.html.h3
 import space.kscience.kmath.commons.optimization.CMOptimizer
 import space.kscience.kmath.distributions.NormalDistribution
 import space.kscience.kmath.expressions.autodiff
-import space.kscience.kmath.expressions.chiSquaredExpression
 import space.kscience.kmath.expressions.symbol
 import space.kscience.kmath.operations.asIterable
 import space.kscience.kmath.operations.toList
@@ -22,6 +21,7 @@ import space.kscience.kmath.random.RandomGenerator
 import space.kscience.kmath.real.DoubleVector
 import space.kscience.kmath.real.map
 import space.kscience.kmath.real.step
+import space.kscience.kmath.stat.chiSquaredExpression
 import space.kscience.plotly.*
 import space.kscience.plotly.models.ScatterMode
 import space.kscience.plotly.models.TraceValues

View File

@@ -15,10 +15,7 @@ import space.kscience.kmath.expressions.binding
 import space.kscience.kmath.expressions.symbol
 import space.kscience.kmath.operations.asIterable
 import space.kscience.kmath.operations.toList
-import space.kscience.kmath.optimization.QowOptimizer
-import space.kscience.kmath.optimization.chiSquaredOrNull
-import space.kscience.kmath.optimization.fitWith
-import space.kscience.kmath.optimization.resultPoint
+import space.kscience.kmath.optimization.*
 import space.kscience.kmath.random.RandomGenerator
 import space.kscience.kmath.real.map
 import space.kscience.kmath.real.step
@@ -32,6 +29,8 @@ import kotlin.math.sqrt
 private val a by symbol
 private val b by symbol
 private val c by symbol
+private val d by symbol
+private val e by symbol
 /**
@@ -64,16 +63,22 @@ suspend fun main() {
     val result = XYErrorColumnarData.of(x, y, yErr).fitWith(
         QowOptimizer,
         Double.autodiff,
-        mapOf(a to 0.9, b to 1.2, c to 2.0)
+        mapOf(a to 0.9, b to 1.2, c to 2.0, e to 1.0, d to 1.0, e to 0.0),
+        OptimizationParameters(a, b, c, d)
     ) { arg ->
         //bind variables to autodiff context
         val a by binding
         val b by binding
         //Include default value for c if it is not provided as a parameter
         val c = bindSymbolOrNull(c) ?: one
-        a * arg.pow(2) + b * arg + c
+        val d by binding
+        val e by binding
+        a * arg.pow(2) + b * arg + c + d * arg.pow(3) + e / arg
     }
+    println("Resulting chi2/dof: ${result.chiSquaredOrNull}/${result.dof}")
     //display a page with plot and numerical results
     val page = Plotly.page {
         plot {
@@ -89,7 +94,7 @@ suspend fun main() {
             scatter {
                 mode = ScatterMode.lines
                 x(x)
-                y(x.map { result.model(result.resultPoint + (Symbol.x to it)) })
+                y(x.map { result.model(result.startPoint + result.resultPoint + (Symbol.x to it)) })
                 name = "fit"
             }
         }
@@ -98,7 +103,7 @@ suspend fun main() {
             +"Fit result: ${result.resultPoint}"
         }
         h3 {
-            +"Chi2/dof = ${result.chiSquaredOrNull!! / (x.size - 3)}"
+            +"Chi2/dof = ${result.chiSquaredOrNull!! / result.dof}"
         }
     }

View File

@@ -0,0 +1,31 @@
/*
 * Copyright 2018-2023 KMath contributors.
 * Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
 */
package space.kscience.kmath.expressions

public class ExpressionWithDefault<T>(
    private val origin: Expression<T>,
    private val defaultArgs: Map<Symbol, T>,
) : Expression<T> {
    override fun invoke(arguments: Map<Symbol, T>): T = origin.invoke(defaultArgs + arguments)
}

public fun <T> Expression<T>.withDefaultArgs(defaultArgs: Map<Symbol, T>): ExpressionWithDefault<T> =
    ExpressionWithDefault(this, defaultArgs)

public class DiffExpressionWithDefault<T>(
    private val origin: DifferentiableExpression<T>,
    private val defaultArgs: Map<Symbol, T>,
) : DifferentiableExpression<T> {
    override fun invoke(arguments: Map<Symbol, T>): T = origin.invoke(defaultArgs + arguments)

    override fun derivativeOrNull(symbols: List<Symbol>): Expression<T>? =
        origin.derivativeOrNull(symbols)?.withDefaultArgs(defaultArgs)
}

public fun <T> DifferentiableExpression<T>.withDefaultArgs(defaultArgs: Map<Symbol, T>): DiffExpressionWithDefault<T> =
    DiffExpressionWithDefault(this, defaultArgs)
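A usage sketch for the new default-argument wrapper (illustrative, not part of the commit; it assumes Expression is the fun interface whose invoke takes a Map<Symbol, T>, as the declarations above suggest):

import space.kscience.kmath.expressions.Expression
import space.kscience.kmath.expressions.symbol
import space.kscience.kmath.expressions.withDefaultArgs

private val x by symbol
private val shift by symbol

// 2 * x + shift over a plain symbol-to-value map
private val line = Expression<Double> { args ->
    2.0 * (args[x] ?: error("x is not provided")) + (args[shift] ?: error("shift is not provided"))
}

fun main() {
    // Pre-fill `shift`; an explicit argument still wins because defaults are merged as defaultArgs + arguments
    val lineWithDefault = line.withDefaultArgs(mapOf(shift to 1.0))
    println(lineWithDefault(mapOf(x to 2.0)))               // uses the default shift = 1.0 -> 5.0
    println(lineWithDefault(mapOf(x to 2.0, shift to 0.0))) // overrides the default -> 4.0
}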

View File

@@ -0,0 +1,37 @@
/*
 * Copyright 2018-2023 KMath contributors.
 * Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
 */

@file:OptIn(UnstableKMathAPI::class)

package space.kscience.kmath.expressions

import space.kscience.kmath.linear.Matrix
import space.kscience.kmath.misc.UnstableKMathAPI
import space.kscience.kmath.structures.getOrNull

public class NamedMatrix<T>(public val values: Matrix<T>, public val indexer: SymbolIndexer) : Matrix<T> by values {
    public operator fun get(i: Symbol, j: Symbol): T = get(indexer.indexOf(i), indexer.indexOf(j))

    public companion object {
        public fun toStringWithSymbols(values: Matrix<*>, indexer: SymbolIndexer): String = buildString {
            appendLine(indexer.symbols.joinToString(separator = "\t", prefix = "\t\t"))
            indexer.symbols.forEach { i ->
                append(i.identity + "\t")
                values.rows.getOrNull(indexer.indexOf(i))?.let { row ->
                    indexer.symbols.forEach { j ->
                        append(row.getOrNull(indexer.indexOf(j)).toString())
                        append("\t")
                    }
                    appendLine()
                }
            }
        }
    }
}

public fun <T> Matrix<T>.named(indexer: SymbolIndexer): NamedMatrix<T> = NamedMatrix(this, indexer)

public fun <T> Matrix<T>.named(symbols: List<Symbol>): NamedMatrix<T> = named(SimpleSymbolIndexer(symbols))
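A usage sketch for symbol-based indexing (illustrative; the Double.algebra.linearSpace entry point and the exact import paths are assumptions outside this diff):

@file:OptIn(UnstableKMathAPI::class)

import space.kscience.kmath.expressions.NamedMatrix
import space.kscience.kmath.expressions.named
import space.kscience.kmath.expressions.symbol
import space.kscience.kmath.linear.linearSpace
import space.kscience.kmath.misc.UnstableKMathAPI
import space.kscience.kmath.operations.algebra

private val a by symbol
private val b by symbol

fun main() {
    // A plain 2x2 matrix built through the double linear space
    val matrix = Double.algebra.linearSpace.buildMatrix(2, 2) { i, j ->
        if (i == j) 1.0 else 0.1
    }
    // Attach symbolic row/column labels and read entries by symbol instead of integer index
    val named = matrix.named(listOf(a, b))
    println(named[a, b]) // 0.1
    println(NamedMatrix.toStringWithSymbols(named, named.indexer))
}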

View File

@@ -6,8 +6,8 @@
 package space.kscience.kmath.optimization
 import space.kscience.kmath.expressions.DifferentiableExpression
+import space.kscience.kmath.expressions.NamedMatrix
 import space.kscience.kmath.expressions.Symbol
-import space.kscience.kmath.linear.Matrix
 import space.kscience.kmath.misc.*
 import kotlin.reflect.KClass
@@ -32,7 +32,10 @@ public interface OptimizationPrior<T> : OptimizationFeature, DifferentiableExpre
     override val key: FeatureKey<OptimizationFeature> get() = OptimizationPrior::class
 }
-public class OptimizationCovariance<T>(public val covariance: Matrix<T>) : OptimizationFeature {
+/**
+ * Covariance matrix of the optimization parameters
+ */
+public class OptimizationCovariance<T>(public val covariance: NamedMatrix<T>) : OptimizationFeature {
     override fun toString(): String = "Covariance($covariance)"
 }
@@ -57,10 +60,20 @@ public class OptimizationLog(private val loggable: Loggable) : Loggable by logga
     override fun toString(): String = "Log($loggable)"
 }
+/**
+ * Free parameters of the optimization
+ */
 public class OptimizationParameters(public val symbols: List<Symbol>) : OptimizationFeature {
     public constructor(vararg symbols: Symbol) : this(listOf(*symbols))
     override fun toString(): String = "Parameters($symbols)"
 }
+/**
+ * Maximum allowed number of iterations
+ */
+public class OptimizationIterations(public val maxIterations: Int) : OptimizationFeature {
+    override fun toString(): String = "Iterations($maxIterations)"
+}
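How these features reach the optimizer, mirroring the fit example earlier in this diff (a sketch; it assumes fitWith accepts additional OptimizationFeature arguments after the starting point, as the single OptimizationParameters argument in that example suggests, and that the data buffers come from elsewhere):

import space.kscience.kmath.data.XYErrorColumnarData
import space.kscience.kmath.expressions.autodiff
import space.kscience.kmath.expressions.binding
import space.kscience.kmath.expressions.symbol
import space.kscience.kmath.optimization.*
import space.kscience.kmath.structures.Buffer

private val a by symbol
private val b by symbol

suspend fun fitLine(x: Buffer<Double>, y: Buffer<Double>, yErr: Buffer<Double>): XYFit =
    XYErrorColumnarData.of(x, y, yErr).fitWith(
        QowOptimizer,
        Double.autodiff,
        mapOf(a to 1.0, b to 0.0),
        // Optimize only over a and b and cap the Newton iterations at 100
        OptimizationParameters(a, b),
        OptimizationIterations(100),
    ) { arg ->
        val a by binding
        val b by binding
        a * arg + b
    }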

View File

@@ -5,10 +5,7 @@
 package space.kscience.kmath.optimization
-import space.kscience.kmath.expressions.DifferentiableExpression
-import space.kscience.kmath.expressions.Symbol
-import space.kscience.kmath.expressions.SymbolIndexer
-import space.kscience.kmath.expressions.derivative
+import space.kscience.kmath.expressions.*
 import space.kscience.kmath.linear.*
 import space.kscience.kmath.misc.UnstableKMathAPI
 import space.kscience.kmath.misc.log
@@ -16,6 +13,7 @@ import space.kscience.kmath.operations.DoubleField
 import space.kscience.kmath.operations.DoubleL2Norm
 import space.kscience.kmath.operations.algebra
 import space.kscience.kmath.structures.DoubleBuffer
+import kotlin.math.abs
 public class QowRuns(public val runs: Int) : OptimizationFeature {
@@ -40,18 +38,24 @@ public object QowOptimizer : Optimizer<Double, XYFit> {
     @OptIn(UnstableKMathAPI::class)
     private class QoWeight(
         val problem: XYFit,
-        val parameters: Map<Symbol, Double>,
-    ) : Map<Symbol, Double> by parameters, SymbolIndexer {
-        override val symbols: List<Symbol> = parameters.keys.toList()
+        val freeParameters: Map<Symbol, Double>,
+    ) : SymbolIndexer {
+        val size get() = freeParameters.size
+        override val symbols: List<Symbol> = freeParameters.keys.toList()
         val data get() = problem.data
+        val allParameters by lazy {
+            problem.startPoint + freeParameters
+        }
         /**
          * Derivatives of the spectrum over parameters. First index in the point number, second one - index of parameter
          */
         val derivs: Matrix<Double> by lazy {
             linearSpace.buildMatrix(problem.data.size, symbols.size) { d, s ->
-                problem.distance(d).derivative(symbols[s])(parameters)
+                problem.distance(d).derivative(symbols[s]).invoke(allParameters)
             }
         }
@@ -60,29 +64,31 @@ public object QowOptimizer : Optimizer<Double, XYFit> {
          */
         val dispersion: Point<Double> by lazy {
             DoubleBuffer(problem.data.size) { d ->
-                1.0/problem.weight(d).invoke(parameters)
+                1.0 / problem.weight(d).invoke(allParameters)
             }
         }
-        val prior: DifferentiableExpression<Double>? get() = problem.getFeature<OptimizationPrior<Double>>()
+        val prior: DifferentiableExpression<Double>?
+            get() = problem.getFeature<OptimizationPrior<Double>>()?.withDefaultArgs(allParameters)
-        override fun toString(): String = parameters.toString()
+        override fun toString(): String = freeParameters.toString()
     }
     /**
      * The signed distance from the model to the [d]-th point of data.
      */
-    private fun QoWeight.distance(d: Int, parameters: Map<Symbol, Double>): Double = problem.distance(d)(parameters)
+    private fun QoWeight.distance(d: Int, parameters: Map<Symbol, Double>): Double =
+        problem.distance(d)(allParameters + parameters)
     /**
      * The derivative of [distance]
      */
     private fun QoWeight.distanceDerivative(symbol: Symbol, d: Int, parameters: Map<Symbol, Double>): Double =
-        problem.distance(d).derivative(symbol)(parameters)
+        problem.distance(d).derivative(symbol).invoke(allParameters + parameters)
     /**
-     * Теоретическая ковариация весовых функций.
+     * Theoretical covariance of weight functions
      *
      * D(\phi)=E(\phi_k(\theta_0) \phi_l(\theta_0))= disDeriv_k * disDeriv_l /sigma^2
      */
@@ -92,7 +98,7 @@ public object QowOptimizer : Optimizer<Double, XYFit> {
     }
     /**
-     * Экспериментальная ковариация весов. Формула (22) из
+     * Experimental covariance Eq (22) from
      * http://arxiv.org/abs/physics/0604127
      */
     private fun QoWeight.covarFExp(theta: Map<Symbol, Double>): Matrix<Double> =
@@ -115,10 +121,9 @@ public object QowOptimizer : Optimizer<Double, XYFit> {
      * Equation derivatives for Newton run
      */
     private fun QoWeight.getEqDerivValues(
-        theta: Map<Symbol, Double> = parameters,
+        theta: Map<Symbol, Double> = freeParameters,
     ): Matrix<Double> = with(linearSpace) {
-        //Возвращает производную k-того Eq по l-тому параметру
-        //val res = Array(fitDim) { DoubleArray(fitDim) }
+        //Derivative of k Eq over l parameter
         val sderiv = buildMatrix(data.size, size) { d, s ->
             distanceDerivative(symbols[s], d, theta)
         }
@@ -140,16 +145,15 @@ public object QowOptimizer : Optimizer<Double, XYFit> {
     /**
-     * Значения уравнений метода квазиоптимальных весов
+     * Quasi optimal weights equations values
      */
-    private fun QoWeight.getEqValues(theta: Map<Symbol, Double> = this): Point<Double> {
+    private fun QoWeight.getEqValues(theta: Map<Symbol, Double>): Point<Double> {
         val distances = DoubleBuffer(data.size) { d -> distance(d, theta) }
         return DoubleBuffer(size) { s ->
             val base = (0 until data.size).sumOf { d -> distances[d] * derivs[d, s] / dispersion[d] }
-            //Поправка на априорную вероятность
+            //Prior probability correction
             prior?.let { prior ->
-                base - prior.derivative(symbols[s])(theta) / prior(theta)
+                base - prior.derivative(symbols[s]).invoke(theta) / prior(theta)
             } ?: base
         }
     }
@@ -157,16 +161,14 @@ public object QowOptimizer : Optimizer<Double, XYFit> {
     private fun QoWeight.newtonianStep(
         theta: Map<Symbol, Double>,
-        eqvalues: Point<Double>,
+        eqValues: Point<Double>,
     ): QoWeight = linearSpace {
-        with(this@newtonianStep) {
         val start = theta.toPoint()
-        val invJacob = solver.inverse(this@newtonianStep.getEqDerivValues(theta))
-        val step = invJacob.dot(eqvalues)
+        val invJacob = solver.inverse(getEqDerivValues(theta))
+        val step = invJacob.dot(eqValues)
         return QoWeight(problem, theta + (start - step).toMap())
         }
-    }
     private fun QoWeight.newtonianRun(
         maxSteps: Int = 100,
@@ -177,10 +179,10 @@
         val logger = problem.getFeature<OptimizationLog>()
         var dis: Double //discrepancy value
-        // Working with the full set of parameters
-        var par = problem.startPoint
-        logger?.log { "Starting newtonian iteration from: \n\t$par" }
+        var par = freeParameters
+        logger?.log { "Starting newtonian iteration from: \n\t$allParameters" }
         var eqvalues = getEqValues(par) //Values of the weight functions
@@ -193,48 +195,48 @@ public object QowOptimizer : Optimizer<Double, XYFit> {
             logger?.log { "Starting step number $i" }
             val currentSolution = if (fast) {
-                //Берет значения матрицы в той точке, где считается вес
-                newtonianStep(this, eqvalues)
+                //Matrix values in the point of weight computation
+                newtonianStep(freeParameters, eqvalues)
             } else {
-                //Берет значения матрицы в точке par
+                //Matrix values in a current point
                 newtonianStep(par, eqvalues)
             }
             // parameter bounds should be taken into account here
             logger?.log { "Parameter values after step are: \n\t$currentSolution" }
-            eqvalues = getEqValues(currentSolution)
-            val currentDis = DoubleL2Norm.norm(eqvalues)// невязка после шага
+            eqvalues = getEqValues(currentSolution.freeParameters)
+            val currentDis = DoubleL2Norm.norm(eqvalues)// discrepancy after the step
             logger?.log { "The discrepancy after step is: $currentDis." }
             if (currentDis >= dis && i > 1) {
-                //дополнительно проверяем, чтобы был сделан хотя бы один шаг
+                //Check if one step is made
                 flag = true
                 logger?.log { "The discrepancy does not decrease. Stopping iteration." }
+            } else if (abs(dis - currentDis) <= tolerance) {
+                flag = true
+                par = currentSolution.freeParameters
+                logger?.log { "Relative discrepancy tolerance threshold is reached. Stopping iteration." }
             } else {
-                par = currentSolution
+                par = currentSolution.freeParameters
                 dis = currentDis
             }
             if (i >= maxSteps) {
                 flag = true
                 logger?.log { "Maximum number of iterations reached. Stopping iteration." }
             }
-            if (dis <= tolerance) {
-                flag = true
-                logger?.log { "Tolerance threshold is reached. Stopping iteration." }
-            }
         }
         return QoWeight(problem, par)
     }
-    private fun QoWeight.covariance(): Matrix<Double> {
+    private fun QoWeight.covariance(): NamedMatrix<Double> {
         val logger = problem.getFeature<OptimizationLog>()
         logger?.log {
             """
-            Starting errors estimation using quasioptimal weights method. The starting weight is:
-                ${problem.startPoint}
+            Starting errors estimation using quasi-optimal weights method. The starting weight is:
+                $allParameters
             """.trimIndent()
         }
@@ -248,19 +250,27 @@ public object QowOptimizer : Optimizer<Double, XYFit> {
         //            valid = false
         //        }
         //    }
-        return covar
+        logger?.log {
+            "Covariance matrix:" + "\n" + NamedMatrix.toStringWithSymbols(covar, this)
+        }
+        return covar.named(symbols)
     }
     override suspend fun optimize(problem: XYFit): XYFit {
         val qowRuns = problem.getFeature<QowRuns>()?.runs ?: 2
+        val iterations = problem.getFeature<OptimizationIterations>()?.maxIterations ?: 50
+        val freeParameters: Map<Symbol, Double> = problem.getFeature<OptimizationParameters>()?.let { op ->
+            problem.startPoint.filterKeys { it in op.symbols }
+        } ?: problem.startPoint
-        var qow = QoWeight(problem, problem.startPoint)
-        var res = qow.newtonianRun()
+        var qow = QoWeight(problem, freeParameters)
+        var res = qow.newtonianRun(maxSteps = iterations)
         repeat(qowRuns - 1) {
-            qow = QoWeight(problem, res.parameters)
-            res = qow.newtonianRun()
+            qow = QoWeight(problem, res.freeParameters)
+            res = qow.newtonianRun(maxSteps = iterations)
         }
-        return res.problem.withFeature(OptimizationResult(res.parameters))
+        val covariance = res.covariance()
+        return res.problem.withFeature(OptimizationResult(res.freeParameters), OptimizationCovariance(covariance))
     }
 }
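Since optimize() now attaches the covariance as an OptimizationCovariance feature of the returned fit, the symbol-indexed matrix can be read back from the result; a sketch (the getFeature call mirrors its use inside the optimizer, everything else is illustrative):

import space.kscience.kmath.expressions.NamedMatrix
import space.kscience.kmath.expressions.Symbol
import space.kscience.kmath.optimization.*
import kotlin.math.sqrt

fun XYFit.printParameterErrors(parameters: List<Symbol>) {
    // Covariance of the free parameters, indexed by symbol
    val covariance: NamedMatrix<Double> = getFeature<OptimizationCovariance<Double>>()?.covariance ?: return
    parameters.forEach { s ->
        // Diagonal elements are parameter variances
        println("$s: sigma = ${sqrt(covariance[s, s])}")
    }
}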

View File

@@ -152,7 +152,7 @@ public suspend fun <I : Any, A> XYColumnarData<Double, Double, Double>.fitWith(
  */
 public val XYFit.chiSquaredOrNull: Double?
     get() {
-        val result = resultPointOrNull ?: return null
+        val result = startPoint + (resultPointOrNull ?: return null)
         return data.indices.sumOf { index ->
@@ -165,3 +165,6 @@ public val XYFit.chiSquaredOrNull: Double?
             ((y - mu) / yErr).pow(2)
         }
     }
+public val XYFit.dof: Int
+    get() = data.size - (getFeature<OptimizationParameters>()?.symbols?.size ?: startPoint.size)
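With chiSquaredOrNull and dof both defined on XYFit, the reduced chi-squared printed in the example above becomes a one-liner; a minimal sketch combining the two extensions:

import space.kscience.kmath.optimization.*

// Reduced chi-squared, or null when the chi-squared expression is not available
fun XYFit.reducedChiSquaredOrNull(): Double? = chiSquaredOrNull?.let { it / dof }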

View File

@@ -1,10 +1,13 @@
 /*
- * Copyright 2018-2022 KMath contributors.
+ * Copyright 2018-2023 KMath contributors.
  * Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
  */
-package space.kscience.kmath.expressions
+package space.kscience.kmath.stat
+import space.kscience.kmath.expressions.AutoDiffProcessor
+import space.kscience.kmath.expressions.DifferentiableExpression
+import space.kscience.kmath.expressions.ExpressionAlgebra
 import space.kscience.kmath.operations.ExtendedField
 import space.kscience.kmath.operations.asIterable
 import space.kscience.kmath.structures.Buffer