From 7b6361e59d48d2ee12edbda6f99af91b1e38f0a5 Mon Sep 17 00:00:00 2001
From: Alexander Nozik
Date: Mon, 26 Apr 2021 15:02:19 +0300
Subject: [PATCH] [WIP] optimization refactor in progress

---
 .../commons/optimization/CMOptimization.kt    | 112 +++++----
 .../kmath/commons/optimization/cmFit.kt       |   8 +-
 .../space/kscience/kmath/misc/Featured.kt     |   4 +-
 .../space/kscience/kmath/misc/logging.kt      |  14 ++
 .../optimization/FunctionOptimization.kt      | 130 +++++++-----
 .../kmath/optimization/OptimizationProblem.kt |   6 +
 .../kscience/kmath/optimization/XYFit.kt      |  34 ++++-
 .../kscience/kmath/optimization/qow/QowFit.kt |   4 -
 8 files changed, 146 insertions(+), 166 deletions(-)
 create mode 100644 kmath-core/src/commonMain/kotlin/space/kscience/kmath/misc/logging.kt

diff --git a/kmath-commons/src/main/kotlin/space/kscience/kmath/commons/optimization/CMOptimization.kt b/kmath-commons/src/main/kotlin/space/kscience/kmath/commons/optimization/CMOptimization.kt
index db9ba6f21..6bde14627 100644
--- a/kmath-commons/src/main/kotlin/space/kscience/kmath/commons/optimization/CMOptimization.kt
+++ b/kmath-commons/src/main/kotlin/space/kscience/kmath/commons/optimization/CMOptimization.kt
@@ -11,10 +11,8 @@ import org.apache.commons.math3.optim.nonlinear.scalar.MultivariateOptimizer
 import org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunction
 import org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunctionGradient
 import org.apache.commons.math3.optim.nonlinear.scalar.gradient.NonLinearConjugateGradientOptimizer
-import org.apache.commons.math3.optim.nonlinear.scalar.noderiv.AbstractSimplex
-import org.apache.commons.math3.optim.nonlinear.scalar.noderiv.NelderMeadSimplex
-import org.apache.commons.math3.optim.nonlinear.scalar.noderiv.SimplexOptimizer
-import space.kscience.kmath.expressions.*
+import space.kscience.kmath.expressions.derivative
+import space.kscience.kmath.expressions.withSymbols
 import space.kscience.kmath.misc.Symbol
 import space.kscience.kmath.misc.UnstableKMathAPI
 import space.kscience.kmath.optimization.*
@@ -24,107 +22,73 @@ public operator fun PointValuePair.component1(): DoubleArray = point
 public operator fun PointValuePair.component2(): Double = value
 
 public class CMOptimizerFactory(public val optimizerBuilder: () -> MultivariateOptimizer) : OptimizationFeature
-//public class CMOptimizerData(public val )
+public class CMOptimizerData(public val data: List<OptimizationData>) : OptimizationFeature {
+    public constructor(vararg data: OptimizationData) : this(data.toList())
+}
 
 @OptIn(UnstableKMathAPI::class)
 public class CMOptimization : Optimizer<FunctionOptimization<Double>> {
 
     override suspend fun process(
-        problem: FunctionOptimization<Double>
-    ): FunctionOptimization<Double> = withSymbols(problem.parameters){
-        val cmOptimizer: MultivariateOptimizer =
-            problem.getFeature<CMOptimizerFactory>()?.optimizerBuilder?.invoke() ?: SimplexOptimizer()
-
+        problem: FunctionOptimization<Double>,
+    ): FunctionOptimization<Double> = withSymbols(problem.parameters) {
         val convergenceChecker: ConvergenceChecker<PointValuePair> = SimpleValueChecker(
             DEFAULT_RELATIVE_TOLERANCE,
             DEFAULT_ABSOLUTE_TOLERANCE,
             DEFAULT_MAX_ITER
         )
 
+        val cmOptimizer: MultivariateOptimizer = problem.getFeature<CMOptimizerFactory>()?.optimizerBuilder?.invoke()
+            ?: NonLinearConjugateGradientOptimizer(
+                NonLinearConjugateGradientOptimizer.Formula.FLETCHER_REEVES,
+                convergenceChecker
+            )
+
         val optimizationData: HashMap<KClass<out OptimizationData>, OptimizationData> = HashMap()
 
         fun addOptimizationData(data: OptimizationData) {
             optimizationData[data::class] = data
         }
+
+        addOptimizationData(MaxEval.unlimited())
+        addOptimizationData(InitialGuess(problem.initialGuess.toDoubleArray()))
+
         fun exportOptimizationData(): List<OptimizationData> = optimizationData.values.toList()
-
-        /**
-         * Register no-deriv function instead of differentiable function
-         */
-        /**
-         * Register no-deriv function instead of differentiable function
-         */
-        fun noDerivFunction(expression: Expression<Double>): Unit {
-            val objectiveFunction = ObjectiveFunction {
-                val args = problem.initialGuess + it.toMap()
-                expression(args)
-            }
-            addOptimizationData(objectiveFunction)
+        val objectiveFunction = ObjectiveFunction {
+            val args = problem.initialGuess + it.toMap()
+            problem.expression(args)
         }
+        addOptimizationData(objectiveFunction)
 
-        public override fun function(expression: DifferentiableExpression<Double, Expression<Double>>) {
-            noDerivFunction(expression)
-            val gradientFunction = ObjectiveFunctionGradient {
-                val args = startingPoint + it.toMap()
-                DoubleArray(symbols.size) { index ->
-                    expression.derivative(symbols[index])(args)
-                }
-            }
-            addOptimizationData(gradientFunction)
-            if (optimizerBuilder == null) {
-                optimizerBuilder = {
-                    NonLinearConjugateGradientOptimizer(
-                        NonLinearConjugateGradientOptimizer.Formula.FLETCHER_REEVES,
-                        convergenceChecker
-                    )
+        val gradientFunction = ObjectiveFunctionGradient {
+            val args = problem.initialGuess + it.toMap()
+            DoubleArray(symbols.size) { index ->
+                problem.expression.derivative(symbols[index])(args)
+            }
+        }
+        addOptimizationData(gradientFunction)
+
+        val logger = problem.getFeature<OptimizationLog>()
+
+        for (feature in problem.features) {
+            when (feature) {
+                is CMOptimizerData -> feature.data.forEach { addOptimizationData(it) }
+                is FunctionOptimizationTarget -> when (feature) {
+                    FunctionOptimizationTarget.MAXIMIZE -> addOptimizationData(GoalType.MAXIMIZE)
+                    FunctionOptimizationTarget.MINIMIZE -> addOptimizationData(GoalType.MINIMIZE)
                 }
+                else -> logger?.log { "The feature $feature is unused in optimization" }
             }
         }
 
-        public fun simplex(simplex: AbstractSimplex) {
-            addOptimizationData(simplex)
-            //Set optimization builder to simplex if it is not present
-            if (optimizerBuilder == null) {
-                optimizerBuilder = { SimplexOptimizer(convergenceChecker) }
-            }
-        }
-
-        public fun simplexSteps(steps: Map<Symbol, Double>) {
-            simplex(NelderMeadSimplex(steps.toDoubleArray()))
-        }
-
-        public fun goal(goalType: GoalType) {
-            addOptimizationData(goalType)
-        }
-
-        public fun optimizer(block: () -> MultivariateOptimizer) {
-            optimizerBuilder = block
-        }
-
-        override fun update(result: OptimizationResult<Double>) {
-            initialGuess(result.point)
-        }
-
-        override suspend fun optimize(): OptimizationResult<Double> {
-            val optimizer = optimizerBuilder?.invoke() ?: error("Optimizer not defined")
-            val (point, value) = optimizer.optimize(*optimizationData.values.toTypedArray())
-            return OptimizationResult(point.toMap(), value)
-        }
-        return@withSymbols TODO()
+        val (point, value) = cmOptimizer.optimize(*optimizationData.values.toTypedArray())
+        return problem.withFeatures(FunctionOptimizationResult(point.toMap(), value))
     }
 
-    public companion object : OptimizationProblemFactory<Double, CMOptimization> {
+    public companion object {
         public const val DEFAULT_RELATIVE_TOLERANCE: Double = 1e-4
         public const val DEFAULT_ABSOLUTE_TOLERANCE: Double = 1e-4
         public const val DEFAULT_MAX_ITER: Int = 1000
-
-        override fun build(symbols: List<Symbol>): CMOptimization = CMOptimization(symbols)
     }
 }
-
-public fun CMOptimization.initialGuess(vararg pairs: Pair<Symbol, Double>): Unit = initialGuess(pairs.toMap())
-public fun CMOptimization.simplexSteps(vararg pairs: Pair<Symbol, Double>): Unit = simplexSteps(pairs.toMap())
diff --git a/kmath-commons/src/main/kotlin/space/kscience/kmath/commons/optimization/cmFit.kt b/kmath-commons/src/main/kotlin/space/kscience/kmath/commons/optimization/cmFit.kt
index 12d924063..9c0089b3d 100644
--- a/kmath-commons/src/main/kotlin/space/kscience/kmath/commons/optimization/cmFit.kt
+++ b/kmath-commons/src/main/kotlin/space/kscience/kmath/commons/optimization/cmFit.kt
@@ -17,22 +17,22 @@ import space.kscience.kmath.structures.asBuffer
 /**
  * Generate a chi squared expression from given x-y-sigma data and inline model. Provides automatic differentiation
  */
-public fun FunctionOptimization.Companion.chiSquared(
+public fun FunctionOptimization.Companion.chiSquaredExpression(
     x: Buffer<Double>,
     y: Buffer<Double>,
     yErr: Buffer<Double>,
     model: DerivativeStructureField.(x: DerivativeStructure) -> DerivativeStructure,
-): DifferentiableExpression<Double, Expression<Double>> = chiSquared(DerivativeStructureField, x, y, yErr, model)
+): DifferentiableExpression<Double, Expression<Double>> = chiSquaredExpression(DerivativeStructureField, x, y, yErr, model)
 
 /**
  * Generate a chi squared expression from given x-y-sigma data and inline model. Provides automatic differentiation
  */
-public fun FunctionOptimization.Companion.chiSquared(
+public fun FunctionOptimization.Companion.chiSquaredExpression(
     x: Iterable<Double>,
     y: Iterable<Double>,
     yErr: Iterable<Double>,
     model: DerivativeStructureField.(x: DerivativeStructure) -> DerivativeStructure,
-): DifferentiableExpression<Double, Expression<Double>> = chiSquared(
+): DifferentiableExpression<Double, Expression<Double>> = chiSquaredExpression(
     DerivativeStructureField,
     x.toList().asBuffer(),
     y.toList().asBuffer(),
diff --git a/kmath-core/src/commonMain/kotlin/space/kscience/kmath/misc/Featured.kt b/kmath-core/src/commonMain/kotlin/space/kscience/kmath/misc/Featured.kt
index 157ff980b..a94efc788 100644
--- a/kmath-core/src/commonMain/kotlin/space/kscience/kmath/misc/Featured.kt
+++ b/kmath-core/src/commonMain/kotlin/space/kscience/kmath/misc/Featured.kt
@@ -17,7 +17,7 @@ public interface Featured<F : Any> {
 /**
  * A container for a set of features
  */
-public class FeatureSet<F : Any> private constructor(public val features: Map<KClass<out F>, Any>) : Featured<F> {
+public class FeatureSet<F : Any> private constructor(public val features: Map<KClass<out F>, F>) : Featured<F> {
     @Suppress("UNCHECKED_CAST")
     override fun <T : F> getFeature(type: KClass<out T>): T? = features[type] as? T
 
@@ -31,6 +31,8 @@ public class FeatureSet<F : Any> private constructor(public val features: Map<K
     public fun with(vararg otherFeatures: F): FeatureSet<F> = FeatureSet(features + otherFeatures.associateBy { it::class })
 
+    public operator fun iterator(): Iterator<F> = features.values.iterator()
+
     public companion object {
         public fun <F : Any> of(vararg features: F): FeatureSet<F> = FeatureSet(features.associateBy { it::class })
     }
diff --git a/kmath-core/src/commonMain/kotlin/space/kscience/kmath/misc/logging.kt b/kmath-core/src/commonMain/kotlin/space/kscience/kmath/misc/logging.kt
new file mode 100644
index 000000000..d13840841
--- /dev/null
+++ b/kmath-core/src/commonMain/kotlin/space/kscience/kmath/misc/logging.kt
@@ -0,0 +1,14 @@
+/*
+ * Copyright 2018-2021 KMath contributors.
+ * Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
+ */
+
+package space.kscience.kmath.misc
+
+public interface Loggable {
+    public fun log(tag: String = INFO, block: () -> String)
+
+    public companion object {
+        public const val INFO: String = "INFO"
+    }
+}
\ No newline at end of file
diff --git a/kmath-stat/src/commonMain/kotlin/space/kscience/kmath/optimization/FunctionOptimization.kt b/kmath-stat/src/commonMain/kotlin/space/kscience/kmath/optimization/FunctionOptimization.kt
index 12ccea1d8..db613e236 100644
--- a/kmath-stat/src/commonMain/kotlin/space/kscience/kmath/optimization/FunctionOptimization.kt
+++ b/kmath-stat/src/commonMain/kotlin/space/kscience/kmath/optimization/FunctionOptimization.kt
@@ -15,91 +15,57 @@ import space.kscience.kmath.operations.ExtendedField
 import space.kscience.kmath.structures.Buffer
 import space.kscience.kmath.structures.indices
 
+public class FunctionOptimizationResult<T>(point: Map<Symbol, T>, public val value: T) : OptimizationResult<T>(point)
 
-public class FunctionOptimization(
+public enum class FunctionOptimizationTarget : OptimizationFeature {
+    MAXIMIZE,
+    MINIMIZE
+}
+
+public class FunctionOptimization<T>(
     override val features: FeatureSet<OptimizationFeature>,
     public val expression: DifferentiableExpression<T, Expression<T>>,
     public val initialGuess: Map<Symbol, T>,
     public val parameters: Collection<Symbol>,
-    public val maximize: Boolean,
-) : OptimizationProblem
+) : OptimizationProblem {
+    public companion object {
+        /**
+         * Generate a chi squared expression from given x-y-sigma data and inline model. Provides automatic differentiation
+         */
+        public fun <T : Any, I : Any, A> chiSquaredExpression(
+            autoDiff: AutoDiffProcessor<T, I, A, Expression<T>>,
+            x: Buffer<T>,
+            y: Buffer<T>,
+            yErr: Buffer<T>,
+            model: A.(I) -> I,
+        ): DifferentiableExpression<T, Expression<T>> where A : ExtendedField<I>, A : ExpressionAlgebra<T, I> {
+            require(x.size == y.size) { "X and y buffers should be of the same size" }
+            require(y.size == yErr.size) { "Y and yErr buffers should be of the same size" }
+
+            return autoDiff.process {
+                var sum = zero
+
+                x.indices.forEach {
+                    val xValue = const(x[it])
+                    val yValue = const(y[it])
+                    val yErrValue = const(yErr[it])
+                    val modelValue = model(xValue)
+                    sum += ((yValue - modelValue) / yErrValue).pow(2)
+                }
+
+                sum
+            }
+        }
+    }
+}
+
+public fun <T> FunctionOptimization<T>.withFeatures(
+    vararg newFeature: OptimizationFeature,
+): FunctionOptimization<T> = FunctionOptimization(
+    features.with(*newFeature),
+    expression,
+    initialGuess,
+    parameters
+)
 
-
-//
-///**
-// * A likelihood function optimization problem with provided derivatives
-// */
-//public interface FunctionOptimizationBuilder<T> {
-//    /**
-//     * The optimization direction. If true search for function maximum, if false, search for the minimum
-//     */
-//    public var maximize: Boolean
-//
-//    /**
-//     * Define the initial guess for the optimization problem
-//     */
-//    public fun initialGuess(map: Map<Symbol, T>)
-//
-//    /**
-//     * Set a differentiable expression as objective function as function and gradient provider
-//     */
-//    public fun function(expression: DifferentiableExpression<T, Expression<T>>)
-//
-//    public companion object {
-//        /**
-//         * Generate a chi squared expression from given x-y-sigma data and inline model. Provides automatic differentiation
-//         */
-//        public fun <T : Any, I : Any, A> chiSquared(
-//            autoDiff: AutoDiffProcessor<T, I, A, Expression<T>>,
-//            x: Buffer<T>,
-//            y: Buffer<T>,
-//            yErr: Buffer<T>,
-//            model: A.(I) -> I,
-//        ): DifferentiableExpression<T, Expression<T>> where A : ExtendedField<I>, A : ExpressionAlgebra<T, I> {
-//            require(x.size == y.size) { "X and y buffers should be of the same size" }
-//            require(y.size == yErr.size) { "Y and yErr buffer should of the same size" }
-//
-//            return autoDiff.process {
-//                var sum = zero
-//
-//                x.indices.forEach {
-//                    val xValue = const(x[it])
-//                    val yValue = const(y[it])
-//                    val yErrValue = const(yErr[it])
-//                    val modelValue = model(xValue)
-//                    sum += ((yValue - modelValue) / yErrValue).pow(2)
-//                }
-//
-//                sum
-//            }
-//        }
-//    }
-//}
-//
-///**
-// * Define a chi-squared-based objective function
-// */
-//public fun <T : Any, I : Any, A> FunctionOptimization<T>.chiSquared(
-//    autoDiff: AutoDiffProcessor<T, I, A, Expression<T>>,
-//    x: Buffer<T>,
-//    y: Buffer<T>,
-//    yErr: Buffer<T>,
-//    model: A.(I) -> I,
-//) where A : ExtendedField<I>, A : ExpressionAlgebra<T, I> {
-//    val chiSquared = FunctionOptimization.chiSquared(autoDiff, x, y, yErr, model)
-//    function(chiSquared)
-//    maximize = false
-//}
-//
-///**
-// * Optimize differentiable expression using specific [OptimizationProblemFactory]
-// */
-//public suspend fun <T : Any, F : OptimizationProblem<T>> DifferentiableExpression<T, Expression<T>>.optimizeWith(
-//    factory: OptimizationProblemFactory<T, F>,
-//    vararg symbols: Symbol,
-//    configuration: F.() -> Unit,
-//): OptimizationResult<T> {
-//    require(symbols.isNotEmpty()) { "Must provide a list of symbols for optimization" }
-//    val problem = factory(symbols.toList(), configuration)
-//    problem.function(this)
-//    return problem.optimize()
-//}
diff --git a/kmath-stat/src/commonMain/kotlin/space/kscience/kmath/optimization/OptimizationProblem.kt b/kmath-stat/src/commonMain/kotlin/space/kscience/kmath/optimization/OptimizationProblem.kt
index 9a5420be6..0d2e3cb83 100644
--- a/kmath-stat/src/commonMain/kotlin/space/kscience/kmath/optimization/OptimizationProblem.kt
+++ b/kmath-stat/src/commonMain/kotlin/space/kscience/kmath/optimization/OptimizationProblem.kt
@@ -7,6 +7,8 @@ package space.kscience.kmath.optimization
 
 import space.kscience.kmath.misc.FeatureSet
 import space.kscience.kmath.misc.Featured
+import space.kscience.kmath.misc.Loggable
+import space.kscience.kmath.misc.Symbol
 import kotlin.reflect.KClass
 
 public interface OptimizationFeature
@@ -18,6 +20,10 @@ public interface OptimizationProblem : Featured<OptimizationFeature> {
 
 public inline fun <reified T : OptimizationFeature> OptimizationProblem.getFeature(): T? = getFeature(T::class)
 
+public open class OptimizationResult<T>(public val point: Map<Symbol, T>) : OptimizationFeature
+
+public class OptimizationLog(private val loggable: Loggable) : Loggable by loggable, OptimizationFeature
+
 //public class OptimizationResult<T>(
 //    public val point: Map<Symbol, T>,
 //    public val value: T,
diff --git a/kmath-stat/src/commonMain/kotlin/space/kscience/kmath/optimization/XYFit.kt b/kmath-stat/src/commonMain/kotlin/space/kscience/kmath/optimization/XYFit.kt
index e4998c665..f1b6ef38d 100644
--- a/kmath-stat/src/commonMain/kotlin/space/kscience/kmath/optimization/XYFit.kt
+++ b/kmath-stat/src/commonMain/kotlin/space/kscience/kmath/optimization/XYFit.kt
@@ -14,9 +14,11 @@ import space.kscience.kmath.misc.Symbol
 import space.kscience.kmath.misc.UnstableKMathAPI
 import space.kscience.kmath.operations.ExtendedField
 import space.kscience.kmath.operations.Field
+import space.kscience.kmath.structures.Buffer
+import space.kscience.kmath.structures.indices
 
 @UnstableKMathAPI
-public interface XYFit<T : Any> : OptimizationProblem<T> {
+public interface XYFit<T : Any> : OptimizationProblem {
 
     public val algebra: Field<T>
 
@@ -42,4 +44,34 @@ public interface XYFit<T : Any> : OptimizationProblem {
     ): Unit where A : ExtendedField<I>, A : ExpressionAlgebra<T, I> = model { arg ->
         autoDiff.process { modelFunction(const(arg)) }
     }
+}
+
+//
+///**
+// * Define a chi-squared-based objective function
+// */
+//public fun <T : Any, I : Any, A> FunctionOptimization<T>.chiSquared(
+//    autoDiff: AutoDiffProcessor<T, I, A, Expression<T>>,
+//    x: Buffer<T>,
+//    y: Buffer<T>,
+//    yErr: Buffer<T>,
+//    model: A.(I) -> I,
+//) where A : ExtendedField<I>, A : ExpressionAlgebra<T, I> {
+//    val chiSquared = FunctionOptimization.chiSquared(autoDiff, x, y, yErr, model)
+//    function(chiSquared)
+//    maximize = false
+//}
+
+/**
+ * Optimize differentiable expression using specific [OptimizationProblemFactory]
+ */
+public suspend fun <T : Any, F : OptimizationProblem> DifferentiableExpression<T, Expression<T>>.optimizeWith(
+    factory: OptimizationProblemFactory<T, F>,
+    vararg symbols: Symbol,
+    configuration: F.() -> Unit,
+): OptimizationResult<T> {
+    require(symbols.isNotEmpty()) { "Must provide a list of symbols for optimization" }
+    val problem = factory(symbols.toList(), configuration)
+    problem.function(this)
+    return problem.optimize()
+}
\ No newline at end of file
diff --git a/kmath-stat/src/commonMain/kotlin/space/kscience/kmath/optimization/qow/QowFit.kt b/kmath-stat/src/commonMain/kotlin/space/kscience/kmath/optimization/qow/QowFit.kt
index 02fd12dbc..d611adf50 100644
--- a/kmath-stat/src/commonMain/kotlin/space/kscience/kmath/optimization/qow/QowFit.kt
+++ b/kmath-stat/src/commonMain/kotlin/space/kscience/kmath/optimization/qow/QowFit.kt
@@ -27,10 +27,6 @@ import kotlin.math.pow
 
 private typealias ParamSet = Map<Symbol, Double>
 
-public fun interface FitLogger {
-    public fun log(block: () -> String)
-}
-
 @OptIn(UnstableKMathAPI::class)
 public class QowFit(
     override val symbols: List<Symbol>,
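-- 
Reviewer note, not part of the patch (text below the signature marker is ignored
by git am): a minimal usage sketch of the refactored flow, assuming the
declarations above land as written. The symbols a and b, the initial guess, and
the helper name fitExample are illustrative placeholders only; the objective can
come from FunctionOptimization.chiSquaredExpression in cmFit.kt or be any other
DifferentiableExpression, and the `symbol` property delegate is assumed to be
available from space.kscience.kmath.misc as in current kmath-core.

    import space.kscience.kmath.commons.optimization.CMOptimization
    import space.kscience.kmath.expressions.DifferentiableExpression
    import space.kscience.kmath.expressions.Expression
    import space.kscience.kmath.misc.FeatureSet
    import space.kscience.kmath.misc.Symbol
    import space.kscience.kmath.misc.symbol
    import space.kscience.kmath.optimization.*

    private val a by symbol
    private val b by symbol

    // The optimization direction and any engine-specific OptimizationData now
    // travel as features on an immutable problem instead of builder calls on a
    // mutable CMOptimization instance.
    suspend fun fitExample(
        chi2: DifferentiableExpression<Double, Expression<Double>>,
    ): Map<Symbol, Double>? {
        val problem = FunctionOptimization(
            FeatureSet.of<OptimizationFeature>(FunctionOptimizationTarget.MINIMIZE),
            chi2,
            mapOf(a to 1.0, b to 0.5), // illustrative starting point
            listOf(a, b)
        )
        // The optimizer is stateless: it returns the same problem with a
        // FunctionOptimizationResult feature attached.
        val solution = CMOptimization().process(problem)
        return solution.getFeature<FunctionOptimizationResult<Double>>()?.point
    }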