forked from kscience/kmath
[WIP] Optimization
Parent: 3ec674c61b
Commit: 88e94a7fd9

@@ -8,7 +8,6 @@ package space.kscience.kmath.commons.fit
import kotlinx.html.br
import kotlinx.html.h3
import space.kscience.kmath.commons.optimization.chiSquared
import space.kscience.kmath.commons.optimization.minimize
import space.kscience.kmath.distributions.NormalDistribution
import space.kscience.kmath.expressions.symbol
import space.kscience.kmath.optimization.FunctionOptimization

@@ -13,7 +13,6 @@ import org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunctionGradient
import org.apache.commons.math3.optim.nonlinear.scalar.gradient.NonLinearConjugateGradientOptimizer
import space.kscience.kmath.expressions.derivative
import space.kscience.kmath.expressions.withSymbols
import space.kscience.kmath.misc.Symbol
import space.kscience.kmath.misc.UnstableKMathAPI
import space.kscience.kmath.optimization.*
import kotlin.reflect.KClass

@@ -21,9 +20,15 @@ import kotlin.reflect.KClass
public operator fun PointValuePair.component1(): DoubleArray = point
public operator fun PointValuePair.component2(): Double = value

public class CMOptimizerFactory(public val optimizerBuilder: () -> MultivariateOptimizer) : OptimizationFeature
public class CMOptimizer(public val optimizerBuilder: () -> MultivariateOptimizer): OptimizationFeature{
override fun toString(): String = "CMOptimizer($optimizerBuilder)"
}

public class CMOptimizerData(public val data: List<OptimizationData>) : OptimizationFeature {
public constructor(vararg data: OptimizationData) : this(data.toList())

override fun toString(): String = "CMOptimizerData($data)"
}

@OptIn(UnstableKMathAPI::class)

@@ -31,59 +36,69 @@ public class CMOptimization : Optimizer<FunctionOptimization<Double>> {

override suspend fun process(
problem: FunctionOptimization<Double>,
): FunctionOptimization<Double> = withSymbols(problem.parameters) {
val convergenceChecker: ConvergenceChecker<PointValuePair> = SimpleValueChecker(
DEFAULT_RELATIVE_TOLERANCE,
DEFAULT_ABSOLUTE_TOLERANCE,
DEFAULT_MAX_ITER
)
): FunctionOptimization<Double> {
val startPoint = problem.getFeature<OptimizationStartPoint<Double>>()?.point
?: error("Starting point not defined in $problem")

val cmOptimizer: MultivariateOptimizer = problem.getFeature<CMOptimizerFactory>()?.optimizerBuilder?.invoke()
?: NonLinearConjugateGradientOptimizer(
NonLinearConjugateGradientOptimizer.Formula.FLETCHER_REEVES,
convergenceChecker
val parameters = problem.getFeature<OptimizationParameters>()?.symbols
?: problem.getFeature<OptimizationStartPoint<Double>>()?.point?.keys
?:startPoint.keys

withSymbols(parameters) {
val convergenceChecker: ConvergenceChecker<PointValuePair> = SimpleValueChecker(
DEFAULT_RELATIVE_TOLERANCE,
DEFAULT_ABSOLUTE_TOLERANCE,
DEFAULT_MAX_ITER
)

val optimizationData: HashMap<KClass<out OptimizationData>, OptimizationData> = HashMap()
val cmOptimizer: MultivariateOptimizer = problem.getFeature<CMOptimizer>()?.optimizerBuilder?.invoke()
?: NonLinearConjugateGradientOptimizer(
NonLinearConjugateGradientOptimizer.Formula.FLETCHER_REEVES,
convergenceChecker
)

fun addOptimizationData(data: OptimizationData) {
optimizationData[data::class] = data
}
val optimizationData: HashMap<KClass<out OptimizationData>, OptimizationData> = HashMap()

addOptimizationData(MaxEval.unlimited())
addOptimizationData(InitialGuess(problem.initialGuess.toDoubleArray()))

fun exportOptimizationData(): List<OptimizationData> = optimizationData.values.toList()

val objectiveFunction = ObjectiveFunction {
val args = problem.initialGuess + it.toMap()
problem.expression(args)
}
addOptimizationData(objectiveFunction)

val gradientFunction = ObjectiveFunctionGradient {
val args = problem.initialGuess + it.toMap()
DoubleArray(symbols.size) { index ->
problem.expression.derivative(symbols[index])(args)
fun addOptimizationData(data: OptimizationData) {
optimizationData[data::class] = data
}
}
addOptimizationData(gradientFunction)

val logger = problem.getFeature<OptimizationLog>()
addOptimizationData(MaxEval.unlimited())
addOptimizationData(InitialGuess(startPoint.toDoubleArray()))

for (feature in problem.features) {
when (feature) {
is CMOptimizerData -> feature.data.forEach { addOptimizationData(it) }
is FunctionOptimizationTarget -> when(feature){
FunctionOptimizationTarget.MAXIMIZE -> addOptimizationData(GoalType.MAXIMIZE)
FunctionOptimizationTarget.MINIMIZE -> addOptimizationData(GoalType.MINIMIZE)
fun exportOptimizationData(): List<OptimizationData> = optimizationData.values.toList()

val objectiveFunction = ObjectiveFunction {
val args = startPoint + it.toMap()
problem.expression(args)
}
addOptimizationData(objectiveFunction)

val gradientFunction = ObjectiveFunctionGradient {
val args = startPoint + it.toMap()
DoubleArray(symbols.size) { index ->
problem.expression.derivative(symbols[index])(args)
}
else -> logger?.log { "The feature $feature is unused in optimization" }
}
}
addOptimizationData(gradientFunction)

val (point, value) = cmOptimizer.optimize(*optimizationData.values.toTypedArray())
return problem.withFeatures(FunctionOptimizationResult(point.toMap(), value))
val logger = problem.getFeature<OptimizationLog>()

for (feature in problem.features) {
when (feature) {
is CMOptimizerData -> feature.data.forEach { addOptimizationData(it) }
is FunctionOptimizationTarget -> when (feature) {
FunctionOptimizationTarget.MAXIMIZE -> addOptimizationData(GoalType.MAXIMIZE)
FunctionOptimizationTarget.MINIMIZE -> addOptimizationData(GoalType.MINIMIZE)
}
else -> logger?.log { "The feature $feature is unused in optimization" }
}
}

val (point, value) = cmOptimizer.optimize(*optimizationData.values.toTypedArray())
return problem.withFeatures(OptimizationResult(point.toMap()), OptimizationValue(value))
}
}

public companion object {
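
Note: a minimal usage sketch of the feature-based API introduced above, assuming the WIP names in this commit (FunctionOptimization, CMOptimization, CMOptimizerData, FunctionOptimizationTarget, OptimizationResult, OptimizationValue) plus the withFeatures helper that appears later in this diff. The helper function name and the wiring around it are illustrative, not part of the commit.

// Hedged sketch against the WIP API in this commit; kmath imports are assumed to resolve to
// the packages touched by this diff, and MaxEval comes from org.apache.commons.math3.optim.
// The incoming problem is assumed to already carry an OptimizationStartPoint feature,
// otherwise process() fails with "Starting point not defined".
suspend fun runWithCommonsMath(problem: FunctionOptimization<Double>): Pair<Map<Symbol, Double>?, Double?> {
    val configured = problem.withFeatures(
        FunctionOptimizationTarget.MINIMIZE,   // translated to GoalType.MINIMIZE in the loop above
        CMOptimizerData(MaxEval(10_000)),      // raw Commons Math OptimizationData is passed through
    )
    val solved = CMOptimization().process(configured)
    // The result comes back as features attached to the returned problem.
    return solved.getFeature<OptimizationResult<Double>>()?.point to
            solved.getFeature<OptimizationValue<Double>>()?.value
}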

@@ -9,7 +9,7 @@ import org.apache.commons.math3.analysis.differentiation.DerivativeStructure
import space.kscience.kmath.commons.expressions.DerivativeStructureField
import space.kscience.kmath.expressions.DifferentiableExpression
import space.kscience.kmath.expressions.Expression
import space.kscience.kmath.misc.Symbol
import space.kscience.kmath.expressions.Symbol
import space.kscience.kmath.optimization.*
import space.kscience.kmath.structures.Buffer
import space.kscience.kmath.structures.asBuffer

@@ -5,10 +5,10 @@

package space.kscience.kmath.data

import space.kscience.kmath.misc.Symbol
import space.kscience.kmath.misc.Symbol.Companion.z
import space.kscience.kmath.data.XYErrorColumnarData.Companion
import space.kscience.kmath.expressions.Symbol
import space.kscience.kmath.expressions.symbol
import space.kscience.kmath.misc.UnstableKMathAPI
import space.kscience.kmath.misc.symbol
import space.kscience.kmath.structures.Buffer

@@ -51,7 +51,7 @@ public class GaussIntegrator<T : Any>(
}
}

override fun integrate(integrand: UnivariateIntegrand<T>): UnivariateIntegrand<T> = with(algebra) {
override fun process(integrand: UnivariateIntegrand<T>): UnivariateIntegrand<T> = with(algebra) {
val f = integrand.function
val (points, weights) = buildRule(integrand)
var res = zero

@@ -95,7 +95,7 @@ public fun <T : Any> GaussIntegrator<T>.integrate(
val ranges = UnivariateIntegrandRanges(
(0 until intervals).map { i -> (range.start + rangeSize * i)..(range.start + rangeSize * (i + 1)) to order }
)
return integrate(
return process(
UnivariateIntegrand(
function,
IntegrationRange(range),
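
Note: only the Integrator member is renamed here (integrate becomes process); the user-facing integrate(...) extensions updated later in this diff keep their names and delegate to it. A hedged sketch of calling the renamed member directly, assuming GaussIntegrator is constructed from an algebra as the surrounding code suggests:

// Hedged sketch; constructor details beyond the algebra parameter are assumed.
val integrator = GaussIntegrator(DoubleField)

// Calling the renamed member directly with a hand-built integrand:
val direct = integrator.process(
    UnivariateIntegrand({ x: Double -> x * x }, IntegrationRange(0.0..1.0))
)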

@@ -5,18 +5,20 @@

package space.kscience.kmath.integration

import space.kscience.kmath.misc.FeatureSet
import space.kscience.kmath.misc.Featured
import kotlin.reflect.KClass

public interface IntegrandFeature {
override fun toString(): String
}

public interface Integrand {
public val features: Set<IntegrandFeature>
public fun <T : IntegrandFeature> getFeature(type: KClass<T>): T?
public interface Integrand : Featured<IntegrandFeature> {
public val features: FeatureSet<IntegrandFeature>
override fun <T : IntegrandFeature> getFeature(type: KClass<out T>): T? = features.getFeature(type)
}

public inline fun <reified T : IntegrandFeature> Integrand.getFeature(): T? = getFeature(T::class)
public inline fun <reified T: IntegrandFeature> Integrand.getFeature(): T? = getFeature(T::class)

public class IntegrandValue<T : Any>(public val value: T) : IntegrandFeature {
override fun toString(): String = "Value($value)"
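
Note: Integrand now delegates feature lookup to the shared Featured/FeatureSet machinery. A hedged sketch of a user-defined feature read back through the reified getFeature helper above; IntegrationLabel and describe are made-up names, only IntegrandFeature, IntegrationRange and getFeature come from this diff.

// Hypothetical feature type used purely for illustration.
class IntegrationLabel(val label: String) : IntegrandFeature {
    override fun toString(): String = "Label($label)"
}

fun describe(integrand: UnivariateIntegrand<Double>) {
    // Reified lookup goes through FeatureSet.getFeature under the hood.
    val label = integrand.getFeature<IntegrationLabel>()?.label ?: "unlabeled"
    val range = integrand.getFeature<IntegrationRange>()?.range
    println("Integrating '$label' over $range")
}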

@@ -6,29 +6,21 @@
package space.kscience.kmath.integration

import space.kscience.kmath.linear.Point
import kotlin.reflect.KClass
import space.kscience.kmath.misc.FeatureSet

public class MultivariateIntegrand<T : Any> internal constructor(
private val featureMap: Map<KClass<*>, IntegrandFeature>,
override val features: FeatureSet<IntegrandFeature>,
public val function: (Point<T>) -> T,
) : Integrand {

override val features: Set<IntegrandFeature> get() = featureMap.values.toSet()

@Suppress("UNCHECKED_CAST")
override fun <T : IntegrandFeature> getFeature(type: KClass<T>): T? = featureMap[type] as? T

public operator fun <F : IntegrandFeature> plus(pair: Pair<KClass<out F>, F>): MultivariateIntegrand<T> =
MultivariateIntegrand(featureMap + pair, function)

public operator fun <F : IntegrandFeature> plus(feature: F): MultivariateIntegrand<T> =
plus(feature::class to feature)
MultivariateIntegrand(features.with(feature), function)
}

@Suppress("FunctionName")
public fun <T : Any> MultivariateIntegrand(
vararg features: IntegrandFeature,
function: (Point<T>) -> T,
): MultivariateIntegrand<T> = MultivariateIntegrand(features.associateBy { it::class }, function)
): MultivariateIntegrand<T> = MultivariateIntegrand(FeatureSet.of(*features), function)

public val <T : Any> MultivariateIntegrand<T>.value: T? get() = getFeature<IntegrandValue<T>>()?.value
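
Note: the plus operator now appends to the FeatureSet instead of rebuilding a class-keyed map, so attaching a feature to an existing integrand stays a one-liner. A hedged sketch; the behaviour of FeatureSet.with for a repeated feature type is assumed to mirror the old map, i.e. one instance per class.

// Hedged sketch; Point indexing via the Buffer get operator is assumed available as before.
val base = MultivariateIntegrand(IntegrandValue(0.0)) { point: Point<Double> -> point[0] * point[1] }
val evaluated = base + IntegrandValue(42.0)  // expected to replace the stale IntegrandValue by class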

@@ -43,7 +43,7 @@ public class SimpsonIntegrator<T : Any>(
return res
}

override fun integrate(integrand: UnivariateIntegrand<T>): UnivariateIntegrand<T> {
override fun process(integrand: UnivariateIntegrand<T>): UnivariateIntegrand<T> {
val ranges = integrand.getFeature<UnivariateIntegrandRanges>()
return if (ranges != null) {
val res = algebra.sum(ranges.ranges.map { integrateRange(integrand, it.first, it.second) })

@@ -89,7 +89,7 @@ public object DoubleSimpsonIntegrator : UnivariateIntegrator<Double> {
return res
}

override fun integrate(integrand: UnivariateIntegrand<Double>): UnivariateIntegrand<Double> {
override fun process(integrand: UnivariateIntegrand<Double>): UnivariateIntegrand<Double> {
val ranges = integrand.getFeature<UnivariateIntegrandRanges>()
return if (ranges != null) {
val res = ranges.ranges.sumOf { integrateRange(integrand, it.first, it.second) }

@@ -53,7 +53,7 @@ public class SplineIntegrator<T : Comparable<T>>(
public val algebra: Field<T>,
public val bufferFactory: MutableBufferFactory<T>,
) : UnivariateIntegrator<T> {
override fun integrate(integrand: UnivariateIntegrand<T>): UnivariateIntegrand<T> = algebra {
override fun process(integrand: UnivariateIntegrand<T>): UnivariateIntegrand<T> = algebra {
val range = integrand.getFeature<IntegrationRange>()?.range ?: 0.0..1.0

val interpolator: PolynomialInterpolator<T> = SplineInterpolator(algebra, bufferFactory)

@@ -80,7 +80,7 @@ public class SplineIntegrator<T : Comparable<T>>(
 */
@UnstableKMathAPI
public object DoubleSplineIntegrator : UnivariateIntegrator<Double> {
override fun integrate(integrand: UnivariateIntegrand<Double>): UnivariateIntegrand<Double> {
override fun process(integrand: UnivariateIntegrand<Double>): UnivariateIntegrand<Double> {
val range = integrand.getFeature<IntegrationRange>()?.range ?: 0.0..1.0

val interpolator: PolynomialInterpolator<Double> = SplineInterpolator(DoubleField, ::DoubleBuffer)

@@ -5,33 +5,24 @@

package space.kscience.kmath.integration

import space.kscience.kmath.misc.FeatureSet
import space.kscience.kmath.misc.UnstableKMathAPI
import space.kscience.kmath.structures.Buffer
import space.kscience.kmath.structures.DoubleBuffer
import kotlin.reflect.KClass

public class UnivariateIntegrand<T> internal constructor(
private val featureMap: Map<KClass<*>, IntegrandFeature>,
override val features: FeatureSet<IntegrandFeature>,
public val function: (Double) -> T,
) : Integrand {

override val features: Set<IntegrandFeature> get() = featureMap.values.toSet()

@Suppress("UNCHECKED_CAST")
override fun <T : IntegrandFeature> getFeature(type: KClass<T>): T? = featureMap[type] as? T

public operator fun <F : IntegrandFeature> plus(pair: Pair<KClass<out F>, F>): UnivariateIntegrand<T> =
UnivariateIntegrand(featureMap + pair, function)

public operator fun <F : IntegrandFeature> plus(feature: F): UnivariateIntegrand<T> =
plus(feature::class to feature)
UnivariateIntegrand(features.with(feature), function)
}

@Suppress("FunctionName")
public fun <T : Any> UnivariateIntegrand(
function: (Double) -> T,
vararg features: IntegrandFeature,
): UnivariateIntegrand<T> = UnivariateIntegrand(features.associateBy { it::class }, function)
): UnivariateIntegrand<T> = UnivariateIntegrand(FeatureSet.of(*features), function)

public typealias UnivariateIntegrator<T> = Integrator<UnivariateIntegrand<T>>

@@ -79,7 +70,7 @@ public val <T : Any> UnivariateIntegrand<T>.value: T get() = valueOrNull ?: erro
public fun <T : Any> UnivariateIntegrator<T>.integrate(
vararg features: IntegrandFeature,
function: (Double) -> T,
): UnivariateIntegrand<T> = integrate(UnivariateIntegrand(function, *features))
): UnivariateIntegrand<T> = process(UnivariateIntegrand(function, *features))

/**
 * A shortcut method to integrate a [function] in [range] with additional [features].

@@ -90,7 +81,7 @@ public fun <T : Any> UnivariateIntegrator<T>.integrate(
range: ClosedRange<Double>,
vararg features: IntegrandFeature,
function: (Double) -> T,
): UnivariateIntegrand<T> = integrate(UnivariateIntegrand(function, IntegrationRange(range), *features))
): UnivariateIntegrand<T> = process(UnivariateIntegrand(function, IntegrationRange(range), *features))

/**
 * A shortcut method to integrate a [function] in [range] with additional [features].

@@ -107,5 +98,5 @@ public fun <T : Any> UnivariateIntegrator<T>.integrate(
featureBuilder()
add(IntegrationRange(range))
}
return integrate(UnivariateIntegrand(function, *features.toTypedArray()))
return process(UnivariateIntegrand(function, *features.toTypedArray()))
}
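
Note: the public integrate(...) shortcuts above keep their names and now simply delegate to the renamed process(...) member, so downstream call sites should not need to change. A hedged end-to-end sketch using DoubleSimpsonIntegrator from the hunks above and the value accessor from this file; the wrapper function is illustrative glue only.

// Hedged sketch: integrate x^2 over [0, 2] via the range shortcut, then read the attached value.
fun integrateParabola(): Double {
    val result = DoubleSimpsonIntegrator.integrate(0.0..2.0) { x -> x * x }
    return result.value   // throws if the integrator attached no IntegrandValue feature
}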

@@ -10,12 +10,13 @@ import space.kscience.kmath.expressions.DifferentiableExpression
import space.kscience.kmath.expressions.Expression
import space.kscience.kmath.expressions.ExpressionAlgebra
import space.kscience.kmath.misc.FeatureSet
import space.kscience.kmath.misc.Symbol
import space.kscience.kmath.operations.ExtendedField
import space.kscience.kmath.structures.Buffer
import space.kscience.kmath.structures.indices

public class FunctionOptimizationResult<T>(point: Map<Symbol, T>, public val value: T) : OptimizationResult<T>(point)
public class OptimizationValue<T>(public val value: T) : OptimizationFeature{
override fun toString(): String = "Value($value)"
}

public enum class FunctionOptimizationTarget : OptimizationFeature {
MAXIMIZE,

@@ -25,9 +26,8 @@ public enum class FunctionOptimizationTarget : OptimizationFeature {
public class FunctionOptimization<T>(
override val features: FeatureSet<OptimizationFeature>,
public val expression: DifferentiableExpression<T, Expression<T>>,
public val initialGuess: Map<Symbol, T>,
public val parameters: Collection<Symbol>,
) : OptimizationProblem{

public companion object{
/**
 * Generate a chi squared expression from given x-y-sigma data and inline model. Provides automatic differentiation

@@ -65,7 +65,5 @@ public fun <T> FunctionOptimization<T>.withFeatures(
): FunctionOptimization<T> = FunctionOptimization(
features.with(*newFeature),
expression,
initialGuess,
parameters
)

@@ -5,13 +5,15 @@

package space.kscience.kmath.optimization

import space.kscience.kmath.expressions.Symbol
import space.kscience.kmath.misc.FeatureSet
import space.kscience.kmath.misc.Featured
import space.kscience.kmath.misc.Loggable
import space.kscience.kmath.misc.Symbol
import kotlin.reflect.KClass

public interface OptimizationFeature
public interface OptimizationFeature {
override fun toString(): String
}

public interface OptimizationProblem : Featured<OptimizationFeature> {
public val features: FeatureSet<OptimizationFeature>

@@ -20,31 +22,22 @@ public interface OptimizationProblem : Featured<OptimizationFeature> {

public inline fun <reified T : OptimizationFeature> OptimizationProblem.getFeature(): T? = getFeature(T::class)

public open class OptimizationResult<T>(public val point: Map<Symbol, T>) : OptimizationFeature
public open class OptimizationStartPoint<T>(public val point: Map<Symbol, T>) : OptimizationFeature {
override fun toString(): String = "StartPoint($point)"
}

public class OptimizationLog(private val loggable: Loggable) : Loggable by loggable, OptimizationFeature
public open class OptimizationResult<T>(public val point: Map<Symbol, T>) : OptimizationFeature {
override fun toString(): String = "Result($point)"
}

public class OptimizationLog(private val loggable: Loggable) : Loggable by loggable, OptimizationFeature {
override fun toString(): String = "Log($loggable)"
}

public class OptimizationParameters(public val symbols: List<Symbol>): OptimizationFeature{
override fun toString(): String = "Parameters($symbols)"
}

//public class OptimizationResult<T>(
//    public val point: Map<Symbol, T>,
//    public val value: T,
//    public val features: Set<OptimizationFeature> = emptySet(),
//) {
//    override fun toString(): String {
//        return "OptimizationResult(point=$point, value=$value)"
//    }
//}
//
//public operator fun <T> OptimizationResult<T>.plus(
//    feature: OptimizationFeature,
//): OptimizationResult<T> = OptimizationResult(point, value, features + feature)
//public fun interface OptimizationProblemFactory<T : Any, out P : OptimizationProblem<T>> {
//    public fun build(symbols: List<Symbol>): P
//}
//
//public operator fun <T : Any, P : OptimizationProblem<T>> OptimizationProblemFactory<T, P>.invoke(
//    symbols: List<Symbol>,
//    block: P.() -> Unit,
//): P = build(symbols).apply(block)

public interface Optimizer<P : OptimizationProblem> {
public suspend fun process(problem: P): P
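
Note: after this change an optimizer is just a suspend transformation from a problem to an enriched problem of the same type. A hedged minimal implementation of the contract, purely illustrative and not part of the commit:

// Sketch of the smallest possible Optimizer: it returns the problem unchanged.
// A real implementation would attach result features, as CMOptimization does above.
public object NoOpOptimizer : Optimizer<FunctionOptimization<Double>> {
    override suspend fun process(problem: FunctionOptimization<Double>): FunctionOptimization<Double> = problem
}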

@@ -10,8 +10,6 @@ import space.kscience.kmath.expressions.*
import space.kscience.kmath.misc.UnstableKMathAPI
import space.kscience.kmath.operations.ExtendedField
import space.kscience.kmath.operations.Field
import space.kscience.kmath.structures.Buffer
import space.kscience.kmath.structures.indices

@UnstableKMathAPI
public interface XYFit<T> : OptimizationProblem {

@@ -59,15 +57,16 @@ public interface XYFit<T> : OptimizationProblem {
//}

/**
 * Optimize differentiable expression using specific [OptimizationProblemFactory]
 * Optimize differentiable expression using specific [Optimizer]
 */
public suspend fun <T : Any, F : FunctionOptimization<T>> DifferentiableExpression<T, Expression<T>>.optimizeWith(
factory: OptimizationProblemFactory<T, F>,
vararg symbols: Symbol,
configuration: F.() -> Unit,
): OptimizationResult<T> {
require(symbols.isNotEmpty()) { "Must provide a list of symbols for optimization" }
val problem = factory(symbols.toList(), configuration)
problem.function(this)
return problem.optimize()
optimizer: Optimizer<F>,
startingPoint: Map<Symbol,T>,
vararg features: OptimizationFeature
): OptimizationProblem {
//    require(startingPoint.isNotEmpty()) { "Must provide a list of symbols for optimization" }
//    val problem = factory(symbols.toList(), configuration)
//    problem.function(this)
//    return problem.optimize()
val problem = FunctionOptimization<T>()
}

@@ -1,32 +0,0 @@
/*
 * Copyright 2015 Alexander Nozik.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ru.inr.mass.minuit

/**
 * A class representing a pair of double (x,y) or (lower,upper)
 *
 * @version $Id$
 * @author Darksnake
 */
class Range
/**
 *
 * Constructor for Range.
 *
 * @param k a double.
 * @param v a double.
 */
(k: Double, v: Double) : Pair<Double?, Double?>(k, v)

@@ -7,12 +7,8 @@ package space.kscience.kmath.optimization.qow

import space.kscience.kmath.data.ColumnarData
import space.kscience.kmath.data.XYErrorColumnarData
import space.kscience.kmath.expressions.DifferentiableExpression
import space.kscience.kmath.expressions.Expression
import space.kscience.kmath.expressions.SymbolIndexer
import space.kscience.kmath.expressions.derivative
import space.kscience.kmath.expressions.*
import space.kscience.kmath.linear.*
import space.kscience.kmath.misc.Symbol
import space.kscience.kmath.misc.UnstableKMathAPI
import space.kscience.kmath.operations.DoubleField
import space.kscience.kmath.operations.Field