diff --git a/kmath-core/src/commonMain/kotlin/space/kscience/kmath/operations/DoubleBufferOps.kt b/kmath-core/src/commonMain/kotlin/space/kscience/kmath/operations/DoubleBufferOps.kt
index 28238c466..b0cce91d3 100644
--- a/kmath-core/src/commonMain/kotlin/space/kscience/kmath/operations/DoubleBufferOps.kt
+++ b/kmath-core/src/commonMain/kotlin/space/kscience/kmath/operations/DoubleBufferOps.kt
@@ -17,12 +17,15 @@ import kotlin.math.*
 /**
  * [ExtendedFieldOps] over [DoubleBuffer].
  */
-public abstract class DoubleBufferOps :
-    BufferAlgebra<Double, DoubleField>, ExtendedFieldOps<Buffer<Double>>, Norm<Buffer<Double>, Double> {
+public abstract class DoubleBufferOps : BufferAlgebra<Double, DoubleField>, ExtendedFieldOps<Buffer<Double>>,
+    Norm<Buffer<Double>, Double> {
 
     override val elementAlgebra: DoubleField get() = DoubleField
     override val bufferFactory: BufferFactory<Double> get() = ::DoubleBuffer
 
+    override fun Buffer<Double>.map(block: DoubleField.(Double) -> Double): DoubleBuffer =
+        mapInline { DoubleField.block(it) }
+
     @UnstableKMathAPI
     override fun unaryOperationFunction(operation: String): (arg: Buffer<Double>) -> Buffer<Double> =
         super.unaryOperationFunction(operation)
@@ -87,8 +90,7 @@ public abstract class DoubleBufferOps :
             val aArray = left.array
             val bArray = right.array
             DoubleBuffer(DoubleArray(left.size) { aArray[it] * bArray[it] })
-        } else
-            DoubleBuffer(DoubleArray(left.size) { left[it] * right[it] })
+        } else DoubleBuffer(DoubleArray(left.size) { left[it] * right[it] })
     }
 
     override fun divide(left: Buffer<Double>, right: Buffer<Double>): DoubleBuffer {
diff --git a/kmath-multik/src/main/kotlin/space/kscience/kmath/multik/MultikDoubleAlgebra.kt b/kmath-multik/src/main/kotlin/space/kscience/kmath/multik/MultikDoubleAlgebra.kt
index e5fef6c1e..4f18ee573 100644
--- a/kmath-multik/src/main/kotlin/space/kscience/kmath/multik/MultikDoubleAlgebra.kt
+++ b/kmath-multik/src/main/kotlin/space/kscience/kmath/multik/MultikDoubleAlgebra.kt
@@ -1,134 +1,47 @@
 package space.kscience.kmath.multik
 
-import org.jetbrains.kotlinx.multik.ndarray.data.DN
 import org.jetbrains.kotlinx.multik.ndarray.data.DataType
 import space.kscience.kmath.nd.StructureND
 import space.kscience.kmath.operations.DoubleField
-import space.kscience.kmath.tensors.api.AnalyticTensorAlgebra
-import space.kscience.kmath.tensors.api.LinearOpsTensorAlgebra
-import space.kscience.kmath.tensors.api.Tensor
+import space.kscience.kmath.operations.ExponentialOperations
+import space.kscience.kmath.operations.TrigonometricOperations
 
 public object MultikDoubleAlgebra : MultikDivisionTensorAlgebra<Double, DoubleField>(),
-    AnalyticTensorAlgebra<Double, DoubleField>, LinearOpsTensorAlgebra<Double, DoubleField> {
+    TrigonometricOperations<StructureND<Double>>, ExponentialOperations<StructureND<Double>> {
     override val elementAlgebra: DoubleField get() = DoubleField
     override val type: DataType get() = DataType.DoubleDataType
 
-    override fun StructureND<Double>.mean(): Double = multikStat.mean(asMultik().array)
+    override fun sin(arg: StructureND<Double>): MultikTensor<Double> = multikMath.mathEx.sin(arg.asMultik().array).wrap()
 
-    override fun StructureND<Double>.mean(dim: Int, keepDim: Boolean): Tensor<Double> =
-        multikStat.mean(asMultik().array, dim).wrap()
+    override fun cos(arg: StructureND<Double>): MultikTensor<Double> = multikMath.mathEx.cos(arg.asMultik().array).wrap()
 
-    override fun StructureND<Double>.std(): Double {
-        TODO("Not yet implemented")
+    override fun tan(arg: StructureND<Double>): MultikTensor<Double> = sin(arg) / cos(arg)
+
+    override fun asin(arg: StructureND<Double>): MultikTensor<Double> = arg.map { asin(it) }
+
+    override fun acos(arg: StructureND<Double>): MultikTensor<Double> = arg.map { acos(it) }
+
+    override fun atan(arg: StructureND<Double>): MultikTensor<Double> = arg.map { atan(it) }
+
+    override fun exp(arg: StructureND<Double>): MultikTensor<Double> = multikMath.mathEx.exp(arg.asMultik().array).wrap()
+
+    override fun ln(arg: StructureND<Double>): MultikTensor<Double> = multikMath.mathEx.log(arg.asMultik().array).wrap()
+
+    override fun sinh(arg: StructureND<Double>): MultikTensor<Double> = (exp(arg) - exp(-arg)) / 2.0
+
+    override fun cosh(arg: StructureND<Double>): MultikTensor<Double> = (exp(arg) + exp(-arg)) / 2.0
+
+    override fun tanh(arg: StructureND<Double>): MultikTensor<Double> {
+        val expPlus = exp(arg)
+        val expMinus = exp(-arg)
+        return (expPlus - expMinus) / (expPlus + expMinus)
     }
 
-    override fun StructureND<Double>.std(dim: Int, keepDim: Boolean): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
+    override fun asinh(arg: StructureND<Double>): MultikTensor<Double> = arg.map { asinh(it) }
 
-    override fun StructureND<Double>.variance(): Double {
-        TODO("Not yet implemented")
-    }
+    override fun acosh(arg: StructureND<Double>): MultikTensor<Double> = arg.map { acosh(it) }
 
-    override fun StructureND<Double>.variance(dim: Int, keepDim: Boolean): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.exp(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.ln(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.sqrt(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.cos(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.acos(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.cosh(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.acosh(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.sin(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.asin(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.sinh(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.asinh(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.tan(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.atan(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.tanh(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.atanh(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.ceil(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.floor(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.det(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.inv(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.cholesky(): Tensor<Double> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.qr(): Pair<Tensor<Double>, Tensor<Double>> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.lu(): Triple<Tensor<Double>, Tensor<Double>, Tensor<Double>> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.svd(): Triple<Tensor<Double>, Tensor<Double>, Tensor<Double>> {
-        TODO("Not yet implemented")
-    }
-
-    override fun StructureND<Double>.symEig(): Pair<Tensor<Double>, Tensor<Double>> {
-        TODO("Not yet implemented")
-    }
+    override fun atanh(arg: StructureND<Double>): MultikTensor<Double> = arg.map { atanh(it) }
 }
 
 public val Double.Companion.multikAlgebra: MultikTensorAlgebra<Double, DoubleField> get() = MultikDoubleAlgebra
diff --git a/kmath-tensors/src/commonMain/kotlin/space/kscience/kmath/tensors/core/DoubleTensorAlgebra.kt b/kmath-tensors/src/commonMain/kotlin/space/kscience/kmath/tensors/core/DoubleTensorAlgebra.kt
index bae49c037..864900adb 100644
--- a/kmath-tensors/src/commonMain/kotlin/space/kscience/kmath/tensors/core/DoubleTensorAlgebra.kt
+++ b/kmath-tensors/src/commonMain/kotlin/space/kscience/kmath/tensors/core/DoubleTensorAlgebra.kt
@@ -75,9 +75,9 @@ public open class DoubleTensorAlgebra :
     override fun zip(
         left: StructureND<Double>,
         right: StructureND<Double>,
-        transform: DoubleField.(Double, Double) -> Double
+        transform: DoubleField.(Double, Double) -> Double,
     ): DoubleTensor {
-        require(left.shape.contentEquals(right.shape)){
+        require(left.shape.contentEquals(right.shape)) {
             "The shapes in zip are not equal: left - ${left.shape}, right - ${right.shape}"
         }
         val leftTensor = left.tensor
@@ -422,14 +422,11 @@ public open class DoubleTensorAlgebra :
 
         for ((res, ab) in resTensor.matrixSequence().zip(newThis.matrixSequence().zip(newOther.matrixSequence()))) {
             val (a, b) = ab
-            dotHelper(a.as2D(), b.as2D(), res.as2D(), l, m1, n)
+            dotTo(a.as2D(), b.as2D(), res.as2D(), l, m1, n)
         }
 
         if (penultimateDim) {
-            return resTensor.view(
-                resTensor.shape.dropLast(2).toIntArray() +
-                        intArrayOf(resTensor.shape.last())
-            )
+            return resTensor.view(resTensor.shape.dropLast(2).toIntArray() + intArrayOf(resTensor.shape.last()))
         }
         if (lastDim) {
             return resTensor.view(resTensor.shape.dropLast(1).toIntArray())
@@ -441,7 +438,7 @@ public open class DoubleTensorAlgebra :
         diagonalEntries: Tensor<Double>,
         offset: Int,
         dim1: Int,
-        dim2: Int
+        dim2: Int,
     ): DoubleTensor {
         val n = diagonalEntries.shape.size
         val d1 = minusIndexFrom(n + 1, dim1)
@@ -577,13 +574,13 @@ public open class DoubleTensorAlgebra :
      */
    public fun Tensor<Double>.rowsByIndices(indices: IntArray): DoubleTensor = stack(indices.map { this[it] })
 
-    internal inline fun StructureND<Double>.fold(foldFunction: (DoubleArray) -> Double): Double =
+    private inline fun StructureND<Double>.fold(foldFunction: (DoubleArray) -> Double): Double =
         foldFunction(tensor.copyArray())
 
-    internal inline fun <reified R : Any> StructureND<Double>.foldDim(
-        foldFunction: (DoubleArray) -> R,
+    private inline fun <reified R : Any> StructureND<Double>.foldDim(
         dim: Int,
         keepDim: Boolean,
+        foldFunction: (DoubleArray) -> R,
     ): BufferedTensor<R> {
         check(dim < dimension) { "Dimension $dim out of range $dimension" }
         val resShape = if (keepDim) {
@@ -592,7 +589,7 @@ public open class DoubleTensorAlgebra :
             shape.take(dim).toIntArray() + shape.takeLast(dimension - dim - 1).toIntArray()
         }
         val resNumElements = resShape.reduce(Int::times)
-        val init = foldFunction(DoubleArray(1){0.0})
+        val init = foldFunction(DoubleArray(1) { 0.0 })
         val resTensor = BufferedTensor(resShape, MutableBuffer.auto(resNumElements) { init }, 0)
 
         for (index in resTensor.indices) {
@@ -608,66 +605,59 @@ public open class DoubleTensorAlgebra :
     override fun StructureND<Double>.sum(): Double = tensor.fold { it.sum() }
 
     override fun StructureND<Double>.sum(dim: Int, keepDim: Boolean): DoubleTensor =
-        foldDim({ x -> x.sum() }, dim, keepDim).toDoubleTensor()
+        foldDim(dim, keepDim) { x -> x.sum() }.toDoubleTensor()
 
     override fun StructureND<Double>.min(): Double = this.fold { it.minOrNull()!! }
 
     override fun StructureND<Double>.min(dim: Int, keepDim: Boolean): DoubleTensor =
-        foldDim({ x -> x.minOrNull()!! }, dim, keepDim).toDoubleTensor()
+        foldDim(dim, keepDim) { x -> x.minOrNull()!! }.toDoubleTensor()
 
     override fun StructureND<Double>.max(): Double = this.fold { it.maxOrNull()!! }
 
     override fun StructureND<Double>.max(dim: Int, keepDim: Boolean): DoubleTensor =
-        foldDim({ x -> x.maxOrNull()!! }, dim, keepDim).toDoubleTensor()
+        foldDim(dim, keepDim) { x -> x.maxOrNull()!! }.toDoubleTensor()
 
     override fun StructureND<Double>.argMax(dim: Int, keepDim: Boolean): IntTensor =
-        foldDim({ x ->
+        foldDim(dim, keepDim) { x ->
            x.withIndex().maxByOrNull { it.value }?.index!!
-        }, dim, keepDim).toIntTensor()
+        }.toIntTensor()
 
     override fun StructureND<Double>.mean(): Double = this.fold { it.sum() / tensor.numElements }
 
-    override fun StructureND<Double>.mean(dim: Int, keepDim: Boolean): DoubleTensor =
-        foldDim(
-            { arr ->
-                check(dim < dimension) { "Dimension $dim out of range $dimension" }
-                arr.sum() / shape[dim]
-            },
-            dim,
-            keepDim
-        ).toDoubleTensor()
+    override fun StructureND<Double>.mean(dim: Int, keepDim: Boolean): DoubleTensor = foldDim(dim, keepDim) { arr ->
+        check(dim < dimension) { "Dimension $dim out of range $dimension" }
+        arr.sum() / shape[dim]
+    }.toDoubleTensor()
 
-    override fun StructureND<Double>.std(): Double = this.fold { arr ->
+    override fun StructureND<Double>.std(): Double = fold { arr ->
         val mean = arr.sum() / tensor.numElements
         sqrt(arr.sumOf { (it - mean) * (it - mean) } / (tensor.numElements - 1))
     }
 
     override fun StructureND<Double>.std(dim: Int, keepDim: Boolean): DoubleTensor = foldDim(
-        { arr ->
-            check(dim < dimension) { "Dimension $dim out of range $dimension" }
-            val mean = arr.sum() / shape[dim]
-            sqrt(arr.sumOf { (it - mean) * (it - mean) } / (shape[dim] - 1))
-        },
         dim, keepDim
-    ).toDoubleTensor()
+    ) { arr ->
+        check(dim < dimension) { "Dimension $dim out of range $dimension" }
+        val mean = arr.sum() / shape[dim]
+        sqrt(arr.sumOf { (it - mean) * (it - mean) } / (shape[dim] - 1))
+    }.toDoubleTensor()
 
-    override fun StructureND<Double>.variance(): Double = this.fold { arr ->
+    override fun StructureND<Double>.variance(): Double = fold { arr ->
         val mean = arr.sum() / tensor.numElements
         arr.sumOf { (it - mean) * (it - mean) } / (tensor.numElements - 1)
     }
 
     override fun StructureND<Double>.variance(dim: Int, keepDim: Boolean): DoubleTensor = foldDim(
-        { arr ->
-            check(dim < dimension) { "Dimension $dim out of range $dimension" }
-            val mean = arr.sum() / shape[dim]
-            arr.sumOf { (it - mean) * (it - mean) } / (shape[dim] - 1)
-        },
         dim, keepDim
-    ).toDoubleTensor()
+    ) { arr ->
+        check(dim < dimension) { "Dimension $dim out of range $dimension" }
+        val mean = arr.sum() / shape[dim]
+        arr.sumOf { (it - mean) * (it - mean) } / (shape[dim] - 1)
+    }.toDoubleTensor()
 
     private fun cov(x: DoubleTensor, y: DoubleTensor): Double {
         val n = x.shape[0]
@@ -699,19 +689,14 @@ public open class DoubleTensorAlgebra :
         return resTensor
     }
 
-    @OptIn(PerformancePitfall::class)
     override fun StructureND<Double>.exp(): DoubleTensor = tensor.map { exp(it) }
 
-    @OptIn(PerformancePitfall::class)
     override fun StructureND<Double>.ln(): DoubleTensor = tensor.map { ln(it) }
 
-    @OptIn(PerformancePitfall::class)
     override fun StructureND<Double>.sqrt(): DoubleTensor = tensor.map { sqrt(it) }
 
-    @OptIn(PerformancePitfall::class)
     override fun StructureND<Double>.cos(): DoubleTensor = tensor.map { cos(it) }
 
-    @OptIn(PerformancePitfall::class)
     override fun StructureND<Double>.acos(): DoubleTensor = tensor.map { acos(it) }
 
     override fun StructureND<Double>.cosh(): DoubleTensor = tensor.map { cosh(it) }
diff --git a/kmath-tensors/src/commonMain/kotlin/space/kscience/kmath/tensors/core/internal/linUtils.kt b/kmath-tensors/src/commonMain/kotlin/space/kscience/kmath/tensors/core/internal/linUtils.kt
index d31e02677..290809cfd 100644
--- a/kmath-tensors/src/commonMain/kotlin/space/kscience/kmath/tensors/core/internal/linUtils.kt
+++ b/kmath-tensors/src/commonMain/kotlin/space/kscience/kmath/tensors/core/internal/linUtils.kt
@@ -53,7 +53,7 @@ internal val BufferedTensor<Double>.matrices: VirtualBuffer<BufferedTensor<Double>>
 
 internal fun BufferedTensor<Double>.matrixSequence(): Sequence<BufferedTensor<Double>> = matrices.asSequence()
 
-internal fun dotHelper(
+internal fun dotTo(
     a: MutableStructure2D<Double>,
     b: MutableStructure2D<Double>,
     res: MutableStructure2D<Double>,
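
Usage sketch (editor's illustration, not part of the patch): because MultikDoubleAlgebra now implements kmath's generic TrigonometricOperations and ExponentialOperations interfaces over StructureND<Double>, element-wise code can be written once against those interfaces and reused with any conforming algebra. The helper names below (sinOfExp, demo) are hypothetical; only the imported declarations come from the modules touched above.

import space.kscience.kmath.multik.MultikDoubleAlgebra
import space.kscience.kmath.nd.StructureND
import space.kscience.kmath.operations.ExponentialOperations
import space.kscience.kmath.operations.TrigonometricOperations

// Generic helper: usable with any algebra over StructureND<Double> that provides
// both trigonometric and exponential operations, not only the Multik-backed one.
fun <A> A.sinOfExp(x: StructureND<Double>): StructureND<Double>
        where A : TrigonometricOperations<StructureND<Double>>,
              A : ExponentialOperations<StructureND<Double>> =
    sin(exp(x))

// Concrete call through the Multik-backed algebra from this change set.
fun demo(x: StructureND<Double>): StructureND<Double> = MultikDoubleAlgebra.sinOfExp(x)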