diff --git a/kmath-core/src/commonMain/kotlin/space/kscience/kmath/tensors/core/BroadcastDoubleTensorAlgebra.kt b/kmath-core/src/commonMain/kotlin/space/kscience/kmath/tensors/core/BroadcastDoubleTensorAlgebra.kt
index 425178fc1..f315f6b51 100644
--- a/kmath-core/src/commonMain/kotlin/space/kscience/kmath/tensors/core/BroadcastDoubleTensorAlgebra.kt
+++ b/kmath-core/src/commonMain/kotlin/space/kscience/kmath/tensors/core/BroadcastDoubleTensorAlgebra.kt
@@ -45,7 +45,7 @@ public class BroadcastDoubleTensorAlgebra : DoubleTensorAlgebra() {
         val newThis = broadcast[0]
         val newOther = broadcast[1]
         val resBuffer = DoubleArray(newThis.linearStructure.size) { i ->
-            newThis.buffer.array()[newOther.bufferStart + i] *
+            newThis.buffer.array()[newThis.bufferStart + i] *
                     newOther.buffer.array()[newOther.bufferStart + i]
         }
         return DoubleTensor(newThis.shape, resBuffer)
@@ -182,17 +182,13 @@ internal inline fun broadcastTensors(vararg tensors: DoubleTensor): List<DoubleTensor> {
-    var onlyTwoDims = true
-    for (tensor in tensors) {
-        if (tensor.shape.size < 2) {
+    val onlyTwoDims = tensors.asSequence().onEach {
+        require(it.shape.size >= 2) {
             throw RuntimeException("Tensors must have at least 2 dimensions")
         }
-        if (tensor.shape.size != 2) {
-            onlyTwoDims = false
-        }
-    }
+    }.any { it.shape.size != 2 }
 
-    if (onlyTwoDims) {
+    if (!onlyTwoDims) {
         return tensors.asList()
     }
@@ -233,7 +229,7 @@ internal inline fun broadcastOuterTensors(vararg tensors: DoubleTensor): List<DoubleTensor> {
diff --git a/kmath-core/src/commonMain/kotlin/space/kscience/kmath/tensors/core/BufferedTensor.kt b/kmath-core/src/commonMain/kotlin/space/kscience/kmath/tensors/core/BufferedTensor.kt
--- a/kmath-core/src/commonMain/kotlin/space/kscience/kmath/tensors/core/BufferedTensor.kt
+++ b/kmath-core/src/commonMain/kotlin/space/kscience/kmath/tensors/core/BufferedTensor.kt
@@ -?,? +?,? @@ public open class BufferedTensor<T>(
     public fun vectorSequence(): Sequence<MutableStructure1D<T>> = sequence {
         check(shape.size >= 1) {"todo"}
-        val vectorOffset = linearStructure.strides[0]
+        val n = shape.size
+        val vectorOffset = shape[n - 1]
         val vectorShape = intArrayOf(shape.last())
         for (offset in 0 until numel step vectorOffset) {
             val vector = BufferedTensor(vectorShape, buffer, offset).as1D()
@@ -44,8 +45,9 @@ public open class BufferedTensor<T>(
     public fun matrixSequence(): Sequence<MutableStructure2D<T>> = sequence {
         check(shape.size >= 2) {"todo"}
-        val matrixOffset = linearStructure.strides[1]
-        val matrixShape = intArrayOf(shape[shape.size - 2], shape.last()) //todo better way?
+        val n = shape.size
+        val matrixOffset = shape[n - 1] * shape[n - 2]
+        val matrixShape = intArrayOf(shape[n - 2], shape[n - 1]) //todo better way?
         for (offset in 0 until numel step matrixOffset) {
             val matrix = BufferedTensor(matrixShape, buffer, offset).as2D()
             yield(matrix)
diff --git a/kmath-core/src/commonMain/kotlin/space/kscience/kmath/tensors/core/DoubleLinearOpsTensorAlgebra.kt b/kmath-core/src/commonMain/kotlin/space/kscience/kmath/tensors/core/DoubleLinearOpsTensorAlgebra.kt
index e0abc49b7..8a16a991d 100644
--- a/kmath-core/src/commonMain/kotlin/space/kscience/kmath/tensors/core/DoubleLinearOpsTensorAlgebra.kt
+++ b/kmath-core/src/commonMain/kotlin/space/kscience/kmath/tensors/core/DoubleLinearOpsTensorAlgebra.kt
@@ -8,7 +8,7 @@ public class DoubleLinearOpsTensorAlgebra :
     DoubleTensorAlgebra() {
 
     override fun DoubleTensor.inv(): DoubleTensor {
-        TODO("Not yet implemented")
+        TODO("ANDREI")
     }
 
     override fun DoubleTensor.lu(tol: Double): Pair<DoubleTensor, IntTensor> {
@@ -135,16 +135,16 @@ public class DoubleLinearOpsTensorAlgebra :
     }
 
     override fun DoubleTensor.qr(): DoubleTensor {
-        TODO("Not yet implemented")
+        TODO("ANDREI")
     }
 
     override fun DoubleTensor.svd(): Triple<DoubleTensor, DoubleTensor, DoubleTensor> {
-        TODO("Not yet implemented")
+        TODO("ALYA")
    }
 
     override fun DoubleTensor.symEig(eigenvectors: Boolean): Pair<DoubleTensor, DoubleTensor> {
-        TODO("Not yet implemented")
+        TODO("ANDREI")
     }
 
 }
diff --git a/kmath-core/src/commonMain/kotlin/space/kscience/kmath/tensors/core/DoubleTensorAlgebra.kt b/kmath-core/src/commonMain/kotlin/space/kscience/kmath/tensors/core/DoubleTensorAlgebra.kt
index 9cdc6c130..feb8d11fa 100644
--- a/kmath-core/src/commonMain/kotlin/space/kscience/kmath/tensors/core/DoubleTensorAlgebra.kt
+++ b/kmath-core/src/commonMain/kotlin/space/kscience/kmath/tensors/core/DoubleTensorAlgebra.kt
@@ -241,37 +241,6 @@ public open class DoubleTensorAlgebra : TensorPartialDivisionAlgebra<DoubleTensor> {
-        if (this.shape.size > 2 || other.shape.size > 2) {
-            throw RuntimeException("Both tensors must have a maximum of 2 dimensions")
-        }
-
-        if (this.shape[1] != other.shape[0]) {
-            throw RuntimeException("Tensors dot operation dimension mismatch: " +
-                    "(${this.shape[0]}, ${this.shape[1]}) x (${other.shape[0]}, ${other.shape[1]})")
-        }
-
-        val l = this.shape[0]
-        val m = this.shape[1]
-        val n = other.shape[1]
-
-        val res = DoubleTensor(intArrayOf(l, n), DoubleArray(l * n))
-
-        for (i in 0 until l) {
-            for (j in 0 until n) {
-                var curr = 0.0
-                for (k in 0 until m) {
-                    val ik = this.linearStructure.offset(intArrayOf(i, k))
-                    val kj = other.linearStructure.offset(intArrayOf(k, j))
-                    curr += this.buffer.array()[ik] * other.buffer.array()[kj]
-                }
-                val linearIndex = res.linearStructure.offset(intArrayOf(i, j))
-                res.buffer.array()[linearIndex] = curr
-            }
-        }
-        return res
-    }
-
     override fun DoubleTensor.dot(other: DoubleTensor): DoubleTensor {
         if (this.shape.size == 1 && other.shape.size == 1) {
             return DoubleTensor(intArrayOf(1), doubleArrayOf(this.times(other).buffer.array().sum()))
@@ -279,10 +248,15 @@ public open class DoubleTensorAlgebra : TensorPartialDivisionAlgebra<DoubleTensor> {
         val resSize = resShape.reduce { acc, i -> acc * i }
         val resTensor = DoubleTensor(resShape, DoubleArray(resSize))
 
         for ((res, ab) in resTensor.matrixSequence().zip(newThis.matrixSequence().zip(newOther.matrixSequence()))) {
-            val a = ab.first
-            val b = ab.second
+            val (a, b) = ab
 
             for (i in 0 until l) {
                 for (j in 0 until n) {
@@ -318,6 +291,13 @@ public open class DoubleTensorAlgebra : TensorPartialDivisionAlgebra<DoubleTensor> {
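
Editor's note on the patch above: the BufferedTensor changes make vectorSequence()/matrixSequence() step through the flat buffer in chunks derived from the trailing dimensions (matrixOffset = shape[n - 1] * shape[n - 2]), and the rewritten DoubleTensor.dot consumes those chunks pairwise via matrixSequence().zip(...). The standalone Kotlin sketch below mirrors that bookkeeping on plain arrays to show why the step sizes line up. batchedMatMul is a hypothetical helper written only for illustration, not part of kmath; it assumes row-major storage and operands whose batch dimensions are already broadcast to the same size.

// Illustrative sketch, not kmath API: batched matrix multiplication over flat row-major buffers,
// assuming both operands already share the same (broadcast) batch dimensions.
fun batchedMatMul(aShape: IntArray, a: DoubleArray, bShape: IntArray, b: DoubleArray): DoubleArray {
    require(aShape.size >= 2 && bShape.size >= 2) { "Tensors must have at least 2 dimensions" }
    val l = aShape[aShape.size - 2]   // rows of each A slice
    val m = aShape[aShape.size - 1]   // shared inner dimension
    val n = bShape[bShape.size - 1]   // columns of each B slice
    require(m == bShape[bShape.size - 2]) { "Tensors dot operation dimension mismatch" }

    // Step sizes through the flat buffers: the same shape[n - 1] * shape[n - 2] offset
    // that matrixSequence() uses to cut out one trailing matrix per iteration.
    val aStep = l * m
    val bStep = m * n
    val resStep = l * n

    val batch = a.size / aStep
    val res = DoubleArray(batch * resStep)

    for (s in 0 until batch) {
        val aOff = s * aStep
        val bOff = s * bStep
        val rOff = s * resStep
        // The usual triple loop on one pair of matrix slices, as in the patched dot.
        for (i in 0 until l) for (j in 0 until n) {
            var curr = 0.0
            for (k in 0 until m) curr += a[aOff + i * m + k] * b[bOff + k * n + j]
            res[rOff + i * n + j] = curr
        }
    }
    return res
}

fun main() {
    // Shape (2, 2, 2) x (2, 2, 2): two independent 2x2 matrix products.
    val a = doubleArrayOf(1.0, 2.0, 3.0, 4.0, 1.0, 0.0, 0.0, 1.0)
    val b = doubleArrayOf(5.0, 6.0, 7.0, 8.0, 2.0, 0.0, 0.0, 2.0)
    println(batchedMatMul(intArrayOf(2, 2, 2), a, intArrayOf(2, 2, 2), b).toList())
    // Prints [19.0, 22.0, 43.0, 50.0, 2.0, 0.0, 0.0, 2.0]
}

Running main() multiplies the two stacked 2x2 slices independently, which is exactly the behaviour the zipped matrixSequence() iteration in the patched dot is meant to provide.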