Refactoring and optimization of tensorAlgebra

This commit is contained in:
Alexander Nozik 2022-09-30 11:34:44 +03:00
parent b602066f48
commit 89d0cbc7ea
No known key found for this signature in database
GPG Key ID: F7FCF2DD25C71357
7 changed files with 126 additions and 59 deletions

View File

@ -148,4 +148,45 @@ public class PermutedBuffer<T>(
/**
* Creates a permuted view of the given buffer using the provided [indices]
*/
public fun <T> Buffer<T>.permute(indices: IntArray): PermutedBuffer<T> = PermutedBuffer(this, indices)
public fun <T> Buffer<T>.permute(indices: IntArray): PermutedBuffer<T> =
PermutedBuffer(this, indices)
/**
 * A [BufferView] that permutes indexing of [origin]: element `i` of this buffer is
 * element `permutations[i]` of the original buffer. Both reads and writes go through
 * to [origin]; the permutation itself is fixed at construction time.
 */
public class PermutedMutableBuffer<T>(
    override val origin: MutableBuffer<T>,
    private val permutations: IntArray,
) : BufferView<T>, MutableBuffer<T> {
    init {
        // Fail fast at construction instead of on the first out-of-range access.
        permutations.forEach { index ->
            if (index !in origin.indices) {
                throw IndexOutOfBoundsException("Index $index is not in ${origin.indices}")
            }
        }
    }

    override val size: Int get() = permutations.size

    override fun get(index: Int): T = origin[permutations[index]]

    override fun set(index: Int, value: T) {
        origin[permutations[index]] = value
    }

    /** Deep copy: copies [origin] but shares the (immutable-by-convention) permutation array. */
    override fun copy(): MutableBuffer<T> = PermutedMutableBuffer(origin.copy(), permutations)

    // Direct iterator; avoids the Sequence adapter allocation of the previous implementation
    // (resolves the old "could be optimized" TODO).
    override fun iterator(): Iterator<T> = object : Iterator<T> {
        private var position = 0

        override fun hasNext(): Boolean = position < permutations.size

        override fun next(): T {
            if (position >= permutations.size) throw NoSuchElementException()
            return origin[permutations[position++]]
        }
    }

    @UnstableKMathAPI
    override fun originIndex(index: Int): Int =
        if (index in permutations.indices) permutations[index] else -1

    override fun toString(): String = Buffer.toString(this)
}
/**
 * Creates a permuted mutable view of this buffer using the provided [indices].
 * No data is copied; writes through the view mutate this buffer.
 */
public fun <T> MutableBuffer<T>.permute(indices: IntArray): PermutedMutableBuffer<T> {
    return PermutedMutableBuffer(this, indices)
}

View File

@ -10,6 +10,7 @@ import space.kscience.kmath.misc.UnstableKMathAPI
import space.kscience.kmath.nd.MutableStructure2D
import space.kscience.kmath.nd.MutableStructureND
import space.kscience.kmath.nd.Shape
import space.kscience.kmath.nd.Strides
import space.kscience.kmath.structures.*
import space.kscience.kmath.tensors.core.internal.toPrettyString
import kotlin.jvm.JvmInline
@ -81,7 +82,9 @@ public inline fun OffsetDoubleBuffer.mapInPlace(operation: (Double) -> Double) {
}
/**
* Default [BufferedTensor] implementation for [Double] values
* Default [BufferedTensor] implementation for [Double] values.
*
* [DoubleTensor] always uses row-based strides
*/
public class DoubleTensor(
shape: IntArray,
@ -128,34 +131,37 @@ public value class DoubleTensor2D(public val tensor: DoubleTensor) : MutableStru
}
// NOTE(review): removed the dead commented-out MutableBuffer scaffolding that the
// permuted-buffer implementation below superseded.
@OptIn(PerformancePitfall::class)
override val columns: List<PermutedMutableBuffer<Double>>
    get() = List(colNum) { j ->
        // Column j of a row-major matrix lives at offsets j, j + colNum, j + 2*colNum, ...
        val indices = IntArray(rowNum) { i -> j + i * colNum }
        tensor.source.permute(indices)
    }
// Generic StructureND access is delegated straight to the wrapped tensor.
@PerformancePitfall
override fun elements(): Sequence<Pair<IntArray, Double>> = tensor.elements()
override fun get(index: IntArray): Double = tensor[index]
override val shape: Shape get() = tensor.shape
/**
 * Wraps this tensor in a [DoubleTensor2D] view.
 * NOTE(review): no dimension check is visible here — presumably [DoubleTensor2D]
 * validates that the tensor is 2D; confirm before relying on it.
 */
public fun DoubleTensor.asDoubleTensor2D(): DoubleTensor2D = DoubleTensor2D(this)
/**
 * Views this tensor as its underlying [OffsetDoubleBuffer].
 *
 * Only valid for one-dimensional tensors; fails for any other rank.
 */
public fun DoubleTensor.asDoubleBuffer(): OffsetDoubleBuffer {
    if (shape.size != 1) error("Only 1D tensors could be cast to 1D")
    return source
}
/**
 * Iterates over every trailing 2D slice of this tensor, invoking [block] with the
 * multi-index over the leading dimensions and a [DoubleTensor2D] view of the slice.
 * Views share storage with this tensor — no data is copied.
 *
 * Requires the tensor to have at least 2 dimensions.
 */
public inline fun DoubleTensor.forEachMatrix(block: (index: IntArray, matrix: DoubleTensor2D) -> Unit) {
    val n = shape.size
    check(n >= 2) { "Expected tensor with 2 or more dimensions, got size $n" }
    // Element count of one trailing matrix slice (rows * cols).
    val matrixOffset = shape[n - 1] * shape[n - 2]
    val matrixShape = intArrayOf(shape[n - 2], shape[n - 1])
    val size = Strides.linearSizeOf(matrixShape)
    for (i in 0 until linearSize / matrixOffset) {
        val offset = i * matrixOffset
        // Leading part of the multi-index identifying which slice this is.
        val index = indices.index(offset).sliceArray(0 until (shape.size - 2))
        block(index, DoubleTensor(matrixShape, source.view(offset, size)).asDoubleTensor2D())
    }
}

View File

@ -496,7 +496,7 @@ public open class DoubleTensorAlgebra :
* with `0.0` mean and `1.0` standard deviation.
*/
public fun randomNormal(shape: IntArray, seed: Long = 0): DoubleTensor =
DoubleTensor(shape, getRandomNormals(shape.reduce(Int::times), seed))
DoubleTensor(shape, DoubleBuffer.randomNormals(shape.reduce(Int::times), seed))
/**
* Returns a tensor with the same shape as `input` of random numbers drawn from normal distributions
@ -508,7 +508,7 @@ public open class DoubleTensorAlgebra :
* with `0.0` mean and `1.0` standard deviation.
*/
public fun Tensor<Double>.randomNormalLike(seed: Long = 0): DoubleTensor =
DoubleTensor(shape, getRandomNormals(shape.reduce(Int::times), seed))
DoubleTensor(shape, DoubleBuffer.randomNormals(shape.reduce(Int::times), seed))
/**
* Concatenates a sequence of tensors with equal shapes along the first dimension.
@ -781,7 +781,7 @@ public open class DoubleTensorAlgebra :
pTensor
.matrixSequence()
.zip(pivotsTensor.asIntTensor().vectorSequence())
.forEach { (p, pivot) -> pivInit(p.as2D(), pivot.as1D(), n) }
.forEach { (p, pivot) -> pivInit(p.asDoubleTensor2D(), pivot.as1D(), n) }
val lTensor = zeroesLike(luTensor)
val uTensor = zeroesLike(luTensor)
@ -791,7 +791,7 @@ public open class DoubleTensorAlgebra :
.zip(luTensor.asDoubleTensor().matrixSequence())
.forEach { (pairLU, lu) ->
val (l, u) = pairLU
luPivotHelper(l.as2D(), u.as2D(), lu.as2D(), n)
luPivotHelper(l.asDoubleTensor2D(), u.asDoubleTensor2D(), lu.asDoubleTensor2D(), n)
}
return Triple(pTensor, lTensor, uTensor)
@ -818,7 +818,7 @@ public open class DoubleTensorAlgebra :
val lTensor = zeroesLike(this)
for ((a, l) in asDoubleTensor().matrixSequence().zip(lTensor.matrixSequence()))
for (i in 0 until n) choleskyHelper(a.as2D(), l.as2D(), n)
for (i in 0 until n) choleskyHelper(a.asDoubleTensor2D(), l.asDoubleTensor2D(), n)
return lTensor
}
@ -837,7 +837,7 @@ public open class DoubleTensorAlgebra :
.zip(rTensor.matrixSequence()))
).forEach { (matrix, qr) ->
val (q, r) = qr
qrHelper(matrix, q, r.as2D())
qrHelper(matrix, q, r.asDoubleTensor2D())
}
return qTensor to rTensor
@ -867,7 +867,7 @@ public open class DoubleTensorAlgebra :
val sTensor = zeros(commonShape + intArrayOf(min(n, m)))
val vTensor = zeros(commonShape + intArrayOf(min(n, m), m))
val matrices: VirtualBuffer<DoubleTensor> = asDoubleTensor().matrices
val matrices = asDoubleTensor().matrices
val uTensors = uTensor.matrices
val sTensorVectors = sTensor.vectors
val vTensors = vTensor.matrices
@ -922,7 +922,7 @@ public open class DoubleTensorAlgebra :
val utv = u.transposed() matmul v
val n = s.shape.last()
for (matrix in utv.matrixSequence()) {
matrix.as2D().cleanSym(n)
matrix.asDoubleTensor2D().cleanSym(n)
}
val eig = (utv dot s.view(shp)).view(s.shape)
@ -940,7 +940,7 @@ public open class DoubleTensorAlgebra :
var eigenvalueStart = 0
var eigenvectorStart = 0
for (matrix in asDoubleTensor().matrixSequence()) {
val matrix2D = matrix.as2D()
val matrix2D = matrix.asDoubleTensor2D()
val (d, v) = matrix2D.jacobiHelper(maxIteration, epsilon)
for (i in 0 until matrix2D.rowNum) {
@ -986,8 +986,8 @@ public open class DoubleTensorAlgebra :
)
luTensor.matrixSequence().zip(pivotsTensor.vectorSequence()).forEachIndexed { index, (lu, pivots) ->
resBuffer[index] = if (luHelper(lu.as2D(), pivots.as1D(), epsilon))
0.0 else luMatrixDet(lu.as2D(), pivots.as1D())
resBuffer[index] = if (luHelper(lu.asDoubleTensor2D(), pivots.as1D(), epsilon))
0.0 else luMatrixDet(lu.asDoubleTensor2D(), pivots.as1D())
}
return detTensor
@ -1010,7 +1010,7 @@ public open class DoubleTensorAlgebra :
val seq = luTensor.matrixSequence().zip(pivotsTensor.vectorSequence()).zip(invTensor.matrixSequence())
for ((luP, invMatrix) in seq) {
val (lu, pivots) = luP
luMatrixInv(lu.as2D(), pivots.as1D(), invMatrix.as2D())
luMatrixInv(lu.asDoubleTensor2D(), pivots.as1D(), invMatrix.asDoubleTensor2D())
}
return invTensor

View File

@ -7,9 +7,7 @@ package space.kscience.kmath.tensors.core.internal
import space.kscience.kmath.nd.*
import space.kscience.kmath.nd.Strides.Companion.linearSizeOf
import space.kscience.kmath.operations.asSequence
import space.kscience.kmath.structures.DoubleBuffer
import space.kscience.kmath.structures.VirtualBuffer
import space.kscience.kmath.structures.asBuffer
import space.kscience.kmath.structures.indices
import space.kscience.kmath.tensors.core.BroadcastDoubleTensorAlgebra.eye
@ -155,13 +153,13 @@ internal fun List<OffsetDoubleBuffer>.concat(): DoubleBuffer {
return array.asBuffer()
}
internal val DoubleTensor.vectors: VirtualBuffer<DoubleTensor>
internal val DoubleTensor.vectors: List<DoubleTensor>
get() {
val n = shape.size
val vectorOffset = shape[n - 1]
val vectorShape = intArrayOf(shape.last())
return VirtualBuffer(linearSize / vectorOffset) { index ->
return List(linearSize / vectorOffset) { index ->
val offset = index * vectorOffset
DoubleTensor(vectorShape, source.view(offset, vectorShape.first()))
}
@ -171,7 +169,7 @@ internal val DoubleTensor.vectors: VirtualBuffer<DoubleTensor>
internal fun DoubleTensor.vectorSequence(): Sequence<DoubleTensor> = vectors.asSequence()
internal val DoubleTensor.matrices: VirtualBuffer<DoubleTensor>
internal val DoubleTensor.matrices: List<DoubleTensor>
get() {
val n = shape.size
check(n >= 2) { "Expected tensor with 2 or more dimensions, got size $n" }
@ -180,7 +178,7 @@ internal val DoubleTensor.matrices: VirtualBuffer<DoubleTensor>
val size = linearSizeOf(matrixShape)
return VirtualBuffer(linearSize / matrixOffset) { index ->
return List(linearSize / matrixOffset) { index ->
val offset = index * matrixOffset
DoubleTensor(matrixShape, source.view(offset, size))
}

View File

@ -5,8 +5,12 @@
package space.kscience.kmath.tensors.core.internal
import space.kscience.kmath.nd.*
import space.kscience.kmath.nd.MutableStructure1D
import space.kscience.kmath.nd.MutableStructure2D
import space.kscience.kmath.nd.StructureND
import space.kscience.kmath.nd.as1D
import space.kscience.kmath.operations.invoke
import space.kscience.kmath.structures.DoubleBuffer
import space.kscience.kmath.structures.IntBuffer
import space.kscience.kmath.structures.asBuffer
import space.kscience.kmath.structures.indices
@ -109,7 +113,7 @@ internal fun DoubleTensorAlgebra.computeLU(
val pivotsTensor = tensor.setUpPivots()
for ((lu, pivots) in luTensor.matrixSequence().zip(pivotsTensor.vectorSequence()))
if (luHelper(lu.as2D(), pivots.as1D(), epsilon))
if (luHelper(lu.asDoubleTensor2D(), pivots.as1D(), epsilon))
return null
return Pair(luTensor, pivotsTensor)
@ -210,18 +214,18 @@ internal fun DoubleTensorAlgebra.qrHelper(
) {
checkSquareMatrix(matrix.shape)
val n = matrix.shape[0]
val qM = q.as2D()
val qM = q.asDoubleTensor2D()
val matrixT = matrix.transposed(0, 1)
val qT = q.transposed(0, 1)
for (j in 0 until n) {
val v = matrixT.getTensor(j)
val vv = v.as1D()
val vv = v.asDoubleBuffer()
if (j > 0) {
for (i in 0 until j) {
r[i, j] = (qT.getTensor(i) dot matrixT.getTensor(j)).value()
for (k in 0 until n) {
val qTi = qT.getTensor(i).as1D()
val qTi = qT.getTensor(i).asDoubleBuffer()
vv[k] = vv[k] - r[i, j] * qTi[k]
}
}
@ -239,10 +243,10 @@ internal fun DoubleTensorAlgebra.svd1d(a: DoubleTensor, epsilon: Double = 1e-10)
val b: DoubleTensor
if (n > m) {
b = a.transposed(0, 1).dot(a)
v = DoubleTensor(intArrayOf(m), getRandomUnitVector(m, 0))
v = DoubleTensor(intArrayOf(m), DoubleBuffer.randomUnitVector(m, 0))
} else {
b = a.dot(a.transposed(0, 1))
v = DoubleTensor(intArrayOf(n), getRandomUnitVector(n, 0))
v = DoubleTensor(intArrayOf(n), DoubleBuffer.randomUnitVector(n, 0))
}
var lastV: DoubleTensor

View File

@ -14,14 +14,14 @@ import space.kscience.kmath.tensors.core.BufferedTensor
import space.kscience.kmath.tensors.core.DoubleTensor
import kotlin.math.*
internal fun getRandomNormals(n: Int, seed: Long): DoubleBuffer {
internal fun DoubleBuffer.Companion.randomNormals(n: Int, seed: Long): DoubleBuffer {
val distribution = GaussianSampler(0.0, 1.0)
val generator = RandomGenerator.default(seed)
return distribution.sample(generator).nextBufferBlocking(n)
}
internal fun getRandomUnitVector(n: Int, seed: Long): DoubleBuffer {
val unnorm: DoubleBuffer = getRandomNormals(n, seed)
internal fun DoubleBuffer.Companion.randomUnitVector(n: Int, seed: Long): DoubleBuffer {
val unnorm: DoubleBuffer = randomNormals(n, seed)
val norm = sqrt(unnorm.array.sumOf { it * it })
return unnorm.map { it / norm }
}
@ -67,7 +67,7 @@ internal fun format(value: Double, digits: Int = 4): String = buildString {
}
@OptIn(PerformancePitfall::class)
internal fun DoubleTensor.toPrettyString(): String = buildString {
public fun DoubleTensor.toPrettyString(): String = buildString {
var offset = 0
val shape = this@toPrettyString.shape
val linearStructure = this@toPrettyString.indices

View File

@ -11,6 +11,7 @@ import space.kscience.kmath.operations.invoke
import space.kscience.kmath.structures.DoubleBuffer
import space.kscience.kmath.structures.toDoubleArray
import space.kscience.kmath.tensors.core.internal.matrixSequence
import space.kscience.kmath.testutils.assertBufferEquals
import kotlin.test.Test
import kotlin.test.assertEquals
import kotlin.test.assertTrue
@ -38,7 +39,7 @@ internal class TestDoubleTensor {
@Test
fun testGet() = DoubleTensorAlgebra {
val tensor = fromArray(intArrayOf(1, 2, 2), doubleArrayOf(3.5, 5.8, 58.4, 2.4))
val matrix = tensor.getTensor(0).as2D()
val matrix = tensor.getTensor(0).asDoubleTensor2D()
assertEquals(matrix[0, 1], 5.8)
val vector = tensor.getTensor(0, 1).as1D()
@ -52,8 +53,8 @@ internal class TestDoubleTensor {
tensor.matrixSequence().forEach {
val a = it.asDoubleTensor()
val secondRow = a.getTensor(1).as1D()
val secondColumn = a.transposed(0, 1).getTensor(1).as1D()
val secondRow = a.getTensor(1).asDoubleBuffer()
val secondColumn = a.transposed(0, 1).getTensor(1).asDoubleBuffer()
assertEquals(secondColumn[0], 77.89)
assertEquals(secondRow[1], secondColumn[1])
}
@ -86,6 +87,23 @@ internal class TestDoubleTensor {
tensorArray[intArrayOf(0)] = 55.9
assertEquals(ndArray[intArrayOf(0)], 1.0)
}
@Test
fun test2D() = with(DoubleTensorAlgebra) {
    // 3x3 matrix with entries (i - j).
    val tensor: DoubleTensor = structureND(intArrayOf(3, 3)) { (i, j) -> (i - j).toDouble() }
    val matrix = tensor.asDoubleTensor2D()
    // Row 1 is (1, 0, -1); column 2 is (-2, -1, 0).
    assertBufferEquals(DoubleBuffer(1.0, 0.0, -1.0), matrix.rows[1])
    assertBufferEquals(DoubleBuffer(-2.0, -1.0, 0.0), matrix.columns[2])
}
@Test
fun testMatrixIteration() = with(DoubleTensorAlgebra) {
    // Previously this test only printed output and asserted nothing.
    val tensor = structureND(intArrayOf(3, 3, 3, 3)) { index -> index.sum().toDouble() }
    var visited = 0
    tensor.forEachMatrix { index, matrix ->
        // For a rank-4 tensor the slice index covers the two leading dimensions.
        assertEquals(2, index.size)
        assertEquals(3, matrix.rowNum)
        assertEquals(3, matrix.colNum)
        visited++
    }
    // 3 * 3 leading-index combinations -> 9 trailing 3x3 matrices.
    assertEquals(9, visited)
}
}