SVD implementation #271
@@ -3,6 +3,8 @@ package space.kscience.kmath.tensors.core
 import space.kscience.kmath.tensors.LinearOpsTensorAlgebra
 import space.kscience.kmath.nd.as1D
 import space.kscience.kmath.nd.as2D
+import kotlin.math.abs
+import kotlin.math.min

 public class DoubleLinearOpsTensorAlgebra :
     LinearOpsTensorAlgebra<Double, DoubleTensor, IntTensor>,
@@ -89,8 +91,81 @@ public class DoubleLinearOpsTensorAlgebra :
         return qTensor to rTensor
     }

+    internal fun svd1d(a: DoubleTensor, epsilon: Double = 1e-10): DoubleTensor {
+        val (n, m) = a.shape
+        var v: DoubleTensor
+        val b: DoubleTensor
+        if (n > m) {
+            b = a.transpose(0, 1).dot(a)
+            v = DoubleTensor(intArrayOf(m), getRandomNormals(m, 0))
+        } else {
+            b = a.dot(a.transpose(0, 1))
+            v = DoubleTensor(intArrayOf(n), getRandomNormals(n, 0))
+        }
+
+        var lastV: DoubleTensor
+        while (true) {
+            lastV = v
+            v = b.dot(lastV)
+            val norm = DoubleAnalyticTensorAlgebra { (v dot v).sqrt().value() }
+            v = v.times(1.0 / norm)
+            if (abs(v.dot(lastV).value()) > 1 - epsilon) {
+                return v
+            }
+        }
+    }
+
     override fun DoubleTensor.svd(): Triple<DoubleTensor, DoubleTensor, DoubleTensor> {
-        TODO("ALYA")
+        val size = this.shape.size
+        val commonShape = this.shape.sliceArray(0 until size - 2)
+        val (n, m) = this.shape.sliceArray(size - 2 until size)
+        val resU = zeros(commonShape + intArrayOf(n, min(n, m)))
+        val resS = zeros(commonShape + intArrayOf(min(n, m)))
+        val resV = zeros(commonShape + intArrayOf(min(n, m), m))
+
+        for ((matrix, USV) in this.matrixSequence()
+            .zip(resU.matrixSequence().zip(resS.vectorSequence().zip(resV.matrixSequence())))) {
+            val res = ArrayList<Triple<Double, DoubleTensor, DoubleTensor>>(0)
+            val (matrixU, SV) = USV
+            val (matrixS, matrixV) = SV
+
+            for (k in 0 until min(n, m)) {
+                var a = matrix.asTensor().copy()
+                for ((singularValue, u, v) in res.slice(0 until k)) {
+                    val outerProduct = DoubleArray(u.shape[0] * v.shape[0])
+                    for (i in 0 until u.shape[0]) {
+                        for (j in 0 until v.shape[0]) {
+                            outerProduct[i * v.shape[0] + j] = u[i].value() * v[j].value()
+                        }
+                    }
+                    a = a - singularValue.times(DoubleTensor(intArrayOf(u.shape[0], v.shape[0]), outerProduct))
+                }
+                var v: DoubleTensor
+                var u: DoubleTensor
+                var norm: Double
+                if (n > m) {
+                    v = svd1d(a)
+                    u = matrix.asTensor().dot(v)
+                    norm = DoubleAnalyticTensorAlgebra { (u dot u).sqrt().value() }
+                    u = u.times(1.0 / norm)
+                } else {
+                    u = svd1d(a)
+                    v = matrix.asTensor().transpose(0, 1).dot(u)
+                    norm = DoubleAnalyticTensorAlgebra { (v dot v).sqrt().value() }
+                    v = v.times(1.0 / norm)
+                }
+
+                res.add(Triple(norm, u, v))
+            }
+
+            val s = res.map { it.first }.toDoubleArray()
+            val uBuffer = res.map { it.second }.flatMap { it.buffer.array().toList() }.toDoubleArray()
+            val vBuffer = res.map { it.third }.flatMap { it.buffer.array().toList() }.toDoubleArray()
+            uBuffer.copyInto(matrixU.buffer.array())
+            s.copyInto(matrixS.buffer.array())
+            vBuffer.copyInto(matrixV.buffer.array())
+        }
+        return Triple(resU, resS, resV.transpose(size - 2, size - 1))
     }

     override fun DoubleTensor.symEig(eigenvectors: Boolean): Pair<DoubleTensor, DoubleTensor> {
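For context: `svd1d` is a power iteration on B = AᵀA (or AAᵀ when n > m), and `svd` assembles the factorisation one singular triple at a time, deflating the working matrix by subtracting σ·u·vᵀ after each step. Below is a standalone plain-array sketch of the same two steps; every name in it is hypothetical and chosen for illustration only, it is not part of kmath.

```kotlin
import kotlin.math.abs
import kotlin.math.min
import kotlin.math.sqrt
import kotlin.random.Random

// Row-major matrix helpers (hypothetical, illustration only).
fun norm(x: DoubleArray): Double = sqrt(x.fold(0.0) { s, xi -> s + xi * xi })
fun normalized(x: DoubleArray): DoubleArray { val n = norm(x); return DoubleArray(x.size) { x[it] / n } }
fun dot(x: DoubleArray, y: DoubleArray): Double = x.indices.sumOf { x[it] * y[it] }
fun matVec(a: DoubleArray, rows: Int, cols: Int, x: DoubleArray): DoubleArray =
    DoubleArray(rows) { i -> (0 until cols).sumOf { j -> a[i * cols + j] * x[j] } }
fun matTVec(a: DoubleArray, rows: Int, cols: Int, x: DoubleArray): DoubleArray =
    DoubleArray(cols) { j -> (0 until rows).sumOf { i -> a[i * cols + j] * x[i] } }

// Power iteration on B = AᵀA: approximates the dominant right singular vector of a
// rows x cols matrix (svd1d does the same, picking AᵀA or AAᵀ depending on shape).
fun dominantRightSingularVector(a: DoubleArray, rows: Int, cols: Int, epsilon: Double = 1e-10): DoubleArray {
    val rng = Random(0)
    var v = normalized(DoubleArray(cols) { rng.nextDouble() - 0.5 })
    while (true) {
        val lastV = v
        // v <- B·lastV = Aᵀ·(A·lastV), renormalised each step
        v = normalized(matTVec(a, rows, cols, matVec(a, rows, cols, lastV)))
        // stop once the direction no longer changes between iterates
        if (abs(dot(v, lastV)) > 1 - epsilon) return v
    }
}

// Deflation: find a singular triple, subtract sigma·u·vᵀ, repeat.
// Assumes the matrix has rank min(rows, cols); otherwise sigma can reach zero.
fun svdByDeflation(a: DoubleArray, rows: Int, cols: Int): List<Triple<Double, DoubleArray, DoubleArray>> {
    val res = mutableListOf<Triple<Double, DoubleArray, DoubleArray>>()
    for (k in 0 until min(rows, cols)) {
        // working copy with the already-found rank-1 components removed
        val work = a.copyOf()
        for ((sigma, u, v) in res)
            for (i in 0 until rows)
                for (j in 0 until cols)
                    work[i * cols + j] -= sigma * u[i] * v[j]
        val v = dominantRightSingularVector(work, rows, cols)
        val av = matVec(a, rows, cols, v)   // A·v ≈ sigma·u, so its norm is the singular value
        val sigma = norm(av)
        res.add(Triple(sigma, DoubleArray(rows) { av[it] / sigma }, v))
    }
    return res
}
```

The accuracy of the later triples depends on how precisely the earlier vectors converged, which is why the stopping criterion compares consecutive iterates against `1 - epsilon` rather than checking a residual.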
@@ -283,7 +283,47 @@ public open class DoubleTensorAlgebra : TensorPartialDivisionAlgebra<Double, Dou
     }

     override fun diagonalEmbedding(diagonalEntries: DoubleTensor, offset: Int, dim1: Int, dim2: Int): DoubleTensor {
-        TODO("Alya")
+        val n = diagonalEntries.shape.size
+        if (dim1 == dim2) {
+            throw RuntimeException("Diagonal dimensions cannot be identical $dim1, $dim2")
+        }
+        if (dim1 > n || dim2 > n) {
+            throw RuntimeException("Dimension out of range")
+        }
+
+        var lessDim = dim1
+        var greaterDim = dim2
+        var realOffset = offset
+        if (lessDim > greaterDim) {
+            realOffset *= -1
+            lessDim = greaterDim.also { greaterDim = lessDim }
+        }
+
+        val resShape = diagonalEntries.shape.slice(0 until lessDim).toIntArray() +
+                intArrayOf(diagonalEntries.shape[n - 1] + abs(realOffset)) +
+                diagonalEntries.shape.slice(lessDim until greaterDim - 1).toIntArray() +
+                intArrayOf(diagonalEntries.shape[n - 1] + abs(realOffset)) +
+                diagonalEntries.shape.slice(greaterDim - 1 until n - 1).toIntArray()
+        val resTensor = zeros(resShape)
+
+        for (i in 0 until diagonalEntries.linearStructure.size) {
+            val multiIndex = diagonalEntries.linearStructure.index(i)
+
+            var offset1 = 0
+            var offset2 = abs(realOffset)
+            if (realOffset < 0) {
+                offset1 = offset2.also { offset2 = offset1 }
+            }
+            val diagonalMultiIndex = multiIndex.slice(0 until lessDim).toIntArray() +
+                    intArrayOf(multiIndex[n - 1] + offset1) +
+                    multiIndex.slice(lessDim until greaterDim - 1).toIntArray() +
+                    intArrayOf(multiIndex[n - 1] + offset2) +
+                    multiIndex.slice(greaterDim - 1 until n - 1).toIntArray()
+
+            resTensor[diagonalMultiIndex] = diagonalEntries[multiIndex]
+        }
+
+        return resTensor
     }


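For context: `diagonalEmbedding` walks every index of `diagonalEntries` and writes the value into a result tensor where the two new axes (`dim1`, `dim2`) both carry that index plus an offset split across them; when `dim1 > dim2` the offset changes sign. The 1-D → 2-D special case is the easiest way to see the layout. A minimal sketch assuming the same sign convention (hypothetical helper, not part of kmath):

```kotlin
import kotlin.math.abs

// 1-D -> 2-D special case of diagonalEmbedding: place a vector of diagonal
// entries into a square matrix along a (possibly offset) diagonal.
// Returns the matrix row-major; hypothetical helper for illustration only.
fun diagonalEmbed1d(entries: DoubleArray, offset: Int = 0): DoubleArray {
    val size = entries.size + abs(offset)           // the embedded matrix is (k + |offset|) square
    val result = DoubleArray(size * size)
    val rowShift = if (offset < 0) -offset else 0   // negative offset: diagonal moves down
    val colShift = if (offset > 0) offset else 0    // positive offset: diagonal moves right
    for (i in entries.indices) {
        result[(i + rowShift) * size + (i + colShift)] = entries[i]
    }
    return result
}
```

With offset 0 this yields diag(10, 20, 30) for the entries (10, 20, 30); with offset -1 the entries land one row below the main diagonal of a 4 × 4 matrix, which matches the expectations in the tests further down.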
@@ -122,4 +122,36 @@ class TestDoubleLinearOpsTensorAlgebra {

         assertTrue { p.dot(tensor).buffer.array().epsEqual(l.dot(u).buffer.array()) }
     }
+
+    @Test
+    fun svd1d() = DoubleLinearOpsTensorAlgebra {
+        val tensor2 = fromArray(intArrayOf(2, 3), doubleArrayOf(1.0, 2.0, 3.0, 4.0, 5.0, 6.0))
+
+        val res = svd1d(tensor2)
+
+        assertTrue(res.shape contentEquals intArrayOf(2))
+        assertTrue { abs(abs(res.buffer.array()[res.bufferStart]) - 0.386) < 0.01 }
+        assertTrue { abs(abs(res.buffer.array()[res.bufferStart + 1]) - 0.922) < 0.01 }
+    }
+
+    @Test
+    fun svd() = DoubleLinearOpsTensorAlgebra {
+        val epsilon = 1e-10
+        fun test_tensor(tensor: DoubleTensor) {
+            val svd = tensor.svd()
+
+            val tensorSVD = svd.first
+                .dot(
+                    diagonalEmbedding(svd.second, 0, 0, 1)
+                        .dot(svd.third.transpose(0, 1))
+                )
+
+            for ((x1, x2) in tensor.buffer.array() zip tensorSVD.buffer.array()) {
+                assertTrue { abs(x1 - x2) < epsilon }
+            }
+        }
+        test_tensor(fromArray(intArrayOf(2, 3), doubleArrayOf(1.0, 2.0, 3.0, 4.0, 5.0, 6.0)))
+        test_tensor(fromArray(intArrayOf(2, 2), doubleArrayOf(-1.0, 0.0, 239.0, 238.0)))
+
+    }
 }
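The `svd` test validates the factorisation by rebuilding the input as U·diag(S)·Vᵀ and comparing entrywise. The same check, written against the plain-array sketch given earlier (it reuses the hypothetical `svdByDeflation` helper, so it is not the library API; the tolerance is deliberately loose because deflation accuracy depends on the singular value gap):

```kotlin
import kotlin.math.abs

// Rebuild A from the (sigma, u, v) triples and compare entrywise.
// Builds on the sketch functions above (svdByDeflation etc.); illustration only.
fun reconstruct(triples: List<Triple<Double, DoubleArray, DoubleArray>>, rows: Int, cols: Int): DoubleArray {
    val out = DoubleArray(rows * cols)
    for ((sigma, u, v) in triples)
        for (i in 0 until rows)
            for (j in 0 until cols)
                out[i * cols + j] += sigma * u[i] * v[j]
    return out
}

fun main() {
    val a = doubleArrayOf(1.0, 2.0, 3.0, 4.0, 5.0, 6.0)        // the same 2x3 matrix used in the test
    val rebuilt = reconstruct(svdByDeflation(a, 2, 3), 2, 3)
    check(a.indices.all { abs(a[it] - rebuilt[it]) < 1e-3 })   // loose tolerance for the sketch
}
```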
@@ -115,6 +115,39 @@ class TestDoubleTensorAlgebra {
         assertTrue(tensor4.dot(tensor5).shape contentEquals intArrayOf(5, 4, 2, 8, 3, 8, 5))
     }

+    @Test
+    fun diagonalEmbedding() = DoubleTensorAlgebra {
+        val tensor1 = fromArray(intArrayOf(3), doubleArrayOf(10.0, 20.0, 30.0))
+        val tensor2 = fromArray(intArrayOf(2, 3), doubleArrayOf(1.0, 2.0, 3.0, 4.0, 5.0, 6.0))
+        val tensor3 = zeros(intArrayOf(2, 3, 4, 5))
+
+        assertTrue(diagonalEmbedding(tensor3, 0, 3, 4).shape contentEquals
+                intArrayOf(2, 3, 4, 5, 5))
+        assertTrue(diagonalEmbedding(tensor3, 1, 3, 4).shape contentEquals
+                intArrayOf(2, 3, 4, 6, 6))
+        assertTrue(diagonalEmbedding(tensor3, 2, 0, 3).shape contentEquals
+                intArrayOf(7, 2, 3, 7, 4))
+
+        val diagonal1 = diagonalEmbedding(tensor1, 0, 1, 0)
+        assertTrue(diagonal1.shape contentEquals intArrayOf(3, 3))
+        assertTrue(diagonal1.buffer.array() contentEquals
+                doubleArrayOf(10.0, 0.0, 0.0, 0.0, 20.0, 0.0, 0.0, 0.0, 30.0))
+
+        val diagonal1_offset = diagonalEmbedding(tensor1, 1, 1, 0)
+        assertTrue(diagonal1_offset.shape contentEquals intArrayOf(4, 4))
+        assertTrue(diagonal1_offset.buffer.array() contentEquals
+                doubleArrayOf(0.0, 0.0, 0.0, 0.0, 10.0, 0.0, 0.0, 0.0, 0.0, 20.0, 0.0, 0.0, 0.0, 0.0, 30.0, 0.0))
+
+        val diagonal2 = diagonalEmbedding(tensor2, 1, 0, 2)
+        assertTrue(diagonal2.shape contentEquals intArrayOf(4, 2, 4))
+        assertTrue(diagonal2.buffer.array() contentEquals
+                doubleArrayOf(
+                        0.0, 1.0, 0.0, 0.0, 0.0, 4.0, 0.0, 0.0,
+                        0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 5.0, 0.0,
+                        0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 6.0,
+                        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0))
+    }
+
     @Test
     fun testContentEqual() = DoubleTensorAlgebra {
         //TODO()
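The 1-D cases in this test (`diagonal1`, `diagonal1_offset`) can be cross-checked against the `diagonalEmbed1d` sketch from earlier: `diagonalEmbedding(tensor1, 1, 1, 0)` passes `dim1 > dim2`, so offset 1 becomes -1 internally and the entries land one row below the main diagonal (hypothetical helper, not the test suite's API):

```kotlin
fun main() {
    // diag(10, 20, 30): matches the diagonal1 expectation
    println(diagonalEmbed1d(doubleArrayOf(10.0, 20.0, 30.0), 0).toList())
    // entries one row below the main diagonal of a 4x4 matrix: matches diagonal1_offset
    println(diagonalEmbed1d(doubleArrayOf(10.0, 20.0, 30.0), -1).toList())
}
```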