diff --git a/kmath-histograms/src/commonMain/kotlin/space/kscience/kmath/histogram/Counter.kt b/kmath-histograms/src/commonMain/kotlin/space/kscience/kmath/histogram/Counter.kt
index 3e5d93768..291284444 100644
--- a/kmath-histograms/src/commonMain/kotlin/space/kscience/kmath/histogram/Counter.kt
+++ b/kmath-histograms/src/commonMain/kotlin/space/kscience/kmath/histogram/Counter.kt
@@ -8,7 +8,7 @@ package space.kscience.kmath.histogram
 import kotlinx.atomicfu.atomic
 import kotlinx.atomicfu.getAndUpdate
 import space.kscience.kmath.operations.DoubleField
-import space.kscience.kmath.operations.Ring
+import space.kscience.kmath.operations.Group
 
 /**
  * Common representation for atomic counters
@@ -18,7 +18,7 @@ public interface Counter<T : Any> {
     public val value: T
 
     public companion object {
-        public fun real(): ObjectCounter<Double> = ObjectCounter(DoubleField)
+        public fun double(): ObjectCounter<Double> = ObjectCounter(DoubleField)
     }
 }
 
@@ -32,6 +32,16 @@ public class IntCounter : Counter<Int> {
     override val value: Int get() = innerValue.value
 }
 
+public operator fun IntCounter.inc(): IntCounter {
+    add(1)
+    return this
+}
+
+public operator fun IntCounter.dec(): IntCounter {
+    add(-1)
+    return this
+}
+
 public class LongCounter : Counter<Long> {
     private val innerValue = atomic(0L)
 
@@ -42,7 +52,17 @@ public class LongCounter : Counter<Long> {
     override val value: Long get() = innerValue.value
 }
 
-public class ObjectCounter<T : Any>(public val group: Ring<T>) : Counter<T> {
+public operator fun LongCounter.inc(): LongCounter {
+    add(1L)
+    return this
+}
+
+public operator fun LongCounter.dec(): LongCounter {
+    add(-1L)
+    return this
+}
+
+public class ObjectCounter<T : Any>(private val group: Group<T>) : Counter<T> {
     private val innerValue = atomic(group.zero)
 
     override fun add(delta: T) {
diff --git a/kmath-histograms/src/commonMain/kotlin/space/kscience/kmath/histogram/DoubleHistogramSpace.kt b/kmath-histograms/src/commonMain/kotlin/space/kscience/kmath/histogram/DoubleHistogramSpace.kt
index 28eade060..61d0b9f33 100644
--- a/kmath-histograms/src/commonMain/kotlin/space/kscience/kmath/histogram/DoubleHistogramSpace.kt
+++ b/kmath-histograms/src/commonMain/kotlin/space/kscience/kmath/histogram/DoubleHistogramSpace.kt
@@ -74,7 +74,7 @@ public class DoubleHistogramSpace(
     }
 
     override fun produce(builder: HistogramBuilder<Double>.() -> Unit): IndexedHistogram<Double, Double> {
-        val ndCounter = StructureND.auto(shape) { Counter.real() }
+        val ndCounter = StructureND.auto(shape) { Counter.double() }
         val hBuilder = HistogramBuilder<Double> { point, value ->
             val index = getIndex(point)
             ndCounter[index].add(value.toDouble())
diff --git a/kmath-histograms/src/commonMain/kotlin/space/kscience/kmath/histogram/IndexedHistogramSpace.kt b/kmath-histograms/src/commonMain/kotlin/space/kscience/kmath/histogram/IndexedHistogramSpace.kt
index 0c7dd81e1..9275c1c5e 100644
--- a/kmath-histograms/src/commonMain/kotlin/space/kscience/kmath/histogram/IndexedHistogramSpace.kt
+++ b/kmath-histograms/src/commonMain/kotlin/space/kscience/kmath/histogram/IndexedHistogramSpace.kt
@@ -41,7 +41,6 @@ public class IndexedHistogram<T : Comparable<T>, V : Any>(
         get() = DefaultStrides(context.shape).asSequence().map {
             context.produceBin(it, values[it])
         }.asIterable()
-
 }
 
 /**
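
Note: the histogram-side changes above rename Counter.real() to Counter.double() and add inc/dec operator extensions to the primitive counters. A quick usage sketch of the new API (the function and variable names here are illustrative only, not part of the patch):

    import space.kscience.kmath.histogram.Counter
    import space.kscience.kmath.histogram.IntCounter
    import space.kscience.kmath.histogram.inc

    fun counterDemo() {
        // ObjectCounter<Double> backed by DoubleField, formerly Counter.real()
        val total = Counter.double()
        total.add(1.5)

        // IntCounter can now be advanced with ++ through the new inc extension
        var hits = IntCounter()
        hits++
        hits++
        println("hits=${hits.value}, total=${total.value}")
    }
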
diff --git a/kmath-histograms/src/jvmMain/kotlin/space/kscience/kmath/histogram/TreeHistogramSpace.kt b/kmath-histograms/src/jvmMain/kotlin/space/kscience/kmath/histogram/TreeHistogramSpace.kt
index f8a3a6a8b..0853615e6 100644
--- a/kmath-histograms/src/jvmMain/kotlin/space/kscience/kmath/histogram/TreeHistogramSpace.kt
+++ b/kmath-histograms/src/jvmMain/kotlin/space/kscience/kmath/histogram/TreeHistogramSpace.kt
@@ -39,7 +39,7 @@ public class TreeHistogram(
 @PublishedApi
 internal class TreeHistogramBuilder(val binFactory: (Double) -> UnivariateDomain) : UnivariateHistogramBuilder {
 
-    internal class BinCounter(val domain: UnivariateDomain, val counter: Counter<Double> = Counter.real()) :
+    internal class BinCounter(val domain: UnivariateDomain, val counter: Counter<Double> = Counter.double()) :
         ClosedFloatingPointRange<Double> by domain.range
 
     private val bins: TreeMap<Double, BinCounter> = TreeMap()
diff --git a/kmath-histograms/src/jvmMain/kotlin/space/kscience/kmath/histogram/UnivariateHistogram.kt b/kmath-histograms/src/jvmMain/kotlin/space/kscience/kmath/histogram/UnivariateHistogram.kt
index 723577cd9..ac0576a8e 100644
--- a/kmath-histograms/src/jvmMain/kotlin/space/kscience/kmath/histogram/UnivariateHistogram.kt
+++ b/kmath-histograms/src/jvmMain/kotlin/space/kscience/kmath/histogram/UnivariateHistogram.kt
@@ -42,7 +42,7 @@ public interface UnivariateHistogram : Histogram<Double, UnivariateBin> {
         /**
          * Build and fill a [UnivariateHistogram]. Returns a read-only histogram.
         */
-        public fun uniform(
+        public inline fun uniform(
            binSize: Double,
            start: Double = 0.0,
            builder: UnivariateHistogramBuilder.() -> Unit,
@@ -51,7 +51,7 @@ public interface UnivariateHistogram : Histogram<Double, UnivariateBin> {
        /**
         * Build and fill a histogram with custom borders. Returns a read-only histogram.
         */
-        public fun custom(
+        public inline fun custom(
            borders: DoubleArray,
            builder: UnivariateHistogramBuilder.() -> Unit,
        ): UnivariateHistogram = TreeHistogramSpace.custom(borders).fill(builder)
diff --git a/kmath-tensorflow/build.gradle.kts b/kmath-tensorflow/build.gradle.kts
new file mode 100644
index 000000000..c8307f01f
--- /dev/null
+++ b/kmath-tensorflow/build.gradle.kts
@@ -0,0 +1,15 @@
+plugins {
+    id("ru.mipt.npm.gradle.jvm")
+}
+
+description = "Google tensorflow connector"
+
+dependencies {
+    api(project(":kmath-tensors"))
+    api("org.tensorflow:tensorflow-core-api:0.3.3")
+    testImplementation("org.tensorflow:tensorflow-core-platform:0.3.3")
+}
+
+readme {
+    maturity = ru.mipt.npm.gradle.Maturity.PROTOTYPE
+}
\ No newline at end of file
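
Note: the new kmath-tensorflow module is JVM-only and exposes the TensorFlow Java core API through its `api` dependencies. A downstream build would pull it in roughly like this; the group and version coordinates below are an assumption based on how other kmath modules are published and are not part of this patch:

    // build.gradle.kts of a consumer project (hypothetical coordinates)
    dependencies {
        implementation("space.kscience:kmath-tensorflow:0.3.0-dev")
    }
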
diff --git a/kmath-tensorflow/src/main/kotlin/space/kscience/kmath/tensorflow/DoubleTensorFlowAlgebra.kt b/kmath-tensorflow/src/main/kotlin/space/kscience/kmath/tensorflow/DoubleTensorFlowAlgebra.kt
new file mode 100644
index 000000000..eb8245944
--- /dev/null
+++ b/kmath-tensorflow/src/main/kotlin/space/kscience/kmath/tensorflow/DoubleTensorFlowAlgebra.kt
@@ -0,0 +1,75 @@
+package space.kscience.kmath.tensorflow
+
+import org.tensorflow.Graph
+import org.tensorflow.Output
+import org.tensorflow.ndarray.NdArray
+import org.tensorflow.op.core.Constant
+import org.tensorflow.types.TFloat64
+import space.kscience.kmath.expressions.Symbol
+import space.kscience.kmath.misc.PerformancePitfall
+import space.kscience.kmath.nd.DefaultStrides
+import space.kscience.kmath.nd.Shape
+import space.kscience.kmath.nd.StructureND
+import space.kscience.kmath.operations.DoubleField
+
+public class DoubleTensorFlowOutput(
+    graph: Graph,
+    output: Output<TFloat64>,
+) : TensorFlowOutput<Double, TFloat64>(graph, output) {
+
+    override fun org.tensorflow.Tensor.actualizeTensor(): NdArray<Double> = this as TFloat64
+
+}
+
+public class DoubleTensorFlowAlgebra internal constructor(
+    graph: Graph,
+) : TensorFlowAlgebra<Double, TFloat64, DoubleField>(graph) {
+
+    override val elementAlgebra: DoubleField get() = DoubleField
+
+    override fun structureND(
+        shape: Shape,
+        initializer: DoubleField.(IntArray) -> Double,
+    ): StructureND<Double> {
+        val res = TFloat64.tensorOf(org.tensorflow.ndarray.Shape.of(*shape.toLongArray())) { array ->
+            DefaultStrides(shape).forEach { index ->
+                array.setDouble(elementAlgebra.initializer(index), *index.toLongArray())
+            }
+        }
+        return DoubleTensorFlowOutput(graph, ops.constant(res).asOutput())
+    }
+
+    override fun StructureND<Double>.asTensorFlow(): TensorFlowOutput<Double, TFloat64> =
+        if (this is TensorFlowOutput<Double, *> && output.type() == TFloat64::class.java) {
+            @Suppress("UNCHECKED_CAST")
+            this as TensorFlowOutput<Double, TFloat64>
+        } else {
+            val res = TFloat64.tensorOf(org.tensorflow.ndarray.Shape.of(*shape.toLongArray())) { array ->
+                @OptIn(PerformancePitfall::class)
+                elements().forEach { (index, value) ->
+                    array.setDouble(value, *index.toLongArray())
+                }
+            }
+            DoubleTensorFlowOutput(graph, ops.constant(res).asOutput())
+        }
+
+    override fun Output<TFloat64>.wrap(): TensorFlowOutput<Double, TFloat64> = DoubleTensorFlowOutput(graph, this)
+
+    override fun const(value: Double): Constant<TFloat64> = ops.constant(value)
+
+
+}
+
+public fun DoubleField.produceWithTF(
+    block: DoubleTensorFlowAlgebra.() -> StructureND<Double>,
+): StructureND<Double> = Graph().use { graph ->
+    val scope = DoubleTensorFlowAlgebra(graph)
+    scope.export(scope.block())
+}
+
+public fun DoubleField.produceMapWithTF(
+    block: DoubleTensorFlowAlgebra.() -> Map<Symbol, StructureND<Double>>,
+): Map<Symbol, StructureND<Double>> = Graph().use { graph ->
+    val scope = DoubleTensorFlowAlgebra(graph)
+    scope.block().mapValues { scope.export(it.value) }
+}
\ No newline at end of file
diff --git a/kmath-tensorflow/src/main/kotlin/space/kscience/kmath/tensorflow/IntTensorFlowAlgebra.kt b/kmath-tensorflow/src/main/kotlin/space/kscience/kmath/tensorflow/IntTensorFlowAlgebra.kt
new file mode 100644
index 000000000..084a445e0
--- /dev/null
+++ b/kmath-tensorflow/src/main/kotlin/space/kscience/kmath/tensorflow/IntTensorFlowAlgebra.kt
@@ -0,0 +1,21 @@
+package space.kscience.kmath.tensorflow
+
+import org.tensorflow.Graph
+import org.tensorflow.Output
+import org.tensorflow.ndarray.NdArray
+import org.tensorflow.types.TInt32
+import org.tensorflow.types.TInt64
+
+public class IntTensorFlowOutput(
+    graph: Graph,
+    output: Output<TInt32>,
+) : TensorFlowOutput<Int, TInt32>(graph, output) {
+    override fun org.tensorflow.Tensor.actualizeTensor(): NdArray<Int> = this as TInt32
+}
+
+public class LongTensorFlowOutput(
+    graph: Graph,
+    output: Output<TInt64>,
+) : TensorFlowOutput<Long, TInt64>(graph, output) {
+    override fun org.tensorflow.Tensor.actualizeTensor(): NdArray<Long> = this as TInt64
+}
\ No newline at end of file
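
Note: produceWithTF and produceMapWithTF open a Graph, run the block inside a DoubleTensorFlowAlgebra scope, and export the result back into an eager structure before the Graph is closed. A minimal sketch of the map-returning variant; the StringSymbol name and the variables are illustrative only, not part of the patch:

    import space.kscience.kmath.expressions.StringSymbol
    import space.kscience.kmath.nd.structureND
    import space.kscience.kmath.operations.DoubleField

    fun lazyGraphDemo() {
        val x = StringSymbol("x") // any Symbol instance works as a key
        val results = DoubleField.produceMapWithTF {
            val a = structureND(2, 2) { 1.0 }
            mapOf(x to a + a * 3.0)
        }
        // results[x] is a plain StructureND<Double>, detached from the closed Graph
        println(results[x])
    }
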
diff --git a/kmath-tensorflow/src/main/kotlin/space/kscience/kmath/tensorflow/TensorFlowAlgebra.kt b/kmath-tensorflow/src/main/kotlin/space/kscience/kmath/tensorflow/TensorFlowAlgebra.kt
new file mode 100644
index 000000000..7ad91c267
--- /dev/null
+++ b/kmath-tensorflow/src/main/kotlin/space/kscience/kmath/tensorflow/TensorFlowAlgebra.kt
@@ -0,0 +1,237 @@
+package space.kscience.kmath.tensorflow
+
+
+import org.tensorflow.Graph
+import org.tensorflow.Operand
+import org.tensorflow.Output
+import org.tensorflow.Session
+import org.tensorflow.ndarray.NdArray
+import org.tensorflow.op.Ops
+import org.tensorflow.op.core.Constant
+import org.tensorflow.op.core.Max
+import org.tensorflow.op.core.Min
+import org.tensorflow.op.core.Sum
+import org.tensorflow.types.TInt32
+import org.tensorflow.types.family.TType
+import space.kscience.kmath.misc.PerformancePitfall
+import space.kscience.kmath.misc.UnstableKMathAPI
+import space.kscience.kmath.nd.Shape
+import space.kscience.kmath.nd.StructureND
+import space.kscience.kmath.operations.Ring
+import space.kscience.kmath.tensors.api.Tensor
+import space.kscience.kmath.tensors.api.TensorAlgebra
+
+internal fun IntArray.toLongArray() = LongArray(size) { get(it).toLong() }
+internal fun LongArray.toIntArray() = IntArray(size) { get(it).toInt() }
+
+internal val <T> NdArray<T>.scalar: T get() = getObject()
+
+
+public sealed interface TensorFlowTensor<T> : Tensor<T>
+
+@JvmInline
+public value class TensorFlowArray<T>(public val tensor: NdArray<T>) : Tensor<T> {
+    override val shape: Shape get() = tensor.shape().asArray().toIntArray()
+
+    override fun get(index: IntArray): T = tensor.getObject(*index.toLongArray())
+
+    //TODO implement native element sequence
+
+    override fun set(index: IntArray, value: T) {
+        tensor.setObject(value, *index.toLongArray())
+    }
+}
+
+public abstract class TensorFlowOutput<T, TT : TType>(
+    protected val graph: Graph,
+    output: Output<TT>,
+) : TensorFlowTensor<T> {
+
+    public var output: Output<TT> = output
+        internal set
+
+    override val shape: Shape get() = output.shape().asArray().toIntArray()
+
+    protected abstract fun org.tensorflow.Tensor.actualizeTensor(): NdArray<T>
+
+    internal val actualTensor by lazy {
+        Session(graph).use { session ->
+            TensorFlowArray(session.runner().fetch(output).run().first().actualizeTensor())
+        }
+    }
+
+    override fun get(index: IntArray): T = actualTensor[index]
+
+    @PerformancePitfall
+    override fun elements(): Sequence<Pair<IntArray, T>> = actualTensor.elements()
+
+    override fun set(index: IntArray, value: T) {
+        actualTensor[index] = value
+    }
+
+}
+
+
+public abstract class TensorFlowAlgebra<T, TT : TType, A : Ring<T>> internal constructor(
+    protected val graph: Graph,
+) : TensorAlgebra<T, A> {
+
+    protected val ops: Ops by lazy { Ops.create(graph) }
+
+    protected abstract fun StructureND<T>.asTensorFlow(): TensorFlowOutput<T, TT>
+
+    protected abstract fun Output<TT>.wrap(): TensorFlowOutput<T, TT>
+
+    protected abstract fun const(value: T): Constant<TT>
+
+    override fun StructureND<T>.valueOrNull(): T? = if (shape contentEquals intArrayOf(1))
+        get(Shape(0)) else null
+
+    private inline fun StructureND<T>.biOp(
+        other: StructureND<T>,
+        operation: (left: Operand<TT>, right: Operand<TT>) -> Operand<TT>,
+    ): TensorFlowOutput<T, TT> {
+        val left = asTensorFlow().output
+        val right = other.asTensorFlow().output
+        return operation(left, right).asOutput().wrap()
+    }
+
+    private inline fun T.biOp(
+        other: StructureND<T>,
+        operation: (left: Operand<TT>, right: Operand<TT>) -> Operand<TT>,
+    ): TensorFlowOutput<T, TT> {
+        val left = const(this)
+        val right = other.asTensorFlow().output
+        return operation(left, right).asOutput().wrap()
+    }
+
+    private inline fun StructureND<T>.biOp(
+        value: T,
+        operation: (left: Operand<TT>, right: Operand<TT>) -> Operand<TT>,
+    ): TensorFlowOutput<T, TT> {
+        val left = asTensorFlow().output
+        val right = const(value)
+        return operation(left, right).asOutput().wrap()
+    }
+
+    private inline fun Tensor<T>.inPlaceOp(
+        other: StructureND<T>,
+        operation: (left: Operand<TT>, right: Operand<TT>) -> Operand<TT>,
+    ): Unit {
+        val origin = asTensorFlow()
+        val left = origin.output
+        val right = other.asTensorFlow().output
+        origin.output = operation(left, right).asOutput()
+    }
+
+    private inline fun Tensor<T>.inPlaceOp(
+        value: T,
+        operation: (left: Operand<TT>, right: Operand<TT>) -> Operand<TT>,
+    ): Unit {
+        val origin = asTensorFlow()
+        val left = origin.output
+        val right = const(value)
+        origin.output = operation(left, right).asOutput()
+    }
+
+    private inline fun StructureND<T>.unOp(operation: (Operand<TT>) -> Operand<TT>): TensorFlowOutput<T, TT> =
+        operation(asTensorFlow().output).asOutput().wrap()
+
+    override fun T.plus(arg: StructureND<T>): TensorFlowOutput<T, TT> = biOp(arg, ops.math::add)
+
+    override fun StructureND<T>.plus(arg: T): TensorFlowOutput<T, TT> = biOp(arg, ops.math::add)
+
+    override fun StructureND<T>.plus(arg: StructureND<T>): TensorFlowOutput<T, TT> = biOp(arg, ops.math::add)
+
+    override fun Tensor<T>.plusAssign(value: T): Unit = inPlaceOp(value, ops.math::add)
+
+    override fun Tensor<T>.plusAssign(arg: StructureND<T>): Unit = inPlaceOp(arg, ops.math::add)
+
+    override fun StructureND<T>.minus(arg: T): TensorFlowOutput<T, TT> = biOp(arg, ops.math::sub)
+
+    override fun StructureND<T>.minus(arg: StructureND<T>): TensorFlowOutput<T, TT> = biOp(arg, ops.math::sub)
+
+    override fun T.minus(arg: StructureND<T>): Tensor<T> = biOp(arg, ops.math::sub)
+
+    override fun Tensor<T>.minusAssign(value: T): Unit = inPlaceOp(value, ops.math::sub)
+
+    override fun Tensor<T>.minusAssign(arg: StructureND<T>): Unit = inPlaceOp(arg, ops.math::sub)
+
+    override fun T.times(arg: StructureND<T>): TensorFlowOutput<T, TT> = biOp(arg, ops.math::mul)
+
+    override fun StructureND<T>.times(arg: T): TensorFlowOutput<T, TT> = biOp(arg, ops.math::mul)
+
+    override fun StructureND<T>.times(arg: StructureND<T>): TensorFlowOutput<T, TT> = biOp(arg, ops.math::mul)
+
+    override fun Tensor<T>.timesAssign(value: T): Unit = inPlaceOp(value, ops.math::mul)
+
+    override fun Tensor<T>.timesAssign(arg: StructureND<T>): Unit = inPlaceOp(arg, ops.math::mul)
+
+    override fun StructureND<T>.unaryMinus(): TensorFlowOutput<T, TT> = unOp(ops.math::neg)
+
+    override fun Tensor<T>.get(i: Int): Tensor<T> = unOp {
+        TODO("Not yet implemented")
+    }
+
+    override fun Tensor<T>.transpose(i: Int, j: Int): Tensor<T> = unOp {
+        ops.linalg.transpose(it, ops.constant(intArrayOf(i, j)))
+    }
+
+    override fun Tensor<T>.view(shape: IntArray): Tensor<T> = unOp {
+        ops.reshape(it, ops.constant(shape))
+    }
+
+    override fun Tensor<T>.viewAs(other: StructureND<T>): Tensor<T> = biOp(other) { l, r ->
+        ops.reshape(l, ops.shape(r))
+    }
+
+    override fun StructureND<T>.dot(other: StructureND<T>): TensorFlowOutput<T, TT> = biOp(other) { l, r ->
+        ops.linalg.matMul(
+            if (l.asTensor().shape().numDimensions() == 1) ops.expandDims(l,ops.constant(0)) else l,
+            if (r.asTensor().shape().numDimensions() == 1) ops.expandDims(r,ops.constant(-1)) else r)
+    }
+
+    override fun diagonalEmbedding(
+        diagonalEntries: Tensor<T>,
+        offset: Int,
+        dim1: Int,
+        dim2: Int,
+    ): TensorFlowOutput<T, TT> = diagonalEntries.unOp {
+        TODO()
+    }
+
+    override fun StructureND<T>.sum(): T = unOp {
+        ops.sum(it, ops.constant(intArrayOf()))
+    }.value()
+
+    override fun StructureND<T>.sum(dim: Int, keepDim: Boolean): TensorFlowOutput<T, TT> = unOp {
+        ops.sum(it, ops.constant(dim), Sum.keepDims(keepDim))
+    }
+
+    override fun StructureND<T>.min(): T = unOp {
+        ops.min(it, ops.constant(intArrayOf()))
+    }.value()
+
+    override fun StructureND<T>.min(dim: Int, keepDim: Boolean): Tensor<T> = unOp {
+        ops.min(it, ops.constant(dim), Min.keepDims(keepDim))
+    }
+
+    override fun StructureND<T>.max(): T = unOp {
+        ops.max(it, ops.constant(intArrayOf()))
+    }.value()
+
+    override fun StructureND<T>.max(dim: Int, keepDim: Boolean): Tensor<T> = unOp {
+        ops.max(it, ops.constant(dim), Max.keepDims(keepDim))
+    }
+
+    override fun StructureND<T>.argMax(dim: Int, keepDim: Boolean): Tensor<Int> = IntTensorFlowOutput(
+        graph,
+        ops.math.argMax(asTensorFlow().output, ops.constant(dim), TInt32::class.java).output()
+    ).actualTensor
+
+    @OptIn(UnstableKMathAPI::class)
+    override fun export(arg: StructureND<T>): StructureND<T> =
+        if (arg is TensorFlowOutput<T, *>) arg.actualTensor else arg
+}
+
+//TODO add TensorFlow expressions
\ No newline at end of file
diff --git a/kmath-tensorflow/src/test/kotlin/space/kscience/kmath/tensorflow/DoubleTensorFlowOps.kt b/kmath-tensorflow/src/test/kotlin/space/kscience/kmath/tensorflow/DoubleTensorFlowOps.kt
new file mode 100644
index 000000000..b7a4b94b4
--- /dev/null
+++ b/kmath-tensorflow/src/test/kotlin/space/kscience/kmath/tensorflow/DoubleTensorFlowOps.kt
@@ -0,0 +1,19 @@
+package space.kscience.kmath.tensorflow
+
+import org.junit.jupiter.api.Test
+import space.kscience.kmath.nd.StructureND
+import space.kscience.kmath.nd.structureND
+import space.kscience.kmath.operations.DoubleField
+
+class DoubleTensorFlowOps {
+    @Test
+    fun basicOps() {
+        val res = DoubleField.produceWithTF {
+            val initial = structureND(2, 2) { 1.0 }
+
+            initial + (initial * 2.0)
+        }
+        println(StructureND.toString(res))
+    }
+
+}
\ No newline at end of file
diff --git a/kmath-tensors/src/commonMain/kotlin/space/kscience/kmath/tensors/api/TensorAlgebra.kt b/kmath-tensors/src/commonMain/kotlin/space/kscience/kmath/tensors/api/TensorAlgebra.kt
index 33889c2f8..86d4eaa4e 100644
--- a/kmath-tensors/src/commonMain/kotlin/space/kscience/kmath/tensors/api/TensorAlgebra.kt
+++ b/kmath-tensors/src/commonMain/kotlin/space/kscience/kmath/tensors/api/TensorAlgebra.kt
@@ -208,7 +208,7 @@ public interface TensorAlgebra<T, A : Ring<T>> : RingOpsND<T, A> {
      *
      * 3. If the first argument is 1-dimensional and the second argument is 2-dimensional,
      * a 1 is prepended to its dimension for the purpose of the matrix multiply.
-     * After the matrix multiply, the prepended dimension is removed.
+     * After the matrix multiply, depending on the implementation the prepended dimension might be removed.
      *
      * 4. If the first argument is 2-dimensional and the second argument is 1-dimensional,
      * the matrix-vector product is returned.
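
Note: the documentation tweak above reflects that the TensorFlow-backed dot keeps the dimension it prepends to a 1-D left operand (the expandDims is not undone), whereas the core tensor algebra drops it. A hedged sketch of what that means in practice; the shapes in the comments follow that reading and are not asserted anywhere in the patch:

    import space.kscience.kmath.nd.structureND
    import space.kscience.kmath.operations.DoubleField

    fun dotShapeDemo() {
        val product = DoubleField.produceWithTF {
            val vector = structureND(3) { 1.0 }    // shape [3]
            val matrix = structureND(3, 2) { 1.0 } // shape [3, 2]
            vector.dot(matrix)                     // vector is expanded to [1, 3] before matMul
        }
        println(product.shape.toList())            // likely [1, 2] here, [2] in DoubleTensorAlgebra
    }
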
diff --git a/kmath-tensors/src/commonMain/kotlin/space/kscience/kmath/tensors/core/DoubleTensorAlgebra.kt b/kmath-tensors/src/commonMain/kotlin/space/kscience/kmath/tensors/core/DoubleTensorAlgebra.kt
index dc3ec43e9..50252ad31 100644
--- a/kmath-tensors/src/commonMain/kotlin/space/kscience/kmath/tensors/core/DoubleTensorAlgebra.kt
+++ b/kmath-tensors/src/commonMain/kotlin/space/kscience/kmath/tensors/core/DoubleTensorAlgebra.kt
@@ -424,13 +424,13 @@ public open class DoubleTensorAlgebra :
             dotTo(a.as2D(), b.as2D(), res.as2D(), l, m1, n)
         }
 
-        if (penultimateDim) {
-            return resTensor.view(resTensor.shape.dropLast(2).toIntArray() + intArrayOf(resTensor.shape.last()))
+        return if (penultimateDim) {
+            resTensor.view(resTensor.shape.dropLast(2).toIntArray() + intArrayOf(resTensor.shape.last()))
+        } else if (lastDim) {
+            resTensor.view(resTensor.shape.dropLast(1).toIntArray())
+        } else {
+            resTensor
         }
-        if (lastDim) {
-            return resTensor.view(resTensor.shape.dropLast(1).toIntArray())
-        }
-        return resTensor
     }
 
     override fun diagonalEmbedding(
diff --git a/kmath-viktor/src/main/kotlin/space/kscience/kmath/viktor/ViktorFieldOpsND.kt b/kmath-viktor/src/main/kotlin/space/kscience/kmath/viktor/ViktorFieldOpsND.kt
index dc0f1f97c..1d4d6cebd 100644
--- a/kmath-viktor/src/main/kotlin/space/kscience/kmath/viktor/ViktorFieldOpsND.kt
+++ b/kmath-viktor/src/main/kotlin/space/kscience/kmath/viktor/ViktorFieldOpsND.kt
@@ -3,6 +3,8 @@
  * Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
  */
 
+@file:OptIn(PerformancePitfall::class)
+
 package space.kscience.kmath.viktor
 
 import org.jetbrains.bio.viktor.F64Array
diff --git a/settings.gradle.kts b/settings.gradle.kts
index 7108b0cb4..3001d000c 100644
--- a/settings.gradle.kts
+++ b/settings.gradle.kts
@@ -29,6 +29,7 @@ include(
     ":kmath-commons",
     ":kmath-viktor",
     ":kmath-multik",
+    ":kmath-tensorflow",
     ":kmath-optimization",
     ":kmath-stat",
     ":kmath-nd4j",
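
Note: the DoubleTensorAlgebra change above only restructures the trailing returns of dot into a single return-if chain; the documented shape behaviour is unchanged. A hedged sketch of that behaviour, assuming the same vararg structureND helper and the algebra invoke extension used elsewhere in kmath apply to DoubleTensorAlgebra as well (the function name is illustrative):

    import space.kscience.kmath.nd.structureND
    import space.kscience.kmath.operations.invoke
    import space.kscience.kmath.tensors.core.DoubleTensorAlgebra

    fun coreDotShapeDemo() = DoubleTensorAlgebra {
        val vector = structureND(3) { 1.0 }    // shape [3]
        val matrix = structureND(3, 2) { 1.0 } // shape [3, 2]
        val product = vector.dot(matrix)
        // The refactored return-if keeps the documented behaviour for case 3:
        // the prepended dimension is dropped, so the result shape should be [2].
        println(product.shape.toList())
    }
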