Feature/booleans #341
@@ -15,7 +15,7 @@ allprojects {
     }
 
     group = "space.kscience"
-    version = "0.3.0-dev-8"
+    version = "0.3.0-dev-9"
 }
 
 subprojects {
@@ -11,10 +11,7 @@ import space.kscience.kmath.tensors.core.BroadcastDoubleTensorAlgebra
 
 // Dataset normalization
 
-fun main() {
-
-    // work in context with broadcast methods
-    BroadcastDoubleTensorAlgebra {
+fun main() = BroadcastDoubleTensorAlgebra { // work in context with broadcast methods
     // take dataset of 5-element vectors from normal distribution
     val dataset = randomNormal(intArrayOf(100, 5)) * 1.5 // all elements from N(0, 1.5)
 
@@ -42,5 +39,4 @@ fun main() {
 
     println("Mean of scaled:\n${datasetScaled.mean(0, false)}")
     println("Mean of scaled:\n${datasetScaled.std(0, false)}")
-    }
 }
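Note: the change above relies on kmath's convention of invoking an algebra object with a lambda, so the tensor operations resolve against the algebra receiver and main collapses to a single expression body, which is why the later hunks in this diff drop one trailing brace. A minimal sketch of the same pattern, assuming only the API already visible in this diff (randomNormal, mean and the broadcasting minus; exact signatures may differ between kmath versions):

    import space.kscience.kmath.operations.invoke
    import space.kscience.kmath.tensors.core.BroadcastDoubleTensorAlgebra

    // The algebra object is called with a lambda; inside it, tensor operations
    // such as randomNormal and mean are available on the algebra receiver.
    fun main() = BroadcastDoubleTensorAlgebra {
        val samples = randomNormal(intArrayOf(10, 3))       // 10 samples, 3 features
        val centered = samples - samples.mean(0, false)     // column means broadcast over rows
        println("Column means after centering:\n${centered.mean(0, false)}")
    }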
@@ -6,15 +6,12 @@
 package space.kscience.kmath.tensors
 
 import space.kscience.kmath.operations.invoke
-import space.kscience.kmath.tensors.core.DoubleTensor
 import space.kscience.kmath.tensors.core.BroadcastDoubleTensorAlgebra
+import space.kscience.kmath.tensors.core.DoubleTensor
 
 // solving linear system with LUP decomposition
 
-fun main () {
-
-    // work in context with linear operations
-    BroadcastDoubleTensorAlgebra {
+fun main() = BroadcastDoubleTensorAlgebra {// work in context with linear operations
 
     // set true value of x
     val trueX = fromArray(
@@ -68,7 +65,7 @@ fun main () {
     fun solveLT(l: DoubleTensor, b: DoubleTensor): DoubleTensor {
         val n = l.shape[0]
         val x = zeros(intArrayOf(n))
-        for (i in 0 until n){
+        for (i in 0 until n) {
             x[intArrayOf(i)] = (b[intArrayOf(i)] - l[i].dot(x).value()) / l[intArrayOf(i, i)]
         }
         return x
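Note: solveLT, shown above as context, is plain forward substitution for a lower triangular system lx = b: each x[i] is b[i] minus the dot product of row i with the entries already computed, divided by the diagonal element l[i, i]. A self-contained sketch of the same computation on plain Kotlin arrays, with a hypothetical helper name and made-up example values:

    // Forward substitution on plain arrays: solves l * x = b for lower-triangular l.
    fun solveLowerTriangular(l: Array<DoubleArray>, b: DoubleArray): DoubleArray {
        val n = b.size
        val x = DoubleArray(n)
        for (i in 0 until n) {
            var sum = 0.0
            for (j in 0 until i) sum += l[i][j] * x[j]   // only already-computed entries contribute
            x[i] = (b[i] - sum) / l[i][i]
        }
        return x
    }

    fun main() {
        // 2x2 example: [[2, 0], [1, 3]] * x = [4, 7]  =>  x = [2.0, 1.666...]
        val l = arrayOf(doubleArrayOf(2.0, 0.0), doubleArrayOf(1.0, 3.0))
        println(solveLowerTriangular(l, doubleArrayOf(4.0, 7.0)).toList())
    }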
@@ -93,5 +90,4 @@ fun main () {
 
     println("True x:\n$trueX")
     println("x founded with LU method:\n$x")
-    }
 }
@@ -25,7 +25,7 @@ interface Layer {
 // activation layer
 open class Activation(
     val activation: (DoubleTensor) -> DoubleTensor,
-    val activationDer: (DoubleTensor) -> DoubleTensor
+    val activationDer: (DoubleTensor) -> DoubleTensor,
 ) : Layer {
     override fun forward(input: DoubleTensor): DoubleTensor {
         return activation(input)
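Note: the only change in this hunk, and in the next one, is a trailing comma after the last constructor parameter, allowed since Kotlin 1.4. It keeps future diffs to a single line when a parameter is appended. A tiny illustration with a hypothetical class:

    class Point(
        val x: Double,
        val y: Double,   // trailing comma: adding another parameter later touches only one line
    )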
@@ -62,7 +62,7 @@ class Sigmoid : Activation(::sigmoid, ::sigmoidDer)
 class Dense(
     private val inputUnits: Int,
     private val outputUnits: Int,
-    private val learningRate: Double = 0.1
+    private val learningRate: Double = 0.1,
 ) : Layer {
 
     private val weights: DoubleTensor = DoubleTensorAlgebra {
@@ -74,8 +74,8 @@ class Dense(
 
     private val bias: DoubleTensor = DoubleTensorAlgebra { zeros(intArrayOf(outputUnits)) }
 
-    override fun forward(input: DoubleTensor): DoubleTensor {
-        return BroadcastDoubleTensorAlgebra { (input dot weights) + bias }
+    override fun forward(input: DoubleTensor): DoubleTensor = BroadcastDoubleTensorAlgebra {
+        (input dot weights) + bias
     }
 
     override fun backward(input: DoubleTensor, outputError: DoubleTensor): DoubleTensor = DoubleTensorAlgebra {
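Note: Dense.forward is the usual affine map of a dense layer, output = input dot weights + bias, now written as an expression body inside the broadcasting algebra so the bias vector is added to every row. A minimal sketch of the same computation on plain arrays, with a hypothetical helper name, to make the shapes explicit:

    // Dense-layer forward pass on plain arrays: (batch x in) times (in x out), plus a bias of length out.
    fun denseForward(input: Array<DoubleArray>, weights: Array<DoubleArray>, bias: DoubleArray): Array<DoubleArray> =
        Array(input.size) { row ->
            DoubleArray(bias.size) { j ->
                var acc = bias[j]                                   // bias broadcast to every row
                for (k in weights.indices) acc += input[row][k] * weights[k][j]
                acc
            }
        }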
@@ -175,8 +175,7 @@ class NeuralNetwork(private val layers: List<Layer>) {
 
 
 @OptIn(ExperimentalStdlibApi::class)
-fun main() {
-    BroadcastDoubleTensorAlgebra {
+fun main() = BroadcastDoubleTensorAlgebra {
     val features = 5
     val sampleSize = 250
     val trainSize = 180
@@ -237,5 +236,4 @@ fun main() {
     val acc = accuracy(yTest, predictionLabels)
     println("Test accuracy:$acc")
 
-    }
 }
@@ -11,12 +11,9 @@ import space.kscience.kmath.tensors.core.BroadcastDoubleTensorAlgebra
 
 // simple PCA
 
-fun main(){
+fun main() = BroadcastDoubleTensorAlgebra { // work in context with broadcast methods
     val seed = 100500L
 
-    // work in context with broadcast methods
-    BroadcastDoubleTensorAlgebra {
-
     // assume x is range from 0 until 10
     val x = fromArray(
         intArrayOf(10),
@@ -74,5 +71,4 @@ fun main(){
     val restored = (datasetReduced[n] dot v.view(intArrayOf(1, 2))) * std + mean
     println("Original value:\n${dataset[n]}")
     println("Restored value:\n$restored")
-    }
 }
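Note: the restored value above undoes the PCA projection: the component scores are multiplied by the component matrix and the per-feature standardization is reversed with the saved std and mean. A small numeric sketch of the same idea with one component and two features, all values made up:

    fun main() {
        val v = doubleArrayOf(0.6, 0.8)         // principal axis over 2 features (hypothetical)
        val z = 2.5                              // score of one sample along that axis
        val std = doubleArrayOf(1.5, 3.0)        // per-feature std used when scaling the dataset
        val mean = doubleArrayOf(10.0, 20.0)     // per-feature mean used when centering the dataset
        val restored = DoubleArray(2) { i -> z * v[i] * std[i] + mean[i] }
        println(restored.toList())               // [12.25, 26.0], an approximation of the original sample
    }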
@@ -19,6 +19,9 @@ import space.kscience.kmath.structures.Buffer
 public interface ColumnarData<out T> {
     public val size: Int
 
+    /**
+     * Provide a column by symbol or null if column with given symbol is not defined
+     */
     public operator fun get(symbol: Symbol): Buffer<T>?
 }
 
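Note: the documented get operator is the whole contract of ColumnarData: return the Buffer backing a Symbol, or null when that column is not defined. A minimal map-backed sketch of an implementation, assuming only the Symbol and Buffer types already imported in this file (imports omitted because their packages vary between kmath versions):

    // Hypothetical map-backed implementation of the interface documented above.
    class MapColumnarData<T>(private val columns: Map<Symbol, Buffer<T>>) : ColumnarData<T> {
        override val size: Int = columns.values.firstOrNull()?.size ?: 0

        // The column for the symbol, or null if no such column exists.
        override fun get(symbol: Symbol): Buffer<T>? = columns[symbol]
    }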
@@ -5,7 +5,7 @@ pluginManagement {
         maven("https://repo.kotlin.link")
     }
 
-    val toolsVersion = "0.9.6"
+    val toolsVersion = "0.9.7"
     val kotlinVersion = "1.5.0"
 
     plugins {