Feature/booleans #341

Merged
altavir merged 13 commits from feature/booleans into dev 2021-05-19 03:36:37 +03:00
7 changed files with 191 additions and 202 deletions
Showing only changes of commit 42d130f69c

View File

@@ -15,7 +15,7 @@ allprojects {
 }
 group = "space.kscience"
-version = "0.3.0-dev-8"
+version = "0.3.0-dev-9"
 }
 subprojects {

View File

@@ -11,10 +11,7 @@ import space.kscience.kmath.tensors.core.BroadcastDoubleTensorAlgebra
 // Dataset normalization
-fun main() {
-// work in context with broadcast methods
-BroadcastDoubleTensorAlgebra {
+fun main() = BroadcastDoubleTensorAlgebra { // work in context with broadcast methods
 // take dataset of 5-element vectors from normal distribution
 val dataset = randomNormal(intArrayOf(100, 5)) * 1.5 // all elements from N(0, 1.5)
@@ -43,4 +40,3 @@ fun main() {
 println("Mean of scaled:\n${datasetScaled.mean(0, false)}")
 println("Mean of scaled:\n${datasetScaled.std(0, false)}")
 }
-}
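This hunk, like the ones for the LUP, neural-network and PCA examples below, replaces the nested form fun main() { BroadcastDoubleTensorAlgebra { ... } } with an expression body that runs directly inside the algebra scope. A minimal sketch of the pattern, using a hypothetical DemoAlgebra object rather than the actual kmath API:

    // Sketch of the refactoring applied in this commit, with a hypothetical
    // DemoAlgebra standing in for BroadcastDoubleTensorAlgebra. The invoke
    // operator runs a block with the algebra as receiver and returns its result.
    object DemoAlgebra {
        fun greet(name: String): String = "Hello, $name"

        operator fun <R> invoke(block: DemoAlgebra.() -> R): R = this.block()
    }

    // Before: block body with an extra level of nesting.
    fun mainBefore() {
        DemoAlgebra {
            println(greet("tensors"))
        }
    }

    // After: expression body, same behavior, one level of nesting less.
    fun mainAfter() = DemoAlgebra {
        println(greet("tensors"))
    }

    fun main() {
        mainBefore()
        mainAfter()
    }

Because the scoped block becomes the whole function body, the extra trailing closing brace disappears, which is why each of these example hunks ends with a removed }.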

View File

@@ -6,15 +6,12 @@
 package space.kscience.kmath.tensors
 import space.kscience.kmath.operations.invoke
-import space.kscience.kmath.tensors.core.DoubleTensor
 import space.kscience.kmath.tensors.core.BroadcastDoubleTensorAlgebra
+import space.kscience.kmath.tensors.core.DoubleTensor
 // solving linear system with LUP decomposition
-fun main () {
-// work in context with linear operations
-BroadcastDoubleTensorAlgebra {
+fun main() = BroadcastDoubleTensorAlgebra {// work in context with linear operations
 // set true value of x
 val trueX = fromArray(
@@ -94,4 +91,3 @@ fun main () {
 println("True x:\n$trueX")
 println("x founded with LU method:\n$x")
 }
-}

View File

@@ -25,7 +25,7 @@ interface Layer {
 // activation layer
 open class Activation(
 val activation: (DoubleTensor) -> DoubleTensor,
-val activationDer: (DoubleTensor) -> DoubleTensor
+val activationDer: (DoubleTensor) -> DoubleTensor,
 ) : Layer {
 override fun forward(input: DoubleTensor): DoubleTensor {
 return activation(input)
@@ -62,7 +62,7 @@ class Sigmoid : Activation(::sigmoid, ::sigmoidDer)
 class Dense(
 private val inputUnits: Int,
 private val outputUnits: Int,
-private val learningRate: Double = 0.1
+private val learningRate: Double = 0.1,
 ) : Layer {
 private val weights: DoubleTensor = DoubleTensorAlgebra {
@@ -74,8 +74,8 @@ class Dense(
 private val bias: DoubleTensor = DoubleTensorAlgebra { zeros(intArrayOf(outputUnits)) }
-override fun forward(input: DoubleTensor): DoubleTensor {
-return BroadcastDoubleTensorAlgebra { (input dot weights) + bias }
+override fun forward(input: DoubleTensor): DoubleTensor = BroadcastDoubleTensorAlgebra {
+(input dot weights) + bias
 }
 override fun backward(input: DoubleTensor, outputError: DoubleTensor): DoubleTensor = DoubleTensorAlgebra {
@@ -175,8 +175,7 @@ class NeuralNetwork(private val layers: List<Layer>) {
 @OptIn(ExperimentalStdlibApi::class)
-fun main() {
-BroadcastDoubleTensorAlgebra {
+fun main() = BroadcastDoubleTensorAlgebra {
 val features = 5
 val sampleSize = 250
 val trainSize = 180
@@ -238,4 +237,3 @@ fun main() {
 println("Test accuracy:$acc")
 }
-}

View File

@@ -11,12 +11,9 @@ import space.kscience.kmath.tensors.core.BroadcastDoubleTensorAlgebra
 // simple PCA
-fun main(){
+fun main() = BroadcastDoubleTensorAlgebra { // work in context with broadcast methods
 val seed = 100500L
-// work in context with broadcast methods
-BroadcastDoubleTensorAlgebra {
 // assume x is range from 0 until 10
 val x = fromArray(
 intArrayOf(10),
@@ -75,4 +72,3 @@ fun main(){
 println("Original value:\n${dataset[n]}")
 println("Restored value:\n$restored")
 }
-}

View File

@@ -19,6 +19,9 @@ import space.kscience.kmath.structures.Buffer
 public interface ColumnarData<out T> {
 public val size: Int
+/**
+ * Provide a column by symbol or null if column with given symbol is not defined
+ */
 public operator fun get(symbol: Symbol): Buffer<T>?
 }
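The added KDoc spells out that column access is nullable: asking for a symbol that has no column yields null rather than throwing. A small self-contained sketch of that contract; Symbol, the column type and MapColumnarData here are simplified stand-ins for illustration, not the actual kmath declarations:

    // Hypothetical illustration of the documented contract: get(symbol) returns
    // the column for a known symbol and null for an unknown one.
    data class Symbol(val identity: String)

    interface ColumnarData<out T> {
        val size: Int

        // Provide a column by symbol, or null if no column is defined for it.
        operator fun get(symbol: Symbol): List<T>?
    }

    class MapColumnarData<T>(
        override val size: Int,
        private val columns: Map<Symbol, List<T>>,
    ) : ColumnarData<T> {
        override fun get(symbol: Symbol): List<T>? = columns[symbol]
    }

    fun main() {
        val x = Symbol("x")
        val y = Symbol("y")
        val data = MapColumnarData(3, mapOf(x to listOf(1.0, 2.0, 3.0)))

        println(data[x]?.first())                     // 1.0
        println(data[y] ?: "column y is not defined") // get returned null, use a fallback
    }

Returning null instead of throwing lets callers probe for optional columns with ordinary null-safety operators, as in the last line above.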

View File

@@ -5,7 +5,7 @@ pluginManagement {
 maven("https://repo.kotlin.link")
 }
-val toolsVersion = "0.9.6"
+val toolsVersion = "0.9.7"
 val kotlinVersion = "1.5.0"
 plugins {