Merge pull request #72

Kotlin 1.6
Alexander Nozik 2021-10-10 14:38:02 +03:00 committed by GitHub
commit 387ab8747e
34 changed files with 219 additions and 124 deletions

View File

@@ -4,7 +4,10 @@ plugins {
 allprojects {
     group = "space.kscience"
-    version = "0.5.1"
+    version = "0.5.2-dev-1"
+    repositories{
+        mavenCentral()
+    }
 }

 subprojects {
@@ -22,8 +25,5 @@ ksciencePublish {
 }

 apiValidation {
-    if(project.version.toString().contains("dev")) {
-        validationDisabled = true
-    }
     nonPublicMarkers.add("space.kscience.dataforge.misc.DFExperimental")
 }

View File

@@ -4,6 +4,7 @@ import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.Job
 import kotlinx.coroutines.SupervisorJob
 import space.kscience.dataforge.meta.*
+import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.misc.Named
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.provider.Provider
@@ -71,16 +72,16 @@ public open class Context internal constructor(
     private val childrenContexts = HashMap<Name, Context>()

     /**
-     * Build and register a child context
+     * Get and validate existing context or build and register a new child context.
+     * @param name the relative (tail) name of the new context. If null, uses context hash code as a marker.
      */
+    @OptIn(DFExperimental::class)
     @Synchronized
-    public fun buildContext(name: String? = null, block: ContextBuilder.() -> Unit = {}): Context {
-        val newContext = ContextBuilder(this)
-            .apply { name?.let { name(it) } }
-            .apply(block)
-            .build()
-        childrenContexts[newContext.name] = newContext
-        return newContext
+    public fun buildContext(name: Name? = null, block: ContextBuilder.() -> Unit = {}): Context {
+        val existing = name?.let { childrenContexts[name] }
+        return existing?.modify(block) ?: ContextBuilder(this, name).apply(block).build().also {
+            childrenContexts[it.name] = it
+        }
     }

     /**

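The reworked buildContext accepts an optional Name and reuses an already registered child with the same name (via modify) instead of always creating a fresh one. A minimal usage sketch; the context names are placeholders and the plugin registrations are left commented so the snippet stands alone:

    import space.kscience.dataforge.context.Context
    import space.kscience.dataforge.context.Global
    import space.kscience.dataforge.names.asName

    // Name-based child context: a second call with the same name reuses/modifies the registered child.
    val workspaceContext = Global.buildContext("workspace".asName()) {
        // plugin(SomePluginFactory) // hypothetical plugin registration
    }

    // The string-based Context(...) factory (changed further below) parses the name and delegates here.
    val testContext = Context("test") {
        // plugin(DummyPlugin()) // as in the updated ContextTest
    }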
View File

@@ -20,7 +20,7 @@ import kotlin.collections.set
 @DFBuilder
 public class ContextBuilder internal constructor(
     private val parent: Context,
-    public var name: Name? = null,
+    public val name: Name? = null,
     meta: Meta = Meta.EMPTY,
 ) {
     internal val factories = HashMap<PluginFactory<*>, Meta>()
@@ -30,10 +30,6 @@ public class ContextBuilder internal constructor(
         meta.action()
     }

-    public fun name(string: String) {
-        this.name = Name.parse(string)
-    }
-
     @OptIn(DFExperimental::class)
     private fun findPluginFactory(tag: PluginTag): PluginFactory<*> =
         parent.gatherInSequence<PluginFactory<*>>(PluginFactory.TYPE).values
@@ -95,19 +91,21 @@ public class ContextBuilder internal constructor(
 }

 /**
- * Check if current context contains all plugins required by the builder and return it it does or forks to a new context
+ * Check if current context contains all plugins required by the builder and return it if it does, or fork to a new context
 * if it does not.
 */
-public fun Context.withEnv(block: ContextBuilder.() -> Unit): Context {
+@DFExperimental
+public fun Context.modify(block: ContextBuilder.() -> Unit): Context {

     fun Context.contains(factory: PluginFactory<*>, meta: Meta): Boolean {
         val loaded = plugins[factory.tag] ?: return false
         return loaded.meta == meta
     }

-    val builder = ContextBuilder(this, name + "env", properties).apply(block)
+    val builder = ContextBuilder(this, name + "mod", properties).apply(block)

     val requiresFork = builder.factories.any { (factory, meta) ->
         !contains(factory, meta)
     } || ((properties as Meta) == builder.meta)

     return if (requiresFork) builder.build() else this
 }

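withEnv becomes Context.modify and is marked @DFExperimental: per its doc comment, it returns the receiver when the context already contains all plugins requested by the builder, and otherwise forks a child named "<parent>.mod". A sketch under that assumption; the plugin call is commented out because the factory is hypothetical:

    import space.kscience.dataforge.context.Global
    import space.kscience.dataforge.context.modify
    import space.kscience.dataforge.misc.DFExperimental

    @OptIn(DFExperimental::class)
    fun forkIfNeeded() {
        val ctx = Global.modify {
            // plugin(SomePluginFactory) // requesting a plugin that is not loaded forces a fork
        }
        // ctx is either Global itself or a forked child named "GLOBAL.mod"
    }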
View File

@@ -3,11 +3,13 @@ package space.kscience.dataforge.context
 import kotlinx.coroutines.CoroutineName
 import kotlinx.coroutines.Job
 import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.Name.Companion.parse
 import space.kscience.dataforge.names.asName
 import kotlin.coroutines.CoroutineContext
 import kotlin.native.concurrent.ThreadLocal

-internal expect val globalLoggerFactory: PluginFactory<out LogManager>
+internal expect fun getGlobalLoggerFactory(): PluginFactory<out LogManager>

 /**
  * A global root context. Closing [Global] terminates the framework.
@@ -20,4 +22,4 @@ private object GlobalContext : Context("GLOBAL".asName(), null, emptySet(), Meta
 public val Global: Context get() = GlobalContext

 public fun Context(name: String? = null, block: ContextBuilder.() -> Unit = {}): Context =
-    Global.buildContext(name, block)
+    Global.buildContext(name?.let(Name::parse), block)

View File

@@ -75,7 +75,7 @@ public class DefaultLogManager : AbstractPlugin(), LogManager {
 */
 public val Context.logger: LogManager
     get() = plugins.find(inherit = true) { it is LogManager } as? LogManager
-        ?: globalLoggerFactory(context = Global).apply { attach(Global) }
+        ?: getGlobalLoggerFactory()(context = Global).apply { attach(Global) }

 /**
  * The named proxy logger for a context member

View File

@@ -1,6 +1,7 @@
 package space.kscience.dataforge.context

 import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.names.plus
 import kotlin.reflect.KClass

@@ -84,7 +85,7 @@ public inline fun <reified T : Plugin> Context.fetch(factory: PluginFactory<T>,
     val existing = plugins[factory]
     return if (existing != null && existing.meta == meta) existing
     else {
-        buildContext {
+        buildContext(name = this@fetch.name + factory.tag.name) {
             plugin(factory, meta)
         }.plugins[factory]!!
     }

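With the new names.plus import, fetch gives the implicitly created child context a name derived from the receiver and the plugin tag instead of an anonymous one. Call sites are unchanged; a sketch using IOPlugin from this commit's io module changes:

    import space.kscience.dataforge.context.Global
    import space.kscience.dataforge.context.fetch
    import space.kscience.dataforge.io.IOPlugin

    // Returns an attached IOPlugin; if it is not loaded yet, a child context named
    // "<receiver name>.<plugin tag name>" is built to host it.
    val io: IOPlugin = Global.fetch(IOPlugin)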
View File

@@ -20,8 +20,7 @@ class ContextTest {
     @Test
     fun testPluginManager() {
-        val context = Global.buildContext {
-            name("test")
+        val context = Context("test") {
             plugin(DummyPlugin())
         }
         val members = context.gather<Name>("test")

View File

@@ -29,4 +29,4 @@ public class ConsoleLogManager : AbstractPlugin(), LogManager {
     }
 }

-internal actual val globalLoggerFactory: PluginFactory<out LogManager> = ConsoleLogManager
+internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = ConsoleLogManager

View File

@@ -31,4 +31,4 @@ public class SlfLogManager : AbstractPlugin(), LogManager {
     }
 }

-internal actual val globalLoggerFactory: PluginFactory<out LogManager> = SlfLogManager
+internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = SlfLogManager

View File

@@ -1,4 +1,4 @@
 package space.kscience.dataforge.context

-internal actual val globalLoggerFactory: PluginFactory<out LogManager> = DefaultLogManager
+internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = DefaultLogManager

View File

@@ -32,7 +32,7 @@ public interface DataSet<out T : Any> {
     */
    public suspend fun listTop(prefix: Name = Name.EMPTY): List<Name> =
        flow().map { it.name }.filter { it.startsWith(prefix) && (it.length == prefix.length + 1) }.toList()
-    // By default traverses the whole tree. Could be optimized in descendants
+    // By default, traverses the whole tree. Could be optimized in descendants

    public companion object {
        public val META_KEY: Name = "@meta".asName()
@@ -43,7 +43,7 @@ public interface DataSet<out T : Any> {
        public val EMPTY: DataSet<Nothing> = object : DataSet<Nothing> {
            override val dataType: KType = TYPE_OF_NOTHING

-            private val nothing: Nothing get() = error("this is nothing")
+            //private val nothing: Nothing get() = error("this is nothing")

            override fun flow(): Flow<NamedData<Nothing>> = emptyFlow()

View File

@@ -1,7 +1,6 @@
 package space.kscience.dataforge.data

 import kotlinx.coroutines.*
-import space.kscience.dataforge.misc.DFExperimental
 import kotlin.coroutines.CoroutineContext
 import kotlin.coroutines.EmptyCoroutineContext
@@ -67,7 +66,6 @@ public open class LazyGoal<T>(
     * If [GoalExecutionRestriction] is present in the [coroutineScope] context, the call could produce an error or a warning
     * depending on the settings.
     */
-    @DFExperimental
    override fun async(coroutineScope: CoroutineScope): Deferred<T> {
        val log = coroutineScope.coroutineContext[GoalLogger]
        // Check if context restricts goal computation

View File

@@ -3,6 +3,7 @@ package space.kscience.dataforge.data
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.filter
 import kotlinx.coroutines.flow.map
+import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.matches
@@ -16,48 +17,65 @@ import kotlin.reflect.typeOf
 */
 @Suppress("UNCHECKED_CAST")
 private fun <R : Any> Data<*>.castOrNull(type: KType): Data<R>? =
-    if (!this.type.isSubtypeOf(type)) null else object : Data<R> by (this as Data<R>) {
-        override val type: KType = type
+    if (!this.type.isSubtypeOf(type)) {
+        null
+    } else {
+        object : Data<R> by (this as Data<R>) {
+            override val type: KType = type
+        }
     }

 /**
 * Select all data matching given type and filters. Does not modify paths
+ *
+ * @param namePattern a name match pattern according to [Name.matches]
+ * @param filter additional filtering condition based on item name and meta. By default, accepts all
 */
 @OptIn(DFExperimental::class)
-@PublishedApi
-internal fun <R : Any> DataSet<*>.select(
+public fun <R : Any> DataSet<*>.select(
     type: KType,
     namePattern: Name? = null,
+    filter: (name: Name, meta: Meta) -> Boolean = { _, _ -> true }
 ): ActiveDataSet<R> = object : ActiveDataSet<R> {
     override val dataType = type

+    private fun checkDatum(name: Name, datum: Data<*>): Boolean = datum.type.isSubtypeOf(type)
+            && (namePattern == null || name.matches(namePattern))
+            && filter(name, datum.meta)
+
-    override fun flow(): Flow<NamedData<R>> = this@select.flow().filter { datum ->
-        datum.type.isSubtypeOf(type) && (namePattern == null || datum.name.matches(namePattern))
+    override fun flow(): Flow<NamedData<R>> = this@select.flow().filter {
+        checkDatum(it.name, it.data)
     }.map {
         @Suppress("UNCHECKED_CAST")
         it as NamedData<R>
     }

-    override suspend fun getData(name: Name): Data<R>? = this@select.getData(name)?.castOrNull(type)
-
-    override val updates: Flow<Name> = this@select.updates.filter {
-        val datum = this@select.getData(it)
-        datum?.type?.isSubtypeOf(type) ?: false
-    }
+    override suspend fun getData(name: Name): Data<R>? = this@select.getData(name)?.let { datum ->
+        if (checkDatum(name, datum)) datum.castOrNull(type) else null
+    }
+
+    override val updates: Flow<Name> = this@select.updates.filter {
+        val datum = this@select.getData(it) ?: return@filter false
+        checkDatum(it, datum)
+    }
 }

 /**
 * Select a single datum of the appropriate type
 */
-public inline fun <reified R : Any> DataSet<*>.select(namePattern: Name? = null): DataSet<R> =
-    select(typeOf<R>(), namePattern)
+public inline fun <reified R : Any> DataSet<*>.select(
+    namePattern: Name? = null,
+    noinline filter: (name: Name, meta: Meta) -> Boolean = { _, _ -> true }
+): DataSet<R> = select(typeOf<R>(), namePattern, filter)

+/**
+ * Select a single datum if it is present and of given [type]
+ */
 public suspend fun <R : Any> DataSet<*>.selectOne(type: KType, name: Name): NamedData<R>? =
     getData(name)?.castOrNull<R>(type)?.named(name)

-public suspend inline fun <reified R : Any> DataSet<*>.selectOne(name: Name): NamedData<R>? = selectOne(typeOf<R>(), name)
+public suspend inline fun <reified R : Any> DataSet<*>.selectOne(name: Name): NamedData<R>? =
+    selectOne(typeOf<R>(), name)

 public suspend inline fun <reified R : Any> DataSet<*>.selectOne(name: String): NamedData<R>? =
     selectOne(typeOf<R>(), Name.parse(name))

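select now takes an additional name-and-meta filter (accept-all by default) next to the optional name pattern, in both the KType-based and the reified overloads. An illustrative sketch; the meta key is hypothetical and the get/boolean accessor imports mirror the meta.get/meta.string imports visible in the io hunk below:

    import space.kscience.dataforge.data.DataSet
    import space.kscience.dataforge.data.select
    import space.kscience.dataforge.meta.boolean
    import space.kscience.dataforge.meta.get

    // Select all Int items whose meta carries validated == true; the name pattern keeps its default.
    fun selectValidated(data: DataSet<*>): DataSet<Int> = data.select<Int> { _, meta ->
        meta["validated"].boolean == true
    }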
View File

@@ -10,8 +10,7 @@ import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.string
 import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.asName
-import kotlin.native.concurrent.ThreadLocal
 import kotlin.reflect.KClass

 public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
@@ -75,10 +74,11 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
     }
 }

-@ThreadLocal
-internal val ioContext = Global.withEnv {
-    name("IO")
-    plugin(IOPlugin)
-}
-
-public val Context.io: IOPlugin get() = (if (this == Global) ioContext else this).fetch(IOPlugin)
+public val Context.io: IOPlugin
+    get() = if (this == Global) {
+        Global.buildContext("IO".asName()) {
+            plugin(IOPlugin)
+        }.fetch(IOPlugin)
+    } else {
+        fetch(IOPlugin)
+    }

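The @ThreadLocal ioContext singleton is gone: on Global, Context.io now builds (or reuses) a child context named "IO" with IOPlugin attached, while any other context simply fetches the plugin from itself. A usage sketch, assuming the extension is importable from the io package:

    import space.kscience.dataforge.context.Global
    import space.kscience.dataforge.io.io

    // On Global this creates the "IO" child context on first access; elsewhere it behaves like fetch(IOPlugin).
    val ioPlugin = Global.io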
View File

@@ -161,7 +161,7 @@ public class TaggedEnvelopeFormat(
         }
     }

-    private val default by lazy { invoke(context = ioContext) }
+    private val default by lazy { invoke() }

     override fun readPartial(input: Input): PartialEnvelope =
         default.run { readPartial(input) }

View File

@@ -193,11 +193,9 @@ public class TaglessEnvelopeFormat(
     override val name: Name = TAGLESS_ENVELOPE_TYPE.asName()

-    override fun invoke(meta: Meta, context: Context): EnvelopeFormat {
-        return TaglessEnvelopeFormat(context.io, meta)
-    }
+    override fun invoke(meta: Meta, context: Context): EnvelopeFormat = TaglessEnvelopeFormat(context.io, meta)

-    private val default by lazy { invoke(context = ioContext) }
+    private val default by lazy { invoke() }

     override fun readPartial(input: Input): PartialEnvelope =
         default.run { readPartial(input) }

View File

@@ -123,7 +123,6 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
             TODO("Not yet implemented")
         }
     }
-
 }

View File

@@ -166,9 +166,7 @@ public class EnumValue<E : Enum<*>>(override val value: E) : Value {
     override fun toString(): String = value.toString()

-    override fun equals(other: Any?): Boolean {
-        return string == (other as? Value)?.string
-    }
+    override fun equals(other: Any?): Boolean = string == (other as? Value)?.string

     override fun hashCode(): Int = value.hashCode()
 }

View File

@@ -0,0 +1,16 @@
+package space.kscience.dataforge.meta
+
+import kotlinx.serialization.json.Json
+import space.kscience.dataforge.values.string
+import kotlin.test.Test
+import kotlin.test.assertEquals
+
+class MetaSerializationTest {
+
+    @Test
+    fun singleValueDeserialization(){
+        val string = "ddd"
+        val meta = Json.decodeFromString(MetaSerializer, string)
+        assertEquals(string, meta.value?.string)
+    }
+}

View File

@@ -15,7 +15,7 @@ class BuildersKtTest {
         Workspace(Global){
             println("I am working")

-            context { name("test") }
+            //context { name("test") }

             target("testTarget") {
                 "a" put 12
@@ -28,8 +28,6 @@ class BuildersKtTest {
             val script = """
                 println("I am working")

-                context{ name("test") }
-
                 target("testTarget"){
                     "a" put 12
                 }

View File

@@ -17,7 +17,7 @@ public class SimpleWorkspace(
     private val externalTasks: Map<Name, Task<*>>,
 ) : Workspace {

-    override val data: TaskResult<*> = internalize(data, Name.EMPTY, Meta.EMPTY)
+    override val data: TaskResult<*> = wrapResult(data, Name.EMPTY, Meta.EMPTY)

     override val tasks: Map<Name, Task<*>>
         get() = context.gather<Task<*>>(Task.TYPE) + externalTasks

View File

@@ -28,7 +28,7 @@ public interface Task<out T : Any> : Described {
     public suspend fun execute(workspace: Workspace, taskName: Name, taskMeta: Meta): TaskResult<T>

     public companion object {
-        public const val TYPE: String = "workspace.stage"
+        public const val TYPE: String = "workspace.task"
     }
 }
@@ -42,6 +42,10 @@ public class TaskResultBuilder<T : Any>(
 /**
 * Create a [Task] that composes a result using [builder]. Only data from the workspace could be used.
 * Data dependency cycles are not allowed.
+ *
+ * @param resultType the type boundary for data produced by this task
+ * @param descriptor of meta accepted by this task
+ * @param builder for resulting data set
 */
 @Suppress("FunctionName")
 @DFInternal
@@ -60,9 +64,9 @@ public fun <T : Any> Task(
 ): TaskResult<T> = withContext(GoalExecutionRestriction() + workspace.goalLogger) {
     //TODO use safe builder and check for external data on add and detects cycles
     val dataset = DataTree<T>(resultType) {
-        TaskResultBuilder(workspace,taskName, taskMeta, this).apply { builder() }
+        TaskResultBuilder(workspace, taskName, taskMeta, this).apply { builder() }
     }
-    workspace.internalize(dataset, taskName, taskMeta)
+    workspace.wrapResult(dataset, taskName, taskMeta)
 }

View File

@@ -42,6 +42,6 @@ private class TaskDataImpl<out T : Any>(
 //    }
 }

-internal fun <T : Any> Workspace.internalize(data: Data<T>, name: Name, stage: Name, stageMeta: Meta): TaskData<T> =
-    TaskDataImpl(this, data, name, stage, stageMeta)
+public fun <T : Any> Workspace.wrapData(data: Data<T>, name: Name, taskName: Name, stageMeta: Meta): TaskData<T> =
+    TaskDataImpl(this, data, name, taskName, stageMeta)

View File

@@ -37,13 +37,16 @@ private class TaskResultImpl<out T : Any>(
 ) : TaskResult<T>, DataSet<T> by dataSet {

     override fun flow(): Flow<TaskData<T>> = dataSet.flow().map {
-        workspace.internalize(it, it.name, taskName, taskMeta)
+        workspace.wrapData(it, it.name, taskName, taskMeta)
     }

     override suspend fun getData(name: Name): TaskData<T>? = dataSet.getData(name)?.let {
-        workspace.internalize(it, name, taskName, taskMeta)
+        workspace.wrapData(it, name, taskName, taskMeta)
     }
 }

-internal fun <T : Any> Workspace.internalize(dataSet: DataSet<T>, stage: Name, stageMeta: Meta): TaskResult<T> =
-    TaskResultImpl(this, dataSet, stage, stageMeta)
+/**
+ * Wrap data into [TaskResult]
+ */
+public fun <T : Any> Workspace.wrapResult(dataSet: DataSet<T>, taskName: Name, taskMeta: Meta): TaskResult<T> =
+    TaskResultImpl(this, dataSet, taskName, taskMeta)

View File

@@ -1,6 +1,7 @@
 package space.kscience.dataforge.workspace

 import space.kscience.dataforge.context.ContextAware
+import space.kscience.dataforge.data.DataSet
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
 import space.kscience.dataforge.misc.Type
@@ -8,6 +9,10 @@ import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.provider.Provider

+public interface DataSelector<T: Any>{
+    public suspend fun select(workspace: Workspace, meta: Meta): DataSet<T>
+}
+
 @Type(Workspace.TYPE)
 public interface Workspace : ContextAware, Provider {
     /**

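DataSelector is the new abstraction consumed by the workspace task builders: anything that can produce a DataSet from a workspace and a meta. TaskReference and selectData later in this diff provide ready-made implementations; a minimal hand-rolled one could look like this (the Int narrowing is illustrative):

    import space.kscience.dataforge.data.DataSet
    import space.kscience.dataforge.data.select
    import space.kscience.dataforge.meta.Meta
    import space.kscience.dataforge.workspace.DataSelector
    import space.kscience.dataforge.workspace.Workspace

    // Selects all Int items from the workspace data, ignoring the task meta.
    val intData: DataSelector<Int> = object : DataSelector<Int> {
        override suspend fun select(workspace: Workspace, meta: Meta): DataSet<Int> =
            workspace.data.select()
    }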
View File

@@ -14,10 +14,22 @@ import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
 import space.kscience.dataforge.misc.DFBuilder
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.asName
 import kotlin.properties.PropertyDelegateProvider
 import kotlin.properties.ReadOnlyProperty

-public data class TaskReference<T: Any>(public val taskName: Name, public val task: Task<T>)
+public data class TaskReference<T: Any>(public val taskName: Name, public val task: Task<T>): DataSelector<T>{
+
+    @Suppress("UNCHECKED_CAST")
+    override suspend fun select(workspace: Workspace, meta: Meta): DataSet<T> {
+        if (workspace.tasks[taskName] == task) {
+            return workspace.produce(taskName, meta) as TaskResult<T>
+        } else {
+            error("Task $taskName does not belong to the workspace")
+        }
+    }
+}

 public interface TaskContainer {
     public fun registerTask(taskName: Name, task: Task<*>)
@@ -51,7 +63,7 @@ public class WorkspaceBuilder(private val parentContext: Context = Global) : Tas
     * Define a context for the workspace
     */
     public fun context(block: ContextBuilder.() -> Unit = {}) {
-        this.context = parentContext.buildContext("workspace", block)
+        this.context = parentContext.buildContext("workspace".asName(), block)
     }

     /**

View File

@@ -0,0 +1,45 @@
+package space.kscience.dataforge.workspace
+
+import space.kscience.dataforge.data.DataSet
+import space.kscience.dataforge.data.forEach
+import space.kscience.dataforge.data.map
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.toMutableMeta
+import space.kscience.dataforge.misc.DFExperimental
+import space.kscience.dataforge.names.Name
+
+/**
+ * Select data using given [selector]
+ */
+public suspend fun <T : Any> TaskResultBuilder<*>.from(
+    selector: DataSelector<T>,
+): DataSet<T> = selector.select(workspace, taskMeta)
+
+public val TaskResultBuilder<*>.allData: DataSelector<*>
+    get() = object : DataSelector<Any> {
+        override suspend fun select(workspace: Workspace, meta: Meta): DataSet<Any> = workspace.data
+    }
+
+/**
+ * Perform a lazy mapping task using given [selector] and [action]. The meta of the resulting data item
+ * combines the meta of the source item with [taskMeta] placed under the task name.
+ * TODO move selector to receiver with multi-receivers
+ */
+@DFExperimental
+public suspend inline fun <T : Any, reified R : Any> TaskResultBuilder<R>.pipeFrom(
+    selector: DataSelector<T>,
+    crossinline action: suspend (arg: T, name: Name, meta: Meta) -> R
+) {
+    from(selector).forEach { data ->
+        val meta = data.meta.toMutableMeta().apply {
+            taskName put taskMeta
+        }
+
+        val res = data.map(workspace.context.coroutineContext, meta) {
+            action(it, data.name, meta)
+        }
+
+        emit(data.name, res)
+    }
+}

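pipeFrom pairs a selector with a per-item mapping action and emits each result under the source name. A sketch adapted from the updated SimpleWorkspaceTest at the end of this diff; the task name "doubled" is hypothetical and the declaration is assumed to sit inside a WorkspaceBuilder/TaskContainer scope:

    // Lazily map every Int item selected from the workspace data and emit it under the same name.
    val doubled by task<Int> {
        pipeFrom(selectData<Int>()) { arg, name, _ ->
            workspace.logger.info { "Doubling $name" }
            arg * 2
        }
    }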
View File

@ -1,24 +0,0 @@
package space.kscience.dataforge.workspace
import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.data.select
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.Name
public suspend inline fun <reified T : Any> TaskResultBuilder<T>.from(
task: Name,
taskMeta: Meta = Meta.EMPTY,
): DataSet<T> = workspace.produce(task, taskMeta).select()
@Suppress("UNCHECKED_CAST")
public suspend fun <R : Any> TaskResultBuilder<*>.from(
reference: TaskReference<R>,
taskMeta: Meta = Meta.EMPTY,
): DataSet<R> {
if (workspace.tasks[reference.taskName] == reference.task) {
return workspace.produce(reference.taskName, taskMeta) as TaskResult<R>
} else {
throw error("Task ${reference.taskName} does not belong to the workspace")
}
}

View File

@ -1,8 +0,0 @@
package space.kscience.dataforge.workspace
import kotlinx.coroutines.runBlocking
import space.kscience.dataforge.data.DataSetBuilder
public fun WorkspaceBuilder.data(builder: suspend DataSetBuilder<Any>.() -> Unit): Unit = runBlocking {
buildData(builder)
}

View File

@@ -0,0 +1,21 @@
+package space.kscience.dataforge.workspace
+
+import kotlinx.coroutines.runBlocking
+import space.kscience.dataforge.data.DataSet
+import space.kscience.dataforge.data.DataSetBuilder
+import space.kscience.dataforge.data.select
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.names.Name
+
+public fun WorkspaceBuilder.data(builder: suspend DataSetBuilder<Any>.() -> Unit): Unit = runBlocking {
+    buildData(builder)
+}
+
+public inline fun <reified T: Any> TaskResultBuilder<*>.selectData(namePattern: Name? = null): DataSelector<T> = object : DataSelector<T> {
+    override suspend fun select(workspace: Workspace, meta: Meta): DataSet<T> = workspace.data.select(namePattern)
+}
+
+public suspend inline fun <reified T : Any> TaskResultBuilder<*>.from(
+    task: Name,
+    taskMeta: Meta = Meta.EMPTY,
+): DataSet<T> = workspace.produce(task, taskMeta).select()

View File

@@ -71,21 +71,32 @@ class SimpleWorkspaceTest {
     }

     val square by task<Int> {
-        workspace.data.select<Int>().forEach { data ->
-            if (data.meta["testFlag"].boolean == true) {
+        pipeFrom(selectData<Int>()) { arg, name, meta ->
+            if (meta["testFlag"].boolean == true) {
                 println("flag")
             }
-            val value = data.await()
-            workspace.logger.info { "Starting square on $value" }
-            emit(data.name, data.map { it * it })
+            workspace.logger.info { "Starting square on $name" }
+            arg * arg
         }
+//        workspace.data.select<Int>().forEach { data ->
+//            if (data.meta["testFlag"].boolean == true) {
+//                println("flag")
+//            }
+//            val value = data.await()
+//            workspace.logger.info { "Starting square on $value" }
+//            emit(data.name, data.map { it * it })
+//        }
     }

     val linear by task<Int> {
-        workspace.data.select<Int>().forEach { data ->
-            workspace.logger.info { "Starting linear on $data" }
-            emit(data.name, data.data.map { it * 2 + 1 })
+        pipeFrom(selectData<Int>()) { arg, name, _ ->
+            workspace.logger.info { "Starting linear on $name" }
+            arg * 2 + 1
         }
+//        workspace.data.select<Int>().forEach { data ->
+//            workspace.logger.info { "Starting linear on $data" }
+//            emit(data.name, data.data.map { it * 2 + 1 })
+//        }
     }

     val fullSquare by task<Int> {

View File

@@ -4,6 +4,6 @@ org.gradle.parallel=true
 kotlin.code.style=official
 kotlin.parallel.tasks.in.project=true
-kotlin.mpp.enableGranularSourceSetsMetadata=true
-kotlin.native.enableDependencyPropagation=false
+#kotlin.mpp.enableGranularSourceSetsMetadata=true
+#kotlin.native.enableDependencyPropagation=false
 kotlin.mpp.stability.nowarn=true

View File

@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.0-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.2-bin.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists

View File

@@ -5,7 +5,7 @@ pluginManagement {
         gradlePluginPortal()
     }

-    val toolsVersion = "0.10.2"
+    val toolsVersion = "0.10.4"

     plugins {
         id("ru.mipt.npm.gradle.project") version toolsVersion