Merge pull request #72: Kotlin 1.6

commit 387ab8747e
@@ -4,7 +4,10 @@ plugins {
allprojects {
    group = "space.kscience"
    version = "0.5.1"
    version = "0.5.2-dev-1"
    repositories{
        mavenCentral()
    }
}

subprojects {
@@ -22,8 +25,5 @@ ksciencePublish {
}

apiValidation {
    if(project.version.toString().contains("dev")) {
        validationDisabled = true
    }
    nonPublicMarkers.add("space.kscience.dataforge.misc.DFExperimental")
}
@@ -4,6 +4,7 @@ import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job
import kotlinx.coroutines.SupervisorJob
import space.kscience.dataforge.meta.*
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.provider.Provider
@@ -71,16 +72,16 @@ public open class Context internal constructor(
    private val childrenContexts = HashMap<Name, Context>()

    /**
     * Build and register a child context
     * Get and validate existing context or build and register a new child context.
     * @param name the relative (tail) name of the new context. If null, uses context hash code as a marker.
     */
    @OptIn(DFExperimental::class)
    @Synchronized
    public fun buildContext(name: String? = null, block: ContextBuilder.() -> Unit = {}): Context {
        val newContext = ContextBuilder(this)
            .apply { name?.let { name(it) } }
            .apply(block)
            .build()
        childrenContexts[newContext.name] = newContext
        return newContext
    public fun buildContext(name: Name? = null, block: ContextBuilder.() -> Unit = {}): Context {
        val existing = name?.let { childrenContexts[name] }
        return existing?.modify(block)?: ContextBuilder(this, name).apply(block).build().also {
            childrenContexts[it.name] = it
        }
    }

    /**
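
For orientation, buildContext now takes an optional Name instead of a String and reuses a registered child when one with that name already exists. A minimal sketch, mirroring the usage that appears elsewhere in this diff (the "demo" name is illustrative):

    import space.kscience.dataforge.context.DefaultLogManager
    import space.kscience.dataforge.context.Global
    import space.kscience.dataforge.names.asName

    // Build (or reuse) a child of Global named "demo" and preload a log manager plugin.
    val demoContext = Global.buildContext("demo".asName()) {
        plugin(DefaultLogManager)
    }
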
@@ -20,7 +20,7 @@ import kotlin.collections.set
@DFBuilder
public class ContextBuilder internal constructor(
    private val parent: Context,
    public var name: Name? = null,
    public val name: Name? = null,
    meta: Meta = Meta.EMPTY,
) {
    internal val factories = HashMap<PluginFactory<*>, Meta>()
@@ -30,10 +30,6 @@ public class ContextBuilder internal constructor(
        meta.action()
    }

    public fun name(string: String) {
        this.name = Name.parse(string)
    }

    @OptIn(DFExperimental::class)
    private fun findPluginFactory(tag: PluginTag): PluginFactory<*> =
        parent.gatherInSequence<PluginFactory<*>>(PluginFactory.TYPE).values
@@ -95,19 +91,21 @@ public class ContextBuilder internal constructor(
}

/**
 * Check if current context contains all plugins required by the builder and return it it does or forks to a new context
 * Check if current context contains all plugins required by the builder and return it does or forks to a new context
 * if it does not.
 */
public fun Context.withEnv(block: ContextBuilder.() -> Unit): Context {
@DFExperimental
public fun Context.modify(block: ContextBuilder.() -> Unit): Context {

    fun Context.contains(factory: PluginFactory<*>, meta: Meta): Boolean {
        val loaded = plugins[factory.tag] ?: return false
        return loaded.meta == meta
    }

    val builder = ContextBuilder(this, name + "env", properties).apply(block)
    val builder = ContextBuilder(this, name + "mod", properties).apply(block)
    val requiresFork = builder.factories.any { (factory, meta) ->
        !contains(factory, meta)
    } || ((properties as Meta) == builder.meta)

    return if (requiresFork) builder.build() else this
}
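
The renamed extension can be exercised roughly like this. A sketch only: the choice of DefaultLogManager is illustrative, and the @OptIn is needed because modify is marked @DFExperimental in the hunk above:

    import space.kscience.dataforge.context.Context
    import space.kscience.dataforge.context.DefaultLogManager
    import space.kscience.dataforge.context.modify
    import space.kscience.dataforge.misc.DFExperimental

    // modify reuses or forks the receiver depending on the requested plugins and
    // properties; the forked child is named "<name>.mod" (formerly "<name>.env").
    @OptIn(DFExperimental::class)
    fun Context.withDefaultLogging(): Context = modify {
        plugin(DefaultLogManager)
    }
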
@@ -3,11 +3,13 @@ package space.kscience.dataforge.context
import kotlinx.coroutines.CoroutineName
import kotlinx.coroutines.Job
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.Name.Companion.parse
import space.kscience.dataforge.names.asName
import kotlin.coroutines.CoroutineContext
import kotlin.native.concurrent.ThreadLocal

internal expect val globalLoggerFactory: PluginFactory<out LogManager>
internal expect fun getGlobalLoggerFactory(): PluginFactory<out LogManager>

/**
 * A global root context. Closing [Global] terminates the framework.
@@ -20,4 +22,4 @@ private object GlobalContext : Context("GLOBAL".asName(), null, emptySet(), Meta
public val Global: Context get() = GlobalContext

public fun Context(name: String? = null, block: ContextBuilder.() -> Unit = {}): Context =
    Global.buildContext(name, block)
    Global.buildContext(name?.let(Name::parse), block)
@@ -75,7 +75,7 @@ public class DefaultLogManager : AbstractPlugin(), LogManager {
 */
public val Context.logger: LogManager
    get() = plugins.find(inherit = true) { it is LogManager } as? LogManager
        ?: globalLoggerFactory(context = Global).apply { attach(Global) }
        ?: getGlobalLoggerFactory()(context = Global).apply { attach(Global) }

/**
 * The named proxy logger for a context member
@@ -1,6 +1,7 @@
package space.kscience.dataforge.context

import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.plus
import kotlin.reflect.KClass


@@ -84,7 +85,7 @@ public inline fun <reified T : Plugin> Context.fetch(factory: PluginFactory<T>,
    val existing = plugins[factory]
    return if (existing != null && existing.meta == meta) existing
    else {
        buildContext {
        buildContext(name = this@fetch.name + factory.tag.name) {
            plugin(factory, meta)
        }.plugins[factory]!!
    }
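
For reference, fetch either returns an already loaded plugin or loads it into a child context, which the change above now names after the receiver and the plugin tag. A hedged sketch using the factories visible in this diff:

    import space.kscience.dataforge.context.DefaultLogManager
    import space.kscience.dataforge.context.Global
    import space.kscience.dataforge.context.fetch

    // Returns the DefaultLogManager already attached to Global, or loads it
    // into a child context derived from Global's name plus the plugin tag.
    val logManager = Global.fetch(DefaultLogManager)
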
@@ -20,8 +20,7 @@ class ContextTest {

    @Test
    fun testPluginManager() {
        val context = Global.buildContext {
            name("test")
        val context = Context("test") {
            plugin(DummyPlugin())
        }
        val members = context.gather<Name>("test")
@@ -29,4 +29,4 @@ public class ConsoleLogManager : AbstractPlugin(), LogManager {
    }
}

internal actual val globalLoggerFactory: PluginFactory<out LogManager> = ConsoleLogManager
internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = ConsoleLogManager

@@ -31,4 +31,4 @@ public class SlfLogManager : AbstractPlugin(), LogManager {
    }
}

internal actual val globalLoggerFactory: PluginFactory<out LogManager> = SlfLogManager
internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = SlfLogManager

@@ -1,4 +1,4 @@
package space.kscience.dataforge.context


internal actual val globalLoggerFactory: PluginFactory<out LogManager> = DefaultLogManager
internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = DefaultLogManager
@@ -32,7 +32,7 @@ public interface DataSet<out T : Any> {
     */
    public suspend fun listTop(prefix: Name = Name.EMPTY): List<Name> =
        flow().map { it.name }.filter { it.startsWith(prefix) && (it.length == prefix.length + 1) }.toList()
    // By default traverses the whole tree. Could be optimized in descendants
    // By default, traverses the whole tree. Could be optimized in descendants

    public companion object {
        public val META_KEY: Name = "@meta".asName()
@@ -43,7 +43,7 @@ public interface DataSet<out T : Any> {
        public val EMPTY: DataSet<Nothing> = object : DataSet<Nothing> {
            override val dataType: KType = TYPE_OF_NOTHING

            private val nothing: Nothing get() = error("this is nothing")
            //private val nothing: Nothing get() = error("this is nothing")

            override fun flow(): Flow<NamedData<Nothing>> = emptyFlow()

@@ -1,7 +1,6 @@
package space.kscience.dataforge.data

import kotlinx.coroutines.*
import space.kscience.dataforge.misc.DFExperimental
import kotlin.coroutines.CoroutineContext
import kotlin.coroutines.EmptyCoroutineContext

@@ -67,7 +66,6 @@ public open class LazyGoal<T>(
     * If [GoalExecutionRestriction] is present in the [coroutineScope] context, the call could produce a error a warning
     * depending on the settings.
     */
    @DFExperimental
    override fun async(coroutineScope: CoroutineScope): Deferred<T> {
        val log = coroutineScope.coroutineContext[GoalLogger]
        // Check if context restricts goal computation
@@ -3,6 +3,7 @@ package space.kscience.dataforge.data
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.filter
import kotlinx.coroutines.flow.map
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.matches
@@ -16,48 +17,65 @@ import kotlin.reflect.typeOf
 */
@Suppress("UNCHECKED_CAST")
private fun <R : Any> Data<*>.castOrNull(type: KType): Data<R>? =
    if (!this.type.isSubtypeOf(type)) null else object : Data<R> by (this as Data<R>) {
        override val type: KType = type
    if (!this.type.isSubtypeOf(type)) {
        null
    } else {
        object : Data<R> by (this as Data<R>) {
            override val type: KType = type
        }
    }

/**
 * Select all data matching given type and filters. Does not modify paths
 *
 * @param namePattern a name match patter according to [Name.matches]
 * @param filter addition filtering condition based on item name and meta. By default, accepts all
 */
@OptIn(DFExperimental::class)
@PublishedApi
internal fun <R : Any> DataSet<*>.select(
public fun <R : Any> DataSet<*>.select(
    type: KType,
    namePattern: Name? = null,
    filter: (name: Name, meta: Meta) -> Boolean = { _, _ -> true }
): ActiveDataSet<R> = object : ActiveDataSet<R> {
    override val dataType = type

    private fun checkDatum(name: Name, datum: Data<*>): Boolean = datum.type.isSubtypeOf(type)
            && (namePattern == null || name.matches(namePattern))
            && filter(name, datum.meta)

    override fun flow(): Flow<NamedData<R>> = this@select.flow().filter { datum ->
        datum.type.isSubtypeOf(type) && (namePattern == null || datum.name.matches(namePattern))
    override fun flow(): Flow<NamedData<R>> = this@select.flow().filter {
        checkDatum(it.name, it.data)
    }.map {
        @Suppress("UNCHECKED_CAST")
        it as NamedData<R>
    }

    override suspend fun getData(name: Name): Data<R>? = this@select.getData(name)?.castOrNull(type)

    override val updates: Flow<Name> = this@select.updates.filter {
        val datum = this@select.getData(it)
        datum?.type?.isSubtypeOf(type) ?: false
    override suspend fun getData(name: Name): Data<R>? = this@select.getData(name)?.let { datum ->
        if (checkDatum(name, datum)) datum.castOrNull(type) else null
    }

    override val updates: Flow<Name> = this@select.updates.filter {
        val datum = this@select.getData(it) ?: return@filter false
        checkDatum(it, datum)
    }
}

/**
 * Select a single datum of the appropriate type
 */
public inline fun <reified R : Any> DataSet<*>.select(namePattern: Name? = null): DataSet<R> =
    select(typeOf<R>(), namePattern)
public inline fun <reified R : Any> DataSet<*>.select(
    namePattern: Name? = null,
    noinline filter: (name: Name, meta: Meta) -> Boolean = { _, _ -> true }
): DataSet<R> = select(typeOf<R>(), namePattern, filter)

/**
 * Select a single datum if it is present and of given [type]
 */
public suspend fun <R : Any> DataSet<*>.selectOne(type: KType, name: Name): NamedData<R>? =
    getData(name)?.castOrNull<R>(type)?.named(name)

public suspend inline fun <reified R : Any> DataSet<*>.selectOne(name: Name): NamedData<R>? = selectOne(typeOf<R>(), name)
public suspend inline fun <reified R : Any> DataSet<*>.selectOne(name: Name): NamedData<R>? =
    selectOne(typeOf<R>(), name)

public suspend inline fun <reified R : Any> DataSet<*>.selectOne(name: String): NamedData<R>? =
    selectOne(typeOf<R>(), Name.parse(name))
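
Since select is now public and carries a (name, meta) filter, callers can narrow a DataSet without touching item paths. A sketch; the "testFlag" key is illustrative and the meta extension imports are assumed to live in space.kscience.dataforge.meta:

    import space.kscience.dataforge.data.DataSet
    import space.kscience.dataforge.data.select
    import space.kscience.dataforge.meta.boolean
    import space.kscience.dataforge.meta.get

    // Keep only Int items whose meta carries testFlag == true; names are preserved.
    fun flaggedInts(data: DataSet<*>): DataSet<Int> =
        data.select<Int> { _, meta -> meta["testFlag"].boolean == true }
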
@@ -10,8 +10,7 @@ import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.get
import space.kscience.dataforge.meta.string
import space.kscience.dataforge.names.Name

import kotlin.native.concurrent.ThreadLocal
import space.kscience.dataforge.names.asName
import kotlin.reflect.KClass

public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
@@ -75,10 +74,11 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
    }
}

@ThreadLocal
internal val ioContext = Global.withEnv {
    name("IO")
    plugin(IOPlugin)
}

public val Context.io: IOPlugin get() = (if (this == Global) ioContext else this).fetch(IOPlugin)
public val Context.io: IOPlugin
    get() = if (this == Global) {
        Global.buildContext("IO".asName()) {
            plugin(IOPlugin)
        }.fetch(IOPlugin)
    } else {
        fetch(IOPlugin)
    }
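
Accessing the IO plugin is unchanged for callers; only the Global fallback path differs. A sketch, assuming the extension lives in the usual space.kscience.dataforge.io package:

    import space.kscience.dataforge.context.Global
    import space.kscience.dataforge.io.io

    // On Global this now builds a dedicated "IO" child context on demand
    // instead of relying on the removed @ThreadLocal ioContext singleton.
    val ioPlugin = Global.io
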
@@ -161,7 +161,7 @@ public class TaggedEnvelopeFormat(
        }
    }

    private val default by lazy { invoke(context = ioContext) }
    private val default by lazy { invoke() }

    override fun readPartial(input: Input): PartialEnvelope =
        default.run { readPartial(input) }

@@ -193,11 +193,9 @@ public class TaglessEnvelopeFormat(

    override val name: Name = TAGLESS_ENVELOPE_TYPE.asName()

    override fun invoke(meta: Meta, context: Context): EnvelopeFormat {
        return TaglessEnvelopeFormat(context.io, meta)
    }
    override fun invoke(meta: Meta, context: Context): EnvelopeFormat = TaglessEnvelopeFormat(context.io, meta)

    private val default by lazy { invoke(context = ioContext) }
    private val default by lazy { invoke() }

    override fun readPartial(input: Input): PartialEnvelope =
        default.run { readPartial(input) }
@@ -123,7 +123,6 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
            TODO("Not yet implemented")
        }

    }
}


@@ -166,9 +166,7 @@ public class EnumValue<E : Enum<*>>(override val value: E) : Value {

    override fun toString(): String = value.toString()

    override fun equals(other: Any?): Boolean {
        return string == (other as? Value)?.string
    }
    override fun equals(other: Any?): Boolean = string == (other as? Value)?.string

    override fun hashCode(): Int = value.hashCode()
}
@@ -0,0 +1,16 @@
package space.kscience.dataforge.meta

import kotlinx.serialization.json.Json
import space.kscience.dataforge.values.string
import kotlin.test.Test
import kotlin.test.assertEquals

class MetaSerializationTest {

    @Test
    fun singleValueDeserialization(){
        val string = "ddd"
        val meta = Json.decodeFromString(MetaSerializer, string)
        assertEquals(string, meta.value?.string)
    }
}
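
For context, the same serializer can round-trip an arbitrary Meta tree through JSON. A minimal sketch:

    import kotlinx.serialization.json.Json
    import space.kscience.dataforge.meta.Meta
    import space.kscience.dataforge.meta.MetaSerializer

    // Encode a Meta to JSON and decode it back with the serializer used in the test above.
    fun roundTrip(meta: Meta): Meta =
        Json.decodeFromString(MetaSerializer, Json.encodeToString(MetaSerializer, meta))
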
@@ -15,7 +15,7 @@ class BuildersKtTest {
        Workspace(Global){
            println("I am working")

            context { name("test") }
            //context { name("test") }

            target("testTarget") {
                "a" put 12
@@ -28,8 +28,6 @@ class BuildersKtTest {
        val script = """
            println("I am working")

            context{ name("test") }

            target("testTarget"){
                "a" put 12
            }
@@ -17,7 +17,7 @@ public class SimpleWorkspace(
    private val externalTasks: Map<Name, Task<*>>,
) : Workspace {

    override val data: TaskResult<*> = internalize(data, Name.EMPTY, Meta.EMPTY)
    override val data: TaskResult<*> = wrapResult(data, Name.EMPTY, Meta.EMPTY)

    override val tasks: Map<Name, Task<*>>
        get() = context.gather<Task<*>>(Task.TYPE) + externalTasks

@@ -28,7 +28,7 @@ public interface Task<out T : Any> : Described {
    public suspend fun execute(workspace: Workspace, taskName: Name, taskMeta: Meta): TaskResult<T>

    public companion object {
        public const val TYPE: String = "workspace.stage"
        public const val TYPE: String = "workspace.task"
    }
}

@@ -42,6 +42,10 @@ public class TaskResultBuilder<T : Any>(
/**
 * Create a [Task] that composes a result using [builder]. Only data from the workspace could be used.
 * Data dependency cycles are not allowed.
 *
 * @param resultType the type boundary for data produced by this task
 * @param descriptor of meta accepted by this task
 * @param builder for resulting data set
 */
@Suppress("FunctionName")
@DFInternal
@@ -60,9 +64,9 @@ public fun <T : Any> Task(
): TaskResult<T> = withContext(GoalExecutionRestriction() + workspace.goalLogger) {
    //TODO use safe builder and check for external data on add and detects cycles
    val dataset = DataTree<T>(resultType) {
        TaskResultBuilder(workspace,taskName, taskMeta, this).apply { builder() }
        TaskResultBuilder(workspace, taskName, taskMeta, this).apply { builder() }
    }
    workspace.internalize(dataset, taskName, taskMeta)
    workspace.wrapResult(dataset, taskName, taskMeta)
}
}

@@ -42,6 +42,6 @@ private class TaskDataImpl<out T : Any>(
    // }
}

internal fun <T : Any> Workspace.internalize(data: Data<T>, name: Name, stage: Name, stageMeta: Meta): TaskData<T> =
    TaskDataImpl(this, data, name, stage, stageMeta)
public fun <T : Any> Workspace.wrapData(data: Data<T>, name: Name, taskName: Name, stageMeta: Meta): TaskData<T> =
    TaskDataImpl(this, data, name, taskName, stageMeta)


@@ -37,13 +37,16 @@ private class TaskResultImpl<out T : Any>(
) : TaskResult<T>, DataSet<T> by dataSet {

    override fun flow(): Flow<TaskData<T>> = dataSet.flow().map {
        workspace.internalize(it, it.name, taskName, taskMeta)
        workspace.wrapData(it, it.name, taskName, taskMeta)
    }

    override suspend fun getData(name: Name): TaskData<T>? = dataSet.getData(name)?.let {
        workspace.internalize(it, name, taskName, taskMeta)
        workspace.wrapData(it, name, taskName, taskMeta)
    }
}

internal fun <T : Any> Workspace.internalize(dataSet: DataSet<T>, stage: Name, stageMeta: Meta): TaskResult<T> =
    TaskResultImpl(this, dataSet, stage, stageMeta)
/**
 * Wrap data into [TaskResult]
 */
public fun <T : Any> Workspace.wrapResult(dataSet: DataSet<T>, taskName: Name, taskMeta: Meta): TaskResult<T> =
    TaskResultImpl(this, dataSet, taskName, taskMeta)
@@ -1,6 +1,7 @@
package space.kscience.dataforge.workspace

import space.kscience.dataforge.context.ContextAware
import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MutableMeta
import space.kscience.dataforge.misc.Type
@@ -8,6 +9,10 @@ import space.kscience.dataforge.names.Name
import space.kscience.dataforge.provider.Provider


public interface DataSelector<T: Any>{
    public suspend fun select(workspace: Workspace, meta: Meta): DataSet<T>
}

@Type(Workspace.TYPE)
public interface Workspace : ContextAware, Provider {
    /**

@@ -14,10 +14,22 @@ import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
import space.kscience.dataforge.misc.DFBuilder
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
import kotlin.properties.PropertyDelegateProvider
import kotlin.properties.ReadOnlyProperty

public data class TaskReference<T: Any>(public val taskName: Name, public val task: Task<T>)
public data class TaskReference<T: Any>(public val taskName: Name, public val task: Task<T>): DataSelector<T>{

    @Suppress("UNCHECKED_CAST")
    override suspend fun select(workspace: Workspace, meta: Meta): DataSet<T> {
        if (workspace.tasks[taskName] == task) {
            return workspace.produce(taskName, meta) as TaskResult<T>
        } else {
            error("Task $taskName does not belong to the workspace")
        }
    }

}

public interface TaskContainer {
    public fun registerTask(taskName: Name, task: Task<*>)
@@ -51,7 +63,7 @@ public class WorkspaceBuilder(private val parentContext: Context = Global) : Tas
     * Define a context for the workspace
     */
    public fun context(block: ContextBuilder.() -> Unit = {}) {
        this.context = parentContext.buildContext("workspace", block)
        this.context = parentContext.buildContext("workspace".asName(), block)
    }

    /**
@@ -0,0 +1,45 @@
package space.kscience.dataforge.workspace

import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.data.forEach
import space.kscience.dataforge.data.map
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.toMutableMeta
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name

/**
 * Select data using given [selector]
 */
public suspend fun <T : Any> TaskResultBuilder<*>.from(
    selector: DataSelector<T>,
): DataSet<T> = selector.select(workspace, taskMeta)

public val TaskResultBuilder<*>.allData: DataSelector<*>
    get() = object : DataSelector<Any> {
        override suspend fun select(workspace: Workspace, meta: Meta): DataSet<Any> = workspace.data
    }

/**
 * Perform a lazy mapping task using given [selector] and [action]. The meta of resulting
 * TODO move selector to receiver with multi-receivers
 */
@DFExperimental
public suspend inline fun <T : Any, reified R : Any> TaskResultBuilder<R>.pipeFrom(
    selector: DataSelector<T>,
    crossinline action: suspend (arg: T, name: Name, meta: Meta) -> R
) {
    from(selector).forEach { data ->
        val meta = data.meta.toMutableMeta().apply {
            taskName put taskMeta
        }

        val res = data.map(workspace.context.coroutineContext, meta) {
            action(it, data.name, meta)
        }

        emit(data.name, res)
    }
}
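
A sketch of how a task body can combine a DataSelector with pipeFrom; the inline selector mirrors the selectData test helper added further down, and the import paths are assumptions based on the surrounding packages:

    import space.kscience.dataforge.data.DataSet
    import space.kscience.dataforge.data.select
    import space.kscience.dataforge.meta.Meta
    import space.kscience.dataforge.misc.DFExperimental
    import space.kscience.dataforge.workspace.DataSelector
    import space.kscience.dataforge.workspace.TaskResultBuilder
    import space.kscience.dataforge.workspace.Workspace
    import space.kscience.dataforge.workspace.pipeFrom

    // A selector that picks every Int datum currently present in the workspace.
    val intData: DataSelector<Int> = object : DataSelector<Int> {
        override suspend fun select(workspace: Workspace, meta: Meta): DataSet<Int> =
            workspace.data.select<Int>()
    }

    // A task body that lazily squares each selected datum: pipeFrom wires the
    // selection, the per-datum mapping, and the emission of results together.
    @OptIn(DFExperimental::class)
    suspend fun TaskResultBuilder<Int>.squareAll() {
        pipeFrom(intData) { arg, _, _ -> arg * arg }
    }
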
@@ -1,24 +0,0 @@
package space.kscience.dataforge.workspace

import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.data.select
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.Name

public suspend inline fun <reified T : Any> TaskResultBuilder<T>.from(
    task: Name,
    taskMeta: Meta = Meta.EMPTY,
): DataSet<T> = workspace.produce(task, taskMeta).select()


@Suppress("UNCHECKED_CAST")
public suspend fun <R : Any> TaskResultBuilder<*>.from(
    reference: TaskReference<R>,
    taskMeta: Meta = Meta.EMPTY,
): DataSet<R> {
    if (workspace.tasks[reference.taskName] == reference.task) {
        return workspace.produce(reference.taskName, taskMeta) as TaskResult<R>
    } else {
        throw error("Task ${reference.taskName} does not belong to the workspace")
    }
}
@@ -1,8 +0,0 @@
package space.kscience.dataforge.workspace

import kotlinx.coroutines.runBlocking
import space.kscience.dataforge.data.DataSetBuilder

public fun WorkspaceBuilder.data(builder: suspend DataSetBuilder<Any>.() -> Unit): Unit = runBlocking {
    buildData(builder)
}
@@ -0,0 +1,21 @@
package space.kscience.dataforge.workspace

import kotlinx.coroutines.runBlocking
import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.data.DataSetBuilder
import space.kscience.dataforge.data.select
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.Name

public fun WorkspaceBuilder.data(builder: suspend DataSetBuilder<Any>.() -> Unit): Unit = runBlocking {
    buildData(builder)
}

public inline fun <reified T: Any> TaskResultBuilder<*>.selectData(namePattern: Name? = null): DataSelector<T> = object : DataSelector<T> {
    override suspend fun select(workspace: Workspace, meta: Meta): DataSet<T> = workspace.data.select(namePattern)
}

public suspend inline fun <reified T : Any> TaskResultBuilder<*>.from(
    task: Name,
    taskMeta: Meta = Meta.EMPTY,
): DataSet<T> = workspace.produce(task, taskMeta).select()
@@ -71,21 +71,32 @@ class SimpleWorkspaceTest {
    }

    val square by task<Int> {
        workspace.data.select<Int>().forEach { data ->
            if (data.meta["testFlag"].boolean == true) {
        pipeFrom(selectData<Int>()) { arg, name, meta ->
            if (meta["testFlag"].boolean == true) {
                println("flag")
            }
            val value = data.await()
            workspace.logger.info { "Starting square on $value" }
            emit(data.name, data.map { it * it })
            workspace.logger.info { "Starting square on $name" }
            arg * arg
        }
        // workspace.data.select<Int>().forEach { data ->
        // if (data.meta["testFlag"].boolean == true) {
        // println("flag")
        // }
        // val value = data.await()
        // workspace.logger.info { "Starting square on $value" }
        // emit(data.name, data.map { it * it })
        // }
    }

    val linear by task<Int> {
        workspace.data.select<Int>().forEach { data ->
            workspace.logger.info { "Starting linear on $data" }
            emit(data.name, data.data.map { it * 2 + 1 })
        pipeFrom(selectData<Int>()) { arg, name, _ ->
            workspace.logger.info { "Starting linear on $name" }
            arg * 2 + 1
        }
        // workspace.data.select<Int>().forEach { data ->
        // workspace.logger.info { "Starting linear on $data" }
        // emit(data.name, data.data.map { it * 2 + 1 })
        // }
    }

    val fullSquare by task<Int> {
@@ -4,6 +4,6 @@ org.gradle.parallel=true
kotlin.code.style=official
kotlin.parallel.tasks.in.project=true

kotlin.mpp.enableGranularSourceSetsMetadata=true
kotlin.native.enableDependencyPropagation=false
#kotlin.mpp.enableGranularSourceSetsMetadata=true
#kotlin.native.enableDependencyPropagation=false
kotlin.mpp.stability.nowarn=true
gradle/wrapper/gradle-wrapper.properties (vendored)
@@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.0-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-7.2-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
@@ -5,7 +5,7 @@ pluginManagement {
        gradlePluginPortal()
    }

    val toolsVersion = "0.10.2"
    val toolsVersion = "0.10.4"

    plugins {
        id("ru.mipt.npm.gradle.project") version toolsVersion