Remove KClass from dataforge-data

This commit is contained in:
Alexander Nozik 2021-02-07 18:15:54 +03:00
parent 03337f00f0
commit 1970243785
54 changed files with 223 additions and 172 deletions

View File

@ -1,7 +1,16 @@
package hep.dataforge.context
import hep.dataforge.meta.*
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.meta.seal
import hep.dataforge.misc.DFBuilder
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.toName
import kotlin.collections.HashMap
import kotlin.collections.component1
import kotlin.collections.component2
import kotlin.collections.forEach
import kotlin.collections.set
/**
* A convenience builder for context

View File

@ -1,6 +1,6 @@
package hep.dataforge.context
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.Name
import hep.dataforge.names.plus
import hep.dataforge.provider.Provider

View File

@ -1,12 +1,12 @@
package hep.dataforge.properties
import hep.dataforge.meta.Config
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.get
import hep.dataforge.meta.set
import hep.dataforge.meta.transformations.MetaConverter
import hep.dataforge.meta.transformations.nullableItemToObject
import hep.dataforge.meta.transformations.nullableObjectToMetaItem
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.Name
@DFExperimental

View File

@ -1,6 +1,6 @@
package hep.dataforge.properties
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.flow.MutableStateFlow

View File

@ -1,6 +1,6 @@
package hep.dataforge.properties
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import org.w3c.dom.HTMLInputElement
@DFExperimental

View File

@ -16,7 +16,6 @@
package hep.dataforge.descriptors
import hep.dataforge.meta.DFExperimental
import hep.dataforge.values.ValueType
import kotlin.reflect.KClass

View File

@ -2,7 +2,7 @@ package hep.dataforge.provider
import hep.dataforge.context.Context
import hep.dataforge.context.gather
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import hep.dataforge.misc.Type
import hep.dataforge.names.Name
import kotlin.reflect.KClass

View File

@ -1,10 +1,9 @@
package hep.dataforge.actions
import hep.dataforge.data.DataSet
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.misc.DFExperimental
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.flow.*
/**
* A simple data transformation on a data node. Actions should avoid doing actual dependency evaluation in [execute].

View File

@ -1,7 +1,13 @@
package hep.dataforge.actions
import hep.dataforge.data.*
import hep.dataforge.meta.*
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.meta.seal
import hep.dataforge.meta.toMutableMeta
import hep.dataforge.misc.DFBuilder
import hep.dataforge.misc.DFExperimental
import hep.dataforge.misc.DFInternal
import hep.dataforge.names.Name
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.flow.collect
@ -34,7 +40,6 @@ public class MapActionBuilder<T, R>(public var name: Name, public var meta: Meta
}
}
@PublishedApi
internal class MapAction<in T : Any, out R : Any>(
private val outputType: KType,
@ -63,7 +68,9 @@ internal class MapAction<in T : Any, out R : Any>(
//getting new meta
val newMeta = builder.meta.seal()
val newData = data.map(outputType, meta = newMeta) { builder.result(env, it) }
@OptIn(DFInternal::class) val newData = Data(outputType, newMeta, dependencies = listOf(data)) {
builder.result(env, data.await())
}
//setting the data node
return newData.named(newName)
}
@ -85,6 +92,10 @@ internal class MapAction<in T : Any, out R : Any>(
}
/**
* A one-to-one mapping action
*/
@DFExperimental
@Suppress("FunctionName")
public inline fun <T : Any, reified R : Any> Action.Companion.map(
noinline builder: MapActionBuilder<T, R>.() -> Unit,

View File

@ -1,20 +1,21 @@
package hep.dataforge.actions
import hep.dataforge.data.*
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.misc.DFBuilder
import hep.dataforge.misc.DFExperimental
import hep.dataforge.misc.DFInternal
import hep.dataforge.names.Name
import hep.dataforge.names.toName
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.flow
import kotlinx.coroutines.flow.fold
import kotlin.reflect.KClass
import kotlin.reflect.KType
import kotlin.reflect.typeOf
@DFExperimental
public class JoinGroup<T : Any, R : Any>(public var name: String, internal val set: DataSet<T>) {
public var meta: MetaBuilder = MetaBuilder()
@ -27,9 +28,9 @@ public class JoinGroup<T : Any, R : Any>(public var name: String, internal val s
}
@DFExperimental
@DFBuilder
public class ReduceGroupBuilder<T : Any, R : Any>(
private val inputType: KClass<out T>,
private val inputType: KType,
private val scope: CoroutineScope,
public val actionMeta: Meta,
) {
@ -40,7 +41,7 @@ public class ReduceGroupBuilder<T : Any, R : Any>(
*/
public fun byValue(tag: String, defaultTag: String = "@default", action: JoinGroup<T, R>.() -> Unit) {
groupRules += { node ->
GroupRule.byMetaValue(scope, tag, defaultTag).gather(inputType, node).map {
GroupRule.byMetaValue(scope, tag, defaultTag).gather(node).map {
JoinGroup<T, R>(it.key, it.value).apply(action)
}
}
@ -73,16 +74,17 @@ public class ReduceGroupBuilder<T : Any, R : Any>(
}
@DFExperimental
public class ReduceAction<T : Any, R : Any>(
private val inputType: KClass<out T>,
@PublishedApi
internal class ReduceAction<T : Any, R : Any>(
private val inputType: KType,
outputType: KType,
private val action: ReduceGroupBuilder<T, R>.() -> Unit,
) : CachingAction<T, R>(outputType) {
//TODO optimize reduction. Currently the whole action recalculates on push
override fun CoroutineScope.transform(set: DataSet<T>, meta: Meta, key: Name): Flow<NamedData<R>> = flow {
ReduceGroupBuilder<T, R>(inputType,this@transform, meta).apply(action).buildGroups(set).forEach { group ->
ReduceGroupBuilder<T, R>(inputType, this@transform, meta).apply(action).buildGroups(set).forEach { group ->
val dataFlow: Map<Name, Data<T>> = group.set.flow().fold(HashMap()) { acc, value ->
acc.apply {
acc[value.name] = value.data
@ -94,8 +96,7 @@ public class ReduceAction<T : Any, R : Any>(
val groupMeta = group.meta
val env = ActionEnv(groupName.toName(), groupMeta, meta)
val res: LazyData<R> = dataFlow.reduceToData(
@OptIn(DFInternal::class) val res: Data<R> = dataFlow.reduceToData(
outputType,
meta = groupMeta
) { group.result.invoke(env, it) }
@ -105,4 +106,11 @@ public class ReduceAction<T : Any, R : Any>(
}
}
public operator fun <T> Map<Name, T>.get(name: String): T? = get(name.toName())
/**
* A many-to-one reduce action: groups incoming data by meta values and reduces each group to a single result
*/
@DFExperimental
@Suppress("FunctionName")
public inline fun <reified T : Any, reified R : Any> Action.Companion.reduce(
noinline builder: ReduceGroupBuilder<T, R>.() -> Unit,
): Action<T, R> = ReduceAction(typeOf<T>(), typeOf<R>(), builder)

View File

@ -5,13 +5,17 @@ import hep.dataforge.meta.Laminate
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.meta.toMutableMeta
import hep.dataforge.misc.DFExperimental
import hep.dataforge.misc.DFInternal
import hep.dataforge.names.Name
import hep.dataforge.names.toName
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.FlowPreview
import kotlinx.coroutines.flow.*
import kotlinx.coroutines.launch
import kotlin.collections.set
import kotlin.reflect.KType
import kotlin.reflect.typeOf
public class SplitBuilder<T : Any, R : Any>(public val name: Name, public val meta: Meta) {
@ -39,11 +43,13 @@ public class SplitBuilder<T : Any, R : Any>(public val name: Name, public val me
/**
* Action that splits each incoming element into a number of fragments defined in builder
*/
@PublishedApi
internal class SplitAction<T : Any, R : Any>(
private val outputType: KType,
private val action: SplitBuilder<T, R>.() -> Unit,
) : Action<T, R> {
@OptIn(FlowPreview::class)
override suspend fun execute(
dataSet: DataSet<T>,
meta: Meta,
@ -59,7 +65,10 @@ internal class SplitAction<T : Any, R : Any>(
// apply individual fragment rules to result
return split.fragments.entries.asFlow().map { (fragmentName, rule) ->
val env = SplitBuilder.FragmentRule<T, R>(fragmentName, laminate.toMutableMeta()).apply(rule)
data.map<R>(outputType, meta = env.meta) { env.result(it) }.named(fragmentName)
//data.map<R>(outputType, meta = env.meta) { env.result(it) }.named(fragmentName)
@OptIn(DFInternal::class) Data(outputType, meta = env.meta, dependencies = listOf(data)) {
env.result(data.await())
}.named(fragmentName)
}
}
@ -75,4 +84,13 @@ internal class SplitAction<T : Any, R : Any>(
}
}
}
}
}
/**
* Action that splits each incoming element into a number of fragments defined in builder
*/
@DFExperimental
@Suppress("FunctionName")
public inline fun <T : Any, reified R : Any> Action.Companion.split(
noinline builder: SplitBuilder<T, R>.() -> Unit,
): Action<T, R> = SplitAction(typeOf<R>(), builder)

View File

@ -1,7 +1,6 @@
package hep.dataforge.data
import hep.dataforge.actions.Action
import hep.dataforge.actions.NamedData
import hep.dataforge.meta.Meta
import hep.dataforge.names.Name
import hep.dataforge.names.startsWith

View File

@ -1,6 +1,6 @@
package hep.dataforge.data
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job
import kotlin.coroutines.CoroutineContext

View File

@ -3,6 +3,7 @@ package hep.dataforge.data
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaRepr
import hep.dataforge.meta.isEmpty
import hep.dataforge.misc.DFInternal
import hep.dataforge.misc.Type
import kotlinx.coroutines.*
import kotlin.coroutines.CoroutineContext
@ -35,17 +36,21 @@ public interface Data<out T : Any> : Goal<T>, MetaRepr {
public companion object {
public const val TYPE: String = "data"
/**
* The type that can't have any subtypes
*/
internal val TYPE_OF_NOTHING: KType = typeOf<Unit>()
public inline fun <reified T : Any> static(
value: T,
meta: Meta = Meta.EMPTY,
): Data<T> = StaticData(typeOf<T>(),value, meta)
): Data<T> = StaticData(typeOf<T>(), value, meta)
/**
* An empty data containing only meta
*/
public fun empty(meta: Meta): Data<Nothing> = object : Data<Nothing> {
private val nothing: Nothing get() = error("this is nothing")
override val type: KType = this::nothing.returnType
override val type: KType = TYPE_OF_NOTHING
override val meta: Meta = meta
override val dependencies: Collection<Goal<*>> = emptyList()
override val deferred: Deferred<Nothing>
@ -59,13 +64,17 @@ public interface Data<out T : Any> : Goal<T>, MetaRepr {
}
}
public class LazyData<T : Any>(
/**
* A lazily computed variant of [Data] based on [LazyGoal]
* One must ensure that a proper [type] is provided, so this constructor should not be used directly.
*/
private class LazyData<T : Any>(
override val type: KType,
override val meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext,
additionalContext: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
block: suspend () -> T,
) : Data<T>, LazyGoal<T>(context, dependencies, block)
) : Data<T>, LazyGoal<T>(additionalContext, dependencies, block)
public class StaticData<T : Any>(
override val type: KType,
@ -74,6 +83,7 @@ public class StaticData<T : Any>(
) : Data<T>, StaticGoal<T>(value)
@Suppress("FunctionName")
@DFInternal
public fun <T : Any> Data(
type: KType,
meta: Meta = Meta.EMPTY,
@ -82,6 +92,7 @@ public fun <T : Any> Data(
block: suspend () -> T,
): Data<T> = LazyData(type, meta, context, dependencies, block)
@OptIn(DFInternal::class)
@Suppress("FunctionName")
public inline fun <reified T : Any> Data(
meta: Meta = Meta.EMPTY,

View File

@ -1,7 +1,6 @@
package hep.dataforge.data
import hep.dataforge.actions.NamedData
import hep.dataforge.actions.named
import hep.dataforge.data.Data.Companion.TYPE_OF_NOTHING
import hep.dataforge.meta.Meta
import hep.dataforge.meta.set
import hep.dataforge.names.*
@ -43,7 +42,7 @@ public interface DataSet<out T : Any> {
* An empty [DataSet] that suits all types
*/
public val EMPTY: DataSet<Nothing> = object : DataSet<Nothing> {
override val dataType: KType = this::nothing.returnType
override val dataType: KType = TYPE_OF_NOTHING
private val nothing: Nothing get() = error("this is nothing")

View File

@ -1,17 +1,19 @@
package hep.dataforge.data
import hep.dataforge.actions.NamedData
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.Name
import hep.dataforge.names.plus
import hep.dataforge.names.toName
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.collect
import kotlin.reflect.KType
public interface DataSetBuilder<in T : Any> {
public val dataType: KType
/**
* Remove all data items starting with [name]
*/
@ -50,8 +52,12 @@ public interface DataSetBuilder<in T : Any> {
public suspend infix fun String.put(block: suspend DataSetBuilder<T>.() -> Unit): Unit = emit(toName(), block)
}
private class SubSetBuilder<in T : Any>(private val parent: DataSetBuilder<T>, private val branch: Name) :
DataSetBuilder<T> {
private class SubSetBuilder<in T : Any>(
private val parent: DataSetBuilder<T>,
private val branch: Name,
) : DataSetBuilder<T> {
override val dataType: KType get() = parent.dataType
override suspend fun remove(name: Name) {
parent.remove(branch + name)
}
@ -101,7 +107,7 @@ public suspend inline fun <reified T : Any> DataSetBuilder<T>.produce(
name: Name,
meta: Meta = Meta.EMPTY,
noinline producer: suspend () -> T,
){
) {
val data = Data(meta, block = producer)
emit(name, data)
}
@ -109,19 +115,17 @@ public suspend inline fun <reified T : Any> DataSetBuilder<T>.produce(
/**
* Emit a static data with the fixed value
*/
public suspend fun <T : Any> DataSetBuilder<T>.static(name: String, data: T, meta: Meta = Meta.EMPTY): Unit =
public suspend inline fun <reified T : Any> DataSetBuilder<T>.static(name: String, data: T, meta: Meta = Meta.EMPTY): Unit =
emit(name, Data.static(data, meta))
public suspend fun <T : Any> DataSetBuilder<T>.static(name: Name, data: T, meta: Meta = Meta.EMPTY): Unit =
public suspend inline fun <reified T : Any> DataSetBuilder<T>.static(name: Name, data: T, meta: Meta = Meta.EMPTY): Unit =
emit(name, Data.static(data, meta))
public suspend fun <T : Any> DataSetBuilder<T>.static(
public suspend inline fun <reified T : Any> DataSetBuilder<T>.static(
name: String,
data: T,
metaBuilder: MetaBuilder.() -> Unit,
) {
emit(name.toName(), Data.static(data, Meta(metaBuilder)))
}
): Unit = emit(name.toName(), Data.static(data, Meta(metaBuilder)))
/**
* Update data with given node data and meta with node meta.

View File

@ -1,22 +1,21 @@
package hep.dataforge.data
import hep.dataforge.actions.NamedData
import hep.dataforge.actions.named
import hep.dataforge.meta.*
import hep.dataforge.misc.Type
import hep.dataforge.names.*
import kotlinx.coroutines.*
import kotlinx.coroutines.flow.*
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.emitAll
import kotlinx.coroutines.flow.flow
import kotlinx.coroutines.flow.map
import kotlin.collections.component1
import kotlin.collections.component2
import kotlin.reflect.KClass
import kotlin.reflect.KType
public sealed class DataTreeItem<out T : Any> {
public class Node<out T : Any>(public val tree: DataTree<T>) : DataTreeItem<T>()
public class Leaf<out T : Any>(public val data: Data<T>) : DataTreeItem<T>()
}
public val <T : Any> DataTreeItem<T>.type: KClass<out T>
public val <T : Any> DataTreeItem<T>.type: KType
get() = when (this) {
is DataTreeItem.Node -> tree.dataType
is DataTreeItem.Leaf -> data.type
@ -91,7 +90,7 @@ public fun <T : Any> DataTree<T>.itemFlow(): Flow<Pair<Name, DataTreeItem<T>>> =
* The difference from similar method for [DataSet] is that internal logic is more simple and the return value is a [DataTree]
*/
public fun <T : Any> DataTree<T>.branch(branchName: Name): DataTree<T> = object : DataTree<T> {
override val dataType: KClass<out T> get() = this@branch.dataType
override val dataType: KType get() = this@branch.dataType
override suspend fun items(): Map<NameToken, DataTreeItem<T>> = getItem(branchName).tree?.items() ?: emptyMap()
}

View File

@ -1,6 +1,6 @@
package hep.dataforge.data
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import kotlinx.coroutines.*
import kotlin.coroutines.CoroutineContext
import kotlin.coroutines.EmptyCoroutineContext

View File

@ -20,10 +20,9 @@ import hep.dataforge.meta.string
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.launch
import kotlin.reflect.KClass
public interface GroupRule {
public suspend fun <T : Any> gather(dataType: KClass<out T>, set: DataSet<T>): Map<String, DataSet<T>>
public suspend fun <T : Any> gather(set: DataSet<T>): Map<String, DataSet<T>>
public companion object {
/**
@ -41,21 +40,20 @@ public interface GroupRule {
): GroupRule = object : GroupRule {
override suspend fun <T : Any> gather(
dataType: KClass<out T>,
set: DataSet<T>,
): Map<String, DataSet<T>> {
val map = HashMap<String, ActiveDataTree<T>>()
set.flow().collect { data ->
val tagValue = data.meta[key]?.string ?: defaultTagValue
map.getOrPut(tagValue) { ActiveDataTree(dataType) }.emit(data.name, data.data)
map.getOrPut(tagValue) { ActiveDataTree(set.dataType) }.emit(data.name, data.data)
}
scope.launch {
set.updates.collect { name ->
val data = set.getData(name)
val tagValue = data?.meta[key]?.string ?: defaultTagValue
map.getOrPut(tagValue) { ActiveDataTree(dataType) }.emit(name, data)
map.getOrPut(tagValue) { ActiveDataTree(set.dataType) }.emit(name, data)
}
}
@ -63,4 +61,4 @@ public interface GroupRule {
}
}
}
}
}

View File

@ -1,7 +1,5 @@
package hep.dataforge.actions
package hep.dataforge.data
import hep.dataforge.data.Data
import hep.dataforge.data.StaticData
import hep.dataforge.meta.isEmpty
import hep.dataforge.misc.Named
import hep.dataforge.names.Name

View File

@ -1,14 +1,12 @@
package hep.dataforge.data
import hep.dataforge.actions.NamedData
import hep.dataforge.actions.named
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.*
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.filter
import kotlinx.coroutines.flow.map
import kotlinx.coroutines.flow.mapNotNull
import kotlin.reflect.KClass
import kotlin.reflect.KType
/**
@ -17,7 +15,7 @@ import kotlin.reflect.KClass
public fun <T : Any> DataSet<T>.filter(
predicate: suspend (Name, Data<T>) -> Boolean,
): ActiveDataSet<T> = object : ActiveDataSet<T> {
override val dataType: KClass<out T> get() = this@filter.dataType
override val dataType: KType get() = this@filter.dataType
override fun flow(): Flow<NamedData<T>> =
this@filter.flow().filter { predicate(it.name, it.data) }
@ -37,7 +35,7 @@ public fun <T : Any> DataSet<T>.filter(
*/
public fun <T : Any> DataSet<T>.withNamePrefix(prefix: Name): DataSet<T> = if (prefix.isEmpty()) this
else object : ActiveDataSet<T> {
override val dataType: KClass<out T> get() = this@withNamePrefix.dataType
override val dataType: KType get() = this@withNamePrefix.dataType
override fun flow(): Flow<NamedData<T>> = this@withNamePrefix.flow().map { it.data.named(prefix + it.name) }
@ -53,7 +51,7 @@ else object : ActiveDataSet<T> {
public fun <T : Any> DataSet<T>.branch(branchName: Name): DataSet<T> = if (branchName.isEmpty()) {
this
} else object : ActiveDataSet<T> {
override val dataType: KClass<out T> get() = this@branch.dataType
override val dataType: KType get() = this@branch.dataType
override fun flow(): Flow<NamedData<T>> = this@branch.flow().mapNotNull {
it.name.removeHeadOrNull(branchName)?.let { name ->

View File

@ -1,11 +1,10 @@
package hep.dataforge.data
import hep.dataforge.actions.NamedData
import hep.dataforge.actions.named
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.meta.seal
import hep.dataforge.meta.toMutableMeta
import hep.dataforge.misc.DFInternal
import kotlinx.coroutines.flow.*
import kotlin.contracts.InvocationKind
import kotlin.contracts.contract
@ -20,23 +19,11 @@ import kotlin.reflect.typeOf
* @param meta for the resulting data. By default equals input data.
* @param block the transformation itself
*/
private fun <T : Any, R : Any> Data<T>.map(
outputType: KType,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = this.meta,
block: suspend (T) -> R,
): LazyData<R> = LazyData(outputType, meta, coroutineContext, listOf(this)) {
block(await())
}
/**
* See [map]
*/
public inline fun <T : Any, reified R : Any> Data<T>.map(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = this.meta,
crossinline block: suspend (T) -> R,
): LazyData<R> = LazyData(typeOf<R>(), meta, coroutineContext, listOf(this)) {
): Data<R> = Data(meta, coroutineContext, listOf(this)) {
block(await())
}
@ -48,7 +35,7 @@ public inline fun <T1 : Any, T2 : Any, reified R : Any> Data<T1>.combine(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = this.meta,
crossinline block: suspend (left: T1, right: T2) -> R,
): LazyData<R> = LazyData(typeOf<R>(), meta, coroutineContext, listOf(this, other)) {
): Data<R> = Data(meta, coroutineContext, listOf(this, other)) {
block(await(), other.await())
}
@ -62,8 +49,7 @@ public inline fun <T : Any, reified R : Any> Collection<Data<T>>.reduceToData(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
crossinline block: suspend (Collection<T>) -> R,
): LazyData<R> = LazyData(
typeOf<R>(),
): Data<R> = Data(
meta,
coroutineContext,
this
@ -71,12 +57,13 @@ public inline fun <T : Any, reified R : Any> Collection<Data<T>>.reduceToData(
block(map { it.await() })
}
@DFInternal
public fun <K, T : Any, R : Any> Map<K, Data<T>>.reduceToData(
outputType: KType,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
block: suspend (Map<K, T>) -> R,
): LazyData<R> = LazyData(
): Data<R> = Data(
outputType,
meta,
coroutineContext,
@ -96,8 +83,7 @@ public inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduceToData(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline block: suspend (Map<K, T>) -> R,
): LazyData<R> = LazyData(
typeOf<R>(),
): Data<R> = Data(
meta,
coroutineContext,
this.values
@ -110,12 +96,13 @@ public inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduceToData(
/**
* Transform a [Flow] of [NamedData] to a single [Data].
*/
@DFInternal
public suspend fun <T : Any, R : Any> Flow<NamedData<T>>.reduceToData(
outputType: KType,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
transformation: suspend (Flow<NamedData<T>>) -> R,
): LazyData<R> = LazyData(
): Data<R> = Data(
outputType,
meta,
coroutineContext,
@ -124,11 +111,12 @@ public suspend fun <T : Any, R : Any> Flow<NamedData<T>>.reduceToData(
transformation(this)
}
@OptIn(DFInternal::class)
public suspend inline fun <T : Any, reified R : Any> Flow<NamedData<T>>.reduceToData(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline transformation: suspend (Flow<NamedData<T>>) -> R,
): LazyData<R> = reduceToData(typeOf<R>(), coroutineContext, meta) {
): Data<R> = reduceToData(typeOf<R>(), coroutineContext, meta) {
transformation(it)
}
@ -140,7 +128,7 @@ public suspend inline fun <T : Any, reified R : Any> Flow<NamedData<T>>.foldToDa
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline block: suspend (result: R, data: NamedData<T>) -> R,
): LazyData<R> = reduceToData(
): Data<R> = reduceToData(
coroutineContext, meta
) {
it.fold(initial, block)
@ -148,6 +136,7 @@ public suspend inline fun <T : Any, reified R : Any> Flow<NamedData<T>>.foldToDa
//DataSet operations
@DFInternal
public suspend fun <T : Any, R : Any> DataSet<T>.map(
outputType: KType,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
@ -157,11 +146,14 @@ public suspend fun <T : Any, R : Any> DataSet<T>.map(
populate(
flow().map {
val newMeta = it.meta.toMutableMeta().apply(metaTransform).seal()
it.map(outputType, coroutineContext, newMeta, block).named(it.name)
Data(outputType, newMeta, coroutineContext, listOf(it)) {
block(it.await())
}.named(it.name)
}
)
}
@OptIn(DFInternal::class)
public suspend inline fun <T : Any, reified R : Any> DataSet<T>.map(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
noinline metaTransform: MetaBuilder.() -> Unit = {},
@ -179,11 +171,11 @@ public suspend inline fun <T : Any, reified R : Any> DataSet<T>.reduceToData(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline transformation: suspend (Flow<NamedData<T>>) -> R,
): LazyData<R> = flow().reduceToData(coroutineContext, meta, transformation)
): Data<R> = flow().reduceToData(coroutineContext, meta, transformation)
public suspend inline fun <T : Any, reified R : Any> DataSet<T>.foldToData(
initial: R,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline block: suspend (result: R, data: NamedData<T>) -> R,
): LazyData<R> = flow().foldToData(initial, coroutineContext, meta, block)
): Data<R> = flow().foldToData(initial, coroutineContext, meta, block)

View File

@ -1,8 +1,6 @@
package hep.dataforge.data
import hep.dataforge.actions.NamedData
import hep.dataforge.actions.named
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.Name
import hep.dataforge.names.matches
import hep.dataforge.names.toName

View File

@ -1,15 +1,12 @@
package hep.dataforge.data
import hep.dataforge.actions.MapAction
import hep.dataforge.actions.Action
import hep.dataforge.actions.map
import kotlinx.coroutines.runBlocking
import org.junit.jupiter.api.Test
import kotlin.test.assertEquals
/**
* Block the thread and get data content
*/
public fun <T : Any> Data<T>.value(): T = runBlocking { await() }
@Suppress("EXPERIMENTAL_API_USAGE")
class ActionsTest {
val data: DataTree<Int> = runBlocking {
DataTree {
@ -21,23 +18,23 @@ class ActionsTest {
@Test
fun testStaticMapAction() {
val plusOne = MapAction<Int, Int> {
val plusOne = Action.map<Int, Int> {
result { it + 1 }
}
runBlocking {
val result = plusOne.execute(data)
assertEquals(2, result.getData("1")?.value())
assertEquals(2, result.getData("1")?.await())
}
}
@Test
fun testDynamicMapAction() {
val plusOne = MapAction<Int, Int> {
val plusOne = Action.map<Int, Int> {
result { it + 1 }
}
val datum = runBlocking {
val result = plusOne.execute(data, scope = this)
result.getData("1")?.value()
result.getData("1")?.await()
}
assertEquals(2, datum)
}

View File

@ -18,10 +18,12 @@ internal class DataTreeBuilderTest {
static("c.d", "c.d")
static("c.f", "c.f")
}
assertEquals("a", node.getData("primary.a")?.value())
assertEquals("b", node.getData("primary.b")?.value())
assertEquals("c.d", node.getData("c.d")?.value())
assertEquals("c.f", node.getData("c.f")?.value())
runBlocking {
assertEquals("a", node.getData("primary.a")?.await())
assertEquals("b", node.getData("primary.b")?.await())
assertEquals("c.d", node.getData("c.d")?.await())
assertEquals("c.f", node.getData("c.f")?.await())
}
}
@Test
@ -42,9 +44,10 @@ internal class DataTreeBuilderTest {
populate(updateData)
}
assertEquals("a", node.getData("update.a")?.value())
assertEquals("a", node.getData("primary.a")?.value())
runBlocking {
assertEquals("a", node.getData("update.a")?.await())
assertEquals("a", node.getData("primary.a")?.await())
}
}
@Test
@ -76,7 +79,7 @@ internal class DataTreeBuilderTest {
}
}
updateJob.join()
assertEquals(9, rootNode.getData("sub.value")?.value())
assertEquals(9, rootNode.getData("sub.value")?.await())
cancel()
}
} catch (t: Throwable) {

View File

@ -4,8 +4,8 @@ import hep.dataforge.context.Context
import hep.dataforge.io.*
import hep.dataforge.io.IOFormat.Companion.META_KEY
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.misc.DFExperimental
import kotlinx.io.*
import kotlinx.io.text.readUtf8Line
import kotlinx.io.text.writeUtf8String

View File

@ -8,6 +8,7 @@ import hep.dataforge.io.MetaFormatFactory
import hep.dataforge.meta.*
import hep.dataforge.meta.descriptors.ItemDescriptor
import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.NameToken
import hep.dataforge.names.withIndex
import hep.dataforge.values.ListValue

View File

@ -2,10 +2,10 @@ package hep.dataforge.io.yaml
import hep.dataforge.io.parse
import hep.dataforge.io.toString
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.meta.get
import hep.dataforge.meta.seal
import hep.dataforge.misc.DFExperimental
import kotlin.test.Test
import kotlin.test.assertEquals

View File

@ -1,6 +1,6 @@
package hep.dataforge.io
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
/**
* A fire-and-forget consumer of messages

View File

@ -1,9 +1,9 @@
package hep.dataforge.io
import hep.dataforge.context.Global
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.get
import hep.dataforge.meta.int
import hep.dataforge.misc.DFExperimental
import kotlinx.io.text.writeUtf8String
import kotlin.test.Test
import kotlin.test.assertEquals

View File

@ -1,9 +1,9 @@
package hep.dataforge.io
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.meta.isEmpty
import hep.dataforge.misc.DFExperimental
import kotlinx.io.*
import java.nio.file.Files
import java.nio.file.Path

View File

@ -1,7 +1,7 @@
package hep.dataforge.io
import hep.dataforge.context.Global
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import kotlinx.io.asBinary
import kotlinx.io.toByteArray
import kotlinx.io.writeDouble

View File

@ -1,7 +1,7 @@
package hep.dataforge.io
import hep.dataforge.context.Global
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import kotlinx.io.writeDouble
import java.nio.file.Files
import kotlin.test.Test

View File

@ -1,5 +1,6 @@
package hep.dataforge.meta
import hep.dataforge.misc.DFBuilder
import hep.dataforge.names.Name
import kotlin.properties.ReadWriteProperty

View File

@ -1,5 +1,6 @@
package hep.dataforge.meta
import hep.dataforge.misc.DFBuilder
import hep.dataforge.names.Name
import hep.dataforge.names.asName
import hep.dataforge.values.EnumValue

View File

@ -1,5 +1,6 @@
package hep.dataforge.meta
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.*
public interface MutableMeta<out M : MutableMeta<M>> : TypedMeta<M>, MutableItemProvider {

View File

@ -1,11 +0,0 @@
package hep.dataforge.meta
/**
* General marker for dataforge builders
*/
@DslMarker
public annotation class DFBuilder
@RequiresOptIn(level = RequiresOptIn.Level.WARNING)
@Retention(AnnotationRetention.BINARY)
public annotation class DFExperimental

View File

@ -1,6 +1,7 @@
package hep.dataforge.meta.descriptors
import hep.dataforge.meta.*
import hep.dataforge.misc.DFBuilder
import hep.dataforge.names.*
import hep.dataforge.values.*

View File

@ -1,6 +1,7 @@
package hep.dataforge.meta
import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.toName
import hep.dataforge.values.ListValue
import hep.dataforge.values.Value

View File

@ -1,6 +1,7 @@
package hep.dataforge.meta.transformations
import hep.dataforge.meta.*
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.Name
/**

View File

@ -0,0 +1,21 @@
package hep.dataforge.misc
/**
* General marker for dataforge builders
*/
@DslMarker
public annotation class DFBuilder
/**
* The declaration is experimental and could be changed in future
*/
@RequiresOptIn(level = RequiresOptIn.Level.WARNING)
@Retention(AnnotationRetention.BINARY)
public annotation class DFExperimental
/**
* The declaration is internal to the DataForge and could use unsafe or unstable features.
*/
@RequiresOptIn(level = RequiresOptIn.Level.WARNING)
@Retention(AnnotationRetention.BINARY)
public annotation class DFInternal

View File

@ -1,6 +1,6 @@
package hep.dataforge.names
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import kotlinx.serialization.KSerializer
import kotlinx.serialization.Serializable
import kotlinx.serialization.descriptors.PrimitiveKind
@ -186,7 +186,6 @@ public fun Name.withIndex(index: String): Name {
* Fast [String]-based accessor for item map
*/
public operator fun <T> Map<NameToken, T>.get(body: String, query: String? = null): T? = get(NameToken(body, query))
public operator fun <T> Map<Name, T>.get(name: String): T? = get(name.toName())
public operator fun <T> MutableMap<Name, T>.set(name: String, value: T): Unit = set(name.toName(), value)

View File

@ -1,6 +1,6 @@
package hep.dataforge.names
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
/**

View File

@ -1,5 +1,6 @@
package hep.dataforge.meta
import hep.dataforge.misc.DFExperimental
import hep.dataforge.values.NumberValue
import hep.dataforge.values.True
import hep.dataforge.values.Value

View File

@ -1,6 +1,6 @@
package hep.dataforge.names
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import kotlin.test.Test
import kotlin.test.assertFails
import kotlin.test.assertFalse

View File

@ -2,6 +2,7 @@ package hep.dataforge.tables.io
import hep.dataforge.io.Envelope
import hep.dataforge.meta.*
import hep.dataforge.misc.DFExperimental
import hep.dataforge.tables.SimpleColumnHeader
import hep.dataforge.tables.Table
import hep.dataforge.values.Value

View File

@ -1,6 +1,6 @@
package hep.dataforge.tables.io
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import hep.dataforge.tables.Table
import hep.dataforge.tables.get
import hep.dataforge.tables.row

View File

@ -6,11 +6,13 @@ import hep.dataforge.data.GoalExecutionRestriction
import hep.dataforge.meta.Meta
import hep.dataforge.meta.descriptors.Described
import hep.dataforge.meta.descriptors.ItemDescriptor
import hep.dataforge.misc.DFInternal
import hep.dataforge.misc.Type
import hep.dataforge.names.Name
import hep.dataforge.workspace.Task.Companion.TYPE
import kotlinx.coroutines.withContext
import kotlin.reflect.KClass
import kotlin.reflect.KType
import kotlin.reflect.typeOf
@Type(TYPE)
public interface Task<out T : Any> : Described {
@ -42,8 +44,9 @@ public class TaskResultBuilder<T : Any>(
* Data dependency cycles are not allowed.
*/
@Suppress("FunctionName")
@DFInternal
public fun <T : Any> Task(
resultType: KClass<out T>,
resultType: KType,
descriptor: ItemDescriptor? = null,
builder: suspend TaskResultBuilder<T>.() -> Unit,
): Task<T> = object : Task<T> {
@ -56,15 +59,16 @@ public fun <T : Any> Task(
taskMeta: Meta,
): TaskResult<T> = withContext(GoalExecutionRestriction() + workspace.goalLogger) {
//TODO use safe builder and check for external data on add and detects cycles
val dataset = DataTree(resultType) {
val dataset = DataTree<T>(resultType) {
TaskResultBuilder(workspace,taskName, taskMeta, this).apply { builder() }
}
workspace.internalize(dataset, taskName, taskMeta)
}
}
@OptIn(DFInternal::class)
@Suppress("FunctionName")
public inline fun <reified T : Any> Task(
descriptor: ItemDescriptor? = null,
noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
): Task<T> = Task(T::class, descriptor, builder)
): Task<T> = Task(typeOf<T>(), descriptor, builder)

View File

@ -1,7 +1,7 @@
package hep.dataforge.workspace
import hep.dataforge.actions.NamedData
import hep.dataforge.data.Data
import hep.dataforge.data.NamedData
import hep.dataforge.meta.Meta
import hep.dataforge.names.Name

View File

@ -7,16 +7,15 @@ import hep.dataforge.data.ActiveDataTree
import hep.dataforge.data.DataSet
import hep.dataforge.data.DataSetBuilder
import hep.dataforge.data.DataTree
import hep.dataforge.meta.DFBuilder
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.misc.DFBuilder
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.Name
import hep.dataforge.names.toName
import kotlin.properties.PropertyDelegateProvider
import kotlin.properties.ReadOnlyProperty
import kotlin.reflect.KClass
public data class TaskReference<T: Any>(public val taskName: Name, public val task: Task<T>)
@ -24,25 +23,19 @@ public interface TaskContainer {
public fun registerTask(taskName: Name, task: Task<*>)
}
public fun <T : Any> TaskContainer.registerTask(
resultType: KClass<out T>,
name: String,
descriptorBuilder: NodeDescriptor.() -> Unit = {},
builder: suspend TaskResultBuilder<T>.() -> Unit,
): Unit = registerTask(name.toName(), Task(resultType, NodeDescriptor(descriptorBuilder), builder))
public inline fun <reified T : Any> TaskContainer.registerTask(
name: String,
noinline descriptorBuilder: NodeDescriptor.() -> Unit = {},
noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
): Unit = registerTask(T::class, name, descriptorBuilder, builder)
): Unit = registerTask(name.toName(), Task(NodeDescriptor(descriptorBuilder), builder))
public inline fun <reified T : Any> TaskContainer.task(
noinline descriptorBuilder: NodeDescriptor.() -> Unit = {},
noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> = PropertyDelegateProvider { _, property ->
val taskName = property.name.toName()
val task = Task(T::class, NodeDescriptor(descriptorBuilder), builder)
val task = Task(NodeDescriptor(descriptorBuilder), builder)
registerTask(taskName, task)
ReadOnlyProperty { _, _ -> TaskReference(taskName, task) }
}

View File

@ -3,15 +3,14 @@ package hep.dataforge.workspace
import hep.dataforge.data.Data
import hep.dataforge.data.await
import hep.dataforge.io.*
import kotlin.reflect.KClass
import hep.dataforge.misc.DFInternal
/**
* Convert an [Envelope] to a data via given format. The actual parsing is done lazily.
*/
@OptIn(DFInternal::class)
public fun <T : Any> Envelope.toData(format: IOFormat<T>): Data<T> {
@Suppress("UNCHECKED_CAST")
val kclass: KClass<T> = format.type.classifier as? KClass<T> ?: error("IOFormat type is not a class")
return Data(kclass, meta) {
return Data(format.type, meta) {
data?.readWith(format) ?: error("Can't convert envelope without data to Data")
}
}

View File

@ -4,6 +4,7 @@ package hep.dataforge.workspace
import hep.dataforge.data.*
import hep.dataforge.io.*
import hep.dataforge.meta.*
import hep.dataforge.misc.DFExperimental
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.runBlocking
import kotlinx.coroutines.withContext
@ -15,7 +16,6 @@ import java.nio.file.StandardOpenOption
import java.nio.file.spi.FileSystemProvider
import java.util.zip.ZipEntry
import java.util.zip.ZipOutputStream
import kotlin.reflect.KClass
import kotlin.reflect.KType
import kotlin.reflect.typeOf
import kotlin.streams.toList
@ -36,9 +36,6 @@ internal inline fun <reified T : Any> IOPlugin.formatResolver(): FileFormatResol
resolveIOFormat<T>() ?: error("Can't resolve IO format for ${T::class}")
}
private val <T : Any> FileFormatResolver<T>.kClass: KClass<T>
get() = type.classifier as? KClass<T> ?: error("Format resolver actual type does not correspond to type parameter")
private fun newZFS(path: Path): FileSystem {
val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
?: error("Zip file system provider not found")
@ -110,7 +107,7 @@ public suspend fun <T : Any> IOPlugin.readDataDirectory(
return readDataDirectory(fs.rootDirectories.first(), formatResolver)
}
if (!Files.isDirectory(path)) error("Provided path $path is not a directory")
return DataTree(formatResolver.kClass) {
return DataTree(formatResolver.type) {
Files.list(path).toList().forEach { path ->
val fileName = path.fileName.toString()
if (fileName.startsWith(IOPlugin.META_FILE_NAME)) {

View File

@ -4,8 +4,8 @@ import hep.dataforge.context.Global
import hep.dataforge.data.*
import hep.dataforge.io.IOFormat
import hep.dataforge.io.io
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.misc.DFExperimental
import kotlinx.coroutines.runBlocking
import kotlinx.io.Input
import kotlinx.io.Output

View File

@ -1,9 +1,9 @@
package hep.dataforge.workspace
import hep.dataforge.actions.get
import hep.dataforge.context.*
import hep.dataforge.data.*
import hep.dataforge.meta.*
import hep.dataforge.names.get
import hep.dataforge.names.plus
import kotlinx.coroutines.flow.first
import kotlinx.coroutines.flow.single