Merge pull request #59 from mipt-npm/refactor/data-types

Refactor/data types
Alexander Nozik 2021-02-07 20:37:40 +03:00 committed by GitHub
commit f6210fde7f
72 changed files with 440 additions and 394 deletions

View File

@ -1,6 +1,6 @@
name: Gradle build
on: [push]
on: [ push ]
jobs:
build:
@ -8,12 +8,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up JDK 11
uses: actions/setup-java@v1
with:
java-version: 11
- name: Grant execute permission for gradlew
run: chmod +x gradlew
- name: Build with Gradle
run: ./gradlew build
- uses: actions/checkout@v2
- name: Set up JDK 11
uses: actions/setup-java@v1
with:
java-version: 11
- name: Grant execute permission for gradlew
run: chmod +x gradlew
- name: Build with Gradle
run: ./gradlew build

View File

@ -1,40 +1,40 @@
name: Bintray Publish
on:
release:
types:
- created
release:
types:
- created
jobs:
build-on-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Set up JDK 11
uses: actions/setup-java@v1
with:
java-version: 11
- name: Grant execute permission for gradlew
run: chmod +x gradlew
- name: Gradle clean
run: ./gradlew clean
- name: Gradle build
run: ./gradlew build
- name: Run release task
run: ./gradlew release -PbintrayUser=${{ secrets.BINTRAY_USER }} -PbintrayApiKey=${{ secrets.BINTRAY_KEY }}
build-on-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Set up JDK 11
uses: actions/setup-java@v1
with:
java-version: 11
- name: Grant execute permission for gradlew
run: chmod +x gradlew
- name: Gradle clean
run: ./gradlew clean
- name: Gradle build
run: ./gradlew build
- name: Run release task
run: ./gradlew release -PbintrayUser=${{ secrets.BINTRAY_USER }} -PbintrayApiKey=${{ secrets.BINTRAY_KEY }}
build-on-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Set up JDK 11
uses: actions/setup-java@v1
with:
java-version: 11
- name: Grant execute permission for gradlew
run: chmod +x gradlew
- name: Gradle clean
run: ./gradlew clean
- name: Gradle build
run: ./gradlew build
- name: Run release task
run: ./gradlew release -PbintrayUser=${{ secrets.BINTRAY_USER }} -PbintrayApiKey=${{ secrets.BINTRAY_KEY }}
build-on-macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Set up JDK 11
uses: actions/setup-java@v1
with:
java-version: 11
- name: Grant execute permission for gradlew
run: chmod +x gradlew
- name: Gradle clean
run: ./gradlew clean
- name: Gradle build
run: ./gradlew build
- name: Run release task
run: ./gradlew release -PbintrayUser=${{ secrets.BINTRAY_USER }} -PbintrayApiKey=${{ secrets.BINTRAY_KEY }}

View File

@ -20,6 +20,7 @@
- [Major breaking change] Moved `NodeItem` and `ValueItem` to a top level
- Plugins are removed from Context constructor and added lazily in ContextBuilder
- [Major breaking change] Full refactor of DataTree/DataSource
- [Major breaking change] Replace KClass with KType in data. Remove direct access to constructors with types.
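The motivation in one illustration: KClass erases generic arguments while KType keeps them, so Data<List<Int>> and Data<List<String>> become distinguishable at runtime. A self-contained sketch, not part of the diff:

import kotlin.reflect.typeOf

inline fun <reified T> describe() {
    println(T::class)    // KClass: class kotlin.collections.List (type arguments lost)
    println(typeOf<T>()) // KType: kotlin.collections.List<kotlin.Int> (arguments preserved)
}

fun main() {
    describe<List<Int>>()
}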
### Deprecated

View File

@ -2,7 +2,7 @@ plugins {
id("ru.mipt.npm.project")
}
val dataforgeVersion by extra("0.3.0-dev-3")
val dataforgeVersion by extra("0.3.0")
val bintrayRepo by extra("dataforge")
val githubProject by extra("dataforge-core")
@ -13,20 +13,8 @@ allprojects {
version = dataforgeVersion
apply<org.jetbrains.dokka.gradle.DokkaPlugin>()
repositories {
mavenLocal()
}
}
apiValidation{
validationDisabled = true
}
subprojects {
apply(plugin = "ru.mipt.npm.publish")
}
apiValidation{
ignoredProjects.add("dataforge-tables")
}

View File

@ -28,4 +28,8 @@ kotlin {
}
}
}
}
readme{
maturity = ru.mipt.npm.gradle.Maturity.DEVELOPMENT
}

View File

@ -1,7 +1,16 @@
package hep.dataforge.context
import hep.dataforge.meta.*
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.meta.seal
import hep.dataforge.misc.DFBuilder
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.toName
import kotlin.collections.HashMap
import kotlin.collections.component1
import kotlin.collections.component2
import kotlin.collections.forEach
import kotlin.collections.set
/**
* A convenience builder for context

View File

@ -1,6 +1,6 @@
package hep.dataforge.context
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.Name
import hep.dataforge.names.plus
import hep.dataforge.provider.Provider

View File

@ -1,12 +1,12 @@
package hep.dataforge.properties
import hep.dataforge.meta.Config
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.get
import hep.dataforge.meta.set
import hep.dataforge.meta.transformations.MetaConverter
import hep.dataforge.meta.transformations.nullableItemToObject
import hep.dataforge.meta.transformations.nullableObjectToMetaItem
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.Name
@DFExperimental

View File

@ -1,6 +1,6 @@
package hep.dataforge.properties
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.flow.MutableStateFlow

View File

@ -1,10 +1,10 @@
package hep.dataforge.properties
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import org.w3c.dom.HTMLInputElement
@DFExperimental
fun HTMLInputElement.bindValue(property: Property<String>) {
public fun HTMLInputElement.bindValue(property: Property<String>) {
if (this.onchange != null) error("Input element already bound")
this.onchange = {
property.value = this.value
@ -18,7 +18,7 @@ fun HTMLInputElement.bindValue(property: Property<String>) {
}
@DFExperimental
fun HTMLInputElement.bindChecked(property: Property<Boolean>) {
public fun HTMLInputElement.bindChecked(property: Property<Boolean>) {
if (this.onchange != null) error("Input element already bound")
this.onchange = {
property.value = this.checked

View File

@ -16,7 +16,6 @@
package hep.dataforge.descriptors
import hep.dataforge.meta.DFExperimental
import hep.dataforge.values.ValueType
import kotlin.reflect.KClass

View File

@ -2,7 +2,7 @@ package hep.dataforge.provider
import hep.dataforge.context.Context
import hep.dataforge.context.gather
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import hep.dataforge.misc.Type
import hep.dataforge.names.Name
import kotlin.reflect.KClass

View File

@ -12,10 +12,6 @@ kotlin {
commonMain{
dependencies {
api(project(":dataforge-meta"))
}
}
jvmMain{
dependencies{
api(kotlin("reflect"))
}
}

View File

@ -1,10 +1,9 @@
package hep.dataforge.actions
import hep.dataforge.data.DataSet
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.misc.DFExperimental
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.flow.*
/**
* A simple data transformation on a data node. Actions should avoid doing actual dependency evaluation in [execute].

View File

@ -1,13 +1,20 @@
package hep.dataforge.actions
import hep.dataforge.data.*
import hep.dataforge.meta.*
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.meta.seal
import hep.dataforge.meta.toMutableMeta
import hep.dataforge.misc.DFBuilder
import hep.dataforge.misc.DFExperimental
import hep.dataforge.misc.DFInternal
import hep.dataforge.names.Name
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.flow.map
import kotlinx.coroutines.launch
import kotlin.reflect.KClass
import kotlin.reflect.KType
import kotlin.reflect.typeOf
/**
* Action environment includes data name, data meta and action configuration meta
@ -33,9 +40,9 @@ public class MapActionBuilder<T, R>(public var name: Name, public var meta: Meta
}
}
public class MapAction<in T : Any, out R : Any>(
public val outputType: KClass<out R>,
@PublishedApi
internal class MapAction<in T : Any, out R : Any>(
private val outputType: KType,
private val block: MapActionBuilder<T, R>.() -> Unit,
) : Action<T, R> {
@ -61,7 +68,9 @@ public class MapAction<in T : Any, out R : Any>(
//getting new meta
val newMeta = builder.meta.seal()
val newData = data.map(outputType, meta = newMeta) { builder.result(env, it) }
@OptIn(DFInternal::class) val newData = Data(outputType, newMeta, dependencies = listOf(data)) {
builder.result(env, data.await())
}
//setting the data node
return newData.named(newName)
}
@ -83,9 +92,13 @@ public class MapAction<in T : Any, out R : Any>(
}
/**
* A one-to-one mapping action
*/
@DFExperimental
@Suppress("FunctionName")
public inline fun <T : Any, reified R : Any> MapAction(
public inline fun <T : Any, reified R : Any> Action.Companion.map(
noinline builder: MapActionBuilder<T, R>.() -> Unit,
): MapAction<T, R> = MapAction(R::class, builder)
): Action<T, R> = MapAction(typeOf<R>(), builder)
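The removed public MapAction constructor is replaced by the Action.map factory above; usage mirrors the updated test later in this diff:

val plusOne = Action.map<Int, Int> {
    result { it + 1 } // transform each datum; name and meta are also adjustable in this builder
}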

View File

@ -1,19 +1,21 @@
package hep.dataforge.actions
import hep.dataforge.data.*
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.misc.DFBuilder
import hep.dataforge.misc.DFExperimental
import hep.dataforge.misc.DFInternal
import hep.dataforge.names.Name
import hep.dataforge.names.toName
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.flow
import kotlinx.coroutines.flow.fold
import kotlin.reflect.KClass
import kotlin.reflect.KType
import kotlin.reflect.typeOf
@DFExperimental
public class JoinGroup<T : Any, R : Any>(public var name: String, internal val set: DataSet<T>) {
public var meta: MetaBuilder = MetaBuilder()
@ -26,9 +28,9 @@ public class JoinGroup<T : Any, R : Any>(public var name: String, internal val s
}
@DFExperimental
@DFBuilder
public class ReduceGroupBuilder<T : Any, R : Any>(
private val inputType: KClass<out T>,
private val inputType: KType,
private val scope: CoroutineScope,
public val actionMeta: Meta,
) {
@ -39,7 +41,7 @@ public class ReduceGroupBuilder<T : Any, R : Any>(
*/
public fun byValue(tag: String, defaultTag: String = "@default", action: JoinGroup<T, R>.() -> Unit) {
groupRules += { node ->
GroupRule.byValue(scope, tag, defaultTag).gather(inputType, node).map {
GroupRule.byMetaValue(scope, tag, defaultTag).gather(node).map {
JoinGroup<T, R>(it.key, it.value).apply(action)
}
}
@ -72,16 +74,17 @@ public class ReduceGroupBuilder<T : Any, R : Any>(
}
@DFExperimental
public class ReduceAction<T : Any, R : Any>(
private val inputType: KClass<out T>,
outputType: KClass<out R>,
@PublishedApi
internal class ReduceAction<T : Any, R : Any>(
private val inputType: KType,
outputType: KType,
private val action: ReduceGroupBuilder<T, R>.() -> Unit,
) : CachingAction<T, R>(outputType) {
//TODO optimize reduction. Currently the whole action recalculates on push
override fun CoroutineScope.transform(set: DataSet<T>, meta: Meta, key: Name): Flow<NamedData<R>> = flow {
ReduceGroupBuilder<T, R>(inputType,this@transform, meta).apply(action).buildGroups(set).forEach { group ->
ReduceGroupBuilder<T, R>(inputType, this@transform, meta).apply(action).buildGroups(set).forEach { group ->
val dataFlow: Map<Name, Data<T>> = group.set.flow().fold(HashMap()) { acc, value ->
acc.apply {
acc[value.name] = value.data
@ -93,8 +96,7 @@ public class ReduceAction<T : Any, R : Any>(
val groupMeta = group.meta
val env = ActionEnv(groupName.toName(), groupMeta, meta)
val res: LazyData<R> = dataFlow.reduceToData(
@OptIn(DFInternal::class) val res: Data<R> = dataFlow.reduceToData(
outputType,
meta = groupMeta
) { group.result.invoke(env, it) }
@ -104,4 +106,11 @@ public class ReduceAction<T : Any, R : Any>(
}
}
public operator fun <T> Map<Name, T>.get(name: String): T? = get(name.toName())
/**
* A many-to-one reduce action
*/
@DFExperimental
@Suppress("FunctionName")
public inline fun <reified T : Any, reified R : Any> Action.Companion.reduce(
noinline builder: ReduceGroupBuilder<T, R>.() -> Unit,
): Action<T, R> = ReduceAction(typeOf<T>(), typeOf<R>(), builder)
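A hedged usage sketch: byValue comes from the ReduceGroupBuilder above, while the result { } call inside each group is assumed from the JoinGroup API (only its invocation is visible in this diff):

val sumByTag = Action.reduce<Int, Int> {
    byValue(tag = "type", defaultTag = "@default") {
        result { data -> data.values.sum() } // data is the group's Map<Name, Int>
    }
}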

View File

@ -5,13 +5,17 @@ import hep.dataforge.meta.Laminate
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.meta.toMutableMeta
import hep.dataforge.misc.DFExperimental
import hep.dataforge.misc.DFInternal
import hep.dataforge.names.Name
import hep.dataforge.names.toName
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.FlowPreview
import kotlinx.coroutines.flow.*
import kotlinx.coroutines.launch
import kotlin.collections.set
import kotlin.reflect.KClass
import kotlin.reflect.KType
import kotlin.reflect.typeOf
public class SplitBuilder<T : Any, R : Any>(public val name: Name, public val meta: Meta) {
@ -39,11 +43,13 @@ public class SplitBuilder<T : Any, R : Any>(public val name: Name, public val me
/**
* Action that splits each incoming element into a number of fragments defined in the builder
*/
public class SplitAction<T : Any, R : Any>(
private val outputType: KClass<out R>,
@PublishedApi
internal class SplitAction<T : Any, R : Any>(
private val outputType: KType,
private val action: SplitBuilder<T, R>.() -> Unit,
) : Action<T, R> {
@OptIn(FlowPreview::class)
override suspend fun execute(
dataSet: DataSet<T>,
meta: Meta,
@ -59,11 +65,14 @@ public class SplitAction<T : Any, R : Any>(
// apply individual fragment rules to result
return split.fragments.entries.asFlow().map { (fragmentName, rule) ->
val env = SplitBuilder.FragmentRule<T, R>(fragmentName, laminate.toMutableMeta()).apply(rule)
data.map(outputType, meta = env.meta) { env.result(it) }.named(fragmentName)
//data.map<R>(outputType, meta = env.meta) { env.result(it) }.named(fragmentName)
@OptIn(DFInternal::class) Data(outputType, meta = env.meta, dependencies = listOf(data)) {
env.result(data.await())
}.named(fragmentName)
}
}
return ActiveDataTree(outputType) {
return ActiveDataTree<R>(outputType) {
populate(dataSet.flow().flatMapConcat(transform = ::splitOne))
scope?.launch {
dataSet.updates.collect { name ->
@ -75,4 +84,13 @@ public class SplitAction<T : Any, R : Any>(
}
}
}
}
}
/**
* Action that splits each incoming element into a number of fragments defined in the builder
*/
@DFExperimental
@Suppress("FunctionName")
public inline fun <T : Any, reified R : Any> Action.Companion.split(
noinline builder: SplitBuilder<T, R>.() -> Unit,
): Action<T, R> = SplitAction(typeOf<R>(), builder)
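A sketch of the new entry point, assuming SplitBuilder exposes a fragment(name) { result { ... } } DSL (implied by the FragmentRule handling above):

val fanOut = Action.split<Int, Int> {
    fragment("doubled") { result { it * 2 } } // each input datum yields two named fragments
    fragment("negated") { result { -it } }
}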

View File

@ -8,13 +8,14 @@ import kotlinx.coroutines.flow.*
import kotlinx.coroutines.launch
import kotlinx.coroutines.sync.Mutex
import kotlinx.coroutines.sync.withLock
import kotlin.reflect.KClass
import kotlin.reflect.KType
import kotlin.reflect.typeOf
/**
* A mutable [DataTree.Companion.active].
*/
public class ActiveDataTree<T : Any>(
override val dataType: KClass<out T>,
override val dataType: KType,
) : DataTree<T>, DataSetBuilder<T>, ActiveDataSet<T> {
private val mutex = Mutex()
private val treeItems = HashMap<NameToken, DataTreeItem<T>>()
@ -49,7 +50,7 @@ public class ActiveDataTree<T : Any>(
private suspend fun getOrCreateNode(token: NameToken): ActiveDataTree<T> =
(treeItems[token] as? DataTreeItem.Node<T>)?.tree as? ActiveDataTree<T>
?: ActiveDataTree(dataType).also {
?: ActiveDataTree<T>(dataType).also {
mutex.withLock {
treeItems[token] = DataTreeItem.Node(it)
}
@ -92,10 +93,10 @@ public class ActiveDataTree<T : Any>(
*/
@Suppress("FunctionName")
public suspend fun <T : Any> ActiveDataTree(
type: KClass<out T>,
type: KType,
block: suspend ActiveDataTree<T>.() -> Unit,
): ActiveDataTree<T> {
val tree = ActiveDataTree(type)
val tree = ActiveDataTree<T>(type)
tree.block()
return tree
}
@ -103,15 +104,15 @@ public suspend fun <T : Any> ActiveDataTree(
@Suppress("FunctionName")
public suspend inline fun <reified T : Any> ActiveDataTree(
crossinline block: suspend ActiveDataTree<T>.() -> Unit,
): ActiveDataTree<T> = ActiveDataTree(T::class).apply { block() }
): ActiveDataTree<T> = ActiveDataTree<T>(typeOf<T>()).apply { block() }
public suspend inline fun <reified T : Any> ActiveDataTree<T>.emit(
name: Name,
noinline block: suspend ActiveDataTree<T>.() -> Unit,
): Unit = emit(name, ActiveDataTree(T::class, block))
): Unit = emit(name, ActiveDataTree(typeOf<T>(), block))
public suspend inline fun <reified T : Any> ActiveDataTree<T>.emit(
name: String,
noinline block: suspend ActiveDataTree<T>.() -> Unit,
): Unit = emit(name.toName(), ActiveDataTree(T::class, block))
): Unit = emit(name.toName(), ActiveDataTree(typeOf<T>(), block))
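The reified builders combine as below; static is the renamed emitStatic from this same commit, and the block must run inside a coroutine since the builders are suspending:

val tree = ActiveDataTree<Int> {
    static("a", 1)
    emit("sub") {
        static("b", 2) // stored under "sub.b"
    }
}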

View File

@ -1,7 +1,6 @@
package hep.dataforge.data
import hep.dataforge.actions.Action
import hep.dataforge.actions.NamedData
import hep.dataforge.meta.Meta
import hep.dataforge.names.Name
import hep.dataforge.names.startsWith
@ -9,7 +8,7 @@ import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.collect
import kotlin.reflect.KClass
import kotlin.reflect.KType
/**
* Remove all values with keys starting with [name]
@ -23,7 +22,7 @@ internal fun MutableMap<Name, *>.removeWhatStartsWith(name: Name) {
* An action that caches results on-demand and recalculates them on source push
*/
public abstract class CachingAction<in T : Any, out R : Any>(
public val outputType: KClass<out R>,
public val outputType: KType,
) : Action<T, R> {
protected abstract fun CoroutineScope.transform(
@ -36,7 +35,7 @@ public abstract class CachingAction<in T : Any, out R : Any>(
dataSet: DataSet<T>,
meta: Meta,
scope: CoroutineScope?,
): DataSet<R> = ActiveDataTree(outputType) {
): DataSet<R> = ActiveDataTree<R>(outputType) {
coroutineScope {
populate(transform(dataSet, meta))
}

View File

@ -1,6 +1,6 @@
package hep.dataforge.data
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job
import kotlin.coroutines.CoroutineContext

View File

@ -3,11 +3,13 @@ package hep.dataforge.data
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaRepr
import hep.dataforge.meta.isEmpty
import hep.dataforge.misc.DFInternal
import hep.dataforge.misc.Type
import kotlinx.coroutines.*
import kotlin.coroutines.CoroutineContext
import kotlin.coroutines.EmptyCoroutineContext
import kotlin.reflect.KClass
import kotlin.reflect.KType
import kotlin.reflect.typeOf
/**
* A data element characterized by its meta
@ -17,7 +19,7 @@ public interface Data<out T : Any> : Goal<T>, MetaRepr {
/**
* Type marker for the data. The type is known before the calculation takes place, so it can be checked.
*/
public val type: KClass<out T>
public val type: KType
/**
* Meta for the data
@ -25,7 +27,7 @@ public interface Data<out T : Any> : Goal<T>, MetaRepr {
public val meta: Meta
override fun toMeta(): Meta = Meta {
"type" put (type.simpleName ?: "undefined")
"type" put (type.toString())
if (!meta.isEmpty()) {
"meta" put meta
}
@ -34,16 +36,21 @@ public interface Data<out T : Any> : Goal<T>, MetaRepr {
public companion object {
public const val TYPE: String = "data"
public fun <T : Any> static(
/**
* The type that can't have any subtypes
*/
internal val TYPE_OF_NOTHING: KType = typeOf<Unit>()
public inline fun <reified T : Any> static(
value: T,
meta: Meta = Meta.EMPTY,
): Data<T> = StaticData(value, meta)
): Data<T> = StaticData(typeOf<T>(), value, meta)
/**
* An empty data containing only meta
*/
public fun empty(meta: Meta): Data<Nothing> = object : Data<Nothing> {
override val type: KClass<out Nothing> = Nothing::class
override val type: KType = TYPE_OF_NOTHING
override val meta: Meta = meta
override val dependencies: Collection<Goal<*>> = emptyList()
override val deferred: Deferred<Nothing>
@ -57,34 +64,39 @@ public interface Data<out T : Any> : Goal<T>, MetaRepr {
}
}
public class LazyData<T : Any>(
override val type: KClass<out T>,
/**
* A lazily computed variant of [Data] based on [LazyGoal].
* One must ensure that the proper [type] is provided, so this constructor should not be used directly.
*/
private class LazyData<T : Any>(
override val type: KType,
override val meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext,
additionalContext: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
block: suspend () -> T,
) : Data<T>, LazyGoal<T>(context, dependencies, block)
) : Data<T>, LazyGoal<T>(additionalContext, dependencies, block)
public class StaticData<T : Any>(
override val type: KType,
value: T,
override val meta: Meta = Meta.EMPTY,
) : Data<T>, StaticGoal<T>(value) {
override val type: KClass<out T> get() = value::class
}
) : Data<T>, StaticGoal<T>(value)
@Suppress("FunctionName")
@DFInternal
public fun <T : Any> Data(
type: KClass<out T>,
type: KType,
meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
block: suspend () -> T,
): Data<T> = LazyData(type, meta, context, dependencies, block)
@OptIn(DFInternal::class)
@Suppress("FunctionName")
public inline fun <reified T : Any> Data(
meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
noinline block: suspend () -> T,
): Data<T> = Data(T::class, meta, context, dependencies, block)
): Data<T> = Data(typeOf<T>(), meta, context, dependencies, block)
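Call sites keep the old ergonomics because the reified factories capture typeOf<T>() internally; for instance (expensiveComputation is a hypothetical suspending supplier):

val fixed: Data<Int> = Data.static(42)
val lazy: Data<Int> = Data { expensiveComputation() } // LazyData itself is now private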

View File

@ -1,20 +1,19 @@
package hep.dataforge.data
import hep.dataforge.actions.NamedData
import hep.dataforge.actions.named
import hep.dataforge.data.Data.Companion.TYPE_OF_NOTHING
import hep.dataforge.meta.Meta
import hep.dataforge.meta.set
import hep.dataforge.names.*
import kotlinx.coroutines.*
import kotlinx.coroutines.flow.*
import kotlin.reflect.KClass
import kotlin.reflect.KType
public interface DataSet<out T : Any> {
/**
* The minimal common ancestor to all data in the node
*/
public val dataType: KClass<out T>
public val dataType: KType
/**
* Traverse this provider or its child. The order is not guaranteed.
@ -43,7 +42,9 @@ public interface DataSet<out T : Any> {
* An empty [DataSet] that suits all types
*/
public val EMPTY: DataSet<Nothing> = object : DataSet<Nothing> {
override val dataType: KClass<out Nothing> = Nothing::class
override val dataType: KType = TYPE_OF_NOTHING
private val nothing: Nothing get() = error("this is nothing")
override fun flow(): Flow<NamedData<Nothing>> = emptyFlow()
@ -88,7 +89,7 @@ public suspend fun DataSet<*>.toMeta(): Meta = Meta {
set(it.name, it.meta)
} else {
it.name put {
"type" put it.type.simpleName
"type" put it.type.toString()
"meta" put it.meta
}
}

View File

@ -1,17 +1,19 @@
package hep.dataforge.data
import hep.dataforge.actions.NamedData
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.Name
import hep.dataforge.names.plus
import hep.dataforge.names.toName
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.collect
import kotlin.reflect.KType
public interface DataSetBuilder<in T : Any> {
public val dataType: KType
/**
* Remove all data items starting with [name]
*/
@ -50,8 +52,12 @@ public interface DataSetBuilder<in T : Any> {
public suspend infix fun String.put(block: suspend DataSetBuilder<T>.() -> Unit): Unit = emit(toName(), block)
}
private class SubSetBuilder<in T : Any>(private val parent: DataSetBuilder<T>, private val branch: Name) :
DataSetBuilder<T> {
private class SubSetBuilder<in T : Any>(
private val parent: DataSetBuilder<T>,
private val branch: Name,
) : DataSetBuilder<T> {
override val dataType: KType get() = parent.dataType
override suspend fun remove(name: Name) {
parent.remove(branch + name)
}
@ -88,7 +94,7 @@ public suspend fun <T : Any> DataSetBuilder<T>.emit(data: NamedData<T>) {
/**
* Produce lazy [Data] and emit it into the [DataSetBuilder]
*/
public suspend inline fun <reified T : Any> DataSetBuilder<T>.emitLazy(
public suspend inline fun <reified T : Any> DataSetBuilder<T>.produce(
name: String,
meta: Meta = Meta.EMPTY,
noinline producer: suspend () -> T,
@ -97,11 +103,11 @@ public suspend inline fun <reified T : Any> DataSetBuilder<T>.emitLazy(
emit(name, data)
}
public suspend inline fun <reified T : Any> DataSetBuilder<T>.emitLazy(
public suspend inline fun <reified T : Any> DataSetBuilder<T>.produce(
name: Name,
meta: Meta = Meta.EMPTY,
noinline producer: suspend () -> T,
){
) {
val data = Data(meta, block = producer)
emit(name, data)
}
@ -109,19 +115,17 @@ public suspend inline fun <reified T : Any> DataSetBuilder<T>.emitLazy(
/**
* Emit a static data with the fixed value
*/
public suspend fun <T : Any> DataSetBuilder<T>.emitStatic(name: String, data: T, meta: Meta = Meta.EMPTY): Unit =
public suspend inline fun <reified T : Any> DataSetBuilder<T>.static(name: String, data: T, meta: Meta = Meta.EMPTY): Unit =
emit(name, Data.static(data, meta))
public suspend fun <T : Any> DataSetBuilder<T>.emitStatic(name: Name, data: T, meta: Meta = Meta.EMPTY): Unit =
public suspend inline fun <reified T : Any> DataSetBuilder<T>.static(name: Name, data: T, meta: Meta = Meta.EMPTY): Unit =
emit(name, Data.static(data, meta))
public suspend fun <T : Any> DataSetBuilder<T>.emitStatic(
public suspend inline fun <reified T : Any> DataSetBuilder<T>.static(
name: String,
data: T,
metaBuilder: MetaBuilder.() -> Unit,
) {
emit(name.toName(), Data.static(data, Meta(metaBuilder)))
}
): Unit = emit(name.toName(), Data.static(data, Meta(metaBuilder)))
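After the rename from emitStatic/emitLazy to static/produce, a builder block reads as follows inside a coroutine (loadValue is a hypothetical suspending supplier):

DataTree<String> {
    "primary" put {
        static("a", "a")         // fixed value
    }
    produce("b") { loadValue() } // lazily computed value
}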
/**
* Update data with given node data and meta with node meta.

View File

@ -1,22 +1,21 @@
package hep.dataforge.data
import hep.dataforge.actions.NamedData
import hep.dataforge.actions.named
import hep.dataforge.meta.*
import hep.dataforge.misc.Type
import hep.dataforge.names.*
import kotlinx.coroutines.*
import kotlinx.coroutines.flow.*
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.emitAll
import kotlinx.coroutines.flow.flow
import kotlinx.coroutines.flow.map
import kotlin.collections.component1
import kotlin.collections.component2
import kotlin.reflect.KClass
import kotlin.reflect.KType
public sealed class DataTreeItem<out T : Any> {
public class Node<out T : Any>(public val tree: DataTree<T>) : DataTreeItem<T>()
public class Leaf<out T : Any>(public val data: Data<T>) : DataTreeItem<T>()
}
public val <T : Any> DataTreeItem<T>.type: KClass<out T>
public val <T : Any> DataTreeItem<T>.type: KType
get() = when (this) {
is DataTreeItem.Node -> tree.dataType
is DataTreeItem.Leaf -> data.type
@ -91,7 +90,7 @@ public fun <T : Any> DataTree<T>.itemFlow(): Flow<Pair<Name, DataTreeItem<T>>> =
* The difference from the similar method for [DataSet] is that the internal logic is simpler and the return value is a [DataTree]
*/
public fun <T : Any> DataTree<T>.branch(branchName: Name): DataTree<T> = object : DataTree<T> {
override val dataType: KClass<out T> get() = this@branch.dataType
override val dataType: KType get() = this@branch.dataType
override suspend fun items(): Map<NameToken, DataTreeItem<T>> = getItem(branchName).tree?.items() ?: emptyMap()
}

View File

@ -1,6 +1,6 @@
package hep.dataforge.data
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import kotlinx.coroutines.*
import kotlin.coroutines.CoroutineContext
import kotlin.coroutines.EmptyCoroutineContext

View File

@ -19,10 +19,10 @@ import hep.dataforge.meta.get
import hep.dataforge.meta.string
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.flow.collect
import kotlin.reflect.KClass
import kotlinx.coroutines.launch
public interface GroupRule {
public suspend fun <T : Any> gather(dataType: KClass<out T>, set: DataSet<T>): Map<String, DataSet<T>>
public suspend fun <T : Any> gather(set: DataSet<T>): Map<String, DataSet<T>>
public companion object {
/**
@ -33,48 +33,32 @@ public interface GroupRule {
* @param defaultTagValue
* @return
*/
public fun byValue(
public fun byMetaValue(
scope: CoroutineScope,
key: String,
defaultTagValue: String,
): GroupRule = object : GroupRule {
override suspend fun <T : Any> gather(
dataType: KClass<out T>,
set: DataSet<T>,
): Map<String, DataSet<T>> {
val map = HashMap<String, ActiveDataTree<T>>()
set.flow().collect { data ->
val tagValue = data.meta[key]?.string ?: defaultTagValue
map.getOrPut(tagValue) { ActiveDataTree(dataType) }.emit(data.name, data.data)
map.getOrPut(tagValue) { ActiveDataTree(set.dataType) }.emit(data.name, data.data)
}
scope.launch {
set.updates.collect { name ->
val data = set.getData(name)
val tagValue = data?.meta[key]?.string ?: defaultTagValue
map.getOrPut(tagValue) { ActiveDataTree(set.dataType) }.emit(name, data)
}
}
return map
}
}
// @ValueDef(key = "byValue", required = true, info = "The name of annotation value by which grouping should be made")
// @ValueDef(
// key = "defaultValue",
// def = "default",
// info = "Default value which should be used for content in which the grouping value is not presented"
// )
// public fun byMeta(scope: CoroutineScope, config: Meta): GroupRule {
// //TODO expand grouping options
// return config["byValue"]?.string?.let {
// byValue(
// scope,
// it,
// config["defaultValue"]?.string ?: "default"
// )
// } ?: object : GroupRule {
// override suspend fun <T : Any> gather(
// dataType: KClass<T>,
// source: DataSource<T>,
// ): Map<String, DataSource<T>> = mapOf("" to source)
// }
// }
}
}
}
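The renamed rule in use; gather now takes the element type from the set itself rather than a dataType argument (scope and dataSet are assumed to exist in a coroutine context):

val byType: Map<String, DataSet<Int>> =
    GroupRule.byMetaValue(scope, key = "type", defaultTagValue = "unknown").gather(dataSet)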

View File

@ -1,7 +1,5 @@
package hep.dataforge.actions
package hep.dataforge.data
import hep.dataforge.data.Data
import hep.dataforge.data.StaticData
import hep.dataforge.meta.isEmpty
import hep.dataforge.misc.Named
import hep.dataforge.names.Name

View File

@ -2,12 +2,13 @@ package hep.dataforge.data
import hep.dataforge.names.*
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.flow.*
import kotlin.reflect.KClass
import kotlinx.coroutines.flow.collect
import kotlin.reflect.KType
import kotlin.reflect.typeOf
@PublishedApi
internal class StaticDataTree<T : Any>(
override val dataType: KClass<out T>,
override val dataType: KType,
) : DataSetBuilder<T>, DataTree<T> {
private val items: MutableMap<NameToken, DataTreeItem<T>> = HashMap()
@ -22,11 +23,11 @@ internal class StaticDataTree<T : Any>(
}
}
fun getOrCreateNode(name: Name): StaticDataTree<T> = when (name.length) {
private fun getOrCreateNode(name: Name): StaticDataTree<T> = when (name.length) {
0 -> this
1 -> {
val itemName = name.firstOrNull()!!
(items[itemName].tree as? StaticDataTree<T>) ?: StaticDataTree(dataType).also {
(items[itemName].tree as? StaticDataTree<T>) ?: StaticDataTree<T>(dataType).also {
items[itemName] = DataTreeItem.Node(it)
}
}
@ -61,14 +62,14 @@ internal class StaticDataTree<T : Any>(
@Suppress("FunctionName")
public suspend fun <T : Any> DataTree(
dataType: KClass<out T>,
dataType: KType,
block: suspend DataSetBuilder<T>.() -> Unit,
): DataTree<T> = StaticDataTree(dataType).apply { block() }
): DataTree<T> = StaticDataTree<T>(dataType).apply { block() }
@Suppress("FunctionName")
public suspend inline fun <reified T : Any> DataTree(
noinline block: suspend DataSetBuilder<T>.() -> Unit,
): DataTree<T> = DataTree(T::class, block)
): DataTree<T> = DataTree(typeOf<T>(), block)
public suspend fun <T : Any> DataSet<T>.seal(): DataTree<T> = DataTree(dataType){
populate(this@seal)

View File

@ -1,14 +1,12 @@
package hep.dataforge.data
import hep.dataforge.actions.NamedData
import hep.dataforge.actions.named
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.*
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.filter
import kotlinx.coroutines.flow.map
import kotlinx.coroutines.flow.mapNotNull
import kotlin.reflect.KClass
import kotlin.reflect.KType
/**
@ -17,7 +15,7 @@ import kotlin.reflect.KClass
public fun <T : Any> DataSet<T>.filter(
predicate: suspend (Name, Data<T>) -> Boolean,
): ActiveDataSet<T> = object : ActiveDataSet<T> {
override val dataType: KClass<out T> get() = this@filter.dataType
override val dataType: KType get() = this@filter.dataType
override fun flow(): Flow<NamedData<T>> =
this@filter.flow().filter { predicate(it.name, it.data) }
@ -32,13 +30,12 @@ public fun <T : Any> DataSet<T>.filter(
}
}
/**
* Generate a wrapper data set with a given name prefix prepended to all names
*/
public fun <T : Any> DataSet<T>.withNamePrefix(prefix: Name): DataSet<T> = if (prefix.isEmpty()) this
else object : ActiveDataSet<T> {
override val dataType: KClass<out T> get() = this@withNamePrefix.dataType
override val dataType: KType get() = this@withNamePrefix.dataType
override fun flow(): Flow<NamedData<T>> = this@withNamePrefix.flow().map { it.data.named(prefix + it.name) }
@ -48,14 +45,13 @@ else object : ActiveDataSet<T> {
override val updates: Flow<Name> get() = this@withNamePrefix.updates.map { prefix + it }
}
/**
* Get a subset of data starting with a given [branchName]
*/
public fun <T : Any> DataSet<T>.branch(branchName: Name): DataSet<T> = if (branchName.isEmpty()) {
this
} else object : ActiveDataSet<T> {
override val dataType: KClass<out T> get() = this@branch.dataType
override val dataType: KType get() = this@branch.dataType
override fun flow(): Flow<NamedData<T>> = this@branch.flow().mapNotNull {
it.name.removeHeadOrNull(branchName)?.let { name ->

View File

@ -1,17 +1,17 @@
package hep.dataforge.data
import hep.dataforge.actions.NamedData
import hep.dataforge.actions.named
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.meta.seal
import hep.dataforge.meta.toMutableMeta
import hep.dataforge.misc.DFInternal
import kotlinx.coroutines.flow.*
import kotlin.contracts.InvocationKind
import kotlin.contracts.contract
import kotlin.coroutines.CoroutineContext
import kotlin.coroutines.EmptyCoroutineContext
import kotlin.reflect.KClass
import kotlin.reflect.KType
import kotlin.reflect.typeOf
/**
* Lazily transform this data to another data. By convention [block] should not use external data (be pure).
@ -19,23 +19,11 @@ import kotlin.reflect.KClass
* @param meta for the resulting data. By default equals the input data meta.
* @param block the transformation itself
*/
public fun <T : Any, R : Any> Data<T>.map(
outputType: KClass<out R>,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = this.meta,
block: suspend (T) -> R,
): LazyData<R> = LazyData(outputType, meta, coroutineContext, listOf(this)) {
block(await())
}
/**
* See [map]
*/
public inline fun <T : Any, reified R : Any> Data<T>.map(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = this.meta,
crossinline block: suspend (T) -> R,
): LazyData<R> = LazyData(R::class, meta, coroutineContext, listOf(this)) {
): Data<R> = Data(meta, coroutineContext, listOf(this)) {
block(await())
}
@ -47,7 +35,7 @@ public inline fun <T1 : Any, T2 : Any, reified R : Any> Data<T1>.combine(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = this.meta,
crossinline block: suspend (left: T1, right: T2) -> R,
): LazyData<R> = LazyData(R::class, meta, coroutineContext, listOf(this, other)) {
): Data<R> = Data(meta, coroutineContext, listOf(this, other)) {
block(await(), other.await())
}
@ -61,8 +49,7 @@ public inline fun <T : Any, reified R : Any> Collection<Data<T>>.reduceToData(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
crossinline block: suspend (Collection<T>) -> R,
): LazyData<R> = LazyData(
R::class,
): Data<R> = Data(
meta,
coroutineContext,
this
@ -70,12 +57,13 @@ public inline fun <T : Any, reified R : Any> Collection<Data<T>>.reduceToData(
block(map { it.await() })
}
@DFInternal
public fun <K, T : Any, R : Any> Map<K, Data<T>>.reduceToData(
outputType: KClass<out R>,
outputType: KType,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
block: suspend (Map<K, T>) -> R,
): LazyData<R> = LazyData(
): Data<R> = Data(
outputType,
meta,
coroutineContext,
@ -95,8 +83,7 @@ public inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduceToData(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline block: suspend (Map<K, T>) -> R,
): LazyData<R> = LazyData(
R::class,
): Data<R> = Data(
meta,
coroutineContext,
this.values
@ -109,12 +96,13 @@ public inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduceToData(
/**
* Transform a [Flow] of [NamedData] to a single [Data].
*/
@DFInternal
public suspend fun <T : Any, R : Any> Flow<NamedData<T>>.reduceToData(
outputType: KClass<out R>,
outputType: KType,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
transformation: suspend (Flow<NamedData<T>>) -> R,
): LazyData<R> = LazyData(
): Data<R> = Data(
outputType,
meta,
coroutineContext,
@ -123,11 +111,12 @@ public suspend fun <T : Any, R : Any> Flow<NamedData<T>>.reduceToData(
transformation(this)
}
@OptIn(DFInternal::class)
public suspend inline fun <T : Any, reified R : Any> Flow<NamedData<T>>.reduceToData(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline transformation: suspend (Flow<NamedData<T>>) -> R,
): LazyData<R> = reduceToData(R::class, coroutineContext, meta) {
): Data<R> = reduceToData(typeOf<R>(), coroutineContext, meta) {
transformation(it)
}
@ -139,7 +128,7 @@ public suspend inline fun <T : Any, reified R : Any> Flow<NamedData<T>>.foldToDa
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline block: suspend (result: R, data: NamedData<T>) -> R,
): LazyData<R> = reduceToData(
): Data<R> = reduceToData(
coroutineContext, meta
) {
it.fold(initial, block)
@ -147,25 +136,29 @@ public suspend inline fun <T : Any, reified R : Any> Flow<NamedData<T>>.foldToDa
//DataSet operations
@DFInternal
public suspend fun <T : Any, R : Any> DataSet<T>.map(
outputType: KClass<out R>,
outputType: KType,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
metaTransform: MetaBuilder.() -> Unit = {},
block: suspend (T) -> R,
): DataTree<R> = DataTree(outputType) {
): DataTree<R> = DataTree<R>(outputType) {
populate(
flow().map {
val newMeta = it.meta.toMutableMeta().apply(metaTransform).seal()
it.map(outputType, coroutineContext, newMeta, block).named(it.name)
Data(outputType, newMeta, coroutineContext, listOf(it)) {
block(it.await())
}.named(it.name)
}
)
}
@OptIn(DFInternal::class)
public suspend inline fun <T : Any, reified R : Any> DataSet<T>.map(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
noinline metaTransform: MetaBuilder.() -> Unit = {},
noinline block: suspend (T) -> R,
): DataTree<R> = map(R::class, coroutineContext, metaTransform, block)
): DataTree<R> = map(typeOf<R>(), coroutineContext, metaTransform, block)
public suspend fun <T : Any> DataSet<T>.forEach(block: suspend (NamedData<T>) -> Unit) {
contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) }
@ -178,11 +171,11 @@ public suspend inline fun <T : Any, reified R : Any> DataSet<T>.reduceToData(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline transformation: suspend (Flow<NamedData<T>>) -> R,
): LazyData<R> = flow().reduceToData(coroutineContext, meta, transformation)
): Data<R> = flow().reduceToData(coroutineContext, meta, transformation)
public suspend inline fun <T : Any, reified R : Any> DataSet<T>.foldToData(
initial: R,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline block: suspend (result: R, data: NamedData<T>) -> R,
): LazyData<R> = flow().foldToData(initial, coroutineContext, meta, block)
): Data<R> = flow().foldToData(initial, coroutineContext, meta, block)
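Every combinator above now returns plain Data<R> instead of LazyData<R>; two representative calls (source, a, b, and c are assumed Data<Int> values):

val doubled: Data<Int> = source.map { it * 2 }
val total: Data<Int> = listOf(a, b, c).reduceToData { values -> values.sum() }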

View File

@ -1,42 +0,0 @@
package hep.dataforge.data
import kotlin.reflect.KClass
import kotlin.reflect.full.isSubclassOf
/**
* Check if data could be safely cast to given class
*/
internal fun <R : Any> Data<*>.canCast(type: KClass<out R>): Boolean = this.type.isSubclassOf(type)
/**
* Cast the node to given type if the cast is possible or return null
*/
@Suppress("UNCHECKED_CAST")
public fun <R : Any> Data<*>.castOrNull(type: KClass<out R>): Data<R>? =
if (!canCast(type)) null else object : Data<R> by (this as Data<R>) {
override val type: KClass<out R> = type
}
/**
* Unsafe cast of data node
*/
public fun <R : Any> Data<*>.cast(type: KClass<out R>): Data<R> =
castOrNull(type) ?: error("Can't cast ${this.type} to $type")
public inline fun <reified R : Any> Data<*>.cast(): Data<R> = cast(R::class)
@Suppress("UNCHECKED_CAST")
public fun <R : Any> DataSet<*>.castOrNull(type: KClass<out R>): DataSet<R>? =
if (!canCast(type)) null else object : DataSet<R> by (this as DataSet<R>) {
override val dataType: KClass<out R> = type
}
public fun <R : Any> DataSet<*>.cast(type: KClass<out R>): DataSet<R> =
castOrNull(type) ?: error("Can't cast ${this.dataType} to $type")
/**
* Check that node is compatible with given type meaning that each element could be cast to the type
*/
internal fun <R : Any> DataSet<*>.canCast(type: KClass<out R>): Boolean =
type.isSubclassOf(this.dataType)

View File

@ -1,29 +1,42 @@
package hep.dataforge.data
import hep.dataforge.actions.NamedData
import hep.dataforge.actions.named
import hep.dataforge.meta.DFExperimental
import hep.dataforge.names.*
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.Name
import hep.dataforge.names.matches
import hep.dataforge.names.toName
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.filter
import kotlinx.coroutines.flow.map
import kotlin.reflect.KClass
import kotlin.reflect.KType
import kotlin.reflect.full.isSubtypeOf
import kotlin.reflect.typeOf
/**
* Cast the node to the given type if the cast is possible, or return null
*/
@Suppress("UNCHECKED_CAST")
private fun <R : Any> Data<*>.castOrNull(type: KType): Data<R>? =
if (!this.type.isSubtypeOf(type)) null else object : Data<R> by (this as Data<R>) {
override val type: KType = type
}
/**
* Select all data matching the given type and filters. Does not modify paths
*/
@OptIn(DFExperimental::class)
public fun <R : Any> DataSet<*>.select(
type: KClass<out R>,
@PublishedApi
internal fun <R : Any> DataSet<*>.select(
type: KType,
namePattern: Name? = null,
): ActiveDataSet<R> = object : ActiveDataSet<R> {
override val dataType: KClass<out R> = type
override val dataType = type
@Suppress("UNCHECKED_CAST")
override fun flow(): Flow<NamedData<R>> = this@select.flow().filter {
it.canCast(type) && (namePattern == null || it.name.matches(namePattern))
override fun flow(): Flow<NamedData<R>> = this@select.flow().filter { datum ->
datum.type.isSubtypeOf(type) && (namePattern == null || datum.name.matches(namePattern))
}.map {
@Suppress("UNCHECKED_CAST")
it as NamedData<R>
}
@ -31,7 +44,7 @@ public fun <R : Any> DataSet<*>.select(
override val updates: Flow<Name> = this@select.updates.filter {
val datum = this@select.getData(it)
datum?.canCast(type) ?: false
datum?.type?.isSubtypeOf(type) ?: false
}
}
@ -40,12 +53,12 @@ public fun <R : Any> DataSet<*>.select(
* Select all data of the given type, filtered by an optional [namePattern]
*/
public inline fun <reified R : Any> DataSet<*>.select(namePattern: Name? = null): DataSet<R> =
select(R::class, namePattern)
select(typeOf<R>(), namePattern)
public suspend fun <R : Any> DataSet<*>.selectOne(type: KClass<out R>, name: Name): NamedData<R>? =
getData(name)?.castOrNull(type)?.named(name)
public suspend fun <R : Any> DataSet<*>.selectOne(type: KType, name: Name): NamedData<R>? =
getData(name)?.castOrNull<R>(type)?.named(name)
public suspend inline fun <reified R : Any> DataSet<*>.selectOne(name: Name): NamedData<R>? = selectOne(R::class, name)
public suspend inline fun <reified R : Any> DataSet<*>.selectOne(name: Name): NamedData<R>? = selectOne(typeOf<R>(), name)
public suspend inline fun <reified R : Any> DataSet<*>.selectOne(name: String): NamedData<R>? =
selectOne(R::class, name.toName())
selectOne(typeOf<R>(), name.toName())
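Selection is now driven by isSubtypeOf on KType instead of KClass checks; inside a coroutine, with mixedSet an assumed DataSet<*>:

val ints: DataSet<Int> = mixedSet.select()
val first: NamedData<Int>? = mixedSet.selectOne("a") // null when the name is missing or the type does not match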

View File

@ -1,43 +1,40 @@
package hep.dataforge.data
import hep.dataforge.actions.MapAction
import hep.dataforge.actions.Action
import hep.dataforge.actions.map
import kotlinx.coroutines.runBlocking
import org.junit.jupiter.api.Test
import kotlin.test.assertEquals
/**
* Block the thread and get data content
*/
public fun <T : Any> Data<T>.value(): T = runBlocking { await() }
@Suppress("EXPERIMENTAL_API_USAGE")
class ActionsTest {
val data: DataTree<Int> = runBlocking {
DataTree {
repeat(10) {
emitStatic(it.toString(), it)
static(it.toString(), it)
}
}
}
@Test
fun testStaticMapAction() {
val plusOne = MapAction<Int, Int> {
val plusOne = Action.map<Int, Int> {
result { it + 1 }
}
runBlocking {
val result = plusOne.execute(data)
assertEquals(2, result.getData("1")?.value())
assertEquals(2, result.getData("1")?.await())
}
}
@Test
fun testDynamicMapAction() {
val plusOne = MapAction<Int, Int> {
val plusOne = Action.map<Int, Int> {
result { it + 1 }
}
val datum = runBlocking {
val result = plusOne.execute(data, scope = this)
result.getData("1")?.value()
result.getData("1")?.await()
}
assertEquals(2, datum)
}

View File

@ -8,6 +8,24 @@ import kotlin.test.assertEquals
internal class DataTreeBuilderTest {
@Test
fun testTreeBuild() = runBlocking {
val node = DataTree<Any> {
"primary" put {
static("a", "a")
static("b", "b")
}
static("c.d", "c.d")
static("c.f", "c.f")
}
runBlocking {
assertEquals("a", node.getData("primary.a")?.await())
assertEquals("b", node.getData("primary.b")?.await())
assertEquals("c.d", node.getData("c.d")?.await())
assertEquals("c.f", node.getData("c.f")?.await())
}
}
@Test
fun testDataUpdate() = runBlocking {
val updateData: DataTree<Any> = DataTree {
@ -18,17 +36,18 @@ internal class DataTreeBuilderTest {
}
val node = DataTree<Any> {
emit("primary") {
emitStatic("a", "a")
emitStatic("b", "b")
"primary" put {
static("a", "a")
static("b", "b")
}
emitStatic("root", "root")
static("root", "root")
populate(updateData)
}
assertEquals("a", node.getData("update.a")?.value())
assertEquals("a", node.getData("primary.a")?.value())
runBlocking {
assertEquals("a", node.getData("update.a")?.await())
assertEquals("a", node.getData("primary.a")?.await())
}
}
@Test
@ -40,7 +59,7 @@ internal class DataTreeBuilderTest {
updateJob = launch {
repeat(10) {
delay(10)
emitStatic("value", it)
static("value", it)
}
delay(10)
}
@ -60,7 +79,7 @@ internal class DataTreeBuilderTest {
}
}
updateJob.join()
assertEquals(9, rootNode.getData("sub.value")?.value())
assertEquals(9, rootNode.getData("sub.value")?.await())
cancel()
}
} catch (t: Throwable) {

View File

@ -22,4 +22,8 @@ kotlin {
}
}
}
}
readme{
maturity = ru.mipt.npm.gradle.Maturity.PROTOTYPE
}

View File

@ -1,27 +1,33 @@
plugins {
id("ru.mipt.npm.mpp")
id("ru.mipt.npm.native")
// id("ru.mipt.npm.native")
}
description = "YAML meta IO"
repositories{
jcenter()
}
kscience {
useSerialization{
yamlKt()
yamlKt("0.9.0-dev-1")
}
}
repositories{
maven("https://dl.bintray.com/mamoe/yamlkt")
}
kotlin {
sourceSets {
commonMain{
dependencies {
api(project(":dataforge-io"))
// api("net.mamoe.yamlkt:yamlkt:${ru.mipt.npm.gradle.KScienceVersions.Serialization.yamlKtVersion}")
}
}
}
}
readme{
maturity = ru.mipt.npm.gradle.Maturity.PROTOTYPE
description ="""
YAML meta converters and Front Matter envelope format
""".trimIndent()
}

View File

@ -4,8 +4,8 @@ import hep.dataforge.context.Context
import hep.dataforge.io.*
import hep.dataforge.io.IOFormat.Companion.META_KEY
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.misc.DFExperimental
import kotlinx.io.*
import kotlinx.io.text.readUtf8Line
import kotlinx.io.text.writeUtf8String
@ -17,8 +17,7 @@ public class FrontMatterEnvelopeFormat(
) : EnvelopeFormat {
override fun readPartial(input: Input): PartialEnvelope {
@Suppress("VARIABLE_WITH_REDUNDANT_INITIALIZER")
var line = ""
var line: String
var offset = 0u
do {
line = input.readUtf8Line() //?: error("Input does not contain front matter separator")
@ -44,7 +43,7 @@ public class FrontMatterEnvelopeFormat(
}
override fun readObject(input: Input): Envelope {
var line = ""
var line: String
do {
line = input.readUtf8Line() //?: error("Input does not contain front matter separator")
} while (!line.startsWith(SEPARATOR))

View File

@ -8,6 +8,7 @@ import hep.dataforge.io.MetaFormatFactory
import hep.dataforge.meta.*
import hep.dataforge.meta.descriptors.ItemDescriptor
import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.NameToken
import hep.dataforge.names.withIndex
import hep.dataforge.values.ListValue
@ -86,16 +87,15 @@ public fun YamlMap.toMeta(): Meta = YamlMeta(this)
*/
@DFExperimental
public class YamlMetaFormat(private val meta: Meta) : MetaFormat {
private val coder = Yaml.default
override fun writeMeta(output: Output, meta: Meta, descriptor: NodeDescriptor?) {
val yaml = meta.toYaml()
val string = coder.encodeToString(yaml)
val string = Yaml.encodeToString(yaml)
output.writeUtf8String(string)
}
override fun readMeta(input: Input, descriptor: NodeDescriptor?): Meta {
val yaml = coder.decodeYamlMapFromString(input.readUtf8String())
val yaml = Yaml.decodeYamlMapFromString(input.readUtf8String())
return yaml.toMeta()
}
@ -116,7 +116,7 @@ public class YamlMetaFormat(private val meta: Meta) : MetaFormat {
override fun writeMeta(output: Output, meta: Meta, descriptor: NodeDescriptor?): Unit =
default.writeMeta(output, meta, descriptor)
override fun readMeta(input: kotlinx.io.Input, descriptor: NodeDescriptor?): Meta =
override fun readMeta(input: Input, descriptor: NodeDescriptor?): Meta =
default.readMeta(input, descriptor)
}
}

View File

@ -2,10 +2,10 @@ package hep.dataforge.io.yaml
import hep.dataforge.io.parse
import hep.dataforge.io.toString
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.meta.get
import hep.dataforge.meta.seal
import hep.dataforge.misc.DFExperimental
import kotlin.test.Test
import kotlin.test.assertEquals

View File

@ -1,6 +1,6 @@
package hep.dataforge.io
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
/**
* A fire-and-forget consumer of messages

View File

@ -1,9 +1,9 @@
package hep.dataforge.io
import hep.dataforge.context.Global
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.get
import hep.dataforge.meta.int
import hep.dataforge.misc.DFExperimental
import kotlinx.io.text.writeUtf8String
import kotlin.test.Test
import kotlin.test.assertEquals

View File

@ -1,9 +1,9 @@
package hep.dataforge.io
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.meta.isEmpty
import hep.dataforge.misc.DFExperimental
import kotlinx.io.*
import java.nio.file.Files
import java.nio.file.Path

View File

@ -1,7 +1,7 @@
package hep.dataforge.io
import hep.dataforge.context.Global
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import kotlinx.io.asBinary
import kotlinx.io.toByteArray
import kotlinx.io.writeDouble
@ -54,7 +54,7 @@ class FileBinaryTest {
val tmpPath = Files.createTempFile("dataforge_test", ".df")
Global.io.writeEnvelopeFile(tmpPath, envelope)
val binary = Global.io.readEnvelopeFile(tmpPath)?.data!!
val binary = Global.io.readEnvelopeFile(tmpPath).data!!
assertEquals(binary.size, binary.toByteArray().size)
}
}

View File

@ -1,7 +1,7 @@
package hep.dataforge.io
import hep.dataforge.context.Global
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import kotlinx.io.writeDouble
import java.nio.file.Files
import kotlin.test.Test
@ -29,7 +29,7 @@ class FileEnvelopeTest {
val tmpPath = Files.createTempFile("dataforge_test", ".df")
writeEnvelopeFile(tmpPath, envelope)
println(tmpPath.toUri())
val restored: Envelope = readEnvelopeFile(tmpPath)!!
val restored: Envelope = readEnvelopeFile(tmpPath)
assertTrue { envelope.contentEquals(restored) }
}
}
@ -40,7 +40,7 @@ class FileEnvelopeTest {
val tmpPath = Files.createTempFile("dataforge_test_tagless", ".df")
writeEnvelopeFile(tmpPath, envelope, envelopeFormat = TaglessEnvelopeFormat)
println(tmpPath.toUri())
val restored: Envelope = readEnvelopeFile(tmpPath)!!
val restored: Envelope = readEnvelopeFile(tmpPath)
assertTrue { envelope.contentEquals(restored) }
}
}

View File

@ -9,4 +9,8 @@ kscience {
}
}
description = "Meta definition and basic operations on meta"
description = "Meta definition and basic operations on meta"
readme{
maturity = ru.mipt.npm.gradle.Maturity.DEVELOPMENT
}

View File

@ -1,5 +1,6 @@
package hep.dataforge.meta
import hep.dataforge.misc.DFBuilder
import hep.dataforge.names.Name
import kotlin.properties.ReadWriteProperty

View File

@ -1,5 +1,6 @@
package hep.dataforge.meta
import hep.dataforge.misc.DFBuilder
import hep.dataforge.names.Name
import hep.dataforge.names.asName
import hep.dataforge.values.EnumValue

View File

@ -1,5 +1,6 @@
package hep.dataforge.meta
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.*
public interface MutableMeta<out M : MutableMeta<M>> : TypedMeta<M>, MutableItemProvider {

View File

@ -1,11 +0,0 @@
package hep.dataforge.meta
/**
* General marker for dataforge builders
*/
@DslMarker
public annotation class DFBuilder
@RequiresOptIn(level = RequiresOptIn.Level.WARNING)
@Retention(AnnotationRetention.BINARY)
public annotation class DFExperimental

View File

@ -1,6 +1,7 @@
package hep.dataforge.meta.descriptors
import hep.dataforge.meta.*
import hep.dataforge.misc.DFBuilder
import hep.dataforge.names.*
import hep.dataforge.values.*

View File

@ -1,6 +1,7 @@
package hep.dataforge.meta
import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.toName
import hep.dataforge.values.ListValue
import hep.dataforge.values.Value

View File

@ -1,6 +1,7 @@
package hep.dataforge.meta.transformations
import hep.dataforge.meta.*
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.Name
/**

View File

@ -0,0 +1,21 @@
package hep.dataforge.misc
/**
* General marker for dataforge builders
*/
@DslMarker
public annotation class DFBuilder
/**
* The declaration is experimental and could be changed in the future
*/
@RequiresOptIn(level = RequiresOptIn.Level.WARNING)
@Retention(AnnotationRetention.BINARY)
public annotation class DFExperimental
/**
* The declaration is internal to DataForge and could use unsafe or unstable features.
*/
@RequiresOptIn(level = RequiresOptIn.Level.WARNING)
@Retention(AnnotationRetention.BINARY)
public annotation class DFInternal
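Code that touches such declarations opts in explicitly, as elsewhere in this commit:

@OptIn(DFInternal::class) // acknowledge use of an internal, potentially unstable API
fun <T : Any> wrap(type: KType, block: suspend () -> T): Data<T> = Data(type, block = block)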

View File

@ -1,6 +1,6 @@
package hep.dataforge.names
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import kotlinx.serialization.KSerializer
import kotlinx.serialization.Serializable
import kotlinx.serialization.descriptors.PrimitiveKind
@ -186,7 +186,6 @@ public fun Name.withIndex(index: String): Name {
* Fast [String]-based accessor for item map
*/
public operator fun <T> Map<NameToken, T>.get(body: String, query: String? = null): T? = get(NameToken(body, query))
public operator fun <T> Map<Name, T>.get(name: String): T? = get(name.toName())
public operator fun <T> MutableMap<Name, T>.set(name: String, value: T): Unit = set(name.toName(), value)

View File

@ -1,6 +1,6 @@
package hep.dataforge.names
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
/**

View File

@ -1,5 +1,6 @@
package hep.dataforge.meta
import hep.dataforge.misc.DFExperimental
import hep.dataforge.values.NumberValue
import hep.dataforge.values.True
import hep.dataforge.values.Value

View File

@ -1,6 +1,6 @@
package hep.dataforge.names
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import kotlin.test.Test
import kotlin.test.assertFails
import kotlin.test.assertFalse

View File

@ -22,4 +22,8 @@ kotlin {
}
}
}
}
readme{
maturity = ru.mipt.npm.gradle.Maturity.PROTOTYPE
}

View File

@ -12,4 +12,8 @@ kotlin {
}
}
}
}
readme{
maturity = ru.mipt.npm.gradle.Maturity.PROTOTYPE
}

View File

@ -2,6 +2,7 @@ package hep.dataforge.tables.io
import hep.dataforge.io.Envelope
import hep.dataforge.meta.*
import hep.dataforge.misc.DFExperimental
import hep.dataforge.tables.SimpleColumnHeader
import hep.dataforge.tables.Table
import hep.dataforge.values.Value

View File

@ -4,8 +4,8 @@ import hep.dataforge.meta.Meta
import kotlin.properties.ReadOnlyProperty
import kotlin.reflect.KClass
import kotlin.reflect.KProperty
import kotlin.reflect.full.cast
import kotlin.reflect.full.isSubclassOf
import kotlin.reflect.safeCast
@Suppress("UNCHECKED_CAST")
public fun <T : Any> Column<*>.cast(type: KClass<out T>): Column<T> {
@ -22,7 +22,7 @@ public class CastColumn<T : Any>(private val origin: Column<*>, override val typ
override val size: Int get() = origin.size
override fun get(index: Int): T? = type.cast(origin[index])
override fun get(index: Int): T? = type.safeCast(origin[index])
}
public class ColumnProperty<C: Any, T : C>(public val table: Table<C>, public val type: KClass<T>) : ReadOnlyProperty<Any?, Column<T>> {
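
Switching from `cast` to `safeCast` changes the failure mode: a value whose runtime type does not match the column type now yields `null` instead of throwing `ClassCastException`. A standalone sketch of the difference, mirroring the import used above:

import kotlin.reflect.safeCast

val value: Any = 42
val asInt: Int? = Int::class.safeCast(value)          // 42
val asString: String? = String::class.safeCast(value) // null rather than a ClassCastException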

View File

@ -1,6 +1,6 @@
package hep.dataforge.tables.io
import hep.dataforge.meta.DFExperimental
import hep.dataforge.misc.DFExperimental
import hep.dataforge.tables.Table
import hep.dataforge.tables.get
import hep.dataforge.tables.row

View File

@ -13,4 +13,8 @@ kotlin {
}
}
}
}
readme{
maturity = ru.mipt.npm.gradle.Maturity.EXPERIMENTAL
}

View File

@ -6,11 +6,13 @@ import hep.dataforge.data.GoalExecutionRestriction
import hep.dataforge.meta.Meta
import hep.dataforge.meta.descriptors.Described
import hep.dataforge.meta.descriptors.ItemDescriptor
import hep.dataforge.misc.DFInternal
import hep.dataforge.misc.Type
import hep.dataforge.names.Name
import hep.dataforge.workspace.Task.Companion.TYPE
import kotlinx.coroutines.withContext
import kotlin.reflect.KClass
import kotlin.reflect.KType
import kotlin.reflect.typeOf
@Type(TYPE)
public interface Task<out T : Any> : Described {
@ -42,8 +44,9 @@ public class TaskResultBuilder<T : Any>(
* Data dependency cycles are not allowed.
*/
@Suppress("FunctionName")
@DFInternal
public fun <T : Any> Task(
resultType: KClass<out T>,
resultType: KType,
descriptor: ItemDescriptor? = null,
builder: suspend TaskResultBuilder<T>.() -> Unit,
): Task<T> = object : Task<T> {
@ -56,15 +59,16 @@ public fun <T : Any> Task(
taskMeta: Meta,
): TaskResult<T> = withContext(GoalExecutionRestriction() + workspace.goalLogger) {
//TODO use a safe builder and check for external data on add and detect cycles
val dataset = DataTree(resultType) {
val dataset = DataTree<T>(resultType) {
TaskResultBuilder(workspace, taskName, taskMeta, this).apply { builder() }
}
workspace.internalize(dataset, taskName, taskMeta)
}
}
@OptIn(DFInternal::class)
@Suppress("FunctionName")
public inline fun <reified T : Any> Task(
descriptor: ItemDescriptor? = null,
noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
): Task<T> = Task(T::class, descriptor, builder)
): Task<T> = Task(typeOf<T>(), descriptor, builder)
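
The `KType`-based factory is `@DFInternal`; ordinary call sites go through the reified overload, which captures the type via `typeOf<T>()`, so they look the same as before. A hedged sketch of a caller; the builder body is illustrative:

import hep.dataforge.workspace.Task

val squares: Task<Int> = Task<Int> {
    // `this` is TaskResultBuilder<Int>; emit the result data here (omitted).
}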

View File

@ -1,7 +1,7 @@
package hep.dataforge.workspace
import hep.dataforge.actions.NamedData
import hep.dataforge.data.Data
import hep.dataforge.data.NamedData
import hep.dataforge.meta.Meta
import hep.dataforge.names.Name

View File

@ -7,16 +7,15 @@ import hep.dataforge.data.ActiveDataTree
import hep.dataforge.data.DataSet
import hep.dataforge.data.DataSetBuilder
import hep.dataforge.data.DataTree
import hep.dataforge.meta.DFBuilder
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.misc.DFBuilder
import hep.dataforge.misc.DFExperimental
import hep.dataforge.names.Name
import hep.dataforge.names.toName
import kotlin.properties.PropertyDelegateProvider
import kotlin.properties.ReadOnlyProperty
import kotlin.reflect.KClass
public data class TaskReference<T: Any>(public val taskName: Name, public val task: Task<T>)
@ -24,25 +23,19 @@ public interface TaskContainer {
public fun registerTask(taskName: Name, task: Task<*>)
}
public fun <T : Any> TaskContainer.registerTask(
resultType: KClass<out T>,
name: String,
descriptorBuilder: NodeDescriptor.() -> Unit = {},
builder: suspend TaskResultBuilder<T>.() -> Unit,
): Unit = registerTask(name.toName(), Task(resultType, NodeDescriptor(descriptorBuilder), builder))
public inline fun <reified T : Any> TaskContainer.registerTask(
name: String,
noinline descriptorBuilder: NodeDescriptor.() -> Unit = {},
noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
): Unit = registerTask(T::class, name, descriptorBuilder, builder)
): Unit = registerTask(name.toName(), Task(NodeDescriptor(descriptorBuilder), builder))
public inline fun <reified T : Any> TaskContainer.task(
noinline descriptorBuilder: NodeDescriptor.() -> Unit = {},
noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> = PropertyDelegateProvider { _, property ->
val taskName = property.name.toName()
val task = Task(T::class, NodeDescriptor(descriptorBuilder), builder)
val task = Task(NodeDescriptor(descriptorBuilder), builder)
registerTask(taskName, task)
ReadOnlyProperty { _, _ -> TaskReference(taskName, task) }
}
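
The `task` delegate registers the task under the property's own name and exposes it as a `TaskReference`. A sketch of how a container might use it; the class and its backing storage are hypothetical:

import hep.dataforge.names.Name
import hep.dataforge.workspace.*

class MyTasks : TaskContainer {
    private val tasks = mutableMapOf<Name, Task<*>>()

    override fun registerTask(taskName: Name, task: Task<*>) {
        tasks[taskName] = task
    }

    // Registered under the name "evaluate" when the delegate is provided.
    val evaluate: TaskReference<Int> by task<Int> {
        // suspend builder with a TaskResultBuilder<Int> receiver (body omitted)
    }
}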

View File

@ -3,15 +3,14 @@ package hep.dataforge.workspace
import hep.dataforge.data.Data
import hep.dataforge.data.await
import hep.dataforge.io.*
import kotlin.reflect.KClass
import hep.dataforge.misc.DFInternal
/**
* Convert an [Envelope] to data via the given format. The actual parsing is done lazily.
*/
@OptIn(DFInternal::class)
public fun <T : Any> Envelope.toData(format: IOFormat<T>): Data<T> {
@Suppress("UNCHECKED_CAST")
val kclass: KClass<T> = format.type.classifier as? KClass<T> ?: error("IOFormat type is not a class")
return Data(kclass, meta) {
return Data(format.type, meta) {
data?.readWith(format) ?: error("Can't convert envelope without data to Data")
}
}
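
`Data` is now built straight from the format's `KType`, so the classifier lookup and its failure path disappear. A hypothetical usage, assuming an `Envelope` and a matching `IOFormat<String>` are supplied by the caller:

import hep.dataforge.data.Data
import hep.dataforge.io.Envelope
import hep.dataforge.io.IOFormat

fun parseText(envelope: Envelope, format: IOFormat<String>): Data<String> =
    envelope.toData(format) // parsing stays lazy: it runs only when the data is awaited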

View File

@ -4,6 +4,7 @@ package hep.dataforge.workspace
import hep.dataforge.data.*
import hep.dataforge.io.*
import hep.dataforge.meta.*
import hep.dataforge.misc.DFExperimental
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.runBlocking
import kotlinx.coroutines.withContext
@ -15,7 +16,6 @@ import java.nio.file.StandardOpenOption
import java.nio.file.spi.FileSystemProvider
import java.util.zip.ZipEntry
import java.util.zip.ZipOutputStream
import kotlin.reflect.KClass
import kotlin.reflect.KType
import kotlin.reflect.typeOf
import kotlin.streams.toList
@ -36,9 +36,6 @@ internal inline fun <reified T : Any> IOPlugin.formatResolver(): FileFormatResol
resolveIOFormat<T>() ?: error("Can't resolve IO format for ${T::class}")
}
private val <T : Any> FileFormatResolver<T>.kClass: KClass<T>
get() = type.classifier as? KClass<T> ?: error("Format resolver actual type does not correspond to type parameter")
private fun newZFS(path: Path): FileSystem {
val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
?: error("Zip file system provider not found")
@ -110,7 +107,7 @@ public suspend fun <T : Any> IOPlugin.readDataDirectory(
return readDataDirectory(fs.rootDirectories.first(), formatResolver)
}
if (!Files.isDirectory(path)) error("Provided path $path is not a directory")
return DataTree(formatResolver.kClass) {
return DataTree(formatResolver.type) {
Files.list(path).toList().forEach { path ->
val fileName = path.fileName.toString()
if (fileName.startsWith(IOPlugin.META_FILE_NAME)) {

View File

@ -49,7 +49,7 @@ class DataPropagationTest {
runBlocking {
data {
repeat(100) {
emitStatic("myData[$it]", it)
static("myData[$it]", it)
}
}
}
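
The static-data emitter is renamed from `emitStatic` to `static`; the call shape is unchanged. A minimal sketch of the new spelling, modeled on the test above:

import hep.dataforge.data.*
import kotlinx.coroutines.runBlocking

val node = runBlocking {
    DataTree<Int> {
        repeat(3) {
            static("myData[$it]", it) // formerly emitStatic("myData[$it]", it)
        }
    }
}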

View File

@ -4,8 +4,8 @@ import hep.dataforge.context.Global
import hep.dataforge.data.*
import hep.dataforge.io.IOFormat
import hep.dataforge.io.io
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.misc.DFExperimental
import kotlinx.coroutines.runBlocking
import kotlinx.io.Input
import kotlinx.io.Output
@ -23,11 +23,11 @@ class FileDataTest {
val dataNode = runBlocking {
DataTree<String> {
emit("dir") {
emitStatic("a", "Some string") {
static("a", "Some string") {
"content" put "Some string"
}
}
emitStatic("b", "root data")
static("b", "root data")
meta {
"content" put "This is root meta node"
}

View File

@ -1,9 +1,9 @@
package hep.dataforge.workspace
import hep.dataforge.actions.get
import hep.dataforge.context.*
import hep.dataforge.data.*
import hep.dataforge.meta.*
import hep.dataforge.names.get
import hep.dataforge.names.plus
import kotlinx.coroutines.flow.first
import kotlinx.coroutines.flow.single
@ -56,7 +56,7 @@ class SimpleWorkspaceTest {
data {
repeat(100) {
emitStatic("myData[$it]", it)
static("myData[$it]", it)
}
}

View File

@ -10,8 +10,8 @@ pluginManagement {
maven("https://dl.bintray.com/mipt-npm/dev")
}
val toolsVersion = "0.7.4"
val kotlinVersion = "1.4.30-RC"
val toolsVersion = "0.7.6"
val kotlinVersion = "1.4.30"
plugins {
id("ru.mipt.npm.project") version toolsVersion