From 0a232451175811ecba11cecb07c5b3519e31f9ba Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 26 Nov 2023 12:02:54 +0300
Subject: [PATCH 01/77] fix sonatype deploy address

---
 build.gradle.kts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.gradle.kts b/build.gradle.kts
index 4b5fe48c..68b1aba6 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -31,7 +31,7 @@ ksciencePublish {
         useSPCTeam()
     }
     repository("spc","https://maven.sciprog.center/kscience")
-    sonatype()
+    sonatype("https://oss.sonatype.org")
 }
 
 apiValidation {

From 5461a83417f397795332a0eb0de289bd792e6545 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 4 Dec 2023 09:55:04 +0300
Subject: [PATCH 02/77] Name refactoring. Meta.stringList accepts nullable
 receiver.

---
 build.gradle.kts                              |  2 +-
 .../kscience/dataforge/context/Plugin.kt      |  4 +--
 .../dataforge/context/PluginFactory.kt        |  4 +--
 .../kscience/dataforge/provider/dfType.kt     |  6 ++--
 .../kscience/dataforge/actions/MapAction.kt   |  3 +-
 .../dataforge/actions/ReduceAction.kt         |  2 +-
 .../kscience/dataforge/actions/SplitAction.kt |  2 +-
 .../space/kscience/dataforge/data/Data.kt     |  4 +--
 .../space/kscience/dataforge/data/DataTree.kt |  4 +--
 .../dataforge/data/actionInContext.kt         |  2 --
 .../kscience/dataforge/io/EnvelopeFormat.kt   |  4 +--
 .../space/kscience/dataforge/io/IOFormat.kt   |  4 +--
 .../space/kscience/dataforge/io/MetaFormat.kt |  4 +--
 .../space/kscience/dataforge/meta/Meta.kt     |  6 ++--
 .../meta/descriptors/MetaDescriptorBuilder.kt | 10 ++++--
 .../dataforge/misc/{DfId.kt => DfType.kt}     |  5 ++-
 .../meta/descriptors/DescriptorTest.kt        |  6 ++--
 .../kscience/dataforge/workspace/Task.kt      |  4 +--
 .../kscience/dataforge/workspace/Workspace.kt |  4 +--
 .../kscience/dataforge/workspace/fileData.kt  | 33 +++++++++++++++----
 20 files changed, 68 insertions(+), 45 deletions(-)
 delete mode 100644 dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/actionInContext.kt
 rename dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/{DfId.kt => DfType.kt} (56%)

diff --git a/build.gradle.kts b/build.gradle.kts
index 68b1aba6..edeae557 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -8,7 +8,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.7.0"
+    version = "0.7.1"
 }
 
 subprojects {
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/Plugin.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/Plugin.kt
index 152f5a76..6c5648a6 100644
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/Plugin.kt
+++ b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/Plugin.kt
@@ -3,7 +3,7 @@ package space.kscience.dataforge.context
 import space.kscience.dataforge.context.Plugin.Companion.TARGET
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MetaRepr
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
 import space.kscience.dataforge.misc.Named
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.parseAsName
@@ -18,7 +18,7 @@ import space.kscience.dataforge.provider.Provider
  *
  * create - configure - attach - detach - destroy
  */
-@DfId(TARGET)
+@DfType(TARGET)
 public interface Plugin : Named, ContextAware, Provider, MetaRepr {
 
     /**
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/PluginFactory.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/PluginFactory.kt
index 0273d327..9cc67168 100644
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/PluginFactory.kt
+++ b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/PluginFactory.kt
@@ -1,9 +1,9 @@
 package space.kscience.dataforge.context
 
 import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
 
-@DfId(PluginFactory.TYPE)
+@DfType(PluginFactory.TYPE)
 public interface PluginFactory<T : Plugin> : Factory<T> {
     public val tag: PluginTag
 
diff --git a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt
index ab34ea50..04e681da 100644
--- a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt
+++ b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt
@@ -4,7 +4,7 @@ import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.PluginBuilder
 import space.kscience.dataforge.context.gather
 import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
 import space.kscience.dataforge.misc.Named
 import space.kscience.dataforge.names.Name
 import kotlin.reflect.KClass
@@ -13,10 +13,10 @@ import kotlin.reflect.full.findAnnotation
 
 @DFExperimental
 public val KClass<*>.dfId: String
-    get() = findAnnotation<DfId>()?.id ?: simpleName ?: ""
+    get() = findAnnotation<DfType>()?.id ?: simpleName ?: ""
 
 /**
- * Provide an object with given name inferring target from its type using [DfId] annotation
+ * Provide an object with given name inferring target from its type using [DfType] annotation
  */
 @DFExperimental
 public inline fun <reified T : Any> Provider.provideByType(name: String): T? {
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index 883b3928..7b2c94f5 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -98,8 +98,7 @@ internal class MapAction<in T : Any, R : Any>(
  * A one-to-one mapping action
  */
 @DFExperimental
-@Suppress("FunctionName")
-public inline fun <T : Any, reified R : Any> Action.Companion.map(
+public inline fun <T : Any, reified R : Any> Action.Companion.mapping(
     noinline builder: MapActionBuilder<T, R>.() -> Unit,
 ): Action<T, R> = MapAction(typeOf<R>(), builder)
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
index fe823bd7..a74cfad9 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
@@ -112,6 +112,6 @@ internal class ReduceAction<T : Any, R : Any>(
  * A one-to-one mapping action
  */
 @DFExperimental
-public inline fun <reified T : Any, reified R : Any> Action.Companion.reduce(
+public inline fun <reified T : Any, reified R : Any> Action.Companion.reducing(
     noinline builder: ReduceGroupBuilder<T, R>.() -> Unit,
 ): Action<T, R> = ReduceAction(typeOf<R>(), builder)
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
index 24745929..0ecde319 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
@@ -87,6 +87,6 @@ internal class SplitAction<T : Any, R : Any>(
  * Action that splits each incoming element into a number of fragments defined in builder
  */
 @DFExperimental
-public inline fun <T : Any, reified R : Any> Action.Companion.split(
+public inline fun <T : Any, reified R : Any> Action.Companion.splitting(
     noinline builder: SplitBuilder<T, R>.() -> Unit,
 ): Action<T, R> = SplitAction(typeOf<R>(), builder)
\ No newline at end of file
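
A minimal usage sketch of the renamed one-to-one builder (it mirrors the test updated in patch 03 below); `reducing` and `splitting` follow the same pattern with their own builders:

    import space.kscience.dataforge.actions.Action
    import space.kscience.dataforge.actions.mapping
    import space.kscience.dataforge.misc.DFExperimental

    @OptIn(DFExperimental::class)
    val plusOne: Action<Int, Int> = Action.mapping<Int, Int> {
        // transform each element of the data set one-to-one
        result { it + 1 }
    }
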
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
index 984582e5..4d883795 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
@@ -5,7 +5,7 @@ import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MetaRepr
 import space.kscience.dataforge.meta.isEmpty
 import space.kscience.dataforge.misc.DFInternal
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
 import kotlin.coroutines.CoroutineContext
 import kotlin.coroutines.EmptyCoroutineContext
 import kotlin.reflect.KType
@@ -14,7 +14,7 @@ import kotlin.reflect.typeOf
 /**
  * A data element characterized by its meta
  */
-@DfId(Data.TYPE)
+@DfType(Data.TYPE)
 public interface Data<out T> : Goal<T>, MetaRepr {
     /**
      * Type marker for the data. The type is known before the calculation takes place so it could be checked.
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTree.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTree.kt
index bafcbea2..b9273c07 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTree.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTree.kt
@@ -2,7 +2,7 @@ package space.kscience.dataforge.data
 
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.misc.DFInternal
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
 import space.kscience.dataforge.names.*
 import kotlin.collections.component1
 import kotlin.collections.component2
@@ -31,7 +31,7 @@ public val <T : Any> DataTreeItem<T>.type: KType
 /**
  * A tree-like [DataSet] grouped into the node. All data inside the node must inherit its type
  */
-@DfId(DataTree.TYPE)
+@DfType(DataTree.TYPE)
 public interface DataTree<out T : Any> : DataSet<T> {
 
     /**
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/actionInContext.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/actionInContext.kt
deleted file mode 100644
index 33731a95..00000000
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/actionInContext.kt
+++ /dev/null
@@ -1,2 +0,0 @@
-package space.kscience.dataforge.data
-
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/EnvelopeFormat.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/EnvelopeFormat.kt
index 0df5ab27..0e998760 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/EnvelopeFormat.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/EnvelopeFormat.kt
@@ -4,7 +4,7 @@ import kotlinx.io.Source
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.io.EnvelopeFormatFactory.Companion.ENVELOPE_FORMAT_TYPE
 import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
 import kotlin.reflect.KType
@@ -17,7 +17,7 @@ public interface EnvelopeFormat : IOFormat<Envelope> {
 
 public fun EnvelopeFormat.read(input: Source): Envelope = readFrom(input)
 
-@DfId(ENVELOPE_FORMAT_TYPE)
+@DfType(ENVELOPE_FORMAT_TYPE)
 public interface EnvelopeFormatFactory : IOFormatFactory<Envelope>, EnvelopeFormat {
     override val type: KType get() = typeOf<Envelope>()
 
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt
index ffcadf1a..390a8bf4 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt
@@ -7,7 +7,7 @@ import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.Factory
 import space.kscience.dataforge.io.IOFormatFactory.Companion.IO_FORMAT_TYPE
 import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
 import space.kscience.dataforge.misc.Named
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
@@ -72,7 +72,7 @@ public fun <T : Any> Sink.writeWith(format: IOWriter<T>, obj: T): Unit =
     format.writeTo(this, obj)
 
 
-@DfId(IO_FORMAT_TYPE)
+@DfType(IO_FORMAT_TYPE)
 public interface IOFormatFactory<T : Any> : Factory<IOFormat<T>>, Named {
     /**
      * Explicit type for dynamic type checks
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/MetaFormat.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/MetaFormat.kt
index cadf87ca..f864dd2f 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/MetaFormat.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/MetaFormat.kt
@@ -9,7 +9,7 @@ import space.kscience.dataforge.context.Global
 import space.kscience.dataforge.io.MetaFormatFactory.Companion.META_FORMAT_TYPE
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
 import space.kscience.dataforge.names.plus
@@ -38,7 +38,7 @@ public interface MetaFormat : IOFormat<Meta> {
     public fun readMeta(source: Source, descriptor: MetaDescriptor? = null): Meta
 }
 
-@DfId(META_FORMAT_TYPE)
+@DfType(META_FORMAT_TYPE)
 public interface MetaFormatFactory : IOFormatFactory<Meta>, MetaFormat {
     public val shortName: String
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt
index 420625ca..979c8782 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt
@@ -2,7 +2,7 @@ package space.kscience.dataforge.meta
 
 import kotlinx.serialization.Serializable
 import kotlinx.serialization.json.Json
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
 import space.kscience.dataforge.misc.unsafeCast
 import space.kscience.dataforge.names.*
 import kotlin.jvm.JvmName
@@ -31,7 +31,7 @@ public fun interface MetaProvider : ValueProvider {
  * TODO add documentation
  * Same name siblings are supported via elements with the same [Name] but different indices.
  */
-@DfId(Meta.TYPE)
+@DfType(Meta.TYPE)
 @Serializable(MetaSerializer::class)
 public interface Meta : MetaRepr, MetaProvider {
     public val value: Value?
@@ -248,7 +248,7 @@ public inline fun <reified E : Enum<E>> Meta?.enum(): E? = this?.value?.let {
     }
 }
 
-public val Meta.stringList: List<String>? get() = value?.list?.map { it.string }
+public val Meta?.stringList: List<String>? get() = this?.value?.list?.map { it.string }
 
 /**
  * Create a provider that uses given provider for default values if those are not found in this provider
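
Since the receiver is now nullable, a missing node no longer needs a safe call before the conversion. A minimal sketch (the "tags" key is an illustrative assumption):

    import space.kscience.dataforge.meta.Meta
    import space.kscience.dataforge.meta.get
    import space.kscience.dataforge.meta.stringList

    fun readTags(meta: Meta): List<String> =
        meta["tags"].stringList ?: emptyList()
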
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
index ae6c171a..95949d03 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
@@ -9,7 +9,11 @@ import space.kscience.dataforge.names.length
 import kotlin.collections.set
 
 public class MetaDescriptorBuilder @PublishedApi internal constructor() {
-    public var info: String? = null
+    public var description: String? = null
+
+    @Deprecated("Replace by description", ReplaceWith("description"))
+    public var info: String? by ::description
+
     public var children: MutableMap<String, MetaDescriptorBuilder> = linkedMapOf()
     public var multiple: Boolean = false
     public var valueRestriction: ValueRestriction = ValueRestriction.NONE
@@ -87,7 +91,7 @@ public class MetaDescriptorBuilder @PublishedApi internal constructor() {
 
     @PublishedApi
     internal fun build(): MetaDescriptor = MetaDescriptor(
-        description = info,
+        description = description,
         children = children.mapValues { it.value.build() },
         multiple = multiple,
         valueRestriction = valueRestriction,
@@ -165,7 +169,7 @@ public inline fun <reified E : Enum<E>> MetaDescriptorBuilder.enum(
 }
 
 private fun MetaDescriptor.toBuilder(): MetaDescriptorBuilder = MetaDescriptorBuilder().apply {
-    info = this@toBuilder.description
+    description = this@toBuilder.description
     children = this@toBuilder.children.mapValuesTo(LinkedHashMap()) { it.value.toBuilder() }
     multiple = this@toBuilder.multiple
     valueRestriction = this@toBuilder.valueRestriction
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/DfId.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/DfType.kt
similarity index 56%
rename from dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/DfId.kt
rename to dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/DfType.kt
index 5d485e23..11f548ae 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/DfId.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/DfType.kt
@@ -5,4 +5,7 @@ package space.kscience.dataforge.misc
  */
 @MustBeDocumented
 @Target(AnnotationTarget.CLASS)
-public annotation class DfId(val id: String)
+public annotation class DfType(val id: String)
+
+@Deprecated("Replace with DfType", replaceWith = ReplaceWith("DfType"))
+public typealias DfId = DfType
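
The rename keeps the single id parameter, so type markers follow the same pattern as Plugin or Data above, and code still using @DfId compiles through the deprecated typealias until it is migrated. A minimal sketch with an assumed service interface (MyService and its TYPE constant are illustrative):

    import space.kscience.dataforge.misc.DfType

    @DfType(MyService.TYPE)
    interface MyService {
        companion object {
            const val TYPE = "myService"
        }
    }
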
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/descriptors/DescriptorTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/descriptors/DescriptorTest.kt
index e8c321fc..1a08ce34 100644
--- a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/descriptors/DescriptorTest.kt
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/descriptors/DescriptorTest.kt
@@ -11,14 +11,14 @@ class DescriptorTest {
 
     val descriptor = MetaDescriptor {
         node("aNode") {
-            info = "A root demo node"
+            description = "A root demo node"
             value("b", ValueType.NUMBER) {
-                info = "b number value"
+                description = "b number value"
             }
             node("otherNode") {
                 value("otherValue", ValueType.BOOLEAN) {
                     default(false)
-                    info = "default value"
+                    description = "default value"
                 }
             }
         }
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
index a1ef7be2..329d9c5a 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
@@ -9,7 +9,7 @@ import space.kscience.dataforge.meta.MetaRepr
 import space.kscience.dataforge.meta.Specification
 import space.kscience.dataforge.meta.descriptors.Described
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.workspace.Task.Companion.TYPE
 import kotlin.reflect.KType
@@ -19,7 +19,7 @@ import kotlin.reflect.typeOf
  * A configurable task that could be executed on a workspace. The [TaskResult] represents a lazy result of the task.
  * In general no computations should be made until the result is called.
  */
-@DfId(TYPE)
+@DfType(TYPE)
 public interface Task<out T : Any> : Described {
 
     /**
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
index ee00f539..37b473db 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
@@ -6,7 +6,7 @@ import space.kscience.dataforge.data.DataSet
 import space.kscience.dataforge.data.asSequence
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.provider.Provider
 
@@ -18,7 +18,7 @@ public interface DataSelector<T: Any>{
 /**
  * An environment for pull-mode computation
  */
-@DfId(Workspace.TYPE)
+@DfType(Workspace.TYPE)
 public interface Workspace : ContextAware, Provider {
     /**
      * The whole data node for current workspace
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt
index d9f678b3..ce1b5152 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt
@@ -11,7 +11,6 @@ import space.kscience.dataforge.data.*
 import space.kscience.dataforge.io.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.copy
-import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.string
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.misc.DFInternal
@@ -27,10 +26,7 @@ import java.nio.file.WatchEvent
 import java.nio.file.attribute.BasicFileAttributes
 import java.nio.file.spi.FileSystemProvider
 import java.time.Instant
-import kotlin.io.path.extension
-import kotlin.io.path.name
-import kotlin.io.path.nameWithoutExtension
-import kotlin.io.path.readAttributes
+import kotlin.io.path.*
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
@@ -92,7 +88,7 @@ public fun <T : Any> IOPlugin.readDataFile(
 
 
 context(IOPlugin) @DFExperimental
-private fun <T : Any> DataSetBuilder<T>.directory(
+public fun <T : Any> DataSetBuilder<T>.directory(
     path: Path,
     ignoreExtensions: Set<String>,
     formatResolver: FileFormatResolver<T>,
@@ -145,7 +141,7 @@ public inline fun <reified T : Any> IOPlugin.readDataDirectory(
 ): DataTree<T> = readDataDirectory(typeOf<T>(), path, ignoreExtensions, formatResolver)
 
 /**
- * Read raw binary data tree from the directory. All files are read as-is (save for meta files).
+ * Read a raw binary data tree from the directory. All files are read as-is (save for meta files).
  */
 @DFExperimental
 public fun IOPlugin.readRawDirectory(
@@ -260,6 +256,29 @@ public suspend fun <T : Any> IOPlugin.writeDataDirectory(
     }
 }
 
+/**
+ * Reads the specified resources and returns a [DataTree] containing the data.
+ *
+ * @param resources The names of the resources to read.
+ * @param classLoader The class loader to use for loading the resources. By default, it uses the current thread's context class loader.
+ * @return A DataTree containing the data read from the resources.
+ */
+@DFExperimental
+private fun IOPlugin.readResources(
+    vararg resources: String,
+    classLoader: ClassLoader = Thread.currentThread().contextClassLoader,
+): DataTree<Binary> {
+//    require(resource.isNotBlank()) {"Can't mount root resource tree as data root"}
+    return DataTree {
+        resources.forEach { resource ->
+            val path = classLoader.getResource(resource)?.toURI()?.toPath() ?: error(
+                "Resource with name $resource is not resolved"
+            )
+            node(resource, readRawDirectory(path))
+        }
+    }
+}
+
 /**
  * Add file/directory-based data tree item
  *

From 946ac88480342b0b96205fcec182c75435953ecc Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 5 Dec 2023 15:13:50 +0300
Subject: [PATCH 03/77] Partially fixed a bug with `MutableMeta` observable
 wrappers.

---
 CHANGELOG.md                                  |  2 +
 dataforge-context/build.gradle.kts            |  6 ++-
 .../kscience/dataforge/context/loggingWasm.kt |  3 ++
 dataforge-data/build.gradle.kts               |  4 +-
 .../kscience/dataforge/data/ActionsTest.kt    |  6 +--
 dataforge-io/build.gradle.kts                 |  3 +-
 dataforge-meta/api/dataforge-meta.api         |  6 ++-
 dataforge-meta/build.gradle.kts               |  1 +
 .../dataforge/meta/ObservableMetaWrapper.kt   | 41 ++++++++++------
 .../space/kscience/dataforge/names/Name.kt    |  4 ++
 .../dataforge/meta/ObservableMetaTest.kt      | 49 +++++++++++++++++++
 .../space/kscience/dataforge/misc/castJs.kt   |  4 +-
 .../kscience/dataforge/misc/castNative.kt     |  2 +-
 .../space/kscience/dataforge/misc/castWasm.kt |  4 ++
 dataforge-scripting/build.gradle.kts          |  6 +--
 dataforge-workspace/build.gradle.kts          | 26 +++++-----
 gradle.properties                             |  2 +-
 17 files changed, 125 insertions(+), 44 deletions(-)
 create mode 100644 dataforge-context/src/wasmJsMain/kotlin/space/kscience/dataforge/context/loggingWasm.kt
 create mode 100644 dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ObservableMetaTest.kt
 create mode 100644 dataforge-meta/src/wasmJsMain/kotlin/space/kscience/dataforge/misc/castWasm.kt

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e32cc6bf..87ccad00 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,7 @@
 ## Unreleased
 
 ### Added
+- Wasm artifacts
 
 ### Changed
 
@@ -11,6 +12,7 @@
 ### Removed
 
 ### Fixed
+- Partially fixed a bug with `MutableMeta` observable wrappers.
 
 ### Security
 
diff --git a/dataforge-context/build.gradle.kts b/dataforge-context/build.gradle.kts
index be9036d0..b59abed0 100644
--- a/dataforge-context/build.gradle.kts
+++ b/dataforge-context/build.gradle.kts
@@ -8,12 +8,14 @@ kscience {
     jvm()
     js()
     native()
+    wasm()
     useCoroutines()
     useSerialization()
-    dependencies {
+    commonMain {
         api(project(":dataforge-meta"))
+        api(spclibs.atomicfu)
     }
-    dependencies(jvmMain){
+    jvmMain{
         api(kotlin("reflect"))
         api("org.slf4j:slf4j-api:1.7.30")
     }
diff --git a/dataforge-context/src/wasmJsMain/kotlin/space/kscience/dataforge/context/loggingWasm.kt b/dataforge-context/src/wasmJsMain/kotlin/space/kscience/dataforge/context/loggingWasm.kt
new file mode 100644
index 00000000..740957b4
--- /dev/null
+++ b/dataforge-context/src/wasmJsMain/kotlin/space/kscience/dataforge/context/loggingWasm.kt
@@ -0,0 +1,3 @@
+package space.kscience.dataforge.context
+
+internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = DefaultLogManager
\ No newline at end of file
diff --git a/dataforge-data/build.gradle.kts b/dataforge-data/build.gradle.kts
index 9f96604a..ea542290 100644
--- a/dataforge-data/build.gradle.kts
+++ b/dataforge-data/build.gradle.kts
@@ -6,9 +6,11 @@ kscience{
     jvm()
     js()
     native()
+    wasm()
     useCoroutines()
     dependencies {
-        api(project(":dataforge-meta"))
+        api(spclibs.atomicfu)
+        api(projects.dataforgeMeta)
         api(kotlin("reflect"))
     }
 }
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index 3987cd19..b24c4f27 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -6,7 +6,7 @@ import kotlinx.coroutines.test.runTest
 import org.junit.jupiter.api.Test
 import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.actions.invoke
-import space.kscience.dataforge.actions.map
+import space.kscience.dataforge.actions.mapping
 import space.kscience.dataforge.misc.DFExperimental
 import kotlin.test.assertEquals
 
@@ -20,7 +20,7 @@ internal class ActionsTest {
             }
         }
 
-        val plusOne = Action.map<Int, Int> {
+        val plusOne = Action.mapping<Int, Int> {
             result { it + 1 }
         }
         val result = plusOne(data)
@@ -31,7 +31,7 @@ internal class ActionsTest {
     fun testDynamicMapAction() = runTest {
         val data: DataSourceBuilder<Int> = DataSource()
 
-        val plusOne = Action.map<Int, Int> {
+        val plusOne = Action.mapping<Int, Int> {
             result { it + 1 }
         }
 
diff --git a/dataforge-io/build.gradle.kts b/dataforge-io/build.gradle.kts
index 6d3c888c..f7197197 100644
--- a/dataforge-io/build.gradle.kts
+++ b/dataforge-io/build.gradle.kts
@@ -4,12 +4,13 @@ plugins {
 
 description = "IO module"
 
-val ioVersion = "0.2.1"
+val ioVersion = "0.3.0"
 
 kscience {
     jvm()
     js()
     native()
+    wasm()
     useSerialization()
     useSerialization(sourceSet = space.kscience.gradle.DependencySourceSet.TEST) {
         cbor()
diff --git a/dataforge-meta/api/dataforge-meta.api b/dataforge-meta/api/dataforge-meta.api
index 7daa7540..1700ca7d 100644
--- a/dataforge-meta/api/dataforge-meta.api
+++ b/dataforge-meta/api/dataforge-meta.api
@@ -724,6 +724,7 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorBuild
 	public final fun getAttributes ()Lspace/kscience/dataforge/meta/MutableMeta;
 	public final fun getChildren ()Ljava/util/Map;
 	public final fun getDefault ()Lspace/kscience/dataforge/meta/Value;
+	public final fun getDescription ()Ljava/lang/String;
 	public final fun getIndexKey ()Ljava/lang/String;
 	public final fun getInfo ()Ljava/lang/String;
 	public final fun getMultiple ()Z
@@ -737,6 +738,7 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorBuild
 	public final fun setAttributes (Lspace/kscience/dataforge/meta/MutableMeta;)V
 	public final fun setChildren (Ljava/util/Map;)V
 	public final fun setDefault (Lspace/kscience/dataforge/meta/Value;)V
+	public final fun setDescription (Ljava/lang/String;)V
 	public final fun setIndexKey (Ljava/lang/String;)V
 	public final fun setInfo (Ljava/lang/String;)V
 	public final fun setMultiple (Z)V
@@ -903,7 +905,7 @@ public abstract interface annotation class space/kscience/dataforge/misc/DFExper
 public abstract interface annotation class space/kscience/dataforge/misc/DFInternal : java/lang/annotation/Annotation {
 }
 
-public abstract interface annotation class space/kscience/dataforge/misc/DfId : java/lang/annotation/Annotation {
+public abstract interface annotation class space/kscience/dataforge/misc/DfType : java/lang/annotation/Annotation {
 	public abstract fun id ()Ljava/lang/String;
 }
 
@@ -944,6 +946,7 @@ public final class space/kscience/dataforge/names/NameKt {
 	public static final fun asName (Lspace/kscience/dataforge/names/NameToken;)Lspace/kscience/dataforge/names/Name;
 	public static final fun cutFirst (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/names/Name;
 	public static final fun cutLast (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/names/Name;
+	public static final fun endsWith (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Z
 	public static final fun endsWith (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;)Z
 	public static final fun endsWith (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/NameToken;)Z
 	public static final fun first (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/names/NameToken;
@@ -966,6 +969,7 @@ public final class space/kscience/dataforge/names/NameKt {
 	public static final fun removeHeadOrNull (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/names/Name;
 	public static final fun replaceLast (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/names/Name;
 	public static final fun set (Ljava/util/Map;Ljava/lang/String;Ljava/lang/Object;)V
+	public static final fun startsWith (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Z
 	public static final fun startsWith (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;)Z
 	public static final fun startsWith (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/NameToken;)Z
 	public static final fun withIndex (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Lspace/kscience/dataforge/names/Name;
diff --git a/dataforge-meta/build.gradle.kts b/dataforge-meta/build.gradle.kts
index 51b07113..d150ef98 100644
--- a/dataforge-meta/build.gradle.kts
+++ b/dataforge-meta/build.gradle.kts
@@ -6,6 +6,7 @@ kscience {
     jvm()
     js()
     native()
+    wasm()
     useSerialization{
         json()
     }
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt
index 76645d83..71e15aa9 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt
@@ -6,60 +6,71 @@ import space.kscience.dataforge.names.*
 
 /**
  * A class that takes a [MutableMeta] provider and adds observability on top of that
+ *
+ * TODO rewrite to properly work with detached nodes
  */
 private class ObservableMetaWrapper(
     val root: MutableMeta,
-    val absoluteName: Name,
+    val nodeName: Name,
     val listeners: MutableSet<MetaListener>,
 ) : ObservableMutableMeta {
     override val items: Map<NameToken, ObservableMutableMeta>
         get() = root.items.keys.associateWith {
-            ObservableMetaWrapper(root, absoluteName + it, listeners)
+            ObservableMetaWrapper(root, nodeName + it, listeners)
         }
 
-    override fun get(name: Name): ObservableMutableMeta? =
-        root.get(name)?.let { ObservableMetaWrapper(root, this.absoluteName + name, listeners) }
+    override fun get(name: Name): ObservableMutableMeta? = if (root[nodeName + name] == null) {
+        null
+    } else {
+        ObservableMetaWrapper(root, nodeName + name, listeners)
+    }
 
     @ThreadSafe
     override fun onChange(owner: Any?, callback: Meta.(name: Name) -> Unit) {
         listeners.add(
-            MetaListener(Pair(owner, absoluteName)) { name ->
-                if (name.startsWith(absoluteName)) {
-                    (this[absoluteName] ?: Meta.EMPTY).callback(name.removeFirstOrNull(absoluteName)!!)
+            MetaListener(Pair(owner, nodeName)) { fullName ->
+                if (fullName.startsWith(nodeName)) {
+                    root[nodeName]?.callback(fullName.removeFirstOrNull(nodeName)!!)
                 }
             }
         )
     }
 
     override fun removeListener(owner: Any?) {
-        listeners.removeAll { it.owner === Pair(owner, absoluteName) }
+        listeners.removeAll { it.owner === Pair(owner, nodeName) }
     }
 
     override fun invalidate(name: Name) {
-        listeners.forEach { it.callback(this, name) }
+        listeners.forEach { it.callback(this, nodeName + name) }
     }
 
     override var value: Value?
-        get() = root.value
+        get() = root[nodeName]?.value
         set(value) {
-            root.value = value
+            root.getOrCreate(nodeName).value = value
             invalidate(Name.EMPTY)
         }
 
     override fun getOrCreate(name: Name): ObservableMutableMeta =
-        ObservableMetaWrapper(root, this.absoluteName + name, listeners)
+        ObservableMetaWrapper(root, nodeName + name, listeners)
 
-    override fun set(name: Name, node: Meta?) {
+    fun removeNode(name: Name): Meta? {
         val oldMeta = get(name)
         //don't forget to remove listener
         oldMeta?.removeListener(this)
-        root.set(absoluteName + name, node)
+
+        return oldMeta
+    }
+
+    override fun set(name: Name, node: Meta?) {
+        val oldMeta = removeNode(name)
+        root[nodeName + name] = node
         if (oldMeta != node) {
             invalidate(name)
         }
     }
 
-    override fun toMeta(): Meta = root[absoluteName]?.toMeta() ?: Meta.EMPTY
+    override fun toMeta(): Meta = root[nodeName]?.toMeta() ?: Meta.EMPTY
 
     override fun toString(): String = Meta.toString(this)
     override fun equals(other: Any?): Boolean = Meta.equals(this, other as? Meta)
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt
index 160ea3a1..1c9a9cf3 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt
@@ -216,9 +216,13 @@ public fun Name.endsWith(token: NameToken): Boolean = lastOrNull() == token
 public fun Name.startsWith(name: Name): Boolean =
     this.length >= name.length && (this == name || tokens.subList(0, name.length) == name.tokens)
 
+public fun Name.startsWith(name: String): Boolean = startsWith(name.parseAsName())
+
 public fun Name.endsWith(name: Name): Boolean =
     this.length >= name.length && (this == name || tokens.subList(length - name.length, length) == name.tokens)
 
+public fun Name.endsWith(name: String): Boolean = endsWith(name.parseAsName())
+
 /**
  * if [this] starts with given [head] name, returns the reminder of the name (could be empty). Otherwise, returns null
  */
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ObservableMetaTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ObservableMetaTest.kt
new file mode 100644
index 00000000..4681ec12
--- /dev/null
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ObservableMetaTest.kt
@@ -0,0 +1,49 @@
+package space.kscience.dataforge.meta
+
+import space.kscience.dataforge.names.startsWith
+import kotlin.test.Ignore
+import kotlin.test.Test
+import kotlin.test.assertEquals
+
+class ObservableMetaTest {
+
+    @Test
+    fun asObservable() {
+        val meta = MutableMeta {
+            "data" put {
+                "x" put ListValue(1, 2, 3)
+                "y" put ListValue(5, 6, 7)
+                "type" put "scatter"
+            }
+        }.asObservable()
+
+        assertEquals("scatter", meta["data.type"].string)
+    }
+
+    @Test
+    @Ignore
+    fun detachNode() {
+        val meta = MutableMeta {
+            "data" put {
+                "x" put ListValue(1, 2, 3)
+                "y" put ListValue(5, 6, 7)
+                "type" put "scatter"
+            }
+        }.asObservable()
+
+        var collector: Value? = null
+
+        meta.onChange(null) { name ->
+            if (name.startsWith("data")) {
+                collector = get("data.z")?.value
+            }
+        }
+
+        val data = meta["data"]!!
+
+        meta.remove("data")
+
+        data["z"] = ListValue(2, 5, 7)
+        assertEquals(null, collector)
+    }
+}
\ No newline at end of file
diff --git a/dataforge-meta/src/jsMain/kotlin/space/kscience/dataforge/misc/castJs.kt b/dataforge-meta/src/jsMain/kotlin/space/kscience/dataforge/misc/castJs.kt
index b404ebb4..b057bcbe 100644
--- a/dataforge-meta/src/jsMain/kotlin/space/kscience/dataforge/misc/castJs.kt
+++ b/dataforge-meta/src/jsMain/kotlin/space/kscience/dataforge/misc/castJs.kt
@@ -1,5 +1,5 @@
 package space.kscience.dataforge.misc
 import kotlin.js.unsafeCast as unsafeCastJs
 
-@Suppress("UNCHECKED_CAST", "NOTHING_TO_INLINE")
-public actual inline fun <T> Any?.unsafeCast(): T = this.unsafeCastJs<T>()
\ No newline at end of file
+@Suppress("NOTHING_TO_INLINE")
+public actual inline fun <T> Any?.unsafeCast(): T = unsafeCastJs<T>()
\ No newline at end of file
diff --git a/dataforge-meta/src/nativeMain/kotlin/space/kscience/dataforge/misc/castNative.kt b/dataforge-meta/src/nativeMain/kotlin/space/kscience/dataforge/misc/castNative.kt
index 27d399fe..4d9aa758 100644
--- a/dataforge-meta/src/nativeMain/kotlin/space/kscience/dataforge/misc/castNative.kt
+++ b/dataforge-meta/src/nativeMain/kotlin/space/kscience/dataforge/misc/castNative.kt
@@ -1,4 +1,4 @@
 package space.kscience.dataforge.misc
 
-@Suppress("UNCHECKED_CAST", "NOTHING_TO_INLINE")
+@Suppress("UNCHECKED_CAST")
 public actual inline fun <T> Any?.unsafeCast(): T = this as T
\ No newline at end of file
diff --git a/dataforge-meta/src/wasmJsMain/kotlin/space/kscience/dataforge/misc/castWasm.kt b/dataforge-meta/src/wasmJsMain/kotlin/space/kscience/dataforge/misc/castWasm.kt
new file mode 100644
index 00000000..27d399fe
--- /dev/null
+++ b/dataforge-meta/src/wasmJsMain/kotlin/space/kscience/dataforge/misc/castWasm.kt
@@ -0,0 +1,4 @@
+package space.kscience.dataforge.misc
+
+@Suppress("UNCHECKED_CAST", "NOTHING_TO_INLINE")
+public actual inline fun <T> Any?.unsafeCast(): T = this as T
\ No newline at end of file
diff --git a/dataforge-scripting/build.gradle.kts b/dataforge-scripting/build.gradle.kts
index be81fe70..d9d87742 100644
--- a/dataforge-scripting/build.gradle.kts
+++ b/dataforge-scripting/build.gradle.kts
@@ -4,15 +4,15 @@ plugins {
 
 kscience{
     jvm()
-    dependencies {
+    commonMain {
         api(projects.dataforgeWorkspace)
         implementation(kotlin("scripting-common"))
     }
-    dependencies(jvmMain){
+    jvmMain{
         implementation(kotlin("scripting-jvm-host"))
         implementation(kotlin("scripting-jvm"))
     }
-    dependencies(jvmTest){
+    jvmTest{
         implementation(spclibs.logback.classic)
     }
 }
diff --git a/dataforge-workspace/build.gradle.kts b/dataforge-workspace/build.gradle.kts
index ec117865..5fa555eb 100644
--- a/dataforge-workspace/build.gradle.kts
+++ b/dataforge-workspace/build.gradle.kts
@@ -2,29 +2,27 @@ plugins {
     id("space.kscience.gradle.mpp")
 }
 
-kscience{
+kscience {
     jvm()
     js()
     native()
+    wasm()
     useCoroutines()
-    useSerialization{
+    useSerialization {
         protobuf()
     }
-    commonMain{
-        dependencies {
-            api(projects.dataforgeContext)
-            api(projects.dataforgeData)
-            api(projects.dataforgeIo)
-        }
+    commonMain {
+        api(projects.dataforgeContext)
+        api(projects.dataforgeData)
+        api(projects.dataforgeIo)
+
     }
-    jvmTest{
-        dependencies {
-            implementation(spclibs.logback.classic)
-            implementation(projects.dataforgeIo.dataforgeIoYaml)
-        }
+    jvmTest {
+        implementation(spclibs.logback.classic)
+        implementation(projects.dataforgeIo.dataforgeIoYaml)
     }
 }
 
-readme{
+readme {
     maturity = space.kscience.gradle.Maturity.EXPERIMENTAL
 }
\ No newline at end of file
diff --git a/gradle.properties b/gradle.properties
index 31ef2f9a..3734d13e 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -6,5 +6,5 @@ kotlin.mpp.stability.nowarn=true
 kotlin.incremental.js.ir=true
 kotlin.native.ignoreDisabledTargets=true
 
-toolsVersion=0.15.1-kotlin-1.9.21
+toolsVersion=0.15.2-kotlin-1.9.21
 #kotlin.experimental.tryK2=true
\ No newline at end of file

From a32cbe95dc955e2c2e9b1c69a42ac3f29a6ea98d Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 19 Dec 2023 17:53:01 +0300
Subject: [PATCH 04/77] Add listOfSpec delegate to Scheme

---
 .../space/kscience/dataforge/meta/Specification.kt     |  9 ++++++++-
 .../kotlin/space/kscience/dataforge/names/Name.kt      | 10 ++++------
 2 files changed, 12 insertions(+), 7 deletions(-)

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Specification.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Specification.kt
index 6d3afbea..7f3dd2a6 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Specification.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Specification.kt
@@ -7,7 +7,7 @@ import space.kscience.dataforge.names.asName
 import kotlin.properties.ReadWriteProperty
 import kotlin.reflect.KProperty
 
-public interface ReadOnlySpecification<out T : Any>: Described {
+public interface ReadOnlySpecification<out T : Any> : Described {
 
     /**
      * Read generic read-only meta with this [Specification] producing instance of desired type.
@@ -128,3 +128,10 @@ public fun <T : Scheme> MutableMeta.listOfSpec(
         setIndexed(name, value.map { it.toMeta() })
     }
 }
+
+
+@DFExperimental
+public fun <T : Scheme> Scheme.listOfSpec(
+    spec: Specification<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, List<T>> = meta.listOfSpec(spec, key)
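
A minimal sketch of the Scheme-level delegate, assuming the usual Scheme/SchemeSpec pattern; PointScheme, CurveScheme and their properties are illustrative assumptions, not part of the library:

    import space.kscience.dataforge.meta.Scheme
    import space.kscience.dataforge.meta.SchemeSpec
    import space.kscience.dataforge.meta.double
    import space.kscience.dataforge.meta.listOfSpec
    import space.kscience.dataforge.misc.DFExperimental

    class PointScheme : Scheme() {
        var x by double(0.0)
        var y by double(0.0)

        companion object : SchemeSpec<PointScheme>(::PointScheme)
    }

    @OptIn(DFExperimental::class)
    class CurveScheme : Scheme() {
        // reads and writes same-name-sibling "point" nodes as a list of PointScheme
        var point by listOfSpec(PointScheme)

        companion object : SchemeSpec<CurveScheme>(::CurveScheme)
    }
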
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt
index 1c9a9cf3..5e4b98a8 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt
@@ -16,12 +16,10 @@ public class Name(public val tokens: List<NameToken>) {
 
     override fun toString(): String = tokens.joinToString(separator = NAME_SEPARATOR) { it.toString() }
 
-    override fun equals(other: Any?): Boolean {
-        return when (other) {
-            is Name -> this.tokens == other.tokens
-            is NameToken -> this.length == 1 && this.tokens.first() == other
-            else -> false
-        }
+    override fun equals(other: Any?): Boolean = when (other) {
+        is Name -> this.tokens == other.tokens
+        is NameToken -> this.length == 1 && this.tokens.first() == other
+        else -> false
     }
 
     private val cachedHashCode = if (tokens.size == 1) {

From 4de9e1865c3458a2cb41436dabb137c38d8a7740 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 19 Dec 2023 17:53:17 +0300
Subject: [PATCH 05/77] Add listOfSpec delegate to Scheme

---
 build.gradle.kts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.gradle.kts b/build.gradle.kts
index edeae557..1fa05824 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -8,7 +8,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.7.1"
+    version = "0.7.2-dev-1"
 }
 
 subprojects {

From f7dec52438b71cabf6d09d1525d026fa022ddee3 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 19 Dec 2023 18:21:42 +0300
Subject: [PATCH 06/77] Add automatic MetaConverter for serializable objects

---
 CHANGELOG.md                                  |  1 +
 .../meta/transformations/MetaConverter.kt     | 46 +++++++++++++++----
 2 files changed, 37 insertions(+), 10 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 87ccad00..0d89307a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,7 @@
 
 ### Added
 - Wasm artifacts
+- Add automatic MetaConverter for serializable objects
 
 ### Changed
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/transformations/MetaConverter.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/transformations/MetaConverter.kt
index 0dfb63d7..17774913 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/transformations/MetaConverter.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/transformations/MetaConverter.kt
@@ -1,7 +1,12 @@
 package space.kscience.dataforge.meta.transformations
 
+import kotlinx.serialization.KSerializer
+import kotlinx.serialization.json.Json
+import kotlinx.serialization.json.encodeToJsonElement
+import kotlinx.serialization.serializer
 import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
+import space.kscience.dataforge.misc.DFExperimental
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
@@ -141,19 +146,40 @@ public interface MetaConverter<T> {
         public fun <T> valueList(
             writer: (T) -> Value = { Value.of(it) },
             reader: (Value) -> T,
-        ): MetaConverter<List<T>> =
-            object : MetaConverter<List<T>> {
-                override val type: KType = typeOf<List<T>>()
+        ): MetaConverter<List<T>> = object : MetaConverter<List<T>> {
+            override val type: KType = typeOf<List<T>>()
 
-                override val descriptor: MetaDescriptor = MetaDescriptor {
-                    valueType(ValueType.LIST)
-                }
-
-                override fun metaToObjectOrNull(meta: Meta): List<T>? = meta.value?.list?.map(reader)
-
-                override fun objectToMeta(obj: List<T>): Meta = Meta(obj.map(writer).asValue())
+            override val descriptor: MetaDescriptor = MetaDescriptor {
+                valueType(ValueType.LIST)
             }
 
+            override fun metaToObjectOrNull(meta: Meta): List<T>? = meta.value?.list?.map(reader)
+
+            override fun objectToMeta(obj: List<T>): Meta = Meta(obj.map(writer).asValue())
+        }
+
+        /**
+         * Automatically generate [MetaConverter] for a class using its serializer and optional [descriptor]
+         */
+        @DFExperimental
+        public inline fun <reified T> serializable(
+            descriptor: MetaDescriptor? = null,
+        ): MetaConverter<T> = object : MetaConverter<T> {
+            override val type: KType = typeOf<T>()
+            private val serializer: KSerializer<T> = serializer()
+
+            override fun metaToObjectOrNull(meta: Meta): T? {
+                val json = meta.toJson(descriptor)
+                return Json.decodeFromJsonElement(serializer, json)
+            }
+
+            override fun objectToMeta(obj: T): Meta {
+                val json = Json.encodeToJsonElement(obj)
+                return json.toMeta(descriptor)
+            }
+
+        }
+
     }
 }
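
A minimal round-trip sketch with a kotlinx.serialization class (Point is an illustrative assumption):

    import kotlinx.serialization.Serializable
    import space.kscience.dataforge.meta.Meta
    import space.kscience.dataforge.meta.transformations.MetaConverter
    import space.kscience.dataforge.misc.DFExperimental

    @Serializable
    data class Point(val x: Double, val y: Double)

    @OptIn(DFExperimental::class)
    fun pointRoundTrip(): Point {
        val converter = MetaConverter.serializable<Point>()
        val meta: Meta = converter.objectToMeta(Point(1.0, 2.0))
        return converter.metaToObject(meta)   // decoded back through the generated serializer
    }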
 

From f9e7d0098feda048fa886a169cf6b62c1cf3ae71 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Wed, 20 Dec 2023 11:07:30 +0300
Subject: [PATCH 07/77] Add convertable and serializable extensions to Meta
 delegates

---
 CHANGELOG.md                                  |  2 +
 .../kscience/dataforge/meta/MetaDelegate.kt   | 43 +++++++++++++-
 .../kscience/dataforge/meta/MutableMeta.kt    |  2 +-
 .../dataforge/meta/MutableMetaDelegate.kt     | 56 ++++++++++++++++++-
 4 files changed, 96 insertions(+), 7 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0d89307a..7cc1c21d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,10 +5,12 @@
 ### Added
 - Wasm artifacts
 - Add automatic MetaConverter for serializable objects
+- Add Meta and MutableMeta delegates for convertable and serializable
 
 ### Changed
 
 ### Deprecated
+- `node(key,converter)` in favor of `serializable` delegate
 
 ### Removed
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
index 73923d56..42e4fe14 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
@@ -1,6 +1,8 @@
 package space.kscience.dataforge.meta
 
+import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.meta.transformations.MetaConverter
+import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
 import kotlin.properties.ReadOnlyProperty
@@ -11,13 +13,48 @@ public fun MetaProvider.node(key: Name? = null): ReadOnlyProperty<Any?, Meta?> =
     get(key ?: property.name.asName())
 }
 
-public fun <T> MetaProvider.node(
+/**
+ * Use [converter] to read the Meta node
+ */
+public fun <T> MetaProvider.convertable(
+    converter: MetaConverter<T>,
     key: Name? = null,
-    converter: MetaConverter<T>
 ): ReadOnlyProperty<Any?, T?> = ReadOnlyProperty { _, property ->
     get(key ?: property.name.asName())?.let { converter.metaToObject(it) }
 }
 
+/**
+ * Use object serializer to transform it to Meta and back
+ */
+@DFExperimental
+public inline fun <reified T> MetaProvider.serializable(
+    descriptor: MetaDescriptor? = null,
+    key: Name? = null,
+): ReadOnlyProperty<Any?, T?> = convertable(MetaConverter.serializable(descriptor), key)
+
+@Deprecated("Use convertable", ReplaceWith("convertable(converter, key)"))
+public fun <T> MetaProvider.node(
+    key: Name? = null,
+    converter: MetaConverter<T>,
+): ReadOnlyProperty<Any?, T?> = convertable(converter, key)
+
+/**
+ * Use [converter] to convert a list of same-name-sibling Meta nodes to objects
+ */
+public fun <T> Meta.listOfConvertable(
+    converter: MetaConverter<T>,
+    key: Name? = null,
+): ReadOnlyProperty<Any?, List<T>> = ReadOnlyProperty{_, property ->
+    val name = key ?: property.name.asName()
+    getIndexed(name).values.map { converter.metaToObject(it) }
+}
+
+@DFExperimental
+public inline fun <reified T> Meta.listOfSerializable(
+    descriptor: MetaDescriptor? = null,
+    key: Name? = null,
+): ReadOnlyProperty<Any?, List<T>> = listOfConvertable(MetaConverter.serializable(descriptor), key)
+
 /**
  * A property delegate that uses custom key
  */
@@ -27,7 +64,7 @@ public fun MetaProvider.value(key: Name? = null): ReadOnlyProperty<Any?, Value?>
 
 public fun <R> MetaProvider.value(
     key: Name? = null,
-    reader: (Value?) -> R
+    reader: (Value?) -> R,
 ): ReadOnlyProperty<Any?, R> = ReadOnlyProperty { _, property ->
     reader(get(key ?: property.name.asName())?.value)
 }
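
A minimal read-only sketch of the new delegates on a plain Meta; Axis, PlotConfig and the node names are illustrative assumptions:

    import kotlinx.serialization.Serializable
    import space.kscience.dataforge.meta.Meta
    import space.kscience.dataforge.meta.listOfSerializable
    import space.kscience.dataforge.meta.serializable
    import space.kscience.dataforge.misc.DFExperimental

    @Serializable
    data class Axis(val label: String, val log: Boolean = false)

    @OptIn(DFExperimental::class)
    class PlotConfig(meta: Meta) {
        // reads the "xAxis" node (the property name is used as the key)
        val xAxis: Axis? by meta.serializable<Axis>()

        // collects all same-name-sibling "axis" nodes into a list
        val axis: List<Axis> by meta.listOfSerializable<Axis>()
    }
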
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
index 7e05d215..24a09240 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
@@ -165,7 +165,7 @@ public fun MutableMetaProvider.remove(key: String) {
 
 // node setters
 
-public operator fun MutableMetaProvider.set(Key: NameToken, value: Meta): Unit = set(Key.asName(), value)
+public operator fun MutableMetaProvider.set(key: NameToken, value: Meta): Unit = set(key.asName(), value)
 public operator fun MutableMetaProvider.set(key: String, value: Meta): Unit = set(Name.parse(key), value)
 
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
index 0f28523c..d77348dd 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
@@ -1,6 +1,8 @@
 package space.kscience.dataforge.meta
 
+import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.meta.transformations.MetaConverter
+import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
 import kotlin.properties.ReadWriteProperty
@@ -20,10 +22,18 @@ public fun MutableMetaProvider.node(key: Name? = null): ReadWriteProperty<Any?,
         }
     }
 
-public fun <T> MutableMetaProvider.node(key: Name? = null, converter: MetaConverter<T>): ReadWriteProperty<Any?, T?> =
+/**
+ * Use [converter] to transform an object to Meta and back.
+ * Note that mutation of the object does not change Meta.
+ */
+public fun <T> MutableMetaProvider.convertable(
+    converter: MetaConverter<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, T?> =
     object : ReadWriteProperty<Any?, T?> {
         override fun getValue(thisRef: Any?, property: KProperty<*>): T? {
-            return get(key ?: property.name.asName())?.let { converter.metaToObject(it) }
+            val name = key ?: property.name.asName()
+            return get(name)?.let { converter.metaToObject(it) }
         }
 
         override fun setValue(thisRef: Any?, property: KProperty<*>, value: T?) {
@@ -32,6 +42,46 @@ public fun <T> MutableMetaProvider.node(key: Name? = null, converter: MetaConver
         }
     }
 
+@Deprecated("Use convertable", ReplaceWith("convertable(converter, key)"))
+public fun <T> MutableMetaProvider.node(key: Name? = null, converter: MetaConverter<T>): ReadWriteProperty<Any?, T?> =
+    convertable(converter, key)
+
+/**
+ * Use object serializer to transform it to Meta and back.
+ * Note that mutation of the object does not change Meta.
+ */
+@DFExperimental
+public inline fun <reified T> MutableMetaProvider.serializable(
+    descriptor: MetaDescriptor? = null,
+    key: Name? = null,
+): ReadWriteProperty<Any?, T?> = convertable(MetaConverter.serializable(descriptor), key)
+
+/**
+ * Use [converter] to convert a list of same-name sibling Meta items into a list of objects and back.
+ * Note that mutation of the object does not change Meta.
+ */
+public fun <T> MutableMeta.listOfConvertable(
+    converter: MetaConverter<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, List<T>> = object : ReadWriteProperty<Any?, List<T>> {
+    override fun getValue(thisRef: Any?, property: KProperty<*>): List<T> {
+        val name = key ?: property.name.asName()
+        return getIndexed(name).values.map { converter.metaToObject(it) }
+    }
+
+    override fun setValue(thisRef: Any?, property: KProperty<*>, value: List<T>) {
+        val name = key ?: property.name.asName()
+        setIndexed(name, value.map { converter.objectToMeta(it) })
+    }
+}
+
+@DFExperimental
+public inline fun <reified T> MutableMeta.listOfSerializable(
+    descriptor: MetaDescriptor? = null,
+    key: Name? = null,
+): ReadWriteProperty<Any?, List<T>> = listOfConvertable(MetaConverter.serializable(descriptor), key)
+
+
 public fun MutableMetaProvider.value(key: Name? = null): ReadWriteProperty<Any?, Value?> =
     object : ReadWriteProperty<Any?, Value?> {
         override fun getValue(thisRef: Any?, property: KProperty<*>): Value? =
@@ -45,7 +95,7 @@ public fun MutableMetaProvider.value(key: Name? = null): ReadWriteProperty<Any?,
 public fun <T> MutableMetaProvider.value(
     key: Name? = null,
     writer: (T) -> Value? = { Value.of(it) },
-    reader: (Value?) -> T
+    reader: (Value?) -> T,
 ): ReadWriteProperty<Any?, T> = object : ReadWriteProperty<Any?, T> {
     override fun getValue(thisRef: Any?, property: KProperty<*>): T =
         reader(get(key ?: property.name.asName())?.value)

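A matching read-write sketch for the mutable variants; `Point` is an invented kotlinx.serialization class and the `point` key is arbitrary:

    import kotlinx.serialization.Serializable
    import space.kscience.dataforge.meta.*
    import space.kscience.dataforge.misc.DFExperimental
    import space.kscience.dataforge.names.asName

    @Serializable
    data class Point(val x: Double, val y: Double)

    @OptIn(DFExperimental::class)
    fun writePoints(meta: MutableMeta) {
        // each list element is written as a same-name sibling node under "point"
        var points by meta.listOfSerializable<Point>(key = "point".asName())
        points = listOf(Point(0.0, 0.0), Point(1.0, 1.0))
    }
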
From 6e20fc39299aeeb0af58691edd4c2a2a36b2e69e Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Thu, 28 Dec 2023 22:14:15 +0300
Subject: [PATCH 08/77] fix valueSequence

---
 CHANGELOG.md                                                  | 1 +
 build.gradle.kts                                              | 2 +-
 .../commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt   | 4 +++-
 3 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7cc1c21d..8e2861e2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -16,6 +16,7 @@
 
 ### Fixed
 - Partially fixed a bug with `MutableMeta` observable wrappers.
+- `valueSequence` now includes the root value, so `meta.update` works properly.
 
 ### Security
 
diff --git a/build.gradle.kts b/build.gradle.kts
index 1fa05824..33be577c 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -8,7 +8,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.7.2-dev-1"
+    version = "0.7.2-dev-2"
 }
 
 subprojects {
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt
index 979c8782..5cf53c75 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt
@@ -188,10 +188,12 @@ public operator fun <M : TypedMeta<M>> M?.get(key: String): M? = this?.get(key.p
 
 
 /**
- * Get a sequence of [Name]-[Value] pairs using top-down traversal of the tree
+ * Get a sequence of [Name]-[Value] pairs using top-down traversal of the tree.
+ * The sequence includes the root value with an empty name.
  */
 public fun Meta.valueSequence(): Sequence<Pair<Name, Value>> = sequence {
     items.forEach { (key, item) ->
+        value?.let { yield(Name.EMPTY to it) }
         item.value?.let { itemValue ->
             yield(key.asName() to itemValue)
         }

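A short sketch of the documented behaviour (the builder content is invented): the root value is now emitted under an empty name together with the child values, which is what lets `meta.update` copy it.

    import space.kscience.dataforge.meta.*

    fun demoValueSequence() {
        val meta = Meta {
            value = "root".asValue()
            "a" put 1
            "b" put "two"
        }
        // the root value now appears in the sequence under the empty name
        meta.valueSequence().forEach { (name, value) ->
            println("'$name' -> $value")
        }
    }
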
From 282b43a05a03fcfb9c78398ac31ebfce35ad64f0 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 31 Dec 2023 11:57:54 +0300
Subject: [PATCH 09/77] Clean up scheme and descriptor logic

---
 build.gradle.kts                              |   2 +-
 .../dataforge/descriptors/annotations.kt      | 126 -------------
 .../descriptors/reflectiveDescriptors.kt      | 169 +++++++++++++-----
 .../descriptors/TestAutoDescriptors.kt        |  29 +++
 .../space/kscience/dataforge/io/Binary.kt     |   5 +
 .../space/kscience/dataforge/meta/Scheme.kt   |  69 ++++---
 .../meta/descriptors/MetaDescriptorBuilder.kt | 143 ++++++++-------
 .../meta/descriptors/schemeDescriptor.kt      |   4 +-
 .../kscience/dataforge/meta/JsonMetaTest.kt   |   4 +-
 9 files changed, 274 insertions(+), 277 deletions(-)
 delete mode 100644 dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/annotations.kt
 create mode 100644 dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt

diff --git a/build.gradle.kts b/build.gradle.kts
index 33be577c..4a336e25 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -8,7 +8,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.7.2-dev-2"
+    version = "0.8.0-dev-1"
 }
 
 subprojects {
diff --git a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/annotations.kt b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/annotations.kt
deleted file mode 100644
index f97cb28d..00000000
--- a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/annotations.kt
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright  2018 Alexander Nozik.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package space.kscience.dataforge.descriptors
-
-//@MustBeDocumented
-//annotation class Attribute(
-//    val key: String,
-//    val value: String
-//)
-//
-//@MustBeDocumented
-//annotation class Attributes(
-//    val attrs: Array<Attribute>
-//)
-//
-//@MustBeDocumented
-//annotation class ItemDef(
-//    val info: String = "",
-//    val multiple: Boolean = false,
-//    val required: Boolean = false
-//)
-//
-//@Target(AnnotationTarget.PROPERTY)
-//@MustBeDocumented
-//annotation class ValueDef(
-//    val type: Array<ValueType> = [ValueType.STRING],
-//    val def: String = "",
-//    val allowed: Array<String> = [],
-//    val enumeration: KClass<*> = Any::class
-//)
-
-///**
-// * Description text for meta property, node or whole object
-// */
-//@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class Description(val value: String)
-//
-///**
-// * Annotation for value property which states that lists are expected
-// */
-//@Target(AnnotationTarget.PROPERTY)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class Multiple
-//
-///**
-// * Descriptor target
-// * The DataForge path to the resource containing the description. Following targets are supported:
-// *  1. resource
-// *  1. file
-// *  1. class
-// *  1. method
-// *  1. property
-// *
-// *
-// * Does not work if [type] is provided
-// */
-//@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY, AnnotationTarget.VALUE_PARAMETER)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class Descriptor(val value: String)
-//
-//
-///**
-// * Aggregator class for descriptor nodes
-// */
-//@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY, AnnotationTarget.FUNCTION, AnnotationTarget.VALUE_PARAMETER)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class DescriptorNodes(vararg val nodes: NodeDef)
-//
-///**
-// * Aggregator class for descriptor values
-// */
-//@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY, AnnotationTarget.FUNCTION, AnnotationTarget.VALUE_PARAMETER)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class DescriptorValues(vararg val nodes: ValueDef)
-//
-///**
-// * Alternative name for property descriptor declaration
-// */
-//@Target(AnnotationTarget.PROPERTY, AnnotationTarget.VALUE_PARAMETER)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class DescriptorName(val name: String)
-//
-//@Target(AnnotationTarget.PROPERTY)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class DescriptorValue(val def: ValueDef)
-////TODO enter fields directly?
-//
-//@Target(AnnotationTarget.PROPERTY)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class ValueProperty(
-//    val name: String = "",
-//    val type: Array<ValueType> = arrayOf(ValueType.STRING),
-//    val multiple: Boolean = false,
-//    val def: String = "",
-//    val enumeration: KClass<*> = Any::class,
-//    val tags: Array<String> = emptyArray()
-//)
-//
-//
-//@Target(AnnotationTarget.PROPERTY)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class NodeProperty(val name: String = "")
diff --git a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt
index 9de5e280..1f2db7fc 100644
--- a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt
+++ b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt
@@ -1,53 +1,124 @@
 package space.kscience.dataforge.descriptors
 
+import kotlinx.serialization.ExperimentalSerializationApi
+import kotlinx.serialization.json.Json
+import kotlinx.serialization.json.decodeFromStream
+import org.slf4j.LoggerFactory
+import space.kscience.dataforge.meta.Scheme
+import space.kscience.dataforge.meta.SchemeSpec
+import space.kscience.dataforge.meta.ValueType
+import space.kscience.dataforge.meta.descriptors.MetaDescriptor
+import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
+import space.kscience.dataforge.meta.descriptors.node
+import java.net.URL
+import kotlin.reflect.KClass
+import kotlin.reflect.full.isSubclassOf
+import kotlin.reflect.full.memberProperties
+import kotlin.reflect.typeOf
 
-//inline fun <reified T : Scheme> T.buildDescriptor(): NodeDescriptor = NodeDescriptor {
-//    T::class.apply {
-//        findAnnotation<ItemDef>()?.let { def ->
-//            info = def.info
-//            required = def.required
-//            multiple = def.multiple
-//        }
-//        findAnnotation<Attribute>()?.let { attr ->
-//            attributes {
-//                this[attr.key] = attr.value.parseValue()
-//            }
-//        }
-//        findAnnotation<Attributes>()?.attrs?.forEach { attr ->
-//            attributes {
-//                this[attr.key] = attr.value.parseValue()
-//            }
-//        }
-//    }
-//    T::class.memberProperties.forEach { property ->
-//        val delegate = property.getDelegate(this@buildDescriptor)
-//
-//        val descriptor: ItemDescriptor = when (delegate) {
-//            is ConfigurableDelegate -> buildPropertyDescriptor(property, delegate)
-//            is ReadWriteDelegateWrapper<*, *> -> {
-//                if (delegate.delegate is ConfigurableDelegate) {
-//                    buildPropertyDescriptor(property, delegate.delegate as ConfigurableDelegate)
-//                } else {
-//                    return@forEach
-//                }
-//            }
-//            else -> return@forEach
-//        }
-//        defineItem(property.name, descriptor)
-//    }
-//}
 
-//inline fun <T : Scheme, reified V : Any?> buildPropertyDescriptor(
-//    property: KProperty1<T, V>,
-//    delegate: ConfigurableDelegate
-//): ItemDescriptor {
-//    when {
-//        V::class.isSubclassOf(Scheme::class) -> NodeDescriptor {
-//            default = delegate.default.node
-//        }
-//        V::class.isSubclassOf(Meta::class) -> NodeDescriptor {
-//            default = delegate.default.node
-//        }
-//
-//    }
-//}
+/**
+ * Description text for meta property, node or whole object
+ */
+@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY)
+@Retention(AnnotationRetention.RUNTIME)
+@MustBeDocumented
+public annotation class Description(val value: String)
+
+@Target(AnnotationTarget.PROPERTY)
+@Retention(AnnotationRetention.RUNTIME)
+@MustBeDocumented
+public annotation class Multiple()
+
+@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY)
+@Retention(AnnotationRetention.RUNTIME)
+@MustBeDocumented
+public annotation class DescriptorResource(val resourceName: String)
+
+@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY)
+@Retention(AnnotationRetention.RUNTIME)
+@MustBeDocumented
+public annotation class DescriptorUrl(val url: String)
+
+
+@OptIn(ExperimentalSerializationApi::class)
+private fun MetaDescriptorBuilder.loadDescriptorFromUrl(url: URL) {
+    url.openStream().use {
+        from(Json.decodeFromStream(MetaDescriptor.serializer(), it))
+    }
+}
+
+private fun MetaDescriptorBuilder.loadDescriptorFromResource(resource: DescriptorResource) {
+    val url = {}.javaClass.getResource(resource.resourceName)
+    if (url != null) {
+        loadDescriptorFromUrl(url)
+    } else {
+        LoggerFactory.getLogger("System")
+            .error("Can't find descriptor resource with name ${resource.resourceName}")
+    }
+}
+
+
+public fun <T : Any> MetaDescriptor.Companion.forClass(
+    kClass: KClass<T>,
+    mod: MetaDescriptorBuilder.() -> Unit = {},
+): MetaDescriptor = MetaDescriptor {
+    when {
+        kClass.isSubclassOf(Number::class) -> valueType(ValueType.NUMBER)
+        kClass == String::class -> valueType(ValueType.STRING)
+        kClass == Boolean::class -> valueType(ValueType.BOOLEAN)
+        kClass == DoubleArray::class -> valueType(ValueType.LIST)
+    }
+
+    kClass.annotations.forEach {
+        when (it) {
+            is Description -> description = it.value
+
+            is DescriptorResource -> loadDescriptorFromResource(it)
+
+            is DescriptorUrl -> loadDescriptorFromUrl(URL(it.url))
+        }
+    }
+    kClass.memberProperties.forEach { property ->
+
+        var flag = false
+
+        val descriptor = MetaDescriptor {
+            //use base type descriptor as a base
+            (property.returnType.classifier as? KClass<*>)?.let {
+                from(forClass(it))
+            }
+            property.annotations.forEach {
+                when (it) {
+                    is Description -> {
+                        description = it.value
+                        flag = true
+                    }
+
+                    is Multiple -> {
+                        multiple = true
+                        flag = true
+                    }
+
+                    is DescriptorResource -> {
+                        loadDescriptorFromResource(it)
+                        flag = true
+                    }
+
+                    is DescriptorUrl -> {
+                        loadDescriptorFromUrl(URL(it.url))
+                        flag = true
+                    }
+                }
+            }
+        }
+        if (flag) {
+            node(property.name, descriptor)
+        }
+    }
+    mod()
+}
+
+@Suppress("UNCHECKED_CAST")
+public inline fun <reified T : Scheme> SchemeSpec<T>.autoDescriptor( noinline mod: MetaDescriptorBuilder.() -> Unit = {}): MetaDescriptor =
+    MetaDescriptor.forClass(typeOf<T>().classifier as KClass<T>, mod)
\ No newline at end of file
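
Beyond the `@Description` case exercised by the test added below, a hedged sketch of the resource-based variant (the class, property, and resource path are invented, and the JSON file must exist on the classpath):

    import space.kscience.dataforge.descriptors.DescriptorResource
    import space.kscience.dataforge.descriptors.autoDescriptor
    import space.kscience.dataforge.meta.*
    import space.kscience.dataforge.meta.descriptors.MetaDescriptor

    // the descriptor for this scheme is loaded from a bundled JSON resource
    @DescriptorResource("/descriptors/device.json")
    class DeviceScheme : Scheme() {
        val id by string()

        companion object : SchemeSpec<DeviceScheme>(::DeviceScheme) {
            override val descriptor: MetaDescriptor = autoDescriptor()
        }
    }
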
diff --git a/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt b/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt
new file mode 100644
index 00000000..c1271537
--- /dev/null
+++ b/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt
@@ -0,0 +1,29 @@
+package space.kscience.dataforge.descriptors
+
+import kotlinx.serialization.encodeToString
+import kotlinx.serialization.json.Json
+import org.junit.jupiter.api.Test
+import space.kscience.dataforge.meta.*
+import space.kscience.dataforge.meta.descriptors.MetaDescriptor
+import space.kscience.dataforge.meta.transformations.MetaConverter
+
+private class TestScheme: Scheme(){
+
+    @Description("A")
+    val a by string()
+
+    @Description("B")
+    val b by int()
+
+    companion object: SchemeSpec<TestScheme>(::TestScheme){
+        override val descriptor: MetaDescriptor = autoDescriptor()
+    }
+}
+
+class TestAutoDescriptors {
+    @Test
+    fun autoDescriptor(){
+        val autoDescriptor = MetaDescriptor.forClass(TestScheme::class)
+        println(Json{prettyPrint = true}.encodeToString(autoDescriptor))
+    }
+}
\ No newline at end of file
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Binary.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Binary.kt
index f85dd98e..de365519 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Binary.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Binary.kt
@@ -71,6 +71,11 @@ internal class ByteArrayBinary(
 
     override fun view(offset: Int, binarySize: Int): ByteArrayBinary =
         ByteArrayBinary(array, start + offset, binarySize)
+
+    override fun toString(): String =
+        "ByteArrayBinary(array=$array, start=$start, size=$size)"
+
+
 }
 
 public fun ByteArray.asBinary(): Binary = ByteArrayBinary(this)
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
index 90473286..ab504733 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
@@ -15,28 +15,38 @@ import space.kscience.dataforge.names.*
 public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurable {
 
     /**
-     * Meta to be mutated by this schme
+     * Meta to be mutated by this scheme
      */
-    private var targetMeta: MutableMeta = MutableMeta()
+    private var target: MutableMeta? = null
+        get() {
+            // automatic initialization of target if it is missing
+            if (field == null) {
+                field = MutableMeta()
+            }
+            return field
+        }
 
     /**
      * Default values provided by this scheme
      */
-    private var defaultMeta: Meta? = null
+    private var prototype: Meta? = null
 
     final override val meta: ObservableMutableMeta = SchemeMeta(Name.EMPTY)
 
     final override var descriptor: MetaDescriptor? = null
-        internal set
+        private set
 
-    internal fun wrap(
-        newMeta: MutableMeta,
-        preserveDefault: Boolean = false,
+    /**
+     * This method must be called before the scheme can be used.
+     */
+    internal fun initialize(
+        target: MutableMeta,
+        prototype: Meta,
+        descriptor: MetaDescriptor?,
     ) {
-        if (preserveDefault) {
-            defaultMeta = targetMeta.seal()
-        }
-        targetMeta = newMeta
+        this.target = target
+        this.prototype = prototype
+        this.descriptor = descriptor
     }
 
     /**
@@ -47,11 +57,11 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
         return descriptor?.validate(meta) ?: true
     }
 
-    override fun get(name: Name): MutableMeta? = meta.get(name)
+    override fun get(name: Name): MutableMeta? = meta[name]
 
     override fun set(name: Name, node: Meta?) {
         if (validate(name, meta)) {
-            meta.set(name, node)
+            meta[name] = node
         } else {
             error("Validation failed for node $node at $name")
         }
@@ -68,14 +78,16 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
 
     private val listeners: MutableList<MetaListener> = mutableListOf()
 
+    override fun toString(): String = meta.toString()
+
     private inner class SchemeMeta(val pathName: Name) : ObservableMutableMeta {
         override var value: Value?
-            get() = targetMeta[pathName]?.value
-                ?: defaultMeta?.get(pathName)?.value
+            get() = target[pathName]?.value
+                ?: prototype?.get(pathName)?.value
                 ?: descriptor?.get(pathName)?.defaultValue
             set(value) {
-                val oldValue = targetMeta[pathName]?.value
-                targetMeta[pathName] = value
+                val oldValue = target[pathName]?.value
+                target!![pathName] = value
                 if (oldValue != value) {
                     invalidate(Name.EMPTY)
                 }
@@ -83,8 +95,8 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
 
         override val items: Map<NameToken, ObservableMutableMeta>
             get() {
-                val targetKeys = targetMeta[pathName]?.items?.keys ?: emptySet()
-                val defaultKeys = defaultMeta?.get(pathName)?.items?.keys ?: emptySet()
+                val targetKeys = target[pathName]?.items?.keys ?: emptySet()
+                val defaultKeys = prototype?.get(pathName)?.items?.keys ?: emptySet()
                 return (targetKeys + defaultKeys).associateWith { SchemeMeta(pathName + it) }
             }
 
@@ -111,7 +123,7 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
         override fun hashCode(): Int = Meta.hashCode(this)
 
         override fun set(name: Name, node: Meta?) {
-            targetMeta.set(name, node)
+            target!![name] = node
             invalidate(name)
         }
 
@@ -119,7 +131,6 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
 
         @DFExperimental
         override fun attach(name: Name, node: ObservableMutableMeta) {
-            //TODO implement zero-copy attachment
             set(name, node)
             node.onChange(this) { changeName ->
                 set(name + changeName, this[changeName])
@@ -131,10 +142,11 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
 
 /**
  * Relocate scheme target onto given [MutableMeta]. Old provider does not get updates anymore.
- * Current state of the scheme used as a default.
+ * The current state of the scheme is used as a default.
  */
+@DFExperimental
 public fun <T : Scheme> T.retarget(provider: MutableMeta): T = apply {
-    wrap(provider, true)
+    initialize(provider, meta.seal(), descriptor)
 }
 
 /**
@@ -155,19 +167,18 @@ public open class SchemeSpec<out T : Scheme>(
     private val builder: () -> T,
 ) : Specification<T> {
 
+    override val descriptor: MetaDescriptor? get() = null
+
     override fun read(source: Meta): T = builder().also {
-        it.wrap(MutableMeta().withDefault(source))
+        it.initialize(MutableMeta(), source, descriptor)
     }
 
     override fun write(target: MutableMeta): T = empty().also {
-        it.wrap(target)
+        it.initialize(target, Meta.EMPTY, descriptor)
     }
 
-    //TODO Generate descriptor from Scheme class
-    override val descriptor: MetaDescriptor? get() = null
-
     override fun empty(): T = builder().also {
-        it.descriptor = descriptor
+        it.initialize(MutableMeta(), Meta.EMPTY, descriptor)
     }
 
     @Suppress("OVERRIDE_BY_INLINE")
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
index 95949d03..751d2525 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
@@ -44,38 +44,27 @@ public class MetaDescriptorBuilder @PublishedApi internal constructor() {
         attributes.apply(block)
     }
 
-    public fun item(name: Name, block: MetaDescriptorBuilder.() -> Unit = {}): MetaDescriptorBuilder {
-        return when (name.length) {
-            0 -> apply(block)
+    internal fun node(
+        name: Name,
+        descriptorBuilder: MetaDescriptorBuilder,
+    ): Unit {
+        when (name.length) {
+            0 -> error("Can't set descriptor to root")
             1 -> {
-                val target = MetaDescriptorBuilder().apply(block)
-                children[name.first().body] = target
-                target
+                children[name.first().body] = descriptorBuilder
             }
 
-            else -> {
-                children.getOrPut(name.first().body) { MetaDescriptorBuilder() }.item(name.cutFirst(), block)
-            }
+            else -> children.getOrPut(name.first().body) {
+                MetaDescriptorBuilder()
+            }.node(name.cutFirst(), descriptorBuilder)
         }
     }
 
-    public fun node(
+    internal fun node(
         name: Name,
-        descriptor: MetaDescriptor,
-        block: MetaDescriptorBuilder.() -> Unit = {},
-    ): MetaDescriptorBuilder = when (name.length) {
-        0 -> error("Can't set descriptor to root")
-        1 -> {
-            val item = descriptor.toBuilder().apply {
-                valueRestriction = ValueRestriction.ABSENT
-            }.apply(block)
-            children[name.first().body] = item
-            item
-        }
-
-        else -> children.getOrPut(name.first().body) {
-            MetaDescriptorBuilder()
-        }.node(name.cutFirst(), descriptor, block)
+        descriptorBuilder: MetaDescriptor,
+    ): Unit {
+        node(name, descriptorBuilder.toBuilder())
     }
 
     public var allowedValues: List<Value>
@@ -89,6 +78,17 @@ public class MetaDescriptorBuilder @PublishedApi internal constructor() {
         allowedValues = values.map { Value.of(it) }
     }
 
+    public fun from(descriptor: MetaDescriptor) {
+        description = descriptor.description
+        children.putAll(descriptor.children.mapValues { it.value.toBuilder() })
+        multiple = descriptor.multiple
+        valueRestriction = descriptor.valueRestriction
+        valueTypes = descriptor.valueTypes
+        indexKey = descriptor.indexKey
+        default = descriptor.defaultValue
+        attributes.update(descriptor.attributes)
+    }
+
     @PublishedApi
     internal fun build(): MetaDescriptor = MetaDescriptor(
         description = description,
@@ -102,40 +102,27 @@ public class MetaDescriptorBuilder @PublishedApi internal constructor() {
     )
 }
 
-public fun MetaDescriptorBuilder.item(name: String, block: MetaDescriptorBuilder.() -> Unit): MetaDescriptorBuilder =
-    item(Name.parse(name), block)
+//public fun MetaDescriptorBuilder.item(name: String, block: MetaDescriptorBuilder.() -> Unit): MetaDescriptorBuilder =
+//    item(Name.parse(name), block)
 
 public inline fun MetaDescriptor(block: MetaDescriptorBuilder.() -> Unit): MetaDescriptor =
     MetaDescriptorBuilder().apply(block).build()
 
 /**
- * Create and configure child value descriptor
- */
-public fun MetaDescriptorBuilder.value(
-    name: Name,
-    type: ValueType,
-    vararg additionalTypes: ValueType,
-    block: MetaDescriptorBuilder.() -> Unit = {},
-): MetaDescriptorBuilder = item(name) {
-    valueType(type, *additionalTypes)
-    block()
-}
-
-public fun MetaDescriptorBuilder.value(
-    name: String,
-    type: ValueType,
-    vararg additionalTypes: ValueType,
-    block: MetaDescriptorBuilder.() -> Unit = {},
-): MetaDescriptorBuilder = value(Name.parse(name), type, additionalTypes = additionalTypes, block)
-
-/**
- * Create and configure child value descriptor
+ * Create and configure child node descriptor
  */
 public fun MetaDescriptorBuilder.node(
-    name: Name, block: MetaDescriptorBuilder.() -> Unit,
-): MetaDescriptorBuilder = item(name) {
-    valueRestriction = ValueRestriction.ABSENT
-    block()
+    name: Name,
+    block: MetaDescriptorBuilder.() -> Unit,
+) {
+    node(
+        name,
+        MetaDescriptorBuilder().apply(block)
+    )
+}
+
+public fun MetaDescriptorBuilder.node(name: String, descriptor: MetaDescriptor) {
+    node(Name.parse(name), descriptor)
 }
 
 public fun MetaDescriptorBuilder.node(name: String, block: MetaDescriptorBuilder.() -> Unit) {
@@ -144,30 +131,16 @@ public fun MetaDescriptorBuilder.node(name: String, block: MetaDescriptorBuilder
 
 public fun MetaDescriptorBuilder.node(
     key: String,
-    described: Described,
+    base: Described,
     block: MetaDescriptorBuilder.() -> Unit = {},
 ) {
-    described.descriptor?.let {
-        node(Name.parse(key), it, block)
-    }
+    node(Name.parse(key), base.descriptor?.toBuilder()?.apply(block) ?: MetaDescriptorBuilder())
 }
 
 public fun MetaDescriptorBuilder.required() {
     valueRestriction = ValueRestriction.REQUIRED
 }
 
-public inline fun <reified E : Enum<E>> MetaDescriptorBuilder.enum(
-    key: Name,
-    default: E?,
-    crossinline modifier: MetaDescriptorBuilder.() -> Unit = {},
-): MetaDescriptorBuilder = value(key, ValueType.STRING) {
-    default?.let {
-        this.default = default.asValue()
-    }
-    allowedValues = enumValues<E>().map { it.asValue() }
-    modifier()
-}
-
 private fun MetaDescriptor.toBuilder(): MetaDescriptorBuilder = MetaDescriptorBuilder().apply {
     description = this@toBuilder.description
     children = this@toBuilder.children.mapValuesTo(LinkedHashMap()) { it.value.toBuilder() }
@@ -179,6 +152,40 @@ private fun MetaDescriptor.toBuilder(): MetaDescriptorBuilder = MetaDescriptorBu
     attributes = this@toBuilder.attributes.toMutableMeta()
 }
 
+
+/**
+ * Create and configure child value descriptor
+ */
+public fun MetaDescriptorBuilder.value(
+    name: Name,
+    type: ValueType,
+    vararg additionalTypes: ValueType,
+    block: MetaDescriptorBuilder.() -> Unit = {},
+): Unit = node(name) {
+    valueType(type, *additionalTypes)
+    block()
+}
+
+public fun MetaDescriptorBuilder.value(
+    name: String,
+    type: ValueType,
+    vararg additionalTypes: ValueType,
+    block: MetaDescriptorBuilder.() -> Unit = {},
+): Unit = value(Name.parse(name), type, additionalTypes = additionalTypes, block)
+
+
+public inline fun <reified E : Enum<E>> MetaDescriptorBuilder.enum(
+    key: Name,
+    default: E?,
+    crossinline modifier: MetaDescriptorBuilder.() -> Unit = {},
+): Unit = value(key, ValueType.STRING) {
+    default?.let {
+        this.default = default.asValue()
+    }
+    allowedValues = enumValues<E>().map { it.asValue() }
+    modifier()
+}
+
 /**
  * Make a deep copy of this descriptor applying given transformation [block]
  */
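
A compact sketch of the reorganized builder DSL after this change (the descriptor content is invented):

    import space.kscience.dataforge.meta.*
    import space.kscience.dataforge.meta.descriptors.*

    val axisDescriptor: MetaDescriptor = MetaDescriptor {
        description = "A single plot axis"
        value("title", ValueType.STRING)
        node("range") {
            // child node built with the new node(name, block) overload
            value("min", ValueType.NUMBER) { default = 0.0.asValue() }
            value("max", ValueType.NUMBER)
        }
    }
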
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/schemeDescriptor.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/schemeDescriptor.kt
index 79fb6fdb..803659a6 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/schemeDescriptor.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/schemeDescriptor.kt
@@ -9,7 +9,7 @@ import kotlin.reflect.typeOf
 public inline fun <S : Scheme, reified T> MetaDescriptorBuilder.value(
     property: KProperty1<S, T>,
     noinline block: MetaDescriptorBuilder.() -> Unit = {},
-): MetaDescriptorBuilder = when (typeOf<T>()) {
+): Unit = when (typeOf<T>()) {
     typeOf<Number>(), typeOf<Int>(), typeOf<Double>(), typeOf<Short>(), typeOf<Long>(), typeOf<Float>() ->
         value(property.name, ValueType.NUMBER) {
             block()
@@ -34,7 +34,7 @@ public inline fun <S : Scheme, reified T> MetaDescriptorBuilder.value(
         multiple = true
         block()
     }
-    else -> item(property.name, block)
+    else -> node(property.name, block)
 }
 
 public inline fun <S : Scheme, reified T : Scheme> MetaDescriptorBuilder.scheme(
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/JsonMetaTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/JsonMetaTest.kt
index fba9b596..2ada3ade 100644
--- a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/JsonMetaTest.kt
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/JsonMetaTest.kt
@@ -2,7 +2,7 @@ package space.kscience.dataforge.meta
 
 import kotlinx.serialization.json.*
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
-import space.kscience.dataforge.meta.descriptors.item
+import space.kscience.dataforge.meta.descriptors.node
 import kotlin.test.Test
 import kotlin.test.assertEquals
 
@@ -32,7 +32,7 @@ class JsonMetaTest {
     }
 
     val descriptor = MetaDescriptor {
-        item("nodeArray") {
+        node("nodeArray") {
             indexKey = "index"
         }
     }

From fd1d98aa87ff1f7b013df876e2a0ac36a87a39c0 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 31 Dec 2023 12:54:38 +0300
Subject: [PATCH 10/77] Merge MetaConverter and MetaSpec

---
 CHANGELOG.md                                  |   1 +
 .../dataforge/properties/MetaProperty.kt      |  12 +-
 .../descriptors/TestAutoDescriptors.kt        |   6 +-
 .../space/kscience/dataforge/meta/JsonMeta.kt |   2 +-
 .../{transformations => }/MetaConverter.kt    |  74 +++++-----
 .../kscience/dataforge/meta/MetaDelegate.kt   |   5 +-
 .../space/kscience/dataforge/meta/MetaSpec.kt |  18 +++
 .../MetaTransformation.kt                     |   3 +-
 .../dataforge/meta/MutableMetaDelegate.kt     |   9 +-
 .../space/kscience/dataforge/meta/Scheme.kt   | 119 ++++++++++++++-
 .../kscience/dataforge/meta/Specification.kt  | 137 ------------------
 .../meta/descriptors/MetaDescriptor.kt        |  15 +-
 .../meta/descriptors/MetaDescriptorBuilder.kt |   6 +-
 .../dataforge/meta/MetaDelegateTest.kt        |   2 +-
 .../kscience/dataforge/workspace/Task.kt      |  25 ++--
 .../dataforge/workspace/WorkspaceBuilder.kt   |   4 +-
 16 files changed, 209 insertions(+), 229 deletions(-)
 rename dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/{transformations => }/MetaConverter.kt (61%)
 create mode 100644 dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
 rename dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/{transformations => }/MetaTransformation.kt (98%)
 delete mode 100644 dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Specification.kt

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8e2861e2..d6daac6c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,6 +8,7 @@
 - Add Meta and MutableMeta delegates for convertable and serializeable
 
 ### Changed
+- Descriptor `children` renamed to `nodes`
 
 ### Deprecated
 - `node(key,converter)` in favor of `serializable` delegate
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/MetaProperty.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/MetaProperty.kt
index e79ce931..85435602 100644
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/MetaProperty.kt
+++ b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/MetaProperty.kt
@@ -1,11 +1,7 @@
 package space.kscience.dataforge.properties
 
 
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.ObservableMutableMeta
-import space.kscience.dataforge.meta.transformations.MetaConverter
-import space.kscience.dataforge.meta.transformations.nullableMetaToObject
-import space.kscience.dataforge.meta.transformations.nullableObjectToMeta
+import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.startsWith
@@ -18,14 +14,14 @@ public class MetaProperty<T : Any>(
 ) : Property<T?> {
 
     override var value: T?
-        get() = converter.nullableMetaToObject(meta[name])
+        get() = converter.readNullable(meta[name])
         set(value) {
-            meta[name] = converter.nullableObjectToMeta(value) ?: Meta.EMPTY
+            meta[name] = converter.convertNullable(value) ?: Meta.EMPTY
         }
 
     override fun onChange(owner: Any?, callback: (T?) -> Unit) {
         meta.onChange(owner) { name ->
-            if (name.startsWith(this@MetaProperty.name)) callback(converter.nullableMetaToObject(this[name]))
+            if (name.startsWith(this@MetaProperty.name)) callback(converter.readNullable(this[name]))
         }
     }
 
diff --git a/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt b/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt
index c1271537..617e85cc 100644
--- a/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt
+++ b/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt
@@ -3,9 +3,11 @@ package space.kscience.dataforge.descriptors
 import kotlinx.serialization.encodeToString
 import kotlinx.serialization.json.Json
 import org.junit.jupiter.api.Test
-import space.kscience.dataforge.meta.*
+import space.kscience.dataforge.meta.Scheme
+import space.kscience.dataforge.meta.SchemeSpec
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
-import space.kscience.dataforge.meta.transformations.MetaConverter
+import space.kscience.dataforge.meta.int
+import space.kscience.dataforge.meta.string
 
 private class TestScheme: Scheme(){
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
index 36373582..8da8b2d3 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
@@ -31,7 +31,7 @@ private fun Meta.toJsonWithIndex(descriptor: MetaDescriptor?, index: String?): J
     val pairs: MutableList<Pair<String, JsonElement>> = items.entries.groupBy {
         it.key.body
     }.mapTo(ArrayList()) { (body, list) ->
-        val childDescriptor = descriptor?.children?.get(body)
+        val childDescriptor = descriptor?.nodes?.get(body)
         if (list.size == 1) {
             val (token, element) = list.first()
                 //do not add an empty element
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/transformations/MetaConverter.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
similarity index 61%
rename from dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/transformations/MetaConverter.kt
rename to dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
index 17774913..0da36d17 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/transformations/MetaConverter.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
@@ -1,19 +1,19 @@
-package space.kscience.dataforge.meta.transformations
+package space.kscience.dataforge.meta
 
 import kotlinx.serialization.KSerializer
 import kotlinx.serialization.json.Json
 import kotlinx.serialization.json.encodeToJsonElement
 import kotlinx.serialization.serializer
-import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.misc.DFExperimental
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
+
 /**
  * A converter of generic object to and from [Meta]
  */
-public interface MetaConverter<T> {
+public interface MetaConverter<T>: MetaSpec<T> {
 
     /**
      * Runtime type of [T]
@@ -23,32 +23,32 @@ public interface MetaConverter<T> {
     /**
      * A descriptor for resulting meta
      */
-    public val descriptor: MetaDescriptor get() = MetaDescriptor.EMPTY
+    override val descriptor: MetaDescriptor get() = MetaDescriptor.EMPTY
 
     /**
-     * Attempt conversion of [meta] to an object or return null if conversion failed
+     * Attempt conversion of [source] to an object or return null if conversion failed
      */
-    public fun metaToObjectOrNull(meta: Meta): T?
+    override fun readOrNull(source: Meta): T?
 
-    public fun metaToObject(meta: Meta): T =
-        metaToObjectOrNull(meta) ?: error("Meta $meta could not be interpreted by $this")
+    override fun read(source: Meta): T =
+        readOrNull(source) ?: error("Meta $source could not be interpreted by $this")
 
-    public fun objectToMeta(obj: T): Meta
+    public fun convert(obj: T): Meta
 
     public companion object {
 
         public val meta: MetaConverter<Meta> = object : MetaConverter<Meta> {
             override val type: KType = typeOf<Meta>()
 
-            override fun metaToObjectOrNull(meta: Meta): Meta = meta
-            override fun objectToMeta(obj: Meta): Meta = obj
+            override fun readOrNull(source: Meta): Meta = source
+            override fun convert(obj: Meta): Meta = obj
         }
 
         public val value: MetaConverter<Value> = object : MetaConverter<Value> {
             override val type: KType = typeOf<Value>()
 
-            override fun metaToObjectOrNull(meta: Meta): Value? = meta.value
-            override fun objectToMeta(obj: Value): Meta = Meta(obj)
+            override fun readOrNull(source: Meta): Value? = source.value
+            override fun convert(obj: Value): Meta = Meta(obj)
         }
 
         public val string: MetaConverter<String> = object : MetaConverter<String> {
@@ -59,8 +59,8 @@ public interface MetaConverter<T> {
             }
 
 
-            override fun metaToObjectOrNull(meta: Meta): String? = meta.string
-            override fun objectToMeta(obj: String): Meta = Meta(obj.asValue())
+            override fun readOrNull(source: Meta): String? = source.string
+            override fun convert(obj: String): Meta = Meta(obj.asValue())
         }
 
         public val boolean: MetaConverter<Boolean> = object : MetaConverter<Boolean> {
@@ -70,8 +70,8 @@ public interface MetaConverter<T> {
                 valueType(ValueType.BOOLEAN)
             }
 
-            override fun metaToObjectOrNull(meta: Meta): Boolean? = meta.boolean
-            override fun objectToMeta(obj: Boolean): Meta = Meta(obj.asValue())
+            override fun readOrNull(source: Meta): Boolean? = source.boolean
+            override fun convert(obj: Boolean): Meta = Meta(obj.asValue())
         }
 
         public val number: MetaConverter<Number> = object : MetaConverter<Number> {
@@ -81,8 +81,8 @@ public interface MetaConverter<T> {
                 valueType(ValueType.NUMBER)
             }
 
-            override fun metaToObjectOrNull(meta: Meta): Number? = meta.number
-            override fun objectToMeta(obj: Number): Meta = Meta(obj.asValue())
+            override fun readOrNull(source: Meta): Number? = source.number
+            override fun convert(obj: Number): Meta = Meta(obj.asValue())
         }
 
         public val double: MetaConverter<Double> = object : MetaConverter<Double> {
@@ -92,8 +92,8 @@ public interface MetaConverter<T> {
                 valueType(ValueType.NUMBER)
             }
 
-            override fun metaToObjectOrNull(meta: Meta): Double? = meta.double
-            override fun objectToMeta(obj: Double): Meta = Meta(obj.asValue())
+            override fun readOrNull(source: Meta): Double? = source.double
+            override fun convert(obj: Double): Meta = Meta(obj.asValue())
         }
 
         public val float: MetaConverter<Float> = object : MetaConverter<Float> {
@@ -103,8 +103,8 @@ public interface MetaConverter<T> {
                 valueType(ValueType.NUMBER)
             }
 
-            override fun metaToObjectOrNull(meta: Meta): Float? = meta.float
-            override fun objectToMeta(obj: Float): Meta = Meta(obj.asValue())
+            override fun readOrNull(source: Meta): Float? = source.float
+            override fun convert(obj: Float): Meta = Meta(obj.asValue())
         }
 
         public val int: MetaConverter<Int> = object : MetaConverter<Int> {
@@ -114,8 +114,8 @@ public interface MetaConverter<T> {
                 valueType(ValueType.NUMBER)
             }
 
-            override fun metaToObjectOrNull(meta: Meta): Int? = meta.int
-            override fun objectToMeta(obj: Int): Meta = Meta(obj.asValue())
+            override fun readOrNull(source: Meta): Int? = source.int
+            override fun convert(obj: Int): Meta = Meta(obj.asValue())
         }
 
         public val long: MetaConverter<Long> = object : MetaConverter<Long> {
@@ -125,8 +125,8 @@ public interface MetaConverter<T> {
                 valueType(ValueType.NUMBER)
             }
 
-            override fun metaToObjectOrNull(meta: Meta): Long? = meta.long
-            override fun objectToMeta(obj: Long): Meta = Meta(obj.asValue())
+            override fun readOrNull(source: Meta): Long? = source.long
+            override fun convert(obj: Long): Meta = Meta(obj.asValue())
         }
 
         public inline fun <reified E : Enum<E>> enum(): MetaConverter<E> = object : MetaConverter<E> {
@@ -138,9 +138,9 @@ public interface MetaConverter<T> {
             }
 
             @Suppress("USELESS_CAST")
-            override fun metaToObjectOrNull(meta: Meta): E = meta.enum<E>() as? E ?: error("The Item is not a Enum")
+            override fun readOrNull(source: Meta): E = source.enum<E>() as? E ?: error("The item is not an Enum")
 
-            override fun objectToMeta(obj: E): Meta = Meta(obj.asValue())
+            override fun convert(obj: E): Meta = Meta(obj.asValue())
         }
 
         public fun <T> valueList(
@@ -153,9 +153,9 @@ public interface MetaConverter<T> {
                 valueType(ValueType.LIST)
             }
 
-            override fun metaToObjectOrNull(meta: Meta): List<T>? = meta.value?.list?.map(reader)
+            override fun readOrNull(source: Meta): List<T>? = source.value?.list?.map(reader)
 
-            override fun objectToMeta(obj: List<T>): Meta = Meta(obj.map(writer).asValue())
+            override fun convert(obj: List<T>): Meta = Meta(obj.map(writer).asValue())
         }
 
         /**
@@ -168,12 +168,12 @@ public interface MetaConverter<T> {
             override val type: KType = typeOf<T>()
             private val serializer: KSerializer<T> = serializer()
 
-            override fun metaToObjectOrNull(meta: Meta): T? {
-                val json = meta.toJson(descriptor)
+            override fun readOrNull(source: Meta): T? {
+                val json = source.toJson(descriptor)
                 return Json.decodeFromJsonElement(serializer, json)
             }
 
-            override fun objectToMeta(obj: T): Meta {
+            override fun convert(obj: T): Meta {
                 val json = Json.encodeToJsonElement(obj)
                 return json.toMeta(descriptor)
             }
@@ -183,7 +183,7 @@ public interface MetaConverter<T> {
     }
 }
 
-public fun <T : Any> MetaConverter<T>.nullableMetaToObject(item: Meta?): T? = item?.let { metaToObject(it) }
-public fun <T : Any> MetaConverter<T>.nullableObjectToMeta(obj: T?): Meta? = obj?.let { objectToMeta(it) }
+public fun <T : Any> MetaConverter<T>.readNullable(item: Meta?): T? = item?.let { read(it) }
+public fun <T : Any> MetaConverter<T>.convertNullable(obj: T?): Meta? = obj?.let { convert(it) }
 
-public fun <T> MetaConverter<T>.valueToObject(value: Value): T? = metaToObject(Meta(value))
+public fun <T> MetaConverter<T>.readValue(value: Value): T? = read(Meta(value))
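
For downstream converters the rename amounts to implementing `readOrNull`/`convert` instead of `metaToObjectOrNull`/`objectToMeta`; a sketch with an invented `Point` type:

    import space.kscience.dataforge.meta.*
    import kotlin.reflect.KType
    import kotlin.reflect.typeOf

    data class Point(val x: Double, val y: Double)

    object PointConverter : MetaConverter<Point> {
        override val type: KType = typeOf<Point>()

        // return null when the node cannot be interpreted as a point
        override fun readOrNull(source: Meta): Point? {
            val x = source["x"]?.double ?: return null
            val y = source["y"]?.double ?: return null
            return Point(x, y)
        }

        override fun convert(obj: Point): Meta = Meta {
            "x" put obj.x
            "y" put obj.y
        }
    }
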
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
index 42e4fe14..70ff460d 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
@@ -1,7 +1,6 @@
 package space.kscience.dataforge.meta
 
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
-import space.kscience.dataforge.meta.transformations.MetaConverter
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
@@ -20,7 +19,7 @@ public fun <T> MetaProvider.convertable(
     converter: MetaConverter<T>,
     key: Name? = null,
 ): ReadOnlyProperty<Any?, T?> = ReadOnlyProperty { _, property ->
-    get(key ?: property.name.asName())?.let { converter.metaToObject(it) }
+    get(key ?: property.name.asName())?.let { converter.read(it) }
 }
 
 /**
@@ -46,7 +45,7 @@ public fun <T> Meta.listOfConvertable(
     key: Name? = null,
 ): ReadOnlyProperty<Any?, List<T>> = ReadOnlyProperty{_, property ->
     val name = key ?: property.name.asName()
-    getIndexed(name).values.map { converter.metaToObject(it) }
+    getIndexed(name).values.map { converter.read(it) }
 }
 
 @DFExperimental
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
new file mode 100644
index 00000000..be21cef7
--- /dev/null
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
@@ -0,0 +1,18 @@
+package space.kscience.dataforge.meta
+
+import space.kscience.dataforge.meta.descriptors.Described
+
+public interface MetaSpec<out T> : Described {
+
+    /**
+     * Read the source meta into an object and return null if the Meta could not be interpreted as the target type.
+     */
+    public fun readOrNull(source: Meta): T?
+
+    /**
+     * Read a generic read-only meta with this [MetaSpec], producing an instance of the desired type.
+     * Throws an error if conversion could not be done.
+     */
+    public fun read(source: Meta): T = readOrNull(source) ?: error("Meta $source could not be interpreted by $this")
+}
+
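
Since `MetaConverter` now implements `MetaSpec`, any converter can be passed where only read access is required; a small illustrative helper (the `readChild` name is invented):

    import space.kscience.dataforge.meta.*

    // read a child node through any MetaSpec, returning null if the node is absent or unreadable
    fun <T> Meta.readChild(key: String, spec: MetaSpec<T>): T? =
        get(key)?.let { spec.readOrNull(it) }

    fun demoMetaSpec() {
        val config = Meta { "timeout" put 30 }
        println(config.readChild("timeout", MetaConverter.int)) // prints 30
    }
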
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/transformations/MetaTransformation.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaTransformation.kt
similarity index 98%
rename from dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/transformations/MetaTransformation.kt
rename to dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaTransformation.kt
index d41365a6..dbf94f0f 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/transformations/MetaTransformation.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaTransformation.kt
@@ -1,6 +1,5 @@
-package space.kscience.dataforge.meta.transformations
+package space.kscience.dataforge.meta
 
-import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import kotlin.jvm.JvmInline
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
index d77348dd..f6e96109 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
@@ -1,7 +1,6 @@
 package space.kscience.dataforge.meta
 
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
-import space.kscience.dataforge.meta.transformations.MetaConverter
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
@@ -33,12 +32,12 @@ public fun <T> MutableMetaProvider.convertable(
     object : ReadWriteProperty<Any?, T?> {
         override fun getValue(thisRef: Any?, property: KProperty<*>): T? {
             val name = key ?: property.name.asName()
-            return get(name)?.let { converter.metaToObject(it) }
+            return get(name)?.let { converter.read(it) }
         }
 
         override fun setValue(thisRef: Any?, property: KProperty<*>, value: T?) {
             val name = key ?: property.name.asName()
-            set(name, value?.let { converter.objectToMeta(it) })
+            set(name, value?.let { converter.convert(it) })
         }
     }
 
@@ -66,12 +65,12 @@ public fun <T> MutableMeta.listOfConvertable(
 ): ReadWriteProperty<Any?, List<T>> = object : ReadWriteProperty<Any?, List<T>> {
     override fun getValue(thisRef: Any?, property: KProperty<*>): List<T> {
         val name = key ?: property.name.asName()
-        return getIndexed(name).values.map { converter.metaToObject(it) }
+        return getIndexed(name).values.map { converter.read(it) }
     }
 
     override fun setValue(thisRef: Any?, property: KProperty<*>, value: List<T>) {
         val name = key ?: property.name.asName()
-        setIndexed(name, value.map { converter.objectToMeta(it) })
+        setIndexed(name, value.map { converter.convert(it) })
     }
 }
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
index ab504733..ff8d8095 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
@@ -7,9 +7,11 @@ import space.kscience.dataforge.meta.descriptors.validate
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.misc.ThreadSafe
 import space.kscience.dataforge.names.*
+import kotlin.properties.ReadWriteProperty
+import kotlin.reflect.KProperty
 
 /**
- * A base for delegate-based or descriptor-based scheme. [Scheme] has an empty constructor to simplify usage from [Specification].
+ * A base for delegate-based or descriptor-based scheme. [Scheme] has an empty constructor to simplify usage from [MetaSpec].
  * Default item provider and [MetaDescriptor] are optional
  */
 public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurable {
@@ -165,23 +167,124 @@ public inline fun <T : Scheme> T.copy(spec: SchemeSpec<T>, block: T.() -> Unit =
  */
 public open class SchemeSpec<out T : Scheme>(
     private val builder: () -> T,
-) : Specification<T> {
+) : MetaSpec<T> {
 
     override val descriptor: MetaDescriptor? get() = null
 
-    override fun read(source: Meta): T = builder().also {
+    override fun readOrNull(source: Meta): T = builder().also {
         it.initialize(MutableMeta(), source, descriptor)
     }
 
-    override fun write(target: MutableMeta): T = empty().also {
+    public fun write(target: MutableMeta): T = empty().also {
         it.initialize(target, Meta.EMPTY, descriptor)
     }
 
-    override fun empty(): T = builder().also {
+    /**
+     * Generate an empty object
+     */
+    public fun empty(): T = builder().also {
         it.initialize(MutableMeta(), Meta.EMPTY, descriptor)
     }
 
-    @Suppress("OVERRIDE_BY_INLINE")
-    final override inline operator fun invoke(action: T.() -> Unit): T = empty().apply(action)
+    /**
+     * A convenience method to use specifications in builders
+     */
+    public inline operator fun invoke(action: T.() -> Unit): T = empty().apply(action)
 
-}
\ No newline at end of file
+}
+
+
+
+/**
+ * Update a [MutableMeta] using given specification
+ */
+public fun <T : Scheme> MutableMeta.updateWith(
+    spec: SchemeSpec<T>,
+    action: T.() -> Unit,
+): T = spec.write(this).apply(action)
+
+
+/**
+ * Update configuration using given specification
+ */
+public fun <T : Scheme> Configurable.updateWith(
+    spec: SchemeSpec<T>,
+    action: T.() -> Unit,
+): T = spec.write(meta).apply(action)
+
+
+/**
+ * A delegate that uses a [MetaSpec] to wrap a child of this provider
+ */
+public fun <T : Scheme> MutableMeta.scheme(
+    spec: SchemeSpec<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, T> = object : ReadWriteProperty<Any?, T> {
+    override fun getValue(thisRef: Any?, property: KProperty<*>): T {
+        val name = key ?: property.name.asName()
+        return spec.write(getOrCreate(name))
+    }
+
+    override fun setValue(thisRef: Any?, property: KProperty<*>, value: T) {
+        val name = key ?: property.name.asName()
+        set(name, value.toMeta())
+    }
+}
+
+public fun <T : Scheme> Scheme.scheme(
+    spec: SchemeSpec<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, T> = meta.scheme(spec, key)
+
+/**
+ * A delegate that uses a [MetaSpec] to wrap a child of this provider.
+ * Returns null if a meta node with the given name does not exist.
+ */
+public fun <T : Scheme> MutableMeta.schemeOrNull(
+    spec: SchemeSpec<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, T?> = object : ReadWriteProperty<Any?, T?> {
+    override fun getValue(thisRef: Any?, property: KProperty<*>): T? {
+        val name = key ?: property.name.asName()
+        return if (get(name) == null) null else spec.write(getOrCreate(name))
+    }
+
+    override fun setValue(thisRef: Any?, property: KProperty<*>, value: T?) {
+        val name = key ?: property.name.asName()
+        if (value == null) remove(name)
+        else set(name, value.toMeta())
+    }
+}
+
+public fun <T : Scheme> Scheme.schemeOrNull(
+    spec: SchemeSpec<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, T?> = meta.schemeOrNull(spec, key)
+
+/**
+ * A delegate that uses a [MetaSpec] to wrap a list of child providers.
+ * If the children are mutable, changes to the list elements are reflected in them.
+ * The list is a snapshot of the children's state, so structural changes are not reflected in its composition.
+ */
+@DFExperimental
+public fun <T : Scheme> MutableMeta.listOfScheme(
+    spec: SchemeSpec<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, List<T>> = object : ReadWriteProperty<Any?, List<T>> {
+    override fun getValue(thisRef: Any?, property: KProperty<*>): List<T> {
+        val name = key ?: property.name.asName()
+        return getIndexed(name).values.map { spec.write(it as MutableMeta) }
+    }
+
+    override fun setValue(thisRef: Any?, property: KProperty<*>, value: List<T>) {
+        val name = key ?: property.name.asName()
+        setIndexed(name, value.map { it.toMeta() })
+    }
+}
+
+
+@DFExperimental
+public fun <T : Scheme> Scheme.listOfScheme(
+    spec: SchemeSpec<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, List<T>> = meta.listOfScheme(spec, key)
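
To illustrate the new delegates, a hypothetical sketch of nesting one scheme inside another (AxisScheme and PlotScheme are made-up names):

    class AxisScheme : Scheme() {
        var label by string()
        companion object : SchemeSpec<AxisScheme>(::AxisScheme)
    }

    class PlotScheme : Scheme() {
        var xAxis by scheme(AxisScheme)        // the child node is created on first access
        var yAxis by schemeOrNull(AxisScheme)  // stays null until explicitly assigned
        companion object : SchemeSpec<PlotScheme>(::PlotScheme)
    }

    val plot = PlotScheme {
        xAxis.label = "time, s"
    }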
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Specification.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Specification.kt
deleted file mode 100644
index 7f3dd2a6..00000000
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Specification.kt
+++ /dev/null
@@ -1,137 +0,0 @@
-package space.kscience.dataforge.meta
-
-import space.kscience.dataforge.meta.descriptors.Described
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.asName
-import kotlin.properties.ReadWriteProperty
-import kotlin.reflect.KProperty
-
-public interface ReadOnlySpecification<out T : Any> : Described {
-
-    /**
-     * Read generic read-only meta with this [Specification] producing instance of desired type.
-     * The source is not mutated even if it is in theory mutable
-     */
-    public fun read(source: Meta): T
-
-    /**
-     * Generate an empty object
-     */
-    public fun empty(): T
-
-    /**
-     * A convenience method to use specifications in builders
-     */
-    public operator fun invoke(action: T.() -> Unit): T = empty().apply(action)
-}
-
-
-/**
- * Allows to apply custom configuration in a type safe way to simple untyped configuration.
- * By convention [Scheme] companion should inherit this class
- *
- */
-public interface Specification<out T : Any> : ReadOnlySpecification<T> {
-    /**
-     * Wrap [MutableMeta], using it as inner storage (changes to [Specification] are reflected on [MutableMeta]
-     */
-    public fun write(target: MutableMeta): T
-}
-
-/**
- * Update a [MutableMeta] using given specification
- */
-public fun <T : Any> MutableMeta.updateWith(
-    spec: Specification<T>,
-    action: T.() -> Unit,
-): T = spec.write(this).apply(action)
-
-
-/**
- * Update configuration using given specification
- */
-public fun <T : Any> Configurable.updateWith(
-    spec: Specification<T>,
-    action: T.() -> Unit,
-): T = spec.write(meta).apply(action)
-
-//
-//public fun  <M : MutableTypedMeta<M>> MutableMeta.withSpec(spec: Specification<M>): M? =
-//    spec.write(it)
-
-/**
- * A delegate that uses a [Specification] to wrap a child of this provider
- */
-public fun <T : Scheme> MutableMeta.spec(
-    spec: Specification<T>,
-    key: Name? = null,
-): ReadWriteProperty<Any?, T> = object : ReadWriteProperty<Any?, T> {
-    override fun getValue(thisRef: Any?, property: KProperty<*>): T {
-        val name = key ?: property.name.asName()
-        return spec.write(getOrCreate(name))
-    }
-
-    override fun setValue(thisRef: Any?, property: KProperty<*>, value: T) {
-        val name = key ?: property.name.asName()
-        set(name, value.toMeta())
-    }
-}
-
-public fun <T : Scheme> Scheme.spec(
-    spec: Specification<T>,
-    key: Name? = null,
-): ReadWriteProperty<Any?, T> = meta.spec(spec, key)
-
-/**
- * A delegate that uses a [Specification] to wrap a child of this provider.
- * Returns null if meta with given name does not exist.
- */
-public fun <T : Scheme> MutableMeta.specOrNull(
-    spec: Specification<T>,
-    key: Name? = null,
-): ReadWriteProperty<Any?, T?> = object : ReadWriteProperty<Any?, T?> {
-    override fun getValue(thisRef: Any?, property: KProperty<*>): T? {
-        val name = key ?: property.name.asName()
-        return if (get(name) == null) null else spec.write(getOrCreate(name))
-    }
-
-    override fun setValue(thisRef: Any?, property: KProperty<*>, value: T?) {
-        val name = key ?: property.name.asName()
-        if (value == null) remove(name)
-        else set(name, value.toMeta())
-    }
-}
-
-public fun <T : Scheme> Scheme.specOrNull(
-    spec: Specification<T>,
-    key: Name? = null,
-): ReadWriteProperty<Any?, T?> = meta.specOrNull(spec, key)
-
-/**
- * A delegate that uses a [Specification] to wrap a list of child providers.
- * If children are mutable, the changes in list elements are reflected on them.
- * The list is a snapshot of children state, so change in structure is not reflected on its composition.
- */
-@DFExperimental
-public fun <T : Scheme> MutableMeta.listOfSpec(
-    spec: Specification<T>,
-    key: Name? = null,
-): ReadWriteProperty<Any?, List<T>> = object : ReadWriteProperty<Any?, List<T>> {
-    override fun getValue(thisRef: Any?, property: KProperty<*>): List<T> {
-        val name = key ?: property.name.asName()
-        return getIndexed(name).values.map { spec.write(it as MutableMeta) }
-    }
-
-    override fun setValue(thisRef: Any?, property: KProperty<*>, value: List<T>) {
-        val name = key ?: property.name.asName()
-        setIndexed(name, value.map { it.toMeta() })
-    }
-}
-
-
-@DFExperimental
-public fun <T : Scheme> Scheme.listOfSpec(
-    spec: Specification<T>,
-    key: Name? = null,
-): ReadWriteProperty<Any?, List<T>> = meta.listOfSpec(spec, key)
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptor.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptor.kt
index 742b89ed..12bbd5d4 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptor.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptor.kt
@@ -27,7 +27,7 @@ public enum class ValueRestriction {
 /**
  * The descriptor for a meta
  * @param description description text
- * @param children child descriptors for this node
+ * @param nodes child descriptors for this node
  * @param multiple True if same name siblings with this name are allowed
  * @param valueRestriction The requirements for node content
  * @param valueTypes list of allowed types for [Meta.value], null if all values are allowed.
@@ -39,7 +39,7 @@ public enum class ValueRestriction {
 @Serializable
 public data class MetaDescriptor(
     public val description: String? = null,
-    public val children: Map<String, MetaDescriptor> = emptyMap(),
+    public val nodes: Map<String, MetaDescriptor> = emptyMap(),
     public val multiple: Boolean = false,
     public val valueRestriction: ValueRestriction = ValueRestriction.NONE,
     public val valueTypes: List<ValueType>? = null,
@@ -47,6 +47,9 @@ public data class MetaDescriptor(
     public val defaultValue: Value? = null,
     public val attributes: Meta = Meta.EMPTY,
 ) {
+    @Deprecated("Replace by nodes", ReplaceWith("nodes"))
+    public val children: Map<String, MetaDescriptor> get() = nodes
+
     /**
      * A node constructed of default values for this descriptor and its children
      */
@@ -55,7 +58,7 @@ public data class MetaDescriptor(
             defaultValue?.let { defaultValue ->
                 this.value = defaultValue
             }
-            children.forEach { (key, descriptor) ->
+            nodes.forEach { (key, descriptor) ->
                 set(key, descriptor.defaultNode)
             }
         }
@@ -67,13 +70,13 @@ public data class MetaDescriptor(
     }
 }
 
-public val MetaDescriptor.required: Boolean get() = valueRestriction == ValueRestriction.REQUIRED || children.values.any { required }
+public val MetaDescriptor.required: Boolean get() = valueRestriction == ValueRestriction.REQUIRED || nodes.values.any { required }
 
 public val MetaDescriptor.allowedValues: List<Value>? get() = attributes[MetaDescriptor.ALLOWED_VALUES_KEY]?.value?.list
 
 public operator fun MetaDescriptor.get(name: Name): MetaDescriptor? = when (name.length) {
     0 -> this
-    1 -> children[name.firstOrNull()!!.toString()]
+    1 -> nodes[name.firstOrNull()!!.toString()]
     else -> get(name.firstOrNull()!!.asName())?.get(name.cutFirst())
 }
 
@@ -95,7 +98,7 @@ public fun MetaDescriptor.validate(item: Meta?): Boolean {
     if (item == null) return !required
     if (!validate(item.value)) return false
 
-    children.forEach { (key, childDescriptor) ->
+    nodes.forEach { (key, childDescriptor) ->
         if (!childDescriptor.validate(item[key])) return false
     }
     return true
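
A small sketch of how the rename reads in user code (the descriptor content is made up); `children` remains available as a deprecated alias:

    val descriptor = MetaDescriptor {
        node("axis") {
            description = "Axis display settings"
        }
    }

    val axisDescriptor: MetaDescriptor? = descriptor.nodes["axis"]  // previously descriptor.children["axis"]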
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
index 751d2525..5d4d81ad 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
@@ -80,7 +80,7 @@ public class MetaDescriptorBuilder @PublishedApi internal constructor() {
 
     public fun from(descriptor: MetaDescriptor) {
         description = descriptor.description
-        children.putAll(descriptor.children.mapValues { it.value.toBuilder() })
+        children.putAll(descriptor.nodes.mapValues { it.value.toBuilder() })
         multiple = descriptor.multiple
         valueRestriction = descriptor.valueRestriction
         valueTypes = descriptor.valueTypes
@@ -92,7 +92,7 @@ public class MetaDescriptorBuilder @PublishedApi internal constructor() {
     @PublishedApi
     internal fun build(): MetaDescriptor = MetaDescriptor(
         description = description,
-        children = children.mapValues { it.value.build() },
+        nodes = children.mapValues { it.value.build() },
         multiple = multiple,
         valueRestriction = valueRestriction,
         valueTypes = valueTypes,
@@ -143,7 +143,7 @@ public fun MetaDescriptorBuilder.required() {
 
 private fun MetaDescriptor.toBuilder(): MetaDescriptorBuilder = MetaDescriptorBuilder().apply {
     description = this@toBuilder.description
-    children = this@toBuilder.children.mapValuesTo(LinkedHashMap()) { it.value.toBuilder() }
+    children = this@toBuilder.nodes.mapValuesTo(LinkedHashMap()) { it.value.toBuilder() }
     multiple = this@toBuilder.multiple
     valueRestriction = this@toBuilder.valueRestriction
     valueTypes = this@toBuilder.valueTypes
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaDelegateTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaDelegateTest.kt
index 7a2dbc22..4b99cc3b 100644
--- a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaDelegateTest.kt
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaDelegateTest.kt
@@ -20,7 +20,7 @@ class MetaDelegateTest {
         var myValue by string()
         var safeValue by double(2.2)
         var enumValue by enum(TestEnum.YES)
-        var inner by spec(InnerScheme)
+        var inner by scheme(InnerScheme)
 
         companion object : SchemeSpec<TestScheme>(::TestScheme)
     }
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
index 329d9c5a..19d16c68 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
@@ -6,7 +6,7 @@ import space.kscience.dataforge.data.DataTree
 import space.kscience.dataforge.data.GoalExecutionRestriction
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MetaRepr
-import space.kscience.dataforge.meta.Specification
+import space.kscience.dataforge.meta.MetaSpec
 import space.kscience.dataforge.meta.descriptors.Described
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.misc.DfType
@@ -43,10 +43,10 @@ public interface Task<out T : Any> : Described {
 }
 
 /**
- * A [Task] with [Specification] for wrapping and unwrapping task configuration
+ * A [Task] with [MetaSpec] for wrapping and unwrapping task configuration
  */
 public interface TaskWithSpec<out T : Any, C : Any> : Task<T> {
-    public val spec: Specification<C>
+    public val spec: MetaSpec<C>
     override val descriptor: MetaDescriptor? get() = spec.descriptor
 
     public suspend fun execute(workspace: Workspace, taskName: Name, configuration: C): TaskResult<T>
@@ -55,11 +55,11 @@ public interface TaskWithSpec<out T : Any, C : Any> : Task<T> {
         execute(workspace, taskName, spec.read(taskMeta))
 }
 
-public suspend fun <T : Any, C : Any> TaskWithSpec<T, C>.execute(
-    workspace: Workspace,
-    taskName: Name,
-    block: C.() -> Unit = {},
-): TaskResult<T> = execute(workspace, taskName, spec(block))
+//public suspend fun <T : Any, C : Scheme> TaskWithSpec<T, C>.execute(
+//    workspace: Workspace,
+//    taskName: Name,
+//    block: C.() -> Unit = {},
+//): TaskResult<T> = execute(workspace, taskName, spec(block))
 
 public class TaskResultBuilder<in T : Any>(
     public val workspace: Workspace,
@@ -76,7 +76,6 @@ public class TaskResultBuilder<in T : Any>(
  * @param descriptor of meta accepted by this task
  * @param builder for resulting data set
  */
-@Suppress("FunctionName")
 public fun <T : Any> Task(
     resultType: KType,
     descriptor: MetaDescriptor? = null,
@@ -98,7 +97,6 @@ public fun <T : Any> Task(
     }
 }
 
-@Suppress("FunctionName")
 public inline fun <reified T : Any> Task(
     descriptor: MetaDescriptor? = null,
     noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
@@ -116,10 +114,10 @@ public inline fun <reified T : Any> Task(
 @Suppress("FunctionName")
 public fun <T : Any, C : MetaRepr> Task(
     resultType: KType,
-    specification: Specification<C>,
+    specification: MetaSpec<C>,
     builder: suspend TaskResultBuilder<T>.(C) -> Unit,
 ): TaskWithSpec<T, C> = object : TaskWithSpec<T, C> {
-    override val spec: Specification<C> = specification
+    override val spec: MetaSpec<C> = specification
 
     override suspend fun execute(
         workspace: Workspace,
@@ -135,8 +133,7 @@ public fun <T : Any, C : MetaRepr> Task(
     }
 }
 
-@Suppress("FunctionName")
 public inline fun <reified T : Any, C : MetaRepr> Task(
-    specification: Specification<C>,
+    specification: MetaSpec<C>,
     noinline builder: suspend TaskResultBuilder<T>.(C) -> Unit,
 ): Task<T> = Task(typeOf<T>(), specification, builder)
\ No newline at end of file
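
A hypothetical sketch of declaring a task with a typed configuration after the switch to [MetaSpec] (CalibrationScheme and the task body are invented for illustration):

    class CalibrationScheme : Scheme() {
        var factor by double(1.0)
        companion object : SchemeSpec<CalibrationScheme>(::CalibrationScheme)
    }

    val calibrate: Task<Int> = Task<Int, CalibrationScheme>(CalibrationScheme) { config ->
        // config is a CalibrationScheme; use config.factor to fill the resulting data set
    }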
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
index 1538460f..5489e200 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
@@ -7,8 +7,8 @@ import space.kscience.dataforge.context.Global
 import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MetaRepr
+import space.kscience.dataforge.meta.MetaSpec
 import space.kscience.dataforge.meta.MutableMeta
-import space.kscience.dataforge.meta.Specification
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
 import space.kscience.dataforge.misc.DFBuilder
@@ -68,7 +68,7 @@ public inline fun <reified T : Any> TaskContainer.task(
 }
 
 public inline fun <reified T : Any, C : MetaRepr> TaskContainer.task(
-    specification: Specification<C>,
+    specification: MetaSpec<C>,
     noinline builder: suspend TaskResultBuilder<T>.(C) -> Unit,
 ): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> = PropertyDelegateProvider { _, property ->
     val taskName = Name.parse(property.name)

From 991f77c45a82908de5923ce3665ca687945f01b1 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 31 Dec 2023 13:08:30 +0300
Subject: [PATCH 11/77] Merge MetaConverter and MetaSpec

---
 CHANGELOG.md                                  |  1 +
 .../kscience/dataforge/properties/Property.kt |  2 --
 .../kscience/dataforge/meta/MetaConverter.kt  |  3 +--
 .../kscience/dataforge/meta/MetaDelegate.kt   | 20 +++++++-------
 .../space/kscience/dataforge/meta/MetaSpec.kt |  3 +++
 .../kscience/dataforge/meta/ObservableMeta.kt | 26 +++----------------
 .../space/kscience/dataforge/meta/Scheme.kt   | 22 ++++++++++++++++
 .../meta/descriptors/schemeDescriptor.kt      |  3 +++
 8 files changed, 44 insertions(+), 36 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index d6daac6c..7b9586a8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,6 +9,7 @@
 
 ### Changed
 - Descriptor `children` renamed to `nodes`
+- `MetaConverter` now inherits `MetaSpec` (former `Specification`), so `MetaConverter` can be used more universally.
 
 ### Deprecated
 - `node(key,converter)` in favor of `serializable` delegate
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/Property.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/Property.kt
index 7b0280eb..b11fb2e1 100644
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/Property.kt
+++ b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/Property.kt
@@ -1,6 +1,5 @@
 package space.kscience.dataforge.properties
 
-import kotlinx.coroutines.ExperimentalCoroutinesApi
 import kotlinx.coroutines.flow.MutableStateFlow
 import kotlinx.coroutines.flow.StateFlow
 import space.kscience.dataforge.misc.DFExperimental
@@ -14,7 +13,6 @@ public interface Property<T> {
 }
 
 @DFExperimental
-@OptIn(ExperimentalCoroutinesApi::class)
 public fun <T> Property<T>.toFlow(): StateFlow<T> = MutableStateFlow(value).also { stateFlow ->
     onChange {
         stateFlow.value = it
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
index 0da36d17..933cc13d 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
@@ -183,7 +183,6 @@ public interface MetaConverter<T>: MetaSpec<T> {
     }
 }
 
-public fun <T : Any> MetaConverter<T>.readNullable(item: Meta?): T? = item?.let { read(it) }
 public fun <T : Any> MetaConverter<T>.convertNullable(obj: T?): Meta? = obj?.let { convert(it) }
 
-public fun <T> MetaConverter<T>.readValue(value: Value): T? = read(Meta(value))
+
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
index 70ff460d..a24f7371 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
@@ -13,13 +13,13 @@ public fun MetaProvider.node(key: Name? = null): ReadOnlyProperty<Any?, Meta?> =
 }
 
 /**
- * Use [converter] to read the Meta node
+ * Use [metaSpec] to read the Meta node
  */
-public fun <T> MetaProvider.convertable(
-    converter: MetaConverter<T>,
+public fun <T> MetaProvider.spec(
+    metaSpec: MetaSpec<T>,
     key: Name? = null,
 ): ReadOnlyProperty<Any?, T?> = ReadOnlyProperty { _, property ->
-    get(key ?: property.name.asName())?.let { converter.read(it) }
+    get(key ?: property.name.asName())?.let { metaSpec.read(it) }
 }
 
 /**
@@ -29,19 +29,19 @@ public fun <T> MetaProvider.convertable(
 public inline fun <reified T> MetaProvider.serializable(
     descriptor: MetaDescriptor? = null,
     key: Name? = null,
-): ReadOnlyProperty<Any?, T?> = convertable(MetaConverter.serializable(descriptor), key)
+): ReadOnlyProperty<Any?, T?> = spec(MetaConverter.serializable(descriptor), key)
 
 @Deprecated("Use convertable", ReplaceWith("convertable(converter, key)"))
 public fun <T> MetaProvider.node(
     key: Name? = null,
-    converter: MetaConverter<T>,
-): ReadOnlyProperty<Any?, T?> = convertable(converter, key)
+    converter: MetaSpec<T>,
+): ReadOnlyProperty<Any?, T?> = spec(converter, key)
 
 /**
  * Use [converter] to convert a list of same name siblings meta to object
  */
-public fun <T> Meta.listOfConvertable(
-    converter: MetaConverter<T>,
+public fun <T> Meta.listOfSpec(
+    converter: MetaSpec<T>,
     key: Name? = null,
 ): ReadOnlyProperty<Any?, List<T>> = ReadOnlyProperty{_, property ->
     val name = key ?: property.name.asName()
@@ -52,7 +52,7 @@ public fun <T> Meta.listOfConvertable(
 public inline fun <reified T> Meta.listOfSerializable(
     descriptor: MetaDescriptor? = null,
     key: Name? = null,
-): ReadOnlyProperty<Any?, List<T>> = listOfConvertable(MetaConverter.serializable(descriptor), key)
+): ReadOnlyProperty<Any?, List<T>> = listOfSpec(MetaConverter.serializable(descriptor), key)
 
 /**
  * A property delegate that uses custom key
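
An illustrative sketch of the renamed read-only delegate (the meta content is hypothetical; `MetaConverter.int` is assumed to be available as before):

    val settings = Meta {
        "limit" put 10
    }

    // reads settings["limit"] through the converter; the property name supplies the key
    val limit: Int? by settings.spec(MetaConverter.int)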
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
index be21cef7..9918d504 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
@@ -16,3 +16,6 @@ public interface MetaSpec<out T> : Described {
     public fun read(source: Meta): T = readOrNull(source) ?: error("Meta $source could not be interpreted by $this")
 }
 
+
+public fun <T : Any> MetaSpec<T>.readNullable(item: Meta?): T? = item?.let { read(it) }
+public fun <T> MetaSpec<T>.readValue(value: Value): T? = read(Meta(value))
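
These helpers now apply to any [MetaSpec]; a short sketch (assuming `MetaConverter.double` exists as before):

    val item: Meta? = Meta { "x" put 3.14 }["x"]

    val fromMeta: Double? = MetaConverter.double.readNullable(item)         // 3.14, or null if the item is absent
    val fromValue: Double? = MetaConverter.double.readValue(3.14.asValue()) // reads directly from a Value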
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
index 5a2b05f7..4a3db511 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
@@ -1,8 +1,10 @@
 package space.kscience.dataforge.meta
 
 import space.kscience.dataforge.misc.ThreadSafe
-import space.kscience.dataforge.names.*
-import kotlin.reflect.KProperty1
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.cutFirst
+import space.kscience.dataforge.names.firstOrNull
+import space.kscience.dataforge.names.isEmpty
 
 
 internal data class MetaListener(
@@ -67,24 +69,4 @@ internal abstract class AbstractObservableMeta : ObservableMeta {
     override fun toString(): String = Meta.toString(this)
     override fun equals(other: Any?): Boolean = Meta.equals(this, other as? Meta)
     override fun hashCode(): Int = Meta.hashCode(this)
-}
-
-/**
- * Use the value of the property in a [callBack].
- * The callback is called once immediately after subscription to pass the initial value.
- *
- * Optional [owner] property is used for
- */
-public fun <S : Scheme, T> S.useProperty(
-    property: KProperty1<S, T>,
-    owner: Any? = null,
-    callBack: S.(T) -> Unit,
-) {
-    //Pass initial value.
-    callBack(property.get(this))
-    meta.onChange(owner) { name ->
-        if (name.startsWith(property.name.asName())) {
-            callBack(property.get(this@useProperty))
-        }
-    }
 }
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
index ff8d8095..1d654ac2 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
@@ -9,6 +9,7 @@ import space.kscience.dataforge.misc.ThreadSafe
 import space.kscience.dataforge.names.*
 import kotlin.properties.ReadWriteProperty
 import kotlin.reflect.KProperty
+import kotlin.reflect.KProperty1
 
 /**
  * A base for delegate-based or descriptor-based scheme. [Scheme] has an empty constructor to simplify usage from [MetaSpec].
@@ -288,3 +289,24 @@ public fun <T : Scheme> Scheme.listOfScheme(
     spec: SchemeSpec<T>,
     key: Name? = null,
 ): ReadWriteProperty<Any?, List<T>> = meta.listOfScheme(spec, key)
+
+
+/**
+ * Use the value of the property in a [callBack].
+ * The callback is called once immediately after subscription to pass the initial value.
+ *
+ * The optional [owner] is used so that the listener can be removed later.
+ */
+public fun <S : Scheme, T> S.useProperty(
+    property: KProperty1<S, T>,
+    owner: Any? = null,
+    callBack: S.(T) -> Unit,
+) {
+    //Pass initial value.
+    callBack(property.get(this))
+    meta.onChange(owner) { name ->
+        if (name.startsWith(property.name.asName())) {
+            callBack(property.get(this@useProperty))
+        }
+    }
+}
\ No newline at end of file
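
A usage sketch for useProperty in its new location (DisplayScheme is a hypothetical scheme with a string property):

    class DisplayScheme : Scheme() {
        var title by string()
        companion object : SchemeSpec<DisplayScheme>(::DisplayScheme)
    }

    fun watchTitle(display: DisplayScheme) {
        display.useProperty(DisplayScheme::title) { title ->
            println("title is now $title")  // called once immediately, then on every change
        }
        display.title = "Spectrum"          // triggers the callback again
    }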
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/schemeDescriptor.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/schemeDescriptor.kt
index 803659a6..a2a77182 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/schemeDescriptor.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/schemeDescriptor.kt
@@ -3,9 +3,11 @@ package space.kscience.dataforge.meta.descriptors
 import space.kscience.dataforge.meta.Scheme
 import space.kscience.dataforge.meta.SchemeSpec
 import space.kscience.dataforge.meta.ValueType
+import space.kscience.dataforge.misc.DFExperimental
 import kotlin.reflect.KProperty1
 import kotlin.reflect.typeOf
 
+@DFExperimental
 public inline fun <S : Scheme, reified T> MetaDescriptorBuilder.value(
     property: KProperty1<S, T>,
     noinline block: MetaDescriptorBuilder.() -> Unit = {},
@@ -37,6 +39,7 @@ public inline fun <S : Scheme, reified T> MetaDescriptorBuilder.value(
     else -> node(property.name, block)
 }
 
+@DFExperimental
 public inline fun <S : Scheme, reified T : Scheme> MetaDescriptorBuilder.scheme(
     property: KProperty1<S, T>,
     spec: SchemeSpec<T>,

From 25281d0f6d615ff3193b1c78316e99acdea1e607 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 31 Dec 2023 17:55:25 +0300
Subject: [PATCH 12/77] Replace properties with flows

---
 .../dataforge/properties/MetaProperty.kt      | 31 -----------
 .../kscience/dataforge/properties/Property.kt | 45 ----------------
 .../dataforge/properties/metaAsFlow.kt        | 51 +++++++++++++++++++
 .../dataforge/properties/schemeProperty.kt    | 31 -----------
 .../properties/MetaPropertiesTest.kt          | 28 ----------
 .../kscience/dataforge/properties/bindings.kt | 32 ------------
 .../kscience/dataforge/meta/ObservableMeta.kt |  7 ++-
 7 files changed, 56 insertions(+), 169 deletions(-)
 delete mode 100644 dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/MetaProperty.kt
 delete mode 100644 dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/Property.kt
 create mode 100644 dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/metaAsFlow.kt
 delete mode 100644 dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/schemeProperty.kt
 delete mode 100644 dataforge-context/src/commonTest/kotlin/space/kscience/dataforge/properties/MetaPropertiesTest.kt
 delete mode 100644 dataforge-context/src/jsMain/kotlin/space/kscience/dataforge/properties/bindings.kt

diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/MetaProperty.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/MetaProperty.kt
deleted file mode 100644
index 85435602..00000000
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/MetaProperty.kt
+++ /dev/null
@@ -1,31 +0,0 @@
-package space.kscience.dataforge.properties
-
-
-import space.kscience.dataforge.meta.*
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.startsWith
-
-@DFExperimental
-public class MetaProperty<T : Any>(
-    public val meta: ObservableMutableMeta,
-    public val name: Name,
-    public val converter: MetaConverter<T>,
-) : Property<T?> {
-
-    override var value: T?
-        get() = converter.readNullable(meta[name])
-        set(value) {
-            meta[name] = converter.convertNullable(value) ?: Meta.EMPTY
-        }
-
-    override fun onChange(owner: Any?, callback: (T?) -> Unit) {
-        meta.onChange(owner) { name ->
-            if (name.startsWith(this@MetaProperty.name)) callback(converter.readNullable(this[name]))
-        }
-    }
-
-    override fun removeChangeListener(owner: Any?) {
-        meta.removeListener(owner)
-    }
-}
\ No newline at end of file
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/Property.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/Property.kt
deleted file mode 100644
index b11fb2e1..00000000
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/Property.kt
+++ /dev/null
@@ -1,45 +0,0 @@
-package space.kscience.dataforge.properties
-
-import kotlinx.coroutines.flow.MutableStateFlow
-import kotlinx.coroutines.flow.StateFlow
-import space.kscience.dataforge.misc.DFExperimental
-
-@DFExperimental
-public interface Property<T> {
-    public var value: T
-
-    public fun onChange(owner: Any? = null, callback: (T) -> Unit)
-    public fun removeChangeListener(owner: Any? = null)
-}
-
-@DFExperimental
-public fun <T> Property<T>.toFlow(): StateFlow<T> = MutableStateFlow(value).also { stateFlow ->
-    onChange {
-        stateFlow.value = it
-    }
-}
-
-/**
- * Reflect all changes in the [source] property onto this property. Does not reflect changes back.
- *
- * @return a mirroring job
- */
-@DFExperimental
-public fun <T> Property<T>.mirror(source: Property<T>) {
-    source.onChange(this) {
-        this.value = it
-    }
-}
-
-/**
- * Bi-directional connection between properties
- */
-@DFExperimental
-public fun <T> Property<T>.bind(other: Property<T>) {
-    onChange(other) {
-        other.value = it
-    }
-    other.onChange {
-        this.value = it
-    }
-}
\ No newline at end of file
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/metaAsFlow.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/metaAsFlow.kt
new file mode 100644
index 00000000..90fafc5e
--- /dev/null
+++ b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/metaAsFlow.kt
@@ -0,0 +1,51 @@
+package space.kscience.dataforge.properties
+
+
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.Job
+import kotlinx.coroutines.channels.awaitClose
+import kotlinx.coroutines.flow.*
+import kotlinx.coroutines.launch
+import space.kscience.dataforge.meta.*
+import space.kscience.dataforge.misc.DFExperimental
+
+@DFExperimental
+public fun <T> ObservableMeta.asFlow(converter: MetaSpec<T>): Flow<T> = callbackFlow {
+    onChange(this){
+        trySend(converter.read(this))
+    }
+
+    awaitClose{
+        removeListener(this)
+    }
+}
+
+@DFExperimental
+public fun <T> MutableMeta.listenTo(
+    scope: CoroutineScope,
+    converter: MetaConverter<T>,
+    flow: Flow<T>,
+): Job = flow.onEach {
+    update(converter.convert(it))
+}.launchIn(scope)
+
+@DFExperimental
+public fun <T> ObservableMutableMeta.bind(
+    scope: CoroutineScope,
+    converter: MetaConverter<T>,
+    flow: MutableSharedFlow<T>,
+): Job = scope.launch{
+    listenTo(this, converter,flow)
+    onChange(flow){
+        launch {
+            flow.emit(converter.read(this@onChange))
+        }
+    }
+    flow.onCompletion {
+        removeListener(flow)
+    }
+}.also {
+    it.invokeOnCompletion {
+        removeListener(flow)
+    }
+}
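
An illustrative way to consume the new flow bridge (the watcher function is hypothetical; `MetaConverter.meta` is assumed to be the identity converter):

    import kotlinx.coroutines.CoroutineScope
    import kotlinx.coroutines.Job
    import kotlinx.coroutines.flow.launchIn
    import kotlinx.coroutines.flow.onEach
    import space.kscience.dataforge.meta.MetaConverter
    import space.kscience.dataforge.meta.ObservableMutableMeta
    import space.kscience.dataforge.misc.DFExperimental
    import space.kscience.dataforge.properties.asFlow

    @OptIn(DFExperimental::class)
    fun CoroutineScope.watchConfig(config: ObservableMutableMeta): Job =
        config.asFlow(MetaConverter.meta)
            .onEach { snapshot -> println("config changed: $snapshot") }
            .launchIn(this)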
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/schemeProperty.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/schemeProperty.kt
deleted file mode 100644
index 3b4d948d..00000000
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/schemeProperty.kt
+++ /dev/null
@@ -1,31 +0,0 @@
-package space.kscience.dataforge.properties
-
-
-import space.kscience.dataforge.meta.Scheme
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.parseAsName
-import space.kscience.dataforge.names.startsWith
-import kotlin.reflect.KMutableProperty1
-
-@DFExperimental
-public fun <S : Scheme, T : Any> S.property(property: KMutableProperty1<S, T?>): Property<T?> =
-    object : Property<T?> {
-        override var value: T?
-            get() = property.get(this@property)
-            set(value) {
-                property.set(this@property, value)
-            }
-
-        override fun onChange(owner: Any?, callback: (T?) -> Unit) {
-            this@property.meta.onChange(this) { name ->
-                if (name.startsWith(property.name.parseAsName(true))) {
-                    callback(property.get(this@property))
-                }
-            }
-        }
-
-        override fun removeChangeListener(owner: Any?) {
-            this@property.meta.removeListener(this@property)
-        }
-
-    }
\ No newline at end of file
diff --git a/dataforge-context/src/commonTest/kotlin/space/kscience/dataforge/properties/MetaPropertiesTest.kt b/dataforge-context/src/commonTest/kotlin/space/kscience/dataforge/properties/MetaPropertiesTest.kt
deleted file mode 100644
index 00b71673..00000000
--- a/dataforge-context/src/commonTest/kotlin/space/kscience/dataforge/properties/MetaPropertiesTest.kt
+++ /dev/null
@@ -1,28 +0,0 @@
-package space.kscience.dataforge.properties
-
-import space.kscience.dataforge.meta.Scheme
-import space.kscience.dataforge.meta.SchemeSpec
-import space.kscience.dataforge.meta.int
-import space.kscience.dataforge.misc.DFExperimental
-import kotlin.test.Test
-import kotlin.test.assertEquals
-
-internal class TestScheme : Scheme() {
-    var a by int()
-    var b by int()
-    companion object : SchemeSpec<TestScheme>(::TestScheme)
-}
-
-@DFExperimental
-class MetaPropertiesTest {
-    @Test
-    fun testBinding() {
-        val scheme = TestScheme.empty()
-        val a = scheme.property(TestScheme::a)
-        val b = scheme.property(TestScheme::b)
-        a.bind(b)
-        scheme.a = 2
-        assertEquals(2, scheme.b)
-        assertEquals(2, b.value)
-    }
-}
\ No newline at end of file
diff --git a/dataforge-context/src/jsMain/kotlin/space/kscience/dataforge/properties/bindings.kt b/dataforge-context/src/jsMain/kotlin/space/kscience/dataforge/properties/bindings.kt
deleted file mode 100644
index b4451c97..00000000
--- a/dataforge-context/src/jsMain/kotlin/space/kscience/dataforge/properties/bindings.kt
+++ /dev/null
@@ -1,32 +0,0 @@
-package space.kscience.dataforge.properties
-
-import org.w3c.dom.HTMLInputElement
-import space.kscience.dataforge.misc.DFExperimental
-
-@DFExperimental
-public fun HTMLInputElement.bindValue(property: Property<String>) {
-    if (this.onchange != null) error("Input element already bound")
-    this.onchange = {
-        property.value = this.value
-        Unit
-    }
-    property.onChange(this) {
-        if (value != it) {
-            value = it
-        }
-    }
-}
-
-@DFExperimental
-public fun HTMLInputElement.bindChecked(property: Property<Boolean>) {
-    if (this.onchange != null) error("Input element already bound")
-    this.onchange = {
-        property.value = this.checked
-        Unit
-    }
-    property.onChange(this) {
-        if (checked != it) {
-            checked = it
-        }
-    }
-}
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
index 4a3db511..b481962e 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
@@ -17,12 +17,15 @@ internal data class MetaListener(
  */
 public interface ObservableMeta : Meta {
     /**
-     * Add change listener to this meta. Owner is declared to be able to remove listeners later. Listener without owner could not be removed
+     * Add a change listener to this meta. The owner is declared so that listeners can be removed later.
+     * Listeners without an owner can only be removed all at once.
+     *
+     * The `this` object in the listener represents the current state of this meta. The name points to the changed node.
      */
     public fun onChange(owner: Any?, callback: Meta.(name: Name) -> Unit)
 
     /**
-     * Remove all listeners belonging to given owner
+     * Remove all listeners belonging to the given [owner]. Passing null removes all listeners.
      */
     public fun removeListener(owner: Any?)
 

From 6ba189fa3473ed06c3d35b7015e4009b1c9f0f92 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 31 Dec 2023 19:57:57 +0300
Subject: [PATCH 13/77] Remove second invalidate on meta node remove

---
 .../kotlin/space/kscience/dataforge/meta/MutableMeta.kt       | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
index 24a09240..f1e66060 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
@@ -324,8 +324,6 @@ private class MutableMetaImpl(
                     //remove child and invalidate if argument is null
                     if (node == null) {
                         children.remove(token)?.removeListener(this)
-                        // old item is not null otherwise we can't be here
-                        invalidate(name)
                     } else {
                         val newNode = wrapItem(node)
                         newNode.adoptBy(this, token)
@@ -335,7 +333,7 @@ private class MutableMetaImpl(
 
                 else -> {
                     val token = name.firstOrNull()!!
-                    //get existing or create new node.
+                    //get an existing node or create a new node.
                     if (items[token] == null) {
                         val newNode = MutableMetaImpl(null)
                         newNode.adoptBy(this, token)

From 8f3c2f3950a55400aefa858315d53e788030f66a Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Thu, 4 Jan 2024 18:59:57 +0300
Subject: [PATCH 14/77] refactoring of data transformations

---
 CHANGELOG.md                                  |  2 +
 .../kscience/dataforge/provider/dfType.kt     | 12 +--
 .../kscience/dataforge/actions/Action.kt      | 22 +++---
 .../space/kscience/dataforge/data/Data.kt     |  2 -
 .../kscience/dataforge/data/MetaMaskData.kt   | 23 ++++++
 .../kscience/dataforge/data/dataTransform.kt  | 78 ++++++++++++-------
 .../kscience/dataforge/meta/MutableMeta.kt    | 15 ++--
 .../kscience/dataforge/meta/SealedMeta.kt     | 10 +--
 .../space/kscience/dataforge/misc/DfType.kt   |  3 -
 .../dataforge/workspace/taskBuilders.kt       | 26 ++++++-
 .../workspace/CachingWorkspaceTest.kt         |  4 +-
 .../workspace/FileWorkspaceCacheTest.kt       |  2 +-
 .../workspace/SimpleWorkspaceTest.kt          |  6 +-
 13 files changed, 136 insertions(+), 69 deletions(-)
 create mode 100644 dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7b9586a8..4469f802 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,10 +6,12 @@
 - Wasm artifacts
 - Add automatic MetaConverter for serializeable objects
 - Add Meta and MutableMeta delegates for convertable and serializeable
+- Meta mapping for data.
 
 ### Changed
 - Descriptor `children` renamed to `nodes`
 - `MetaConverter` now inherits `MetaSpec` (former `Specification`), so `MetaConverter` can be used more universally.
+- Meta copy and modification now use lightweight non-observable meta builders.
 
 ### Deprecated
 - `node(key,converter)` in favor of `serializable` delegate
diff --git a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt
index 04e681da..cccc3efb 100644
--- a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt
+++ b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt
@@ -12,7 +12,7 @@ import kotlin.reflect.full.findAnnotation
 
 
 @DFExperimental
-public val KClass<*>.dfId: String
+public val KClass<*>.dfType: String
     get() = findAnnotation<DfType>()?.id ?: simpleName ?: ""
 
 /**
@@ -20,13 +20,13 @@ public val KClass<*>.dfId: String
  */
 @DFExperimental
 public inline fun <reified T : Any> Provider.provideByType(name: String): T? {
-    val target = T::class.dfId
+    val target = T::class.dfType
     return provide(target, name)
 }
 
 @DFExperimental
 public inline fun <reified T : Any> Provider.top(): Map<Name, T> {
-    val target = T::class.dfId
+    val target = T::class.dfType
     return top(target)
 }
 
@@ -35,15 +35,15 @@ public inline fun <reified T : Any> Provider.top(): Map<Name, T> {
  */
 @DFExperimental
 public inline fun <reified T : Any> Context.gather(inherit: Boolean = true): Map<Name, T> =
-    gather<T>(T::class.dfId, inherit)
+    gather<T>(T::class.dfType, inherit)
 
 
 @DFExperimental
 public inline fun <reified T : Any> PluginBuilder.provides(items: Map<Name, T>) {
-    provides(T::class.dfId, items)
+    provides(T::class.dfType, items)
 }
 
 @DFExperimental
 public inline fun <reified T : Any> PluginBuilder.provides(vararg items: Named) {
-    provides(T::class.dfId, *items)
+    provides(T::class.dfType, *items)
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
index 4fed8e51..3feec9bc 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
@@ -7,30 +7,28 @@ import space.kscience.dataforge.misc.DFExperimental
 /**
  * A simple data transformation on a data node. Actions should avoid doing actual dependency evaluation in [execute].
  */
-public interface Action<in T : Any, out R : Any> {
+public fun interface Action<in T : Any, out R : Any> {
 
     /**
      * Transform the data in the node, producing a new node. By default, it is assumed that all calculations are lazy
      * so no actual computation is started at this moment.
      */
-    public fun execute(dataSet: DataSet<T>, meta: Meta = Meta.EMPTY): DataSet<R>
+    public fun execute(dataSet: DataSet<T>, meta: Meta): DataSet<R>
 
     public companion object
 }
 
+/**
+ * A convenience method to transform data using given [action]
+ */
+public fun <T : Any, R : Any> DataSet<T>.transform(action: Action<T, R>, meta: Meta = Meta.EMPTY): DataSet<R> =
+    action.execute(this, meta)
+
 /**
  * Action composition. The result is terminal if one of its parts is terminal
  */
-public infix fun <T : Any, I : Any, R : Any> Action<T, I>.then(action: Action<I, R>): Action<T, R> {
-    // TODO introduce composite action and add optimize by adding action to the list
-    return object : Action<T, R> {
-
-        override fun execute(
-            dataSet: DataSet<T>,
-            meta: Meta,
-        ): DataSet<R> = action.execute(this@then.execute(dataSet, meta), meta)
-    }
-}
+public infix fun <T : Any, I : Any, R : Any> Action<T, I>.then(action: Action<I, R>): Action<T, R> =
+    Action<T, R> { dataSet, meta -> action.execute(this@then.execute(dataSet, meta), meta) }
 
 @DFExperimental
 public operator fun <T : Any, R : Any> Action<T, R>.invoke(
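
With Action as a fun interface, simple actions can be written as lambdas and chained with `then`; a no-op sketch for illustration (both actions are hypothetical):

    val validate = Action<Int, Int> { dataSet, _ -> dataSet }  // pass-through placeholder
    val annotate = Action<Int, Int> { dataSet, _ -> dataSet }  // pass-through placeholder

    val pipeline: Action<Int, Int> = validate then annotate

    fun apply(input: DataSet<Int>): DataSet<Int> = input.transform(pipeline)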
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
index 4d883795..756cd840 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
@@ -87,7 +87,6 @@ public class StaticData<T : Any>(
 public inline fun <reified T : Any> Data(value: T, meta: Meta = Meta.EMPTY): StaticData<T> =
     StaticData(typeOf<T>(), value, meta)
 
-@Suppress("FunctionName")
 @DFInternal
 public fun <T : Any> Data(
     type: KType,
@@ -98,7 +97,6 @@ public fun <T : Any> Data(
 ): Data<T> = LazyData(type, meta, context, dependencies, block)
 
 @OptIn(DFInternal::class)
-@Suppress("FunctionName")
 public inline fun <reified T : Any> Data(
     meta: Meta = Meta.EMPTY,
     context: CoroutineContext = EmptyCoroutineContext,
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt
new file mode 100644
index 00000000..acf2410d
--- /dev/null
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt
@@ -0,0 +1,23 @@
+package space.kscience.dataforge.data
+
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.MutableMeta
+import space.kscience.dataforge.meta.copy
+
+
+private class MetaMaskData<T>(val origin: Data<T>, override val meta: Meta) : Data<T> by origin
+
+/**
+ * A data with overridden meta. It reflects the computed state of the original data.
+ */
+public fun <T> Data<T>.withMeta(newMeta: Meta): Data<T> = if (this is MetaMaskData) {
+    MetaMaskData(origin, newMeta)
+} else {
+    MetaMaskData(this, newMeta)
+}
+
+/**
+ * Create a new [Data] with the same computation, but different meta. The meta is created by applying [block] to
+ * the existing data meta.
+ */
+public inline fun <T> Data<T>.mapMeta(block: MutableMeta.() -> Unit): Data<T> = withMeta(meta.copy(block))
\ No newline at end of file
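
An illustrative sketch (data value and meta keys are made up): the computation is shared between the original and the masked data, only the attached meta differs.

    val raw: Data<Double> = Data(2.0, Meta { "unit" put "m" })

    val annotated: Data<Double> = raw.mapMeta {
        "comment" put "converted from raw"
    }
    // annotated.await() reuses raw's computed value;
    // annotated.meta contains both "unit" and "comment"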
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
index 76577346..c1ecdc09 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
@@ -1,11 +1,9 @@
 package space.kscience.dataforge.data
 
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.MutableMeta
-import space.kscience.dataforge.meta.seal
-import space.kscience.dataforge.meta.toMutableMeta
+import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.NameToken
 import kotlin.coroutines.CoroutineContext
 import kotlin.coroutines.EmptyCoroutineContext
 import kotlin.reflect.KType
@@ -28,8 +26,8 @@ public suspend fun <T : Any> NamedData<T>.awaitWithMeta(): NamedValueWithMeta<T>
  * @param block the transformation itself
  */
 public inline fun <T : Any, reified R : Any> Data<T>.map(
-    coroutineContext: CoroutineContext = EmptyCoroutineContext,
     meta: Meta = this.meta,
+    coroutineContext: CoroutineContext = EmptyCoroutineContext,
     crossinline block: suspend (T) -> R,
 ): Data<R> = Data(meta, coroutineContext, listOf(this)) {
     block(await())
@@ -40,8 +38,8 @@ public inline fun <T : Any, reified R : Any> Data<T>.map(
  */
 public inline fun <T1 : Any, T2 : Any, reified R : Any> Data<T1>.combine(
     other: Data<T2>,
-    coroutineContext: CoroutineContext = EmptyCoroutineContext,
     meta: Meta = this.meta,
+    coroutineContext: CoroutineContext = EmptyCoroutineContext,
     crossinline block: suspend (left: T1, right: T2) -> R,
 ): Data<R> = Data(meta, coroutineContext, listOf(this, other)) {
     block(await(), other.await())
@@ -50,12 +48,22 @@ public inline fun <T1 : Any, T2 : Any, reified R : Any> Data<T1>.combine(
 
 //data collection operations
 
+@PublishedApi
+internal fun Iterable<Data<*>>.joinMeta(): Meta = Meta {
+    var counter = 0
+    forEach { data ->
+        val inputIndex = (data as? NamedData)?.name?.toString() ?: (counter++).toString()
+        val token = NameToken("data", inputIndex)
+        set(token, data.meta)
+    }
+}
+
 /**
  * Lazily reduce a collection of [Data] to a single data.
  */
 public inline fun <T : Any, reified R : Any> Collection<Data<T>>.reduceToData(
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline block: suspend (List<ValueWithMeta<T>>) -> R,
 ): Data<R> = Data(
     meta,
@@ -65,11 +73,19 @@ public inline fun <T : Any, reified R : Any> Collection<Data<T>>.reduceToData(
     block(map { it.awaitWithMeta() })
 }
 
+@PublishedApi
+internal fun Map<*, Data<*>>.joinMeta(): Meta = Meta {
+    forEach { (key, data) ->
+        val token = NameToken("data", key.toString())
+        set(token, data.meta)
+    }
+}
+
 @DFInternal
 public fun <K, T : Any, R : Any> Map<K, Data<T>>.reduceToData(
     outputType: KType,
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     block: suspend (Map<K, ValueWithMeta<T>>) -> R,
 ): Data<R> = Data(
     outputType,
@@ -87,8 +103,8 @@ public fun <K, T : Any, R : Any> Map<K, Data<T>>.reduceToData(
  * @param R type of the result goal
  */
 public inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduceToData(
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline block: suspend (Map<K, ValueWithMeta<T>>) -> R,
 ): Data<R> = Data(
     meta,
@@ -103,8 +119,8 @@ public inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduceToData(
 @DFInternal
 public inline fun <T : Any, R : Any> Iterable<Data<T>>.reduceToData(
     outputType: KType,
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline transformation: suspend (Collection<ValueWithMeta<T>>) -> R,
 ): Data<R> = Data(
     outputType,
@@ -117,20 +133,20 @@ public inline fun <T : Any, R : Any> Iterable<Data<T>>.reduceToData(
 
 @OptIn(DFInternal::class)
 public inline fun <T : Any, reified R : Any> Iterable<Data<T>>.reduceToData(
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline transformation: suspend (Collection<ValueWithMeta<T>>) -> R,
-): Data<R> = reduceToData(typeOf<R>(), coroutineContext, meta) {
+): Data<R> = reduceToData(typeOf<R>(), meta, coroutineContext) {
     transformation(it)
 }
 
 public inline fun <T : Any, reified R : Any> Iterable<Data<T>>.foldToData(
     initial: R,
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline block: suspend (result: R, data: ValueWithMeta<T>) -> R,
 ): Data<R> = reduceToData(
-    coroutineContext, meta
+    meta, coroutineContext
 ) {
     it.fold(initial) { acc, t -> block(acc, t) }
 }
@@ -141,8 +157,8 @@ public inline fun <T : Any, reified R : Any> Iterable<Data<T>>.foldToData(
 @DFInternal
 public inline fun <T : Any, R : Any> Iterable<NamedData<T>>.reduceNamedToData(
     outputType: KType,
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline transformation: suspend (Collection<NamedValueWithMeta<T>>) -> R,
 ): Data<R> = Data(
     outputType,
@@ -155,10 +171,10 @@ public inline fun <T : Any, R : Any> Iterable<NamedData<T>>.reduceNamedToData(
 
 @OptIn(DFInternal::class)
 public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.reduceNamedToData(
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline transformation: suspend (Collection<NamedValueWithMeta<T>>) -> R,
-): Data<R> = reduceNamedToData(typeOf<R>(), coroutineContext, meta) {
+): Data<R> = reduceNamedToData(typeOf<R>(), meta, coroutineContext) {
     transformation(it)
 }
 
@@ -167,11 +183,11 @@ public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.reduceNamedT
  */
 public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.foldNamedToData(
     initial: R,
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline block: suspend (result: R, data: NamedValueWithMeta<T>) -> R,
 ): Data<R> = reduceNamedToData(
-    coroutineContext, meta
+    meta, coroutineContext
 ) {
     it.fold(initial) { acc, t -> block(acc, t) }
 }
@@ -181,8 +197,8 @@ public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.foldNamedToD
 @DFInternal
 public suspend fun <T : Any, R : Any> DataSet<T>.map(
     outputType: KType,
-    coroutineContext: CoroutineContext = EmptyCoroutineContext,
     metaTransform: MutableMeta.() -> Unit = {},
+    coroutineContext: CoroutineContext = EmptyCoroutineContext,
     block: suspend (NamedValueWithMeta<T>) -> R,
 ): DataTree<R> = DataTree<R>(outputType) {
     forEach {
@@ -196,10 +212,10 @@ public suspend fun <T : Any, R : Any> DataSet<T>.map(
 
 @OptIn(DFInternal::class)
 public suspend inline fun <T : Any, reified R : Any> DataSet<T>.map(
-    coroutineContext: CoroutineContext = EmptyCoroutineContext,
     noinline metaTransform: MutableMeta.() -> Unit = {},
+    coroutineContext: CoroutineContext = EmptyCoroutineContext,
     noinline block: suspend (NamedValueWithMeta<T>) -> R,
-): DataTree<R> = map(typeOf<R>(), coroutineContext, metaTransform, block)
+): DataTree<R> = map(typeOf<R>(), metaTransform, coroutineContext, block)
 
 public inline fun <T : Any> DataSet<T>.forEach(block: (NamedData<T>) -> Unit) {
     for (d in this) {
@@ -207,15 +223,25 @@ public inline fun <T : Any> DataSet<T>.forEach(block: (NamedData<T>) -> Unit) {
     }
 }
 
+// DataSet reduction
+
+@PublishedApi
+internal fun DataSet<*>.joinMeta(): Meta = Meta {
+    forEach { (key, data) ->
+        val token = NameToken("data", key.toString())
+        set(token, data.meta)
+    }
+}
+
 public inline fun <T : Any, reified R : Any> DataSet<T>.reduceToData(
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline transformation: suspend (Iterable<NamedValueWithMeta<T>>) -> R,
-): Data<R> = asIterable().reduceNamedToData(coroutineContext, meta, transformation)
+): Data<R> = asIterable().reduceNamedToData(meta, coroutineContext, transformation)
 
 public inline fun <T : Any, reified R : Any> DataSet<T>.foldToData(
     initial: R,
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline block: suspend (result: R, data: NamedValueWithMeta<T>) -> R,
-): Data<R> = asIterable().foldNamedToData(initial, coroutineContext, meta, block)
\ No newline at end of file
+): Data<R> = asIterable().foldNamedToData(initial, meta, coroutineContext, block)
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
index f1e66060..231f9e54 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
@@ -370,7 +370,8 @@ public fun MutableMeta.append(key: String, value: Value): Unit = append(Name.par
 /**
  * Create a mutable copy of this meta. The copy is created even if the Meta is already mutable
  */
-public fun Meta.toMutableMeta(): ObservableMutableMeta = MutableMetaImpl(value, items)
+public fun Meta.toMutableMeta(): MutableMeta =
+    MutableMeta { update(this@toMutableMeta) } //MutableMetaImpl(value, items)
 
 public fun Meta.asMutableMeta(): MutableMeta = (this as? MutableMeta) ?: toMutableMeta()
 
@@ -385,12 +386,14 @@ public inline fun ObservableMutableMeta(builder: MutableMeta.() -> Unit = {}): O
 
 
 /**
- * Create a copy of this [Meta], optionally applying the given [block].
- * The listeners of the original Config are not retained.
+ * Create a read-only copy of this [Meta]. [modification] is an optional transformation applied to the copy.
+ *
+ * The copy does not reflect later changes of the initial Meta.
  */
-public inline fun Meta.copy(block: MutableMeta.() -> Unit = {}): Meta =
-    toMutableMeta().apply(block)
-
+public inline fun Meta.copy(modification: MutableMeta.() -> Unit = {}): Meta = Meta {
+    update(this@copy)
+    modification()
+}
 
 private class MutableMetaWithDefault(
     val source: MutableMeta, val default: MetaProvider, val rootName: Name,
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/SealedMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/SealedMeta.kt
index 217a6a04..e842b990 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/SealedMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/SealedMeta.kt
@@ -101,11 +101,6 @@ internal class MetaBuilder(
     override fun hashCode(): Int = Meta.hashCode(this)
 }
 
-/**
- * Create a read-only meta.
- */
-public inline fun Meta(builder: MutableMeta.() -> Unit): Meta =
-    MetaBuilder().apply(builder).seal()
 
 /**
  * Create an immutable meta.
@@ -113,6 +108,11 @@ public inline fun Meta(builder: MutableMeta.() -> Unit): Meta =
 public inline fun SealedMeta(builder: MutableMeta.() -> Unit): SealedMeta =
     MetaBuilder().apply(builder).seal()
 
+/**
+ * Create a read-only meta.
+ */
+public inline fun Meta(builder: MutableMeta.() -> Unit): Meta = SealedMeta(builder)
+
 /**
  * Create an empty mutable meta.
  */
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/DfType.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/DfType.kt
index 11f548ae..936e793a 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/DfType.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/DfType.kt
@@ -6,6 +6,3 @@ package space.kscience.dataforge.misc
 @MustBeDocumented
 @Target(AnnotationTarget.CLASS)
 public annotation class DfType(val id: String)
-
-@Deprecated("Replace with DfType", replaceWith = ReplaceWith("DfType"))
-public typealias DfId = DfType
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
index bf3d5921..a7c1f0fb 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
@@ -1,5 +1,6 @@
 package space.kscience.dataforge.workspace
 
+import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.context.PluginFactory
 import space.kscience.dataforge.data.DataSet
 import space.kscience.dataforge.data.forEach
@@ -68,7 +69,7 @@ public val TaskResultBuilder<*>.allData: DataSelector<*>
     }
 
 /**
- * Perform a lazy mapping task using given [selector] and [action]. The meta of resulting
+ * Perform a lazy mapping task using the given [selector] and a one-to-one [action].
  * TODO move selector to receiver with multi-receivers
  *
  * @param selector a workspace data selector. Could be either task selector or initial data selector.
@@ -77,7 +78,7 @@ public val TaskResultBuilder<*>.allData: DataSelector<*>
  * @param action process individual data asynchronously.
  */
 @DFExperimental
-public suspend inline fun <T : Any, reified R : Any> TaskResultBuilder<R>.pipeFrom(
+public suspend inline fun <T : Any, reified R : Any> TaskResultBuilder<R>.transformEach(
     selector: DataSelector<T>,
     dependencyMeta: Meta = defaultDependencyMeta,
     dataMetaTransform: MutableMeta.(name: Name) -> Unit = {},
@@ -89,7 +90,7 @@ public suspend inline fun <T : Any, reified R : Any> TaskResultBuilder<R>.pipeFr
             dataMetaTransform(data.name)
         }
 
-        val res = data.map(workspace.context.coroutineContext, meta) {
+        val res = data.map(meta, workspace.context.coroutineContext) {
             action(it, data.name, meta)
         }
 
@@ -97,4 +98,23 @@ public suspend inline fun <T : Any, reified R : Any> TaskResultBuilder<R>.pipeFr
     }
 }
 
+/**
+ * Set the given [dataSet] as the task result.
+ */
+public fun <T : Any> TaskResultBuilder<T>.result(dataSet: DataSet<T>) {
+    node(Name.EMPTY, dataSet)
+}
+
+/**
+ * Use the provided [action] to fill the result
+ */
+@DFExperimental
+public suspend inline fun <T : Any, reified R : Any> TaskResultBuilder<R>.actionFrom(
+    selector: DataSelector<T>,
+    action: Action<T, R>,
+    dependencyMeta: Meta = defaultDependencyMeta,
+) {
+    node(Name.EMPTY, action.execute(from(selector, dependencyMeta), dependencyMeta))
+}
+
 
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
index 4e1923bc..467b04b0 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
@@ -31,7 +31,7 @@ internal class CachingWorkspaceTest {
             inMemoryCache()
 
             val doFirst by task<Any> {
-                pipeFrom(allData) { _, name, _ ->
+                transformEach(allData) { _, name, _ ->
                     firstCounter++
                     println("Done first on $name with flag=${taskMeta["flag"].boolean}")
                 }
@@ -39,7 +39,7 @@ internal class CachingWorkspaceTest {
 
             @Suppress("UNUSED_VARIABLE")
             val doSecond by task<Any> {
-                pipeFrom(
+                transformEach(
                     doFirst,
                     dependencyMeta = if(taskMeta["flag"].boolean == true) taskMeta else Meta.EMPTY
                 ) { _, name, _ ->
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
index 00ca67cb..03fda0eb 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
@@ -24,7 +24,7 @@ class FileWorkspaceCacheTest {
 
             @Suppress("UNUSED_VARIABLE")
             val echo by task<String> {
-                pipeFrom(dataByType<String>()) { arg, _, _ -> arg }
+                transformEach(dataByType<String>()) { arg, _, _ -> arg }
             }
         }
 
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
index 7bfe0927..b79ed2e7 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
@@ -37,7 +37,7 @@ internal object TestPlugin : WorkspacePlugin() {
 
     val test by task {
         // type is inferred
-        pipeFrom(dataByType<Int>()) { arg, _, _ ->
+        transformEach(dataByType<Int>()) { arg, _, _ ->
             logger.info { "Test: $arg" }
             arg
         }
@@ -74,7 +74,7 @@ internal class SimpleWorkspaceTest {
         }
 
         val square by task<Int> {
-            pipeFrom(dataByType<Int>()) { arg, name, meta ->
+            transformEach(dataByType<Int>()) { arg, name, meta ->
                 if (meta["testFlag"].boolean == true) {
                     println("Side effect")
                 }
@@ -84,7 +84,7 @@ internal class SimpleWorkspaceTest {
         }
 
         val linear by task<Int> {
-            pipeFrom(dataByType<Int>()) { arg, name, _ ->
+            transformEach(dataByType<Int>()) { arg, name, _ ->
                 workspace.logger.info { "Starting linear on $name" }
                 arg * 2 + 1
             }

From 5e3de7073747d35e0f44e28602be30ba6cdb8c36 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Fri, 5 Jan 2024 13:02:11 +0300
Subject: [PATCH 15/77] Add action delegate for task creation

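A minimal usage sketch for the new action delegate (illustrative only: it
assumes the Workspace { ... } builder entry point and the dataByType selector
used in the JVM workspace tests; Action.mapping is defined in MapAction.kt):

    import space.kscience.dataforge.actions.*
    import space.kscience.dataforge.data.*
    import space.kscience.dataforge.workspace.*

    // a one-to-one action that increments every Int value
    val plusOne = Action.mapping<Int, Int> {
        result { it + 1 }
    }

    val workspace = Workspace {
        data {
            // intrinsic workspace data
            static("a", 1)
            static("b", 2)
        }

        // register a task backed by the action via the new delegate
        val increment by action(dataByType<Int>(), plusOne)
    }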
---
 .../dataforge/data/DataTreeBuilder.kt         | 12 ++---
 .../kscience/dataforge/data/StaticDataTree.kt |  2 -
 .../kscience/dataforge/data/ActionsTest.kt    |  2 +-
 .../dataforge/data/DataTreeBuilderTest.kt     |  4 +-
 .../dataforge/workspace/WorkspaceBuilder.kt   | 51 +++++++++++++------
 5 files changed, 41 insertions(+), 30 deletions(-)

diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTreeBuilder.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTreeBuilder.kt
index 303ba44e..f8a5dd4c 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTreeBuilder.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTreeBuilder.kt
@@ -10,7 +10,6 @@ import space.kscience.dataforge.misc.ThreadSafe
 import space.kscience.dataforge.names.*
 import kotlin.collections.set
 import kotlin.coroutines.CoroutineContext
-import kotlin.coroutines.coroutineContext
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
@@ -21,7 +20,7 @@ public interface DataSourceBuilder<T : Any> : DataSetBuilder<T>, DataSource<T> {
 /**
  * A mutable [DataTree] that propagates updates
  */
-public class DataTreeBuilder<T : Any>(
+public class DataTreeBuilder<T : Any> internal constructor(
     override val dataType: KType,
     coroutineContext: CoroutineContext,
 ) : DataTree<T>, DataSourceBuilder<T> {
@@ -100,19 +99,14 @@ public class DataTreeBuilder<T : Any>(
 public fun <T : Any> DataSource(
     type: KType,
     parent: CoroutineScope,
-    block: DataSourceBuilder<T>.() -> Unit,
+    block: DataSourceBuilder<T>.() -> Unit = {},
 ): DataTreeBuilder<T> = DataTreeBuilder<T>(type, parent.coroutineContext).apply(block)
 
 @Suppress("OPT_IN_USAGE", "FunctionName")
 public inline fun <reified T : Any> DataSource(
     parent: CoroutineScope,
-    crossinline block: DataSourceBuilder<T>.() -> Unit,
-): DataTreeBuilder<T> = DataSource(typeOf<T>(), parent) { block() }
-
-@Suppress("FunctionName")
-public suspend inline fun <reified T : Any> DataSource(
     crossinline block: DataSourceBuilder<T>.() -> Unit = {},
-): DataTreeBuilder<T> = DataTreeBuilder<T>(typeOf<T>(), coroutineContext).apply { block() }
+): DataTreeBuilder<T> = DataSource(typeOf<T>(), parent) { block() }
 
 public inline fun <reified T : Any> DataSourceBuilder<T>.emit(
     name: Name,
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataTree.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataTree.kt
index 4f0f455e..186e5c2a 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataTree.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataTree.kt
@@ -65,13 +65,11 @@ internal class StaticDataTree<T : Any>(
     }
 }
 
-@Suppress("FunctionName")
 public inline fun <T : Any> DataTree(
     dataType: KType,
     block: DataSetBuilder<T>.() -> Unit,
 ): DataTree<T> = StaticDataTree<T>(dataType).apply { block() }
 
-@Suppress("FunctionName")
 public inline fun <reified T : Any> DataTree(
     noinline block: DataSetBuilder<T>.() -> Unit,
 ): DataTree<T> = DataTree(typeOf<T>(), block)
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index b24c4f27..06a7e755 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -29,7 +29,7 @@ internal class ActionsTest {
 
     @Test
     fun testDynamicMapAction() = runTest {
-        val data: DataSourceBuilder<Int> = DataSource()
+        val data: DataSourceBuilder<Int> = DataSource(this)
 
         val plusOne = Action.mapping<Int, Int> {
             result { it + 1 }
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
index b77f7ea2..a4d28eab 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
@@ -56,7 +56,7 @@ internal class DataTreeBuilderTest {
         try {
             lateinit var updateJob: Job
             supervisorScope {
-                val subNode = DataSource<Int> {
+                val subNode = DataSource<Int>(this) {
                     updateJob = launch {
                         repeat(10) {
                             delay(10)
@@ -70,7 +70,7 @@ internal class DataTreeBuilderTest {
                         println(it)
                     }
                 }
-                val rootNode = DataSource<Int> {
+                val rootNode = DataSource<Int>(this) {
                     setAndWatch("sub".asName(), subNode)
                 }
 
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
index 5489e200..39e35b39 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
@@ -1,18 +1,17 @@
 package space.kscience.dataforge.workspace
 
 import kotlinx.coroutines.CoroutineScope
+import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.ContextBuilder
 import space.kscience.dataforge.context.Global
-import space.kscience.dataforge.data.*
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.MetaRepr
-import space.kscience.dataforge.meta.MetaSpec
-import space.kscience.dataforge.meta.MutableMeta
+import space.kscience.dataforge.data.DataSet
+import space.kscience.dataforge.data.DataSource
+import space.kscience.dataforge.data.DataSourceBuilder
+import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
 import space.kscience.dataforge.misc.DFBuilder
-import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
 import kotlin.collections.set
@@ -45,6 +44,9 @@ public inline fun <reified T : Any> TaskContainer.registerTask(
     noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
 ): Unit = registerTask(Name.parse(name), Task(MetaDescriptor(descriptorBuilder), builder))
 
+/**
+ * Create a new task.
+ */
 public inline fun <reified T : Any> TaskContainer.buildTask(
     name: String,
     descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
@@ -67,6 +69,9 @@ public inline fun <reified T : Any> TaskContainer.task(
     ReadOnlyProperty { _, _ -> TaskReference(taskName, task) }
 }
 
+/**
+ * Create a task based on [MetaSpec]
+ */
 public inline fun <reified T : Any, C : MetaRepr> TaskContainer.task(
     specification: MetaSpec<C>,
     noinline builder: suspend TaskResultBuilder<T>.(C) -> Unit,
@@ -77,15 +82,34 @@ public inline fun <reified T : Any, C : MetaRepr> TaskContainer.task(
     ReadOnlyProperty { _, _ -> TaskReference(taskName, task) }
 }
 
+/**
+ * A delegate to create a custom task
+ */
 public inline fun <reified T : Any> TaskContainer.task(
     noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
     noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
 ): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> =
     task(MetaDescriptor(descriptorBuilder), builder)
 
-public class WorkspaceBuilder(private val parentContext: Context = Global) : TaskContainer {
+/**
+ * A delegate for creating a task based on [action]
+ */
+public inline fun <T : Any, reified R : Any> TaskContainer.action(
+    selector: DataSelector<T>,
+    action: Action<T, R>,
+    noinline metaTransform: MutableMeta.() -> Unit = {},
+    noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<R>>> =
+    task(MetaDescriptor(descriptorBuilder)) {
+        result(action.execute(from(selector), taskMeta.copy(metaTransform)))
+    }
+
+public class WorkspaceBuilder(
+    private val parentContext: Context = Global,
+    private val coroutineScope: CoroutineScope = parentContext,
+) : TaskContainer {
     private var context: Context? = null
-    private var data: DataSet<*>? = null
+    private val data = DataSource<Any>(coroutineScope)
     private val targets: HashMap<String, Meta> = HashMap()
     private val tasks = HashMap<Name, Task<*>>()
     private var cache: WorkspaceCache? = null
@@ -100,13 +124,8 @@ public class WorkspaceBuilder(private val parentContext: Context = Global) : Tas
     /**
      * Define intrinsic data for the workspace
      */
-    public fun data(builder: DataSetBuilder<Any>.() -> Unit) {
-        data = DataTree(builder)
-    }
-
-    @DFExperimental
-    public fun data(scope: CoroutineScope, builder: DataSourceBuilder<Any>.() -> Unit) {
-        data = DataSource(scope, builder)
+    public fun data(builder: DataSourceBuilder<Any>.() -> Unit) {
+        data.apply(builder)
     }
 
     /**
@@ -132,7 +151,7 @@ public class WorkspaceBuilder(private val parentContext: Context = Global) : Tas
         val postProcess: suspend (TaskResult<*>) -> TaskResult<*> = { result ->
             cache?.evaluate(result) ?: result
         }
-        return WorkspaceImpl(context ?: parentContext, data ?: DataSet.EMPTY, targets, tasks, postProcess)
+        return WorkspaceImpl(context ?: parentContext, data, targets, tasks, postProcess)
     }
 }
 

From 5fec0518d46f481747b748975c75f817eb5b6e64 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Fri, 19 Jan 2024 22:23:02 +0300
Subject: [PATCH 16/77] [WIP] change data structure

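A rough sketch of how the reworked tree API is meant to be used (illustrative
only; the tree builders and accessors are those defined in the new
DataSource.kt below, while Data(value) and Name.parse already exist in the
library):

    import space.kscience.dataforge.data.*
    import space.kscience.dataforge.names.Name

    fun main() {
        // build a static tree through the new DataSink interface
        val tree = DataTree<Int> {
            emit(Name.parse("a"), Data(1))
            emit(Name.parse("b.c"), Data(2))
        }

        // direct access by name
        val a: Data<Int>? = tree["a"]
        println(a?.meta)

        // traverse the whole tree; a static tree produces no updates
        tree.asSequence().forEach { item ->
            println("${item.name}: ${item.type}")
        }
    }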
---
 dataforge-context/build.gradle.kts            |   2 +-
 .../dataforge/actions/AbstractAction.kt       |  43 ++-
 .../kscience/dataforge/actions/Action.kt      |  25 +-
 .../kscience/dataforge/actions/MapAction.kt   |  20 +-
 .../dataforge/actions/ReduceAction.kt         |  14 +-
 .../kscience/dataforge/actions/SplitAction.kt |  11 +-
 .../space/kscience/dataforge/data/Data.kt     |  20 +-
 .../kscience/dataforge/data/DataFilter.kt     |  89 ++++++
 .../space/kscience/dataforge/data/DataSet.kt  | 124 --------
 .../kscience/dataforge/data/DataSetBuilder.kt | 165 -----------
 .../kscience/dataforge/data/DataSource.kt     | 271 ++++++++++++++++++
 .../space/kscience/dataforge/data/DataTree.kt | 119 --------
 .../dataforge/data/DataTreeBuilder.kt         | 121 --------
 .../space/kscience/dataforge/data/Goal.kt     |   8 +-
 .../kscience/dataforge/data/GroupRule.kt      |  41 +--
 .../kscience/dataforge/data/NamedData.kt      |   6 +-
 .../kscience/dataforge/data/StaticDataTree.kt |  80 ------
 .../kscience/dataforge/data/dataBuilders.kt   | 121 ++++++++
 .../kscience/dataforge/data/dataFilter.kt     | 105 -------
 .../kscience/dataforge/data/dataTransform.kt  |  62 ++--
 .../kscience/dataforge/data/dataFilterJvm.kt  |  92 +++---
 .../dataforge/data/dataSetBuilderInContext.kt |  30 +-
 .../kscience/dataforge/data/ActionsTest.kt    |   3 +-
 ...kt => LegacyGenericDataTreeBuilderTest.kt} |  12 +-
 .../space/kscience/dataforge/io/IOFormat.kt   |  17 +-
 .../space/kscience/dataforge/io/IOPlugin.kt   |   5 +-
 .../kscience/dataforge/meta/MetaConverter.kt  |  32 +--
 .../space/kscience/dataforge/meta/Scheme.kt   |   6 +-
 .../space/kscience/dataforge/names/Name.kt    |   7 +
 .../dataforge/workspace/EnvelopeTask.kt       |  46 ---
 .../kscience/dataforge/workspace/Task.kt      |  19 +-
 .../kscience/dataforge/workspace/TaskData.kt  |  50 ----
 .../dataforge/workspace/TaskResult.kt         |  55 +---
 .../kscience/dataforge/workspace/Workspace.kt |  23 +-
 .../dataforge/workspace/WorkspaceBuilder.kt   |  21 +-
 .../dataforge/workspace/WorkspaceCache.kt     |   2 +-
 .../dataforge/workspace/WorkspaceImpl.kt      |   6 +-
 .../dataforge/workspace/envelopeData.kt       |   3 +-
 .../dataforge/workspace/taskBuilders.kt       |  29 +-
 .../dataforge/workspace/FileWorkspaceCache.kt |  24 +-
 .../workspace/InMemoryWorkspaceCache.kt       |   2 +-
 .../kscience/dataforge/workspace/fileData.kt  | 135 ++++-----
 .../dataforge/workspace/workspaceJvm.kt       |  13 +-
 .../kscience/dataforge/workspace/zipData.kt   |   6 +-
 .../workspace/DataPropagationTest.kt          |   4 +-
 .../dataforge/workspace/FileDataTest.kt       |   2 +-
 .../workspace/SimpleWorkspaceTest.kt          |   2 +-
 47 files changed, 838 insertions(+), 1255 deletions(-)
 create mode 100644 dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
 delete mode 100644 dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSet.kt
 delete mode 100644 dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSetBuilder.kt
 create mode 100644 dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
 delete mode 100644 dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTree.kt
 delete mode 100644 dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTreeBuilder.kt
 delete mode 100644 dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataTree.kt
 create mode 100644 dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
 delete mode 100644 dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataFilter.kt
 rename dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/{DataTreeBuilderTest.kt => LegacyGenericDataTreeBuilderTest.kt} (87%)
 delete mode 100644 dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/EnvelopeTask.kt
 delete mode 100644 dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskData.kt

diff --git a/dataforge-context/build.gradle.kts b/dataforge-context/build.gradle.kts
index b59abed0..72a41693 100644
--- a/dataforge-context/build.gradle.kts
+++ b/dataforge-context/build.gradle.kts
@@ -12,7 +12,7 @@ kscience {
     useCoroutines()
     useSerialization()
     commonMain {
-        api(project(":dataforge-meta"))
+        api(projects.dataforgeMeta)
         api(spclibs.atomicfu)
     }
     jvmMain{
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
index e7bbe6f6..33d4ca05 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
@@ -1,9 +1,9 @@
 package space.kscience.dataforge.actions
 
+import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.launch
 import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.startsWith
 import kotlin.reflect.KType
@@ -19,47 +19,40 @@ internal fun MutableMap<Name, *>.removeWhatStartsWith(name: Name) {
 /**
  * An action that caches results on-demand and recalculates them on source push
  */
-public abstract class AbstractAction<in T : Any, R : Any>(
+public abstract class AbstractAction<T : Any, R : Any>(
     public val outputType: KType,
 ) : Action<T, R> {
 
     /**
      * Generate initial content of the output
      */
-    protected abstract fun DataSetBuilder<R>.generate(
-        data: DataSet<T>,
+    protected abstract fun DataSink<R>.generate(
+        data: DataTree<T>,
         meta: Meta,
     )
 
     /**
-     * Update part of the data set when given [updateKey] is triggered by the source
+     * Update part of the data set using the provided data
      */
-    protected open fun DataSourceBuilder<R>.update(
-        dataSet: DataSet<T>,
+    protected open fun DataSink<R>.update(
+        allData: DataTree<T>,
         meta: Meta,
-        updateKey: Name,
-    ) {
-        // By default, recalculate the whole dataset
-        generate(dataSet, meta)
+        namedData: NamedData<T>,
+    ) {
+        // By default, regenerate the whole data set
+        generate(allData, meta)
     }
 
-    @OptIn(DFInternal::class)
     override fun execute(
-        dataSet: DataSet<T>,
+        scope: CoroutineScope,
+        dataSet: DataTree<T>,
         meta: Meta,
-    ): DataSet<R> = if (dataSet is DataSource) {
-        DataSource(outputType, dataSet){
-            generate(dataSet, meta)
-
-            launch {
-                dataSet.updates.collect { name ->
-                    update(dataSet, meta, name)
-                }
+    ): ObservableDataTree<R> = MutableDataTree<R>(outputType, scope).apply {
+        generate(dataSet, meta)
+        scope.launch {
+            dataSet.updates().collect {
+                update(dataSet, meta, it)
             }
         }
-    } else {
-        DataTree<R>(outputType) {
-            generate(dataSet, meta)
-        }
     }
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
index 3feec9bc..b9a9a0a8 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
@@ -1,19 +1,22 @@
 package space.kscience.dataforge.actions
 
-import space.kscience.dataforge.data.DataSet
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.coroutineScope
+import space.kscience.dataforge.data.DataTree
+import space.kscience.dataforge.data.ObservableDataTree
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.misc.DFExperimental
 
 /**
  * A simple data transformation on a data node. Actions should avoid doing actual dependency evaluation in [execute].
  */
-public fun interface Action<in T : Any, out R : Any> {
+public fun interface Action<T : Any, R : Any> {
 
     /**
      * Transform the data in the node, producing a new node. By default, it is assumed that all calculations are lazy
      * so no actual computation is started at this moment.
      */
-    public fun execute(dataSet: DataSet<T>, meta: Meta): DataSet<R>
+    public fun execute(scope: CoroutineScope, dataSet: DataTree<T>, meta: Meta): ObservableDataTree<R>
 
     public companion object
 }
@@ -21,18 +24,22 @@ public fun interface Action<in T : Any, out R : Any> {
 /**
  * A convenience method to transform data using given [action]
  */
-public fun <T : Any, R : Any> DataSet<T>.transform(action: Action<T, R>, meta: Meta = Meta.EMPTY): DataSet<R> =
-    action.execute(this, meta)
+public fun <T : Any, R : Any> DataTree<T>.transform(
+    action: Action<T, R>,
+    scope: CoroutineScope,
+    meta: Meta = Meta.EMPTY,
+): DataTree<R> = action.execute(scope, this, meta)
 
 /**
  * Action composition. The result is terminal if one of its parts is terminal
  */
 public infix fun <T : Any, I : Any, R : Any> Action<T, I>.then(action: Action<I, R>): Action<T, R> =
-    Action<T, R> { dataSet, meta -> action.execute(this@then.execute(dataSet, meta), meta) }
+    Action { scope, dataSet, meta -> action.execute(scope, this@then.execute(scope, dataSet, meta), meta) }
 
 @DFExperimental
-public operator fun <T : Any, R : Any> Action<T, R>.invoke(
-    dataSet: DataSet<T>,
+public suspend operator fun <T : Any, R : Any> Action<T, R>.invoke(
+    dataSet: DataTree<T>,
     meta: Meta = Meta.EMPTY,
-): DataSet<R> = execute(dataSet, meta)
+): DataTree<R> = coroutineScope { execute(this, dataSet, meta) }
+
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index 7b2c94f5..60f05910 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -29,6 +29,7 @@ public class MapActionBuilder<T, R>(
     public var name: Name,
     public var meta: MutableMeta,
     public val actionMeta: Meta,
+    public val dataType: KType,
     @PublishedApi internal var outputType: KType,
 ) {
 
@@ -45,19 +46,16 @@ public class MapActionBuilder<T, R>(
     /**
      * Calculate the result of goal
      */
-    public inline fun <reified R1 : R> result(noinline f: suspend ActionEnv.(T) -> R1) {
-        outputType = typeOf<R1>()
-        result = f;
-    }
+    public inline fun <reified R1 : R> result(noinline f: suspend ActionEnv.(T) -> R1): Unit = result(typeOf<R1>(), f)
 }
 
 @PublishedApi
-internal class MapAction<in T : Any, R : Any>(
+internal class MapAction<T : Any, R : Any>(
     outputType: KType,
     private val block: MapActionBuilder<T, R>.() -> Unit,
 ) : AbstractAction<T, R>(outputType) {
 
-    private fun DataSetBuilder<R>.mapOne(name: Name, data: Data<T>, meta: Meta) {
+    private fun DataSink<R>.mapOne(name: Name, data: Data<T>, meta: Meta) {
         // Creating a new environment for action using **old** name, old meta and task meta
         val env = ActionEnv(name, data.meta, meta)
 
@@ -66,6 +64,7 @@ internal class MapAction<in T : Any, R : Any>(
             name,
             data.meta.toMutableMeta(), // using data meta
             meta,
+            data.type,
             outputType
         ).apply(block)
 
@@ -80,16 +79,15 @@ internal class MapAction<in T : Any, R : Any>(
             builder.result(env, data.await())
         }
         //setting the data node
-        data(newName, newData)
+        emit(newName, newData)
     }
 
-    override fun DataSetBuilder<R>.generate(data: DataSet<T>, meta: Meta) {
+    override fun DataSink<R>.generate(data: DataTree<T>, meta: Meta) {
         data.forEach { mapOne(it.name, it.data, meta) }
     }
 
-    override fun DataSourceBuilder<R>.update(dataSet: DataSet<T>, meta: Meta, updateKey: Name) {
-        remove(updateKey)
-        dataSet[updateKey]?.let { mapOne(updateKey, it, meta) }
+    override fun DataSink<R>.update(allData: DataTree<T>, meta: Meta, namedData: NamedData<T>) {
+        mapOne(namedData.name, namedData.data, namedData.meta)
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
index a74cfad9..cc27b3d1 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
@@ -14,7 +14,7 @@ import kotlin.reflect.typeOf
 
 public class JoinGroup<T : Any, R : Any>(
     public var name: String,
-    internal val set: DataSet<T>,
+    internal val set: DataTree<T>,
     @PublishedApi internal var outputType: KType,
 ) {
 
@@ -39,7 +39,7 @@ public class ReduceGroupBuilder<T : Any, R : Any>(
     public val actionMeta: Meta,
     private val outputType: KType,
 ) {
-    private val groupRules: MutableList<(DataSet<T>) -> List<JoinGroup<T, R>>> = ArrayList();
+    private val groupRules: MutableList<(DataTree<T>) -> List<JoinGroup<T, R>>> = ArrayList();
 
     /**
      * introduce grouping by meta value
@@ -54,12 +54,12 @@ public class ReduceGroupBuilder<T : Any, R : Any>(
 
     public fun group(
         groupName: String,
-        predicate: (Name, Meta) -> Boolean,
+        predicate: DataFilter,
         action: JoinGroup<T, R>.() -> Unit,
     ) {
         groupRules += { source ->
             listOf(
-                JoinGroup<T, R>(groupName, source.filter(predicate), outputType).apply(action)
+                JoinGroup<T, R>(groupName, source.filterData(predicate), outputType).apply(action)
             )
         }
     }
@@ -73,7 +73,7 @@ public class ReduceGroupBuilder<T : Any, R : Any>(
         }
     }
 
-    internal fun buildGroups(input: DataSet<T>): List<JoinGroup<T, R>> =
+    internal fun buildGroups(input: DataTree<T>): List<JoinGroup<T, R>> =
         groupRules.flatMap { it.invoke(input) }
 
 }
@@ -85,7 +85,7 @@ internal class ReduceAction<T : Any, R : Any>(
 ) : AbstractAction<T, R>(outputType) {
     //TODO optimize reduction. Currently, the whole action recalculates on push
 
-    override fun DataSetBuilder<R>.generate(data: DataSet<T>, meta: Meta) {
+    override fun DataSink<R>.generate(data: DataTree<T>, meta: Meta) {
         ReduceGroupBuilder<T, R>(meta, outputType).apply(action).buildGroups(data).forEach { group ->
             val dataFlow: Map<Name, Data<T>> = group.set.asSequence().fold(HashMap()) { acc, value ->
                 acc.apply {
@@ -103,7 +103,7 @@ internal class ReduceAction<T : Any, R : Any>(
                 meta = groupMeta
             ) { group.result.invoke(env, it) }
 
-            data(env.name, res)
+            emit(env.name, res)
         }
     }
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
index 0ecde319..a2da44f8 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
@@ -49,7 +49,7 @@ internal class SplitAction<T : Any, R : Any>(
     private val action: SplitBuilder<T, R>.() -> Unit,
 ) : AbstractAction<T, R>(outputType) {
 
-    private fun DataSetBuilder<R>.splitOne(name: Name, data: Data<T>, meta: Meta) {
+    private fun DataSink<R>.splitOne(name: Name, data: Data<T>, meta: Meta) {
         val laminate = Laminate(data.meta, meta)
 
         val split = SplitBuilder<T, R>(name, data.meta).apply(action)
@@ -64,7 +64,7 @@ internal class SplitAction<T : Any, R : Any>(
             ).apply(rule)
             //data.map<R>(outputType, meta = env.meta) { env.result(it) }.named(fragmentName)
 
-            data(
+            emit(
                 fragmentName,
                 @Suppress("OPT_IN_USAGE") Data(outputType, meta = env.meta, dependencies = listOf(data)) {
                     env.result(data.await())
@@ -73,13 +73,12 @@ internal class SplitAction<T : Any, R : Any>(
         }
     }
 
-    override fun DataSetBuilder<R>.generate(data: DataSet<T>, meta: Meta) {
+    override fun DataSink<R>.generate(data: DataTree<T>, meta: Meta) {
         data.forEach { splitOne(it.name, it.data, meta) }
     }
 
-    override fun DataSourceBuilder<R>.update(dataSet: DataSet<T>, meta: Meta, updateKey: Name) {
-        remove(updateKey)
-        dataSet[updateKey]?.let { splitOne(updateKey, it, meta) }
+    override fun DataSink<R>.update(allData: DataTree<T>, meta: Meta, namedData: NamedData<T>) {
+        splitOne(namedData.name, namedData.data, namedData.meta)
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
index 756cd840..3668af1e 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
@@ -15,7 +15,7 @@ import kotlin.reflect.typeOf
  * A data element characterized by its meta
  */
 @DfType(Data.TYPE)
-public interface Data<out T> : Goal<T>, MetaRepr {
+public interface Data<T> : Goal<T>, MetaRepr {
     /**
      * Type marker for the data. The type is known before the calculation takes place so it could be checked.
      */
@@ -41,7 +41,7 @@ public interface Data<out T> : Goal<T>, MetaRepr {
          */
         internal val TYPE_OF_NOTHING: KType = typeOf<Unit>()
 
-        public inline fun <reified T : Any> static(
+        public inline fun <reified T> static(
             value: T,
             meta: Meta = Meta.EMPTY,
         ): Data<T> = StaticData(typeOf<T>(), value, meta)
@@ -69,37 +69,37 @@ public interface Data<out T> : Goal<T>, MetaRepr {
  * A lazily computed variant of [Data] based on [LazyGoal]
  * One must ensure that proper [type] is used so this method should not be used
  */
-private class LazyData<T : Any>(
+private class LazyData<T>(
     override val type: KType,
     override val meta: Meta = Meta.EMPTY,
     additionalContext: CoroutineContext = EmptyCoroutineContext,
-    dependencies: Collection<Goal<*>> = emptyList(),
+    dependencies: Iterable<Goal<*>> = emptyList(),
     block: suspend () -> T,
 ) : Data<T>, LazyGoal<T>(additionalContext, dependencies, block)
 
-public class StaticData<T : Any>(
+public class StaticData<T>(
     override val type: KType,
     value: T,
     override val meta: Meta = Meta.EMPTY,
 ) : Data<T>, StaticGoal<T>(value)
 
 @Suppress("FunctionName")
-public inline fun <reified T : Any> Data(value: T, meta: Meta = Meta.EMPTY): StaticData<T> =
+public inline fun <reified T> Data(value: T, meta: Meta = Meta.EMPTY): StaticData<T> =
     StaticData(typeOf<T>(), value, meta)
 
 @DFInternal
-public fun <T : Any> Data(
+public fun <T> Data(
     type: KType,
     meta: Meta = Meta.EMPTY,
     context: CoroutineContext = EmptyCoroutineContext,
-    dependencies: Collection<Goal<*>> = emptyList(),
+    dependencies: Iterable<Goal<*>> = emptyList(),
     block: suspend () -> T,
 ): Data<T> = LazyData(type, meta, context, dependencies, block)
 
 @OptIn(DFInternal::class)
-public inline fun <reified T : Any> Data(
+public inline fun <reified T> Data(
     meta: Meta = Meta.EMPTY,
     context: CoroutineContext = EmptyCoroutineContext,
-    dependencies: Collection<Goal<*>> = emptyList(),
+    dependencies: Iterable<Goal<*>> = emptyList(),
     noinline block: suspend () -> T,
 ): Data<T> = Data(typeOf<T>(), meta, context, dependencies, block)
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
new file mode 100644
index 00000000..f45570ad
--- /dev/null
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
@@ -0,0 +1,89 @@
+package space.kscience.dataforge.data
+
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.flow.filter
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.names.Name
+import kotlin.reflect.KType
+
+public fun interface DataFilter {
+
+    public fun accepts(name: Name, meta: Meta, type: KType): Boolean
+
+    public companion object {
+        public val EMPTY: DataFilter = DataFilter { _, _, _ -> true }
+    }
+}
+
+public fun DataFilter.accepts(data: NamedData<*>): Boolean = accepts(data.name, data.meta, data.type)
+
+public fun <T> Sequence<NamedData<T>>.filterData(predicate: DataFilter): Sequence<NamedData<T>> = filter { data ->
+    predicate.accepts(data)
+}
+
+public fun <T> Flow<NamedData<T>>.filterData(predicate: DataFilter): Flow<NamedData<T>> = filter { data ->
+    predicate.accepts(data)
+}
+
+public fun <T> DataSource<T>.filterData(
+    predicate: DataFilter,
+): DataSource<T> = object : DataSource<T> {
+    override val dataType: KType get() = this@filterData.dataType
+
+    override fun read(name: Name): Data<T>? =
+        this@filterData.read(name)?.takeIf { predicate.accepts(name, it.meta, it.type) }
+}
+
+/**
+ * Stateless filtered [ObservableDataSource]
+ */
+public fun <T> ObservableDataSource<T>.filterData(
+    predicate: DataFilter,
+): ObservableDataSource<T> = object : ObservableDataSource<T> {
+    override fun updates(): Flow<NamedData<T>> = this@filterData.updates().filter { predicate.accepts(it) }
+
+    override val dataType: KType get() = this@filterData.dataType
+
+    override fun read(name: Name): Data<T>? =
+        this@filterData.read(name)?.takeIf { predicate.accepts(name, it.meta, it.type) }
+}
+
+public fun <T> GenericDataTree<T, *>.filterData(
+    predicate: DataFilter,
+): DataTree<T> = asSequence().filterData(predicate).toTree(dataType)
+
+public fun <T> GenericObservableDataTree<T, *>.filterData(
+    scope: CoroutineScope,
+    predicate: DataFilter,
+): ObservableDataTree<T> = asSequence().filterData(predicate).toObservableTree(dataType, scope, updates().filterData(predicate))
+
+
+///**
+// * Generate a wrapper data set with a given name prefix appended to all names
+// */
+//public fun <T : Any> DataTree<T>.withNamePrefix(prefix: Name): DataSet<T> = if (prefix.isEmpty()) {
+//    this
+//} else object : DataSource<T> {
+//
+//    override val dataType: KType get() = this@withNamePrefix.dataType
+//
+//    override val coroutineContext: CoroutineContext
+//        get() = (this@withNamePrefix as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
+//
+//    override val meta: Meta get() = this@withNamePrefix.meta
+//
+//
+//    override fun iterator(): Iterator<NamedData<T>> = iterator {
+//        for (d in this@withNamePrefix) {
+//            yield(d.data.named(prefix + d.name))
+//        }
+//    }
+//
+//    override fun get(name: Name): Data<T>? =
+//        name.removeFirstOrNull(name)?.let { this@withNamePrefix.get(it) }
+//
+//    override val updates: Flow<Name> get() = this@withNamePrefix.updates.map { prefix + it }
+//}
+//
+
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSet.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSet.kt
deleted file mode 100644
index 44639653..00000000
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSet.kt
+++ /dev/null
@@ -1,124 +0,0 @@
-package space.kscience.dataforge.data
-
-import kotlinx.coroutines.*
-import kotlinx.coroutines.flow.Flow
-import kotlinx.coroutines.flow.emptyFlow
-import kotlinx.coroutines.flow.mapNotNull
-import space.kscience.dataforge.data.Data.Companion.TYPE_OF_NOTHING
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.asName
-import space.kscience.dataforge.names.endsWith
-import space.kscience.dataforge.names.parseAsName
-import kotlin.reflect.KType
-
-public interface DataSet<out T : Any> {
-
-    /**
-     * The minimal common ancestor to all data in the node
-     */
-    public val dataType: KType
-
-    /**
-     * Meta-data associated with this node. If no meta is provided, returns [Meta.EMPTY].
-     */
-    public val meta: Meta
-
-    /**
-     * Traverse this [DataSet] returning named data instances. The order is not guaranteed.
-     */
-    public operator fun iterator(): Iterator<NamedData<T>>
-
-    /**
-     * Get data with given name.
-     */
-    public operator fun get(name: Name): Data<T>?
-
-    public companion object {
-        public val META_KEY: Name = "@meta".asName()
-
-        /**
-         * An empty [DataSet] that suits all types
-         */
-        public val EMPTY: DataSet<Nothing> = object : DataSet<Nothing> {
-            override val dataType: KType = TYPE_OF_NOTHING
-            override val meta: Meta get() = Meta.EMPTY
-
-            override fun iterator(): Iterator<NamedData<Nothing>> = emptySequence<NamedData<Nothing>>().iterator()
-
-            override fun get(name: Name): Data<Nothing>? = null
-        }
-    }
-}
-
-public fun <T : Any> DataSet<T>.asSequence(): Sequence<NamedData<T>> = object : Sequence<NamedData<T>> {
-    override fun iterator(): Iterator<NamedData<T>> = this@asSequence.iterator()
-}
-
-/**
- * Return a single [Data] in this [DataSet]. Throw error if it is not single.
- */
-public fun <T : Any> DataSet<T>.single(): NamedData<T> = asSequence().single()
-
-public fun <T : Any> DataSet<T>.asIterable(): Iterable<NamedData<T>> = object : Iterable<NamedData<T>> {
-    override fun iterator(): Iterator<NamedData<T>> = this@asIterable.iterator()
-}
-
-public operator fun <T : Any> DataSet<T>.get(name: String): Data<T>? = get(name.parseAsName())
-
-/**
- * A [DataSet] with propagated updates.
- */
-public interface DataSource<out T : Any> : DataSet<T>, CoroutineScope {
-
-    /**
-     * A flow of updated item names. Updates are propagated in a form of [Flow] of names of updated nodes.
-     * Those can include new data items and replacement of existing ones. The replaced items could update existing data content
-     * and replace it completely, so they should be pulled again.
-     *
-     */
-    public val updates: Flow<Name>
-
-    /**
-     * Stop generating updates from this [DataSource]
-     */
-    public fun close() {
-        coroutineContext[Job]?.cancel()
-    }
-}
-
-public val <T : Any> DataSet<T>.updates: Flow<Name> get() = if (this is DataSource) updates else emptyFlow()
-//
-///**
-// * Flow all data nodes with names starting with [branchName]
-// */
-//public fun <T : Any> DataSet<T>.children(branchName: Name): Sequence<NamedData<T>> =
-//    this@children.asSequence().filter {
-//        it.name.startsWith(branchName)
-//    }
-
-/**
- * Start computation for all goals in data node and return a job for the whole node
- */
-public fun <T : Any> DataSet<T>.startAll(coroutineScope: CoroutineScope): Job = coroutineScope.launch {
-    asIterable().map {
-        it.launch(this@launch)
-    }.joinAll()
-}
-
-public suspend fun <T : Any> DataSet<T>.computeAndJoinAll(): Unit = coroutineScope { startAll(this).join() }
-
-public fun DataSet<*>.toMeta(): Meta = Meta {
-    forEach {
-        if (it.name.endsWith(DataSet.META_KEY)) {
-            set(it.name, it.meta)
-        } else {
-            it.name put {
-                "type" put it.type.toString()
-                "meta" put it.meta
-            }
-        }
-    }
-}
-
-public val <T : Any> DataSet<T>.updatesWithData: Flow<NamedData<T>> get() = updates.mapNotNull { get(it)?.named(it) }
\ No newline at end of file
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSetBuilder.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSetBuilder.kt
deleted file mode 100644
index f9f14f37..00000000
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSetBuilder.kt
+++ /dev/null
@@ -1,165 +0,0 @@
-package space.kscience.dataforge.data
-
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.MutableMeta
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.isEmpty
-import space.kscience.dataforge.names.plus
-import kotlin.reflect.KType
-
-public interface DataSetBuilder<in T : Any> {
-    public val dataType: KType
-
-    /**
-     * Remove all data items starting with [name]
-     */
-    public fun remove(name: Name)
-
-    public fun data(name: Name, data: Data<T>?)
-
-    /**
-     * Set a current state of given [dataSet] into a branch [name]. Does not propagate updates
-     */
-    public fun node(name: Name, dataSet: DataSet<T>) {
-        //remove previous items
-        if (name != Name.EMPTY) {
-            remove(name)
-        }
-
-        //Set new items
-        dataSet.forEach {
-            data(name + it.name, it.data)
-        }
-    }
-
-    /**
-     * Set meta for the given node
-     */
-    public fun meta(name: Name, meta: Meta)
-
-}
-
-/**
- * Define meta in this [DataSet]
- */
-public fun <T : Any> DataSetBuilder<T>.meta(value: Meta): Unit = meta(Name.EMPTY, value)
-
-/**
- * Define meta in this [DataSet]
- */
-public fun <T : Any> DataSetBuilder<T>.meta(mutableMeta: MutableMeta.() -> Unit): Unit = meta(Meta(mutableMeta))
-
-@PublishedApi
-internal class SubSetBuilder<in T : Any>(
-    private val parent: DataSetBuilder<T>,
-    private val branch: Name,
-) : DataSetBuilder<T> {
-    override val dataType: KType get() = parent.dataType
-
-    override fun remove(name: Name) {
-        parent.remove(branch + name)
-    }
-
-    override fun data(name: Name, data: Data<T>?) {
-        parent.data(branch + name, data)
-    }
-
-    override fun node(name: Name, dataSet: DataSet<T>) {
-        parent.node(branch + name, dataSet)
-    }
-
-    override fun meta(name: Name, meta: Meta) {
-        parent.meta(branch + name, meta)
-    }
-}
-
-public inline fun <T : Any> DataSetBuilder<T>.node(
-    name: Name,
-    crossinline block: DataSetBuilder<T>.() -> Unit,
-) {
-    if (name.isEmpty()) block() else SubSetBuilder(this, name).block()
-}
-
-
-public fun <T : Any> DataSetBuilder<T>.data(name: String, value: Data<T>) {
-    data(Name.parse(name), value)
-}
-
-public fun <T : Any> DataSetBuilder<T>.node(name: String, set: DataSet<T>) {
-    node(Name.parse(name), set)
-}
-
-public inline fun <T : Any> DataSetBuilder<T>.node(
-    name: String,
-    crossinline block: DataSetBuilder<T>.() -> Unit,
-): Unit = node(Name.parse(name), block)
-
-public fun <T : Any> DataSetBuilder<T>.set(value: NamedData<T>) {
-    data(value.name, value.data)
-}
-
-/**
- * Produce lazy [Data] and emit it into the [DataSetBuilder]
- */
-public inline fun <reified T : Any> DataSetBuilder<T>.produce(
-    name: String,
-    meta: Meta = Meta.EMPTY,
-    noinline producer: suspend () -> T,
-) {
-    val data = Data(meta, block = producer)
-    data(name, data)
-}
-
-public inline fun <reified T : Any> DataSetBuilder<T>.produce(
-    name: Name,
-    meta: Meta = Meta.EMPTY,
-    noinline producer: suspend () -> T,
-) {
-    val data = Data(meta, block = producer)
-    data(name, data)
-}
-
-/**
- * Emit a static data with the fixed value
- */
-public inline fun <reified T : Any> DataSetBuilder<T>.static(
-    name: String,
-    data: T,
-    meta: Meta = Meta.EMPTY,
-): Unit = data(name, Data.static(data, meta))
-
-public inline fun <reified T : Any> DataSetBuilder<T>.static(
-    name: Name,
-    data: T,
-    meta: Meta = Meta.EMPTY,
-): Unit = data(name, Data.static(data, meta))
-
-public inline fun <reified T : Any> DataSetBuilder<T>.static(
-    name: String,
-    data: T,
-    mutableMeta: MutableMeta.() -> Unit,
-): Unit = data(Name.parse(name), Data.static(data, Meta(mutableMeta)))
-
-/**
- * Update data with given node data and meta with node meta.
- */
-@DFExperimental
-public fun <T : Any> DataSetBuilder<T>.populateFrom(tree: DataSet<T>): Unit {
-    tree.forEach {
-        //TODO check if the place is occupied
-        data(it.name, it.data)
-    }
-}
-
-//public fun <T : Any> DataSetBuilder<T>.populateFrom(flow: Flow<NamedData<T>>) {
-//    flow.collect {
-//        data(it.name, it.data)
-//    }
-//}
-
-public fun <T : Any> DataSetBuilder<T>.populateFrom(sequence: Sequence<NamedData<T>>) {
-    sequence.forEach {
-        data(it.name, it.data)
-    }
-}
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
new file mode 100644
index 00000000..8c86d431
--- /dev/null
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
@@ -0,0 +1,271 @@
+package space.kscience.dataforge.data
+
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.flow.*
+import kotlinx.coroutines.launch
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.misc.DFInternal
+import space.kscience.dataforge.names.*
+import kotlin.reflect.KType
+import kotlin.reflect.typeOf
+
+/**
+ * A generic data provider
+ */
+public interface DataSource<T> {
+
+    /**
+     * The minimal common ancestor to all data in the node
+     */
+    public val dataType: KType
+
+    /**
+     * Get data with the given name, or null if it is not present.
+     */
+    public fun read(name: Name): Data<T>?
+}
+
+/**
+ * A data provider with possible dynamic updates
+ */
+public interface ObservableDataSource<T> : DataSource<T> {
+
+    /**
+     * Flow updates made to the data
+     */
+    public fun updates(): Flow<NamedData<T>>
+}
+
+/**
+ * A tree-like structure for data holding
+ */
+public interface GenericDataTree<T, out TR : GenericDataTree<T, TR>> : DataSource<T> {
+    public val self: TR
+
+    public val data: Data<T>?
+    public val items: Map<NameToken, TR>
+
+
+    override fun read(name: Name): Data<T>? = when (name.length) {
+        0 -> data
+        else -> items[name.first()]?.read(name.cutFirst())
+    }
+
+    public companion object {
+        private object EmptyDataTree : GenericDataTree<Nothing, EmptyDataTree> {
+            override val self: EmptyDataTree get() = this
+            override val data: Data<Nothing>? = null
+            override val items: Map<NameToken, EmptyDataTree> = emptyMap()
+            override val dataType: KType = typeOf<Unit>()
+
+            override fun read(name: Name): Data<Nothing>? = null
+
+        }
+
+        public val EMPTY: GenericDataTree<Nothing, *> = EmptyDataTree
+    }
+}
+
+public typealias DataTree<T> = GenericDataTree<T, *>
+
+/**
+ * An alias for easier access to tree values
+ */
+public operator fun <T> DataTree<T>.get(name: Name): Data<T>? = read(name)
+
+public operator fun <T> DataTree<T>.get(name: String): Data<T>? = read(name.parseAsName())
+
+/**
+ * Return a sequence of all data items in this tree.
+ * This method does not take updates into account.
+ */
+public fun <T> GenericDataTree<T, DataTree<T>>.asSequence(
+    namePrefix: Name = Name.EMPTY,
+): Sequence<NamedData<T>> = sequence {
+    data?.let { yield(it.named(namePrefix)) }
+    items.forEach { (token, tree) ->
+        yieldAll(tree.asSequence(namePrefix + token))
+    }
+}
+
+public val DataTree<*>.meta: Meta? get() = data?.meta
+
+/**
+ * Provide a subtree with the given name, or null if it does not exist
+ */
+public tailrec fun <T, TR : GenericDataTree<T, TR>> GenericDataTree<T, TR>.branch(name: Name): TR? =
+    when (name.length) {
+        0 -> self
+        1 -> items[name.first()]
+        else -> items[name.first()]?.branch(name.cutFirst())
+    }
+
+public fun GenericDataTree<*, *>.isEmpty(): Boolean = data == null && items.isEmpty()
+
+@PublishedApi
+internal class FlatDataTree<T>(
+    override val dataType: KType,
+    val dataSet: Map<Name, Data<T>>,
+    val prefix: Name,
+) : GenericDataTree<T, FlatDataTree<T>> {
+    override val self: FlatDataTree<T> get() = this
+    override val data: Data<T>? get() = dataSet[prefix]
+    override val items: Map<NameToken, FlatDataTree<T>>
+        get() = dataSet.keys
+            .filter { it.startsWith(prefix) && it.length > prefix.length }
+            .map { it.tokens[prefix.length] }
+            .associateWith { FlatDataTree(dataType, dataSet, prefix + it) }
+
+    override fun read(name: Name): Data<T>? = dataSet[prefix + name]
+}
+
+/**
+ * Represent this flat data map as a [DataTree] without copying it
+ */
+public inline fun <reified T> Map<Name, Data<T>>.asTree(): DataTree<T> = FlatDataTree(typeOf<T>(), this, Name.EMPTY)
+
+internal fun <T> Sequence<NamedData<T>>.toTree(type: KType): DataTree<T> =
+    FlatDataTree(type, associate { it.name to it.data }, Name.EMPTY)
+
+/**
+ * Collect a sequence of [NamedData] into a [DataTree]
+ */
+public inline fun <reified T> Sequence<NamedData<T>>.toTree(): DataTree<T> =
+    FlatDataTree(typeOf<T>(), associate { it.name to it.data }, Name.EMPTY)
+
+public interface GenericObservableDataTree<T, TR : GenericObservableDataTree<T, TR>> : GenericDataTree<T, TR>,
+    ObservableDataSource<T>
+
+public typealias ObservableDataTree<T> = GenericObservableDataTree<T, *>
+
+public fun <T> DataTree<T>.updates(): Flow<NamedData<T>> = if (this is ObservableDataTree<T>) updates() else emptyFlow()
+
+public fun interface DataSink<T> {
+    public fun emit(name: Name, data: Data<T>?)
+}
+
+public class DataTreeBuilder<T>(private val type: KType) : DataSink<T> {
+    private val map = HashMap<Name, Data<T>>()
+    override fun emit(name: Name, data: Data<T>?) {
+        if (data == null) {
+            map.remove(name)
+        } else {
+            map[name] = data
+        }
+    }
+
+    public fun build(): DataTree<T> = FlatDataTree(type, map, Name.EMPTY)
+}
+
+@DFInternal
+public inline fun <T> DataTree(
+    dataType: KType,
+    generator: DataSink<T>.() -> Unit,
+): DataTree<T> = DataTreeBuilder<T>(dataType).apply(generator).build()
+
+/**
+ * Create and initialize a data tree.
+ */
+public inline fun <reified T> DataTree(
+    generator: DataSink<T>.() -> Unit,
+): DataTree<T> = DataTreeBuilder<T>(typeOf<T>()).apply(generator).build()
+
+/**
+ * A mutable version of [GenericDataTree]
+ */
+public interface MutableDataTree<T> : GenericObservableDataTree<T, MutableDataTree<T>>, DataSink<T> {
+    public val scope: CoroutineScope
+
+    override var data: Data<T>?
+
+    override val items: Map<NameToken, MutableDataTree<T>>
+
+    public operator fun set(token: NameToken, data: Data<T>?)
+
+    override fun emit(name: Name, data: Data<T>?): Unit = set(name, data)
+}
+
+public tailrec operator fun <T> MutableDataTree<T>.set(name: Name, data: Data<T>?): Unit {
+    when (name.length) {
+        0 -> this.data = data
+        1 -> set(name.first(), data)
+        else -> items[name.first()]?.set(name.cutFirst(), data)
+    }
+}
+
+private class ObservableMutableDataTreeImpl<T>(
+    override val dataType: KType,
+    override val scope: CoroutineScope,
+) : MutableDataTree<T> {
+
+    private val updates = MutableSharedFlow<NamedData<T>>()
+
+    private val children = HashMap<NameToken, MutableDataTree<T>>()
+
+    override var data: Data<T>? = null
+        set(value) {
+            field = value
+            if (value != null) {
+                scope.launch {
+                    updates.emit(value.named(Name.EMPTY))
+                }
+            }
+        }
+
+    override val items: Map<NameToken, MutableDataTree<T>> get() = children
+    override val self: MutableDataTree<T> get() = this
+
+    override fun set(token: NameToken, data: Data<T>?) {
+        children.getOrPut(token) {
+            ObservableMutableDataTreeImpl<T>(dataType, scope).also { subTree ->
+                subTree.updates().onEach {
+                    updates.emit(it.named(token + it.name))
+                }.launchIn(scope)
+            }
+        }.data = data
+    }
+
+    override fun updates(): Flow<NamedData<T>> = flow {
+        // emit updates for this node and its subtrees
+        updates.collect {
+            emit(it)
+        }
+    }
+}
+
+public fun <T> MutableDataTree(
+    type: KType,
+    scope: CoroutineScope
+): MutableDataTree<T> = ObservableMutableDataTreeImpl<T>(type, scope)
+
+/**
+ * Create and initialize an observable mutable data tree.
+ */
+public inline fun <reified T> MutableDataTree(
+    scope: CoroutineScope,
+    generator: MutableDataTree<T>.() -> Unit = {},
+): MutableDataTree<T> = MutableDataTree<T>(typeOf<T>(), scope).apply { generator() }
+
+//@DFInternal
+//public fun <T> ObservableDataTree(
+//    type: KType,
+//    scope: CoroutineScope,
+//    generator: suspend MutableDataTree<T>.() -> Unit = {},
+//): ObservableDataTree<T> = MutableDataTree<T>(type, scope.coroutineContext).apply(generator)
+
+public inline fun <reified T> ObservableDataTree(
+    scope: CoroutineScope,
+    generator: MutableDataTree<T>.() -> Unit = {},
+): ObservableDataTree<T> = MutableDataTree<T>(typeOf<T>(), scope).apply(generator)
+
+
+/**
+ * Collect a [Sequence] into an observable tree with additional [updates]
+ */
+public fun <T> Sequence<NamedData<T>>.toObservableTree(dataType: KType, scope: CoroutineScope, updates: Flow<NamedData<T>>): ObservableDataTree<T> =
+    MutableDataTree<T>(dataType, scope).apply {
+        emitAll(this@toObservableTree)
+        updates.onEach {
+            emit(it.name, it.data)
+        }.launchIn(scope)
+    }
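
For illustration, a minimal usage sketch of the tree API introduced in this file; the function and value names below are hypothetical and not part of the patch:

    import kotlinx.coroutines.*
    import space.kscience.dataforge.data.*
    import space.kscience.dataforge.names.Name

    suspend fun dataTreeSketch() = coroutineScope {
        // an immutable tree assembled through the DataSink receiver of the builder
        val tree: DataTree<Int> = DataTree {
            emit(Name.parse("a"), Data.static(1))
            emit(Name.parse("b.c"), Data.static(2)) // dot-separated names become nested paths
        }
        println(tree["a"]?.await())   // 1
        println(tree["b.c"]?.await()) // 2

        // a mutable tree pushes every change into its updates() flow
        val observed = MutableDataTree<Int>(this)
        val watcher = launch { observed.updates().collect { println("updated: ${it.name}") } }
        observed[Name.parse("x")] = Data.static(42)
        delay(50)
        watcher.cancel()
    }
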
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTree.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTree.kt
deleted file mode 100644
index b9273c07..00000000
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTree.kt
+++ /dev/null
@@ -1,119 +0,0 @@
-package space.kscience.dataforge.data
-
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DFInternal
-import space.kscience.dataforge.misc.DfType
-import space.kscience.dataforge.names.*
-import kotlin.collections.component1
-import kotlin.collections.component2
-import kotlin.reflect.KType
-import kotlin.reflect.typeOf
-
-public sealed class DataTreeItem<out T : Any> {
-
-    public abstract val meta: Meta
-
-    public class Node<out T : Any>(public val tree: DataTree<T>) : DataTreeItem<T>() {
-        override val meta: Meta get() = tree.meta
-    }
-
-    public class Leaf<out T : Any>(public val data: Data<T>) : DataTreeItem<T>() {
-        override val meta: Meta get() = data.meta
-    }
-}
-
-public val <T : Any> DataTreeItem<T>.type: KType
-    get() = when (this) {
-        is DataTreeItem.Node -> tree.dataType
-        is DataTreeItem.Leaf -> data.type
-    }
-
-/**
- * A tree-like [DataSet] grouped into the node. All data inside the node must inherit its type
- */
-@DfType(DataTree.TYPE)
-public interface DataTree<out T : Any> : DataSet<T> {
-
-    /**
-     * Top-level children items of this [DataTree]
-     */
-    public val items: Map<NameToken, DataTreeItem<T>>
-
-    override val meta: Meta get() = items[META_ITEM_NAME_TOKEN]?.meta ?: Meta.EMPTY
-
-    override fun iterator(): Iterator<NamedData<T>> = iterator {
-        items.forEach { (token, childItem: DataTreeItem<T>) ->
-            if (!token.body.startsWith("@")) {
-                when (childItem) {
-                    is DataTreeItem.Leaf -> yield(childItem.data.named(token.asName()))
-                    is DataTreeItem.Node -> yieldAll(childItem.tree.asSequence().map { it.named(token + it.name) })
-                }
-            }
-        }
-    }
-
-    override fun get(name: Name): Data<T>? = when (name.length) {
-        0 -> null
-        1 -> items[name.firstOrNull()!!].data
-        else -> items[name.firstOrNull()!!].tree?.get(name.cutFirst())
-    }
-
-    public companion object {
-        public const val TYPE: String = "dataTree"
-
-        /**
-         * A name token used to designate tree node meta
-         */
-        public val META_ITEM_NAME_TOKEN: NameToken = NameToken("@meta")
-
-        @DFInternal
-        public fun <T : Any> emptyWithType(type: KType, meta: Meta = Meta.EMPTY): DataTree<T> = object : DataTree<T> {
-            override val items: Map<NameToken, DataTreeItem<T>> get() = emptyMap()
-            override val dataType: KType get() = type
-            override val meta: Meta get() = meta
-        }
-
-        @OptIn(DFInternal::class)
-        public inline fun <reified T : Any> empty(meta: Meta = Meta.EMPTY): DataTree<T> =
-            emptyWithType<T>(typeOf<T>(), meta)
-    }
-}
-
-public fun <T : Any> DataTree<T>.listChildren(prefix: Name): List<Name> =
-    getItem(prefix).tree?.items?.keys?.map { prefix + it } ?: emptyList()
-
-/**
- * Get a [DataTreeItem] with given [name] or null if the item does not exist
- */
-public tailrec fun <T : Any> DataTree<T>.getItem(name: Name): DataTreeItem<T>? = when (name.length) {
-    0 -> DataTreeItem.Node(this)
-    1 -> items[name.firstOrNull()]
-    else -> items[name.firstOrNull()!!].tree?.getItem(name.cutFirst())
-}
-
-public val <T : Any> DataTreeItem<T>?.tree: DataTree<T>? get() = (this as? DataTreeItem.Node<T>)?.tree
-public val <T : Any> DataTreeItem<T>?.data: Data<T>? get() = (this as? DataTreeItem.Leaf<T>)?.data
-
-/**
- * A [Sequence] of all children including nodes
- */
-public fun <T : Any> DataTree<T>.traverseItems(): Sequence<Pair<Name, DataTreeItem<T>>> = sequence {
-    items.forEach { (head, item) ->
-        yield(head.asName() to item)
-        if (item is DataTreeItem.Node) {
-            val subSequence = item.tree.traverseItems()
-                .map { (name, data) -> (head.asName() + name) to data }
-            yieldAll(subSequence)
-        }
-    }
-}
-
-/**
- * Get a branch of this [DataTree] with a given [branchName].
- * The difference from similar method for [DataSet] is that internal logic is more simple and the return value is a [DataTree]
- */
-@OptIn(DFInternal::class)
-public fun <T : Any> DataTree<T>.branch(branchName: Name): DataTree<T> =
-    getItem(branchName)?.tree ?: DataTree.emptyWithType(dataType)
-
-public fun <T : Any> DataTree<T>.branch(branchName: String): DataTree<T> = branch(branchName.parseAsName())
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTreeBuilder.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTreeBuilder.kt
deleted file mode 100644
index f8a5dd4c..00000000
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTreeBuilder.kt
+++ /dev/null
@@ -1,121 +0,0 @@
-package space.kscience.dataforge.data
-
-import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.Job
-import kotlinx.coroutines.flow.MutableSharedFlow
-import kotlinx.coroutines.launch
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DFInternal
-import space.kscience.dataforge.misc.ThreadSafe
-import space.kscience.dataforge.names.*
-import kotlin.collections.set
-import kotlin.coroutines.CoroutineContext
-import kotlin.reflect.KType
-import kotlin.reflect.typeOf
-
-public interface DataSourceBuilder<T : Any> : DataSetBuilder<T>, DataSource<T> {
-    override val updates: MutableSharedFlow<Name>
-}
-
-/**
- * A mutable [DataTree] that propagates updates
- */
-public class DataTreeBuilder<T : Any> internal constructor(
-    override val dataType: KType,
-    coroutineContext: CoroutineContext,
-) : DataTree<T>, DataSourceBuilder<T> {
-
-    override val coroutineContext: CoroutineContext =
-        coroutineContext + Job(coroutineContext[Job]) + GoalExecutionRestriction()
-
-    private val treeItems = HashMap<NameToken, DataTreeItem<T>>()
-
-    override val items: Map<NameToken, DataTreeItem<T>>
-        get() = treeItems.filter { !it.key.body.startsWith("@") }
-
-    override val updates: MutableSharedFlow<Name> = MutableSharedFlow<Name>()
-
-    @ThreadSafe
-    private fun remove(token: NameToken) {
-        if (treeItems.remove(token) != null) {
-            launch {
-                updates.emit(token.asName())
-            }
-        }
-    }
-
-    override fun remove(name: Name) {
-        if (name.isEmpty()) error("Can't remove the root node")
-        (getItem(name.cutLast()).tree as? DataTreeBuilder)?.remove(name.lastOrNull()!!)
-    }
-
-    @ThreadSafe
-    private fun set(token: NameToken, data: Data<T>) {
-        treeItems[token] = DataTreeItem.Leaf(data)
-    }
-
-    @ThreadSafe
-    private fun set(token: NameToken, node: DataTree<T>) {
-        treeItems[token] = DataTreeItem.Node(node)
-    }
-
-    private fun getOrCreateNode(token: NameToken): DataTreeBuilder<T> =
-        (treeItems[token] as? DataTreeItem.Node<T>)?.tree as? DataTreeBuilder<T>
-            ?: DataTreeBuilder<T>(dataType, coroutineContext).also { set(token, it) }
-
-    private fun getOrCreateNode(name: Name): DataTreeBuilder<T> = when (name.length) {
-        0 -> this
-        1 -> getOrCreateNode(name.firstOrNull()!!)
-        else -> getOrCreateNode(name.firstOrNull()!!).getOrCreateNode(name.cutFirst())
-    }
-
-    override fun data(name: Name, data: Data<T>?) {
-        if (data == null) {
-            remove(name)
-        } else {
-            when (name.length) {
-                0 -> error("Can't add data with empty name")
-                1 -> set(name.firstOrNull()!!, data)
-                2 -> getOrCreateNode(name.cutLast()).set(name.lastOrNull()!!, data)
-            }
-        }
-        launch {
-            updates.emit(name)
-        }
-    }
-
-    override fun meta(name: Name, meta: Meta) {
-        val item = getItem(name)
-        if (item is DataTreeItem.Leaf) error("TODO: Can't change meta of existing leaf item.")
-        data(name + DataTree.META_ITEM_NAME_TOKEN, Data.empty(meta))
-    }
-}
-
-/**
- * Create a dynamic [DataSource]. Initial data is placed synchronously.
- */
-@DFInternal
-@Suppress("FunctionName")
-public fun <T : Any> DataSource(
-    type: KType,
-    parent: CoroutineScope,
-    block: DataSourceBuilder<T>.() -> Unit = {},
-): DataTreeBuilder<T> = DataTreeBuilder<T>(type, parent.coroutineContext).apply(block)
-
-@Suppress("OPT_IN_USAGE", "FunctionName")
-public inline fun <reified T : Any> DataSource(
-    parent: CoroutineScope,
-    crossinline block: DataSourceBuilder<T>.() -> Unit = {},
-): DataTreeBuilder<T> = DataSource(typeOf<T>(), parent) { block() }
-
-public inline fun <reified T : Any> DataSourceBuilder<T>.emit(
-    name: Name,
-    parent: CoroutineScope,
-    noinline block: DataSourceBuilder<T>.() -> Unit,
-): Unit = node(name, DataSource(parent, block))
-
-public inline fun <reified T : Any> DataSourceBuilder<T>.emit(
-    name: String,
-    parent: CoroutineScope,
-    noinline block: DataSourceBuilder<T>.() -> Unit,
-): Unit = node(Name.parse(name), DataSource(parent, block))
\ No newline at end of file
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
index 678711c1..95ddbbf7 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
@@ -9,7 +9,7 @@ import kotlin.coroutines.EmptyCoroutineContext
  * Lazy computation result with its dependencies, allowing to start computing dependencies ahead of time
  */
 public interface Goal<out T> {
-    public val dependencies: Collection<Goal<*>>
+    public val dependencies: Iterable<Goal<*>>
 
     /**
      * Returns current running coroutine if the goal is started. Null if the computation is not started.
@@ -54,7 +54,7 @@ public open class StaticGoal<T>(public val value: T) : Goal<T> {
  */
 public open class LazyGoal<T>(
     private val coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    override val dependencies: Collection<Goal<*>> = emptyList(),
+    override val dependencies: Iterable<Goal<*>> = emptyList(),
     public val block: suspend () -> T,
 ) : Goal<T> {
 
@@ -82,8 +82,8 @@ public open class LazyGoal<T>(
         }
 
         log?.emit { "Starting dependencies computation for ${this@LazyGoal}" }
-        val startedDependencies = this.dependencies.map { goal ->
-            goal.run { async(coroutineScope) }
+        val startedDependencies = dependencies.map { goal ->
+            goal.async(coroutineScope)
         }
         return deferred ?: coroutineScope.async(
             coroutineContext
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
index 189087a3..27d27507 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
@@ -15,13 +15,11 @@
  */
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.launch
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.string
-import space.kscience.dataforge.misc.DFInternal
 
 public interface GroupRule {
-    public fun <T : Any> gather(set: DataSet<T>): Map<String, DataSet<T>>
+    public fun <T : Any> gather(set: DataTree<T>): Map<String, DataTree<T>>
 
     public companion object {
         /**
@@ -32,46 +30,23 @@ public interface GroupRule {
          * @param defaultTagValue
          * @return
          */
-        @OptIn(DFInternal::class)
         public fun byMetaValue(
             key: String,
             defaultTagValue: String,
         ): GroupRule = object : GroupRule {
 
             override fun <T : Any> gather(
-                set: DataSet<T>,
-            ): Map<String, DataSet<T>> {
-                val map = HashMap<String, DataSet<T>>()
+                set: DataTree<T>,
+            ): Map<String, DataTree<T>> {
+                val map = HashMap<String, DataTreeBuilder<T>>()
 
-                if (set is DataSource) {
-                    set.forEach { data ->
-                        val tagValue: String = data.meta[key]?.string ?: defaultTagValue
-                        (map.getOrPut(tagValue) { DataTreeBuilder(set.dataType, set.coroutineContext) } as DataTreeBuilder<T>)
-                            .data(data.name, data.data)
-
-                        set.launch {
-                            set.updates.collect { name ->
-                                val dataUpdate = set[name]
-
-                                val updateTagValue = dataUpdate?.meta?.get(key)?.string ?: defaultTagValue
-                                map.getOrPut(updateTagValue) {
-                                    DataSource(set.dataType, this) {
-                                        data(name, dataUpdate)
-                                    }
-                                }
-                            }
-                        }
-                    }
-                } else {
-                    set.forEach { data ->
-                        val tagValue: String = data.meta[key]?.string ?: defaultTagValue
-                        (map.getOrPut(tagValue) { StaticDataTree(set.dataType) } as StaticDataTree<T>)
-                            .data(data.name, data.data)
-                    }
+                set.forEach { data ->
+                    val tagValue: String = data.meta[key]?.string ?: defaultTagValue
+                    map.getOrPut(tagValue) { DataTreeBuilder(set.dataType) }.emit(data.name, data.data)
                 }
 
 
-                return map
+                return map.mapValues { it.value.build() }
             }
         }
     }
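
A rough usage sketch for the rewritten rule; the `kind` key and the tree content are invented, and `static` with a meta block comes from the new dataBuilders.kt added by this same patch:

    import space.kscience.dataforge.data.*

    fun groupRuleSketch() {
        val measurements = DataTree<Int> {
            static("a", 1) { "kind" put "odd" }
            static("b", 2) { "kind" put "even" }
            static("c", 3) { "kind" put "odd" }
        }

        // group the tree by the value of the "kind" meta key
        val groups: Map<String, DataTree<Int>> =
            GroupRule.byMetaValue("kind", "unknown").gather(measurements)
        // groups["odd"] contains "a" and "c"; groups["even"] contains "b"
    }
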
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
index 4c9d4bb3..26ad2dfb 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
@@ -4,7 +4,7 @@ import space.kscience.dataforge.meta.isEmpty
 import space.kscience.dataforge.misc.Named
 import space.kscience.dataforge.names.Name
 
-public interface NamedData<out T : Any> : Named, Data<T> {
+public interface NamedData<T> : Named, Data<T> {
     override val name: Name
     public val data: Data<T>
 }
@@ -12,7 +12,7 @@ public interface NamedData<out T : Any> : Named, Data<T> {
 public operator fun NamedData<*>.component1(): Name = name
 public operator fun <T: Any> NamedData<T>.component2(): Data<T> = data
 
-private class NamedDataImpl<out T : Any>(
+private class NamedDataImpl<T>(
     override val name: Name,
     override val data: Data<T>,
 ) : Data<T> by data, NamedData<T> {
@@ -28,7 +28,7 @@ private class NamedDataImpl<out T : Any>(
     }
 }
 
-public fun <T : Any> Data<T>.named(name: Name): NamedData<T> = if (this is NamedData) {
+public fun <T> Data<T>.named(name: Name): NamedData<T> = if (this is NamedData) {
     NamedDataImpl(name, this.data)
 } else {
     NamedDataImpl(name, this)
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataTree.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataTree.kt
deleted file mode 100644
index 186e5c2a..00000000
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataTree.kt
+++ /dev/null
@@ -1,80 +0,0 @@
-package space.kscience.dataforge.data
-
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.*
-import kotlin.reflect.KType
-import kotlin.reflect.typeOf
-
-@PublishedApi
-internal class StaticDataTree<T : Any>(
-    override val dataType: KType,
-) : DataSetBuilder<T>, DataTree<T> {
-
-    private val _items: MutableMap<NameToken, DataTreeItem<T>> = HashMap()
-
-    override val items: Map<NameToken, DataTreeItem<T>>
-        get() = _items.filter { !it.key.body.startsWith("@") }
-
-    override fun remove(name: Name) {
-        when (name.length) {
-            0 -> error("Can't remove root tree node")
-            1 -> _items.remove(name.firstOrNull()!!)
-            else -> (_items[name.firstOrNull()!!].tree as? StaticDataTree<T>)?.remove(name.cutFirst())
-        }
-    }
-
-    private fun getOrCreateNode(name: Name): StaticDataTree<T> = when (name.length) {
-        0 -> this
-        1 -> {
-            val itemName = name.firstOrNull()!!
-            (_items[itemName].tree as? StaticDataTree<T>) ?: StaticDataTree<T>(dataType).also {
-                _items[itemName] = DataTreeItem.Node(it)
-            }
-        }
-        else -> getOrCreateNode(name.cutLast()).getOrCreateNode(name.lastOrNull()!!.asName())
-    }
-
-    private fun set(name: Name, item: DataTreeItem<T>?) {
-        if (name.isEmpty()) error("Can't set top level tree node")
-        if (item == null) {
-            remove(name)
-        } else {
-            getOrCreateNode(name.cutLast())._items[name.lastOrNull()!!] = item
-        }
-    }
-
-    override fun data(name: Name, data: Data<T>?) {
-        set(name, data?.let { DataTreeItem.Leaf(it) })
-    }
-
-    override fun node(name: Name, dataSet: DataSet<T>) {
-        if (dataSet is StaticDataTree) {
-            set(name, DataTreeItem.Node(dataSet))
-        } else {
-            dataSet.forEach {
-                data(name + it.name, it.data)
-            }
-        }
-    }
-
-    override fun meta(name: Name, meta: Meta) {
-        val item = getItem(name)
-        if (item is DataTreeItem.Leaf) TODO("Can't change meta of existing leaf item.")
-        data(name + DataTree.META_ITEM_NAME_TOKEN, Data.empty(meta))
-    }
-}
-
-public inline fun <T : Any> DataTree(
-    dataType: KType,
-    block: DataSetBuilder<T>.() -> Unit,
-): DataTree<T> = StaticDataTree<T>(dataType).apply { block() }
-
-public inline fun <reified T : Any> DataTree(
-    noinline block: DataSetBuilder<T>.() -> Unit,
-): DataTree<T> = DataTree(typeOf<T>(), block)
-
-@OptIn(DFExperimental::class)
-public fun <T : Any> DataSet<T>.seal(): DataTree<T> = DataTree(dataType) {
-    populateFrom(this@seal)
-}
\ No newline at end of file
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
new file mode 100644
index 00000000..7eecad8f
--- /dev/null
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
@@ -0,0 +1,121 @@
+package space.kscience.dataforge.data
+
+import kotlinx.coroutines.Job
+import kotlinx.coroutines.flow.launchIn
+import kotlinx.coroutines.flow.onEach
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.MutableMeta
+import space.kscience.dataforge.misc.DFExperimental
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.isEmpty
+import space.kscience.dataforge.names.plus
+
+
+public fun <T> DataSink<T>.emit(value: NamedData<T>) {
+    emit(value.name, value.data)
+}
+
+public fun <T> DataSink<T>.emitAll(sequence: Sequence<NamedData<T>>) {
+    sequence.forEach { emit(it) }
+}
+
+public fun <T> DataSink<T>.emitAll(dataTree: DataTree<T>) {
+    emitAll(dataTree.asSequence())
+}
+
+public inline fun <T> DataSink<T>.emitAll(
+    prefix: Name,
+    block: DataSink<T>.() -> Unit,
+) {
+    if (prefix.isEmpty()) {
+        apply(block)
+    } else {
+        val proxyDataSink = DataSink { nameWithoutPrefix, data ->
+            this.emit(prefix + nameWithoutPrefix, data)
+        }
+
+        proxyDataSink.apply(block)
+    }
+}
+
+
+public fun <T> DataSink<T>.emit(name: String, value: Data<T>) {
+    emit(Name.parse(name), value)
+}
+
+public fun <T> DataSink<T>.emitAll(name: Name, set: DataTree<T>) {
+    emitAll(name) { emitAll(set.asSequence()) }
+}
+
+public fun <T> DataSink<T>.emitAll(name: String, set: DataTree<T>) {
+    emitAll(Name.parse(name)) { emitAll(set.asSequence()) }
+}
+
+/**
+ * Produce lazy [Data] and emit it into the [DataSink]
+ */
+public inline fun <reified T> DataSink<T>.produce(
+    name: String,
+    meta: Meta = Meta.EMPTY,
+    noinline producer: suspend () -> T,
+) {
+    val data = Data(meta, block = producer)
+    emit(name, data)
+}
+
+public inline fun <reified T> DataSink<T>.produce(
+    name: Name,
+    meta: Meta = Meta.EMPTY,
+    noinline producer: suspend () -> T,
+) {
+    val data = Data(meta, block = producer)
+    emit(name, data)
+}
+
+/**
+ * Emit static data with a fixed value
+ */
+public inline fun <reified T> DataSink<T>.static(
+    name: String,
+    data: T,
+    meta: Meta = Meta.EMPTY,
+): Unit = emit(name, Data.static(data, meta))
+
+public inline fun <reified T> DataSink<T>.static(
+    name: Name,
+    data: T,
+    meta: Meta = Meta.EMPTY,
+): Unit = emit(name, Data.static(data, meta))
+
+public inline fun <reified T> DataSink<T>.static(
+    name: String,
+    data: T,
+    mutableMeta: MutableMeta.() -> Unit,
+): Unit = emit(Name.parse(name), Data.static(data, Meta(mutableMeta)))
+
+
+public fun <T> DataSink<T>.populateFrom(sequence: Sequence<NamedData<T>>) {
+    sequence.forEach {
+        emit(it.name, it.data)
+    }
+}
+
+public fun <T> DataSink<T>.populateFrom(tree: DataTree<T>) {
+    populateFrom(tree.asSequence())
+}
+
+
+/**
+ * Launch a job that copies updates from the given observable source into this tree.
+ */
+@DFExperimental
+public fun <T> MutableDataTree<T>.populateFrom(flow: ObservableDataSource<T>): Job = flow.updates().onEach {
+    //TODO check if the place is occupied
+    emit(it.name, it.data)
+}.launchIn(scope)
+
+//public fun <T > DataSetBuilder<T>.populateFrom(flow: Flow<NamedData<T>>) {
+//    flow.collect {
+//        data(it.name, it.data)
+//    }
+//}
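
A brief sketch of how these sink helpers compose; all names and values are illustrative:

    import space.kscience.dataforge.data.*
    import space.kscience.dataforge.names.Name

    fun sinkSketch() {
        val calibration = DataTree<Double> {
            static("offset", 0.5)
            static("scale", 2.0)
        }

        val combined = DataTree<Double> {
            // nest a block of emissions under a common prefix
            emitAll(Name.parse("raw")) {
                produce("x") { 1.0 } // lazy data, computed on first await
                static("y", 3.0)
            }
            // copy an existing tree under another branch
            emitAll("calibration", calibration)
        }
        // combined now holds raw.x, raw.y, calibration.offset and calibration.scale
    }
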
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataFilter.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataFilter.kt
deleted file mode 100644
index 3d2b6537..00000000
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataFilter.kt
+++ /dev/null
@@ -1,105 +0,0 @@
-package space.kscience.dataforge.data
-
-import kotlinx.coroutines.flow.Flow
-import kotlinx.coroutines.flow.filter
-import kotlinx.coroutines.flow.map
-import kotlinx.coroutines.flow.mapNotNull
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.*
-import kotlin.coroutines.CoroutineContext
-import kotlin.coroutines.EmptyCoroutineContext
-import kotlin.reflect.KType
-
-
-/**
- * A stateless filtered [DataSet]
- */
-public fun <T : Any> DataSet<T>.filter(
-    predicate: (Name, Meta) -> Boolean,
-): DataSource<T> = object : DataSource<T> {
-
-    override val dataType: KType get() = this@filter.dataType
-
-    override val coroutineContext: CoroutineContext
-        get() = (this@filter as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
-
-
-    override val meta: Meta get() = this@filter.meta
-
-    override fun iterator(): Iterator<NamedData<T>> = iterator {
-        for (d in this@filter) {
-            if (predicate(d.name, d.meta)) {
-                yield(d)
-            }
-        }
-    }
-
-    override fun get(name: Name): Data<T>? = this@filter.get(name)?.takeIf {
-        predicate(name, it.meta)
-    }
-
-    override val updates: Flow<Name> = this@filter.updates.filter flowFilter@{ name ->
-        val theData = this@filter[name] ?: return@flowFilter false
-        predicate(name, theData.meta)
-    }
-}
-
-/**
- * Generate a wrapper data set with a given name prefix appended to all names
- */
-public fun <T : Any> DataSet<T>.withNamePrefix(prefix: Name): DataSet<T> = if (prefix.isEmpty()) {
-    this
-} else object : DataSource<T> {
-
-    override val dataType: KType get() = this@withNamePrefix.dataType
-
-    override val coroutineContext: CoroutineContext
-        get() = (this@withNamePrefix as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
-
-    override val meta: Meta get() = this@withNamePrefix.meta
-
-
-    override fun iterator(): Iterator<NamedData<T>> = iterator {
-        for (d in this@withNamePrefix) {
-            yield(d.data.named(prefix + d.name))
-        }
-    }
-
-    override fun get(name: Name): Data<T>? =
-        name.removeFirstOrNull(name)?.let { this@withNamePrefix.get(it) }
-
-    override val updates: Flow<Name> get() = this@withNamePrefix.updates.map { prefix + it }
-}
-
-/**
- * Get a subset of data starting with a given [branchName]
- */
-public fun <T : Any> DataSet<T>.branch(branchName: Name): DataSet<T> = if (branchName.isEmpty()) {
-    this
-} else object : DataSource<T> {
-    override val dataType: KType get() = this@branch.dataType
-
-    override val coroutineContext: CoroutineContext
-        get() = (this@branch as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
-
-    override val meta: Meta get() = this@branch.meta
-
-    override fun iterator(): Iterator<NamedData<T>> = iterator {
-        for (d in this@branch) {
-            d.name.removeFirstOrNull(branchName)?.let { name ->
-                yield(d.data.named(name))
-            }
-        }
-    }
-
-    override fun get(name: Name): Data<T>? = this@branch.get(branchName + name)
-
-    override val updates: Flow<Name> get() = this@branch.updates.mapNotNull { it.removeFirstOrNull(branchName) }
-}
-
-public fun <T : Any> DataSet<T>.branch(branchName: String): DataSet<T> = this@branch.branch(branchName.parseAsName())
-
-@DFExperimental
-public suspend fun <T : Any> DataSet<T>.rootData(): Data<T>? = get(Name.EMPTY)
-
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
index c1ecdc09..3afc716a 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
@@ -11,11 +11,11 @@ import kotlin.reflect.typeOf
 
 public data class ValueWithMeta<T>(val meta: Meta, val value: T)
 
-public suspend fun <T : Any> Data<T>.awaitWithMeta(): ValueWithMeta<T> = ValueWithMeta(meta, await())
+public suspend fun <T> Data<T>.awaitWithMeta(): ValueWithMeta<T> = ValueWithMeta(meta, await())
 
 public data class NamedValueWithMeta<T>(val name: Name, val meta: Meta, val value: T)
 
-public suspend fun <T : Any> NamedData<T>.awaitWithMeta(): NamedValueWithMeta<T> =
+public suspend fun <T> NamedData<T>.awaitWithMeta(): NamedValueWithMeta<T> =
     NamedValueWithMeta(name, meta, await())
 
 
@@ -25,7 +25,7 @@ public suspend fun <T : Any> NamedData<T>.awaitWithMeta(): NamedValueWithMeta<T>
  * @param meta for the resulting data. By default, equals the meta of the input data.
  * @param block the transformation itself
  */
-public inline fun <T : Any, reified R : Any> Data<T>.map(
+public inline fun <T : Any, reified R : Any> Data<T>.transform(
     meta: Meta = this.meta,
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
     crossinline block: suspend (T) -> R,
@@ -58,20 +58,7 @@ internal fun Iterable<Data<*>>.joinMeta(): Meta = Meta {
     }
 }
 
-/**
- * Lazily reduce a collection of [Data] to a single data.
- */
-public inline fun <T : Any, reified R : Any> Collection<Data<T>>.reduceToData(
-    meta: Meta = joinMeta(),
-    coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    crossinline block: suspend (List<ValueWithMeta<T>>) -> R,
-): Data<R> = Data(
-    meta,
-    coroutineContext,
-    this
-) {
-    block(map { it.awaitWithMeta() })
-}
+
 
 @PublishedApi
 internal fun Map<*, Data<*>>.joinMeta(): Meta = Meta {
@@ -195,53 +182,52 @@ public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.foldNamedToD
 //DataSet operations
 
 @DFInternal
-public suspend fun <T : Any, R : Any> DataSet<T>.map(
+public suspend fun <T, R> DataTree<T>.transform(
     outputType: KType,
     metaTransform: MutableMeta.() -> Unit = {},
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
     block: suspend (NamedValueWithMeta<T>) -> R,
-): DataTree<R> = DataTree<R>(outputType) {
-    forEach {
-        val newMeta = it.meta.toMutableMeta().apply(metaTransform).seal()
-        val d = Data(outputType, newMeta, coroutineContext, listOf(it)) {
-            block(it.awaitWithMeta())
+): DataTree<R> = DataTree<R>(outputType) {
+    //quasi-synchronous processing of elements in the tree
+    asSequence().forEach { namedData: NamedData<T> ->
+        val newMeta = namedData.meta.toMutableMeta().apply(metaTransform).seal()
+        val d = Data(outputType, newMeta, coroutineContext, listOf(namedData)) {
+            block(namedData.awaitWithMeta())
         }
-        data(it.name, d)
+        emit(namedData.name, d)
     }
 }
 
 @OptIn(DFInternal::class)
-public suspend inline fun <T : Any, reified R : Any> DataSet<T>.map(
+public suspend inline fun <T : Any, reified R : Any> DataTree<T>.transform(
     noinline metaTransform: MutableMeta.() -> Unit = {},
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
     noinline block: suspend (NamedValueWithMeta<T>) -> R,
-): DataTree<R> = map(typeOf<R>(), metaTransform, coroutineContext, block)
+): DataTree<R> = this@transform.transform(typeOf<R>(), metaTransform, coroutineContext, block)
 
-public inline fun <T : Any> DataSet<T>.forEach(block: (NamedData<T>) -> Unit) {
-    for (d in this) {
-        block(d)
-    }
+public inline fun <T : Any> DataTree<T>.forEach(block: (NamedData<T>) -> Unit) {
+    asSequence().forEach(block)
 }
 
 // DataSet reduction
 
 @PublishedApi
-internal fun DataSet<*>.joinMeta(): Meta = Meta {
-    forEach { (key, data) ->
-        val token = NameToken("data", key.toString())
-        set(token, data.meta)
+internal fun DataTree<*>.joinMeta(): Meta = Meta {
+    asSequence().forEach {
+        val token = NameToken("data", it.name.toString())
+        set(token, it.meta)
     }
 }
 
-public inline fun <T : Any, reified R : Any> DataSet<T>.reduceToData(
+public inline fun <T : Any, reified R : Any> DataTree<T>.reduceToData(
     meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
     crossinline transformation: suspend (Iterable<NamedValueWithMeta<T>>) -> R,
-): Data<R> = asIterable().reduceNamedToData(meta, coroutineContext, transformation)
+): Data<R> = asSequence().asIterable().reduceNamedToData(meta, coroutineContext, transformation)
 
-public inline fun <T : Any, reified R : Any> DataSet<T>.foldToData(
+public inline fun <T : Any, reified R : Any> DataTree<T>.foldToData(
     initial: R,
     meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
     crossinline block: suspend (result: R, data: NamedValueWithMeta<T>) -> R,
-): Data<R> = asIterable().foldNamedToData(initial, meta, coroutineContext, block)
\ No newline at end of file
+): Data<R> = asSequence().asIterable().foldNamedToData(initial, meta, coroutineContext, block)
\ No newline at end of file
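
Roughly, the renamed tree operations can be used as below; this is a sketch with made-up names, not part of the patch:

    import kotlinx.coroutines.runBlocking
    import space.kscience.dataforge.data.*

    fun transformSketch() = runBlocking {
        val numbers = DataTree<Int> {
            static("a", 1)
            static("b", 2)
        }

        // lazily map every element into a new tree
        val doubled: DataTree<Int> = numbers.transform { it.value * 2 }

        // lazily fold the whole tree into a single Data
        val sum: Data<Int> = numbers.foldToData(0) { acc, item -> acc + item.value }

        println(doubled["a"]?.await()) // 2
        println(sum.await())           // 3
    }
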
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
index 74d67d9d..aa5c500a 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
@@ -1,12 +1,10 @@
 package space.kscience.dataforge.data
 
+import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.filter
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DFExperimental
+import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
-import kotlin.coroutines.CoroutineContext
-import kotlin.coroutines.EmptyCoroutineContext
 import kotlin.reflect.KType
 import kotlin.reflect.full.isSubtypeOf
 import kotlin.reflect.typeOf
@@ -25,61 +23,65 @@ private fun <R : Any> Data<*>.castOrNull(type: KType): Data<R>? =
         }
     }
 
+@Suppress("UNCHECKED_CAST")
+@DFInternal
+public fun <R> Sequence<NamedData<*>>.filterByDataType(type: KType): Sequence<NamedData<R>> =
+    filter { it.type.isSubtypeOf(type) } as Sequence<NamedData<R>>
+
+@Suppress("UNCHECKED_CAST")
+@DFInternal
+public fun <R> Flow<NamedData<*>>.filterByDataType(type: KType): Flow<NamedData<R>> =
+    filter { it.type.isSubtypeOf(type) } as Flow<NamedData<R>>
+
 /**
  * Select all data matching given type and filters. Does not modify paths
  *
- * @param predicate addition filtering condition based on item name and meta. By default, accepts all
+ * @param predicate additional filtering condition based on item name and meta. By default, accepts all
  */
-@OptIn(DFExperimental::class)
-public fun <R : Any> DataSet<*>.filterByType(
+@DFInternal
+public fun <R> DataTree<*>.filterByType(
     type: KType,
-    predicate: (name: Name, meta: Meta) -> Boolean = { _, _ -> true },
-): DataSource<R> = object : DataSource<R> {
-    override val dataType = type
-
-    override val coroutineContext: CoroutineContext
-        get() = (this@filterByType as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
-
-    override val meta: Meta get() = this@filterByType.meta
-
-    private fun checkDatum(name: Name, datum: Data<*>): Boolean = datum.type.isSubtypeOf(type)
-            && predicate(name, datum.meta)
-
-    override fun iterator(): Iterator<NamedData<R>> = iterator {
-        for(d in this@filterByType){
-            if(checkDatum(d.name,d.data)){
-                @Suppress("UNCHECKED_CAST")
-                yield(d as NamedData<R>)
-            }
-        }
-    }
-
-    override fun get(name: Name): Data<R>? = this@filterByType[name]?.let { datum ->
-        if (checkDatum(name, datum)) datum.castOrNull(type) else null
-    }
-
-    override val updates: Flow<Name> = this@filterByType.updates.filter { name ->
-        get(name)?.let { datum ->
-            checkDatum(name, datum)
-        } ?: false
-    }
-}
+    predicate: DataFilter = DataFilter.EMPTY,
+): DataTree<R> = asSequence().filterByDataType<R>(type).filterData(predicate).toTree(type)
 
 /**
  * Select all data of the appropriate type
  */
-public inline fun <reified R : Any> DataSet<*>.filterByType(
-    noinline predicate: (name: Name, meta: Meta) -> Boolean = { _, _ -> true },
-): DataSet<R> = filterByType(typeOf<R>(), predicate)
+@OptIn(DFInternal::class)
+public inline fun <reified R : Any> DataTree<*>.filterByType(
+    predicate: DataFilter = DataFilter.EMPTY,
+): DataTree<R> = filterByType(typeOf<R>(), predicate)
 
 /**
  * Select a single datum if it is present and of given [type]
  */
-public fun <R : Any> DataSet<*>.getByType(type: KType, name: Name): NamedData<R>? =
+public fun <R : Any> DataTree<*>.getByType(type: KType, name: Name): NamedData<R>? =
     get(name)?.castOrNull<R>(type)?.named(name)
 
-public inline fun <reified R : Any> DataSet<*>.getByType(name: Name): NamedData<R>? =
+public inline fun <reified R : Any> DataTree<*>.getByType(name: Name): NamedData<R>? =
     this@getByType.getByType(typeOf<R>(), name)
 
-public inline fun <reified R : Any> DataSet<*>.getByType(name: String): NamedData<R>? =
-    this@getByType.getByType(typeOf<R>(), Name.parse(name))
\ No newline at end of file
+public inline fun <reified R : Any> DataTree<*>.getByType(name: String): NamedData<R>? =
+    this@getByType.getByType(typeOf<R>(), Name.parse(name))
+
+/**
+ * Select all data matching given type and filters. Does not modify paths
+ *
+ * @param predicate additional filtering condition based on item name and meta. By default, accepts all
+ */
+@DFInternal
+public fun <R> ObservableDataTree<*>.filterByType(
+    type: KType,
+    scope: CoroutineScope,
+    predicate: DataFilter = DataFilter.EMPTY,
+): ObservableDataTree<R> = asSequence()
+    .filterByDataType<R>(type)
+    .filterData(predicate)
+    .toObservableTree(type, scope, updates().filterByDataType<R>(type).filterData(predicate))
+
+
+@OptIn(DFInternal::class)
+public inline fun <reified R> ObservableDataTree<*>.filterByType(
+    scope: CoroutineScope,
+    predicate: DataFilter = DataFilter.EMPTY,
+): ObservableDataTree<R> = filterByType(typeOf<R>(), scope, predicate)
\ No newline at end of file
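
For illustration, a sketch of type-based selection on the new trees (JVM only, tree content invented):

    import space.kscience.dataforge.data.*

    fun filterSketch() {
        val mixed = DataTree<Any> {
            static("name", "reactor")
            static("power", 42)
        }

        // keep only the Int entries; names are preserved
        val numbers: DataTree<Int> = mixed.filterByType<Int>()
        // numbers contains "power" but not "name"
    }
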
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
index cb222ea0..116ae52d 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
@@ -10,31 +10,35 @@ import space.kscience.dataforge.names.plus
 /**
  * Append data to node
  */
-context(DataSetBuilder<T>) public infix fun <T : Any> String.put(data: Data<T>): Unit =
-    data(Name.parse(this), data)
+context(DataSink<T>)
+public infix fun <T : Any> String.put(data: Data<T>): Unit =
+    emit(Name.parse(this), data)
 
 /**
  * Append node
  */
-context(DataSetBuilder<T>) public infix fun <T : Any> String.put(dataSet: DataSet<T>): Unit =
-    node(Name.parse(this), dataSet)
+context(DataSink<T>)
+public infix fun <T : Any> String.put(dataSet: DataTree<T>): Unit =
+    emitAll(this, dataSet)
 
 /**
  * Build and append node
  */
-context(DataSetBuilder<T>) public infix fun <T : Any> String.put(
-    block: DataSetBuilder<T>.() -> Unit,
-): Unit = node(Name.parse(this), block)
+context(DataSink<T>)
+public infix fun <T : Any> String.put(
+    block: DataSink<T>.() -> Unit,
+): Unit = emitAll(Name.parse(this), block)
 
 /**
- * Copy given data set and mirror its changes to this [DataTreeBuilder] in [this@setAndObserve]. Returns an update [Job]
+ * Copy the given data tree under [name] into this [DataSink] and mirror its subsequent changes. Returns the update [Job]
  */
-context(DataSetBuilder<T>) public fun <T : Any> CoroutineScope.setAndWatch(
+context(DataSink<T>)
+public fun <T : Any> CoroutineScope.setAndWatch(
     name: Name,
-    dataSet: DataSet<T>,
+    dataSet: DataTree<T>,
 ): Job = launch {
-    node(name, dataSet)
-    dataSet.updates.collect { nameInBranch ->
-        data(name + nameInBranch, dataSet.get(nameInBranch))
+    emitAll(name, dataSet)
+    dataSet.updates().collect {
+        emit(name + it.name, it.data)
     }
 }
\ No newline at end of file
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index 06a7e755..17549b6b 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -29,7 +29,7 @@ internal class ActionsTest {
 
     @Test
     fun testDynamicMapAction() = runTest {
-        val data: DataSourceBuilder<Int> = DataSource(this)
+        val data: MutableDataTree<Int> = MutableDataTree(this)
 
         val plusOne = Action.mapping<Int, Int> {
             result { it + 1 }
@@ -44,7 +44,6 @@ internal class ActionsTest {
         delay(20)
 
         assertEquals(2, result["1"]?.await())
-        data.close()
     }
 
 }
\ No newline at end of file
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/LegacyGenericDataTreeBuilderTest.kt
similarity index 87%
rename from dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
rename to dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/LegacyGenericDataTreeBuilderTest.kt
index a4d28eab..a871b340 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/LegacyGenericDataTreeBuilderTest.kt
@@ -7,7 +7,7 @@ import kotlin.test.Test
 import kotlin.test.assertEquals
 
 
-internal class DataTreeBuilderTest {
+internal class LegacyGenericDataTreeBuilderTest {
     @Test
     fun testTreeBuild() = runBlocking {
         val node = DataTree<Any> {
@@ -29,7 +29,7 @@ internal class DataTreeBuilderTest {
     @OptIn(DFExperimental::class)
     @Test
     fun testDataUpdate() = runBlocking {
-        val updateData: DataTree<Any> = DataTree {
+        val updateData = DataTree<Any> {
             "update" put {
                 "a" put Data.static("a")
                 "b" put Data.static("b")
@@ -56,7 +56,7 @@ internal class DataTreeBuilderTest {
         try {
             lateinit var updateJob: Job
             supervisorScope {
-                val subNode = DataSource<Int>(this) {
+                val subNode = ObservableDataTree<Int>(this) {
                     updateJob = launch {
                         repeat(10) {
                             delay(10)
@@ -66,16 +66,16 @@ internal class DataTreeBuilderTest {
                     }
                 }
                 launch {
-                    subNode.updatesWithData.collect {
+                    subNode.updates().collect {
                         println(it)
                     }
                 }
-                val rootNode = DataSource<Int>(this) {
+                val rootNode = ObservableDataTree<Int>(this) {
                     setAndWatch("sub".asName(), subNode)
                 }
 
                 launch {
-                    rootNode.updatesWithData.collect {
+                    rootNode.updates().collect {
                         println(it)
                     }
                 }
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt
index 390a8bf4..54e305c6 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt
@@ -17,11 +17,7 @@ import kotlin.reflect.typeOf
 /**
  * Reader of a custom object from input
  */
-public interface IOReader<out T> {
-    /**
-     * The type of object being read
-     */
-    public val type: KType
+public fun interface IOReader<out T> {
 
     public fun readFrom(source: Source): T
 
@@ -32,7 +28,6 @@ public interface IOReader<out T> {
          * no-op reader for binaries.
          */
         public val binary: IOReader<Binary> = object : IOReader<Binary> {
-            override val type: KType = typeOf<Binary>()
 
             override fun readFrom(source: Source): Binary = source.readByteArray().asBinary()
 
@@ -42,8 +37,6 @@ public interface IOReader<out T> {
 }
 
 public inline fun <reified T> IOReader(crossinline read: Source.() -> T): IOReader<T> = object : IOReader<T> {
-    override val type: KType = typeOf<T>()
-
     override fun readFrom(source: Source): T = source.read()
 }
 
@@ -61,19 +54,19 @@ public fun <T : Any> Source.readWith(format: IOReader<T>): T = format.readFrom(t
 /**
  * Read given binary as an object using given format
  */
-public fun <T : Any> Binary.readWith(format: IOReader<T>): T = read {
+public fun <T> Binary.readWith(format: IOReader<T>): T = read {
     readWith(format)
 }
 
 /**
  * Write an object to the [Sink] with given [format]
  */
-public fun <T : Any> Sink.writeWith(format: IOWriter<T>, obj: T): Unit =
+public fun <T> Sink.writeWith(format: IOWriter<T>, obj: T): Unit =
     format.writeTo(this, obj)
 
 
 @DfType(IO_FORMAT_TYPE)
-public interface IOFormatFactory<T : Any> : Factory<IOFormat<T>>, Named {
+public interface IOFormatFactory<T> : Factory<IOFormat<T>>, Named {
     /**
      * Explicit type for dynamic type checks
      */
@@ -86,7 +79,7 @@ public interface IOFormatFactory<T : Any> : Factory<IOFormat<T>>, Named {
     }
 }
 
-public fun <T : Any> Binary(obj: T, format: IOWriter<T>): Binary = Binary { format.writeTo(this, obj) }
+public fun <T> Binary(obj: T, format: IOWriter<T>): Binary = Binary { format.writeTo(this, obj) }
 
 public object FloatIOFormat : IOFormat<Float>, IOFormatFactory<Float> {
     override fun build(context: Context, meta: Meta): IOFormat<Float> = this
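
Since [IOReader] no longer carries a KType, a custom reader only needs a readFrom implementation. A sketch, assuming the `Binary { ... }` builder used just above and kotlinx-io's readByteArray/writeString extensions; the reader and payload are invented:

    import kotlinx.io.Source
    import kotlinx.io.readByteArray
    import kotlinx.io.writeString
    import space.kscience.dataforge.io.*

    // a reader no longer has to expose a KType; readFrom alone is enough
    val utf8Reader: IOReader<String> = object : IOReader<String> {
        override fun readFrom(source: Source): String = source.readByteArray().decodeToString()
    }

    fun ioSketch() {
        val binary = Binary { writeString("hello") }
        println(binary.readWith(utf8Reader)) // hello
    }
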
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt
index c3248021..0d79da4d 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt
@@ -5,7 +5,6 @@ import space.kscience.dataforge.io.EnvelopeFormatFactory.Companion.ENVELOPE_FORM
 import space.kscience.dataforge.io.IOFormatFactory.Companion.IO_FORMAT_TYPE
 import space.kscience.dataforge.io.MetaFormatFactory.Companion.META_FORMAT_TYPE
 import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.string
 import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
@@ -21,11 +20,11 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
 
     @Suppress("UNCHECKED_CAST")
     @DFInternal
-    public fun <T : Any> resolveIOFormat(type: KType, meta: Meta): IOFormat<T>? =
+    public fun <T> resolveIOFormat(type: KType, meta: Meta): IOFormat<T>? =
         ioFormatFactories.singleOrNull { it.type == type }?.build(context, meta) as? IOFormat<T>
 
     @OptIn(DFInternal::class)
-    public inline fun <reified T : Any> resolveIOFormat(meta: Meta = Meta.EMPTY): IOFormat<T>? =
+    public inline fun <reified T> resolveIOFormat(meta: Meta = Meta.EMPTY): IOFormat<T>? =
         resolveIOFormat(typeOf<T>(), meta)
 
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
index 933cc13d..8959ae4a 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
@@ -6,8 +6,6 @@ import kotlinx.serialization.json.encodeToJsonElement
 import kotlinx.serialization.serializer
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.misc.DFExperimental
-import kotlin.reflect.KType
-import kotlin.reflect.typeOf
 
 
 /**
@@ -15,15 +13,10 @@ import kotlin.reflect.typeOf
  */
 public interface MetaConverter<T>: MetaSpec<T> {
 
-    /**
-     * Runtime type of [T]
-     */
-    public val type: KType
-
     /**
      * A descriptor for resulting meta
      */
-    override val descriptor: MetaDescriptor get() = MetaDescriptor.EMPTY
+    override val descriptor: MetaDescriptor? get() = null
 
     /**
      * Attempt conversion of [source] to an object or return null if conversion failed
@@ -38,22 +31,16 @@ public interface MetaConverter<T>: MetaSpec<T> {
     public companion object {
 
         public val meta: MetaConverter<Meta> = object : MetaConverter<Meta> {
-            override val type: KType = typeOf<Meta>()
-
             override fun readOrNull(source: Meta): Meta = source
             override fun convert(obj: Meta): Meta = obj
         }
 
         public val value: MetaConverter<Value> = object : MetaConverter<Value> {
-            override val type: KType = typeOf<Value>()
-
             override fun readOrNull(source: Meta): Value? = source.value
             override fun convert(obj: Value): Meta = Meta(obj)
         }
 
         public val string: MetaConverter<String> = object : MetaConverter<String> {
-            override val type: KType = typeOf<String>()
-
             override val descriptor: MetaDescriptor = MetaDescriptor {
                 valueType(ValueType.STRING)
             }
@@ -64,8 +51,6 @@ public interface MetaConverter<T>: MetaSpec<T> {
         }
 
         public val boolean: MetaConverter<Boolean> = object : MetaConverter<Boolean> {
-            override val type: KType = typeOf<Boolean>()
-
             override val descriptor: MetaDescriptor = MetaDescriptor {
                 valueType(ValueType.BOOLEAN)
             }
@@ -75,8 +60,6 @@ public interface MetaConverter<T>: MetaSpec<T> {
         }
 
         public val number: MetaConverter<Number> = object : MetaConverter<Number> {
-            override val type: KType = typeOf<Number>()
-
             override val descriptor: MetaDescriptor = MetaDescriptor {
                 valueType(ValueType.NUMBER)
             }
@@ -86,8 +69,6 @@ public interface MetaConverter<T>: MetaSpec<T> {
         }
 
         public val double: MetaConverter<Double> = object : MetaConverter<Double> {
-            override val type: KType = typeOf<Double>()
-
             override val descriptor: MetaDescriptor = MetaDescriptor {
                 valueType(ValueType.NUMBER)
             }
@@ -97,8 +78,6 @@ public interface MetaConverter<T>: MetaSpec<T> {
         }
 
         public val float: MetaConverter<Float> = object : MetaConverter<Float> {
-            override val type: KType = typeOf<Float>()
-
             override val descriptor: MetaDescriptor = MetaDescriptor {
                 valueType(ValueType.NUMBER)
             }
@@ -108,8 +87,6 @@ public interface MetaConverter<T>: MetaSpec<T> {
         }
 
         public val int: MetaConverter<Int> = object : MetaConverter<Int> {
-            override val type: KType = typeOf<Int>()
-
             override val descriptor: MetaDescriptor = MetaDescriptor {
                 valueType(ValueType.NUMBER)
             }
@@ -119,8 +96,6 @@ public interface MetaConverter<T>: MetaSpec<T> {
         }
 
         public val long: MetaConverter<Long> = object : MetaConverter<Long> {
-            override val type: KType = typeOf<Long>()
-
             override val descriptor: MetaDescriptor = MetaDescriptor {
                 valueType(ValueType.NUMBER)
             }
@@ -130,8 +105,6 @@ public interface MetaConverter<T>: MetaSpec<T> {
         }
 
         public inline fun <reified E : Enum<E>> enum(): MetaConverter<E> = object : MetaConverter<E> {
-            override val type: KType = typeOf<E>()
-
             override val descriptor: MetaDescriptor = MetaDescriptor {
                 valueType(ValueType.STRING)
                 allowedValues(enumValues<E>())
@@ -147,8 +120,6 @@ public interface MetaConverter<T>: MetaSpec<T> {
             writer: (T) -> Value = { Value.of(it) },
             reader: (Value) -> T,
         ): MetaConverter<List<T>> = object : MetaConverter<List<T>> {
-            override val type: KType = typeOf<List<T>>()
-
             override val descriptor: MetaDescriptor = MetaDescriptor {
                 valueType(ValueType.LIST)
             }
@@ -165,7 +136,6 @@ public interface MetaConverter<T>: MetaSpec<T> {
         public inline fun <reified T> serializable(
             descriptor: MetaDescriptor? = null,
         ): MetaConverter<T> = object : MetaConverter<T> {
-            override val type: KType = typeOf<T>()
             private val serializer: KSerializer<T> = serializer()
 
             override fun readOrNull(source: Meta): T? {
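Note: with the `type` property removed from `MetaConverter`, a hand-written converter now supplies only `descriptor`, `readOrNull`, and `convert`. A minimal sketch of a wrapper-type converter that delegates to the built-in `string` converter; the `UserId` type and the wildcard import are illustrative assumptions, not part of this patch.

    import space.kscience.dataforge.meta.*
    import space.kscience.dataforge.meta.descriptors.MetaDescriptor

    // Hypothetical domain type used only for this sketch
    data class UserId(val raw: String)

    val userIdConverter: MetaConverter<UserId> = object : MetaConverter<UserId> {
        // no `override val type: KType` is required anymore
        override val descriptor: MetaDescriptor = MetaDescriptor {
            valueType(ValueType.STRING)
        }

        override fun readOrNull(source: Meta): UserId? =
            MetaConverter.string.readOrNull(source)?.let(::UserId)

        override fun convert(obj: UserId): Meta = MetaConverter.string.convert(obj.raw)
    }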
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
index 1d654ac2..2e9edc1d 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
@@ -166,9 +166,9 @@ public inline fun <T : Scheme> T.copy(spec: SchemeSpec<T>, block: T.() -> Unit =
 /**
  * A specification for simplified generation of wrappers
  */
-public open class SchemeSpec<out T : Scheme>(
+public open class SchemeSpec<T : Scheme>(
     private val builder: () -> T,
-) : MetaSpec<T> {
+) : MetaConverter<T> {
 
     override val descriptor: MetaDescriptor? get() = null
 
@@ -187,6 +187,8 @@ public open class SchemeSpec<out T : Scheme>(
         it.initialize(MutableMeta(), Meta.EMPTY, descriptor)
     }
 
+    override fun convert(obj: T): Meta  = obj.meta
+
     /**
      * A convenience method to use specifications in builders
      */
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt
index 5e4b98a8..b46a3507 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt
@@ -113,6 +113,13 @@ public class Name(public val tokens: List<NameToken>) {
     }
 }
 
+/**
+ * Transform this [Name] to a string without escaping special characters in tokens.
+ *
+ * Parsing it back will produce a valid but different name.

+ */
+public fun Name.toStringUnescaped(): String = tokens.joinToString(separator = Name.NAME_SEPARATOR) { it.toStringUnescaped() }
+
 public operator fun Name.get(i: Int): NameToken = tokens[i]
 
 /**
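Note: a short illustration of the new extension. `toStringUnescaped` drops token escaping, so a token that itself contains the separator character re-parses into a deeper name; the token value below is a made-up example.

    import space.kscience.dataforge.names.*

    fun main() {
        val name = NameToken("a.b").asName() + "c"  // two tokens: `a.b` and `c`
        println(name)                     // escaped rendering, intended to round-trip through Name.parse
        println(name.toStringUnescaped()) // prints "a.b.c", which Name.parse reads as three tokens
    }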
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/EnvelopeTask.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/EnvelopeTask.kt
deleted file mode 100644
index a1588a54..00000000
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/EnvelopeTask.kt
+++ /dev/null
@@ -1,46 +0,0 @@
-package space.kscience.dataforge.workspace
-
-import space.kscience.dataforge.data.DataTree.Companion.META_ITEM_NAME_TOKEN
-import space.kscience.dataforge.io.Envelope
-import space.kscience.dataforge.io.IOReader
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.descriptors.MetaDescriptor
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.asName
-import kotlin.reflect.KType
-
-public abstract class EnvelopeTask<T : Any>(
-    override val descriptor: MetaDescriptor?,
-    private val reader: IOReader<T>,
-) : Task<T> {
-
-    public abstract suspend fun produceEnvelopes(
-        workspace: Workspace,
-        taskName: Name,
-        taskMeta: Meta,
-    ): Map<Name, Envelope>
-
-    override suspend fun execute(workspace: Workspace, taskName: Name, taskMeta: Meta): TaskResult<T> =
-        Result(workspace, taskName, taskMeta, reader, produceEnvelopes(workspace, taskName, taskMeta))
-
-    private class Result<T : Any>(
-        override val workspace: Workspace,
-        override val taskName: Name,
-        override val taskMeta: Meta,
-        val reader: IOReader<T>,
-        envelopes: Map<Name, Envelope>,
-    ) : TaskResult<T> {
-
-        private val dataMap = envelopes.mapValues {
-            workspace.wrapData(it.value.toData(reader), it.key, taskName, taskMeta)
-        }
-        override val meta: Meta get() = dataMap[META_ITEM_NAME_TOKEN.asName()]?.meta ?: Meta.EMPTY
-
-        override val dataType: KType get() = reader.type
-
-        override fun iterator(): Iterator<TaskData<T>> = dataMap.values.iterator()
-
-        override fun get(name: Name): TaskData<T>? = dataMap[name]
-    }
-}
-
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
index 19d16c68..af2520eb 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
@@ -1,9 +1,9 @@
 package space.kscience.dataforge.workspace
 
 import kotlinx.coroutines.withContext
-import space.kscience.dataforge.data.DataSetBuilder
-import space.kscience.dataforge.data.DataTree
+import space.kscience.dataforge.data.DataSink
 import space.kscience.dataforge.data.GoalExecutionRestriction
+import space.kscience.dataforge.data.MutableDataTree
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MetaRepr
 import space.kscience.dataforge.meta.MetaSpec
@@ -20,7 +20,7 @@ import kotlin.reflect.typeOf
  * In general no computations should be made until the result is called.
  */
 @DfType(TYPE)
-public interface Task<out T : Any> : Described {
+public interface Task<T> : Described {
 
     /**
      * A task identification string used to compare tasks and check task body for change
@@ -45,7 +45,7 @@ public interface Task<out T : Any> : Described {
 /**
  * A [Task] with [MetaSpec] for wrapping and unwrapping task configuration
  */
-public interface TaskWithSpec<out T : Any, C : Any> : Task<T> {
+public interface TaskWithSpec<T, C : Any> : Task<T> {
     public val spec: MetaSpec<C>
     override val descriptor: MetaDescriptor? get() = spec.descriptor
 
@@ -61,12 +61,12 @@ public interface TaskWithSpec<out T : Any, C : Any> : Task<T> {
 //    block: C.() -> Unit = {},
 //): TaskResult<T> = execute(workspace, taskName, spec(block))
 
-public class TaskResultBuilder<in T : Any>(
+public class TaskResultBuilder<T>(
     public val workspace: Workspace,
     public val taskName: Name,
     public val taskMeta: Meta,
-    private val dataDrop: DataSetBuilder<T>,
-) : DataSetBuilder<T> by dataDrop
+    private val dataSink: DataSink<T>,
+) : DataSink<T> by dataSink
 
 /**
  * Create a [Task] that composes a result using [builder]. Only data from the workspace could be used.
@@ -90,7 +90,7 @@ public fun <T : Any> Task(
         taskMeta: Meta,
     ): TaskResult<T> = withContext(GoalExecutionRestriction() + workspace.goalLogger) {
         //TODO use safe builder and check for external data on add and detects cycles
-        val dataset = DataTree<T>(resultType) {
+        val dataset = MutableDataTree<T>(resultType, this).apply {
             TaskResultBuilder(workspace, taskName, taskMeta, this).apply { builder() }
         }
         workspace.wrapResult(dataset, taskName, taskMeta)
@@ -111,6 +111,7 @@ public inline fun <reified T : Any> Task(
  * @param specification a specification for task configuration
  * @param builder for resulting data set
  */
+
 @Suppress("FunctionName")
 public fun <T : Any, C : MetaRepr> Task(
     resultType: KType,
@@ -126,7 +127,7 @@ public fun <T : Any, C : MetaRepr> Task(
     ): TaskResult<T> = withContext(GoalExecutionRestriction() + workspace.goalLogger) {
         //TODO use safe builder and check for external data on add and detects cycles
         val taskMeta = configuration.toMeta()
-        val dataset = DataTree<T>(resultType) {
+        val dataset = MutableDataTree<T>(resultType, this).apply {
             TaskResultBuilder(workspace, taskName, taskMeta, this).apply { builder(configuration) }
         }
         workspace.wrapResult(dataset, taskName, taskMeta)
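Note: under the reworked builder a task body writes into a `DataSink` rather than a `DataSetBuilder`. A sketch in the spirit of `DataPropagationTestPlugin` further down in this patch; the plugin tag, the `Int` payload, and the availability of `foldToData` on the filtered tree are assumptions rather than guarantees.

    import space.kscience.dataforge.context.PluginTag
    import space.kscience.dataforge.data.*
    import space.kscience.dataforge.workspace.*

    class SumPlugin : WorkspacePlugin() {
        override val tag: PluginTag = PluginTag("sketch.sum") // hypothetical tag

        val sum by task<Int> {
            // fold every Int datum registered in the workspace into a single result
            val selected = workspace.data.filterByType<Int>()
            val result: Data<Int> = selected.foldToData(0) { acc, data -> acc + data.value }
            emit("result", result) // TaskResultBuilder now delegates to DataSink
        }
    }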
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskData.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskData.kt
deleted file mode 100644
index 080ffec3..00000000
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskData.kt
+++ /dev/null
@@ -1,50 +0,0 @@
-package space.kscience.dataforge.workspace
-
-import space.kscience.dataforge.data.Data
-import space.kscience.dataforge.data.NamedData
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.names.Name
-
-/**
- * A [Workspace]-locked [NamedData], that serves as a computation model.
- */
-public interface TaskData<out T : Any> : NamedData<T> {
-    /**
-     * The [Workspace] this data belongs to
-     */
-    public val workspace: Workspace
-
-    /**
-     * The name of the stage that produced this data. [Name.EMPTY] if the workspace intrinsic data is used.
-     */
-    public val taskName: Name
-
-    /**
-     * Stage configuration used to produce this data.
-     */
-    public val taskMeta: Meta
-
-    /**
-     * Dependencies that allow to compute transitive dependencies as well.
-     */
-//    override val dependencies: Collection<TaskData<*>>
-}
-
-private class TaskDataImpl<out T : Any>(
-    override val workspace: Workspace,
-    override val data: Data<T>,
-    override val name: Name,
-    override val taskName: Name,
-    override val taskMeta: Meta,
-) : TaskData<T>, Data<T> by data {
-//    override val dependencies: Collection<TaskData<*>> = data.dependencies.map {
-//        it as? TaskData<*> ?: error("TaskData can't depend on external data")
-//    }
-}
-
-/**
- * Adopt data into this workspace
- */
-public fun <T : Any> Workspace.wrapData(data: Data<T>, name: Name, taskName: Name, taskMeta: Meta): TaskData<T> =
-    TaskDataImpl(this, data, name, taskName, taskMeta)
-
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
index d8db6417..59cb7461 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
@@ -1,54 +1,27 @@
 package space.kscience.dataforge.workspace
 
-import space.kscience.dataforge.data.DataSet
-import space.kscience.dataforge.data.forEach
+import space.kscience.dataforge.data.ObservableDataTree
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.names.Name
+import kotlin.reflect.KType
 
 /**
  * A result of a [Task]
+ * @param workspace the [Workspace] that produced the result
+ * @param taskName the name of the task that produced the result
+ * @param taskMeta The configuration of the task that produced the result
  */
-public interface TaskResult<out T : Any> : DataSet<T> {
-    /**
-     * The [Workspace] this [DataSet] belongs to
-     */
-    public val workspace: Workspace
-
-    /**
-     * The [Name] of the stage that produced this [DataSet]
-     */
-    public val taskName: Name
-
-    /**
-     * The configuration of the stage that produced this [DataSet]
-     */
-    public val taskMeta: Meta
-
-    override fun iterator(): Iterator<TaskData<T>>
-
-    override fun get(name: Name): TaskData<T>?
-}
-
-private class TaskResultImpl<out T : Any>(
-    override val workspace: Workspace,
-    override val taskName: Name,
-    override val taskMeta: Meta,
-    val dataSet: DataSet<T>,
-) : TaskResult<T>, DataSet<T> by dataSet {
-
-    override fun iterator(): Iterator<TaskData<T>> = iterator {
-        dataSet.forEach {
-            yield(workspace.wrapData(it, it.name, taskName, taskMeta))
-        }
-    }
-
-    override fun get(name: Name): TaskData<T>? = dataSet[name]?.let {
-        workspace.wrapData(it, name, taskName, taskMeta)
-    }
+public data class TaskResult<T>(
+    public val data: ObservableDataTree<T>,
+    public val workspace: Workspace,
+    public val taskName: Name,
+    public val taskMeta: Meta,
+) {
+    val dataType: KType get() = data.dataType
 }
 
 /**
  * Wrap data into [TaskResult]
  */
-public fun <T : Any> Workspace.wrapResult(dataSet: DataSet<T>, taskName: Name, taskMeta: Meta): TaskResult<T> =
-    TaskResultImpl(this, taskName, taskMeta, dataSet)
\ No newline at end of file
+public fun <T> Workspace.wrapResult(data: ObservableDataTree<T>, taskName: Name, taskMeta: Meta): TaskResult<T> =
+    TaskResult(data, this, taskName, taskMeta)
\ No newline at end of file
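Note: since `TaskResult` is now a plain holder, consumers reach the tree through its `data` property, exactly as the reworked `produceData` in Workspace.kt below does. A hypothetical helper for illustration only.

    import space.kscience.dataforge.data.Data
    import space.kscience.dataforge.meta.Meta
    import space.kscience.dataforge.names.Name
    import space.kscience.dataforge.workspace.Workspace

    // Look a single datum up in a task result by going through TaskResult.data.
    suspend fun Workspace.peekResult(taskName: Name, name: Name): Data<*>? =
        produce(taskName, Meta.EMPTY).data[name]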
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
index 37b473db..bc1c19dc 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
@@ -1,29 +1,32 @@
 package space.kscience.dataforge.workspace
 
+import kotlinx.coroutines.CoroutineScope
 import space.kscience.dataforge.context.ContextAware
-import space.kscience.dataforge.data.Data
-import space.kscience.dataforge.data.DataSet
-import space.kscience.dataforge.data.asSequence
+import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
 import space.kscience.dataforge.misc.DfType
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.provider.Provider
+import kotlin.coroutines.CoroutineContext
 
 
-public interface DataSelector<T: Any>{
-    public suspend fun select(workspace: Workspace, meta: Meta): DataSet<T>
+public fun interface DataSelector<T> {
+    public suspend fun select(workspace: Workspace, meta: Meta): DataTree<T>
 }
 
 /**
  * An environment for pull-mode computation
  */
 @DfType(Workspace.TYPE)
-public interface Workspace : ContextAware, Provider {
+public interface Workspace : ContextAware, Provider, CoroutineScope {
+
+    override val coroutineContext: CoroutineContext get() = context.coroutineContext
+
     /**
      * The whole data node for current workspace
      */
-    public val data: TaskResult<*>
+    public val data: ObservableDataTree<*>
 
     /**
      * All targets associated with the workspace
@@ -37,7 +40,7 @@ public interface Workspace : ContextAware, Provider {
 
     override fun content(target: String): Map<Name, Any> {
         return when (target) {
-            "target", Meta.TYPE -> targets.mapKeys { Name.parse(it.key)}
+            "target", Meta.TYPE -> targets.mapKeys { Name.parse(it.key) }
             Task.TYPE -> tasks
             Data.TYPE -> data.asSequence().associateBy { it.name }
             else -> emptyMap()
@@ -49,8 +52,8 @@ public interface Workspace : ContextAware, Provider {
         return task.execute(this, taskName, taskMeta)
     }
 
-    public suspend fun produceData(taskName: Name, taskMeta: Meta, name: Name): TaskData<*>? =
-        produce(taskName, taskMeta)[name]
+    public suspend fun produceData(taskName: Name, taskMeta: Meta, name: Name): Data<*>? =
+        produce(taskName, taskMeta).data[name]
 
     public companion object {
         public const val TYPE: String = "workspace"
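Note: because `DataSelector` is now a `fun interface`, ad-hoc selectors can be written as lambdas, just like the reworked `allData` in taskBuilders.kt below. A sketch of a selector that narrows the workspace data by type; the `Int` payload is an arbitrary choice.

    import space.kscience.dataforge.data.filterByType
    import space.kscience.dataforge.workspace.DataSelector

    // SAM-converted selector: keep only Int data, ignoring the meta argument.
    val intSelector: DataSelector<Int> = DataSelector { workspace, _ ->
        workspace.data.filterByType<Int>()
    }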
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
index 39e35b39..e0b7fad7 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
@@ -5,9 +5,9 @@ import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.ContextBuilder
 import space.kscience.dataforge.context.Global
-import space.kscience.dataforge.data.DataSet
-import space.kscience.dataforge.data.DataSource
-import space.kscience.dataforge.data.DataSourceBuilder
+import space.kscience.dataforge.data.DataSink
+import space.kscience.dataforge.data.DataTree
+import space.kscience.dataforge.data.MutableDataTree
 import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
@@ -17,13 +17,14 @@ import space.kscience.dataforge.names.asName
 import kotlin.collections.set
 import kotlin.properties.PropertyDelegateProvider
 import kotlin.properties.ReadOnlyProperty
+import kotlin.reflect.typeOf
 
 public data class TaskReference<T : Any>(public val taskName: Name, public val task: Task<T>) : DataSelector<T> {
 
     @Suppress("UNCHECKED_CAST")
-    override suspend fun select(workspace: Workspace, meta: Meta): DataSet<T> {
+    override suspend fun select(workspace: Workspace, meta: Meta): DataTree<T> {
         if (workspace.tasks[taskName] == task) {
-            return workspace.produce(taskName, meta) as TaskResult<T>
+            return workspace.produce(taskName, meta).data as DataTree<T>
         } else {
             error("Task $taskName does not belong to the workspace")
         }
@@ -45,7 +46,7 @@ public inline fun <reified T : Any> TaskContainer.registerTask(
 ): Unit = registerTask(Name.parse(name), Task(MetaDescriptor(descriptorBuilder), builder))
 
 /**
- * Create a new t
+ * Create and register a new task
  */
 public inline fun <reified T : Any> TaskContainer.buildTask(
     name: String,
@@ -101,7 +102,7 @@ public inline fun <T : Any, reified R : Any> TaskContainer.action(
     noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
 ): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<R>>> =
     task(MetaDescriptor(descriptorBuilder)) {
-        result(action.execute(from(selector), taskMeta.copy(metaTransform)))
+        result(action.execute(workspace.context, from(selector), taskMeta.copy(metaTransform)))
     }
 
 public class WorkspaceBuilder(
@@ -109,7 +110,7 @@ public class WorkspaceBuilder(
     private val coroutineScope: CoroutineScope = parentContext,
 ) : TaskContainer {
     private var context: Context? = null
-    private val data = DataSource<Any>(coroutineScope)
+    private val data = MutableDataTree<Any?>(typeOf<Any?>(), coroutineScope)
     private val targets: HashMap<String, Meta> = HashMap()
     private val tasks = HashMap<Name, Task<*>>()
     private var cache: WorkspaceCache? = null
@@ -124,7 +125,7 @@ public class WorkspaceBuilder(
     /**
      * Define intrinsic data for the workspace
      */
-    public fun data(builder: DataSourceBuilder<Any>.() -> Unit) {
+    public fun data(builder: DataSink<*>.() -> Unit) {
         data.apply(builder)
     }
 
@@ -149,7 +150,7 @@ public class WorkspaceBuilder(
 
     public fun build(): Workspace {
         val postProcess: suspend (TaskResult<*>) -> TaskResult<*> = { result ->
-            cache?.evaluate(result) ?: result
+            cache?.cache(result) ?: result
         }
         return WorkspaceImpl(context ?: parentContext, data, targets, tasks, postProcess)
     }
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceCache.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceCache.kt
index 62df6744..42cb7b4f 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceCache.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceCache.kt
@@ -1,5 +1,5 @@
 package space.kscience.dataforge.workspace
 
 public interface WorkspaceCache {
-    public suspend fun <T : Any> evaluate(result: TaskResult<T>): TaskResult<T>
+    public suspend fun <T> cache(result: TaskResult<T>): TaskResult<T>
 }
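Note: the renamed contract is small enough to show with a pass-through implementation; this sketch is not part of the patch.

    import space.kscience.dataforge.workspace.TaskResult
    import space.kscience.dataforge.workspace.WorkspaceCache

    // A no-op cache: `cache` receives a TaskResult and returns it unchanged.
    public object NoOpWorkspaceCache : WorkspaceCache {
        override suspend fun <T> cache(result: TaskResult<T>): TaskResult<T> = result
    }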
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceImpl.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceImpl.kt
index dae9667a..21c5e8c2 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceImpl.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceImpl.kt
@@ -2,21 +2,19 @@ package space.kscience.dataforge.workspace
 
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.gather
-import space.kscience.dataforge.data.DataSet
+import space.kscience.dataforge.data.ObservableDataTree
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.names.Name
 
 
 internal class WorkspaceImpl internal constructor(
     override val context: Context,
-    data: DataSet<*>,
+    override val data: ObservableDataTree<*>,
     override val targets: Map<String, Meta>,
     tasks: Map<Name, Task<*>>,
     private val postProcess: suspend (TaskResult<*>) -> TaskResult<*>,
 ) : Workspace {
 
-    override val data: TaskResult<*> = wrapResult(data, Name.EMPTY, Meta.EMPTY)
-
     override val tasks: Map<Name, Task<*>> by lazy { context.gather<Task<*>>(Task.TYPE) + tasks }
 
     override suspend fun produce(taskName: Name, taskMeta: Meta): TaskResult<*> {
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/envelopeData.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/envelopeData.kt
index 39bb0726..d54ff510 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/envelopeData.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/envelopeData.kt
@@ -4,13 +4,14 @@ import space.kscience.dataforge.data.Data
 import space.kscience.dataforge.data.await
 import space.kscience.dataforge.io.*
 import space.kscience.dataforge.misc.DFInternal
+import kotlin.reflect.typeOf
 
 
 /**
  * Convert an [Envelope] to a data via given format. The actual parsing is done lazily.
  */
 @OptIn(DFInternal::class)
-public fun <T : Any> Envelope.toData(format: IOReader<T>): Data<T> = Data(format.type, meta) {
+public inline fun <reified T : Any> Envelope.toData(format: IOReader<T>): Data<T> = Data(typeOf<T>(), meta) {
     data?.readWith(format) ?: error("Can't convert envelope without data to Data")
 }
 
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
index a7c1f0fb..11594c2b 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
@@ -2,9 +2,10 @@ package space.kscience.dataforge.workspace
 
 import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.context.PluginFactory
-import space.kscience.dataforge.data.DataSet
+import space.kscience.dataforge.data.DataTree
+import space.kscience.dataforge.data.emitAll
 import space.kscience.dataforge.data.forEach
-import space.kscience.dataforge.data.map
+import space.kscience.dataforge.data.transform
 import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
@@ -27,19 +28,19 @@ public val TaskResultBuilder<*>.defaultDependencyMeta: Meta
 public suspend fun <T : Any> TaskResultBuilder<*>.from(
     selector: DataSelector<T>,
     dependencyMeta: Meta = defaultDependencyMeta,
-): DataSet<T> = selector.select(workspace, dependencyMeta)
+): DataTree<T> = selector.select(workspace, dependencyMeta)
 
 public suspend inline fun <T : Any, reified P : WorkspacePlugin> TaskResultBuilder<*>.from(
     plugin: P,
     dependencyMeta: Meta = defaultDependencyMeta,
     selectorBuilder: P.() -> TaskReference<T>,
-): DataSet<T> {
+): TaskResult<T> {
     require(workspace.context.plugins.contains(plugin)) { "Plugin $plugin is not loaded into $workspace" }
     val taskReference: TaskReference<T> = plugin.selectorBuilder()
     val res = workspace.produce(plugin.name + taskReference.taskName, dependencyMeta)
     //TODO add explicit check after https://youtrack.jetbrains.com/issue/KT-32956
     @Suppress("UNCHECKED_CAST")
-    return  res as TaskResult<T>
+    return res as TaskResult<T>
 }
 
 /**
@@ -53,7 +54,7 @@ public suspend inline fun <reified T : Any, reified P : WorkspacePlugin> TaskRes
     pluginFactory: PluginFactory<P>,
     dependencyMeta: Meta = defaultDependencyMeta,
     selectorBuilder: P.() -> TaskReference<T>,
-): DataSet<T> {
+): TaskResult<T> {
     val plugin = workspace.context.plugins[pluginFactory]
         ?: error("Plugin ${pluginFactory.tag} not loaded into workspace context")
     val taskReference: TaskReference<T> = plugin.selectorBuilder()
@@ -64,9 +65,7 @@ public suspend inline fun <reified T : Any, reified P : WorkspacePlugin> TaskRes
 }
 
 public val TaskResultBuilder<*>.allData: DataSelector<*>
-    get() = object : DataSelector<Any> {
-        override suspend fun select(workspace: Workspace, meta: Meta): DataSet<Any> = workspace.data
-    }
+    get() = DataSelector { workspace, _ -> workspace.data }
 
 /**
  * Perform a lazy mapping task using given [selector] and one-to-one [action].
@@ -90,19 +89,19 @@ public suspend inline fun <T : Any, reified R : Any> TaskResultBuilder<R>.transf
             dataMetaTransform(data.name)
         }
 
-        val res = data.map(meta, workspace.context.coroutineContext) {
+        val res = data.transform(meta, workspace.context.coroutineContext) {
             action(it, data.name, meta)
         }
 
-        data(data.name, res)
+        emit(data.name, res)
     }
 }
 
 /**
  * Set given [dataSet] as a task result.
  */
-public fun <T : Any> TaskResultBuilder<T>.result(dataSet: DataSet<T>) {
-    node(Name.EMPTY, dataSet)
+public fun <T : Any> TaskResultBuilder<T>.result(dataSet: DataTree<T>) {
+    emitAll(dataSet)
 }
 
 /**
@@ -111,10 +110,10 @@ public fun <T : Any> TaskResultBuilder<T>.result(dataSet: DataSet<T>) {
 @DFExperimental
 public suspend inline fun <T : Any, reified R : Any> TaskResultBuilder<R>.actionFrom(
     selector: DataSelector<T>,
-    action: Action<T,R>,
+    action: Action<T, R>,
     dependencyMeta: Meta = defaultDependencyMeta,
 ) {
-    node(Name.EMPTY, action.execute(from(selector,dependencyMeta), dependencyMeta))
+    emitAll(action.execute(workspace.context, from(selector, dependencyMeta), dependencyMeta))
 }
 
 
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
index 279e61a2..e6ddf026 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
@@ -1,5 +1,6 @@
 package space.kscience.dataforge.workspace
 
+import kotlinx.coroutines.flow.map
 import kotlinx.io.*
 import kotlinx.serialization.ExperimentalSerializationApi
 import kotlinx.serialization.KSerializer
@@ -9,12 +10,10 @@ import kotlinx.serialization.serializer
 import space.kscience.dataforge.context.error
 import space.kscience.dataforge.context.logger
 import space.kscience.dataforge.context.request
-import space.kscience.dataforge.data.Data
-import space.kscience.dataforge.data.await
+import space.kscience.dataforge.data.*
 import space.kscience.dataforge.io.*
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.misc.DFInternal
-import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.withIndex
 import java.nio.file.Path
 import kotlin.io.path.deleteIfExists
@@ -22,7 +21,7 @@ import kotlin.io.path.div
 import kotlin.io.path.exists
 import kotlin.reflect.KType
 
-public class JsonIOFormat<T : Any>(override val type: KType) : IOFormat<T> {
+public class JsonIOFormat<T>(private val type: KType) : IOFormat<T> {
 
     @Suppress("UNCHECKED_CAST")
     private val serializer: KSerializer<T> = serializer(type) as KSerializer<T>
@@ -35,7 +34,7 @@ public class JsonIOFormat<T : Any>(override val type: KType) : IOFormat<T> {
 }
 
 @OptIn(ExperimentalSerializationApi::class)
-public class ProtobufIOFormat<T : Any>(override val type: KType) : IOFormat<T> {
+public class ProtobufIOFormat<T>(private val type: KType) : IOFormat<T> {
 
     @Suppress("UNCHECKED_CAST")
     private val serializer: KSerializer<T> = serializer(type) as KSerializer<T>
@@ -53,14 +52,14 @@ public class FileWorkspaceCache(public val cacheDirectory: Path) : WorkspaceCach
     //    private fun <T : Any> TaskData<*>.checkType(taskType: KType): TaskData<T> = this as TaskData<T>
 
     @OptIn(DFExperimental::class, DFInternal::class)
-    override suspend fun <T : Any> evaluate(result: TaskResult<T>): TaskResult<T> {
+    override suspend fun <T> cache(result: TaskResult<T>): TaskResult<T> {
         val io = result.workspace.context.request(IOPlugin)
 
         val format: IOFormat<T> = io.resolveIOFormat(result.dataType, result.taskMeta)
             ?: ProtobufIOFormat(result.dataType)
             ?: error("Can't resolve IOFormat for ${result.dataType}")
 
-        fun evaluateDatum(data: TaskData<T>): TaskData<T> {
+        fun cacheOne(data: NamedData<T>): NamedData<T> {
 
             val path = cacheDirectory /
                     result.taskName.withIndex(result.taskMeta.hashCode().toString(16)).toString() /
@@ -92,15 +91,14 @@ public class FileWorkspaceCache(public val cacheDirectory: Path) : WorkspaceCach
                 }
 
             }
-            return data.workspace.wrapData(datum, data.name, data.taskName, data.taskMeta)
+            return datum.named(data.name)
         }
 
-        return object : TaskResult<T> by result {
-            override fun iterator(): Iterator<TaskData<T>> =
-                result.iterator().asSequence().map { evaluateDatum(it) }.iterator()
 
-            override fun get(name: Name): TaskData<T>? = result[name]?.let { evaluateDatum(it) }
-        }
+        val cachedTree = result.data.asSequence().map { cacheOne(it) }
+            .toObservableTree(result.dataType, result.workspace, result.data.updates().map { cacheOne(it) })
+
+        return result.workspace.wrapResult(cachedTree, result.taskName, result.taskMeta)
     }
 }
 
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
index bb8ea9a4..a36e2b90 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
@@ -18,7 +18,7 @@ public class InMemoryWorkspaceCache : WorkspaceCache {
         if (type.isSubtypeOf(taskType)) this as TaskData<T>
         else error("Cached data type mismatch: expected $taskType but got $type")
 
-    override suspend fun <T : Any> evaluate(result: TaskResult<T>): TaskResult<T> {
+    override suspend fun <T : Any> cache(result: TaskResult<T>): TaskResult<T> {
         for (d: TaskData<T> in result) {
             cache.getOrPut(result.taskName to result.taskMeta) { HashMap() }.getOrPut(d.name) { d }
         }
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt
index ce1b5152..a0e6ab42 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt
@@ -14,11 +14,8 @@ import space.kscience.dataforge.meta.copy
 import space.kscience.dataforge.meta.string
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.misc.DFInternal
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.NameToken
-import space.kscience.dataforge.names.asName
-import space.kscience.dataforge.names.plus
-import space.kscience.dataforge.workspace.FileData.Companion.DEFAULT_IGNORE_EXTENSIONS
+import space.kscience.dataforge.names.*
+import space.kscience.dataforge.workspace.FileData.Companion.defaultPathToName
 import java.nio.file.Files
 import java.nio.file.Path
 import java.nio.file.StandardWatchEventKinds
@@ -35,6 +32,7 @@ import kotlin.reflect.typeOf
 
 public typealias FileFormatResolver<T> = (path: Path, meta: Meta) -> IOReader<T>?
 
+
 /**
  * A data based on a filesystem [Path]
  */
@@ -54,13 +52,28 @@ public class FileData<T> internal constructor(private val data: Data<T>, public
         public val FILE_UPDATE_TIME_KEY: Name = FILE_KEY + "updated"
         public const val DF_FILE_EXTENSION: String = "df"
         public val DEFAULT_IGNORE_EXTENSIONS: Set<String> = setOf(DF_FILE_EXTENSION)
+
+        /**
+         * Transform a file name into a DataForge name. Ignores DataForge file extensions.
+         */
+        public val defaultPathToName: (Path) -> Name = { path ->
+            Name(
+                path.map { segment ->
+                    if (segment.isRegularFile() && segment.extension in DEFAULT_IGNORE_EXTENSIONS) {
+                        NameToken(path.nameWithoutExtension)
+                    } else {
+                        NameToken(path.name)
+                    }
+                }
+            )
+        }
     }
 }
 
 
 /**
- * Read data with supported envelope format and binary format. If envelope format is null, then read binary directly from file.
- * The operation is blocking since it must read meta header. The reading of envelope body is lazy
+ * Read data with supported envelope format and binary format. If the envelope format is null, read the binary directly from the file.
+ * The operation is blocking since it must read the meta header. The reading of the envelope body is lazy.
  */
 @OptIn(DFInternal::class)
 @DFExperimental
@@ -90,7 +103,7 @@ public fun <T : Any> IOPlugin.readDataFile(
 context(IOPlugin) @DFExperimental
 public fun <T : Any> DataSetBuilder<T>.directory(
     path: Path,
-    ignoreExtensions: Set<String>,
+    pathToName: (Path) -> Name = defaultPathToName,
     formatResolver: FileFormatResolver<T>,
 ) {
     Files.list(path).forEach { childPath ->
@@ -98,7 +111,7 @@ public fun <T : Any> DataSetBuilder<T>.directory(
         if (fileName.startsWith(IOPlugin.META_FILE_NAME)) {
             meta(readMetaFile(childPath))
         } else if (!fileName.startsWith("@")) {
-            file(childPath, ignoreExtensions, formatResolver)
+            file(childPath, pathToName, formatResolver)
         }
     }
 }
@@ -111,9 +124,9 @@ public fun <T : Any> DataSetBuilder<T>.directory(
 public fun <T : Any> IOPlugin.readDataDirectory(
     type: KType,
     path: Path,
-    ignoreExtensions: Set<String> = DEFAULT_IGNORE_EXTENSIONS,
+    pathToName: (Path) -> Name = defaultPathToName,
     formatResolver: FileFormatResolver<T>,
-): DataTree<T> {
+): LegacyDataTree<T> {
     //read zipped data node
     if (path.fileName != null && path.fileName.toString().endsWith(".zip")) {
         //Using explicit Zip file system to avoid bizarre compatibility bugs
@@ -121,14 +134,14 @@ public fun <T : Any> IOPlugin.readDataDirectory(
             ?: error("Zip file system provider not found")
         val fs = fsProvider.newFileSystem(path, mapOf("create" to "true"))
 
-        return readDataDirectory(type, fs.rootDirectories.first(), ignoreExtensions, formatResolver)
+        return readDataDirectory(type, fs.rootDirectories.first(), pathToName, formatResolver)
     }
     if (!Files.isDirectory(path)) error("Provided path $path is not a directory")
     return DataTree(type) {
         meta {
             FileData.FILE_PATH_KEY put path.toString()
         }
-        directory(path, ignoreExtensions, formatResolver)
+        directory(path, pathToName, formatResolver)
     }
 }
 
@@ -136,9 +149,9 @@ public fun <T : Any> IOPlugin.readDataDirectory(
 @DFExperimental
 public inline fun <reified T : Any> IOPlugin.readDataDirectory(
     path: Path,
-    ignoreExtensions: Set<String> = DEFAULT_IGNORE_EXTENSIONS,
+    noinline pathToName: (Path) -> Name = defaultPathToName,
     noinline formatResolver: FileFormatResolver<T>,
-): DataTree<T> = readDataDirectory(typeOf<T>(), path, ignoreExtensions, formatResolver)
+): LegacyDataTree<T> = readDataDirectory(typeOf<T>(), path, pathToName, formatResolver)
 
 /**
  * Read a raw binary data tree from the directory. All files are read as-is (save for meta files).
@@ -146,8 +159,8 @@ public inline fun <reified T : Any> IOPlugin.readDataDirectory(
 @DFExperimental
 public fun IOPlugin.readRawDirectory(
     path: Path,
-    ignoreExtensions: Set<String> = emptySet(),
-): DataTree<Binary> = readDataDirectory(path, ignoreExtensions) { _, _ -> IOReader.binary }
+    pathToName: (Path) -> Name = defaultPathToName,
+): LegacyDataTree<Binary> = readDataDirectory(path, pathToName) { _, _ -> IOReader.binary }
 
 
 private fun Path.toName() = Name(map { NameToken.parse(it.nameWithoutExtension) })
@@ -157,13 +170,13 @@ private fun Path.toName() = Name(map { NameToken.parse(it.nameWithoutExtension)
 public fun <T : Any> IOPlugin.monitorDataDirectory(
     type: KType,
     path: Path,
-    ignoreExtensions: Set<String> = DEFAULT_IGNORE_EXTENSIONS,
+    pathToName: (Path) -> Name = defaultPathToName,
     formatResolver: FileFormatResolver<T>,
 ): DataSource<T> {
     if (path.fileName.toString().endsWith(".zip")) error("Monitoring not supported for ZipFS")
     if (!Files.isDirectory(path)) error("Provided path $path is not a directory")
     return DataSource(type, context) {
-        directory(path, ignoreExtensions, formatResolver)
+        directory(path, pathToName, formatResolver)
         launch(Dispatchers.IO) {
             val watchService = path.fileSystem.newWatchService()
 
@@ -186,7 +199,7 @@ public fun <T : Any> IOPlugin.monitorDataDirectory(
                             if (fileName.startsWith(IOPlugin.META_FILE_NAME)) {
                                 meta(readMetaFile(eventPath))
                             } else if (!fileName.startsWith("@")) {
-                                file(eventPath, ignoreExtensions, formatResolver)
+                                file(eventPath, pathToName, formatResolver)
                             }
                         }
                     }
@@ -205,9 +218,9 @@ public fun <T : Any> IOPlugin.monitorDataDirectory(
 @DFExperimental
 public inline fun <reified T : Any> IOPlugin.monitorDataDirectory(
     path: Path,
-    ignoreExtensions: Set<String> = DEFAULT_IGNORE_EXTENSIONS,
+    noinline pathToName: (Path) -> Name = defaultPathToName,
     noinline formatResolver: FileFormatResolver<T>,
-): DataSource<T> = monitorDataDirectory(typeOf<T>(), path, ignoreExtensions, formatResolver)
+): DataSource<T> = monitorDataDirectory(typeOf<T>(), path, pathToName, formatResolver)
 
 /**
  * Read and monitor raw binary data tree from the directory. All files are read as-is (save for meta files).
@@ -215,18 +228,23 @@ public inline fun <reified T : Any> IOPlugin.monitorDataDirectory(
 @DFExperimental
 public fun IOPlugin.monitorRawDirectory(
     path: Path,
-    ignoreExtensions: Set<String> = DEFAULT_IGNORE_EXTENSIONS,
-): DataSource<Binary> = monitorDataDirectory(path, ignoreExtensions) { _, _ -> IOReader.binary }
+    pathToName: (Path) -> Name = defaultPathToName,
+): DataSource<Binary> = monitorDataDirectory(path, pathToName) { _, _ -> IOReader.binary }
 
 /**
- * Write data tree to existing directory or create a new one using default [java.nio.file.FileSystem] provider
+ * Write the data tree to an existing directory or create a new one using the default [java.nio.file.FileSystem] provider.
+ *
+ * @param nameToPath a [Name] to [Path] converter used to create a file path for each data item
  */
 @DFExperimental
 public suspend fun <T : Any> IOPlugin.writeDataDirectory(
     path: Path,
-    tree: DataTree<T>,
+    dataSet: DataSet<T>,
     format: IOWriter<T>,
     envelopeFormat: EnvelopeFormat? = null,
+    nameToPath: (name: Name, data: Data<T>) -> Path = { name, _ ->
+        Path(name.tokens.joinToString("/") { token -> token.toStringUnescaped() })
+    },
 ) {
     withContext(Dispatchers.IO) {
         if (!Files.exists(path)) {
@@ -234,67 +252,54 @@ public suspend fun <T : Any> IOPlugin.writeDataDirectory(
         } else if (!Files.isDirectory(path)) {
             error("Can't write a node into file")
         }
-        tree.items.forEach { (token, item) ->
-            val childPath = path.resolve(token.toString())
-            when (item) {
-                is DataTreeItem.Node -> {
-                    writeDataDirectory(childPath, item.tree, format, envelopeFormat)
-                }
-
-                is DataTreeItem.Leaf -> {
-                    val envelope = item.data.toEnvelope(format)
-                    if (envelopeFormat != null) {
-                        writeEnvelopeFile(childPath, envelope, envelopeFormat)
-                    } else {
-                        writeEnvelopeDirectory(childPath, envelope)
-                    }
-                }
+        dataSet.forEach { (name, data) ->
+            val childPath = path.resolve(nameToPath(name, data))
+            childPath.parent.createDirectories()
+            val envelope = data.toEnvelope(format)
+            if (envelopeFormat != null) {
+                writeEnvelopeFile(childPath, envelope, envelopeFormat)
+            } else {
+                writeEnvelopeDirectory(childPath, envelope)
             }
         }
-        val treeMeta = tree.meta
-        writeMetaFile(path, treeMeta)
+        val directoryMeta = dataSet.meta
+        writeMetaFile(path, directoryMeta)
     }
 }
 
 /**
- * Reads the specified resources and returns a [DataTree] containing the data.
+ * Reads the specified resources and returns a [LegacyDataTree] containing the data.
  *
  * @param resources The names of the resources to read.
  * @param classLoader The class loader to use for loading the resources. By default, it uses the current thread's context class loader.
  * @return A DataTree containing the data read from the resources.
  */
 @DFExperimental
-private fun IOPlugin.readResources(
+public fun IOPlugin.readResources(
     vararg resources: String,
+    pathToName: (Path) -> Name = defaultPathToName,
     classLoader: ClassLoader = Thread.currentThread().contextClassLoader,
-): DataTree<Binary> {
-//    require(resource.isNotBlank()) {"Can't mount root resource tree as data root"}
-    return DataTree {
-        resources.forEach { resource ->
-            val path = classLoader.getResource(resource)?.toURI()?.toPath() ?: error(
-                "Resource with name $resource is not resolved"
-            )
-            node(resource, readRawDirectory(path))
-        }
+): LegacyDataTree<Binary> = GenericDataTree {
+    resources.forEach { resource ->
+        val path = classLoader.getResource(resource)?.toURI()?.toPath() ?: error(
+            "Resource with name $resource is not resolved"
+        )
+        node(resource, readRawDirectory(path, pathToName))
     }
 }
 
 /**
  * Add file/directory-based data tree item
- *
- * @param ignoreExtensions a list of file extensions for which extension should be cut from the resulting item name
  */
 context(IOPlugin)
 @OptIn(DFInternal::class)
 @DFExperimental
 public fun <T : Any> DataSetBuilder<T>.file(
     path: Path,
-    ignoreExtensions: Set<String> = DEFAULT_IGNORE_EXTENSIONS,
+    pathToName: (Path) -> Name = defaultPathToName,
     formatResolver: FileFormatResolver<out T>,
 ) {
 
-    fun defaultPath() = if (path.extension in ignoreExtensions) path.nameWithoutExtension else path.name
-
     try {
         //If path is a single file or a special directory, read it as single datum
         if (!Files.isDirectory(path) || Files.list(path).allMatch { it.fileName.toString().startsWith("@") }) {
@@ -303,16 +308,16 @@ public fun <T : Any> DataSetBuilder<T>.file(
                 logger.warn { "File format is not resolved for $path. Skipping." }
                 return
             }
-            val name: String = data.meta[Envelope.ENVELOPE_NAME_KEY].string ?: defaultPath()
-            data(name.asName(), data)
+            val name: Name = data.meta[Envelope.ENVELOPE_NAME_KEY].string?.parseAsName() ?: pathToName(path.last())
+            data(name, data)
         } else {
             //otherwise, read as directory
-            val data: DataTree<T> = readDataDirectory(dataType, path, ignoreExtensions, formatResolver)
-            val name = data.meta[Envelope.ENVELOPE_NAME_KEY].string ?: defaultPath()
-            node(name.asName(), data)
+            val data: LegacyDataTree<T> = readDataDirectory(dataType, path, pathToName, formatResolver)
+            val name = data.meta[Envelope.ENVELOPE_NAME_KEY].string?.parseAsName() ?: pathToName(path.last())
+            node(name, data)
         }
     } catch (ex: Exception) {
-        logger.error { "Failed to read file or directory at $path: ${ex.message}" }
+        logger.error(ex) { "Failed to read file or directory at $path: ${ex.message}" }
     }
 }
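Note: the `ignoreExtensions` parameters are replaced by a single `pathToName` strategy. A hypothetical alternative strategy, usable anywhere the readers above accept `pathToName` (for example `readRawDirectory(path, flatPathToName)`); the name and behaviour are illustrative only.

    import space.kscience.dataforge.names.Name
    import space.kscience.dataforge.names.NameToken
    import space.kscience.dataforge.names.asName
    import java.nio.file.Path
    import kotlin.io.path.nameWithoutExtension

    // Use only the final path segment, without its extension, as a single-token Name,
    // ignoring parent directories entirely.
    val flatPathToName: (Path) -> Name = { path ->
        NameToken(path.nameWithoutExtension).asName()
    }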
 
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
index ea6ffb85..67570a04 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
@@ -1,6 +1,6 @@
 package space.kscience.dataforge.workspace
 
-import space.kscience.dataforge.data.DataSet
+import space.kscience.dataforge.data.DataTree
 import space.kscience.dataforge.data.filterByType
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.misc.DFExperimental
@@ -16,14 +16,13 @@ import space.kscience.dataforge.names.matches
  */
 @OptIn(DFExperimental::class)
 public inline fun <reified T : Any> TaskResultBuilder<*>.dataByType(namePattern: Name? = null): DataSelector<T> =
-    object : DataSelector<T> {
-        override suspend fun select(workspace: Workspace, meta: Meta): DataSet<T> =
-            workspace.data.filterByType { name, _ ->
-                namePattern == null || name.matches(namePattern)
-            }
+    DataSelector<T> { workspace, meta ->
+        workspace.data.filterByType { name, _ ->
+            namePattern == null || name.matches(namePattern)
+        }
     }
 
 public suspend inline fun <reified T : Any> TaskResultBuilder<*>.fromTask(
     task: Name,
     taskMeta: Meta = Meta.EMPTY,
-): DataSet<T> = workspace.produce(task, taskMeta).filterByType()
\ No newline at end of file
+): DataTree<T> = workspace.produce(task, taskMeta).data.filterByType()
\ No newline at end of file
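Note: a fragment showing how an upstream task is now consumed through `fromTask` inside a task body (for example inside a `WorkspacePlugin`, as in the tests); the upstream task name is hypothetical and the surrounding imports are omitted.

    val combined by task<Int> {
        // re-expose the Int results of a previously registered task under this task's name
        emitAll(fromTask<Int>(Name.parse("generate")))
    }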
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt
index 466552c2..02d319de 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt
@@ -2,8 +2,8 @@ package space.kscience.dataforge.workspace
 
 import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.withContext
-import space.kscience.dataforge.data.DataTree
 import space.kscience.dataforge.data.DataTreeItem
+import space.kscience.dataforge.data.LegacyDataTree
 import space.kscience.dataforge.io.*
 import space.kscience.dataforge.misc.DFExperimental
 import java.nio.file.Files
@@ -47,10 +47,10 @@ private suspend fun <T : Any> ZipOutputStream.writeNode(
 }
 
 /**
- * Write this [DataTree] as a zip archive
+ * Write this [LegacyDataTree] as a zip archive
  */
 @DFExperimental
-public suspend fun <T : Any> DataTree<T>.writeZip(
+public suspend fun <T : Any> LegacyDataTree<T>.writeZip(
     path: Path,
     format: IOFormat<T>,
     envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat,
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
index 18086902..dbc9cb5e 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
@@ -20,13 +20,13 @@ class DataPropagationTestPlugin : WorkspacePlugin() {
         val result: Data<Int> = selectedData.foldToData(0) { result, data ->
             result + data.value
         }
-        data("result", result)
+        emit("result", result)
     }
 
 
     val singleData by task<Int> {
         workspace.data.filterByType<Int>()["myData[12]"]?.let {
-            data("result", it)
+            emit("result", it)
         }
     }
 
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
index 451c76f4..20d03cd3 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
@@ -26,7 +26,7 @@ import kotlin.test.assertEquals
 
 
 class FileDataTest {
-    val dataNode = DataTree<String> {
+    val dataNode = GenericDataTree<String> {
         node("dir") {
             static("a", "Some string") {
                 "content" put "Some string"
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
index b79ed2e7..a41a73bc 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
@@ -140,7 +140,7 @@ internal class SimpleWorkspaceTest {
                 val meta = data.meta.toMutableMeta().apply {
                     "newValue" put 22
                 }
-                data(data.name + "new", data.map { (data.meta["value"].int ?: 0) + it })
+                data(data.name + "new", data.transform { (data.meta["value"].int ?: 0) + it })
             }
         }
 

From 90999f424f2254f3ccc0b8b20f1f01983f627e98 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 29 Jan 2024 22:10:06 +0300
Subject: [PATCH 17/77] [WIP] change data structure

---
 .../kscience/dataforge/actions/Action.kt      |   8 +-
 .../kscience/dataforge/actions/MapAction.kt   |   2 +-
 .../dataforge/actions/ReduceAction.kt         |   2 +-
 .../kscience/dataforge/actions/SplitAction.kt |   2 +-
 .../space/kscience/dataforge/data/Data.kt     |   2 +-
 .../kscience/dataforge/data/DataSource.kt     |  36 ++-
 .../kscience/dataforge/data/GroupRule.kt      |   2 +-
 .../kscience/dataforge/data/NamedData.kt      |   2 +-
 .../kscience/dataforge/data/dataBuilders.kt   |  48 +--
 .../kscience/dataforge/data/dataTransform.kt  |  38 +--
 .../kscience/dataforge/data/dataFilterJvm.kt  |   4 +-
 .../dataforge/data/dataSetBuilderInContext.kt |  10 +-
 ...eBuilderTest.kt => DataTreeBuilderTest.kt} |   2 +-
 .../kscience/dataforge/io/EnvelopeFormat.kt   |   5 +-
 .../space/kscience/dataforge/io/IOFormat.kt   |   4 +-
 .../space/kscience/dataforge/io/MetaFormat.kt |   2 -
 .../dataforge/workspace/TaskResult.kt         |  24 +-
 .../kscience/dataforge/workspace/Workspace.kt |   2 +-
 .../dataforge/workspace/WorkspaceBuilder.kt   |   6 +-
 .../dataforge/workspace/taskBuilders.kt       |  20 +-
 .../dataforge/workspace/FileWorkspaceCache.kt |   4 +-
 .../workspace/InMemoryWorkspaceCache.kt       |  38 +--
 .../kscience/dataforge/workspace/fileData.kt  | 293 ++++++------------
 .../dataforge/workspace/workspaceJvm.kt       |   6 +-
 .../kscience/dataforge/workspace/zipData.kt   |  52 ++--
 .../workspace/CachingWorkspaceTest.kt         |  10 +-
 .../workspace/DataPropagationTest.kt          |   8 +-
 .../dataforge/workspace/FileDataTest.kt       |  39 ++-
 .../workspace/FileWorkspaceCacheTest.kt       |   4 +-
 .../workspace/SimpleWorkspaceTest.kt          |  39 ++-
 30 files changed, 316 insertions(+), 398 deletions(-)
 rename dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/{LegacyGenericDataTreeBuilderTest.kt => DataTreeBuilderTest.kt} (98%)

diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
index b9a9a0a8..ac903aee 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
@@ -10,7 +10,7 @@ import space.kscience.dataforge.misc.DFExperimental
 /**
  * A simple data transformation on a data node. Actions should avoid doing actual dependency evaluation in [execute].
  */
-public fun interface Action<T : Any, R : Any> {
+public fun interface Action<T, R> {
 
     /**
      * Transform the data in the node, producing a new node. By default, it is assumed that all calculations are lazy
@@ -24,7 +24,7 @@ public fun interface Action<T : Any, R : Any> {
 /**
  * A convenience method to transform data using given [action]
  */
-public fun <T : Any, R : Any> DataTree<T>.transform(
+public fun <T, R> DataTree<T>.transform(
     action: Action<T, R>,
     scope: CoroutineScope,
     meta: Meta = Meta.EMPTY,
@@ -33,11 +33,11 @@ public fun <T : Any, R : Any> DataTree<T>.transform(
 /**
  * Action composition. The result is terminal if one of its parts is terminal
  */
-public infix fun <T : Any, I : Any, R : Any> Action<T, I>.then(action: Action<I, R>): Action<T, R> =
+public infix fun <T, I, R> Action<T, I>.then(action: Action<I, R>): Action<T, R> =
     Action { scope, dataSet, meta -> action.execute(scope, this@then.execute(scope, dataSet, meta), meta) }
 
 @DFExperimental
-public suspend operator fun <T : Any, R : Any> Action<T, R>.invoke(
+public suspend operator fun <T, R> Action<T, R>.invoke(
     dataSet: DataTree<T>,
     meta: Meta = Meta.EMPTY,
 ): DataTree<R> = coroutineScope { execute(this, dataSet, meta) }
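Note: with the relaxed generics, trivial actions and their composition can be written directly against the `fun interface`; the payload types below are arbitrary.

    import space.kscience.dataforge.actions.Action
    import space.kscience.dataforge.actions.then

    // An action that passes the tree through unchanged, showing the SAM shape.
    val passThrough: Action<Int, Int> = Action { _, dataSet, _ -> dataSet }

    // Composition via `then`: the combined action is still lazy until executed.
    val twice: Action<Int, Int> = passThrough then passThrough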
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index 60f05910..3cf7c788 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -79,7 +79,7 @@ internal class MapAction<T : Any, R : Any>(
             builder.result(env, data.await())
         }
         //setting the data node
-        emit(newName, newData)
+        data(newName, newData)
     }
 
     override fun DataSink<R>.generate(data: DataTree<T>, meta: Meta) {
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
index cc27b3d1..5b862046 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
@@ -103,7 +103,7 @@ internal class ReduceAction<T : Any, R : Any>(
                 meta = groupMeta
             ) { group.result.invoke(env, it) }
 
-            emit(env.name, res)
+            data(env.name, res)
         }
     }
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
index a2da44f8..7926ce0b 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
@@ -64,7 +64,7 @@ internal class SplitAction<T : Any, R : Any>(
             ).apply(rule)
             //data.map<R>(outputType, meta = env.meta) { env.result(it) }.named(fragmentName)
 
-            emit(
+            data(
                 fragmentName,
                 @Suppress("OPT_IN_USAGE") Data(outputType, meta = env.meta, dependencies = listOf(data)) {
                     env.result(data.await())
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
index 3668af1e..a0bb58ea 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
@@ -15,7 +15,7 @@ import kotlin.reflect.typeOf
  * A data element characterized by its meta
  */
 @DfType(Data.TYPE)
-public interface Data<T> : Goal<T>, MetaRepr {
+public interface Data<out T> : Goal<T>, MetaRepr {
     /**
      * Type marker for the data. The type is known before the calculation takes place so it could be checked.
      */
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
index 8c86d431..d6011839 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
@@ -12,7 +12,7 @@ import kotlin.reflect.typeOf
 /**
  * A generic data provider
  */
-public interface DataSource<T> {
+public interface DataSource<out T> {
 
     /**
      * The minimal common ancestor to all data in the node
@@ -28,7 +28,7 @@ public interface DataSource<T> {
 /**
  * A data provider with possible dynamic updates
  */
-public interface ObservableDataSource<T> : DataSource<T> {
+public interface ObservableDataSource<out T> : DataSource<T> {
 
     /**
      * Flow updates made to the data
@@ -39,7 +39,7 @@ public interface ObservableDataSource<T> : DataSource<T> {
 /**
  * A tree like structure for data holding
  */
-public interface GenericDataTree<T, out TR : GenericDataTree<T, TR>> : DataSource<T> {
+public interface GenericDataTree<out T, out TR : GenericDataTree<T, TR>> : DataSource<T> {
     public val self: TR
 
     public val data: Data<T>?
@@ -66,7 +66,12 @@ public interface GenericDataTree<T, out TR : GenericDataTree<T, TR>> : DataSourc
     }
 }
 
-public typealias DataTree<T> = GenericDataTree<T, *>
+public typealias DataTree<T> = GenericDataTree<T, GenericDataTree<T,*>>
+
+/**
+ * Return the single datum in this tree. Throws an error if the tree does not contain exactly one datum.
+ */
+public fun <T> DataTree<T>.single(): NamedData<T> = asSequence().single()
 
 /**
  * An alias for easier access to tree values
@@ -79,7 +84,7 @@ public operator fun <T> DataTree<T>.get(name: String): Data<T>? = read(name.pars
  * Return a sequence of all data items in this tree.
  * This method does not take updates into account.
  */
-public fun <T> GenericDataTree<T, DataTree<T>>.asSequence(
+public fun <T> DataTree<T>.asSequence(
     namePrefix: Name = Name.EMPTY,
 ): Sequence<NamedData<T>> = sequence {
     data?.let { yield(it.named(Name.EMPTY)) }
@@ -100,6 +105,9 @@ public tailrec fun <T, TR : GenericDataTree<T, TR>> GenericDataTree<T, TR>.branc
         else -> items[name.first()]?.branch(name.cutFirst())
     }
 
+public fun <T, TR : GenericDataTree<T, TR>> GenericDataTree<T, TR>.branch(name: String): TR? =
+    branch(name.parseAsName())
+
 public fun GenericDataTree<*, *>.isEmpty(): Boolean = data == null && items.isEmpty()
 
 @PublishedApi
@@ -113,7 +121,7 @@ internal class FlatDataTree<T>(
     override val items: Map<NameToken, FlatDataTree<T>>
         get() = dataSet.keys
             .filter { it.startsWith(prefix) && it.length > prefix.length }
-            .map { it.tokens[prefix.length + 1] }
+            .map { it.tokens[prefix.length] }
             .associateWith { FlatDataTree(dataType, dataSet, prefix + it) }
 
     override fun read(name: Name): Data<T>? = dataSet[prefix + name]
@@ -133,20 +141,20 @@ internal fun <T> Sequence<NamedData<T>>.toTree(type: KType): DataTree<T> =
 public inline fun <reified T> Sequence<NamedData<T>>.toTree(): DataTree<T> =
     FlatDataTree(typeOf<T>(), associate { it.name to it.data }, Name.EMPTY)
 
-public interface GenericObservableDataTree<T, TR : GenericObservableDataTree<T, TR>> : GenericDataTree<T, TR>,
+public interface GenericObservableDataTree<out T, out TR : GenericObservableDataTree<T, TR>> : GenericDataTree<T, TR>,
     ObservableDataSource<T>
 
-public typealias ObservableDataTree<T> = GenericObservableDataTree<T, *>
+public typealias ObservableDataTree<T> = GenericObservableDataTree<T, GenericObservableDataTree<T, *>>
 
-public fun <T> DataTree<T>.updates(): Flow<NamedData<T>> = if (this is ObservableDataTree<T>) updates() else emptyFlow()
+public fun <T> DataTree<T>.updates(): Flow<NamedData<T>> = if (this is GenericObservableDataTree<T,*>) updates() else emptyFlow()
 
-public fun interface DataSink<T> {
-    public fun emit(name: Name, data: Data<T>?)
+public fun interface DataSink<in T> {
+    public fun data(name: Name, data: Data<T>?)
 }
 
 public class DataTreeBuilder<T>(private val type: KType) : DataSink<T> {
     private val map = HashMap<Name, Data<T>>()
-    override fun emit(name: Name, data: Data<T>?) {
+    override fun data(name: Name, data: Data<T>?) {
         if (data == null) {
             map.remove(name)
         } else {
@@ -182,7 +190,7 @@ public interface MutableDataTree<T> : GenericObservableDataTree<T, MutableDataTr
 
     public operator fun set(token: NameToken, data: Data<T>?)
 
-    override fun emit(name: Name, data: Data<T>?): Unit = set(name, data)
+    override fun data(name: Name, data: Data<T>?): Unit = set(name, data)
 }
 
 public tailrec operator fun <T> MutableDataTree<T>.set(name: Name, data: Data<T>?): Unit {
@@ -266,6 +274,6 @@ public fun <T> Sequence<NamedData<T>>.toObservableTree(dataType: KType, scope: C
     MutableDataTree<T>(dataType, scope).apply {
         emitAll(this@toObservableTree)
         updates.onEach {
-            emit(it.name, it.data)
+            data(it.name, it.data)
         }.launchIn(scope)
     }
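
As a usage note for the reworked read API above (asSequence and the string-based get), a short sketch; the name "a.b" is a placeholder:

    import space.kscience.dataforge.data.Data
    import space.kscience.dataforge.data.DataTree
    import space.kscience.dataforge.data.asSequence
    import space.kscience.dataforge.data.get

    fun <T> listNames(tree: DataTree<T>): List<String> =
        tree.asSequence().map { it.name.toString() }.toList() // static snapshot; updates are not tracked

    fun <T> lookup(tree: DataTree<T>): Data<T>? = tree["a.b"] // read by string name, null when absent
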
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
index 27d27507..1a1c66b2 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
@@ -42,7 +42,7 @@ public interface GroupRule {
 
                 set.forEach { data ->
                     val tagValue: String = data.meta[key]?.string ?: defaultTagValue
-                    map.getOrPut(tagValue) { DataTreeBuilder(set.dataType) }.emit(data.name,data.data)
+                    map.getOrPut(tagValue) { DataTreeBuilder(set.dataType) }.data(data.name,data.data)
                 }
 
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
index 26ad2dfb..63e36a3f 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
@@ -4,7 +4,7 @@ import space.kscience.dataforge.meta.isEmpty
 import space.kscience.dataforge.misc.Named
 import space.kscience.dataforge.names.Name
 
-public interface NamedData<T> : Named, Data<T> {
+public interface NamedData<out T> : Named, Data<T> {
     override val name: Name
     public val data: Data<T>
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
index 7eecad8f..72b1bb33 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
@@ -7,23 +7,24 @@ import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.asName
 import space.kscience.dataforge.names.isEmpty
 import space.kscience.dataforge.names.plus
 
 
-public fun <T> DataSink<T>.emit(value: NamedData<T>) {
-    emit(value.name, value.data)
+public fun <T> DataSink<T>.data(value: NamedData<T>) {
+    data(value.name, value.data)
 }
 
 public fun <T> DataSink<T>.emitAll(sequence: Sequence<NamedData<T>>) {
-    sequence.forEach { emit(it) }
+    sequence.forEach { data(it) }
 }
 
-public fun <T> DataSink<T>.emitAll(dataTree: DataTree<T>) {
+public fun <T> DataSink<T>.branch(dataTree: DataTree<T>) {
     emitAll(dataTree.asSequence())
 }
 
-public inline fun <T> DataSink<T>.emitAll(
+public inline fun <T> DataSink<T>.branch(
     prefix: Name,
     block: DataSink<T>.() -> Unit,
 ) {
@@ -31,45 +32,50 @@ public inline fun <T> DataSink<T>.emitAll(
         apply(block)
     } else {
         val proxyDataSink = DataSink { nameWithoutPrefix, data ->
-            this.emit(prefix + nameWithoutPrefix, data)
+            this.data(prefix + nameWithoutPrefix, data)
         }
 
         proxyDataSink.apply(block)
     }
 }
 
+public inline fun <T> DataSink<T>.branch(
+    prefix: String,
+    block: DataSink<T>.() -> Unit,
+): Unit = branch(prefix.asName(), block)
 
-public fun <T> DataSink<T>.emit(name: String, value: Data<T>) {
-    emit(Name.parse(name), value)
+
+public fun <T> DataSink<T>.data(name: String, value: Data<T>) {
+    data(Name.parse(name), value)
 }
 
-public fun <T> DataSink<T>.emitAll(name: Name, set: DataTree<T>) {
-    emitAll(name) { emitAll(set.asSequence()) }
+public fun <T> DataSink<T>.branch(name: Name, set: DataTree<T>) {
+    branch(name) { emitAll(set.asSequence()) }
 }
 
-public fun <T> DataSink<T>.emitAll(name: String, set: DataTree<T>) {
-    emitAll(Name.parse(name)) { emitAll(set.asSequence()) }
+public fun <T> DataSink<T>.branch(name: String, set: DataTree<T>) {
+    branch(Name.parse(name)) { emitAll(set.asSequence()) }
 }
 
 /**
  * Produce lazy [Data] and emit it into the [MutableDataTree]
  */
-public inline fun <reified T> DataSink<T>.produce(
+public inline fun <reified T> DataSink<T>.data(
     name: String,
     meta: Meta = Meta.EMPTY,
     noinline producer: suspend () -> T,
 ) {
     val data = Data(meta, block = producer)
-    emit(name, data)
+    data(name, data)
 }
 
-public inline fun <reified T> DataSink<T>.produce(
+public inline fun <reified T> DataSink<T>.data(
     name: Name,
     meta: Meta = Meta.EMPTY,
     noinline producer: suspend () -> T,
 ) {
     val data = Data(meta, block = producer)
-    emit(name, data)
+    data(name, data)
 }
 
 /**
@@ -79,24 +85,24 @@ public inline fun <reified T> DataSink<T>.static(
     name: String,
     data: T,
     meta: Meta = Meta.EMPTY,
-): Unit = emit(name, Data.static(data, meta))
+): Unit = data(name, Data.static(data, meta))
 
 public inline fun <reified T> DataSink<T>.static(
     name: Name,
     data: T,
     meta: Meta = Meta.EMPTY,
-): Unit = emit(name, Data.static(data, meta))
+): Unit = data(name, Data.static(data, meta))
 
 public inline fun <reified T> DataSink<T>.static(
     name: String,
     data: T,
     mutableMeta: MutableMeta.() -> Unit,
-): Unit = emit(Name.parse(name), Data.static(data, Meta(mutableMeta)))
+): Unit = data(Name.parse(name), Data.static(data, Meta(mutableMeta)))
 
 
 public fun <T> DataSink<T>.populateFrom(sequence: Sequence<NamedData<T>>) {
     sequence.forEach {
-        emit(it.name, it.data)
+        data(it.name, it.data)
     }
 }
 
@@ -111,7 +117,7 @@ public fun <T> DataSink<T>.populateFrom(tree: DataTree<T>) {
 @DFExperimental
 public fun <T> MutableDataTree<T>.populateFrom(flow: ObservableDataSource<T>): Job = flow.updates().onEach {
     //TODO check if the place is occupied
-    emit(it.name, it.data)
+    data(it.name, it.data)
 }.launchIn(scope)
 
 //public fun <T > DataSetBuilder<T>.populateFrom(flow: Flow<NamedData<T>>) {
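
A small usage sketch of the renamed sink DSL (data, branch, static), assuming the DataTree { ... } factory used in the tests further below; all names and values are placeholders:

    import space.kscience.dataforge.data.DataTree
    import space.kscience.dataforge.data.branch
    import space.kscience.dataforge.data.data
    import space.kscience.dataforge.data.static

    suspend fun sampleTree(): DataTree<Any> = DataTree<Any> {
        static("answer", 42)           // eagerly available value
        data("lazy") { 6 * 7 }         // lazy Data built from a suspend producer
        branch("nested") {             // everything inside lands under the "nested" prefix
            static("inner", "payload")
        }
    }
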
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
index 3afc716a..f33ec1f0 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
@@ -9,14 +9,14 @@ import kotlin.coroutines.EmptyCoroutineContext
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
-public data class ValueWithMeta<T>(val meta: Meta, val value: T)
+public data class ValueWithMeta<T>(val value: T, val meta: Meta)
 
-public suspend fun <T> Data<T>.awaitWithMeta(): ValueWithMeta<T> = ValueWithMeta(meta, await())
+public suspend fun <T> Data<T>.awaitWithMeta(): ValueWithMeta<T> = ValueWithMeta(await(), meta)
 
-public data class NamedValueWithMeta<T>(val name: Name, val meta: Meta, val value: T)
+public data class NamedValueWithMeta<T>(val name: Name, val value: T, val meta: Meta)
 
 public suspend fun <T> NamedData<T>.awaitWithMeta(): NamedValueWithMeta<T> =
-    NamedValueWithMeta(name, meta, await())
+    NamedValueWithMeta(name, await(), meta)
 
 
 /**
@@ -25,7 +25,7 @@ public suspend fun <T> NamedData<T>.awaitWithMeta(): NamedValueWithMeta<T> =
  * @param meta for the resulting data. By default equals input data.
  * @param block the transformation itself
  */
-public inline fun <T : Any, reified R : Any> Data<T>.transform(
+public inline fun <T, reified R> Data<T>.transform(
     meta: Meta = this.meta,
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
     crossinline block: suspend (T) -> R,
@@ -36,7 +36,7 @@ public inline fun <T : Any, reified R : Any> Data<T>.transform(
 /**
  * Combine this data with the other data using [block]. See [Data::map] for other details
  */
-public inline fun <T1 : Any, T2 : Any, reified R : Any> Data<T1>.combine(
+public inline fun <T1, T2, reified R> Data<T1>.combine(
     other: Data<T2>,
     meta: Meta = this.meta,
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
@@ -69,7 +69,7 @@ internal fun Map<*, Data<*>>.joinMeta(): Meta = Meta {
 }
 
 @DFInternal
-public fun <K, T : Any, R : Any> Map<K, Data<T>>.reduceToData(
+public fun <K, T, R> Map<K, Data<T>>.reduceToData(
     outputType: KType,
     meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
@@ -89,7 +89,7 @@ public fun <K, T : Any, R : Any> Map<K, Data<T>>.reduceToData(
  * @param T type of the input goal
  * @param R type of the result goal
  */
-public inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduceToData(
+public inline fun <K, T, reified R> Map<K, Data<T>>.reduceToData(
     meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
     crossinline block: suspend (Map<K, ValueWithMeta<T>>) -> R,
@@ -104,7 +104,7 @@ public inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduceToData(
 //Iterable operations
 
 @DFInternal
-public inline fun <T : Any, R : Any> Iterable<Data<T>>.reduceToData(
+public inline fun <T, R> Iterable<Data<T>>.reduceToData(
     outputType: KType,
     meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
@@ -119,7 +119,7 @@ public inline fun <T : Any, R : Any> Iterable<Data<T>>.reduceToData(
 }
 
 @OptIn(DFInternal::class)
-public inline fun <T : Any, reified R : Any> Iterable<Data<T>>.reduceToData(
+public inline fun <T, reified R> Iterable<Data<T>>.reduceToData(
     meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
     crossinline transformation: suspend (Collection<ValueWithMeta<T>>) -> R,
@@ -127,7 +127,7 @@ public inline fun <T : Any, reified R : Any> Iterable<Data<T>>.reduceToData(
     transformation(it)
 }
 
-public inline fun <T : Any, reified R : Any> Iterable<Data<T>>.foldToData(
+public inline fun <T, reified R> Iterable<Data<T>>.foldToData(
     initial: R,
     meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
@@ -142,7 +142,7 @@ public inline fun <T : Any, reified R : Any> Iterable<Data<T>>.foldToData(
  * Transform an [Iterable] of [NamedData] to a single [Data].
  */
 @DFInternal
-public inline fun <T : Any, R : Any> Iterable<NamedData<T>>.reduceNamedToData(
+public inline fun <T, R> Iterable<NamedData<T>>.reduceNamedToData(
     outputType: KType,
     meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
@@ -157,7 +157,7 @@ public inline fun <T : Any, R : Any> Iterable<NamedData<T>>.reduceNamedToData(
 }
 
 @OptIn(DFInternal::class)
-public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.reduceNamedToData(
+public inline fun <T, reified R> Iterable<NamedData<T>>.reduceNamedToData(
     meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
     crossinline transformation: suspend (Collection<NamedValueWithMeta<T>>) -> R,
@@ -168,7 +168,7 @@ public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.reduceNamedT
 /**
  * Fold a [Iterable] of named data into a single [Data]
  */
-public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.foldNamedToData(
+public inline fun <T, reified R> Iterable<NamedData<T>>.foldNamedToData(
     initial: R,
     meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
@@ -194,18 +194,18 @@ public suspend fun <T, R> DataTree<T>.transform(
         val d = Data(outputType, newMeta, coroutineContext, listOf(namedData)) {
             block(namedData.awaitWithMeta())
         }
-        emit(namedData.name, d)
+        data(namedData.name, d)
     }
 }
 
 @OptIn(DFInternal::class)
-public suspend inline fun <T : Any, reified R : Any> DataTree<T>.transform(
+public suspend inline fun <T, reified R> DataTree<T>.transform(
     noinline metaTransform: MutableMeta.() -> Unit = {},
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
     noinline block: suspend (NamedValueWithMeta<T>) -> R,
 ): DataTree<R> = this@transform.transform(typeOf<R>(), metaTransform, coroutineContext, block)
 
-public inline fun <T : Any> DataTree<T>.forEach(block: (NamedData<T>) -> Unit) {
+public inline fun <T> DataTree<T>.forEach(block: (NamedData<T>) -> Unit) {
     asSequence().forEach(block)
 }
 
@@ -219,13 +219,13 @@ internal fun DataTree<*>.joinMeta(): Meta = Meta {
     }
 }
 
-public inline fun <T : Any, reified R : Any> DataTree<T>.reduceToData(
+public inline fun <T, reified R> DataTree<T>.reduceToData(
     meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
     crossinline transformation: suspend (Iterable<NamedValueWithMeta<T>>) -> R,
 ): Data<R> = asSequence().asIterable().reduceNamedToData(meta, coroutineContext, transformation)
 
-public inline fun <T : Any, reified R : Any> DataTree<T>.foldToData(
+public inline fun <T, reified R> DataTree<T>.foldToData(
     initial: R,
     meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
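
A sketch of chaining the lazy transformations above; Data.static with an explicit meta matches the usage elsewhere in this patch, and the numbers are placeholders:

    import space.kscience.dataforge.data.Data
    import space.kscience.dataforge.data.reduceToData
    import space.kscience.dataforge.data.static
    import space.kscience.dataforge.data.transform
    import space.kscience.dataforge.meta.Meta

    suspend fun lazySum(): Int {
        val a = Data.static(21, Meta.EMPTY)
        val doubled = a.transform { it * 2 }                  // still lazy, nothing is computed yet
        val total = listOf(a, doubled).reduceToData { values ->
            values.sumOf { it.value }                         // values: Collection<ValueWithMeta<Int>>
        }
        return total.await()                                  // triggers the whole dependency chain
    }
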
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
index aa5c500a..48155595 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
@@ -14,7 +14,7 @@ import kotlin.reflect.typeOf
  * Cast the node to given type if the cast is possible or return null
  */
 @Suppress("UNCHECKED_CAST")
-private fun <R : Any> Data<*>.castOrNull(type: KType): Data<R>? =
+private fun <R> Data<*>.castOrNull(type: KType): Data<R>? =
     if (!this.type.isSubtypeOf(type)) {
         null
     } else {
@@ -55,7 +55,7 @@ public inline fun <reified R : Any> DataTree<*>.filterByType(
 /**
  * Select a single datum if it is present and of given [type]
  */
-public fun <R : Any> DataTree<*>.getByType(type: KType, name: Name): NamedData<R>? =
+public fun <R> DataTree<*>.getByType(type: KType, name: Name): NamedData<R>? =
     get(name)?.castOrNull<R>(type)?.named(name)
 
 public inline fun <reified R : Any> DataTree<*>.getByType(name: Name): NamedData<R>? =
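
A one-line sketch of narrowing an untyped tree with filterByType; the concrete payload type is a placeholder:

    import space.kscience.dataforge.data.DataTree
    import space.kscience.dataforge.data.filterByType

    fun onlyInts(tree: DataTree<*>) = tree.filterByType<Int>() // keeps only entries whose type is a subtype of Int
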
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
index 116ae52d..5f8791d0 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
@@ -12,14 +12,14 @@ import space.kscience.dataforge.names.plus
  */
 context(DataSink<T>)
 public infix fun <T : Any> String.put(data: Data<T>): Unit =
-    emit(Name.parse(this), data)
+    data(Name.parse(this), data)
 
 /**
  * Append node
  */
 context(DataSink<T>)
 public infix fun <T : Any> String.put(dataSet: DataTree<T>): Unit =
-    emitAll(this, dataSet)
+    branch(this, dataSet)
 
 /**
  * Build and append node
@@ -27,7 +27,7 @@ public infix fun <T : Any> String.put(dataSet: DataTree<T>): Unit =
 context(DataSink<T>)
 public infix fun <T : Any> String.put(
     block: DataSink<T>.() -> Unit,
-): Unit = emitAll(Name.parse(this), block)
+): Unit = branch(Name.parse(this), block)
 
 /**
  * Copy given data set and mirror its changes to this [LegacyDataTreeBuilder] in [this@setAndObserve]. Returns an update [Job]
@@ -37,8 +37,8 @@ public fun <T : Any> CoroutineScope.setAndWatch(
     name: Name,
     dataSet: DataTree<T>,
 ): Job = launch {
-    emitAll(name, dataSet)
+    branch(name, dataSet)
     dataSet.updates().collect {
-        emit(name + it.name, it.data)
+        data(name + it.name, it.data)
     }
 }
\ No newline at end of file
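
A usage sketch for the context-receiver put helpers above (requires the -Xcontext-receivers compiler flag); the sink and values are placeholders:

    import space.kscience.dataforge.data.Data
    import space.kscience.dataforge.data.DataSink
    import space.kscience.dataforge.data.put
    import space.kscience.dataforge.meta.Meta

    fun fill(sink: DataSink<Int>): Unit = with(sink) {
        "a" put Data.static(1, Meta.EMPTY) // append a single datum under the name "a"
        "b" put Data.static(2, Meta.EMPTY)
    }
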
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/LegacyGenericDataTreeBuilderTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
similarity index 98%
rename from dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/LegacyGenericDataTreeBuilderTest.kt
rename to dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
index a871b340..c5c450f7 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/LegacyGenericDataTreeBuilderTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
@@ -7,7 +7,7 @@ import kotlin.test.Test
 import kotlin.test.assertEquals
 
 
-internal class LegacyGenericDataTreeBuilderTest {
+internal class DataTreeBuilderTest {
     @Test
     fun testTreeBuild() = runBlocking {
         val node = DataTree<Any> {
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/EnvelopeFormat.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/EnvelopeFormat.kt
index 0e998760..27a192d7 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/EnvelopeFormat.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/EnvelopeFormat.kt
@@ -10,10 +10,7 @@ import space.kscience.dataforge.names.asName
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
-public interface EnvelopeFormat : IOFormat<Envelope> {
-
-    override val type: KType get() = typeOf<Envelope>()
-}
+public interface EnvelopeFormat : IOFormat<Envelope>
 
 public fun EnvelopeFormat.read(input: Source): Envelope = readFrom(input)
 
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt
index 54e305c6..39fa2be1 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt
@@ -49,13 +49,13 @@ public fun interface IOWriter<in T> {
  */
 public interface IOFormat<T> : IOReader<T>, IOWriter<T>
 
-public fun <T : Any> Source.readWith(format: IOReader<T>): T = format.readFrom(this)
+public fun <T> Source.readWith(format: IOReader<T>): T = format.readFrom(this)
 
 /**
  * Read given binary as an object using given format
  */
 public fun <T> Binary.readWith(format: IOReader<T>): T = read {
-    readWith(format)
+    this.readWith(format)
 }
 
 /**
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/MetaFormat.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/MetaFormat.kt
index f864dd2f..b36ddc2a 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/MetaFormat.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/MetaFormat.kt
@@ -21,8 +21,6 @@ import kotlin.reflect.typeOf
  */
 public interface MetaFormat : IOFormat<Meta> {
 
-    override val type: KType get() = typeOf<Meta>()
-
     override fun writeTo(sink: Sink, obj: Meta) {
         writeMeta(sink, obj, null)
     }
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
index 59cb7461..94da4383 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
@@ -1,9 +1,13 @@
 package space.kscience.dataforge.workspace
 
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.Job
+import kotlinx.coroutines.launch
 import space.kscience.dataforge.data.ObservableDataTree
+import space.kscience.dataforge.data.asSequence
+import space.kscience.dataforge.data.launch
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.names.Name
-import kotlin.reflect.KType
 
 /**
  * A result of a [Task]
@@ -12,16 +16,24 @@ import kotlin.reflect.KType
  * @param taskMeta The configuration of the task that produced the result
  */
 public data class TaskResult<T>(
-    public val data: ObservableDataTree<T>,
+    public val content: ObservableDataTree<T>,
     public val workspace: Workspace,
     public val taskName: Name,
     public val taskMeta: Meta,
-) {
-    val dataType: KType get() = data.dataType
-}
+): ObservableDataTree<T> by content
 
 /**
  * Wrap data into [TaskResult]
  */
 public fun <T> Workspace.wrapResult(data: ObservableDataTree<T>, taskName: Name, taskMeta: Meta): TaskResult<T> =
-    TaskResult(data, this, taskName, taskMeta)
\ No newline at end of file
+    TaskResult(data, this, taskName, taskMeta)
+
+/**
+ * Start computation for all data elements of this node.
+ * The resulting [Job] is completed only when all of them are completed.
+ */
+public fun TaskResult<*>.compute(scope: CoroutineScope): Job = scope.launch {
+    asSequence().forEach {
+        it.data.launch(scope)
+    }
+}
\ No newline at end of file
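
A usage sketch for the compute helper above; the task name is a placeholder and produce is the suspending member shown in Workspace.kt below:

    import kotlinx.coroutines.coroutineScope
    import space.kscience.dataforge.meta.Meta
    import space.kscience.dataforge.names.parseAsName
    import space.kscience.dataforge.workspace.Workspace
    import space.kscience.dataforge.workspace.compute

    suspend fun computeAll(workspace: Workspace) = coroutineScope {
        val result = workspace.produce("doFirst".parseAsName(), Meta.EMPTY) // placeholder task name
        result.compute(this).join() // per the doc above, the Job completes only after every datum is evaluated
    }
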
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
index bc1c19dc..f3ea322c 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
@@ -53,7 +53,7 @@ public interface Workspace : ContextAware, Provider, CoroutineScope {
     }
 
     public suspend fun produceData(taskName: Name, taskMeta: Meta, name: Name): Data<*>? =
-        produce(taskName, taskMeta).data[name]
+        produce(taskName, taskMeta)[name]
 
     public companion object {
         public const val TYPE: String = "workspace"
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
index e0b7fad7..6ccdcb1c 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
@@ -19,12 +19,12 @@ import kotlin.properties.PropertyDelegateProvider
 import kotlin.properties.ReadOnlyProperty
 import kotlin.reflect.typeOf
 
-public data class TaskReference<T : Any>(public val taskName: Name, public val task: Task<T>) : DataSelector<T> {
+public data class TaskReference<T>(public val taskName: Name, public val task: Task<T>) : DataSelector<T> {
 
     @Suppress("UNCHECKED_CAST")
     override suspend fun select(workspace: Workspace, meta: Meta): DataTree<T> {
         if (workspace.tasks[taskName] == task) {
-            return workspace.produce(taskName, meta).data as DataTree<T>
+            return workspace.produce(taskName, meta) as DataTree<T>
         } else {
             error("Task $taskName does not belong to the workspace")
         }
@@ -125,7 +125,7 @@ public class WorkspaceBuilder(
     /**
      * Define intrinsic data for the workspace
      */
-    public fun data(builder: DataSink<*>.() -> Unit) {
+    public fun data(builder: DataSink<Any?>.() -> Unit) {
         data.apply(builder)
     }
 
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
index 11594c2b..95e7d481 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
@@ -3,7 +3,7 @@ package space.kscience.dataforge.workspace
 import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.context.PluginFactory
 import space.kscience.dataforge.data.DataTree
-import space.kscience.dataforge.data.emitAll
+import space.kscience.dataforge.data.branch
 import space.kscience.dataforge.data.forEach
 import space.kscience.dataforge.data.transform
 import space.kscience.dataforge.meta.*
@@ -25,12 +25,12 @@ public val TaskResultBuilder<*>.defaultDependencyMeta: Meta
  * @param selector a workspace data selector. Could be either task selector or initial data selector.
  * @param dependencyMeta meta used for selector. The same meta is used for caching. By default, uses [defaultDependencyMeta].
  */
-public suspend fun <T : Any> TaskResultBuilder<*>.from(
+public suspend fun <T> TaskResultBuilder<*>.from(
     selector: DataSelector<T>,
     dependencyMeta: Meta = defaultDependencyMeta,
 ): DataTree<T> = selector.select(workspace, dependencyMeta)
 
-public suspend inline fun <T : Any, reified P : WorkspacePlugin> TaskResultBuilder<*>.from(
+public suspend inline fun <T, reified P : WorkspacePlugin> TaskResultBuilder<*>.from(
     plugin: P,
     dependencyMeta: Meta = defaultDependencyMeta,
     selectorBuilder: P.() -> TaskReference<T>,
@@ -50,7 +50,7 @@ public suspend inline fun <T : Any, reified P : WorkspacePlugin> TaskResultBuild
  * @param dependencyMeta meta used for selector. The same meta is used for caching. By default, uses [defaultDependencyMeta].
  * @param selectorBuilder a builder of task from the plugin.
  */
-public suspend inline fun <reified T : Any, reified P : WorkspacePlugin> TaskResultBuilder<*>.from(
+public suspend inline fun <reified T, reified P : WorkspacePlugin> TaskResultBuilder<*>.from(
     pluginFactory: PluginFactory<P>,
     dependencyMeta: Meta = defaultDependencyMeta,
     selectorBuilder: P.() -> TaskReference<T>,
@@ -77,7 +77,7 @@ public val TaskResultBuilder<*>.allData: DataSelector<*>
  * @param action process individual data asynchronously.
  */
 @DFExperimental
-public suspend inline fun <T : Any, reified R : Any> TaskResultBuilder<R>.transformEach(
+public suspend inline fun <T, reified R> TaskResultBuilder<R>.transformEach(
     selector: DataSelector<T>,
     dependencyMeta: Meta = defaultDependencyMeta,
     dataMetaTransform: MutableMeta.(name: Name) -> Unit = {},
@@ -93,27 +93,27 @@ public suspend inline fun <T : Any, reified R : Any> TaskResultBuilder<R>.transf
             action(it, data.name, meta)
         }
 
-        emit(data.name, res)
+        data(data.name, res)
     }
 }
 
 /**
  * Set given [dataSet] as a task result.
  */
-public fun <T : Any> TaskResultBuilder<T>.result(dataSet: DataTree<T>) {
-    emitAll(dataSet)
+public fun <T> TaskResultBuilder<T>.result(dataSet: DataTree<T>) {
+    branch(dataSet)
 }
 
 /**
  * Use provided [action] to fill the result
  */
 @DFExperimental
-public suspend inline fun <T : Any, reified R : Any> TaskResultBuilder<R>.actionFrom(
+public suspend inline fun <T, reified R> TaskResultBuilder<R>.actionFrom(
     selector: DataSelector<T>,
     action: Action<T, R>,
     dependencyMeta: Meta = defaultDependencyMeta,
 ) {
-    emitAll(action.execute(workspace.context, from(selector, dependencyMeta), dependencyMeta))
+    branch(action.execute(workspace.context, from(selector, dependencyMeta), dependencyMeta))
 }
 
 
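
For reference, a hedged sketch of a helper built from the selectors above; the import paths are assumptions based on the file locations in this patch:

    import space.kscience.dataforge.workspace.DataSelector
    import space.kscience.dataforge.workspace.TaskResultBuilder
    import space.kscience.dataforge.workspace.from
    import space.kscience.dataforge.workspace.result

    suspend fun <T> TaskResultBuilder<T>.copyFrom(selector: DataSelector<T>) {
        result(from(selector)) // select the dependency with default meta and expose it unchanged as this task's result
    }
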
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
index e6ddf026..a43657f8 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
@@ -95,8 +95,8 @@ public class FileWorkspaceCache(public val cacheDirectory: Path) : WorkspaceCach
         }
 
 
-        val cachedTree = result.data.asSequence().map { cacheOne(it) }
-            .toObservableTree(result.dataType, result.workspace, result.data.updates().map { cacheOne(it) })
+        val cachedTree = result.asSequence().map { cacheOne(it) }
+            .toObservableTree(result.dataType, result.workspace, result.updates().map { cacheOne(it) })
 
         return result.workspace.wrapResult(cachedTree, result.taskName, result.taskMeta)
     }
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
index a36e2b90..a3792231 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
@@ -1,39 +1,39 @@
 package space.kscience.dataforge.workspace
 
+import kotlinx.coroutines.flow.map
+import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.names.Name
 import kotlin.reflect.KType
 import kotlin.reflect.full.isSubtypeOf
 
-private typealias TaskResultId = Pair<Name, Meta>
+private data class TaskResultId(val name: Name, val meta: Meta)
 
 
 public class InMemoryWorkspaceCache : WorkspaceCache {
 
-    // never do that at home!
-    private val cache = HashMap<TaskResultId, HashMap<Name, TaskData<*>>>()
+    private val cache = HashMap<TaskResultId, HashMap<Name, Data<*>>>()
 
     @Suppress("UNCHECKED_CAST")
-    private fun <T : Any> TaskData<*>.checkType(taskType: KType): TaskData<T> =
-        if (type.isSubtypeOf(taskType)) this as TaskData<T>
+    private fun <T> Data<*>.checkType(taskType: KType): Data<T> =
+        if (type.isSubtypeOf(taskType)) this as Data<T>
         else error("Cached data type mismatch: expected $taskType but got $type")
 
-    override suspend fun <T : Any> cache(result: TaskResult<T>): TaskResult<T> {
-        for (d: TaskData<T> in result) {
-            cache.getOrPut(result.taskName to result.taskMeta) { HashMap() }.getOrPut(d.name) { d }
-        }
-
-        return object : TaskResult<T> by result {
-            override fun iterator(): Iterator<TaskData<T>> = (cache[result.taskName to result.taskMeta]
-                ?.values?.map { it.checkType<T>(result.dataType) }
-                ?: emptyList()).iterator()
-
-            override fun get(name: Name): TaskData<T>? {
-                val cached: TaskData<*> = cache[result.taskName to result.taskMeta]?.get(name) ?: return null
-                //TODO check types
-                return cached.checkType(result.dataType)
+    override suspend fun <T> cache(result: TaskResult<T>): TaskResult<T> {
+        fun cacheOne(data: NamedData<T>): NamedData<T> {
+            val cachedData = cache.getOrPut(TaskResultId(result.taskName, result.taskMeta)) {
+                HashMap()
+            }.getOrPut(data.name) {
+                data.data
             }
+            return cachedData.checkType<T>(result.dataType).named(data.name)
         }
+
+
+        val cachedTree = result.asSequence().map { cacheOne(it) }
+            .toObservableTree(result.dataType, result.workspace, result.updates().map { cacheOne(it) })
+
+        return result.workspace.wrapResult(cachedTree, result.taskName, result.taskMeta)
     }
 }
 
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt
index a0e6ab42..4eba363b 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt
@@ -1,30 +1,24 @@
 package space.kscience.dataforge.workspace
 
-import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.isActive
-import kotlinx.coroutines.launch
-import kotlinx.coroutines.withContext
-import space.kscience.dataforge.context.error
-import space.kscience.dataforge.context.logger
-import space.kscience.dataforge.context.warn
+import kotlinx.coroutines.*
 import space.kscience.dataforge.data.*
 import space.kscience.dataforge.io.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.copy
-import space.kscience.dataforge.meta.string
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.misc.DFInternal
-import space.kscience.dataforge.names.*
-import space.kscience.dataforge.workspace.FileData.Companion.defaultPathToName
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.NameToken
+import space.kscience.dataforge.names.asName
+import space.kscience.dataforge.names.plus
+import space.kscience.dataforge.workspace.FileData.defaultPathToName
 import java.nio.file.Files
 import java.nio.file.Path
 import java.nio.file.StandardWatchEventKinds
 import java.nio.file.WatchEvent
 import java.nio.file.attribute.BasicFileAttributes
 import java.nio.file.spi.FileSystemProvider
-import java.time.Instant
 import kotlin.io.path.*
-import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
 
@@ -33,40 +27,28 @@ import kotlin.reflect.typeOf
 public typealias FileFormatResolver<T> = (path: Path, meta: Meta) -> IOReader<T>?
 
 
-/**
- * A data based on a filesystem [Path]
- */
-public class FileData<T> internal constructor(private val data: Data<T>, public val path: Path) : Data<T> by data {
+public object FileData {
+    public val FILE_KEY: Name = "file".asName()
+    public val FILE_PATH_KEY: Name = FILE_KEY + "path"
+    public val FILE_EXTENSION_KEY: Name = FILE_KEY + "extension"
+    public val FILE_CREATE_TIME_KEY: Name = FILE_KEY + "created"
+    public val FILE_UPDATE_TIME_KEY: Name = FILE_KEY + "updated"
+    public const val DF_FILE_EXTENSION: String = "df"
+    public val DEFAULT_IGNORE_EXTENSIONS: Set<String> = setOf(DF_FILE_EXTENSION)
 
-    //    public val path: String? get() = meta[META_FILE_PATH_KEY].string
-//    public val extension: String? get() = meta[META_FILE_EXTENSION_KEY].string
-//
-    public val createdTime: Instant? get() = meta[FILE_CREATE_TIME_KEY].string?.let { Instant.parse(it) }
-    public val updatedTime: Instant? get() = meta[FILE_UPDATE_TIME_KEY].string?.let { Instant.parse(it) }
-
-    public companion object {
-        public val FILE_KEY: Name = "file".asName()
-        public val FILE_PATH_KEY: Name = FILE_KEY + "path"
-        public val FILE_EXTENSION_KEY: Name = FILE_KEY + "extension"
-        public val FILE_CREATE_TIME_KEY: Name = FILE_KEY + "created"
-        public val FILE_UPDATE_TIME_KEY: Name = FILE_KEY + "updated"
-        public const val DF_FILE_EXTENSION: String = "df"
-        public val DEFAULT_IGNORE_EXTENSIONS: Set<String> = setOf(DF_FILE_EXTENSION)
-
-        /**
-         * Transform file name into DataForg name. Ignores DataForge file extensions.
-         */
-        public val defaultPathToName: (Path) -> Name = { path ->
-            Name(
-                path.map { segment ->
-                    if (segment.isRegularFile() && segment.extension in DEFAULT_IGNORE_EXTENSIONS) {
-                        NameToken(path.nameWithoutExtension)
-                    } else {
-                        NameToken(path.name)
-                    }
+    /**
+     * Transform a file name into a DataForge name. Ignores DataForge file extensions.
+     */
+    public val defaultPathToName: (Path) -> Name = { path ->
+        Name(
+            path.map { segment ->
+                if (segment.isRegularFile() && segment.extension in DEFAULT_IGNORE_EXTENSIONS) {
+                    NameToken(path.nameWithoutExtension)
+                } else {
+                    NameToken(path.name)
                 }
-            )
-        }
+            }
+        )
     }
 }
 
@@ -75,14 +57,11 @@ public class FileData<T> internal constructor(private val data: Data<T>, public
  * Read data with supported envelope format and binary format. If the envelope format is null, then read binary directly from file.
  * The operation is blocking since it must read the meta header. The reading of envelope body is lazy
  */
-@OptIn(DFInternal::class)
-@DFExperimental
-public fun <T : Any> IOPlugin.readDataFile(
+@OptIn(DFExperimental::class)
+public fun IOPlugin.readFileData(
     path: Path,
-    formatResolver: FileFormatResolver<T>,
-): FileData<T>? {
+): Data<Binary> {
     val envelope = readEnvelopeFile(path, true)
-    val format = formatResolver(path, envelope.meta) ?: return null
     val updatedMeta = envelope.meta.copy {
         FileData.FILE_PATH_KEY put path.toString()
         FileData.FILE_EXTENSION_KEY put path.extension
@@ -91,146 +70,104 @@ public fun <T : Any> IOPlugin.readDataFile(
         FileData.FILE_UPDATE_TIME_KEY put attributes.lastModifiedTime().toInstant().toString()
         FileData.FILE_CREATE_TIME_KEY put attributes.creationTime().toInstant().toString()
     }
-    return FileData(
-        Data(format.type, updatedMeta) {
-            (envelope.data ?: Binary.EMPTY).readWith(format)
-        },
-        path
+    return StaticData(
+        typeOf<Binary>(),
+        envelope.data ?: Binary.EMPTY,
+        updatedMeta
     )
 }
 
+public fun DataSink<Binary>.file(io: IOPlugin, path: Path, name: Name) {
+    if (!path.isRegularFile()) error("Only regular files could be handled by this function")
+    data(name, io.readFileData(path))
+}
 
-context(IOPlugin) @DFExperimental
-public fun <T : Any> DataSetBuilder<T>.directory(
+public fun DataSink<Binary>.directory(
+    io: IOPlugin,
     path: Path,
     pathToName: (Path) -> Name = defaultPathToName,
-    formatResolver: FileFormatResolver<T>,
 ) {
+    if (!path.isDirectory()) error("Only directories could be handled by this function")
+    val metaFile = path.resolve(IOPlugin.META_FILE_NAME)
+    val dataFile = path.resolve(IOPlugin.DATA_FILE_NAME)
+    //process root data
+    if (metaFile.exists() || dataFile.exists()) {
+        data(
+            Name.EMPTY,
+            StaticData(
+                typeOf<Binary>(),
+                dataFile.takeIf { it.exists() }?.asBinary() ?: Binary.EMPTY,
+                io.readMetaFileOrNull(metaFile) ?: Meta.EMPTY
+            )
+        )
+    }
     Files.list(path).forEach { childPath ->
         val fileName = childPath.fileName.toString()
-        if (fileName.startsWith(IOPlugin.META_FILE_NAME)) {
-            meta(readMetaFile(childPath))
-        } else if (!fileName.startsWith("@")) {
-            file(childPath, pathToName, formatResolver)
+        if (!fileName.startsWith("@")) {
+            files(io, childPath, pathToName)
         }
     }
 }
 
-/**
- * Read the directory as a data node. If [path] is a zip archive, read it as directory
- */
-@DFExperimental
-@DFInternal
-public fun <T : Any> IOPlugin.readDataDirectory(
-    type: KType,
-    path: Path,
-    pathToName: (Path) -> Name = defaultPathToName,
-    formatResolver: FileFormatResolver<T>,
-): LegacyDataTree<T> {
-    //read zipped data node
-    if (path.fileName != null && path.fileName.toString().endsWith(".zip")) {
+public fun DataSink<Binary>.files(io: IOPlugin, path: Path, pathToName: (Path) -> Name = defaultPathToName) {
+    if (path.isRegularFile() && path.extension == "zip") {
         //Using explicit Zip file system to avoid bizarre compatibility bugs
         val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
             ?: error("Zip file system provider not found")
         val fs = fsProvider.newFileSystem(path, mapOf("create" to "true"))
 
-        return readDataDirectory(type, fs.rootDirectories.first(), pathToName, formatResolver)
+        return files(io, fs.rootDirectories.first(), pathToName)
     }
-    if (!Files.isDirectory(path)) error("Provided path $path is not a directory")
-    return DataTree(type) {
-        meta {
-            FileData.FILE_PATH_KEY put path.toString()
-        }
-        directory(path, pathToName, formatResolver)
+    if (path.isRegularFile()) {
+        file(io, path, pathToName(path))
+    } else {
+        directory(io, path, pathToName)
     }
 }
 
-@OptIn(DFInternal::class)
-@DFExperimental
-public inline fun <reified T : Any> IOPlugin.readDataDirectory(
-    path: Path,
-    noinline pathToName: (Path) -> Name = defaultPathToName,
-    noinline formatResolver: FileFormatResolver<T>,
-): LegacyDataTree<T> = readDataDirectory(typeOf<T>(), path, pathToName, formatResolver)
-
-/**
- * Read a raw binary data tree from the directory. All files are read as-is (save for meta files).
- */
-@DFExperimental
-public fun IOPlugin.readRawDirectory(
-    path: Path,
-    pathToName: (Path) -> Name = defaultPathToName,
-): LegacyDataTree<Binary> = readDataDirectory(path, pathToName) { _, _ -> IOReader.binary }
-
 
 private fun Path.toName() = Name(map { NameToken.parse(it.nameWithoutExtension) })
 
 @DFInternal
 @DFExperimental
-public fun <T : Any> IOPlugin.monitorDataDirectory(
-    type: KType,
+public fun DataSink<Binary>.monitorFiles(
+    io: IOPlugin,
     path: Path,
     pathToName: (Path) -> Name = defaultPathToName,
-    formatResolver: FileFormatResolver<T>,
-): DataSource<T> {
-    if (path.fileName.toString().endsWith(".zip")) error("Monitoring not supported for ZipFS")
-    if (!Files.isDirectory(path)) error("Provided path $path is not a directory")
-    return DataSource(type, context) {
-        directory(path, pathToName, formatResolver)
-        launch(Dispatchers.IO) {
-            val watchService = path.fileSystem.newWatchService()
+    scope: CoroutineScope = io.context,
+): Job {
+    files(io, path, pathToName)
+    return scope.launch(Dispatchers.IO) {
+        val watchService = path.fileSystem.newWatchService()
 
-            path.register(
-                watchService,
-                StandardWatchEventKinds.ENTRY_DELETE,
-                StandardWatchEventKinds.ENTRY_MODIFY,
-                StandardWatchEventKinds.ENTRY_CREATE
-            )
+        path.register(
+            watchService,
+            StandardWatchEventKinds.ENTRY_DELETE,
+            StandardWatchEventKinds.ENTRY_MODIFY,
+            StandardWatchEventKinds.ENTRY_CREATE
+        )
 
-            do {
-                val key = watchService.take()
-                if (key != null) {
-                    for (event: WatchEvent<*> in key.pollEvents()) {
-                        val eventPath = event.context() as Path
-                        if (event.kind() == StandardWatchEventKinds.ENTRY_DELETE) {
-                            remove(eventPath.toName())
-                        } else {
-                            val fileName = eventPath.fileName.toString()
-                            if (fileName.startsWith(IOPlugin.META_FILE_NAME)) {
-                                meta(readMetaFile(eventPath))
-                            } else if (!fileName.startsWith("@")) {
-                                file(eventPath, pathToName, formatResolver)
-                            }
+        do {
+            val key = watchService.take()
+            if (key != null) {
+                for (event: WatchEvent<*> in key.pollEvents()) {
+                    val eventPath = event.context() as Path
+                    if (event.kind() == StandardWatchEventKinds.ENTRY_DELETE) {
+                        data(eventPath.toName(), null)
+                    } else {
+                        val fileName = eventPath.fileName.toString()
+                        if (!fileName.startsWith("@")) {
+                            files(io, eventPath, pathToName)
                         }
                     }
-                    key.reset()
                 }
-            } while (isActive && key != null)
-        }
+                key.reset()
+            }
+        } while (isActive && key != null)
     }
+
 }
 
-
-/**
- * Start monitoring given directory ([path]) as a [DataSource].
- */
-@OptIn(DFInternal::class)
-@DFExperimental
-public inline fun <reified T : Any> IOPlugin.monitorDataDirectory(
-    path: Path,
-    noinline pathToName: (Path) -> Name = defaultPathToName,
-    noinline formatResolver: FileFormatResolver<T>,
-): DataSource<T> = monitorDataDirectory(typeOf<T>(), path, pathToName, formatResolver)
-
-/**
- * Read and monitor raw binary data tree from the directory. All files are read as-is (save for meta files).
- */
-@DFExperimental
-public fun IOPlugin.monitorRawDirectory(
-    path: Path,
-    pathToName: (Path) -> Name = defaultPathToName,
-): DataSource<Binary> = monitorDataDirectory(path, pathToName) { _, _ -> IOReader.binary }
-
 /**
  * Write the data tree to existing directory or create a new one using default [java.nio.file.FileSystem] provider
  *
@@ -239,7 +176,7 @@ public fun IOPlugin.monitorRawDirectory(
 @DFExperimental
 public suspend fun <T : Any> IOPlugin.writeDataDirectory(
     path: Path,
-    dataSet: DataSet<T>,
+    dataSet: DataTree<T>,
     format: IOWriter<T>,
     envelopeFormat: EnvelopeFormat? = null,
     nameToPath: (name: Name, data: Data<T>) -> Path = { name, _ ->
@@ -262,62 +199,28 @@ public suspend fun <T : Any> IOPlugin.writeDataDirectory(
                 writeEnvelopeDirectory(childPath, envelope)
             }
         }
-        val directoryMeta = dataSet.meta
-        writeMetaFile(path, directoryMeta)
+        dataSet.meta?.let { writeMetaFile(path, it) }
+
     }
 }
 
 /**
- * Reads the specified resources and returns a [LegacyDataTree] containing the data.
- *
  * @param resources The names of the resources to read.
  * @param classLoader The class loader to use for loading the resources. By default, it uses the current thread's context class loader.
- * @return A DataTree containing the data read from the resources.
  */
 @DFExperimental
-public fun IOPlugin.readResources(
+public fun DataSink<Binary>.resources(
+    io: IOPlugin,
     vararg resources: String,
     pathToName: (Path) -> Name = defaultPathToName,
     classLoader: ClassLoader = Thread.currentThread().contextClassLoader,
-): LegacyDataTree<Binary> = GenericDataTree {
+) {
     resources.forEach { resource ->
         val path = classLoader.getResource(resource)?.toURI()?.toPath() ?: error(
             "Resource with name $resource is not resolved"
         )
-        node(resource, readRawDirectory(path, pathToName))
-    }
-}
-
-/**
- * Add file/directory-based data tree item
- */
-context(IOPlugin)
-@OptIn(DFInternal::class)
-@DFExperimental
-public fun <T : Any> DataSetBuilder<T>.file(
-    path: Path,
-    pathToName: (Path) -> Name = defaultPathToName,
-    formatResolver: FileFormatResolver<out T>,
-) {
-
-    try {
-        //If path is a single file or a special directory, read it as single datum
-        if (!Files.isDirectory(path) || Files.list(path).allMatch { it.fileName.toString().startsWith("@") }) {
-            val data = readDataFile(path, formatResolver)
-            if (data == null) {
-                logger.warn { "File format is not resolved for $path. Skipping." }
-                return
-            }
-            val name: Name = data.meta[Envelope.ENVELOPE_NAME_KEY].string?.parseAsName() ?: pathToName(path.last())
-            data(name, data)
-        } else {
-            //otherwise, read as directory
-            val data: LegacyDataTree<T> = readDataDirectory(dataType, path, pathToName, formatResolver)
-            val name = data.meta[Envelope.ENVELOPE_NAME_KEY].string?.parseAsName() ?: pathToName(path.last())
-            node(name, data)
+        branch(resource.asName()) {
+            files(io, path, pathToName)
         }
-    } catch (ex: Exception) {
-        logger.error(ex) { "Failed to read file or directory at $path: ${ex.message}" }
     }
 }
-
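
A short usage sketch for the sink-based readers above; files dispatches between a single file, a directory, and a .zip archive:

    import space.kscience.dataforge.data.DataSink
    import space.kscience.dataforge.io.Binary
    import space.kscience.dataforge.io.IOPlugin
    import space.kscience.dataforge.workspace.files
    import java.nio.file.Path

    fun DataSink<Binary>.readAll(io: IOPlugin, vararg paths: Path) {
        paths.forEach { files(io, it) } // each path may be a regular file, a directory, or a .zip archive
    }
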
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
index 67570a04..61caf7e0 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
@@ -16,8 +16,8 @@ import space.kscience.dataforge.names.matches
  */
 @OptIn(DFExperimental::class)
 public inline fun <reified T : Any> TaskResultBuilder<*>.dataByType(namePattern: Name? = null): DataSelector<T> =
-    DataSelector<T> { workspace, meta ->
-        workspace.data.filterByType { name, _ ->
+    DataSelector<T> { workspace, _ ->
+        workspace.data.filterByType { name, _, _ ->
             namePattern == null || name.matches(namePattern)
         }
     }
@@ -25,4 +25,4 @@ public inline fun <reified T : Any> TaskResultBuilder<*>.dataByType(namePattern:
 public suspend inline fun <reified T : Any> TaskResultBuilder<*>.fromTask(
     task: Name,
     taskMeta: Meta = Meta.EMPTY,
-): DataTree<T> = workspace.produce(task, taskMeta).data.filterByType()
\ No newline at end of file
+): DataTree<T> = workspace.produce(task, taskMeta).filterByType()
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt
index 02d319de..83b617cb 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt
@@ -2,8 +2,7 @@ package space.kscience.dataforge.workspace
 
 import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.withContext
-import space.kscience.dataforge.data.DataTreeItem
-import space.kscience.dataforge.data.LegacyDataTree
+import space.kscience.dataforge.data.DataTree
 import space.kscience.dataforge.io.*
 import space.kscience.dataforge.misc.DFExperimental
 import java.nio.file.Files
@@ -15,42 +14,39 @@ import java.util.zip.ZipOutputStream
 
 private suspend fun <T : Any> ZipOutputStream.writeNode(
     name: String,
-    treeItem: DataTreeItem<T>,
+    tree: DataTree<T>,
     dataFormat: IOFormat<T>,
     envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat,
 ): Unit = withContext(Dispatchers.IO) {
-    when (treeItem) {
-        is DataTreeItem.Leaf -> {
-            //TODO add directory-based envelope writer
-            val envelope = treeItem.data.toEnvelope(dataFormat)
-            val entry = ZipEntry(name)
-            putNextEntry(entry)
-
-            //TODO remove additional copy
-            val bytes = ByteArray {
-                writeWith(envelopeFormat, envelope)
-            }
-            write(bytes)
+    //TODO add directory-based envelope writer
+    tree.data?.let {
+        val envelope = it.toEnvelope(dataFormat)
+        val entry = ZipEntry(name)
+        putNextEntry(entry)
 
+        //TODO remove additional copy
+        val bytes = ByteArray {
+            writeWith(envelopeFormat, envelope)
         }
-
-        is DataTreeItem.Node -> {
-            val entry = ZipEntry("$name/")
-            putNextEntry(entry)
-            closeEntry()
-            treeItem.tree.items.forEach { (token, item) ->
-                val childName = "$name/$token"
-                writeNode(childName, item, dataFormat, envelopeFormat)
-            }
-        }
+        write(bytes)
     }
+
+
+    val entry = ZipEntry("$name/")
+    putNextEntry(entry)
+    closeEntry()
+    tree.items.forEach { (token, item) ->
+        val childName = "$name/$token"
+        writeNode(childName, item, dataFormat, envelopeFormat)
+    }
+
 }
 
 /**
- * Write this [LegacyDataTree] as a zip archive
+ * Write this [DataTree] as a zip archive
  */
 @DFExperimental
-public suspend fun <T : Any> LegacyDataTree<T>.writeZip(
+public suspend fun <T : Any> DataTree<T>.writeZip(
     path: Path,
     format: IOFormat<T>,
     envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat,
@@ -68,6 +64,6 @@ public suspend fun <T : Any> LegacyDataTree<T>.writeZip(
     )
     val zos = ZipOutputStream(fos)
     zos.use {
-        it.writeNode("", DataTreeItem.Node(this@writeZip), format, envelopeFormat)
+        it.writeNode("", this@writeZip, format, envelopeFormat)
     }
 }
\ No newline at end of file
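
For reference, the reworked writeZip extension can now be called on any DataTree directly. A minimal usage sketch, not part of this diff (StringIOFormat stands in for any IOFormat<String>, such as the test object defined in FileDataTest):

    import kotlinx.coroutines.runBlocking
    import space.kscience.dataforge.data.*
    import space.kscience.dataforge.misc.DFExperimental
    import space.kscience.dataforge.workspace.*
    import java.nio.file.Files

    @OptIn(DFExperimental::class)
    fun main() = runBlocking {
        // build a small tree of strings and archive it
        val tree = DataTree<String> {
            static("a", "Some string")
            static("dir.b", "Nested string")
        }
        val zip = Files.createTempFile("df_data_node", ".zip")
        // envelopeFormat defaults to TaggedEnvelopeFormat;
        // StringIOFormat is an IOFormat<String> defined elsewhere (assumption)
        tree.writeZip(zip, StringIOFormat)
    }
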
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
index 467b04b0..78ce853e 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
@@ -4,7 +4,6 @@ import kotlinx.coroutines.ExperimentalCoroutinesApi
 import kotlinx.coroutines.coroutineScope
 import kotlinx.coroutines.test.runTest
 import org.junit.jupiter.api.Test
-import space.kscience.dataforge.data.startAll
 import space.kscience.dataforge.data.static
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.boolean
@@ -37,7 +36,6 @@ internal class CachingWorkspaceTest {
                 }
             }
 
-            @Suppress("UNUSED_VARIABLE")
             val doSecond by task<Any> {
                 transformEach(
                     doFirst,
@@ -54,11 +52,11 @@ internal class CachingWorkspaceTest {
         val secondB = workspace.produce("doSecond", Meta { "flag" put true })
         val secondC = workspace.produce("doSecond")
         coroutineScope {
-            first.startAll(this)
-            secondA.startAll(this)
-            secondB.startAll(this)
+            first.compute(this)
+            secondA.compute(this)
+            secondB.compute(this)
             //repeat to check caching
-            secondC.startAll(this)
+            secondC.compute(this)
         }
         assertEquals(10, firstCounter)
         assertEquals(10, secondCounter)
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
index dbc9cb5e..403b1c51 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
@@ -20,13 +20,13 @@ class DataPropagationTestPlugin : WorkspacePlugin() {
         val result: Data<Int> = selectedData.foldToData(0) { result, data ->
             result + data.value
         }
-        emit("result", result)
+        data("result", result)
     }
 
 
     val singleData by task<Int> {
         workspace.data.filterByType<Int>()["myData[12]"]?.let {
-            emit("result", it)
+            data("result", it)
         }
     }
 
@@ -55,12 +55,12 @@ class DataPropagationTest {
     @Test
     fun testAllData() = runTest {
         val node = testWorkspace.produce("Test.allData")
-        assertEquals(4950, node.asSequence().single().await())
+        assertEquals(4950, node.content.asSequence().single().await())
     }
 
     @Test
     fun testSingleData() = runTest {
         val node = testWorkspace.produce("Test.singleData")
-        assertEquals(12, node.asSequence().single().await())
+        assertEquals(12, node.content.asSequence().single().await())
     }
 }
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
index 20d03cd3..1a21fb17 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
@@ -9,38 +9,32 @@ import kotlinx.io.writeString
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.Global
 import space.kscience.dataforge.data.*
-import space.kscience.dataforge.io.Envelope
-import space.kscience.dataforge.io.IOFormat
-import space.kscience.dataforge.io.io
-import space.kscience.dataforge.io.readEnvelopeFile
+import space.kscience.dataforge.io.*
 import space.kscience.dataforge.io.yaml.YamlPlugin
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.misc.DFExperimental
 import java.nio.file.Files
 import kotlin.io.path.fileSize
 import kotlin.io.path.toPath
-import kotlin.reflect.KType
-import kotlin.reflect.typeOf
 import kotlin.test.Test
 import kotlin.test.assertEquals
 
 
 class FileDataTest {
-    val dataNode = GenericDataTree<String> {
-        node("dir") {
+    val dataNode = DataTree<String> {
+        branch("dir") {
             static("a", "Some string") {
                 "content" put "Some string"
             }
         }
         static("b", "root data")
-        meta {
-            "content" put "This is root meta node"
-        }
+//        meta {
+//            "content" put "This is root meta node"
+//        }
     }
 
 
     object StringIOFormat : IOFormat<String> {
-        override val type: KType get() = typeOf<String>()
 
         override fun writeTo(sink: Sink, obj: String) {
             sink.writeString(obj)
@@ -52,11 +46,13 @@ class FileDataTest {
     @Test
     @DFExperimental
     fun testDataWriteRead() = with(Global.io) {
+        val io = Global.io
         val dir = Files.createTempDirectory("df_data_node")
         runBlocking {
             writeDataDirectory(dir, dataNode, StringIOFormat)
             println(dir.toUri().toString())
-            val reconstructed = readDataDirectory(dir) { _, _ -> StringIOFormat }
+            val reconstructed = DataTree { files(io, dir) }
+                .transform { (_, value) -> value.toByteArray().decodeToString() }
             assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
             assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
         }
@@ -66,14 +62,15 @@ class FileDataTest {
     @Test
     @DFExperimental
     fun testZipWriteRead() = runTest {
-        with(Global.io) {
-            val zip = Files.createTempFile("df_data_node", ".zip")
-            dataNode.writeZip(zip, StringIOFormat)
-            println(zip.toUri().toString())
-            val reconstructed = readDataDirectory(zip) { _, _ -> StringIOFormat }
-            assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
-            assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
-        }
+        val io = Global.io
+        val zip = Files.createTempFile("df_data_node", ".zip")
+        dataNode.writeZip(zip, StringIOFormat)
+        println(zip.toUri().toString())
+        val reconstructed = DataTree { files(io, zip) }
+            .transform { (_, value) -> value.toByteArray().decodeToString() }
+        assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
+        assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
+
     }
 
     @OptIn(DFExperimental::class)
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
index 03fda0eb..ee497e1b 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
@@ -3,7 +3,6 @@ package space.kscience.dataforge.workspace
 import kotlinx.coroutines.ExperimentalCoroutinesApi
 import kotlinx.coroutines.test.runTest
 import org.junit.jupiter.api.Test
-import space.kscience.dataforge.data.startAll
 import space.kscience.dataforge.data.static
 import space.kscience.dataforge.misc.DFExperimental
 import java.nio.file.Files
@@ -22,13 +21,12 @@ class FileWorkspaceCacheTest {
             }
             fileCache(Files.createTempDirectory("dataforge-temporary-cache"))
 
-            @Suppress("UNUSED_VARIABLE")
             val echo by task<String> {
                 transformEach(dataByType<String>()) { arg, _, _ -> arg }
             }
         }
 
-        workspace.produce("echo").startAll(this)
+        workspace.produce("echo").compute(this)
 
     }
 }
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
index a41a73bc..91fa2f6c 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
@@ -27,8 +27,8 @@ public fun <P : Plugin> P.toFactory(): PluginFactory<P> = object : PluginFactory
     override val tag: PluginTag = this@toFactory.tag
 }
 
-public fun Workspace.produceBlocking(task: String, block: MutableMeta.() -> Unit = {}): DataSet<Any> = runBlocking {
-    produce(task, block)
+public fun Workspace.produceBlocking(task: String, block: MutableMeta.() -> Unit = {}): DataTree<*> = runBlocking {
+    produce(task, block).content
 }
 
 @OptIn(DFExperimental::class)
@@ -68,8 +68,8 @@ internal class SimpleWorkspaceTest {
 
         val filterOne by task<Int> {
             val name by taskMeta.string { error("Name field not defined") }
-            from(testPluginFactory) { test }.getByType<Int>(name)?.let { source ->
-                data(source.name, source.map { it })
+            from(testPluginFactory) { test }[name]?.let { source: Data<Int> ->
+                data(name, source)
             }
         }
 
@@ -110,14 +110,14 @@ internal class SimpleWorkspaceTest {
         }
 
         val averageByGroup by task<Int> {
-            val evenSum = workspace.data.filterByType<Int> { name, _ ->
+            val evenSum = workspace.data.filterByType<Int> { name, _, _ ->
                 name.toString().toInt() % 2 == 0
             }.foldToData(0) { l, r ->
                 l + r.value
             }
 
             data("even", evenSum)
-            val oddSum = workspace.data.filterByType<Int> { name, _ ->
+            val oddSum = workspace.data.filterByType<Int> { name, _, _ ->
                 name.toString().toInt() % 2 == 1
             }.foldToData(0) { l, r ->
                 l + r.value
@@ -159,7 +159,7 @@ internal class SimpleWorkspaceTest {
     @Timeout(1)
     fun testMetaPropagation() = runTest {
         val node = workspace.produce("sum") { "testFlag" put true }
-        val res = node.asSequence().single().await()
+        val res = node.single().await()
     }
 
     @Test
@@ -170,20 +170,25 @@ internal class SimpleWorkspaceTest {
     }
 
     @Test
-    fun testFullSquare() {
-        runBlocking {
-            val node = workspace.produce("fullSquare")
-            println(node.toMeta())
+    fun testFullSquare() = runTest {
+        val result = workspace.produce("fullSquare")
+        result.forEach {
+            println(
+                """
+                Name: ${it.name}
+                Meta: ${it.meta}
+                Data: ${it.data.await()}
+            """.trimIndent()
+            )
         }
     }
 
     @Test
-    fun testFilter() {
-        runBlocking {
-            val node = workspace.produce("filterOne") {
-                "name" put "myData[12]"
-            }
-            assertEquals(12, node.single().await())
+    fun testFilter() = runTest {
+        val node = workspace.produce("filterOne") {
+            "name" put "myData[12]"
         }
+        assertEquals(12, node.single().await())
     }
+
 }
\ No newline at end of file

From 466e46098949663d2da39a8403e059e59256c02c Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 3 Feb 2024 17:25:49 +0300
Subject: [PATCH 18/77] Data tree refactored to a uniform tree instead of a
 sealed class.

---
 .../dataforge/actions/AbstractAction.kt       |  30 +++-
 .../kscience/dataforge/actions/Action.kt      |  18 +--
 .../kscience/dataforge/actions/MapAction.kt   |   4 +-
 .../dataforge/actions/ReduceAction.kt         |   2 +-
 .../kscience/dataforge/actions/SplitAction.kt |   4 +-
 .../kscience/dataforge/data/DataSource.kt     | 143 ++++++++++++------
 .../kscience/dataforge/data/GroupRule.kt      |   4 +-
 .../kscience/dataforge/data/dataBuilders.kt   |  75 ++++-----
 .../kscience/dataforge/data/dataTransform.kt  |   2 +-
 .../dataforge/data/dataSetBuilderInContext.kt |  19 +--
 .../kscience/dataforge/data/ActionsTest.kt    |  20 ++-
 .../dataforge/data/DataTreeBuilderTest.kt     |  81 ++++------
 .../kscience/dataforge/workspace/Task.kt      |  13 +-
 .../dataforge/workspace/TaskResult.kt         |  10 +-
 .../dataforge/workspace/WorkspaceBuilder.kt   |   2 +-
 .../dataforge/workspace/taskBuilders.kt       |   4 +-
 .../kscience/dataforge/workspace/fileData.kt  | 124 ++++++++-------
 .../kscience/dataforge/workspace/zipData.kt   |  69 +++------
 .../workspace/CachingWorkspaceTest.kt         |  19 +--
 .../workspace/DataPropagationTest.kt          |   6 +-
 .../dataforge/workspace/FileDataTest.kt       |  28 ++--
 .../workspace/FileWorkspaceCacheTest.kt       |   6 +-
 .../workspace/SimpleWorkspaceTest.kt          |  16 +-
 23 files changed, 353 insertions(+), 346 deletions(-)
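
In short, after this commit every node is a plain DataTree with an optional data payload plus child items, sinks use put/wrap instead of data/static, and observable trees own an updatesScope that can be closed. A rough sketch of the resulting API, using only names introduced in the diffs below:

    import space.kscience.dataforge.data.*

    fun sketch() {
        val tree: DataTree<Int> = DataTree {
            wrap("eager", 1)                  // fixed value, former `static`
            put("lazy") { 1 + 1 }             // lazily computed Data, former `data`
            branch("sub") { wrap("x", 3) }    // nested branch
        }

        val mutable = MutableDataTree<Int>()  // observable; propagates updates on GlobalScope by default
        mutable.wrap("value", 42)
        mutable.close()                       // cancels updatesScope; no further updates
    }
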

diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
index 33d4ca05..7cd1ced5 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
@@ -1,9 +1,11 @@
 package space.kscience.dataforge.actions
 
-import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.flow.launchIn
+import kotlinx.coroutines.flow.onEach
 import kotlinx.coroutines.launch
 import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.startsWith
 import kotlin.reflect.KType
@@ -33,26 +35,38 @@ public abstract class AbstractAction<T : Any, R : Any>(
 
     /**
      * Update part of the data set using provided data
+     *
+     * @param source the full source data tree, used when several data items are needed to perform the update
      */
     protected open fun DataSink<R>.update(
-        allData: DataTree<T>,
+        source: DataTree<T>,
         meta: Meta,
         namedData: NamedData<T>,
     ){
         //by default regenerate the whole data set
-        generate(allData,meta)
+        generate(source,meta)
     }
 
+    @OptIn(DFInternal::class)
     override fun execute(
-        scope: CoroutineScope,
         dataSet: DataTree<T>,
         meta: Meta,
-    ): ObservableDataTree<R> = MutableDataTree<R>(outputType, scope).apply {
-        generate(dataSet, meta)
-        scope.launch {
-            dataSet.updates().collect {
+    ): DataTree<R> = if(dataSet.isObservable()) {
+        MutableDataTree<R>(outputType, dataSet.updatesScope).apply {
+            generate(dataSet, meta)
+            dataSet.updates().onEach {
                 update(dataSet, meta, it)
+            }.launchIn(updatesScope)
+
+            //close updates when the source is closed
+            updatesScope.launch {
+                dataSet.awaitClose()
+                close()
             }
         }
+    } else{
+        DataTree(outputType){
+            generate(dataSet, meta)
+        }
     }
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
index ac903aee..5ed60db9 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
@@ -1,9 +1,6 @@
 package space.kscience.dataforge.actions
 
-import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.coroutineScope
 import space.kscience.dataforge.data.DataTree
-import space.kscience.dataforge.data.ObservableDataTree
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.misc.DFExperimental
 
@@ -16,7 +13,7 @@ public fun interface Action<T, R> {
      * Transform the data in the node, producing a new node. By default, it is assumed that all calculations are lazy
      * so no actual computation is started at this moment.
      */
-    public fun execute(scope: CoroutineScope, dataSet: DataTree<T>, meta: Meta): ObservableDataTree<R>
+    public fun execute(dataSet: DataTree<T>, meta: Meta): DataTree<R>
 
     public companion object
 }
@@ -26,20 +23,21 @@ public fun interface Action<T, R> {
  */
 public fun <T, R> DataTree<T>.transform(
     action: Action<T, R>,
-    scope: CoroutineScope,
     meta: Meta = Meta.EMPTY,
-): DataTree<R> = action.execute(scope, this, meta)
+): DataTree<R> = action.execute(this, meta)
 
 /**
  * Action composition. The result is terminal if one of its parts is terminal
  */
-public infix fun <T , I, R> Action<T, I>.then(action: Action<I, R>): Action<T, R> =
-    Action { scope, dataSet, meta -> action.execute(scope, this@then.execute(scope, dataSet, meta), meta) }
+public infix fun <T, I, R> Action<T, I>.then(action: Action<I, R>): Action<T, R> = Action { dataSet, meta ->
+    action.execute(this@then.execute(dataSet, meta), meta)
+}
 
 @DFExperimental
-public suspend operator fun <T, R> Action<T, R>.invoke(
+public operator fun <T, R> Action<T, R>.invoke(
     dataSet: DataTree<T>,
     meta: Meta = Meta.EMPTY,
-): DataTree<R> = coroutineScope { execute(this, dataSet, meta) }
+): DataTree<R> = execute(dataSet, meta)
+
 
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index 3cf7c788..1f40ed73 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -79,14 +79,14 @@ internal class MapAction<T : Any, R : Any>(
             builder.result(env, data.await())
         }
         //setting the data node
-        data(newName, newData)
+        put(newName, newData)
     }
 
     override fun DataSink<R>.generate(data: DataTree<T>, meta: Meta) {
         data.forEach { mapOne(it.name, it.data, meta) }
     }
 
-    override fun DataSink<R>.update(allData: DataTree<T>, meta: Meta, namedData: NamedData<T>) {
+    override fun DataSink<R>.update(source: DataTree<T>, meta: Meta, namedData: NamedData<T>) {
         mapOne(namedData.name, namedData.data, namedData.meta)
     }
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
index 5b862046..9440be55 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
@@ -103,7 +103,7 @@ internal class ReduceAction<T : Any, R : Any>(
                 meta = groupMeta
             ) { group.result.invoke(env, it) }
 
-            data(env.name, res)
+            put(env.name, res)
         }
     }
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
index 7926ce0b..057419a7 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
@@ -64,7 +64,7 @@ internal class SplitAction<T : Any, R : Any>(
             ).apply(rule)
             //data.map<R>(outputType, meta = env.meta) { env.result(it) }.named(fragmentName)
 
-            data(
+            put(
                 fragmentName,
                 @Suppress("OPT_IN_USAGE") Data(outputType, meta = env.meta, dependencies = listOf(data)) {
                     env.result(data.await())
@@ -77,7 +77,7 @@ internal class SplitAction<T : Any, R : Any>(
         data.forEach { splitOne(it.name, it.data, meta) }
     }
 
-    override fun DataSink<R>.update(allData: DataTree<T>, meta: Meta, namedData: NamedData<T>) {
+    override fun DataSink<R>.update(source: DataTree<T>, meta: Meta, namedData: NamedData<T>) {
         splitOne(namedData.name, namedData.data, namedData.meta)
     }
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
index d6011839..d379d027 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
@@ -1,11 +1,11 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.*
 import kotlinx.coroutines.flow.*
-import kotlinx.coroutines.launch
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.*
+import kotlin.contracts.contract
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
@@ -66,7 +66,7 @@ public interface GenericDataTree<out T, out TR : GenericDataTree<T, TR>> : DataS
     }
 }
 
-public typealias DataTree<T> = GenericDataTree<T, GenericDataTree<T,*>>
+public typealias DataTree<T> = GenericDataTree<T, GenericDataTree<T, *>>
 
 /**
  * Return a single data in this tree. Throw error if it is not single.
@@ -87,7 +87,7 @@ public operator fun <T> DataTree<T>.get(name: String): Data<T>? = read(name.pars
 public fun <T> DataTree<T>.asSequence(
     namePrefix: Name = Name.EMPTY,
 ): Sequence<NamedData<T>> = sequence {
-    data?.let { yield(it.named(Name.EMPTY)) }
+    data?.let { yield(it.named(namePrefix)) }
     items.forEach { (token, tree) ->
         yieldAll(tree.asSequence(namePrefix + token))
     }
@@ -113,8 +113,8 @@ public fun GenericDataTree<*, *>.isEmpty(): Boolean = data == null && items.isEm
 @PublishedApi
 internal class FlatDataTree<T>(
     override val dataType: KType,
-    val dataSet: Map<Name, Data<T>>,
-    val prefix: Name,
+    private val dataSet: Map<Name, Data<T>>,
+    private val prefix: Name,
 ) : GenericDataTree<T, FlatDataTree<T>> {
     override val self: FlatDataTree<T> get() = this
     override val data: Data<T>? get() = dataSet[prefix]
@@ -141,20 +141,56 @@ internal fun <T> Sequence<NamedData<T>>.toTree(type: KType): DataTree<T> =
 public inline fun <reified T> Sequence<NamedData<T>>.toTree(): DataTree<T> =
     FlatDataTree(typeOf<T>(), associate { it.name to it.data }, Name.EMPTY)
 
-public interface GenericObservableDataTree<out T, out TR : GenericObservableDataTree<T, TR>> : GenericDataTree<T, TR>,
-    ObservableDataSource<T>
+public interface GenericObservableDataTree<out T, out TR : GenericObservableDataTree<T, TR>> :
+    GenericDataTree<T, TR>, ObservableDataSource<T>, AutoCloseable {
+
+    /**
+     * A scope that is used to propagate updates. When this scope is closed, no new updates can arrive.
+     */
+    public val updatesScope: CoroutineScope
+
+    /**
+     * Close this data tree updates channel
+     */
+    override fun close() {
+        updatesScope.cancel()
+    }
+
+}
 
 public typealias ObservableDataTree<T> = GenericObservableDataTree<T, GenericObservableDataTree<T, *>>
 
-public fun <T> DataTree<T>.updates(): Flow<NamedData<T>> = if (this is GenericObservableDataTree<T,*>) updates() else emptyFlow()
-
-public fun interface DataSink<in T> {
-    public fun data(name: Name, data: Data<T>?)
+/**
+ * Check if the [DataTree] is observable
+ */
+public fun <T> DataTree<T>.isObservable(): Boolean {
+    contract {
+        returns(true) implies (this@isObservable is GenericObservableDataTree<T, *>)
+    }
+    return this is GenericObservableDataTree<T, *>
 }
 
+/**
+ * Wait for this data tree to stop spawning updates (updatesScope is closed).
+ * If this [DataTree] is not observable, return immediately.
+ */
+public suspend fun <T> DataTree<T>.awaitClose() {
+    if (isObservable()) {
+        updatesScope.coroutineContext[Job]?.join()
+    }
+}
+
+public fun <T> DataTree<T>.updates(): Flow<NamedData<T>> =
+    if (this is GenericObservableDataTree<T, *>) updates() else emptyFlow()
+
+public fun interface DataSink<in T> {
+    public fun put(name: Name, data: Data<T>?)
+}
+
+@DFInternal
 public class DataTreeBuilder<T>(private val type: KType) : DataSink<T> {
     private val map = HashMap<Name, Data<T>>()
-    override fun data(name: Name, data: Data<T>?) {
+    override fun put(name: Name, data: Data<T>?) {
         if (data == null) {
             map.remove(name)
         } else {
@@ -174,6 +210,7 @@ public inline fun <T> DataTree(
 /**
  * Create and initialize a data tree.
  */
+@OptIn(DFInternal::class)
 public inline fun <reified T> DataTree(
     generator: DataSink<T>.() -> Unit,
 ): DataTree<T> = DataTreeBuilder<T>(typeOf<T>()).apply(generator).build()
@@ -182,77 +219,88 @@ public inline fun <reified T> DataTree(
  * A mutable version of [GenericDataTree]
  */
 public interface MutableDataTree<T> : GenericObservableDataTree<T, MutableDataTree<T>>, DataSink<T> {
-    public val scope: CoroutineScope
-
     override var data: Data<T>?
 
     override val items: Map<NameToken, MutableDataTree<T>>
 
+    public fun getOrCreateItem(token: NameToken): MutableDataTree<T>
+
     public operator fun set(token: NameToken, data: Data<T>?)
 
-    override fun data(name: Name, data: Data<T>?): Unit = set(name, data)
+    override fun put(name: Name, data: Data<T>?): Unit = set(name, data)
 }
 
 public tailrec operator fun <T> MutableDataTree<T>.set(name: Name, data: Data<T>?): Unit {
     when (name.length) {
         0 -> this.data = data
         1 -> set(name.first(), data)
-        else -> items[name.first()]?.set(name.cutFirst(), data)
+        else -> getOrCreateItem(name.first())[name.cutFirst()] = data
     }
 }
 
-private class ObservableMutableDataTreeImpl<T>(
+private class MutableDataTreeImpl<T>(
     override val dataType: KType,
-    override val scope: CoroutineScope,
+    override val updatesScope: CoroutineScope,
 ) : MutableDataTree<T> {
 
+
     private val updates = MutableSharedFlow<NamedData<T>>()
 
     private val children = HashMap<NameToken, MutableDataTree<T>>()
 
     override var data: Data<T>? = null
         set(value) {
+            if (!updatesScope.isActive) error("Can't send updates to closed MutableDataTree")
             field = value
             if (value != null) {
-                scope.launch {
+                updatesScope.launch {
                     updates.emit(value.named(Name.EMPTY))
                 }
             }
         }
 
     override val items: Map<NameToken, MutableDataTree<T>> get() = children
+
+    override fun getOrCreateItem(token: NameToken): MutableDataTree<T> = children.getOrPut(token){
+        MutableDataTreeImpl(dataType, updatesScope)
+    }
+
     override val self: MutableDataTree<T> get() = this
 
     override fun set(token: NameToken, data: Data<T>?) {
-        children.getOrPut(token) {
-            ObservableMutableDataTreeImpl<T>(dataType, scope).also { subTree ->
-                subTree.updates().onEach {
-                    updates.emit(it.named(token + it.name))
-                }.launchIn(scope)
-            }
-        }.data = data
+        if (!updatesScope.isActive) error("Can't send updates to closed MutableDataTree")
+        val subTree = getOrCreateItem(token)
+        subTree.updates().onEach {
+            updates.emit(it.named(token + it.name))
+        }.launchIn(updatesScope)
+        subTree.data = data
     }
 
-    override fun updates(): Flow<NamedData<T>> = flow {
-        //emit this node updates
-        updates.collect {
-            emit(it)
-        }
-    }
+    override fun updates(): Flow<NamedData<T>> = updates
 }
 
+/**
+ * Create a new [MutableDataTree]
+ *
+ * @param parentScope a [CoroutineScope] to control data propagation. By default, it uses [GlobalScope]
+ */
+@OptIn(DelicateCoroutinesApi::class)
 public fun <T> MutableDataTree(
     type: KType,
-    scope: CoroutineScope
-): MutableDataTree<T> = ObservableMutableDataTreeImpl<T>(type, scope)
+    parentScope: CoroutineScope = GlobalScope,
+): MutableDataTree<T> = MutableDataTreeImpl<T>(
+    type,
+    CoroutineScope(parentScope.coroutineContext + Job(parentScope.coroutineContext[Job]))
+)
 
 /**
  * Create and initialize an observable mutable data tree.
  */
+@OptIn(DelicateCoroutinesApi::class)
 public inline fun <reified T> MutableDataTree(
-    scope: CoroutineScope,
+    parentScope: CoroutineScope = GlobalScope,
     generator: MutableDataTree<T>.() -> Unit = {},
-): MutableDataTree<T> = MutableDataTree<T>(typeOf<T>(), scope).apply { generator() }
+): MutableDataTree<T> = MutableDataTree<T>(typeOf<T>(), parentScope).apply { generator() }
 
 //@DFInternal
 //public fun <T> ObservableDataTree(
@@ -262,18 +310,21 @@ public inline fun <reified T> MutableDataTree(
 //): ObservableDataTree<T> = MutableDataTree<T>(type, scope.coroutineContext).apply(generator)
 
 public inline fun <reified T> ObservableDataTree(
-    scope: CoroutineScope,
+    parentScope: CoroutineScope,
     generator: MutableDataTree<T>.() -> Unit = {},
-): ObservableDataTree<T> = MutableDataTree<T>(typeOf<T>(), scope).apply(generator)
+): ObservableDataTree<T> = MutableDataTree<T>(typeOf<T>(), parentScope).apply(generator)
 
 
 /**
  * Collect a [Sequence] into an observable tree with additional [updates]
  */
-public fun <T> Sequence<NamedData<T>>.toObservableTree(dataType: KType, scope: CoroutineScope, updates: Flow<NamedData<T>>): ObservableDataTree<T> =
-    MutableDataTree<T>(dataType, scope).apply {
-        emitAll(this@toObservableTree)
-        updates.onEach {
-            data(it.name, it.data)
-        }.launchIn(scope)
-    }
+public fun <T> Sequence<NamedData<T>>.toObservableTree(
+    dataType: KType,
+    parentScope: CoroutineScope,
+    updates: Flow<NamedData<T>>,
+): ObservableDataTree<T> = MutableDataTree<T>(dataType, parentScope).apply {
+    this.putAll(this@toObservableTree)
+    updates.onEach {
+        put(it.name, it.data)
+    }.launchIn(updatesScope)
+}
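
To illustrate the new life cycle: a mutable tree accepts puts while its updatesScope is active, exposes changes through updates(), and stops accepting and emitting updates once closed. A minimal sketch (the default parentScope is GlobalScope, as in the factory above):

    import kotlinx.coroutines.runBlocking
    import space.kscience.dataforge.data.*

    fun main() = runBlocking {
        val tree = MutableDataTree<Int>()

        repeat(3) { tree.wrap("value[$it]", it) }

        // updates() is a Flow<NamedData<Int>> that observers may collect;
        // close() cancels updatesScope, so no further puts or update emissions are possible
        tree.close()
        tree.awaitClose()                  // returns once the updates Job completes

        println(tree["value[1]"]?.await()) // prints 1
    }
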
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
index 1a1c66b2..90486d85 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
@@ -17,6 +17,7 @@ package space.kscience.dataforge.data
 
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.string
+import space.kscience.dataforge.misc.DFInternal
 
 public interface GroupRule {
     public fun <T : Any> gather(set: DataTree<T>): Map<String, DataTree<T>>
@@ -30,6 +31,7 @@ public interface GroupRule {
          * @param defaultTagValue
          * @return
          */
+        @OptIn(DFInternal::class)
         public fun byMetaValue(
             key: String,
             defaultTagValue: String,
@@ -42,7 +44,7 @@ public interface GroupRule {
 
                 set.forEach { data ->
                     val tagValue: String = data.meta[key]?.string ?: defaultTagValue
-                    map.getOrPut(tagValue) { DataTreeBuilder(set.dataType) }.data(data.name,data.data)
+                    map.getOrPut(tagValue) { DataTreeBuilder(set.dataType) }.put(data.name, data.data)
                 }
 
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
index 72b1bb33..8e8b6eaa 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
@@ -12,16 +12,12 @@ import space.kscience.dataforge.names.isEmpty
 import space.kscience.dataforge.names.plus
 
 
-public fun <T> DataSink<T>.data(value: NamedData<T>) {
-    data(value.name, value.data)
-}
-
-public fun <T> DataSink<T>.emitAll(sequence: Sequence<NamedData<T>>) {
-    sequence.forEach { data(it) }
+public fun <T> DataSink<T>.put(value: NamedData<T>) {
+    put(value.name, value.data)
 }
 
 public fun <T> DataSink<T>.branch(dataTree: DataTree<T>) {
-    emitAll(dataTree.asSequence())
+    putAll(dataTree.asSequence())
 }
 
 public inline fun <T> DataSink<T>.branch(
@@ -32,7 +28,7 @@ public inline fun <T> DataSink<T>.branch(
         apply(block)
     } else {
         val proxyDataSink = DataSink { nameWithoutPrefix, data ->
-            this.data(prefix + nameWithoutPrefix, data)
+            this.put(prefix + nameWithoutPrefix, data)
         }
 
         proxyDataSink.apply(block)
@@ -45,69 +41,69 @@ public inline fun <T> DataSink<T>.branch(
 ): Unit = branch(prefix.asName(), block)
 
 
-public fun <T> DataSink<T>.data(name: String, value: Data<T>) {
-    data(Name.parse(name), value)
+public fun <T> DataSink<T>.put(name: String, value: Data<T>) {
+    put(Name.parse(name), value)
 }
 
 public fun <T> DataSink<T>.branch(name: Name, set: DataTree<T>) {
-    branch(name) { emitAll(set.asSequence()) }
+    branch(name) { putAll(set.asSequence()) }
 }
 
 public fun <T> DataSink<T>.branch(name: String, set: DataTree<T>) {
-    branch(Name.parse(name)) { emitAll(set.asSequence()) }
+    branch(Name.parse(name)) { putAll(set.asSequence()) }
 }
 
 /**
  * Produce lazy [Data] and emit it into the [MutableDataTree]
  */
-public inline fun <reified T> DataSink<T>.data(
+public inline fun <reified T> DataSink<T>.put(
     name: String,
     meta: Meta = Meta.EMPTY,
     noinline producer: suspend () -> T,
 ) {
     val data = Data(meta, block = producer)
-    data(name, data)
+    put(name, data)
 }
 
-public inline fun <reified T> DataSink<T>.data(
+public inline fun <reified T> DataSink<T>.put(
     name: Name,
     meta: Meta = Meta.EMPTY,
     noinline producer: suspend () -> T,
 ) {
     val data = Data(meta, block = producer)
-    data(name, data)
+    put(name, data)
 }
 
 /**
  * Emit static data with the fixed value
  */
-public inline fun <reified T> DataSink<T>.static(
+public inline fun <reified T> DataSink<T>.wrap(
     name: String,
     data: T,
     meta: Meta = Meta.EMPTY,
-): Unit = data(name, Data.static(data, meta))
+): Unit = put(name, Data.static(data, meta))
 
-public inline fun <reified T> DataSink<T>.static(
+public inline fun <reified T> DataSink<T>.wrap(
     name: Name,
     data: T,
     meta: Meta = Meta.EMPTY,
-): Unit = data(name, Data.static(data, meta))
+): Unit = put(name, Data.static(data, meta))
 
-public inline fun <reified T> DataSink<T>.static(
+public inline fun <reified T> DataSink<T>.wrap(
     name: String,
     data: T,
     mutableMeta: MutableMeta.() -> Unit,
-): Unit = data(Name.parse(name), Data.static(data, Meta(mutableMeta)))
+): Unit = put(Name.parse(name), Data.static(data, Meta(mutableMeta)))
 
 
-public fun <T> DataSink<T>.populateFrom(sequence: Sequence<NamedData<T>>) {
+public fun <T> DataSink<T>.putAll(sequence: Sequence<NamedData<T>>) {
     sequence.forEach {
-        data(it.name, it.data)
+        put(it.name, it.data)
     }
 }
 
-public fun <T> DataSink<T>.populateFrom(tree: DataTree<T>) {
-    populateFrom(tree.asSequence())
+public fun <T> DataSink<T>.putAll(tree: DataTree<T>) {
+    this.putAll(tree.asSequence())
 }
 
 
@@ -115,13 +111,22 @@ public fun <T> DataSink<T>.populateFrom(tree: DataTree<T>) {
  * Update data with given node data and meta with node meta.
  */
 @DFExperimental
-public fun <T> MutableDataTree<T>.populateFrom(flow: ObservableDataSource<T>): Job = flow.updates().onEach {
-    //TODO check if the place is occupied
-    data(it.name, it.data)
-}.launchIn(scope)
+public fun <T> MutableDataTree<T>.putAll(source: DataTree<T>) {
+    source.forEach {
+        put(it.name, it.data)
+    }
+}
 
-//public fun <T > DataSetBuilder<T>.populateFrom(flow: Flow<NamedData<T>>) {
-//    flow.collect {
-//        data(it.name, it.data)
-//    }
-//}
+/**
+ * Copy the given observable [dataSet] into a branch named [name] of this [DataSink] and mirror its subsequent changes. Returns the update [Job]
+ */
+public fun <T : Any> DataSink<T>.watchBranch(
+    name: Name,
+    dataSet: ObservableDataTree<T>,
+): Job {
+    branch(name, dataSet)
+    return dataSet.updates().onEach {
+        put(name + it.name, it.data)
+    }.launchIn(dataSet.updatesScope)
+
+}
\ No newline at end of file
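
The new watchBranch helper replaces the removed setAndWatch: it copies an observable subtree into a named branch and keeps relaying its updates. A sketch modeled on the DataTreeBuilderTest change below (the delay only gives the relay coroutine time to run):

    import kotlinx.coroutines.delay
    import kotlinx.coroutines.runBlocking
    import space.kscience.dataforge.data.*
    import space.kscience.dataforge.names.asName

    fun main() = runBlocking {
        val child = MutableDataTree<Int>()

        // copy `child` under "sub" and mirror its subsequent updates into the root tree
        val root = MutableDataTree<Int> {
            watchBranch("sub".asName(), child)
        }

        child.wrap("value", 1)
        delay(20)

        println(root["sub.value"]?.await()) // expected: 1

        child.close()
        root.close()
    }
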
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
index f33ec1f0..8c7ce70e 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
@@ -194,7 +194,7 @@ public suspend fun <T, R> DataTree<T>.transform(
         val d = Data(outputType, newMeta, coroutineContext, listOf(namedData)) {
             block(namedData.awaitWithMeta())
         }
-        data(namedData.name, d)
+        put(namedData.name, d)
     }
 }
 
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
index 5f8791d0..cfccb02b 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
@@ -1,10 +1,6 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.Job
-import kotlinx.coroutines.launch
 import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.plus
 
 
 /**
@@ -12,7 +8,7 @@ import space.kscience.dataforge.names.plus
  */
 context(DataSink<T>)
 public infix fun <T : Any> String.put(data: Data<T>): Unit =
-    data(Name.parse(this), data)
+    put(Name.parse(this), data)
 
 /**
  * Append node
@@ -29,16 +25,3 @@ public infix fun <T : Any> String.put(
     block: DataSink<T>.() -> Unit,
 ): Unit = branch(Name.parse(this), block)
 
-/**
- * Copy given data set and mirror its changes to this [LegacyDataTreeBuilder] in [this@setAndObserve]. Returns an update [Job]
- */
-context(DataSink<T>)
-public fun <T : Any> CoroutineScope.setAndWatch(
-    name: Name,
-    dataSet: DataTree<T>,
-): Job = launch {
-    branch(name, dataSet)
-    dataSet.updates().collect {
-        data(name + it.name, it.data)
-    }
-}
\ No newline at end of file
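
The remaining context-receiver helpers sit on top of the renamed sink API, so the string-based DSL reads as before. A sketch (a consuming module needs the context-receivers compiler option, just like this source set):

    import space.kscience.dataforge.data.*

    val tree = DataTree<String> {
        "single" put Data.static("value")  // String.put(Data), resolved through the DataSink context
        "branch" put {                     // String.put(builder block) delegates to branch(...)
            wrap("a", "a")
            wrap("b", "b")
        }
    }
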
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index 17549b6b..4aa6e6d4 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -1,7 +1,7 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.ExperimentalCoroutinesApi
 import kotlinx.coroutines.delay
+import kotlinx.coroutines.runBlocking
 import kotlinx.coroutines.test.runTest
 import org.junit.jupiter.api.Test
 import space.kscience.dataforge.actions.Action
@@ -10,13 +10,13 @@ import space.kscience.dataforge.actions.mapping
 import space.kscience.dataforge.misc.DFExperimental
 import kotlin.test.assertEquals
 
-@OptIn(DFExperimental::class, ExperimentalCoroutinesApi::class)
+@OptIn(DFExperimental::class)
 internal class ActionsTest {
     @Test
     fun testStaticMapAction() = runTest {
         val data: DataTree<Int> = DataTree {
             repeat(10) {
-                static(it.toString(), it)
+                wrap(it.toString(), it)
             }
         }
 
@@ -28,20 +28,24 @@ internal class ActionsTest {
     }
 
     @Test
-    fun testDynamicMapAction() = runTest {
-        val data: MutableDataTree<Int> = MutableDataTree(this)
+    fun testDynamicMapAction() = runBlocking {
+        val source: MutableDataTree<Int> = MutableDataTree()
 
         val plusOne = Action.mapping<Int, Int> {
             result { it + 1 }
         }
 
-        val result = plusOne(data)
+        val result = plusOne(source)
+
 
         repeat(10) {
-            data.static(it.toString(), it)
+            source.wrap(it.toString(), it)
         }
 
-        delay(20)
+        delay(10)
+
+        source.close()
+        result.awaitClose()
 
         assertEquals(2, result["1"]?.await())
     }
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
index c5c450f7..95b7a7bd 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
@@ -1,6 +1,8 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.*
+import kotlinx.coroutines.delay
+import kotlinx.coroutines.runBlocking
+import kotlinx.coroutines.test.runTest
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.asName
 import kotlin.test.Test
@@ -9,26 +11,25 @@ import kotlin.test.assertEquals
 
 internal class DataTreeBuilderTest {
     @Test
-    fun testTreeBuild() = runBlocking {
+    fun testTreeBuild() = runTest {
         val node = DataTree<Any> {
             "primary" put {
-                static("a", "a")
-                static("b", "b")
+                wrap("a", "a")
+                wrap("b", "b")
             }
-            static("c.d", "c.d")
-            static("c.f", "c.f")
-        }
-        runBlocking {
-            assertEquals("a", node["primary.a"]?.await())
-            assertEquals("b", node["primary.b"]?.await())
-            assertEquals("c.d", node["c.d"]?.await())
-            assertEquals("c.f", node["c.f"]?.await())
+            wrap("c.d", "c.d")
+            wrap("c.f", "c.f")
         }
+        assertEquals("a", node["primary.a"]?.await())
+        assertEquals("b", node["primary.b"]?.await())
+        assertEquals("c.d", node["c.d"]?.await())
+        assertEquals("c.f", node["c.f"]?.await())
+
     }
 
     @OptIn(DFExperimental::class)
     @Test
-    fun testDataUpdate() = runBlocking {
+    fun testDataUpdate() = runTest {
         val updateData = DataTree<Any> {
             "update" put {
                 "a" put Data.static("a")
@@ -38,54 +39,30 @@ internal class DataTreeBuilderTest {
 
         val node = DataTree<Any> {
             "primary" put {
-                static("a", "a")
-                static("b", "b")
+                wrap("a", "a")
+                wrap("b", "b")
             }
-            static("root", "root")
-            populateFrom(updateData)
+            wrap("root", "root")
+            putAll(updateData)
         }
 
-        runBlocking {
-            assertEquals("a", node["update.a"]?.await())
-            assertEquals("a", node["primary.a"]?.await())
-        }
+        assertEquals("a", node["update.a"]?.await())
+        assertEquals("a", node["primary.a"]?.await())
     }
 
     @Test
     fun testDynamicUpdates() = runBlocking {
-        try {
-            lateinit var updateJob: Job
-            supervisorScope {
-                val subNode = ObservableDataTree<Int>(this) {
-                    updateJob = launch {
-                        repeat(10) {
-                            delay(10)
-                            static("value", it)
-                        }
-                        delay(10)
-                    }
-                }
-                launch {
-                    subNode.updates().collect {
-                        println(it)
-                    }
-                }
-                val rootNode = ObservableDataTree<Int>(this) {
-                    setAndWatch("sub".asName(), subNode)
-                }
+        val subNode = MutableDataTree<Int>()
 
-                launch {
-                    rootNode.updates().collect {
-                        println(it)
-                    }
-                }
-                updateJob.join()
-                assertEquals(9, rootNode["sub.value"]?.await())
-                cancel()
-            }
-        } catch (t: Throwable) {
-            if (t !is CancellationException) throw t
+        val rootNode = MutableDataTree<Int> {
+            watchBranch("sub".asName(), subNode)
         }
 
+        repeat(10) {
+            subNode.wrap("value[$it]", it)
+        }
+
+        delay(20)
+        assertEquals(9, rootNode["sub.value[9]"]?.await())
     }
 }
\ No newline at end of file
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
index af2520eb..e636de49 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
@@ -88,12 +88,17 @@ public fun <T : Any> Task(
         workspace: Workspace,
         taskName: Name,
         taskMeta: Meta,
-    ): TaskResult<T> = withContext(GoalExecutionRestriction() + workspace.goalLogger) {
+    ): TaskResult<T> {
         //TODO use safe builder and check for external data on add and detects cycles
-        val dataset = MutableDataTree<T>(resultType, this).apply {
-            TaskResultBuilder(workspace, taskName, taskMeta, this).apply { builder() }
+        val dataset = MutableDataTree<T>(resultType, workspace.context).apply {
+            TaskResultBuilder(workspace, taskName, taskMeta, this).apply {
+                withContext(GoalExecutionRestriction() + workspace.goalLogger) {
+                    builder()
+                }
+            }
         }
-        workspace.wrapResult(dataset, taskName, taskMeta)
+        return workspace.wrapResult(dataset, taskName, taskMeta)
+
     }
 }
 
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
index 94da4383..7aa94101 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
@@ -2,6 +2,7 @@ package space.kscience.dataforge.workspace
 
 import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.Job
+import kotlinx.coroutines.joinAll
 import kotlinx.coroutines.launch
 import space.kscience.dataforge.data.ObservableDataTree
 import space.kscience.dataforge.data.asSequence
@@ -20,7 +21,7 @@ public data class TaskResult<T>(
     public val workspace: Workspace,
     public val taskName: Name,
     public val taskMeta: Meta,
-): ObservableDataTree<T> by content
+) : ObservableDataTree<T> by content
 
 /**
  * Wrap data into [TaskResult]
@@ -32,8 +33,9 @@ public fun <T> Workspace.wrapResult(data: ObservableDataTree<T>, taskName: Name,
  * Start computation for all data elements of this node.
  * The resulting [Job] is completed only when all of them are completed.
  */
-public fun TaskResult<*>.compute(scope: CoroutineScope): Job = scope.launch {
-    asSequence().forEach {
+public fun TaskResult<*>.launch(scope: CoroutineScope): Job {
+    val jobs = asSequence().map {
         it.data.launch(scope)
-    }
+    }.toList()
+    return scope.launch { jobs.joinAll() }
 }
\ No newline at end of file
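
The renamed launch helper eagerly starts every goal of a task result, and the returned Job completes only when all of them finish. A sketch, where `workspace` and the "echo" task are placeholders borrowed from the workspace tests:

    import kotlinx.coroutines.runBlocking
    import space.kscience.dataforge.workspace.*

    fun computeAll(workspace: Workspace) = runBlocking {
        val result = workspace.produce("echo")  // TaskResult<*>
        result.launch(this).join()              // wait until every data goal is finished
    }
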
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
index 6ccdcb1c..4705c3b0 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
@@ -102,7 +102,7 @@ public inline fun <T : Any, reified R : Any> TaskContainer.action(
     noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
 ): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<R>>> =
     task(MetaDescriptor(descriptorBuilder)) {
-        result(action.execute(workspace.context, from(selector), taskMeta.copy(metaTransform)))
+        result(action.execute(from(selector), taskMeta.copy(metaTransform)))
     }
 
 public class WorkspaceBuilder(
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
index 95e7d481..1900ff23 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
@@ -93,7 +93,7 @@ public suspend inline fun <T, reified R> TaskResultBuilder<R>.transformEach(
             action(it, data.name, meta)
         }
 
-        data(data.name, res)
+        put(data.name, res)
     }
 }
 
@@ -113,7 +113,7 @@ public suspend inline fun <T, reified R> TaskResultBuilder<R>.actionFrom(
     action: Action<T, R>,
     dependencyMeta: Meta = defaultDependencyMeta,
 ) {
-    branch(action.execute(workspace.context, from(selector, dependencyMeta), dependencyMeta))
+    branch(action.execute(from(selector, dependencyMeta), dependencyMeta))
 }
 
 
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt
index 4eba363b..51c9a5e8 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt
@@ -11,7 +11,6 @@ import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.NameToken
 import space.kscience.dataforge.names.asName
 import space.kscience.dataforge.names.plus
-import space.kscience.dataforge.workspace.FileData.defaultPathToName
 import java.nio.file.Files
 import java.nio.file.Path
 import java.nio.file.StandardWatchEventKinds
@@ -36,20 +35,6 @@ public object FileData {
     public const val DF_FILE_EXTENSION: String = "df"
     public val DEFAULT_IGNORE_EXTENSIONS: Set<String> = setOf(DF_FILE_EXTENSION)
 
-    /**
-     * Transform file name into DataForg name. Ignores DataForge file extensions.
-     */
-    public val defaultPathToName: (Path) -> Name = { path ->
-        Name(
-            path.map { segment ->
-                if (segment.isRegularFile() && segment.extension in DEFAULT_IGNORE_EXTENSIONS) {
-                    NameToken(path.nameWithoutExtension)
-                } else {
-                    NameToken(path.name)
-                }
-            }
-        )
-    }
 }
 
 
@@ -77,51 +62,68 @@ public fun IOPlugin.readFileData(
     )
 }
 
-public fun DataSink<Binary>.file(io: IOPlugin, path: Path, name: Name) {
+public fun DataSink<Binary>.file(io: IOPlugin, name: Name, path: Path) {
     if (!path.isRegularFile()) error("Only regular files could be handled by this function")
-    data(name, io.readFileData(path))
+    put(name, io.readFileData(path))
 }
 
 public fun DataSink<Binary>.directory(
     io: IOPlugin,
+    name: Name,
     path: Path,
-    pathToName: (Path) -> Name = defaultPathToName,
 ) {
     if (!path.isDirectory()) error("Only directories could be handled by this function")
-    val metaFile = path.resolve(IOPlugin.META_FILE_NAME)
-    val dataFile = path.resolve(IOPlugin.DATA_FILE_NAME)
     //process root data
-    if (metaFile.exists() || dataFile.exists()) {
-        data(
-            Name.EMPTY,
+
+    var dataBinary: Binary? = null
+    var meta: Meta? = null
+    Files.list(path).forEach { childPath ->
+        val fileName = childPath.fileName.toString()
+        if (fileName == IOPlugin.DATA_FILE_NAME) {
+            dataBinary = childPath.asBinary()
+        } else if (fileName.startsWith(IOPlugin.META_FILE_NAME)) {
+            meta = io.readMetaFileOrNull(childPath)
+        } else if (!fileName.startsWith("@")) {
+            val token = if (childPath.isRegularFile() && childPath.extension in FileData.DEFAULT_IGNORE_EXTENSIONS) {
+                NameToken(childPath.nameWithoutExtension)
+            } else {
+                NameToken(childPath.name)
+            }
+
+            files(io, name + token, childPath)
+        }
+    }
+
+    //set data if it is relevant
+    if (dataBinary != null || meta != null) {
+        put(
+            name,
             StaticData(
                 typeOf<Binary>(),
-                dataFile.takeIf { it.exists() }?.asBinary() ?: Binary.EMPTY,
-                io.readMetaFileOrNull(metaFile) ?: Meta.EMPTY
+                dataBinary ?: Binary.EMPTY,
+                meta ?: Meta.EMPTY
             )
         )
     }
-    Files.list(path).forEach { childPath ->
-        val fileName = childPath.fileName.toString()
-        if (!fileName.startsWith("@")) {
-            files(io, childPath, pathToName)
-        }
-    }
 }
 
-public fun DataSink<Binary>.files(io: IOPlugin, path: Path, pathToName: (Path) -> Name = defaultPathToName) {
+public fun DataSink<Binary>.files(
+    io: IOPlugin,
+    name: Name,
+    path: Path,
+) {
     if (path.isRegularFile() && path.extension == "zip") {
         //Using explicit Zip file system to avoid bizarre compatibility bugs
         val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
             ?: error("Zip file system provider not found")
         val fs = fsProvider.newFileSystem(path, mapOf("create" to "true"))
 
-        return files(io, fs.rootDirectories.first(), pathToName)
+        files(io, name, fs.rootDirectories.first())
     }
     if (path.isRegularFile()) {
-        file(io, path, pathToName(path))
+        file(io, name, path)
     } else {
-        directory(io, path, pathToName)
+        directory(io, name, path)
     }
 }
 
@@ -132,11 +134,11 @@ private fun Path.toName() = Name(map { NameToken.parse(it.nameWithoutExtension)
 @DFExperimental
 public fun DataSink<Binary>.monitorFiles(
     io: IOPlugin,
+    name: Name,
     path: Path,
-    pathToName: (Path) -> Name = defaultPathToName,
     scope: CoroutineScope = io.context,
 ): Job {
-    files(io, path, pathToName)
+    files(io, name, path)
     return scope.launch(Dispatchers.IO) {
         val watchService = path.fileSystem.newWatchService()
 
@@ -153,11 +155,11 @@ public fun DataSink<Binary>.monitorFiles(
                 for (event: WatchEvent<*> in key.pollEvents()) {
                     val eventPath = event.context() as Path
                     if (event.kind() == StandardWatchEventKinds.ENTRY_DELETE) {
-                        data(eventPath.toName(), null)
+                        put(eventPath.toName(), null)
                     } else {
                         val fileName = eventPath.fileName.toString()
                         if (!fileName.startsWith("@")) {
-                            files(io, eventPath, pathToName)
+                            files(io, name, eventPath)
                         }
                     }
                 }
@@ -179,29 +181,24 @@ public suspend fun <T : Any> IOPlugin.writeDataDirectory(
     dataSet: DataTree<T>,
     format: IOWriter<T>,
     envelopeFormat: EnvelopeFormat? = null,
-    nameToPath: (name: Name, data: Data<T>) -> Path = { name, _ ->
-        Path(name.tokens.joinToString("/") { token -> token.toStringUnescaped() })
-    },
-) {
-    withContext(Dispatchers.IO) {
-        if (!Files.exists(path)) {
-            Files.createDirectories(path)
-        } else if (!Files.isDirectory(path)) {
-            error("Can't write a node into file")
-        }
-        dataSet.forEach { (name, data) ->
-            val childPath = path.resolve(nameToPath(name, data))
-            childPath.parent.createDirectories()
-            val envelope = data.toEnvelope(format)
-            if (envelopeFormat != null) {
-                writeEnvelopeFile(childPath, envelope, envelopeFormat)
-            } else {
-                writeEnvelopeDirectory(childPath, envelope)
-            }
-        }
-        dataSet.meta?.let { writeMetaFile(path, it) }
-
+): Unit = withContext(Dispatchers.IO) {
+    if (!Files.exists(path)) {
+        Files.createDirectories(path)
+    } else if (!Files.isDirectory(path)) {
+        error("Can't write a node into file")
     }
+    dataSet.forEach { (name, data) ->
+        val childPath = path.resolve(name.tokens.joinToString("/") { token -> token.toStringUnescaped() })
+        childPath.parent.createDirectories()
+        val envelope = data.toEnvelope(format)
+        if (envelopeFormat != null) {
+            writeEnvelopeFile(childPath, envelope, envelopeFormat)
+        } else {
+            writeEnvelopeDirectory(childPath, envelope)
+        }
+    }
+    dataSet.meta?.let { writeMetaFile(path, it) }
+
 }
 
 /**
@@ -212,15 +209,12 @@ public suspend fun <T : Any> IOPlugin.writeDataDirectory(
 public fun DataSink<Binary>.resources(
     io: IOPlugin,
     vararg resources: String,
-    pathToName: (Path) -> Name = defaultPathToName,
     classLoader: ClassLoader = Thread.currentThread().contextClassLoader,
 ) {
     resources.forEach { resource ->
         val path = classLoader.getResource(resource)?.toURI()?.toPath() ?: error(
             "Resource with name $resource is not resolved"
         )
-        branch(resource.asName()) {
-            files(io, path, pathToName)
-        }
+        files(io, resource.asName(), path)
     }
 }
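
The hunks above replace the `pathToName` converter with an explicit root `Name` supplied by the caller, so the single `files` entry point now covers regular files, directories, and zip archives mounted under the chosen branch. Below is a minimal usage sketch mirroring `FileDataTest` further down; the scratch directory, the placement in the `space.kscience.dataforge.workspace` package, and the use of the `Global` context are illustrative assumptions, not part of the patch.

```kotlin
package space.kscience.dataforge.workspace

import kotlinx.coroutines.test.runTest
import space.kscience.dataforge.context.Global
import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.io.*
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import java.nio.file.Files

//minimal sketch: read a directory into a DataTree of Binary via the explicit-name overload
@OptIn(DFExperimental::class)
fun readDirectorySketch() = runTest {
    val io = Global.io
    //hypothetical scratch directory standing in for real input data
    val dir = Files.createTempDirectory("df_example")
    //Name.EMPTY attaches the directory content at the tree root; any other Name mounts it as a branch
    val tree = DataTree { files(io, Name.EMPTY, dir) }
    println(tree)
}
```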
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt
index 83b617cb..9d175bd3 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt
@@ -3,67 +3,36 @@ package space.kscience.dataforge.workspace
 import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.withContext
 import space.kscience.dataforge.data.DataTree
-import space.kscience.dataforge.io.*
+import space.kscience.dataforge.io.EnvelopeFormat
+import space.kscience.dataforge.io.IOPlugin
+import space.kscience.dataforge.io.IOWriter
 import space.kscience.dataforge.misc.DFExperimental
-import java.nio.file.Files
 import java.nio.file.Path
-import java.nio.file.StandardOpenOption
-import java.util.zip.ZipEntry
-import java.util.zip.ZipOutputStream
-
-
-private suspend fun <T : Any> ZipOutputStream.writeNode(
-    name: String,
-    tree: DataTree<T>,
-    dataFormat: IOFormat<T>,
-    envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat,
-): Unit = withContext(Dispatchers.IO) {
-    //TODO add directory-based envelope writer
-    tree.data?.let {
-        val envelope = it.toEnvelope(dataFormat)
-        val entry = ZipEntry(name)
-        putNextEntry(entry)
-
-        //TODO remove additional copy
-        val bytes = ByteArray {
-            writeWith(envelopeFormat, envelope)
-        }
-        write(bytes)
-    }
-
-
-    val entry = ZipEntry("$name/")
-    putNextEntry(entry)
-    closeEntry()
-    tree.items.forEach { (token, item) ->
-        val childName = "$name/$token"
-        writeNode(childName, item, dataFormat, envelopeFormat)
-    }
-
-}
+import java.nio.file.spi.FileSystemProvider
+import kotlin.io.path.exists
+import kotlin.io.path.extension
 
 /**
  * Write this [DataTree] as a zip archive
  */
 @DFExperimental
-public suspend fun <T : Any> DataTree<T>.writeZip(
+public suspend fun <T : Any> IOPlugin.writeZip(
     path: Path,
-    format: IOFormat<T>,
-    envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat,
+    dataSet: DataTree<T>,
+    format: IOWriter<T>,
+    envelopeFormat: EnvelopeFormat? = null,
 ): Unit = withContext(Dispatchers.IO) {
-    val actualFile = if (path.toString().endsWith(".zip")) {
+    if (path.exists()) error("Can't override existing zip data file $path")
+    val actualFile = if (path.extension == "zip") {
         path
     } else {
         path.resolveSibling(path.fileName.toString() + ".zip")
     }
-    val fos = Files.newOutputStream(
-        actualFile,
-        StandardOpenOption.WRITE,
-        StandardOpenOption.CREATE,
-        StandardOpenOption.TRUNCATE_EXISTING
-    )
-    val zos = ZipOutputStream(fos)
-    zos.use {
-        it.writeNode("", this@writeZip, format, envelopeFormat)
+    val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
+        ?: error("Zip file system provider not found")
+    //val fs = FileSystems.newFileSystem(actualFile, mapOf("create" to true))
+    val fs = fsProvider.newFileSystem(actualFile, mapOf("create" to true))
+    fs.use {
+        writeDataDirectory(fs.rootDirectories.first(), dataSet, format, envelopeFormat)
     }
-}
\ No newline at end of file
+}
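
The rewrite above drops the hand-rolled `ZipOutputStream` walker in favor of the JDK zip (`jar` scheme) `FileSystemProvider`, which lets `writeZip` delegate to `writeDataDirectory` against the archive root. The following standalone sketch shows that technique independently of DataForge; the archive path, entry name, and content are illustrative, and the string form of the `create` option is the classic zipfs spelling.

```kotlin
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.spi.FileSystemProvider

/**
 * Write a single text entry into a fresh zip archive through the
 * "jar" scheme FileSystemProvider, mirroring the approach used above.
 */
fun writeIntoZip(zipPath: Path) {
    val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
        ?: error("Zip file system provider not found")
    //the zip provider creates the archive when "create" is set and the file does not exist yet
    fsProvider.newFileSystem(zipPath, mapOf("create" to "true")).use { fs ->
        val root = fs.rootDirectories.first()
        Files.writeString(root.resolve("hello.txt"), "Hello, zip!")
    }
}
```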
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
index 78ce853e..e5c2c230 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
@@ -1,17 +1,16 @@
 package space.kscience.dataforge.workspace
 
-import kotlinx.coroutines.ExperimentalCoroutinesApi
 import kotlinx.coroutines.coroutineScope
 import kotlinx.coroutines.test.runTest
 import org.junit.jupiter.api.Test
-import space.kscience.dataforge.data.static
+import space.kscience.dataforge.data.wrap
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.boolean
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.misc.DFExperimental
 import kotlin.test.assertEquals
 
-@OptIn(ExperimentalCoroutinesApi::class, DFExperimental::class)
+@OptIn(DFExperimental::class)
 internal class CachingWorkspaceTest {
 
     @Test
@@ -23,7 +22,7 @@ internal class CachingWorkspaceTest {
             data {
                 //statically initialize data
                 repeat(5) {
-                    static("myData[$it]", it)
+                    wrap("myData[$it]", it)
                 }
             }
 
@@ -39,7 +38,7 @@ internal class CachingWorkspaceTest {
             val doSecond by task<Any> {
                 transformEach(
                     doFirst,
-                    dependencyMeta = if(taskMeta["flag"].boolean == true) taskMeta else Meta.EMPTY
+                    dependencyMeta = if (taskMeta["flag"].boolean == true) taskMeta else Meta.EMPTY
                 ) { _, name, _ ->
                     secondCounter++
                     println("Done second on $name with flag=${taskMeta["flag"].boolean ?: false}")
@@ -51,13 +50,15 @@ internal class CachingWorkspaceTest {
         val secondA = workspace.produce("doSecond")
         val secondB = workspace.produce("doSecond", Meta { "flag" put true })
         val secondC = workspace.produce("doSecond")
+        //use coroutineScope to wait for the result
         coroutineScope {
-            first.compute(this)
-            secondA.compute(this)
-            secondB.compute(this)
+            first.launch(this)
+            secondA.launch(this)
+            secondB.launch(this)
             //repeat to check caching
-            secondC.compute(this)
+            secondC.launch(this)
         }
+
         assertEquals(10, firstCounter)
         assertEquals(10, secondCounter)
     }
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
index 403b1c51..d611b1c8 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
@@ -20,13 +20,13 @@ class DataPropagationTestPlugin : WorkspacePlugin() {
         val result: Data<Int> = selectedData.foldToData(0) { result, data ->
             result + data.value
         }
-        data("result", result)
+        put("result", result)
     }
 
 
     val singleData by task<Int> {
         workspace.data.filterByType<Int>()["myData[12]"]?.let {
-            data("result", it)
+            put("result", it)
         }
     }
 
@@ -47,7 +47,7 @@ class DataPropagationTest {
         }
         data {
             repeat(100) {
-                static("myData[$it]", it)
+                wrap("myData[$it]", it)
             }
         }
     }
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
index 1a21fb17..10a1c268 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
@@ -1,6 +1,5 @@
 package space.kscience.dataforge.workspace
 
-import kotlinx.coroutines.runBlocking
 import kotlinx.coroutines.test.runTest
 import kotlinx.io.Sink
 import kotlinx.io.Source
@@ -13,7 +12,9 @@ import space.kscience.dataforge.io.*
 import space.kscience.dataforge.io.yaml.YamlPlugin
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.misc.DFExperimental
+import space.kscience.dataforge.names.Name
 import java.nio.file.Files
+import kotlin.io.path.deleteExisting
 import kotlin.io.path.fileSize
 import kotlin.io.path.toPath
 import kotlin.test.Test
@@ -23,11 +24,11 @@ import kotlin.test.assertEquals
 class FileDataTest {
     val dataNode = DataTree<String> {
         branch("dir") {
-            static("a", "Some string") {
+            wrap("a", "Some string") {
                 "content" put "Some string"
             }
         }
-        static("b", "root data")
+        wrap("b", "root data")
 //        meta {
 //            "content" put "This is root meta node"
 //        }
@@ -45,17 +46,17 @@ class FileDataTest {
 
     @Test
     @DFExperimental
-    fun testDataWriteRead() = with(Global.io) {
+    fun testDataWriteRead() = runTest {
         val io = Global.io
         val dir = Files.createTempDirectory("df_data_node")
-        runBlocking {
-            writeDataDirectory(dir, dataNode, StringIOFormat)
-            println(dir.toUri().toString())
-            val reconstructed = DataTree { files(io, dir) }
-                .transform { (_, value) -> value.toByteArray().decodeToString() }
-            assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
-            assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
+        io.writeDataDirectory(dir, dataNode, StringIOFormat)
+        println(dir.toUri().toString())
+        val data = DataTree {
+            files(io, Name.EMPTY, dir)
         }
+        val reconstructed = data.transform { (_, value) -> value.toByteArray().decodeToString() }
+        assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
+        assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
     }
 
 
@@ -64,9 +65,10 @@ class FileDataTest {
     fun testZipWriteRead() = runTest {
         val io = Global.io
         val zip = Files.createTempFile("df_data_node", ".zip")
-        dataNode.writeZip(zip, StringIOFormat)
+        zip.deleteExisting()
+        io.writeZip(zip, dataNode, StringIOFormat)
         println(zip.toUri().toString())
-        val reconstructed = DataTree { files(io, zip) }
+        val reconstructed = DataTree { files(io, Name.EMPTY, zip) }
             .transform { (_, value) -> value.toByteArray().decodeToString() }
         assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
         assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
index ee497e1b..0f16b1c8 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
@@ -3,7 +3,7 @@ package space.kscience.dataforge.workspace
 import kotlinx.coroutines.ExperimentalCoroutinesApi
 import kotlinx.coroutines.test.runTest
 import org.junit.jupiter.api.Test
-import space.kscience.dataforge.data.static
+import space.kscience.dataforge.data.wrap
 import space.kscience.dataforge.misc.DFExperimental
 import java.nio.file.Files
 
@@ -16,7 +16,7 @@ class FileWorkspaceCacheTest {
             data {
                 //statically initialize data
                 repeat(5) {
-                    static("myData[$it]", it)
+                    wrap("myData[$it]", it)
                 }
             }
             fileCache(Files.createTempDirectory("dataforge-temporary-cache"))
@@ -26,7 +26,7 @@ class FileWorkspaceCacheTest {
             }
         }
 
-        workspace.produce("echo").compute(this)
+        workspace.produce("echo").launch(this)
 
     }
 }
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
index 91fa2f6c..b49b9d54 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
@@ -62,14 +62,14 @@ internal class SimpleWorkspaceTest {
         data {
             //statically initialize data
             repeat(100) {
-                static("myData[$it]", it)
+                wrap("myData[$it]", it)
             }
         }
 
         val filterOne by task<Int> {
             val name by taskMeta.string { error("Name field not defined") }
             from(testPluginFactory) { test }[name]?.let { source: Data<Int> ->
-                data(name, source)
+                put(name, source)
             }
         }
 
@@ -97,7 +97,7 @@ internal class SimpleWorkspaceTest {
                 val newData: Data<Int> = data.combine(linearData[data.name]!!) { l, r ->
                     l + r
                 }
-                data(data.name, newData)
+                put(data.name, newData)
             }
         }
 
@@ -106,7 +106,7 @@ internal class SimpleWorkspaceTest {
             val res = from(square).foldToData(0) { l, r ->
                 l + r.value
             }
-            data("sum", res)
+            put("sum", res)
         }
 
         val averageByGroup by task<Int> {
@@ -116,13 +116,13 @@ internal class SimpleWorkspaceTest {
                 l + r.value
             }
 
-            data("even", evenSum)
+            put("even", evenSum)
             val oddSum = workspace.data.filterByType<Int> { name, _, _ ->
                 name.toString().toInt() % 2 == 1
             }.foldToData(0) { l, r ->
                 l + r.value
             }
-            data("odd", oddSum)
+            put("odd", oddSum)
         }
 
         val delta by task<Int> {
@@ -132,7 +132,7 @@ internal class SimpleWorkspaceTest {
             val res = even.combine(odd) { l, r ->
                 l - r
             }
-            data("res", res)
+            put("res", res)
         }
 
         val customPipe by task<Int> {
@@ -140,7 +140,7 @@ internal class SimpleWorkspaceTest {
                 val meta = data.meta.toMutableMeta().apply {
                     "newValue" put 22
                 }
-                data(data.name + "new", data.transform { (data.meta["value"].int ?: 0) + it })
+                put(data.name + "new", data.transform { (data.meta["value"].int ?: 0) + it })
             }
         }
 

From 6e209ab5cd5e569f46f4264d9702d532ac6c1568 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 3 Feb 2024 17:34:19 +0300
Subject: [PATCH 19/77] Data tree refactored to a uniform tree instead of
 a sealed class.

---
 dataforge-io/build.gradle.kts                 |  2 +-
 .../{fileData.kt => readFileData.kt}          | 42 ++---------
 .../dataforge/workspace/writeFileData.kt      | 72 +++++++++++++++++++
 .../kscience/dataforge/workspace/zipData.kt   | 38 ----------
 4 files changed, 77 insertions(+), 77 deletions(-)
 rename dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/{fileData.kt => readFileData.kt} (81%)
 create mode 100644 dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/writeFileData.kt
 delete mode 100644 dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt

diff --git a/dataforge-io/build.gradle.kts b/dataforge-io/build.gradle.kts
index f7197197..5be52e61 100644
--- a/dataforge-io/build.gradle.kts
+++ b/dataforge-io/build.gradle.kts
@@ -4,7 +4,7 @@ plugins {
 
 description = "IO module"
 
-val ioVersion = "0.3.0"
+val ioVersion = "0.3.1"
 
 kscience {
     jvm()
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt
similarity index 81%
rename from dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt
rename to dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt
index 51c9a5e8..1815c3e4 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt
@@ -1,7 +1,9 @@
 package space.kscience.dataforge.workspace
 
 import kotlinx.coroutines.*
-import space.kscience.dataforge.data.*
+import space.kscience.dataforge.data.Data
+import space.kscience.dataforge.data.DataSink
+import space.kscience.dataforge.data.StaticData
 import space.kscience.dataforge.io.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.copy
@@ -21,11 +23,6 @@ import kotlin.io.path.*
 import kotlin.reflect.typeOf
 
 
-//public typealias FileFormatResolver<T> = (Path, Meta) -> IOFormat<T>
-
-public typealias FileFormatResolver<T> = (path: Path, meta: Meta) -> IOReader<T>?
-
-
 public object FileData {
     public val FILE_KEY: Name = "file".asName()
     public val FILE_PATH_KEY: Name = FILE_KEY + "path"
@@ -116,7 +113,7 @@ public fun DataSink<Binary>.files(
         //Using explicit Zip file system to avoid bizarre compatibility bugs
         val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
             ?: error("Zip file system provider not found")
-        val fs = fsProvider.newFileSystem(path, mapOf("create" to "true"))
+        val fs = fsProvider.newFileSystem(path, emptyMap<String, Any>())
 
         files(io, name, fs.rootDirectories.first())
     }
@@ -170,37 +167,6 @@ public fun DataSink<Binary>.monitorFiles(
 
 }
 
-/**
- * Write the data tree to existing directory or create a new one using default [java.nio.file.FileSystem] provider
- *
- * @param nameToPath a [Name] to [Path] converter used to create
- */
-@DFExperimental
-public suspend fun <T : Any> IOPlugin.writeDataDirectory(
-    path: Path,
-    dataSet: DataTree<T>,
-    format: IOWriter<T>,
-    envelopeFormat: EnvelopeFormat? = null,
-): Unit = withContext(Dispatchers.IO) {
-    if (!Files.exists(path)) {
-        Files.createDirectories(path)
-    } else if (!Files.isDirectory(path)) {
-        error("Can't write a node into file")
-    }
-    dataSet.forEach { (name, data) ->
-        val childPath = path.resolve(name.tokens.joinToString("/") { token -> token.toStringUnescaped() })
-        childPath.parent.createDirectories()
-        val envelope = data.toEnvelope(format)
-        if (envelopeFormat != null) {
-            writeEnvelopeFile(childPath, envelope, envelopeFormat)
-        } else {
-            writeEnvelopeDirectory(childPath, envelope)
-        }
-    }
-    dataSet.meta?.let { writeMetaFile(path, it) }
-
-}
-
 /**
  * @param resources The names of the resources to read.
  * @param classLoader The class loader to use for loading the resources. By default, it uses the current thread's context class loader.
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/writeFileData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/writeFileData.kt
new file mode 100644
index 00000000..379a79dd
--- /dev/null
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/writeFileData.kt
@@ -0,0 +1,72 @@
+package space.kscience.dataforge.workspace
+
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
+import space.kscience.dataforge.data.*
+import space.kscience.dataforge.io.*
+import space.kscience.dataforge.misc.DFExperimental
+import space.kscience.dataforge.names.Name
+import java.nio.file.Files
+import java.nio.file.Path
+import java.nio.file.spi.FileSystemProvider
+import kotlin.io.path.Path
+import kotlin.io.path.createDirectories
+import kotlin.io.path.exists
+import kotlin.io.path.extension
+
+
+/**
+ * Write the data tree to an existing directory or create a new one using the default [java.nio.file.FileSystem] provider.
+ *
+ * Child paths are built from the [Name] tokens of each data item relative to [path].
+ */
+@DFExperimental
+public suspend fun <T : Any> IOPlugin.writeDataDirectory(
+    path: Path,
+    dataSet: DataTree<T>,
+    format: IOWriter<T>,
+    envelopeFormat: EnvelopeFormat? = null,
+): Unit = withContext(Dispatchers.IO) {
+    if (!Files.exists(path)) {
+        Files.createDirectories(path)
+    } else if (!Files.isDirectory(path)) {
+        error("Can't write a node into file")
+    }
+    dataSet.forEach { (name, data) ->
+        val childPath = path.resolve(name.tokens.joinToString("/") { token -> token.toStringUnescaped() })
+        childPath.parent.createDirectories()
+        val envelope = data.toEnvelope(format)
+        if (envelopeFormat != null) {
+            writeEnvelopeFile(childPath, envelope, envelopeFormat)
+        } else {
+            writeEnvelopeDirectory(childPath, envelope)
+        }
+    }
+    dataSet.meta?.let { writeMetaFile(path, it) }
+
+}
+
+/**
+ * Write this [DataTree] as a zip archive
+ */
+@DFExperimental
+public suspend fun <T : Any> IOPlugin.writeZip(
+    path: Path,
+    dataSet: DataTree<T>,
+    format: IOWriter<T>,
+    envelopeFormat: EnvelopeFormat? = null,
+): Unit = withContext(Dispatchers.IO) {
+    if (path.exists()) error("Can't override existing zip data file $path")
+    val actualFile = if (path.extension == "zip") {
+        path
+    } else {
+        path.resolveSibling(path.fileName.toString() + ".zip")
+    }
+    val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
+        ?: error("Zip file system provider not found")
+    //val fs = FileSystems.newFileSystem(actualFile, mapOf("create" to true))
+    val fs = fsProvider.newFileSystem(actualFile, mapOf("create" to true))
+    fs.use {
+        writeDataDirectory(fs.rootDirectories.first(), dataSet, format, envelopeFormat)
+    }
+}
\ No newline at end of file
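
A hedged round-trip sketch of the two writers collected in this new file, mirroring `FileDataTest` above: a tree written with `writeDataDirectory` is read back with `files`, and `writeZip` produces the equivalent archive. The temporary paths, `StringIOFormat`, `wrap`, and the placement in the `space.kscience.dataforge.workspace` package are all borrowed from the tests rather than prescribed here.

```kotlin
package space.kscience.dataforge.workspace

import kotlinx.coroutines.test.runTest
import space.kscience.dataforge.context.Global
import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.data.wrap
import space.kscience.dataforge.io.*
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import java.nio.file.Files
import kotlin.io.path.deleteExisting

//round trip: write a small DataTree to a directory and to a zip, then read the directory back
@OptIn(DFExperimental::class)
fun writeReadSketch() = runTest {
    val io = Global.io
    val dataNode = DataTree<String> {
        wrap("b", "root data")
    }

    //plain directory round trip
    val dir = Files.createTempDirectory("df_data_node")
    io.writeDataDirectory(dir, dataNode, StringIOFormat)
    val restored = DataTree { files(io, Name.EMPTY, dir) }
    println(restored)

    //zip archive: writeZip refuses to overwrite, so the pre-created temp file is removed first
    val zip = Files.createTempFile("df_data_node", ".zip")
    zip.deleteExisting()
    io.writeZip(zip, dataNode, StringIOFormat)
}
```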
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt
deleted file mode 100644
index 9d175bd3..00000000
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt
+++ /dev/null
@@ -1,38 +0,0 @@
-package space.kscience.dataforge.workspace
-
-import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.withContext
-import space.kscience.dataforge.data.DataTree
-import space.kscience.dataforge.io.EnvelopeFormat
-import space.kscience.dataforge.io.IOPlugin
-import space.kscience.dataforge.io.IOWriter
-import space.kscience.dataforge.misc.DFExperimental
-import java.nio.file.Path
-import java.nio.file.spi.FileSystemProvider
-import kotlin.io.path.exists
-import kotlin.io.path.extension
-
-/**
- * Write this [DataTree] as a zip archive
- */
-@DFExperimental
-public suspend fun <T : Any> IOPlugin.writeZip(
-    path: Path,
-    dataSet: DataTree<T>,
-    format: IOWriter<T>,
-    envelopeFormat: EnvelopeFormat? = null,
-): Unit = withContext(Dispatchers.IO) {
-    if (path.exists()) error("Can't override existing zip data file $path")
-    val actualFile = if (path.extension == "zip") {
-        path
-    } else {
-        path.resolveSibling(path.fileName.toString() + ".zip")
-    }
-    val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
-        ?: error("Zip file system provider not found")
-    //val fs = FileSystems.newFileSystem(actualFile, mapOf("create" to true))
-    val fs = fsProvider.newFileSystem(actualFile, mapOf("create" to true))
-    fs.use {
-        writeDataDirectory(fs.rootDirectories.first(), dataSet, format, envelopeFormat)
-    }
-}

From 6524c6c69182b51eddaf7ae73e55b997e7de8b34 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 3 Feb 2024 17:36:49 +0300
Subject: [PATCH 20/77] Add .kotlin to gitignore

---
 .gitignore | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.gitignore b/.gitignore
index 17a319a4..53b55cd4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,5 +5,7 @@ out/
 .gradle
 build/
 
+.kotlin
+
 
 !gradle-wrapper.jar
\ No newline at end of file

From 297847663bbbeab041750d9fd2c7ec3aa31e2a78 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 3 Feb 2024 19:16:18 +0300
Subject: [PATCH 21/77] Update documentation

---
 CHANGELOG.md                                  |  23 +-
 build.gradle.kts                              |   2 +-
 dataforge-context/README.md                   |   6 +-
 dataforge-context/api/dataforge-context.api   |  21 +-
 dataforge-data/README.md                      |   6 +-
 dataforge-data/build.gradle.kts               |   1 +
 dataforge-io/README.md                        |   6 +-
 dataforge-io/dataforge-io-yaml/README.md      |   6 +-
 dataforge-meta/README.md                      |   6 +-
 dataforge-meta/api/dataforge-meta.api         | 315 +++++++++---------
 .../meta/descriptors/MetaDescriptor.kt        |   1 +
 dataforge-scripting/README.md                 |   6 +-
 dataforge-workspace/README.md                 |   6 +-
 13 files changed, 211 insertions(+), 194 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4469f802..b9d3b6a9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,27 +3,42 @@
 ## Unreleased
 
 ### Added
+
+### Changed
+
+### Deprecated
+
+### Removed
+
+### Fixed
+
+### Security
+
+## 0.8.0 - 2024-02-03
+
+### Added
+
 - Wasm artifacts
 - Add automatic MetaConverter for serializable objects
 - Add Meta and MutableMeta delegates for convertable and serializable
 - Meta mapping for data.
 
 ### Changed
+
 - Descriptor `children` renamed to `nodes`
 - `MetaConverter` now inherits `MetaSpec` (former `Specification`), so `MetaConverter` can be used more universally.
 - Meta copy and modification now use lightweight non-observable meta builders.
+- Full refactor of Data API. DataTree now works similarly to Meta: it contains an optional anonymous root element and data items. Updates are available for the `ObservableDataSource` and `ObservableDataTree` variants.
 
 ### Deprecated
+
 - `node(key,converter)` in favor of `serializable` delegate
 
-### Removed
-
 ### Fixed
+
 - Partially fixed a bug with `MutableMeta` observable wrappers.
 - `valueSequence` now includes the root value, so `meta.update` works properly.
 
-### Security
-
 ## 0.7.0 - 2023-11-26
 
 ### Added
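
The `MetaConverter`/`MetaSpec` unification listed in the 0.8.0 changelog above can be illustrated with a short sketch based on the `dataforge-meta` API dump further below; the same converter object both writes a value to `Meta` and reads it back. The `42` literal and the function name are illustrative only.

```kotlin
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MetaConverter

fun metaConverterSketch() {
    //built-in converter for Int values, taken from MetaConverter.Companion
    val converter = MetaConverter.int
    val meta: Meta = converter.convert(42)          //value -> Meta
    val restored: Int? = converter.readOrNull(meta) //Meta -> value, null if the node is not convertible
    println(restored)
}
```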
diff --git a/build.gradle.kts b/build.gradle.kts
index 4a336e25..b9349868 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -8,7 +8,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.8.0-dev-1"
+    version = "0.8.0"
 }
 
 subprojects {
diff --git a/dataforge-context/README.md b/dataforge-context/README.md
index 905171b1..f0aff459 100644
--- a/dataforge-context/README.md
+++ b/dataforge-context/README.md
@@ -6,18 +6,16 @@ Context and provider definitions
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-context:0.7.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-context:0.8.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
 repositories {
     maven("https://repo.kotlin.link")
-    //uncomment to access development builds
-    //maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
     mavenCentral()
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-context:0.7.0")
+    implementation("space.kscience:dataforge-context:0.8.0")
 }
 ```
diff --git a/dataforge-context/api/dataforge-context.api b/dataforge-context/api/dataforge-context.api
index eac8e52c..12bca176 100644
--- a/dataforge-context/api/dataforge-context.api
+++ b/dataforge-context/api/dataforge-context.api
@@ -249,10 +249,27 @@ public final class space/kscience/dataforge/context/SlfLogManager$Companion : sp
 	public fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
 }
 
-public final class space/kscience/dataforge/properties/PropertyKt {
+public abstract interface annotation class space/kscience/dataforge/descriptors/Description : java/lang/annotation/Annotation {
+	public abstract fun value ()Ljava/lang/String;
 }
 
-public final class space/kscience/dataforge/properties/SchemePropertyKt {
+public abstract interface annotation class space/kscience/dataforge/descriptors/DescriptorResource : java/lang/annotation/Annotation {
+	public abstract fun resourceName ()Ljava/lang/String;
+}
+
+public abstract interface annotation class space/kscience/dataforge/descriptors/DescriptorUrl : java/lang/annotation/Annotation {
+	public abstract fun url ()Ljava/lang/String;
+}
+
+public abstract interface annotation class space/kscience/dataforge/descriptors/Multiple : java/lang/annotation/Annotation {
+}
+
+public final class space/kscience/dataforge/descriptors/ReflectiveDescriptorsKt {
+	public static final fun forClass (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor$Companion;Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
+	public static synthetic fun forClass$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor$Companion;Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
+}
+
+public final class space/kscience/dataforge/properties/MetaAsFlowKt {
 }
 
 public final class space/kscience/dataforge/provider/DfTypeKt {
diff --git a/dataforge-data/README.md b/dataforge-data/README.md
index 5935af6e..d77ed1b9 100644
--- a/dataforge-data/README.md
+++ b/dataforge-data/README.md
@@ -6,18 +6,16 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-data:0.7.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-data:0.8.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
 repositories {
     maven("https://repo.kotlin.link")
-    //uncomment to access development builds
-    //maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
     mavenCentral()
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-data:0.7.0")
+    implementation("space.kscience:dataforge-data:0.8.0")
 }
 ```
diff --git a/dataforge-data/build.gradle.kts b/dataforge-data/build.gradle.kts
index ea542290..99314ea3 100644
--- a/dataforge-data/build.gradle.kts
+++ b/dataforge-data/build.gradle.kts
@@ -11,6 +11,7 @@ kscience{
     dependencies {
         api(spclibs.atomicfu)
         api(projects.dataforgeMeta)
+        //Remove after subtype moved to stdlib
         api(kotlin("reflect"))
     }
 }
diff --git a/dataforge-io/README.md b/dataforge-io/README.md
index ec431a04..85e49e5a 100644
--- a/dataforge-io/README.md
+++ b/dataforge-io/README.md
@@ -6,18 +6,16 @@ IO module
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-io:0.7.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-io:0.8.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
 repositories {
     maven("https://repo.kotlin.link")
-    //uncomment to access development builds
-    //maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
     mavenCentral()
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-io:0.7.0")
+    implementation("space.kscience:dataforge-io:0.8.0")
 }
 ```
diff --git a/dataforge-io/dataforge-io-yaml/README.md b/dataforge-io/dataforge-io-yaml/README.md
index 06a0efb4..20f5b4f6 100644
--- a/dataforge-io/dataforge-io-yaml/README.md
+++ b/dataforge-io/dataforge-io-yaml/README.md
@@ -6,18 +6,16 @@ YAML meta IO
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-io-yaml:0.7.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-io-yaml:0.8.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
 repositories {
     maven("https://repo.kotlin.link")
-    //uncomment to access development builds
-    //maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
     mavenCentral()
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-io-yaml:0.7.0")
+    implementation("space.kscience:dataforge-io-yaml:0.8.0")
 }
 ```
diff --git a/dataforge-meta/README.md b/dataforge-meta/README.md
index 5f214640..bd11ebf1 100644
--- a/dataforge-meta/README.md
+++ b/dataforge-meta/README.md
@@ -6,18 +6,16 @@ Meta definition and basic operations on meta
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-meta:0.7.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-meta:0.8.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
 repositories {
     maven("https://repo.kotlin.link")
-    //uncomment to access development builds
-    //maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
     mavenCentral()
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-meta:0.7.0")
+    implementation("space.kscience:dataforge-meta:0.8.0")
 }
 ```
diff --git a/dataforge-meta/api/dataforge-meta.api b/dataforge-meta/api/dataforge-meta.api
index 1700ca7d..7564da80 100644
--- a/dataforge-meta/api/dataforge-meta.api
+++ b/dataforge-meta/api/dataforge-meta.api
@@ -56,6 +56,20 @@ public final class space/kscience/dataforge/meta/JsonMetaKt {
 	public static final fun toValue (Lkotlinx/serialization/json/JsonPrimitive;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lspace/kscience/dataforge/meta/Value;
 }
 
+public final class space/kscience/dataforge/meta/KeepTransformationRule : space/kscience/dataforge/meta/TransformationRule {
+	public fun <init> (Lkotlin/jvm/functions/Function1;)V
+	public final fun component1 ()Lkotlin/jvm/functions/Function1;
+	public final fun copy (Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/KeepTransformationRule;
+	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/KeepTransformationRule;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/KeepTransformationRule;
+	public fun equals (Ljava/lang/Object;)Z
+	public final fun getSelector ()Lkotlin/jvm/functions/Function1;
+	public fun hashCode ()I
+	public fun matches (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
+	public fun selectItems (Lspace/kscience/dataforge/meta/Meta;)Lkotlin/sequences/Sequence;
+	public fun toString ()Ljava/lang/String;
+	public fun transformItem (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MutableMeta;)V
+}
+
 public final class space/kscience/dataforge/meta/Laminate : space/kscience/dataforge/meta/TypedMeta {
 	public static final field Companion Lspace/kscience/dataforge/meta/Laminate$Companion;
 	public fun equals (Ljava/lang/Object;)Z
@@ -159,6 +173,32 @@ public final class space/kscience/dataforge/meta/MetaBuilder : space/kscience/da
 public abstract interface annotation class space/kscience/dataforge/meta/MetaBuilderMarker : java/lang/annotation/Annotation {
 }
 
+public abstract interface class space/kscience/dataforge/meta/MetaConverter : space/kscience/dataforge/meta/MetaSpec {
+	public static final field Companion Lspace/kscience/dataforge/meta/MetaConverter$Companion;
+	public abstract fun convert (Ljava/lang/Object;)Lspace/kscience/dataforge/meta/Meta;
+	public fun getDescriptor ()Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
+	public fun read (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+	public abstract fun readOrNull (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+}
+
+public final class space/kscience/dataforge/meta/MetaConverter$Companion {
+	public final fun getBoolean ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getDouble ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getFloat ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getInt ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getLong ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getMeta ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getNumber ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getString ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getValue ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun valueList (Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/MetaConverter;
+	public static synthetic fun valueList$default (Lspace/kscience/dataforge/meta/MetaConverter$Companion;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaConverter;
+}
+
+public final class space/kscience/dataforge/meta/MetaConverterKt {
+	public static final fun convertNullable (Lspace/kscience/dataforge/meta/MetaConverter;Ljava/lang/Object;)Lspace/kscience/dataforge/meta/Meta;
+}
+
 public final class space/kscience/dataforge/meta/MetaDelegateKt {
 	public static final fun boolean (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun boolean (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lkotlin/properties/ReadOnlyProperty;
@@ -178,20 +218,24 @@ public final class space/kscience/dataforge/meta/MetaDelegateKt {
 	public static final fun int (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MetaProvider;ILspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
+	public static final fun listOfSpec (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
+	public static synthetic fun listOfSpec$default (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun long (Lspace/kscience/dataforge/meta/MetaProvider;JLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun long (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MetaProvider;JLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun node (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun node (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/transformations/MetaConverter;)Lkotlin/properties/ReadOnlyProperty;
+	public static final fun node (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaSpec;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/transformations/MetaConverter;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
+	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaSpec;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun number (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun number (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun number (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
+	public static final fun spec (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
+	public static synthetic fun spec$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/String;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lkotlin/properties/ReadOnlyProperty;
@@ -252,6 +296,47 @@ public final class space/kscience/dataforge/meta/MetaSerializer : kotlinx/serial
 	public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/meta/Meta;)V
 }
 
+public abstract interface class space/kscience/dataforge/meta/MetaSpec : space/kscience/dataforge/meta/descriptors/Described {
+	public fun read (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+	public abstract fun readOrNull (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+}
+
+public final class space/kscience/dataforge/meta/MetaSpecKt {
+	public static final fun readNullable (Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+	public static final fun readValue (Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/meta/Value;)Ljava/lang/Object;
+}
+
+public final class space/kscience/dataforge/meta/MetaTransformation {
+	public static final field Companion Lspace/kscience/dataforge/meta/MetaTransformation$Companion;
+	public static final fun apply-impl (Ljava/util/Collection;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/Meta;
+	public static final fun bind-impl (Ljava/util/Collection;Lspace/kscience/dataforge/meta/ObservableMeta;Lspace/kscience/dataforge/meta/MutableMeta;)V
+	public static final synthetic fun box-impl (Ljava/util/Collection;)Lspace/kscience/dataforge/meta/MetaTransformation;
+	public static fun constructor-impl (Ljava/util/Collection;)Ljava/util/Collection;
+	public fun equals (Ljava/lang/Object;)Z
+	public static fun equals-impl (Ljava/util/Collection;Ljava/lang/Object;)Z
+	public static final fun equals-impl0 (Ljava/util/Collection;Ljava/util/Collection;)Z
+	public static final fun generate-impl (Ljava/util/Collection;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/Meta;
+	public fun hashCode ()I
+	public static fun hashCode-impl (Ljava/util/Collection;)I
+	public fun toString ()Ljava/lang/String;
+	public static fun toString-impl (Ljava/util/Collection;)Ljava/lang/String;
+	public final synthetic fun unbox-impl ()Ljava/util/Collection;
+}
+
+public final class space/kscience/dataforge/meta/MetaTransformation$Companion {
+	public final fun make--mWxz5M (Lkotlin/jvm/functions/Function1;)Ljava/util/Collection;
+}
+
+public final class space/kscience/dataforge/meta/MetaTransformationBuilder {
+	public fun <init> ()V
+	public final fun build-m6Fha10 ()Ljava/util/Collection;
+	public final fun keep (Ljava/lang/String;)V
+	public final fun keep (Lkotlin/jvm/functions/Function1;)V
+	public final fun keep (Lspace/kscience/dataforge/names/Name;)V
+	public final fun move (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)V
+	public static synthetic fun move$default (Lspace/kscience/dataforge/meta/MetaTransformationBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
+}
+
 public abstract interface class space/kscience/dataforge/meta/MutableMeta : space/kscience/dataforge/meta/Meta, space/kscience/dataforge/meta/MutableMetaProvider {
 	public static final field Companion Lspace/kscience/dataforge/meta/MutableMeta$Companion;
 	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
@@ -293,6 +378,8 @@ public final class space/kscience/dataforge/meta/MutableMetaDelegateKt {
 	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;ZLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun convertable (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun convertable$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun double (Lspace/kscience/dataforge/meta/MutableMetaProvider;DLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun double (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun double$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;DLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
@@ -307,6 +394,8 @@ public final class space/kscience/dataforge/meta/MutableMetaDelegateKt {
 	public static final fun int (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;ILspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun listOfConvertable (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun listOfConvertable$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun listValue (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun listValue$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun long (Lspace/kscience/dataforge/meta/MutableMetaProvider;JLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
@@ -314,9 +403,9 @@ public final class space/kscience/dataforge/meta/MutableMetaDelegateKt {
 	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;JLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun node (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun node (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/transformations/MetaConverter;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun node (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaConverter;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/transformations/MetaConverter;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaConverter;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun number (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun number (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun number (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lkotlin/properties/ReadWriteProperty;
@@ -365,7 +454,7 @@ public final class space/kscience/dataforge/meta/MutableMetaKt {
 	public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Value;)V
 	public static final fun setIndexed (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Ljava/lang/Iterable;Lkotlin/jvm/functions/Function2;)V
 	public static synthetic fun setIndexed$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Ljava/lang/Iterable;Lkotlin/jvm/functions/Function2;ILjava/lang/Object;)V
-	public static final fun toMutableMeta (Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
+	public static final fun toMutableMeta (Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/MutableMeta;
 	public static final fun update (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/Meta;)V
 	public static final fun withDefault (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/MetaProvider;)Lspace/kscience/dataforge/meta/MutableMeta;
 }
@@ -419,11 +508,6 @@ public abstract interface class space/kscience/dataforge/meta/ObservableMeta : s
 	public abstract fun removeListener (Ljava/lang/Object;)V
 }
 
-public final class space/kscience/dataforge/meta/ObservableMetaKt {
-	public static final fun useProperty (Lspace/kscience/dataforge/meta/Scheme;Lkotlin/reflect/KProperty1;Ljava/lang/Object;Lkotlin/jvm/functions/Function2;)V
-	public static synthetic fun useProperty$default (Lspace/kscience/dataforge/meta/Scheme;Lkotlin/reflect/KProperty1;Ljava/lang/Object;Lkotlin/jvm/functions/Function2;ILjava/lang/Object;)V
-}
-
 public final class space/kscience/dataforge/meta/ObservableMetaWrapperKt {
 	public static final fun asObservable (Lspace/kscience/dataforge/meta/MutableMeta;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
 }
@@ -437,10 +521,19 @@ public abstract interface class space/kscience/dataforge/meta/ObservableMutableM
 	public abstract fun getOrCreate (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
 }
 
-public abstract interface class space/kscience/dataforge/meta/ReadOnlySpecification : space/kscience/dataforge/meta/descriptors/Described {
-	public abstract fun empty ()Ljava/lang/Object;
-	public fun invoke (Lkotlin/jvm/functions/Function1;)Ljava/lang/Object;
-	public abstract fun read (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+public final class space/kscience/dataforge/meta/RegexItemTransformationRule : space/kscience/dataforge/meta/TransformationRule {
+	public fun <init> (Lkotlin/text/Regex;Lkotlin/jvm/functions/Function4;)V
+	public final fun component1 ()Lkotlin/text/Regex;
+	public final fun component2 ()Lkotlin/jvm/functions/Function4;
+	public final fun copy (Lkotlin/text/Regex;Lkotlin/jvm/functions/Function4;)Lspace/kscience/dataforge/meta/RegexItemTransformationRule;
+	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/RegexItemTransformationRule;Lkotlin/text/Regex;Lkotlin/jvm/functions/Function4;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/RegexItemTransformationRule;
+	public fun equals (Ljava/lang/Object;)Z
+	public final fun getFrom ()Lkotlin/text/Regex;
+	public final fun getTransform ()Lkotlin/jvm/functions/Function4;
+	public fun hashCode ()I
+	public fun matches (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
+	public fun toString ()Ljava/lang/String;
+	public fun transformItem (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MutableMeta;)V
 }
 
 public class space/kscience/dataforge/meta/Scheme : space/kscience/dataforge/meta/Configurable, space/kscience/dataforge/meta/MetaRepr, space/kscience/dataforge/meta/MutableMetaProvider, space/kscience/dataforge/meta/descriptors/Described {
@@ -454,6 +547,7 @@ public class space/kscience/dataforge/meta/Scheme : space/kscience/dataforge/met
 	public fun setValue (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Value;)V
 	public fun toMeta ()Lspace/kscience/dataforge/meta/Laminate;
 	public synthetic fun toMeta ()Lspace/kscience/dataforge/meta/Meta;
+	public fun toString ()Ljava/lang/String;
 	public fun validate (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
 }
 
@@ -461,20 +555,30 @@ public final class space/kscience/dataforge/meta/SchemeKt {
 	public static final fun copy (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Scheme;
 	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/Scheme;
 	public static final fun invoke (Lspace/kscience/dataforge/meta/Scheme;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Scheme;
-	public static final fun retarget (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/MutableMeta;)Lspace/kscience/dataforge/meta/Scheme;
+	public static final fun scheme (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun scheme (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun scheme$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun scheme$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun schemeOrNull (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun schemeOrNull (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun schemeOrNull$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun schemeOrNull$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun updateWith (Lspace/kscience/dataforge/meta/Configurable;Lspace/kscience/dataforge/meta/SchemeSpec;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Scheme;
+	public static final fun updateWith (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Scheme;
+	public static final fun useProperty (Lspace/kscience/dataforge/meta/Scheme;Lkotlin/reflect/KProperty1;Ljava/lang/Object;Lkotlin/jvm/functions/Function2;)V
+	public static synthetic fun useProperty$default (Lspace/kscience/dataforge/meta/Scheme;Lkotlin/reflect/KProperty1;Ljava/lang/Object;Lkotlin/jvm/functions/Function2;ILjava/lang/Object;)V
 }
 
-public class space/kscience/dataforge/meta/SchemeSpec : space/kscience/dataforge/meta/Specification {
+public class space/kscience/dataforge/meta/SchemeSpec : space/kscience/dataforge/meta/MetaConverter {
 	public fun <init> (Lkotlin/jvm/functions/Function0;)V
-	public synthetic fun empty ()Ljava/lang/Object;
-	public fun empty ()Lspace/kscience/dataforge/meta/Scheme;
+	public synthetic fun convert (Ljava/lang/Object;)Lspace/kscience/dataforge/meta/Meta;
+	public fun convert (Lspace/kscience/dataforge/meta/Scheme;)Lspace/kscience/dataforge/meta/Meta;
+	public final fun empty ()Lspace/kscience/dataforge/meta/Scheme;
 	public fun getDescriptor ()Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
-	public synthetic fun invoke (Lkotlin/jvm/functions/Function1;)Ljava/lang/Object;
 	public final fun invoke (Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Scheme;
-	public synthetic fun read (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
-	public fun read (Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/Scheme;
-	public synthetic fun write (Lspace/kscience/dataforge/meta/MutableMeta;)Ljava/lang/Object;
-	public fun write (Lspace/kscience/dataforge/meta/MutableMeta;)Lspace/kscience/dataforge/meta/Scheme;
+	public synthetic fun readOrNull (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+	public fun readOrNull (Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/Scheme;
+	public final fun write (Lspace/kscience/dataforge/meta/MutableMeta;)Lspace/kscience/dataforge/meta/Scheme;
 }
 
 public final class space/kscience/dataforge/meta/SealedMeta : space/kscience/dataforge/meta/TypedMeta {
@@ -515,21 +619,20 @@ public final class space/kscience/dataforge/meta/SealedMetaKt {
 	public static final fun seal (Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/SealedMeta;
 }
 
-public abstract interface class space/kscience/dataforge/meta/Specification : space/kscience/dataforge/meta/ReadOnlySpecification {
-	public abstract fun write (Lspace/kscience/dataforge/meta/MutableMeta;)Ljava/lang/Object;
-}
-
-public final class space/kscience/dataforge/meta/SpecificationKt {
-	public static final fun spec (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun spec (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun spec$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun spec$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun specOrNull (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun specOrNull (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun specOrNull$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun specOrNull$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun updateWith (Lspace/kscience/dataforge/meta/Configurable;Lspace/kscience/dataforge/meta/Specification;Lkotlin/jvm/functions/Function1;)Ljava/lang/Object;
-	public static final fun updateWith (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Specification;Lkotlin/jvm/functions/Function1;)Ljava/lang/Object;
+public final class space/kscience/dataforge/meta/SingleItemTransformationRule : space/kscience/dataforge/meta/TransformationRule {
+	public fun <init> (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function3;)V
+	public final fun component1 ()Lspace/kscience/dataforge/names/Name;
+	public final fun component2 ()Lkotlin/jvm/functions/Function3;
+	public final fun copy (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function3;)Lspace/kscience/dataforge/meta/SingleItemTransformationRule;
+	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/SingleItemTransformationRule;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function3;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/SingleItemTransformationRule;
+	public fun equals (Ljava/lang/Object;)Z
+	public final fun getFrom ()Lspace/kscience/dataforge/names/Name;
+	public final fun getTransform ()Lkotlin/jvm/functions/Function3;
+	public fun hashCode ()I
+	public fun matches (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
+	public fun selectItems (Lspace/kscience/dataforge/meta/Meta;)Lkotlin/sequences/Sequence;
+	public fun toString ()Ljava/lang/String;
+	public fun transformItem (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MutableMeta;)V
 }
 
 public final class space/kscience/dataforge/meta/StringValue : space/kscience/dataforge/meta/Value {
@@ -550,6 +653,12 @@ public final class space/kscience/dataforge/meta/StringValue : space/kscience/da
 	public final synthetic fun unbox-impl ()Ljava/lang/String;
 }
 
+public abstract interface class space/kscience/dataforge/meta/TransformationRule {
+	public abstract fun matches (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
+	public fun selectItems (Lspace/kscience/dataforge/meta/Meta;)Lkotlin/sequences/Sequence;
+	public abstract fun transformItem (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MutableMeta;)V
+}
+
 public final class space/kscience/dataforge/meta/True : space/kscience/dataforge/meta/Value {
 	public static final field INSTANCE Lspace/kscience/dataforge/meta/True;
 	public fun equals (Ljava/lang/Object;)Z
@@ -692,6 +801,7 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptor {
 	public final fun getDescription ()Ljava/lang/String;
 	public final fun getIndexKey ()Ljava/lang/String;
 	public final fun getMultiple ()Z
+	public final fun getNodes ()Ljava/util/Map;
 	public final fun getValueRestriction ()Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
 	public final fun getValueTypes ()Ljava/util/List;
 	public fun hashCode ()I
@@ -720,6 +830,7 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorBuild
 	public final fun attributes (Lkotlin/jvm/functions/Function1;)V
 	public final fun build ()Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
 	public final fun default (Ljava/lang/Object;)V
+	public final fun from (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)V
 	public final fun getAllowedValues ()Ljava/util/List;
 	public final fun getAttributes ()Lspace/kscience/dataforge/meta/MutableMeta;
 	public final fun getChildren ()Ljava/util/Map;
@@ -730,10 +841,6 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorBuild
 	public final fun getMultiple ()Z
 	public final fun getValueRestriction ()Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
 	public final fun getValueTypes ()Ljava/util/List;
-	public final fun item (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
-	public static synthetic fun item$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
-	public final fun node (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
-	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
 	public final fun setAllowedValues (Ljava/util/List;)V
 	public final fun setAttributes (Lspace/kscience/dataforge/meta/MutableMeta;)V
 	public final fun setChildren (Ljava/util/Map;)V
@@ -751,16 +858,16 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorBuild
 	public static final fun MetaDescriptor (Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
 	public static final fun copy (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
 	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
-	public static final fun item (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
 	public static final fun node (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lkotlin/jvm/functions/Function1;)V
 	public static final fun node (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/descriptors/Described;Lkotlin/jvm/functions/Function1;)V
-	public static final fun node (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
+	public static final fun node (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)V
+	public static final fun node (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)V
 	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/descriptors/Described;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
 	public static final fun required (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;)V
-	public static final fun value (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
-	public static final fun value (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
-	public static synthetic fun value$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
-	public static synthetic fun value$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
+	public static final fun value (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;)V
+	public static final fun value (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;)V
+	public static synthetic fun value$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
+	public static synthetic fun value$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
 }
 
 public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorKt {
@@ -781,117 +888,6 @@ public final class space/kscience/dataforge/meta/descriptors/ValueRestriction :
 	public static fun values ()[Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
 }
 
-public final class space/kscience/dataforge/meta/transformations/KeepTransformationRule : space/kscience/dataforge/meta/transformations/TransformationRule {
-	public fun <init> (Lkotlin/jvm/functions/Function1;)V
-	public final fun component1 ()Lkotlin/jvm/functions/Function1;
-	public final fun copy (Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/transformations/KeepTransformationRule;
-	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/transformations/KeepTransformationRule;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/transformations/KeepTransformationRule;
-	public fun equals (Ljava/lang/Object;)Z
-	public final fun getSelector ()Lkotlin/jvm/functions/Function1;
-	public fun hashCode ()I
-	public fun matches (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
-	public fun selectItems (Lspace/kscience/dataforge/meta/Meta;)Lkotlin/sequences/Sequence;
-	public fun toString ()Ljava/lang/String;
-	public fun transformItem (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MutableMeta;)V
-}
-
-public abstract interface class space/kscience/dataforge/meta/transformations/MetaConverter {
-	public static final field Companion Lspace/kscience/dataforge/meta/transformations/MetaConverter$Companion;
-	public fun getDescriptor ()Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
-	public abstract fun getType ()Lkotlin/reflect/KType;
-	public fun metaToObject (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
-	public abstract fun metaToObjectOrNull (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
-	public abstract fun objectToMeta (Ljava/lang/Object;)Lspace/kscience/dataforge/meta/Meta;
-}
-
-public final class space/kscience/dataforge/meta/transformations/MetaConverter$Companion {
-	public final fun getBoolean ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun getDouble ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun getFloat ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun getInt ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun getLong ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun getMeta ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun getNumber ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun getString ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun getValue ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun valueList (Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public static synthetic fun valueList$default (Lspace/kscience/dataforge/meta/transformations/MetaConverter$Companion;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-}
-
-public final class space/kscience/dataforge/meta/transformations/MetaConverterKt {
-	public static final fun nullableMetaToObject (Lspace/kscience/dataforge/meta/transformations/MetaConverter;Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
-	public static final fun nullableObjectToMeta (Lspace/kscience/dataforge/meta/transformations/MetaConverter;Ljava/lang/Object;)Lspace/kscience/dataforge/meta/Meta;
-	public static final fun valueToObject (Lspace/kscience/dataforge/meta/transformations/MetaConverter;Lspace/kscience/dataforge/meta/Value;)Ljava/lang/Object;
-}
-
-public final class space/kscience/dataforge/meta/transformations/MetaTransformation {
-	public static final field Companion Lspace/kscience/dataforge/meta/transformations/MetaTransformation$Companion;
-	public static final fun apply-impl (Ljava/util/Collection;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/Meta;
-	public static final fun bind-impl (Ljava/util/Collection;Lspace/kscience/dataforge/meta/ObservableMeta;Lspace/kscience/dataforge/meta/MutableMeta;)V
-	public static final synthetic fun box-impl (Ljava/util/Collection;)Lspace/kscience/dataforge/meta/transformations/MetaTransformation;
-	public static fun constructor-impl (Ljava/util/Collection;)Ljava/util/Collection;
-	public fun equals (Ljava/lang/Object;)Z
-	public static fun equals-impl (Ljava/util/Collection;Ljava/lang/Object;)Z
-	public static final fun equals-impl0 (Ljava/util/Collection;Ljava/util/Collection;)Z
-	public static final fun generate-impl (Ljava/util/Collection;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/Meta;
-	public fun hashCode ()I
-	public static fun hashCode-impl (Ljava/util/Collection;)I
-	public fun toString ()Ljava/lang/String;
-	public static fun toString-impl (Ljava/util/Collection;)Ljava/lang/String;
-	public final synthetic fun unbox-impl ()Ljava/util/Collection;
-}
-
-public final class space/kscience/dataforge/meta/transformations/MetaTransformation$Companion {
-	public final fun make-XNaMui4 (Lkotlin/jvm/functions/Function1;)Ljava/util/Collection;
-}
-
-public final class space/kscience/dataforge/meta/transformations/MetaTransformationBuilder {
-	public fun <init> ()V
-	public final fun build-050menU ()Ljava/util/Collection;
-	public final fun keep (Ljava/lang/String;)V
-	public final fun keep (Lkotlin/jvm/functions/Function1;)V
-	public final fun keep (Lspace/kscience/dataforge/names/Name;)V
-	public final fun move (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)V
-	public static synthetic fun move$default (Lspace/kscience/dataforge/meta/transformations/MetaTransformationBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
-}
-
-public final class space/kscience/dataforge/meta/transformations/RegexItemTransformationRule : space/kscience/dataforge/meta/transformations/TransformationRule {
-	public fun <init> (Lkotlin/text/Regex;Lkotlin/jvm/functions/Function4;)V
-	public final fun component1 ()Lkotlin/text/Regex;
-	public final fun component2 ()Lkotlin/jvm/functions/Function4;
-	public final fun copy (Lkotlin/text/Regex;Lkotlin/jvm/functions/Function4;)Lspace/kscience/dataforge/meta/transformations/RegexItemTransformationRule;
-	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/transformations/RegexItemTransformationRule;Lkotlin/text/Regex;Lkotlin/jvm/functions/Function4;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/transformations/RegexItemTransformationRule;
-	public fun equals (Ljava/lang/Object;)Z
-	public final fun getFrom ()Lkotlin/text/Regex;
-	public final fun getTransform ()Lkotlin/jvm/functions/Function4;
-	public fun hashCode ()I
-	public fun matches (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
-	public fun toString ()Ljava/lang/String;
-	public fun transformItem (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MutableMeta;)V
-}
-
-public final class space/kscience/dataforge/meta/transformations/SingleItemTransformationRule : space/kscience/dataforge/meta/transformations/TransformationRule {
-	public fun <init> (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function3;)V
-	public final fun component1 ()Lspace/kscience/dataforge/names/Name;
-	public final fun component2 ()Lkotlin/jvm/functions/Function3;
-	public final fun copy (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function3;)Lspace/kscience/dataforge/meta/transformations/SingleItemTransformationRule;
-	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/transformations/SingleItemTransformationRule;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function3;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/transformations/SingleItemTransformationRule;
-	public fun equals (Ljava/lang/Object;)Z
-	public final fun getFrom ()Lspace/kscience/dataforge/names/Name;
-	public final fun getTransform ()Lkotlin/jvm/functions/Function3;
-	public fun hashCode ()I
-	public fun matches (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
-	public fun selectItems (Lspace/kscience/dataforge/meta/Meta;)Lkotlin/sequences/Sequence;
-	public fun toString ()Ljava/lang/String;
-	public fun transformItem (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MutableMeta;)V
-}
-
-public abstract interface class space/kscience/dataforge/meta/transformations/TransformationRule {
-	public abstract fun matches (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
-	public fun selectItems (Lspace/kscience/dataforge/meta/Meta;)Lkotlin/sequences/Sequence;
-	public abstract fun transformItem (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MutableMeta;)V
-}
-
 public final class space/kscience/dataforge/misc/CastJvmKt {
 	public static final fun unsafeCast (Ljava/lang/Object;)Ljava/lang/Object;
 }
@@ -972,6 +968,7 @@ public final class space/kscience/dataforge/names/NameKt {
 	public static final fun startsWith (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Z
 	public static final fun startsWith (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;)Z
 	public static final fun startsWith (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/NameToken;)Z
+	public static final fun toStringUnescaped (Lspace/kscience/dataforge/names/Name;)Ljava/lang/String;
 	public static final fun withIndex (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Lspace/kscience/dataforge/names/Name;
 }
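
The dump above reflects the rename of the `spec`/`specOrNull` delegates to `scheme`/`schemeOrNull` and the move of `SchemeSpec` onto `MetaConverter`. A minimal sketch of the renamed delegate follows; the `AxisScheme`/`PlotScheme` classes and their properties are hypothetical and only illustrate the API shape listed in the dump.

```kotlin
import space.kscience.dataforge.meta.*

// Hypothetical schemes; only meant to illustrate the renamed delegate API.
class AxisScheme : Scheme() {
    var label by string()

    companion object : SchemeSpec<AxisScheme>(::AxisScheme)
}

class PlotScheme : Scheme() {
    // formerly `by spec(AxisScheme)`; the node key defaults to the property name
    var xAxis by scheme(AxisScheme)

    companion object : SchemeSpec<PlotScheme>(::PlotScheme)
}
```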
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptor.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptor.kt
index 12bbd5d4..45954985 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptor.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptor.kt
@@ -7,6 +7,7 @@ import space.kscience.dataforge.names.*
 /**
  * Restrictions on value in the node
  */
+@Serializable
 public enum class ValueRestriction {
     /**
      * No restrictions
diff --git a/dataforge-scripting/README.md b/dataforge-scripting/README.md
index af79cc8f..fbc5cb69 100644
--- a/dataforge-scripting/README.md
+++ b/dataforge-scripting/README.md
@@ -6,18 +6,16 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-scripting:0.7.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-scripting:0.8.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
 repositories {
     maven("https://repo.kotlin.link")
-    //uncomment to access development builds
-    //maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
     mavenCentral()
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-scripting:0.7.0")
+    implementation("space.kscience:dataforge-scripting:0.8.0")
 }
 ```
diff --git a/dataforge-workspace/README.md b/dataforge-workspace/README.md
index c096699f..cea37368 100644
--- a/dataforge-workspace/README.md
+++ b/dataforge-workspace/README.md
@@ -6,18 +6,16 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-workspace:0.7.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-workspace:0.8.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
 repositories {
     maven("https://repo.kotlin.link")
-    //uncomment to access development builds
-    //maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
     mavenCentral()
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-workspace:0.7.0")
+    implementation("space.kscience:dataforge-workspace:0.8.0")
 }
 ```

From 196b394278afdb67bc3993618ad362ced04b2c4b Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 3 Feb 2024 19:34:01 +0300
Subject: [PATCH 22/77] Update API

---
 dataforge-meta/api/dataforge-meta.api | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/dataforge-meta/api/dataforge-meta.api b/dataforge-meta/api/dataforge-meta.api
index 7564da80..49f28259 100644
--- a/dataforge-meta/api/dataforge-meta.api
+++ b/dataforge-meta/api/dataforge-meta.api
@@ -881,6 +881,7 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorKt {
 
 public final class space/kscience/dataforge/meta/descriptors/ValueRestriction : java/lang/Enum {
 	public static final field ABSENT Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
+	public static final field Companion Lspace/kscience/dataforge/meta/descriptors/ValueRestriction$Companion;
 	public static final field NONE Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
 	public static final field REQUIRED Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
 	public static fun getEntries ()Lkotlin/enums/EnumEntries;
@@ -888,6 +889,10 @@ public final class space/kscience/dataforge/meta/descriptors/ValueRestriction :
 	public static fun values ()[Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
 }
 
+public final class space/kscience/dataforge/meta/descriptors/ValueRestriction$Companion {
+	public final fun serializer ()Lkotlinx/serialization/KSerializer;
+}
+
 public final class space/kscience/dataforge/misc/CastJvmKt {
 	public static final fun unsafeCast (Ljava/lang/Object;)Ljava/lang/Object;
 }
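
The new companion and `serializer()` come from the `@Serializable` annotation added to `ValueRestriction` in the previous patch, so the enum can be used with kotlinx.serialization directly. A minimal sketch (the JSON format here is just an example):

```kotlin
import kotlinx.serialization.decodeFromString
import kotlinx.serialization.encodeToString
import kotlinx.serialization.json.Json
import space.kscience.dataforge.meta.descriptors.ValueRestriction

fun main() {
    // enums are encoded by their name by default
    val encoded = Json.encodeToString(ValueRestriction.REQUIRED)
    println(encoded) // "REQUIRED"

    val decoded: ValueRestriction = Json.decodeFromString(encoded)
    println(decoded) // REQUIRED
}
```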

From 66ce15ae6d90d1fce9648748a73a9959274dc436 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 4 Feb 2024 13:54:08 +0300
Subject: [PATCH 23/77] Hot-fix for scheme initialization

---
 build.gradle.kts                              |  2 +-
 .../kscience/dataforge/data/ActionsTest.kt    |  2 +-
 dataforge-meta/api/dataforge-meta.api         |  9 ++++-
 .../kscience/dataforge/meta/MutableMeta.kt    |  5 +++
 .../space/kscience/dataforge/meta/Scheme.kt   | 40 +++++++++----------
 .../meta/descriptors/MetaDescriptorBuilder.kt |  3 --
 .../meta/descriptors/schemeDescriptor.kt      |  9 +++--
 .../kscience/dataforge/meta/SchemeTest.kt     | 23 ++++++++++-
 8 files changed, 61 insertions(+), 32 deletions(-)

diff --git a/build.gradle.kts b/build.gradle.kts
index b9349868..8be3a7b0 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -8,7 +8,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.8.0"
+    version = "0.8.1"
 }
 
 subprojects {
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index 4aa6e6d4..f6eae12c 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -42,7 +42,7 @@ internal class ActionsTest {
             source.wrap(it.toString(), it)
         }
 
-        delay(10)
+        delay(20)
 
         source.close()
         result.awaitClose()
diff --git a/dataforge-meta/api/dataforge-meta.api b/dataforge-meta/api/dataforge-meta.api
index 49f28259..f9834c54 100644
--- a/dataforge-meta/api/dataforge-meta.api
+++ b/dataforge-meta/api/dataforge-meta.api
@@ -433,6 +433,7 @@ public final class space/kscience/dataforge/meta/MutableMetaDelegateKt {
 public final class space/kscience/dataforge/meta/MutableMetaKt {
 	public static final fun ObservableMutableMeta ()Lspace/kscience/dataforge/meta/ObservableMutableMeta;
 	public static final fun ObservableMutableMeta (Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
+	public static final fun ObservableMutableMeta (Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
 	public static synthetic fun ObservableMutableMeta$default (Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
 	public static final fun append (Lspace/kscience/dataforge/meta/MutableMeta;Ljava/lang/String;Lspace/kscience/dataforge/meta/Meta;)V
 	public static final fun append (Lspace/kscience/dataforge/meta/MutableMeta;Ljava/lang/String;Lspace/kscience/dataforge/meta/Value;)V
@@ -538,6 +539,8 @@ public final class space/kscience/dataforge/meta/RegexItemTransformationRule : s
 
 public class space/kscience/dataforge/meta/Scheme : space/kscience/dataforge/meta/Configurable, space/kscience/dataforge/meta/MetaRepr, space/kscience/dataforge/meta/MutableMetaProvider, space/kscience/dataforge/meta/descriptors/Described {
 	public fun <init> ()V
+	public fun <init> (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)V
+	public synthetic fun <init> (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;ILkotlin/jvm/internal/DefaultConstructorMarker;)V
 	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
 	public fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
 	public final fun getDescriptor ()Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
@@ -555,6 +558,10 @@ public final class space/kscience/dataforge/meta/SchemeKt {
 	public static final fun copy (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Scheme;
 	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/Scheme;
 	public static final fun invoke (Lspace/kscience/dataforge/meta/Scheme;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Scheme;
+	public static final fun listOfScheme (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun listOfScheme (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun listOfScheme$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun listOfScheme$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun scheme (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun scheme (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun scheme$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
@@ -837,7 +844,6 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorBuild
 	public final fun getDefault ()Lspace/kscience/dataforge/meta/Value;
 	public final fun getDescription ()Ljava/lang/String;
 	public final fun getIndexKey ()Ljava/lang/String;
-	public final fun getInfo ()Ljava/lang/String;
 	public final fun getMultiple ()Z
 	public final fun getValueRestriction ()Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
 	public final fun getValueTypes ()Ljava/util/List;
@@ -847,7 +853,6 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorBuild
 	public final fun setDefault (Lspace/kscience/dataforge/meta/Value;)V
 	public final fun setDescription (Ljava/lang/String;)V
 	public final fun setIndexKey (Ljava/lang/String;)V
-	public final fun setInfo (Ljava/lang/String;)V
 	public final fun setMultiple (Z)V
 	public final fun setValueRestriction (Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;)V
 	public final fun setValueTypes (Ljava/util/List;)V
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
index 231f9e54..d3453c4d 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
@@ -378,6 +378,11 @@ public fun Meta.asMutableMeta(): MutableMeta = (this as? MutableMeta) ?: toMutab
 @JsName("newObservableMutableMeta")
 public fun ObservableMutableMeta(): ObservableMutableMeta = MutableMetaImpl(null)
 
+/**
+ * Create a pre-filled [ObservableMutableMeta]
+ */
+public fun ObservableMutableMeta(content: Meta): ObservableMutableMeta = ObservableMutableMeta { update(content) }
+
 /**
  * Build a [MutableMeta] using given transformation
  */
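
A short usage sketch for the pre-filled factory above; the `Meta {}` builder and `put` DSL are assumed to be the standard dataforge-meta builder syntax.

```kotlin
import space.kscience.dataforge.meta.*

fun main() {
    val snapshot = Meta {
        "data" put {
            "type" put "scatter"
        }
    }

    // equivalent to ObservableMutableMeta { update(snapshot) }
    val observable: ObservableMutableMeta = ObservableMutableMeta(snapshot)
    println(observable["data.type"].string) // scatter
}
```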
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
index 2e9edc1d..c330de99 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
@@ -13,32 +13,28 @@ import kotlin.reflect.KProperty1
 
 /**
  * A base for delegate-based or descriptor-based scheme. [Scheme] has an empty constructor to simplify usage from [MetaSpec].
- * Default item provider and [MetaDescriptor] are optional
+ *
+ * @param prototype default values provided by this scheme
  */
-public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurable {
+public open class Scheme(
+    private var prototype: Meta? = null,
+    descriptor: MetaDescriptor? = null
+) : Described, MetaRepr, MutableMetaProvider, Configurable {
 
     /**
      * Meta to be mutated by this scheme
      */
-    private var target: MutableMeta? = null
-        get() {
-            // automatic initialization of target if it is missing
-            if (field == null) {
-                field = MutableMeta()
-            }
-            return field
-        }
+    internal var target: MutableMeta = MutableMeta()
 
     /**
-     * Default values provided by this scheme
+     * A descriptor of this scheme
      */
-    private var prototype: Meta? = null
+    final override var descriptor: MetaDescriptor? = descriptor
+        private set
+
 
     final override val meta: ObservableMutableMeta = SchemeMeta(Name.EMPTY)
 
-    final override var descriptor: MetaDescriptor? = null
-        private set
-
     /**
      * This method must be called before the scheme could be used
      */
@@ -90,7 +86,7 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
                 ?: descriptor?.get(pathName)?.defaultValue
             set(value) {
                 val oldValue = target[pathName]?.value
-                target!![pathName] = value
+                target[pathName] = value
                 if (oldValue != value) {
                     invalidate(Name.EMPTY)
                 }
@@ -126,7 +122,7 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
         override fun hashCode(): Int = Meta.hashCode(this)
 
         override fun set(name: Name, node: Meta?) {
-            target!![name] = node
+            target[name] = node
             invalidate(name)
         }
 
@@ -176,15 +172,19 @@ public open class SchemeSpec<T : Scheme>(
         it.initialize(MutableMeta(), source, descriptor)
     }
 
+    /**
+     * Attach a [Scheme] to the target [MutableMeta] so that changes made to the scheme are written to it. If the newly created scheme already contains data, that data is copied to the target.
+     */
     public fun write(target: MutableMeta): T = empty().also {
+        target.update(it.meta)
         it.initialize(target, Meta.EMPTY, descriptor)
     }
 
     /**
-     * Generate an empty object
+     * Generate a blank object. The object may already contain elements if they are defined in the constructor or an init block.
      */
     public fun empty(): T = builder().also {
-        it.initialize(MutableMeta(), Meta.EMPTY, descriptor)
+        it.initialize(MutableMeta(), it.target, descriptor)
     }
 
     override fun convert(obj: T): Meta  = obj.meta
@@ -269,7 +269,6 @@ public fun <T : Scheme> Scheme.schemeOrNull(
  * If children are mutable, the changes in list elements are reflected on them.
  * The list is a snapshot of children state, so change in structure is not reflected on its composition.
  */
-@DFExperimental
 public fun <T : Scheme> MutableMeta.listOfScheme(
     spec: SchemeSpec<T>,
     key: Name? = null,
@@ -286,7 +285,6 @@ public fun <T : Scheme> MutableMeta.listOfScheme(
 }
 
 
-@DFExperimental
 public fun <T : Scheme> Scheme.listOfScheme(
     spec: SchemeSpec<T>,
     key: Name? = null,
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
index 5d4d81ad..2291e3d3 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
@@ -11,9 +11,6 @@ import kotlin.collections.set
 public class MetaDescriptorBuilder @PublishedApi internal constructor() {
     public var description: String? = null
 
-    @Deprecated("Replace by description", ReplaceWith("description"))
-    public var info: String? by ::description
-
     public var children: MutableMap<String, MetaDescriptorBuilder> = linkedMapOf()
     public var multiple: Boolean = false
     public var valueRestriction: ValueRestriction = ValueRestriction.NONE
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/schemeDescriptor.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/schemeDescriptor.kt
index a2a77182..c7b73508 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/schemeDescriptor.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/schemeDescriptor.kt
@@ -3,11 +3,12 @@ package space.kscience.dataforge.meta.descriptors
 import space.kscience.dataforge.meta.Scheme
 import space.kscience.dataforge.meta.SchemeSpec
 import space.kscience.dataforge.meta.ValueType
-import space.kscience.dataforge.misc.DFExperimental
 import kotlin.reflect.KProperty1
 import kotlin.reflect.typeOf
 
-@DFExperimental
+/**
+ * Add a value item to a [MetaDescriptor] inferring some of its properties from the type
+ */
 public inline fun <S : Scheme, reified T> MetaDescriptorBuilder.value(
     property: KProperty1<S, T>,
     noinline block: MetaDescriptorBuilder.() -> Unit = {},
@@ -39,7 +40,9 @@ public inline fun <S : Scheme, reified T> MetaDescriptorBuilder.value(
     else -> node(property.name, block)
 }
 
-@DFExperimental
+/**
+ * Add a scheme-based branch to a [MetaDescriptor]
+ */
 public inline fun <S : Scheme, reified T : Scheme> MetaDescriptorBuilder.scheme(
     property: KProperty1<S, T>,
     spec: SchemeSpec<T>,
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/SchemeTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/SchemeTest.kt
index bb2736ce..eaa766d8 100644
--- a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/SchemeTest.kt
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/SchemeTest.kt
@@ -5,7 +5,18 @@ import kotlin.test.Test
 import kotlin.test.assertEquals
 import kotlin.test.assertNotNull
 
-@DFExperimental
+
+private class SchemeWithInit : Scheme() {
+    init {
+        set("initial", "initialValue")
+    }
+
+    var initial by string()
+    companion object : SchemeSpec<SchemeWithInit>(::SchemeWithInit)
+}
+
+
+
 class SchemeTest {
     @Test
     fun testSchemeWrappingBeforeEdit() {
@@ -15,6 +26,7 @@ class SchemeTest {
         assertEquals(29, config["a"].int)
     }
 
+    @OptIn(DFExperimental::class)
     @Test
     fun testSchemeWrappingAfterEdit() {
         val scheme = TestScheme.empty()
@@ -46,4 +58,13 @@ class SchemeTest {
         scheme.v = ListValue(1.0, 2.0, 3.0)
         assertNotNull(value)
     }
+
+
+    @Test
+    fun testSchemeWithInit() {
+        val scheme = SchemeWithInit()
+        assertEquals("initialValue", scheme.initial)
+        scheme.initial = "none"
+        assertEquals("none", scheme.initial)
+    }
 }
\ No newline at end of file
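
A sketch of the behavior this hot-fix guarantees, mirroring the `SchemeWithInit` test above (the `ConnectionScheme` class is hypothetical): values assigned in an `init` block survive `empty()` and are copied into the target meta by `write`.

```kotlin
import space.kscience.dataforge.meta.*

private class ConnectionScheme : Scheme() {
    init {
        // initial data written before the scheme is attached to a target
        set("host", "localhost")
    }

    var host by string()

    companion object : SchemeSpec<ConnectionScheme>(::ConnectionScheme)
}

fun demo() {
    val fresh = ConnectionScheme.empty()
    check(fresh.host == "localhost")

    val target = MutableMeta()
    val attached = ConnectionScheme.write(target) // pre-existing data is copied to target
    attached.host = "example.org"                 // subsequent edits are written to target too
    check(target["host"].string == "example.org")
}
```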

From 78641f6f877b1e521e662e6596192e9da214f6e0 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 4 Feb 2024 13:54:44 +0300
Subject: [PATCH 24/77] Roll-back version

---
 build.gradle.kts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.gradle.kts b/build.gradle.kts
index 8be3a7b0..b9349868 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -8,7 +8,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.8.1"
+    version = "0.8.0"
 }
 
 subprojects {

From 0e5a31db9f2b232baed5b475cdd5052f1923cad1 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 6 Feb 2024 10:21:05 +0300
Subject: [PATCH 25/77] Hot fix for observable meta wrapper

---
 .../space/kscience/dataforge/meta/ObservableMetaWrapper.kt    | 4 ++--
 .../space/kscience/dataforge/meta/ObservableMetaTest.kt       | 2 ++
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt
index 71e15aa9..abd2deb6 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt
@@ -15,9 +15,9 @@ private class ObservableMetaWrapper(
     val listeners: MutableSet<MetaListener>,
 ) : ObservableMutableMeta {
     override val items: Map<NameToken, ObservableMutableMeta>
-        get() = root.items.keys.associateWith {
+        get() = root[nodeName]?.items?.keys?.associateWith {
             ObservableMetaWrapper(root, nodeName + it, listeners)
-        }
+        } ?: emptyMap()
 
     override fun get(name: Name): ObservableMutableMeta? = if (root[nodeName + name] == null) {
         null
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ObservableMetaTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ObservableMetaTest.kt
index 4681ec12..2d6cc36f 100644
--- a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ObservableMetaTest.kt
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ObservableMetaTest.kt
@@ -17,6 +17,8 @@ class ObservableMetaTest {
             }
         }.asObservable()
 
+        println(meta)
+
         assertEquals("scatter", meta["data.type"].string)
     }
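
For context, a sketch of the case the wrapper fix addresses: `items` of a wrapped child node should list the child's own tokens instead of being resolved against the root (the names below are illustrative).

```kotlin
import space.kscience.dataforge.meta.*

fun main() {
    val meta = MutableMeta {
        "data" put {
            "type" put "scatter"
        }
    }.asObservable()

    val child = meta["data"] ?: error("missing node")
    // with the fix, the keys of the child node itself are listed, e.g. [type]
    println(child.items.keys)
}
```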
 

From 4197b4bb6165f5847d1d84359a67702894b337a6 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 6 Feb 2024 14:36:20 +0300
Subject: [PATCH 26/77] Add ordering to name indices

---
 CHANGELOG.md                                  |  2 ++
 build.gradle.kts                              |  2 +-
 .../kscience/dataforge/io/EnvelopeParts.kt    |  3 +-
 .../dataforge/meta/MutableMetaDelegate.kt     |  3 +-
 .../space/kscience/dataforge/meta/Scheme.kt   |  7 ++---
 .../dataforge/names/NameIndexComparator.kt    | 30 +++++++++++++++++++
 .../dataforge/meta/ObservableMetaTest.kt      |  2 --
 7 files changed, 40 insertions(+), 9 deletions(-)
 create mode 100644 dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameIndexComparator.kt

diff --git a/CHANGELOG.md b/CHANGELOG.md
index b9d3b6a9..e5d85a2f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,7 @@
 ## Unreleased
 
 ### Added
+- Name index comparator
 
 ### Changed
 
@@ -11,6 +12,7 @@
 ### Removed
 
 ### Fixed
+- `listOfScheme` and `listOfConvertable` delegates provide the correct item order
 
 ### Security
 
diff --git a/build.gradle.kts b/build.gradle.kts
index b9349868..d41918ac 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -8,7 +8,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.8.0"
+    version = "0.8.1-dev-1"
 }
 
 subprojects {
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/EnvelopeParts.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/EnvelopeParts.kt
index 183e7b03..88231899 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/EnvelopeParts.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/EnvelopeParts.kt
@@ -11,6 +11,7 @@ import space.kscience.dataforge.io.PartDescriptor.Companion.PARTS_KEY
 import space.kscience.dataforge.io.PartDescriptor.Companion.SEPARATOR_KEY
 import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.names.asName
+import space.kscience.dataforge.names.getIndexedList
 import space.kscience.dataforge.names.plus
 
 private class PartDescriptor : Scheme() {
@@ -84,7 +85,7 @@ public fun EnvelopeBuilder.envelopes(
 public fun Envelope.parts(): EnvelopeParts {
     if (data == null) return emptyList()
     //TODO add zip folder reader
-    val parts = meta.getIndexed(PARTS_KEY).values.map {
+    val parts = meta.getIndexedList(PARTS_KEY).map {
         PartDescriptor.read(it)
     }
     return if (parts.isEmpty()) {
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
index f6e96109..70c4aceb 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
@@ -4,6 +4,7 @@ import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
+import space.kscience.dataforge.names.getIndexedList
 import kotlin.properties.ReadWriteProperty
 import kotlin.reflect.KProperty
 
@@ -65,7 +66,7 @@ public fun <T> MutableMeta.listOfConvertable(
 ): ReadWriteProperty<Any?, List<T>> = object : ReadWriteProperty<Any?, List<T>> {
     override fun getValue(thisRef: Any?, property: KProperty<*>): List<T> {
         val name = key ?: property.name.asName()
-        return getIndexed(name).values.map { converter.read(it) }
+        return getIndexedList(name).map { converter.read(it) }
     }
 
     override fun setValue(thisRef: Any?, property: KProperty<*>, value: List<T>) {
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
index c330de99..96b6df49 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
@@ -18,7 +18,7 @@ import kotlin.reflect.KProperty1
  */
 public open class Scheme(
     private var prototype: Meta? = null,
-    descriptor: MetaDescriptor? = null
+    descriptor: MetaDescriptor? = null,
 ) : Described, MetaRepr, MutableMetaProvider, Configurable {
 
     /**
@@ -187,7 +187,7 @@ public open class SchemeSpec<T : Scheme>(
         it.initialize(MutableMeta(), it.target, descriptor)
     }
 
-    override fun convert(obj: T): Meta  = obj.meta
+    override fun convert(obj: T): Meta = obj.meta
 
     /**
      * A convenience method to use specifications in builders
@@ -197,7 +197,6 @@ public open class SchemeSpec<T : Scheme>(
 }
 
 
-
 /**
  * Update a [MutableMeta] using given specification
  */
@@ -275,7 +274,7 @@ public fun <T : Scheme> MutableMeta.listOfScheme(
 ): ReadWriteProperty<Any?, List<T>> = object : ReadWriteProperty<Any?, List<T>> {
     override fun getValue(thisRef: Any?, property: KProperty<*>): List<T> {
         val name = key ?: property.name.asName()
-        return getIndexed(name).values.map { spec.write(it as MutableMeta) }
+        return getIndexedList(name).map { spec.write(it as MutableMeta) }
     }
 
     override fun setValue(thisRef: Any?, property: KProperty<*>, value: List<T>) {
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameIndexComparator.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameIndexComparator.kt
new file mode 100644
index 00000000..742f8ebb
--- /dev/null
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameIndexComparator.kt
@@ -0,0 +1,30 @@
+package space.kscience.dataforge.names
+
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.getIndexed
+
+
+/**
+ * A comparator for [Name] indices. If both indices are integers, they are compared numerically;
+ * otherwise they are compared as strings. A null index is ordered after any non-null index.
+ */
+public object NameIndexComparator : Comparator<String?> {
+    override fun compare(a: String?, b: String?): Int {
+        if (a == b) return 0
+        if (a == null) return 1
+        if (b == null) return -1
+        val aInt = a.toIntOrNull()
+        val bInt = b.toIntOrNull()
+        return if (aInt != null && bInt != null) {
+            aInt.compareTo(bInt)
+        } else {
+            a.compareTo(b)
+        }
+    }
+
+}
+
+public fun Meta.getIndexedList(name: Name): List<Meta> = getIndexed(name).entries.sortedWith(
+    // sort entries by index using the numeric-aware comparator
+    compareBy(NameIndexComparator) { it.key }
+).map { it.value }
\ No newline at end of file
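For context, a short sketch of the ordering this comparator produces (not part of the patch; the sample indices are arbitrary):

```kotlin
import space.kscience.dataforge.names.NameIndexComparator

// Integer indices compare numerically, other strings lexicographically,
// and a null index is placed last.
val sorted = listOf("10", "2", null, "1", "a").sortedWith(NameIndexComparator)
// -> ["1", "2", "10", "a", null]
```

`getIndexedList` relies on this ordering to return the indexed children of a `Meta` node in numeric order rather than in the lexicographic order of the raw index strings.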
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ObservableMetaTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ObservableMetaTest.kt
index 2d6cc36f..4681ec12 100644
--- a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ObservableMetaTest.kt
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ObservableMetaTest.kt
@@ -17,8 +17,6 @@ class ObservableMetaTest {
             }
         }.asObservable()
 
-        println(meta)
-
         assertEquals("scatter", meta["data.type"].string)
     }
 

From e850ca4145535ce8b230e888c303c4346ca1d55d Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 18 Feb 2024 18:34:26 +0300
Subject: [PATCH 27/77] minor refactoring

---
 .../dataforge/actions/AbstractAction.kt       | 14 ++++----
 .../kscience/dataforge/actions/MapAction.kt   |  4 +--
 .../kscience/dataforge/actions/SplitAction.kt |  4 +--
 .../kscience/dataforge/data/NamedData.kt      |  2 +-
 .../kscience/dataforge/meta/MutableMeta.kt    | 33 +++++++++++++++----
 .../kscience/dataforge/meta/SealedMeta.kt     |  4 ++-
 .../space/kscience/dataforge/meta/MetaTest.kt | 29 ++++++++++++++++
 gradle/wrapper/gradle-wrapper.properties      |  2 +-
 8 files changed, 72 insertions(+), 20 deletions(-)

diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
index 7cd1ced5..1802e488 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
@@ -37,21 +37,23 @@ public abstract class AbstractAction<T : Any, R : Any>(
      * Update part of the data set using provided data
      *
      * @param source the source data tree in case we need several data items to update
+     * @param meta the metadata used for the whole data tree
+     * @param updatedData the data item that was updated in the source tree
      */
     protected open fun DataSink<R>.update(
         source: DataTree<T>,
         meta: Meta,
-        namedData: NamedData<T>,
-    ){
+        updatedData: NamedData<T>,
+    ) {
         //by default regenerate the whole data set
-        generate(source,meta)
+        generate(source, meta)
     }
 
     @OptIn(DFInternal::class)
     override fun execute(
         dataSet: DataTree<T>,
         meta: Meta,
-    ): DataTree<R> = if(dataSet.isObservable()) {
+    ): DataTree<R> = if (dataSet.isObservable()) {
         MutableDataTree<R>(outputType, dataSet.updatesScope).apply {
             generate(dataSet, meta)
             dataSet.updates().onEach {
@@ -64,8 +66,8 @@ public abstract class AbstractAction<T : Any, R : Any>(
                 close()
             }
         }
-    } else{
-        DataTree(outputType){
+    } else {
+        DataTree(outputType) {
             generate(dataSet, meta)
         }
     }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index 1f40ed73..fafb2d55 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -86,8 +86,8 @@ internal class MapAction<T : Any, R : Any>(
         data.forEach { mapOne(it.name, it.data, meta) }
     }
 
-    override fun DataSink<R>.update(source: DataTree<T>, meta: Meta, namedData: NamedData<T>) {
-        mapOne(namedData.name, namedData.data, namedData.meta)
+    override fun DataSink<R>.update(source: DataTree<T>, meta: Meta, updatedData: NamedData<T>) {
+        mapOne(updatedData.name, updatedData.data, updatedData.meta)
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
index 057419a7..cacbf1ee 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
@@ -77,8 +77,8 @@ internal class SplitAction<T : Any, R : Any>(
         data.forEach { splitOne(it.name, it.data, meta) }
     }
 
-    override fun DataSink<R>.update(source: DataTree<T>, meta: Meta, namedData: NamedData<T>) {
-        splitOne(namedData.name, namedData.data, namedData.meta)
+    override fun DataSink<R>.update(source: DataTree<T>, meta: Meta, updatedData: NamedData<T>) {
+        splitOne(updatedData.name, updatedData.data, updatedData.meta)
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
index 63e36a3f..bf65292c 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
@@ -10,7 +10,7 @@ public interface NamedData<out T> : Named, Data<T> {
 }
 
 public operator fun NamedData<*>.component1(): Name = name
-public operator fun <T: Any> NamedData<T>.component2(): Data<T> = data
+public operator fun <T> NamedData<T>.component2(): Data<T> = data
 
 private class NamedDataImpl<T>(
     override val name: Name,
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
index d3453c4d..6e4a5daa 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
@@ -19,6 +19,10 @@ public annotation class MetaBuilderMarker
 public interface MutableMetaProvider : MetaProvider, MutableValueProvider {
     override fun get(name: Name): MutableMeta?
     public operator fun set(name: Name, node: Meta?)
+
+    /**
+     * Set a value for the given name. Does nothing if the value has not changed.
+     */
     override fun setValue(name: Name, value: Value?)
 }
 
@@ -48,11 +52,13 @@ public interface MutableMeta : Meta, MutableMetaProvider {
     }
 
     override fun setValue(name: Name, value: Value?) {
-        getOrCreate(name).value = value
+        if (value != getValue(name)) {
+            getOrCreate(name).value = value
+        }
     }
 
     /**
-     * Get existing node or create a new one
+     * Get an existing node or create a new one
      */
     public fun getOrCreate(name: Name): MutableMeta
 
@@ -198,10 +204,8 @@ public operator fun MutableMetaProvider.set(key: String, metas: Iterable<Meta>):
 
 
 /**
- * Update existing mutable node with another node. The rules are following:
- *  * value replaces anything
- *  * node updates node and replaces anything but node
- *  * node list updates node list if number of nodes in the list is the same and replaces anything otherwise
+ * Update this mutable node with values from another node.
+ * Values that are present in this provider but missing in [meta] are kept.
  */
 public fun MutableMetaProvider.update(meta: Meta) {
     meta.valueSequence().forEach { (name, value) ->
@@ -222,7 +226,7 @@ public fun <M : MutableTypedMeta<M>> MutableTypedMeta<M>.edit(name: Name, builde
     getOrCreate(name).apply(builder)
 
 /**
- * Set a value at a given [name]. If node does not exist, create it.
+ * Set a value at a given [name]. If a node does not exist, create it.
  */
 public operator fun <M : MutableTypedMeta<M>> MutableTypedMeta<M>.set(name: Name, value: Value?) {
     edit(name) {
@@ -367,6 +371,21 @@ public fun MutableMeta.append(name: Name, value: Value): Unit = append(name, Met
 
 public fun MutableMeta.append(key: String, value: Value): Unit = append(Name.parse(key), value)
 
+/**
+ * Update all items that exist in [newMeta] and remove existing items that are missing from [newMeta].
+ * This produces the same result as clearing all items and then applying [newMeta] to a blank meta, but it does not
+ * emit unnecessary invalidation events (if they are supported).
+ */
+public fun MutableMeta.reset(newMeta: Meta) {
+    //remove old items
+    (items.keys - newMeta.items.keys).forEach {
+        remove(it.asName())
+    }
+    newMeta.items.forEach { (token, item) ->
+        set(token, item)
+    }
+}
+
 /**
  * Create a mutable copy of this meta. The copy is created even if the Meta is already mutable
  */
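A minimal sketch of how the new `reset` behaves, mirroring the test added below; the keys and values are arbitrary:

```kotlin
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MutableMeta
import space.kscience.dataforge.meta.reset

val target = MutableMeta {
    "a" put "aValue"
    "b" put "bValue"
}
val replacement = Meta {
    "a" put "aValue"
    "c" put "cValue"
}

target.reset(replacement)
// "b" is removed, "c" is added, and "a" is re-set with an identical value.
// Since setValue (and MetaBuilder.set below) now skip unchanged content,
// observable wrappers should only see invalidations for items that actually changed.
```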
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/SealedMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/SealedMeta.kt
index e842b990..b3fdf062 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/SealedMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/SealedMeta.kt
@@ -76,6 +76,8 @@ internal class MetaBuilder(
 
 
     override fun set(name: Name, node: Meta?) {
+        // skip assignment if the node has not changed
+        if (node == get(name)) return
         when (name.length) {
             0 -> error("Can't set a meta with empty name")
             1 -> {
@@ -89,7 +91,7 @@ internal class MetaBuilder(
             }
 
             else -> {
-                getOrCreate(name.first().asName()).set(name.cutFirst(), node)
+                getOrCreate(name.first().asName())[name.cutFirst()] = node
             }
         }
     }
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaTest.kt
index 85db7bd6..78b5cdb4 100644
--- a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaTest.kt
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaTest.kt
@@ -61,4 +61,33 @@ class MetaTest {
         assertEquals(null, indexed["8"])
         assertEquals(12, indexed["12"].int)
     }
+
+    @Test
+    fun reset() {
+        val oldMeta = MutableMeta {
+            "a" put {
+                "value" put "aValue"
+            }
+            "b" put {
+                "value" put "bValue"
+            }
+            "c" put {
+                "value" put "cValue"
+            }
+        }
+        val newMeta = Meta {
+            "a" put {
+                "value" put "aValue"
+            }
+            "b" put {
+                "value" put "bValue"
+            }
+            "d" put {
+                "value" put "dValue"
+            }
+        }
+        oldMeta.reset(newMeta)
+        println(oldMeta)
+        assertEquals(setOf("a", "b", "d"), oldMeta.items.keys.map { it.toString() }.toSet())
+    }
 }
\ No newline at end of file
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index e411586a..17655d0e 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-bin.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists

From aa4c745819fd0e216df833f5a703023ec671b1fc Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 18 Feb 2024 18:43:33 +0300
Subject: [PATCH 28/77] remove unnecessary type limitations in actions

---
 .../space/kscience/dataforge/actions/AbstractAction.kt      | 2 +-
 .../kotlin/space/kscience/dataforge/actions/MapAction.kt    | 4 ++--
 .../kotlin/space/kscience/dataforge/actions/ReduceAction.kt | 6 +++---
 .../kotlin/space/kscience/dataforge/actions/SplitAction.kt  | 6 +++---
 .../kotlin/space/kscience/dataforge/data/GroupRule.kt       | 4 ++--
 5 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
index 1802e488..00201a90 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
@@ -21,7 +21,7 @@ internal fun MutableMap<Name, *>.removeWhatStartsWith(name: Name) {
 /**
  * An action that caches results on-demand and recalculates them on source push
  */
-public abstract class AbstractAction<T : Any, R : Any>(
+public abstract class AbstractAction<T, R>(
     public val outputType: KType,
 ) : Action<T, R> {
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index fafb2d55..ffa1d93d 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -50,7 +50,7 @@ public class MapActionBuilder<T, R>(
 }
 
 @PublishedApi
-internal class MapAction<T : Any, R : Any>(
+internal class MapAction<T, R>(
     outputType: KType,
     private val block: MapActionBuilder<T, R>.() -> Unit,
 ) : AbstractAction<T, R>(outputType) {
@@ -96,7 +96,7 @@ internal class MapAction<T : Any, R : Any>(
  * A one-to-one mapping action
  */
 @DFExperimental
-public inline fun <T : Any, reified R : Any> Action.Companion.mapping(
+public inline fun <T, reified R> Action.Companion.mapping(
     noinline builder: MapActionBuilder<T, R>.() -> Unit,
 ): Action<T, R> = MapAction(typeOf<R>(), builder)
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
index 9440be55..0da9b61c 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
@@ -12,7 +12,7 @@ import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
 
-public class JoinGroup<T : Any, R : Any>(
+public class JoinGroup<T, R>(
     public var name: String,
     internal val set: DataTree<T>,
     @PublishedApi internal var outputType: KType,
@@ -35,7 +35,7 @@ public class JoinGroup<T : Any, R : Any>(
 }
 
 @DFBuilder
-public class ReduceGroupBuilder<T : Any, R : Any>(
+public class ReduceGroupBuilder<T, R>(
     public val actionMeta: Meta,
     private val outputType: KType,
 ) {
@@ -79,7 +79,7 @@ public class ReduceGroupBuilder<T : Any, R : Any>(
 }
 
 @PublishedApi
-internal class ReduceAction<T : Any, R : Any>(
+internal class ReduceAction<T, R>(
     outputType: KType,
     private val action: ReduceGroupBuilder<T, R>.() -> Unit,
 ) : AbstractAction<T, R>(outputType) {
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
index cacbf1ee..b2937515 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
@@ -13,9 +13,9 @@ import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
 
-public class SplitBuilder<T : Any, R : Any>(public val name: Name, public val meta: Meta) {
+public class SplitBuilder<T, R>(public val name: Name, public val meta: Meta) {
 
-    public class FragmentRule<T : Any, R : Any>(
+    public class FragmentRule<T, R>(
         public val name: Name,
         public var meta: MutableMeta,
         @PublishedApi internal var outputType: KType,
@@ -44,7 +44,7 @@ public class SplitBuilder<T : Any, R : Any>(public val name: Name, public val me
  * Action that splits each incoming element into a number of fragments defined in builder
  */
 @PublishedApi
-internal class SplitAction<T : Any, R : Any>(
+internal class SplitAction<T, R>(
     outputType: KType,
     private val action: SplitBuilder<T, R>.() -> Unit,
 ) : AbstractAction<T, R>(outputType) {
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
index 90486d85..ff7c94a8 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
@@ -20,7 +20,7 @@ import space.kscience.dataforge.meta.string
 import space.kscience.dataforge.misc.DFInternal
 
 public interface GroupRule {
-    public fun <T : Any> gather(set: DataTree<T>): Map<String, DataTree<T>>
+    public fun <T> gather(set: DataTree<T>): Map<String, DataTree<T>>
 
     public companion object {
         /**
@@ -37,7 +37,7 @@ public interface GroupRule {
             defaultTagValue: String,
         ): GroupRule = object : GroupRule {
 
-            override fun <T : Any> gather(
+            override fun <T> gather(
                 set: DataTree<T>,
             ): Map<String, DataTree<T>> {
                 val map = HashMap<String, DataTreeBuilder<T>>()
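With the `Any` upper bounds removed, action type parameters may now be nullable. A minimal sketch under that assumption (the builder body is elided, and at this point in the series the factory still requires the `DFExperimental` opt-in):

```kotlin
import space.kscience.dataforge.actions.Action
import space.kscience.dataforge.actions.mapping
import space.kscience.dataforge.misc.DFExperimental

@OptIn(DFExperimental::class)
val toNullableLength: Action<String, Int?> = Action.mapping<String, Int?> {
    // configure the one-to-one mapping here; a nullable result type is now allowed
}
```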

From f95e278b2db7171a7e1bda6ffafa00d0f633f3f5 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 18 Feb 2024 20:11:53 +0300
Subject: [PATCH 29/77] Replace `branch` data builder with `putAll`

---
 CHANGELOG.md                                  |  1 +
 .../kscience/dataforge/data/DataSource.kt     |  8 ++--
 .../kscience/dataforge/data/dataBuilders.kt   | 38 +++++++++++++------
 .../dataforge/data/dataSetBuilderInContext.kt |  4 +-
 .../dataforge/data/DataTreeBuilderTest.kt     |  2 +-
 .../kscience/dataforge/workspace/Workspace.kt |  2 +-
 .../dataforge/workspace/taskBuilders.kt       |  6 +--
 .../dataforge/workspace/FileDataTest.kt       |  2 +-
 8 files changed, 39 insertions(+), 24 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e5d85a2f..2849fce6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,7 @@
 - Name index comparator
 
 ### Changed
+- DataSink `branch` is replaced with `putAll` to avoid confusion with DataTree methods
 
 ### Deprecated
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
index d379d027..40086ca2 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
@@ -98,15 +98,15 @@ public val DataTree<*>.meta: Meta? get() = data?.meta
 /**
  * Provide subtree if it exists
  */
-public tailrec fun <T, TR : GenericDataTree<T, TR>> GenericDataTree<T, TR>.branch(name: Name): TR? =
+public tailrec fun <T, TR : GenericDataTree<T, TR>> GenericDataTree<T, TR>.putAll(name: Name): TR? =
     when (name.length) {
         0 -> self
         1 -> items[name.first()]
-        else -> items[name.first()]?.branch(name.cutFirst())
+        else -> items[name.first()]?.putAll(name.cutFirst())
     }
 
-public fun <T, TR : GenericDataTree<T, TR>> GenericDataTree<T, TR>.branch(name: String): TR? =
-    branch(name.parseAsName())
+public fun <T, TR : GenericDataTree<T, TR>> GenericDataTree<T, TR>.putAll(name: String): TR? =
+    this@branch.putAll(name.parseAsName())
 
 public fun GenericDataTree<*, *>.isEmpty(): Boolean = data == null && items.isEmpty()
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
index 8e8b6eaa..bdeb2798 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
@@ -16,11 +16,7 @@ public fun <T> DataSink<T>.put(value: NamedData<T>) {
     put(value.name, value.data)
 }
 
-public fun <T> DataSink<T>.branch(dataTree: DataTree<T>) {
-    putAll(dataTree.asSequence())
-}
-
-public inline fun <T> DataSink<T>.branch(
+public inline fun <T> DataSink<T>.putAll(
     prefix: Name,
     block: DataSink<T>.() -> Unit,
 ) {
@@ -35,24 +31,42 @@ public inline fun <T> DataSink<T>.branch(
     }
 }
 
+@Deprecated("Use putAll", ReplaceWith("putAll(prefix, block)"))
+public inline fun <T> DataSink<T>.branch(
+    prefix: Name,
+    block: DataSink<T>.() -> Unit,
+): Unit = putAll(prefix, block)
+
+
+public inline fun <T> DataSink<T>.putAll(
+    prefix: String,
+    block: DataSink<T>.() -> Unit,
+): Unit = putAll(prefix.asName(), block)
+
+@Deprecated("Use putAll", ReplaceWith("putAll(prefix, block)"))
 public inline fun <T> DataSink<T>.branch(
     prefix: String,
     block: DataSink<T>.() -> Unit,
-): Unit = branch(prefix.asName(), block)
-
+): Unit = putAll(prefix, block)
 
 public fun <T> DataSink<T>.put(name: String, value: Data<T>) {
     put(Name.parse(name), value)
 }
 
-public fun <T> DataSink<T>.branch(name: Name, set: DataTree<T>) {
-    branch(name) { putAll(set.asSequence()) }
+public fun <T> DataSink<T>.putAll(name: Name, tree: DataTree<T>) {
+    putAll(name) { putAll(tree.asSequence()) }
 }
 
-public fun <T> DataSink<T>.branch(name: String, set: DataTree<T>) {
-    branch(Name.parse(name)) { putAll(set.asSequence()) }
+@Deprecated("Use putAll", ReplaceWith("putAll(name, tree)"))
+public fun <T> DataSink<T>.branch(name: Name, tree: DataTree<T>): Unit = putAll(name,tree)
+
+public fun <T> DataSink<T>.putAll(name: String, tree: DataTree<T>) {
+    putAll(Name.parse(name)) { putAll(tree.asSequence()) }
 }
 
+@Deprecated("Use putAll", ReplaceWith("putAll(name, tree)"))
+public fun <T> DataSink<T>.branch(name: String, tree: DataTree<T>): Unit = putAll(name,tree)
+
 /**
  * Produce lazy [Data] and emit it into the [MutableDataTree]
  */
@@ -124,7 +138,7 @@ public fun <T : Any> DataSink<T>.watchBranch(
     name: Name,
     dataSet: ObservableDataTree<T>,
 ): Job {
-    branch(name, dataSet)
+    putAll(name, dataSet)
     return dataSet.updates().onEach {
         put(name + it.name, it.data)
     }.launchIn(dataSet.updatesScope)
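The rename in practice, as a sketch; `dirTree` and `extraTree` are hypothetical pre-existing trees:

```kotlin
import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.data.putAll

fun compose(dirTree: DataTree<String>, extraTree: DataTree<String>): DataTree<String> =
    DataTree<String> {
        // was: branch("dir", dirTree)
        putAll("dir", dirTree)
        // was: branch("extra") { ... }
        putAll("extra") {
            putAll(extraTree)
        }
    }
```

The deprecated `branch` overloads keep old code compiling and forward to `putAll`, so the migration can happen gradually.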
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
index cfccb02b..6055a5fa 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
@@ -15,7 +15,7 @@ public infix fun <T : Any> String.put(data: Data<T>): Unit =
  */
 context(DataSink<T>)
 public infix fun <T : Any> String.put(dataSet: DataTree<T>): Unit =
-    branch(this, dataSet)
+    this.putAll(this, dataSet)
 
 /**
  * Build and append node
@@ -23,5 +23,5 @@ public infix fun <T : Any> String.put(dataSet: DataTree<T>): Unit =
 context(DataSink<T>)
 public infix fun <T : Any> String.put(
     block: DataSink<T>.() -> Unit,
-): Unit = branch(Name.parse(this), block)
+): Unit = this.putAll(Name.parse(this), block)
 
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
index 95b7a7bd..a4e732dc 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
@@ -43,7 +43,7 @@ internal class DataTreeBuilderTest {
                 wrap("b", "b")
             }
             wrap("root", "root")
-            putAll(updateData)
+            this.putAll(updateData)
         }
 
         assertEquals("a", node["update.a"]?.await())
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
index f3ea322c..e4e315fd 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
@@ -11,7 +11,7 @@ import space.kscience.dataforge.provider.Provider
 import kotlin.coroutines.CoroutineContext
 
 
-public fun interface DataSelector<T> {
+public fun interface DataSelector<out T> {
     public suspend fun select(workspace: Workspace, meta: Meta): DataTree<T>
 }
 
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
index 1900ff23..35fccc99 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
@@ -3,8 +3,8 @@ package space.kscience.dataforge.workspace
 import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.context.PluginFactory
 import space.kscience.dataforge.data.DataTree
-import space.kscience.dataforge.data.branch
 import space.kscience.dataforge.data.forEach
+import space.kscience.dataforge.data.putAll
 import space.kscience.dataforge.data.transform
 import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.misc.DFExperimental
@@ -101,7 +101,7 @@ public suspend inline fun <T, reified R> TaskResultBuilder<R>.transformEach(
  * Set given [dataSet] as a task result.
  */
 public fun <T> TaskResultBuilder<T>.result(dataSet: DataTree<T>) {
-    branch(dataSet)
+    this.putAll(dataSet)
 }
 
 /**
@@ -113,7 +113,7 @@ public suspend inline fun <T, reified R> TaskResultBuilder<R>.actionFrom(
     action: Action<T, R>,
     dependencyMeta: Meta = defaultDependencyMeta,
 ) {
-    branch(action.execute(from(selector, dependencyMeta), dependencyMeta))
+    this.putAll(action.execute(from(selector, dependencyMeta), dependencyMeta))
 }
 
 
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
index 10a1c268..a06f24a5 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
@@ -23,7 +23,7 @@ import kotlin.test.assertEquals
 
 class FileDataTest {
     val dataNode = DataTree<String> {
-        branch("dir") {
+        putAll("dir") {
             wrap("a", "Some string") {
                 "content" put "Some string"
             }

From aa88f05688199c51a4aa725e7f2123844fe391cb Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 19 Feb 2024 11:41:27 +0300
Subject: [PATCH 30/77] Replace `branch` data builder with `putAll`

---
 .../kotlin/space/kscience/dataforge/data/DataSource.kt    | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
index 40086ca2..d379d027 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
@@ -98,15 +98,15 @@ public val DataTree<*>.meta: Meta? get() = data?.meta
 /**
  * Provide subtree if it exists
  */
-public tailrec fun <T, TR : GenericDataTree<T, TR>> GenericDataTree<T, TR>.putAll(name: Name): TR? =
+public tailrec fun <T, TR : GenericDataTree<T, TR>> GenericDataTree<T, TR>.branch(name: Name): TR? =
     when (name.length) {
         0 -> self
         1 -> items[name.first()]
-        else -> items[name.first()]?.putAll(name.cutFirst())
+        else -> items[name.first()]?.branch(name.cutFirst())
     }
 
-public fun <T, TR : GenericDataTree<T, TR>> GenericDataTree<T, TR>.putAll(name: String): TR? =
-    this@branch.putAll(name.parseAsName())
+public fun <T, TR : GenericDataTree<T, TR>> GenericDataTree<T, TR>.branch(name: String): TR? =
+    branch(name.parseAsName())
 
 public fun GenericDataTree<*, *>.isEmpty(): Boolean = data == null && items.isEmpty()
 

From b575801983ff3afc381276c8249f7b628340596f Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 19 Feb 2024 11:42:04 +0300
Subject: [PATCH 31/77] Replace `branch` data builder with `putAll`

---
 .../space/kscience/dataforge/data/dataSetBuilderInContext.kt  | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
index 6055a5fa..44602c53 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
@@ -15,7 +15,7 @@ public infix fun <T : Any> String.put(data: Data<T>): Unit =
  */
 context(DataSink<T>)
 public infix fun <T : Any> String.put(dataSet: DataTree<T>): Unit =
-    this.putAll(this, dataSet)
+    putAll(this, dataSet)
 
 /**
  * Build and append node
@@ -23,5 +23,5 @@ public infix fun <T : Any> String.put(dataSet: DataTree<T>): Unit =
 context(DataSink<T>)
 public infix fun <T : Any> String.put(
     block: DataSink<T>.() -> Unit,
-): Unit = this.putAll(Name.parse(this), block)
+): Unit = putAll(Name.parse(this), block)
 

From db2da2027d05c24a939eb35b68e7949e988c8760 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 19 Feb 2024 14:26:33 +0300
Subject: [PATCH 32/77] Replace `branch` data builder with `putAll`

---
 .../kscience/dataforge/data/dataBuilders.kt   | 38 ++++++++-----------
 .../dataforge/data/DataTreeBuilderTest.kt     |  4 +-
 2 files changed, 18 insertions(+), 24 deletions(-)

diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
index bdeb2798..63233c07 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
@@ -5,7 +5,6 @@ import kotlinx.coroutines.flow.launchIn
 import kotlinx.coroutines.flow.onEach
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
-import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
 import space.kscience.dataforge.names.isEmpty
@@ -58,14 +57,14 @@ public fun <T> DataSink<T>.putAll(name: Name, tree: DataTree<T>) {
 }
 
 @Deprecated("Use putAll", ReplaceWith("putAll(name, tree)"))
-public fun <T> DataSink<T>.branch(name: Name, tree: DataTree<T>): Unit = putAll(name,tree)
+public fun <T> DataSink<T>.branch(name: Name, tree: DataTree<T>): Unit = putAll(name, tree)
 
 public fun <T> DataSink<T>.putAll(name: String, tree: DataTree<T>) {
     putAll(Name.parse(name)) { putAll(tree.asSequence()) }
 }
 
 @Deprecated("Use putAll", ReplaceWith("putAll(name, tree)"))
-public fun <T> DataSink<T>.branch(name: String, tree: DataTree<T>): Unit = putAll(name,tree)
+public fun <T> DataSink<T>.branch(name: String, tree: DataTree<T>): Unit = putAll(name, tree)
 
 /**
  * Produce lazy [Data] and emit it into the [MutableDataTree]
@@ -117,30 +116,25 @@ public fun <T> DataSink<T>.putAll(sequence: Sequence<NamedData<T>>) {
 }
 
 public fun <T> DataSink<T>.putAll(tree: DataTree<T>) {
-    this.putAll(tree.asSequence())
-}
-
-
-/**
- * Update data with given node data and meta with node meta.
- */
-@DFExperimental
-public fun <T> MutableDataTree<T>.putAll(source: DataTree<T>) {
-    source.forEach {
-        put(it.name, it.data)
-    }
+    putAll(tree.asSequence())
 }
 
 /**
  * Copy the given data set and mirror its changes to this [DataSink]. Returns an update [Job]
  */
+public fun <T : Any> DataSink<T>.putAllAndWatch(
+    branchName: Name = Name.EMPTY,
+    dataSet: ObservableDataTree<T>,
+): Job {
+    putAll(branchName, dataSet)
+    return dataSet.updates().onEach {
+        put(branchName + it.name, it.data)
+    }.launchIn(dataSet.updatesScope)
+}
+
+
+@Deprecated("Use putAllAndWatch", ReplaceWith("putAllAndWatch(name, dataSet)"))
 public fun <T : Any> DataSink<T>.watchBranch(
     name: Name,
     dataSet: ObservableDataTree<T>,
-): Job {
-    putAll(name, dataSet)
-    return dataSet.updates().onEach {
-        put(name + it.name, it.data)
-    }.launchIn(dataSet.updatesScope)
-
-}
\ No newline at end of file
+): Job = putAllAndWatch(name, dataSet)
\ No newline at end of file
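A sketch of the new `putAllAndWatch`: it copies the source tree and keeps mirroring its updates until the returned `Job` is cancelled (both trees here are hypothetical):

```kotlin
import space.kscience.dataforge.data.MutableDataTree
import space.kscience.dataforge.data.putAllAndWatch
import space.kscience.dataforge.names.asName

val source = MutableDataTree<Int>()
val target = MutableDataTree<Int>()

// copy everything currently in `source` under "sub" and keep pushing its updates
val mirrorJob = target.putAllAndWatch("sub".asName(), source)

// later: stop mirroring; items already copied stay in `target`
mirrorJob.cancel()
```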
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
index a4e732dc..561e3aa0 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
@@ -43,7 +43,7 @@ internal class DataTreeBuilderTest {
                 wrap("b", "b")
             }
             wrap("root", "root")
-            this.putAll(updateData)
+            putAll(updateData)
         }
 
         assertEquals("a", node["update.a"]?.await())
@@ -55,7 +55,7 @@ internal class DataTreeBuilderTest {
         val subNode = MutableDataTree<Int>()
 
         val rootNode = MutableDataTree<Int> {
-            watchBranch("sub".asName(), subNode)
+            putAllAndWatch("sub".asName(), subNode)
         }
 
         repeat(10) {

From facf7c3c7e423ce22039e7ad2612192b3f3d2f1b Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 19 Feb 2024 20:10:15 +0300
Subject: [PATCH 33/77] Remove some experimental flags

---
 .../space/kscience/dataforge/actions/MapAction.kt      |  3 +--
 .../space/kscience/dataforge/actions/ReduceAction.kt   | 10 ++++------
 .../space/kscience/dataforge/actions/SplitAction.kt    |  5 ++---
 .../space/kscience/dataforge/data/dataFilterJvm.kt     |  2 +-
 .../kotlin/space/kscience/dataforge/io/ioMisc.kt       |  3 ---
 .../kotlin/space/kscience/dataforge/io/fileIO.kt       |  3 ---
 .../space/kscience/dataforge/workspace/readFileData.kt |  6 ------
 7 files changed, 8 insertions(+), 24 deletions(-)

diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index ffa1d93d..0c56f0dd 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -6,7 +6,6 @@ import space.kscience.dataforge.meta.MutableMeta
 import space.kscience.dataforge.meta.seal
 import space.kscience.dataforge.meta.toMutableMeta
 import space.kscience.dataforge.misc.DFBuilder
-import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
 import kotlin.reflect.KType
@@ -95,7 +94,7 @@ internal class MapAction<T, R>(
 /**
  * A one-to-one mapping action
  */
-@DFExperimental
+
 public inline fun <T, reified R> Action.Companion.mapping(
     noinline builder: MapActionBuilder<T, R>.() -> Unit,
 ): Action<T, R> = MapAction(typeOf<R>(), builder)
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
index 0da9b61c..9df0db26 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
@@ -4,7 +4,6 @@ import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
 import space.kscience.dataforge.misc.DFBuilder
-import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.parseAsName
@@ -22,12 +21,12 @@ public class JoinGroup<T, R>(
 
     public lateinit var result: suspend ActionEnv.(Map<Name, ValueWithMeta<T>>) -> R
 
-    internal fun <R1 : R> result(outputType: KType, f: suspend ActionEnv.(Map<Name,  ValueWithMeta<T>>) -> R1) {
+    internal fun <R1 : R> result(outputType: KType, f: suspend ActionEnv.(Map<Name, ValueWithMeta<T>>) -> R1) {
         this.outputType = outputType
         this.result = f;
     }
 
-    public inline fun <reified R1 : R> result(noinline f: suspend ActionEnv.(Map<Name,  ValueWithMeta<T>>) -> R1) {
+    public inline fun <reified R1 : R> result(noinline f: suspend ActionEnv.(Map<Name, ValueWithMeta<T>>) -> R1) {
         outputType = typeOf<R1>()
         this.result = f;
     }
@@ -67,7 +66,7 @@ public class ReduceGroupBuilder<T, R>(
     /**
      * Apply transformation to the whole node
      */
-    public fun result(resultName: String, f: suspend ActionEnv.(Map<Name,  ValueWithMeta<T>>) -> R) {
+    public fun result(resultName: String, f: suspend ActionEnv.(Map<Name, ValueWithMeta<T>>) -> R) {
         groupRules += { node ->
             listOf(JoinGroup<T, R>(resultName, node, outputType).apply { result(outputType, f) })
         }
@@ -111,7 +110,6 @@ internal class ReduceAction<T, R>(
 /**
  * A one-to-one mapping action
  */
-@DFExperimental
-public inline fun <reified T : Any, reified R : Any> Action.Companion.reducing(
+public inline fun <reified T, reified R> Action.Companion.reducing(
     noinline builder: ReduceGroupBuilder<T, R>.() -> Unit,
 ): Action<T, R> = ReduceAction(typeOf<R>(), builder)
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
index b2937515..04fef3ab 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
@@ -5,7 +5,6 @@ import space.kscience.dataforge.meta.Laminate
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
 import space.kscience.dataforge.meta.toMutableMeta
-import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.parseAsName
 import kotlin.collections.set
@@ -85,7 +84,7 @@ internal class SplitAction<T, R>(
 /**
  * Action that splits each incoming element into a number of fragments defined in builder
  */
-@DFExperimental
-public inline fun <T : Any, reified R : Any> Action.Companion.splitting(
+
+public inline fun <T, reified R> Action.Companion.splitting(
     noinline builder: SplitBuilder<T, R>.() -> Unit,
 ): Action<T, R> = SplitAction(typeOf<R>(), builder)
\ No newline at end of file
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
index 48155595..9498c758 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
@@ -84,4 +84,4 @@ public fun <R> ObservableDataTree<*>.filterByType(
 public inline fun <reified R> ObservableDataTree<*>.filterByType(
     scope: CoroutineScope,
     predicate: DataFilter = DataFilter.EMPTY,
-): ObservableDataTree<R> = filterByType(typeOf<R>(),scope,predicate)
\ No newline at end of file
+): ObservableDataTree<R> = filterByType(typeOf<R>(), scope, predicate)
\ No newline at end of file
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/ioMisc.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/ioMisc.kt
index 93fd210a..b5e56dfe 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/ioMisc.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/ioMisc.kt
@@ -5,7 +5,6 @@ import kotlinx.io.bytestring.ByteString
 import kotlinx.io.bytestring.decodeToString
 import kotlinx.io.bytestring.encodeToByteString
 import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DFExperimental
 import kotlin.math.min
 
 /**
@@ -52,7 +51,6 @@ public fun IOPlugin.peekBinaryEnvelopeFormat(binary: Binary): EnvelopeFormat? {
 /**
  * A zero-copy read of an [Envelope] from a [Binary]
  */
-@DFExperimental
 public fun IOPlugin.readEnvelope(
     binary: Binary,
     readNonEnvelopes: Boolean = false,
@@ -62,7 +60,6 @@ public fun IOPlugin.readEnvelope(
     Envelope(Meta.EMPTY, binary)
 } else error("Can't infer format for $binary")
 
-@DFExperimental
 public fun IOPlugin.readEnvelope(
     string: String,
     readNonEnvelopes: Boolean = false,
diff --git a/dataforge-io/src/jvmMain/kotlin/space/kscience/dataforge/io/fileIO.kt b/dataforge-io/src/jvmMain/kotlin/space/kscience/dataforge/io/fileIO.kt
index c15280f3..7df23eb5 100644
--- a/dataforge-io/src/jvmMain/kotlin/space/kscience/dataforge/io/fileIO.kt
+++ b/dataforge-io/src/jvmMain/kotlin/space/kscience/dataforge/io/fileIO.kt
@@ -77,14 +77,12 @@ public fun Path.rewrite(block: Sink.() -> Unit): Unit {
     stream.asSink().buffered().use(block)
 }
 
-@DFExperimental
 public fun EnvelopeFormat.readFile(path: Path): Envelope = readFrom(path.asBinary())
 
 /**
  * Resolve IOFormat based on type
  */
 @Suppress("UNCHECKED_CAST")
-@DFExperimental
 public inline fun <reified T : Any> IOPlugin.resolveIOFormat(): IOFormat<T>? =
     ioFormatFactories.find { it.type.isSupertypeOf(typeOf<T>()) } as IOFormat<T>?
 
@@ -192,7 +190,6 @@ public fun IOPlugin.peekFileEnvelopeFormat(path: Path): EnvelopeFormat? {
  *
  * Return null otherwise.
  */
-@DFExperimental
 public fun IOPlugin.readEnvelopeFile(
     path: Path,
     readNonEnvelopes: Boolean = false,
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt
index 1815c3e4..e0a4bdef 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt
@@ -7,8 +7,6 @@ import space.kscience.dataforge.data.StaticData
 import space.kscience.dataforge.io.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.copy
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.NameToken
 import space.kscience.dataforge.names.asName
@@ -39,7 +37,6 @@ public object FileData {
  * Read data with supported envelope format and binary format. If the envelope format is null, then read binary directly from file.
  * The operation is blocking since it must read the meta header. The reading of envelope body is lazy
  */
-@OptIn(DFExperimental::class)
 public fun IOPlugin.readFileData(
     path: Path,
 ): Data<Binary> {
@@ -127,8 +124,6 @@ public fun DataSink<Binary>.files(
 
 private fun Path.toName() = Name(map { NameToken.parse(it.nameWithoutExtension) })
 
-@DFInternal
-@DFExperimental
 public fun DataSink<Binary>.monitorFiles(
     io: IOPlugin,
     name: Name,
@@ -171,7 +166,6 @@ public fun DataSink<Binary>.monitorFiles(
  * @param resources The names of the resources to read.
  * @param classLoader The class loader to use for loading the resources. By default, it uses the current thread's context class loader.
  */
-@DFExperimental
 public fun DataSink<Binary>.resources(
     io: IOPlugin,
     vararg resources: String,

From b2593d308e7c485ac12cce345e9496eefdbff907 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 24 Feb 2024 11:26:28 +0300
Subject: [PATCH 34/77] Add depth-first walk for data tree

---
 .../space/kscience/dataforge/data/DataSource.kt      | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
index d379d027..90935fa1 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
@@ -93,6 +93,18 @@ public fun <T> DataTree<T>.asSequence(
     }
 }
 
+/**
+ * Walk the data tree depth-first
+ */
+public fun <T, TR: GenericDataTree<T,TR>> TR.walk(
+    namePrefix: Name = Name.EMPTY,
+): Sequence<TR> = sequence {
+    yield(this@walk)
+    items.forEach { (token, tree) ->
+        yieldAll(tree.walk(namePrefix + token))
+    }
+}
+
 public val DataTree<*>.meta: Meta? get() = data?.meta
 
 /**

From 0e72b4b63ceac633c7fa036571a60514f74f6a5f Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 24 Feb 2024 15:01:27 +0300
Subject: [PATCH 35/77] Add depth-first walk for data tree

---
 .../kotlin/space/kscience/dataforge/data/DataSource.kt      | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
index 90935fa1..855a249c 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
@@ -96,10 +96,10 @@ public fun <T> DataTree<T>.asSequence(
 /**
  * Walk the data tree depth-first
  */
-public fun <T, TR: GenericDataTree<T,TR>> TR.walk(
+private fun <T, TR: GenericDataTree<T,TR>> TR.walk(
     namePrefix: Name = Name.EMPTY,
-): Sequence<TR> = sequence {
-    yield(this@walk)
+): Sequence<Pair<Name,TR>> = sequence {
+    yield(namePrefix to this@walk)
     items.forEach { (token, tree) ->
         yieldAll(tree.walk(namePrefix + token))
     }

From db938e1ad8b0b2054a79143aef204d4c8755c039 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Thu, 14 Mar 2024 17:07:32 +0300
Subject: [PATCH 36/77] add custom json encoder to serializable meta
 converter

---
 .../kotlin/space/kscience/dataforge/meta/MetaConverter.kt    | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
index 8959ae4a..00074525 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
@@ -135,16 +135,17 @@ public interface MetaConverter<T>: MetaSpec<T> {
         @DFExperimental
         public inline fun <reified T> serializable(
             descriptor: MetaDescriptor? = null,
+            jsonEncoder: Json = Json { ignoreUnknownKeys = true },
         ): MetaConverter<T> = object : MetaConverter<T> {
             private val serializer: KSerializer<T> = serializer()
 
             override fun readOrNull(source: Meta): T? {
                 val json = source.toJson(descriptor)
-                return Json.decodeFromJsonElement(serializer, json)
+                return jsonEncoder.decodeFromJsonElement(serializer, json)
             }
 
             override fun convert(obj: T): Meta {
-                val json = Json.encodeToJsonElement(obj)
+                val json = jsonEncoder.encodeToJsonElement(obj)
                 return json.toMeta(descriptor)
             }
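The new parameter allows tuning JSON handling per converter. A usage sketch; `PlotSettings` and its fields are hypothetical:

```kotlin
import kotlinx.serialization.Serializable
import kotlinx.serialization.json.Json
import space.kscience.dataforge.meta.MetaConverter
import space.kscience.dataforge.misc.DFExperimental

@Serializable
data class PlotSettings(val title: String = "", val points: Int = 100)

// A converter that tolerates unknown keys when reading Meta written by a newer schema.
@OptIn(DFExperimental::class)
val plotSettingsConverter: MetaConverter<PlotSettings> = MetaConverter.serializable(
    jsonEncoder = Json { ignoreUnknownKeys = true }
)

val asMeta = plotSettingsConverter.convert(PlotSettings("spectrum", 512))
val restored: PlotSettings? = plotSettingsConverter.readOrNull(asMeta)
```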
 

From 9fe3deac336e161a12c6a85f7896ecc8f63018a3 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 2 Apr 2024 10:01:41 +0300
Subject: [PATCH 37/77] Fix scheme set method

---
 .../space/kscience/dataforge/meta/Scheme.kt   |  2 +-
 .../kscience/dataforge/meta/SchemeTest.kt     | 70 -------------------
 2 files changed, 1 insertion(+), 71 deletions(-)
 delete mode 100644 dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/SchemeTest.kt

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
index 96b6df49..85b5087a 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
@@ -122,7 +122,7 @@ public open class Scheme(
         override fun hashCode(): Int = Meta.hashCode(this)
 
         override fun set(name: Name, node: Meta?) {
-            target[name] = node
+            target[pathName + name] = node
             invalidate(name)
         }
 
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/SchemeTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/SchemeTest.kt
deleted file mode 100644
index eaa766d8..00000000
--- a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/SchemeTest.kt
+++ /dev/null
@@ -1,70 +0,0 @@
-package space.kscience.dataforge.meta
-
-import space.kscience.dataforge.misc.DFExperimental
-import kotlin.test.Test
-import kotlin.test.assertEquals
-import kotlin.test.assertNotNull
-
-
-private class SchemeWithInit: Scheme(){
-    init {
-        set("initial", "initialValue")
-    }
-
-    var initial by string()
-    companion object: SchemeSpec<SchemeWithInit>(::SchemeWithInit)
-}
-
-
-
-class SchemeTest {
-    @Test
-    fun testSchemeWrappingBeforeEdit() {
-        val config = MutableMeta()
-        val scheme = TestScheme.write(config)
-        scheme.a = 29
-        assertEquals(29, config["a"].int)
-    }
-
-    @OptIn(DFExperimental::class)
-    @Test
-    fun testSchemeWrappingAfterEdit() {
-        val scheme = TestScheme.empty()
-        scheme.a = 29
-        val config = MutableMeta()
-        scheme.retarget(config)
-        assertEquals(29, scheme.a)
-    }
-
-    @Test
-    fun testSchemeSubscription() {
-        val scheme = TestScheme.empty()
-        var flag: Int? = null
-        scheme.useProperty(TestScheme::a) { a ->
-            flag = a
-        }
-        scheme.a = 2
-        assertEquals(2, flag)
-    }
-
-    @Test
-    fun testListSubscription(){
-        val scheme = TestScheme.empty()
-        var value: Value? = null
-        scheme.v = ListValue(0.0,0.0,0.0)
-        scheme.useProperty(TestScheme::v){
-            value = it
-        }
-        scheme.v = ListValue(1.0, 2.0, 3.0)
-        assertNotNull(value)
-    }
-
-
-    @Test
-    fun testSchemeWithInit(){
-        val scheme = SchemeWithInit()
-        assertEquals("initialValue", scheme.initial)
-        scheme.initial = "none"
-        assertEquals("none", scheme.initial)
-    }
-}
\ No newline at end of file

From 104111f62dce898d45f0682254f8197293fb5e5e Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 2 Apr 2024 10:03:18 +0300
Subject: [PATCH 38/77] Add byte array value. Refactor exotic values

---
 .../kscience/dataforge/meta/MetaConverter.kt  |  2 +-
 .../dataforge/meta/MutableMetaDelegate.kt     | 11 ---
 .../space/kscience/dataforge/meta/Value.kt    |  2 -
 .../kscience/dataforge/meta/exoticValues.kt   | 80 ++++++++++++++++++
 .../dataforge/meta/valueExtensions.kt         |  7 --
 .../kscience/dataforge/misc/annotations.kt    |  9 +-
 .../dataforge/meta/SpecificationTest.kt       | 84 +++++++++++++++++++
 .../dataforge/values/DoubleArrayValue.kt      |  4 +
 8 files changed, 177 insertions(+), 22 deletions(-)
 create mode 100644 dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/values/DoubleArrayValue.kt

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
index 00074525..124f49f5 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
@@ -135,7 +135,7 @@ public interface MetaConverter<T>: MetaSpec<T> {
         @DFExperimental
         public inline fun <reified T> serializable(
             descriptor: MetaDescriptor? = null,
-            jsonEncoder: Json = Json { ignoreUnknownKeys = true },
+            jsonEncoder: Json = Json,
         ): MetaConverter<T> = object : MetaConverter<T> {
             private val serializer: KSerializer<T> = serializer()
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
index 70c4aceb..e308a63d 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
@@ -197,17 +197,6 @@ public fun MutableMetaProvider.numberList(
     reader = { it?.list?.map { value -> value.numberOrNull ?: Double.NaN } ?: listOf(*default) },
 )
 
-/* A special delegate for double arrays */
-
-
-public fun MutableMetaProvider.doubleArray(
-    vararg default: Double,
-    key: Name? = null,
-): ReadWriteProperty<Any?, DoubleArray> = value(
-    key,
-    writer = { DoubleArrayValue(it) },
-    reader = { it?.doubleArray ?: doubleArrayOf(*default) },
-)
 
 public fun <T> MutableMetaProvider.listValue(
     key: Name? = null,
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Value.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Value.kt
index 66e14c86..2ab7b9ee 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Value.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Value.kt
@@ -256,8 +256,6 @@ public fun ShortArray.asValue(): Value = if (isEmpty()) Null else ListValue(map
 
 public fun FloatArray.asValue(): Value = if (isEmpty()) Null else ListValue(map { NumberValue(it) })
 
-public fun ByteArray.asValue(): Value = if (isEmpty()) Null else ListValue(map { NumberValue(it) })
-
 public fun <E : Enum<E>> E.asValue(): Value = EnumValue(this)
 
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt
index 74952053..eb39e985 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt
@@ -1,5 +1,9 @@
 package space.kscience.dataforge.meta
 
+import space.kscience.dataforge.names.Name
+import kotlin.properties.ReadOnlyProperty
+import kotlin.properties.ReadWriteProperty
+
 
 /**
  * A value built from string which content and type are parsed on-demand
@@ -44,3 +48,79 @@ public class DoubleArrayValue(override val value: DoubleArray) : Value, Iterable
 }
 
 public fun DoubleArray.asValue(): Value = if (isEmpty()) Null else DoubleArrayValue(this)
+
+public val Value.doubleArray: DoubleArray
+    get() = if (this is DoubleArrayValue) {
+        value
+    } else {
+        DoubleArray(list.size) { list[it].double }
+    }
+
+public val Meta?.doubleArray: DoubleArray? get() = this?.value?.doubleArray
+
+public fun MetaProvider.doubleArray(
+    vararg default: Double,
+    key: Name? = null,
+): ReadOnlyProperty<Any?, DoubleArray> = value(
+    key,
+    reader = { it?.doubleArray ?: doubleArrayOf(*default) },
+)
+
+public fun MutableMetaProvider.doubleArray(
+    vararg default: Double,
+    key: Name? = null,
+): ReadWriteProperty<Any?, DoubleArray> = value(
+    key,
+    writer = { DoubleArrayValue(it) },
+    reader = { it?.doubleArray ?: doubleArrayOf(*default) },
+)
+
+
+public class ByteArrayValue(override val value: ByteArray) : Value, Iterable<Byte> {
+    override val type: ValueType get() = ValueType.LIST
+    override val list: List<Value> get() = value.map { NumberValue(it) }
+
+    override fun equals(other: Any?): Boolean {
+        if (this === other) return true
+        if (other !is Value) return false
+
+        return when (other) {
+            is ByteArrayValue -> value.contentEquals(other.value)
+            else -> list == other.list
+        }
+    }
+
+    override fun hashCode(): Int = value.contentHashCode()
+
+    override fun toString(): String = list.joinToString(prefix = "[", postfix = "]")
+
+    override fun iterator(): Iterator<Byte> = value.iterator()
+}
+
+public fun ByteArray.asValue(): Value = ByteArrayValue(this)
+
+public val Value.byteArray: ByteArray
+    get() = if (this is ByteArrayValue) {
+        value
+    } else {
+        ByteArray(list.size) { list[it].number.toByte() }
+    }
+
+public val Meta?.byteArray: ByteArray? get() = this?.value?.byteArray
+
+public fun MetaProvider.byteArray(
+    vararg default: Byte,
+    key: Name? = null,
+): ReadOnlyProperty<Any?, ByteArray> = value(
+    key,
+    reader = { it?.byteArray ?: byteArrayOf(*default) },
+)
+
+public fun MutableMetaProvider.byteArray(
+    vararg default: Byte,
+    key: Name? = null,
+): ReadWriteProperty<Any?, ByteArray> = value(
+    key,
+    writer = { ByteArrayValue(it) },
+    reader = { it?.byteArray ?: byteArrayOf(*default) },
+)
\ No newline at end of file
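
A minimal usage sketch for the new `byteArray` delegate and `ByteArrayValue` (illustrative only; `BlobScheme` and the stored bytes are hypothetical, not part of the patch):

    class BlobScheme : Scheme() {
        // falls back to the given default bytes when the meta node is absent
        var payload by byteArray(0, 1)

        companion object : SchemeSpec<BlobScheme>(::BlobScheme)
    }

    val scheme = BlobScheme.empty()
    scheme.payload = byteArrayOf(1, 2, 3)                  // stored as ByteArrayValue
    val restored: ByteArray? = scheme.meta["payload"]?.byteArray
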
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/valueExtensions.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/valueExtensions.kt
index e6b622ff..73464305 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/valueExtensions.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/valueExtensions.kt
@@ -31,12 +31,5 @@ public inline fun <reified E : Enum<E>> Value.enum(): E = if (this is EnumValue<
 
 public val Value.stringList: List<String> get() = list.map { it.string }
 
-public val Value.doubleArray: DoubleArray
-    get() = if (this is DoubleArrayValue) {
-        value
-    } else {
-        DoubleArray(list.size) { list[it].double }
-    }
-
 
 public fun Value.toMeta(): Meta = Meta(this)
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/annotations.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/annotations.kt
index 3c9d6ac3..29568403 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/annotations.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/annotations.kt
@@ -18,4 +18,11 @@ public annotation class DFExperimental
  */
 @RequiresOptIn(level = RequiresOptIn.Level.WARNING)
 @Retention(AnnotationRetention.BINARY)
-public annotation class DFInternal
\ No newline at end of file
+public annotation class DFInternal
+
+/**
+ * Marks declarations that accept an explicit KType without verifying that it corresponds to the actual type parameter
+ */
+@RequiresOptIn(level = RequiresOptIn.Level.WARNING)
+@Retention(AnnotationRetention.BINARY)
+public annotation class UnsafeKType
\ No newline at end of file
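
A short sketch of the intended opt-in pattern (the `typedData` helper below is hypothetical):

    @UnsafeKType
    fun <T> typedData(type: KType, value: T): Data<T> =
        StaticData(type, value, Meta.EMPTY) // trusts the caller that value matches type

    @OptIn(UnsafeKType::class)
    val answer = typedData(typeOf<Int>(), 42)
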
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/SpecificationTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/SpecificationTest.kt
index 8d4d3537..dc9b9d64 100644
--- a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/SpecificationTest.kt
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/SpecificationTest.kt
@@ -1,7 +1,17 @@
 package space.kscience.dataforge.meta
 
+import space.kscience.dataforge.misc.DFExperimental
 import kotlin.test.Test
 import kotlin.test.assertEquals
+import kotlin.test.assertNotNull
+
+
+internal class SubScheme : Scheme() {
+
+    var subValue by string()
+
+    companion object : SchemeSpec<SubScheme>(::SubScheme)
+}
 
 internal class TestScheme : Scheme() {
     var list by numberList(1, 2, 3)
@@ -11,9 +21,23 @@ internal class TestScheme : Scheme() {
 
     var v by value()
 
+    var sub by scheme(SubScheme)
+
     companion object : SchemeSpec<TestScheme>(::TestScheme)
 }
 
+private class SchemeWithInit: Scheme(){
+    init {
+        set("initial", "initialValue")
+    }
+
+    var initial by string()
+
+    companion object: SchemeSpec<SchemeWithInit>(::SchemeWithInit)
+}
+
+
+
 class SpecificationTest {
 
 //    @Test
@@ -71,4 +95,64 @@ class SpecificationTest {
         assertEquals(22, config["child.a"].int)
         assertEquals("test", config["child.b"].string)
     }
+
+    @Test
+    fun testSchemeWrappingBeforeEdit() {
+        val config = MutableMeta()
+        val scheme = TestScheme.write(config)
+        scheme.a = 29
+        assertEquals(29, config["a"].int)
+    }
+
+    @OptIn(DFExperimental::class)
+    @Test
+    fun testSchemeWrappingAfterEdit() {
+        val scheme = TestScheme.empty()
+        scheme.a = 29
+        val config = MutableMeta()
+        scheme.retarget(config)
+        assertEquals(29, scheme.a)
+    }
+
+    @Test
+    fun testSchemeSubscription() {
+        val scheme = TestScheme.empty()
+        var flag: Int? = null
+        scheme.useProperty(TestScheme::a) { a ->
+            flag = a
+        }
+        scheme.a = 2
+        assertEquals(2, flag)
+    }
+
+    @Test
+    fun testListSubscription(){
+        val scheme = TestScheme.empty()
+        var value: Value? = null
+        scheme.v = ListValue(0.0,0.0,0.0)
+        scheme.useProperty(TestScheme::v){
+            value = it
+        }
+        scheme.v = ListValue(1.0, 2.0, 3.0)
+        assertNotNull(value)
+    }
+
+    @Test
+    fun testSubScheme(){
+        val scheme = TestScheme.empty()
+
+        scheme.sub.subValue = "aaa"
+
+        assertEquals("aaa",scheme.sub.subValue)
+    }
+
+
+    @Test
+    fun testSchemeWithInit(){
+        val scheme = SchemeWithInit()
+        assertEquals("initialValue", scheme.initial)
+        scheme.initial = "none"
+        assertEquals("none", scheme.initial)
+    }
+
 }
\ No newline at end of file
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/values/DoubleArrayValue.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/values/DoubleArrayValue.kt
new file mode 100644
index 00000000..59ccecae
--- /dev/null
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/values/DoubleArrayValue.kt
@@ -0,0 +1,4 @@
+package space.kscience.dataforge.values
+
+class DoubleArrayValue {
+}
\ No newline at end of file

From e7db1cc763210de45306b61642e2d75cf0d56307 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 2 Apr 2024 10:04:46 +0300
Subject: [PATCH 39/77] Refactor of data trees filtering

---
 CHANGELOG.md                                  |   4 +-
 build.gradle.kts                              |   4 +-
 .../dataforge/actions/AbstractAction.kt       |  46 ++-
 .../kscience/dataforge/actions/Action.kt      |  17 +-
 .../kscience/dataforge/actions/MapAction.kt   |  25 +-
 .../dataforge/actions/ReduceAction.kt         |   8 +-
 .../kscience/dataforge/actions/SplitAction.kt |  34 ++-
 .../space/kscience/dataforge/data/Data.kt     |  16 +-
 .../kscience/dataforge/data/DataFilter.kt     |  45 ++-
 .../space/kscience/dataforge/data/DataSink.kt | 157 +++++++++++
 .../kscience/dataforge/data/DataSource.kt     | 263 ++----------------
 .../kscience/dataforge/data/GroupRule.kt      |  10 +-
 .../kscience/dataforge/data/MetaMaskData.kt   |   2 +-
 .../kscience/dataforge/data/NamedData.kt      |  28 +-
 .../kscience/dataforge/data/dataBuilders.kt   |  71 ++---
 .../kscience/dataforge/data/dataTransform.kt  |  17 +-
 .../dataforge/data/dataTreeBuilder.kt         | 112 ++++++++
 .../kscience/dataforge/data/ActionsTest.kt    |  22 +-
 .../dataforge/data/DataTreeBuilderTest.kt     |  71 +++++
 .../kscience/dataforge/data/dataFilterJvm.kt  |  45 +--
 .../dataforge/data/dataSetBuilderInContext.kt |   4 +-
 .../dataforge/data/DataTreeBuilderTest.kt     |  68 -----
 .../kscience/dataforge/workspace/Task.kt      |   9 +-
 .../dataforge/workspace/TaskResult.kt         |   8 +-
 .../kscience/dataforge/workspace/Workspace.kt |   7 +-
 .../dataforge/workspace/WorkspaceBuilder.kt   |  10 +-
 .../dataforge/workspace/WorkspaceImpl.kt      |   4 +-
 .../dataforge/workspace/envelopeData.kt       |   4 +-
 .../dataforge/workspace/taskBuilders.kt       |   2 +-
 .../dataforge/workspace/FileWorkspaceCache.kt |   3 +-
 .../workspace/InMemoryWorkspaceCache.kt       |   3 +-
 .../dataforge/workspace/readFileData.kt       |  26 +-
 .../workspace/CachingWorkspaceTest.kt         |   4 +-
 .../workspace/DataPropagationTest.kt          |   2 +-
 .../dataforge/workspace/FileDataTest.kt       |   4 +-
 .../workspace/FileWorkspaceCacheTest.kt       |   6 +-
 .../workspace/SimpleWorkspaceTest.kt          |  14 +-
 37 files changed, 644 insertions(+), 531 deletions(-)
 create mode 100644 dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
 create mode 100644 dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
 rename dataforge-data/src/{jvmTest => commonTest}/kotlin/space/kscience/dataforge/data/ActionsTest.kt (65%)
 create mode 100644 dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
 delete mode 100644 dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2849fce6..a7c134e6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -13,7 +13,9 @@
 ### Removed
 
 ### Fixed
-- `listOfScheme` and `listOfConvertable` delegates provides correct items order
+- `listOfScheme` and `listOfConvertable` delegates provide the correct item order.
+- The Scheme meta setter now writes to the proper sub-branch.
+- 
 
 ### Security
 
diff --git a/build.gradle.kts b/build.gradle.kts
index d41918ac..d4e22686 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -8,7 +8,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.8.1-dev-1"
+    version = "0.8.1"
 }
 
 subprojects {
@@ -30,7 +30,7 @@ ksciencePublish {
         useApache2Licence()
         useSPCTeam()
     }
-    repository("spc","https://maven.sciprog.center/kscience")
+    repository("spc", "https://maven.sciprog.center/kscience")
     sonatype("https://oss.sonatype.org")
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
index 00201a90..4ed5f8df 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
@@ -1,11 +1,14 @@
 package space.kscience.dataforge.actions
 
-import kotlinx.coroutines.flow.launchIn
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.flow.collect
 import kotlinx.coroutines.flow.onEach
-import kotlinx.coroutines.launch
-import space.kscience.dataforge.data.*
+import space.kscience.dataforge.data.DataSink
+import space.kscience.dataforge.data.DataTree
+import space.kscience.dataforge.data.DataUpdate
+import space.kscience.dataforge.data.launchUpdate
 import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DFInternal
+import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.startsWith
 import kotlin.reflect.KType
@@ -29,7 +32,7 @@ public abstract class AbstractAction<T, R>(
      * Generate initial content of the output
      */
     protected abstract fun DataSink<R>.generate(
-        data: DataTree<T>,
+        source: DataTree<T>,
         meta: Meta,
     )
 
@@ -40,35 +43,28 @@ public abstract class AbstractAction<T, R>(
      * @param meta the metadata used for the whole data tree
      * @param updatedData an updated item
      */
-    protected open fun DataSink<R>.update(
+    protected open suspend fun DataSink<R>.update(
         source: DataTree<T>,
         meta: Meta,
-        updatedData: NamedData<T>,
+        updatedData: DataUpdate<T>,
     ) {
         //by default regenerate the whole data set
         generate(source, meta)
     }
 
-    @OptIn(DFInternal::class)
+    @OptIn(UnsafeKType::class)
     override fun execute(
-        dataSet: DataTree<T>,
+        source: DataTree<T>,
         meta: Meta,
-    ): DataTree<R> = if (dataSet.isObservable()) {
-        MutableDataTree<R>(outputType, dataSet.updatesScope).apply {
-            generate(dataSet, meta)
-            dataSet.updates().onEach {
-                update(dataSet, meta, it)
-            }.launchIn(updatesScope)
-
-            //close updates when the source is closed
-            updatesScope.launch {
-                dataSet.awaitClose()
-                close()
-            }
-        }
-    } else {
-        DataTree(outputType) {
-            generate(dataSet, meta)
+        updatesScope: CoroutineScope
+    ): DataTree<R> = DataTree(outputType) {
+        generate(source, meta)
+        //propagate updates
+        launchUpdate(updatesScope) {
+            source.updates.onEach { update ->
+                update(source, meta, update)
+            }.collect()
         }
     }
 }
+
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
index 5ed60db9..80898aa8 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
@@ -1,5 +1,8 @@
 package space.kscience.dataforge.actions
 
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.DelicateCoroutinesApi
+import kotlinx.coroutines.GlobalScope
 import space.kscience.dataforge.data.DataTree
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.misc.DFExperimental
@@ -13,7 +16,7 @@ public fun interface Action<T, R> {
      * Transform the data in the node, producing a new node. By default, it is assumed that all calculations are lazy
      * so not actual computation is started at this moment.
      */
-    public fun execute(dataSet: DataTree<T>, meta: Meta): DataTree<R>
+    public fun execute(source: DataTree<T>, meta: Meta, updatesScope: CoroutineScope): DataTree<R>
 
     public companion object
 }
@@ -21,23 +24,27 @@ public fun interface Action<T, R> {
 /**
  * A convenience method to transform data using given [action]
  */
+@OptIn(DelicateCoroutinesApi::class)
 public fun <T, R> DataTree<T>.transform(
     action: Action<T, R>,
     meta: Meta = Meta.EMPTY,
-): DataTree<R> = action.execute(this, meta)
+    updateScope: CoroutineScope = GlobalScope,
+): DataTree<R> = action.execute(this, meta, updateScope)
 
 /**
  * Action composition. The result is terminal if one of its parts is terminal
  */
-public infix fun <T, I, R> Action<T, I>.then(action: Action<I, R>): Action<T, R> = Action { dataSet, meta ->
-    action.execute(this@then.execute(dataSet, meta), meta)
+public infix fun <T, I, R> Action<T, I>.then(action: Action<I, R>): Action<T, R> = Action { dataSet, meta, scope ->
+    action.execute(this@then.execute(dataSet, meta, scope), meta, scope)
 }
 
 @DFExperimental
+@OptIn(DelicateCoroutinesApi::class)
 public operator fun <T, R> Action<T, R>.invoke(
     dataSet: DataTree<T>,
     meta: Meta = Meta.EMPTY,
-): DataTree<R> = execute(dataSet, meta)
+    updateScope: CoroutineScope = GlobalScope,
+): DataTree<R> = execute(dataSet, meta, updateScope)
 
 
 
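
For illustration, action composition and invocation with the explicit update scope now look roughly like this (`normalize` and `render` are assumed actions, not part of the patch):

    val pipeline: Action<Int, String> = normalize then render

    fun process(source: DataTree<Int>, scope: CoroutineScope): DataTree<String> =
        source.transform(pipeline, Meta.EMPTY, updateScope = scope)
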
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index 0c56f0dd..a1ca8e59 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -6,7 +6,7 @@ import space.kscience.dataforge.meta.MutableMeta
 import space.kscience.dataforge.meta.seal
 import space.kscience.dataforge.meta.toMutableMeta
 import space.kscience.dataforge.misc.DFBuilder
-import space.kscience.dataforge.misc.DFInternal
+import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
@@ -54,7 +54,12 @@ internal class MapAction<T, R>(
     private val block: MapActionBuilder<T, R>.() -> Unit,
 ) : AbstractAction<T, R>(outputType) {
 
-    private fun DataSink<R>.mapOne(name: Name, data: Data<T>, meta: Meta) {
+    private fun DataSink<R>.mapOne(name: Name, data: Data<T>?, meta: Meta) {
+        //fast return for null data
+        if (data == null) {
+            put(name, null)
+            return
+        }
         // Creating a new environment for action using **old** name, old meta and task meta
         val env = ActionEnv(name, data.meta, meta)
 
@@ -73,7 +78,7 @@ internal class MapAction<T, R>(
         //getting new meta
         val newMeta = builder.meta.seal()
 
-        @OptIn(DFInternal::class)
+        @OptIn(UnsafeKType::class)
         val newData = Data(builder.outputType, newMeta, dependencies = listOf(data)) {
             builder.result(env, data.await())
         }
@@ -81,12 +86,18 @@ internal class MapAction<T, R>(
         put(newName, newData)
     }
 
-    override fun DataSink<R>.generate(data: DataTree<T>, meta: Meta) {
-        data.forEach { mapOne(it.name, it.data, meta) }
+    override fun DataSink<R>.generate(source: DataTree<T>, meta: Meta) {
+        source.forEach { mapOne(it.name, it.data, meta) }
     }
 
-    override fun DataSink<R>.update(source: DataTree<T>, meta: Meta, updatedData: NamedData<T>) {
-        mapOne(updatedData.name, updatedData.data, updatedData.meta)
+
+
+    override suspend fun DataSink<R>.update(
+        source: DataTree<T>,
+        meta: Meta,
+        updatedData: DataUpdate<T>,
+    )  {
+        mapOne(updatedData.name, updatedData.data, meta)
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
index 9df0db26..93278442 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
@@ -4,7 +4,7 @@ import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
 import space.kscience.dataforge.misc.DFBuilder
-import space.kscience.dataforge.misc.DFInternal
+import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.parseAsName
 import kotlin.reflect.KType
@@ -84,8 +84,8 @@ internal class ReduceAction<T, R>(
 ) : AbstractAction<T, R>(outputType) {
     //TODO optimize reduction. Currently, the whole action recalculates on push
 
-    override fun DataSink<R>.generate(data: DataTree<T>, meta: Meta) {
-        ReduceGroupBuilder<T, R>(meta, outputType).apply(action).buildGroups(data).forEach { group ->
+    override fun DataSink<R>.generate(source: DataTree<T>, meta: Meta) {
+        ReduceGroupBuilder<T, R>(meta, outputType).apply(action).buildGroups(source).forEach { group ->
             val dataFlow: Map<Name, Data<T>> = group.set.asSequence().fold(HashMap()) { acc, value ->
                 acc.apply {
                     acc[value.name] = value.data
@@ -97,7 +97,7 @@ internal class ReduceAction<T, R>(
             val groupMeta = group.meta
 
             val env = ActionEnv(groupName.parseAsName(), groupMeta, meta)
-            @OptIn(DFInternal::class) val res: Data<R> = dataFlow.reduceToData(
+            @OptIn(UnsafeKType::class) val res: Data<R> = dataFlow.reduceToData(
                 group.outputType,
                 meta = groupMeta
             ) { group.result.invoke(env, it) }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
index 04fef3ab..2268b0fa 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
@@ -48,10 +48,10 @@ internal class SplitAction<T, R>(
     private val action: SplitBuilder<T, R>.() -> Unit,
 ) : AbstractAction<T, R>(outputType) {
 
-    private fun DataSink<R>.splitOne(name: Name, data: Data<T>, meta: Meta) {
-        val laminate = Laminate(data.meta, meta)
+    private fun DataSink<R>.splitOne(name: Name, data: Data<T>?, meta: Meta) {
+        val laminate = Laminate(data?.meta, meta)
 
-        val split = SplitBuilder<T, R>(name, data.meta).apply(action)
+        val split = SplitBuilder<T, R>(name, data?.meta ?: Meta.EMPTY).apply(action)
 
 
         // apply individual fragment rules to result
@@ -63,21 +63,29 @@ internal class SplitAction<T, R>(
             ).apply(rule)
             //data.map<R>(outputType, meta = env.meta) { env.result(it) }.named(fragmentName)
 
-            put(
-                fragmentName,
-                @Suppress("OPT_IN_USAGE") Data(outputType, meta = env.meta, dependencies = listOf(data)) {
-                    env.result(data.await())
-                }
-            )
+            if (data == null) {
+                put(fragmentName, null)
+            } else {
+                put(
+                    fragmentName,
+                    @Suppress("OPT_IN_USAGE") Data(outputType, meta = env.meta, dependencies = listOf(data)) {
+                        env.result(data.await())
+                    }
+                )
+            }
         }
     }
 
-    override fun DataSink<R>.generate(data: DataTree<T>, meta: Meta) {
-        data.forEach { splitOne(it.name, it.data, meta) }
+    override fun DataSink<R>.generate(source: DataTree<T>, meta: Meta) {
+        source.forEach { splitOne(it.name, it.data, meta) }
     }
 
-    override fun DataSink<R>.update(source: DataTree<T>, meta: Meta, updatedData: NamedData<T>) {
-        splitOne(updatedData.name, updatedData.data, updatedData.meta)
+    override suspend fun DataSink<R>.update(
+        source: DataTree<T>,
+        meta: Meta,
+        updatedData: DataUpdate<T>,
+    )  {
+        splitOne(updatedData.name, updatedData.data, meta)
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
index a0bb58ea..b9946a48 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
@@ -4,8 +4,8 @@ import kotlinx.coroutines.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MetaRepr
 import space.kscience.dataforge.meta.isEmpty
-import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.misc.DfType
+import space.kscience.dataforge.misc.UnsafeKType
 import kotlin.coroutines.CoroutineContext
 import kotlin.coroutines.EmptyCoroutineContext
 import kotlin.reflect.KType
@@ -41,7 +41,7 @@ public interface Data<out T> : Goal<T>, MetaRepr {
          */
         internal val TYPE_OF_NOTHING: KType = typeOf<Unit>()
 
-        public inline fun <reified T> static(
+        public inline fun <reified T> wrapValue(
             value: T,
             meta: Meta = Meta.EMPTY,
         ): Data<T> = StaticData(typeOf<T>(), value, meta)
@@ -50,10 +50,10 @@ public interface Data<out T> : Goal<T>, MetaRepr {
          * An empty data containing only meta
          */
         @OptIn(DelicateCoroutinesApi::class)
-        public fun empty(meta: Meta): Data<Nothing> = object : Data<Nothing> {
-            override val type: KType = TYPE_OF_NOTHING
+        public fun buildEmpty(meta: Meta): Data<Nothing> = object : Data<Nothing> {
+            override val type: KType get() = TYPE_OF_NOTHING
             override val meta: Meta = meta
-            override val dependencies: Collection<Goal<*>> = emptyList()
+            override val dependencies: Collection<Goal<*>> get() = emptyList()
             override val deferred: Deferred<Nothing>
                 get() = GlobalScope.async(start = CoroutineStart.LAZY) {
                     error("The Data is empty and could not be computed")
@@ -62,6 +62,8 @@ public interface Data<out T> : Goal<T>, MetaRepr {
             override fun async(coroutineScope: CoroutineScope): Deferred<Nothing> = deferred
             override fun reset() {}
         }
+
+        public val EMPTY: Data<Nothing> = buildEmpty(Meta.EMPTY)
     }
 }
 
@@ -87,7 +89,7 @@ public class StaticData<T>(
 public inline fun <reified T> Data(value: T, meta: Meta = Meta.EMPTY): StaticData<T> =
     StaticData(typeOf<T>(), value, meta)
 
-@DFInternal
+@UnsafeKType
 public fun <T> Data(
     type: KType,
     meta: Meta = Meta.EMPTY,
@@ -96,7 +98,7 @@ public fun <T> Data(
     block: suspend () -> T,
 ): Data<T> = LazyData(type, meta, context, dependencies, block)
 
-@OptIn(DFInternal::class)
+@OptIn(UnsafeKType::class)
 public inline fun <reified T> Data(
     meta: Meta = Meta.EMPTY,
     context: CoroutineContext = EmptyCoroutineContext,
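
The renamed factory and the new constant from the file above can be used as follows (illustrative sketch):

    val constant: Data<Int> = Data.wrapValue(42, Meta { "origin" put "example" })
    val placeholder: Data<Nothing> = Data.EMPTY   // meta-only data that fails if awaited
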
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
index f45570ad..bc66e910 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
@@ -1,28 +1,30 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.filter
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.NameToken
+import space.kscience.dataforge.names.plus
 import kotlin.reflect.KType
 
 public fun interface DataFilter {
 
-    public fun accepts(name: Name, meta: Meta, type: KType): Boolean
+    public fun accepts(name: Name, meta: Meta?, type: KType): Boolean
 
     public companion object {
         public val EMPTY: DataFilter = DataFilter { _, _, _ -> true }
     }
 }
 
-public fun DataFilter.accepts(data: NamedData<*>): Boolean = accepts(data.name, data.meta, data.type)
 
-public fun <T> Sequence<NamedData<T>>.filterData(predicate: DataFilter): Sequence<NamedData<T>> = filter { data ->
+public fun DataFilter.accepts(update: DataUpdate<*>): Boolean = accepts(update.name, update.data?.meta, update.type)
+
+public fun <T, DU : DataUpdate<T>> Sequence<DU>.filterData(predicate: DataFilter): Sequence<DU> = filter { data ->
     predicate.accepts(data)
 }
 
-public fun <T> Flow<NamedData<T>>.filterData(predicate: DataFilter): Flow<NamedData<T>> = filter { data ->
+public fun <T, DU : DataUpdate<T>> Flow<DU>.filterData(predicate: DataFilter): Flow<DU> = filter { data ->
     predicate.accepts(data)
 }
 
@@ -41,7 +43,8 @@ public fun <T> DataSource<T>.filterData(
 public fun <T> ObservableDataSource<T>.filterData(
     predicate: DataFilter,
 ): ObservableDataSource<T> = object : ObservableDataSource<T> {
-    override fun updates(): Flow<NamedData<T>> = this@filterData.updates().filter { predicate.accepts(it) }
+    override val updates: Flow<DataUpdate<T>>
+        get() = this@filterData.updates.filter { predicate.accepts(it) }
 
     override val dataType: KType get() = this@filterData.dataType
 
@@ -49,14 +52,32 @@ public fun <T> ObservableDataSource<T>.filterData(
         this@filterData.read(name)?.takeIf { predicate.accepts(name, it.meta, it.type) }
 }
 
-public fun <T> GenericDataTree<T, *>.filterData(
-    predicate: DataFilter,
-): DataTree<T> = asSequence().filterData(predicate).toTree(dataType)
+internal class FilteredDataTree<T>(
+    val source: DataTree<T>,
+    val filter: DataFilter,
+    val branch: Name,
+    override val dataType: KType = source.dataType,
+) : DataTree<T> {
 
-public fun <T> GenericObservableDataTree<T, *>.filterData(
-    scope: CoroutineScope,
+    override val data: Data<T>?
+        get() = source[branch].takeIf {
+            filter.accepts(Name.EMPTY, data?.meta, data?.type ?: dataType)
+        }
+
+    override val items: Map<NameToken, DataTree<T>>
+        get() = source.branch(branch)?.items
+            ?.mapValues { FilteredDataTree(source, filter, branch + it.key) }
+            ?.filter { !it.value.isEmpty() }
+            ?: emptyMap()
+
+    override val updates: Flow<DataUpdate<T>>
+        get() = source.updates.filter { filter.accepts(it) }
+}
+
+
+public fun <T> DataTree<T>.filterData(
     predicate: DataFilter,
-): ObservableDataTree<T> = asSequence().filterData(predicate).toObservableTree(dataType, scope, updates().filterData(predicate))
+): DataTree<T> = FilteredDataTree(this, predicate, Name.EMPTY)
 
 
 ///**
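
A sketch of the new lazy filtering, assuming `tree` is some `DataTree<Double>`:

    val calibrationOnly = DataFilter { name, _, _ -> name.startsWith("calibration".asName()) }
    val filtered: DataTree<Double> = tree.filterData(calibrationOnly)
    // FilteredDataTree is a view: nothing is copied and updates are filtered on the fly
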
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
new file mode 100644
index 00000000..9d860310
--- /dev/null
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
@@ -0,0 +1,157 @@
+package space.kscience.dataforge.data
+
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.Job
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.flow.MutableSharedFlow
+import kotlinx.coroutines.flow.mapNotNull
+import kotlinx.coroutines.launch
+import space.kscience.dataforge.misc.UnsafeKType
+import space.kscience.dataforge.names.*
+import kotlin.reflect.KType
+import kotlin.reflect.typeOf
+
+public interface DataSink<in T> {
+    /**
+     * Put data without notification
+     */
+    public fun put(name: Name, data: Data<T>?)
+
+    /**
+     * Put data and propagate changes downstream
+     */
+    public suspend fun update(name: Name, data: Data<T>?)
+}
+
+/**
+ * Launch continuous update using
+ */
+public fun <T> DataSink<T>.launchUpdate(
+    scope: CoroutineScope,
+    updater: suspend DataSink<T>.() -> Unit,
+): Job = scope.launch {
+    object : DataSink<T> {
+        override fun put(name: Name, data: Data<T>?) {
+            launch {
+                this@launchUpdate.update(name, data)
+            }
+        }
+
+        override suspend fun update(name: Name, data: Data<T>?) {
+            this@launchUpdate.update(name, data)
+        }
+    }.updater()
+}
+
+/**
+ * A mutable version of [DataTree]
+ */
+public interface MutableDataTree<T> : DataTree<T>, DataSink<T> {
+    override var data: Data<T>?
+
+    override val items: Map<NameToken, MutableDataTree<T>>
+
+    public fun getOrCreateItem(token: NameToken): MutableDataTree<T>
+
+    public operator fun set(token: NameToken, data: Data<T>?)
+
+    override fun put(name: Name, data: Data<T>?): Unit = set(name, data)
+}
+
+public tailrec operator fun <T> MutableDataTree<T>.set(name: Name, data: Data<T>?): Unit {
+    when (name.length) {
+        0 -> this.data = data
+        1 -> set(name.first(), data)
+        else -> getOrCreateItem(name.first())[name.cutFirst()] = data
+    }
+}
+
+/**
+ * Provide a mutable subtree if it exists
+ */
+public tailrec fun <T> MutableDataTree<T>.branch(name: Name): MutableDataTree<T>? =
+    when (name.length) {
+        0 -> this
+        1 -> items[name.first()]
+        else -> items[name.first()]?.branch(name.cutFirst())
+    }
+
+private class MutableDataTreeRoot<T>(
+    override val dataType: KType,
+) : MutableDataTree<T> {
+
+    override val updates = MutableSharedFlow<DataUpdate<T>>()
+
+    inner class MutableDataTreeBranch(val branchName: Name) : MutableDataTree<T> {
+
+        override var data: Data<T>? = null
+
+        override val items = HashMap<NameToken, MutableDataTree<T>>()
+
+        override val updates: Flow<DataUpdate<T>> = this@MutableDataTreeRoot.updates.mapNotNull { update ->
+            update.name.removeFirstOrNull(branchName)?.let {
+                DataUpdate(update.data?.type ?: dataType, it, update.data)
+            }
+        }
+        override val dataType: KType get() = this@MutableDataTreeRoot.dataType
+
+
+        override fun getOrCreateItem(token: NameToken): MutableDataTree<T> =
+            items.getOrPut(token) { MutableDataTreeBranch(branchName + token) }
+
+
+        override fun set(token: NameToken, data: Data<T>?) {
+            val subTree = getOrCreateItem(token)
+            subTree.data = data
+        }
+
+        override suspend fun update(name: Name, data: Data<T>?) {
+            if (name.isEmpty()) {
+                this.data = data
+            } else {
+                getOrCreateItem(name.first()).update(name.cutFirst(), data)
+            }
+            this@MutableDataTreeRoot.updates.emit(DataUpdate(data?.type ?: dataType, branchName + name, data))
+        }
+
+    }
+
+
+    override var data: Data<T>? = null
+
+    override val items = HashMap<NameToken, MutableDataTree<T>>()
+
+    override fun getOrCreateItem(token: NameToken): MutableDataTree<T> = items.getOrPut(token) {
+        MutableDataTreeRoot(dataType)
+    }
+
+    override fun set(token: NameToken, data: Data<T>?) {
+        val subTree = getOrCreateItem(token)
+        subTree.data = data
+    }
+
+    override suspend fun update(name: Name, data: Data<T>?) {
+        if (name.isEmpty()) {
+            this.data = data
+        } else {
+            getOrCreateItem(name.first()).update(name.cutFirst(), data)
+        }
+        updates.emit(DataUpdate(data?.type ?: dataType, name, data))
+    }
+}
+
+/**
+ * Create a new [MutableDataTree]
+ */
+@UnsafeKType
+public fun <T> MutableDataTree(
+    type: KType,
+): MutableDataTree<T> = MutableDataTreeRoot<T>(type)
+
+/**
+ * Create and initialize an observable mutable data tree.
+ */
+@OptIn(UnsafeKType::class)
+public inline fun <reified T> MutableDataTree(
+    generator: MutableDataTree<T>.() -> Unit = {},
+): MutableDataTree<T> = MutableDataTree<T>(typeOf<T>()).apply { generator() }
\ No newline at end of file
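
A minimal sketch of the new contract: `put` mutates the tree silently, while the suspending `update` also notifies subscribers of `updates` (names here are illustrative):

    val tree = MutableDataTree<Int> {
        putValue("static.a", 1)                           // plain put: no notification
    }

    suspend fun pushUpdate() {
        tree.update("dynamic.b".parseAsName(), Data(2))   // suspends and emits into tree.updates
    }
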
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
index 855a249c..a9756be7 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
@@ -1,9 +1,8 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.*
-import kotlinx.coroutines.flow.*
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.flow.emptyFlow
 import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.*
 import kotlin.contracts.contract
 import kotlin.reflect.KType
@@ -33,46 +32,42 @@ public interface ObservableDataSource<out T> : DataSource<T> {
     /**
      * Flow updates made to the data
      */
-    public fun updates(): Flow<NamedData<T>>
+    public val updates: Flow<DataUpdate<T>>
 }
 
 /**
  * A tree like structure for data holding
  */
-public interface GenericDataTree<out T, out TR : GenericDataTree<T, TR>> : DataSource<T> {
-    public val self: TR
+public interface DataTree<out T> : ObservableDataSource<T> {
 
     public val data: Data<T>?
-    public val items: Map<NameToken, TR>
-
+    public val items: Map<NameToken, DataTree<T>>
 
     override fun read(name: Name): Data<T>? = when (name.length) {
         0 -> data
         else -> items[name.first()]?.read(name.cutFirst())
     }
 
+    /**
+     * A flow of updates made to the data
+     */
+    override val updates: Flow<DataUpdate<T>>
+
     public companion object {
-        private object EmptyDataTree : GenericDataTree<Nothing, EmptyDataTree> {
-            override val self: EmptyDataTree get() = this
+        private object EmptyDataTree :
+            DataTree<Nothing> {
             override val data: Data<Nothing>? = null
             override val items: Map<NameToken, EmptyDataTree> = emptyMap()
             override val dataType: KType = typeOf<Unit>()
 
             override fun read(name: Name): Data<Nothing>? = null
-
+            override val updates: Flow<DataUpdate<Nothing>> get() = emptyFlow()
         }
 
-        public val EMPTY: GenericDataTree<Nothing, *> = EmptyDataTree
+        public val EMPTY: DataTree<Nothing> = EmptyDataTree
     }
 }
 
-public typealias DataTree<T> = GenericDataTree<T, GenericDataTree<T, *>>
-
-/**
- * Return a single data in this tree. Throw error if it is not single.
- */
-public fun <T> DataTree<T>.single(): NamedData<T> = asSequence().single()
-
 /**
  * An alias for easier access to tree values
  */
@@ -94,11 +89,13 @@ public fun <T> DataTree<T>.asSequence(
 }
 
 /**
- * Walk the data tree depth-first
+ * Walk the data tree depth-first.
+ *
+ * @return a [Sequence] of pairs [Name]-[DataTree] for all nodes including the root one.
  */
-private fun <T, TR: GenericDataTree<T,TR>> TR.walk(
+public fun <T> DataTree<T>.walk(
     namePrefix: Name = Name.EMPTY,
-): Sequence<Pair<Name,TR>> = sequence {
+): Sequence<Pair<Name, DataTree<T>>> = sequence {
     yield(namePrefix to this@walk)
     items.forEach { (token, tree) ->
         yieldAll(tree.walk(namePrefix + token))
@@ -110,233 +107,25 @@ public val DataTree<*>.meta: Meta? get() = data?.meta
 /**
  * Provide subtree if it exists
  */
-public tailrec fun <T, TR : GenericDataTree<T, TR>> GenericDataTree<T, TR>.branch(name: Name): TR? =
+public tailrec fun <T> DataTree<T>.branch(name: Name): DataTree<T>? =
     when (name.length) {
-        0 -> self
+        0 -> this
         1 -> items[name.first()]
         else -> items[name.first()]?.branch(name.cutFirst())
     }
 
-public fun <T, TR : GenericDataTree<T, TR>> GenericDataTree<T, TR>.branch(name: String): TR? =
+public fun <T> DataTree<T>.branch(name: String): DataTree<T>? =
     branch(name.parseAsName())
 
-public fun GenericDataTree<*, *>.isEmpty(): Boolean = data == null && items.isEmpty()
-
-@PublishedApi
-internal class FlatDataTree<T>(
-    override val dataType: KType,
-    private val dataSet: Map<Name, Data<T>>,
-    private val prefix: Name,
-) : GenericDataTree<T, FlatDataTree<T>> {
-    override val self: FlatDataTree<T> get() = this
-    override val data: Data<T>? get() = dataSet[prefix]
-    override val items: Map<NameToken, FlatDataTree<T>>
-        get() = dataSet.keys
-            .filter { it.startsWith(prefix) && it.length > prefix.length }
-            .map { it.tokens[prefix.length] }
-            .associateWith { FlatDataTree(dataType, dataSet, prefix + it) }
-
-    override fun read(name: Name): Data<T>? = dataSet[prefix + name]
-}
-
-/**
- * Represent this flat data map as a [DataTree] without copying it
- */
-public inline fun <reified T> Map<Name, Data<T>>.asTree(): DataTree<T> = FlatDataTree(typeOf<T>(), this, Name.EMPTY)
-
-internal fun <T> Sequence<NamedData<T>>.toTree(type: KType): DataTree<T> =
-    FlatDataTree(type, associate { it.name to it.data }, Name.EMPTY)
-
-/**
- * Collect a sequence of [NamedData] to a [DataTree]
- */
-public inline fun <reified T> Sequence<NamedData<T>>.toTree(): DataTree<T> =
-    FlatDataTree(typeOf<T>(), associate { it.name to it.data }, Name.EMPTY)
-
-public interface GenericObservableDataTree<out T, out TR : GenericObservableDataTree<T, TR>> :
-    GenericDataTree<T, TR>, ObservableDataSource<T>, AutoCloseable {
-
-    /**
-     * A scope that is used to propagate updates. When this scope is closed, no new updates could arrive.
-     */
-    public val updatesScope: CoroutineScope
-
-    /**
-     * Close this data tree updates channel
-     */
-    override fun close() {
-        updatesScope.cancel()
-    }
-
-}
-
-public typealias ObservableDataTree<T> = GenericObservableDataTree<T, GenericObservableDataTree<T, *>>
+public fun DataTree<*>.isEmpty(): Boolean = data == null && items.isEmpty()
 
 /**
  * Check if the [DataTree] is observable
  */
-public fun <T> DataTree<T>.isObservable(): Boolean {
+public fun <T> DataSource<T>.isObservable(): Boolean {
     contract {
-        returns(true) implies (this@isObservable is GenericObservableDataTree<T, *>)
+        returns(true) implies (this@isObservable is ObservableDataSource<T>)
     }
-    return this is GenericObservableDataTree<T, *>
+    return this is ObservableDataSource<T>
 }
 
-/**
- * Wait for this data tree to stop spawning updates (updatesScope is closed).
- * If this [DataTree] is not observable, return immediately.
- */
-public suspend fun <T> DataTree<T>.awaitClose() {
-    if (isObservable()) {
-        updatesScope.coroutineContext[Job]?.join()
-    }
-}
-
-public fun <T> DataTree<T>.updates(): Flow<NamedData<T>> =
-    if (this is GenericObservableDataTree<T, *>) updates() else emptyFlow()
-
-public fun interface DataSink<in T> {
-    public fun put(name: Name, data: Data<T>?)
-}
-
-@DFInternal
-public class DataTreeBuilder<T>(private val type: KType) : DataSink<T> {
-    private val map = HashMap<Name, Data<T>>()
-    override fun put(name: Name, data: Data<T>?) {
-        if (data == null) {
-            map.remove(name)
-        } else {
-            map[name] = data
-        }
-    }
-
-    public fun build(): DataTree<T> = FlatDataTree(type, map, Name.EMPTY)
-}
-
-@DFInternal
-public inline fun <T> DataTree(
-    dataType: KType,
-    generator: DataSink<T>.() -> Unit,
-): DataTree<T> = DataTreeBuilder<T>(dataType).apply(generator).build()
-
-/**
- * Create and a data tree.
- */
-@OptIn(DFInternal::class)
-public inline fun <reified T> DataTree(
-    generator: DataSink<T>.() -> Unit,
-): DataTree<T> = DataTreeBuilder<T>(typeOf<T>()).apply(generator).build()
-
-/**
- * A mutable version of [GenericDataTree]
- */
-public interface MutableDataTree<T> : GenericObservableDataTree<T, MutableDataTree<T>>, DataSink<T> {
-    override var data: Data<T>?
-
-    override val items: Map<NameToken, MutableDataTree<T>>
-
-    public fun getOrCreateItem(token: NameToken): MutableDataTree<T>
-
-    public operator fun set(token: NameToken, data: Data<T>?)
-
-    override fun put(name: Name, data: Data<T>?): Unit = set(name, data)
-}
-
-public tailrec operator fun <T> MutableDataTree<T>.set(name: Name, data: Data<T>?): Unit {
-    when (name.length) {
-        0 -> this.data = data
-        1 -> set(name.first(), data)
-        else -> getOrCreateItem(name.first())[name.cutFirst()] = data
-    }
-}
-
-private class MutableDataTreeImpl<T>(
-    override val dataType: KType,
-    override val updatesScope: CoroutineScope,
-) : MutableDataTree<T> {
-
-
-    private val updates = MutableSharedFlow<NamedData<T>>()
-
-    private val children = HashMap<NameToken, MutableDataTree<T>>()
-
-    override var data: Data<T>? = null
-        set(value) {
-            if (!updatesScope.isActive) error("Can't send updates to closed MutableDataTree")
-            field = value
-            if (value != null) {
-                updatesScope.launch {
-                    updates.emit(value.named(Name.EMPTY))
-                }
-            }
-        }
-
-    override val items: Map<NameToken, MutableDataTree<T>> get() = children
-
-    override fun getOrCreateItem(token: NameToken): MutableDataTree<T> = children.getOrPut(token){
-        MutableDataTreeImpl(dataType, updatesScope)
-    }
-
-    override val self: MutableDataTree<T> get() = this
-
-    override fun set(token: NameToken, data: Data<T>?) {
-        if (!updatesScope.isActive) error("Can't send updates to closed MutableDataTree")
-        val subTree = getOrCreateItem(token)
-        subTree.updates().onEach {
-            updates.emit(it.named(token + it.name))
-        }.launchIn(updatesScope)
-        subTree.data = data
-    }
-
-    override fun updates(): Flow<NamedData<T>> = updates
-}
-
-/**
- * Create a new [MutableDataTree]
- *
- * @param parentScope a [CoroutineScope] to control data propagation. By default uses [GlobalScope]
- */
-@OptIn(DelicateCoroutinesApi::class)
-public fun <T> MutableDataTree(
-    type: KType,
-    parentScope: CoroutineScope = GlobalScope,
-): MutableDataTree<T> = MutableDataTreeImpl<T>(
-    type,
-    CoroutineScope(parentScope.coroutineContext + Job(parentScope.coroutineContext[Job]))
-)
-
-/**
- * Create and initialize a observable mutable data tree.
- */
-@OptIn(DelicateCoroutinesApi::class)
-public inline fun <reified T> MutableDataTree(
-    parentScope: CoroutineScope = GlobalScope,
-    generator: MutableDataTree<T>.() -> Unit = {},
-): MutableDataTree<T> = MutableDataTree<T>(typeOf<T>(), parentScope).apply { generator() }
-
-//@DFInternal
-//public fun <T> ObservableDataTree(
-//    type: KType,
-//    scope: CoroutineScope,
-//    generator: suspend MutableDataTree<T>.() -> Unit = {},
-//): ObservableDataTree<T> = MutableDataTree<T>(type, scope.coroutineContext).apply(generator)
-
-public inline fun <reified T> ObservableDataTree(
-    parentScope: CoroutineScope,
-    generator: MutableDataTree<T>.() -> Unit = {},
-): ObservableDataTree<T> = MutableDataTree<T>(typeOf<T>(), parentScope).apply(generator)
-
-
-/**
- * Collect a [Sequence] into an observable tree with additional [updates]
- */
-public fun <T> Sequence<NamedData<T>>.toObservableTree(
-    dataType: KType,
-    parentScope: CoroutineScope,
-    updates: Flow<NamedData<T>>,
-): ObservableDataTree<T> = MutableDataTree<T>(dataType, parentScope).apply {
-    this.putAll(this@toObservableTree)
-    updates.onEach {
-        put(it.name, it.data)
-    }.launchIn(updatesScope)
-}
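
With the flattened hierarchy, traversal is a plain sequence operation (illustrative sketch):

    fun <T> printStructure(tree: DataTree<T>) {
        tree.walk().forEach { (name, node) ->
            println("$name: data=${node.data != null}, children=${node.items.keys}")
        }
    }
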
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
index ff7c94a8..6dd3caa9 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
@@ -17,7 +17,7 @@ package space.kscience.dataforge.data
 
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.string
-import space.kscience.dataforge.misc.DFInternal
+import space.kscience.dataforge.misc.UnsafeKType
 
 public interface GroupRule {
     public fun <T> gather(set: DataTree<T>): Map<String, DataTree<T>>
@@ -31,7 +31,7 @@ public interface GroupRule {
          * @param defaultTagValue
          * @return
          */
-        @OptIn(DFInternal::class)
+        @OptIn(UnsafeKType::class)
         public fun byMetaValue(
             key: String,
             defaultTagValue: String,
@@ -40,15 +40,15 @@ public interface GroupRule {
             override fun <T> gather(
                 set: DataTree<T>,
             ): Map<String, DataTree<T>> {
-                val map = HashMap<String, DataTreeBuilder<T>>()
+                val map = HashMap<String, MutableDataTree<T>>()
 
                 set.forEach { data ->
                     val tagValue: String = data.meta[key]?.string ?: defaultTagValue
-                    map.getOrPut(tagValue) { DataTreeBuilder(set.dataType) }.put(data.name, data.data)
+                    map.getOrPut(tagValue) { MutableDataTree(set.dataType) }.put(data.name, data.data)
                 }
 
 
-                return map.mapValues { it.value.build() }
+                return map
             }
         }
     }
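
Grouping now returns the mutable trees directly (sketch; `tree` is an assumed `DataTree<Double>`):

    val byType: Map<String, DataTree<Double>> =
        GroupRule.byMetaValue("type", defaultTagValue = "unknown").gather(tree)
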
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt
index acf2410d..85f0b2f9 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt
@@ -20,4 +20,4 @@ public fun <T> Data<T>.withMeta(newMeta: Meta): Data<T> = if (this is MetaMaskDa
  * Create a new [Data] with the same computation, but different meta. The meta is created by applying [block] to
  * the existing data meta.
  */
-public inline fun <T> Data<T>.mapMeta(block: MutableMeta.() -> Unit): Data<T> = withMeta(meta.copy(block))
\ No newline at end of file
+public inline fun <T> Data<T>.withMeta(block: MutableMeta.() -> Unit): Data<T> = withMeta(meta.copy(block))
\ No newline at end of file
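
The renamed helper keeps the computation and only rewrites the meta (illustrative):

    val tagged: Data<Int> = Data(3).withMeta { "stage" put "calibrated" }
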
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
index bf65292c..9cb4c2d9 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
@@ -3,10 +3,30 @@ package space.kscience.dataforge.data
 import space.kscience.dataforge.meta.isEmpty
 import space.kscience.dataforge.misc.Named
 import space.kscience.dataforge.names.Name
+import kotlin.reflect.KType
 
-public interface NamedData<out T> : Named, Data<T> {
+/**
+ * An event describing an update of a single data element.
+ *
+ * If [data] is null, the corresponding element should be removed.
+ */
+public interface DataUpdate<out T> : Named {
+    public val type: KType
     override val name: Name
-    public val data: Data<T>
+    public val data: Data<T>?
+}
+
+public fun <T> DataUpdate(type: KType, name: Name, data: Data<T>?): DataUpdate<T> = object : DataUpdate<T> {
+    override val type: KType = type
+    override val name: Name = name
+    override val data: Data<T>? = data
+}
+
+/**
+ * A data coupled to a name.
+ */
+public interface NamedData<out T> : DataUpdate<T>, Data<T> {
+    override val data: Data<T>
 }
 
 public operator fun NamedData<*>.component1(): Name = name
@@ -32,4 +52,6 @@ public fun <T> Data<T>.named(name: Name): NamedData<T> = if (this is NamedData)
     NamedDataImpl(name, this.data)
 } else {
     NamedDataImpl(name, this)
-}
\ No newline at end of file
+}
+
+public fun <T> NamedData(name: Name, data: Data<T>): NamedData<T> = data.named(name)
\ No newline at end of file
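
For illustration, a removal event versus a regular named item under the new model (the names used are hypothetical):

    val removal: DataUpdate<Int> = DataUpdate(typeOf<Int>(), "obsolete.item".parseAsName(), null)
    val added: NamedData<Int> = NamedData("fresh.item".parseAsName(), Data(5))
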
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
index 63233c07..8ec3e361 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
@@ -1,5 +1,6 @@
 package space.kscience.dataforge.data
 
+import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.Job
 import kotlinx.coroutines.flow.launchIn
 import kotlinx.coroutines.flow.onEach
@@ -22,31 +23,27 @@ public inline fun <T> DataSink<T>.putAll(
     if (prefix.isEmpty()) {
         apply(block)
     } else {
-        val proxyDataSink = DataSink { nameWithoutPrefix, data ->
-            this.put(prefix + nameWithoutPrefix, data)
+        val proxyDataSink = object : DataSink<T> {
+            override fun put(name: Name, data: Data<T>?) {
+                this@putAll.put(prefix + name, data)
+            }
+
+            override suspend fun update(name: Name, data: Data<T>?) {
+                this@putAll.update(prefix + name, data)
+            }
+
         }
 
         proxyDataSink.apply(block)
     }
 }
 
-@Deprecated("Use putAll", ReplaceWith("putAll(prefix, block)"))
-public inline fun <T> DataSink<T>.branch(
-    prefix: Name,
-    block: DataSink<T>.() -> Unit,
-): Unit = putAll(prefix, block)
-
 
 public inline fun <T> DataSink<T>.putAll(
     prefix: String,
     block: DataSink<T>.() -> Unit,
 ): Unit = putAll(prefix.asName(), block)
 
-@Deprecated("Use putAll", ReplaceWith("putAll(prefix, block)"))
-public inline fun <T> DataSink<T>.branch(
-    prefix: String,
-    block: DataSink<T>.() -> Unit,
-): Unit = putAll(prefix, block)
 
 public fun <T> DataSink<T>.put(name: String, value: Data<T>) {
     put(Name.parse(name), value)
@@ -56,20 +53,15 @@ public fun <T> DataSink<T>.putAll(name: Name, tree: DataTree<T>) {
     putAll(name) { putAll(tree.asSequence()) }
 }
 
-@Deprecated("Use putAll", ReplaceWith("putAll(name, tree)"))
-public fun <T> DataSink<T>.branch(name: Name, tree: DataTree<T>): Unit = putAll(name, tree)
 
 public fun <T> DataSink<T>.putAll(name: String, tree: DataTree<T>) {
     putAll(Name.parse(name)) { putAll(tree.asSequence()) }
 }
 
-@Deprecated("Use putAll", ReplaceWith("putAll(name, tree)"))
-public fun <T> DataSink<T>.branch(name: String, tree: DataTree<T>): Unit = putAll(name, tree)
-
 /**
  * Produce lazy [Data] and emit it into the [MutableDataTree]
  */
-public inline fun <reified T> DataSink<T>.put(
+public inline fun <reified T> DataSink<T>.putValue(
     name: String,
     meta: Meta = Meta.EMPTY,
     noinline producer: suspend () -> T,
@@ -78,7 +70,7 @@ public inline fun <reified T> DataSink<T>.put(
     put(name, data)
 }
 
-public inline fun <reified T> DataSink<T>.put(
+public inline fun <reified T> DataSink<T>.putValue(
     name: Name,
     meta: Meta = Meta.EMPTY,
     noinline producer: suspend () -> T,
@@ -90,24 +82,23 @@ public inline fun <reified T> DataSink<T>.put(
 /**
  * Emit static data with the fixed value
  */
-public inline fun <reified T> DataSink<T>.wrap(
+public inline fun <reified T> DataSink<T>.putValue(
     name: String,
-    data: T,
+    value: T,
     meta: Meta = Meta.EMPTY,
-): Unit = put(name, Data.static(data, meta))
+): Unit = put(name, Data.wrapValue(value, meta))
 
-public inline fun <reified T> DataSink<T>.wrap(
+public inline fun <reified T> DataSink<T>.putValue(
     name: Name,
-    data: T,
+    value: T,
     meta: Meta = Meta.EMPTY,
-): Unit = put(name, Data.static(data, meta))
+): Unit = put(name, Data.wrapValue(value, meta))
 
-public inline fun <reified T> DataSink<T>.wrap(
+public inline fun <reified T> DataSink<T>.putValue(
     name: String,
-    data: T,
-    mutableMeta: MutableMeta.() -> Unit,
-): Unit = put(Name.parse(name), Data.static(data, Meta(mutableMeta)))
-
+    value: T,
+    metaBuilder: MutableMeta.() -> Unit,
+): Unit = put(Name.parse(name), Data.wrapValue(value, Meta(metaBuilder)))
 
 public fun <T> DataSink<T>.putAll(sequence: Sequence<NamedData<T>>) {
     sequence.forEach {
@@ -123,18 +114,12 @@ public fun <T> DataSink<T>.putAll(tree: DataTree<T>) {
  * Copy given data set and mirror its changes to this [DataSink] in [this@setAndObserve]. Returns an update [Job]
  */
 public fun <T : Any> DataSink<T>.putAllAndWatch(
+    scope: CoroutineScope,
     branchName: Name = Name.EMPTY,
-    dataSet: ObservableDataTree<T>,
+    source: DataTree<T>,
 ): Job {
-    putAll(branchName, dataSet)
-    return dataSet.updates().onEach {
+    putAll(branchName, source)
+    return source.updates.onEach {
         put(branchName + it.name, it.data)
-    }.launchIn(dataSet.updatesScope)
-}
-
-
-@Deprecated("Use putAllAndWatch", ReplaceWith("putAllAndWatch(name, dataSet)"))
-public fun <T : Any> DataSink<T>.watchBranch(
-    name: Name,
-    dataSet: ObservableDataTree<T>,
-): Job = putAllAndWatch(name, dataSet)
\ No newline at end of file
+    }.launchIn(scope)
+}
\ No newline at end of file
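
The watcher now takes an explicit scope for relaying updates (sketch; `mirror` is a hypothetical helper):

    fun <T : Any> mirror(sink: DataSink<T>, source: DataTree<T>, scope: CoroutineScope): Job =
        sink.putAllAndWatch(scope, "mirrored".asName(), source)
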
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
index 8c7ce70e..ab54eb3e 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
@@ -1,7 +1,7 @@
 package space.kscience.dataforge.data
 
 import space.kscience.dataforge.meta.*
-import space.kscience.dataforge.misc.DFInternal
+import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.NameToken
 import kotlin.coroutines.CoroutineContext
@@ -68,7 +68,7 @@ internal fun Map<*, Data<*>>.joinMeta(): Meta = Meta {
     }
 }
 
-@DFInternal
+@UnsafeKType
 public fun <K, T, R> Map<K, Data<T>>.reduceToData(
     outputType: KType,
     meta: Meta = joinMeta(),
@@ -103,7 +103,7 @@ public inline fun <K, T, reified R> Map<K, Data<T>>.reduceToData(
 
 //Iterable operations
 
-@DFInternal
+@UnsafeKType
 public inline fun <T, R> Iterable<Data<T>>.reduceToData(
     outputType: KType,
     meta: Meta = joinMeta(),
@@ -118,7 +118,7 @@ public inline fun <T, R> Iterable<Data<T>>.reduceToData(
     transformation(map { it.awaitWithMeta() })
 }
 
-@OptIn(DFInternal::class)
+@OptIn(UnsafeKType::class)
 public inline fun <T, reified R> Iterable<Data<T>>.reduceToData(
     meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
@@ -141,7 +141,7 @@ public inline fun <T, reified R> Iterable<Data<T>>.foldToData(
 /**
  * Transform an [Iterable] of [NamedData] to a single [Data].
  */
-@DFInternal
+@UnsafeKType
 public inline fun <T, R> Iterable<NamedData<T>>.reduceNamedToData(
     outputType: KType,
     meta: Meta = joinMeta(),
@@ -156,7 +156,7 @@ public inline fun <T, R> Iterable<NamedData<T>>.reduceNamedToData(
     transformation(map { it.awaitWithMeta() })
 }
 
-@OptIn(DFInternal::class)
+@OptIn(UnsafeKType::class)
 public inline fun <T, reified R> Iterable<NamedData<T>>.reduceNamedToData(
     meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
@@ -181,7 +181,8 @@ public inline fun <T, reified R> Iterable<NamedData<T>>.foldNamedToData(
 
 //DataSet operations
 
-@DFInternal
+
+@UnsafeKType
 public suspend fun <T, R> DataTree<T>.transform(
     outputType: KType,
     metaTransform: MutableMeta.() -> Unit = {},
@@ -198,7 +199,7 @@ public suspend fun <T, R> DataTree<T>.transform(
     }
 }
 
-@OptIn(DFInternal::class)
+@OptIn(UnsafeKType::class)
 public suspend inline fun <T, reified R> DataTree<T>.transform(
     noinline metaTransform: MutableMeta.() -> Unit = {},
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
new file mode 100644
index 00000000..ac0492f0
--- /dev/null
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
@@ -0,0 +1,112 @@
+package space.kscience.dataforge.data
+
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.flow.MutableSharedFlow
+import kotlinx.coroutines.flow.mapNotNull
+import kotlinx.coroutines.sync.Mutex
+import kotlinx.coroutines.sync.withLock
+import space.kscience.dataforge.misc.UnsafeKType
+import space.kscience.dataforge.names.*
+import kotlin.reflect.KType
+import kotlin.reflect.typeOf
+
+
+private class FlatDataTree<T>(
+    override val dataType: KType,
+    private val dataSet: Map<Name, Data<T>>,
+    private val sourceUpdates: Flow<DataUpdate<T>>,
+    private val prefix: Name,
+) : DataTree<T> {
+    override val data: Data<T>? get() = dataSet[prefix]
+    override val items: Map<NameToken, FlatDataTree<T>>
+        get() = dataSet.keys
+            .filter { it.startsWith(prefix) && it.length > prefix.length }
+            .map { it.tokens[prefix.length] }
+            .associateWith { FlatDataTree(dataType, dataSet, sourceUpdates, prefix + it) }
+
+    override fun read(name: Name): Data<T>? = dataSet[prefix + name]
+
+    override val updates: Flow<DataUpdate<T>> =
+        sourceUpdates.mapNotNull { update ->
+            update.name.removeFirstOrNull(prefix)?.let { DataUpdate(dataType, it, update.data) }
+        }
+}
+
+/**
+ * A builder for static [DataTree].
+ */
+private class DataTreeBuilder<T>(
+    private val type: KType,
+    initialData: Map<Name, Data<T>> = emptyMap(),
+) : DataSink<T> {
+
+    private val map = HashMap<Name, Data<T>>(initialData)
+
+    private val mutex = Mutex()
+
+    private val updatesFlow = MutableSharedFlow<DataUpdate<T>>()
+
+    override fun put(name: Name, data: Data<T>?) {
+        if (data == null) {
+            map.remove(name)
+        } else {
+            map[name] = data
+        }
+    }
+
+    override suspend fun update(name: Name, data: Data<T>?) {
+        mutex.withLock {
+            if (data == null) {
+                map.remove(name)
+            } else {
+                map.put(name, data)
+            }
+        }
+        updatesFlow.emit(DataUpdate(data?.type ?: type, name, data))
+    }
+
+    public fun build(): DataTree<T> = FlatDataTree(type, map, updatesFlow, Name.EMPTY)
+}
+
+/**
+ * Create a static [DataTree]
+ */
+@UnsafeKType
+public fun <T> DataTree(
+    dataType: KType,
+    generator: DataSink<T>.() -> Unit,
+): DataTree<T> = DataTreeBuilder<T>(dataType).apply(generator).build()
+
+/**
+ * Create a static [DataTree] with an inferred type.
+ */
+@OptIn(UnsafeKType::class)
+public inline fun <reified T> DataTree(
+    noinline generator: DataSink<T>.() -> Unit,
+): DataTree<T> = DataTree(typeOf<T>(), generator)
+
+
+/**
+ * Represent this flat data map as a [DataTree] without copying it
+ */
+@UnsafeKType
+public fun <T> Map<Name, Data<T>>.asTree(type: KType): DataTree<T> =
+    DataTreeBuilder(type, this).build()
+
+/**
+ * Represent this flat data map as a [DataTree] without copying it
+ */
+@OptIn(UnsafeKType::class)
+public inline fun <reified T> Map<Name, Data<T>>.asTree(): DataTree<T> = asTree(typeOf<T>())
+
+
+@UnsafeKType
+public fun <T> Sequence<NamedData<T>>.toTree(type: KType): DataTree<T> =
+    DataTreeBuilder(type, associate { it.name to it.data }).build()
+
+
+/**
+ * Collect a sequence of [NamedData] to a [DataTree]
+ */
+@OptIn(UnsafeKType::class)
+public inline fun <reified T> Sequence<NamedData<T>>.toTree(): DataTree<T> = toTree(typeOf<T>())
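
The builder above backs the static `DataTree {}` factory used throughout the reworked tests. A short sketch of the intended usage (the values here are illustrative):

    import space.kscience.dataforge.data.DataTree
    import space.kscience.dataforge.data.putValue

    // Dots in names create nested branches, so "a.b" lives under branch "a".
    val tree = DataTree<String> {
        putValue("a.b", "leaf")
        putValue("a.c", "another leaf")
    }
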
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
similarity index 65%
rename from dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
rename to dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index f6eae12c..ef9b14ab 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -1,22 +1,24 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.delay
-import kotlinx.coroutines.runBlocking
+import kotlinx.coroutines.flow.collect
+import kotlinx.coroutines.flow.onEach
+import kotlinx.coroutines.flow.take
 import kotlinx.coroutines.test.runTest
-import org.junit.jupiter.api.Test
 import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.actions.invoke
 import space.kscience.dataforge.actions.mapping
 import space.kscience.dataforge.misc.DFExperimental
+import kotlin.test.Test
 import kotlin.test.assertEquals
+import kotlin.time.Duration.Companion.milliseconds
 
 @OptIn(DFExperimental::class)
 internal class ActionsTest {
     @Test
-    fun testStaticMapAction() = runTest {
+    fun testStaticMapAction() = runTest(timeout = 500.milliseconds) {
         val data: DataTree<Int> = DataTree {
             repeat(10) {
-                wrap(it.toString(), it)
+                putValue(it.toString(), it)
             }
         }
 
@@ -28,7 +30,7 @@ internal class ActionsTest {
     }
 
     @Test
-    fun testDynamicMapAction() = runBlocking {
+    fun testDynamicMapAction() = runTest(timeout = 500.milliseconds) {
         val source: MutableDataTree<Int> = MutableDataTree()
 
         val plusOne = Action.mapping<Int, Int> {
@@ -39,13 +41,9 @@ internal class ActionsTest {
 
 
         repeat(10) {
-            source.wrap(it.toString(), it)
+            source.putValue(it.toString(), it)
         }
-
-        delay(20)
-
-        source.close()
-        result.awaitClose()
+        result.updates.take(10).onEach { println(it.name) }.collect()
 
         assertEquals(2, result["1"]?.await())
     }
diff --git a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
new file mode 100644
index 00000000..b5738820
--- /dev/null
+++ b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
@@ -0,0 +1,71 @@
+package space.kscience.dataforge.data
+
+import kotlinx.coroutines.cancel
+import kotlinx.coroutines.flow.collect
+import kotlinx.coroutines.flow.take
+import kotlinx.coroutines.launch
+import kotlinx.coroutines.test.runTest
+import space.kscience.dataforge.names.asName
+import kotlin.test.Test
+import kotlin.test.assertEquals
+import kotlin.time.Duration.Companion.milliseconds
+
+
+internal class DataTreeBuilderTest {
+    @Test
+    fun testTreeBuild() = runTest(timeout = 500.milliseconds) {
+        val node = DataTree<Any> {
+            putAll("primary"){
+                putValue("a", "a")
+                putValue("b", "b")
+            }
+            putValue("c.d", "c.d")
+            putValue("c.f", "c.f")
+        }
+        assertEquals("a", node["primary.a"]?.await())
+        assertEquals("b", node["primary.b"]?.await())
+        assertEquals("c.d", node["c.d"]?.await())
+        assertEquals("c.f", node["c.f"]?.await())
+
+    }
+
+    @Test
+    fun testDataUpdate() = runTest(timeout = 500.milliseconds) {
+        val updateData = DataTree<Any> {
+            putAll("update") {
+                put("a", Data.wrapValue("a"))
+                put("b", Data.wrapValue("b"))
+            }
+        }
+
+        val node = DataTree<Any> {
+            putAll("primary") {
+                putValue("a", "a")
+                putValue("b", "b")
+            }
+            putValue("root", "root")
+            putAll(updateData)
+        }
+
+        assertEquals("a", node["update.a"]?.await())
+        assertEquals("a", node["primary.a"]?.await())
+    }
+
+    @Test
+    fun testDynamicUpdates() = runTest(timeout = 500.milliseconds) {
+        launch {
+            val subNode = MutableDataTree<Int>()
+
+            val rootNode = MutableDataTree<Int>() {
+                putAllAndWatch(this@launch, "sub".asName(), subNode)
+            }
+
+            repeat(10) {
+                subNode.putValue("value[$it]", it)
+            }
+            subNode.updates.take(10).collect()
+            assertEquals(9, rootNode["sub.value[9]"]?.await())
+            cancel()
+        }.join()
+    }
+}
\ No newline at end of file
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
index 9498c758..0cc81f7a 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
@@ -1,6 +1,5 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.filter
 import space.kscience.dataforge.misc.DFInternal
@@ -25,32 +24,40 @@ private fun <R> Data<*>.castOrNull(type: KType): Data<R>? =
 
 @Suppress("UNCHECKED_CAST")
 @DFInternal
-public fun <R> Sequence<NamedData<*>>.filterByDataType(type: KType): Sequence<NamedData<R>> =
+public fun <R> Sequence<DataUpdate<*>>.filterByDataType(type: KType): Sequence<NamedData<R>> =
     filter { it.type.isSubtypeOf(type) } as Sequence<NamedData<R>>
 
 @Suppress("UNCHECKED_CAST")
 @DFInternal
-public fun <R> Flow<NamedData<*>>.filterByDataType(type: KType): Flow<NamedData<R>> =
+public fun <R> Flow<DataUpdate<*>>.filterByDataType(type: KType): Flow<NamedData<R>> =
     filter { it.type.isSubtypeOf(type) } as Flow<NamedData<R>>
 
 /**
  * Select all data matching the given type and filters. Does not modify paths.
  *
- * @param predicate additional filtering condition based on item name and meta. By default, accepts all
+ * @param filter additional filtering condition based on item name and meta. By default, accepts all
  */
+@Suppress("UNCHECKED_CAST")
 @DFInternal
 public fun <R> DataTree<*>.filterByType(
     type: KType,
-    predicate: DataFilter = DataFilter.EMPTY,
-): DataTree<R> = asSequence().filterByDataType<R>(type).filterData(predicate).toTree(type)
+    branch: Name = Name.EMPTY,
+    filter: DataFilter = DataFilter.EMPTY,
+): DataTree<R> {
+    val filterWithType = DataFilter { name, meta, dataType ->
+        filter.accepts(name, meta, dataType) && dataType.isSubtypeOf(type)
+    }
+    return FilteredDataTree(this, filterWithType, branch, type) as DataTree<R>
+}
 
 /**
  * Select all data matching the given reified type. Does not modify paths.
  */
 @OptIn(DFInternal::class)
 public inline fun <reified R : Any> DataTree<*>.filterByType(
-    predicate: DataFilter = DataFilter.EMPTY,
-): DataTree<R> = filterByType(typeOf<R>(), predicate)
+    branch: Name = Name.EMPTY,
+    filter: DataFilter = DataFilter.EMPTY,
+): DataTree<R> = filterByType(typeOf<R>(), branch, filter = filter)
 
 /**
  * Select a single datum if it is present and of given [type]
@@ -63,25 +70,3 @@ public inline fun <reified R : Any> DataTree<*>.getByType(name: Name): NamedData
 
 public inline fun <reified R : Any> DataTree<*>.getByType(name: String): NamedData<R>? =
     this@getByType.getByType(typeOf<R>(), Name.parse(name))
-
-/**
- * Select all data matching given type and filters. Does not modify paths
- *
- * @param predicate additional filtering condition based on item name and meta. By default, accepts all
- */
-@DFInternal
-public fun <R> ObservableDataTree<*>.filterByType(
-    type: KType,
-    scope: CoroutineScope,
-    predicate: DataFilter = DataFilter.EMPTY,
-): ObservableDataTree<R> = asSequence()
-    .filterByDataType<R>(type)
-    .filterData(predicate)
-    .toObservableTree(type, scope, updates().filterByDataType<R>(type).filterData(predicate))
-
-
-@OptIn(DFInternal::class)
-public inline fun <reified R> ObservableDataTree<*>.filterByType(
-    scope: CoroutineScope,
-    predicate: DataFilter = DataFilter.EMPTY,
-): ObservableDataTree<R> = filterByType(typeOf<R>(), scope, predicate)
\ No newline at end of file
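
`filterByType` now returns a lazily filtered view (`FilteredDataTree`) instead of materialising a new tree. A usage sketch, where `mixedTree` is an assumed `DataTree<*>` with heterogeneous content:

    import space.kscience.dataforge.data.DataTree
    import space.kscience.dataforge.data.filterByType

    // Only items whose declared type is a subtype of String are visible in the view.
    fun selectStrings(mixedTree: DataTree<*>): DataTree<String> =
        mixedTree.filterByType<String>()
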
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
index 44602c53..8ce50a22 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
@@ -14,14 +14,14 @@ public infix fun <T : Any> String.put(data: Data<T>): Unit =
  * Append node
  */
 context(DataSink<T>)
-public infix fun <T : Any> String.put(dataSet: DataTree<T>): Unit =
+public infix fun <T : Any> String.putAll(dataSet: DataTree<T>): Unit =
     putAll(this, dataSet)
 
 /**
  * Build and append node
  */
 context(DataSink<T>)
-public infix fun <T : Any> String.put(
+public infix fun <T : Any> String.putAll(
     block: DataSink<T>.() -> Unit,
 ): Unit = putAll(Name.parse(this), block)
 
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
deleted file mode 100644
index 561e3aa0..00000000
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
+++ /dev/null
@@ -1,68 +0,0 @@
-package space.kscience.dataforge.data
-
-import kotlinx.coroutines.delay
-import kotlinx.coroutines.runBlocking
-import kotlinx.coroutines.test.runTest
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.asName
-import kotlin.test.Test
-import kotlin.test.assertEquals
-
-
-internal class DataTreeBuilderTest {
-    @Test
-    fun testTreeBuild() = runTest {
-        val node = DataTree<Any> {
-            "primary" put {
-                wrap("a", "a")
-                wrap("b", "b")
-            }
-            wrap("c.d", "c.d")
-            wrap("c.f", "c.f")
-        }
-        assertEquals("a", node["primary.a"]?.await())
-        assertEquals("b", node["primary.b"]?.await())
-        assertEquals("c.d", node["c.d"]?.await())
-        assertEquals("c.f", node["c.f"]?.await())
-
-    }
-
-    @OptIn(DFExperimental::class)
-    @Test
-    fun testDataUpdate() = runTest {
-        val updateData = DataTree<Any> {
-            "update" put {
-                "a" put Data.static("a")
-                "b" put Data.static("b")
-            }
-        }
-
-        val node = DataTree<Any> {
-            "primary" put {
-                wrap("a", "a")
-                wrap("b", "b")
-            }
-            wrap("root", "root")
-            putAll(updateData)
-        }
-
-        assertEquals("a", node["update.a"]?.await())
-        assertEquals("a", node["primary.a"]?.await())
-    }
-
-    @Test
-    fun testDynamicUpdates() = runBlocking {
-        val subNode = MutableDataTree<Int>()
-
-        val rootNode = MutableDataTree<Int> {
-            putAllAndWatch("sub".asName(), subNode)
-        }
-
-        repeat(10) {
-            subNode.wrap("value[$it]", it)
-        }
-
-        delay(20)
-        assertEquals(9, rootNode["sub.value[9]"]?.await())
-    }
-}
\ No newline at end of file
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
index e636de49..372b119f 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
@@ -10,6 +10,7 @@ import space.kscience.dataforge.meta.MetaSpec
 import space.kscience.dataforge.meta.descriptors.Described
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.misc.DfType
+import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.workspace.Task.Companion.TYPE
 import kotlin.reflect.KType
@@ -90,7 +91,8 @@ public fun <T : Any> Task(
         taskMeta: Meta,
     ): TaskResult<T> {
         //TODO use safe builder and check for external data on add and detects cycles
-        val dataset = MutableDataTree<T>(resultType, workspace.context).apply {
+        @OptIn(UnsafeKType::class)
+        val dataset = MutableDataTree<T>(resultType).apply {
             TaskResultBuilder(workspace, taskName, taskMeta, this).apply {
                 withContext(GoalExecutionRestriction() + workspace.goalLogger) {
                     builder()
@@ -98,7 +100,6 @@ public fun <T : Any> Task(
             }
         }
         return workspace.wrapResult(dataset, taskName, taskMeta)
-
     }
 }
 
@@ -117,6 +118,7 @@ public inline fun <reified T : Any> Task(
  * @param builder for resulting data set
  */
 
+
 @Suppress("FunctionName")
 public fun <T : Any, C : MetaRepr> Task(
     resultType: KType,
@@ -132,7 +134,8 @@ public fun <T : Any, C : MetaRepr> Task(
     ): TaskResult<T> = withContext(GoalExecutionRestriction() + workspace.goalLogger) {
         //TODO use safe builder and check for external data on add and detects cycles
         val taskMeta = configuration.toMeta()
-        val dataset = MutableDataTree<T>(resultType, this).apply {
+        @OptIn(UnsafeKType::class)
+        val dataset = MutableDataTree<T>(resultType).apply {
             TaskResultBuilder(workspace, taskName, taskMeta, this).apply { builder(configuration) }
         }
         workspace.wrapResult(dataset, taskName, taskMeta)
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
index 7aa94101..d4d4291a 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
@@ -4,7 +4,7 @@ import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.Job
 import kotlinx.coroutines.joinAll
 import kotlinx.coroutines.launch
-import space.kscience.dataforge.data.ObservableDataTree
+import space.kscience.dataforge.data.DataTree
 import space.kscience.dataforge.data.asSequence
 import space.kscience.dataforge.data.launch
 import space.kscience.dataforge.meta.Meta
@@ -17,16 +17,16 @@ import space.kscience.dataforge.names.Name
  * @param taskMeta The configuration of the task that produced the result
  */
 public data class TaskResult<T>(
-    public val content: ObservableDataTree<T>,
+    public val content: DataTree<T>,
     public val workspace: Workspace,
     public val taskName: Name,
     public val taskMeta: Meta,
-) : ObservableDataTree<T> by content
+) : DataTree<T> by content
 
 /**
  * Wrap data into [TaskResult]
  */
-public fun <T> Workspace.wrapResult(data: ObservableDataTree<T>, taskName: Name, taskMeta: Meta): TaskResult<T> =
+public fun <T> Workspace.wrapResult(data: DataTree<T>, taskName: Name, taskMeta: Meta): TaskResult<T> =
     TaskResult(data, this, taskName, taskMeta)
 
 /**
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
index e4e315fd..7247240b 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
@@ -2,7 +2,10 @@ package space.kscience.dataforge.workspace
 
 import kotlinx.coroutines.CoroutineScope
 import space.kscience.dataforge.context.ContextAware
-import space.kscience.dataforge.data.*
+import space.kscience.dataforge.data.Data
+import space.kscience.dataforge.data.DataTree
+import space.kscience.dataforge.data.asSequence
+import space.kscience.dataforge.data.get
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
 import space.kscience.dataforge.misc.DfType
@@ -26,7 +29,7 @@ public interface Workspace : ContextAware, Provider, CoroutineScope {
     /**
      * The whole data node for current workspace
      */
-    public val data: ObservableDataTree<*>
+    public val data: DataTree<*>
 
     /**
      * All targets associated with the workspace
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
index 4705c3b0..cf263a46 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
@@ -1,6 +1,5 @@
 package space.kscience.dataforge.workspace
 
-import kotlinx.coroutines.CoroutineScope
 import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.ContextBuilder
@@ -12,6 +11,7 @@ import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
 import space.kscience.dataforge.misc.DFBuilder
+import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
 import kotlin.collections.set
@@ -98,19 +98,19 @@ public inline fun <reified T : Any> TaskContainer.task(
 public inline fun <T : Any, reified R : Any> TaskContainer.action(
     selector: DataSelector<T>,
     action: Action<T, R>,
-    noinline metaTransform: MutableMeta.()-> Unit = {},
+    noinline metaTransform: MutableMeta.() -> Unit = {},
     noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
 ): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<R>>> =
     task(MetaDescriptor(descriptorBuilder)) {
-        result(action.execute(from(selector), taskMeta.copy(metaTransform)))
+        result(action.execute(from(selector), taskMeta.copy(metaTransform), workspace))
     }
 
 public class WorkspaceBuilder(
     private val parentContext: Context = Global,
-    private val coroutineScope: CoroutineScope = parentContext,
 ) : TaskContainer {
     private var context: Context? = null
-    private val data = MutableDataTree<Any?>(typeOf<Any?>(), coroutineScope)
+    @OptIn(UnsafeKType::class)
+    private val data = MutableDataTree<Any?>(typeOf<Any?>())
     private val targets: HashMap<String, Meta> = HashMap()
     private val tasks = HashMap<Name, Task<*>>()
     private var cache: WorkspaceCache? = null
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceImpl.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceImpl.kt
index 21c5e8c2..94839d62 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceImpl.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceImpl.kt
@@ -2,14 +2,14 @@ package space.kscience.dataforge.workspace
 
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.gather
-import space.kscience.dataforge.data.ObservableDataTree
+import space.kscience.dataforge.data.DataTree
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.names.Name
 
 
 internal class WorkspaceImpl internal constructor(
     override val context: Context,
-    override val data: ObservableDataTree<*>,
+    override val data: DataTree<*>,
     override val targets: Map<String, Meta>,
     tasks: Map<Name, Task<*>>,
     private val postProcess: suspend (TaskResult<*>) -> TaskResult<*>,
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/envelopeData.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/envelopeData.kt
index d54ff510..a74f8f05 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/envelopeData.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/envelopeData.kt
@@ -3,14 +3,14 @@ package space.kscience.dataforge.workspace
 import space.kscience.dataforge.data.Data
 import space.kscience.dataforge.data.await
 import space.kscience.dataforge.io.*
-import space.kscience.dataforge.misc.DFInternal
+import space.kscience.dataforge.misc.UnsafeKType
 import kotlin.reflect.typeOf
 
 
 /**
  * Convert an [Envelope] to [Data] via the given format. The actual parsing is done lazily.
  */
-@OptIn(DFInternal::class)
+@OptIn(UnsafeKType::class)
 public inline fun <reified T : Any> Envelope.toData(format: IOReader<T>): Data<T> = Data(typeOf<T>(), meta) {
     data?.readWith(format) ?: error("Can't convert envelope without data to Data")
 }
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
index 35fccc99..15565995 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
@@ -113,7 +113,7 @@ public suspend inline fun <T, reified R> TaskResultBuilder<R>.actionFrom(
     action: Action<T, R>,
     dependencyMeta: Meta = defaultDependencyMeta,
 ) {
-    this.putAll(action.execute(from(selector, dependencyMeta), dependencyMeta))
+    putAll(action.execute(from(selector, dependencyMeta), dependencyMeta, workspace))
 }
 
 
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
index a43657f8..da09c095 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
@@ -1,5 +1,6 @@
 package space.kscience.dataforge.workspace
 
+import kotlinx.coroutines.flow.filterIsInstance
 import kotlinx.coroutines.flow.map
 import kotlinx.io.*
 import kotlinx.serialization.ExperimentalSerializationApi
@@ -96,7 +97,7 @@ public class FileWorkspaceCache(public val cacheDirectory: Path) : WorkspaceCach
 
 
         val cachedTree = result.asSequence().map { cacheOne(it) }
-            .toObservableTree(result.dataType, result.workspace, result.updates().map { cacheOne(it) })
+            .toTree(result.dataType, result.updates.filterIsInstance<NamedData<T>>().map { cacheOne(it) })
 
         return result.workspace.wrapResult(cachedTree, result.taskName, result.taskMeta)
     }
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
index a3792231..f3b2ee15 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
@@ -1,5 +1,6 @@
 package space.kscience.dataforge.workspace
 
+import kotlinx.coroutines.flow.filterIsInstance
 import kotlinx.coroutines.flow.map
 import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
@@ -31,7 +32,7 @@ public class InMemoryWorkspaceCache : WorkspaceCache {
 
 
         val cachedTree = result.asSequence().map { cacheOne(it) }
-            .toObservableTree(result.dataType, result.workspace, result.updates().map { cacheOne(it) })
+            .toTree(result.dataType, result.updates.filterIsInstance<NamedData<T>>().map { cacheOne(it) })
 
         return result.workspace.wrapResult(cachedTree, result.taskName, result.taskMeta)
     }
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt
index e0a4bdef..37dafab9 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt
@@ -7,14 +7,12 @@ import space.kscience.dataforge.data.StaticData
 import space.kscience.dataforge.io.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.copy
+import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.NameToken
 import space.kscience.dataforge.names.asName
 import space.kscience.dataforge.names.plus
-import java.nio.file.Files
-import java.nio.file.Path
-import java.nio.file.StandardWatchEventKinds
-import java.nio.file.WatchEvent
+import java.nio.file.*
 import java.nio.file.attribute.BasicFileAttributes
 import java.nio.file.spi.FileSystemProvider
 import kotlin.io.path.*
@@ -166,15 +164,25 @@ public fun DataSink<Binary>.monitorFiles(
  * @param resources The names of the resources to read.
  * @param classLoader The class loader to use for loading the resources. By default, it uses the current thread's context class loader.
  */
+@DFExperimental
 public fun DataSink<Binary>.resources(
     io: IOPlugin,
-    vararg resources: String,
+    resource: String,
+    vararg otherResources: String,
     classLoader: ClassLoader = Thread.currentThread().contextClassLoader,
 ) {
-    resources.forEach { resource ->
-        val path = classLoader.getResource(resource)?.toURI()?.toPath() ?: error(
-            "Resource with name $resource is not resolved"
+    //create a file system if necessary
+    val uri = Thread.currentThread().contextClassLoader.getResource("common")!!.toURI()
+    try {
+        uri.toPath()
+    } catch (e: FileSystemNotFoundException) {
+        FileSystems.newFileSystem(uri, mapOf("create" to "true"))
+    }
+
+    listOf(resource,*otherResources).forEach { r ->
+        val path = classLoader.getResource(r)?.toURI()?.toPath() ?: error(
+            "Resource with name $r is not resolved"
         )
-        files(io, resource.asName(), path)
+        files(io, r.asName(), path)
     }
 }
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
index e5c2c230..7a6a8202 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
@@ -3,7 +3,7 @@ package space.kscience.dataforge.workspace
 import kotlinx.coroutines.coroutineScope
 import kotlinx.coroutines.test.runTest
 import org.junit.jupiter.api.Test
-import space.kscience.dataforge.data.wrap
+import space.kscience.dataforge.data.putValue
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.boolean
 import space.kscience.dataforge.meta.get
@@ -22,7 +22,7 @@ internal class CachingWorkspaceTest {
             data {
                 //statically initialize data
                 repeat(5) {
-                    wrap("myData[$it]", it)
+                    putValue("myData[$it]", it)
                 }
             }
 
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
index d611b1c8..cd38f809 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
@@ -47,7 +47,7 @@ class DataPropagationTest {
         }
         data {
             repeat(100) {
-                wrap("myData[$it]", it)
+                putValue("myData[$it]", it)
             }
         }
     }
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
index a06f24a5..5466da76 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
@@ -24,11 +24,11 @@ import kotlin.test.assertEquals
 class FileDataTest {
     val dataNode = DataTree<String> {
         putAll("dir") {
-            wrap("a", "Some string") {
+            putValue("a", "Some string") {
                 "content" put "Some string"
             }
         }
-        wrap("b", "root data")
+        putValue("b", "root data")
 //        meta {
 //            "content" put "This is root meta node"
 //        }
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
index 0f16b1c8..0cf4f401 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
@@ -3,11 +3,11 @@ package space.kscience.dataforge.workspace
 import kotlinx.coroutines.ExperimentalCoroutinesApi
 import kotlinx.coroutines.test.runTest
 import org.junit.jupiter.api.Test
-import space.kscience.dataforge.data.wrap
+import space.kscience.dataforge.data.putValue
 import space.kscience.dataforge.misc.DFExperimental
 import java.nio.file.Files
 
-@OptIn(ExperimentalCoroutinesApi::class,DFExperimental::class)
+@OptIn(ExperimentalCoroutinesApi::class, DFExperimental::class)
 class FileWorkspaceCacheTest {
 
     @Test
@@ -16,7 +16,7 @@ class FileWorkspaceCacheTest {
             data {
                 //statically initialize data
                 repeat(5) {
-                    wrap("myData[$it]", it)
+                    putValue("myData[$it]", it)
                 }
             }
             fileCache(Files.createTempDirectory("dataforge-temporary-cache"))
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
index b49b9d54..837b61bc 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
@@ -6,7 +6,6 @@ package space.kscience.dataforge.workspace
 import kotlinx.coroutines.ExperimentalCoroutinesApi
 import kotlinx.coroutines.runBlocking
 import kotlinx.coroutines.test.runTest
-import org.junit.jupiter.api.Timeout
 import space.kscience.dataforge.context.*
 import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.*
@@ -16,6 +15,7 @@ import space.kscience.dataforge.names.plus
 import kotlin.test.Test
 import kotlin.test.assertEquals
 import kotlin.test.assertTrue
+import kotlin.time.Duration.Companion.milliseconds
 
 
 /**
@@ -62,7 +62,7 @@ internal class SimpleWorkspaceTest {
         data {
             //statically initialize data
             repeat(100) {
-                wrap("myData[$it]", it)
+                putValue("myData[$it]", it)
             }
         }
 
@@ -148,18 +148,16 @@ internal class SimpleWorkspaceTest {
     }
 
     @Test
-    @Timeout(1)
-    fun testWorkspace() = runTest {
+    fun testWorkspace() = runTest(timeout = 10.milliseconds) {
         val node = workspace.produce("sum")
         val res = node.asSequence().single()
         assertEquals(328350, res.await())
     }
 
     @Test
-    @Timeout(1)
-    fun testMetaPropagation() = runTest {
+    fun testMetaPropagation() = runTest(timeout = 10.milliseconds) {
         val node = workspace.produce("sum") { "testFlag" put true }
-        val res = node.single().await()
+        val res = node.data!!.await()
     }
 
     @Test
@@ -188,7 +186,7 @@ internal class SimpleWorkspaceTest {
         val node = workspace.produce("filterOne") {
             "name" put "myData[12]"
         }
-        assertEquals(12, node.single().await())
+        assertEquals(12, node.data!!.await())
     }
 
 }
\ No newline at end of file

From 1c6045324423463e4df2852c2160d80efb3a2b10 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 16 Apr 2024 15:35:21 +0300
Subject: [PATCH 40/77] Fix NameToken.parse

---
 CHANGELOG.md                                  |  4 +++-
 .../kscience/dataforge/names/NameToken.kt     |  2 +-
 .../kscience/dataforge/names/NameTest.kt      | 20 +++++++++++++++----
 3 files changed, 20 insertions(+), 6 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index a7c134e6..f709b8be 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,7 @@
 
 ### Added
 - Name index comparator
+- Specialized ByteArrayValue
 
 ### Changed
 - DataSink `branch` is replaced with `putAll` to avoid confusion with DataTree methods
@@ -15,7 +16,8 @@
 ### Fixed
 - `listOfScheme` and `listOfConvertable` delegates provide the correct item order.
 - Scheme meta setter works with the proper sub-branch.
-- 
+- NameToken.parse improperly handled indices
+
 
 ### Security
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt
index 0dc83c57..83752b9a 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt
@@ -69,7 +69,7 @@ public class NameToken(public val body: String, public val index: String? = null
         public fun parse(string: String): NameToken {
             val body = string.substringBefore('[')
             val index = string.substringAfter('[', "")
-            if (index.isNotEmpty() && index.endsWith(']')) error("NameToken with index must end with ']'")
+            if (index.isNotEmpty() && !index.endsWith(']')) error("NameToken with index must end with ']'")
             return NameToken(body, index.removeSuffix("]"))
         }
     }
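
The previous guard was inverted: a well-formed token such as `token[index]` was rejected (its raw index part ends with `]`, which triggered the `error` call), while a malformed `token[22` passed silently. A quick check of the fixed behaviour, mirroring the new test below:

    import space.kscience.dataforge.names.NameToken

    val token = NameToken.parse("token[index]")
    check(token.body == "token" && token.index == "index")

    // An unterminated index is now rejected.
    check(runCatching { NameToken.parse("token[22") }.isFailure)
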
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/names/NameTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/names/NameTest.kt
index a5bdf3fc..db630487 100644
--- a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/names/NameTest.kt
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/names/NameTest.kt
@@ -1,9 +1,6 @@
 package space.kscience.dataforge.names
 
-import kotlin.test.Test
-import kotlin.test.assertEquals
-import kotlin.test.assertFalse
-import kotlin.test.assertTrue
+import kotlin.test.*
 
 class NameTest {
     @Test
@@ -50,4 +47,19 @@ class NameTest {
         val name = Name.parse("a.b.c")
         assertEquals("a.b".parseAsName(), name.cutLast())
     }
+
+    @Test
+    fun tokenParseTest(){
+        val token1 = NameToken.parse("token[index]")
+        assertEquals("token", token1.body)
+        assertEquals("index", token1.index)
+
+        val token2 = NameToken.parse("token-body")
+        assertEquals("token-body", token2.body)
+        assertEquals("", token2.index)
+
+        assertFails {
+            NameToken.parse("token[22")
+        }
+    }
 }
\ No newline at end of file

From a720b63d70bbe0cc32cfc21c057cce4d62f444c3 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 16 Apr 2024 15:35:59 +0300
Subject: [PATCH 41/77] Add ByteArrayValue

---
 build.gradle.kts                              |  2 +-
 .../kscience/dataforge/meta/MutableMeta.kt    |  4 ++++
 .../dataforge/values/DoubleArrayValue.kt      | 20 +++++++++++++++++++
 3 files changed, 25 insertions(+), 1 deletion(-)

diff --git a/build.gradle.kts b/build.gradle.kts
index d4e22686..b2f7e5e9 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -8,7 +8,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.8.1"
+    version = "0.8.2"
 }
 
 subprojects {
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
index 6e4a5daa..69e221f5 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
@@ -128,6 +128,10 @@ public interface MutableMeta : Meta, MutableMetaProvider {
         setValue(Name.parse(this), array.asValue())
     }
 
+    public infix fun String.put(array: ByteArray) {
+        setValue(Name.parse(this), array.asValue())
+    }
+
     public infix fun String.put(repr: MetaRepr) {
         set(Name.parse(this), repr.toMeta())
     }
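
With the new overload a byte array can be written directly into a meta builder. A minimal sketch (the field name is illustrative):

    import space.kscience.dataforge.meta.Meta

    val meta = Meta {
        // Stored as a specialized array value rather than a list of individual numbers.
        "payload" put byteArrayOf(1, 2, 3)
    }
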
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/values/DoubleArrayValue.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/values/DoubleArrayValue.kt
index 59ccecae..30bc2adc 100644
--- a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/values/DoubleArrayValue.kt
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/values/DoubleArrayValue.kt
@@ -1,4 +1,24 @@
 package space.kscience.dataforge.values
 
+import space.kscience.dataforge.meta.DoubleArrayValue
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.doubleArray
+import space.kscience.dataforge.meta.get
+import kotlin.test.Test
+import kotlin.test.assertEquals
+import kotlin.test.assertTrue
+
 class DoubleArrayValue {
+    @Test
+    fun doubleArrayWriteRead(){
+        val meta = Meta{
+            "doubleArray" put doubleArrayOf(1.0,2.0,3.0)
+        }
+
+        assertTrue {
+            meta["doubleArray"]?.value is DoubleArrayValue
+        }
+
+        assertEquals(2.0, meta["doubleArray"].doubleArray?.get(1))
+    }
 }
\ No newline at end of file

From 28f4beb3485cf07df493347cb7a317e1217dfcc4 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 23 Apr 2024 18:59:12 +0300
Subject: [PATCH 42/77] Fix all tests

---
 CHANGELOG.md                                  |   4 +-
 build.gradle.kts                              |   2 +-
 dataforge-context/api/dataforge-context.api   |   2 -
 .../descriptors/reflectiveDescriptors.kt      |  22 +-
 .../descriptors/TestAutoDescriptors.kt        |  13 +-
 .../kscience/dataforge/actions/MapAction.kt   |   6 +-
 .../kscience/dataforge/data/DataFilter.kt     |   2 +-
 .../space/kscience/dataforge/data/DataSink.kt |  10 +-
 .../kscience/dataforge/data/dataBuilders.kt   |  23 +-
 .../kscience/dataforge/data/ActionsTest.kt    |   2 +-
 .../dataforge/data/DataTreeBuilderTest.kt     |  32 +--
 .../space/kscience/dataforge/io/IOPlugin.kt   |   6 +-
 dataforge-meta/api/dataforge-meta.api         | 246 ++++++++++--------
 .../kscience/dataforge/meta/MetaDelegate.kt   | 116 +++++----
 .../dataforge/meta/MutableMetaDelegate.kt     | 129 +++++----
 .../dataforge/workspace/CachingAction.kt      |  19 ++
 .../dataforge/workspace/FileWorkspaceCache.kt |  23 +-
 .../workspace/InMemoryWorkspaceCache.kt       |  17 +-
 .../workspace/SimpleWorkspaceTest.kt          |   8 +-
 19 files changed, 409 insertions(+), 273 deletions(-)
 create mode 100644 dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f709b8be..a56e56e6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,6 +8,7 @@
 
 ### Changed
 - DataSink `branch` is replaced with `putAll` to avoid confusion with DataTree methods
+- Meta delegate now uses a specific class that has a descriptor
 
 ### Deprecated
 
@@ -16,7 +17,8 @@
 ### Fixed
 - `listOfScheme` and `listOfConvertable` delegates provide the correct item order.
 - Scheme meta setter works with the proper sub-branch.
-- NameToken.parse improperly handled indices
+- NameToken.parse improperly handled indices.
+- Proper data handling for cache.
 
 
 ### Security
diff --git a/build.gradle.kts b/build.gradle.kts
index b2f7e5e9..78dc6d35 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -8,7 +8,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.8.2"
+    version = "0.8.2-dev-1"
 }
 
 subprojects {
diff --git a/dataforge-context/api/dataforge-context.api b/dataforge-context/api/dataforge-context.api
index 12bca176..eb94f89d 100644
--- a/dataforge-context/api/dataforge-context.api
+++ b/dataforge-context/api/dataforge-context.api
@@ -265,8 +265,6 @@ public abstract interface annotation class space/kscience/dataforge/descriptors/
 }
 
 public final class space/kscience/dataforge/descriptors/ReflectiveDescriptorsKt {
-	public static final fun forClass (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor$Companion;Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
-	public static synthetic fun forClass$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor$Companion;Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
 }
 
 public final class space/kscience/dataforge/properties/MetaAsFlowKt {
diff --git a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt
index 1f2db7fc..9c36410a 100644
--- a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt
+++ b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt
@@ -10,11 +10,11 @@ import space.kscience.dataforge.meta.ValueType
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
 import space.kscience.dataforge.meta.descriptors.node
+import space.kscience.dataforge.misc.DFExperimental
 import java.net.URL
 import kotlin.reflect.KClass
 import kotlin.reflect.full.isSubclassOf
 import kotlin.reflect.full.memberProperties
-import kotlin.reflect.typeOf
 
 
 /**
@@ -58,9 +58,9 @@ private fun MetaDescriptorBuilder.loadDescriptorFromResource(resource: Descripto
     }
 }
 
-
-public fun <T : Any> MetaDescriptor.Companion.forClass(
-    kClass: KClass<T>,
+@DFExperimental
+public fun MetaDescriptor.Companion.forClass(
+    kClass: KClass<out Any>,
     mod: MetaDescriptorBuilder.() -> Unit = {},
 ): MetaDescriptor = MetaDescriptor {
     when {
@@ -79,7 +79,7 @@ public fun <T : Any> MetaDescriptor.Companion.forClass(
             is DescriptorUrl -> loadDescriptorFromUrl(URL(it.url))
         }
     }
-    kClass.memberProperties.forEach { property ->
+    kClass.memberProperties.forEach { property->
 
         var flag = false
 
@@ -88,6 +88,12 @@ public fun <T : Any> MetaDescriptor.Companion.forClass(
             (property.returnType.classifier as? KClass<*>)?.let {
                 from(forClass(it))
             }
+//
+//            (property.getDelegate(Unit) as? MetaDelegate<*>)?.descriptor?.let {
+//                from(it)
+//                flag = true
+//            }
+
             property.annotations.forEach {
                 when (it) {
                     is Description -> {
@@ -119,6 +125,6 @@ public fun <T : Any> MetaDescriptor.Companion.forClass(
     mod()
 }
 
-@Suppress("UNCHECKED_CAST")
-public inline fun <reified T : Scheme> SchemeSpec<T>.autoDescriptor( noinline mod: MetaDescriptorBuilder.() -> Unit = {}): MetaDescriptor =
-    MetaDescriptor.forClass(typeOf<T>().classifier as KClass<T>, mod)
\ No newline at end of file
+@DFExperimental
+public inline fun <reified T : Scheme> SchemeSpec<T>.autoDescriptor(noinline mod: MetaDescriptorBuilder.() -> Unit = {}): MetaDescriptor =
+    MetaDescriptor.forClass(T::class, mod)
\ No newline at end of file
diff --git a/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt b/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt
index 617e85cc..df849ce5 100644
--- a/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt
+++ b/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt
@@ -1,3 +1,5 @@
+@file:OptIn(DFExperimental::class)
+
 package space.kscience.dataforge.descriptors
 
 import kotlinx.serialization.encodeToString
@@ -8,8 +10,9 @@ import space.kscience.dataforge.meta.SchemeSpec
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.meta.int
 import space.kscience.dataforge.meta.string
+import space.kscience.dataforge.misc.DFExperimental
 
-private class TestScheme: Scheme(){
+private class TestScheme : Scheme() {
 
     @Description("A")
     val a by string()
@@ -17,15 +20,17 @@ private class TestScheme: Scheme(){
     @Description("B")
     val b by int()
 
-    companion object: SchemeSpec<TestScheme>(::TestScheme){
+    val c by int()
+
+    companion object : SchemeSpec<TestScheme>(::TestScheme) {
         override val descriptor: MetaDescriptor = autoDescriptor()
     }
 }
 
 class TestAutoDescriptors {
     @Test
-    fun autoDescriptor(){
+    fun autoDescriptor() {
         val autoDescriptor = MetaDescriptor.forClass(TestScheme::class)
-        println(Json{prettyPrint = true}.encodeToString(autoDescriptor))
+        println(Json { prettyPrint = true }.encodeToString(autoDescriptor))
     }
 }
\ No newline at end of file
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index a1ca8e59..08bf08e9 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -48,8 +48,8 @@ public class MapActionBuilder<T, R>(
     public inline fun <reified R1 : R> result(noinline f: suspend ActionEnv.(T) -> R1): Unit = result(typeOf<R1>(), f)
 }
 
-@PublishedApi
-internal class MapAction<T, R>(
+@UnsafeKType
+public class MapAction<T, R>(
     outputType: KType,
     private val block: MapActionBuilder<T, R>.() -> Unit,
 ) : AbstractAction<T, R>(outputType) {
@@ -78,7 +78,6 @@ internal class MapAction<T, R>(
         //getting new meta
         val newMeta = builder.meta.seal()
 
-        @OptIn(UnsafeKType::class)
         val newData = Data(builder.outputType, newMeta, dependencies = listOf(data)) {
             builder.result(env, data.await())
         }
@@ -106,6 +105,7 @@ internal class MapAction<T, R>(
  * A one-to-one mapping action
  */
 
+@OptIn(UnsafeKType::class)
 public inline fun <T, reified R> Action.Companion.mapping(
     noinline builder: MapActionBuilder<T, R>.() -> Unit,
 ): Action<T, R> = MapAction(typeOf<R>(), builder)
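
`MapAction` is now public but requires opting in to `@UnsafeKType`; the type-safe entry point remains the `mapping` factory. A sketch of a one-to-one action, matching the pattern used in `ActionsTest`:

    import space.kscience.dataforge.actions.Action
    import space.kscience.dataforge.actions.mapping
    import space.kscience.dataforge.misc.DFExperimental

    // An action that increments every Int datum while keeping names and meta.
    @OptIn(DFExperimental::class)
    val plusOne = Action.mapping<Int, Int> {
        result { it + 1 }
    }
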
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
index bc66e910..38174e50 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
@@ -61,7 +61,7 @@ internal class FilteredDataTree<T>(
 
     override val data: Data<T>?
         get() = source[branch].takeIf {
-            filter.accepts(Name.EMPTY, data?.meta, data?.type ?: dataType)
+            filter.accepts(Name.EMPTY, it?.meta, it?.type ?: dataType)
         }
 
     override val items: Map<NameToken, DataTree<T>>
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
index 9d860310..6daeae98 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
@@ -2,6 +2,7 @@ package space.kscience.dataforge.data
 
 import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.Job
+import kotlinx.coroutines.channels.BufferOverflow
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.MutableSharedFlow
 import kotlinx.coroutines.flow.mapNotNull
@@ -80,7 +81,8 @@ private class MutableDataTreeRoot<T>(
     override val dataType: KType,
 ) : MutableDataTree<T> {
 
-    override val updates = MutableSharedFlow<DataUpdate<T>>()
+    override val updates = MutableSharedFlow<DataUpdate<T>>(100, onBufferOverflow = BufferOverflow.DROP_LATEST)
+
 
     inner class MutableDataTreeBranch(val branchName: Name) : MutableDataTree<T> {
 
@@ -108,10 +110,10 @@ private class MutableDataTreeRoot<T>(
         override suspend fun update(name: Name, data: Data<T>?) {
             if (name.isEmpty()) {
                 this.data = data
+                this@MutableDataTreeRoot.updates.emit(DataUpdate(data?.type ?: dataType, branchName + name, data))
             } else {
                 getOrCreateItem(name.first()).update(name.cutFirst(), data)
             }
-            this@MutableDataTreeRoot.updates.emit(DataUpdate(data?.type ?: dataType, branchName + name, data))
         }
 
     }
@@ -122,7 +124,7 @@ private class MutableDataTreeRoot<T>(
     override val items = HashMap<NameToken, MutableDataTree<T>>()
 
     override fun getOrCreateItem(token: NameToken): MutableDataTree<T> = items.getOrPut(token) {
-        MutableDataTreeRoot(dataType)
+        MutableDataTreeBranch(token.asName())
     }
 
     override fun set(token: NameToken, data: Data<T>?) {
@@ -133,10 +135,10 @@ private class MutableDataTreeRoot<T>(
     override suspend fun update(name: Name, data: Data<T>?) {
         if (name.isEmpty()) {
             this.data = data
+            updates.emit(DataUpdate(data?.type ?: dataType, name, data))
         } else {
             getOrCreateItem(name.first()).update(name.cutFirst(), data)
         }
-        updates.emit(DataUpdate(data?.type ?: dataType, name, data))
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
index 8ec3e361..649cfd19 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
@@ -6,10 +6,7 @@ import kotlinx.coroutines.flow.launchIn
 import kotlinx.coroutines.flow.onEach
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.asName
-import space.kscience.dataforge.names.isEmpty
-import space.kscience.dataforge.names.plus
+import space.kscience.dataforge.names.*
 
 
 public fun <T> DataSink<T>.put(value: NamedData<T>) {
@@ -83,13 +80,13 @@ public inline fun <reified T> DataSink<T>.putValue(
  * Emit static data with the fixed value
  */
 public inline fun <reified T> DataSink<T>.putValue(
-    name: String,
+    name: Name,
     value: T,
     meta: Meta = Meta.EMPTY,
 ): Unit = put(name, Data.wrapValue(value, meta))
 
 public inline fun <reified T> DataSink<T>.putValue(
-    name: Name,
+    name: String,
     value: T,
     meta: Meta = Meta.EMPTY,
 ): Unit = put(name, Data.wrapValue(value, meta))
@@ -100,6 +97,18 @@ public inline fun <reified T> DataSink<T>.putValue(
     metaBuilder: MutableMeta.() -> Unit,
 ): Unit = put(Name.parse(name), Data.wrapValue(value, Meta(metaBuilder)))
 
+public suspend inline fun <reified T> DataSink<T>.updateValue(
+    name: Name,
+    value: T,
+    meta: Meta = Meta.EMPTY,
+): Unit = update(name, Data.wrapValue(value, meta))
+
+public suspend inline fun <reified T> DataSink<T>.updateValue(
+    name: String,
+    value: T,
+    meta: Meta = Meta.EMPTY,
+): Unit = update(name.parseAsName(), Data.wrapValue(value, meta))
+
 public fun <T> DataSink<T>.putAll(sequence: Sequence<NamedData<T>>) {
     sequence.forEach {
         put(it.name, it.data)
@@ -120,6 +129,6 @@ public fun <T : Any> DataSink<T>.putAllAndWatch(
 ): Job {
     putAll(branchName, source)
     return source.updates.onEach {
-        put(branchName + it.name, it.data)
+        update(branchName + it.name, it.data)
     }.launchIn(scope)
 }
\ No newline at end of file
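The new `updateValue` helpers complement `putValue`: both wrap a plain value into `Data`, but `updateValue` goes through the suspending `update` path which, per the `DataSink.kt` change above, is the one that emits to the `updates` flow. A hypothetical sketch (the tree and entry names are illustrative, not taken from the patch):

```kotlin
import space.kscience.dataforge.data.*

suspend fun fillTree() {
    val tree = MutableDataTree<Int>()

    // Plain put: stores the wrapped value.
    tree.putValue("static", 1)

    // Suspending update: stores the wrapped value and notifies tree.updates subscribers.
    tree.updateValue("dynamic", 2)
}
```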
diff --git a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index ef9b14ab..477ca592 100644
--- a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -41,7 +41,7 @@ internal class ActionsTest {
 
 
         repeat(10) {
-            source.putValue(it.toString(), it)
+            source.updateValue(it.toString(), it)
         }
         result.updates.take(10).onEach { println(it.name) }.collect()
 
diff --git a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
index b5738820..760aeec2 100644
--- a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
+++ b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
@@ -1,9 +1,8 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.cancel
+import kotlinx.coroutines.Job
 import kotlinx.coroutines.flow.collect
 import kotlinx.coroutines.flow.take
-import kotlinx.coroutines.launch
 import kotlinx.coroutines.test.runTest
 import space.kscience.dataforge.names.asName
 import kotlin.test.Test
@@ -15,7 +14,7 @@ internal class DataTreeBuilderTest {
     @Test
     fun testTreeBuild() = runTest(timeout = 500.milliseconds) {
         val node = DataTree<Any> {
-            putAll("primary"){
+            putAll("primary") {
                 putValue("a", "a")
                 putValue("b", "b")
             }
@@ -53,19 +52,22 @@ internal class DataTreeBuilderTest {
 
     @Test
     fun testDynamicUpdates() = runTest(timeout = 500.milliseconds) {
-        launch {
-            val subNode = MutableDataTree<Int>()
+        var job: Job? = null
 
-            val rootNode = MutableDataTree<Int>() {
-                putAllAndWatch(this@launch, "sub".asName(), subNode)
-            }
+        val subNode = MutableDataTree<Int>()
 
-            repeat(10) {
-                subNode.putValue("value[$it]", it)
-            }
-            subNode.updates.take(10).collect()
-            assertEquals(9, rootNode["sub.value[9]"]?.await())
-            cancel()
-        }.join()
+        val rootNode = MutableDataTree<Int>() {
+            job = putAllAndWatch(this@runTest, "sub".asName(), subNode)
+        }
+
+        repeat(10) {
+            subNode.updateValue("value[$it]", it)
+        }
+
+        rootNode.updates.take(10).collect()
+        assertEquals(9, rootNode["sub.value[9]"]?.await())
+        assertEquals(8, rootNode["sub.value[8]"]?.await())
+
+        job?.cancel()
     }
 }
\ No newline at end of file
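The rewritten test reflects that `putAllAndWatch` launches its collector in the supplied scope and returns that collector's `Job`, so the caller cancels the job directly instead of wrapping everything in a separate `launch`. A minimal sketch of the same pattern, assuming `MutableDataTree` can be used as the sink receiver as in the test above (function and names are illustrative):

```kotlin
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job
import space.kscience.dataforge.data.*
import space.kscience.dataforge.names.asName

fun CoroutineScope.mirrorSubTree(root: MutableDataTree<Int>, source: MutableDataTree<Int>): Job =
    // Copies the current content of `source` under "sub" and keeps forwarding source.updates
    // into `root` until the returned Job is cancelled.
    root.putAllAndWatch(this, "sub".asName(), source)
```

Because the collector lives in the supplied scope, callers (like the updated test) are expected to cancel the returned job once mirroring is no longer needed.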
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt
index 0d79da4d..f431a731 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt
@@ -6,7 +6,7 @@ import space.kscience.dataforge.io.IOFormatFactory.Companion.IO_FORMAT_TYPE
 import space.kscience.dataforge.io.MetaFormatFactory.Companion.META_FORMAT_TYPE
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.string
-import space.kscience.dataforge.misc.DFInternal
+import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
@@ -19,11 +19,11 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
     }
 
     @Suppress("UNCHECKED_CAST")
-    @DFInternal
+    @UnsafeKType
     public fun <T> resolveIOFormat(type: KType, meta: Meta): IOFormat<T>? =
         ioFormatFactories.singleOrNull { it.type == type }?.build(context, meta) as? IOFormat<T>
 
-    @OptIn(DFInternal::class)
+    @OptIn(UnsafeKType::class)
     public inline fun <reified T> resolveIOFormat(meta: Meta = Meta.EMPTY): IOFormat<T>? =
         resolveIOFormat(typeOf<T>(), meta)
 
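With this annotation change, only the raw `KType`-based overload requires an explicit `UnsafeKType` opt-in; the reified overload handles it internally. A hypothetical call site based on the signatures shown above (whether a format for the requested type is registered depends on the context, so the result stays nullable):

```kotlin
import space.kscience.dataforge.io.IOFormat
import space.kscience.dataforge.io.IOPlugin
import space.kscience.dataforge.meta.Meta

fun resolveMetaFormat(io: IOPlugin): IOFormat<Meta>? =
    // Reified overload: no @UnsafeKType opt-in is needed at the call site.
    io.resolveIOFormat<Meta>()
```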
diff --git a/dataforge-meta/api/dataforge-meta.api b/dataforge-meta/api/dataforge-meta.api
index f9834c54..620e64d4 100644
--- a/dataforge-meta/api/dataforge-meta.api
+++ b/dataforge-meta/api/dataforge-meta.api
@@ -1,3 +1,15 @@
+public final class space/kscience/dataforge/meta/ByteArrayValue : java/lang/Iterable, kotlin/jvm/internal/markers/KMappedMarker, space/kscience/dataforge/meta/Value {
+	public fun <init> ([B)V
+	public fun equals (Ljava/lang/Object;)Z
+	public fun getList ()Ljava/util/List;
+	public fun getType ()Lspace/kscience/dataforge/meta/ValueType;
+	public synthetic fun getValue ()Ljava/lang/Object;
+	public fun getValue ()[B
+	public fun hashCode ()I
+	public fun iterator ()Ljava/util/Iterator;
+	public fun toString ()Ljava/lang/String;
+}
+
 public abstract interface class space/kscience/dataforge/meta/Configurable {
 	public abstract fun getMeta ()Lspace/kscience/dataforge/meta/MutableMeta;
 }
@@ -30,7 +42,20 @@ public final class space/kscience/dataforge/meta/EnumValue : space/kscience/data
 }
 
 public final class space/kscience/dataforge/meta/ExoticValuesKt {
+	public static final fun asValue ([B)Lspace/kscience/dataforge/meta/Value;
 	public static final fun asValue ([D)Lspace/kscience/dataforge/meta/Value;
+	public static final fun byteArray (Lspace/kscience/dataforge/meta/MetaProvider;[BLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
+	public static final fun byteArray (Lspace/kscience/dataforge/meta/MutableMetaProvider;[BLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun byteArray$default (Lspace/kscience/dataforge/meta/MetaProvider;[BLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
+	public static synthetic fun byteArray$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;[BLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun doubleArray (Lspace/kscience/dataforge/meta/MetaProvider;[DLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
+	public static final fun doubleArray (Lspace/kscience/dataforge/meta/MutableMetaProvider;[DLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun doubleArray$default (Lspace/kscience/dataforge/meta/MetaProvider;[DLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
+	public static synthetic fun doubleArray$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;[DLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun getByteArray (Lspace/kscience/dataforge/meta/Meta;)[B
+	public static final fun getByteArray (Lspace/kscience/dataforge/meta/Value;)[B
+	public static final fun getDoubleArray (Lspace/kscience/dataforge/meta/Meta;)[D
+	public static final fun getDoubleArray (Lspace/kscience/dataforge/meta/Value;)[D
 	public static final fun lazyParseValue (Ljava/lang/String;)Lspace/kscience/dataforge/meta/LazyParsedValue;
 }
 
@@ -199,53 +224,56 @@ public final class space/kscience/dataforge/meta/MetaConverterKt {
 	public static final fun convertNullable (Lspace/kscience/dataforge/meta/MetaConverter;Ljava/lang/Object;)Lspace/kscience/dataforge/meta/Meta;
 }
 
+public abstract interface class space/kscience/dataforge/meta/MetaDelegate : kotlin/properties/ReadOnlyProperty, space/kscience/dataforge/meta/descriptors/Described {
+}
+
 public final class space/kscience/dataforge/meta/MetaDelegateKt {
-	public static final fun boolean (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun boolean (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun boolean (Lspace/kscience/dataforge/meta/MetaProvider;ZLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MetaProvider;ZLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun double (Lspace/kscience/dataforge/meta/MetaProvider;DLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun double (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun double$default (Lspace/kscience/dataforge/meta/MetaProvider;DLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun double$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun float (Lspace/kscience/dataforge/meta/MetaProvider;FLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun float (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun float$default (Lspace/kscience/dataforge/meta/MetaProvider;FLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun float$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun int (Lspace/kscience/dataforge/meta/MetaProvider;ILspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun int (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MetaProvider;ILspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun listOfSpec (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun listOfSpec$default (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun long (Lspace/kscience/dataforge/meta/MetaProvider;JLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun long (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MetaProvider;JLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun node (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
+	public static final fun boolean (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun boolean (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun boolean (Lspace/kscience/dataforge/meta/MetaProvider;ZLspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MetaProvider;ZLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun double (Lspace/kscience/dataforge/meta/MetaProvider;DLspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun double (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun double$default (Lspace/kscience/dataforge/meta/MetaProvider;DLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun double$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun float (Lspace/kscience/dataforge/meta/MetaProvider;FLspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun float (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun float$default (Lspace/kscience/dataforge/meta/MetaProvider;FLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun float$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun int (Lspace/kscience/dataforge/meta/MetaProvider;ILspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun int (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MetaProvider;ILspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun listOfSpec (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun listOfSpec$default (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun long (Lspace/kscience/dataforge/meta/MetaProvider;JLspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun long (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MetaProvider;JLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static final fun node (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaSpec;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
+	public static final fun node (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaSpec;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun number (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun number (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun number (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun spec (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun spec$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/String;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun string$default (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/String;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun string$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun string$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun value (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun value (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun value$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun value$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
+	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun number (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun number (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun number (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun spec (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun spec$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/String;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun string$default (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/String;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun string$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun string$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun value (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun value (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun value$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun value$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 }
 
 public final class space/kscience/dataforge/meta/MetaKt {
@@ -352,6 +380,7 @@ public abstract interface class space/kscience/dataforge/meta/MutableMeta : spac
 	public fun put (Ljava/lang/String;Lspace/kscience/dataforge/meta/MetaRepr;)V
 	public fun put (Ljava/lang/String;Lspace/kscience/dataforge/meta/Value;)V
 	public fun put (Ljava/lang/String;Z)V
+	public fun put (Ljava/lang/String;[B)V
 	public fun put (Ljava/lang/String;[D)V
 	public fun put (Lspace/kscience/dataforge/names/Name;Ljava/lang/Enum;)V
 	public fun put (Lspace/kscience/dataforge/names/Name;Ljava/lang/Number;)V
@@ -371,63 +400,64 @@ public final class space/kscience/dataforge/meta/MutableMeta$Companion {
 	public final fun serializer ()Lkotlinx/serialization/KSerializer;
 }
 
+public abstract interface class space/kscience/dataforge/meta/MutableMetaDelegate : kotlin/properties/ReadWriteProperty, space/kscience/dataforge/meta/descriptors/Described {
+}
+
 public final class space/kscience/dataforge/meta/MutableMetaDelegateKt {
-	public static final fun boolean (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun boolean (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun boolean (Lspace/kscience/dataforge/meta/MutableMetaProvider;ZLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;ZLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun convertable (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun convertable$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun double (Lspace/kscience/dataforge/meta/MutableMetaProvider;DLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun double (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun double$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;DLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun double$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun doubleArray (Lspace/kscience/dataforge/meta/MutableMetaProvider;[DLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun doubleArray$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;[DLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun float (Lspace/kscience/dataforge/meta/MutableMetaProvider;FLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun float (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun float$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;FLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun float$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun int (Lspace/kscience/dataforge/meta/MutableMetaProvider;ILspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun int (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;ILspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun listOfConvertable (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun listOfConvertable$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun listValue (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun listValue$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun long (Lspace/kscience/dataforge/meta/MutableMetaProvider;JLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun long (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;JLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun node (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun node (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaConverter;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaConverter;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun number (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun number (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun number (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun numberList (Lspace/kscience/dataforge/meta/MutableMetaProvider;[Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun numberList$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;[Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun string (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/String;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun string (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun string (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun string$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/String;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun string$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun string$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun stringList (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun stringList (Lspace/kscience/dataforge/meta/MutableMetaProvider;[Ljava/lang/String;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun stringList$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun stringList$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;[Ljava/lang/String;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun value (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun value (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun value$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun value$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun boolean (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun boolean (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun boolean (Lspace/kscience/dataforge/meta/MutableMetaProvider;ZLspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;ZLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun convertable (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun convertable$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun double (Lspace/kscience/dataforge/meta/MutableMetaProvider;DLspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun double (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun double$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;DLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun double$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun float (Lspace/kscience/dataforge/meta/MutableMetaProvider;FLspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun float (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun float$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;FLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun float$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun int (Lspace/kscience/dataforge/meta/MutableMetaProvider;ILspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun int (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;ILspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun listOfConvertable (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun listOfConvertable$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun listValue (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun listValue$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun long (Lspace/kscience/dataforge/meta/MutableMetaProvider;JLspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun long (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;JLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun node (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaConverter;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun node (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaConverter;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun number (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun number (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun number (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun numberList (Lspace/kscience/dataforge/meta/MutableMetaProvider;[Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun numberList$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;[Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun string (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/String;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun string (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun string (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun string$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/String;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun string$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun string$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun stringList (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun stringList (Lspace/kscience/dataforge/meta/MutableMetaProvider;[Ljava/lang/String;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun stringList$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun stringList$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;[Ljava/lang/String;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun value (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun value (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun value$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun value$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 }
 
 public final class space/kscience/dataforge/meta/MutableMetaKt {
@@ -447,6 +477,7 @@ public final class space/kscience/dataforge/meta/MutableMetaKt {
 	public static final fun getOrCreate (Lspace/kscience/dataforge/meta/MutableTypedMeta;Ljava/lang/String;)Lspace/kscience/dataforge/meta/MutableTypedMeta;
 	public static final fun remove (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/String;)V
 	public static final fun remove (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)V
+	public static final fun reset (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Meta;)V
 	public static final fun set (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/String;Ljava/lang/Iterable;)V
 	public static final fun set (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/String;Lspace/kscience/dataforge/meta/Meta;)V
 	public static final fun set (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Ljava/lang/Iterable;)V
@@ -703,7 +734,6 @@ public final class space/kscience/dataforge/meta/Value$Companion {
 public final class space/kscience/dataforge/meta/ValueExtensionsKt {
 	public static final fun getBoolean (Lspace/kscience/dataforge/meta/Value;)Z
 	public static final fun getDouble (Lspace/kscience/dataforge/meta/Value;)D
-	public static final fun getDoubleArray (Lspace/kscience/dataforge/meta/Value;)[D
 	public static final fun getFloat (Lspace/kscience/dataforge/meta/Value;)F
 	public static final fun getInt (Lspace/kscience/dataforge/meta/Value;)I
 	public static final fun getLong (Lspace/kscience/dataforge/meta/Value;)J
@@ -722,7 +752,6 @@ public final class space/kscience/dataforge/meta/ValueKt {
 	public static final fun asValue (Ljava/lang/Number;)Lspace/kscience/dataforge/meta/Value;
 	public static final fun asValue (Ljava/lang/String;)Lspace/kscience/dataforge/meta/Value;
 	public static final fun asValue (Z)Lspace/kscience/dataforge/meta/Value;
-	public static final fun asValue ([B)Lspace/kscience/dataforge/meta/Value;
 	public static final fun asValue ([F)Lspace/kscience/dataforge/meta/Value;
 	public static final fun asValue ([I)Lspace/kscience/dataforge/meta/Value;
 	public static final fun asValue ([J)Lspace/kscience/dataforge/meta/Value;
@@ -928,6 +957,9 @@ public final class space/kscience/dataforge/misc/NamedKt {
 	public static final fun isAnonymous (Lspace/kscience/dataforge/misc/Named;)Z
 }
 
+public abstract interface annotation class space/kscience/dataforge/misc/UnsafeKType : java/lang/annotation/Annotation {
+}
+
 public final class space/kscience/dataforge/names/Name {
 	public static final field Companion Lspace/kscience/dataforge/names/Name$Companion;
 	public static final field NAME_SEPARATOR Ljava/lang/String;
@@ -945,6 +977,16 @@ public final class space/kscience/dataforge/names/Name$Companion {
 	public final fun serializer ()Lkotlinx/serialization/KSerializer;
 }
 
+public final class space/kscience/dataforge/names/NameIndexComparator : java/util/Comparator {
+	public static final field INSTANCE Lspace/kscience/dataforge/names/NameIndexComparator;
+	public synthetic fun compare (Ljava/lang/Object;Ljava/lang/Object;)I
+	public fun compare (Ljava/lang/String;Ljava/lang/String;)I
+}
+
+public final class space/kscience/dataforge/names/NameIndexComparatorKt {
+	public static final fun getIndexedList (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/names/Name;)Ljava/util/List;
+}
+
 public final class space/kscience/dataforge/names/NameKt {
 	public static final fun appendFirst (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Lspace/kscience/dataforge/names/Name;
 	public static final fun appendLeft (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Lspace/kscience/dataforge/names/Name;
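Per the API dump changes above, the scalar delegate builders now return the new `MetaDelegate`/`MutableMetaDelegate` interfaces (property delegates that also expose a descriptor) instead of plain `ReadOnlyProperty`/`ReadWriteProperty`, and `ByteArray` gains first-class support via `ByteArrayValue` and a `put(String, ByteArray)` overload. Existing delegation sites should keep compiling; a short sketch of what such call sites look like, assuming `MutableMeta` remains a `MutableMetaProvider` as elsewhere in dataforge-meta (class and property names are illustrative):

```kotlin
import space.kscience.dataforge.meta.*

class Settings(meta: MutableMeta) {
    // Each delegate is now a MutableMetaDelegate: still a ReadWriteProperty, but it also carries a descriptor.
    var threshold: Double by meta.double(1.0)
    var tags: List<String> by meta.stringList("a", "b")
}

fun storeBlob(meta: MutableMeta) {
    // New ByteArray support: stored as a ByteArrayValue under the hood.
    meta.put("payload", byteArrayOf(1, 2, 3))
}
```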
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
index a24f7371..8c5a738f 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
@@ -1,15 +1,27 @@
 package space.kscience.dataforge.meta
 
+import space.kscience.dataforge.meta.descriptors.Described
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
 import kotlin.properties.ReadOnlyProperty
+import kotlin.reflect.KProperty
 
 /* Meta delegates */
 
-public fun MetaProvider.node(key: Name? = null): ReadOnlyProperty<Any?, Meta?> = ReadOnlyProperty { _, property ->
-    get(key ?: property.name.asName())
+public interface MetaDelegate<T> : ReadOnlyProperty<Any?, T>, Described
+
+
+public fun MetaProvider.node(
+    key: Name? = null,
+    descriptor: MetaDescriptor? = null,
+): MetaDelegate<Meta?> = object : MetaDelegate<Meta?> {
+    override val descriptor: MetaDescriptor? = descriptor
+
+    override fun getValue(thisRef: Any?, property: KProperty<*>): Meta? {
+        return get(key ?: property.name.asName())
+    }
 }
 
 /**
@@ -18,8 +30,12 @@ public fun MetaProvider.node(key: Name? = null): ReadOnlyProperty<Any?, Meta?> =
 public fun <T> MetaProvider.spec(
     metaSpec: MetaSpec<T>,
     key: Name? = null,
-): ReadOnlyProperty<Any?, T?> = ReadOnlyProperty { _, property ->
-    get(key ?: property.name.asName())?.let { metaSpec.read(it) }
+): MetaDelegate<T?> = object : MetaDelegate<T?> {
+    override val descriptor: MetaDescriptor? get() = metaSpec.descriptor
+
+    override fun getValue(thisRef: Any?, property: KProperty<*>): T? {
+        return get(key ?: property.name.asName())?.let { metaSpec.read(it) }
+    }
 }
 
 /**
@@ -27,9 +43,9 @@ public fun <T> MetaProvider.spec(
  */
 @DFExperimental
 public inline fun <reified T> MetaProvider.serializable(
-    descriptor: MetaDescriptor? = null,
     key: Name? = null,
-): ReadOnlyProperty<Any?, T?> = spec(MetaConverter.serializable(descriptor), key)
+    descriptor: MetaDescriptor? = null,
+): MetaDelegate<T?> = spec(MetaConverter.serializable(descriptor), key)
 
 @Deprecated("Use convertable", ReplaceWith("convertable(converter, key)"))
 public fun <T> MetaProvider.node(
@@ -43,78 +59,90 @@ public fun <T> MetaProvider.node(
 public fun <T> Meta.listOfSpec(
     converter: MetaSpec<T>,
     key: Name? = null,
-): ReadOnlyProperty<Any?, List<T>> = ReadOnlyProperty{_, property ->
-    val name = key ?: property.name.asName()
-    getIndexed(name).values.map { converter.read(it) }
+): MetaDelegate<List<T>> = object : MetaDelegate<List<T>> {
+    override fun getValue(thisRef: Any?, property: KProperty<*>): List<T> {
+        val name = key ?: property.name.asName()
+        return getIndexed(name).values.map { converter.read(it) }
+    }
+
+    override val descriptor: MetaDescriptor? = converter.descriptor?.copy(multiple = true)
 }
 
 @DFExperimental
 public inline fun <reified T> Meta.listOfSerializable(
-    descriptor: MetaDescriptor? = null,
     key: Name? = null,
-): ReadOnlyProperty<Any?, List<T>> = listOfSpec(MetaConverter.serializable(descriptor), key)
+    descriptor: MetaDescriptor? = null,
+): MetaDelegate<List<T>> = listOfSpec(MetaConverter.serializable(descriptor), key)
 
 /**
  * A property delegate that uses custom key
  */
-public fun MetaProvider.value(key: Name? = null): ReadOnlyProperty<Any?, Value?> = ReadOnlyProperty { _, property ->
-    get(key ?: property.name.asName())?.value
+public fun MetaProvider.value(
+    key: Name? = null,
+    descriptor: MetaDescriptor? = null,
+): MetaDelegate<Value?> = object : MetaDelegate<Value?> {
+    override fun getValue(thisRef: Any?, property: KProperty<*>): Value? = get(key ?: property.name.asName())?.value
+
+    override val descriptor: MetaDescriptor? = descriptor
 }
 
 public fun <R> MetaProvider.value(
     key: Name? = null,
+    descriptor: MetaDescriptor? = null,
     reader: (Value?) -> R,
-): ReadOnlyProperty<Any?, R> = ReadOnlyProperty { _, property ->
-    reader(get(key ?: property.name.asName())?.value)
+): MetaDelegate<R> = object : MetaDelegate<R> {
+    override fun getValue(thisRef: Any?, property: KProperty<*>): R = reader(get(key ?: property.name.asName())?.value)
+
+    override val descriptor: MetaDescriptor? = descriptor
 }
 
 //TODO add caching for sealed nodes
 
 /* Read-only delegates for [Meta] */
 
-public fun MetaProvider.string(key: Name? = null): ReadOnlyProperty<Any?, String?> = value(key) { it?.string }
+public fun MetaProvider.string(key: Name? = null): MetaDelegate<String?> = value(key = key) { it?.string }
 
-public fun MetaProvider.boolean(key: Name? = null): ReadOnlyProperty<Any?, Boolean?> = value(key) { it?.boolean }
+public fun MetaProvider.boolean(key: Name? = null): MetaDelegate<Boolean?> = value(key = key) { it?.boolean }
 
-public fun MetaProvider.number(key: Name? = null): ReadOnlyProperty<Any?, Number?> = value(key) { it?.numberOrNull }
+public fun MetaProvider.number(key: Name? = null): MetaDelegate<Number?> = value(key = key) { it?.numberOrNull }
 
-public fun MetaProvider.double(key: Name? = null): ReadOnlyProperty<Any?, Double?> = value(key) { it?.double }
+public fun MetaProvider.double(key: Name? = null): MetaDelegate<Double?> = value(key = key) { it?.double }
 
-public fun MetaProvider.float(key: Name? = null): ReadOnlyProperty<Any?, Float?> = value(key) { it?.float }
+public fun MetaProvider.float(key: Name? = null): MetaDelegate<Float?> = value(key = key) { it?.float }
 
-public fun MetaProvider.int(key: Name? = null): ReadOnlyProperty<Any?, Int?> = value(key) { it?.int }
+public fun MetaProvider.int(key: Name? = null): MetaDelegate<Int?> = value(key = key) { it?.int }
 
-public fun MetaProvider.long(key: Name? = null): ReadOnlyProperty<Any?, Long?> = value(key) { it?.long }
+public fun MetaProvider.long(key: Name? = null): MetaDelegate<Long?> = value(key = key) { it?.long }
 
-public fun MetaProvider.string(default: String, key: Name? = null): ReadOnlyProperty<Any?, String> =
-    value(key) { it?.string ?: default }
+public fun MetaProvider.string(default: String, key: Name? = null): MetaDelegate<String> =
+    value(key = key) { it?.string ?: default }
 
-public fun MetaProvider.boolean(default: Boolean, key: Name? = null): ReadOnlyProperty<Any?, Boolean> =
-    value(key) { it?.boolean ?: default }
+public fun MetaProvider.boolean(default: Boolean, key: Name? = null): MetaDelegate<Boolean> =
+    value(key = key) { it?.boolean ?: default }
 
-public fun MetaProvider.number(default: Number, key: Name? = null): ReadOnlyProperty<Any?, Number> =
-    value(key) { it?.numberOrNull ?: default }
+public fun MetaProvider.number(default: Number, key: Name? = null): MetaDelegate<Number> =
+    value(key = key) { it?.numberOrNull ?: default }
 
-public fun MetaProvider.double(default: Double, key: Name? = null): ReadOnlyProperty<Any?, Double> =
-    value(key) { it?.double ?: default }
+public fun MetaProvider.double(default: Double, key: Name? = null): MetaDelegate<Double> =
+    value(key = key) { it?.double ?: default }
 
-public fun MetaProvider.float(default: Float, key: Name? = null): ReadOnlyProperty<Any?, Float> =
-    value(key) { it?.float ?: default }
+public fun MetaProvider.float(default: Float, key: Name? = null): MetaDelegate<Float> =
+    value(key = key) { it?.float ?: default }
 
-public fun MetaProvider.int(default: Int, key: Name? = null): ReadOnlyProperty<Any?, Int> =
-    value(key) { it?.int ?: default }
+public fun MetaProvider.int(default: Int, key: Name? = null): MetaDelegate<Int> =
+    value(key = key) { it?.int ?: default }
 
-public fun MetaProvider.long(default: Long, key: Name? = null): ReadOnlyProperty<Any?, Long> =
-    value(key) { it?.long ?: default }
+public fun MetaProvider.long(default: Long, key: Name? = null): MetaDelegate<Long> =
+    value(key = key) { it?.long ?: default }
 
-public inline fun <reified E : Enum<E>> MetaProvider.enum(default: E, key: Name? = null): ReadOnlyProperty<Any?, E> =
-    value<E>(key) { it?.enum<E>() ?: default }
+public inline fun <reified E : Enum<E>> MetaProvider.enum(default: E, key: Name? = null): MetaDelegate<E> =
+    value<E>(key = key) { it?.enum<E>() ?: default }
 
-public fun MetaProvider.string(key: Name? = null, default: () -> String): ReadOnlyProperty<Any?, String> =
-    value(key) { it?.string ?: default() }
+public fun MetaProvider.string(key: Name? = null, default: () -> String): MetaDelegate<String> =
+    value(key = key) { it?.string ?: default() }
 
-public fun MetaProvider.boolean(key: Name? = null, default: () -> Boolean): ReadOnlyProperty<Any?, Boolean> =
-    value(key) { it?.boolean ?: default() }
+public fun MetaProvider.boolean(key: Name? = null, default: () -> Boolean): MetaDelegate<Boolean> =
+    value(key = key) { it?.boolean ?: default() }
 
-public fun MetaProvider.number(key: Name? = null, default: () -> Number): ReadOnlyProperty<Any?, Number> =
-    value(key) { it?.numberOrNull ?: default() }
+public fun MetaProvider.number(key: Name? = null, default: () -> Number): MetaDelegate<Number> =
+    value(key = key) { it?.numberOrNull ?: default() }
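
Editor's note: a minimal usage sketch of the descriptor-aware read-only delegates introduced above (the `ReadOnlySettings` class, key names and default values are illustrative, not from the patch). The point is that the delegate now implements `Described`, so an optional `MetaDescriptor` travels with the property:

    import space.kscience.dataforge.meta.*
    import space.kscience.dataforge.meta.descriptors.MetaDescriptor

    class ReadOnlySettings(meta: Meta) {
        // key defaults to the property name ("host")
        val host: String? by meta.string()

        // a delegate with an explicit descriptor attached to it
        val port: Int by meta.value(
            descriptor = MetaDescriptor { description = "TCP port of the service" },
        ) { it?.int ?: 8080 }
    }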
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
index e308a63d..37140c6f 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
@@ -1,5 +1,6 @@
 package space.kscience.dataforge.meta
 
+import space.kscience.dataforge.meta.descriptors.Described
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
@@ -8,20 +9,28 @@ import space.kscience.dataforge.names.getIndexedList
 import kotlin.properties.ReadWriteProperty
 import kotlin.reflect.KProperty
 
+
 /* Read-write delegates */
 
-public fun MutableMetaProvider.node(key: Name? = null): ReadWriteProperty<Any?, Meta?> =
-    object : ReadWriteProperty<Any?, Meta?> {
-        override fun getValue(thisRef: Any?, property: KProperty<*>): Meta? {
-            return get(key ?: property.name.asName())
-        }
+public interface MutableMetaDelegate<T> : ReadWriteProperty<Any?, T>, Described
 
-        override fun setValue(thisRef: Any?, property: KProperty<*>, value: Meta?) {
-            val name = key ?: property.name.asName()
-            set(name, value)
-        }
+public fun MutableMetaProvider.node(
+    key: Name? = null,
+    descriptor: MetaDescriptor? = null,
+): MutableMetaDelegate<Meta?> = object : MutableMetaDelegate<Meta?> {
+
+    override val descriptor: MetaDescriptor? = descriptor
+
+    override fun getValue(thisRef: Any?, property: KProperty<*>): Meta? {
+        return get(key ?: property.name.asName())
     }
 
+    override fun setValue(thisRef: Any?, property: KProperty<*>, value: Meta?) {
+        val name = key ?: property.name.asName()
+        set(name, value)
+    }
+}
+
 /**
  * Use [converter] to transform an object to Meta and back.
  * Note that mutation of the object does not change Meta.
@@ -29,21 +38,24 @@ public fun MutableMetaProvider.node(key: Name? = null): ReadWriteProperty<Any?,
 public fun <T> MutableMetaProvider.convertable(
     converter: MetaConverter<T>,
     key: Name? = null,
-): ReadWriteProperty<Any?, T?> =
-    object : ReadWriteProperty<Any?, T?> {
-        override fun getValue(thisRef: Any?, property: KProperty<*>): T? {
-            val name = key ?: property.name.asName()
-            return get(name)?.let { converter.read(it) }
-        }
+): MutableMetaDelegate<T?> = object : MutableMetaDelegate<T?> {
 
-        override fun setValue(thisRef: Any?, property: KProperty<*>, value: T?) {
-            val name = key ?: property.name.asName()
-            set(name, value?.let { converter.convert(it) })
-        }
+    override val descriptor: MetaDescriptor? get() = converter.descriptor
+
+
+    override fun getValue(thisRef: Any?, property: KProperty<*>): T? {
+        val name = key ?: property.name.asName()
+        return get(name)?.let { converter.read(it) }
     }
 
+    override fun setValue(thisRef: Any?, property: KProperty<*>, value: T?) {
+        val name = key ?: property.name.asName()
+        set(name, value?.let { converter.convert(it) })
+    }
+}
+
 @Deprecated("Use convertable", ReplaceWith("convertable(converter, key)"))
-public fun <T> MutableMetaProvider.node(key: Name? = null, converter: MetaConverter<T>): ReadWriteProperty<Any?, T?> =
+public fun <T> MutableMetaProvider.node(key: Name? = null, converter: MetaConverter<T>): MutableMetaDelegate<T?> =
     convertable(converter, key)
 
 /**
@@ -54,7 +66,7 @@ public fun <T> MutableMetaProvider.node(key: Name? = null, converter: MetaConver
 public inline fun <reified T> MutableMetaProvider.serializable(
     descriptor: MetaDescriptor? = null,
     key: Name? = null,
-): ReadWriteProperty<Any?, T?> = convertable(MetaConverter.serializable(descriptor), key)
+): MutableMetaDelegate<T?> = convertable(MetaConverter.serializable(descriptor), key)
 
 /**
  * Use [converter] to convert a list of same name siblings meta to object and back.
@@ -63,7 +75,9 @@ public inline fun <reified T> MutableMetaProvider.serializable(
 public fun <T> MutableMeta.listOfConvertable(
     converter: MetaConverter<T>,
     key: Name? = null,
-): ReadWriteProperty<Any?, List<T>> = object : ReadWriteProperty<Any?, List<T>> {
+): MutableMetaDelegate<List<T>> = object : MutableMetaDelegate<List<T>> {
+    override val descriptor: MetaDescriptor? = converter.descriptor?.copy(multiple = true)
+
     override fun getValue(thisRef: Any?, property: KProperty<*>): List<T> {
         val name = key ?: property.name.asName()
         return getIndexedList(name).map { converter.read(it) }
@@ -77,26 +91,33 @@ public fun <T> MutableMeta.listOfConvertable(
 
 @DFExperimental
 public inline fun <reified T> MutableMeta.listOfSerializable(
-    descriptor: MetaDescriptor? = null,
     key: Name? = null,
-): ReadWriteProperty<Any?, List<T>> = listOfConvertable(MetaConverter.serializable(descriptor), key)
+    descriptor: MetaDescriptor? = null,
+): MutableMetaDelegate<List<T>> = listOfConvertable(MetaConverter.serializable(descriptor), key)
 
 
-public fun MutableMetaProvider.value(key: Name? = null): ReadWriteProperty<Any?, Value?> =
-    object : ReadWriteProperty<Any?, Value?> {
-        override fun getValue(thisRef: Any?, property: KProperty<*>): Value? =
-            get(key ?: property.name.asName())?.value
+public fun MutableMetaProvider.value(
+    key: Name? = null,
+    descriptor: MetaDescriptor? = null,
+): MutableMetaDelegate<Value?> = object : MutableMetaDelegate<Value?> {
+    override val descriptor: MetaDescriptor? = descriptor
 
-        override fun setValue(thisRef: Any?, property: KProperty<*>, value: Value?) {
-            setValue(key ?: property.name.asName(), value)
-        }
+    override fun getValue(thisRef: Any?, property: KProperty<*>): Value? =
+        get(key ?: property.name.asName())?.value
+
+    override fun setValue(thisRef: Any?, property: KProperty<*>, value: Value?) {
+        setValue(key ?: property.name.asName(), value)
     }
+}
 
 public fun <T> MutableMetaProvider.value(
     key: Name? = null,
     writer: (T) -> Value? = { Value.of(it) },
+    descriptor: MetaDescriptor? = null,
     reader: (Value?) -> T,
-): ReadWriteProperty<Any?, T> = object : ReadWriteProperty<Any?, T> {
+): MutableMetaDelegate<T> = object : MutableMetaDelegate<T> {
+    override val descriptor: MetaDescriptor? = descriptor
+
     override fun getValue(thisRef: Any?, property: KProperty<*>): T =
         reader(get(key ?: property.name.asName())?.value)
 
@@ -107,65 +128,65 @@ public fun <T> MutableMetaProvider.value(
 
 /* Read-write delegates for [MutableItemProvider] */
 
-public fun MutableMetaProvider.string(key: Name? = null): ReadWriteProperty<Any?, String?> =
+public fun MutableMetaProvider.string(key: Name? = null): MutableMetaDelegate<String?> =
     value(key) { it?.string }
 
-public fun MutableMetaProvider.boolean(key: Name? = null): ReadWriteProperty<Any?, Boolean?> =
+public fun MutableMetaProvider.boolean(key: Name? = null): MutableMetaDelegate<Boolean?> =
     value(key) { it?.boolean }
 
-public fun MutableMetaProvider.number(key: Name? = null): ReadWriteProperty<Any?, Number?> =
+public fun MutableMetaProvider.number(key: Name? = null): MutableMetaDelegate<Number?> =
     value(key) { it?.number }
 
-public fun MutableMetaProvider.string(default: String, key: Name? = null): ReadWriteProperty<Any?, String> =
+public fun MutableMetaProvider.string(default: String, key: Name? = null): MutableMetaDelegate<String> =
     value(key) { it?.string ?: default }
 
-public fun MutableMetaProvider.boolean(default: Boolean, key: Name? = null): ReadWriteProperty<Any?, Boolean> =
+public fun MutableMetaProvider.boolean(default: Boolean, key: Name? = null): MutableMetaDelegate<Boolean> =
     value(key) { it?.boolean ?: default }
 
-public fun MutableMetaProvider.number(default: Number, key: Name? = null): ReadWriteProperty<Any?, Number> =
+public fun MutableMetaProvider.number(default: Number, key: Name? = null): MutableMetaDelegate<Number> =
     value(key) { it?.number ?: default }
 
-public fun MutableMetaProvider.string(key: Name? = null, default: () -> String): ReadWriteProperty<Any?, String> =
+public fun MutableMetaProvider.string(key: Name? = null, default: () -> String): MutableMetaDelegate<String> =
     value(key) { it?.string ?: default() }
 
-public fun MutableMetaProvider.boolean(key: Name? = null, default: () -> Boolean): ReadWriteProperty<Any?, Boolean> =
+public fun MutableMetaProvider.boolean(key: Name? = null, default: () -> Boolean): MutableMetaDelegate<Boolean> =
     value(key) { it?.boolean ?: default() }
 
-public fun MutableMetaProvider.number(key: Name? = null, default: () -> Number): ReadWriteProperty<Any?, Number> =
+public fun MutableMetaProvider.number(key: Name? = null, default: () -> Number): MutableMetaDelegate<Number> =
     value(key) { it?.number ?: default() }
 
 public inline fun <reified E : Enum<E>> MutableMetaProvider.enum(
     default: E,
     key: Name? = null,
-): ReadWriteProperty<Any?, E> = value(key) { value -> value?.string?.let { enumValueOf<E>(it) } ?: default }
+): MutableMetaDelegate<E> = value(key) { value -> value?.string?.let { enumValueOf<E>(it) } ?: default }
 
 /* Number delegates */
 
-public fun MutableMetaProvider.int(key: Name? = null): ReadWriteProperty<Any?, Int?> =
+public fun MutableMetaProvider.int(key: Name? = null): MutableMetaDelegate<Int?> =
     value(key) { it?.int }
 
-public fun MutableMetaProvider.double(key: Name? = null): ReadWriteProperty<Any?, Double?> =
+public fun MutableMetaProvider.double(key: Name? = null): MutableMetaDelegate<Double?> =
     value(key) { it?.double }
 
-public fun MutableMetaProvider.long(key: Name? = null): ReadWriteProperty<Any?, Long?> =
+public fun MutableMetaProvider.long(key: Name? = null): MutableMetaDelegate<Long?> =
     value(key) { it?.long }
 
-public fun MutableMetaProvider.float(key: Name? = null): ReadWriteProperty<Any?, Float?> =
+public fun MutableMetaProvider.float(key: Name? = null): MutableMetaDelegate<Float?> =
     value(key) { it?.float }
 
 
 /* Safe number delegates*/
 
-public fun MutableMetaProvider.int(default: Int, key: Name? = null): ReadWriteProperty<Any?, Int> =
+public fun MutableMetaProvider.int(default: Int, key: Name? = null): MutableMetaDelegate<Int> =
     value(key) { it?.int ?: default }
 
-public fun MutableMetaProvider.double(default: Double, key: Name? = null): ReadWriteProperty<Any?, Double> =
+public fun MutableMetaProvider.double(default: Double, key: Name? = null): MutableMetaDelegate<Double> =
     value(key) { it?.double ?: default }
 
-public fun MutableMetaProvider.long(default: Long, key: Name? = null): ReadWriteProperty<Any?, Long> =
+public fun MutableMetaProvider.long(default: Long, key: Name? = null): MutableMetaDelegate<Long> =
     value(key) { it?.long ?: default }
 
-public fun MutableMetaProvider.float(default: Float, key: Name? = null): ReadWriteProperty<Any?, Float> =
+public fun MutableMetaProvider.float(default: Float, key: Name? = null): MutableMetaDelegate<Float> =
     value(key) { it?.float ?: default }
 
 
@@ -174,7 +195,7 @@ public fun MutableMetaProvider.float(default: Float, key: Name? = null): ReadWri
 public fun MutableMetaProvider.stringList(
     vararg default: String,
     key: Name? = null,
-): ReadWriteProperty<Any?, List<String>> = value(
+): MutableMetaDelegate<List<String>> = value(
     key,
     writer = { list -> list.map { str -> str.asValue() }.asValue() },
     reader = { it?.stringList ?: listOf(*default) },
@@ -182,7 +203,7 @@ public fun MutableMetaProvider.stringList(
 
 public fun MutableMetaProvider.stringList(
     key: Name? = null,
-): ReadWriteProperty<Any?, List<String>?> = value(
+): MutableMetaDelegate<List<String>?> = value(
     key,
     writer = { it -> it?.map { str -> str.asValue() }?.asValue() },
     reader = { it?.stringList },
@@ -191,7 +212,7 @@ public fun MutableMetaProvider.stringList(
 public fun MutableMetaProvider.numberList(
     vararg default: Number,
     key: Name? = null,
-): ReadWriteProperty<Any?, List<Number>> = value(
+): MutableMetaDelegate<List<Number>> = value(
     key,
     writer = { it.map { num -> num.asValue() }.asValue() },
     reader = { it?.list?.map { value -> value.numberOrNull ?: Double.NaN } ?: listOf(*default) },
@@ -202,7 +223,7 @@ public fun <T> MutableMetaProvider.listValue(
     key: Name? = null,
     writer: (T) -> Value = { Value.of(it) },
     reader: (Value) -> T,
-): ReadWriteProperty<Any?, List<T>?> = value(
+): MutableMetaDelegate<List<T>?> = value(
     key,
     writer = { it?.map(writer)?.asValue() },
     reader = { it?.list?.map(reader) }
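
Editor's note: the read-write side follows the same pattern. A short sketch (the `ServerConfig` class and defaults are illustrative, and it assumes the usual `MutableMeta()` factory) of the new `MutableMetaDelegate`s writing through to a backing `MutableMeta`, with an optional descriptor on the raw value delegate:

    import space.kscience.dataforge.meta.*
    import space.kscience.dataforge.meta.descriptors.MetaDescriptor

    class ServerConfig(val meta: MutableMeta = MutableMeta()) {
        var host: String by meta.string("localhost")
        var port: Int by meta.int(8080)

        // a raw Value delegate carrying a descriptor
        var motd: Value? by meta.value(
            descriptor = MetaDescriptor { description = "Message of the day" },
        )
    }

    fun configDemo() {
        val config = ServerConfig()
        config.port = 9090 // writes the "port" node into the backing meta
        println(config.port)
    }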
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt
new file mode 100644
index 00000000..5f88ab74
--- /dev/null
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt
@@ -0,0 +1,19 @@
+package space.kscience.dataforge.workspace
+
+import space.kscience.dataforge.actions.AbstractAction
+import space.kscience.dataforge.data.*
+import space.kscience.dataforge.meta.Meta
+import kotlin.reflect.KType
+
+internal class CachingAction<T>(type: KType, private val caching: (NamedData<T>) -> NamedData<T>) :
+    AbstractAction<T, T>(type) {
+    override fun DataSink<T>.generate(source: DataTree<T>, meta: Meta) {
+        source.forEach {
+            put(caching(it))
+        }
+    }
+
+    override suspend fun DataSink<T>.update(source: DataTree<T>, meta: Meta, updatedData: DataUpdate<T>) {
+        put(updatedData.name, updatedData.data?.named(updatedData.name)?.let(caching))
+    }
+}
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
index da09c095..4d2578e5 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
@@ -1,20 +1,22 @@
 package space.kscience.dataforge.workspace
 
-import kotlinx.coroutines.flow.filterIsInstance
-import kotlinx.coroutines.flow.map
 import kotlinx.io.*
 import kotlinx.serialization.ExperimentalSerializationApi
 import kotlinx.serialization.KSerializer
 import kotlinx.serialization.json.Json
 import kotlinx.serialization.protobuf.ProtoBuf
 import kotlinx.serialization.serializer
+import space.kscience.dataforge.actions.Action
+import space.kscience.dataforge.actions.invoke
 import space.kscience.dataforge.context.error
 import space.kscience.dataforge.context.logger
 import space.kscience.dataforge.context.request
-import space.kscience.dataforge.data.*
+import space.kscience.dataforge.data.Data
+import space.kscience.dataforge.data.await
+import space.kscience.dataforge.data.named
 import space.kscience.dataforge.io.*
 import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.misc.DFInternal
+import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.withIndex
 import java.nio.file.Path
 import kotlin.io.path.deleteIfExists
@@ -52,7 +54,8 @@ public class FileWorkspaceCache(public val cacheDirectory: Path) : WorkspaceCach
 
     //    private fun <T : Any> TaskData<*>.checkType(taskType: KType): TaskData<T> = this as TaskData<T>
 
-    @OptIn(DFExperimental::class, DFInternal::class)
+
+    @OptIn(DFExperimental::class, UnsafeKType::class)
     override suspend fun <T> cache(result: TaskResult<T>): TaskResult<T> {
         val io = result.workspace.context.request(IOPlugin)
 
@@ -60,8 +63,8 @@ public class FileWorkspaceCache(public val cacheDirectory: Path) : WorkspaceCach
             ?: ProtobufIOFormat(result.dataType)
             ?: error("Can't resolve IOFormat for ${result.dataType}")
 
-        fun cacheOne(data: NamedData<T>): NamedData<T> {
 
+        val cachingAction: Action<T, T> = CachingAction(result.dataType) { data ->
             val path = cacheDirectory /
                     result.taskName.withIndex(result.taskMeta.hashCode().toString(16)).toString() /
                     data.name.toString()
@@ -80,7 +83,7 @@ public class FileWorkspaceCache(public val cacheDirectory: Path) : WorkspaceCach
                     }
                 }
 
-                //waiting for data in current scope because Envelope is synchronous
+                //waiting for data in the current scope because Envelope is synchronous
                 return@Data data.await().also { result ->
                     val envelope = Envelope {
                         meta = data.meta
@@ -92,12 +95,10 @@ public class FileWorkspaceCache(public val cacheDirectory: Path) : WorkspaceCach
                 }
 
             }
-            return datum.named(data.name)
+            datum.named(data.name)
         }
 
-
-        val cachedTree = result.asSequence().map { cacheOne(it) }
-            .toTree(result.dataType, result.updates.filterIsInstance<NamedData<T>>().map { cacheOne(it) })
+        val cachedTree = cachingAction(result)
 
         return result.workspace.wrapResult(cachedTree, result.taskName, result.taskMeta)
     }
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
index f3b2ee15..8ba39ec1 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
@@ -1,9 +1,11 @@
 package space.kscience.dataforge.workspace
 
-import kotlinx.coroutines.flow.filterIsInstance
-import kotlinx.coroutines.flow.map
-import space.kscience.dataforge.data.*
+import space.kscience.dataforge.actions.Action
+import space.kscience.dataforge.actions.invoke
+import space.kscience.dataforge.data.Data
+import space.kscience.dataforge.data.named
 import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import kotlin.reflect.KType
 import kotlin.reflect.full.isSubtypeOf
@@ -20,19 +22,18 @@ public class InMemoryWorkspaceCache : WorkspaceCache {
         if (type.isSubtypeOf(taskType)) this as Data<T>
         else error("Cached data type mismatch: expected $taskType but got $type")
 
+    @OptIn(DFExperimental::class)
     override suspend fun <T> cache(result: TaskResult<T>): TaskResult<T> {
-        fun cacheOne(data: NamedData<T>): NamedData<T> {
+        val cachingAction: Action<T, T> = CachingAction(result.dataType) { data ->
             val cachedData =  cache.getOrPut(TaskResultId(result.taskName, result.taskMeta)){
                 HashMap()
             }.getOrPut(data.name){
                 data.data
             }
-            return cachedData.checkType<T>(result.dataType).named(data.name)
+            cachedData.checkType<T>(result.dataType).named(data.name)
         }
 
-
-        val cachedTree = result.asSequence().map { cacheOne(it) }
-            .toTree(result.dataType, result.updates.filterIsInstance<NamedData<T>>().map { cacheOne(it) })
+        val cachedTree = cachingAction(result)
 
         return result.workspace.wrapResult(cachedTree, result.taskName, result.taskMeta)
     }
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
index 837b61bc..39837c15 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
@@ -148,16 +148,16 @@ internal class SimpleWorkspaceTest {
     }
 
     @Test
-    fun testWorkspace() = runTest(timeout = 10.milliseconds) {
+    fun testWorkspace() = runTest(timeout = 100.milliseconds) {
         val node = workspace.produce("sum")
         val res = node.asSequence().single()
         assertEquals(328350, res.await())
     }
 
     @Test
-    fun testMetaPropagation() = runTest(timeout = 10.milliseconds) {
+    fun testMetaPropagation() = runTest(timeout = 100.milliseconds) {
         val node = workspace.produce("sum") { "testFlag" put true }
-        val res = node.data!!.await()
+        val res = node["sum"]!!.await()
     }
 
     @Test
@@ -186,7 +186,7 @@ internal class SimpleWorkspaceTest {
         val node = workspace.produce("filterOne") {
             "name" put "myData[12]"
         }
-        assertEquals(12, node.data!!.await())
+        assertEquals(12, node.asSequence().first().await())
     }
 
 }
\ No newline at end of file

From f840ffb473a698ccf8af08dd28ad22078642dcc9 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 27 Apr 2024 09:47:08 +0300
Subject: [PATCH 43/77] Move data tests to jvm to avoid random failures on
 native

---
 .../dataforge/descriptors/reflectiveDescriptors.kt         | 7 +------
 .../kotlin/space/kscience/dataforge/data/ActionsTest.kt    | 0
 .../dataforge/meta/descriptors/MetaDescriptorBuilder.kt    | 1 +
 3 files changed, 2 insertions(+), 6 deletions(-)
 rename dataforge-data/src/{commonTest => jvmTest}/kotlin/space/kscience/dataforge/data/ActionsTest.kt (100%)

diff --git a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt
index 9c36410a..b2953018 100644
--- a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt
+++ b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt
@@ -79,7 +79,7 @@ public fun MetaDescriptor.Companion.forClass(
             is DescriptorUrl -> loadDescriptorFromUrl(URL(it.url))
         }
     }
-    kClass.memberProperties.forEach { property->
+    kClass.memberProperties.forEach { property ->
 
         var flag = false
 
@@ -88,11 +88,6 @@ public fun MetaDescriptor.Companion.forClass(
             (property.returnType.classifier as? KClass<*>)?.let {
                 from(forClass(it))
             }
-//
-//            (property.getDelegate(Unit) as? MetaDelegate<*>)?.descriptor?.let {
-//                from(it)
-//                flag = true
-//            }
 
             property.annotations.forEach {
                 when (it) {
diff --git a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
similarity index 100%
rename from dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
rename to dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
index 2291e3d3..dfdbfeab 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
@@ -47,6 +47,7 @@ public class MetaDescriptorBuilder @PublishedApi internal constructor() {
     ): Unit {
         when (name.length) {
             0 -> error("Can't set descriptor to root")
+
             1 -> {
                 children[name.first().body] = descriptorBuilder
             }

From 2cd18854209cdf332e14581fa7c2cf7145dcf25c Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 27 Apr 2024 10:01:36 +0300
Subject: [PATCH 44/77] Update readme and changelog

---
 CHANGELOG.md                             | 24 +++++++++++++++++-------
 build.gradle.kts                         |  3 ++-
 dataforge-context/README.md              |  4 ++--
 dataforge-data/README.md                 |  4 ++--
 dataforge-io/README.md                   |  4 ++--
 dataforge-io/dataforge-io-yaml/README.md |  4 ++--
 dataforge-meta/README.md                 |  4 ++--
 dataforge-scripting/README.md            |  4 ++--
 dataforge-workspace/README.md            |  4 ++--
 9 files changed, 33 insertions(+), 22 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index a56e56e6..a94cb585 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,26 +3,36 @@
 ## Unreleased
 
 ### Added
-- Name index comparator
-- Specialized ByteArrayValue
 
 ### Changed
-- DataSink `branch` is replaced with `putAll` to avoid confusion with DataTree methods
-- Meta delegate now uses a specific class that has a descriptor
 
 ### Deprecated
 
 ### Removed
 
 ### Fixed
+
+### Security
+
+## 0.8.2 - 2024-04-27
+
+### Added
+
+- Name index comparator
+- Specialized ByteArrayValue
+
+### Changed
+
+- DataSink `branch` is replaced with `putAll` to avoid confusion with DataTree methods
+- Meta delegate now uses a specific class that has a descriptor
+
+### Fixed
+
 - `listOfScheme` and `listOfConvertable` delegates provide correct item order.
 - Scheme meta setter works with the proper sub-branch.
 - `NameToken.parse` improper handling of indices.
 - Proper data handling for the cache.
 
-
-### Security
-
 ## 0.8.0 - 2024-02-03
 
 ### Added
diff --git a/build.gradle.kts b/build.gradle.kts
index 78dc6d35..4db098be 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -4,11 +4,12 @@ import space.kscience.gradle.useSPCTeam
 
 plugins {
     id("space.kscience.gradle.project")
+    id("org.jetbrains.kotlinx.kover") version "0.7.6"
 }
 
 allprojects {
     group = "space.kscience"
-    version = "0.8.2-dev-1"
+    version = "0.8.2"
 }
 
 subprojects {
diff --git a/dataforge-context/README.md b/dataforge-context/README.md
index f0aff459..7970751c 100644
--- a/dataforge-context/README.md
+++ b/dataforge-context/README.md
@@ -6,7 +6,7 @@ Context and provider definitions
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-context:0.8.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-context:0.8.2`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-context:0.8.0")
+    implementation("space.kscience:dataforge-context:0.8.2")
 }
 ```
diff --git a/dataforge-data/README.md b/dataforge-data/README.md
index d77ed1b9..f08a43c2 100644
--- a/dataforge-data/README.md
+++ b/dataforge-data/README.md
@@ -6,7 +6,7 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-data:0.8.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-data:0.8.2`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-data:0.8.0")
+    implementation("space.kscience:dataforge-data:0.8.2")
 }
 ```
diff --git a/dataforge-io/README.md b/dataforge-io/README.md
index 85e49e5a..5168797b 100644
--- a/dataforge-io/README.md
+++ b/dataforge-io/README.md
@@ -6,7 +6,7 @@ IO module
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-io:0.8.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-io:0.8.2`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-io:0.8.0")
+    implementation("space.kscience:dataforge-io:0.8.2")
 }
 ```
diff --git a/dataforge-io/dataforge-io-yaml/README.md b/dataforge-io/dataforge-io-yaml/README.md
index 20f5b4f6..cc1ecd8c 100644
--- a/dataforge-io/dataforge-io-yaml/README.md
+++ b/dataforge-io/dataforge-io-yaml/README.md
@@ -6,7 +6,7 @@ YAML meta IO
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-io-yaml:0.8.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-io-yaml:0.8.2`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-io-yaml:0.8.0")
+    implementation("space.kscience:dataforge-io-yaml:0.8.2")
 }
 ```
diff --git a/dataforge-meta/README.md b/dataforge-meta/README.md
index bd11ebf1..a3209760 100644
--- a/dataforge-meta/README.md
+++ b/dataforge-meta/README.md
@@ -6,7 +6,7 @@ Meta definition and basic operations on meta
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-meta:0.8.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-meta:0.8.2`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-meta:0.8.0")
+    implementation("space.kscience:dataforge-meta:0.8.2")
 }
 ```
diff --git a/dataforge-scripting/README.md b/dataforge-scripting/README.md
index fbc5cb69..3167e1b4 100644
--- a/dataforge-scripting/README.md
+++ b/dataforge-scripting/README.md
@@ -6,7 +6,7 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-scripting:0.8.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-scripting:0.8.2`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-scripting:0.8.0")
+    implementation("space.kscience:dataforge-scripting:0.8.2")
 }
 ```
diff --git a/dataforge-workspace/README.md b/dataforge-workspace/README.md
index cea37368..f7461f30 100644
--- a/dataforge-workspace/README.md
+++ b/dataforge-workspace/README.md
@@ -6,7 +6,7 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-workspace:0.8.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-workspace:0.8.2`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-workspace:0.8.0")
+    implementation("space.kscience:dataforge-workspace:0.8.2")
 }
 ```

From 3de5691c84dab040e7e570567b8b1fa38228b744 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 25 May 2024 21:19:06 +0300
Subject: [PATCH 45/77] Add custom coroutine context during new Context
 creation

---
 .../space/kscience/dataforge/context/Context.kt     |  4 +++-
 .../kscience/dataforge/context/ContextBuilder.kt    | 13 +++++++++++--
 2 files changed, 14 insertions(+), 3 deletions(-)

diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/Context.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/Context.kt
index bb74d605..c614598c 100644
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/Context.kt
+++ b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/Context.kt
@@ -10,6 +10,7 @@ import space.kscience.dataforge.misc.ThreadSafe
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.provider.Provider
 import kotlin.coroutines.CoroutineContext
+import kotlin.coroutines.EmptyCoroutineContext
 
 /**
  * The local environment for anything being done in DataForge framework. Contexts are organized into tree structure with [Global] at the top.
@@ -26,6 +27,7 @@ public open class Context internal constructor(
     public val parent: Context?,
     plugins: Set<Plugin>, // set of unattached plugins
     meta: Meta,
+    coroutineContext: CoroutineContext = EmptyCoroutineContext,
 ) : Named, MetaRepr, Provider, CoroutineScope {
 
     /**
@@ -65,7 +67,7 @@ public open class Context internal constructor(
 
     override val coroutineContext: CoroutineContext by lazy {
         (parent ?: Global).coroutineContext.let { parenContext ->
-            parenContext + SupervisorJob(parenContext[Job])
+            parenContext + coroutineContext + SupervisorJob(parenContext[Job])
         }
     }
 
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/ContextBuilder.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/ContextBuilder.kt
index c0db4314..894c5f15 100644
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/ContextBuilder.kt
+++ b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/ContextBuilder.kt
@@ -13,6 +13,8 @@ import space.kscience.dataforge.names.plus
 import kotlin.collections.component1
 import kotlin.collections.component2
 import kotlin.collections.set
+import kotlin.coroutines.CoroutineContext
+import kotlin.coroutines.EmptyCoroutineContext
 
 /**
  * A convenience builder for context
@@ -59,8 +61,15 @@ public class ContextBuilder internal constructor(
         plugin(DeFactoPluginFactory(plugin))
     }
 
+    private var coroutineContext: CoroutineContext = EmptyCoroutineContext
+
+    public fun coroutineContext(coroutineContext: CoroutineContext) {
+        this.coroutineContext = coroutineContext
+    }
+
+
     public fun build(): Context {
-        val contextName = name ?: NameToken("@auto",hashCode().toUInt().toString(16)).asName()
+        val contextName = name ?: NameToken("@auto", hashCode().toUInt().toString(16)).asName()
         val plugins = HashMap<PluginTag, Plugin>()
 
         fun addPlugin(factory: PluginFactory<*>, meta: Meta) {
@@ -86,7 +95,7 @@ public class ContextBuilder internal constructor(
             addPlugin(factory, meta)
         }
 
-        return Context(contextName, parent, plugins.values.toSet(), meta.seal())
+        return Context(contextName, parent, plugins.values.toSet(), meta.seal(), coroutineContext)
     }
 }
 

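Editor's note: a hedged usage sketch of the new builder hook (assuming the usual `Context(name) { ... }` factory on top of `ContextBuilder`; the dispatcher choice is illustrative):

    import kotlinx.coroutines.Dispatchers
    import space.kscience.dataforge.context.Context

    val customContext = Context("custom") {
        // mixed into the context's CoroutineScope together with the parent context and a SupervisorJob
        coroutineContext(Dispatchers.Default)
    }
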
From 5a75b05acd8893371fefb60e8190e5e688c3a014 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 4 Jun 2024 17:44:44 +0300
Subject: [PATCH 46/77] 0.9-RC

---
 CHANGELOG.md                                  |   5 +
 build.gradle.kts                              |  10 +-
 .../dataforge/properties/metaAsFlow.kt        |   2 +-
 .../descriptors/reflectiveDescriptors.kt      | 129 +++++++++---------
 .../descriptors/TestAutoDescriptors.kt        |  53 ++++---
 .../kscience/dataforge/data/dataTransform.kt  |  18 +++
 .../kscience/dataforge/data/ActionsTest.kt    |   1 +
 .../space/kscience/dataforge/meta/Laminate.kt |   2 +
 .../space/kscience/dataforge/meta/Meta.kt     |   8 +-
 .../kscience/dataforge/meta/MetaConverter.kt  |   2 +-
 .../kscience/dataforge/meta/MetaDelegate.kt   |  12 +-
 .../meta/{MetaSpec.kt => MetaReader.kt}       |   8 +-
 .../space/kscience/dataforge/meta/MetaRef.kt  |  64 +++++++++
 .../kscience/dataforge/meta/MutableMeta.kt    |   3 +
 .../kscience/dataforge/meta/ObservableMeta.kt |   3 +
 .../dataforge/meta/ObservableMetaWrapper.kt   |   3 +
 .../space/kscience/dataforge/meta/Scheme.kt   |  11 +-
 .../kscience/dataforge/meta/SealedMeta.kt     |   3 +
 .../meta/descriptors/MetaDescriptorBuilder.kt |   2 +-
 .../space/kscience/dataforge/misc/cast.kt     |   3 -
 .../kscience/dataforge/meta/DynamicMeta.kt    |   3 +
 .../space/kscience/dataforge/misc/castJs.kt   |   5 -
 .../space/kscience/dataforge/misc/castJvm.kt  |   4 -
 .../kscience/dataforge/misc/castNative.kt     |   4 -
 .../space/kscience/dataforge/misc/castWasm.kt |   4 -
 .../kscience/dataforge/workspace/Task.kt      |  12 +-
 .../dataforge/workspace/WorkspaceBuilder.kt   |   4 +-
 gradle.properties                             |   7 +-
 28 files changed, 234 insertions(+), 151 deletions(-)
 rename dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/{MetaSpec.kt => MetaReader.kt} (59%)
 create mode 100644 dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
 delete mode 100644 dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/cast.kt
 delete mode 100644 dataforge-meta/src/jsMain/kotlin/space/kscience/dataforge/misc/castJs.kt
 delete mode 100644 dataforge-meta/src/jvmMain/kotlin/space/kscience/dataforge/misc/castJvm.kt
 delete mode 100644 dataforge-meta/src/nativeMain/kotlin/space/kscience/dataforge/misc/castNative.kt
 delete mode 100644 dataforge-meta/src/wasmJsMain/kotlin/space/kscience/dataforge/misc/castWasm.kt

diff --git a/CHANGELOG.md b/CHANGELOG.md
index a94cb585..d75dd1a2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,12 +3,17 @@
 ## Unreleased
 
 ### Added
+- Custom CoroutineContext during `Context` creation.
 
 ### Changed
+- Kotlin 2.0
+- `MetaSpec` renamed to `MetaReader`. MetaSpec is now reserved for builder-based generation of meta descriptors.
+- Add self-type for Meta. Remove unsafe cast method for meta instances.
 
 ### Deprecated
 
 ### Removed
+- Automatic descriptors for schemes. It is not possible to implement them without heavy reflection.
 
 ### Fixed
 
diff --git a/build.gradle.kts b/build.gradle.kts
index 4db098be..815a7077 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -3,21 +3,21 @@ import space.kscience.gradle.useApache2Licence
 import space.kscience.gradle.useSPCTeam
 
 plugins {
-    id("space.kscience.gradle.project")
-    id("org.jetbrains.kotlinx.kover") version "0.7.6"
+    alias(spclibs.plugins.kscience.project)
+    alias(spclibs.plugins.kotlinx.kover)
 }
 
 allprojects {
     group = "space.kscience"
-    version = "0.8.2"
+    version = "0.9.0-dev-1"
 }
 
 subprojects {
     apply(plugin = "maven-publish")
 
     tasks.withType<KotlinCompile> {
-        kotlinOptions {
-            freeCompilerArgs = freeCompilerArgs + "-Xcontext-receivers"
+        compilerOptions {
+            freeCompilerArgs.add("-Xcontext-receivers")
         }
     }
 }
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/metaAsFlow.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/metaAsFlow.kt
index 90fafc5e..da539fcb 100644
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/metaAsFlow.kt
+++ b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/metaAsFlow.kt
@@ -10,7 +10,7 @@ import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.misc.DFExperimental
 
 @DFExperimental
-public fun <T> ObservableMeta.asFlow(converter: MetaSpec<T>): Flow<T> = callbackFlow {
+public fun <T> ObservableMeta.asFlow(converter: MetaReader<T>): Flow<T> = callbackFlow {
     onChange(this){
         trySend(converter.read(this))
     }
diff --git a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt
index b2953018..590324d7 100644
--- a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt
+++ b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt
@@ -4,18 +4,12 @@ import kotlinx.serialization.ExperimentalSerializationApi
 import kotlinx.serialization.json.Json
 import kotlinx.serialization.json.decodeFromStream
 import org.slf4j.LoggerFactory
-import space.kscience.dataforge.meta.Scheme
-import space.kscience.dataforge.meta.SchemeSpec
-import space.kscience.dataforge.meta.ValueType
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
-import space.kscience.dataforge.meta.descriptors.node
 import space.kscience.dataforge.misc.DFExperimental
 import java.net.URL
-import kotlin.reflect.KClass
-import kotlin.reflect.full.isSubclassOf
-import kotlin.reflect.full.memberProperties
-
+import kotlin.reflect.KAnnotatedElement
+import kotlin.reflect.KProperty
 
 /**
  * Description text for meta property, node or whole object
@@ -59,18 +53,8 @@ private fun MetaDescriptorBuilder.loadDescriptorFromResource(resource: Descripto
 }
 
 @DFExperimental
-public fun MetaDescriptor.Companion.forClass(
-    kClass: KClass<out Any>,
-    mod: MetaDescriptorBuilder.() -> Unit = {},
-): MetaDescriptor = MetaDescriptor {
-    when {
-        kClass.isSubclassOf(Number::class) -> valueType(ValueType.NUMBER)
-        kClass == String::class -> ValueType.STRING
-        kClass == Boolean::class -> ValueType.BOOLEAN
-        kClass == DoubleArray::class -> ValueType.LIST
-    }
-
-    kClass.annotations.forEach {
+public fun MetaDescriptorBuilder.forAnnotatedElement(element: KAnnotatedElement) {
+    element.annotations.forEach {
         when (it) {
             is Description -> description = it.value
 
@@ -79,47 +63,70 @@ public fun MetaDescriptor.Companion.forClass(
             is DescriptorUrl -> loadDescriptorFromUrl(URL(it.url))
         }
     }
-    kClass.memberProperties.forEach { property ->
-
-        var flag = false
-
-        val descriptor = MetaDescriptor {
-            //use base type descriptor as a base
-            (property.returnType.classifier as? KClass<*>)?.let {
-                from(forClass(it))
-            }
-
-            property.annotations.forEach {
-                when (it) {
-                    is Description -> {
-                        description = it.value
-                        flag = true
-                    }
-
-                    is Multiple -> {
-                        multiple = true
-                        flag = true
-                    }
-
-                    is DescriptorResource -> {
-                        loadDescriptorFromResource(it)
-                        flag = true
-                    }
-
-                    is DescriptorUrl -> {
-                        loadDescriptorFromUrl(URL(it.url))
-                        flag = true
-                    }
-                }
-            }
-        }
-        if (flag) {
-            node(property.name, descriptor)
-        }
-    }
-    mod()
 }
 
 @DFExperimental
-public inline fun <reified T : Scheme> SchemeSpec<T>.autoDescriptor(noinline mod: MetaDescriptorBuilder.() -> Unit = {}): MetaDescriptor =
-    MetaDescriptor.forClass(T::class, mod)
\ No newline at end of file
+public fun MetaDescriptorBuilder.forProperty(property: KProperty<*>) {
+    property.annotations.forEach {
+        when (it) {
+            is Description -> description = it.value
+
+            is DescriptorResource -> loadDescriptorFromResource(it)
+
+            is DescriptorUrl -> loadDescriptorFromUrl(URL(it.url))
+        }
+    }
+}
+//
+//@DFExperimental
+//public fun <T : Scheme> MetaDescriptor.Companion.forScheme(
+//    spec: SchemeSpec<T>,
+//    mod: MetaDescriptorBuilder.() -> Unit = {},
+//): MetaDescriptor = MetaDescriptor {
+//    val scheme = spec.empty()
+//    val kClass: KClass<T> = scheme::class as KClass<T>
+//    when {
+//        kClass.isSubclassOf(Number::class) -> valueType(ValueType.NUMBER)
+//        kClass == String::class -> ValueType.STRING
+//        kClass == Boolean::class -> ValueType.BOOLEAN
+//        kClass == DoubleArray::class -> ValueType.LIST
+//        kClass == ByteArray::class -> ValueType.LIST
+//    }
+//
+//    forAnnotatedElement(kClass)
+//    kClass.memberProperties.forEach { property ->
+//        node(property.name) {
+//
+//            (property.getDelegate(scheme) as? MetaDelegate<*>)?.descriptor?.let {
+//                from(it)
+//            }
+//
+//            property.annotations.forEach {
+//                when (it) {
+//                    is Description -> {
+//                        description = it.value
+//                    }
+//
+//                    is Multiple -> {
+//                        multiple = true
+//                    }
+//
+//                    is DescriptorResource -> {
+//                        loadDescriptorFromResource(it)
+//                    }
+//
+//                    is DescriptorUrl -> {
+//                        loadDescriptorFromUrl(URL(it.url))
+//                    }
+//                }
+//            }
+//        }
+//
+//    }
+//    mod()
+//}
+//
+//@DFExperimental
+//public inline fun <reified T : Scheme> SchemeSpec<T>.autoDescriptor(
+//    noinline mod: MetaDescriptorBuilder.() -> Unit = {},
+//): MetaDescriptor = MetaDescriptor.forScheme(this, mod)
\ No newline at end of file
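
Editor's note: a short sketch of the new builder-side helpers that replace the removed class-walking logic (the `Demo` class is illustrative; the helpers are `@DFExperimental`):

    import space.kscience.dataforge.descriptors.Description
    import space.kscience.dataforge.descriptors.forAnnotatedElement
    import space.kscience.dataforge.meta.descriptors.MetaDescriptor
    import space.kscience.dataforge.misc.DFExperimental

    @Description("Demo configuration")
    class Demo

    @OptIn(DFExperimental::class)
    val demoDescriptor: MetaDescriptor = MetaDescriptor {
        // copies @Description / @DescriptorResource / @DescriptorUrl from the class into the builder
        forAnnotatedElement(Demo::class)
    }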
diff --git a/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt b/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt
index df849ce5..3b1fce3d 100644
--- a/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt
+++ b/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt
@@ -2,35 +2,28 @@
 
 package space.kscience.dataforge.descriptors
 
-import kotlinx.serialization.encodeToString
-import kotlinx.serialization.json.Json
-import org.junit.jupiter.api.Test
-import space.kscience.dataforge.meta.Scheme
-import space.kscience.dataforge.meta.SchemeSpec
-import space.kscience.dataforge.meta.descriptors.MetaDescriptor
-import space.kscience.dataforge.meta.int
-import space.kscience.dataforge.meta.string
 import space.kscience.dataforge.misc.DFExperimental
 
-private class TestScheme : Scheme() {
-
-    @Description("A")
-    val a by string()
-
-    @Description("B")
-    val b by int()
-
-    val c by int()
-
-    companion object : SchemeSpec<TestScheme>(::TestScheme) {
-        override val descriptor: MetaDescriptor = autoDescriptor()
-    }
-}
-
-class TestAutoDescriptors {
-    @Test
-    fun autoDescriptor() {
-        val autoDescriptor = MetaDescriptor.forClass(TestScheme::class)
-        println(Json { prettyPrint = true }.encodeToString(autoDescriptor))
-    }
-}
\ No newline at end of file
+//
+//class TestScheme : Scheme() {
+//
+//    @Description("A")
+//    val a by string()
+//
+//    @Description("B")
+//    val b by int()
+//
+//    val c by int()
+//
+//    companion object : SchemeSpec<TestScheme>(::TestScheme) {
+//        override val descriptor: MetaDescriptor = autoDescriptor()
+//    }
+//}
+//
+//class TestAutoDescriptors {
+//    @Test
+//    fun autoDescriptor() {
+//        val autoDescriptor = MetaDescriptor.forScheme(TestScheme)
+//        println(Json { prettyPrint = true }.encodeToString(autoDescriptor))
+//    }
+//}
\ No newline at end of file
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
index ab54eb3e..c0d92e9e 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
@@ -18,6 +18,24 @@ public data class NamedValueWithMeta<T>(val name: Name, val value: T, val meta:
 public suspend fun <T> NamedData<T>.awaitWithMeta(): NamedValueWithMeta<T> =
     NamedValueWithMeta(name, await(), meta)
 
+/**
+ * Lazily transform this data to another data. By convention [block] should not use external data (be pure).
+ * @param type explicit type of the resulting [Data]
+ * @param meta the [Meta] for the resulting data. By default it is the same as the meta of the input data.
+ * @param coroutineContext additional [CoroutineContext] elements used for data computation.
+ * @param block the transformation itself
+ */
+@UnsafeKType
+public fun <T, R> Data<T>.transform(
+    type: KType,
+    meta: Meta = this.meta,
+    coroutineContext: CoroutineContext = EmptyCoroutineContext,
+    block: suspend (T) -> R,
+): Data<R> = Data(type, meta, coroutineContext, listOf(this)) {
+    block(await())
+}
+
+
 
 /**
  * Lazily transform this data to another data. By convention [block] should not use external data (be pure).
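
Editor's note: a hedged usage sketch of the explicit-type overload added above (the `doubleIt` helper is illustrative):

    import kotlin.reflect.typeOf
    import space.kscience.dataforge.data.Data
    import space.kscience.dataforge.data.transform
    import space.kscience.dataforge.misc.UnsafeKType

    // lazily multiply the payload by two, keeping the input meta by default
    @OptIn(UnsafeKType::class)
    fun doubleIt(input: Data<Int>): Data<Int> =
        input.transform(typeOf<Int>()) { it * 2 }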
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index 477ca592..13660eee 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -43,6 +43,7 @@ internal class ActionsTest {
         repeat(10) {
             source.updateValue(it.toString(), it)
         }
+
         result.updates.take(10).onEach { println(it.name) }.collect()
 
         assertEquals(2, result["1"]?.await())
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Laminate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Laminate.kt
index 87284107..0ae84bb6 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Laminate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Laminate.kt
@@ -9,6 +9,8 @@ import space.kscience.dataforge.names.NameToken
  */
 public class Laminate internal constructor(public val layers: List<Meta>) : TypedMeta<Laminate> {
 
+    override val self: Laminate get() = this
+
     override val value: Value? = layers.firstNotNullOfOrNull { it.value }
 
     override val items: Map<NameToken, Laminate> by lazy {
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt
index 5cf53c75..fd953085 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt
@@ -3,7 +3,6 @@ package space.kscience.dataforge.meta
 import kotlinx.serialization.Serializable
 import kotlinx.serialization.json.Json
 import space.kscience.dataforge.misc.DfType
-import space.kscience.dataforge.misc.unsafeCast
 import space.kscience.dataforge.names.*
 import kotlin.jvm.JvmName
 
@@ -151,6 +150,8 @@ public interface TypedMeta<out M : TypedMeta<M>> : Meta {
 
     override val items: Map<NameToken, M>
 
+    public val self: M
+
     override fun get(name: Name): M? {
         tailrec fun M.find(name: Name): M? = if (name.isEmpty()) {
             this
@@ -164,11 +165,6 @@ public interface TypedMeta<out M : TypedMeta<M>> : Meta {
     override fun toMeta(): Meta = this
 }
 
-/**
- * Access self as a recursive type instance
- */
-public inline val <M : TypedMeta<M>> TypedMeta<M>.self: M get() = unsafeCast()
-
 //public typealias Meta = TypedMeta<*>
 
 public operator fun <M : TypedMeta<M>> TypedMeta<M>?.get(token: NameToken): M? = this?.items?.get(token)
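
Editor's note: to illustrate the new contract, a minimal sketch (a real implementation would also follow the usual Meta equals/hashCode conventions): instead of the removed unsafe cast, every `TypedMeta` implementation now exposes itself through `self`, as `Laminate` does above.

    import space.kscience.dataforge.meta.TypedMeta
    import space.kscience.dataforge.meta.Value
    import space.kscience.dataforge.names.NameToken

    // a leaf-only TypedMeta carrying a single value and no children
    class ValueMeta(override val value: Value?) : TypedMeta<ValueMeta> {
        override val self: ValueMeta get() = this
        override val items: Map<NameToken, ValueMeta> get() = emptyMap()
    }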
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
index 124f49f5..9baf0087 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
@@ -11,7 +11,7 @@ import space.kscience.dataforge.misc.DFExperimental
 /**
  * A converter of generic object to and from [Meta]
  */
-public interface MetaConverter<T>: MetaSpec<T> {
+public interface MetaConverter<T>: MetaReader<T> {
 
     /**
      * A descriptor for resulting meta
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
index 8c5a738f..1b506b44 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
@@ -25,16 +25,16 @@ public fun MetaProvider.node(
 }
 
 /**
- * Use [metaSpec] to read the Meta node
+ * Use [metaReader] to read the Meta node
  */
 public fun <T> MetaProvider.spec(
-    metaSpec: MetaSpec<T>,
+    metaReader: MetaReader<T>,
     key: Name? = null,
 ): MetaDelegate<T?> = object : MetaDelegate<T?> {
-    override val descriptor: MetaDescriptor? get() = metaSpec.descriptor
+    override val descriptor: MetaDescriptor? get() = metaReader.descriptor
 
     override fun getValue(thisRef: Any?, property: KProperty<*>): T? {
-        return get(key ?: property.name.asName())?.let { metaSpec.read(it) }
+        return get(key ?: property.name.asName())?.let { metaReader.read(it) }
     }
 }
 
@@ -50,14 +50,14 @@ public inline fun <reified T> MetaProvider.serializable(
 @Deprecated("Use convertable", ReplaceWith("convertable(converter, key)"))
 public fun <T> MetaProvider.node(
     key: Name? = null,
-    converter: MetaSpec<T>,
+    converter: MetaReader<T>,
 ): ReadOnlyProperty<Any?, T?> = spec(converter, key)
 
 /**
  * Use [converter] to convert a list of same name siblings meta to object
  */
 public fun <T> Meta.listOfSpec(
-    converter: MetaSpec<T>,
+    converter: MetaReader<T>,
     key: Name? = null,
 ): MetaDelegate<List<T>> = object : MetaDelegate<List<T>> {
     override fun getValue(thisRef: Any?, property: KProperty<*>): List<T> {
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaReader.kt
similarity index 59%
rename from dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
rename to dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaReader.kt
index 9918d504..a8514d63 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaReader.kt
@@ -2,7 +2,7 @@ package space.kscience.dataforge.meta
 
 import space.kscience.dataforge.meta.descriptors.Described
 
-public interface MetaSpec<out T> : Described {
+public interface MetaReader<out T> : Described {
 
     /**
      * Read the source meta into an object and return null if Meta could not be interpreted as a target type
@@ -10,12 +10,12 @@ public interface MetaSpec<out T> : Described {
     public fun readOrNull(source: Meta): T?
 
     /**
-     * Read generic read-only meta with this [MetaSpec] producing instance of the desired type.
+     * Read generic read-only meta with this [MetaReader], producing an instance of the desired type.
      * Throws an error if conversion could not be done.
      */
     public fun read(source: Meta): T = readOrNull(source) ?: error("Meta $source could not be interpreted by $this")
 }
 
 
-public fun <T : Any> MetaSpec<T>.readNullable(item: Meta?): T? = item?.let { read(it) }
-public fun <T> MetaSpec<T>.readValue(value: Value): T? = read(Meta(value))
+public fun <T : Any> MetaReader<T>.readNullable(item: Meta?): T? = item?.let { read(it) }
+public fun <T> MetaReader<T>.readValue(value: Value): T? = read(Meta(value))
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
new file mode 100644
index 00000000..413fe404
--- /dev/null
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
@@ -0,0 +1,64 @@
+package space.kscience.dataforge.meta
+
+import space.kscience.dataforge.meta.descriptors.Described
+import space.kscience.dataforge.meta.descriptors.MetaDescriptor
+import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
+import space.kscience.dataforge.misc.DFExperimental
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.asName
+import kotlin.properties.PropertyDelegateProvider
+import kotlin.properties.ReadOnlyProperty
+
+
+/**
+ * A reference to a read-only value of type [T] inside [MetaProvider]
+ */
+@DFExperimental
+public data class MetaRef<T>(
+    public val name: Name,
+    public val converter: MetaConverter<T>,
+    override val descriptor: MetaDescriptor? = converter.descriptor,
+) : Described
+
+@DFExperimental
+public operator fun <T> MetaProvider.get(ref: MetaRef<T>): T? = get(ref.name)?.let { ref.converter.readOrNull(it) }
+
+@DFExperimental
+public operator fun <T> MutableMetaProvider.set(ref: MetaRef<T>, value: T) {
+    set(ref.name, ref.converter.convert(value))
+}
+
+@DFExperimental
+public class MetaSpec(
+    private val configuration: MetaDescriptorBuilder.() -> Unit = {},
+) : Described {
+    private val refs: MutableList<MetaRef<*>> = mutableListOf()
+
+    private fun registerRef(ref: MetaRef<*>) {
+        refs.add(ref)
+    }
+
+    public fun <T> item(
+        converter: MetaConverter<T>,
+        descriptor: MetaDescriptor? = converter.descriptor,
+        key: Name? = null,
+    ): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<T>>> =
+        PropertyDelegateProvider { _, property ->
+            val ref = MetaRef(key ?: property.name.asName(), converter, descriptor)
+            registerRef(ref)
+            ReadOnlyProperty { _, _ ->
+                ref
+            }
+        }
+
+    override val descriptor: MetaDescriptor by lazy {
+        MetaDescriptor {
+            refs.forEach { ref ->
+                ref.descriptor?.let {
+                    node(ref.name, ref.descriptor)
+                }
+            }
+            configuration()
+        }
+    }
+}
\ No newline at end of file
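
The `MetaRef`/`MetaSpec` pair introduced above is experimental. A minimal usage sketch for `MetaRef` together with the new `get`/`set` operators, assuming the `MetaConverter.int` factory and the `MutableMeta {}` builder behave as in previous releases:

```kotlin
import space.kscience.dataforge.meta.*
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.asName

@OptIn(DFExperimental::class)
fun metaRefSketch() {
    // Illustrative only: a typed, named reference into a MetaProvider.
    val speed = MetaRef("speed".asName(), MetaConverter.int)

    val meta = MutableMeta { "speed" put 10 }

    val current: Int? = meta[speed]   // MetaProvider.get(ref) -> converter.readOrNull
    meta[speed] = (current ?: 0) + 5  // MutableMetaProvider.set(ref, value) -> converter.convert
}
```
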
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
index 69e221f5..7be16cc9 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
@@ -253,6 +253,9 @@ private class MutableMetaImpl(
     value: Value?,
     children: Map<NameToken, Meta> = emptyMap(),
 ) : AbstractObservableMeta(), ObservableMutableMeta {
+
+    override val self get() = this
+
     override var value = value
         @ThreadSafe set(value) {
             val oldValue = field
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
index b481962e..7cd28746 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
@@ -39,6 +39,9 @@ public interface ObservableMeta : Meta {
  * A [Meta] which is both observable and mutable
  */
 public interface ObservableMutableMeta : ObservableMeta, MutableMeta, MutableTypedMeta<ObservableMutableMeta> {
+
+    override val self: ObservableMutableMeta get() = this
+
     override fun getOrCreate(name: Name): ObservableMutableMeta
 
     override fun get(name: Name): ObservableMutableMeta? {
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt
index abd2deb6..7c62f692 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt
@@ -14,6 +14,9 @@ private class ObservableMetaWrapper(
     val nodeName: Name,
     val listeners: MutableSet<MetaListener>,
 ) : ObservableMutableMeta {
+
+    override val self get() = this
+
     override val items: Map<NameToken, ObservableMutableMeta>
         get() = root[nodeName]?.items?.keys?.associateWith {
             ObservableMetaWrapper(root, nodeName + it, listeners)
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
index 85b5087a..fe121c42 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
@@ -12,7 +12,7 @@ import kotlin.reflect.KProperty
 import kotlin.reflect.KProperty1
 
 /**
- * A base for delegate-based or descriptor-based scheme. [Scheme] has an empty constructor to simplify usage from [MetaSpec].
+ * A base for delegate-based or descriptor-based scheme. [Scheme] has an empty constructor to simplify usage from [MetaReader].
  *
  * @param prototype default values provided by this scheme
  */
@@ -80,6 +80,9 @@ public open class Scheme(
     override fun toString(): String = meta.toString()
 
     private inner class SchemeMeta(val pathName: Name) : ObservableMutableMeta {
+
+        override val self get() = this
+
         override var value: Value?
             get() = target[pathName]?.value
                 ?: prototype?.get(pathName)?.value
@@ -216,7 +219,7 @@ public fun <T : Scheme> Configurable.updateWith(
 
 
 /**
- * A delegate that uses a [MetaSpec] to wrap a child of this provider
+ * A delegate that uses a [MetaReader] to wrap a child of this provider
  */
 public fun <T : Scheme> MutableMeta.scheme(
     spec: SchemeSpec<T>,
@@ -239,7 +242,7 @@ public fun <T : Scheme> Scheme.scheme(
 ): ReadWriteProperty<Any?, T> = meta.scheme(spec, key)
 
 /**
- * A delegate that uses a [MetaSpec] to wrap a child of this provider.
+ * A delegate that uses a [MetaReader] to wrap a child of this provider.
  * Returns null if meta with given name does not exist.
  */
 public fun <T : Scheme> MutableMeta.schemeOrNull(
@@ -264,7 +267,7 @@ public fun <T : Scheme> Scheme.schemeOrNull(
 ): ReadWriteProperty<Any?, T?> = meta.schemeOrNull(spec, key)
 
 /**
- * A delegate that uses a [MetaSpec] to wrap a list of child providers.
+ * A delegate that uses a [MetaReader] to wrap a list of child providers.
  * If children are mutable, the changes in list elements are reflected on them.
  * The list is a snapshot of children state, so change in structure is not reflected on its composition.
  */
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/SealedMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/SealedMeta.kt
index b3fdf062..b218fad6 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/SealedMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/SealedMeta.kt
@@ -13,6 +13,9 @@ public class SealedMeta(
     override val value: Value?,
     override val items: Map<NameToken, SealedMeta>,
 ) : TypedMeta<SealedMeta> {
+
+    override val self: SealedMeta get() = this
+
     override fun toString(): String = Meta.toString(this)
     override fun equals(other: Any?): Boolean = Meta.equals(this, other as? Meta)
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
index dfdbfeab..2590273e 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
@@ -58,7 +58,7 @@ public class MetaDescriptorBuilder @PublishedApi internal constructor() {
         }
     }
 
-    internal fun node(
+    public fun node(
         name: Name,
         descriptorBuilder: MetaDescriptor,
     ): Unit {
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/cast.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/cast.kt
deleted file mode 100644
index e714d596..00000000
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/cast.kt
+++ /dev/null
@@ -1,3 +0,0 @@
-package space.kscience.dataforge.misc
-
-public expect inline fun <T> Any?.unsafeCast(): T
\ No newline at end of file
diff --git a/dataforge-meta/src/jsMain/kotlin/space/kscience/dataforge/meta/DynamicMeta.kt b/dataforge-meta/src/jsMain/kotlin/space/kscience/dataforge/meta/DynamicMeta.kt
index b38d5891..57d324c7 100644
--- a/dataforge-meta/src/jsMain/kotlin/space/kscience/dataforge/meta/DynamicMeta.kt
+++ b/dataforge-meta/src/jsMain/kotlin/space/kscience/dataforge/meta/DynamicMeta.kt
@@ -31,6 +31,9 @@ public fun Meta.toDynamic(): dynamic {
 }
 
 public class DynamicMeta(internal val obj: dynamic) : TypedMeta<DynamicMeta> {
+
+    override val self: DynamicMeta get() = this
+
     private fun keys(): Array<String> = js("Object").keys(obj) as Array<String>
 
     private fun isArray(obj: dynamic): Boolean =
diff --git a/dataforge-meta/src/jsMain/kotlin/space/kscience/dataforge/misc/castJs.kt b/dataforge-meta/src/jsMain/kotlin/space/kscience/dataforge/misc/castJs.kt
deleted file mode 100644
index b057bcbe..00000000
--- a/dataforge-meta/src/jsMain/kotlin/space/kscience/dataforge/misc/castJs.kt
+++ /dev/null
@@ -1,5 +0,0 @@
-package space.kscience.dataforge.misc
-import kotlin.js.unsafeCast as unsafeCastJs
-
-@Suppress("NOTHING_TO_INLINE")
-public actual inline fun <T> Any?.unsafeCast(): T = unsafeCastJs<T>()
\ No newline at end of file
diff --git a/dataforge-meta/src/jvmMain/kotlin/space/kscience/dataforge/misc/castJvm.kt b/dataforge-meta/src/jvmMain/kotlin/space/kscience/dataforge/misc/castJvm.kt
deleted file mode 100644
index 27d399fe..00000000
--- a/dataforge-meta/src/jvmMain/kotlin/space/kscience/dataforge/misc/castJvm.kt
+++ /dev/null
@@ -1,4 +0,0 @@
-package space.kscience.dataforge.misc
-
-@Suppress("UNCHECKED_CAST", "NOTHING_TO_INLINE")
-public actual inline fun <T> Any?.unsafeCast(): T = this as T
\ No newline at end of file
diff --git a/dataforge-meta/src/nativeMain/kotlin/space/kscience/dataforge/misc/castNative.kt b/dataforge-meta/src/nativeMain/kotlin/space/kscience/dataforge/misc/castNative.kt
deleted file mode 100644
index 4d9aa758..00000000
--- a/dataforge-meta/src/nativeMain/kotlin/space/kscience/dataforge/misc/castNative.kt
+++ /dev/null
@@ -1,4 +0,0 @@
-package space.kscience.dataforge.misc
-
-@Suppress("UNCHECKED_CAST")
-public actual inline fun <T> Any?.unsafeCast(): T = this as T
\ No newline at end of file
diff --git a/dataforge-meta/src/wasmJsMain/kotlin/space/kscience/dataforge/misc/castWasm.kt b/dataforge-meta/src/wasmJsMain/kotlin/space/kscience/dataforge/misc/castWasm.kt
deleted file mode 100644
index 27d399fe..00000000
--- a/dataforge-meta/src/wasmJsMain/kotlin/space/kscience/dataforge/misc/castWasm.kt
+++ /dev/null
@@ -1,4 +0,0 @@
-package space.kscience.dataforge.misc
-
-@Suppress("UNCHECKED_CAST", "NOTHING_TO_INLINE")
-public actual inline fun <T> Any?.unsafeCast(): T = this as T
\ No newline at end of file
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
index 372b119f..a1a754a4 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
@@ -5,8 +5,8 @@ import space.kscience.dataforge.data.DataSink
 import space.kscience.dataforge.data.GoalExecutionRestriction
 import space.kscience.dataforge.data.MutableDataTree
 import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.MetaReader
 import space.kscience.dataforge.meta.MetaRepr
-import space.kscience.dataforge.meta.MetaSpec
 import space.kscience.dataforge.meta.descriptors.Described
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.misc.DfType
@@ -44,10 +44,10 @@ public interface Task<T> : Described {
 }
 
 /**
- * A [Task] with [MetaSpec] for wrapping and unwrapping task configuration
+ * A [Task] with [MetaReader] for wrapping and unwrapping task configuration
  */
 public interface TaskWithSpec<T, C : Any> : Task<T> {
-    public val spec: MetaSpec<C>
+    public val spec: MetaReader<C>
     override val descriptor: MetaDescriptor? get() = spec.descriptor
 
     public suspend fun execute(workspace: Workspace, taskName: Name, configuration: C): TaskResult<T>
@@ -122,10 +122,10 @@ public inline fun <reified T : Any> Task(
 @Suppress("FunctionName")
 public fun <T : Any, C : MetaRepr> Task(
     resultType: KType,
-    specification: MetaSpec<C>,
+    specification: MetaReader<C>,
     builder: suspend TaskResultBuilder<T>.(C) -> Unit,
 ): TaskWithSpec<T, C> = object : TaskWithSpec<T, C> {
-    override val spec: MetaSpec<C> = specification
+    override val spec: MetaReader<C> = specification
 
     override suspend fun execute(
         workspace: Workspace,
@@ -143,6 +143,6 @@ public fun <T : Any, C : MetaRepr> Task(
 }
 
 public inline fun <reified T : Any, C : MetaRepr> Task(
-    specification: MetaSpec<C>,
+    specification: MetaReader<C>,
     noinline builder: suspend TaskResultBuilder<T>.(C) -> Unit,
 ): Task<T> = Task(typeOf<T>(), specification, builder)
\ No newline at end of file
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
index cf263a46..013c0171 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
@@ -71,10 +71,10 @@ public inline fun <reified T : Any> TaskContainer.task(
 }
 
 /**
- * Create a task based on [MetaSpec]
+ * Create a task based on [MetaReader]
  */
 public inline fun <reified T : Any, C : MetaRepr> TaskContainer.task(
-    specification: MetaSpec<C>,
+    specification: MetaReader<C>,
     noinline builder: suspend TaskResultBuilder<T>.(C) -> Unit,
 ): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> = PropertyDelegateProvider { _, property ->
     val taskName = Name.parse(property.name)
diff --git a/gradle.properties b/gradle.properties
index 3734d13e..ffc318d9 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -1,10 +1,9 @@
+kotlin.code.style=official
+
 org.gradle.parallel=true
 org.gradle.jvmargs=-Xmx4096m
 
-kotlin.code.style=official
 kotlin.mpp.stability.nowarn=true
-kotlin.incremental.js.ir=true
 kotlin.native.ignoreDisabledTargets=true
 
-toolsVersion=0.15.2-kotlin-1.9.21
-#kotlin.experimental.tryK2=true
\ No newline at end of file
+toolsVersion=0.15.4-kotlin-2.0.0
\ No newline at end of file
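
Since this patch only renames `MetaSpec` to `MetaReader` without changing the contract, a hand-written reader after the rename is a one-to-one translation. A minimal sketch, assuming the string-keyed `get` and `Meta.double` accessors behave as in earlier releases:

```kotlin
import space.kscience.dataforge.meta.*
import space.kscience.dataforge.meta.descriptors.MetaDescriptor

/** Illustrative reader: turns a node of the form `{ from: 0.0, to: 1.0 }` into a Kotlin range. */
object RangeReader : MetaReader<ClosedRange<Double>> {
    override val descriptor: MetaDescriptor? = null

    override fun readOrNull(source: Meta): ClosedRange<Double>? {
        val from = source["from"]?.double ?: return null
        val to = source["to"]?.double ?: return null
        return from..to
    }
}
```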

From 04238295e277430e5b22771e470e87f343bd2fd1 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 4 Jun 2024 17:55:25 +0300
Subject: [PATCH 47/77] Replace sequence with list builder in Name::parse to fix
 a problem in Wasm

---
 .../kotlin/space/kscience/dataforge/names/Name.kt         | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt
index b46a3507..7867330c 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt
@@ -58,7 +58,7 @@ public class Name(public val tokens: List<NameToken>) {
          */
         public fun parse(string: String): Name {
             if (string.isBlank()) return EMPTY
-            val tokens = sequence {
+            val tokens = buildList<NameToken> {
                 var bodyBuilder = StringBuilder()
                 var queryBuilder = StringBuilder()
                 var bracketCount = 0
@@ -91,7 +91,7 @@ public class Name(public val tokens: List<NameToken>) {
                         else -> when (it) {
                             '.' -> {
                                 val query = if (queryBuilder.isEmpty()) null else queryBuilder.toString()
-                                yield(NameToken(bodyBuilder.toString(), query))
+                                add(NameToken(bodyBuilder.toString(), query))
                                 bodyBuilder = StringBuilder()
                                 queryBuilder = StringBuilder()
                             }
@@ -106,9 +106,9 @@ public class Name(public val tokens: List<NameToken>) {
                     }
                 }
                 val query = if (queryBuilder.isEmpty()) null else queryBuilder.toString()
-                yield(NameToken(bodyBuilder.toString(), query))
+                add(NameToken(bodyBuilder.toString(), query))
             }
-            return Name(tokens.toList())
+            return Name(tokens)
         }
     }
 }
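
The `buildList` version is behaviour-preserving; only the intermediate collection changes. A quick check of the expected tokenization, assuming the usual `NameToken(body, index)` constructor and bracket-index syntax:

```kotlin
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.NameToken

fun main() {
    // Dots separate tokens; a bracketed suffix becomes the token index.
    val name = Name.parse("data.values[2].x")
    check(name.tokens == listOf(NameToken("data"), NameToken("values", "2"), NameToken("x")))
    check(name.tokens.size == 3)
}
```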

From c62dc2c698ad802e38542c2490f9996d4944ae9f Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 4 Jun 2024 18:00:10 +0300
Subject: [PATCH 48/77] 0.9 release

---
 CHANGELOG.md                                | 21 +++++--
 build.gradle.kts                            |  2 +-
 dataforge-context/README.md                 |  4 +-
 dataforge-context/api/dataforge-context.api | 23 ++-----
 dataforge-data/README.md                    |  4 +-
 dataforge-io/README.md                      |  4 +-
 dataforge-io/dataforge-io-yaml/README.md    |  4 +-
 dataforge-meta/README.md                    |  4 +-
 dataforge-meta/api/dataforge-meta.api       | 67 +++++++++++----------
 dataforge-scripting/README.md               |  4 +-
 dataforge-workspace/README.md               |  4 +-
 11 files changed, 72 insertions(+), 69 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index d75dd1a2..1f59fbae 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,22 +3,33 @@
 ## Unreleased
 
 ### Added
-- Custom CoroutineContext during `Context` creation.
 
 ### Changed
-- Kotlin 2.0
-- `MetaSpec` renamed to `MetaReader`. MetaSpec is now reserved for builder-based generation of meta descriptors.
-- Add self-type for Meta. Remove unsafe cast method for meta instances.
 
 ### Deprecated
 
 ### Removed
-- Automatic descriptors for schema. It is not possible to implement them without heavy reflection.
 
 ### Fixed
 
 ### Security
 
+## 0.9.0 - 2024-06-04
+
+### Added
+
+- Custom CoroutineContext during `Context` creation.
+
+### Changed
+
+- Kotlin 2.0
+- `MetaSpec` renamed to `MetaReader`. MetaSpec is now reserved for builder-based generation of meta descriptors.
+- Add self-type for Meta. Remove unsafe cast method for meta instances.
+
+### Removed
+
+- Automatic descriptors for schema. It is not possible to implement them without heavy reflection.
+
 ## 0.8.2 - 2024-04-27
 
 ### Added
diff --git a/build.gradle.kts b/build.gradle.kts
index 815a7077..72f61abc 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -9,7 +9,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.9.0-dev-1"
+    version = "0.9.0"
 }
 
 subprojects {
diff --git a/dataforge-context/README.md b/dataforge-context/README.md
index 7970751c..2cd53fd1 100644
--- a/dataforge-context/README.md
+++ b/dataforge-context/README.md
@@ -6,7 +6,7 @@ Context and provider definitions
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-context:0.8.2`.
+The Maven coordinates of this project are `space.kscience:dataforge-context:0.9.0-dev-1`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-context:0.8.2")
+    implementation("space.kscience:dataforge-context:0.9.0-dev-1")
 }
 ```
diff --git a/dataforge-context/api/dataforge-context.api b/dataforge-context/api/dataforge-context.api
index eb94f89d..67cd0cba 100644
--- a/dataforge-context/api/dataforge-context.api
+++ b/dataforge-context/api/dataforge-context.api
@@ -57,6 +57,7 @@ public abstract interface class space/kscience/dataforge/context/ContextAware {
 
 public final class space/kscience/dataforge/context/ContextBuilder {
 	public final fun build ()Lspace/kscience/dataforge/context/Context;
+	public final fun coroutineContext (Lkotlin/coroutines/CoroutineContext;)V
 	public final fun getName ()Lspace/kscience/dataforge/names/Name;
 	public final fun plugin (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;)V
 	public final fun plugin (Lspace/kscience/dataforge/context/Plugin;)V
@@ -69,9 +70,6 @@ public final class space/kscience/dataforge/context/ContextBuilder {
 	public final fun properties (Lkotlin/jvm/functions/Function1;)V
 }
 
-public final class space/kscience/dataforge/context/ContextBuilderKt {
-}
-
 public final class space/kscience/dataforge/context/DefaultLogManager : space/kscience/dataforge/context/AbstractPlugin, space/kscience/dataforge/context/LogManager {
 	public static final field Companion Lspace/kscience/dataforge/context/DefaultLogManager$Companion;
 	public fun <init> ()V
@@ -212,14 +210,14 @@ public final class space/kscience/dataforge/context/PluginTag : space/kscience/d
 	public fun toString ()Ljava/lang/String;
 }
 
-public final class space/kscience/dataforge/context/PluginTag$$serializer : kotlinx/serialization/internal/GeneratedSerializer {
+public synthetic class space/kscience/dataforge/context/PluginTag$$serializer : kotlinx/serialization/internal/GeneratedSerializer {
 	public static final field INSTANCE Lspace/kscience/dataforge/context/PluginTag$$serializer;
-	public fun childSerializers ()[Lkotlinx/serialization/KSerializer;
+	public final fun childSerializers ()[Lkotlinx/serialization/KSerializer;
 	public synthetic fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Ljava/lang/Object;
-	public fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Lspace/kscience/dataforge/context/PluginTag;
-	public fun getDescriptor ()Lkotlinx/serialization/descriptors/SerialDescriptor;
+	public final fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Lspace/kscience/dataforge/context/PluginTag;
+	public final fun getDescriptor ()Lkotlinx/serialization/descriptors/SerialDescriptor;
 	public synthetic fun serialize (Lkotlinx/serialization/encoding/Encoder;Ljava/lang/Object;)V
-	public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/context/PluginTag;)V
+	public final fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/context/PluginTag;)V
 	public fun typeParametersSerializers ()[Lkotlinx/serialization/KSerializer;
 }
 
@@ -264,15 +262,6 @@ public abstract interface annotation class space/kscience/dataforge/descriptors/
 public abstract interface annotation class space/kscience/dataforge/descriptors/Multiple : java/lang/annotation/Annotation {
 }
 
-public final class space/kscience/dataforge/descriptors/ReflectiveDescriptorsKt {
-}
-
-public final class space/kscience/dataforge/properties/MetaAsFlowKt {
-}
-
-public final class space/kscience/dataforge/provider/DfTypeKt {
-}
-
 public final class space/kscience/dataforge/provider/Path : java/lang/Iterable, kotlin/jvm/internal/markers/KMappedMarker {
 	public static final field Companion Lspace/kscience/dataforge/provider/Path$Companion;
 	public static final field PATH_SEGMENT_SEPARATOR Ljava/lang/String;
diff --git a/dataforge-data/README.md b/dataforge-data/README.md
index f08a43c2..35aaa4e6 100644
--- a/dataforge-data/README.md
+++ b/dataforge-data/README.md
@@ -6,7 +6,7 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-data:0.8.2`.
+The Maven coordinates of this project are `space.kscience:dataforge-data:0.9.0-dev-1`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-data:0.8.2")
+    implementation("space.kscience:dataforge-data:0.9.0-dev-1")
 }
 ```
diff --git a/dataforge-io/README.md b/dataforge-io/README.md
index 5168797b..5a9979a9 100644
--- a/dataforge-io/README.md
+++ b/dataforge-io/README.md
@@ -6,7 +6,7 @@ IO module
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-io:0.8.2`.
+The Maven coordinates of this project are `space.kscience:dataforge-io:0.9.0-dev-1`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-io:0.8.2")
+    implementation("space.kscience:dataforge-io:0.9.0-dev-1")
 }
 ```
diff --git a/dataforge-io/dataforge-io-yaml/README.md b/dataforge-io/dataforge-io-yaml/README.md
index cc1ecd8c..f70a1490 100644
--- a/dataforge-io/dataforge-io-yaml/README.md
+++ b/dataforge-io/dataforge-io-yaml/README.md
@@ -6,7 +6,7 @@ YAML meta IO
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-io-yaml:0.8.2`.
+The Maven coordinates of this project are `space.kscience:dataforge-io-yaml:0.9.0-dev-1`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-io-yaml:0.8.2")
+    implementation("space.kscience:dataforge-io-yaml:0.9.0-dev-1")
 }
 ```
diff --git a/dataforge-meta/README.md b/dataforge-meta/README.md
index a3209760..e4fcacb1 100644
--- a/dataforge-meta/README.md
+++ b/dataforge-meta/README.md
@@ -6,7 +6,7 @@ Meta definition and basic operations on meta
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-meta:0.8.2`.
+The Maven coordinates of this project are `space.kscience:dataforge-meta:0.9.0-dev-1`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-meta:0.8.2")
+    implementation("space.kscience:dataforge-meta:0.9.0-dev-1")
 }
 ```
diff --git a/dataforge-meta/api/dataforge-meta.api b/dataforge-meta/api/dataforge-meta.api
index 620e64d4..0bb4a908 100644
--- a/dataforge-meta/api/dataforge-meta.api
+++ b/dataforge-meta/api/dataforge-meta.api
@@ -103,6 +103,8 @@ public final class space/kscience/dataforge/meta/Laminate : space/kscience/dataf
 	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/TypedMeta;
 	public fun getItems ()Ljava/util/Map;
 	public final fun getLayers ()Ljava/util/List;
+	public fun getSelf ()Lspace/kscience/dataforge/meta/Laminate;
+	public synthetic fun getSelf ()Lspace/kscience/dataforge/meta/TypedMeta;
 	public fun getValue ()Lspace/kscience/dataforge/meta/Value;
 	public fun hashCode ()I
 	public final fun merge ()Lspace/kscience/dataforge/meta/SealedMeta;
@@ -198,7 +200,7 @@ public final class space/kscience/dataforge/meta/MetaBuilder : space/kscience/da
 public abstract interface annotation class space/kscience/dataforge/meta/MetaBuilderMarker : java/lang/annotation/Annotation {
 }
 
-public abstract interface class space/kscience/dataforge/meta/MetaConverter : space/kscience/dataforge/meta/MetaSpec {
+public abstract interface class space/kscience/dataforge/meta/MetaConverter : space/kscience/dataforge/meta/MetaReader {
 	public static final field Companion Lspace/kscience/dataforge/meta/MetaConverter$Companion;
 	public abstract fun convert (Ljava/lang/Object;)Lspace/kscience/dataforge/meta/Meta;
 	public fun getDescriptor ()Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
@@ -246,15 +248,15 @@ public final class space/kscience/dataforge/meta/MetaDelegateKt {
 	public static final fun int (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MetaProvider;ILspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
-	public static final fun listOfSpec (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
-	public static synthetic fun listOfSpec$default (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun listOfSpec (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun listOfSpec$default (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static final fun long (Lspace/kscience/dataforge/meta/MetaProvider;JLspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static final fun long (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MetaProvider;JLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
-	public static final fun node (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaSpec;)Lkotlin/properties/ReadOnlyProperty;
+	public static final fun node (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaReader;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun node (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lspace/kscience/dataforge/meta/MetaDelegate;
-	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaSpec;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
+	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaReader;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static final fun number (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static final fun number (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
@@ -262,8 +264,8 @@ public final class space/kscience/dataforge/meta/MetaDelegateKt {
 	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
-	public static final fun spec (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
-	public static synthetic fun spec$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun spec (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun spec$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/String;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lspace/kscience/dataforge/meta/MetaDelegate;
@@ -294,7 +296,6 @@ public final class space/kscience/dataforge/meta/MetaKt {
 	public static final fun getLong (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Long;
 	public static final synthetic fun getNonNullable (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/names/NameToken;)Lspace/kscience/dataforge/meta/Meta;
 	public static final fun getNumber (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Number;
-	public static final fun getSelf (Lspace/kscience/dataforge/meta/TypedMeta;)Lspace/kscience/dataforge/meta/TypedMeta;
 	public static final fun getShort (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Short;
 	public static final fun getString (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/String;
 	public static final fun getStringList (Lspace/kscience/dataforge/meta/Meta;)Ljava/util/List;
@@ -311,6 +312,16 @@ public abstract interface class space/kscience/dataforge/meta/MetaProvider : spa
 	public fun getValue (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Value;
 }
 
+public abstract interface class space/kscience/dataforge/meta/MetaReader : space/kscience/dataforge/meta/descriptors/Described {
+	public fun read (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+	public abstract fun readOrNull (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+}
+
+public final class space/kscience/dataforge/meta/MetaReaderKt {
+	public static final fun readNullable (Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+	public static final fun readValue (Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/meta/Value;)Ljava/lang/Object;
+}
+
 public abstract interface class space/kscience/dataforge/meta/MetaRepr {
 	public abstract fun toMeta ()Lspace/kscience/dataforge/meta/Meta;
 }
@@ -324,16 +335,6 @@ public final class space/kscience/dataforge/meta/MetaSerializer : kotlinx/serial
 	public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/meta/Meta;)V
 }
 
-public abstract interface class space/kscience/dataforge/meta/MetaSpec : space/kscience/dataforge/meta/descriptors/Described {
-	public fun read (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
-	public abstract fun readOrNull (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
-}
-
-public final class space/kscience/dataforge/meta/MetaSpecKt {
-	public static final fun readNullable (Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
-	public static final fun readValue (Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/meta/Value;)Ljava/lang/Object;
-}
-
 public final class space/kscience/dataforge/meta/MetaTransformation {
 	public static final field Companion Lspace/kscience/dataforge/meta/MetaTransformation$Companion;
 	public static final fun apply-impl (Ljava/util/Collection;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/Meta;
@@ -551,6 +552,8 @@ public abstract interface class space/kscience/dataforge/meta/ObservableMutableM
 	public fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
 	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/TypedMeta;
 	public abstract fun getOrCreate (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
+	public fun getSelf ()Lspace/kscience/dataforge/meta/ObservableMutableMeta;
+	public synthetic fun getSelf ()Lspace/kscience/dataforge/meta/TypedMeta;
 }
 
 public final class space/kscience/dataforge/meta/RegexItemTransformationRule : space/kscience/dataforge/meta/TransformationRule {
@@ -624,19 +627,21 @@ public final class space/kscience/dataforge/meta/SealedMeta : space/kscience/dat
 	public fun <init> (Lspace/kscience/dataforge/meta/Value;Ljava/util/Map;)V
 	public fun equals (Ljava/lang/Object;)Z
 	public fun getItems ()Ljava/util/Map;
+	public fun getSelf ()Lspace/kscience/dataforge/meta/SealedMeta;
+	public synthetic fun getSelf ()Lspace/kscience/dataforge/meta/TypedMeta;
 	public fun getValue ()Lspace/kscience/dataforge/meta/Value;
 	public fun hashCode ()I
 	public fun toString ()Ljava/lang/String;
 }
 
-public final class space/kscience/dataforge/meta/SealedMeta$$serializer : kotlinx/serialization/internal/GeneratedSerializer {
+public synthetic class space/kscience/dataforge/meta/SealedMeta$$serializer : kotlinx/serialization/internal/GeneratedSerializer {
 	public static final field INSTANCE Lspace/kscience/dataforge/meta/SealedMeta$$serializer;
-	public fun childSerializers ()[Lkotlinx/serialization/KSerializer;
+	public final fun childSerializers ()[Lkotlinx/serialization/KSerializer;
 	public synthetic fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Ljava/lang/Object;
-	public fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Lspace/kscience/dataforge/meta/SealedMeta;
-	public fun getDescriptor ()Lkotlinx/serialization/descriptors/SerialDescriptor;
+	public final fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Lspace/kscience/dataforge/meta/SealedMeta;
+	public final fun getDescriptor ()Lkotlinx/serialization/descriptors/SerialDescriptor;
 	public synthetic fun serialize (Lkotlinx/serialization/encoding/Encoder;Ljava/lang/Object;)V
-	public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/meta/SealedMeta;)V
+	public final fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/meta/SealedMeta;)V
 	public fun typeParametersSerializers ()[Lkotlinx/serialization/KSerializer;
 }
 
@@ -710,6 +715,7 @@ public abstract interface class space/kscience/dataforge/meta/TypedMeta : space/
 	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
 	public fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/TypedMeta;
 	public abstract fun getItems ()Ljava/util/Map;
+	public abstract fun getSelf ()Lspace/kscience/dataforge/meta/TypedMeta;
 	public fun toMeta ()Lspace/kscience/dataforge/meta/Meta;
 }
 
@@ -844,14 +850,14 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptor {
 	public fun toString ()Ljava/lang/String;
 }
 
-public final class space/kscience/dataforge/meta/descriptors/MetaDescriptor$$serializer : kotlinx/serialization/internal/GeneratedSerializer {
+public synthetic class space/kscience/dataforge/meta/descriptors/MetaDescriptor$$serializer : kotlinx/serialization/internal/GeneratedSerializer {
 	public static final field INSTANCE Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor$$serializer;
-	public fun childSerializers ()[Lkotlinx/serialization/KSerializer;
+	public final fun childSerializers ()[Lkotlinx/serialization/KSerializer;
 	public synthetic fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Ljava/lang/Object;
-	public fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
-	public fun getDescriptor ()Lkotlinx/serialization/descriptors/SerialDescriptor;
+	public final fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
+	public final fun getDescriptor ()Lkotlinx/serialization/descriptors/SerialDescriptor;
 	public synthetic fun serialize (Lkotlinx/serialization/encoding/Encoder;Ljava/lang/Object;)V
-	public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)V
+	public final fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)V
 	public fun typeParametersSerializers ()[Lkotlinx/serialization/KSerializer;
 }
 
@@ -876,6 +882,7 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorBuild
 	public final fun getMultiple ()Z
 	public final fun getValueRestriction ()Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
 	public final fun getValueTypes ()Ljava/util/List;
+	public final fun node (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)V
 	public final fun setAllowedValues (Ljava/util/List;)V
 	public final fun setAttributes (Lspace/kscience/dataforge/meta/MutableMeta;)V
 	public final fun setChildren (Ljava/util/Map;)V
@@ -927,10 +934,6 @@ public final class space/kscience/dataforge/meta/descriptors/ValueRestriction$Co
 	public final fun serializer ()Lkotlinx/serialization/KSerializer;
 }
 
-public final class space/kscience/dataforge/misc/CastJvmKt {
-	public static final fun unsafeCast (Ljava/lang/Object;)Ljava/lang/Object;
-}
-
 public abstract interface annotation class space/kscience/dataforge/misc/DFBuilder : java/lang/annotation/Annotation {
 }
 
diff --git a/dataforge-scripting/README.md b/dataforge-scripting/README.md
index 3167e1b4..1f650bea 100644
--- a/dataforge-scripting/README.md
+++ b/dataforge-scripting/README.md
@@ -6,7 +6,7 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-scripting:0.8.2`.
+The Maven coordinates of this project are `space.kscience:dataforge-scripting:0.9.0-dev-1`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-scripting:0.8.2")
+    implementation("space.kscience:dataforge-scripting:0.9.0-dev-1")
 }
 ```
diff --git a/dataforge-workspace/README.md b/dataforge-workspace/README.md
index f7461f30..87b38c6e 100644
--- a/dataforge-workspace/README.md
+++ b/dataforge-workspace/README.md
@@ -6,7 +6,7 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-workspace:0.8.2`.
+The Maven coordinates of this project are `space.kscience:dataforge-workspace:0.9.0-dev-1`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-workspace:0.8.2")
+    implementation("space.kscience:dataforge-workspace:0.9.0-dev-1")
 }
 ```

From bb9afb329fbb9dcea3859a07f3a4718675d7c454 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Wed, 5 Jun 2024 10:53:35 +0300
Subject: [PATCH 49/77] MetaProvider `spec` is replaced by `readable`.
 `listOfSpec` is replaced by `listOfReadable`

---
 CHANGELOG.md                                  |  1 +
 build.gradle.kts                              |  2 +-
 .../kscience/dataforge/meta/MetaDelegate.kt   | 68 +++++++++++++++----
 .../dataforge/meta/MutableMetaDelegate.kt     | 31 +++++++--
 4 files changed, 84 insertions(+), 18 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1f59fbae..788a7374 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,7 @@
 ### Changed
 
 ### Deprecated
+- MetaProvider `spec` is replaced by `readable`. `listOfSpec` is replaced by `listOfReadable`
 
 ### Removed
 
diff --git a/build.gradle.kts b/build.gradle.kts
index 72f61abc..9f9632de 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -9,7 +9,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.9.0"
+    version = "0.9.1-dev-1"
 }
 
 subprojects {
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
index 1b506b44..2fa0f528 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
@@ -24,20 +24,45 @@ public fun MetaProvider.node(
     }
 }
 
+
 /**
- * Use [metaReader] to read the Meta node
+ * Use [reader] to read the Meta node
  */
-public fun <T> MetaProvider.spec(
-    metaReader: MetaReader<T>,
+public fun <T> MetaProvider.readable(
+    reader: MetaReader<T>,
     key: Name? = null,
 ): MetaDelegate<T?> = object : MetaDelegate<T?> {
-    override val descriptor: MetaDescriptor? get() = metaReader.descriptor
+    override val descriptor: MetaDescriptor? get() = reader.descriptor
 
     override fun getValue(thisRef: Any?, property: KProperty<*>): T? {
-        return get(key ?: property.name.asName())?.let { metaReader.read(it) }
+        return get(key ?: property.name.asName())?.let { reader.read(it) }
     }
 }
 
+/**
+ * Use [reader] to read the Meta node or return [default] if the node does not exist
+ */
+public fun <T> MetaProvider.readable(
+    reader: MetaReader<T>,
+    default: T,
+    key: Name? = null,
+): MetaDelegate<T> = object : MetaDelegate<T> {
+    override val descriptor: MetaDescriptor? get() = reader.descriptor
+
+    override fun getValue(thisRef: Any?, property: KProperty<*>): T {
+        return get(key ?: property.name.asName())?.let { reader.read(it) } ?: default
+    }
+}
+
+/**
+ * Use [reader] to read the Meta node
+ */
+@Deprecated("Replace with readable", ReplaceWith("readable(reader, key)"))
+public fun <T> MetaProvider.spec(
+    reader: MetaReader<T>,
+    key: Name? = null,
+): MetaDelegate<T?> = readable(reader, key)
+
 /**
  * Use object serializer to transform it to Meta and back
  */
@@ -45,34 +70,51 @@ public fun <T> MetaProvider.spec(
 public inline fun <reified T> MetaProvider.serializable(
     key: Name? = null,
     descriptor: MetaDescriptor? = null,
-): MetaDelegate<T?> = spec(MetaConverter.serializable(descriptor), key)
+): MetaDelegate<T?> = readable(MetaConverter.serializable(descriptor), key)
+
+@DFExperimental
+public inline fun <reified T> MetaProvider.serializable(
+    key: Name? = null,
+    default: T,
+    descriptor: MetaDescriptor? = null,
+): MetaDelegate<T> = readable(MetaConverter.serializable(descriptor), default, key)
 
 @Deprecated("Use convertable", ReplaceWith("convertable(converter, key)"))
 public fun <T> MetaProvider.node(
     key: Name? = null,
     converter: MetaReader<T>,
-): ReadOnlyProperty<Any?, T?> = spec(converter, key)
+): ReadOnlyProperty<Any?, T?> = readable(converter, key)
 
 /**
- * Use [converter] to convert a list of same name siblings meta to object
+ * Use [reader] to convert a list of same name siblings meta to object
  */
-public fun <T> Meta.listOfSpec(
-    converter: MetaReader<T>,
+public fun <T> Meta.listOfReadable(
+    reader: MetaReader<T>,
     key: Name? = null,
 ): MetaDelegate<List<T>> = object : MetaDelegate<List<T>> {
     override fun getValue(thisRef: Any?, property: KProperty<*>): List<T> {
         val name = key ?: property.name.asName()
-        return getIndexed(name).values.map { converter.read(it) }
+        return getIndexed(name).values.map { reader.read(it) }
     }
 
-    override val descriptor: MetaDescriptor? = converter.descriptor?.copy(multiple = true)
+    override val descriptor: MetaDescriptor? = reader.descriptor?.copy(multiple = true)
 }
 
+
+/**
+ * Use [converter] to convert a list of same name siblings meta to object
+ */
+@Deprecated("Replace with listOfReadable", ReplaceWith("listOfReadable(converter, key)"))
+public fun <T> Meta.listOfSpec(
+    converter: MetaReader<T>,
+    key: Name? = null,
+): MetaDelegate<List<T>> = listOfReadable(converter, key)
+
 @DFExperimental
 public inline fun <reified T> Meta.listOfSerializable(
     key: Name? = null,
     descriptor: MetaDescriptor? = null,
-): MetaDelegate<List<T>> = listOfSpec(MetaConverter.serializable(descriptor), key)
+): MetaDelegate<List<T>> = listOfReadable(MetaConverter.serializable(descriptor), key)
 
 /**
  * A property delegate that uses custom key
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
index 37140c6f..28a550d8 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
@@ -54,9 +54,25 @@ public fun <T> MutableMetaProvider.convertable(
     }
 }
 
-@Deprecated("Use convertable", ReplaceWith("convertable(converter, key)"))
-public fun <T> MutableMetaProvider.node(key: Name? = null, converter: MetaConverter<T>): MutableMetaDelegate<T?> =
-    convertable(converter, key)
+public fun <T> MutableMetaProvider.convertable(
+    converter: MetaConverter<T>,
+    default: T,
+    key: Name? = null,
+): MutableMetaDelegate<T> = object : MutableMetaDelegate<T> {
+
+    override val descriptor: MetaDescriptor? get() = converter.descriptor
+
+
+    override fun getValue(thisRef: Any?, property: KProperty<*>): T {
+        val name = key ?: property.name.asName()
+        return get(name)?.let { converter.read(it) } ?: default
+    }
+
+    override fun setValue(thisRef: Any?, property: KProperty<*>, value: T) {
+        val name = key ?: property.name.asName()
+        set(name, value?.let { converter.convert(it) })
+    }
+}
 
 /**
  * Use object serializer to transform it to Meta and back.
@@ -66,7 +82,14 @@ public fun <T> MutableMetaProvider.node(key: Name? = null, converter: MetaConver
 public inline fun <reified T> MutableMetaProvider.serializable(
     descriptor: MetaDescriptor? = null,
     key: Name? = null,
-): MutableMetaDelegate<T?> = convertable(MetaConverter.serializable(descriptor), key)
+): MutableMetaDelegate<T?> = convertable<T>(MetaConverter.serializable(descriptor), key)
+
+@DFExperimental
+public inline fun <reified T> MutableMetaProvider.serializable(
+    descriptor: MetaDescriptor? = null,
+    default: T,
+    key: Name? = null,
+): MutableMetaDelegate<T> = convertable(MetaConverter.serializable(descriptor), default, key)
 
 /**
  * Use [converter] to convert a list of same name siblings meta to object and back.
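
In practice the rename only changes the delegate names, while the new overloads add defaults. A configuration sketch using the signatures above, assuming the `MetaConverter.int` factory from earlier releases:

```kotlin
import space.kscience.dataforge.meta.*
import space.kscience.dataforge.names.asName

// Illustrative only: class and key names are hypothetical.
class MotorConfiguration(meta: MutableMeta) {
    // Nullable when the node is absent (former `spec` delegate).
    val speed: Int? by meta.readable(MetaConverter.int)

    // Falls back to the provided default when the node is absent.
    val retries: Int by meta.readable(MetaConverter.int, default = 3)

    // Read-write delegate with a default and a custom key.
    var timeout: Int by meta.convertable(MetaConverter.int, default = 1000, key = "timeoutMillis".asName())
}
```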

From f79b7faeafeafab51a5c6432be0255d5c3e14251 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Wed, 5 Jun 2024 12:15:27 +0300
Subject: [PATCH 50/77] Simplify inheritance logic in `MutableTypedMeta`

---
 CHANGELOG.md                                     |  1 +
 .../space/kscience/dataforge/meta/MutableMeta.kt | 14 ++++++++++++--
 .../kscience/dataforge/meta/ObservableMeta.kt    | 16 ----------------
 3 files changed, 13 insertions(+), 18 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 788a7374..43e6bf4e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,7 @@
 ### Added
 
 ### Changed
+- Simplify inheritance logic in `MutableTypedMeta`
 
 ### Deprecated
 - MetaProvider `spec` is replaced by `readable`. `listOfSpec` is replaced by `listOfReadable`
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
index 7be16cc9..274671bf 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
@@ -159,7 +159,17 @@ public interface MutableTypedMeta<M : MutableTypedMeta<M>> : TypedMeta<M>, Mutab
      */
     @DFExperimental
     public fun attach(name: Name, node: M)
-    override fun get(name: Name): M?
+
+    override fun get(name: Name): M? {
+        tailrec fun M.find(name: Name): M? = if (name.isEmpty()) {
+            self
+        } else {
+            items[name.firstOrNull()!!]?.find(name.cutFirst())
+        }
+
+        return self.find(name)
+    }
+
     override fun getOrCreate(name: Name): M
 }
 
@@ -388,7 +398,7 @@ public fun MutableMeta.reset(newMeta: Meta) {
     (items.keys - newMeta.items.keys).forEach {
         remove(it.asName())
     }
-    newMeta.items.forEach { (token, item)->
+    newMeta.items.forEach { (token, item) ->
         set(token, item)
     }
 }
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
index 7cd28746..20388a92 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
@@ -2,9 +2,6 @@ package space.kscience.dataforge.meta
 
 import space.kscience.dataforge.misc.ThreadSafe
 import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.cutFirst
-import space.kscience.dataforge.names.firstOrNull
-import space.kscience.dataforge.names.isEmpty
 
 
 internal data class MetaListener(
@@ -39,20 +36,7 @@ public interface ObservableMeta : Meta {
  * A [Meta] which is both observable and mutable
  */
 public interface ObservableMutableMeta : ObservableMeta, MutableMeta, MutableTypedMeta<ObservableMutableMeta> {
-
     override val self: ObservableMutableMeta get() = this
-
-    override fun getOrCreate(name: Name): ObservableMutableMeta
-
-    override fun get(name: Name): ObservableMutableMeta? {
-        tailrec fun ObservableMutableMeta.find(name: Name): ObservableMutableMeta? = if (name.isEmpty()) {
-            this
-        } else {
-            items[name.firstOrNull()!!]?.find(name.cutFirst())
-        }
-
-        return find(name)
-    }
 }
 
 internal abstract class AbstractObservableMeta : ObservableMeta {

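The default `get(name)` above walks the name token by token through `items`, so every `MutableTypedMeta` implementation now shares the same lookup semantics without re-implementing them. A small sketch of those semantics using the plain `Meta` builder (values are arbitrary):

    import space.kscience.dataforge.meta.*

    fun main() {
        val meta = Meta {
            "a" put {
                "b" put "leaf"
            }
        }
        // "a.b" is resolved one token at a time via `items`
        println(meta["a.b"]?.string)       // leaf
        // a missing branch short-circuits to null instead of failing
        println(meta["a.missing"]?.string) // null
    }
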
From 332d38df77895ea3f72e6ba2a5b2d41233c08d29 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 10 Jun 2024 20:28:57 +0300
Subject: [PATCH 51/77] First implementation of the protobuf converter


---
 .../kscience/dataforge/data/ActionsTest.kt    | 16 ++--
 .../dataforge-io-proto/build.gradle.kts       | 32 +++++++
 .../src/commonMain/proto/meta.proto           | 19 ++++
 .../src/jvmMain/kotlin/ProtoMetaFormat.kt     | 92 +++++++++++++++++++
 .../src/jvmTest/kotlin/ProtoBufTest.kt        | 39 ++++++++
 .../space/kscience/dataforge/meta/JsonMeta.kt |  5 +-
 .../space/kscience/dataforge/meta/Scheme.kt   |  5 +-
 .../kscience/dataforge/names/NameToken.kt     | 27 +++++-
 .../kscience/dataforge/names/NameTest.kt      | 14 ++-
 settings.gradle.kts                           |  1 +
 10 files changed, 233 insertions(+), 17 deletions(-)
 create mode 100644 dataforge-io/dataforge-io-proto/build.gradle.kts
 create mode 100644 dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
 create mode 100644 dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt
 create mode 100644 dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt

diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index 13660eee..d608cbd3 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -1,8 +1,5 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.flow.collect
-import kotlinx.coroutines.flow.onEach
-import kotlinx.coroutines.flow.take
 import kotlinx.coroutines.test.runTest
 import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.actions.invoke
@@ -16,27 +13,28 @@ import kotlin.time.Duration.Companion.milliseconds
 internal class ActionsTest {
     @Test
     fun testStaticMapAction() = runTest(timeout = 500.milliseconds) {
+        val plusOne = Action.mapping<Int, Int> {
+            result { it + 1 }
+        }
+
         val data: DataTree<Int> = DataTree {
             repeat(10) {
                 putValue(it.toString(), it)
             }
         }
 
-        val plusOne = Action.mapping<Int, Int> {
-            result { it + 1 }
-        }
         val result = plusOne(data)
         assertEquals(2, result["1"]?.await())
     }
 
     @Test
     fun testDynamicMapAction() = runTest(timeout = 500.milliseconds) {
-        val source: MutableDataTree<Int> = MutableDataTree()
-
         val plusOne = Action.mapping<Int, Int> {
             result { it + 1 }
         }
 
+        val source: MutableDataTree<Int> = MutableDataTree()
+
         val result = plusOne(source)
 
 
@@ -44,7 +42,7 @@ internal class ActionsTest {
             source.updateValue(it.toString(), it)
         }
 
-        result.updates.take(10).onEach { println(it.name) }.collect()
+//        result.updates.take(10).onEach { println(it.name) }.collect()
 
         assertEquals(2, result["1"]?.await())
     }
diff --git a/dataforge-io/dataforge-io-proto/build.gradle.kts b/dataforge-io/dataforge-io-proto/build.gradle.kts
new file mode 100644
index 00000000..2899a2b1
--- /dev/null
+++ b/dataforge-io/dataforge-io-proto/build.gradle.kts
@@ -0,0 +1,32 @@
+plugins {
+    id("space.kscience.gradle.mpp")
+    id("com.squareup.wire") version "4.9.9"
+}
+
+description = "ProtoBuf meta IO"
+
+kscience {
+    jvm()
+    dependencies {
+        api(projects.dataforgeIo)
+        api("com.squareup.wire:wire-runtime:4.9.9")
+    }
+    useSerialization {
+        protobuf()
+    }
+}
+
+wire {
+    kotlin {
+        sourcePath {
+            srcDir("src/commonMain/proto")
+        }
+    }
+}
+
+readme {
+    maturity = space.kscience.gradle.Maturity.PROTOTYPE
+    description = """
+        ProtoBuf Meta representation
+    """.trimIndent()
+}
diff --git a/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto b/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
new file mode 100644
index 00000000..2fd33eab
--- /dev/null
+++ b/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
@@ -0,0 +1,19 @@
+syntax = "proto3";
+package space.kscience.dataforge.io.proto;
+
+message ProtoMeta {
+  message ProtoValue {
+    oneof value {
+      string stringValue = 2;
+      bool booleanValue = 3;
+      double doubleValue = 4;
+      float floatValue = 5;
+      int32 int32Value = 6;
+      int64 int64Value = 7;
+      bytes bytesValue = 8;
+    }
+  }
+  repeated ProtoValue value = 1;
+
+  map<string, ProtoMeta> items = 2;
+}
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt b/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt
new file mode 100644
index 00000000..5e55099b
--- /dev/null
+++ b/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt
@@ -0,0 +1,92 @@
+package space.kscience.dataforge.io.proto
+
+import kotlinx.io.Sink
+import kotlinx.io.Source
+import kotlinx.io.asInputStream
+import kotlinx.io.asOutputStream
+import org.slf4j.LoggerFactory
+import space.kscience.dataforge.io.MetaFormat
+import space.kscience.dataforge.meta.*
+import space.kscience.dataforge.meta.descriptors.MetaDescriptor
+import space.kscience.dataforge.names.NameToken
+
+internal class ProtoMetaWrapper(private val proto: ProtoMeta) : Meta {
+
+    private fun ProtoMeta.ProtoValue.toValue(): Value = when {
+        stringValue != null -> stringValue.asValue()
+        booleanValue != null -> booleanValue.asValue()
+        doubleValue != null -> doubleValue.asValue()
+        floatValue != null -> floatValue.asValue()
+        int32Value != null -> int32Value.asValue()
+        int64Value != null -> int64Value.asValue()
+        bytesValue != null -> bytesValue.toByteArray().asValue()
+        else -> Null
+    }
+
+    override val value: Value?
+        get() = when (proto.value_.size) {
+            0 -> null
+            1 -> proto.value_[0].toValue()
+            else -> proto.value_.map { it.toValue() }.asValue()
+        }
+
+
+    override val items: Map<NameToken, Meta>
+        get() = proto.items.entries.associate { NameToken.parse(it.key) to ProtoMetaWrapper(it.value) }
+
+    override fun toString(): String = Meta.toString(this)
+
+    override fun equals(other: Any?): Boolean = Meta.equals(this, other as? Meta)
+
+    override fun hashCode(): Int = Meta.hashCode(this)
+}
+
+internal fun Meta.toProto(): ProtoMeta {
+
+
+    fun MutableList<ProtoMeta.ProtoValue>.appendProtoValues(value: Value): Unit {
+        when (value.type) {
+            ValueType.NULL -> {
+                //do nothing
+            }
+
+            ValueType.NUMBER -> when (value.value) {
+                is Int, is Short, is Byte -> add(ProtoMeta.ProtoValue(int32Value = value.int))
+                is Long -> add(ProtoMeta.ProtoValue(int64Value = value.long))
+                is Float -> add(ProtoMeta.ProtoValue(floatValue = value.float))
+                else -> {
+                    LoggerFactory.getLogger(ProtoMeta::class.java)
+                        .warn("Unknown number type ${value.value} encoded as Double")
+                    add(ProtoMeta.ProtoValue(doubleValue = value.double))
+                }
+            }
+
+            ValueType.STRING -> add(ProtoMeta.ProtoValue(stringValue = value.string))
+            ValueType.BOOLEAN -> add(ProtoMeta.ProtoValue(booleanValue = value.boolean))
+            ValueType.LIST -> {
+                value.list.forEach {
+                    if (it.type == ValueType.LIST) {
+                        error("Nested lists are not supported")
+                    } else {
+                        appendProtoValues(it)
+                    }
+                }
+            }
+        }
+    }
+
+    return ProtoMeta(
+        value_ = buildList { value?.let { appendProtoValues(it) } },
+        items.entries.associate { it.key.toString() to it.value.toProto() }
+    )
+}
+
+
+public object ProtoMetaFormat : MetaFormat {
+    override fun writeMeta(sink: Sink, meta: Meta, descriptor: MetaDescriptor?) {
+        ProtoMeta.ADAPTER.encode(sink.asOutputStream(), meta.toProto())
+    }
+
+    override fun readMeta(source: Source, descriptor: MetaDescriptor?): Meta =
+        ProtoMetaWrapper(ProtoMeta.ADAPTER.decode(source.asInputStream()))
+}
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt b/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt
new file mode 100644
index 00000000..bf819b6e
--- /dev/null
+++ b/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt
@@ -0,0 +1,39 @@
+package space.kscience.dataforge.io.proto
+
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.get
+import kotlin.test.Test
+import kotlin.test.assertEquals
+
+class ProtoBufTest {
+
+    @Test
+    fun testProtoBufMetaFormat(){
+        val meta = Meta {
+            "a" put 22
+            "node" put {
+                "b" put "DDD"
+                "c" put 11.1
+                "d" put {
+                    "d1" put {
+                        "d11" put "aaa"
+                        "d12" put "bbb"
+                    }
+                    "d2" put 2
+                }
+                "array" put doubleArrayOf(1.0, 2.0, 3.0)
+            }
+        }
+        val buffer = kotlinx.io.Buffer()
+        ProtoMetaFormat.writeTo(buffer,meta)
+        val result = ProtoMetaFormat.readFrom(buffer)
+
+        println(result["a"]?.value)
+
+        meta.items.keys.forEach {
+            assertEquals(meta[it],result[it],"${meta[it]} != ${result[it]}")
+        }
+
+        assertEquals(meta, result)
+    }
+}
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
index 8da8b2d3..049c1733 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
@@ -117,8 +117,11 @@ private fun MutableMap<NameToken, SealedMeta>.addJsonElement(
             } else {
                 val indexKey = descriptor?.indexKey ?: Meta.INDEX_KEY
                 element.forEachIndexed { serial, childElement ->
-                    val index = (childElement as? JsonObject)?.get(indexKey)?.jsonPrimitive?.content
+
+                    val index = (childElement as? JsonObject)
+                        ?.get(indexKey)?.jsonPrimitive?.content
                         ?: serial.toString()
+
                     val child: SealedMeta = when (childElement) {
                         is JsonObject -> childElement.toMeta(descriptor)
                         is JsonArray -> {
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
index fe121c42..bc05cb5d 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
@@ -221,13 +221,14 @@ public fun <T : Scheme> Configurable.updateWith(
 /**
  * A delegate that uses a [MetaReader] to wrap a child of this provider
  */
-public fun <T : Scheme> MutableMeta.scheme(
+public fun <T : Scheme> MutableMetaProvider.scheme(
     spec: SchemeSpec<T>,
     key: Name? = null,
 ): ReadWriteProperty<Any?, T> = object : ReadWriteProperty<Any?, T> {
     override fun getValue(thisRef: Any?, property: KProperty<*>): T {
         val name = key ?: property.name.asName()
-        return spec.write(getOrCreate(name))
+        val node = get(name)?: MutableMeta().also { set(name,it) }
+        return spec.write(node)
     }
 
     override fun setValue(thisRef: Any?, property: KProperty<*>, value: T) {
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt
index 83752b9a..3994ef27 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt
@@ -67,10 +67,29 @@ public class NameToken(public val body: String, public val index: String? = null
          * Parse name token from a string
          */
         public fun parse(string: String): NameToken {
-            val body = string.substringBefore('[')
-            val index = string.substringAfter('[', "")
-            if (index.isNotEmpty() && !index.endsWith(']')) error("NameToken with index must end with ']'")
-            return NameToken(body, index.removeSuffix("]"))
+            var indexStart = -1
+            var indexEnd = -1
+            string.forEachIndexed { index, c ->
+                when (c) {
+                    '[' -> when {
+                        indexStart >= 0 -> error("Second opening bracket not allowed in NameToken: $string")
+                        else -> indexStart = index
+                    }
+
+                    ']' -> when {
+                        indexStart < 0 -> error("Closing index bracket could not be used before opening bracket in NameToken: $string")
+                        indexEnd >= 0 -> error("Second closing bracket not allowed in NameToken: $string")
+                        else -> indexEnd = index
+                    }
+
+                    else -> if(indexEnd>=0) error("Symbols not allowed after index in NameToken: $string")
+                }
+            }
+            if(indexStart>=0 && indexEnd<0) error("Opening bracket without closing bracket not allowed in NameToken: $string")
+            return NameToken(
+                if(indexStart>=0) string.substring(0, indexStart) else string,
+                if(indexStart>=0) string.substring(indexStart + 1, indexEnd) else null
+            )
         }
     }
 }
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/names/NameTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/names/NameTest.kt
index db630487..25725333 100644
--- a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/names/NameTest.kt
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/names/NameTest.kt
@@ -56,10 +56,22 @@ class NameTest {
 
         val token2 = NameToken.parse("token-body")
         assertEquals("token-body", token2.body)
-        assertEquals("", token2.index)
+        assertEquals(null, token2.index)
+
+//        val token3 = NameToken.parse("[token-index]")
+//        assertEquals("", token3.body)
+//        assertEquals("token-index", token3.index)
+
+        assertFails{
+            NameToken.parse("[token-index]")
+        }
 
         assertFails {
             NameToken.parse("token[22")
         }
+
+        assertFails {
+            NameToken.parse("token[22]ddd")
+        }
     }
 }
\ No newline at end of file
diff --git a/settings.gradle.kts b/settings.gradle.kts
index ca872038..35eae74e 100644
--- a/settings.gradle.kts
+++ b/settings.gradle.kts
@@ -43,6 +43,7 @@ include(
     ":dataforge-meta",
     ":dataforge-io",
     ":dataforge-io:dataforge-io-yaml",
+    ":dataforge-io:dataforge-io-proto",
     ":dataforge-context",
     ":dataforge-data",
     ":dataforge-workspace",

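Besides the new module, this patch tightens `NameToken.parse`: a missing index is now `null` rather than an empty string, and malformed bracket usage fails fast. A short sketch of the new behaviour, mirroring the updated `NameTest`:

    import space.kscience.dataforge.names.NameToken

    fun main() {
        val plain = NameToken.parse("token")       // body = "token", index = null
        val indexed = NameToken.parse("token[22]") // body = "token", index = "22"
        println(plain.index)   // null (previously an empty string)
        println(indexed.index) // 22

        // These now throw instead of producing a malformed token:
        // NameToken.parse("token[22")      -- unclosed index bracket
        // NameToken.parse("token[22]ddd")  -- symbols after the index
        // NameToken.parse("[token-index]") -- index without a body
    }
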
From 7fa6617e7e85d341d7740cc252e46753cfd4c573 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 10 Jun 2024 21:22:00 +0300
Subject: [PATCH 52/77] First implementation of the protobuf converter

---
 CHANGELOG.md                                    |  2 ++
 .../space/kscience/dataforge/meta/JsonMeta.kt   | 14 +++++++++-----
 .../kscience/dataforge/meta/MetaConverter.kt    |  8 +++++++-
 .../kscience/dataforge/meta/ConvertersTest.kt   | 17 +++++++++++++++++
 4 files changed, 35 insertions(+), 6 deletions(-)
 create mode 100644 dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ConvertersTest.kt

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 43e6bf4e..f7a2e363 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -13,6 +13,8 @@
 ### Removed
 
 ### Fixed
+- Fixed NameToken parsing.
+- Top level string list meta conversion.
 
 ### Security
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
index 049c1733..39a98927 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
@@ -163,11 +163,15 @@ public fun JsonObject.toMeta(descriptor: MetaDescriptor? = null): SealedMeta {
 public fun JsonElement.toMeta(descriptor: MetaDescriptor? = null): SealedMeta = when (this) {
     is JsonPrimitive -> Meta(toValue(descriptor))
     is JsonObject -> toMeta(descriptor)
-    is JsonArray -> SealedMeta(null,
-        linkedMapOf<NameToken, SealedMeta>().apply {
-            addJsonElement(Meta.JSON_ARRAY_KEY, this@toMeta, null)
-        }
-    )
+    is JsonArray -> if (any { it is JsonObject }) {
+        SealedMeta(null,
+            linkedMapOf<NameToken, SealedMeta>().apply {
+                addJsonElement(Meta.JSON_ARRAY_KEY, this@toMeta, null)
+            }
+        )
+    } else{
+        Meta(map { it.toValueOrNull(descriptor) ?: kotlin.error("Unreachable: should not contain objects") }.asValue())
+    }
 }
 
 //
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
index 9baf0087..0f0e8efe 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
@@ -11,7 +11,7 @@ import space.kscience.dataforge.misc.DFExperimental
 /**
  * A converter of generic object to and from [Meta]
  */
-public interface MetaConverter<T>: MetaReader<T> {
+public interface MetaConverter<T> : MetaReader<T> {
 
     /**
      * A descriptor for resulting meta
@@ -116,6 +116,12 @@ public interface MetaConverter<T>: MetaReader<T> {
             override fun convert(obj: E): Meta = Meta(obj.asValue())
         }
 
+        public val stringList: MetaConverter<List<String>> = object : MetaConverter<List<String>> {
+            override fun convert(obj: List<String>): Meta = Meta(obj.map { it.asValue() }.asValue())
+
+            override fun readOrNull(source: Meta): List<String>? = source.stringList
+        }
+
         public fun <T> valueList(
             writer: (T) -> Value = { Value.of(it) },
             reader: (Value) -> T,
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ConvertersTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ConvertersTest.kt
new file mode 100644
index 00000000..fda978e4
--- /dev/null
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ConvertersTest.kt
@@ -0,0 +1,17 @@
+package space.kscience.dataforge.meta
+
+import kotlin.test.Test
+import kotlin.test.assertEquals
+
+class ConvertersTest {
+
+    @Test
+    fun stringListConversion() {
+        val list = listOf("A", "B", "C")
+        val meta = MetaConverter.stringList.convert(list)
+        val json = meta.toJson()
+        val reconstructedMeta = json.toMeta()
+        val reconstructed = MetaConverter.stringList.read(reconstructedMeta)
+        assertEquals(list,reconstructed)
+    }
+}
\ No newline at end of file

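A condensed round trip with the new `MetaConverter.stringList`, mirroring the `ConvertersTest` above; the JSON detour exercises the array handling fixed in this patch (list contents are arbitrary):

    import space.kscience.dataforge.meta.*

    fun main() {
        val tags = listOf("alpha", "beta", "gamma")
        val meta = MetaConverter.stringList.convert(tags)
        // convert to JSON and back, then read the list through the same converter
        val restored = MetaConverter.stringList.read(meta.toJson().toMeta())
        println(restored == tags) // true
    }
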
From e7f2f7e96f7b8d9b797ce921de209fbf931e2712 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 11 Jun 2024 12:06:48 +0300
Subject: [PATCH 53/77] Protobuf meta representation

---
 .../src/commonMain/proto/meta.proto           | 13 ++++-
 .../src/jvmMain/kotlin/ProtoMetaFormat.kt     | 54 +++++++------------
 .../src/jvmTest/kotlin/ProtoBufTest.kt        |  7 ++-
 3 files changed, 38 insertions(+), 36 deletions(-)

diff --git a/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto b/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
index 2fd33eab..24d33420 100644
--- a/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
+++ b/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
@@ -11,9 +11,20 @@ message ProtoMeta {
       int32 int32Value = 6;
       int64 int64Value = 7;
       bytes bytesValue = 8;
+      ProtoValueList listValue = 9;
+      Float64List float64ListValue = 10;
     }
   }
-  repeated ProtoValue value = 1;
+
+  message ProtoValueList{
+    repeated ProtoValue values = 1;
+  }
+
+  message Float64List{
+    repeated double values = 1 [packed=true];
+  }
+
+  ProtoValue protoValue = 1;
 
   map<string, ProtoMeta> items = 2;
 }
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt b/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt
index 5e55099b..e4a33549 100644
--- a/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt
+++ b/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt
@@ -12,7 +12,7 @@ import space.kscience.dataforge.names.NameToken
 
 internal class ProtoMetaWrapper(private val proto: ProtoMeta) : Meta {
 
-    private fun ProtoMeta.ProtoValue.toValue(): Value = when {
+    private fun ProtoMeta.ProtoValue.toValue(): Value? = when {
         stringValue != null -> stringValue.asValue()
         booleanValue != null -> booleanValue.asValue()
         doubleValue != null -> doubleValue.asValue()
@@ -20,15 +20,13 @@ internal class ProtoMetaWrapper(private val proto: ProtoMeta) : Meta {
         int32Value != null -> int32Value.asValue()
         int64Value != null -> int64Value.asValue()
         bytesValue != null -> bytesValue.toByteArray().asValue()
-        else -> Null
+        listValue != null -> listValue.values.mapNotNull { it.toValue() }.asValue()
+        float64ListValue != null -> float64ListValue.values.map { it.asValue() }.asValue()
+        else -> null
     }
 
     override val value: Value?
-        get() = when (proto.value_.size) {
-            0 -> null
-            1 -> proto.value_[0].toValue()
-            else -> proto.value_.map { it.toValue() }.asValue()
-        }
+        get() = proto.protoValue?.toValue()
 
 
     override val items: Map<NameToken, Meta>
@@ -44,39 +42,27 @@ internal class ProtoMetaWrapper(private val proto: ProtoMeta) : Meta {
 internal fun Meta.toProto(): ProtoMeta {
 
 
-    fun MutableList<ProtoMeta.ProtoValue>.appendProtoValues(value: Value): Unit {
-        when (value.type) {
-            ValueType.NULL -> {
-                //do nothing
-            }
+    fun Value.toProto(): ProtoMeta.ProtoValue = when (type) {
+        ValueType.NULL -> ProtoMeta.ProtoValue()
 
-            ValueType.NUMBER -> when (value.value) {
-                is Int, is Short, is Byte -> add(ProtoMeta.ProtoValue(int32Value = value.int))
-                is Long -> add(ProtoMeta.ProtoValue(int64Value = value.long))
-                is Float -> add(ProtoMeta.ProtoValue(floatValue = value.float))
-                else -> {
-                    LoggerFactory.getLogger(ProtoMeta::class.java)
-                        .warn("Unknown number type ${value.value} encoded as Double")
-                    add(ProtoMeta.ProtoValue(doubleValue = value.double))
-                }
-            }
-
-            ValueType.STRING -> add(ProtoMeta.ProtoValue(stringValue = value.string))
-            ValueType.BOOLEAN -> add(ProtoMeta.ProtoValue(booleanValue = value.boolean))
-            ValueType.LIST -> {
-                value.list.forEach {
-                    if (it.type == ValueType.LIST) {
-                        error("Nested lists are not supported")
-                    } else {
-                        appendProtoValues(it)
-                    }
-                }
+        ValueType.NUMBER -> when (value) {
+            is Int, is Short, is Byte -> ProtoMeta.ProtoValue(int32Value = int)
+            is Long -> ProtoMeta.ProtoValue(int64Value = long)
+            is Float -> ProtoMeta.ProtoValue(floatValue = float)
+            else -> {
+                LoggerFactory.getLogger(ProtoMeta::class.java)
+                    .warn("Unknown number type ${value} encoded as Double")
+                ProtoMeta.ProtoValue(doubleValue = double)
             }
         }
+
+        ValueType.STRING -> ProtoMeta.ProtoValue(stringValue = string)
+        ValueType.BOOLEAN -> ProtoMeta.ProtoValue(booleanValue = boolean)
+        ValueType.LIST -> ProtoMeta.ProtoValue(listValue = ProtoMeta.ProtoValueList(list.map { it.toProto() }))
     }
 
     return ProtoMeta(
-        value_ = buildList { value?.let { appendProtoValues(it) } },
+        protoValue = value?.toProto(),
         items.entries.associate { it.key.toString() to it.value.toProto() }
     )
 }
diff --git a/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt b/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt
index bf819b6e..5757092c 100644
--- a/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt
+++ b/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt
@@ -1,6 +1,7 @@
 package space.kscience.dataforge.io.proto
 
 import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.asValue
 import space.kscience.dataforge.meta.get
 import kotlin.test.Test
 import kotlin.test.assertEquals
@@ -22,13 +23,17 @@ class ProtoBufTest {
                     "d2" put 2
                 }
                 "array" put doubleArrayOf(1.0, 2.0, 3.0)
+                "array2d" put listOf(
+                    doubleArrayOf(1.0, 2.0, 3.0).asValue(),
+                    doubleArrayOf(1.0, 2.0, 3.0).asValue()
+                ).asValue()
             }
         }
         val buffer = kotlinx.io.Buffer()
         ProtoMetaFormat.writeTo(buffer,meta)
         val result = ProtoMetaFormat.readFrom(buffer)
 
-        println(result["a"]?.value)
+//        println(result["a"]?.value)
 
         meta.items.keys.forEach {
             assertEquals(meta[it],result[it],"${meta[it]} != ${result[it]}")

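With `ProtoValueList` in the schema, list values (including lists of lists) survive the ProtoBuf round trip instead of being flattened. A condensed version of the test above (at this point in the series the format still lives in `space.kscience.dataforge.io.proto` on the JVM):

    import space.kscience.dataforge.io.proto.ProtoMetaFormat
    import space.kscience.dataforge.meta.*

    fun main() {
        val meta = Meta {
            "matrix" put listOf(
                doubleArrayOf(1.0, 2.0).asValue(),
                doubleArrayOf(3.0, 4.0).asValue()
            ).asValue()
        }
        val buffer = kotlinx.io.Buffer()
        ProtoMetaFormat.writeTo(buffer, meta)
        println(ProtoMetaFormat.readFrom(buffer) == meta) // true
    }
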
From b77fc9a0d5bdd28f100e1f1788464ce856c4462e Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 11 Jun 2024 16:24:15 +0300
Subject: [PATCH 54/77] Add Envelope encoding to protobuf

---
 .../dataforge-io-proto/build.gradle.kts       |  1 +
 .../commonMain/kotlin/ProtoEnvelopeFormat.kt  | 32 +++++++
 .../kotlin/ProtoMetaFormat.kt                 | 16 ++--
 .../src/commonMain/proto/meta.proto           |  5 ++
 .../src/commonTest/kotlin/ProtoBufTest.kt     | 83 +++++++++++++++++++
 .../src/jvmTest/kotlin/ProtoBufTest.kt        | 44 ----------
 .../space/kscience/dataforge/io/Envelope.kt   |  5 +-
 7 files changed, 131 insertions(+), 55 deletions(-)
 create mode 100644 dataforge-io/dataforge-io-proto/src/commonMain/kotlin/ProtoEnvelopeFormat.kt
 rename dataforge-io/dataforge-io-proto/src/{jvmMain => commonMain}/kotlin/ProtoMetaFormat.kt (85%)
 create mode 100644 dataforge-io/dataforge-io-proto/src/commonTest/kotlin/ProtoBufTest.kt
 delete mode 100644 dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt

diff --git a/dataforge-io/dataforge-io-proto/build.gradle.kts b/dataforge-io/dataforge-io-proto/build.gradle.kts
index 2899a2b1..a9099cb5 100644
--- a/dataforge-io/dataforge-io-proto/build.gradle.kts
+++ b/dataforge-io/dataforge-io-proto/build.gradle.kts
@@ -7,6 +7,7 @@ description = "ProtoBuf meta IO"
 
 kscience {
     jvm()
+//    js()
     dependencies {
         api(projects.dataforgeIo)
         api("com.squareup.wire:wire-runtime:4.9.9")
diff --git a/dataforge-io/dataforge-io-proto/src/commonMain/kotlin/ProtoEnvelopeFormat.kt b/dataforge-io/dataforge-io-proto/src/commonMain/kotlin/ProtoEnvelopeFormat.kt
new file mode 100644
index 00000000..d60539f5
--- /dev/null
+++ b/dataforge-io/dataforge-io-proto/src/commonMain/kotlin/ProtoEnvelopeFormat.kt
@@ -0,0 +1,32 @@
+package pace.kscience.dataforge.io.proto
+
+import kotlinx.io.Sink
+import kotlinx.io.Source
+import kotlinx.io.readByteArray
+import okio.ByteString
+import okio.ByteString.Companion.toByteString
+import space.kscience.dataforge.io.Envelope
+import space.kscience.dataforge.io.EnvelopeFormat
+import space.kscience.dataforge.io.asBinary
+import space.kscience.dataforge.io.proto.ProtoEnvelope
+import space.kscience.dataforge.io.toByteArray
+import space.kscience.dataforge.meta.Meta
+
+
+public object ProtoEnvelopeFormat : EnvelopeFormat {
+    override fun readFrom(source: Source): Envelope {
+        val protoEnvelope = ProtoEnvelope.ADAPTER.decode(source.readByteArray())
+        return Envelope(
+            meta = protoEnvelope.meta?.let { ProtoMetaWrapper(it) } ?: Meta.EMPTY,
+            data = protoEnvelope.dataBytes.toByteArray().asBinary()
+        )
+    }
+
+    override fun writeTo(sink: Sink, obj: Envelope) {
+        val protoEnvelope = ProtoEnvelope(
+            obj.meta.toProto(),
+            obj.data?.toByteArray()?.toByteString() ?: ByteString.EMPTY
+        )
+        sink.write(ProtoEnvelope.ADAPTER.encode(protoEnvelope))
+    }
+}
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt b/dataforge-io/dataforge-io-proto/src/commonMain/kotlin/ProtoMetaFormat.kt
similarity index 85%
rename from dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt
rename to dataforge-io/dataforge-io-proto/src/commonMain/kotlin/ProtoMetaFormat.kt
index e4a33549..8857832a 100644
--- a/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt
+++ b/dataforge-io/dataforge-io-proto/src/commonMain/kotlin/ProtoMetaFormat.kt
@@ -1,11 +1,10 @@
-package space.kscience.dataforge.io.proto
+package pace.kscience.dataforge.io.proto
 
 import kotlinx.io.Sink
 import kotlinx.io.Source
-import kotlinx.io.asInputStream
-import kotlinx.io.asOutputStream
-import org.slf4j.LoggerFactory
+import kotlinx.io.readByteArray
 import space.kscience.dataforge.io.MetaFormat
+import space.kscience.dataforge.io.proto.ProtoMeta
 import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.names.NameToken
@@ -50,8 +49,8 @@ internal fun Meta.toProto(): ProtoMeta {
             is Long -> ProtoMeta.ProtoValue(int64Value = long)
             is Float -> ProtoMeta.ProtoValue(floatValue = float)
             else -> {
-                LoggerFactory.getLogger(ProtoMeta::class.java)
-                    .warn("Unknown number type ${value} encoded as Double")
+//                LoggerFactory.getLogger(ProtoMeta::class.java)
+//                    .warn("Unknown number type ${value} encoded as Double")
                 ProtoMeta.ProtoValue(doubleValue = double)
             }
         }
@@ -67,12 +66,11 @@ internal fun Meta.toProto(): ProtoMeta {
     )
 }
 
-
 public object ProtoMetaFormat : MetaFormat {
     override fun writeMeta(sink: Sink, meta: Meta, descriptor: MetaDescriptor?) {
-        ProtoMeta.ADAPTER.encode(sink.asOutputStream(), meta.toProto())
+        sink.write(ProtoMeta.ADAPTER.encode(meta.toProto()))
     }
 
     override fun readMeta(source: Source, descriptor: MetaDescriptor?): Meta =
-        ProtoMetaWrapper(ProtoMeta.ADAPTER.decode(source.asInputStream()))
+        ProtoMetaWrapper(ProtoMeta.ADAPTER.decode(source.readByteArray()))
 }
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto b/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
index 24d33420..c6233872 100644
--- a/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
+++ b/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
@@ -27,4 +27,9 @@ message ProtoMeta {
   ProtoValue protoValue = 1;
 
   map<string, ProtoMeta> items = 2;
+}
+
+message ProtoEnvelope{
+  ProtoMeta meta = 1;
+  bytes dataBytes = 2;
 }
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-proto/src/commonTest/kotlin/ProtoBufTest.kt b/dataforge-io/dataforge-io-proto/src/commonTest/kotlin/ProtoBufTest.kt
new file mode 100644
index 00000000..f8abef24
--- /dev/null
+++ b/dataforge-io/dataforge-io-proto/src/commonTest/kotlin/ProtoBufTest.kt
@@ -0,0 +1,83 @@
+package pace.kscience.dataforge.io.proto
+
+import kotlinx.io.writeString
+import space.kscience.dataforge.io.Envelope
+import space.kscience.dataforge.io.toByteArray
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.asValue
+import space.kscience.dataforge.meta.get
+import kotlin.test.Test
+import kotlin.test.assertContentEquals
+import kotlin.test.assertEquals
+
+class ProtoBufTest {
+
+    @Test
+    fun testProtoBufMetaFormat(){
+        val meta = Meta {
+            "a" put 22
+            "node" put {
+                "b" put "DDD"
+                "c" put 11.1
+                "d" put {
+                    "d1" put {
+                        "d11" put "aaa"
+                        "d12" put "bbb"
+                    }
+                    "d2" put 2
+                }
+                "array" put doubleArrayOf(1.0, 2.0, 3.0)
+                "array2d" put listOf(
+                    doubleArrayOf(1.0, 2.0, 3.0).asValue(),
+                    doubleArrayOf(1.0, 2.0, 3.0).asValue()
+                ).asValue()
+            }
+        }
+        val buffer = kotlinx.io.Buffer()
+        ProtoMetaFormat.writeTo(buffer,meta)
+        val result = ProtoMetaFormat.readFrom(buffer)
+
+//        println(result["a"]?.value)
+
+        meta.items.keys.forEach {
+            assertEquals(meta[it],result[it],"${meta[it]} != ${result[it]}")
+        }
+
+        assertEquals(meta, result)
+    }
+
+    @Test
+    fun testProtoBufEnvelopeFormat(){
+        val envelope = Envelope{
+            meta {
+                "a" put 22
+                "node" put {
+                    "b" put "DDD"
+                    "c" put 11.1
+                    "d" put {
+                        "d1" put {
+                            "d11" put "aaa"
+                            "d12" put "bbb"
+                        }
+                        "d2" put 2
+                    }
+                    "array" put doubleArrayOf(1.0, 2.0, 3.0)
+                    "array2d" put listOf(
+                        doubleArrayOf(1.0, 2.0, 3.0).asValue(),
+                        doubleArrayOf(1.0, 2.0, 3.0).asValue()
+                    ).asValue()
+                }
+            }
+            data {
+                writeString("Hello world!")
+            }
+        }
+
+        val buffer = kotlinx.io.Buffer()
+        ProtoEnvelopeFormat.writeTo(buffer,envelope)
+        val result = ProtoEnvelopeFormat.readFrom(buffer)
+
+        assertEquals(envelope.meta, result.meta)
+        assertContentEquals(envelope.data?.toByteArray(), result.data?.toByteArray())
+    }
+}
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt b/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt
deleted file mode 100644
index 5757092c..00000000
--- a/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt
+++ /dev/null
@@ -1,44 +0,0 @@
-package space.kscience.dataforge.io.proto
-
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.asValue
-import space.kscience.dataforge.meta.get
-import kotlin.test.Test
-import kotlin.test.assertEquals
-
-class ProtoBufTest {
-
-    @Test
-    fun testProtoBufMetaFormat(){
-        val meta = Meta {
-            "a" put 22
-            "node" put {
-                "b" put "DDD"
-                "c" put 11.1
-                "d" put {
-                    "d1" put {
-                        "d11" put "aaa"
-                        "d12" put "bbb"
-                    }
-                    "d2" put 2
-                }
-                "array" put doubleArrayOf(1.0, 2.0, 3.0)
-                "array2d" put listOf(
-                    doubleArrayOf(1.0, 2.0, 3.0).asValue(),
-                    doubleArrayOf(1.0, 2.0, 3.0).asValue()
-                ).asValue()
-            }
-        }
-        val buffer = kotlinx.io.Buffer()
-        ProtoMetaFormat.writeTo(buffer,meta)
-        val result = ProtoMetaFormat.readFrom(buffer)
-
-//        println(result["a"]?.value)
-
-        meta.items.keys.forEach {
-            assertEquals(meta[it],result[it],"${meta[it]} != ${result[it]}")
-        }
-
-        assertEquals(meta, result)
-    }
-}
\ No newline at end of file
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Envelope.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Envelope.kt
index 728a0e69..43df520e 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Envelope.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Envelope.kt
@@ -2,7 +2,6 @@ package space.kscience.dataforge.io
 
 import space.kscience.dataforge.meta.Laminate
 import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.string
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
@@ -34,7 +33,9 @@ public interface Envelope {
     }
 }
 
-internal class SimpleEnvelope(override val meta: Meta, override val data: Binary?) : Envelope
+internal class SimpleEnvelope(override val meta: Meta, override val data: Binary?) : Envelope{
+    override fun toString(): String  = "Envelope(meta=$meta, data=$data)"
+}
 
 public fun Envelope(meta: Meta, data: Binary?): Envelope = SimpleEnvelope(meta, data)
 

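A minimal round trip through the new `ProtoEnvelopeFormat`, condensed from the common test above. The import keeps the package name exactly as declared in this patch (`pace.kscience.dataforge.io.proto`, apparently missing the leading `s`); meta and payload are arbitrary:

    import kotlinx.io.writeString
    import pace.kscience.dataforge.io.proto.ProtoEnvelopeFormat
    import space.kscience.dataforge.io.Envelope
    import space.kscience.dataforge.io.toByteArray

    fun main() {
        val envelope = Envelope {
            meta { "task" put "demo" }
            data { writeString("payload") }
        }
        val buffer = kotlinx.io.Buffer()
        ProtoEnvelopeFormat.writeTo(buffer, envelope)
        val restored = ProtoEnvelopeFormat.readFrom(buffer)
        println(restored.meta)                                  // the same meta tree
        println(restored.data?.toByteArray()?.decodeToString()) // payload
    }
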
From 1f1f894e0d87da2b9b8f3b65b5a4d451a402169a Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 22 Jul 2024 10:01:48 +0300
Subject: [PATCH 55/77] Documentation update

---
 dataforge-io/build.gradle.kts                 |  2 +-
 .../jvmMain/kotlin/performanceComparison.kt   | 51 +++++++++++++++++++
 .../meta/{MetaRef.kt => MetaSpec.kt}          | 29 ++++++++---
 .../dataforge/meta/ValueSerializer.kt         |  3 ++
 .../kscience/dataforge/meta/exoticValues.kt   | 10 +++-
 5 files changed, 87 insertions(+), 8 deletions(-)
 create mode 100644 dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/performanceComparison.kt
 rename dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/{MetaRef.kt => MetaSpec.kt} (73%)

diff --git a/dataforge-io/build.gradle.kts b/dataforge-io/build.gradle.kts
index 5be52e61..16a6fbfc 100644
--- a/dataforge-io/build.gradle.kts
+++ b/dataforge-io/build.gradle.kts
@@ -4,7 +4,7 @@ plugins {
 
 description = "IO module"
 
-val ioVersion = "0.3.1"
+val ioVersion = "0.4.0"
 
 kscience {
     jvm()
diff --git a/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/performanceComparison.kt b/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/performanceComparison.kt
new file mode 100644
index 00000000..74939882
--- /dev/null
+++ b/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/performanceComparison.kt
@@ -0,0 +1,51 @@
+package pace.kscience.dataforge.io.proto
+
+import kotlinx.io.writeString
+import space.kscience.dataforge.io.Envelope
+import space.kscience.dataforge.meta.asValue
+import kotlin.concurrent.thread
+import kotlin.time.measureTime
+
+public fun main() {
+    val envelope = Envelope {
+        meta {
+            "a" put 22
+            "node" put {
+                "b" put "DDD"
+                "c" put 11.1
+                "d" put {
+                    "d1" put {
+                        "d11" put "aaa"
+                        "d12" put "bbb"
+                    }
+                    "d2" put 2
+                }
+                "array" put doubleArrayOf(1.0, 2.0, 3.0)
+                "array2d" put listOf(
+                    doubleArrayOf(1.0, 2.0, 3.0).asValue(),
+                    doubleArrayOf(1.0, 2.0, 3.0).asValue()
+                ).asValue()
+            }
+        }
+        data {
+            writeString("Hello world!")
+        }
+    }
+
+    val format = ProtoEnvelopeFormat
+
+    measureTime {
+        val threads = List(100) {
+            thread {
+                repeat(100000) {
+                    val buffer = kotlinx.io.Buffer()
+                    format.writeTo(buffer, envelope)
+//                    println(buffer.size)
+                    val r = format.readFrom(buffer)
+                }
+            }
+        }
+
+        threads.forEach { it.join() }
+    }.also { println(it) }
+}
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
similarity index 73%
rename from dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
rename to dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
index 413fe404..7af7594c 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
@@ -11,7 +11,7 @@ import kotlin.properties.ReadOnlyProperty
 
 
 /**
- * A reference to a read-only value of type [T] inside [MetaProvider]
+ * A reference to a read-only value of type [T] inside [MetaProvider] or writable value in [MutableMetaProvider]
  */
 @DFExperimental
 public data class MetaRef<T>(
@@ -20,21 +20,36 @@ public data class MetaRef<T>(
     override val descriptor: MetaDescriptor? = converter.descriptor,
 ) : Described
 
+/**
+ * Get a value from provider by [ref] or return null if node with given name is missing
+ */
 @DFExperimental
 public operator fun <T> MetaProvider.get(ref: MetaRef<T>): T? = get(ref.name)?.let { ref.converter.readOrNull(it) }
 
+/**
+ * Set a value in a mutable provider by [ref]
+ */
 @DFExperimental
 public operator fun <T> MutableMetaProvider.set(ref: MetaRef<T>, value: T) {
     set(ref.name, ref.converter.convert(value))
 }
 
+/**
+ * Remove a node corresponding to [ref] from a mutable provider if it exists
+ */
 @DFExperimental
-public class MetaSpec(
-    private val configuration: MetaDescriptorBuilder.() -> Unit = {},
-) : Described {
+public fun MutableMetaProvider.remove(ref: MetaRef<*>) {
+    remove(ref.name)
+}
+
+/**
+ * A base class for [Meta] specification that stores references to meta nodes
+ */
+@DFExperimental
+public abstract class MetaSpec : Described {
     private val refs: MutableList<MetaRef<*>> = mutableListOf()
 
-    private fun registerRef(ref: MetaRef<*>) {
+    protected fun registerRef(ref: MetaRef<*>) {
         refs.add(ref)
     }
 
@@ -51,6 +66,8 @@ public class MetaSpec(
             }
         }
 
+    protected open fun MetaDescriptorBuilder.buildDescriptor(): Unit = Unit
+
     override val descriptor: MetaDescriptor by lazy {
         MetaDescriptor {
             refs.forEach { ref ->
@@ -58,7 +75,7 @@ public class MetaSpec(
                     node(ref.name, ref.descriptor)
                 }
             }
-            configuration()
+            buildDescriptor()
         }
     }
 }
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ValueSerializer.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ValueSerializer.kt
index dc13ef4c..0379187f 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ValueSerializer.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ValueSerializer.kt
@@ -8,6 +8,9 @@ import kotlinx.serialization.descriptors.element
 import kotlinx.serialization.encoding.Decoder
 import kotlinx.serialization.encoding.Encoder
 
+/**
+ * A serializer for [Value]
+ */
 public object ValueSerializer : KSerializer<Value> {
     private val listSerializer by lazy { ListSerializer(ValueSerializer) }
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt
index eb39e985..91811390 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt
@@ -21,6 +21,9 @@ public class LazyParsedValue(public val string: String) : Value {
     override fun hashCode(): Int = string.hashCode()
 }
 
+/**
+ * Read this string as lazily parsed value
+ */
 public fun String.lazyParseValue(): LazyParsedValue = LazyParsedValue(this)
 
 /**
@@ -47,6 +50,9 @@ public class DoubleArrayValue(override val value: DoubleArray) : Value, Iterable
     override fun iterator(): Iterator<Double> = value.iterator()
 }
 
+/**
+ * A zero-copy wrapping of this [DoubleArray] in a [Value]
+ */
 public fun DoubleArray.asValue(): Value = if (isEmpty()) Null else DoubleArrayValue(this)
 
 public val Value.doubleArray: DoubleArray
@@ -75,7 +81,9 @@ public fun MutableMetaProvider.doubleArray(
     reader = { it?.doubleArray ?: doubleArrayOf(*default) },
 )
 
-
+/**
+ * A [Value] wrapping a [ByteArray]
+ */
 public class ByteArrayValue(override val value: ByteArray) : Value, Iterable<Byte> {
     override val type: ValueType get() = ValueType.LIST
     override val list: List<Value> get() = value.map { NumberValue(it) }

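Direct use of `MetaRef` with the operators documented above gives typed read/write access to a mutable meta tree. A sketch (it assumes `MetaConverter.string` and the `MutableMeta()` factory; the ref name and value are illustrative):

    import space.kscience.dataforge.meta.*
    import space.kscience.dataforge.misc.DFExperimental
    import space.kscience.dataforge.names.asName

    @OptIn(DFExperimental::class)
    fun main() {
        val titleRef = MetaRef("title".asName(), MetaConverter.string)
        val meta = MutableMeta()

        meta[titleRef] = "demo"     // written through the converter
        println(meta[titleRef])     // demo, read back through the same converter

        meta.remove(titleRef)       // removes the referenced node
        println(meta[titleRef])     // null
    }
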
From b4ebdfe089f13e592cb022e994955c00df4b1f37 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 22 Jul 2024 11:34:29 +0300
Subject: [PATCH 56/77] Fix JSON to Meta mapping

---
 .../space/kscience/dataforge/meta/JsonMeta.kt    | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
index 39a98927..1817ba64 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
@@ -34,9 +34,9 @@ private fun Meta.toJsonWithIndex(descriptor: MetaDescriptor?, index: String?): J
         val childDescriptor = descriptor?.nodes?.get(body)
         if (list.size == 1) {
             val (token, element) = list.first()
-                //do not add an empty element
-                val child: JsonElement = element.toJsonWithIndex(childDescriptor, token.index)
-            if(token.index == null) {
+            //do not add an empty element
+            val child: JsonElement = element.toJsonWithIndex(childDescriptor, token.index)
+            if (token.index == null) {
                 body to child
             } else {
                 body to JsonArray(listOf(child))
@@ -106,7 +106,7 @@ private fun JsonElement.toValueOrNull(descriptor: MetaDescriptor?): Value? = whe
 private fun MutableMap<NameToken, SealedMeta>.addJsonElement(
     key: String,
     element: JsonElement,
-    descriptor: MetaDescriptor?
+    descriptor: MetaDescriptor?,
 ) {
     when (element) {
         is JsonPrimitive -> put(NameToken(key), Meta(element.toValue(descriptor)))
@@ -136,12 +136,14 @@ private fun MutableMap<NameToken, SealedMeta>.addJsonElement(
                                 Meta(childValue)
                             }
                         }
+
                         is JsonPrimitive -> Meta(childElement.toValue(null))
                     }
                     put(NameToken(key, index), child)
                 }
             }
         }
+
         is JsonObject -> {
             val indexKey = descriptor?.indexKey ?: Meta.INDEX_KEY
             val index = element[indexKey]?.jsonPrimitive?.content
@@ -163,14 +165,14 @@ public fun JsonObject.toMeta(descriptor: MetaDescriptor? = null): SealedMeta {
 public fun JsonElement.toMeta(descriptor: MetaDescriptor? = null): SealedMeta = when (this) {
     is JsonPrimitive -> Meta(toValue(descriptor))
     is JsonObject -> toMeta(descriptor)
-    is JsonArray -> if (any { it is JsonObject }) {
+    is JsonArray -> if (all { it is JsonPrimitive }) {
+        Meta(map { it.toValueOrNull(descriptor) ?: error("Unreachable: should not contain objects") }.asValue())
+    } else {
         SealedMeta(null,
             linkedMapOf<NameToken, SealedMeta>().apply {
                 addJsonElement(Meta.JSON_ARRAY_KEY, this@toMeta, null)
             }
         )
-    } else{
-        Meta(map { it.toValueOrNull(descriptor) ?: kotlin.error("Unreachable: should not contain objects") }.asValue())
     }
 }
 

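After this fix, a JSON array of primitives maps to a single list value, while an array containing objects is still expanded into indexed child nodes. A small sketch (the JSON literals are arbitrary):

    import kotlinx.serialization.json.Json
    import space.kscience.dataforge.meta.*

    fun main() {
        val primitives = Json.parseToJsonElement("[1, 2, 3]").toMeta()
        println(primitives.value)   // a single list value containing 1, 2, 3

        val objects = Json.parseToJsonElement("""[{"x": 1}, {"x": 2}]""").toMeta()
        println(objects.items.keys) // indexed children under Meta.JSON_ARRAY_KEY
    }
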
From 523db20e4a0e47d4a78b414a55811010fdb8d005 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 22 Jul 2024 11:45:50 +0300
Subject: [PATCH 57/77] Fix flaky coroutine tests

---
 .../kscience/dataforge/data/ActionsTest.kt      | 17 ++++++++++++++---
 1 file changed, 14 insertions(+), 3 deletions(-)

diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index d608cbd3..ee4b05be 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -1,6 +1,11 @@
 package space.kscience.dataforge.data
 
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.ExperimentalCoroutinesApi
+import kotlinx.coroutines.delay
+import kotlinx.coroutines.test.advanceUntilIdle
 import kotlinx.coroutines.test.runTest
+import kotlinx.coroutines.withContext
 import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.actions.invoke
 import space.kscience.dataforge.actions.mapping
@@ -9,7 +14,7 @@ import kotlin.test.Test
 import kotlin.test.assertEquals
 import kotlin.time.Duration.Companion.milliseconds
 
-@OptIn(DFExperimental::class)
+@OptIn(DFExperimental::class, ExperimentalCoroutinesApi::class)
 internal class ActionsTest {
     @Test
     fun testStaticMapAction() = runTest(timeout = 500.milliseconds) {
@@ -24,6 +29,8 @@ internal class ActionsTest {
         }
 
         val result = plusOne(data)
+
+        advanceUntilIdle()
         assertEquals(2, result["1"]?.await())
     }
 
@@ -38,8 +45,12 @@ internal class ActionsTest {
         val result = plusOne(source)
 
 
-        repeat(10) {
-            source.updateValue(it.toString(), it)
+        withContext(Dispatchers.Default) {
+            repeat(10) {
+                source.updateValue(it.toString(), it)
+            }
+
+            delay(50)
         }
 
 //        result.updates.take(10).onEach { println(it.name) }.collect()

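The fix applies a common pattern for this kind of flakiness: run the side effects on a real dispatcher so they are not captured by the virtual-time scheduler, then drain the test scheduler before asserting. A generic sketch of the pattern, not tied to DataForge types:

    import kotlinx.coroutines.Dispatchers
    import kotlinx.coroutines.ExperimentalCoroutinesApi
    import kotlinx.coroutines.test.advanceUntilIdle
    import kotlinx.coroutines.test.runTest
    import kotlinx.coroutines.withContext
    import kotlin.test.Test
    import kotlin.test.assertEquals

    @OptIn(ExperimentalCoroutinesApi::class)
    class SchedulerPatternTest {
        @Test
        fun realDispatcherThenDrain() = runTest {
            var counter = 0
            // runs on Dispatchers.Default, outside the test's virtual time
            withContext(Dispatchers.Default) {
                repeat(10) { counter += 1 }
            }
            // let anything queued on the test scheduler finish
            advanceUntilIdle()
            assertEquals(10, counter)
        }
    }
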
From 5196d85da1cd87996a1232683855a3119f8318c1 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 22 Jul 2024 11:48:39 +0300
Subject: [PATCH 58/77] Add MetaConverter for DoubleArray and ByteArray

---
 .../kscience/dataforge/meta/exoticValues.kt   | 19 ++++++++++++++++++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt
index 91811390..17436990 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt
@@ -50,6 +50,7 @@ public class DoubleArrayValue(override val value: DoubleArray) : Value, Iterable
     override fun iterator(): Iterator<Double> = value.iterator()
 }
 
+
 /**
  * A zero-copy wrapping of this [DoubleArray] in a [Value]
  */
@@ -81,6 +82,14 @@ public fun MutableMetaProvider.doubleArray(
     reader = { it?.doubleArray ?: doubleArrayOf(*default) },
 )
 
+private object DoubleArrayMetaConverter : MetaConverter<DoubleArray> {
+    override fun readOrNull(source: Meta): DoubleArray? = source.doubleArray
+
+    override fun convert(obj: DoubleArray): Meta = Meta(obj.asValue())
+}
+
+public val MetaConverter.Companion.doubleArray: MetaConverter<DoubleArray> get() = DoubleArrayMetaConverter
+
 /**
  * A [Value] wrapping a [ByteArray]
  */
@@ -131,4 +140,12 @@ public fun MutableMetaProvider.byteArray(
     key,
     writer = { ByteArrayValue(it) },
     reader = { it?.byteArray ?: byteArrayOf(*default) },
-)
\ No newline at end of file
+)
+
+private object ByteArrayMetaConverter : MetaConverter<ByteArray> {
+    override fun readOrNull(source: Meta): ByteArray? = source.byteArray
+
+    override fun convert(obj: ByteArray): Meta = Meta(obj.asValue())
+}
+
+public val MetaConverter.Companion.byteArray: MetaConverter<ByteArray> get() = ByteArrayMetaConverter
\ No newline at end of file

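Using the new converters to move primitive arrays in and out of `Meta` without manual `Value` wrapping (the sample data is arbitrary):

    import space.kscience.dataforge.meta.*

    fun main() {
        val samples = doubleArrayOf(0.1, 0.2, 0.3)
        val asMeta = MetaConverter.doubleArray.convert(samples)
        val restored = MetaConverter.doubleArray.read(asMeta)
        println(restored.contentEquals(samples)) // true

        val bytesMeta = MetaConverter.byteArray.convert(byteArrayOf(1, 2, 3))
        println(MetaConverter.byteArray.read(bytesMeta).size) // 3
    }
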
From 3807960cda2c2d7223c4057d581b215fd707e785 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 22 Jul 2024 12:08:45 +0300
Subject: [PATCH 59/77] Implement MetaRef mechanics and tests

---
 .../space/kscience/dataforge/meta/MetaRef.kt  | 202 ++++++++++++++++++
 .../kscience/dataforge/meta/MetaRefTest.kt    |  35 +++
 2 files changed, 237 insertions(+)
 create mode 100644 dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
 create mode 100644 dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaRefTest.kt

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
new file mode 100644
index 00000000..ef265f0d
--- /dev/null
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
@@ -0,0 +1,202 @@
+package space.kscience.dataforge.meta
+
+import kotlinx.serialization.json.Json
+import space.kscience.dataforge.meta.descriptors.Described
+import space.kscience.dataforge.meta.descriptors.MetaDescriptor
+import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
+import space.kscience.dataforge.misc.DFExperimental
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.asName
+import kotlin.properties.PropertyDelegateProvider
+import kotlin.properties.ReadOnlyProperty
+
+
+/**
+ * A reference to a read-only value of type [T] inside [MetaProvider] or writable value in [MutableMetaProvider]
+ */
+@DFExperimental
+public data class MetaRef<T>(
+    public val name: Name,
+    public val converter: MetaConverter<T>,
+    override val descriptor: MetaDescriptor? = converter.descriptor,
+) : Described
+
+/**
+ * Get a value from provider by [ref] or return null if node with given name is missing
+ */
+@DFExperimental
+public operator fun <T> MetaProvider.get(ref: MetaRef<T>): T? = get(ref.name)?.let { ref.converter.readOrNull(it) }
+
+/**
+ * Set a value in a mutable provider by [ref]
+ */
+@DFExperimental
+public operator fun <T> MutableMetaProvider.set(ref: MetaRef<T>, value: T) {
+    set(ref.name, ref.converter.convert(value))
+}
+
+/**
+ * Remove a node corresponding to [ref] from a mutable provider if it exists
+ */
+@DFExperimental
+public fun MutableMetaProvider.remove(ref: MetaRef<*>) {
+    remove(ref.name)
+}
+
+/**
+ * Base storage of [MetaRef]
+ */
+@OptIn(DFExperimental::class)
+public interface MetaRefStore : Described {
+    public val refs: List<MetaRef<*>>
+}
+
+/**
+ * A base class for a [Meta] specification that stores references to meta nodes.
+ */
+@DFExperimental
+public abstract class MetaSpec : MetaRefStore {
+    private val _refs: MutableList<MetaRef<*>> = mutableListOf()
+    override val refs: List<MetaRef<*>> get() = _refs
+
+    /**
+     * Register a ref in this specification
+     */
+    protected fun registerRef(ref: MetaRef<*>) {
+        _refs.add(ref)
+    }
+
+    /**
+     * Create and register a ref using the property name and the provided converter.
+     * By default, the descriptor is taken from the converter.
+     */
+    public fun <T> item(
+        converter: MetaConverter<T>,
+        key: Name? = null,
+        descriptor: MetaDescriptor? = converter.descriptor,
+    ): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<T>>> =
+        PropertyDelegateProvider { _, property ->
+            val ref = MetaRef(key ?: property.name.asName(), converter, descriptor)
+            registerRef(ref)
+            ReadOnlyProperty { _, _ ->
+                ref
+            }
+        }
+
+    /**
+     * Override to provide custom [MetaDescriptor]
+     */
+    protected open fun MetaDescriptorBuilder.buildDescriptor(): Unit = Unit
+
+    override val descriptor: MetaDescriptor by lazy {
+        MetaDescriptor {
+            refs.forEach { ref ->
+                ref.descriptor?.let {
+                    node(ref.name, ref.descriptor)
+                }
+            }
+            buildDescriptor()
+        }
+    }
+}
+
+/**
+ * Register an item using a [descriptorBuilder] to customize its descriptor.
+ */
+@DFExperimental
+public fun <T> MetaSpec.item(
+    converter: MetaConverter<T>,
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<T>>> = item(converter, key, MetaDescriptor {
+    converter.descriptor?.let { from(it) }
+    descriptorBuilder()
+})
+
+//utility methods to add different nodes
+
+@DFExperimental
+public fun MetaSpec.metaItem(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<Meta>>> =
+    item(MetaConverter.meta, key, descriptorBuilder)
+
+@DFExperimental
+public fun MetaSpec.string(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<String>>> =
+    item(MetaConverter.string, key, descriptorBuilder)
+
+@DFExperimental
+public fun MetaSpec.boolean(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<Boolean>>> =
+    item(MetaConverter.boolean, key, descriptorBuilder)
+
+@DFExperimental
+public fun MetaSpec.stringList(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<List<String>>>> =
+    item(MetaConverter.stringList, key, descriptorBuilder)
+
+@DFExperimental
+public fun MetaSpec.float(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<Float>>> =
+    item(MetaConverter.float, key, descriptorBuilder)
+
+@DFExperimental
+public fun MetaSpec.double(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<Double>>> =
+    item(MetaConverter.double, key, descriptorBuilder)
+
+@DFExperimental
+public fun MetaSpec.int(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<Int>>> =
+    item(MetaConverter.int, key, descriptorBuilder)
+
+@DFExperimental
+public fun MetaSpec.long(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<Long>>> =
+    item(MetaConverter.long, key, descriptorBuilder)
+
+
+@DFExperimental
+public fun MetaSpec.doubleArray(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<DoubleArray>>> =
+    item(MetaConverter.doubleArray, key, descriptorBuilder)
+
+@DFExperimental
+public fun MetaSpec.byteArray(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<ByteArray>>> =
+    item(MetaConverter.byteArray, key, descriptorBuilder)
+
+@DFExperimental
+public inline fun <reified E : Enum<E>> MetaSpec.enum(
+    key: Name? = null,
+    noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<E>>> =
+    item(MetaConverter.enum(), key, descriptorBuilder)
+
+@DFExperimental
+public inline fun <reified T> MetaSpec.serializable(
+    key: Name? = null,
+    jsonEncoder: Json = Json,
+    noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<T>>> =
+    item(MetaConverter.serializable(jsonEncoder = jsonEncoder), key, descriptorBuilder)
\ No newline at end of file
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaRefTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaRefTest.kt
new file mode 100644
index 00000000..a93841d9
--- /dev/null
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaRefTest.kt
@@ -0,0 +1,35 @@
+package space.kscience.dataforge.meta
+
+import kotlinx.serialization.Serializable
+import space.kscience.dataforge.misc.DFExperimental
+import kotlin.test.Test
+import kotlin.test.assertEquals
+
+@DFExperimental
+internal class MetaRefTest {
+
+    @Serializable
+    data class XY(val x: Double, val y: Double)
+
+    object TestMetaSpec : MetaSpec() {
+        val integer by int { description = "Integer value" }
+        val string by string { description = "String value" }
+        val custom by item(MetaConverter.serializable<XY>()) { description = "custom value" }
+    }
+
+    @Test
+    fun specWriteRead() = with(TestMetaSpec) {
+        val meta = MutableMeta()
+
+        meta[integer] = 22
+        meta[string] = "33"
+        val xy = XY(33.0, -33.0)
+        meta[custom] = xy
+
+        val sealed = meta.seal()
+
+        assertEquals(22, sealed[integer])
+        assertEquals("33", sealed[string])
+        assertEquals(xy, sealed[custom])
+    }
+}
\ No newline at end of file

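A minimal sketch of declaring a specification with the `MetaSpec`/`MetaRef` API introduced above; `AxisSpec`, its properties, and the read helper are hypothetical, following the same pattern as `TestMetaSpec` in the test above:

    import space.kscience.dataforge.meta.*
    import space.kscience.dataforge.misc.DFExperimental

    @OptIn(DFExperimental::class)
    object AxisSpec : MetaSpec() {
        // each delegate registers a MetaRef keyed by the property name
        val label by string { description = "Axis label" }
        val points by int { description = "Number of points on the axis" }
    }

    // typed read through the ref; returns null if the node is absent or malformed
    @OptIn(DFExperimental::class)
    fun readAxisLabel(meta: Meta): String? = meta[AxisSpec.label]
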
From 8723f49efde5a699ea4b55c59abbbaef97044f9f Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 22 Jul 2024 12:08:48 +0300
Subject: [PATCH 60/77] Remove the old MetaSpec file superseded by MetaRef

---
 .../space/kscience/dataforge/meta/MetaSpec.kt | 81 -------------------
 1 file changed, 81 deletions(-)
 delete mode 100644 dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
deleted file mode 100644
index 7af7594c..00000000
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
+++ /dev/null
@@ -1,81 +0,0 @@
-package space.kscience.dataforge.meta
-
-import space.kscience.dataforge.meta.descriptors.Described
-import space.kscience.dataforge.meta.descriptors.MetaDescriptor
-import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.asName
-import kotlin.properties.PropertyDelegateProvider
-import kotlin.properties.ReadOnlyProperty
-
-
-/**
- * A reference to a read-only value of type [T] inside [MetaProvider] or writable value in [MutableMetaProvider]
- */
-@DFExperimental
-public data class MetaRef<T>(
-    public val name: Name,
-    public val converter: MetaConverter<T>,
-    override val descriptor: MetaDescriptor? = converter.descriptor,
-) : Described
-
-/**
- * Get a value from provider by [ref] or return null if node with given name is missing
- */
-@DFExperimental
-public operator fun <T> MetaProvider.get(ref: MetaRef<T>): T? = get(ref.name)?.let { ref.converter.readOrNull(it) }
-
-/**
- * Set a value in a mutable provider by [ref]
- */
-@DFExperimental
-public operator fun <T> MutableMetaProvider.set(ref: MetaRef<T>, value: T) {
-    set(ref.name, ref.converter.convert(value))
-}
-
-/**
- * Remove a node corresponding to [ref] from a mutable provider if it exists
- */
-@DFExperimental
-public fun MutableMetaProvider.remove(ref: MetaRef<*>) {
-    remove(ref.name)
-}
-
-/**
- * A base class for [Meta] specification that stores references to meta nodes
- */
-@DFExperimental
-public abstract class MetaSpec : Described {
-    private val refs: MutableList<MetaRef<*>> = mutableListOf()
-
-    protected fun registerRef(ref: MetaRef<*>) {
-        refs.add(ref)
-    }
-
-    public fun <T> item(
-        converter: MetaConverter<T>,
-        descriptor: MetaDescriptor? = converter.descriptor,
-        key: Name? = null,
-    ): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<T>>> =
-        PropertyDelegateProvider { _, property ->
-            val ref = MetaRef(key ?: property.name.asName(), converter, descriptor)
-            registerRef(ref)
-            ReadOnlyProperty { _, _ ->
-                ref
-            }
-        }
-
-    protected open fun MetaDescriptorBuilder.buildDescriptor(): Unit = Unit
-
-    override val descriptor: MetaDescriptor by lazy {
-        MetaDescriptor {
-            refs.forEach { ref ->
-                ref.descriptor?.let {
-                    node(ref.name, ref.descriptor)
-                }
-            }
-            buildDescriptor()
-        }
-    }
-}
\ No newline at end of file

From 904262a22a35c8ba8bd58db99c4bc6429433798d Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 22 Jul 2024 12:20:05 +0300
Subject: [PATCH 61/77] Add observability via MetaRef

---
 .../space/kscience/dataforge/meta/MetaRef.kt  | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
index ef265f0d..2e6f3452 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
@@ -7,6 +7,7 @@ import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
+import space.kscience.dataforge.names.startsWith
 import kotlin.properties.PropertyDelegateProvider
 import kotlin.properties.ReadOnlyProperty
 
@@ -35,6 +36,24 @@ public operator fun <T> MutableMetaProvider.set(ref: MetaRef<T>, value: T) {
     set(ref.name, ref.converter.convert(value))
 }
 
+/**
+ * Observe changes to specific property via given [ref].
+ *
+ * This listener should be removed in the same way as an [ObservableMeta.onChange] listener.
+ *
+ * @param callback an action performed on each change of the item. Null means the item is missing or malformed.
+ */
+@DFExperimental
+public fun <T: Any> ObservableMeta.onValueChange(owner: Any?, ref: MetaRef<T>, callback: (T?) -> Unit) {
+    onChange(owner) { name ->
+        if (name.startsWith(ref.name)) {
+            get(name)?.let { value ->
+                callback(ref.converter.readOrNull(value))
+            }
+        }
+    }
+}
+
 /**
  * Remove a node corresponding to [ref] from a mutable provider if it exists
  */

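A minimal sketch of the new observer helper; the `threshold` ref and its key are made up, and it assumes that writing through the mutable provider fires the underlying `onChange` listeners:

    import space.kscience.dataforge.meta.*
    import space.kscience.dataforge.misc.DFExperimental
    import space.kscience.dataforge.names.asName

    @OptIn(DFExperimental::class)
    fun watchThreshold(meta: ObservableMutableMeta) {
        val threshold = MetaRef("threshold".asName(), MetaConverter.double)
        // the callback fires for any change under the ref name; null means the item is missing or malformed
        meta.onValueChange(owner = meta, ref = threshold) { value ->
            println("threshold is now $value")
        }
        meta[threshold] = 0.5 // notifies the listener registered above
    }
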
From e5902c0bfed8a33c6527918a3e143478e5e5c8dc Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 3 Aug 2024 10:04:24 +0300
Subject: [PATCH 62/77] Coroutine exception logging in context

---
 CHANGELOG.md                                                 | 1 +
 .../kotlin/space/kscience/dataforge/context/Context.kt       | 5 ++++-
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f7a2e363..6c856ac5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,7 @@
 ## Unreleased
 
 ### Added
+- Coroutine exception logging in context
 
 ### Changed
 - Simplify inheritance logic in `MutableTypedMeta`
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/Context.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/Context.kt
index c614598c..457c39fc 100644
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/Context.kt
+++ b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/Context.kt
@@ -1,5 +1,6 @@
 package space.kscience.dataforge.context
 
+import kotlinx.coroutines.CoroutineExceptionHandler
 import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.Job
 import kotlinx.coroutines.SupervisorJob
@@ -67,7 +68,9 @@ public open class Context internal constructor(
 
     override val coroutineContext: CoroutineContext by lazy {
         (parent ?: Global).coroutineContext.let { parenContext ->
-            parenContext + coroutineContext + SupervisorJob(parenContext[Job])
+            parenContext + coroutineContext + SupervisorJob(parenContext[Job]) + CoroutineExceptionHandler { _, throwable ->
+                logger.error(throwable) { "Exception in context $name" }
+            }
         }
     }
 

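For illustration, the effect of the new handler: an uncaught failure in a coroutine launched from a context scope is now logged instead of being silently swallowed by the supervisor (the failing block is made up):

    import kotlinx.coroutines.launch
    import space.kscience.dataforge.context.Context

    fun demoContextErrorLogging(context: Context) {
        context.launch {
            // the SupervisorJob keeps the context alive, while the handler added above
            // logs this as "Exception in context <context name>"
            error("simulated failure")
        }
    }
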
From de9ca6e0098ee760a685872b4924e7be9074fcd9 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 10 Sep 2024 07:47:53 +0300
Subject: [PATCH 63/77] ObservableMutableMetaSerializer

---
 CHANGELOG.md                                    |  1 +
 .../kscience/dataforge/meta/MetaSerializer.kt   | 17 +++++++++++++++++
 2 files changed, 18 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6c856ac5..82f0a8fb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,7 @@
 
 ### Added
 - Coroutine exception logging in context
+- ObservableMutableMetaSerializer
 
 ### Changed
 - Simplify inheritance logic in `MutableTypedMeta`
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSerializer.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSerializer.kt
index 557cd5ef..2ccfd520 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSerializer.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSerializer.kt
@@ -45,4 +45,21 @@ public object MutableMetaSerializer : KSerializer<MutableMeta> {
     override fun serialize(encoder: Encoder, value: MutableMeta) {
         encoder.encodeSerializableValue(MetaSerializer, value)
     }
+}
+
+/**
+ * A serializer for [ObservableMutableMeta]
+ */
+public object ObservableMutableMetaSerializer : KSerializer<ObservableMutableMeta> {
+
+    override val descriptor: SerialDescriptor = MetaSerializer.descriptor
+
+    override fun deserialize(decoder: Decoder): ObservableMutableMeta {
+        val meta = decoder.decodeSerializableValue(MetaSerializer)
+        return ((meta as? MutableMeta) ?: meta.toMutableMeta()).asObservable()
+    }
+
+    override fun serialize(encoder: Encoder, value: ObservableMutableMeta) {
+        encoder.encodeSerializableValue(MetaSerializer, value)
+    }
 }
\ No newline at end of file

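A minimal round-trip sketch with the new serializer; the helper names are illustrative:

    import kotlinx.serialization.json.Json
    import space.kscience.dataforge.meta.ObservableMutableMeta
    import space.kscience.dataforge.meta.ObservableMutableMetaSerializer

    fun observableMetaToJson(meta: ObservableMutableMeta): String =
        Json.encodeToString(ObservableMutableMetaSerializer, meta)

    fun observableMetaFromJson(string: String): ObservableMutableMeta =
        // deserialization rebuilds a mutable meta and wraps it with asObservable()
        Json.decodeFromString(ObservableMutableMetaSerializer, string)
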
From 8f966b6c0c35b165ba51538ddb957aeafd9c5531 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 10 Sep 2024 07:48:17 +0300
Subject: [PATCH 64/77] Fix replacement for readable delegate in Meta

---
 .../kotlin/space/kscience/dataforge/meta/MetaDelegate.kt        | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
index 2fa0f528..26044f18 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
@@ -57,7 +57,7 @@ public fun <T> MetaProvider.readable(
 /**
  * Use [reader] to read the Meta node
  */
-@Deprecated("Replace with reading", ReplaceWith("reading(metaReader, key)"))
+@Deprecated("Replace with readable", ReplaceWith("readable(metaReader, key)"))
 public fun <T> MetaProvider.spec(
     reader: MetaReader<T>,
     key: Name? = null,

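A small sketch of the corrected migration; `MySettings`, `threshold`, and the injected reader are hypothetical:

    import space.kscience.dataforge.meta.MetaProvider
    import space.kscience.dataforge.meta.MetaReader
    import space.kscience.dataforge.meta.readable

    class MySettings(provider: MetaProvider, reader: MetaReader<Int>) {
        // was `provider.spec(reader)`; `spec` is deprecated in favor of `readable`
        val threshold: Int? by provider.readable(reader)
    }
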
From 088ed64f4af833aa350baae57be37f20b215c3c6 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 17 Sep 2024 09:56:19 +0300
Subject: [PATCH 65/77] 0.9.1

---
 build.gradle.kts                              |  2 +-
 dataforge-context/api/dataforge-context.api   |  1 +
 .../space/kscience/dataforge/provider/Path.kt | 23 ++++++++++
 .../kscience/dataforge/data/ActionsTest.kt    | 35 +++++++-------
 dataforge-meta/api/dataforge-meta.api         | 46 ++++++++++++++-----
 .../kscience/dataforge/meta/ObservableMeta.kt |  3 ++
 6 files changed, 81 insertions(+), 29 deletions(-)

diff --git a/build.gradle.kts b/build.gradle.kts
index 9f9632de..98fca6b3 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -9,7 +9,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.9.1-dev-1"
+    version = "0.9.1"
 }
 
 subprojects {
diff --git a/dataforge-context/api/dataforge-context.api b/dataforge-context/api/dataforge-context.api
index 67cd0cba..390123b8 100644
--- a/dataforge-context/api/dataforge-context.api
+++ b/dataforge-context/api/dataforge-context.api
@@ -282,6 +282,7 @@ public final class space/kscience/dataforge/provider/Path : java/lang/Iterable,
 
 public final class space/kscience/dataforge/provider/Path$Companion {
 	public final fun parse-X5wN5Vs (Ljava/lang/String;)Ljava/util/List;
+	public final fun serializer ()Lkotlinx/serialization/KSerializer;
 }
 
 public final class space/kscience/dataforge/provider/PathKt {
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/provider/Path.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/provider/Path.kt
index 9ecb68c2..3bfa83e1 100644
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/provider/Path.kt
+++ b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/provider/Path.kt
@@ -15,15 +15,37 @@
  */
 package space.kscience.dataforge.provider
 
+import kotlinx.serialization.KSerializer
+import kotlinx.serialization.Serializable
+import kotlinx.serialization.builtins.serializer
+import kotlinx.serialization.descriptors.SerialDescriptor
+import kotlinx.serialization.encoding.Decoder
+import kotlinx.serialization.encoding.Encoder
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.parseAsName
 import kotlin.jvm.JvmInline
 
+private object PathSerializer : KSerializer<Path> {
+
+    override val descriptor: SerialDescriptor
+        get() = String.serializer().descriptor
+
+    override fun serialize(encoder: Encoder, value: Path) {
+        encoder.encodeString(value.toString())
+    }
+
+    override fun deserialize(decoder: Decoder): Path {
+        return Path.parse(decoder.decodeString())
+    }
+}
+
+
 /**
  * Path interface.
  *
  */
 @JvmInline
+@Serializable(PathSerializer::class)
 public value class Path(public val tokens: List<PathToken>) : Iterable<PathToken> {
 
     override fun iterator(): Iterator<PathToken> = tokens.iterator()
@@ -33,6 +55,7 @@ public value class Path(public val tokens: List<PathToken>) : Iterable<PathToken
     public companion object {
         public const val PATH_SEGMENT_SEPARATOR: String = "/"
 
+
         public fun parse(path: String): Path = Path(path.split(PATH_SEGMENT_SEPARATOR).map { PathToken.parse(it) })
     }
 }
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index ee4b05be..2e790caa 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -18,20 +18,22 @@ import kotlin.time.Duration.Companion.milliseconds
 internal class ActionsTest {
     @Test
     fun testStaticMapAction() = runTest(timeout = 500.milliseconds) {
-        val plusOne = Action.mapping<Int, Int> {
-            result { it + 1 }
-        }
-
-        val data: DataTree<Int> = DataTree {
-            repeat(10) {
-                putValue(it.toString(), it)
+        withContext(Dispatchers.Default) {
+            val plusOne = Action.mapping<Int, Int> {
+                result { it + 1 }
             }
+
+            val data: DataTree<Int> = DataTree {
+                repeat(10) {
+                    putValue(it.toString(), it)
+                }
+            }
+
+            val result = plusOne(data)
+
+            advanceUntilIdle()
+            assertEquals(2, result["1"]?.await())
         }
-
-        val result = plusOne(data)
-
-        advanceUntilIdle()
-        assertEquals(2, result["1"]?.await())
     }
 
     @Test
@@ -45,14 +47,13 @@ internal class ActionsTest {
         val result = plusOne(source)
 
 
-        withContext(Dispatchers.Default) {
-            repeat(10) {
-                source.updateValue(it.toString(), it)
-            }
 
-            delay(50)
+        repeat(10) {
+            source.updateValue(it.toString(), it)
         }
 
+        delay(10)
+
 //        result.updates.take(10).onEach { println(it.name) }.collect()
 
         assertEquals(2, result["1"]?.await())
diff --git a/dataforge-meta/api/dataforge-meta.api b/dataforge-meta/api/dataforge-meta.api
index 0bb4a908..5bf71f94 100644
--- a/dataforge-meta/api/dataforge-meta.api
+++ b/dataforge-meta/api/dataforge-meta.api
@@ -53,8 +53,10 @@ public final class space/kscience/dataforge/meta/ExoticValuesKt {
 	public static synthetic fun doubleArray$default (Lspace/kscience/dataforge/meta/MetaProvider;[DLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun doubleArray$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;[DLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun getByteArray (Lspace/kscience/dataforge/meta/Meta;)[B
+	public static final fun getByteArray (Lspace/kscience/dataforge/meta/MetaConverter$Companion;)Lspace/kscience/dataforge/meta/MetaConverter;
 	public static final fun getByteArray (Lspace/kscience/dataforge/meta/Value;)[B
 	public static final fun getDoubleArray (Lspace/kscience/dataforge/meta/Meta;)[D
+	public static final fun getDoubleArray (Lspace/kscience/dataforge/meta/MetaConverter$Companion;)Lspace/kscience/dataforge/meta/MetaConverter;
 	public static final fun getDoubleArray (Lspace/kscience/dataforge/meta/Value;)[D
 	public static final fun lazyParseValue (Ljava/lang/String;)Lspace/kscience/dataforge/meta/LazyParsedValue;
 }
@@ -217,6 +219,7 @@ public final class space/kscience/dataforge/meta/MetaConverter$Companion {
 	public final fun getMeta ()Lspace/kscience/dataforge/meta/MetaConverter;
 	public final fun getNumber ()Lspace/kscience/dataforge/meta/MetaConverter;
 	public final fun getString ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getStringList ()Lspace/kscience/dataforge/meta/MetaConverter;
 	public final fun getValue ()Lspace/kscience/dataforge/meta/MetaConverter;
 	public final fun valueList (Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/MetaConverter;
 	public static synthetic fun valueList$default (Lspace/kscience/dataforge/meta/MetaConverter$Companion;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaConverter;
@@ -248,6 +251,8 @@ public final class space/kscience/dataforge/meta/MetaDelegateKt {
 	public static final fun int (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MetaProvider;ILspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun listOfReadable (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun listOfReadable$default (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static final fun listOfSpec (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun listOfSpec$default (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static final fun long (Lspace/kscience/dataforge/meta/MetaProvider;JLspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
@@ -264,6 +269,10 @@ public final class space/kscience/dataforge/meta/MetaDelegateKt {
 	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun readable (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaReader;Ljava/lang/Object;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun readable (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun readable$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaReader;Ljava/lang/Object;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun readable$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static final fun spec (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun spec$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/String;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
@@ -322,6 +331,10 @@ public final class space/kscience/dataforge/meta/MetaReaderKt {
 	public static final fun readValue (Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/meta/Value;)Ljava/lang/Object;
 }
 
+public abstract interface class space/kscience/dataforge/meta/MetaRefStore : space/kscience/dataforge/meta/descriptors/Described {
+	public abstract fun getRefs ()Ljava/util/List;
+}
+
 public abstract interface class space/kscience/dataforge/meta/MetaRepr {
 	public abstract fun toMeta ()Lspace/kscience/dataforge/meta/Meta;
 }
@@ -411,7 +424,9 @@ public final class space/kscience/dataforge/meta/MutableMetaDelegateKt {
 	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;ZLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun convertable (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/MetaConverter;Ljava/lang/Object;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static final fun convertable (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun convertable$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/MetaConverter;Ljava/lang/Object;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static synthetic fun convertable$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static final fun double (Lspace/kscience/dataforge/meta/MutableMetaProvider;DLspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static final fun double (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
@@ -433,9 +448,7 @@ public final class space/kscience/dataforge/meta/MutableMetaDelegateKt {
 	public static final fun long (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;JLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
-	public static final fun node (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaConverter;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static final fun node (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
-	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaConverter;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static final fun number (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static final fun number (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
@@ -508,7 +521,10 @@ public final class space/kscience/dataforge/meta/MutableMetaSerializer : kotlinx
 }
 
 public abstract interface class space/kscience/dataforge/meta/MutableTypedMeta : space/kscience/dataforge/meta/MutableMeta, space/kscience/dataforge/meta/TypedMeta {
-	public abstract fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableTypedMeta;
+	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
+	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
+	public fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableTypedMeta;
+	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/TypedMeta;
 	public abstract fun getOrCreate (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableTypedMeta;
 }
 
@@ -546,16 +562,24 @@ public final class space/kscience/dataforge/meta/ObservableMetaWrapperKt {
 }
 
 public abstract interface class space/kscience/dataforge/meta/ObservableMutableMeta : space/kscience/dataforge/meta/MutableMeta, space/kscience/dataforge/meta/MutableTypedMeta, space/kscience/dataforge/meta/ObservableMeta {
-	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
-	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
-	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableTypedMeta;
-	public fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
-	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/TypedMeta;
-	public abstract fun getOrCreate (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
+	public static final field Companion Lspace/kscience/dataforge/meta/ObservableMutableMeta$Companion;
 	public fun getSelf ()Lspace/kscience/dataforge/meta/ObservableMutableMeta;
 	public synthetic fun getSelf ()Lspace/kscience/dataforge/meta/TypedMeta;
 }
 
+public final class space/kscience/dataforge/meta/ObservableMutableMeta$Companion {
+	public final fun serializer ()Lkotlinx/serialization/KSerializer;
+}
+
+public final class space/kscience/dataforge/meta/ObservableMutableMetaSerializer : kotlinx/serialization/KSerializer {
+	public static final field INSTANCE Lspace/kscience/dataforge/meta/ObservableMutableMetaSerializer;
+	public synthetic fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Ljava/lang/Object;
+	public fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
+	public fun getDescriptor ()Lkotlinx/serialization/descriptors/SerialDescriptor;
+	public synthetic fun serialize (Lkotlinx/serialization/encoding/Encoder;Ljava/lang/Object;)V
+	public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/meta/ObservableMutableMeta;)V
+}
+
 public final class space/kscience/dataforge/meta/RegexItemTransformationRule : space/kscience/dataforge/meta/TransformationRule {
 	public fun <init> (Lkotlin/text/Regex;Lkotlin/jvm/functions/Function4;)V
 	public final fun component1 ()Lkotlin/text/Regex;
@@ -596,9 +620,9 @@ public final class space/kscience/dataforge/meta/SchemeKt {
 	public static final fun listOfScheme (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun listOfScheme$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun listOfScheme$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun scheme (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun scheme (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun scheme (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun scheme$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun scheme$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun scheme$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun schemeOrNull (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun schemeOrNull (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
index 20388a92..3e53891b 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
@@ -1,5 +1,6 @@
 package space.kscience.dataforge.meta
 
+import kotlinx.serialization.Serializable
 import space.kscience.dataforge.misc.ThreadSafe
 import space.kscience.dataforge.names.Name
 
@@ -35,6 +36,8 @@ public interface ObservableMeta : Meta {
 /**
  * A [Meta] which is both observable and mutable
  */
+@Serializable(ObservableMutableMetaSerializer::class)
+@MetaBuilderMarker
 public interface ObservableMutableMeta : ObservableMeta, MutableMeta, MutableTypedMeta<ObservableMutableMeta> {
     override val self: ObservableMutableMeta get() = this
 }

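A minimal sketch of the newly serializable `Path`, which now round-trips through its string form; the sample path is illustrative:

    import kotlinx.serialization.json.Json
    import space.kscience.dataforge.provider.Path

    fun pathJsonRoundTrip(): Path {
        val path = Path.parse("data/sub/values")
        // PathSerializer writes the path as a plain string and parses it back on read
        val json = Json.encodeToString(Path.serializer(), path)
        return Json.decodeFromString(Path.serializer(), json)
    }
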
From 425f9801a59aafe93e475beba63abd98dcb24c18 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 15 Dec 2024 10:56:35 +0300
Subject: [PATCH 66/77] Rework data. Split static state and dynamic state

---
 CHANGELOG.md                                  |  1 +
 build.gradle.kts                              |  2 +-
 .../dataforge/actions/AbstractAction.kt       | 33 ++++---
 .../kscience/dataforge/actions/MapAction.kt   | 34 ++++---
 .../dataforge/actions/ReduceAction.kt         | 21 ++--
 .../kscience/dataforge/actions/SplitAction.kt | 21 ++--
 .../data/{GroupRule.kt => DataRenamer.kt}     | 45 ++++-----
 .../space/kscience/dataforge/data/DataSink.kt | 96 +++++--------------
 .../kscience/dataforge/data/DataSource.kt     | 13 ++-
 .../kscience/dataforge/data/MetaMaskData.kt   |  2 +-
 .../kscience/dataforge/data/NamedData.kt      |  5 +
 .../kscience/dataforge/data/dataBuilders.kt   | 75 ++++++---------
 .../kscience/dataforge/data/dataTransform.kt  | 45 ++++++---
 .../dataforge/data/dataTreeBuilder.kt         | 34 ++++---
 .../dataforge/data/DataTreeBuilderTest.kt     | 14 +--
 .../dataforge/data/dataSetBuilderInContext.kt |  6 +-
 .../kscience/dataforge/data/ActionsTest.kt    | 41 +++-----
 dataforge-meta/build.gradle.kts               |  6 ++
 .../dataforge/meta/valueExtensions.kt         | 15 ++-
 .../dataforge/workspace/FileDataTest.kt       |  4 +-
 docs/templates/README-TEMPLATE.md             | 61 ++++++++++++
 gradle.properties                             |  2 +-
 22 files changed, 318 insertions(+), 258 deletions(-)
 rename dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/{GroupRule.kt => DataRenamer.kt} (53%)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 82f0a8fb..3cb3daf0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,6 +8,7 @@
 
 ### Changed
 - Simplify inheritance logic in `MutableTypedMeta`
+- `DataSink` API: `put` is now suspending, and static initial data is separated from dynamic updates.
 
 ### Deprecated
 - MetaProvider `spec` is replaced by `readable`. `listOfSpec` replaced with `listOfReadable`
diff --git a/build.gradle.kts b/build.gradle.kts
index 98fca6b3..2d7cb095 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -9,7 +9,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.9.1"
+    version = "0.10.0"
 }
 
 subprojects {
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
index 4ed5f8df..e8249f0b 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
@@ -1,12 +1,7 @@
 package space.kscience.dataforge.actions
 
 import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.flow.collect
-import kotlinx.coroutines.flow.onEach
-import space.kscience.dataforge.data.DataSink
-import space.kscience.dataforge.data.DataTree
-import space.kscience.dataforge.data.DataUpdate
-import space.kscience.dataforge.data.launchUpdate
+import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
@@ -31,10 +26,10 @@ public abstract class AbstractAction<T, R>(
     /**
      * Generate initial content of the output
      */
-    protected abstract fun DataSink<R>.generate(
+    protected abstract fun DataBuilderScope<R>.generate(
         source: DataTree<T>,
         meta: Meta,
-    )
+    ): Map<Name, Data<R>>
 
     /**
      * Update part of the data set using provided data
@@ -49,7 +44,7 @@ public abstract class AbstractAction<T, R>(
         updatedData: DataUpdate<T>,
     ) {
         //by default regenerate the whole data set
-        generate(source, meta)
+        putAll(generate(source, meta))
     }
 
     @OptIn(UnsafeKType::class)
@@ -57,13 +52,21 @@ public abstract class AbstractAction<T, R>(
         source: DataTree<T>,
         meta: Meta,
         updatesScope: CoroutineScope
-    ): DataTree<R> = DataTree(outputType) {
-        generate(source, meta)
+    ): DataTree<R> = DataTree(
+        dataType = outputType,
+        scope = updatesScope,
+        initialData = DataBuilderScope<R>().generate(source, meta)
+    ) {
+
         //propagate updates
-        launchUpdate(updatesScope) {
-            source.updates.onEach { update ->
-                update(source, meta, update)
-            }.collect()
+        val updateSink = DataSink<R> { name, data ->
+            put(name, data)
+        }
+
+        with(updateSink) {
+            source.updates.collect { du: DataUpdate<T> ->
+                update(source, meta, du)
+            }
         }
     }
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index 08bf08e9..fefa12d1 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -54,22 +54,21 @@ public class MapAction<T, R>(
     private val block: MapActionBuilder<T, R>.() -> Unit,
 ) : AbstractAction<T, R>(outputType) {
 
-    private fun DataSink<R>.mapOne(name: Name, data: Data<T>?, meta: Meta) {
+    private fun mapOne(name: Name, data: Data<T>?, meta: Meta): Pair<Name, Data<R>?> {
         //fast return for null data
         if (data == null) {
-            put(name, null)
-            return
+            return name to null
         }
         // Creating a new environment for action using **old** name, old meta and task meta
         val env = ActionEnv(name, data.meta, meta)
 
         //applying transformation from builder
         val builder = MapActionBuilder<T, R>(
-            name,
-            data.meta.toMutableMeta(), // using data meta
-            meta,
-            data.type,
-            outputType
+            name = name,
+            meta = data.meta.toMutableMeta(), // using data meta
+            actionMeta = meta,
+            dataType = data.type,
+            outputType = outputType
         ).apply(block)
 
         //getting new name
@@ -82,21 +81,26 @@ public class MapAction<T, R>(
             builder.result(env, data.await())
         }
         //setting the data node
-        put(newName, newData)
+        return newName to newData
     }
 
-    override fun DataSink<R>.generate(source: DataTree<T>, meta: Meta) {
-        source.forEach { mapOne(it.name, it.data, meta) }
+    override fun DataBuilderScope<R>.generate(source: DataTree<T>, meta: Meta): Map<Name, Data<R>> = buildMap {
+        source.forEach {
+            val (name, data) = mapOne(it.name, it.data, meta)
+            if (data != null) {
+                check(name !in keys) { "Data with key $name already exists in the result" }
+                put(name, data)
+            }
+        }
     }
 
-
-
     override suspend fun DataSink<R>.update(
         source: DataTree<T>,
         meta: Meta,
         updatedData: DataUpdate<T>,
-    )  {
-        mapOne(updatedData.name, updatedData.data, meta)
+    ) {
+        val (name, data) = mapOne(updatedData.name, updatedData.data, meta)
+        put(name, data)
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
index 93278442..73388fec 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
@@ -3,6 +3,8 @@ package space.kscience.dataforge.actions
 import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
+import space.kscience.dataforge.meta.get
+import space.kscience.dataforge.meta.string
 import space.kscience.dataforge.misc.DFBuilder
 import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
@@ -13,7 +15,7 @@ import kotlin.reflect.typeOf
 
 public class JoinGroup<T, R>(
     public var name: String,
-    internal val set: DataTree<T>,
+    internal val data: DataTree<T>,
     @PublishedApi internal var outputType: KType,
 ) {
 
@@ -41,12 +43,17 @@ public class ReduceGroupBuilder<T, R>(
     private val groupRules: MutableList<(DataTree<T>) -> List<JoinGroup<T, R>>> = ArrayList();
 
     /**
-     * introduce grouping by meta value
+     * Group by a meta value
      */
-    public fun byValue(tag: String, defaultTag: String = "@default", action: JoinGroup<T, R>.() -> Unit) {
+    @OptIn(UnsafeKType::class)
+    public fun byMetaValue(tag: String, defaultTag: String = "@default", action: JoinGroup<T, R>.() -> Unit) {
         groupRules += { node ->
-            GroupRule.byMetaValue(tag, defaultTag).gather(node).map {
-                JoinGroup<T, R>(it.key, it.value, outputType).apply(action)
+            val groups = mutableMapOf<String, MutableMap<Name, Data<T>>>()
+            node.forEach { data ->
+                groups.getOrPut(data.meta[tag]?.string ?: defaultTag) { mutableMapOf() }.put(data.name, data)
+            }
+            groups.map { (key, dataMap) ->
+                JoinGroup<T, R>(key, dataMap.asTree(node.dataType), outputType).apply(action)
             }
         }
     }
@@ -84,9 +91,9 @@ internal class ReduceAction<T, R>(
 ) : AbstractAction<T, R>(outputType) {
     //TODO optimize reduction. Currently, the whole action recalculates on push
 
-    override fun DataSink<R>.generate(source: DataTree<T>, meta: Meta) {
+    override fun DataBuilderScope<R>.generate(source: DataTree<T>, meta: Meta): Map<Name, Data<R>> = buildMap {
         ReduceGroupBuilder<T, R>(meta, outputType).apply(action).buildGroups(source).forEach { group ->
-            val dataFlow: Map<Name, Data<T>> = group.set.asSequence().fold(HashMap()) { acc, value ->
+            val dataFlow: Map<Name, Data<T>> = group.data.asSequence().fold(HashMap()) { acc, value ->
                 acc.apply {
                     acc[value.name] = value.data
                 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
index 2268b0fa..bf3284be 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
@@ -7,7 +7,6 @@ import space.kscience.dataforge.meta.MutableMeta
 import space.kscience.dataforge.meta.toMutableMeta
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.parseAsName
-import kotlin.collections.set
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
@@ -48,7 +47,7 @@ internal class SplitAction<T, R>(
     private val action: SplitBuilder<T, R>.() -> Unit,
 ) : AbstractAction<T, R>(outputType) {
 
-    private fun DataSink<R>.splitOne(name: Name, data: Data<T>?, meta: Meta) {
+    private fun splitOne(name: Name, data: Data<T>?, meta: Meta): Map<Name, Data<R>?> = buildMap {
         val laminate = Laminate(data?.meta, meta)
 
         val split = SplitBuilder<T, R>(name, data?.meta ?: Meta.EMPTY).apply(action)
@@ -76,16 +75,26 @@ internal class SplitAction<T, R>(
         }
     }
 
-    override fun DataSink<R>.generate(source: DataTree<T>, meta: Meta) {
-        source.forEach { splitOne(it.name, it.data, meta) }
+    override fun DataBuilderScope<R>.generate(
+        source: DataTree<T>,
+        meta: Meta
+    ): Map<Name, Data<R>> = buildMap {
+        source.forEach {
+            splitOne(it.name, it.data, meta).forEach { (name, data) ->
+                check(name !in keys) { "Data with key $name already exists in the result" }
+                if (data != null) {
+                    put(name, data)
+                }
+            }
+        }
     }
 
     override suspend fun DataSink<R>.update(
         source: DataTree<T>,
         meta: Meta,
         updatedData: DataUpdate<T>,
-    )  {
-        splitOne(updatedData.name, updatedData.data, meta)
+    ) {
+        putAll(splitOne(updatedData.name, updatedData.data, meta))
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataRenamer.kt
similarity index 53%
rename from dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
rename to dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataRenamer.kt
index 6dd3caa9..af2596ac 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataRenamer.kt
@@ -15,40 +15,41 @@
  */
 package space.kscience.dataforge.data
 
+import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.string
+import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.misc.UnsafeKType
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.NameToken
+import space.kscience.dataforge.names.plus
+import kotlin.reflect.KType
 
-public interface GroupRule {
-    public fun <T> gather(set: DataTree<T>): Map<String, DataTree<T>>
+/**
+ * An interface that defines a rename rule for [Data].
+ */
+@DFExperimental
+public fun interface DataRenamer {
+    public fun rename(name: Name, meta: Meta, type: KType): Name
 
     public companion object {
+
         /**
-         * Create grouping rule that creates groups for different values of value
-         * field with name [key]
-         *
-         * @param key
-         * @param defaultTagValue
-         * @return
+         * Prepend the name token `key\[tagValue\]` to the data name.
          */
         @OptIn(UnsafeKType::class)
-        public fun byMetaValue(
+        public fun groupByMetaValue(
             key: String,
             defaultTagValue: String,
-        ): GroupRule = object : GroupRule {
+        ): DataRenamer = object : DataRenamer {
 
-            override fun <T> gather(
-                set: DataTree<T>,
-            ): Map<String, DataTree<T>> {
-                val map = HashMap<String, MutableDataTree<T>>()
-
-                set.forEach { data ->
-                    val tagValue: String = data.meta[key]?.string ?: defaultTagValue
-                    map.getOrPut(tagValue) { MutableDataTree(set.dataType) }.put(data.name, data.data)
-                }
-
-
-                return map
+            override fun rename(
+                name: Name,
+                meta: Meta,
+                type: KType
+            ): Name {
+                val tagValue: String = meta[key]?.string ?: defaultTagValue
+                return NameToken(key,tagValue).plus(name)
             }
         }
     }
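
For illustration, a sketch of the renamer defined above; the key, tag value, and names are made up, and it assumes the standard `Meta {}` builder and `parseAsName` helpers:

    import space.kscience.dataforge.meta.*
    import space.kscience.dataforge.data.DataRenamer
    import space.kscience.dataforge.misc.DFExperimental
    import space.kscience.dataforge.names.parseAsName
    import kotlin.reflect.typeOf

    @OptIn(DFExperimental::class)
    fun renameByGroup() {
        val renamer = DataRenamer.groupByMetaValue("group", defaultTagValue = "@default")
        val meta = Meta { "group" put "red" }
        // prepends the token `group[red]`, so "a.b" becomes "group[red].a.b"
        println(renamer.rename("a.b".parseAsName(), meta, typeOf<Int>()))
    }
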
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
index 6daeae98..ccd37514 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
@@ -1,49 +1,28 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.Job
-import kotlinx.coroutines.channels.BufferOverflow
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.MutableSharedFlow
 import kotlinx.coroutines.flow.mapNotNull
-import kotlinx.coroutines.launch
 import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.*
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
-public interface DataSink<in T> {
-    /**
-     * Put data without notification
-     */
-    public fun put(name: Name, data: Data<T>?)
-
-    /**
-     * Put data and propagate changes downstream
-     */
-    public suspend fun update(name: Name, data: Data<T>?)
+public interface DataBuilderScope<in T> {
+    public companion object : DataBuilderScope<Nothing>
 }
 
-/**
- * Launch continuous update using
- */
-public fun <T> DataSink<T>.launchUpdate(
-    scope: CoroutineScope,
-    updater: suspend DataSink<T>.() -> Unit,
-): Job = scope.launch {
-    object : DataSink<T> {
-        override fun put(name: Name, data: Data<T>?) {
-            launch {
-                this@launchUpdate.update(name, data)
-            }
-        }
+@Suppress("UNCHECKED_CAST")
+public fun <T> DataBuilderScope(): DataBuilderScope<T> = DataBuilderScope as DataBuilderScope<T>
 
-        override suspend fun update(name: Name, data: Data<T>?) {
-            this@launchUpdate.update(name, data)
-        }
-    }.updater()
+public fun interface DataSink<in T>: DataBuilderScope<T> {
+    /**
+     * Put data and notify listeners if needed
+     */
+    public suspend fun put(name: Name, data: Data<T>?)
 }
 
+
 /**
  * A mutable version of [DataTree]
  */
@@ -54,16 +33,14 @@ public interface MutableDataTree<T> : DataTree<T>, DataSink<T> {
 
     public fun getOrCreateItem(token: NameToken): MutableDataTree<T>
 
-    public operator fun set(token: NameToken, data: Data<T>?)
+    public suspend fun put(token: NameToken, data: Data<T>?)
 
-    override fun put(name: Name, data: Data<T>?): Unit = set(name, data)
-}
-
-public tailrec operator fun <T> MutableDataTree<T>.set(name: Name, data: Data<T>?): Unit {
-    when (name.length) {
-        0 -> this.data = data
-        1 -> set(name.first(), data)
-        else -> getOrCreateItem(name.first())[name.cutFirst()] = data
+    override suspend fun put(name: Name, data: Data<T>?): Unit {
+        when (name.length) {
+            0 -> this.data = data
+            1 -> put(name.first(), data)
+            else -> getOrCreateItem(name.first()).put(name.cutFirst(), data)
+        }
     }
 }
 
@@ -81,8 +58,8 @@ private class MutableDataTreeRoot<T>(
     override val dataType: KType,
 ) : MutableDataTree<T> {
 
-    override val updates = MutableSharedFlow<DataUpdate<T>>(100, onBufferOverflow = BufferOverflow.DROP_LATEST)
-
+    override val items = HashMap<NameToken, MutableDataTree<T>>()
+    override val updates = MutableSharedFlow<DataUpdate<T>>(extraBufferCapacity = 100)
 
     inner class MutableDataTreeBranch(val branchName: Name) : MutableDataTree<T> {
 
@@ -101,44 +78,21 @@ private class MutableDataTreeRoot<T>(
         override fun getOrCreateItem(token: NameToken): MutableDataTree<T> =
             items.getOrPut(token) { MutableDataTreeBranch(branchName + token) }
 
-
-        override fun set(token: NameToken, data: Data<T>?) {
-            val subTree = getOrCreateItem(token)
-            subTree.data = data
+        override suspend fun put(token: NameToken, data: Data<T>?) {
+            this.data = data
+            this@MutableDataTreeRoot.updates.emit(DataUpdate(data?.type ?: dataType, branchName + token, data))
         }
-
-        override suspend fun update(name: Name, data: Data<T>?) {
-            if (name.isEmpty()) {
-                this.data = data
-                this@MutableDataTreeRoot.updates.emit(DataUpdate(data?.type ?: dataType, branchName + name, data))
-            } else {
-                getOrCreateItem(name.first()).update(name.cutFirst(), data)
-            }
-        }
-
     }
 
-
     override var data: Data<T>? = null
 
-    override val items = HashMap<NameToken, MutableDataTree<T>>()
-
     override fun getOrCreateItem(token: NameToken): MutableDataTree<T> = items.getOrPut(token) {
         MutableDataTreeBranch(token.asName())
     }
 
-    override fun set(token: NameToken, data: Data<T>?) {
-        val subTree = getOrCreateItem(token)
-        subTree.data = data
-    }
-
-    override suspend fun update(name: Name, data: Data<T>?) {
-        if (name.isEmpty()) {
-            this.data = data
-            updates.emit(DataUpdate(data?.type ?: dataType, name, data))
-        } else {
-            getOrCreateItem(name.first()).update(name.cutFirst(), data)
-        }
+    override suspend fun put(token: NameToken, data: Data<T>?) {
+        this.data = data
+        updates.emit(DataUpdate(data?.type ?: dataType, token.asName(), data))
     }
 }
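To illustrate the suspending sink, a minimal usage sketch (the names `a.b` and `a.c` are illustrative; `runBlocking` is used only for brevity):

import kotlinx.coroutines.runBlocking
import space.kscience.dataforge.data.*

fun main() = runBlocking {
    val tree = MutableDataTree<Int>()

    // put is now suspending, so writes happen inside a coroutine
    // and listeners of `updates` are notified as values arrive.
    tree.putValue("a.b", 1) // the intermediate branch "a" is created on demand
    tree.putValue("a.c", 2)

    println(tree["a.b"]?.await()) // prints 1
}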
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
index a9756be7..dfde4e0b 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
@@ -2,6 +2,7 @@ package space.kscience.dataforge.data
 
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.emptyFlow
+import kotlinx.coroutines.flow.first
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.names.*
 import kotlin.contracts.contract
@@ -30,11 +31,18 @@ public interface DataSource<out T> {
 public interface ObservableDataSource<out T> : DataSource<T> {
 
     /**
-     * Flow updates made to the data
+     * A flow of updates made to the data. Updates are considered critical: the producer will suspend until all updates are consumed.
      */
     public val updates: Flow<DataUpdate<T>>
 }
 
+public suspend fun <T> ObservableDataSource<T>.awaitData(name: Name): Data<T> {
+    return read(name) ?: updates.first { it.name == name && it.data != null }.data!!
+}
+
+public suspend fun <T> ObservableDataSource<T>.awaitData(name: String): Data<T> =
+    awaitData(name.parseAsName())
+
 /**
  * A tree like structure for data holding
  */
@@ -54,8 +62,7 @@ public interface DataTree<out T> : ObservableDataSource<T> {
     override val updates: Flow<DataUpdate<T>>
 
     public companion object {
-        private object EmptyDataTree :
-            DataTree<Nothing> {
+        private object EmptyDataTree : DataTree<Nothing> {
             override val data: Data<Nothing>? = null
             override val items: Map<NameToken, EmptyDataTree> = emptyMap()
             override val dataType: KType = typeOf<Unit>()
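A small sketch of `awaitData` under the API above: it returns existing data immediately and otherwise suspends until a matching non-null update arrives (the name `late` is illustrative):

import kotlinx.coroutines.launch
import kotlinx.coroutines.runBlocking
import space.kscience.dataforge.data.*

fun main() = runBlocking {
    val tree = MutableDataTree<String>()

    launch {
        tree.putValue("late", "hello") // produced after the consumer starts waiting
    }

    println(tree.awaitData("late").await()) // prints "hello"
}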
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt
index 85f0b2f9..2fbece21 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt
@@ -8,7 +8,7 @@ import space.kscience.dataforge.meta.copy
 private class MetaMaskData<T>(val origin: Data<T>, override val meta: Meta) : Data<T> by origin
 
 /**
- * A data with overriden meta. It reflects original data computed state.
+ * A data with overridden meta. It reflects original data computed state.
  */
 public fun <T> Data<T>.withMeta(newMeta: Meta): Data<T> = if (this is MetaMaskData) {
     MetaMaskData(origin, newMeta)
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
index 9cb4c2d9..54a9715c 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
@@ -20,6 +20,11 @@ public fun <T> DataUpdate(type: KType, name: Name, data: Data<T>?): DataUpdate<T
     override val type: KType = type
     override val name: Name = name
     override val data: Data<T>? = data
+
+    override fun toString(): String {
+        return "DataUpdate(type=$type, name=$name, data=$data)"
+    }
+
 }
 
 /**
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
index 649cfd19..b3433616 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
@@ -1,15 +1,14 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.Job
-import kotlinx.coroutines.flow.launchIn
-import kotlinx.coroutines.flow.onEach
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
-import space.kscience.dataforge.names.*
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.asName
+import space.kscience.dataforge.names.isEmpty
+import space.kscience.dataforge.names.plus
 
 
-public fun <T> DataSink<T>.put(value: NamedData<T>) {
+public suspend fun <T> DataSink<T>.put(value: NamedData<T>) {
     put(value.name, value.data)
 }
 
@@ -20,16 +19,7 @@ public inline fun <T> DataSink<T>.putAll(
     if (prefix.isEmpty()) {
         apply(block)
     } else {
-        val proxyDataSink = object :DataSink<T>{
-            override fun put(name: Name, data: Data<T>?) {
-                this@putAll.put(prefix + name, data)
-            }
-
-            override suspend fun update(name: Name, data: Data<T>?) {
-                this@putAll.update(prefix + name, data)
-            }
-
-        }
+        val proxyDataSink = DataSink<T> { name, data -> this@putAll.put(prefix + name, data) }
 
         proxyDataSink.apply(block)
     }
@@ -42,23 +32,23 @@ public inline fun <T> DataSink<T>.putAll(
 ): Unit = putAll(prefix.asName(), block)
 
 
-public fun <T> DataSink<T>.put(name: String, value: Data<T>) {
+public suspend fun <T> DataSink<T>.put(name: String, value: Data<T>) {
     put(Name.parse(name), value)
 }
 
-public fun <T> DataSink<T>.putAll(name: Name, tree: DataTree<T>) {
+public suspend fun <T> DataSink<T>.putAll(name: Name, tree: DataTree<T>) {
     putAll(name) { putAll(tree.asSequence()) }
 }
 
 
-public fun <T> DataSink<T>.putAll(name: String, tree: DataTree<T>) {
+public suspend fun <T> DataSink<T>.putAll(name: String, tree: DataTree<T>) {
     putAll(Name.parse(name)) { putAll(tree.asSequence()) }
 }
 
 /**
  * Produce lazy [Data] and emit it into the [MutableDataTree]
  */
-public inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.putValue(
     name: String,
     meta: Meta = Meta.EMPTY,
     noinline producer: suspend () -> T,
@@ -67,7 +57,7 @@ public inline fun <reified T> DataSink<T>.putValue(
     put(name, data)
 }
 
-public inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.putValue(
     name: Name,
     meta: Meta = Meta.EMPTY,
     noinline producer: suspend () -> T,
@@ -79,56 +69,49 @@ public inline fun <reified T> DataSink<T>.putValue(
 /**
  * Emit static data with the fixed value
  */
-public inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.putValue(
     name: Name,
     value: T,
     meta: Meta = Meta.EMPTY,
 ): Unit = put(name, Data.wrapValue(value, meta))
 
-public inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.putValue(
     name: String,
     value: T,
     meta: Meta = Meta.EMPTY,
 ): Unit = put(name, Data.wrapValue(value, meta))
 
-public inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.putValue(
     name: String,
     value: T,
     metaBuilder: MutableMeta.() -> Unit,
 ): Unit = put(Name.parse(name), Data.wrapValue(value, Meta(metaBuilder)))
 
-public suspend inline fun <reified T> DataSink<T>.updateValue(
-    name: Name,
-    value: T,
-    meta: Meta = Meta.EMPTY,
-): Unit = update(name, Data.wrapValue(value, meta))
-
-public suspend inline fun <reified T> DataSink<T>.updateValue(
-    name: String,
-    value: T,
-    meta: Meta = Meta.EMPTY,
-): Unit = update(name.parseAsName(), Data.wrapValue(value, meta))
-
-public fun <T> DataSink<T>.putAll(sequence: Sequence<NamedData<T>>) {
+public suspend fun <T> DataSink<T>.putAll(sequence: Sequence<NamedData<T>>) {
     sequence.forEach {
         put(it.name, it.data)
     }
 }
 
-public fun <T> DataSink<T>.putAll(tree: DataTree<T>) {
+public suspend fun <T> DataSink<T>.putAll(map: Map<Name, Data<T>?>) {
+    map.forEach { (name, data) ->
+        put(name, data)
+    }
+}
+
+public suspend fun <T> DataSink<T>.putAll(tree: DataTree<T>) {
     putAll(tree.asSequence())
 }
 
 /**
- * Copy given data set and mirror its changes to this [DataSink] in [this@setAndObserve]. Returns an update [Job]
+ * Copy the given data set and mirror its changes to this [DataSink]. Suspends indefinitely.
  */
-public fun <T : Any> DataSink<T>.putAllAndWatch(
-    scope: CoroutineScope,
-    branchName: Name = Name.EMPTY,
+public suspend fun <T : Any> DataSink<T>.putAllAndWatch(
     source: DataTree<T>,
-): Job {
+    branchName: Name = Name.EMPTY,
+) {
     putAll(branchName, source)
-    return source.updates.onEach {
-        update(branchName + it.name, it.data)
-    }.launchIn(scope)
+    source.updates.collect {
+        put(branchName + it.name, it.data)
+    }
 }
\ No newline at end of file
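A sketch of the reworked mirroring helper: since `putAllAndWatch` now suspends indefinitely, it is launched in its own coroutine and cancelled when mirroring is no longer needed (the branch name `mirrored` is illustrative):

import kotlinx.coroutines.*
import space.kscience.dataforge.data.*
import space.kscience.dataforge.names.asName

fun main() = runBlocking {
    val source = MutableDataTree<Int>()
    val target = MutableDataTree<Int>()

    // Copy the current content of `source` under "mirrored" and keep following its updates.
    val mirror = launch { target.putAllAndWatch(source, "mirrored".asName()) }

    source.putValue("x", 42)
    println(target.awaitData("mirrored.x").await()) // prints 42

    mirror.cancel() // stop mirroring
}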
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
index c0d92e9e..80105fc6 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
@@ -1,5 +1,6 @@
 package space.kscience.dataforge.data
 
+import kotlinx.coroutines.CoroutineScope
 import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
@@ -36,7 +37,6 @@ public fun <T, R> Data<T>.transform(
 }
 
 
-
 /**
  * Lazily transform this data to another data. By convention [block] should not use external data (be pure).
  * @param coroutineContext additional [CoroutineContext] elements used for data computation.
@@ -77,7 +77,6 @@ internal fun Iterable<Data<*>>.joinMeta(): Meta = Meta {
 }
 
 
-
 @PublishedApi
 internal fun Map<*, Data<*>>.joinMeta(): Meta = Meta {
     forEach { (key, data) ->
@@ -201,34 +200,46 @@ public inline fun <T, reified R> Iterable<NamedData<T>>.foldNamedToData(
 
 
 @UnsafeKType
-public suspend fun <T, R> DataTree<T>.transform(
+public fun <T, R> DataTree<T>.map(
     outputType: KType,
+    scope: CoroutineScope,
     metaTransform: MutableMeta.() -> Unit = {},
-    coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    block: suspend (NamedValueWithMeta<T>) -> R,
-): DataTree<R> = DataTree<R>(outputType){
-    //quasi-synchronous processing of elements in the tree
-    asSequence().forEach { namedData: NamedData<T> ->
+    compute: suspend (NamedValueWithMeta<T>) -> R,
+): DataTree<R> = DataTree<R>(
+    outputType,
+    scope,
+    initialData = asSequence().associate { namedData: NamedData<T> ->
         val newMeta = namedData.meta.toMutableMeta().apply(metaTransform).seal()
-        val d = Data(outputType, newMeta, coroutineContext, listOf(namedData)) {
-            block(namedData.awaitWithMeta())
+        val newData = Data(outputType, newMeta, scope.coroutineContext, listOf(namedData)) {
+            compute(namedData.awaitWithMeta())
+        }
+        namedData.name to newData
+    }
+) {
+    updates.collect { update ->
+        val data: Data<T>? = update.data
+        if (data == null) put(update.name, null) else {
+            val newMeta = data.meta.toMutableMeta().apply(metaTransform).seal()
+            val d = Data(outputType, newMeta, scope.coroutineContext, listOf(data)) {
+                compute(NamedValueWithMeta(update.name, data.await(), data.meta))
+            }
+            put(update.name, d)
         }
-        put(namedData.name, d)
     }
 }
 
 @OptIn(UnsafeKType::class)
-public suspend inline fun <T, reified R> DataTree<T>.transform(
+public inline fun <T, reified R> DataTree<T>.map(
+    scope: CoroutineScope,
     noinline metaTransform: MutableMeta.() -> Unit = {},
-    coroutineContext: CoroutineContext = EmptyCoroutineContext,
     noinline block: suspend (NamedValueWithMeta<T>) -> R,
-): DataTree<R> = this@transform.transform(typeOf<R>(), metaTransform, coroutineContext, block)
+): DataTree<R> = map(typeOf<R>(), scope, metaTransform, block)
 
 public inline fun <T> DataTree<T>.forEach(block: (NamedData<T>) -> Unit) {
     asSequence().forEach(block)
 }
 
-// DataSet reduction
+// DataSet snapshot reduction
 
 @PublishedApi
 internal fun DataTree<*>.joinMeta(): Meta = Meta {
@@ -238,6 +249,10 @@ internal fun DataTree<*>.joinMeta(): Meta = Meta {
     }
 }
 
+/**
+ * Reduce the current snapshot of the [DataTree] to a single [Data].
+ * Even if the tree changes later, only the current data set is used.
+ */
 public inline fun <T, reified R> DataTree<T>.reduceToData(
     meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
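A sketch of the `map` signature introduced above: the caller supplies a `CoroutineScope` that owns the coroutine propagating further updates into the derived tree (the dedicated `treeScope` is illustrative):

import kotlinx.coroutines.*
import space.kscience.dataforge.data.*

fun main() = runBlocking {
    // A separate scope owns the derived tree's update propagation.
    val treeScope = CoroutineScope(coroutineContext + Job())

    val source = MutableDataTree<Int>()
    source.putValue("x", 2)

    val doubled: DataTree<String> = source.map(treeScope) { (_, value) ->
        "doubled to ${value * 2}"
    }

    println(doubled.awaitData("x").await()) // prints "doubled to 4"

    treeScope.cancel() // stop watching for further updates
}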
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
index ac0492f0..20afcc76 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
@@ -1,8 +1,11 @@
 package space.kscience.dataforge.data
 
+import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.MutableSharedFlow
+import kotlinx.coroutines.flow.SharedFlow
 import kotlinx.coroutines.flow.mapNotNull
+import kotlinx.coroutines.launch
 import kotlinx.coroutines.sync.Mutex
 import kotlinx.coroutines.sync.withLock
 import space.kscience.dataforge.misc.UnsafeKType
@@ -14,7 +17,7 @@ import kotlin.reflect.typeOf
 private class FlatDataTree<T>(
     override val dataType: KType,
     private val dataSet: Map<Name, Data<T>>,
-    private val sourceUpdates: Flow<DataUpdate<T>>,
+    private val sourceUpdates: SharedFlow<DataUpdate<T>>,
     private val prefix: Name,
 ) : DataTree<T> {
     override val data: Data<T>? get() = dataSet[prefix]
@@ -33,7 +36,7 @@ private class FlatDataTree<T>(
 }
 
 /**
- * A builder for static [DataTree].
+ * A builder for [DataTree].
  */
 private class DataTreeBuilder<T>(
     private val type: KType,
@@ -46,20 +49,13 @@ private class DataTreeBuilder<T>(
 
     private val updatesFlow = MutableSharedFlow<DataUpdate<T>>()
 
-    override fun put(name: Name, data: Data<T>?) {
-        if (data == null) {
-            map.remove(name)
-        } else {
-            map[name] = data
-        }
-    }
 
-    override suspend fun update(name: Name, data: Data<T>?) {
+    override suspend fun put(name: Name, data: Data<T>?) {
         mutex.withLock {
             if (data == null) {
                 map.remove(name)
             } else {
-                map.put(name, data)
+                map[name] = data
             }
         }
         updatesFlow.emit(DataUpdate(data?.type ?: type, name, data))
@@ -74,16 +70,24 @@ private class DataTreeBuilder<T>(
 @UnsafeKType
 public fun <T> DataTree(
     dataType: KType,
-    generator: DataSink<T>.() -> Unit,
-): DataTree<T> = DataTreeBuilder<T>(dataType).apply(generator).build()
+    scope: CoroutineScope,
+    initialData: Map<Name, Data<T>> = emptyMap(),
+    updater: suspend DataSink<T>.() -> Unit,
+): DataTree<T> = DataTreeBuilder<T>(dataType, initialData).apply {
+    scope.launch{
+        updater()
+    }
+}.build()
 
 /**
  * Create a data tree.
  */
 @OptIn(UnsafeKType::class)
 public inline fun <reified T> DataTree(
-    noinline generator: DataSink<T>.() -> Unit,
-): DataTree<T> = DataTree(typeOf<T>(), generator)
+    scope: CoroutineScope,
+    initialData: Map<Name, Data<T>> = emptyMap(),
+    noinline updater: suspend DataSink<T>.() -> Unit,
+): DataTree<T> = DataTree(typeOf<T>(), scope, initialData, updater)
 
 
 /**
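A sketch of the reworked `DataTree` factory: the builder block is now a suspending updater launched in the provided scope, so the tree can keep receiving data after construction (names and the 100 ms delay are illustrative):

import kotlinx.coroutines.*
import space.kscience.dataforge.data.*

fun main() = runBlocking {
    val treeScope = CoroutineScope(coroutineContext + Job())

    val tree = DataTree<Int>(treeScope) {
        putValue("static", 1) // available as soon as the updater runs
        delay(100)
        putValue("later", 2)  // appears asynchronously
    }

    println(tree.awaitData("later").await()) // prints 2
    treeScope.cancel()
}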
diff --git a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
index 760aeec2..1f78b36c 100644
--- a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
+++ b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
@@ -1,8 +1,7 @@
 package space.kscience.dataforge.data
 
 import kotlinx.coroutines.Job
-import kotlinx.coroutines.flow.collect
-import kotlinx.coroutines.flow.take
+import kotlinx.coroutines.launch
 import kotlinx.coroutines.test.runTest
 import space.kscience.dataforge.names.asName
 import kotlin.test.Test
@@ -57,17 +56,18 @@ internal class DataTreeBuilderTest {
         val subNode = MutableDataTree<Int>()
 
         val rootNode = MutableDataTree<Int>() {
-            job = putAllAndWatch(this@runTest, "sub".asName(), subNode)
+            job = launch { putAllAndWatch(subNode, "sub".asName()) }
         }
 
         repeat(10) {
             subNode.updateValue("value[$it]", it)
         }
 
-        rootNode.updates.take(10).collect()
-        assertEquals(9, rootNode["sub.value[9]"]?.await())
-        assertEquals(8, rootNode["sub.value[8]"]?.await())
-
+        assertEquals(9, subNode.awaitData("value[9]").await())
+        assertEquals(8, subNode.awaitData("value[8]").await())
+        assertEquals(9, rootNode.awaitData("sub.value[9]").await())
+        assertEquals(8, rootNode.awaitData("sub.value[8]").await())
+        println("finished")
         job?.cancel()
     }
 }
\ No newline at end of file
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
index 8ce50a22..f9374974 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
@@ -1,3 +1,5 @@
+@file:Suppress("CONTEXT_RECEIVERS_DEPRECATED")
+
 package space.kscience.dataforge.data
 
 import space.kscience.dataforge.names.Name
@@ -7,14 +9,14 @@ import space.kscience.dataforge.names.Name
  * Append data to node
  */
 context(DataSink<T>)
-public infix fun <T : Any> String.put(data: Data<T>): Unit =
+public suspend infix fun <T : Any> String.put(data: Data<T>): Unit =
     put(Name.parse(this), data)
 
 /**
  * Append node
  */
 context(DataSink<T>)
-public infix fun <T : Any> String.putAll(dataSet: DataTree<T>): Unit =
+public suspend infix fun <T : Any> String.putAll(dataSet: DataTree<T>): Unit =
     putAll(this, dataSet)
 
 /**
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index 2e790caa..e9ec343c 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -1,11 +1,8 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.ExperimentalCoroutinesApi
-import kotlinx.coroutines.delay
 import kotlinx.coroutines.test.advanceUntilIdle
 import kotlinx.coroutines.test.runTest
-import kotlinx.coroutines.withContext
 import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.actions.invoke
 import space.kscience.dataforge.actions.mapping
@@ -18,22 +15,20 @@ import kotlin.time.Duration.Companion.milliseconds
 internal class ActionsTest {
     @Test
     fun testStaticMapAction() = runTest(timeout = 500.milliseconds) {
-        withContext(Dispatchers.Default) {
-            val plusOne = Action.mapping<Int, Int> {
-                result { it + 1 }
-            }
-
-            val data: DataTree<Int> = DataTree {
-                repeat(10) {
-                    putValue(it.toString(), it)
-                }
-            }
-
-            val result = plusOne(data)
-
-            advanceUntilIdle()
-            assertEquals(2, result["1"]?.await())
+        val plusOne = Action.mapping<Int, Int> {
+            result { it + 1 }
         }
+
+        val data: DataTree<Int> = DataTree {
+            repeat(10) {
+                putValue(it.toString(), it)
+            }
+        }
+
+        val result = plusOne(data)
+
+        advanceUntilIdle()
+        assertEquals(2, result["1"]?.await())
     }
 
     @Test
@@ -44,19 +39,13 @@ internal class ActionsTest {
 
         val source: MutableDataTree<Int> = MutableDataTree()
 
-        val result = plusOne(source)
-
-
+        val result: DataTree<Int> = plusOne(source)
 
         repeat(10) {
             source.updateValue(it.toString(), it)
         }
 
-        delay(10)
-
-//        result.updates.take(10).onEach { println(it.name) }.collect()
-
-        assertEquals(2, result["1"]?.await())
+        assertEquals(2, result.awaitData("1").await())
     }
 
 }
\ No newline at end of file
diff --git a/dataforge-meta/build.gradle.kts b/dataforge-meta/build.gradle.kts
index d150ef98..831aea72 100644
--- a/dataforge-meta/build.gradle.kts
+++ b/dataforge-meta/build.gradle.kts
@@ -16,4 +16,10 @@ description = "Meta definition and basic operations on meta"
 
 readme{
     maturity = space.kscience.gradle.Maturity.DEVELOPMENT
+
+    feature("metadata"){
+        """
+            
+        """.trimIndent()
+    }
 }
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/valueExtensions.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/valueExtensions.kt
index 73464305..0c87bcc2 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/valueExtensions.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/valueExtensions.kt
@@ -11,9 +11,18 @@ public fun Value.isNull(): Boolean = this == Null
 public fun Value.isList(): Boolean = this.type == ValueType.LIST
 
 public val Value.boolean: Boolean
-    get() = this == True
-            || this.list.firstOrNull() == True
-            || (type == ValueType.STRING && string.toBoolean())
+    get() = when (type) {
+        ValueType.NUMBER -> int > 0
+        ValueType.STRING -> string.toBoolean()
+        ValueType.BOOLEAN -> this === True
+        ValueType.LIST -> list.singleOrNull()?.boolean == true
+        ValueType.NULL -> false
+    }
+
+//        this == True
+//            || this.list.firstOrNull() == True
+//            || (type == ValueType.STRING && string.toBoolean())
+//            || (type == ValueType.)
 
 
 public val Value.int: Int get() = number.toInt()
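The conversion rules above can be summarized with a few checks (a sketch assuming the usual `asValue` helpers from dataforge-meta):

import space.kscience.dataforge.meta.*

fun main() {
    check(1.asValue().boolean)          // positive numbers are true
    check(!0.asValue().boolean)         // zero and negative numbers are false
    check("true".asValue().boolean)     // strings go through String.toBoolean()
    check(!listOf(true.asValue(), false.asValue()).asValue().boolean) // only single-element lists are unwrapped
    check(!Null.boolean)                // the null value is false
}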
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
index 5466da76..f526e194 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
@@ -54,7 +54,7 @@ class FileDataTest {
         val data = DataTree {
             files(io, Name.EMPTY, dir)
         }
-        val reconstructed = data.transform { (_, value) -> value.toByteArray().decodeToString() }
+        val reconstructed = data.map { (_, value) -> value.toByteArray().decodeToString() }
         assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
         assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
     }
@@ -69,7 +69,7 @@ class FileDataTest {
         io.writeZip(zip, dataNode, StringIOFormat)
         println(zip.toUri().toString())
         val reconstructed = DataTree { files(io, Name.EMPTY, zip) }
-            .transform { (_, value) -> value.toByteArray().decodeToString() }
+            .map { (_, value) -> value.toByteArray().decodeToString() }
         assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
         assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
 
diff --git a/docs/templates/README-TEMPLATE.md b/docs/templates/README-TEMPLATE.md
index 762e5d7e..6de55245 100644
--- a/docs/templates/README-TEMPLATE.md
+++ b/docs/templates/README-TEMPLATE.md
@@ -3,4 +3,65 @@
 
 ![Gradle build](https://github.com/mipt-npm/dataforge-core/workflows/Gradle%20build/badge.svg)
 
+## Publications
+
+* [A general overview](https://doi.org/10.1051/epjconf/201817705003)
+* [An application in "Troitsk nu-mass" experiment](https://doi.org/10.1088/1742-6596/1525/1/012024)
+
+## Video
+
+* [A presentation on the application of an older version of DataForge to the Troitsk nu-mass analysis](https://youtu.be/OpWzLXUZnLI?si=3qn7EMruOHMJX3Bc)
+
+## Questions and Answers
+In this section, we try to cover the main ideas of DataForge in question-and-answer form.
+
+### General
+**Q**: I have a lot of data to analyze. The analysis process is complicated, requires many stages, and the data flow is not always obvious. On top of that, the data size is huge, so I don't want to perform operations I don't need (calculate something I won't use, or calculate something twice). And yes, I need it to run in parallel and probably on a remote computer. By the way, I am sick and tired of scripts that modify other scripts that control scripts. Could you help me?
+
+**A**: Yes, that is precisely the problem DataForge was made to solve. It performs automated data manipulations with automatic optimization and parallelization. Importantly, data processing recipes are written declaratively, so it is quite easy to run computations on a remote machine. DataForge also guarantees the reproducibility of analysis results.
+
+**Q**: How does it work?
+
+**A**: At the core of DataForge lies the idea of a metadata processor. It builds on the fact that in order to analyze something you need the data itself and some additional information about what that data represents and what the user wants as a result. This additional information is called metadata and can be organized in a regular structure (a tree of values not unlike XML or JSON). The important thing is that this distinction leaves no place for user instructions (or scripts). Indeed, the idea behind DataForge is that one does not need imperative commands: the framework configures itself according to the input metadata and decides which operations should be performed in the most efficient way.
+
+**Q**: But where does it take algorithms to use?
+
+**A**: Of course, the algorithms must be written somewhere; there is no magic here. The logic lives in specialized modules. Some modules are provided out of the box in the system core, while others need to be developed for a specific problem.
+
+**Q**: So I still need to write the code? What is the difference then?
+
+**A**: Yes, someone still needs to write the code, but not necessarily you. Simple operations can be performed using the provided core logic. Your group can also have one programmer writing the logic while everyone else uses it without any real programming expertise. The framework is organized in such a way that whoever writes the additional logic does not need to think about complicated things like parallel computing, resource handling, logging, caching, and so on. Most of this is handled by DataForge.
+
+### Platform
+
+**Q**: Which platform does DataForge use? Which operating system is it working on?
+
+**A**: DataForge is mostly written in Kotlin Multiplatform and can be used on JVM, JS, and native targets. Some modules and functions are supported only on the JVM.
+
+**Q**: Can I use my C++/Fortran/Python code in DataForge?
+
+**A**: Yes, as long as the code can be called from Java. Most common languages have a bridge for Java access, and compiled C/Fortran libraries pose no problem at all. Python code can be called via one of the existing Python-Java interfaces. It is also planned to implement remote method invocation for common languages, so your Python, or, say, Julia, code could run in its native environment. The metadata processor paradigm makes it much easier to do so.
+
+### Features
+**Q**: What other features does DataForge provide?
+
+**A**: Alongside metadata processing (and a lot of tools for metadata manipulation and layering), DataForge has two additional important concepts:
+
+* **Modularisation**. Unlike many other frameworks, DataForge is intrinsically modular. The only mandatory part is a rather tiny core module; everything else can be customized.
+
+* **Context encapsulation**. Every DataForge task is executed in some context. The context isolates the task's environment, serves as a base for dependency injection, and defines how the task interacts with the external world.
+
+### Misc
+**Q**: So everything looks great, can I replace my ROOT / other data analysis framework with DataForge?
+
+**A**: One must note that DataForge is made for analysis, not for visualisation. Its visualisation and user-interaction capabilities are rather limited compared to frameworks like ROOT, JAS3 or DataMelt. The idea is to provide a reliable API and core functionality. In fact, JAS3 and DataMelt could be used as front-ends for the DataForge mechanics.
+
+**Q**: How does DataForge compare to cluster computation frameworks like Apache Spark?
+
+**A**: Again, it is not the purpose of DataForge to replace cluster software. DataForge has some internal parallelism mechanics and implementations, but they are certainly inferior to specially developed programs. Still, DataForge is not tied to a single implementation: your favourite parallel processing tool can be used as a back-end for DataForge, with the full benefit of its configuration tools and integrations and no performance overhead.
+
+**Q**: Is it possible to use DataForge in notebook mode?
+
+**A**: [Kotlin Jupyter](https://github.com/Kotlin/kotlin-jupyter) allows any JVM program to be used in notebook mode. A dedicated DataForge module is a work in progress.
+
 ${modules}
diff --git a/gradle.properties b/gradle.properties
index ffc318d9..1b920cd8 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -6,4 +6,4 @@ org.gradle.jvmargs=-Xmx4096m
 kotlin.mpp.stability.nowarn=true
 kotlin.native.ignoreDisabledTargets=true
 
-toolsVersion=0.15.4-kotlin-2.0.0
\ No newline at end of file
+toolsVersion=0.16.0-kotlin-2.1.0
\ No newline at end of file

From 6634ece349ddb36672b1967b005c4b40b28ae9e8 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 31 Dec 2024 13:58:21 +0300
Subject: [PATCH 67/77] Rework data. Split static state and dynamic state

---
 .../dataforge/actions/AbstractAction.kt       |  12 +-
 .../kscience/dataforge/actions/MapAction.kt   |  10 +-
 .../dataforge/actions/ReduceAction.kt         |   2 +-
 .../kscience/dataforge/actions/SplitAction.kt |   8 +-
 .../kscience/dataforge/data/DataFilter.kt     |  37 ++--
 .../space/kscience/dataforge/data/DataSink.kt |  21 +-
 .../kscience/dataforge/data/DataSource.kt     |  18 +-
 .../space/kscience/dataforge/data/Goal.kt     |   2 +-
 .../kscience/dataforge/data/NamedData.kt      |  33 +--
 .../dataforge/data/StaticDataBuilder.kt       |  63 ++++++
 .../kscience/dataforge/data/dataBuilders.kt   |  30 ++-
 .../kscience/dataforge/data/dataTransform.kt  |  28 +--
 .../dataforge/data/dataTreeBuilder.kt         |  24 ++-
 .../dataforge/data/DataTreeBuilderTest.kt     |  18 +-
 .../kscience/dataforge/data/dataFilterJvm.kt  |  20 +-
 .../kscience/dataforge/data/ActionsTest.kt    |   4 +-
 .../kscience/dataforge/workspace/Task.kt      |  43 ++--
 .../dataforge/workspace/TaskResult.kt         |   6 +-
 .../dataforge/workspace/WorkspaceBuilder.kt   |  29 ++-
 .../dataforge/workspace/taskBuilders.kt       |  82 ++++----
 .../dataforge/workspace/CachingAction.kt      |  20 +-
 .../dataforge/workspace/FileDataTree.kt       | 185 +++++++++++++++++
 .../workspace/InMemoryWorkspaceCache.kt       |   2 +-
 .../dataforge/workspace/readFileData.kt       | 188 ------------------
 .../dataforge/workspace/workspaceJvm.kt       |   4 +-
 .../dataforge/workspace/writeFileData.kt      |   8 +-
 .../workspace/CachingWorkspaceTest.kt         |  12 +-
 .../dataforge/workspace/FileDataTest.kt       |   4 +-
 .../workspace/FileWorkspaceCacheTest.kt       |   2 +-
 29 files changed, 486 insertions(+), 429 deletions(-)
 create mode 100644 dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataBuilder.kt
 create mode 100644 dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileDataTree.kt
 delete mode 100644 dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt

diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
index e8249f0b..af0aab00 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
@@ -35,16 +35,16 @@ public abstract class AbstractAction<T, R>(
      * Update part of the data set using provided data
      *
      * @param source the source data tree in case we need several data items to update
-     * @param meta the metadata used for the whole data tree
+     * @param actionMeta the metadata used for the whole data tree
-     * @param updatedData an updated item
+     * @param updateName the name of the updated data item
      */
     protected open suspend fun DataSink<R>.update(
         source: DataTree<T>,
-        meta: Meta,
-        updatedData: DataUpdate<T>,
+        actionMeta: Meta,
+        updateName: Name,
     ) {
         //by default regenerate the whole data set
-        putAll(generate(source, meta))
+        putAll(generate(source, actionMeta))
     }
 
     @OptIn(UnsafeKType::class)
@@ -64,8 +64,8 @@ public abstract class AbstractAction<T, R>(
         }
 
         with(updateSink) {
-            source.updates.collect { du: DataUpdate<T> ->
-                update(source, meta, du)
+            source.updates.collect {
+                update(source, meta, it)
             }
         }
     }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index fefa12d1..2e4b2ddc 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -85,8 +85,8 @@ public class MapAction<T, R>(
     }
 
     override fun DataBuilderScope<R>.generate(source: DataTree<T>, meta: Meta): Map<Name, Data<R>> = buildMap {
-        source.forEach {
-            val (name, data) = mapOne(it.name, it.data, meta)
+        source.forEach { data ->
+            val (name, data) = mapOne(data.name, data, meta)
             if (data != null) {
                 check(name !in keys) { "Data with key $name already exist in the result" }
                 put(name, data)
@@ -96,10 +96,10 @@ public class MapAction<T, R>(
 
     override suspend fun DataSink<R>.update(
         source: DataTree<T>,
-        meta: Meta,
-        updatedData: DataUpdate<T>,
+        actionMeta: Meta,
+        updateName: Name,
     ) {
-        val (name, data) = mapOne(updatedData.name, updatedData.data, meta)
+        val (name, data) = mapOne(updateName, source.read(updateName), actionMeta)
         put(name, data)
     }
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
index 73388fec..d7bacda5 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
@@ -95,7 +95,7 @@ internal class ReduceAction<T, R>(
         ReduceGroupBuilder<T, R>(meta, outputType).apply(action).buildGroups(source).forEach { group ->
             val dataFlow: Map<Name, Data<T>> = group.data.asSequence().fold(HashMap()) { acc, value ->
                 acc.apply {
-                    acc[value.name] = value.data
+                    acc[value.name] = value
                 }
             }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
index bf3284be..6dfbc7c9 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
@@ -80,7 +80,7 @@ internal class SplitAction<T, R>(
         meta: Meta
     ): Map<Name, Data<R>> = buildMap {
         source.forEach {
-            splitOne(it.name, it.data, meta).forEach { (name, data) ->
+            splitOne(it.name, it, meta).forEach { (name, data) ->
                 check(name !in keys) { "Data with key $name already exist in the result" }
                 if (data != null) {
                     put(name, data)
@@ -91,10 +91,10 @@ internal class SplitAction<T, R>(
 
     override suspend fun DataSink<R>.update(
         source: DataTree<T>,
-        meta: Meta,
-        updatedData: DataUpdate<T>,
+        actionMeta: Meta,
+        updateName: Name,
     ) {
-        putAll(splitOne(updatedData.name, updatedData.data, meta))
+        putAll(splitOne(updateName, source.read(updateName), actionMeta))
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
index 38174e50..5538cc28 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
@@ -18,23 +18,25 @@ public fun interface DataFilter {
 }
 
 
-public fun DataFilter.accepts(update: DataUpdate<*>): Boolean = accepts(update.name, update.data?.meta, update.type)
+//public fun DataFilter.accepts(update: DataUpdate<*>): Boolean = accepts(update.name, update.data?.meta, update.type)
 
-public fun <T, DU : DataUpdate<T>> Sequence<DU>.filterData(predicate: DataFilter): Sequence<DU> = filter { data ->
-    predicate.accepts(data)
-}
-
-public fun <T, DU : DataUpdate<T>> Flow<DU>.filterData(predicate: DataFilter): Flow<DU> = filter { data ->
-    predicate.accepts(data)
-}
+//public fun <T, DU : DataUpdate<T>> Sequence<DU>.filterData(predicate: DataFilter): Sequence<DU> = filter { data ->
+//    predicate.accepts(data)
+//}
+//
+//public fun <T, DU : DataUpdate<T>> Flow<DU>.filterData(predicate: DataFilter): Flow<DU> = filter { data ->
+//    predicate.accepts(data)
+//}
 
 public fun <T> DataSource<T>.filterData(
-    predicate: DataFilter,
+    dataFilter: DataFilter,
 ): DataSource<T> = object : DataSource<T> {
     override val dataType: KType get() = this@filterData.dataType
 
     override fun read(name: Name): Data<T>? =
-        this@filterData.read(name)?.takeIf { predicate.accepts(name, it.meta, it.type) }
+        this@filterData.read(name)?.takeIf {
+            dataFilter.accepts(name, it.meta, it.type)
+        }
 }
 
 /**
@@ -43,8 +45,12 @@ public fun <T> DataSource<T>.filterData(
 public fun <T> ObservableDataSource<T>.filterData(
     predicate: DataFilter,
 ): ObservableDataSource<T> = object : ObservableDataSource<T> {
-    override val updates: Flow<DataUpdate<T>>
-        get() = this@filterData.updates.filter { predicate.accepts(it) }
+
+    override val updates: Flow<Name>
+        get() = this@filterData.updates.filter {
+            val data = read(it)
+            predicate.accepts(it, data?.meta, data?.type ?: dataType)
+        }
 
     override val dataType: KType get() = this@filterData.dataType
 
@@ -70,8 +76,11 @@ internal class FilteredDataTree<T>(
             ?.filter { !it.value.isEmpty() }
             ?: emptyMap()
 
-    override val updates: Flow<DataUpdate<T>>
-        get() = source.updates.filter { filter.accepts(it) }
+    override val updates: Flow<Name>
+        get() = source.updates.filter {
+            val data = read(it)
+            filter.accepts(it, data?.meta, data?.type ?: dataType)
+        }
 }
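A sketch of using a filter with the reworked API (the `accepts(name, meta, type)` shape is inferred from the code above; the meta key `kind` is illustrative):

import space.kscience.dataforge.data.*
import space.kscience.dataforge.meta.get
import space.kscience.dataforge.meta.string

// Keep only items whose meta declares kind == "calibration".
val calibrationOnly = DataFilter { _, meta, _ ->
    meta?.get("kind")?.string == "calibration"
}

fun <T> DataSource<T>.calibrations(): DataSource<T> = filterData(calibrationOnly)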
 
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
index ccd37514..c8a0f2a7 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
@@ -8,14 +8,17 @@ import space.kscience.dataforge.names.*
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
-public interface DataBuilderScope<in T>{
-    public companion object: DataBuilderScope<Nothing>
+public interface DataBuilderScope<in T> {
+    public companion object : DataBuilderScope<Nothing>
 }
 
 @Suppress("UNCHECKED_CAST")
 public fun <T> DataBuilderScope(): DataBuilderScope<T> = DataBuilderScope as DataBuilderScope<T>
 
-public fun interface DataSink<in T>: DataBuilderScope<T> {
+/**
+ * Asynchronous data sink
+ */
+public fun interface DataSink<in T> : DataBuilderScope<T> {
     /**
      * Put data and notify listeners if needed
      */
@@ -59,7 +62,7 @@ private class MutableDataTreeRoot<T>(
 ) : MutableDataTree<T> {
 
     override val items = HashMap<NameToken, MutableDataTree<T>>()
-    override val updates = MutableSharedFlow<DataUpdate<T>>(extraBufferCapacity = 100)
+    override val updates = MutableSharedFlow<Name>(extraBufferCapacity = 100)
 
     inner class MutableDataTreeBranch(val branchName: Name) : MutableDataTree<T> {
 
@@ -67,10 +70,8 @@ private class MutableDataTreeRoot<T>(
 
         override val items = HashMap<NameToken, MutableDataTree<T>>()
 
-        override val updates: Flow<DataUpdate<T>> = this@MutableDataTreeRoot.updates.mapNotNull { update ->
-            update.name.removeFirstOrNull(branchName)?.let {
-                DataUpdate(update.data?.type ?: dataType, it, update.data)
-            }
+        override val updates: Flow<Name> = this@MutableDataTreeRoot.updates.mapNotNull { update ->
+            update.removeFirstOrNull(branchName)
         }
         override val dataType: KType get() = this@MutableDataTreeRoot.dataType
 
@@ -80,7 +81,7 @@ private class MutableDataTreeRoot<T>(
 
         override suspend fun put(token: NameToken, data: Data<T>?) {
             this.data = data
-            this@MutableDataTreeRoot.updates.emit(DataUpdate(data?.type ?: dataType, branchName + token, data))
+            this@MutableDataTreeRoot.updates.emit(branchName + token)
         }
     }
 
@@ -92,7 +93,7 @@ private class MutableDataTreeRoot<T>(
 
     override suspend fun put(token: NameToken, data: Data<T>?) {
         this.data = data
-        updates.emit(DataUpdate(data?.type ?: dataType, token.asName(), data))
+        updates.emit(token.asName())
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
index dfde4e0b..7ee87180 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
@@ -1,8 +1,6 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.flow.Flow
-import kotlinx.coroutines.flow.emptyFlow
-import kotlinx.coroutines.flow.first
+import kotlinx.coroutines.flow.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.names.*
 import kotlin.contracts.contract
@@ -31,13 +29,17 @@ public interface DataSource<out T> {
 public interface ObservableDataSource<out T> : DataSource<T> {
 
     /**
-     * Flow updates made to the data. Updates are considered critical. The producer will suspend unless all updates are consumed.
+     * Names of updated elements.
+     * Data updates with the same name could be conflated.
+     *
+     * Updates are considered critical:
+     * the producer will suspend until all updates are consumed.
      */
-    public val updates: Flow<DataUpdate<T>>
+    public val updates: Flow<Name>
 }
 
 public suspend fun <T> ObservableDataSource<T>.awaitData(name: Name): Data<T> {
-    return read(name) ?: updates.first { it.name == name && it.data != null }.data!!
+    return read(name) ?: updates.filter { it == name }.map { read(name) }.filterNotNull().first()
 }
 
 public suspend fun <T> ObservableDataSource<T>.awaitData(name: String): Data<T> =
@@ -59,7 +61,7 @@ public interface DataTree<out T> : ObservableDataSource<T> {
     /**
      * Flow updates made to the data
      */
-    override val updates: Flow<DataUpdate<T>>
+    override val updates: Flow<Name>
 
     public companion object {
         private object EmptyDataTree : DataTree<Nothing> {
@@ -68,7 +70,7 @@ public interface DataTree<out T> : ObservableDataSource<T> {
             override val dataType: KType = typeOf<Unit>()
 
             override fun read(name: Name): Data<Nothing>? = null
-            override val updates: Flow<DataUpdate<Nothing>> get() = emptyFlow()
+            override val updates: Flow<Name> get() = emptyFlow()
         }
 
         public val EMPTY: DataTree<Nothing> = EmptyDataTree
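A sketch of consuming the name-based updates flow: since updates now carry only names, a consumer re-reads the current data for each updated element (the `printUpdates` helper is illustrative):

import kotlinx.coroutines.flow.*
import space.kscience.dataforge.data.*
import space.kscience.dataforge.names.Name

suspend fun <T> ObservableDataSource<T>.printUpdates() {
    updates.collect { name: Name ->
        // Re-read the latest state; a null result means the element was removed.
        val current = read(name)
        println("updated $name -> ${current?.meta}")
    }
}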
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
index 95ddbbf7..e54710b1 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
@@ -32,7 +32,7 @@ public interface Goal<out T> {
     public companion object
 }
 
-public fun Goal<*>.launch(coroutineScope: CoroutineScope): Job = async(coroutineScope)
+public fun Goal<*>.launchIn(coroutineScope: CoroutineScope): Job = async(coroutineScope)
 
 public suspend fun <T> Goal<T>.await(): T = coroutineScope { async(this).await() }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
index 54a9715c..b20736ae 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
@@ -3,43 +3,16 @@ package space.kscience.dataforge.data
 import space.kscience.dataforge.meta.isEmpty
 import space.kscience.dataforge.misc.Named
 import space.kscience.dataforge.names.Name
-import kotlin.reflect.KType
-
-/**
- * An interface implementing a data update event.
- *
- * If [data] is null, then corresponding element should be removed.
- */
-public interface DataUpdate<out T> : Named {
-    public val type: KType
-    override val name: Name
-    public val data: Data<T>?
-}
-
-public fun <T> DataUpdate(type: KType, name: Name, data: Data<T>?): DataUpdate<T> = object : DataUpdate<T> {
-    override val type: KType = type
-    override val name: Name = name
-    override val data: Data<T>? = data
-
-    override fun toString(): String {
-        return "DataUpdate(type=$type, name=$name, data=$data)"
-    }
-
-}
 
 /**
  * A data coupled to a name.
  */
-public interface NamedData<out T> : DataUpdate<T>, Data<T> {
-    override val data: Data<T>
-}
+public interface NamedData<out T> : Data<T>, Named
 
-public operator fun NamedData<*>.component1(): Name = name
-public operator fun <T> NamedData<T>.component2(): Data<T> = data
 
 private class NamedDataImpl<T>(
     override val name: Name,
-    override val data: Data<T>,
+    val data: Data<T>,
 ) : Data<T> by data, NamedData<T> {
     override fun toString(): String = buildString {
         append("NamedData(name=\"$name\"")
@@ -54,7 +27,7 @@ private class NamedDataImpl<T>(
 }
 
 public fun <T> Data<T>.named(name: Name): NamedData<T> = if (this is NamedData) {
-    NamedDataImpl(name, this.data)
+    NamedDataImpl(name, this)
 } else {
     NamedDataImpl(name, this)
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataBuilder.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataBuilder.kt
new file mode 100644
index 00000000..0c1fe0b9
--- /dev/null
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataBuilder.kt
@@ -0,0 +1,63 @@
+package space.kscience.dataforge.data
+
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.MutableMeta
+import space.kscience.dataforge.misc.UnsafeKType
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.parseAsName
+import space.kscience.dataforge.names.plus
+import kotlin.reflect.KType
+import kotlin.reflect.typeOf
+
+
+public fun interface StaticDataBuilder<T> : DataBuilderScope<T> {
+    public fun put(name: Name, data: Data<T>)
+}
+
+private class DataMapBuilder<T> : StaticDataBuilder<T> {
+    val map = mutableMapOf<Name, Data<T>>()
+
+    override fun put(name: Name, data: Data<T>) {
+        if (map.containsKey(name)) {
+            error("Duplicate key '$name'")
+        } else {
+            map.put(name, data)
+        }
+    }
+}
+
+public fun <T> StaticDataBuilder<T>.put(name: String, data: Data<T>) {
+    put(name.parseAsName(), data)
+}
+
+public inline fun <T, reified T1 : T> StaticDataBuilder<T>.putValue(
+    name: String,
+    value: T1,
+    metaBuilder: MutableMeta.() -> Unit = {}
+) {
+    put(name, Data(value, Meta(metaBuilder)))
+}
+
+public fun <T> StaticDataBuilder<T>.putAll(prefix: Name, block: StaticDataBuilder<T>.() -> Unit) {
+    val map = DataMapBuilder<T>().apply(block).map
+    map.forEach { (name, data) ->
+        put(prefix + name, data)
+    }
+}
+
+public fun <T> StaticDataBuilder<T>.putAll(prefix: String, block: StaticDataBuilder<T>.() -> Unit) =
+    putAll(prefix.parseAsName(), block)
+
+public fun <T> StaticDataBuilder<T>.putAll(prefix: String, tree: DataTree<T>) {
+    tree.forEach { data ->
+        put(prefix + data.name, data)
+    }
+}
+
+@UnsafeKType
+public fun <T> DataTree.Companion.static(type: KType, block: StaticDataBuilder<T>.() -> Unit): DataTree<T> =
+    DataMapBuilder<T>().apply(block).map.asTree(type)
+
+@OptIn(UnsafeKType::class)
+public inline fun <reified T> DataTree.Companion.static(noinline block: StaticDataBuilder<T>.() -> Unit): DataTree<T> =
+    static(typeOf<T>(), block)
\ No newline at end of file
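A usage sketch for the new static builder: a fully synchronous way to declare a fixed DataTree without a coroutine scope (names are illustrative; the meta block assumes the usual `"key" put value` builder syntax):

import space.kscience.dataforge.data.*

val constants = DataTree.static<Int> {
    putValue("a", 1)
    putValue("b", 2) { "comment" put "illustrative meta" }
    putAll("branch") {
        putValue("c", 3)
    }
}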
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
index b3433616..640e8541 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
@@ -9,7 +9,7 @@ import space.kscience.dataforge.names.plus
 
 
 public suspend fun <T> DataSink<T>.put(value: NamedData<T>) {
-    put(value.name, value.data)
+    put(value.name, value)
 }
 
 public inline fun <T> DataSink<T>.putAll(
@@ -89,7 +89,7 @@ public suspend inline fun <reified T> DataSink<T>.putValue(
 
 public suspend fun <T> DataSink<T>.putAll(sequence: Sequence<NamedData<T>>) {
     sequence.forEach {
-        put(it.name, it.data)
+        put(it)
     }
 }
 
@@ -99,19 +99,27 @@ public suspend fun <T> DataSink<T>.putAll(map: Map<Name, Data<T>?>) {
     }
 }
 
-public suspend fun <T> DataSink<T>.putAll(tree: DataTree<T>) {
-    putAll(tree.asSequence())
-}
+//public suspend fun <T> DataSink<T>.putAll(tree: DataTree<T>) {
+//    putAll(tree.asSequence())
+//}
 
 /**
- * Copy given data set and mirror its changes to this [DataSink]. Suspends indefinitely.
+ * Mirror updates from [source] into this [DataSink], prepending an optional [prefix]. Suspends indefinitely.
  */
-public suspend fun <T : Any> DataSink<T>.putAllAndWatch(
-    source: DataTree<T>,
-    branchName: Name = Name.EMPTY,
+public suspend fun <T : Any> DataSink<T>.watch(
+    source: ObservableDataSource<T>,
+    prefix: Name = Name.EMPTY,
 ) {
-    putAll(branchName, source)
+//    putAll(branchName, source)
     source.updates.collect {
-        put(branchName + it.name, it.data)
+        put(prefix + it, source.read(it))
     }
+}
+
+public suspend fun <T : Any> MutableDataTree<T>.putAllAndWatch(
+    source: DataTree<T>,
+    prefix: Name = Name.EMPTY,
+) {
+    putAll(prefix, source)
+    watch(source, prefix)
 }
\ No newline at end of file
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
index 80105fc6..a8d5ac20 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
@@ -200,40 +200,44 @@ public inline fun <T, reified R> Iterable<NamedData<T>>.foldNamedToData(
 
 
 @UnsafeKType
-public fun <T, R> DataTree<T>.map(
+public fun <T, R> DataTree<T>.transformEach(
     outputType: KType,
     scope: CoroutineScope,
-    metaTransform: MutableMeta.() -> Unit = {},
+    metaTransform: MutableMeta.(name: Name) -> Unit = {},
     compute: suspend (NamedValueWithMeta<T>) -> R,
 ): DataTree<R> = DataTree<R>(
     outputType,
     scope,
     initialData = asSequence().associate { namedData: NamedData<T> ->
-        val newMeta = namedData.meta.toMutableMeta().apply(metaTransform).seal()
+        val newMeta = namedData.meta.toMutableMeta().apply {
+            metaTransform(namedData.name)
+        }.seal()
         val newData = Data(outputType, newMeta, scope.coroutineContext, listOf(namedData)) {
             compute(namedData.awaitWithMeta())
         }
         namedData.name to newData
     }
 ) {
-    updates.collect { update ->
-        val data: Data<T>? = update.data
-        if (data == null) put(update.name, null) else {
-            val newMeta = data.meta.toMutableMeta().apply(metaTransform).seal()
+    updates.collect { name ->
+        val data: Data<T>? = read(name)
+        if (data == null) put(name, null) else {
+            val newMeta = data.meta.toMutableMeta().apply {
+                metaTransform(name)
+            }.seal()
             val d = Data(outputType, newMeta, scope.coroutineContext, listOf(data)) {
-                compute(NamedValueWithMeta(update.name, data.await(), data.meta))
+                compute(NamedValueWithMeta(name, data.await(), data.meta))
             }
-            put(update.name, d)
+            put(name, d)
         }
     }
 }
 
 @OptIn(UnsafeKType::class)
-public inline fun <T, reified R> DataTree<T>.map(
+public inline fun <T, reified R> DataTree<T>.transformEach(
     scope: CoroutineScope,
-    noinline metaTransform: MutableMeta.() -> Unit = {},
+    noinline metaTransform: MutableMeta.(name: Name) -> Unit = {},
     noinline block: suspend (NamedValueWithMeta<T>) -> R,
-): DataTree<R> = map(typeOf<R>(), scope, metaTransform, block)
+): DataTree<R> = transformEach(typeOf<R>(), scope, metaTransform, block)
 
 public inline fun <T> DataTree<T>.forEach(block: (NamedData<T>) -> Unit) {
     asSequence().forEach(block)
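For illustration, the renamed transformEach can be used like this (assuming a DataTree<Int> called source and a CoroutineScope called scope; the meta key is made up):

    val doubled: DataTree<Int> = source.transformEach(
        scope,
        metaTransform = { name ->
            // the transform now receives the data name, so it can be recorded in the meta
            "sourceName" put name.toString()
        }
    ) { (_, value, _) ->
        value * 2
    }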
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
index 20afcc76..664f9904 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
@@ -17,7 +17,7 @@ import kotlin.reflect.typeOf
 private class FlatDataTree<T>(
     override val dataType: KType,
     private val dataSet: Map<Name, Data<T>>,
-    private val sourceUpdates: SharedFlow<DataUpdate<T>>,
+    private val sourceUpdates: SharedFlow<Name>,
     private val prefix: Name,
 ) : DataTree<T> {
     override val data: Data<T>? get() = dataSet[prefix]
@@ -29,10 +29,9 @@ private class FlatDataTree<T>(
 
     override fun read(name: Name): Data<T>? = dataSet[prefix + name]
 
-    override val updates: Flow<DataUpdate<T>> =
-        sourceUpdates.mapNotNull { update ->
-            update.name.removeFirstOrNull(prefix)?.let { DataUpdate(dataType, it, update.data) }
-        }
+    override val updates: Flow<Name> = sourceUpdates.mapNotNull { update ->
+        update.removeFirstOrNull(prefix)
+    }
 }
 
 /**
@@ -47,7 +46,7 @@ private class DataTreeBuilder<T>(
 
     private val mutex = Mutex()
 
-    private val updatesFlow = MutableSharedFlow<DataUpdate<T>>()
+    private val updatesFlow = MutableSharedFlow<Name>()
 
 
     override suspend fun put(name: Name, data: Data<T>?) {
@@ -58,7 +57,7 @@ private class DataTreeBuilder<T>(
                 map[name] = data
             }
         }
-        updatesFlow.emit(DataUpdate(data?.type ?: type, name, data))
+        updatesFlow.emit(name)
     }
 
     public fun build(): DataTree<T> = FlatDataTree(type, map, updatesFlow, Name.EMPTY)
@@ -74,7 +73,7 @@ public fun <T> DataTree(
     initialData: Map<Name, Data<T>> = emptyMap(),
     updater: suspend DataSink<T>.() -> Unit,
 ): DataTree<T> = DataTreeBuilder<T>(dataType, initialData).apply {
-    scope.launch{
+    scope.launch {
         updater()
     }
 }.build()
@@ -89,6 +88,13 @@ public inline fun <reified T> DataTree(
     noinline updater: suspend DataSink<T>.() -> Unit,
 ): DataTree<T> = DataTree(typeOf<T>(), scope, initialData, updater)
 
+@UnsafeKType
+public fun <T> DataTree(type: KType, data: Map<Name, Data<T>>): DataTree<T> =
+    DataTreeBuilder(type, data).build()
+
+@OptIn(UnsafeKType::class)
+public inline fun <reified T> DataTree(data: Map<Name, Data<T>>): DataTree<T> =
+    DataTree(typeOf<T>(), data)
 
 /**
  * Represent this flat data map as a [DataTree] without copying it
@@ -106,7 +112,7 @@ public inline fun <reified T> Map<Name, Data<T>>.asTree(): DataTree<T> = asTree(
 
 @UnsafeKType
 public fun <T> Sequence<NamedData<T>>.toTree(type: KType): DataTree<T> =
-    DataTreeBuilder(type, associate { it.name to it.data }).build()
+    DataTreeBuilder(type, associate { it.name to it }).build()
 
 
 /**
diff --git a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
index 1f78b36c..1d4d2ea4 100644
--- a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
+++ b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
@@ -12,7 +12,7 @@ import kotlin.time.Duration.Companion.milliseconds
 internal class DataTreeBuilderTest {
     @Test
     fun testTreeBuild() = runTest(timeout = 500.milliseconds) {
-        val node = DataTree<Any> {
+        val node = DataTree.static<Any> {
             putAll("primary") {
                 putValue("a", "a")
                 putValue("b", "b")
@@ -29,20 +29,18 @@ internal class DataTreeBuilderTest {
 
     @Test
     fun testDataUpdate() = runTest(timeout = 500.milliseconds) {
-        val updateData = DataTree<Any> {
-            putAll("update") {
-                put("a", Data.wrapValue("a"))
-                put("b", Data.wrapValue("b"))
-            }
+        val updateData = DataTree.static<Any> {
+            put("a", Data.wrapValue("a"))
+            put("b", Data.wrapValue("b"))
         }
 
-        val node = DataTree<Any> {
+        val node = DataTree.static<Any> {
             putAll("primary") {
                 putValue("a", "a")
                 putValue("b", "b")
             }
             putValue("root", "root")
-            putAll(updateData)
+            putAll("update", updateData)
         }
 
         assertEquals("a", node["update.a"]?.await())
@@ -56,11 +54,11 @@ internal class DataTreeBuilderTest {
         val subNode = MutableDataTree<Int>()
 
         val rootNode = MutableDataTree<Int>() {
-            job = launch {  putAllAndWatch(subNode,"sub".asName())}
+            job = launch { putAllAndWatch(subNode, "sub".asName()) }
         }
 
         repeat(10) {
-            subNode.updateValue("value[$it]", it)
+            subNode.putValue("value[$it]", it)
         }
 
         assertEquals(9, subNode.awaitData("value[9]").await())
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
index 0cc81f7a..ba7fdadf 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
@@ -1,7 +1,5 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.flow.Flow
-import kotlinx.coroutines.flow.filter
 import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
 import kotlin.reflect.KType
@@ -22,15 +20,15 @@ private fun <R> Data<*>.castOrNull(type: KType): Data<R>? =
         }
     }
 
-@Suppress("UNCHECKED_CAST")
-@DFInternal
-public fun <R> Sequence<DataUpdate<*>>.filterByDataType(type: KType): Sequence<NamedData<R>> =
-    filter { it.type.isSubtypeOf(type) } as Sequence<NamedData<R>>
-
-@Suppress("UNCHECKED_CAST")
-@DFInternal
-public fun <R> Flow<DataUpdate<*>>.filterByDataType(type: KType): Flow<NamedData<R>> =
-    filter { it.type.isSubtypeOf(type) } as Flow<NamedData<R>>
+//@Suppress("UNCHECKED_CAST")
+//@DFInternal
+//public fun <R> Sequence<DataUpdate<*>>.filterByDataType(type: KType): Sequence<NamedData<R>> =
+//    filter { it.type.isSubtypeOf(type) } as Sequence<NamedData<R>>
+//
+//@Suppress("UNCHECKED_CAST")
+//@DFInternal
+//public fun <R> Flow<DataUpdate<*>>.filterByDataType(type: KType): Flow<NamedData<R>> =
+//    filter { it.type.isSubtypeOf(type) } as Flow<NamedData<R>>
 
 /**
  * Select all data matching given type and filters. Does not modify paths
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index e9ec343c..6828b674 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -19,7 +19,7 @@ internal class ActionsTest {
             result { it + 1 }
         }
 
-        val data: DataTree<Int> = DataTree {
+        val data: DataTree<Int> = DataTree.static {
             repeat(10) {
                 putValue(it.toString(), it)
             }
@@ -42,7 +42,7 @@ internal class ActionsTest {
         val result: DataTree<Int> = plusOne(source)
 
         repeat(10) {
-            source.updateValue(it.toString(), it)
+            source.putValue(it.toString(), it)
         }
 
         assertEquals(2, result.awaitData("1").await())
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
index a1a754a4..f1e9130a 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
@@ -1,9 +1,9 @@
 package space.kscience.dataforge.workspace
 
 import kotlinx.coroutines.withContext
-import space.kscience.dataforge.data.DataSink
+import space.kscience.dataforge.data.DataBuilderScope
+import space.kscience.dataforge.data.DataTree
 import space.kscience.dataforge.data.GoalExecutionRestriction
-import space.kscience.dataforge.data.MutableDataTree
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MetaReader
 import space.kscience.dataforge.meta.MetaRepr
@@ -62,12 +62,12 @@ public interface TaskWithSpec<T, C : Any> : Task<T> {
 //    block: C.() -> Unit = {},
 //): TaskResult<T> = execute(workspace, taskName, spec(block))
 
-public class TaskResultBuilder<T>(
+public class TaskResultScope<T>(
+    public val resultType: KType,
     public val workspace: Workspace,
     public val taskName: Name,
     public val taskMeta: Meta,
-    private val dataSink: DataSink<T>,
-) : DataSink<T> by dataSink
+) : DataBuilderScope<T>
 
 /**
  * Create a [Task] that composes a result using [builder]. Only data from the workspace could be used.
@@ -77,10 +77,11 @@ public class TaskResultBuilder<T>(
  * @param descriptor of meta accepted by this task
  * @param builder for resulting data set
  */
+@UnsafeKType
 public fun <T : Any> Task(
     resultType: KType,
     descriptor: MetaDescriptor? = null,
-    builder: suspend TaskResultBuilder<T>.() -> Unit,
+    builder: suspend TaskResultScope<T>.() -> DataTree<T>,
 ): Task<T> = object : Task<T> {
 
     override val descriptor: MetaDescriptor? = descriptor
@@ -89,23 +90,19 @@ public fun <T : Any> Task(
         workspace: Workspace,
         taskName: Name,
         taskMeta: Meta,
-    ): TaskResult<T> {
+    ): TaskResult<T> = withContext(GoalExecutionRestriction() + workspace.goalLogger) {
         //TODO use safe builder and check for external data on add and detects cycles
-        @OptIn(UnsafeKType::class)
-        val dataset = MutableDataTree<T>(resultType).apply {
-            TaskResultBuilder(workspace, taskName, taskMeta, this).apply {
-                withContext(GoalExecutionRestriction() + workspace.goalLogger) {
-                    builder()
-                }
-            }
-        }
-        return workspace.wrapResult(dataset, taskName, taskMeta)
+        val dataset = TaskResultScope<T>(resultType, workspace, taskName, taskMeta).builder()
+
+
+        workspace.wrapResult(dataset, taskName, taskMeta)
     }
 }
 
+@OptIn(UnsafeKType::class)
 public inline fun <reified T : Any> Task(
     descriptor: MetaDescriptor? = null,
-    noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
+    noinline builder: suspend TaskResultScope<T>.() -> DataTree<T>,
 ): Task<T> = Task(typeOf<T>(), descriptor, builder)
 
 
@@ -117,13 +114,11 @@ public inline fun <reified T : Any> Task(
  * @param specification a specification for task configuration
  * @param builder for resulting data set
  */
-
-
 @Suppress("FunctionName")
 public fun <T : Any, C : MetaRepr> Task(
     resultType: KType,
     specification: MetaReader<C>,
-    builder: suspend TaskResultBuilder<T>.(C) -> Unit,
+    builder: suspend TaskResultScope<T>.(C) -> DataTree<T>,
 ): TaskWithSpec<T, C> = object : TaskWithSpec<T, C> {
     override val spec: MetaReader<C> = specification
 
@@ -134,15 +129,15 @@ public fun <T : Any, C : MetaRepr> Task(
     ): TaskResult<T> = withContext(GoalExecutionRestriction() + workspace.goalLogger) {
         //TODO use safe builder and check for external data on add and detects cycles
         val taskMeta = configuration.toMeta()
+
         @OptIn(UnsafeKType::class)
-        val dataset = MutableDataTree<T>(resultType).apply {
-            TaskResultBuilder(workspace, taskName, taskMeta, this).apply { builder(configuration) }
-        }
+        val dataset = TaskResultScope<T>(resultType, workspace, taskName, taskMeta).builder(configuration)
+
         workspace.wrapResult(dataset, taskName, taskMeta)
     }
 }
 
 public inline fun <reified T : Any, C : MetaRepr> Task(
     specification: MetaReader<C>,
-    noinline builder: suspend TaskResultBuilder<T>.(C) -> Unit,
+    noinline builder: suspend TaskResultScope<T>.(C) -> DataTree<T>,
 ): Task<T> = Task(typeOf<T>(), specification, builder)
\ No newline at end of file
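A hypothetical task written against the new signature: the builder is now an extension of TaskResultScope and returns the resulting DataTree instead of filling a sink (transformEach and allData come from the task builder helpers; the body is only illustrative):

    @OptIn(DFExperimental::class)
    val printNames = Task<Any> {
        // re-emit every workspace item, logging its name
        transformEach(allData) { (name, _, _) ->
            println("processing $name")
        }
    }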
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
index d4d4291a..aff438ca 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
@@ -6,7 +6,7 @@ import kotlinx.coroutines.joinAll
 import kotlinx.coroutines.launch
 import space.kscience.dataforge.data.DataTree
 import space.kscience.dataforge.data.asSequence
-import space.kscience.dataforge.data.launch
+import space.kscience.dataforge.data.launchIn
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.names.Name
 
@@ -33,9 +33,9 @@ public fun <T> Workspace.wrapResult(data: DataTree<T>, taskName: Name, taskMeta:
  * Start computation for all data elements of this node.
  * The resulting [Job] is completed only when all of them are completed.
  */
-public fun TaskResult<*>.launch(scope: CoroutineScope): Job {
+public fun TaskResult<*>.launchIn(scope: CoroutineScope): Job {
     val jobs = asSequence().map {
-        it.data.launch(scope)
+        it.launchIn(scope)
     }.toList()
     return scope.launch { jobs.joinAll() }
 }
\ No newline at end of file
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
index 013c0171..38d90b31 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
@@ -4,20 +4,17 @@ import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.ContextBuilder
 import space.kscience.dataforge.context.Global
-import space.kscience.dataforge.data.DataSink
 import space.kscience.dataforge.data.DataTree
-import space.kscience.dataforge.data.MutableDataTree
+import space.kscience.dataforge.data.StaticDataBuilder
+import space.kscience.dataforge.data.static
 import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
 import space.kscience.dataforge.misc.DFBuilder
-import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
-import kotlin.collections.set
 import kotlin.properties.PropertyDelegateProvider
 import kotlin.properties.ReadOnlyProperty
-import kotlin.reflect.typeOf
 
 public data class TaskReference<T>(public val taskName: Name, public val task: Task<T>) : DataSelector<T> {
 
@@ -42,7 +39,7 @@ public interface TaskContainer {
 public inline fun <reified T : Any> TaskContainer.registerTask(
     name: String,
     descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
-    noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
+    noinline builder: suspend TaskResultScope<T>.() -> DataTree<T>,
 ): Unit = registerTask(Name.parse(name), Task(MetaDescriptor(descriptorBuilder), builder))
 
 /**
@@ -51,7 +48,7 @@ public inline fun <reified T : Any> TaskContainer.registerTask(
 public inline fun <reified T : Any> TaskContainer.buildTask(
     name: String,
     descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
-    noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
+    noinline builder: suspend TaskResultScope<T>.() -> DataTree<T>,
 ): TaskReference<T> {
     val theName = Name.parse(name)
     val descriptor = MetaDescriptor(descriptorBuilder)
@@ -62,7 +59,7 @@ public inline fun <reified T : Any> TaskContainer.buildTask(
 
 public inline fun <reified T : Any> TaskContainer.task(
     descriptor: MetaDescriptor,
-    noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
+    noinline builder: suspend TaskResultScope<T>.() -> DataTree<T>,
 ): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> = PropertyDelegateProvider { _, property ->
     val taskName = Name.parse(property.name)
     val task = Task(descriptor, builder)
@@ -75,7 +72,7 @@ public inline fun <reified T : Any> TaskContainer.task(
  */
 public inline fun <reified T : Any, C : MetaRepr> TaskContainer.task(
     specification: MetaReader<C>,
-    noinline builder: suspend TaskResultBuilder<T>.(C) -> Unit,
+    noinline builder: suspend TaskResultScope<T>.(C) -> DataTree<T>,
 ): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> = PropertyDelegateProvider { _, property ->
     val taskName = Name.parse(property.name)
     val task = Task(specification, builder)
@@ -88,7 +85,7 @@ public inline fun <reified T : Any, C : MetaRepr> TaskContainer.task(
  */
 public inline fun <reified T : Any> TaskContainer.task(
     noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
-    noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
+    noinline builder: suspend TaskResultScope<T>.() -> DataTree<T>,
 ): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> =
     task(MetaDescriptor(descriptorBuilder), builder)
 
@@ -102,15 +99,15 @@ public inline fun <T : Any, reified R : Any> TaskContainer.action(
     noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
 ): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<R>>> =
     task(MetaDescriptor(descriptorBuilder)) {
-        result(action.execute(from(selector), taskMeta.copy(metaTransform), workspace))
+        action.execute(from(selector), taskMeta.copy(metaTransform), workspace)
     }
 
 public class WorkspaceBuilder(
     private val parentContext: Context = Global,
 ) : TaskContainer {
     private var context: Context? = null
-    @OptIn(UnsafeKType::class)
-    private val data = MutableDataTree<Any?>(typeOf<Any?>())
+
+    private var data: DataTree<Any?>? = null
     private val targets: HashMap<String, Meta> = HashMap()
     private val tasks = HashMap<Name, Task<*>>()
     private var cache: WorkspaceCache? = null
@@ -125,8 +122,8 @@ public class WorkspaceBuilder(
     /**
      * Define intrinsic data for the workspace
      */
-    public fun data(builder: DataSink<Any?>.() -> Unit) {
-        data.apply(builder)
+    public fun data(builder: StaticDataBuilder<Any?>.() -> Unit) {
+        data = DataTree.static(builder)
     }
 
     /**
@@ -152,7 +149,7 @@ public class WorkspaceBuilder(
         val postProcess: suspend (TaskResult<*>) -> TaskResult<*> = { result ->
             cache?.cache(result) ?: result
         }
-        return WorkspaceImpl(context ?: parentContext, data, targets, tasks, postProcess)
+        return WorkspaceImpl(context ?: parentContext, data ?: DataTree.EMPTY, targets, tasks, postProcess)
     }
 }
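With this change, intrinsic workspace data is declared through the static builder. A hypothetical minimal workspace, assuming the usual Workspace { } entry point over WorkspaceBuilder:

    val workspace = Workspace {
        data {
            // static values only; dynamic data now has to come from tasks
            putValue("numbers[1]", 1)
            putValue("numbers[2]", 2)
        }
    }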
 
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
index 15565995..49b485e5 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
@@ -1,12 +1,13 @@
 package space.kscience.dataforge.workspace
 
-import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.context.PluginFactory
 import space.kscience.dataforge.data.DataTree
-import space.kscience.dataforge.data.forEach
-import space.kscience.dataforge.data.putAll
-import space.kscience.dataforge.data.transform
-import space.kscience.dataforge.meta.*
+import space.kscience.dataforge.data.NamedValueWithMeta
+import space.kscience.dataforge.data.transformEach
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.MutableMeta
+import space.kscience.dataforge.meta.copy
+import space.kscience.dataforge.meta.remove
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.plus
@@ -14,7 +15,7 @@ import space.kscience.dataforge.names.plus
 /**
  * A task meta without a node corresponding to the task itself (removing a node with name of the task).
  */
-public val TaskResultBuilder<*>.defaultDependencyMeta: Meta
+public val TaskResultScope<*>.defaultDependencyMeta: Meta
     get() = taskMeta.copy {
         remove(taskName)
     }
@@ -25,12 +26,12 @@ public val TaskResultBuilder<*>.defaultDependencyMeta: Meta
  * @param selector a workspace data selector. Could be either task selector or initial data selector.
  * @param dependencyMeta meta used for selector. The same meta is used for caching. By default, uses [defaultDependencyMeta].
  */
-public suspend fun <T> TaskResultBuilder<*>.from(
+public suspend fun <T> TaskResultScope<*>.from(
     selector: DataSelector<T>,
     dependencyMeta: Meta = defaultDependencyMeta,
 ): DataTree<T> = selector.select(workspace, dependencyMeta)
 
-public suspend inline fun <T, reified P : WorkspacePlugin> TaskResultBuilder<*>.from(
+public suspend inline fun <T, reified P : WorkspacePlugin> TaskResultScope<*>.from(
     plugin: P,
     dependencyMeta: Meta = defaultDependencyMeta,
     selectorBuilder: P.() -> TaskReference<T>,
@@ -50,7 +51,7 @@ public suspend inline fun <T, reified P : WorkspacePlugin> TaskResultBuilder<*>.
  * @param dependencyMeta meta used for selector. The same meta is used for caching. By default, uses [defaultDependencyMeta].
  * @param selectorBuilder a builder of task from the plugin.
  */
-public suspend inline fun <reified T, reified P : WorkspacePlugin> TaskResultBuilder<*>.from(
+public suspend inline fun <reified T, reified P : WorkspacePlugin> TaskResultScope<*>.from(
     pluginFactory: PluginFactory<P>,
     dependencyMeta: Meta = defaultDependencyMeta,
     selectorBuilder: P.() -> TaskReference<T>,
@@ -64,7 +65,7 @@ public suspend inline fun <reified T, reified P : WorkspacePlugin> TaskResultBui
     return res as TaskResult<T>
 }
 
-public val TaskResultBuilder<*>.allData: DataSelector<*>
+public val TaskResultScope<*>.allData: DataSelector<*>
     get() = DataSelector { workspace, _ -> workspace.data }
 
 /**
@@ -77,43 +78,38 @@ public val TaskResultBuilder<*>.allData: DataSelector<*>
  * @param action process individual data asynchronously.
  */
 @DFExperimental
-public suspend inline fun <T, reified R> TaskResultBuilder<R>.transformEach(
+public suspend inline fun <T, reified R> TaskResultScope<R>.transformEach(
     selector: DataSelector<T>,
     dependencyMeta: Meta = defaultDependencyMeta,
-    dataMetaTransform: MutableMeta.(name: Name) -> Unit = {},
-    crossinline action: suspend (arg: T, name: Name, meta: Meta) -> R,
-) {
-    from(selector, dependencyMeta).forEach { data ->
-        val meta = data.meta.toMutableMeta().apply {
-            taskMeta[taskName]?.let { taskName.put(it) }
-            dataMetaTransform(data.name)
-        }
-
-        val res = data.transform(meta, workspace.context.coroutineContext) {
-            action(it, data.name, meta)
-        }
-
-        put(data.name, res)
+    crossinline dataMetaTransform: MutableMeta.(name: Name) -> Unit = {},
+    crossinline action: suspend (NamedValueWithMeta<T>) -> R,
+): DataTree<R> = from(selector, dependencyMeta).transformEach<T, R>(
+    workspace.context,
+    metaTransform = { name ->
+        taskMeta[taskName]?.let { taskName put it }
+        dataMetaTransform(name)
     }
-}
-
-/**
- * Set given [dataSet] as a task result.
- */
-public fun <T> TaskResultBuilder<T>.result(dataSet: DataTree<T>) {
-    this.putAll(dataSet)
-}
-
-/**
- * Use provided [action] to fill the result
- */
-@DFExperimental
-public suspend inline fun <T, reified R> TaskResultBuilder<R>.actionFrom(
-    selector: DataSelector<T>,
-    action: Action<T, R>,
-    dependencyMeta: Meta = defaultDependencyMeta,
 ) {
-    putAll(action.execute(from(selector, dependencyMeta), dependencyMeta, workspace))
+    action(it)
 }
 
+///**
+// * Set given [dataSet] as a task result.
+// */
+//public fun <T> TaskResultBuilder<T>.result(dataSet: DataTree<T>) {
+//    putAll(dataSet)
+//}
+
+///**
+// * Use provided [action] to fill the result
+// */
+//@DFExperimental
+//public suspend inline fun <T, reified R> TaskResultScope<R>.actionFrom(
+//    selector: DataSelector<T>,
+//    action: Action<T, R>,
+//    dependencyMeta: Meta = defaultDependencyMeta,
+//) {
+//    putAll(action.execute(from(selector, dependencyMeta), dependencyMeta, workspace))
+//}
+
 
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt
index 5f88ab74..9fc91e33 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt
@@ -3,17 +3,25 @@ package space.kscience.dataforge.workspace
 import space.kscience.dataforge.actions.AbstractAction
 import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.names.Name
 import kotlin.reflect.KType
 
-internal class CachingAction<T>(type: KType, private val caching: (NamedData<T>) -> NamedData<T>) :
-    AbstractAction<T, T>(type) {
-    override fun DataSink<T>.generate(source: DataTree<T>, meta: Meta) {
+internal class CachingAction<T>(
+    type: KType, private val caching: (NamedData<T>) -> NamedData<T>
+) : AbstractAction<T, T>(type) {
+
+    override fun DataBuilderScope<T>.generate(
+        source: DataTree<T>,
+        meta: Meta
+    ): Map<Name, Data<T>> = buildMap {
         source.forEach {
-            put(caching(it))
+            val cached = caching(it)
+            put(cached.name, cached)
         }
     }
 
-    override suspend fun DataSink<T>.update(source: DataTree<T>, meta: Meta, updatedData: DataUpdate<T>) {
-        put(updatedData.name, updatedData.data?.named(updatedData.name)?.let(caching))
+    override suspend fun DataSink<T>.update(source: DataTree<T>, actionMeta: Meta, updateName: Name) {
+        val updatedData = source.read(updateName)
+        put(updateName, updatedData?.named(updateName)?.let(caching))
     }
 }
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileDataTree.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileDataTree.kt
new file mode 100644
index 00000000..c3cd3a0b
--- /dev/null
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileDataTree.kt
@@ -0,0 +1,185 @@
+package space.kscience.dataforge.workspace
+
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.Job
+import kotlinx.coroutines.channels.awaitClose
+import kotlinx.coroutines.flow.*
+import kotlinx.coroutines.isActive
+import kotlinx.coroutines.launch
+import space.kscience.dataforge.data.Data
+import space.kscience.dataforge.data.DataTree
+import space.kscience.dataforge.data.StaticData
+import space.kscience.dataforge.io.*
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.copy
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.NameToken
+import space.kscience.dataforge.names.asName
+import space.kscience.dataforge.names.plus
+import java.nio.file.*
+import java.nio.file.attribute.BasicFileAttributes
+import java.nio.file.spi.FileSystemProvider
+import kotlin.io.path.*
+import kotlin.reflect.KType
+import kotlin.reflect.typeOf
+
+
+public class FileDataTree(
+    public val io: IOPlugin,
+    public val path: Path,
+    private val monitor: Boolean = false
+) : DataTree<Binary> {
+    override val dataType: KType = typeOf<Binary>()
+
+    /**
+     * Read data with a supported envelope format and binary format. If the envelope format is null, the binary is read directly from the file.
+     * The operation is blocking since it must read the meta header. The reading of the envelope body is lazy.
+     */
+    private fun readFileAsData(
+        path: Path,
+    ): Data<Binary> {
+        val envelope = io.readEnvelopeFile(path, true)
+        val updatedMeta = envelope.meta.copy {
+            FILE_PATH_KEY put path.toString()
+            FILE_EXTENSION_KEY put path.extension
+
+            val attributes = path.readAttributes<BasicFileAttributes>()
+            FILE_UPDATE_TIME_KEY put attributes.lastModifiedTime().toInstant().toString()
+            FILE_CREATE_TIME_KEY put attributes.creationTime().toInstant().toString()
+        }
+        return StaticData(
+            typeOf<Binary>(),
+            envelope.data ?: Binary.EMPTY,
+            updatedMeta
+        )
+    }
+
+    private fun readFilesFromDirectory(
+        path: Path
+    ): Map<NameToken, FileDataTree> = path.listDirectoryEntries().filterNot { it.name.startsWith("@") }.associate {
+        NameToken.parse(it.nameWithoutExtension) to FileDataTree(io, it)
+    }
+
+    override val data: Data<Binary>?
+        get() = when {
+            path.isRegularFile() -> {
+                //TODO process zip
+                readFileAsData(path)
+            }
+
+            path.isDirectory() -> {
+                val dataBinary: Binary? = path.resolve(IOPlugin.DATA_FILE_NAME).takeIf { it.exists() }?.asBinary()
+                val meta: Meta? = path.listDirectoryEntries().find {
+                    it.fileName.toString().startsWith(IOPlugin.META_FILE_NAME)
+                }?.let { io.readMetaFileOrNull(it) }
+                if (dataBinary != null || meta != null) {
+                    StaticData(
+                        typeOf<Binary>(),
+                        dataBinary ?: Binary.EMPTY,
+                        meta ?: Meta.EMPTY
+                    )
+                } else {
+                    null
+                }
+            }
+
+            else -> {
+                null
+            }
+        }
+
+
+    override val items: Map<NameToken, DataTree<Binary>>
+        get() = when {
+            path.isDirectory() -> readFilesFromDirectory(path)
+            path.isRegularFile() && path.extension == "zip" -> {
+                //Using an explicit Zip file system to avoid bizarre compatibility bugs
+                val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
+                    ?: error("Zip file system provider not found")
+                val fs = fsProvider.newFileSystem(path, emptyMap<String, Any>())
+                readFilesFromDirectory(fs.rootDirectories.single())
+            }
+
+            else -> emptyMap()
+        }
+
+
+    override val updates: Flow<Name> = if (monitor) {
+        callbackFlow<Name> {
+            val watchService: WatchService = path.fileSystem.newWatchService()
+
+            fun Path.toName() = Name(map { NameToken.parse(it.nameWithoutExtension) })
+
+            fun monitor(childPath: Path): Job {
+                val key: WatchKey = childPath.register(
+                    watchService, arrayOf(
+                        StandardWatchEventKinds.ENTRY_DELETE,
+                        StandardWatchEventKinds.ENTRY_MODIFY,
+                        StandardWatchEventKinds.ENTRY_CREATE,
+                    )
+                )
+
+                return launch {
+                    while (isActive) {
+                        for (event: WatchEvent<*> in key.pollEvents()) {
+                            val eventPath = childPath.resolve(event.context() as Path) //event paths are relative to the watched directory
+                            if (event.kind() === StandardWatchEventKinds.ENTRY_CREATE && eventPath.isDirectory()) {
+                                monitor(eventPath)
+                            } else {
+                                send(eventPath.relativeTo(path).toName())
+                            }
+                        }
+                        key.reset()
+                    }
+                }
+            }
+
+            monitor(path)
+
+            awaitClose {
+                watchService.close()
+            }
+
+        }.flowOn(Dispatchers.IO).shareIn(io.context, SharingStarted.WhileSubscribed())
+    } else {
+        emptyFlow()
+    }
+
+    public companion object {
+        public val FILE_KEY: Name = "file".asName()
+        public val FILE_PATH_KEY: Name = FILE_KEY + "path"
+        public val FILE_EXTENSION_KEY: Name = FILE_KEY + "extension"
+        public val FILE_CREATE_TIME_KEY: Name = FILE_KEY + "created"
+        public val FILE_UPDATE_TIME_KEY: Name = FILE_KEY + "updated"
+        public const val DF_FILE_EXTENSION: String = "df"
+        public val DEFAULT_IGNORE_EXTENSIONS: Set<String> = setOf(DF_FILE_EXTENSION)
+    }
+}
+
+
+///**
+// * @param resources The names of the resources to read.
+// * @param classLoader The class loader to use for loading the resources. By default, it uses the current thread's context class loader.
+// */
+//@DFExperimental
+//public fun DataSink<Binary>.resources(
+//    io: IOPlugin,
+//    resource: String,
+//    vararg otherResources: String,
+//    classLoader: ClassLoader = Thread.currentThread().contextClassLoader,
+//) {
+//    //create a file system if necessary
+//    val uri = Thread.currentThread().contextClassLoader.getResource("common")!!.toURI()
+//    try {
+//        uri.toPath()
+//    } catch (e: FileSystemNotFoundException) {
+//        FileSystems.newFileSystem(uri, mapOf("create" to "true"))
+//    }
+//
+//    listOf(resource, *otherResources).forEach { r ->
+//        val path = classLoader.getResource(r)?.toURI()?.toPath() ?: error(
+//            "Resource with name $r is not resolved"
+//        )
+//        io.readAsDataTree(r.asName(), path)
+//    }
+//}
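A sketch of reading a directory through the new FileDataTree (io is assumed to be an IOPlugin instance obtained from the surrounding context; the directory path is made up):

    val files: DataTree<Binary> = FileDataTree(io, Path.of("data"))
    files.forEach { item ->
        // each file or directory entry is exposed as lazily read Binary data
        println("${item.name} <- ${item.meta[FileDataTree.FILE_PATH_KEY]}")
    }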
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
index 8ba39ec1..9e986ba9 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
@@ -28,7 +28,7 @@ public class InMemoryWorkspaceCache : WorkspaceCache {
             val cachedData =  cache.getOrPut(TaskResultId(result.taskName, result.taskMeta)){
                 HashMap()
             }.getOrPut(data.name){
-                data.data
+                data
             }
             cachedData.checkType<T>(result.dataType).named(data.name)
         }
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt
deleted file mode 100644
index 37dafab9..00000000
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt
+++ /dev/null
@@ -1,188 +0,0 @@
-package space.kscience.dataforge.workspace
-
-import kotlinx.coroutines.*
-import space.kscience.dataforge.data.Data
-import space.kscience.dataforge.data.DataSink
-import space.kscience.dataforge.data.StaticData
-import space.kscience.dataforge.io.*
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.copy
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.NameToken
-import space.kscience.dataforge.names.asName
-import space.kscience.dataforge.names.plus
-import java.nio.file.*
-import java.nio.file.attribute.BasicFileAttributes
-import java.nio.file.spi.FileSystemProvider
-import kotlin.io.path.*
-import kotlin.reflect.typeOf
-
-
-public object FileData {
-    public val FILE_KEY: Name = "file".asName()
-    public val FILE_PATH_KEY: Name = FILE_KEY + "path"
-    public val FILE_EXTENSION_KEY: Name = FILE_KEY + "extension"
-    public val FILE_CREATE_TIME_KEY: Name = FILE_KEY + "created"
-    public val FILE_UPDATE_TIME_KEY: Name = FILE_KEY + "updated"
-    public const val DF_FILE_EXTENSION: String = "df"
-    public val DEFAULT_IGNORE_EXTENSIONS: Set<String> = setOf(DF_FILE_EXTENSION)
-
-}
-
-
-/**
- * Read data with supported envelope format and binary format. If the envelope format is null, then read binary directly from file.
- * The operation is blocking since it must read the meta header. The reading of envelope body is lazy
- */
-public fun IOPlugin.readFileData(
-    path: Path,
-): Data<Binary> {
-    val envelope = readEnvelopeFile(path, true)
-    val updatedMeta = envelope.meta.copy {
-        FileData.FILE_PATH_KEY put path.toString()
-        FileData.FILE_EXTENSION_KEY put path.extension
-
-        val attributes = path.readAttributes<BasicFileAttributes>()
-        FileData.FILE_UPDATE_TIME_KEY put attributes.lastModifiedTime().toInstant().toString()
-        FileData.FILE_CREATE_TIME_KEY put attributes.creationTime().toInstant().toString()
-    }
-    return StaticData(
-        typeOf<Binary>(),
-        envelope.data ?: Binary.EMPTY,
-        updatedMeta
-    )
-}
-
-public fun DataSink<Binary>.file(io: IOPlugin, name: Name, path: Path) {
-    if (!path.isRegularFile()) error("Only regular files could be handled by this function")
-    put(name, io.readFileData(path))
-}
-
-public fun DataSink<Binary>.directory(
-    io: IOPlugin,
-    name: Name,
-    path: Path,
-) {
-    if (!path.isDirectory()) error("Only directories could be handled by this function")
-    //process root data
-
-    var dataBinary: Binary? = null
-    var meta: Meta? = null
-    Files.list(path).forEach { childPath ->
-        val fileName = childPath.fileName.toString()
-        if (fileName == IOPlugin.DATA_FILE_NAME) {
-            dataBinary = childPath.asBinary()
-        } else if (fileName.startsWith(IOPlugin.META_FILE_NAME)) {
-            meta = io.readMetaFileOrNull(childPath)
-        } else if (!fileName.startsWith("@")) {
-            val token = if (childPath.isRegularFile() && childPath.extension in FileData.DEFAULT_IGNORE_EXTENSIONS) {
-                NameToken(childPath.nameWithoutExtension)
-            } else {
-                NameToken(childPath.name)
-            }
-
-            files(io, name + token, childPath)
-        }
-    }
-
-    //set data if it is relevant
-    if (dataBinary != null || meta != null) {
-        put(
-            name,
-            StaticData(
-                typeOf<Binary>(),
-                dataBinary ?: Binary.EMPTY,
-                meta ?: Meta.EMPTY
-            )
-        )
-    }
-}
-
-public fun DataSink<Binary>.files(
-    io: IOPlugin,
-    name: Name,
-    path: Path,
-) {
-    if (path.isRegularFile() && path.extension == "zip") {
-        //Using explicit Zip file system to avoid bizarre compatibility bugs
-        val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
-            ?: error("Zip file system provider not found")
-        val fs = fsProvider.newFileSystem(path, emptyMap<String, Any>())
-
-        files(io, name, fs.rootDirectories.first())
-    }
-    if (path.isRegularFile()) {
-        file(io, name, path)
-    } else {
-        directory(io, name, path)
-    }
-}
-
-
-private fun Path.toName() = Name(map { NameToken.parse(it.nameWithoutExtension) })
-
-public fun DataSink<Binary>.monitorFiles(
-    io: IOPlugin,
-    name: Name,
-    path: Path,
-    scope: CoroutineScope = io.context,
-): Job {
-    files(io, name, path)
-    return scope.launch(Dispatchers.IO) {
-        val watchService = path.fileSystem.newWatchService()
-
-        path.register(
-            watchService,
-            StandardWatchEventKinds.ENTRY_DELETE,
-            StandardWatchEventKinds.ENTRY_MODIFY,
-            StandardWatchEventKinds.ENTRY_CREATE
-        )
-
-        do {
-            val key = watchService.take()
-            if (key != null) {
-                for (event: WatchEvent<*> in key.pollEvents()) {
-                    val eventPath = event.context() as Path
-                    if (event.kind() == StandardWatchEventKinds.ENTRY_DELETE) {
-                        put(eventPath.toName(), null)
-                    } else {
-                        val fileName = eventPath.fileName.toString()
-                        if (!fileName.startsWith("@")) {
-                            files(io, name, eventPath)
-                        }
-                    }
-                }
-                key.reset()
-            }
-        } while (isActive && key != null)
-    }
-
-}
-
-/**
- * @param resources The names of the resources to read.
- * @param classLoader The class loader to use for loading the resources. By default, it uses the current thread's context class loader.
- */
-@DFExperimental
-public fun DataSink<Binary>.resources(
-    io: IOPlugin,
-    resource: String,
-    vararg otherResources: String,
-    classLoader: ClassLoader = Thread.currentThread().contextClassLoader,
-) {
-    //create a file system if necessary
-    val uri = Thread.currentThread().contextClassLoader.getResource("common")!!.toURI()
-    try {
-        uri.toPath()
-    } catch (e: FileSystemNotFoundException) {
-        FileSystems.newFileSystem(uri, mapOf("create" to "true"))
-    }
-
-    listOf(resource,*otherResources).forEach { r ->
-        val path = classLoader.getResource(r)?.toURI()?.toPath() ?: error(
-            "Resource with name $r is not resolved"
-        )
-        files(io, r.asName(), path)
-    }
-}
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
index 61caf7e0..688b5699 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
@@ -15,14 +15,14 @@ import space.kscience.dataforge.names.matches
  * Select the whole data set from the workspace filtered by type.
  */
 @OptIn(DFExperimental::class)
-public inline fun <reified T : Any> TaskResultBuilder<*>.dataByType(namePattern: Name? = null): DataSelector<T> =
+public inline fun <reified T : Any> TaskResultScope<*>.dataByType(namePattern: Name? = null): DataSelector<T> =
     DataSelector<T> { workspace, _ ->
         workspace.data.filterByType { name, _, _ ->
             namePattern == null || name.matches(namePattern)
         }
     }
 
-public suspend inline fun <reified T : Any> TaskResultBuilder<*>.fromTask(
+public suspend inline fun <reified T : Any> TaskResultScope<*>.fromTask(
     task: Name,
     taskMeta: Meta = Meta.EMPTY,
 ): DataTree<T> = workspace.produce(task, taskMeta).filterByType()
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/writeFileData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/writeFileData.kt
index 379a79dd..c65570df 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/writeFileData.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/writeFileData.kt
@@ -2,7 +2,9 @@ package space.kscience.dataforge.workspace
 
 import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.withContext
-import space.kscience.dataforge.data.*
+import space.kscience.dataforge.data.DataTree
+import space.kscience.dataforge.data.forEach
+import space.kscience.dataforge.data.meta
 import space.kscience.dataforge.io.*
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
@@ -32,8 +34,8 @@ public suspend fun <T : Any> IOPlugin.writeDataDirectory(
     } else if (!Files.isDirectory(path)) {
         error("Can't write a node into file")
     }
-    dataSet.forEach { (name, data) ->
-        val childPath = path.resolve(name.tokens.joinToString("/") { token -> token.toStringUnescaped() })
+    dataSet.forEach { data ->
+        val childPath = path.resolve(data.name.tokens.joinToString("/") { token -> token.toStringUnescaped() })
         childPath.parent.createDirectories()
         val envelope = data.toEnvelope(format)
         if (envelopeFormat != null) {
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
index 7a6a8202..1c43fba0 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
@@ -29,7 +29,7 @@ internal class CachingWorkspaceTest {
             inMemoryCache()
 
             val doFirst by task<Any> {
-                transformEach(allData) { _, name, _ ->
+                transformEach(allData) { (name, _, _) ->
                     firstCounter++
                     println("Done first on $name with flag=${taskMeta["flag"].boolean}")
                 }
@@ -39,7 +39,7 @@ internal class CachingWorkspaceTest {
                 transformEach(
                     doFirst,
                     dependencyMeta = if (taskMeta["flag"].boolean == true) taskMeta else Meta.EMPTY
-                ) { _, name, _ ->
+                ) { (name, _, _) ->
                     secondCounter++
                     println("Done second on $name with flag=${taskMeta["flag"].boolean ?: false}")
                 }
@@ -52,11 +52,11 @@ internal class CachingWorkspaceTest {
         val secondC = workspace.produce("doSecond")
         //use coroutineScope to wait for the result
         coroutineScope {
-            first.launch(this)
-            secondA.launch(this)
-            secondB.launch(this)
+            first.launchIn(this)
+            secondA.launchIn(this)
+            secondB.launchIn(this)
             //repeat to check caching
-            secondC.launch(this)
+            secondC.launchIn(this)
         }
 
         assertEquals(10, firstCounter)
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
index f526e194..d9fa9ae4 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
@@ -52,7 +52,7 @@ class FileDataTest {
         io.writeDataDirectory(dir, dataNode, StringIOFormat)
         println(dir.toUri().toString())
         val data = DataTree {
-            files(io, Name.EMPTY, dir)
+            io.readAsDataTree(Name.EMPTY, dir)
         }
         val reconstructed = data.map { (_, value) -> value.toByteArray().decodeToString() }
         assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
@@ -68,7 +68,7 @@ class FileDataTest {
         zip.deleteExisting()
         io.writeZip(zip, dataNode, StringIOFormat)
         println(zip.toUri().toString())
-        val reconstructed = DataTree { files(io, Name.EMPTY, zip) }
+        val reconstructed = DataTree { io.readAsDataTree(Name.EMPTY, zip) }
             .map { (_, value) -> value.toByteArray().decodeToString() }
         assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
         assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
index 0cf4f401..7d07481c 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
@@ -26,7 +26,7 @@ class FileWorkspaceCacheTest {
             }
         }
 
-        workspace.produce("echo").launch(this)
+        workspace.produce("echo").launchIn(this)
 
     }
 }
\ No newline at end of file

From 531f95d55f073408520c60bb0ce812c2718b9467 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Thu, 2 Jan 2025 10:09:00 +0300
Subject: [PATCH 68/77] Fix implementation of MutableDataTree. All tests pass

---
 .../space/kscience/dataforge/data/DataSink.kt | 81 ++++++++++++-------
 .../kscience/dataforge/data/DataSource.kt     |  5 +-
 .../dataforge/data/StaticDataBuilder.kt       | 24 +++---
 .../dataforge/data/DataTreeBuilderTest.kt     | 28 ++++---
 .../kscience/dataforge/data/ActionsTest.kt    |  6 +-
 .../kscience/dataforge/workspace/Task.kt      |  2 +-
 ...{taskBuilders.kt => taskResultBuilders.kt} | 22 +++--
 .../dataforge/workspace/FileDataTree.kt       | 15 +++-
 .../workspace/CachingWorkspaceTest.kt         |  8 +-
 .../workspace/DataPropagationTest.kt          | 12 ++-
 .../dataforge/workspace/FileDataTest.kt       | 20 ++---
 .../workspace/FileWorkspaceCacheTest.kt       |  6 +-
 .../workspace/SimpleWorkspaceTest.kt          | 54 +++++++------
 13 files changed, 162 insertions(+), 121 deletions(-)
 rename dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/{taskBuilders.kt => taskResultBuilders.kt} (87%)
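In short, writes to nested names now propagate through branches and surface in the root update flow. A minimal sketch, to be run inside a coroutine since put is suspending:

    val tree = MutableDataTree<Int>()
    tree.putValue("a.b", 42)               // intermediate branches are created on demand
    println(tree.awaitData("a.b").await()) // prints 42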

diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
index c8a0f2a7..c9786244 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
@@ -8,6 +8,9 @@ import space.kscience.dataforge.names.*
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
+/**
+ * A marker scope for data builders
+ */
 public interface DataBuilderScope<in T> {
     public companion object : DataBuilderScope<Nothing>
 }
@@ -30,21 +33,19 @@ public fun interface DataSink<in T> : DataBuilderScope<T> {
  * A mutable version of [DataTree]
  */
 public interface MutableDataTree<T> : DataTree<T>, DataSink<T> {
-    override var data: Data<T>?
-
     override val items: Map<NameToken, MutableDataTree<T>>
-
-    public fun getOrCreateItem(token: NameToken): MutableDataTree<T>
-
-    public suspend fun put(token: NameToken, data: Data<T>?)
-
-    override suspend fun put(name: Name, data: Data<T>?): Unit {
-        when (name.length) {
-            0 -> this.data = data
-            1 -> put(name.first(), data)
-            else -> getOrCreateItem(name.first()).put(name.cutFirst(), data)
-        }
-    }
+//
+//    public fun getOrCreateItem(token: NameToken): MutableDataTree<T>
+//
+//    public suspend fun put(token: NameToken, data: Data<T>?)
+//
+//    override suspend fun put(name: Name, data: Data<T>?): Unit {
+//        when (name.length) {
+//            0 -> this.data = data
+//            1 -> put(name.first(), data)
+//            else -> getOrCreateItem(name.first()).put(name.cutFirst(), data)
+//        }
+//    }
 }
 
 /**
@@ -62,11 +63,12 @@ private class MutableDataTreeRoot<T>(
 ) : MutableDataTree<T> {
 
     override val items = HashMap<NameToken, MutableDataTree<T>>()
-    override val updates = MutableSharedFlow<Name>(extraBufferCapacity = 100)
+    override val updates = MutableSharedFlow<Name>()
 
     inner class MutableDataTreeBranch(val branchName: Name) : MutableDataTree<T> {
 
         override var data: Data<T>? = null
+            private set
 
         override val items = HashMap<NameToken, MutableDataTree<T>>()
 
@@ -75,26 +77,43 @@ private class MutableDataTreeRoot<T>(
         }
         override val dataType: KType get() = this@MutableDataTreeRoot.dataType
 
+        override suspend fun put(
+            name: Name,
+            data: Data<T>?
+        ) {
+            when (name.length) {
+                0 -> {
+                    this.data = data
+                    this@MutableDataTreeRoot.updates.emit(branchName)
+                }
 
-        override fun getOrCreateItem(token: NameToken): MutableDataTree<T> =
-            items.getOrPut(token) { MutableDataTreeBranch(branchName + token) }
+                else -> {
+                    val token = name.first()
+                    items.getOrPut(token) { MutableDataTreeBranch(branchName + token) }.put(name.cutFirst(), data)
+                }
+            }
+        }
+    }
+    override var data: Data<T>? = null
+        private set
 
-        override suspend fun put(token: NameToken, data: Data<T>?) {
-            this.data = data
-            this@MutableDataTreeRoot.updates.emit(branchName + token)
+    override suspend fun put(
+        name: Name,
+        data: Data<T>?
+    ) {
+        when (name.length) {
+            0 -> {
+                this.data = data
+                this@MutableDataTreeRoot.updates.emit(Name.EMPTY)
+            }
+
+            else -> {
+                val token = name.first()
+                items.getOrPut(token) { MutableDataTreeBranch(token.asName()) }.put(name.cutFirst(), data)
+            }
         }
     }
 
-    override var data: Data<T>? = null
-
-    override fun getOrCreateItem(token: NameToken): MutableDataTree<T> = items.getOrPut(token) {
-        MutableDataTreeBranch(token.asName())
-    }
-
-    override suspend fun put(token: NameToken, data: Data<T>?) {
-        this.data = data
-        updates.emit(token.asName())
-    }
 }
 
 /**
@@ -106,7 +125,7 @@ public fun <T> MutableDataTree(
 ): MutableDataTree<T> = MutableDataTreeRoot<T>(type)
 
 /**
- * Create and initialize a observable mutable data tree.
+ * Create and initialize an observable mutable data tree.
  */
 @OptIn(UnsafeKType::class)
 public inline fun <reified T> MutableDataTree(
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
index 7ee87180..531d37fd 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
@@ -38,9 +38,8 @@ public interface ObservableDataSource<out T> : DataSource<T> {
     public val updates: Flow<Name>
 }
 
-public suspend fun <T> ObservableDataSource<T>.awaitData(name: Name): Data<T> {
-    return read(name) ?: updates.filter { it == name }.map { read(name) }.filterNotNull().first()
-}
+public suspend fun <T> ObservableDataSource<T>.awaitData(name: Name): Data<T> =
+    read(name) ?: updates.filter { it == name }.mapNotNull { read(name) }.first()
 
 public suspend fun <T> ObservableDataSource<T>.awaitData(name: String): Data<T> =
     awaitData(name.parseAsName())
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataBuilder.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataBuilder.kt
index 0c1fe0b9..54bcf19c 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataBuilder.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataBuilder.kt
@@ -11,13 +11,13 @@ import kotlin.reflect.typeOf
 
 
 public fun interface StaticDataBuilder<T> : DataBuilderScope<T> {
-    public fun put(name: Name, data: Data<T>)
+    public fun data(name: Name, data: Data<T>)
 }
 
 private class DataMapBuilder<T> : StaticDataBuilder<T> {
     val map = mutableMapOf<Name, Data<T>>()
 
-    override fun put(name: Name, data: Data<T>) {
+    override fun data(name: Name, data: Data<T>) {
         if (map.containsKey(name)) {
             error("Duplicate key '$name'")
         } else {
@@ -26,31 +26,31 @@ private class DataMapBuilder<T> : StaticDataBuilder<T> {
     }
 }
 
-public fun <T> StaticDataBuilder<T>.put(name: String, data: Data<T>) {
-    put(name.parseAsName(), data)
+public fun <T> StaticDataBuilder<T>.data(name: String, data: Data<T>) {
+    data(name.parseAsName(), data)
 }
 
-public inline fun <T, reified T1 : T> StaticDataBuilder<T>.putValue(
+public inline fun <T, reified T1 : T> StaticDataBuilder<T>.value(
     name: String,
     value: T1,
     metaBuilder: MutableMeta.() -> Unit = {}
 ) {
-    put(name, Data(value, Meta(metaBuilder)))
+    data(name, Data(value, Meta(metaBuilder)))
 }
 
-public fun <T> StaticDataBuilder<T>.putAll(prefix: Name, block: StaticDataBuilder<T>.() -> Unit) {
+public fun <T> StaticDataBuilder<T>.node(prefix: Name, block: StaticDataBuilder<T>.() -> Unit) {
     val map = DataMapBuilder<T>().apply(block).map
     map.forEach { (name, data) ->
-        put(prefix + name, data)
+        data(prefix + name, data)
     }
 }
 
-public fun <T> StaticDataBuilder<T>.putAll(prefix: String, block: StaticDataBuilder<T>.() -> Unit) =
-    putAll(prefix.parseAsName(), block)
+public fun <T> StaticDataBuilder<T>.node(prefix: String, block: StaticDataBuilder<T>.() -> Unit) =
+    node(prefix.parseAsName(), block)
 
-public fun <T> StaticDataBuilder<T>.putAll(prefix: String, tree: DataTree<T>) {
+public fun <T> StaticDataBuilder<T>.node(prefix: String, tree: DataTree<T>) {
     tree.forEach { data ->
-        put(prefix + data.name, data)
+        data(prefix.parseAsName() + data.name, data)
     }
 }
 
diff --git a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
index 1d4d2ea4..6f1a7ed1 100644
--- a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
+++ b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
@@ -13,12 +13,12 @@ internal class DataTreeBuilderTest {
     @Test
     fun testTreeBuild() = runTest(timeout = 500.milliseconds) {
         val node = DataTree.static<Any> {
-            putAll("primary") {
-                putValue("a", "a")
-                putValue("b", "b")
+            node("primary") {
+                value("a", "a")
+                value("b", "b")
             }
-            putValue("c.d", "c.d")
-            putValue("c.f", "c.f")
+            value("c.d", "c.d")
+            value("c.f", "c.f")
         }
         assertEquals("a", node["primary.a"]?.await())
         assertEquals("b", node["primary.b"]?.await())
@@ -30,17 +30,17 @@ internal class DataTreeBuilderTest {
     @Test
     fun testDataUpdate() = runTest(timeout = 500.milliseconds) {
         val updateData = DataTree.static<Any> {
-            put("a", Data.wrapValue("a"))
-            put("b", Data.wrapValue("b"))
+            data("a", Data.wrapValue("a"))
+            data("b", Data.wrapValue("b"))
         }
 
         val node = DataTree.static<Any> {
-            putAll("primary") {
-                putValue("a", "a")
-                putValue("b", "b")
+            node("primary") {
+                value("a", "a")
+                value("b", "b")
             }
-            putValue("root", "root")
-            putAll("update", updateData)
+            value("root", "root")
+            node("update", updateData)
         }
 
         assertEquals("a", node["update.a"]?.await())
@@ -54,7 +54,9 @@ internal class DataTreeBuilderTest {
         val subNode = MutableDataTree<Int>()
 
         val rootNode = MutableDataTree<Int>() {
-            job = launch { putAllAndWatch(subNode, "sub".asName()) }
+            job = launch {
+                putAllAndWatch(subNode, "sub".asName())
+            }
         }
 
         repeat(10) {
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index 6828b674..1789237e 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -1,7 +1,6 @@
 package space.kscience.dataforge.data
 
 import kotlinx.coroutines.ExperimentalCoroutinesApi
-import kotlinx.coroutines.test.advanceUntilIdle
 import kotlinx.coroutines.test.runTest
 import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.actions.invoke
@@ -21,14 +20,13 @@ internal class ActionsTest {
 
         val data: DataTree<Int> = DataTree.static {
             repeat(10) {
-                putValue(it.toString(), it)
+                value(it.toString(), it)
             }
         }
 
         val result = plusOne(data)
 
-        advanceUntilIdle()
-        assertEquals(2, result["1"]?.await())
+        assertEquals(2, result.awaitData("1").await())
     }
 
     @Test
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
index f1e9130a..06134ce6 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
@@ -62,7 +62,7 @@ public interface TaskWithSpec<T, C : Any> : Task<T> {
 //    block: C.() -> Unit = {},
 //): TaskResult<T> = execute(workspace, taskName, spec(block))
 
-public class TaskResultScope<T>(
+public class TaskResultScope<in T>(
     public val resultType: KType,
     public val workspace: Workspace,
     public val taskName: Name,
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskResultBuilders.kt
similarity index 87%
rename from dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
rename to dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskResultBuilders.kt
index 49b485e5..9df49aba 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskResultBuilders.kt
@@ -1,14 +1,13 @@
 package space.kscience.dataforge.workspace
 
 import space.kscience.dataforge.context.PluginFactory
-import space.kscience.dataforge.data.DataTree
-import space.kscience.dataforge.data.NamedValueWithMeta
-import space.kscience.dataforge.data.transformEach
+import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
 import space.kscience.dataforge.meta.copy
 import space.kscience.dataforge.meta.remove
 import space.kscience.dataforge.misc.DFExperimental
+import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.plus
 
@@ -77,13 +76,15 @@ public val TaskResultScope<*>.allData: DataSelector<*>
  * @param dataMetaTransform additional transformation of individual data meta.
  * @param action process individual data asynchronously.
  */
+@OptIn(UnsafeKType::class)
 @DFExperimental
-public suspend inline fun <T, reified R> TaskResultScope<R>.transformEach(
+public suspend fun <T, R> TaskResultScope<R>.transformEach(
     selector: DataSelector<T>,
     dependencyMeta: Meta = defaultDependencyMeta,
-    crossinline dataMetaTransform: MutableMeta.(name: Name) -> Unit = {},
-    crossinline action: suspend (NamedValueWithMeta<T>) -> R,
+    dataMetaTransform: MutableMeta.(name: Name) -> Unit = {},
+    action: suspend NamedValueWithMeta<T>.() -> R,
 ): DataTree<R> = from(selector, dependencyMeta).transformEach<T, R>(
+    resultType,
     workspace.context,
     metaTransform = { name ->
         taskMeta[taskName]?.let { taskName put it }
@@ -93,6 +94,15 @@ public suspend inline fun <T, reified R> TaskResultScope<R>.transformEach(
     action(it)
 }
 
+@OptIn(UnsafeKType::class)
+public fun <R> TaskResultScope<R>.result(data: Data<R>): DataTree<R> = DataTree.static(resultType) {
+    data(Name.EMPTY, data)
+}
+
+@OptIn(UnsafeKType::class)
+public fun <R> TaskResultScope<R>.result(builder: StaticDataBuilder<R>.() -> Unit): DataTree<R> =
+    DataTree.static(resultType, builder)
+
 ///**
 // * Set given [dataSet] as a task result.
 // */
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileDataTree.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileDataTree.kt
index c3cd3a0b..049ec2ec 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileDataTree.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileDataTree.kt
@@ -68,10 +68,18 @@ public class FileDataTree(
             }
 
             path.isDirectory() -> {
-                val dataBinary: Binary? = path.resolve(IOPlugin.DATA_FILE_NAME)?.asBinary()
-                val meta: Meta? = path.find { it.fileName.startsWith(IOPlugin.META_FILE_NAME) }?.let {
+                //FIXME find data and meta in a single pass instead of two
+
+                val dataBinary: Binary? = path.listDirectoryEntries().find {
+                    it.fileName.nameWithoutExtension == IOPlugin.DATA_FILE_NAME
+                }?.asBinary()
+
+                val meta: Meta? = path.listDirectoryEntries().find {
+                    it.fileName.nameWithoutExtension == IOPlugin.META_FILE_NAME
+                }?.let {
                     io.readMetaFileOrNull(it)
                 }
+
                 if (dataBinary != null || meta != null) {
                     StaticData(
                         typeOf<Binary>(),
@@ -156,6 +164,9 @@ public class FileDataTree(
     }
 }
 
+public fun IOPlugin.readDirectory(path: Path, monitor: Boolean = false): FileDataTree =
+    FileDataTree(this, path, monitor)
+
 
 ///**
 // * @param resources The names of the resources to read.
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
index 1c43fba0..eb705e56 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
@@ -3,7 +3,7 @@ package space.kscience.dataforge.workspace
 import kotlinx.coroutines.coroutineScope
 import kotlinx.coroutines.test.runTest
 import org.junit.jupiter.api.Test
-import space.kscience.dataforge.data.putValue
+import space.kscience.dataforge.data.value
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.boolean
 import space.kscience.dataforge.meta.get
@@ -22,14 +22,14 @@ internal class CachingWorkspaceTest {
             data {
                 //statically initialize data
                 repeat(5) {
-                    putValue("myData[$it]", it)
+                    value("myData[$it]", it)
                 }
             }
 
             inMemoryCache()
 
             val doFirst by task<Any> {
-                transformEach(allData) { (name, _, _) ->
+                transformEach(allData) {
                     firstCounter++
                     println("Done first on $name with flag=${taskMeta["flag"].boolean}")
                 }
@@ -39,7 +39,7 @@ internal class CachingWorkspaceTest {
                 transformEach(
                     doFirst,
                     dependencyMeta = if (taskMeta["flag"].boolean == true) taskMeta else Meta.EMPTY
-                ) { (name, _, _) ->
+                ) {
                     secondCounter++
                     println("Done second on $name with flag=${taskMeta["flag"].boolean ?: false}")
                 }
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
index cd38f809..9cb040be 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
@@ -20,14 +20,12 @@ class DataPropagationTestPlugin : WorkspacePlugin() {
         val result: Data<Int> = selectedData.foldToData(0) { result, data ->
             result + data.value
         }
-        put("result", result)
+        result(result)
     }
 
 
     val singleData by task<Int> {
-        workspace.data.filterByType<Int>()["myData[12]"]?.let {
-            put("result", it)
-        }
+        result(workspace.data.filterByType<Int>()["myData[12]"]!!)
     }
 
 
@@ -47,7 +45,7 @@ class DataPropagationTest {
         }
         data {
             repeat(100) {
-                putValue("myData[$it]", it)
+                value("myData[$it]", it)
             }
         }
     }
@@ -55,12 +53,12 @@ class DataPropagationTest {
     @Test
     fun testAllData() = runTest {
         val node = testWorkspace.produce("Test.allData")
-        assertEquals(4950, node.content.asSequence().single().await())
+        assertEquals(4950, node.content.data?.await())
     }
 
     @Test
     fun testSingleData() = runTest {
         val node = testWorkspace.produce("Test.singleData")
-        assertEquals(12, node.content.asSequence().single().await())
+        assertEquals(12, node.content.data?.await())
     }
 }
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
index d9fa9ae4..c9e5ea5a 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
@@ -12,7 +12,6 @@ import space.kscience.dataforge.io.*
 import space.kscience.dataforge.io.yaml.YamlPlugin
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.Name
 import java.nio.file.Files
 import kotlin.io.path.deleteExisting
 import kotlin.io.path.fileSize
@@ -22,13 +21,13 @@ import kotlin.test.assertEquals
 
 
 class FileDataTest {
-    val dataNode = DataTree<String> {
-        putAll("dir") {
-            putValue("a", "Some string") {
+    val dataNode = DataTree.static<String> {
+        node("dir") {
+            value("a", "Some string") {
                 "content" put "Some string"
             }
         }
-        putValue("b", "root data")
+        value("b", "root data")
 //        meta {
 //            "content" put "This is root meta node"
 //        }
@@ -51,10 +50,10 @@ class FileDataTest {
         val dir = Files.createTempDirectory("df_data_node")
         io.writeDataDirectory(dir, dataNode, StringIOFormat)
         println(dir.toUri().toString())
-        val data = DataTree {
-            io.readAsDataTree(Name.EMPTY, dir)
+        val data = io.readDirectory(dir)
+        val reconstructed = data.transformEach(this) { (_, value) ->
+            value.toByteArray().decodeToString()
         }
-        val reconstructed = data.map { (_, value) -> value.toByteArray().decodeToString() }
         assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
         assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
     }
@@ -68,8 +67,9 @@ class FileDataTest {
         zip.deleteExisting()
         io.writeZip(zip, dataNode, StringIOFormat)
         println(zip.toUri().toString())
-        val reconstructed = DataTree { io.readAsDataTree(Name.EMPTY, zip) }
-            .map { (_, value) -> value.toByteArray().decodeToString() }
+        val reconstructed = io.readDirectory(zip).transformEach(this) { (_, value) ->
+            value.toByteArray().decodeToString()
+        }
         assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
         assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
 
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
index 7d07481c..7aa1fb0e 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
@@ -3,7 +3,7 @@ package space.kscience.dataforge.workspace
 import kotlinx.coroutines.ExperimentalCoroutinesApi
 import kotlinx.coroutines.test.runTest
 import org.junit.jupiter.api.Test
-import space.kscience.dataforge.data.putValue
+import space.kscience.dataforge.data.value
 import space.kscience.dataforge.misc.DFExperimental
 import java.nio.file.Files
 
@@ -16,13 +16,13 @@ class FileWorkspaceCacheTest {
             data {
                 //statically initialize data
                 repeat(5) {
-                    putValue("myData[$it]", it)
+                    value("myData[$it]", it)
                 }
             }
             fileCache(Files.createTempDirectory("dataforge-temporary-cache"))
 
             val echo by task<String> {
-                transformEach(dataByType<String>()) { arg, _, _ -> arg }
+                transformEach(dataByType<String>()) { value }
             }
         }
 
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
index 39837c15..111b3b89 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
@@ -37,9 +37,9 @@ internal object TestPlugin : WorkspacePlugin() {
 
     val test by task {
         // type is inferred
-        transformEach(dataByType<Int>()) { arg, _, _ ->
-            logger.info { "Test: $arg" }
-            arg
+        transformEach(dataByType<Int>()) {
+            logger.info { "Test: $value" }
+            value
         }
 
     }
@@ -62,42 +62,42 @@ internal class SimpleWorkspaceTest {
         data {
             //statically initialize data
             repeat(100) {
-                putValue("myData[$it]", it)
+                value("myData[$it]", it)
             }
         }
 
         val filterOne by task<Int> {
             val name by taskMeta.string { error("Name field not defined") }
-            from(testPluginFactory) { test }[name]?.let { source: Data<Int> ->
-                put(name, source)
-            }
+            result(from(testPluginFactory) { test }[name]!!)
         }
 
         val square by task<Int> {
-            transformEach(dataByType<Int>()) { arg, name, meta ->
+            transformEach(dataByType<Int>()) {
                 if (meta["testFlag"].boolean == true) {
                     println("Side effect")
                 }
                 workspace.logger.info { "Starting square on $name" }
-                arg * arg
+                value * value
             }
         }
 
         val linear by task<Int> {
-            transformEach(dataByType<Int>()) { arg, name, _ ->
+            transformEach(dataByType<Int>()) {
                 workspace.logger.info { "Starting linear on $name" }
-                arg * 2 + 1
+                value * 2 + 1
             }
         }
 
         val fullSquare by task<Int> {
             val squareData = from(square)
             val linearData = from(linear)
-            squareData.forEach { data ->
-                val newData: Data<Int> = data.combine(linearData[data.name]!!) { l, r ->
-                    l + r
+            result {
+                squareData.forEach { data ->
+                    val newData: Data<Int> = data.combine(linearData[data.name]!!) { l, r ->
+                        l + r
+                    }
+                    data(data.name, newData)
                 }
-                put(data.name, newData)
             }
         }
 
@@ -106,7 +106,7 @@ internal class SimpleWorkspaceTest {
             val res = from(square).foldToData(0) { l, r ->
                 l + r.value
             }
-            put("sum", res)
+            result(res)
         }
 
         val averageByGroup by task<Int> {
@@ -116,13 +116,15 @@ internal class SimpleWorkspaceTest {
                 l + r.value
             }
 
-            put("even", evenSum)
             val oddSum = workspace.data.filterByType<Int> { name, _, _ ->
                 name.toString().toInt() % 2 == 1
             }.foldToData(0) { l, r ->
                 l + r.value
             }
-            put("odd", oddSum)
+            result {
+                data("even", evenSum)
+                data("odd", oddSum)
+            }
         }
 
         val delta by task<Int> {
@@ -132,15 +134,17 @@ internal class SimpleWorkspaceTest {
             val res = even.combine(odd) { l, r ->
                 l - r
             }
-            put("res", res)
+            result(res)
         }
 
         val customPipe by task<Int> {
-            workspace.data.filterByType<Int>().forEach { data ->
-                val meta = data.meta.toMutableMeta().apply {
-                    "newValue" put 22
+            result {
+                workspace.data.filterByType<Int>().forEach { data ->
+                    val meta = data.meta.toMutableMeta().apply {
+                        "newValue" put 22
+                    }
+                    data(data.name + "new", data.transform { (data.meta["value"].int ?: 0) + it })
                 }
-                put(data.name + "new", data.transform { (data.meta["value"].int ?: 0) + it })
             }
         }
 
@@ -157,7 +161,7 @@ internal class SimpleWorkspaceTest {
     @Test
     fun testMetaPropagation() = runTest(timeout = 100.milliseconds) {
         val node = workspace.produce("sum") { "testFlag" put true }
-        val res = node["sum"]!!.await()
+        val res = node.data?.await()
     }
 
     @Test
@@ -175,7 +179,7 @@ internal class SimpleWorkspaceTest {
                 """
                 Name: ${it.name}
                 Meta: ${it.meta}
-                Data: ${it.data.await()}
+                Data: ${it.await()}
             """.trimIndent()
             )
         }

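The rename above (`put`/`putValue`/`putAll` to `data`/`value`/`node`, plus the new `result` builders) is easiest to read as a short usage sketch mirroring the updated tests. This is a minimal sketch, assuming the `node`/`value` extensions resolve from `space.kscience.dataforge.data` as the diff suggests; exact signatures may differ.

```kotlin
// A minimal sketch of the renamed static-builder DSL, based on DataTreeBuilderTest above.
// Assumption: the node/value extensions are importable from space.kscience.dataforge.data.
import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.data.node
import space.kscience.dataforge.data.value

val sketchTree: DataTree<Any> = DataTree.static<Any> {
    node("primary") {            // was: putAll("primary") { ... }
        value("a", "a")          // was: putValue("a", "a")
        value("b", "b")
    }
    value("c.d", "c.d")          // dotted names create nested branches
}
```
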
From c11007216c2f2154a9a6c0c598903432bf13bf57 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Thu, 2 Jan 2025 14:27:22 +0300
Subject: [PATCH 69/77] Update documentation

---
 README.md                                     | 88 ++++++++++++++++++-
 dataforge-context/README.md                   |  4 +-
 dataforge-data/README.md                      |  4 +-
 dataforge-io/README.md                        | 13 ++-
 dataforge-io/build.gradle.kts                 | 58 +++++++++++-
 dataforge-io/dataforge-io-proto/README.md     | 21 +++++
 dataforge-io/dataforge-io-yaml/README.md      |  4 +-
 .../dataforge-io-yaml/build.gradle.kts        |  6 +-
 .../space/kscience/dataforge/io/Responder.kt  | 12 ---
 dataforge-meta/README.md                      | 11 ++-
 dataforge-meta/build.gradle.kts               | 46 +++++++++-
 dataforge-scripting/README.md                 |  4 +-
 dataforge-scripting/build.gradle.kts          | 10 ++-
 dataforge-workspace/README.md                 |  4 +-
 docs/templates/README-TEMPLATE.md             | 26 +++---
 gradle.properties                             |  2 +-
 gradle/wrapper/gradle-wrapper.properties      |  2 +-
 17 files changed, 259 insertions(+), 56 deletions(-)
 create mode 100644 dataforge-io/dataforge-io-proto/README.md
 delete mode 100644 dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Responder.kt

diff --git a/README.md b/README.md
index da910804..be5591cf 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,70 @@
 [![JetBrains Research](https://jb.gg/badges/research.svg)](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub)
 [![DOI](https://zenodo.org/badge/148831678.svg)](https://zenodo.org/badge/latestdoi/148831678)
 
-![Gradle build](https://github.com/mipt-npm/dataforge-core/workflows/Gradle%20build/badge.svg)
+## Publications
+
+* [A general overview](https://doi.org/10.1051/epjconf/201817705003)
+* [An application in "Troitsk nu-mass" experiment](https://doi.org/10.1088/1742-6596/1525/1/012024)
+
+## Video
+
+* [A presentation on application of DataForge (legacy version) to Troitsk nu-mass analysis.](https://youtu.be/OpWzLXUZnLI?si=3qn7EMruOHMJX3Bc)
+
+## Questions and Answers
+
+In this section, we will try to cover DataForge main ideas in the form of questions and answers.
+
+### General
+
+**Q**: I have a lot of data to analyze. The analysis process is complicated, requires a lot of stages, and the data flow is not always obvious. Also, the data size is huge, so I don't want to perform operations I don't need (calculate something I won't need or calculate something twice). I need the analysis to be performed in parallel and probably on a remote computer. By the way, I am sick and tired of scripts that modify other scripts that control scripts. Could you help me?
+
+**A**: Yes, that is precisely the problem DataForge was made to solve. It allows performing automated data manipulations with optimization and parallelization. The important thing is that data processing recipes are written in a declarative way, so it is quite easy to perform computations on a remote station. Also, DataForge guarantees reproducibility of analysis results.
+
+**Q**: How does it work?
+
+**A**: At the core of DataForge lies the idea of a metadata processor. It utilizes the fact that to analyze something you need the data itself and some additional information about what that data represents and what the user wants as a result. This additional information is called metadata and can be organized in a regular structure (a tree of values similar to XML or JSON). The important thing is that this distinction leaves no place for user instructions (or scripts). Indeed, the idea behind DataForge logic is that one does not need imperative commands. The framework configures itself according to the input metadata and decides what operations should be performed in the most efficient way.
+
+**Q**: But where does it take algorithms to use?
+
+**A**: Of course algorithms must be written somewhere. No magic here. The logic is written in specialized modules. Some modules are provided out of the box at the system core, some need to be developed for a specific problem.
+
+**Q**: So I still need to write the code? What is the difference then?
+
+**A**: Yes, someone still needs to write the code, but not necessarily you. Simple operations can be performed using the provided core logic. Also, your group can have one programmer writing the logic and all the others using it without any real programming expertise. The framework is organized in such a way that when one writes some additional logic, they do not need to think about complicated things like parallel computing, resource handling, logging, caching, etc. Most of these things are done by DataForge.
+
+### Platform
+
+**Q**: Which platform does DataForge use? Which operating systems does it work on?
+
+**A**: DataForge is mostly written in Kotlin Multiplatform and can be used on JVM, JS, and native targets. Some modules and functions are supported only on the JVM.
+
+**Q**: Can I use my C++/Fortran/Python code in DataForge?
+
+**A**: Yes, as long as the code can be called from Java. Most common languages have a bridge for Java access. There are no problems at all with compiled C/Fortran libraries. Python code can be called via one of the existing Python-Java interfaces. It is also planned to implement remote method invocation for common languages, so your Python, or, say, Julia, code could run in its native environment. The metadata processor paradigm makes it much easier to do so.
+
+### Features
+
+**Q**: What other features does DataForge provide?
+
+**A**: Alongside metadata processing (and a lot of tools for metadata manipulation and layering), DataForge has two additional important concepts:
+
+* **Modularisation**. Contrary to many other frameworks, DataForge is intrinsically modular. The mandatory part is a rather tiny core module; everything else can be customized.
+
+* **Context encapsulation**. Every DataForge task is executed in some context. The context isolates environment for the task and also works as dependency injection base and specifies interaction of the task with the external world.
+
+### Misc
+
+**Q**: So everything looks great, can I replace my ROOT / other data analysis framework with DataForge?
+
+**A**: One must note that DataForge is made for analysis, not for visualization. The visualization and user interaction capabilities of DataForge are rather limited compared to frameworks like ROOT, JAS3 or DataMelt. The idea is to provide a reliable API and core functionality. The [VisionForge](https://git.sciprog.center/kscience/visionforge) project aims to provide tools for both 2D and 3D visualization, both locally and remotely.
+
+**Q**: How does DataForge compare to cluster computation frameworks like Apache Spark?
+
+**A**: It is not the purpose of DataForge to replace cluster computing software. DataForge has some internal parallelism mechanics and implementations, but they are most certainly worse than specially developed programs. Still, DataForge is not fixed on one single implementation. Your favourite parallel processing tool can still be used as a back-end for DataForge, with the full benefit of configuration tools and integrations, and no performance overhead.
+
+**Q**: Is it possible to use DataForge in notebook mode?
+
+**A**: [Kotlin jupyter](https://github.com/Kotlin/kotlin-jupyter) allows using any JVM program in notebook mode. The dedicated module for DataForge is a work in progress.
 
 
 ### [dataforge-context](dataforge-context)
@@ -14,14 +77,28 @@
 > **Maturity**: EXPERIMENTAL
 
 ### [dataforge-io](dataforge-io)
-> IO module
+> Serialization foundation for Meta objects and Envelope processing.
 >
 > **Maturity**: EXPERIMENTAL
+>
+> **Features:**
+> - [IO format](dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt) : A generic API for reading something from binary representation and writing it to Binary.
+> - [Binary](dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Binary.kt) : Multi-read random access binary.
+> - [Envelope](dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Envelope.kt) : API and implementations for combined data and metadata format.
+> - [Tagged envelope](dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/TaggedEnvelope.kt) : Implementation for binary-friendly envelope format with machine readable tag and forward size declaration.
+> - [Tagless envelope](dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/TaglessEnvelope.kt) : Implementation for text-friendly envelope format with text separators for sections.
+
 
 ### [dataforge-meta](dataforge-meta)
-> Meta definition and basic operations on meta
+> Core Meta and Name manipulation module
 >
 > **Maturity**: DEVELOPMENT
+>
+> **Features:**
+> - [Meta](dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt) : **Meta** is the representation of the basic DataForge concept of metadata; it can also be called a meta-value tree.
+> - [Value](dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Value.kt) : **Value** is a sum type for different meta values.
+> - [Name](dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt) : **Name** is an identifier used to access a tree-like structure.
+
 
 ### [dataforge-scripting](dataforge-scripting)
 >
@@ -31,6 +108,11 @@
 >
 > **Maturity**: EXPERIMENTAL
 
+### [dataforge-io/dataforge-io-proto](dataforge-io/dataforge-io-proto)
+> ProtoBuf Meta representation
+>
+> **Maturity**: PROTOTYPE
+
 ### [dataforge-io/dataforge-io-yaml](dataforge-io/dataforge-io-yaml)
 > YAML meta converters and Front Matter envelope format
 >
diff --git a/dataforge-context/README.md b/dataforge-context/README.md
index 2cd53fd1..894868fa 100644
--- a/dataforge-context/README.md
+++ b/dataforge-context/README.md
@@ -6,7 +6,7 @@ Context and provider definitions
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-context:0.9.0-dev-1`.
+The Maven coordinates of this project are `space.kscience:dataforge-context:0.10.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-context:0.9.0-dev-1")
+    implementation("space.kscience:dataforge-context:0.10.0")
 }
 ```
diff --git a/dataforge-data/README.md b/dataforge-data/README.md
index 35aaa4e6..3970fc31 100644
--- a/dataforge-data/README.md
+++ b/dataforge-data/README.md
@@ -6,7 +6,7 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-data:0.9.0-dev-1`.
+The Maven coordinates of this project are `space.kscience:dataforge-data:0.10.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-data:0.9.0-dev-1")
+    implementation("space.kscience:dataforge-data:0.10.0")
 }
 ```
diff --git a/dataforge-io/README.md b/dataforge-io/README.md
index 5a9979a9..9b56352e 100644
--- a/dataforge-io/README.md
+++ b/dataforge-io/README.md
@@ -2,11 +2,20 @@
 
 IO module
 
+## Features
+
+ - [IO format](src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt) : A generic API for reading something from binary representation and writing it to Binary.
+ - [Binary](src/commonMain/kotlin/space/kscience/dataforge/io/Binary.kt) : Multi-read random access binary.
+ - [Envelope](src/commonMain/kotlin/space/kscience/dataforge/io/Envelope.kt) : API and implementations for combined data and metadata format.
+ - [Tagged envelope](src/commonMain/kotlin/space/kscience/dataforge/io/TaggedEnvelope.kt) : Implementation for binary-friendly envelope format with machine readable tag and forward size declaration.
+ - [Tagless envelope](src/commonMain/kotlin/space/kscience/dataforge/io/TaglessEnvelope.kt) : Implementation for text-friendly envelope format with text separators for sections.
+
+
 ## Usage
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-io:0.9.0-dev-1`.
+The Maven coordinates of this project are `space.kscience:dataforge-io:0.10.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +25,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-io:0.9.0-dev-1")
+    implementation("space.kscience:dataforge-io:0.10.0")
 }
 ```
diff --git a/dataforge-io/build.gradle.kts b/dataforge-io/build.gradle.kts
index 16a6fbfc..52e47bb3 100644
--- a/dataforge-io/build.gradle.kts
+++ b/dataforge-io/build.gradle.kts
@@ -4,7 +4,7 @@ plugins {
 
 description = "IO module"
 
-val ioVersion = "0.4.0"
+val ioVersion = "0.6.0"
 
 kscience {
     jvm()
@@ -22,6 +22,60 @@ kscience {
     }
 }
 
-readme{
+readme {
     maturity = space.kscience.gradle.Maturity.EXPERIMENTAL
+
+    description = """
+        Serialization foundation for Meta objects and Envelope processing.
+    """.trimIndent()
+
+    feature(
+        "io-format",
+        ref = "src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt",
+        name = "IO format"
+    ) {
+        """
+            A generic API for reading something from binary representation and writing it to Binary.
+            
+            Similar to KSerializer, but without schema.
+        """.trimIndent()
+    }
+
+    feature(
+        "binary",
+        ref = "src/commonMain/kotlin/space/kscience/dataforge/io/Binary.kt",
+        name = "Binary"
+    ) {
+        "Multi-read random access binary."
+    }
+
+    feature(
+        "envelope",
+        ref = "src/commonMain/kotlin/space/kscience/dataforge/io/Envelope.kt",
+        name = "Envelope"
+    ) {
+        """
+            API and implementations for combined data and metadata format.
+        """.trimIndent()
+    }
+
+    feature(
+        "envelope.tagged",
+        ref = "src/commonMain/kotlin/space/kscience/dataforge/io/TaggedEnvelope.kt",
+        name = "Tagged envelope"
+    ) {
+        """
+            Implementation for binary-friendly envelope format with machine readable tag and forward size declaration.
+        """.trimIndent()
+    }
+
+    feature(
+        "envelope.tagless",
+        ref = "src/commonMain/kotlin/space/kscience/dataforge/io/TaglessEnvelope.kt",
+        name = "Tagged envelope"
+    ) {
+        """
+            Implementation for text-friendly envelope format with text separators for sections.
+        """.trimIndent()
+    }
 }
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-proto/README.md b/dataforge-io/dataforge-io-proto/README.md
new file mode 100644
index 00000000..31e48c15
--- /dev/null
+++ b/dataforge-io/dataforge-io-proto/README.md
@@ -0,0 +1,21 @@
+# Module dataforge-io-proto
+
+ProtoBuf meta IO
+
+## Usage
+
+## Artifact:
+
+The Maven coordinates of this project are `space.kscience:dataforge-io-proto:0.10.0`.
+
+**Gradle Kotlin DSL:**
+```kotlin
+repositories {
+    maven("https://repo.kotlin.link")
+    mavenCentral()
+}
+
+dependencies {
+    implementation("space.kscience:dataforge-io-proto:0.10.0")
+}
+```
diff --git a/dataforge-io/dataforge-io-yaml/README.md b/dataforge-io/dataforge-io-yaml/README.md
index f70a1490..e296e17e 100644
--- a/dataforge-io/dataforge-io-yaml/README.md
+++ b/dataforge-io/dataforge-io-yaml/README.md
@@ -6,7 +6,7 @@ YAML meta IO
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-io-yaml:0.9.0-dev-1`.
+The Maven coordinates of this project are `space.kscience:dataforge-io-yaml:0.10.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-io-yaml:0.9.0-dev-1")
+    implementation("space.kscience:dataforge-io-yaml:0.10.0")
 }
 ```
diff --git a/dataforge-io/dataforge-io-yaml/build.gradle.kts b/dataforge-io/dataforge-io-yaml/build.gradle.kts
index 505ce360..0ae7e9a2 100644
--- a/dataforge-io/dataforge-io-yaml/build.gradle.kts
+++ b/dataforge-io/dataforge-io-yaml/build.gradle.kts
@@ -11,14 +11,14 @@ kscience {
     dependencies {
         api(projects.dataforgeIo)
     }
-    useSerialization{
+    useSerialization {
         yamlKt()
     }
 }
 
-readme{
+readme {
     maturity = space.kscience.gradle.Maturity.PROTOTYPE
-    description ="""
+    description = """
         YAML meta converters and Front Matter envelope format
     """.trimIndent()
 }
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Responder.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Responder.kt
deleted file mode 100644
index 2a64966f..00000000
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Responder.kt
+++ /dev/null
@@ -1,12 +0,0 @@
-package space.kscience.dataforge.io
-
-/**
- * An object that could respond to external messages asynchronously
- */
-public interface Responder {
-    /**
-     * Send a request and wait for response for this specific request
-     */
-    public suspend fun respond(request: Envelope): Envelope
-}
-
diff --git a/dataforge-meta/README.md b/dataforge-meta/README.md
index e4fcacb1..7eb186ed 100644
--- a/dataforge-meta/README.md
+++ b/dataforge-meta/README.md
@@ -2,11 +2,18 @@
 
 Meta definition and basic operations on meta
 
+## Features
+
+ - [Meta](src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt) : **Meta** is the representation of the basic DataForge concept of metadata; it can also be called a meta-value tree.
+ - [Value](src/commonMain/kotlin/space/kscience/dataforge/meta/Value.kt) : **Value** is a sum type for different meta values.
+ - [Name](src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt) : **Name** is an identifier used to access a tree-like structure.
+
+
 ## Usage
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-meta:0.9.0-dev-1`.
+The Maven coordinates of this project are `space.kscience:dataforge-meta:0.10.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +23,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-meta:0.9.0-dev-1")
+    implementation("space.kscience:dataforge-meta:0.10.0")
 }
 ```
diff --git a/dataforge-meta/build.gradle.kts b/dataforge-meta/build.gradle.kts
index 831aea72..49d8bdc5 100644
--- a/dataforge-meta/build.gradle.kts
+++ b/dataforge-meta/build.gradle.kts
@@ -7,19 +7,57 @@ kscience {
     js()
     native()
     wasm()
-    useSerialization{
+    useSerialization {
         json()
     }
 }
 
 description = "Meta definition and basic operations on meta"
 
-readme{
+readme {
     maturity = space.kscience.gradle.Maturity.DEVELOPMENT
 
-    feature("metadata"){
+    description = """
+        Core Meta and Name manipulation module
+    """.trimIndent()
+
+    feature(
+        "meta",
+        ref = "src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt",
+        name = "Meta"
+    ) {
         """
-            
+        **Meta** is the representation of the basic DataForge concept of metadata; it can also be called a meta-value tree.
+        
+        Each Meta node can have a Value as well as a map of named child items.
+                    
+        """.trimIndent()
+    }
+
+    feature(
+        "value",
+        ref = "src/commonMain/kotlin/space/kscience/dataforge/meta/Value.kt",
+        name = "Value"
+    ) {
+        """
+        **Value** is a sum type for different meta values.
+        
+        The following types are implemented in core (custom ones are also available):
+            * null
+            * boolean
+            * number
+            * string
+            * list of values
+        """.trimIndent()
+    }
+
+    feature(
+        "name",
+        ref = "src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt",
+        name = "Name"
+    ) {
+        """
+        **Name** is an identifier used to access a tree-like structure.
         """.trimIndent()
     }
 }
\ No newline at end of file
diff --git a/dataforge-scripting/README.md b/dataforge-scripting/README.md
index 1f650bea..e516b392 100644
--- a/dataforge-scripting/README.md
+++ b/dataforge-scripting/README.md
@@ -6,7 +6,7 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-scripting:0.9.0-dev-1`.
+The Maven coordinates of this project are `space.kscience:dataforge-scripting:0.10.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-scripting:0.9.0-dev-1")
+    implementation("space.kscience:dataforge-scripting:0.10.0")
 }
 ```
diff --git a/dataforge-scripting/build.gradle.kts b/dataforge-scripting/build.gradle.kts
index d9d87742..37bc2e11 100644
--- a/dataforge-scripting/build.gradle.kts
+++ b/dataforge-scripting/build.gradle.kts
@@ -2,22 +2,24 @@ plugins {
     id("space.kscience.gradle.mpp")
 }
 
-kscience{
+description = "Scripting definition fow workspace generation"
+
+kscience {
     jvm()
     commonMain {
         api(projects.dataforgeWorkspace)
         implementation(kotlin("scripting-common"))
     }
-    jvmMain{
+    jvmMain {
         implementation(kotlin("scripting-jvm-host"))
         implementation(kotlin("scripting-jvm"))
     }
-    jvmTest{
+    jvmTest {
         implementation(spclibs.logback.classic)
     }
 }
 
 
-readme{
+readme {
     maturity = space.kscience.gradle.Maturity.PROTOTYPE
 }
\ No newline at end of file
diff --git a/dataforge-workspace/README.md b/dataforge-workspace/README.md
index 87b38c6e..8bb476a3 100644
--- a/dataforge-workspace/README.md
+++ b/dataforge-workspace/README.md
@@ -6,7 +6,7 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-workspace:0.9.0-dev-1`.
+The Maven coordinates of this project are `space.kscience:dataforge-workspace:0.10.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-workspace:0.9.0-dev-1")
+    implementation("space.kscience:dataforge-workspace:0.10.0")
 }
 ```
diff --git a/docs/templates/README-TEMPLATE.md b/docs/templates/README-TEMPLATE.md
index 6de55245..f868eb59 100644
--- a/docs/templates/README-TEMPLATE.md
+++ b/docs/templates/README-TEMPLATE.md
@@ -1,8 +1,6 @@
 [![JetBrains Research](https://jb.gg/badges/research.svg)](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub)
 [![DOI](https://zenodo.org/badge/148831678.svg)](https://zenodo.org/badge/latestdoi/148831678)
 
-![Gradle build](https://github.com/mipt-npm/dataforge-core/workflows/Gradle%20build/badge.svg)
-
 ## Publications
 
 * [A general overview](https://doi.org/10.1051/epjconf/201817705003)
@@ -10,27 +8,29 @@
 
 ## Video
 
-* [A presentation on application of (old version of) DataForge to Troitsk nu-mass analysis.] (https://youtu.be/OpWzLXUZnLI?si=3qn7EMruOHMJX3Bc)
+* [A presentation on application of DataForge (legacy version) to Troitsk nu-mass analysis.](https://youtu.be/OpWzLXUZnLI?si=3qn7EMruOHMJX3Bc)
 
 ## Questions and Answers
+
 In this section, we will try to cover DataForge main ideas in the form of questions and answers.
 
 ### General
-**Q**: I have a lot of data to analyze. The analysis process is complicated, requires a lot of stages and data flow is not always obvious. To top it the data size is huge, so I don't want to perform operation I don't need (calculate something I won't need or calculate something twice). And yes, I need it to be performed in parallel and probably on remote computer. By the way, I am sick and tired of scripts that modify other scripts that control scripts. Could you help me?
 
-**A**: Yes, that is precisely the problem DataForge was made to solve. It allows to perform some automated data manipulations with automatic optimization and parallelization. The important thing that data processing recipes are made in the declarative way, so it is quite easy to perform computations on a remote station. Also, DataForge guarantees reproducibility of analysis results.
+**Q**: I have a lot of data to analyze. The analysis process is complicated, requires a lot of stages, and the data flow is not always obvious. Also, the data size is huge, so I don't want to perform operations I don't need (calculate something I won't need or calculate something twice). I need the analysis to be performed in parallel and probably on a remote computer. By the way, I am sick and tired of scripts that modify other scripts that control scripts. Could you help me?
+
+**A**: Yes, that is precisely the problem DataForge was made to solve. It allows performing automated data manipulations with optimization and parallelization. The important thing is that data processing recipes are written in a declarative way, so it is quite easy to perform computations on a remote station. Also, DataForge guarantees reproducibility of analysis results.
 
 **Q**: How does it work?
 
-**A**: At the core of DataForge lies the idea of metadata processor. It utilizes the fact that in order to analyze something you need data itself and some additional information about what does that data represent and what does user want as a result. This additional information is called metadata and could be organized in a regular structure (a tree of values not unlike XML or JSON). The important thing is that this distinction leaves no place for user instructions (or scripts). Indeed, the idea of DataForge logic is that one do not need imperative commands. The framework configures itself according to input meta-data and decides what operations should be performed in the most efficient way.
+**A**: At the core of DataForge lies the idea of a metadata processor. It utilizes the fact that to analyze something you need the data itself and some additional information about what that data represents and what the user wants as a result. This additional information is called metadata and can be organized in a regular structure (a tree of values similar to XML or JSON). The important thing is that this distinction leaves no place for user instructions (or scripts). Indeed, the idea behind DataForge logic is that one does not need imperative commands. The framework configures itself according to the input metadata and decides what operations should be performed in the most efficient way.
 
 **Q**: But where does it take algorithms to use?
 
-**A**: Of course algorithms must be written somewhere. No magic here. The logic is written in specialized modules. Some modules are provided out of the box at the system core, some need to be developed for specific problem.
+**A**: Of course algorithms must be written somewhere. No magic here. The logic is written in specialized modules. Some modules are provided out of the box at the system core, some need to be developed for a specific problem.
 
 **Q**: So I still need to write the code? What is the difference then?
 
-**A**: Yes, someone still needs to write the code. But not necessary you. Simple operations could be performed using provided core logic. Also, your group can have one programmer writing the logic and all other using it without any real programming expertise. The framework organized in a such way that one writes some additional logic, they do not need to think about complicated thing like parallel computing, resource handling, logging, caching etc. Most of the things are done by the DataForge.
+**A**: Yes, someone still needs to write the code, but not necessarily you. Simple operations can be performed using the provided core logic. Also, your group can have one programmer writing the logic and all the others using it without any real programming expertise. The framework is organized in such a way that when one writes some additional logic, they do not need to think about complicated things like parallel computing, resource handling, logging, caching, etc. Most of these things are done by DataForge.
 
 ### Platform
 
@@ -40,9 +40,10 @@ In this section, we will try to cover DataForge main ideas in the form of questi
 
 **Q**: Can I use my C++/Fortran/Python code in DataForge?
 
-A: Yes, as long as the code could be called from Java. Most of common languages have a bridge for Java access. There are completely no problems with compiled C/Fortran libraries. Python code could be called via one of existing python-java interfaces. It is also planned to implement remote method invocation for common languages, so your Python, or, say, Julia, code could run in its native environment. The metadata processor paradigm makes it much easier to do so.
+**A**: Yes, as long as the code can be called from Java. Most common languages have a bridge for Java access. There are no problems at all with compiled C/Fortran libraries. Python code can be called via one of the existing Python-Java interfaces. It is also planned to implement remote method invocation for common languages, so your Python, or, say, Julia, code could run in its native environment. The metadata processor paradigm makes it much easier to do so.
 
 ### Features
+
 **Q**: What other features does DataForge provide?
 
 **A**: Alongside metadata processing (and a lot of tools for metadata manipulation and layering), DataForge has two additional important concepts:
@@ -52,16 +53,17 @@ A: Yes, as long as the code could be called from Java. Most of common languages
 * **Context encapsulation**. Every DataForge task is executed in some context. The context isolates environment for the task and also works as dependency injection base and specifies interaction of the task with the external world.
 
 ### Misc
+
 **Q**: So everything looks great, can I replace my ROOT / other data analysis framework with DataForge?
 
-**A**: One must note, that DataForge is made for analysis, not for visualisation. The visualisation and user interaction capabilities of DataForge are rather limited compared to frameworks like ROOT, JAS3 or DataMelt. The idea is to provide reliable API and core functionality. In fact JAS3 and DataMelt could be used as a frontend for DataForge mechanics.
+**A**: One must note that DataForge is made for analysis, not for visualization. The visualization and user interaction capabilities of DataForge are rather limited compared to frameworks like ROOT, JAS3 or DataMelt. The idea is to provide a reliable API and core functionality. The [VisionForge](https://git.sciprog.center/kscience/visionforge) project aims to provide tools for both 2D and 3D visualization, both locally and remotely.
 
 **Q**: How does DataForge compare to cluster computation frameworks like Apache Spark?
 
-**A**: Again, it is not the purpose of DataForge to replace cluster software. DataForge has some internal parallelism mechanics and implementations, but they are most certainly worse than specially developed programs. Still, DataForge is not fixed on one single implementation. Your favourite parallel processing tool could be still used as a back-end for the DataForge. With full benefit of configuration tools, integrations and no performance overhead.
+**A**: It is not the purpose of DataForge to replace cluster computing software. DataForge has some internal parallelism mechanics and implementations, but they are most certainly worse than specially developed programs. Still, DataForge is not fixed on one single implementation. Your favourite parallel processing tool can still be used as a back-end for DataForge, with the full benefit of configuration tools and integrations, and no performance overhead.
 
 **Q**: Is it possible to use DataForge in notebook mode?
 
-**A**: [Kotlin jupyter](https://github.com/Kotlin/kotlin-jupyter) allows to use any JVM program in a notebook mode. The dedicated module for DataForge is work in progress.
+**A**: [Kotlin jupyter](https://github.com/Kotlin/kotlin-jupyter) allows using any JVM program in notebook mode. The dedicated module for DataForge is a work in progress.
 
 ${modules}
diff --git a/gradle.properties b/gradle.properties
index 1b920cd8..67ba7f8e 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -6,4 +6,4 @@ org.gradle.jvmargs=-Xmx4096m
 kotlin.mpp.stability.nowarn=true
 kotlin.native.ignoreDisabledTargets=true
 
-toolsVersion=0.16.0-kotlin-2.1.0
\ No newline at end of file
+toolsVersion=0.16.1-kotlin-2.1.0
\ No newline at end of file
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 17655d0e..d6e308a6 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.12-bin.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists

From 6a13182d1c50bf0c364c66f4dc1612329cc11746 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Fri, 3 Jan 2025 14:19:36 +0300
Subject: [PATCH 70/77] Rename `put` to `write` in `DataSink`

---
 CHANGELOG.md                                  |  2 +-
 .../dataforge/actions/AbstractAction.kt       |  4 +-
 .../kscience/dataforge/actions/MapAction.kt   |  2 +-
 .../kscience/dataforge/actions/SplitAction.kt |  2 +-
 .../space/kscience/dataforge/data/DataSink.kt | 10 +--
 .../kscience/dataforge/data/dataBuilders.kt   | 76 ++++++++-----------
 .../kscience/dataforge/data/dataTransform.kt  |  4 +-
 .../dataforge/data/dataTreeBuilder.kt         |  6 +-
 .../dataforge/data/DataTreeBuilderTest.kt     |  4 +-
 .../dataforge/data/dataSetBuilderInContext.kt |  6 +-
 .../kscience/dataforge/data/ActionsTest.kt    |  2 +-
 .../dataforge/workspace/CachingAction.kt      |  2 +-
 12 files changed, 54 insertions(+), 66 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3cb3daf0..5e4a3ce2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,7 +8,7 @@
 
 ### Changed
 - Simplify inheritance logic in `MutableTypedMeta`
-- API of DataSink.
+- Full rework of `DataTree` and associated interfaces (`DataSource`, `DataSink`, etc).
 
 ### Deprecated
 - MetaProvider `spec` is replaced by `readable`. `listOfSpec` replaced with `listOfReadable`
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
index af0aab00..1f1bbf2d 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
@@ -44,7 +44,7 @@ public abstract class AbstractAction<T, R>(
         updateName: Name,
     ) {
         //by default regenerate the whole data set
-        putAll(generate(source, actionMeta))
+        writeAll(generate(source, actionMeta))
     }
 
     @OptIn(UnsafeKType::class)
@@ -60,7 +60,7 @@ public abstract class AbstractAction<T, R>(
 
         //propagate updates
         val updateSink = DataSink<R> { name, data ->
-            put(name, data)
+            write(name, data)
         }
 
         with(updateSink) {
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index 2e4b2ddc..a763515c 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -100,7 +100,7 @@ public class MapAction<T, R>(
         updateName: Name,
     ) {
         val (name, data) = mapOne(updateName, source.read(updateName), actionMeta)
-        put(name, data)
+        write(name, data)
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
index 6dfbc7c9..acc1ba36 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
@@ -94,7 +94,7 @@ internal class SplitAction<T, R>(
         actionMeta: Meta,
         updateName: Name,
     ) {
-        putAll(splitOne(updateName, source.read(updateName), actionMeta))
+        writeAll(splitOne(updateName, source.read(updateName), actionMeta))
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
index c9786244..4345866e 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
@@ -25,7 +25,7 @@ public fun interface DataSink<in T> : DataBuilderScope<T> {
     /**
      * Put data and notify listeners if needed
      */
-    public suspend fun put(name: Name, data: Data<T>?)
+    public suspend fun write(name: Name, data: Data<T>?)
 }
 
 
@@ -77,7 +77,7 @@ private class MutableDataTreeRoot<T>(
         }
         override val dataType: KType get() = this@MutableDataTreeRoot.dataType
 
-        override suspend fun put(
+        override suspend fun write(
             name: Name,
             data: Data<T>?
         ) {
@@ -89,7 +89,7 @@ private class MutableDataTreeRoot<T>(
 
                 else -> {
                     val token = name.first()
-                    items.getOrPut(token) { MutableDataTreeBranch(branchName + token) }.put(name.cutFirst(), data)
+                    items.getOrPut(token) { MutableDataTreeBranch(branchName + token) }.write(name.cutFirst(), data)
                 }
             }
         }
@@ -97,7 +97,7 @@ private class MutableDataTreeRoot<T>(
     override var data: Data<T>? = null
         private set
 
-    override suspend fun put(
+    override suspend fun write(
         name: Name,
         data: Data<T>?
     ) {
@@ -109,7 +109,7 @@ private class MutableDataTreeRoot<T>(
 
             else -> {
                 val token = name.first()
-                items.getOrPut(token) { MutableDataTreeBranch(token.asName()) }.put(name.cutFirst(), data)
+                items.getOrPut(token) { MutableDataTreeBranch(token.asName()) }.write(name.cutFirst(), data)
             }
         }
     }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
index 640e8541..80c5fca9 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
@@ -8,118 +8,106 @@ import space.kscience.dataforge.names.isEmpty
 import space.kscience.dataforge.names.plus
 
 
-public suspend fun <T> DataSink<T>.put(value: NamedData<T>) {
-    put(value.name, value)
+public suspend fun <T> DataSink<T>.write(value: NamedData<T>) {
+    write(value.name, value)
 }
 
-public inline fun <T> DataSink<T>.putAll(
+public inline fun <T> DataSink<T>.writeAll(
     prefix: Name,
     block: DataSink<T>.() -> Unit,
 ) {
     if (prefix.isEmpty()) {
         apply(block)
     } else {
-        val proxyDataSink = DataSink<T> { name, data -> this@putAll.put(prefix + name, data) }
+        val proxyDataSink = DataSink<T> { name, data -> this@writeAll.write(prefix + name, data) }
 
         proxyDataSink.apply(block)
     }
 }
 
 
-public inline fun <T> DataSink<T>.putAll(
+public inline fun <T> DataSink<T>.writeAll(
     prefix: String,
     block: DataSink<T>.() -> Unit,
-): Unit = putAll(prefix.asName(), block)
+): Unit = writeAll(prefix.asName(), block)
 
 
-public suspend fun <T> DataSink<T>.put(name: String, value: Data<T>) {
-    put(Name.parse(name), value)
+public suspend fun <T> DataSink<T>.write(name: String, value: Data<T>) {
+    write(Name.parse(name), value)
 }
 
-public suspend fun <T> DataSink<T>.putAll(name: Name, tree: DataTree<T>) {
-    putAll(name) { putAll(tree.asSequence()) }
+public suspend fun <T> DataSink<T>.writeAll(name: Name, tree: DataTree<T>) {
+    writeAll(name) { writeAll(tree.asSequence()) }
 }
 
 
-public suspend fun <T> DataSink<T>.putAll(name: String, tree: DataTree<T>) {
-    putAll(Name.parse(name)) { putAll(tree.asSequence()) }
+public suspend fun <T> DataSink<T>.writeAll(name: String, tree: DataTree<T>) {
+    writeAll(Name.parse(name)) { writeAll(tree.asSequence()) }
 }
 
 /**
  * Produce lazy [Data] and emit it into the [MutableDataTree]
  */
-public suspend inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.writeValue(
     name: String,
     meta: Meta = Meta.EMPTY,
     noinline producer: suspend () -> T,
 ) {
     val data = Data(meta, block = producer)
-    put(name, data)
+    write(name, data)
 }
 
-public suspend inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.writeValue(
     name: Name,
     meta: Meta = Meta.EMPTY,
     noinline producer: suspend () -> T,
 ) {
     val data = Data(meta, block = producer)
-    put(name, data)
+    write(name, data)
 }
 
 /**
  * Emit static data with the fixed value
  */
-public suspend inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.writeValue(
     name: Name,
     value: T,
     meta: Meta = Meta.EMPTY,
-): Unit = put(name, Data.wrapValue(value, meta))
+): Unit = write(name, Data.wrapValue(value, meta))
 
-public suspend inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.writeValue(
     name: String,
     value: T,
     meta: Meta = Meta.EMPTY,
-): Unit = put(name, Data.wrapValue(value, meta))
+): Unit = write(name, Data.wrapValue(value, meta))
 
-public suspend inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.writeValue(
     name: String,
     value: T,
     metaBuilder: MutableMeta.() -> Unit,
-): Unit = put(Name.parse(name), Data.wrapValue(value, Meta(metaBuilder)))
+): Unit = write(Name.parse(name), Data.wrapValue(value, Meta(metaBuilder)))
 
-public suspend fun <T> DataSink<T>.putAll(sequence: Sequence<NamedData<T>>) {
+public suspend fun <T> DataSink<T>.writeAll(sequence: Sequence<NamedData<T>>) {
     sequence.forEach {
-        put(it)
+        write(it)
     }
 }
 
-public suspend fun <T> DataSink<T>.putAll(map: Map<Name, Data<T>?>) {
+public suspend fun <T> DataSink<T>.writeAll(map: Map<Name, Data<T>?>) {
     map.forEach { (name, data) ->
-        put(name, data)
+        write(name, data)
     }
 }
 
-//public suspend fun <T> DataSink<T>.putAll(tree: DataTree<T>) {
-//    putAll(tree.asSequence())
-//}
-
 /**
- * Suspends indefinitely.
+ * Copy all data from [source] into this tree and mirror subsequent changes. Suspends indefinitely.
  */
-public suspend fun <T : Any> DataSink<T>.watch(
-    source: ObservableDataSource<T>,
-    prefix: Name = Name.EMPTY,
-) {
-//    putAll(branchName, source)
-    source.updates.collect {
-        put(prefix + it, source.read(it))
-    }
-}
-
-public suspend fun <T : Any> MutableDataTree<T>.putAllAndWatch(
+public suspend fun <T : Any> MutableDataTree<T>.writeAllAndWatch(
     source: DataTree<T>,
     prefix: Name = Name.EMPTY,
 ) {
-    putAll(prefix, source)
-    watch(source,prefix)
+    writeAll(prefix, source)
+    source.updates.collect {
+        write(prefix + it, source.read(it))
+    }
 }
\ No newline at end of file
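
For orientation, a minimal usage sketch of the renamed sink API (not part of the patch itself; `fillAndMirror` is a hypothetical helper, and tree construction is left to the caller since no factory is shown in this hunk):

```kotlin
import space.kscience.dataforge.data.*
import space.kscience.dataforge.names.Name

// Hypothetical helper demonstrating the write* extensions introduced above.
suspend fun fillAndMirror(target: MutableDataTree<Int>, source: DataTree<Int>) {
    // `writeValue` (formerly `putValue`) wraps the value in `Data` and delegates to `write`
    target.writeValue("numbers.one", 1)

    // bulk copy of an existing tree under a prefix (formerly `putAll`)
    target.writeAll(Name.parse("imported"), source)

    // copy and then mirror further updates from `source` (formerly `putAllAndWatch`);
    // note that this call collects `source.updates` and therefore suspends indefinitely
    target.writeAllAndWatch(source, Name.parse("mirror"))
}
```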
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
index a8d5ac20..868cb82f 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
@@ -220,14 +220,14 @@ public fun <T, R> DataTree<T>.transformEach(
 ) {
     updates.collect { name ->
         val data: Data<T>? = read(name)
-        if (data == null) put(name, null) else {
+        if (data == null) write(name, null) else {
             val newMeta = data.meta.toMutableMeta().apply {
                 metaTransform(name)
             }.seal()
             val d = Data(outputType, newMeta, scope.coroutineContext, listOf(data)) {
                 compute(NamedValueWithMeta(name, data.await(), data.meta))
             }
-            put(name, d)
+            write(name, d)
         }
     }
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
index 664f9904..069e60bc 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
@@ -49,7 +49,7 @@ private class DataTreeBuilder<T>(
     private val updatesFlow = MutableSharedFlow<Name>()
 
 
-    override suspend fun put(name: Name, data: Data<T>?) {
+    override suspend fun write(name: Name, data: Data<T>?) {
         mutex.withLock {
             if (data == null) {
                 map.remove(name)
@@ -60,7 +60,7 @@ private class DataTreeBuilder<T>(
         updatesFlow.emit(name)
     }
 
-    public fun build(): DataTree<T> = FlatDataTree(type, map, updatesFlow, Name.EMPTY)
+    fun build(): DataTree<T> = FlatDataTree(type, map, updatesFlow, Name.EMPTY)
 }
 
 /**
@@ -73,7 +73,7 @@ public fun <T> DataTree(
     initialData: Map<Name, Data<T>> = emptyMap(),
     updater: suspend DataSink<T>.() -> Unit,
 ): DataTree<T> = DataTreeBuilder<T>(dataType, initialData).apply {
-    scope.launch {
+    scope.launch(GoalExecutionRestriction(GoalExecutionRestrictionPolicy.ERROR)) {
         updater()
     }
 }.build()
diff --git a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
index 6f1a7ed1..6a90664a 100644
--- a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
+++ b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
@@ -55,12 +55,12 @@ internal class DataTreeBuilderTest {
 
         val rootNode = MutableDataTree<Int>() {
             job = launch {
-                putAllAndWatch(subNode, "sub".asName())
+                writeAllAndWatch(subNode, "sub".asName())
             }
         }
 
         repeat(10) {
-            subNode.putValue("value[$it]", it)
+            subNode.writeValue("value[$it]", it)
         }
 
         assertEquals(9, subNode.awaitData("value[9]").await())
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
index f9374974..2bcca5e4 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
@@ -10,14 +10,14 @@ import space.kscience.dataforge.names.Name
  */
 context(DataSink<T>)
 public suspend infix fun <T : Any> String.put(data: Data<T>): Unit =
-    put(Name.parse(this), data)
+    write(Name.parse(this), data)
 
 /**
  * Append node
  */
 context(DataSink<T>)
 public suspend infix fun <T : Any> String.putAll(dataSet: DataTree<T>): Unit =
-    putAll(this, dataSet)
+    writeAll(this, dataSet)
 
 /**
  * Build and append node
@@ -25,5 +25,5 @@ public suspend infix fun <T : Any> String.putAll(dataSet: DataTree<T>): Unit =
 context(DataSink<T>)
 public infix fun <T : Any> String.putAll(
     block: DataSink<T>.() -> Unit,
-): Unit = putAll(Name.parse(this), block)
+): Unit = writeAll(Name.parse(this), block)
 
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index 1789237e..4a0342dc 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -40,7 +40,7 @@ internal class ActionsTest {
         val result: DataTree<Int> = plusOne(source)
 
         repeat(10) {
-            source.putValue(it.toString(), it)
+            source.writeValue(it.toString(), it)
         }
 
         assertEquals(2, result.awaitData("1").await())
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt
index 9fc91e33..57b0746e 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt
@@ -22,6 +22,6 @@ internal class CachingAction<T>(
 
     override suspend fun DataSink<T>.update(source: DataTree<T>, actionMeta: Meta, updateName: Name) {
         val updatedData = source.read(updateName)
-        put(updateName, updatedData?.named(updateName)?.let(caching))
+        write(updateName, updatedData?.named(updateName)?.let(caching))
     }
 }
\ No newline at end of file

From 9d70ba96eba59dddd29c4efd58d7770aca582ee7 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Fri, 3 Jan 2025 15:44:10 +0300
Subject: [PATCH 71/77] Remove implicit IOFormat in IOPlugin

---
 CHANGELOG.md                                  |  3 +-
 .../space/kscience/dataforge/io/IOPlugin.kt   | 17 -------
 .../space/kscience/dataforge/io/fileIO.kt     | 10 ----
 .../space/kscience/dataforge/meta/Scheme.kt   |  2 +-
 .../dataforge/names/NameIndexComparator.kt    |  2 +-
 .../kscience/dataforge/names/NameToken.kt     |  8 +--
 .../kscience/dataforge/workspace/Task.kt      |  6 +--
 .../dataforge/workspace/FileWorkspaceCache.kt | 51 +++++++++++++------
 8 files changed, 46 insertions(+), 53 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5e4a3ce2..83462d70 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,12 +8,13 @@
 
 ### Changed
 - Simplify inheritance logic in `MutableTypedMeta`
-- Full rework of `DataTree` and associated interfaces (`DataSource`, `DataSink`, etc).
+- Full rework of `DataTree` and associated interfaces (`DataSource`, `DataSink`, etc.).
 
 ### Deprecated
 - MetaProvider `spec` is replaced by `readable`. `listOfSpec` replaced with `listOfReadable`
 
 ### Removed
+- Remove implicit IO format resolver in `IOPlugin` and `FileWorkspaceCache`. There is no guarantee that only one format is present in the context for each type.
 
 ### Fixed
 - Fixed NameToken parsing.
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt
index f431a731..11b5e5e3 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt
@@ -6,28 +6,11 @@ import space.kscience.dataforge.io.IOFormatFactory.Companion.IO_FORMAT_TYPE
 import space.kscience.dataforge.io.MetaFormatFactory.Companion.META_FORMAT_TYPE
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.string
-import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
-import kotlin.reflect.KType
-import kotlin.reflect.typeOf
 
 public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
     override val tag: PluginTag get() = Companion.tag
 
-    public val ioFormatFactories: Collection<IOFormatFactory<*>> by lazy {
-        context.gather<IOFormatFactory<*>>(IO_FORMAT_TYPE).values
-    }
-
-    @Suppress("UNCHECKED_CAST")
-    @UnsafeKType
-    public fun <T> resolveIOFormat(type: KType, meta: Meta): IOFormat<T>? =
-        ioFormatFactories.singleOrNull { it.type == type }?.build(context, meta) as? IOFormat<T>
-
-    @OptIn(UnsafeKType::class)
-    public inline fun <reified T> resolveIOFormat(meta: Meta = Meta.EMPTY): IOFormat<T>? =
-        resolveIOFormat(typeOf<T>(), meta)
-
-
     public val metaFormatFactories: Collection<MetaFormatFactory> by lazy {
         context.gather<MetaFormatFactory>(META_FORMAT_TYPE).values
     }
diff --git a/dataforge-io/src/jvmMain/kotlin/space/kscience/dataforge/io/fileIO.kt b/dataforge-io/src/jvmMain/kotlin/space/kscience/dataforge/io/fileIO.kt
index 7df23eb5..2d54e061 100644
--- a/dataforge-io/src/jvmMain/kotlin/space/kscience/dataforge/io/fileIO.kt
+++ b/dataforge-io/src/jvmMain/kotlin/space/kscience/dataforge/io/fileIO.kt
@@ -15,8 +15,6 @@ import java.nio.file.Path
 import java.nio.file.StandardOpenOption
 import kotlin.io.path.inputStream
 import kotlin.math.min
-import kotlin.reflect.full.isSupertypeOf
-import kotlin.reflect.typeOf
 import kotlin.streams.asSequence
 
 
@@ -79,14 +77,6 @@ public fun Path.rewrite(block: Sink.() -> Unit): Unit {
 
 public fun EnvelopeFormat.readFile(path: Path): Envelope = readFrom(path.asBinary())
 
-/**
- * Resolve IOFormat based on type
- */
-@Suppress("UNCHECKED_CAST")
-public inline fun <reified T : Any> IOPlugin.resolveIOFormat(): IOFormat<T>? =
-    ioFormatFactories.find { it.type.isSupertypeOf(typeOf<T>()) } as IOFormat<T>?
-
-
 public val IOPlugin.Companion.META_FILE_NAME: String get() = "@meta"
 public val IOPlugin.Companion.DATA_FILE_NAME: String get() = "@data"
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
index bc05cb5d..12eb4c68 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
@@ -227,7 +227,7 @@ public fun <T : Scheme> MutableMetaProvider.scheme(
 ): ReadWriteProperty<Any?, T> = object : ReadWriteProperty<Any?, T> {
     override fun getValue(thisRef: Any?, property: KProperty<*>): T {
         val name = key ?: property.name.asName()
-        val node = get(name)?: MutableMeta().also { set(name,it) }
+        val node = get(name) ?: MutableMeta().also { set(name, it) }
         return spec.write(node)
     }
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameIndexComparator.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameIndexComparator.kt
index 742f8ebb..bb95cf65 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameIndexComparator.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameIndexComparator.kt
@@ -27,4 +27,4 @@ public object NameIndexComparator : Comparator<String?> {
 public fun Meta.getIndexedList(name: Name): List<Meta> = getIndexed(name).entries.sortedWith(
     //sort by index
     compareBy(space.kscience.dataforge.names.NameIndexComparator) { it.key }
-).map{it.value}
\ No newline at end of file
+).map { it.value }
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt
index 3994ef27..d6a760f1 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt
@@ -82,13 +82,13 @@ public class NameToken(public val body: String, public val index: String? = null
                         else -> indexEnd = index
                     }
 
-                    else -> if(indexEnd>=0) error("Symbols not allowed after index in NameToken: $string")
+                    else -> if (indexEnd >= 0) error("Symbols not allowed after index in NameToken: $string")
                 }
             }
-            if(indexStart>=0 && indexEnd<0) error("Opening bracket without closing bracket not allowed in NameToken: $string")
+            if (indexStart >= 0 && indexEnd < 0) error("Opening bracket without closing bracket not allowed in NameToken: $string")
             return NameToken(
-                if(indexStart>=0) string.substring(0, indexStart) else string,
-                if(indexStart>=0) string.substring(indexStart + 1, indexEnd) else null
+                if (indexStart >= 0) string.substring(0, indexStart) else string,
+                if (indexStart >= 0) string.substring(indexStart + 1, indexEnd) else null
             )
         }
     }
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
index 06134ce6..5e0ff572 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
@@ -29,10 +29,10 @@ public interface Task<T> : Described {
     public val fingerprint: String get() = hashCode().toString(radix = 16)
 
     /**
-     * Compute a [TaskResult] using given meta. In general, the result is lazy and represents both computation model
-     * and a handler for actual result
+     * Compute a [TaskResult] using given meta. In general, the result is lazy and represents both the computation model
+     * and a handler for the actual result
      *
-     * @param workspace a workspace to run task in
+     * @param workspace a workspace to run the task in
      * @param taskName the name of the task in this workspace
      * @param taskMeta configuration for current stage computation
      */
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
index 4d2578e5..ce32848a 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
@@ -15,6 +15,7 @@ import space.kscience.dataforge.data.Data
 import space.kscience.dataforge.data.await
 import space.kscience.dataforge.data.named
 import space.kscience.dataforge.io.*
+import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.withIndex
@@ -24,11 +25,7 @@ import kotlin.io.path.div
 import kotlin.io.path.exists
 import kotlin.reflect.KType
 
-public class JsonIOFormat<T>(private val type: KType) : IOFormat<T> {
-
-    @Suppress("UNCHECKED_CAST")
-    private val serializer: KSerializer<T> = serializer(type) as KSerializer<T>
-
+public class JsonIOFormat<T>(public val serializer: KSerializer<T>) : IOFormat<T> {
     override fun readFrom(source: Source): T = Json.decodeFromString(serializer, source.readString())
 
     override fun writeTo(sink: Sink, obj: T) {
@@ -36,12 +33,11 @@ public class JsonIOFormat<T>(private val type: KType) : IOFormat<T> {
     }
 }
 
+/**
+ * An [IOFormat] based on the Protobuf representation of a serializable object.
+ */
 @OptIn(ExperimentalSerializationApi::class)
-public class ProtobufIOFormat<T>(private val type: KType) : IOFormat<T> {
-
-    @Suppress("UNCHECKED_CAST")
-    private val serializer: KSerializer<T> = serializer(type) as KSerializer<T>
-
+public class ProtobufIOFormat<T>(public val serializer: KSerializer<T>) : IOFormat<T> {
     override fun readFrom(source: Source): T = ProtoBuf.decodeFromByteArray(serializer, source.readByteArray())
 
     override fun writeTo(sink: Sink, obj: T) {
@@ -49,19 +45,39 @@ public class ProtobufIOFormat<T>(private val type: KType) : IOFormat<T> {
     }
 }
 
+public interface IOFormatResolveStrategy {
+    public fun <T> resolve(type: KType, meta: Meta): IOFormat<T>
 
-public class FileWorkspaceCache(public val cacheDirectory: Path) : WorkspaceCache {
+    public companion object {
+        public val PROTOBUF: IOFormatResolveStrategy = object : IOFormatResolveStrategy {
+            @Suppress("UNCHECKED_CAST")
+            override fun <T> resolve(
+                type: KType,
+                meta: Meta
+            ): IOFormat<T> = ProtobufIOFormat(serializer(type) as KSerializer<T>)
+        }
 
-    //    private fun <T : Any> TaskData<*>.checkType(taskType: KType): TaskData<T> = this as TaskData<T>
+        public val JSON: IOFormatResolveStrategy = object : IOFormatResolveStrategy {
+            @Suppress("UNCHECKED_CAST")
+            override fun <T> resolve(
+                type: KType,
+                meta: Meta
+            ): IOFormat<T> = JsonIOFormat(serializer(type) as KSerializer<T>)
+        }
+    }
+}
+
+public class FileWorkspaceCache(
+    public val cacheDirectory: Path,
+    private val ioFormatResolveStrategy: IOFormatResolveStrategy,
+) : WorkspaceCache {
 
 
     @OptIn(DFExperimental::class, UnsafeKType::class)
     override suspend fun <T> cache(result: TaskResult<T>): TaskResult<T> {
         val io = result.workspace.context.request(IOPlugin)
 
-        val format: IOFormat<T> = io.resolveIOFormat(result.dataType, result.taskMeta)
-            ?: ProtobufIOFormat(result.dataType)
-            ?: error("Can't resolve IOFormat for ${result.dataType}")
+        val format: IOFormat<T> = ioFormatResolveStrategy.resolve<T>(result.dataType, result.taskMeta)
 
 
         val cachingAction: Action<T, T> = CachingAction(result.dataType) { data ->
@@ -104,4 +120,7 @@ public class FileWorkspaceCache(public val cacheDirectory: Path) : WorkspaceCach
     }
 }
 
-public fun WorkspaceBuilder.fileCache(cacheDir: Path): Unit = cache(FileWorkspaceCache(cacheDir))
\ No newline at end of file
+public fun WorkspaceBuilder.fileCache(
+    cacheDir: Path,
+    ioFormatResolveStrategy: IOFormatResolveStrategy = IOFormatResolveStrategy.PROTOBUF
+): Unit = cache(FileWorkspaceCache(cacheDir, ioFormatResolveStrategy))
\ No newline at end of file
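
A sketch of how the explicit strategy might be wired in, now that the implicit resolver is gone (the import paths and the `configureCaching` helper are assumptions based on the file locations in this patch):

```kotlin
import space.kscience.dataforge.workspace.IOFormatResolveStrategy
import space.kscience.dataforge.workspace.WorkspaceBuilder
import space.kscience.dataforge.workspace.fileCache
import kotlin.io.path.Path

// Hypothetical workspace configuration: everything except `fileCache` is assumed.
fun WorkspaceBuilder.configureCaching() {
    // The serialization format is now chosen explicitly instead of being resolved from the context.
    // PROTOBUF is the default; JSON may be preferable when the cache files should be human-readable.
    fileCache(Path("build/df-cache"), IOFormatResolveStrategy.JSON)
}
```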

From da0ecbe2e5f7151207263b9a187877cabb041924 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 18 Jan 2025 13:10:57 +0300
Subject: [PATCH 72/77] Build update

---
 CHANGELOG.md                                         |  3 ++-
 dataforge-context/build.gradle.kts                   |  2 +-
 dataforge-data/build.gradle.kts                      |  2 +-
 .../space/kscience/dataforge/actions/MapAction.kt    |  2 ++
 .../kotlin/space/kscience/dataforge/data/Goal.kt     | 12 ++++++++----
 gradle.properties                                    |  2 +-
 6 files changed, 15 insertions(+), 8 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 83462d70..60e5bfd9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,7 +4,8 @@
 
 ### Added
 - Coroutine exception logging in context
-- ObservableMutableMetaSerializer
+- `ObservableMutableMetaSerializer`
+- `MutableMetaView` - a Meta wrapper that creates nodes only when it or its children are changed.
 
 ### Changed
 - Simplify inheritance logic in `MutableTypedMeta`
diff --git a/dataforge-context/build.gradle.kts b/dataforge-context/build.gradle.kts
index 72a41693..c411f592 100644
--- a/dataforge-context/build.gradle.kts
+++ b/dataforge-context/build.gradle.kts
@@ -13,7 +13,7 @@ kscience {
     useSerialization()
     commonMain {
         api(projects.dataforgeMeta)
-        api(spclibs.atomicfu)
+//        api(spclibs.atomicfu)
     }
     jvmMain{
         api(kotlin("reflect"))
diff --git a/dataforge-data/build.gradle.kts b/dataforge-data/build.gradle.kts
index 99314ea3..8c0a690e 100644
--- a/dataforge-data/build.gradle.kts
+++ b/dataforge-data/build.gradle.kts
@@ -9,7 +9,7 @@ kscience{
     wasm()
     useCoroutines()
     dependencies {
-        api(spclibs.atomicfu)
+//        api(spclibs.atomicfu)
         api(projects.dataforgeMeta)
         //Remove after subtype moved to stdlib
         api(kotlin("reflect"))
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index a763515c..ffa313f2 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -37,6 +37,7 @@ public class MapActionBuilder<T, R>(
     /**
      * Set unsafe [outputType] for the resulting data. Be sure that it is correct.
      */
+    @UnsafeKType
     public fun <R1 : R> result(outputType: KType, f: suspend ActionEnv.(T) -> R1) {
         this.outputType = outputType
         result = f;
@@ -45,6 +46,7 @@ public class MapActionBuilder<T, R>(
     /**
      * Calculate the result of goal
      */
+    @OptIn(UnsafeKType::class)
     public inline fun <reified R1 : R> result(noinline f: suspend ActionEnv.(T) -> R1): Unit = result(typeOf<R1>(), f)
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
index e54710b1..e9b022b2 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
@@ -64,11 +64,14 @@ public open class LazyGoal<T>(
     /**
      * Get ongoing computation or start a new one.
      * Does not guarantee thread safety. In case of multi-thread access, could create orphan computations.
-     * If [GoalExecutionRestriction] is present in the [coroutineScope] context, the call could produce a error a warning
+     * If [GoalExecutionRestriction] is present in the [coroutineScope] context, the call could produce an error or a warning
      * depending on the settings.
+     *
+     * If the [Goal] has already been started in a different scope, it is not restarted.
      */
     @OptIn(DFExperimental::class)
-    override fun async(coroutineScope: CoroutineScope): Deferred<T> {
+    override fun async(coroutineScope: CoroutineScope): Deferred<T> = deferred ?: run {
+
         val log = coroutineScope.coroutineContext[GoalLogger]
         // Check if context restricts goal computation
         coroutineScope.coroutineContext[GoalExecutionRestriction]?.let { restriction ->
@@ -85,13 +88,14 @@ public open class LazyGoal<T>(
         val startedDependencies = dependencies.map { goal ->
             goal.async(coroutineScope)
         }
-        return deferred ?: coroutineScope.async(
+
+        coroutineScope.async(
             coroutineContext
                     + CoroutineMonitor()
                     + Dependencies(startedDependencies)
                     + GoalExecutionRestriction(GoalExecutionRestrictionPolicy.NONE) // Remove restrictions on goal execution
         ) {
-            //cancel execution if error encountered in one of dependencies
+            //cancel execution if error encountered in one of the dependencies
             startedDependencies.forEach { deferred ->
                 deferred.invokeOnCompletion { error ->
                     if (error != null) this.cancel(CancellationException("Dependency $deferred failed with error: ${error.message}"))
diff --git a/gradle.properties b/gradle.properties
index 67ba7f8e..ea4473c1 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -6,4 +6,4 @@ org.gradle.jvmargs=-Xmx4096m
 kotlin.mpp.stability.nowarn=true
 kotlin.native.ignoreDisabledTargets=true
 
-toolsVersion=0.16.1-kotlin-2.1.0
\ No newline at end of file
+toolsVersion=0.16.0-kotlin-2.1.20-Beta1
\ No newline at end of file

From 27745802e159af4d45eccd61a1f11965f17ac2b1 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 18 Jan 2025 13:11:08 +0300
Subject: [PATCH 73/77] Add MutableMetaView

---
 .../dataforge/meta/MutableMetaView.kt         | 40 +++++++++++++++++++
 1 file changed, 40 insertions(+)
 create mode 100644 dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaView.kt

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaView.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaView.kt
new file mode 100644
index 00000000..6a601940
--- /dev/null
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaView.kt
@@ -0,0 +1,40 @@
+package space.kscience.dataforge.meta
+
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.NameToken
+import space.kscience.dataforge.names.plus
+
+/**
+ * A [Meta] child proxy that creates required nodes on write
+ */
+public class MutableMetaView(
+    public val origin: MutableMeta,
+    public val path: Name
+) : MutableMeta {
+
+    override val items: Map<NameToken, MutableMeta>
+        get() = origin[path]?.items ?: emptyMap()
+
+    override var value: Value?
+        get() = origin[path]?.value
+        set(value) {
+            origin[path] = value
+        }
+
+    override fun getOrCreate(name: Name): MutableMeta = MutableMetaView(origin, path + name)
+
+
+    override fun set(name: Name, node: Meta?) {
+        set(path + name, node)
+    }
+
+
+    override fun equals(other: Any?): Boolean = Meta.equals(this, other as? Meta)
+
+
+    override fun hashCode(): Int  = Meta.hashCode(this)
+
+    override fun toString(): String = Meta.toString(this)
+}
+
+public fun MutableMeta.view(name: Name): MutableMetaView = MutableMetaView(this, name)
\ No newline at end of file

From 79bbc6c76b00f60390d801a752d64c71dc0df785 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 18 Jan 2025 13:48:24 +0300
Subject: [PATCH 74/77] Fix MutableMetaView.kt

---
 .../dataforge/meta/MutableMetaView.kt         | 27 ++++++++++++-------
 .../dataforge/meta/ObservableMetaWrapper.kt   |  2 +-
 .../dataforge/meta/MutableMetaViewTest.kt     | 25 +++++++++++++++++
 3 files changed, 43 insertions(+), 11 deletions(-)
 create mode 100644 dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MutableMetaViewTest.kt

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaView.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaView.kt
index 6a601940..2bc3f9aa 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaView.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaView.kt
@@ -2,14 +2,15 @@ package space.kscience.dataforge.meta
 
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.NameToken
+import space.kscience.dataforge.names.parseAsName
 import space.kscience.dataforge.names.plus
 
 /**
- * A [Meta] child proxy that creates required nodes on write
+ * A [Meta] child proxy that creates required nodes on value write
  */
-public class MutableMetaView(
-    public val origin: MutableMeta,
-    public val path: Name
+private class MutableMetaView(
+    val origin: MutableMeta,
+    val path: Name
 ) : MutableMeta {
 
     override val items: Map<NameToken, MutableMeta>
@@ -23,18 +24,24 @@ public class MutableMetaView(
 
     override fun getOrCreate(name: Name): MutableMeta = MutableMetaView(origin, path + name)
 
-
     override fun set(name: Name, node: Meta?) {
-        set(path + name, node)
+        if (origin[path + name] == null && node?.isEmpty() == true) return
+        origin[path + name] = node
     }
 
-
     override fun equals(other: Any?): Boolean = Meta.equals(this, other as? Meta)
 
-
-    override fun hashCode(): Int  = Meta.hashCode(this)
+    override fun hashCode(): Int = Meta.hashCode(this)
 
     override fun toString(): String = Meta.toString(this)
 }
 
-public fun MutableMeta.view(name: Name): MutableMetaView = MutableMetaView(this, name)
\ No newline at end of file
+/**
+ * Create a view of this [MutableMeta] node that creates child items only when their values are written.
+ *
+ * The difference between this method and the regular [getOrCreate] is that [getOrCreate] always creates and attaches a node
+ * even if it is empty.
+ */
+public fun MutableMeta.view(name: Name): MutableMeta = MutableMetaView(this, name)
+
+public fun MutableMeta.view(name: String): MutableMeta = view(name.parseAsName())
\ No newline at end of file
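
A small sketch of the difference described in the KDoc above, reusing the `MutableMeta()` factory and `put` builder that appear in the test added later in this patch (the `"eager"`/`"lazy"` names are illustrative):

```kotlin
import space.kscience.dataforge.meta.*
import space.kscience.dataforge.names.asName

fun main() {
    val meta = MutableMeta()

    // `getOrCreate` attaches a child node immediately, even while it is still empty
    meta.getOrCreate("eager".asName())

    // a view does not touch `meta` until something non-empty is written through it
    val view = meta.view("lazy")
    view["child"] = Meta { "answer" put 42 } // only now does "lazy.child.answer" appear in `meta`
}
```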
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt
index 7c62f692..c8780417 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt
@@ -59,7 +59,7 @@ private class ObservableMetaWrapper(
 
     fun removeNode(name: Name): Meta? {
         val oldMeta = get(name)
-        //don't forget to remove listener
+        //remember to remove listener
         oldMeta?.removeListener(this)
 
         return oldMeta
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MutableMetaViewTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MutableMetaViewTest.kt
new file mode 100644
index 00000000..61af1d04
--- /dev/null
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MutableMetaViewTest.kt
@@ -0,0 +1,25 @@
+package space.kscience.dataforge.meta
+
+import space.kscience.dataforge.names.asName
+import kotlin.test.Test
+import kotlin.test.assertEquals
+import kotlin.test.assertTrue
+
+class MutableMetaViewTest {
+    @Test
+    fun metaView() {
+        val meta = MutableMeta()
+        val view = meta.view("a".asName())
+
+        view["b"] = Meta.EMPTY
+
+        assertTrue { meta.items.isEmpty() }
+
+        view["c"] = Meta {
+            "d" put 22
+        }
+
+        assertEquals(22, meta["a.c.d"].int)
+    }
+
+}
\ No newline at end of file

From de534dec2d89a8a6b3a0f924df3c2d6a46f38173 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 18 Jan 2025 14:26:56 +0300
Subject: [PATCH 75/77] Update plugin version

---
 dataforge-meta/api/dataforge-meta.api | 5 +++++
 gradle.properties                     | 2 +-
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/dataforge-meta/api/dataforge-meta.api b/dataforge-meta/api/dataforge-meta.api
index 5bf71f94..b282c77f 100644
--- a/dataforge-meta/api/dataforge-meta.api
+++ b/dataforge-meta/api/dataforge-meta.api
@@ -520,6 +520,11 @@ public final class space/kscience/dataforge/meta/MutableMetaSerializer : kotlinx
 	public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/meta/MutableMeta;)V
 }
 
+public final class space/kscience/dataforge/meta/MutableMetaViewKt {
+	public static final fun view (Lspace/kscience/dataforge/meta/MutableMeta;Ljava/lang/String;)Lspace/kscience/dataforge/meta/MutableMeta;
+	public static final fun view (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
+}
+
 public abstract interface class space/kscience/dataforge/meta/MutableTypedMeta : space/kscience/dataforge/meta/MutableMeta, space/kscience/dataforge/meta/TypedMeta {
 	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
 	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
diff --git a/gradle.properties b/gradle.properties
index ea4473c1..67ba7f8e 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -6,4 +6,4 @@ org.gradle.jvmargs=-Xmx4096m
 kotlin.mpp.stability.nowarn=true
 kotlin.native.ignoreDisabledTargets=true
 
-toolsVersion=0.16.0-kotlin-2.1.20-Beta1
\ No newline at end of file
+toolsVersion=0.16.1-kotlin-2.1.0
\ No newline at end of file

From 99a053b978d09cd3c262b2e33a83a3c154db669b Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 19 Jan 2025 12:34:36 +0300
Subject: [PATCH 76/77] 0.10.0 release

---
 CHANGELOG.md                                  | 23 +++++-
 README.md                                     |  1 +
 build.gradle.kts                              |  6 ++
 dataforge-context/build.gradle.kts            |  5 +-
 .../dataforge/context/ClassLoaderPlugin.kt    |  2 +-
 .../kscience/dataforge/provider/dfType.kt     | 17 ++--
 dataforge-data/build.gradle.kts               |  3 -
 .../kscience/dataforge/data/DataFilter.kt     | 57 +++-----------
 .../kscience/dataforge/data/ActionsTest.kt    |  8 +-
 dataforge-output/api/dataforge-output.api     | 68 ----------------
 dataforge-output/build.gradle.kts             | 15 ----
 .../hep/dataforge/output/OutputManager.kt     | 75 ------------------
 .../kotlin/hep/dataforge/output/Renderer.kt   | 21 -----
 .../hep/dataforge/output/TextRenderer.kt      | 78 -------------------
 .../kotlin/hep/dataforge/output/outputJS.kt   |  7 --
 .../kotlin/hep/dataforge/output/outputJVM.kt  |  6 --
 .../hep/dataforge/output/outputNative.kt      |  6 --
 dataforge-scripting/README.md                 |  2 +-
 dataforge-workspace/build.gradle.kts          |  2 +
 .../dataforge/workspace}/dataFilterJvm.kt     | 17 +---
 .../dataforge/workspace/workspaceJvm.kt       |  1 -
 .../workspace/SimpleWorkspaceTest.kt          | 10 +--
 gradle.properties                             |  1 +
 23 files changed, 69 insertions(+), 362 deletions(-)
 delete mode 100644 dataforge-output/api/dataforge-output.api
 delete mode 100644 dataforge-output/build.gradle.kts
 delete mode 100644 dataforge-output/src/commonMain/kotlin/hep/dataforge/output/OutputManager.kt
 delete mode 100644 dataforge-output/src/commonMain/kotlin/hep/dataforge/output/Renderer.kt
 delete mode 100644 dataforge-output/src/commonMain/kotlin/hep/dataforge/output/TextRenderer.kt
 delete mode 100644 dataforge-output/src/jsMain/kotlin/hep/dataforge/output/outputJS.kt
 delete mode 100644 dataforge-output/src/jvmMain/kotlin/hep/dataforge/output/outputJVM.kt
 delete mode 100644 dataforge-output/src/nativeMain/kotlin/hep/dataforge/output/outputNative.kt
 rename {dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data => dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace}/dataFilterJvm.kt (77%)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 60e5bfd9..06dfa5bf 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,26 +3,45 @@
 ## Unreleased
 
 ### Added
+
+### Changed
+
+### Deprecated
+
+### Removed
+
+### Fixed
+
+### Security
+
+## 0.10.0 - 2025-01-19
+
+### Added
+
 - Coroutine exception logging in context
 - `ObservableMutableMetaSerializer`
 - `MutableMetaView` - a Meta wrapper that creates nodes only when it or its children are changed.
 
 ### Changed
+
 - Simplify inheritance logic in `MutableTypedMeta`
 - Full rework of `DataTree` and associated interfaces (`DataSource`, `DataSink`, etc.).
+- Filtering data by type is moved from `dataforge-data` to `dataforge-workspace` to avoid a reflection dependency.
 
 ### Deprecated
+
 - MetaProvider `spec` is replaced by `readable`. `listOfSpec` replaced with `listOfReadable`
 
 ### Removed
+
 - Remove implicit IO format resolver in `IOPlugin` and `FileWorkspaceCache`. There is no guarantee that only one format is present in the context for each type.
+- Dependencies on `atomicfu` and `kotlin.reflect` from `dataforge-data` to improve performance.
 
 ### Fixed
+
 - Fixed NameToken parsing.
 - Top level string list meta conversion.
 
-### Security
-
 ## 0.9.0 - 2024-06-04
 
 ### Added
diff --git a/README.md b/README.md
index be5591cf..a3dd7b7b 100644
--- a/README.md
+++ b/README.md
@@ -101,6 +101,7 @@ In this section, we will try to cover DataForge main ideas in the form of questi
 
 
 ### [dataforge-scripting](dataforge-scripting)
+> Scripting definition for workspace generation
 >
 > **Maturity**: PROTOTYPE
 
diff --git a/build.gradle.kts b/build.gradle.kts
index 2d7cb095..22784656 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -22,6 +22,12 @@ subprojects {
     }
 }
 
+dependencies{
+    subprojects.forEach {
+        dokka(it)
+    }
+}
+
 readme {
     readmeTemplate = file("docs/templates/README-TEMPLATE.md")
 }
diff --git a/dataforge-context/build.gradle.kts b/dataforge-context/build.gradle.kts
index c411f592..ad7b76a9 100644
--- a/dataforge-context/build.gradle.kts
+++ b/dataforge-context/build.gradle.kts
@@ -13,11 +13,10 @@ kscience {
     useSerialization()
     commonMain {
         api(projects.dataforgeMeta)
-//        api(spclibs.atomicfu)
     }
     jvmMain{
-        api(kotlin("reflect"))
-        api("org.slf4j:slf4j-api:1.7.30")
+        api(spclibs.kotlin.reflect)
+        api(spclibs.slf4j)
     }
 }
 
diff --git a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/context/ClassLoaderPlugin.kt b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/context/ClassLoaderPlugin.kt
index b2c703e1..44ba290f 100644
--- a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/context/ClassLoaderPlugin.kt
+++ b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/context/ClassLoaderPlugin.kt
@@ -17,7 +17,7 @@ package space.kscience.dataforge.context
 
 import java.util.*
 import kotlin.reflect.KClass
-import kotlin.reflect.full.cast
+import kotlin.reflect.cast
 
 public class ClassLoaderPlugin(private val classLoader: ClassLoader) : AbstractPlugin() {
     override val tag: PluginTag = PluginTag("classLoader", PluginTag.DATAFORGE_GROUP)
diff --git a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt
index cccc3efb..d291f378 100644
--- a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt
+++ b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt
@@ -8,25 +8,30 @@ import space.kscience.dataforge.misc.DfType
 import space.kscience.dataforge.misc.Named
 import space.kscience.dataforge.names.Name
 import kotlin.reflect.KClass
+import kotlin.reflect.KType
 import kotlin.reflect.full.findAnnotation
-
+import kotlin.reflect.typeOf
 
 @DFExperimental
 public val KClass<*>.dfType: String
     get() = findAnnotation<DfType>()?.id ?: simpleName ?: ""
 
+@DFExperimental
+public val KType.dfType: String
+    get() = findAnnotation<DfType>()?.id ?: (classifier as? KClass<*>)?.simpleName ?: ""
+
 /**
  * Provide an object with given name inferring target from its type using [DfType] annotation
  */
 @DFExperimental
 public inline fun <reified T : Any> Provider.provideByType(name: String): T? {
-    val target = T::class.dfType
+    val target = typeOf<T>().dfType
     return provide(target, name)
 }
 
 @DFExperimental
 public inline fun <reified T : Any> Provider.top(): Map<Name, T> {
-    val target = T::class.dfType
+    val target = typeOf<T>().dfType
     return top(target)
 }
 
@@ -35,15 +40,15 @@ public inline fun <reified T : Any> Provider.top(): Map<Name, T> {
  */
 @DFExperimental
 public inline fun <reified T : Any> Context.gather(inherit: Boolean = true): Map<Name, T> =
-    gather<T>(T::class.dfType, inherit)
+    gather<T>(typeOf<T>().dfType, inherit)
 
 
 @DFExperimental
 public inline fun <reified T : Any> PluginBuilder.provides(items: Map<Name, T>) {
-    provides(T::class.dfType, items)
+    provides(typeOf<T>().dfType, items)
 }
 
 @DFExperimental
 public inline fun <reified T : Any> PluginBuilder.provides(vararg items: Named) {
-    provides(T::class.dfType, *items)
+    provides(typeOf<T>().dfType, *items)
 }
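
For context, a sketch of how a `DfType` id feeds the provider target (the `Widget` interface and the `"widget"` id are illustrative only; the new `KType.dfType` property lets the reified helpers above derive the target without going through `KClass`):

```kotlin
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.misc.DfType
import space.kscience.dataforge.provider.dfType

@DfType("widget") // illustrative target id
internal interface Widget

@OptIn(DFExperimental::class)
fun main() {
    // the id declared in the annotation is used as the provider target
    println(Widget::class.dfType) // prints "widget"
}
```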
diff --git a/dataforge-data/build.gradle.kts b/dataforge-data/build.gradle.kts
index 8c0a690e..9451f59a 100644
--- a/dataforge-data/build.gradle.kts
+++ b/dataforge-data/build.gradle.kts
@@ -9,10 +9,7 @@ kscience{
     wasm()
     useCoroutines()
     dependencies {
-//        api(spclibs.atomicfu)
         api(projects.dataforgeMeta)
-        //Remove after subtype moved to stdlib
-        api(kotlin("reflect"))
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
index 5538cc28..e9392da8 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
@@ -3,6 +3,7 @@ package space.kscience.dataforge.data
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.filter
 import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.NameToken
 import space.kscience.dataforge.names.plus
@@ -17,17 +18,6 @@ public fun interface DataFilter {
     }
 }
 
-
-//public fun DataFilter.accepts(update: DataUpdate<*>): Boolean = accepts(update.name, update.data?.meta, update.type)
-
-//public fun <T, DU : DataUpdate<T>> Sequence<DU>.filterData(predicate: DataFilter): Sequence<DU> = filter { data ->
-//    predicate.accepts(data)
-//}
-//
-//public fun <T, DU : DataUpdate<T>> Flow<DU>.filterData(predicate: DataFilter): Flow<DU> = filter { data ->
-//    predicate.accepts(data)
-//}
-
 public fun <T> DataSource<T>.filterData(
     dataFilter: DataFilter,
 ): DataSource<T> = object : DataSource<T> {
@@ -58,10 +48,14 @@ public fun <T> ObservableDataSource<T>.filterData(
         this@filterData.read(name)?.takeIf { predicate.accepts(name, it.meta, it.type) }
 }
 
-internal class FilteredDataTree<T>(
-    val source: DataTree<T>,
-    val filter: DataFilter,
-    val branch: Name,
+/**
+ * A [DataTree] filtered by branch and some criterion, possibly changing the resulting type
+ */
+@DFInternal
+public class FilteredDataTree<T>(
+    public val source: DataTree<T>,
+    public val filter: DataFilter,
+    public val branch: Name,
     override val dataType: KType = source.dataType,
 ) : DataTree<T> {
 
@@ -83,37 +77,6 @@ internal class FilteredDataTree<T>(
         }
 }
 
-
 public fun <T> DataTree<T>.filterData(
     predicate: DataFilter,
-): DataTree<T> = FilteredDataTree(this, predicate, Name.EMPTY)
-
-
-///**
-// * Generate a wrapper data set with a given name prefix appended to all names
-// */
-//public fun <T : Any> DataTree<T>.withNamePrefix(prefix: Name): DataSet<T> = if (prefix.isEmpty()) {
-//    this
-//} else object : DataSource<T> {
-//
-//    override val dataType: KType get() = this@withNamePrefix.dataType
-//
-//    override val coroutineContext: CoroutineContext
-//        get() = (this@withNamePrefix as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
-//
-//    override val meta: Meta get() = this@withNamePrefix.meta
-//
-//
-//    override fun iterator(): Iterator<NamedData<T>> = iterator {
-//        for (d in this@withNamePrefix) {
-//            yield(d.data.named(prefix + d.name))
-//        }
-//    }
-//
-//    override fun get(name: Name): Data<T>? =
-//        name.removeFirstOrNull(name)?.let { this@withNamePrefix.get(it) }
-//
-//    override val updates: Flow<Name> get() = this@withNamePrefix.updates.map { prefix + it }
-//}
-//
-
+): FilteredDataTree<T> = FilteredDataTree(this, predicate, Name.EMPTY)
\ No newline at end of file
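
A brief sketch of the now-public filtering API; the three-argument shape of `DataFilter.accepts(name, meta, type)` is assumed from the calls above:

```kotlin
import space.kscience.dataforge.data.DataFilter
import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.data.FilteredDataTree
import space.kscience.dataforge.data.filterData
import space.kscience.dataforge.misc.DFInternal
import kotlin.reflect.typeOf

// Accept only items whose declared data type is Int.
val intOnly = DataFilter { _, _, type -> type == typeOf<Int>() }

// `filterData` now exposes the public FilteredDataTree type directly (opt-in required because it is @DFInternal).
@OptIn(DFInternal::class)
fun selectInts(tree: DataTree<Int>): FilteredDataTree<Int> = tree.filterData(intOnly)
```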
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index 4a0342dc..fcc3e299 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -13,7 +13,7 @@ import kotlin.time.Duration.Companion.milliseconds
 @OptIn(DFExperimental::class, ExperimentalCoroutinesApi::class)
 internal class ActionsTest {
     @Test
-    fun testStaticMapAction() = runTest(timeout = 500.milliseconds) {
+    fun testStaticMapAction() = runTest(timeout = 200.milliseconds) {
         val plusOne = Action.mapping<Int, Int> {
             result { it + 1 }
         }
@@ -26,11 +26,11 @@ internal class ActionsTest {
 
         val result = plusOne(data)
 
-        assertEquals(2, result.awaitData("1").await())
+        assertEquals(5, result.awaitData("4").await())
     }
 
     @Test
-    fun testDynamicMapAction() = runTest(timeout = 500.milliseconds) {
+    fun testDynamicMapAction() = runTest(timeout = 200.milliseconds) {
         val plusOne = Action.mapping<Int, Int> {
             result { it + 1 }
         }
@@ -43,7 +43,7 @@ internal class ActionsTest {
             source.writeValue(it.toString(), it)
         }
 
-        assertEquals(2, result.awaitData("1").await())
+        assertEquals(5, result.awaitData("4").await())
     }
 
 }
\ No newline at end of file
diff --git a/dataforge-output/api/dataforge-output.api b/dataforge-output/api/dataforge-output.api
deleted file mode 100644
index f991b754..00000000
--- a/dataforge-output/api/dataforge-output.api
+++ /dev/null
@@ -1,68 +0,0 @@
-public final class hep/dataforge/output/ConsoleOutputManager : hep/dataforge/context/AbstractPlugin, hep/dataforge/output/OutputManager {
-	public static final field Companion Lhep/dataforge/output/ConsoleOutputManager$Companion;
-	public fun <init> ()V
-	public fun get (Lkotlin/reflect/KClass;Lhep/dataforge/names/Name;Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;)Lhep/dataforge/output/Renderer;
-	public fun getTag ()Lhep/dataforge/context/PluginTag;
-}
-
-public final class hep/dataforge/output/ConsoleOutputManager$Companion : hep/dataforge/context/PluginFactory {
-	public fun getTag ()Lhep/dataforge/context/PluginTag;
-	public fun getType ()Lkotlin/reflect/KClass;
-	public fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Lhep/dataforge/output/ConsoleOutputManager;
-	public synthetic fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Ljava/lang/Object;
-}
-
-public final class hep/dataforge/output/DefaultTextFormat : hep/dataforge/output/TextFormat {
-	public static final field INSTANCE Lhep/dataforge/output/DefaultTextFormat;
-	public fun getPriority ()I
-	public fun getType ()Lkotlin/reflect/KClass;
-	public fun render (Ljava/lang/Appendable;Ljava/lang/Object;Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
-}
-
-public final class hep/dataforge/output/OutputJVMKt {
-	public static final fun getOutput (Lkotlinx/coroutines/Dispatchers;)Lkotlinx/coroutines/CoroutineDispatcher;
-}
-
-public abstract interface class hep/dataforge/output/OutputManager {
-	public abstract fun get (Lkotlin/reflect/KClass;Lhep/dataforge/names/Name;Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;)Lhep/dataforge/output/Renderer;
-}
-
-public final class hep/dataforge/output/OutputManager$DefaultImpls {
-	public static synthetic fun get$default (Lhep/dataforge/output/OutputManager;Lkotlin/reflect/KClass;Lhep/dataforge/names/Name;Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)Lhep/dataforge/output/Renderer;
-}
-
-public final class hep/dataforge/output/OutputManagerKt {
-	public static final fun getCONSOLE_RENDERER ()Lhep/dataforge/output/Renderer;
-	public static final fun getOutput (Lhep/dataforge/context/Context;)Lhep/dataforge/output/OutputManager;
-	public static final fun render (Lhep/dataforge/output/OutputManager;Ljava/lang/Object;Lhep/dataforge/names/Name;Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;)V
-	public static synthetic fun render$default (Lhep/dataforge/output/OutputManager;Ljava/lang/Object;Lhep/dataforge/names/Name;Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)V
-}
-
-public abstract interface class hep/dataforge/output/Renderer : hep/dataforge/context/ContextAware {
-	public abstract fun render (Ljava/lang/Object;Lhep/dataforge/meta/Meta;)V
-}
-
-public final class hep/dataforge/output/Renderer$DefaultImpls {
-	public static fun getLogger (Lhep/dataforge/output/Renderer;)Lmu/KLogger;
-	public static synthetic fun render$default (Lhep/dataforge/output/Renderer;Ljava/lang/Object;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)V
-}
-
-public abstract interface class hep/dataforge/output/TextFormat {
-	public static final field Companion Lhep/dataforge/output/TextFormat$Companion;
-	public static final field TEXT_RENDERER_TYPE Ljava/lang/String;
-	public abstract fun getPriority ()I
-	public abstract fun getType ()Lkotlin/reflect/KClass;
-	public abstract fun render (Ljava/lang/Appendable;Ljava/lang/Object;Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
-}
-
-public final class hep/dataforge/output/TextFormat$Companion {
-	public static final field TEXT_RENDERER_TYPE Ljava/lang/String;
-}
-
-public final class hep/dataforge/output/TextRenderer : hep/dataforge/output/Renderer {
-	public fun <init> (Lhep/dataforge/context/Context;Ljava/lang/Appendable;)V
-	public fun getContext ()Lhep/dataforge/context/Context;
-	public fun getLogger ()Lmu/KLogger;
-	public fun render (Ljava/lang/Object;Lhep/dataforge/meta/Meta;)V
-}
-
diff --git a/dataforge-output/build.gradle.kts b/dataforge-output/build.gradle.kts
deleted file mode 100644
index a858ff93..00000000
--- a/dataforge-output/build.gradle.kts
+++ /dev/null
@@ -1,15 +0,0 @@
-plugins {
-    id("space.kscience.gradle.mpp")
-    id("space.kscience.gradle.native")
-}
-
-kotlin {
-    sourceSets {
-        val commonMain by getting{
-            dependencies {
-                api(project(":dataforge-context"))
-                //api(project(":dataforge-io"))
-            }
-        }
-    }
-}
\ No newline at end of file
diff --git a/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/OutputManager.kt b/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/OutputManager.kt
deleted file mode 100644
index c8580403..00000000
--- a/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/OutputManager.kt
+++ /dev/null
@@ -1,75 +0,0 @@
-package space.kscience.dataforge.output
-
-import space.kscience.dataforge.context.*
-import space.kscience.dataforge.context.PluginTag.Companion.DATAFORGE_GROUP
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.names.Name
-import kotlinx.coroutines.CoroutineDispatcher
-import kotlinx.coroutines.Dispatchers
-import kotlin.reflect.KClass
-
-/**
- * A manager for outputs
- */
-public interface OutputManager {
-
-    /**
-     * Get an output specialized for given type, name and stage.
-     * @param stage represents the node or directory for the output. Empty means root node.
-     * @param name represents the name inside the node.
-     * @param meta configuration for [Renderer] (not for rendered object)
-     */
-    public fun <T : Any> getOutputContainer(
-        type: KClass<out T>,
-        name: Name,
-        stage: Name = Name.EMPTY,
-        meta: Meta = Meta.EMPTY
-    ): Renderer<T>
-}
-
-/**
- * Get an output manager for a context
- */
-public val Context.output: OutputManager get() = plugins.get() ?: ConsoleOutputManager()
-
-/**
- * Get an output with given [name], [stage] and reified content type
- */
-public inline fun <reified T : Any> OutputManager.getOutputContainer(
-    name: Name,
-    stage: Name = Name.EMPTY,
-    meta: Meta = Meta.EMPTY
-): Renderer<T> {
-    return getOutputContainer(T::class, name, stage, meta)
-}
-
-/**
- * Directly render an object using the most suitable renderer
- */
-public fun OutputManager.render(obj: Any, name: Name, stage: Name = Name.EMPTY, meta: Meta = Meta.EMPTY): Unit =
-    getOutputContainer(obj::class, name, stage).render(obj, meta)
-
-/**
- * System console output.
- * The [CONSOLE_RENDERER] is used when no other [OutputManager] is provided.
- */
-public val CONSOLE_RENDERER: Renderer<Any> = Renderer { obj, meta -> println(obj) }
-
-public class ConsoleOutputManager : AbstractPlugin(), OutputManager {
-    override val tag: PluginTag get() = ConsoleOutputManager.tag
-
-    override fun <T : Any> getOutputContainer(type: KClass<out T>, name: Name, stage: Name, meta: Meta): Renderer<T> = CONSOLE_RENDERER
-
-    public companion object : PluginFactory<ConsoleOutputManager> {
-        override val tag: PluginTag = PluginTag("output.console", group = DATAFORGE_GROUP)
-
-        override val type: KClass<ConsoleOutputManager> = ConsoleOutputManager::class
-
-        override fun invoke(meta: Meta, context: Context): ConsoleOutputManager = ConsoleOutputManager()
-    }
-}
-
-/**
- * A dispatcher for output tasks.
- */
-public expect val Dispatchers.Output: CoroutineDispatcher
\ No newline at end of file
diff --git a/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/Renderer.kt b/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/Renderer.kt
deleted file mode 100644
index f6caaeb3..00000000
--- a/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/Renderer.kt
+++ /dev/null
@@ -1,21 +0,0 @@
-package space.kscience.dataforge.output
-
-import space.kscience.dataforge.context.ContextAware
-import space.kscience.dataforge.meta.Meta
-
-/**
- * A generic way to render any object in the output.
- *
- * An object could be rendered either in append or overlay mode. The mode is decided by the [Renderer]
- * based on its configuration and provided meta
- *
- */
-public fun interface Renderer<in T : Any> {
-    /**
-     * Render specific object with configuration.
-     *
-     * By convention actual render is called in asynchronous mode, so this method should never
-     * block execution
-     */
-    public fun render(obj: T, meta: Meta)
-}
diff --git a/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/TextRenderer.kt b/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/TextRenderer.kt
deleted file mode 100644
index 8b33241b..00000000
--- a/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/TextRenderer.kt
+++ /dev/null
@@ -1,78 +0,0 @@
-package space.kscience.dataforge.output
-
-import space.kscience.dataforge.context.Context
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.output.TextFormat.Companion.TEXT_RENDERER_TYPE
-import space.kscience.dataforge.provider.Type
-import space.kscience.dataforge.provider.top
-import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.launch
-import kotlin.reflect.KClass
-import kotlin.reflect.KType
-
-
-/**
- * A text or binary renderer based on [Output]
- */
-@Type(TEXT_RENDERER_TYPE)
-@Deprecated("Bad design")
-public interface TextFormat {
-    /**
-     * The priority of this renderer compared to other renderers
-     */
-    public val priority: Int
-    /**
-     * The type of the content served by this renderer
-     */
-    public val type: KClass<*>
-
-    public suspend fun Appendable.render(obj: Any)
-
-    public companion object {
-        public const val TEXT_RENDERER_TYPE: String = "dataforge.textRenderer"
-    }
-}
-
-@Deprecated("Bad design")
-public object DefaultTextFormat : TextFormat {
-    override val priority: Int = Int.MAX_VALUE
-    override val type: KClass<*> = Any::class
-
-    override suspend fun Appendable.render(obj: Any) {
-        append(obj.toString() + "\n")
-    }
-}
-
-/**
- * A text-based renderer
- */
-@Deprecated("Bad design")
-public class TextRenderer(override val context: Context, private val output: Appendable) : Renderer<Any> {
-    private val cache = HashMap<KClass<*>, TextFormat>()
-
-    /**
-     * Find the first [TextFormat] matching the given object type.
-     */
-    override fun render(obj: Any, meta: Meta) {
-        val format: TextFormat = if (obj is CharSequence) {
-            DefaultTextFormat
-        } else {
-            val value = cache[obj::class]
-            if (value == null) {
-                val answer =
-                    context.top<TextFormat>(TEXT_RENDERER_TYPE).values.firstOrNull { it.type.isInstance(obj) }
-                if (answer != null) {
-                    cache[obj::class] = answer
-                    answer
-                } else {
-                    DefaultTextFormat
-                }
-            } else {
-                value
-            }
-        }
-        context.launch(Dispatchers.Output) {
-            format.run { output.render(obj) }
-        }
-    }
-}
\ No newline at end of file
diff --git a/dataforge-output/src/jsMain/kotlin/hep/dataforge/output/outputJS.kt b/dataforge-output/src/jsMain/kotlin/hep/dataforge/output/outputJS.kt
deleted file mode 100644
index 453d7351..00000000
--- a/dataforge-output/src/jsMain/kotlin/hep/dataforge/output/outputJS.kt
+++ /dev/null
@@ -1,7 +0,0 @@
-package space.kscience.dataforge.output
-
-import kotlinx.coroutines.CoroutineDispatcher
-import kotlinx.coroutines.Dispatchers
-
-
-public actual val Dispatchers.Output: CoroutineDispatcher get() = Default
\ No newline at end of file
diff --git a/dataforge-output/src/jvmMain/kotlin/hep/dataforge/output/outputJVM.kt b/dataforge-output/src/jvmMain/kotlin/hep/dataforge/output/outputJVM.kt
deleted file mode 100644
index d9ba0b2f..00000000
--- a/dataforge-output/src/jvmMain/kotlin/hep/dataforge/output/outputJVM.kt
+++ /dev/null
@@ -1,6 +0,0 @@
-package space.kscience.dataforge.output
-
-import kotlinx.coroutines.CoroutineDispatcher
-import kotlinx.coroutines.Dispatchers
-
-public actual val Dispatchers.Output: CoroutineDispatcher get() = IO
\ No newline at end of file
diff --git a/dataforge-output/src/nativeMain/kotlin/hep/dataforge/output/outputNative.kt b/dataforge-output/src/nativeMain/kotlin/hep/dataforge/output/outputNative.kt
deleted file mode 100644
index 2d59ae4e..00000000
--- a/dataforge-output/src/nativeMain/kotlin/hep/dataforge/output/outputNative.kt
+++ /dev/null
@@ -1,6 +0,0 @@
-package space.kscience.dataforge.output
-
-import kotlinx.coroutines.CoroutineDispatcher
-import kotlinx.coroutines.Dispatchers
-
-public actual val Dispatchers.Output: CoroutineDispatcher get() = Dispatchers.Default
\ No newline at end of file
diff --git a/dataforge-scripting/README.md b/dataforge-scripting/README.md
index e516b392..76b36b5b 100644
--- a/dataforge-scripting/README.md
+++ b/dataforge-scripting/README.md
@@ -1,6 +1,6 @@
 # Module dataforge-scripting
 
-
+Scripting definition for workspace generation
 
 ## Usage
 
diff --git a/dataforge-workspace/build.gradle.kts b/dataforge-workspace/build.gradle.kts
index 5fa555eb..8254ef7d 100644
--- a/dataforge-workspace/build.gradle.kts
+++ b/dataforge-workspace/build.gradle.kts
@@ -2,6 +2,8 @@ plugins {
     id("space.kscience.gradle.mpp")
 }
 
+description = "A framework for pull-based data processing"
+
 kscience {
     jvm()
     js()
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/dataFilterJvm.kt
similarity index 77%
rename from dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
rename to dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/dataFilterJvm.kt
index ba7fdadf..ba8e148c 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/dataFilterJvm.kt
@@ -1,5 +1,6 @@
-package space.kscience.dataforge.data
+package space.kscience.dataforge.workspace
 
+import space.kscience.dataforge.data.*
 import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
 import kotlin.reflect.KType
@@ -20,16 +21,6 @@ private fun <R> Data<*>.castOrNull(type: KType): Data<R>? =
         }
     }
 
-//@Suppress("UNCHECKED_CAST")
-//@DFInternal
-//public fun <R> Sequence<DataUpdate<*>>.filterByDataType(type: KType): Sequence<NamedData<R>> =
-//    filter { it.type.isSubtypeOf(type) } as Sequence<NamedData<R>>
-//
-//@Suppress("UNCHECKED_CAST")
-//@DFInternal
-//public fun <R> Flow<DataUpdate<*>>.filterByDataType(type: KType): Flow<NamedData<R>> =
-//    filter { it.type.isSubtypeOf(type) } as Flow<NamedData<R>>
-
 /**
  * Select all data matching given type and filters. Does not modify paths
  *
@@ -40,7 +31,7 @@ private fun <R> Data<*>.castOrNull(type: KType): Data<R>? =
 public fun <R> DataTree<*>.filterByType(
     type: KType,
     branch: Name = Name.EMPTY,
-    filter: DataFilter = DataFilter.EMPTY,
+    filter: DataFilter = DataFilter.Companion.EMPTY,
 ): DataTree<R> {
     val filterWithType = DataFilter { name, meta, dataType ->
         filter.accepts(name, meta, dataType) && dataType.isSubtypeOf(type)
@@ -54,7 +45,7 @@ public fun <R> DataTree<*>.filterByType(
 @OptIn(DFInternal::class)
 public inline fun <reified R : Any> DataTree<*>.filterByType(
     branch: Name = Name.EMPTY,
-    filter: DataFilter = DataFilter.EMPTY,
+    filter: DataFilter = DataFilter.Companion.EMPTY,
 ): DataTree<R> = filterByType(typeOf<R>(), branch, filter = filter)
 
 /**
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
index 688b5699..d9bc3e84 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
@@ -1,7 +1,6 @@
 package space.kscience.dataforge.workspace
 
 import space.kscience.dataforge.data.DataTree
-import space.kscience.dataforge.data.filterByType
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
index 111b3b89..eb1c17b7 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
@@ -152,14 +152,14 @@ internal class SimpleWorkspaceTest {
     }
 
     @Test
-    fun testWorkspace() = runTest(timeout = 100.milliseconds) {
+    fun testWorkspace() = runTest(timeout = 200.milliseconds) {
         val node = workspace.produce("sum")
-        val res = node.asSequence().single()
-        assertEquals(328350, res.await())
+        val res = node.data
+        assertEquals(328350, res?.await())
     }
 
     @Test
-    fun testMetaPropagation() = runTest(timeout = 100.milliseconds) {
+    fun testMetaPropagation() = runTest(timeout = 200.milliseconds) {
         val node = workspace.produce("sum") { "testFlag" put true }
         val res = node.data?.await()
     }
@@ -190,7 +190,7 @@ internal class SimpleWorkspaceTest {
         val node = workspace.produce("filterOne") {
             "name" put "myData[12]"
         }
-        assertEquals(12, node.asSequence().first().await())
+        assertEquals(12, node.data?.await())
     }
 
 }
\ No newline at end of file
diff --git a/gradle.properties b/gradle.properties
index 67ba7f8e..015d2c52 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -5,5 +5,6 @@ org.gradle.jvmargs=-Xmx4096m
 
 kotlin.mpp.stability.nowarn=true
 kotlin.native.ignoreDisabledTargets=true
+org.jetbrains.dokka.experimental.gradle.pluginMode=V2Enabled
 
 toolsVersion=0.16.1-kotlin-2.1.0
\ No newline at end of file

From ec2626e006a8109cb19b83e05c8c559e39897ed8 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 19 Jan 2025 12:35:31 +0300
Subject: [PATCH 77/77] Add central publishing

---
 build.gradle.kts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.gradle.kts b/build.gradle.kts
index 22784656..84c1bba7 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -38,7 +38,7 @@ ksciencePublish {
         useSPCTeam()
     }
     repository("spc", "https://maven.sciprog.center/kscience")
-    sonatype("https://oss.sonatype.org")
+    central()
 }
 
 apiValidation {