From bb9afb329fbb9dcea3859a07f3a4718675d7c454 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Wed, 5 Jun 2024 10:53:35 +0300
Subject: [PATCH 01/29]  MetaProvider `spec` is replaced by `readable`.
 `listOfSpec` replaced with `listOfReadable`

---
 CHANGELOG.md                                  |  1 +
 build.gradle.kts                              |  2 +-
 .../kscience/dataforge/meta/MetaDelegate.kt   | 68 +++++++++++++++----
 .../dataforge/meta/MutableMetaDelegate.kt     | 31 +++++++--
 4 files changed, 84 insertions(+), 18 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1f59fbae..788a7374 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,7 @@
 ### Changed
 
 ### Deprecated
+- MetaProvider `spec` is replaced by `readable`. `listOfSpec` replaced with `listOfReadable`
 
 ### Removed
 
diff --git a/build.gradle.kts b/build.gradle.kts
index 72f61abc..9f9632de 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -9,7 +9,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.9.0"
+    version = "0.9.1-dev-1"
 }
 
 subprojects {
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
index 1b506b44..2fa0f528 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
@@ -24,20 +24,45 @@ public fun MetaProvider.node(
     }
 }
 
+
 /**
- * Use [metaReader] to read the Meta node
+ * Use [reader] to read the Meta node
  */
-public fun <T> MetaProvider.spec(
-    metaReader: MetaReader<T>,
+public fun <T> MetaProvider.readable(
+    reader: MetaReader<T>,
     key: Name? = null,
 ): MetaDelegate<T?> = object : MetaDelegate<T?> {
-    override val descriptor: MetaDescriptor? get() = metaReader.descriptor
+    override val descriptor: MetaDescriptor? get() = reader.descriptor
 
     override fun getValue(thisRef: Any?, property: KProperty<*>): T? {
-        return get(key ?: property.name.asName())?.let { metaReader.read(it) }
+        return get(key ?: property.name.asName())?.let { reader.read(it) }
     }
 }
 
+/**
+ * Use [reader] to read the Meta node or return [default] if node does not exist
+ */
+public fun <T> MetaProvider.readable(
+    reader: MetaReader<T>,
+    default: T,
+    key: Name? = null,
+): MetaDelegate<T> = object : MetaDelegate<T> {
+    override val descriptor: MetaDescriptor? get() = reader.descriptor
+
+    override fun getValue(thisRef: Any?, property: KProperty<*>): T {
+        return get(key ?: property.name.asName())?.let { reader.read(it) } ?: default
+    }
+}
+
+/**
+ * Use [reader] to read the Meta node
+ */
+@Deprecated("Replace with readable", ReplaceWith("readable(reader, key)"))
+public fun <T> MetaProvider.spec(
+    reader: MetaReader<T>,
+    key: Name? = null,
+): MetaDelegate<T?> = readable(reader, key)
+
 /**
  * Use object serializer to transform it to Meta and back
  */
@@ -45,34 +70,51 @@ public fun <T> MetaProvider.spec(
 public inline fun <reified T> MetaProvider.serializable(
     key: Name? = null,
     descriptor: MetaDescriptor? = null,
-): MetaDelegate<T?> = spec(MetaConverter.serializable(descriptor), key)
+): MetaDelegate<T?> = readable(MetaConverter.serializable(descriptor), key)
+
+@DFExperimental
+public inline fun <reified T> MetaProvider.serializable(
+    key: Name? = null,
+    default: T,
+    descriptor: MetaDescriptor? = null,
+): MetaDelegate<T> = readable(MetaConverter.serializable(descriptor), default, key)
 
 @Deprecated("Use convertable", ReplaceWith("convertable(converter, key)"))
 public fun <T> MetaProvider.node(
     key: Name? = null,
     converter: MetaReader<T>,
-): ReadOnlyProperty<Any?, T?> = spec(converter, key)
+): ReadOnlyProperty<Any?, T?> = readable(converter, key)
 
 /**
- * Use [converter] to convert a list of same name siblings meta to object
+ * Use [reader] to convert a list of same name siblings meta to object
  */
-public fun <T> Meta.listOfSpec(
-    converter: MetaReader<T>,
+public fun <T> Meta.listOfReadable(
+    reader: MetaReader<T>,
     key: Name? = null,
 ): MetaDelegate<List<T>> = object : MetaDelegate<List<T>> {
     override fun getValue(thisRef: Any?, property: KProperty<*>): List<T> {
         val name = key ?: property.name.asName()
-        return getIndexed(name).values.map { converter.read(it) }
+        return getIndexed(name).values.map { reader.read(it) }
     }
 
-    override val descriptor: MetaDescriptor? = converter.descriptor?.copy(multiple = true)
+    override val descriptor: MetaDescriptor? = reader.descriptor?.copy(multiple = true)
 }
 
+
+/**
+ * Use [converter] to convert a list of same name siblings meta to object
+ */
+@Deprecated("Replace with listOfReadable", ReplaceWith("listOfReadable(converter, key)"))
+public fun <T> Meta.listOfSpec(
+    converter: MetaReader<T>,
+    key: Name? = null,
+): MetaDelegate<List<T>> = listOfReadable(converter, key)
+
 @DFExperimental
 public inline fun <reified T> Meta.listOfSerializable(
     key: Name? = null,
     descriptor: MetaDescriptor? = null,
-): MetaDelegate<List<T>> = listOfSpec(MetaConverter.serializable(descriptor), key)
+): MetaDelegate<List<T>> = listOfReadable(MetaConverter.serializable(descriptor), key)
 
 /**
  * A property delegate that uses custom key
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
index 37140c6f..28a550d8 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
@@ -54,9 +54,25 @@ public fun <T> MutableMetaProvider.convertable(
     }
 }
 
-@Deprecated("Use convertable", ReplaceWith("convertable(converter, key)"))
-public fun <T> MutableMetaProvider.node(key: Name? = null, converter: MetaConverter<T>): MutableMetaDelegate<T?> =
-    convertable(converter, key)
+public fun <T> MutableMetaProvider.convertable(
+    converter: MetaConverter<T>,
+    default: T,
+    key: Name? = null,
+): MutableMetaDelegate<T> = object : MutableMetaDelegate<T> {
+
+    override val descriptor: MetaDescriptor? get() = converter.descriptor
+
+
+    override fun getValue(thisRef: Any?, property: KProperty<*>): T {
+        val name = key ?: property.name.asName()
+        return get(name)?.let { converter.read(it) } ?: default
+    }
+
+    override fun setValue(thisRef: Any?, property: KProperty<*>, value: T) {
+        val name = key ?: property.name.asName()
+        set(name, value?.let { converter.convert(it) })
+    }
+}
 
 /**
  * Use object serializer to transform it to Meta and back.
@@ -66,7 +82,14 @@ public fun <T> MutableMetaProvider.node(key: Name? = null, converter: MetaConver
 public inline fun <reified T> MutableMetaProvider.serializable(
     descriptor: MetaDescriptor? = null,
     key: Name? = null,
-): MutableMetaDelegate<T?> = convertable(MetaConverter.serializable(descriptor), key)
+): MutableMetaDelegate<T?> = convertable<T>(MetaConverter.serializable(descriptor), key)
+
+@DFExperimental
+public inline fun <reified T> MutableMetaProvider.serializable(
+    descriptor: MetaDescriptor? = null,
+    default: T,
+    key: Name? = null,
+): MutableMetaDelegate<T> = convertable(MetaConverter.serializable(descriptor), default, key)
 
 /**
  * Use [converter] to convert a list of same name siblings meta to object and back.

From f79b7faeafeafab51a5c6432be0255d5c3e14251 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Wed, 5 Jun 2024 12:15:27 +0300
Subject: [PATCH 02/29] Simplify inheritance logic in `MutableTypedMeta`

---
 CHANGELOG.md                                     |  1 +
 .../space/kscience/dataforge/meta/MutableMeta.kt | 14 ++++++++++++--
 .../kscience/dataforge/meta/ObservableMeta.kt    | 16 ----------------
 3 files changed, 13 insertions(+), 18 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 788a7374..43e6bf4e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,7 @@
 ### Added
 
 ### Changed
+- Simplify inheritance logic in `MutableTypedMeta`
 
 ### Deprecated
 - MetaProvider `spec` is replaced by `readable`. `listOfSpec` replaced with `listOfReadable`
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
index 7be16cc9..274671bf 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
@@ -159,7 +159,17 @@ public interface MutableTypedMeta<M : MutableTypedMeta<M>> : TypedMeta<M>, Mutab
      */
     @DFExperimental
     public fun attach(name: Name, node: M)
-    override fun get(name: Name): M?
+
+    override fun get(name: Name): M? {
+        tailrec fun M.find(name: Name): M? = if (name.isEmpty()) {
+            self
+        } else {
+            items[name.firstOrNull()!!]?.find(name.cutFirst())
+        }
+
+        return self.find(name)
+    }
+
     override fun getOrCreate(name: Name): M
 }
 
@@ -388,7 +398,7 @@ public fun MutableMeta.reset(newMeta: Meta) {
     (items.keys - newMeta.items.keys).forEach {
         remove(it.asName())
     }
-    newMeta.items.forEach { (token, item)->
+    newMeta.items.forEach { (token, item) ->
         set(token, item)
     }
 }
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
index 7cd28746..20388a92 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
@@ -2,9 +2,6 @@ package space.kscience.dataforge.meta
 
 import space.kscience.dataforge.misc.ThreadSafe
 import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.cutFirst
-import space.kscience.dataforge.names.firstOrNull
-import space.kscience.dataforge.names.isEmpty
 
 
 internal data class MetaListener(
@@ -39,20 +36,7 @@ public interface ObservableMeta : Meta {
  * A [Meta] which is both observable and mutable
  */
 public interface ObservableMutableMeta : ObservableMeta, MutableMeta, MutableTypedMeta<ObservableMutableMeta> {
-
     override val self: ObservableMutableMeta get() = this
-
-    override fun getOrCreate(name: Name): ObservableMutableMeta
-
-    override fun get(name: Name): ObservableMutableMeta? {
-        tailrec fun ObservableMutableMeta.find(name: Name): ObservableMutableMeta? = if (name.isEmpty()) {
-            this
-        } else {
-            items[name.firstOrNull()!!]?.find(name.cutFirst())
-        }
-
-        return find(name)
-    }
 }
 
 internal abstract class AbstractObservableMeta : ObservableMeta {

From 332d38df77895ea3f72e6ba2a5b2d41233c08d29 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 10 Jun 2024 20:28:57 +0300
Subject: [PATCH 03/29] First implementation for protobuf converter

---
 .../kscience/dataforge/data/ActionsTest.kt    | 16 ++--
 .../dataforge-io-proto/build.gradle.kts       | 32 +++++++
 .../src/commonMain/proto/meta.proto           | 19 ++++
 .../src/jvmMain/kotlin/ProtoMetaFormat.kt     | 92 +++++++++++++++++++
 .../src/jvmTest/kotlin/ProtoBufTest.kt        | 39 ++++++++
 .../space/kscience/dataforge/meta/JsonMeta.kt |  5 +-
 .../space/kscience/dataforge/meta/Scheme.kt   |  5 +-
 .../kscience/dataforge/names/NameToken.kt     | 27 +++++-
 .../kscience/dataforge/names/NameTest.kt      | 14 ++-
 settings.gradle.kts                           |  1 +
 10 files changed, 233 insertions(+), 17 deletions(-)
 create mode 100644 dataforge-io/dataforge-io-proto/build.gradle.kts
 create mode 100644 dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
 create mode 100644 dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt
 create mode 100644 dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt

diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index 13660eee..d608cbd3 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -1,8 +1,5 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.flow.collect
-import kotlinx.coroutines.flow.onEach
-import kotlinx.coroutines.flow.take
 import kotlinx.coroutines.test.runTest
 import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.actions.invoke
@@ -16,27 +13,28 @@ import kotlin.time.Duration.Companion.milliseconds
 internal class ActionsTest {
     @Test
     fun testStaticMapAction() = runTest(timeout = 500.milliseconds) {
+        val plusOne = Action.mapping<Int, Int> {
+            result { it + 1 }
+        }
+
         val data: DataTree<Int> = DataTree {
             repeat(10) {
                 putValue(it.toString(), it)
             }
         }
 
-        val plusOne = Action.mapping<Int, Int> {
-            result { it + 1 }
-        }
         val result = plusOne(data)
         assertEquals(2, result["1"]?.await())
     }
 
     @Test
     fun testDynamicMapAction() = runTest(timeout = 500.milliseconds) {
-        val source: MutableDataTree<Int> = MutableDataTree()
-
         val plusOne = Action.mapping<Int, Int> {
             result { it + 1 }
         }
 
+        val source: MutableDataTree<Int> = MutableDataTree()
+
         val result = plusOne(source)
 
 
@@ -44,7 +42,7 @@ internal class ActionsTest {
             source.updateValue(it.toString(), it)
         }
 
-        result.updates.take(10).onEach { println(it.name) }.collect()
+//        result.updates.take(10).onEach { println(it.name) }.collect()
 
         assertEquals(2, result["1"]?.await())
     }
diff --git a/dataforge-io/dataforge-io-proto/build.gradle.kts b/dataforge-io/dataforge-io-proto/build.gradle.kts
new file mode 100644
index 00000000..2899a2b1
--- /dev/null
+++ b/dataforge-io/dataforge-io-proto/build.gradle.kts
@@ -0,0 +1,32 @@
+plugins {
+    id("space.kscience.gradle.mpp")
+    id("com.squareup.wire") version "4.9.9"
+}
+
+description = "ProtoBuf meta IO"
+
+kscience {
+    jvm()
+    dependencies {
+        api(projects.dataforgeIo)
+        api("com.squareup.wire:wire-runtime:4.9.9")
+    }
+    useSerialization {
+        protobuf()
+    }
+}
+
+wire {
+    kotlin {
+        sourcePath {
+            srcDir("src/commonMain/proto")
+        }
+    }
+}
+
+readme {
+    maturity = space.kscience.gradle.Maturity.PROTOTYPE
+    description = """
+        ProtoBuf Meta representation
+    """.trimIndent()
+}
diff --git a/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto b/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
new file mode 100644
index 00000000..2fd33eab
--- /dev/null
+++ b/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
@@ -0,0 +1,19 @@
+syntax = "proto3";
+package space.kscience.dataforge.io.proto;
+
+message ProtoMeta {
+  message ProtoValue {
+    oneof value {
+      string stringValue = 2;
+      bool booleanValue = 3;
+      double doubleValue = 4;
+      float floatValue = 5;
+      int32 int32Value = 6;
+      int64 int64Value = 7;
+      bytes bytesValue = 8;
+    }
+  }
+  repeated ProtoValue value = 1;
+
+  map<string, ProtoMeta> items = 2;
+}
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt b/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt
new file mode 100644
index 00000000..5e55099b
--- /dev/null
+++ b/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt
@@ -0,0 +1,92 @@
+package space.kscience.dataforge.io.proto
+
+import kotlinx.io.Sink
+import kotlinx.io.Source
+import kotlinx.io.asInputStream
+import kotlinx.io.asOutputStream
+import org.slf4j.LoggerFactory
+import space.kscience.dataforge.io.MetaFormat
+import space.kscience.dataforge.meta.*
+import space.kscience.dataforge.meta.descriptors.MetaDescriptor
+import space.kscience.dataforge.names.NameToken
+
+internal class ProtoMetaWrapper(private val proto: ProtoMeta) : Meta {
+
+    private fun ProtoMeta.ProtoValue.toValue(): Value = when {
+        stringValue != null -> stringValue.asValue()
+        booleanValue != null -> booleanValue.asValue()
+        doubleValue != null -> doubleValue.asValue()
+        floatValue != null -> floatValue.asValue()
+        int32Value != null -> int32Value.asValue()
+        int64Value != null -> int64Value.asValue()
+        bytesValue != null -> bytesValue.toByteArray().asValue()
+        else -> Null
+    }
+
+    override val value: Value?
+        get() = when (proto.value_.size) {
+            0 -> null
+            1 -> proto.value_[0].toValue()
+            else -> proto.value_.map { it.toValue() }.asValue()
+        }
+
+
+    override val items: Map<NameToken, Meta>
+        get() = proto.items.entries.associate { NameToken.parse(it.key) to ProtoMetaWrapper(it.value) }
+
+    override fun toString(): String = Meta.toString(this)
+
+    override fun equals(other: Any?): Boolean = Meta.equals(this, other as? Meta)
+
+    override fun hashCode(): Int = Meta.hashCode(this)
+}
+
+internal fun Meta.toProto(): ProtoMeta {
+
+
+    fun MutableList<ProtoMeta.ProtoValue>.appendProtoValues(value: Value): Unit {
+        when (value.type) {
+            ValueType.NULL -> {
+                //do nothing
+            }
+
+            ValueType.NUMBER -> when (value.value) {
+                is Int, is Short, is Byte -> add(ProtoMeta.ProtoValue(int32Value = value.int))
+                is Long -> add(ProtoMeta.ProtoValue(int64Value = value.long))
+                is Float -> add(ProtoMeta.ProtoValue(floatValue = value.float))
+                else -> {
+                    LoggerFactory.getLogger(ProtoMeta::class.java)
+                        .warn("Unknown number type ${value.value} encoded as Double")
+                    add(ProtoMeta.ProtoValue(doubleValue = value.double))
+                }
+            }
+
+            ValueType.STRING -> add(ProtoMeta.ProtoValue(stringValue = value.string))
+            ValueType.BOOLEAN -> add(ProtoMeta.ProtoValue(booleanValue = value.boolean))
+            ValueType.LIST -> {
+                value.list.forEach {
+                    if (it.type == ValueType.LIST) {
+                        error("Nested lists are not supported")
+                    } else {
+                        appendProtoValues(it)
+                    }
+                }
+            }
+        }
+    }
+
+    return ProtoMeta(
+        value_ = buildList { value?.let { appendProtoValues(it) } },
+        items.entries.associate { it.key.toString() to it.value.toProto() }
+    )
+}
+
+
+public object ProtoMetaFormat : MetaFormat {
+    override fun writeMeta(sink: Sink, meta: Meta, descriptor: MetaDescriptor?) {
+        ProtoMeta.ADAPTER.encode(sink.asOutputStream(), meta.toProto())
+    }
+
+    override fun readMeta(source: Source, descriptor: MetaDescriptor?): Meta =
+        ProtoMetaWrapper(ProtoMeta.ADAPTER.decode(source.asInputStream()))
+}
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt b/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt
new file mode 100644
index 00000000..bf819b6e
--- /dev/null
+++ b/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt
@@ -0,0 +1,39 @@
+package space.kscience.dataforge.io.proto
+
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.get
+import kotlin.test.Test
+import kotlin.test.assertEquals
+
+class ProtoBufTest {
+
+    @Test
+    fun testProtoBufMetaFormat(){
+        val meta = Meta {
+            "a" put 22
+            "node" put {
+                "b" put "DDD"
+                "c" put 11.1
+                "d" put {
+                    "d1" put {
+                        "d11" put "aaa"
+                        "d12" put "bbb"
+                    }
+                    "d2" put 2
+                }
+                "array" put doubleArrayOf(1.0, 2.0, 3.0)
+            }
+        }
+        val buffer = kotlinx.io.Buffer()
+        ProtoMetaFormat.writeTo(buffer,meta)
+        val result = ProtoMetaFormat.readFrom(buffer)
+
+        println(result["a"]?.value)
+
+        meta.items.keys.forEach {
+            assertEquals(meta[it],result[it],"${meta[it]} != ${result[it]}")
+        }
+
+        assertEquals(meta, result)
+    }
+}
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
index 8da8b2d3..049c1733 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
@@ -117,8 +117,11 @@ private fun MutableMap<NameToken, SealedMeta>.addJsonElement(
             } else {
                 val indexKey = descriptor?.indexKey ?: Meta.INDEX_KEY
                 element.forEachIndexed { serial, childElement ->
-                    val index = (childElement as? JsonObject)?.get(indexKey)?.jsonPrimitive?.content
+
+                    val index = (childElement as? JsonObject)
+                        ?.get(indexKey)?.jsonPrimitive?.content
                         ?: serial.toString()
+
                     val child: SealedMeta = when (childElement) {
                         is JsonObject -> childElement.toMeta(descriptor)
                         is JsonArray -> {
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
index fe121c42..bc05cb5d 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
@@ -221,13 +221,14 @@ public fun <T : Scheme> Configurable.updateWith(
 /**
  * A delegate that uses a [MetaReader] to wrap a child of this provider
  */
-public fun <T : Scheme> MutableMeta.scheme(
+public fun <T : Scheme> MutableMetaProvider.scheme(
     spec: SchemeSpec<T>,
     key: Name? = null,
 ): ReadWriteProperty<Any?, T> = object : ReadWriteProperty<Any?, T> {
     override fun getValue(thisRef: Any?, property: KProperty<*>): T {
         val name = key ?: property.name.asName()
-        return spec.write(getOrCreate(name))
+        val node = get(name)?: MutableMeta().also { set(name,it) }
+        return spec.write(node)
     }
 
     override fun setValue(thisRef: Any?, property: KProperty<*>, value: T) {
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt
index 83752b9a..3994ef27 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt
@@ -67,10 +67,29 @@ public class NameToken(public val body: String, public val index: String? = null
          * Parse name token from a string
          */
         public fun parse(string: String): NameToken {
-            val body = string.substringBefore('[')
-            val index = string.substringAfter('[', "")
-            if (index.isNotEmpty() && !index.endsWith(']')) error("NameToken with index must end with ']'")
-            return NameToken(body, index.removeSuffix("]"))
+            var indexStart = -1
+            var indexEnd = -1
+            string.forEachIndexed { index, c ->
+                when (c) {
+                    '[' -> when {
+                        indexStart >= 0 -> error("Second opening bracket not allowed in NameToken: $string")
+                        else -> indexStart = index
+                    }
+
+                    ']' -> when {
+                        indexStart < 0 -> error("Closing index bracket could not be used before opening bracket in NameToken: $string")
+                        indexEnd >= 0 -> error("Second closing bracket not allowed in NameToken: $string")
+                        else -> indexEnd = index
+                    }
+
+                    else -> if(indexEnd>=0) error("Symbols not allowed after index in NameToken: $string")
+                }
+            }
+            if(indexStart>=0 && indexEnd<0) error("Opening bracket without closing bracket not allowed in NameToken: $string")
+            return NameToken(
+                if(indexStart>=0) string.substring(0, indexStart) else string,
+                if(indexStart>=0) string.substring(indexStart + 1, indexEnd) else null
+            )
         }
     }
 }
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/names/NameTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/names/NameTest.kt
index db630487..25725333 100644
--- a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/names/NameTest.kt
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/names/NameTest.kt
@@ -56,10 +56,22 @@ class NameTest {
 
         val token2 = NameToken.parse("token-body")
         assertEquals("token-body", token2.body)
-        assertEquals("", token2.index)
+        assertEquals(null, token2.index)
+
+//        val token3 = NameToken.parse("[token-index]")
+//        assertEquals("", token3.body)
+//        assertEquals("token-index", token3.index)
+
+        assertFails{
+            NameToken.parse("[token-index]")
+        }
 
         assertFails {
             NameToken.parse("token[22")
         }
+
+        assertFails {
+            NameToken.parse("token[22]ddd")
+        }
     }
 }
\ No newline at end of file
diff --git a/settings.gradle.kts b/settings.gradle.kts
index ca872038..35eae74e 100644
--- a/settings.gradle.kts
+++ b/settings.gradle.kts
@@ -43,6 +43,7 @@ include(
     ":dataforge-meta",
     ":dataforge-io",
     ":dataforge-io:dataforge-io-yaml",
+    ":dataforge-io:dataforge-io-proto",
     ":dataforge-context",
     ":dataforge-data",
     ":dataforge-workspace",

From 7fa6617e7e85d341d7740cc252e46753cfd4c573 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 10 Jun 2024 21:22:00 +0300
Subject: [PATCH 04/29] First implementation for protobuf converter

---
 CHANGELOG.md                                    |  2 ++
 .../space/kscience/dataforge/meta/JsonMeta.kt   | 14 +++++++++-----
 .../kscience/dataforge/meta/MetaConverter.kt    |  8 +++++++-
 .../kscience/dataforge/meta/ConvertersTest.kt   | 17 +++++++++++++++++
 4 files changed, 35 insertions(+), 6 deletions(-)
 create mode 100644 dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ConvertersTest.kt

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 43e6bf4e..f7a2e363 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -13,6 +13,8 @@
 ### Removed
 
 ### Fixed
+- Fixed NameToken parsing.
+- Top level string list meta conversion.
 
 ### Security
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
index 049c1733..39a98927 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
@@ -163,11 +163,15 @@ public fun JsonObject.toMeta(descriptor: MetaDescriptor? = null): SealedMeta {
 public fun JsonElement.toMeta(descriptor: MetaDescriptor? = null): SealedMeta = when (this) {
     is JsonPrimitive -> Meta(toValue(descriptor))
     is JsonObject -> toMeta(descriptor)
-    is JsonArray -> SealedMeta(null,
-        linkedMapOf<NameToken, SealedMeta>().apply {
-            addJsonElement(Meta.JSON_ARRAY_KEY, this@toMeta, null)
-        }
-    )
+    is JsonArray -> if (any { it is JsonObject }) {
+        SealedMeta(null,
+            linkedMapOf<NameToken, SealedMeta>().apply {
+                addJsonElement(Meta.JSON_ARRAY_KEY, this@toMeta, null)
+            }
+        )
+    } else{
+        Meta(map { it.toValueOrNull(descriptor) ?: kotlin.error("Unreachable: should not contain objects") }.asValue())
+    }
 }
 
 //
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
index 9baf0087..0f0e8efe 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
@@ -11,7 +11,7 @@ import space.kscience.dataforge.misc.DFExperimental
 /**
  * A converter of generic object to and from [Meta]
  */
-public interface MetaConverter<T>: MetaReader<T> {
+public interface MetaConverter<T> : MetaReader<T> {
 
     /**
      * A descriptor for resulting meta
@@ -116,6 +116,12 @@ public interface MetaConverter<T>: MetaReader<T> {
             override fun convert(obj: E): Meta = Meta(obj.asValue())
         }
 
+        public val stringList: MetaConverter<List<String>> = object : MetaConverter<List<String>> {
+            override fun convert(obj: List<String>): Meta = Meta(obj.map { it.asValue() }.asValue())
+
+            override fun readOrNull(source: Meta): List<String>? = source.stringList
+        }
+
         public fun <T> valueList(
             writer: (T) -> Value = { Value.of(it) },
             reader: (Value) -> T,
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ConvertersTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ConvertersTest.kt
new file mode 100644
index 00000000..fda978e4
--- /dev/null
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/ConvertersTest.kt
@@ -0,0 +1,17 @@
+package space.kscience.dataforge.meta
+
+import kotlin.test.Test
+import kotlin.test.assertEquals
+
+class ConvertersTest {
+
+    @Test
+    fun stringListConversion() {
+        val list = listOf("A", "B", "C")
+        val meta = MetaConverter.stringList.convert(list)
+        val json = meta.toJson()
+        val reconstructedMeta = json.toMeta()
+        val reconstructed = MetaConverter.stringList.read(reconstructedMeta)
+        assertEquals(list,reconstructed)
+    }
+}
\ No newline at end of file

From e7f2f7e96f7b8d9b797ce921de209fbf931e2712 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 11 Jun 2024 12:06:48 +0300
Subject: [PATCH 05/29] Protobuf meta representation

---
 .../src/commonMain/proto/meta.proto           | 13 ++++-
 .../src/jvmMain/kotlin/ProtoMetaFormat.kt     | 54 +++++++------------
 .../src/jvmTest/kotlin/ProtoBufTest.kt        |  7 ++-
 3 files changed, 38 insertions(+), 36 deletions(-)

diff --git a/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto b/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
index 2fd33eab..24d33420 100644
--- a/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
+++ b/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
@@ -11,9 +11,20 @@ message ProtoMeta {
       int32 int32Value = 6;
       int64 int64Value = 7;
       bytes bytesValue = 8;
+      ProtoValueList listValue = 9;
+      Float64List float64ListValue = 10;
     }
   }
-  repeated ProtoValue value = 1;
+
+  message ProtoValueList{
+    repeated ProtoValue values = 1;
+  }
+
+  message Float64List{
+    repeated double values = 1 [packed=true];
+  }
+
+  ProtoValue protoValue = 1;
 
   map<string, ProtoMeta> items = 2;
 }
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt b/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt
index 5e55099b..e4a33549 100644
--- a/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt
+++ b/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt
@@ -12,7 +12,7 @@ import space.kscience.dataforge.names.NameToken
 
 internal class ProtoMetaWrapper(private val proto: ProtoMeta) : Meta {
 
-    private fun ProtoMeta.ProtoValue.toValue(): Value = when {
+    private fun ProtoMeta.ProtoValue.toValue(): Value? = when {
         stringValue != null -> stringValue.asValue()
         booleanValue != null -> booleanValue.asValue()
         doubleValue != null -> doubleValue.asValue()
@@ -20,15 +20,13 @@ internal class ProtoMetaWrapper(private val proto: ProtoMeta) : Meta {
         int32Value != null -> int32Value.asValue()
         int64Value != null -> int64Value.asValue()
         bytesValue != null -> bytesValue.toByteArray().asValue()
-        else -> Null
+        listValue != null -> listValue.values.mapNotNull { it.toValue() }.asValue()
+        float64ListValue != null -> float64ListValue.values.map { it.asValue() }.asValue()
+        else -> null
     }
 
     override val value: Value?
-        get() = when (proto.value_.size) {
-            0 -> null
-            1 -> proto.value_[0].toValue()
-            else -> proto.value_.map { it.toValue() }.asValue()
-        }
+        get() = proto.protoValue?.toValue()
 
 
     override val items: Map<NameToken, Meta>
@@ -44,39 +42,27 @@ internal class ProtoMetaWrapper(private val proto: ProtoMeta) : Meta {
 internal fun Meta.toProto(): ProtoMeta {
 
 
-    fun MutableList<ProtoMeta.ProtoValue>.appendProtoValues(value: Value): Unit {
-        when (value.type) {
-            ValueType.NULL -> {
-                //do nothing
-            }
+    fun Value.toProto(): ProtoMeta.ProtoValue = when (type) {
+        ValueType.NULL -> ProtoMeta.ProtoValue()
 
-            ValueType.NUMBER -> when (value.value) {
-                is Int, is Short, is Byte -> add(ProtoMeta.ProtoValue(int32Value = value.int))
-                is Long -> add(ProtoMeta.ProtoValue(int64Value = value.long))
-                is Float -> add(ProtoMeta.ProtoValue(floatValue = value.float))
-                else -> {
-                    LoggerFactory.getLogger(ProtoMeta::class.java)
-                        .warn("Unknown number type ${value.value} encoded as Double")
-                    add(ProtoMeta.ProtoValue(doubleValue = value.double))
-                }
-            }
-
-            ValueType.STRING -> add(ProtoMeta.ProtoValue(stringValue = value.string))
-            ValueType.BOOLEAN -> add(ProtoMeta.ProtoValue(booleanValue = value.boolean))
-            ValueType.LIST -> {
-                value.list.forEach {
-                    if (it.type == ValueType.LIST) {
-                        error("Nested lists are not supported")
-                    } else {
-                        appendProtoValues(it)
-                    }
-                }
+        ValueType.NUMBER -> when (value) {
+            is Int, is Short, is Byte -> ProtoMeta.ProtoValue(int32Value = int)
+            is Long -> ProtoMeta.ProtoValue(int64Value = long)
+            is Float -> ProtoMeta.ProtoValue(floatValue = float)
+            else -> {
+                LoggerFactory.getLogger(ProtoMeta::class.java)
+                    .warn("Unknown number type ${value} encoded as Double")
+                ProtoMeta.ProtoValue(doubleValue = double)
             }
         }
+
+        ValueType.STRING -> ProtoMeta.ProtoValue(stringValue = string)
+        ValueType.BOOLEAN -> ProtoMeta.ProtoValue(booleanValue = boolean)
+        ValueType.LIST -> ProtoMeta.ProtoValue(listValue = ProtoMeta.ProtoValueList(list.map { it.toProto() }))
     }
 
     return ProtoMeta(
-        value_ = buildList { value?.let { appendProtoValues(it) } },
+        protoValue = value?.toProto(),
         items.entries.associate { it.key.toString() to it.value.toProto() }
     )
 }
diff --git a/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt b/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt
index bf819b6e..5757092c 100644
--- a/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt
+++ b/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt
@@ -1,6 +1,7 @@
 package space.kscience.dataforge.io.proto
 
 import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.asValue
 import space.kscience.dataforge.meta.get
 import kotlin.test.Test
 import kotlin.test.assertEquals
@@ -22,13 +23,17 @@ class ProtoBufTest {
                     "d2" put 2
                 }
                 "array" put doubleArrayOf(1.0, 2.0, 3.0)
+                "array2d" put listOf(
+                    doubleArrayOf(1.0, 2.0, 3.0).asValue(),
+                    doubleArrayOf(1.0, 2.0, 3.0).asValue()
+                ).asValue()
             }
         }
         val buffer = kotlinx.io.Buffer()
         ProtoMetaFormat.writeTo(buffer,meta)
         val result = ProtoMetaFormat.readFrom(buffer)
 
-        println(result["a"]?.value)
+//        println(result["a"]?.value)
 
         meta.items.keys.forEach {
             assertEquals(meta[it],result[it],"${meta[it]} != ${result[it]}")

From b77fc9a0d5bdd28f100e1f1788464ce856c4462e Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 11 Jun 2024 16:24:15 +0300
Subject: [PATCH 06/29] Add Envelope encoding to protobuf

---
 .../dataforge-io-proto/build.gradle.kts       |  1 +
 .../commonMain/kotlin/ProtoEnvelopeFormat.kt  | 32 +++++++
 .../kotlin/ProtoMetaFormat.kt                 | 16 ++--
 .../src/commonMain/proto/meta.proto           |  5 ++
 .../src/commonTest/kotlin/ProtoBufTest.kt     | 83 +++++++++++++++++++
 .../src/jvmTest/kotlin/ProtoBufTest.kt        | 44 ----------
 .../space/kscience/dataforge/io/Envelope.kt   |  5 +-
 7 files changed, 131 insertions(+), 55 deletions(-)
 create mode 100644 dataforge-io/dataforge-io-proto/src/commonMain/kotlin/ProtoEnvelopeFormat.kt
 rename dataforge-io/dataforge-io-proto/src/{jvmMain => commonMain}/kotlin/ProtoMetaFormat.kt (85%)
 create mode 100644 dataforge-io/dataforge-io-proto/src/commonTest/kotlin/ProtoBufTest.kt
 delete mode 100644 dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt

diff --git a/dataforge-io/dataforge-io-proto/build.gradle.kts b/dataforge-io/dataforge-io-proto/build.gradle.kts
index 2899a2b1..a9099cb5 100644
--- a/dataforge-io/dataforge-io-proto/build.gradle.kts
+++ b/dataforge-io/dataforge-io-proto/build.gradle.kts
@@ -7,6 +7,7 @@ description = "ProtoBuf meta IO"
 
 kscience {
     jvm()
+//    js()
     dependencies {
         api(projects.dataforgeIo)
         api("com.squareup.wire:wire-runtime:4.9.9")
diff --git a/dataforge-io/dataforge-io-proto/src/commonMain/kotlin/ProtoEnvelopeFormat.kt b/dataforge-io/dataforge-io-proto/src/commonMain/kotlin/ProtoEnvelopeFormat.kt
new file mode 100644
index 00000000..d60539f5
--- /dev/null
+++ b/dataforge-io/dataforge-io-proto/src/commonMain/kotlin/ProtoEnvelopeFormat.kt
@@ -0,0 +1,32 @@
+package space.kscience.dataforge.io.proto
+
+import kotlinx.io.Sink
+import kotlinx.io.Source
+import kotlinx.io.readByteArray
+import okio.ByteString
+import okio.ByteString.Companion.toByteString
+import space.kscience.dataforge.io.Envelope
+import space.kscience.dataforge.io.EnvelopeFormat
+import space.kscience.dataforge.io.asBinary
+import space.kscience.dataforge.io.proto.ProtoEnvelope
+import space.kscience.dataforge.io.toByteArray
+import space.kscience.dataforge.meta.Meta
+
+
+public object ProtoEnvelopeFormat : EnvelopeFormat {
+    override fun readFrom(source: Source): Envelope {
+        val protoEnvelope = ProtoEnvelope.ADAPTER.decode(source.readByteArray())
+        return Envelope(
+            meta = protoEnvelope.meta?.let { ProtoMetaWrapper(it) } ?: Meta.EMPTY,
+            data = protoEnvelope.dataBytes.toByteArray().asBinary()
+        )
+    }
+
+    override fun writeTo(sink: Sink, obj: Envelope) {
+        val protoEnvelope = ProtoEnvelope(
+            obj.meta.toProto(),
+            obj.data?.toByteArray()?.toByteString() ?: ByteString.EMPTY
+        )
+        sink.write(ProtoEnvelope.ADAPTER.encode(protoEnvelope))
+    }
+}
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt b/dataforge-io/dataforge-io-proto/src/commonMain/kotlin/ProtoMetaFormat.kt
similarity index 85%
rename from dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt
rename to dataforge-io/dataforge-io-proto/src/commonMain/kotlin/ProtoMetaFormat.kt
index e4a33549..8857832a 100644
--- a/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/ProtoMetaFormat.kt
+++ b/dataforge-io/dataforge-io-proto/src/commonMain/kotlin/ProtoMetaFormat.kt
@@ -1,11 +1,10 @@
-package space.kscience.dataforge.io.proto
+package space.kscience.dataforge.io.proto
 
 import kotlinx.io.Sink
 import kotlinx.io.Source
-import kotlinx.io.asInputStream
-import kotlinx.io.asOutputStream
-import org.slf4j.LoggerFactory
+import kotlinx.io.readByteArray
 import space.kscience.dataforge.io.MetaFormat
+import space.kscience.dataforge.io.proto.ProtoMeta
 import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.names.NameToken
@@ -50,8 +49,8 @@ internal fun Meta.toProto(): ProtoMeta {
             is Long -> ProtoMeta.ProtoValue(int64Value = long)
             is Float -> ProtoMeta.ProtoValue(floatValue = float)
             else -> {
-                LoggerFactory.getLogger(ProtoMeta::class.java)
-                    .warn("Unknown number type ${value} encoded as Double")
+//                LoggerFactory.getLogger(ProtoMeta::class.java)
+//                    .warn("Unknown number type ${value} encoded as Double")
                 ProtoMeta.ProtoValue(doubleValue = double)
             }
         }
@@ -67,12 +66,11 @@ internal fun Meta.toProto(): ProtoMeta {
     )
 }
 
-
 public object ProtoMetaFormat : MetaFormat {
     override fun writeMeta(sink: Sink, meta: Meta, descriptor: MetaDescriptor?) {
-        ProtoMeta.ADAPTER.encode(sink.asOutputStream(), meta.toProto())
+        sink.write(ProtoMeta.ADAPTER.encode(meta.toProto()))
     }
 
     override fun readMeta(source: Source, descriptor: MetaDescriptor?): Meta =
-        ProtoMetaWrapper(ProtoMeta.ADAPTER.decode(source.asInputStream()))
+        ProtoMetaWrapper(ProtoMeta.ADAPTER.decode(source.readByteArray()))
 }
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto b/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
index 24d33420..c6233872 100644
--- a/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
+++ b/dataforge-io/dataforge-io-proto/src/commonMain/proto/meta.proto
@@ -27,4 +27,9 @@ message ProtoMeta {
   ProtoValue protoValue = 1;
 
   map<string, ProtoMeta> items = 2;
+}
+
+message ProtoEnvelope{
+  ProtoMeta meta = 1;
+  bytes dataBytes = 2;
 }
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-proto/src/commonTest/kotlin/ProtoBufTest.kt b/dataforge-io/dataforge-io-proto/src/commonTest/kotlin/ProtoBufTest.kt
new file mode 100644
index 00000000..f8abef24
--- /dev/null
+++ b/dataforge-io/dataforge-io-proto/src/commonTest/kotlin/ProtoBufTest.kt
@@ -0,0 +1,83 @@
+package space.kscience.dataforge.io.proto
+
+import kotlinx.io.writeString
+import space.kscience.dataforge.io.Envelope
+import space.kscience.dataforge.io.toByteArray
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.asValue
+import space.kscience.dataforge.meta.get
+import kotlin.test.Test
+import kotlin.test.assertContentEquals
+import kotlin.test.assertEquals
+
+class ProtoBufTest {
+
+    @Test
+    fun testProtoBufMetaFormat(){
+        val meta = Meta {
+            "a" put 22
+            "node" put {
+                "b" put "DDD"
+                "c" put 11.1
+                "d" put {
+                    "d1" put {
+                        "d11" put "aaa"
+                        "d12" put "bbb"
+                    }
+                    "d2" put 2
+                }
+                "array" put doubleArrayOf(1.0, 2.0, 3.0)
+                "array2d" put listOf(
+                    doubleArrayOf(1.0, 2.0, 3.0).asValue(),
+                    doubleArrayOf(1.0, 2.0, 3.0).asValue()
+                ).asValue()
+            }
+        }
+        val buffer = kotlinx.io.Buffer()
+        ProtoMetaFormat.writeTo(buffer,meta)
+        val result = ProtoMetaFormat.readFrom(buffer)
+
+//        println(result["a"]?.value)
+
+        meta.items.keys.forEach {
+            assertEquals(meta[it],result[it],"${meta[it]} != ${result[it]}")
+        }
+
+        assertEquals(meta, result)
+    }
+
+    @Test
+    fun testProtoBufEnvelopeFormat(){
+        val envelope = Envelope{
+            meta {
+                "a" put 22
+                "node" put {
+                    "b" put "DDD"
+                    "c" put 11.1
+                    "d" put {
+                        "d1" put {
+                            "d11" put "aaa"
+                            "d12" put "bbb"
+                        }
+                        "d2" put 2
+                    }
+                    "array" put doubleArrayOf(1.0, 2.0, 3.0)
+                    "array2d" put listOf(
+                        doubleArrayOf(1.0, 2.0, 3.0).asValue(),
+                        doubleArrayOf(1.0, 2.0, 3.0).asValue()
+                    ).asValue()
+                }
+            }
+            data {
+                writeString("Hello world!")
+            }
+        }
+
+        val buffer = kotlinx.io.Buffer()
+        ProtoEnvelopeFormat.writeTo(buffer,envelope)
+        val result = ProtoEnvelopeFormat.readFrom(buffer)
+
+        assertEquals(envelope.meta, result.meta)
+        assertContentEquals(envelope.data?.toByteArray(), result.data?.toByteArray())
+    }
+}
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt b/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt
deleted file mode 100644
index 5757092c..00000000
--- a/dataforge-io/dataforge-io-proto/src/jvmTest/kotlin/ProtoBufTest.kt
+++ /dev/null
@@ -1,44 +0,0 @@
-package space.kscience.dataforge.io.proto
-
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.asValue
-import space.kscience.dataforge.meta.get
-import kotlin.test.Test
-import kotlin.test.assertEquals
-
-class ProtoBufTest {
-
-    @Test
-    fun testProtoBufMetaFormat(){
-        val meta = Meta {
-            "a" put 22
-            "node" put {
-                "b" put "DDD"
-                "c" put 11.1
-                "d" put {
-                    "d1" put {
-                        "d11" put "aaa"
-                        "d12" put "bbb"
-                    }
-                    "d2" put 2
-                }
-                "array" put doubleArrayOf(1.0, 2.0, 3.0)
-                "array2d" put listOf(
-                    doubleArrayOf(1.0, 2.0, 3.0).asValue(),
-                    doubleArrayOf(1.0, 2.0, 3.0).asValue()
-                ).asValue()
-            }
-        }
-        val buffer = kotlinx.io.Buffer()
-        ProtoMetaFormat.writeTo(buffer,meta)
-        val result = ProtoMetaFormat.readFrom(buffer)
-
-//        println(result["a"]?.value)
-
-        meta.items.keys.forEach {
-            assertEquals(meta[it],result[it],"${meta[it]} != ${result[it]}")
-        }
-
-        assertEquals(meta, result)
-    }
-}
\ No newline at end of file
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Envelope.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Envelope.kt
index 728a0e69..43df520e 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Envelope.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Envelope.kt
@@ -2,7 +2,6 @@ package space.kscience.dataforge.io
 
 import space.kscience.dataforge.meta.Laminate
 import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.string
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
@@ -34,7 +33,9 @@ public interface Envelope {
     }
 }
 
-internal class SimpleEnvelope(override val meta: Meta, override val data: Binary?) : Envelope
+internal class SimpleEnvelope(override val meta: Meta, override val data: Binary?) : Envelope{
+    override fun toString(): String  = "Envelope(meta=$meta, data=$data)"
+}
 
 public fun Envelope(meta: Meta, data: Binary?): Envelope = SimpleEnvelope(meta, data)
 

From 1f1f894e0d87da2b9b8f3b65b5a4d451a402169a Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 22 Jul 2024 10:01:48 +0300
Subject: [PATCH 07/29] Documentation update

---
 dataforge-io/build.gradle.kts                 |  2 +-
 .../jvmMain/kotlin/performanceComparison.kt   | 51 +++++++++++++++++++
 .../meta/{MetaRef.kt => MetaSpec.kt}          | 29 ++++++++---
 .../dataforge/meta/ValueSerializer.kt         |  3 ++
 .../kscience/dataforge/meta/exoticValues.kt   | 10 +++-
 5 files changed, 87 insertions(+), 8 deletions(-)
 create mode 100644 dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/performanceComparison.kt
 rename dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/{MetaRef.kt => MetaSpec.kt} (73%)

diff --git a/dataforge-io/build.gradle.kts b/dataforge-io/build.gradle.kts
index 5be52e61..16a6fbfc 100644
--- a/dataforge-io/build.gradle.kts
+++ b/dataforge-io/build.gradle.kts
@@ -4,7 +4,7 @@ plugins {
 
 description = "IO module"
 
-val ioVersion = "0.3.1"
+val ioVersion = "0.4.0"
 
 kscience {
     jvm()
diff --git a/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/performanceComparison.kt b/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/performanceComparison.kt
new file mode 100644
index 00000000..74939882
--- /dev/null
+++ b/dataforge-io/dataforge-io-proto/src/jvmMain/kotlin/performanceComparison.kt
@@ -0,0 +1,51 @@
+package space.kscience.dataforge.io.proto
+
+import kotlinx.io.writeString
+import space.kscience.dataforge.io.Envelope
+import space.kscience.dataforge.meta.asValue
+import kotlin.concurrent.thread
+import kotlin.time.measureTime
+
+public fun main() {
+    val envelope = Envelope {
+        meta {
+            "a" put 22
+            "node" put {
+                "b" put "DDD"
+                "c" put 11.1
+                "d" put {
+                    "d1" put {
+                        "d11" put "aaa"
+                        "d12" put "bbb"
+                    }
+                    "d2" put 2
+                }
+                "array" put doubleArrayOf(1.0, 2.0, 3.0)
+                "array2d" put listOf(
+                    doubleArrayOf(1.0, 2.0, 3.0).asValue(),
+                    doubleArrayOf(1.0, 2.0, 3.0).asValue()
+                ).asValue()
+            }
+        }
+        data {
+            writeString("Hello world!")
+        }
+    }
+
+    val format = ProtoEnvelopeFormat
+
+    measureTime {
+        val threads = List(100) {
+            thread {
+                repeat(100000) {
+                    val buffer = kotlinx.io.Buffer()
+                    format.writeTo(buffer, envelope)
+//                    println(buffer.size)
+                    val r = format.readFrom(buffer)
+                }
+            }
+        }
+
+        threads.forEach { it.join() }
+    }.also { println(it) }
+}
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
similarity index 73%
rename from dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
rename to dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
index 413fe404..7af7594c 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
@@ -11,7 +11,7 @@ import kotlin.properties.ReadOnlyProperty
 
 
 /**
- * A reference to a read-only value of type [T] inside [MetaProvider]
+ * A reference to a read-only value of type [T] inside [MetaProvider] or writable value in [MutableMetaProvider]
  */
 @DFExperimental
 public data class MetaRef<T>(
@@ -20,21 +20,36 @@ public data class MetaRef<T>(
     override val descriptor: MetaDescriptor? = converter.descriptor,
 ) : Described
 
+/**
+ * Get a value from provider by [ref] or return null if node with given name is missing
+ */
 @DFExperimental
 public operator fun <T> MetaProvider.get(ref: MetaRef<T>): T? = get(ref.name)?.let { ref.converter.readOrNull(it) }
 
+/**
+ * Set a value in a mutable provider by [ref]
+ */
 @DFExperimental
 public operator fun <T> MutableMetaProvider.set(ref: MetaRef<T>, value: T) {
     set(ref.name, ref.converter.convert(value))
 }
 
+/**
+ * Remove a node corresponding to [ref] from a mutable provider if it exists
+ */
 @DFExperimental
-public class MetaSpec(
-    private val configuration: MetaDescriptorBuilder.() -> Unit = {},
-) : Described {
+public fun MutableMetaProvider.remove(ref: MetaRef<*>) {
+    remove(ref.name)
+}
+
+/**
+ * A base class for [Meta] specification that stores references to meta nodes
+ */
+@DFExperimental
+public abstract class MetaSpec : Described {
     private val refs: MutableList<MetaRef<*>> = mutableListOf()
 
-    private fun registerRef(ref: MetaRef<*>) {
+    protected fun registerRef(ref: MetaRef<*>) {
         refs.add(ref)
     }
 
@@ -51,6 +66,8 @@ public class MetaSpec(
             }
         }
 
+    protected open fun MetaDescriptorBuilder.buildDescriptor(): Unit = Unit
+
     override val descriptor: MetaDescriptor by lazy {
         MetaDescriptor {
             refs.forEach { ref ->
@@ -58,7 +75,7 @@ public class MetaSpec(
                     node(ref.name, ref.descriptor)
                 }
             }
-            configuration()
+            buildDescriptor()
         }
     }
 }
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ValueSerializer.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ValueSerializer.kt
index dc13ef4c..0379187f 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ValueSerializer.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ValueSerializer.kt
@@ -8,6 +8,9 @@ import kotlinx.serialization.descriptors.element
 import kotlinx.serialization.encoding.Decoder
 import kotlinx.serialization.encoding.Encoder
 
+/**
+ * A serializer for [Value]
+ */
 public object ValueSerializer : KSerializer<Value> {
     private val listSerializer by lazy { ListSerializer(ValueSerializer) }
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt
index eb39e985..91811390 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt
@@ -21,6 +21,9 @@ public class LazyParsedValue(public val string: String) : Value {
     override fun hashCode(): Int = string.hashCode()
 }
 
+/**
+ * Read this string as lazily parsed value
+ */
 public fun String.lazyParseValue(): LazyParsedValue = LazyParsedValue(this)
 
 /**
@@ -47,6 +50,9 @@ public class DoubleArrayValue(override val value: DoubleArray) : Value, Iterable
     override fun iterator(): Iterator<Double> = value.iterator()
 }
 
+/**
+ * A zero-copy wrapping of this [DoubleArray] in a [Value]
+ */
 public fun DoubleArray.asValue(): Value = if (isEmpty()) Null else DoubleArrayValue(this)
 
 public val Value.doubleArray: DoubleArray
@@ -75,7 +81,9 @@ public fun MutableMetaProvider.doubleArray(
     reader = { it?.doubleArray ?: doubleArrayOf(*default) },
 )
 
-
+/**
+ * A [Value] wrapping a [ByteArray]
+ */
 public class ByteArrayValue(override val value: ByteArray) : Value, Iterable<Byte> {
     override val type: ValueType get() = ValueType.LIST
     override val list: List<Value> get() = value.map { NumberValue(it) }

From b4ebdfe089f13e592cb022e994955c00df4b1f37 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 22 Jul 2024 11:34:29 +0300
Subject: [PATCH 08/29] Fix json to meta mapping

---
 .../space/kscience/dataforge/meta/JsonMeta.kt    | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
index 39a98927..1817ba64 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
@@ -34,9 +34,9 @@ private fun Meta.toJsonWithIndex(descriptor: MetaDescriptor?, index: String?): J
         val childDescriptor = descriptor?.nodes?.get(body)
         if (list.size == 1) {
             val (token, element) = list.first()
-                //do not add an empty element
-                val child: JsonElement = element.toJsonWithIndex(childDescriptor, token.index)
-            if(token.index == null) {
+            //do not add an empty element
+            val child: JsonElement = element.toJsonWithIndex(childDescriptor, token.index)
+            if (token.index == null) {
                 body to child
             } else {
                 body to JsonArray(listOf(child))
@@ -106,7 +106,7 @@ private fun JsonElement.toValueOrNull(descriptor: MetaDescriptor?): Value? = whe
 private fun MutableMap<NameToken, SealedMeta>.addJsonElement(
     key: String,
     element: JsonElement,
-    descriptor: MetaDescriptor?
+    descriptor: MetaDescriptor?,
 ) {
     when (element) {
         is JsonPrimitive -> put(NameToken(key), Meta(element.toValue(descriptor)))
@@ -136,12 +136,14 @@ private fun MutableMap<NameToken, SealedMeta>.addJsonElement(
                                 Meta(childValue)
                             }
                         }
+
                         is JsonPrimitive -> Meta(childElement.toValue(null))
                     }
                     put(NameToken(key, index), child)
                 }
             }
         }
+
         is JsonObject -> {
             val indexKey = descriptor?.indexKey ?: Meta.INDEX_KEY
             val index = element[indexKey]?.jsonPrimitive?.content
@@ -163,14 +165,14 @@ public fun JsonObject.toMeta(descriptor: MetaDescriptor? = null): SealedMeta {
 public fun JsonElement.toMeta(descriptor: MetaDescriptor? = null): SealedMeta = when (this) {
     is JsonPrimitive -> Meta(toValue(descriptor))
     is JsonObject -> toMeta(descriptor)
-    is JsonArray -> if (any { it is JsonObject }) {
+    is JsonArray -> if (all { it is JsonPrimitive }) {
+        Meta(map { it.toValueOrNull(descriptor) ?: error("Unreachable: should not contain objects") }.asValue())
+    } else {
         SealedMeta(null,
             linkedMapOf<NameToken, SealedMeta>().apply {
                 addJsonElement(Meta.JSON_ARRAY_KEY, this@toMeta, null)
             }
         )
-    } else{
-        Meta(map { it.toValueOrNull(descriptor) ?: kotlin.error("Unreachable: should not contain objects") }.asValue())
     }
 }
 

From 523db20e4a0e47d4a78b414a55811010fdb8d005 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 22 Jul 2024 11:45:50 +0300
Subject: [PATCH 09/29] Fix flaky coroutines tests

---
 .../kscience/dataforge/data/ActionsTest.kt      | 17 ++++++++++++++---
 1 file changed, 14 insertions(+), 3 deletions(-)

diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index d608cbd3..ee4b05be 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -1,6 +1,11 @@
 package space.kscience.dataforge.data
 
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.ExperimentalCoroutinesApi
+import kotlinx.coroutines.delay
+import kotlinx.coroutines.test.advanceUntilIdle
 import kotlinx.coroutines.test.runTest
+import kotlinx.coroutines.withContext
 import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.actions.invoke
 import space.kscience.dataforge.actions.mapping
@@ -9,7 +14,7 @@ import kotlin.test.Test
 import kotlin.test.assertEquals
 import kotlin.time.Duration.Companion.milliseconds
 
-@OptIn(DFExperimental::class)
+@OptIn(DFExperimental::class, ExperimentalCoroutinesApi::class)
 internal class ActionsTest {
     @Test
     fun testStaticMapAction() = runTest(timeout = 500.milliseconds) {
@@ -24,6 +29,8 @@ internal class ActionsTest {
         }
 
         val result = plusOne(data)
+
+        advanceUntilIdle()
         assertEquals(2, result["1"]?.await())
     }
 
@@ -38,8 +45,12 @@ internal class ActionsTest {
         val result = plusOne(source)
 
 
-        repeat(10) {
-            source.updateValue(it.toString(), it)
+        withContext(Dispatchers.Default) {
+            repeat(10) {
+                source.updateValue(it.toString(), it)
+            }
+
+            delay(50)
         }
 
 //        result.updates.take(10).onEach { println(it.name) }.collect()

From 5196d85da1cd87996a1232683855a3119f8318c1 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 22 Jul 2024 11:48:39 +0300
Subject: [PATCH 10/29] Add MetaConverter for DoubleArray and ByteArray

---
 .../kscience/dataforge/meta/exoticValues.kt   | 19 ++++++++++++++++++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt
index 91811390..17436990 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/exoticValues.kt
@@ -50,6 +50,7 @@ public class DoubleArrayValue(override val value: DoubleArray) : Value, Iterable
     override fun iterator(): Iterator<Double> = value.iterator()
 }
 
+
 /**
  * A zero-copy wrapping of this [DoubleArray] in a [Value]
  */
@@ -81,6 +82,14 @@ public fun MutableMetaProvider.doubleArray(
     reader = { it?.doubleArray ?: doubleArrayOf(*default) },
 )
 
+private object DoubleArrayMetaConverter : MetaConverter<DoubleArray> {
+    override fun readOrNull(source: Meta): DoubleArray? = source.doubleArray
+
+    override fun convert(obj: DoubleArray): Meta = Meta(obj.asValue())
+}
+
+public val MetaConverter.Companion.doubleArray: MetaConverter<DoubleArray> get() = DoubleArrayMetaConverter
+
 /**
  * A [Value] wrapping a [ByteArray]
  */
@@ -131,4 +140,12 @@ public fun MutableMetaProvider.byteArray(
     key,
     writer = { ByteArrayValue(it) },
     reader = { it?.byteArray ?: byteArrayOf(*default) },
-)
\ No newline at end of file
+)
+
+private object ByteArrayMetaConverter : MetaConverter<ByteArray> {
+    override fun readOrNull(source: Meta): ByteArray? = source.byteArray
+
+    override fun convert(obj: ByteArray): Meta = Meta(obj.asValue())
+}
+
+public val MetaConverter.Companion.byteArray: MetaConverter<ByteArray> get() = ByteArrayMetaConverter
\ No newline at end of file

From 3807960cda2c2d7223c4057d581b215fd707e785 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 22 Jul 2024 12:08:45 +0300
Subject: [PATCH 11/29] Implement MetaRef mechanics and tests

---
 .../space/kscience/dataforge/meta/MetaRef.kt  | 202 ++++++++++++++++++
 .../kscience/dataforge/meta/MetaRefTest.kt    |  35 +++
 2 files changed, 237 insertions(+)
 create mode 100644 dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
 create mode 100644 dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaRefTest.kt

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
new file mode 100644
index 00000000..ef265f0d
--- /dev/null
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
@@ -0,0 +1,202 @@
+package space.kscience.dataforge.meta
+
+import kotlinx.serialization.json.Json
+import space.kscience.dataforge.meta.descriptors.Described
+import space.kscience.dataforge.meta.descriptors.MetaDescriptor
+import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
+import space.kscience.dataforge.misc.DFExperimental
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.asName
+import kotlin.properties.PropertyDelegateProvider
+import kotlin.properties.ReadOnlyProperty
+
+
+/**
+ * A reference to a read-only value of type [T] inside a [MetaProvider] or a writable value in a [MutableMetaProvider]
+ */
+@DFExperimental
+public data class MetaRef<T>(
+    public val name: Name,
+    public val converter: MetaConverter<T>,
+    override val descriptor: MetaDescriptor? = converter.descriptor,
+) : Described
+
+/**
+ * Get a value from provider by [ref] or return null if node with given name is missing
+ */
+@DFExperimental
+public operator fun <T> MetaProvider.get(ref: MetaRef<T>): T? = get(ref.name)?.let { ref.converter.readOrNull(it) }
+
+/**
+ * Set a value in a mutable provider by [ref]
+ */
+@DFExperimental
+public operator fun <T> MutableMetaProvider.set(ref: MetaRef<T>, value: T) {
+    set(ref.name, ref.converter.convert(value))
+}
+
+/**
+ * Remove a node corresponding to [ref] from a mutable provider if it exists
+ */
+@DFExperimental
+public fun MutableMetaProvider.remove(ref: MetaRef<*>) {
+    remove(ref.name)
+}
+
+/**
+ * Base storage of [MetaRef]
+ */
+@OptIn(DFExperimental::class)
+public interface MetaRefStore : Described {
+    public val refs: List<MetaRef<*>>
+}
+
+/**
+ * A base class for [Meta] specification that stores references to meta nodes.
+ */
+@DFExperimental
+public abstract class MetaSpec : MetaRefStore {
+    private val _refs: MutableList<MetaRef<*>> = mutableListOf()
+    override val refs: List<MetaRef<*>> get() = _refs
+
+    /**
+     * Register a ref in this specification
+     */
+    protected fun registerRef(ref: MetaRef<*>) {
+        _refs.add(ref)
+    }
+
+    /**
+     * Create and register a ref by property name and provided converter.
+     * By default, uses descriptor from the converter
+     */
+    public fun <T> item(
+        converter: MetaConverter<T>,
+        key: Name? = null,
+        descriptor: MetaDescriptor? = converter.descriptor,
+    ): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<T>>> =
+        PropertyDelegateProvider { _, property ->
+            val ref = MetaRef(key ?: property.name.asName(), converter, descriptor)
+            registerRef(ref)
+            ReadOnlyProperty { _, _ ->
+                ref
+            }
+        }
+
+    /**
+     * Override to provide custom [MetaDescriptor]
+     */
+    protected open fun MetaDescriptorBuilder.buildDescriptor(): Unit = Unit
+
+    override val descriptor: MetaDescriptor by lazy {
+        MetaDescriptor {
+            refs.forEach { ref ->
+                ref.descriptor?.let {
+                    node(ref.name, ref.descriptor)
+                }
+            }
+            buildDescriptor()
+        }
+    }
+}
+
+/**
+ * Register an item using a [descriptorBuilder] to customize descriptor
+ */
+@DFExperimental
+public fun <T> MetaSpec.item(
+    converter: MetaConverter<T>,
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<T>>> = item(converter, key, MetaDescriptor {
+    converter.descriptor?.let { from(it) }
+    descriptorBuilder()
+})
+
+//utility methods to add different nodes
+
+@DFExperimental
+public fun MetaSpec.metaItem(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<Meta>>> =
+    item(MetaConverter.meta, key, descriptorBuilder)
+
+@DFExperimental
+public fun MetaSpec.string(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<String>>> =
+    item(MetaConverter.string, key, descriptorBuilder)
+
+@DFExperimental
+public fun MetaSpec.boolean(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<Boolean>>> =
+    item(MetaConverter.boolean, key, descriptorBuilder)
+
+@DFExperimental
+public fun MetaSpec.stringList(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<List<String>>>> =
+    item(MetaConverter.stringList, key, descriptorBuilder)
+
+@DFExperimental
+public fun MetaSpec.float(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<Float>>> =
+    item(MetaConverter.float, key, descriptorBuilder)
+
+@DFExperimental
+public fun MetaSpec.double(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<Double>>> =
+    item(MetaConverter.double, key, descriptorBuilder)
+
+@DFExperimental
+public fun MetaSpec.int(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<Int>>> =
+    item(MetaConverter.int, key, descriptorBuilder)
+
+@DFExperimental
+public fun MetaSpec.long(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<Long>>> =
+    item(MetaConverter.long, key, descriptorBuilder)
+
+
+@DFExperimental
+public fun MetaSpec.doubleArray(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<DoubleArray>>> =
+    item(MetaConverter.doubleArray, key, descriptorBuilder)
+
+@DFExperimental
+public fun MetaSpec.byteArray(
+    key: Name? = null,
+    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<ByteArray>>> =
+    item(MetaConverter.byteArray, key, descriptorBuilder)
+
+@DFExperimental
+public inline fun <reified E : Enum<E>> MetaSpec.enum(
+    key: Name? = null,
+    noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<E>>> =
+    item(MetaConverter.enum(), key, descriptorBuilder)
+
+@DFExperimental
+public inline fun <reified T> MetaSpec.serializable(
+    key: Name? = null,
+    jsonEncoder: Json = Json,
+    noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<T>>> =
+    item(MetaConverter.serializable(jsonEncoder = jsonEncoder), key, descriptorBuilder)
\ No newline at end of file
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaRefTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaRefTest.kt
new file mode 100644
index 00000000..a93841d9
--- /dev/null
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaRefTest.kt
@@ -0,0 +1,35 @@
+package space.kscience.dataforge.meta
+
+import kotlinx.serialization.Serializable
+import space.kscience.dataforge.misc.DFExperimental
+import kotlin.test.Test
+import kotlin.test.assertEquals
+
+@DFExperimental
+internal class MetaRefTest {
+
+    @Serializable
+    data class XY(val x: Double, val y: Double)
+
+    object TestMetaSpec : MetaSpec() {
+        val integer by int { description = "Integer value" }
+        val string by string { description = "String value" }
+        val custom by item(MetaConverter.serializable<XY>()) { description = "custom value" }
+    }
+
+    @Test
+    fun specWriteRead()  = with(TestMetaSpec){
+        val meta = MutableMeta()
+
+        meta[integer] = 22
+        meta[string] = "33"
+        val xy = XY(33.0, -33.0)
+        meta[custom] = xy
+
+        val sealed = meta.seal()
+
+        assertEquals(22, sealed[integer])
+        assertEquals("33", sealed[string])
+        assertEquals(xy, sealed[custom])
+    }
+}
\ No newline at end of file

From 8723f49efde5a699ea4b55c59abbbaef97044f9f Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 22 Jul 2024 12:08:48 +0300
Subject: [PATCH 12/29] Remove obsolete MetaSpec.kt superseded by MetaRef

---
 .../space/kscience/dataforge/meta/MetaSpec.kt | 81 -------------------
 1 file changed, 81 deletions(-)
 delete mode 100644 dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
deleted file mode 100644
index 7af7594c..00000000
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
+++ /dev/null
@@ -1,81 +0,0 @@
-package space.kscience.dataforge.meta
-
-import space.kscience.dataforge.meta.descriptors.Described
-import space.kscience.dataforge.meta.descriptors.MetaDescriptor
-import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.asName
-import kotlin.properties.PropertyDelegateProvider
-import kotlin.properties.ReadOnlyProperty
-
-
-/**
- * A reference to a read-only value of type [T] inside [MetaProvider] or writable value in [MutableMetaProvider]
- */
-@DFExperimental
-public data class MetaRef<T>(
-    public val name: Name,
-    public val converter: MetaConverter<T>,
-    override val descriptor: MetaDescriptor? = converter.descriptor,
-) : Described
-
-/**
- * Get a value from provider by [ref] or return null if node with given name is missing
- */
-@DFExperimental
-public operator fun <T> MetaProvider.get(ref: MetaRef<T>): T? = get(ref.name)?.let { ref.converter.readOrNull(it) }
-
-/**
- * Set a value in a mutable provider by [ref]
- */
-@DFExperimental
-public operator fun <T> MutableMetaProvider.set(ref: MetaRef<T>, value: T) {
-    set(ref.name, ref.converter.convert(value))
-}
-
-/**
- * Remove a node corresponding to [ref] from a mutable provider if it exists
- */
-@DFExperimental
-public fun MutableMetaProvider.remove(ref: MetaRef<*>) {
-    remove(ref.name)
-}
-
-/**
- * A base class for [Meta] specification that stores references to meta nodes
- */
-@DFExperimental
-public abstract class MetaSpec : Described {
-    private val refs: MutableList<MetaRef<*>> = mutableListOf()
-
-    protected fun registerRef(ref: MetaRef<*>) {
-        refs.add(ref)
-    }
-
-    public fun <T> item(
-        converter: MetaConverter<T>,
-        descriptor: MetaDescriptor? = converter.descriptor,
-        key: Name? = null,
-    ): PropertyDelegateProvider<MetaSpec, ReadOnlyProperty<MetaSpec, MetaRef<T>>> =
-        PropertyDelegateProvider { _, property ->
-            val ref = MetaRef(key ?: property.name.asName(), converter, descriptor)
-            registerRef(ref)
-            ReadOnlyProperty { _, _ ->
-                ref
-            }
-        }
-
-    protected open fun MetaDescriptorBuilder.buildDescriptor(): Unit = Unit
-
-    override val descriptor: MetaDescriptor by lazy {
-        MetaDescriptor {
-            refs.forEach { ref ->
-                ref.descriptor?.let {
-                    node(ref.name, ref.descriptor)
-                }
-            }
-            buildDescriptor()
-        }
-    }
-}
\ No newline at end of file

From 904262a22a35c8ba8bd58db99c4bc6429433798d Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Mon, 22 Jul 2024 12:20:05 +0300
Subject: [PATCH 13/29] Add observability via MetaRef

---
 .../space/kscience/dataforge/meta/MetaRef.kt  | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
index ef265f0d..2e6f3452 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaRef.kt
@@ -7,6 +7,7 @@ import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
+import space.kscience.dataforge.names.startsWith
 import kotlin.properties.PropertyDelegateProvider
 import kotlin.properties.ReadOnlyProperty
 
@@ -35,6 +36,24 @@ public operator fun <T> MutableMetaProvider.set(ref: MetaRef<T>, value: T) {
     set(ref.name, ref.converter.convert(value))
 }
 
+/**
+ * Observe changes to a specific property via given [ref].
+ *
+ * This listener should be removed in the same way as [ObservableMeta.onChange].
+ *
+ * @param callback an action to be performed on each change of the item. Null means that the item is not present or malformed.
+ */
+@DFExperimental
+public fun <T: Any> ObservableMeta.onValueChange(owner: Any?, ref: MetaRef<T>, callback: (T?) -> Unit) {
+    onChange(owner) { name ->
+        if (name.startsWith(ref.name)) {
+            get(name)?.let { value ->
+                callback(ref.converter.readOrNull(value))
+            }
+        }
+    }
+}
+
 /**
  * Remove a node corresponding to [ref] from a mutable provider if it exists
  */

From e5902c0bfed8a33c6527918a3e143478e5e5c8dc Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 3 Aug 2024 10:04:24 +0300
Subject: [PATCH 14/29] Coroutine exception logging in context

---
 CHANGELOG.md                                                 | 1 +
 .../kotlin/space/kscience/dataforge/context/Context.kt       | 5 ++++-
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f7a2e363..6c856ac5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,7 @@
 ## Unreleased
 
 ### Added
+- Coroutine exception logging in context
 
 ### Changed
 - Simplify inheritance logic in `MutableTypedMeta`
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/Context.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/Context.kt
index c614598c..457c39fc 100644
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/Context.kt
+++ b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/context/Context.kt
@@ -1,5 +1,6 @@
 package space.kscience.dataforge.context
 
+import kotlinx.coroutines.CoroutineExceptionHandler
 import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.Job
 import kotlinx.coroutines.SupervisorJob
@@ -67,7 +68,9 @@ public open class Context internal constructor(
 
     override val coroutineContext: CoroutineContext by lazy {
         (parent ?: Global).coroutineContext.let { parenContext ->
-            parenContext + coroutineContext + SupervisorJob(parenContext[Job])
+            parenContext + coroutineContext + SupervisorJob(parenContext[Job]) + CoroutineExceptionHandler { _, throwable ->
+                logger.error(throwable) { "Exception in context $name" }
+            }
         }
     }
 

From de9ca6e0098ee760a685872b4924e7be9074fcd9 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 10 Sep 2024 07:47:53 +0300
Subject: [PATCH 15/29] ObservableMutableMetaSerializer

---
 CHANGELOG.md                                    |  1 +
 .../kscience/dataforge/meta/MetaSerializer.kt   | 17 +++++++++++++++++
 2 files changed, 18 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6c856ac5..82f0a8fb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,7 @@
 
 ### Added
 - Coroutine exception logging in context
+- ObservableMutableMetaSerializer
 
 ### Changed
 - Simplify inheritance logic in `MutableTypedMeta`
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSerializer.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSerializer.kt
index 557cd5ef..2ccfd520 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSerializer.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSerializer.kt
@@ -45,4 +45,21 @@ public object MutableMetaSerializer : KSerializer<MutableMeta> {
     override fun serialize(encoder: Encoder, value: MutableMeta) {
         encoder.encodeSerializableValue(MetaSerializer, value)
     }
+}
+
+/**
+ * A serializer for [ObservableMutableMeta]
+ */
+public object ObservableMutableMetaSerializer : KSerializer<ObservableMutableMeta> {
+
+    override val descriptor: SerialDescriptor = MetaSerializer.descriptor
+
+    override fun deserialize(decoder: Decoder): ObservableMutableMeta {
+        val meta = decoder.decodeSerializableValue(MetaSerializer)
+        return ((meta as? MutableMeta) ?: meta.toMutableMeta()).asObservable()
+    }
+
+    override fun serialize(encoder: Encoder, value: ObservableMutableMeta) {
+        encoder.encodeSerializableValue(MetaSerializer, value)
+    }
 }
\ No newline at end of file

From 8f966b6c0c35b165ba51538ddb957aeafd9c5531 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 10 Sep 2024 07:48:17 +0300
Subject: [PATCH 16/29] Fix replacement for readable delegate in Meta

---
 .../kotlin/space/kscience/dataforge/meta/MetaDelegate.kt        | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
index 2fa0f528..26044f18 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
@@ -57,7 +57,7 @@ public fun <T> MetaProvider.readable(
 /**
  * Use [reader] to read the Meta node
  */
-@Deprecated("Replace with reading", ReplaceWith("reading(metaReader, key)"))
+@Deprecated("Replace with readable", ReplaceWith("readable(metaReader, key)"))
 public fun <T> MetaProvider.spec(
     reader: MetaReader<T>,
     key: Name? = null,

From 088ed64f4af833aa350baae57be37f20b215c3c6 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 17 Sep 2024 09:56:19 +0300
Subject: [PATCH 17/29] 0.9.1

---
 build.gradle.kts                              |  2 +-
 dataforge-context/api/dataforge-context.api   |  1 +
 .../space/kscience/dataforge/provider/Path.kt | 23 ++++++++++
 .../kscience/dataforge/data/ActionsTest.kt    | 35 +++++++-------
 dataforge-meta/api/dataforge-meta.api         | 46 ++++++++++++++-----
 .../kscience/dataforge/meta/ObservableMeta.kt |  3 ++
 6 files changed, 81 insertions(+), 29 deletions(-)

diff --git a/build.gradle.kts b/build.gradle.kts
index 9f9632de..98fca6b3 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -9,7 +9,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.9.1-dev-1"
+    version = "0.9.1"
 }
 
 subprojects {
diff --git a/dataforge-context/api/dataforge-context.api b/dataforge-context/api/dataforge-context.api
index 67cd0cba..390123b8 100644
--- a/dataforge-context/api/dataforge-context.api
+++ b/dataforge-context/api/dataforge-context.api
@@ -282,6 +282,7 @@ public final class space/kscience/dataforge/provider/Path : java/lang/Iterable,
 
 public final class space/kscience/dataforge/provider/Path$Companion {
 	public final fun parse-X5wN5Vs (Ljava/lang/String;)Ljava/util/List;
+	public final fun serializer ()Lkotlinx/serialization/KSerializer;
 }
 
 public final class space/kscience/dataforge/provider/PathKt {
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/provider/Path.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/provider/Path.kt
index 9ecb68c2..3bfa83e1 100644
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/provider/Path.kt
+++ b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/provider/Path.kt
@@ -15,15 +15,37 @@
  */
 package space.kscience.dataforge.provider
 
+import kotlinx.serialization.KSerializer
+import kotlinx.serialization.Serializable
+import kotlinx.serialization.builtins.serializer
+import kotlinx.serialization.descriptors.SerialDescriptor
+import kotlinx.serialization.encoding.Decoder
+import kotlinx.serialization.encoding.Encoder
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.parseAsName
 import kotlin.jvm.JvmInline
 
+private object PathSerializer : KSerializer<Path> {
+
+    override val descriptor: SerialDescriptor
+        get() = String.serializer().descriptor
+
+    override fun serialize(encoder: Encoder, value: Path) {
+        encoder.encodeString(value.toString())
+    }
+
+    override fun deserialize(decoder: Decoder): Path {
+        return Path.parse(decoder.decodeString())
+    }
+}
+
+
 /**
  * Path interface.
  *
  */
 @JvmInline
+@Serializable(PathSerializer::class)
 public value class Path(public val tokens: List<PathToken>) : Iterable<PathToken> {
 
     override fun iterator(): Iterator<PathToken> = tokens.iterator()
@@ -33,6 +55,7 @@ public value class Path(public val tokens: List<PathToken>) : Iterable<PathToken
     public companion object {
         public const val PATH_SEGMENT_SEPARATOR: String = "/"
 
+
         public fun parse(path: String): Path = Path(path.split(PATH_SEGMENT_SEPARATOR).map { PathToken.parse(it) })
     }
 }
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index ee4b05be..2e790caa 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -18,20 +18,22 @@ import kotlin.time.Duration.Companion.milliseconds
 internal class ActionsTest {
     @Test
     fun testStaticMapAction() = runTest(timeout = 500.milliseconds) {
-        val plusOne = Action.mapping<Int, Int> {
-            result { it + 1 }
-        }
-
-        val data: DataTree<Int> = DataTree {
-            repeat(10) {
-                putValue(it.toString(), it)
+        withContext(Dispatchers.Default) {
+            val plusOne = Action.mapping<Int, Int> {
+                result { it + 1 }
             }
+
+            val data: DataTree<Int> = DataTree {
+                repeat(10) {
+                    putValue(it.toString(), it)
+                }
+            }
+
+            val result = plusOne(data)
+
+            advanceUntilIdle()
+            assertEquals(2, result["1"]?.await())
         }
-
-        val result = plusOne(data)
-
-        advanceUntilIdle()
-        assertEquals(2, result["1"]?.await())
     }
 
     @Test
@@ -45,14 +47,13 @@ internal class ActionsTest {
         val result = plusOne(source)
 
 
-        withContext(Dispatchers.Default) {
-            repeat(10) {
-                source.updateValue(it.toString(), it)
-            }
 
-            delay(50)
+        repeat(10) {
+            source.updateValue(it.toString(), it)
         }
 
+        delay(10)
+
 //        result.updates.take(10).onEach { println(it.name) }.collect()
 
         assertEquals(2, result["1"]?.await())
diff --git a/dataforge-meta/api/dataforge-meta.api b/dataforge-meta/api/dataforge-meta.api
index 0bb4a908..5bf71f94 100644
--- a/dataforge-meta/api/dataforge-meta.api
+++ b/dataforge-meta/api/dataforge-meta.api
@@ -53,8 +53,10 @@ public final class space/kscience/dataforge/meta/ExoticValuesKt {
 	public static synthetic fun doubleArray$default (Lspace/kscience/dataforge/meta/MetaProvider;[DLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun doubleArray$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;[DLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun getByteArray (Lspace/kscience/dataforge/meta/Meta;)[B
+	public static final fun getByteArray (Lspace/kscience/dataforge/meta/MetaConverter$Companion;)Lspace/kscience/dataforge/meta/MetaConverter;
 	public static final fun getByteArray (Lspace/kscience/dataforge/meta/Value;)[B
 	public static final fun getDoubleArray (Lspace/kscience/dataforge/meta/Meta;)[D
+	public static final fun getDoubleArray (Lspace/kscience/dataforge/meta/MetaConverter$Companion;)Lspace/kscience/dataforge/meta/MetaConverter;
 	public static final fun getDoubleArray (Lspace/kscience/dataforge/meta/Value;)[D
 	public static final fun lazyParseValue (Ljava/lang/String;)Lspace/kscience/dataforge/meta/LazyParsedValue;
 }
@@ -217,6 +219,7 @@ public final class space/kscience/dataforge/meta/MetaConverter$Companion {
 	public final fun getMeta ()Lspace/kscience/dataforge/meta/MetaConverter;
 	public final fun getNumber ()Lspace/kscience/dataforge/meta/MetaConverter;
 	public final fun getString ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getStringList ()Lspace/kscience/dataforge/meta/MetaConverter;
 	public final fun getValue ()Lspace/kscience/dataforge/meta/MetaConverter;
 	public final fun valueList (Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/MetaConverter;
 	public static synthetic fun valueList$default (Lspace/kscience/dataforge/meta/MetaConverter$Companion;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaConverter;
@@ -248,6 +251,8 @@ public final class space/kscience/dataforge/meta/MetaDelegateKt {
 	public static final fun int (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MetaProvider;ILspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun listOfReadable (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun listOfReadable$default (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static final fun listOfSpec (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun listOfSpec$default (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static final fun long (Lspace/kscience/dataforge/meta/MetaProvider;JLspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
@@ -264,6 +269,10 @@ public final class space/kscience/dataforge/meta/MetaDelegateKt {
 	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun readable (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaReader;Ljava/lang/Object;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static final fun readable (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun readable$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaReader;Ljava/lang/Object;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
+	public static synthetic fun readable$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static final fun spec (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static synthetic fun spec$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaDelegate;
 	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/String;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MetaDelegate;
@@ -322,6 +331,10 @@ public final class space/kscience/dataforge/meta/MetaReaderKt {
 	public static final fun readValue (Lspace/kscience/dataforge/meta/MetaReader;Lspace/kscience/dataforge/meta/Value;)Ljava/lang/Object;
 }
 
+public abstract interface class space/kscience/dataforge/meta/MetaRefStore : space/kscience/dataforge/meta/descriptors/Described {
+	public abstract fun getRefs ()Ljava/util/List;
+}
+
 public abstract interface class space/kscience/dataforge/meta/MetaRepr {
 	public abstract fun toMeta ()Lspace/kscience/dataforge/meta/Meta;
 }
@@ -411,7 +424,9 @@ public final class space/kscience/dataforge/meta/MutableMetaDelegateKt {
 	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;ZLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static final fun convertable (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/MetaConverter;Ljava/lang/Object;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static final fun convertable (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
+	public static synthetic fun convertable$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/MetaConverter;Ljava/lang/Object;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static synthetic fun convertable$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static final fun double (Lspace/kscience/dataforge/meta/MutableMetaProvider;DLspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static final fun double (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
@@ -433,9 +448,7 @@ public final class space/kscience/dataforge/meta/MutableMetaDelegateKt {
 	public static final fun long (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;JLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
-	public static final fun node (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaConverter;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static final fun node (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
-	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaConverter;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static final fun number (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
 	public static final fun number (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMetaDelegate;
@@ -508,7 +521,10 @@ public final class space/kscience/dataforge/meta/MutableMetaSerializer : kotlinx
 }
 
 public abstract interface class space/kscience/dataforge/meta/MutableTypedMeta : space/kscience/dataforge/meta/MutableMeta, space/kscience/dataforge/meta/TypedMeta {
-	public abstract fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableTypedMeta;
+	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
+	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
+	public fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableTypedMeta;
+	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/TypedMeta;
 	public abstract fun getOrCreate (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableTypedMeta;
 }
 
@@ -546,16 +562,24 @@ public final class space/kscience/dataforge/meta/ObservableMetaWrapperKt {
 }
 
 public abstract interface class space/kscience/dataforge/meta/ObservableMutableMeta : space/kscience/dataforge/meta/MutableMeta, space/kscience/dataforge/meta/MutableTypedMeta, space/kscience/dataforge/meta/ObservableMeta {
-	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
-	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
-	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableTypedMeta;
-	public fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
-	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/TypedMeta;
-	public abstract fun getOrCreate (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
+	public static final field Companion Lspace/kscience/dataforge/meta/ObservableMutableMeta$Companion;
 	public fun getSelf ()Lspace/kscience/dataforge/meta/ObservableMutableMeta;
 	public synthetic fun getSelf ()Lspace/kscience/dataforge/meta/TypedMeta;
 }
 
+public final class space/kscience/dataforge/meta/ObservableMutableMeta$Companion {
+	public final fun serializer ()Lkotlinx/serialization/KSerializer;
+}
+
+public final class space/kscience/dataforge/meta/ObservableMutableMetaSerializer : kotlinx/serialization/KSerializer {
+	public static final field INSTANCE Lspace/kscience/dataforge/meta/ObservableMutableMetaSerializer;
+	public synthetic fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Ljava/lang/Object;
+	public fun deserialize (Lkotlinx/serialization/encoding/Decoder;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
+	public fun getDescriptor ()Lkotlinx/serialization/descriptors/SerialDescriptor;
+	public synthetic fun serialize (Lkotlinx/serialization/encoding/Encoder;Ljava/lang/Object;)V
+	public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/meta/ObservableMutableMeta;)V
+}
+
 public final class space/kscience/dataforge/meta/RegexItemTransformationRule : space/kscience/dataforge/meta/TransformationRule {
 	public fun <init> (Lkotlin/text/Regex;Lkotlin/jvm/functions/Function4;)V
 	public final fun component1 ()Lkotlin/text/Regex;
@@ -596,9 +620,9 @@ public final class space/kscience/dataforge/meta/SchemeKt {
 	public static final fun listOfScheme (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun listOfScheme$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun listOfScheme$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun scheme (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun scheme (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun scheme (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun scheme$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun scheme$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun scheme$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun schemeOrNull (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun schemeOrNull (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
index 20388a92..3e53891b 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
@@ -1,5 +1,6 @@
 package space.kscience.dataforge.meta
 
+import kotlinx.serialization.Serializable
 import space.kscience.dataforge.misc.ThreadSafe
 import space.kscience.dataforge.names.Name
 
@@ -35,6 +36,8 @@ public interface ObservableMeta : Meta {
 /**
  * A [Meta] which is both observable and mutable
  */
+@Serializable(ObservableMutableMetaSerializer::class)
+@MetaBuilderMarker
 public interface ObservableMutableMeta : ObservableMeta, MutableMeta, MutableTypedMeta<ObservableMutableMeta> {
     override val self: ObservableMutableMeta get() = this
 }

From 425f9801a59aafe93e475beba63abd98dcb24c18 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 15 Dec 2024 10:56:35 +0300
Subject: [PATCH 18/29] Rework data. Split static state and dynamic state

---
 CHANGELOG.md                                  |  1 +
 build.gradle.kts                              |  2 +-
 .../dataforge/actions/AbstractAction.kt       | 33 ++++---
 .../kscience/dataforge/actions/MapAction.kt   | 34 ++++---
 .../dataforge/actions/ReduceAction.kt         | 21 ++--
 .../kscience/dataforge/actions/SplitAction.kt | 21 ++--
 .../data/{GroupRule.kt => DataRenamer.kt}     | 45 ++++-----
 .../space/kscience/dataforge/data/DataSink.kt | 96 +++++--------------
 .../kscience/dataforge/data/DataSource.kt     | 13 ++-
 .../kscience/dataforge/data/MetaMaskData.kt   |  2 +-
 .../kscience/dataforge/data/NamedData.kt      |  5 +
 .../kscience/dataforge/data/dataBuilders.kt   | 75 ++++++---------
 .../kscience/dataforge/data/dataTransform.kt  | 45 ++++++---
 .../dataforge/data/dataTreeBuilder.kt         | 34 ++++---
 .../dataforge/data/DataTreeBuilderTest.kt     | 14 +--
 .../dataforge/data/dataSetBuilderInContext.kt |  6 +-
 .../kscience/dataforge/data/ActionsTest.kt    | 41 +++-----
 dataforge-meta/build.gradle.kts               |  6 ++
 .../dataforge/meta/valueExtensions.kt         | 15 ++-
 .../dataforge/workspace/FileDataTest.kt       |  4 +-
 docs/templates/README-TEMPLATE.md             | 61 ++++++++++++
 gradle.properties                             |  2 +-
 22 files changed, 318 insertions(+), 258 deletions(-)
 rename dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/{GroupRule.kt => DataRenamer.kt} (53%)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 82f0a8fb..3cb3daf0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,6 +8,7 @@
 
 ### Changed
 - Simplify inheritance logic in `MutableTypedMeta`
+- API of DataSink.
 
 ### Deprecated
 - MetaProvider `spec` is replaced by `readable`. `listOfSpec` replaced with `listOfReadable`
diff --git a/build.gradle.kts b/build.gradle.kts
index 98fca6b3..2d7cb095 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -9,7 +9,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.9.1"
+    version = "0.10.0"
 }
 
 subprojects {
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
index 4ed5f8df..e8249f0b 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
@@ -1,12 +1,7 @@
 package space.kscience.dataforge.actions
 
 import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.flow.collect
-import kotlinx.coroutines.flow.onEach
-import space.kscience.dataforge.data.DataSink
-import space.kscience.dataforge.data.DataTree
-import space.kscience.dataforge.data.DataUpdate
-import space.kscience.dataforge.data.launchUpdate
+import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
@@ -31,10 +26,10 @@ public abstract class AbstractAction<T, R>(
     /**
      * Generate initial content of the output
      */
-    protected abstract fun DataSink<R>.generate(
+    protected abstract fun DataBuilderScope<R>.generate(
         source: DataTree<T>,
         meta: Meta,
-    )
+    ): Map<Name, Data<R>>
 
     /**
      * Update part of the data set using provided data
@@ -49,7 +44,7 @@ public abstract class AbstractAction<T, R>(
         updatedData: DataUpdate<T>,
     ) {
         //by default regenerate the whole data set
-        generate(source, meta)
+        putAll(generate(source, meta))
     }
 
     @OptIn(UnsafeKType::class)
@@ -57,13 +52,21 @@ public abstract class AbstractAction<T, R>(
         source: DataTree<T>,
         meta: Meta,
         updatesScope: CoroutineScope
-    ): DataTree<R> = DataTree(outputType) {
-        generate(source, meta)
+    ): DataTree<R> = DataTree(
+        dataType = outputType,
+        scope = updatesScope,
+        initialData = DataBuilderScope<R>().generate(source, meta)
+    ) {
+
         //propagate updates
-        launchUpdate(updatesScope) {
-            source.updates.onEach { update ->
-                update(source, meta, update)
-            }.collect()
+        val updateSink = DataSink<R> { name, data ->
+            put(name, data)
+        }
+
+        with(updateSink) {
+            source.updates.collect { du: DataUpdate<T> ->
+                update(source, meta, du)
+            }
         }
     }
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index 08bf08e9..fefa12d1 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -54,22 +54,21 @@ public class MapAction<T, R>(
     private val block: MapActionBuilder<T, R>.() -> Unit,
 ) : AbstractAction<T, R>(outputType) {
 
-    private fun DataSink<R>.mapOne(name: Name, data: Data<T>?, meta: Meta) {
+    private fun mapOne(name: Name, data: Data<T>?, meta: Meta): Pair<Name, Data<R>?> {
         //fast return for null data
         if (data == null) {
-            put(name, null)
-            return
+            return name to null
         }
         // Creating a new environment for action using **old** name, old meta and task meta
         val env = ActionEnv(name, data.meta, meta)
 
         //applying transformation from builder
         val builder = MapActionBuilder<T, R>(
-            name,
-            data.meta.toMutableMeta(), // using data meta
-            meta,
-            data.type,
-            outputType
+            name = name,
+            meta = data.meta.toMutableMeta(), // using data meta
+            actionMeta = meta,
+            dataType = data.type,
+            outputType = outputType
         ).apply(block)
 
         //getting new name
@@ -82,21 +81,26 @@ public class MapAction<T, R>(
             builder.result(env, data.await())
         }
         //setting the data node
-        put(newName, newData)
+        return newName to newData
     }
 
-    override fun DataSink<R>.generate(source: DataTree<T>, meta: Meta) {
-        source.forEach { mapOne(it.name, it.data, meta) }
+    override fun DataBuilderScope<R>.generate(source: DataTree<T>, meta: Meta): Map<Name, Data<R>> = buildMap {
+        source.forEach {
+            val (name, data) = mapOne(it.name, it.data, meta)
+            if (data != null) {
+                check(name !in keys) { "Data with key $name already exist in the result" }
+                put(name, data)
+            }
+        }
     }
 
-
-
     override suspend fun DataSink<R>.update(
         source: DataTree<T>,
         meta: Meta,
         updatedData: DataUpdate<T>,
-    )  {
-        mapOne(updatedData.name, updatedData.data, meta)
+    ) {
+        val (name, data) = mapOne(updatedData.name, updatedData.data, meta)
+        put(name, data)
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
index 93278442..73388fec 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
@@ -3,6 +3,8 @@ package space.kscience.dataforge.actions
 import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
+import space.kscience.dataforge.meta.get
+import space.kscience.dataforge.meta.string
 import space.kscience.dataforge.misc.DFBuilder
 import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
@@ -13,7 +15,7 @@ import kotlin.reflect.typeOf
 
 public class JoinGroup<T, R>(
     public var name: String,
-    internal val set: DataTree<T>,
+    internal val data: DataTree<T>,
     @PublishedApi internal var outputType: KType,
 ) {
 
@@ -41,12 +43,17 @@ public class ReduceGroupBuilder<T, R>(
     private val groupRules: MutableList<(DataTree<T>) -> List<JoinGroup<T, R>>> = ArrayList();
 
     /**
-     * introduce grouping by meta value
+     * Group by a meta value
      */
-    public fun byValue(tag: String, defaultTag: String = "@default", action: JoinGroup<T, R>.() -> Unit) {
+    @OptIn(UnsafeKType::class)
+    public fun byMetaValue(tag: String, defaultTag: String = "@default", action: JoinGroup<T, R>.() -> Unit) {
         groupRules += { node ->
-            GroupRule.byMetaValue(tag, defaultTag).gather(node).map {
-                JoinGroup<T, R>(it.key, it.value, outputType).apply(action)
+            val groups = mutableMapOf<String, MutableMap<Name, Data<T>>>()
+            node.forEach { data ->
+                groups.getOrPut(data.meta[tag]?.string ?: defaultTag) { mutableMapOf() }.put(data.name, data)
+            }
+            groups.map { (key, dataMap) ->
+                JoinGroup<T, R>(key, dataMap.asTree(node.dataType), outputType).apply(action)
             }
         }
     }
@@ -84,9 +91,9 @@ internal class ReduceAction<T, R>(
 ) : AbstractAction<T, R>(outputType) {
     //TODO optimize reduction. Currently, the whole action recalculates on push
 
-    override fun DataSink<R>.generate(source: DataTree<T>, meta: Meta) {
+    override fun DataBuilderScope<R>.generate(source: DataTree<T>, meta: Meta): Map<Name, Data<R>> = buildMap {
         ReduceGroupBuilder<T, R>(meta, outputType).apply(action).buildGroups(source).forEach { group ->
-            val dataFlow: Map<Name, Data<T>> = group.set.asSequence().fold(HashMap()) { acc, value ->
+            val dataFlow: Map<Name, Data<T>> = group.data.asSequence().fold(HashMap()) { acc, value ->
                 acc.apply {
                     acc[value.name] = value.data
                 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
index 2268b0fa..bf3284be 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
@@ -7,7 +7,6 @@ import space.kscience.dataforge.meta.MutableMeta
 import space.kscience.dataforge.meta.toMutableMeta
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.parseAsName
-import kotlin.collections.set
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
@@ -48,7 +47,7 @@ internal class SplitAction<T, R>(
     private val action: SplitBuilder<T, R>.() -> Unit,
 ) : AbstractAction<T, R>(outputType) {
 
-    private fun DataSink<R>.splitOne(name: Name, data: Data<T>?, meta: Meta) {
+    private fun splitOne(name: Name, data: Data<T>?, meta: Meta): Map<Name, Data<R>?> = buildMap {
         val laminate = Laminate(data?.meta, meta)
 
         val split = SplitBuilder<T, R>(name, data?.meta ?: Meta.EMPTY).apply(action)
@@ -76,16 +75,26 @@ internal class SplitAction<T, R>(
         }
     }
 
-    override fun DataSink<R>.generate(source: DataTree<T>, meta: Meta) {
-        source.forEach { splitOne(it.name, it.data, meta) }
+    override fun DataBuilderScope<R>.generate(
+        source: DataTree<T>,
+        meta: Meta
+    ): Map<Name, Data<R>> = buildMap {
+        source.forEach {
+            splitOne(it.name, it.data, meta).forEach { (name, data) ->
+                check(name !in keys) { "Data with key $name already exist in the result" }
+                if (data != null) {
+                    put(name, data)
+                }
+            }
+        }
     }
 
     override suspend fun DataSink<R>.update(
         source: DataTree<T>,
         meta: Meta,
         updatedData: DataUpdate<T>,
-    )  {
-        splitOne(updatedData.name, updatedData.data, meta)
+    ) {
+        putAll(splitOne(updatedData.name, updatedData.data, meta))
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataRenamer.kt
similarity index 53%
rename from dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
rename to dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataRenamer.kt
index 6dd3caa9..af2596ac 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataRenamer.kt
@@ -15,40 +15,41 @@
  */
 package space.kscience.dataforge.data
 
+import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.string
+import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.misc.UnsafeKType
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.NameToken
+import space.kscience.dataforge.names.plus
+import kotlin.reflect.KType
 
-public interface GroupRule {
-    public fun <T> gather(set: DataTree<T>): Map<String, DataTree<T>>
+/**
+ * Interface that define rename rule for [Data]
+ */
+@DFExperimental
+public fun interface DataRenamer {
+    public fun rename(name: Name, meta: Meta, type: KType): Name
 
     public companion object {
+
         /**
-         * Create grouping rule that creates groups for different values of value
-         * field with name [key]
-         *
-         * @param key
-         * @param defaultTagValue
-         * @return
+         * Prepend name token `key\[tagValue\]` to data name
          */
         @OptIn(UnsafeKType::class)
-        public fun byMetaValue(
+        public fun groupByMetaValue(
             key: String,
             defaultTagValue: String,
-        ): GroupRule = object : GroupRule {
+        ): DataRenamer = object : DataRenamer {
 
-            override fun <T> gather(
-                set: DataTree<T>,
-            ): Map<String, DataTree<T>> {
-                val map = HashMap<String, MutableDataTree<T>>()
-
-                set.forEach { data ->
-                    val tagValue: String = data.meta[key]?.string ?: defaultTagValue
-                    map.getOrPut(tagValue) { MutableDataTree(set.dataType) }.put(data.name, data.data)
-                }
-
-
-                return map
+            override fun rename(
+                name: Name,
+                meta: Meta,
+                type: KType
+            ): Name {
+                val tagValue: String = meta[key]?.string ?: defaultTagValue
+                return NameToken(key,tagValue).plus(name)
             }
         }
     }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
index 6daeae98..ccd37514 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
@@ -1,49 +1,28 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.Job
-import kotlinx.coroutines.channels.BufferOverflow
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.MutableSharedFlow
 import kotlinx.coroutines.flow.mapNotNull
-import kotlinx.coroutines.launch
 import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.*
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
-public interface DataSink<in T> {
-    /**
-     * Put data without notification
-     */
-    public fun put(name: Name, data: Data<T>?)
-
-    /**
-     * Put data and propagate changes downstream
-     */
-    public suspend fun update(name: Name, data: Data<T>?)
+public interface DataBuilderScope<in T>{
+    public companion object: DataBuilderScope<Nothing>
 }
 
-/**
- * Launch continuous update using
- */
-public fun <T> DataSink<T>.launchUpdate(
-    scope: CoroutineScope,
-    updater: suspend DataSink<T>.() -> Unit,
-): Job = scope.launch {
-    object : DataSink<T> {
-        override fun put(name: Name, data: Data<T>?) {
-            launch {
-                this@launchUpdate.update(name, data)
-            }
-        }
+@Suppress("UNCHECKED_CAST")
+public fun <T> DataBuilderScope(): DataBuilderScope<T> = DataBuilderScope as DataBuilderScope<T>
 
-        override suspend fun update(name: Name, data: Data<T>?) {
-            this@launchUpdate.update(name, data)
-        }
-    }.updater()
+public fun interface DataSink<in T>: DataBuilderScope<T> {
+    /**
+     * Put data and notify listeners if needed
+     */
+    public suspend fun put(name: Name, data: Data<T>?)
 }
 
+
 /**
  * A mutable version of [DataTree]
  */
@@ -54,16 +33,14 @@ public interface MutableDataTree<T> : DataTree<T>, DataSink<T> {
 
     public fun getOrCreateItem(token: NameToken): MutableDataTree<T>
 
-    public operator fun set(token: NameToken, data: Data<T>?)
+    public suspend fun put(token: NameToken, data: Data<T>?)
 
-    override fun put(name: Name, data: Data<T>?): Unit = set(name, data)
-}
-
-public tailrec operator fun <T> MutableDataTree<T>.set(name: Name, data: Data<T>?): Unit {
-    when (name.length) {
-        0 -> this.data = data
-        1 -> set(name.first(), data)
-        else -> getOrCreateItem(name.first())[name.cutFirst()] = data
+    override suspend fun put(name: Name, data: Data<T>?): Unit {
+        when (name.length) {
+            0 -> this.data = data
+            1 -> put(name.first(), data)
+            else -> getOrCreateItem(name.first()).put(name.cutFirst(), data)
+        }
     }
 }
 
@@ -81,8 +58,8 @@ private class MutableDataTreeRoot<T>(
     override val dataType: KType,
 ) : MutableDataTree<T> {
 
-    override val updates = MutableSharedFlow<DataUpdate<T>>(100, onBufferOverflow = BufferOverflow.DROP_LATEST)
-
+    override val items = HashMap<NameToken, MutableDataTree<T>>()
+    override val updates = MutableSharedFlow<DataUpdate<T>>(extraBufferCapacity = 100)
 
     inner class MutableDataTreeBranch(val branchName: Name) : MutableDataTree<T> {
 
@@ -101,44 +78,21 @@ private class MutableDataTreeRoot<T>(
         override fun getOrCreateItem(token: NameToken): MutableDataTree<T> =
             items.getOrPut(token) { MutableDataTreeBranch(branchName + token) }
 
-
-        override fun set(token: NameToken, data: Data<T>?) {
-            val subTree = getOrCreateItem(token)
-            subTree.data = data
+        override suspend fun put(token: NameToken, data: Data<T>?) {
+            this.data = data
+            this@MutableDataTreeRoot.updates.emit(DataUpdate(data?.type ?: dataType, branchName + token, data))
         }
-
-        override suspend fun update(name: Name, data: Data<T>?) {
-            if (name.isEmpty()) {
-                this.data = data
-                this@MutableDataTreeRoot.updates.emit(DataUpdate(data?.type ?: dataType, branchName + name, data))
-            } else {
-                getOrCreateItem(name.first()).update(name.cutFirst(), data)
-            }
-        }
-
     }
 
-
     override var data: Data<T>? = null
 
-    override val items = HashMap<NameToken, MutableDataTree<T>>()
-
     override fun getOrCreateItem(token: NameToken): MutableDataTree<T> = items.getOrPut(token) {
         MutableDataTreeBranch(token.asName())
     }
 
-    override fun set(token: NameToken, data: Data<T>?) {
-        val subTree = getOrCreateItem(token)
-        subTree.data = data
-    }
-
-    override suspend fun update(name: Name, data: Data<T>?) {
-        if (name.isEmpty()) {
-            this.data = data
-            updates.emit(DataUpdate(data?.type ?: dataType, name, data))
-        } else {
-            getOrCreateItem(name.first()).update(name.cutFirst(), data)
-        }
+    override suspend fun put(token: NameToken, data: Data<T>?) {
+        this.data = data
+        updates.emit(DataUpdate(data?.type ?: dataType, token.asName(), data))
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
index a9756be7..dfde4e0b 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
@@ -2,6 +2,7 @@ package space.kscience.dataforge.data
 
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.emptyFlow
+import kotlinx.coroutines.flow.first
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.names.*
 import kotlin.contracts.contract
@@ -30,11 +31,18 @@ public interface DataSource<out T> {
 public interface ObservableDataSource<out T> : DataSource<T> {
 
     /**
-     * Flow updates made to the data
+     * Flow updates made to the data. Updates are considered critical. The producer will suspend until all updates are consumed.
      */
     public val updates: Flow<DataUpdate<T>>
 }
 
+public suspend fun <T> ObservableDataSource<T>.awaitData(name: Name): Data<T> {
+    return read(name) ?: updates.first { it.name == name && it.data != null }.data!!
+}
+
+public suspend fun <T> ObservableDataSource<T>.awaitData(name: String): Data<T> =
+    awaitData(name.parseAsName())
+
 /**
  * A tree like structure for data holding
  */
@@ -54,8 +62,7 @@ public interface DataTree<out T> : ObservableDataSource<T> {
     override val updates: Flow<DataUpdate<T>>
 
     public companion object {
-        private object EmptyDataTree :
-            DataTree<Nothing> {
+        private object EmptyDataTree : DataTree<Nothing> {
             override val data: Data<Nothing>? = null
             override val items: Map<NameToken, EmptyDataTree> = emptyMap()
             override val dataType: KType = typeOf<Unit>()
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt
index 85f0b2f9..2fbece21 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt
@@ -8,7 +8,7 @@ import space.kscience.dataforge.meta.copy
 private class MetaMaskData<T>(val origin: Data<T>, override val meta: Meta) : Data<T> by origin
 
 /**
- * A data with overriden meta. It reflects original data computed state.
+ * A data with overridden meta. It reflects original data computed state.
  */
 public fun <T> Data<T>.withMeta(newMeta: Meta): Data<T> = if (this is MetaMaskData) {
     MetaMaskData(origin, newMeta)
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
index 9cb4c2d9..54a9715c 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
@@ -20,6 +20,11 @@ public fun <T> DataUpdate(type: KType, name: Name, data: Data<T>?): DataUpdate<T
     override val type: KType = type
     override val name: Name = name
     override val data: Data<T>? = data
+
+    override fun toString(): String {
+        return "DataUpdate(type=$type, name=$name, data=$data)"
+    }
+
 }
 
 /**
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
index 649cfd19..b3433616 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
@@ -1,15 +1,14 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.Job
-import kotlinx.coroutines.flow.launchIn
-import kotlinx.coroutines.flow.onEach
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
-import space.kscience.dataforge.names.*
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.asName
+import space.kscience.dataforge.names.isEmpty
+import space.kscience.dataforge.names.plus
 
 
-public fun <T> DataSink<T>.put(value: NamedData<T>) {
+public suspend fun <T> DataSink<T>.put(value: NamedData<T>) {
     put(value.name, value.data)
 }
 
@@ -20,16 +19,7 @@ public inline fun <T> DataSink<T>.putAll(
     if (prefix.isEmpty()) {
         apply(block)
     } else {
-        val proxyDataSink = object :DataSink<T>{
-            override fun put(name: Name, data: Data<T>?) {
-                this@putAll.put(prefix + name, data)
-            }
-
-            override suspend fun update(name: Name, data: Data<T>?) {
-                this@putAll.update(prefix + name, data)
-            }
-
-        }
+        val proxyDataSink = DataSink<T> { name, data -> this@putAll.put(prefix + name, data) }
 
         proxyDataSink.apply(block)
     }
@@ -42,23 +32,23 @@ public inline fun <T> DataSink<T>.putAll(
 ): Unit = putAll(prefix.asName(), block)
 
 
-public fun <T> DataSink<T>.put(name: String, value: Data<T>) {
+public suspend fun <T> DataSink<T>.put(name: String, value: Data<T>) {
     put(Name.parse(name), value)
 }
 
-public fun <T> DataSink<T>.putAll(name: Name, tree: DataTree<T>) {
+public suspend fun <T> DataSink<T>.putAll(name: Name, tree: DataTree<T>) {
     putAll(name) { putAll(tree.asSequence()) }
 }
 
 
-public fun <T> DataSink<T>.putAll(name: String, tree: DataTree<T>) {
+public suspend fun <T> DataSink<T>.putAll(name: String, tree: DataTree<T>) {
     putAll(Name.parse(name)) { putAll(tree.asSequence()) }
 }
 
 /**
  * Produce lazy [Data] and emit it into the [MutableDataTree]
  */
-public inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.putValue(
     name: String,
     meta: Meta = Meta.EMPTY,
     noinline producer: suspend () -> T,
@@ -67,7 +57,7 @@ public inline fun <reified T> DataSink<T>.putValue(
     put(name, data)
 }
 
-public inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.putValue(
     name: Name,
     meta: Meta = Meta.EMPTY,
     noinline producer: suspend () -> T,
@@ -79,56 +69,49 @@ public inline fun <reified T> DataSink<T>.putValue(
 /**
  * Emit static data with the fixed value
  */
-public inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.putValue(
     name: Name,
     value: T,
     meta: Meta = Meta.EMPTY,
 ): Unit = put(name, Data.wrapValue(value, meta))
 
-public inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.putValue(
     name: String,
     value: T,
     meta: Meta = Meta.EMPTY,
 ): Unit = put(name, Data.wrapValue(value, meta))
 
-public inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.putValue(
     name: String,
     value: T,
     metaBuilder: MutableMeta.() -> Unit,
 ): Unit = put(Name.parse(name), Data.wrapValue(value, Meta(metaBuilder)))
 
-public suspend inline fun <reified T> DataSink<T>.updateValue(
-    name: Name,
-    value: T,
-    meta: Meta = Meta.EMPTY,
-): Unit = update(name, Data.wrapValue(value, meta))
-
-public suspend inline fun <reified T> DataSink<T>.updateValue(
-    name: String,
-    value: T,
-    meta: Meta = Meta.EMPTY,
-): Unit = update(name.parseAsName(), Data.wrapValue(value, meta))
-
-public fun <T> DataSink<T>.putAll(sequence: Sequence<NamedData<T>>) {
+public suspend fun <T> DataSink<T>.putAll(sequence: Sequence<NamedData<T>>) {
     sequence.forEach {
         put(it.name, it.data)
     }
 }
 
-public fun <T> DataSink<T>.putAll(tree: DataTree<T>) {
+public suspend fun <T> DataSink<T>.putAll(map: Map<Name, Data<T>?>) {
+    map.forEach { (name, data) ->
+        put(name, data)
+    }
+}
+
+public suspend fun <T> DataSink<T>.putAll(tree: DataTree<T>) {
     putAll(tree.asSequence())
 }
 
 /**
- * Copy given data set and mirror its changes to this [DataSink] in [this@setAndObserve]. Returns an update [Job]
+ * Copy given data set and mirror its changes to this [DataSink]. Suspends indefinitely.
  */
-public fun <T : Any> DataSink<T>.putAllAndWatch(
-    scope: CoroutineScope,
-    branchName: Name = Name.EMPTY,
+public suspend fun <T : Any> DataSink<T>.putAllAndWatch(
     source: DataTree<T>,
-): Job {
+    branchName: Name = Name.EMPTY,
+) {
     putAll(branchName, source)
-    return source.updates.onEach {
-        update(branchName + it.name, it.data)
-    }.launchIn(scope)
+    source.updates.collect {
+        put(branchName + it.name, it.data)
+    }
 }
\ No newline at end of file
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
index c0d92e9e..80105fc6 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
@@ -1,5 +1,6 @@
 package space.kscience.dataforge.data
 
+import kotlinx.coroutines.CoroutineScope
 import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
@@ -36,7 +37,6 @@ public fun <T, R> Data<T>.transform(
 }
 
 
-
 /**
  * Lazily transform this data to another data. By convention [block] should not use external data (be pure).
  * @param coroutineContext additional [CoroutineContext] elements used for data computation.
@@ -77,7 +77,6 @@ internal fun Iterable<Data<*>>.joinMeta(): Meta = Meta {
 }
 
 
-
 @PublishedApi
 internal fun Map<*, Data<*>>.joinMeta(): Meta = Meta {
     forEach { (key, data) ->
@@ -201,34 +200,46 @@ public inline fun <T, reified R> Iterable<NamedData<T>>.foldNamedToData(
 
 
 @UnsafeKType
-public suspend fun <T, R> DataTree<T>.transform(
+public fun <T, R> DataTree<T>.map(
     outputType: KType,
+    scope: CoroutineScope,
     metaTransform: MutableMeta.() -> Unit = {},
-    coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    block: suspend (NamedValueWithMeta<T>) -> R,
-): DataTree<R> = DataTree<R>(outputType){
-    //quasi-synchronous processing of elements in the tree
-    asSequence().forEach { namedData: NamedData<T> ->
+    compute: suspend (NamedValueWithMeta<T>) -> R,
+): DataTree<R> = DataTree<R>(
+    outputType,
+    scope,
+    initialData = asSequence().associate { namedData: NamedData<T> ->
         val newMeta = namedData.meta.toMutableMeta().apply(metaTransform).seal()
-        val d = Data(outputType, newMeta, coroutineContext, listOf(namedData)) {
-            block(namedData.awaitWithMeta())
+        val newData = Data(outputType, newMeta, scope.coroutineContext, listOf(namedData)) {
+            compute(namedData.awaitWithMeta())
+        }
+        namedData.name to newData
+    }
+) {
+    updates.collect { update ->
+        val data: Data<T>? = update.data
+        if (data == null) put(update.name, null) else {
+            val newMeta = data.meta.toMutableMeta().apply(metaTransform).seal()
+            val d = Data(outputType, newMeta, scope.coroutineContext, listOf(data)) {
+                compute(NamedValueWithMeta(update.name, data.await(), data.meta))
+            }
+            put(update.name, d)
         }
-        put(namedData.name, d)
     }
 }
 
 @OptIn(UnsafeKType::class)
-public suspend inline fun <T, reified R> DataTree<T>.transform(
+public inline fun <T, reified R> DataTree<T>.map(
+    scope: CoroutineScope,
     noinline metaTransform: MutableMeta.() -> Unit = {},
-    coroutineContext: CoroutineContext = EmptyCoroutineContext,
     noinline block: suspend (NamedValueWithMeta<T>) -> R,
-): DataTree<R> = this@transform.transform(typeOf<R>(), metaTransform, coroutineContext, block)
+): DataTree<R> = map(typeOf<R>(), scope, metaTransform, block)
 
 public inline fun <T> DataTree<T>.forEach(block: (NamedData<T>) -> Unit) {
     asSequence().forEach(block)
 }
 
-// DataSet reduction
+// DataSet snapshot reduction
 
 @PublishedApi
 internal fun DataTree<*>.joinMeta(): Meta = Meta {
@@ -238,6 +249,10 @@ internal fun DataTree<*>.joinMeta(): Meta = Meta {
     }
 }
 
+/**
+ * Reduce current snapshot of the [DataTree] to a single [Data].
+ * Even if a tree is changed in the future, only current data set is taken.
+ */
 public inline fun <T, reified R> DataTree<T>.reduceToData(
     meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
index ac0492f0..20afcc76 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
@@ -1,8 +1,11 @@
 package space.kscience.dataforge.data
 
+import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.MutableSharedFlow
+import kotlinx.coroutines.flow.SharedFlow
 import kotlinx.coroutines.flow.mapNotNull
+import kotlinx.coroutines.launch
 import kotlinx.coroutines.sync.Mutex
 import kotlinx.coroutines.sync.withLock
 import space.kscience.dataforge.misc.UnsafeKType
@@ -14,7 +17,7 @@ import kotlin.reflect.typeOf
 private class FlatDataTree<T>(
     override val dataType: KType,
     private val dataSet: Map<Name, Data<T>>,
-    private val sourceUpdates: Flow<DataUpdate<T>>,
+    private val sourceUpdates: SharedFlow<DataUpdate<T>>,
     private val prefix: Name,
 ) : DataTree<T> {
     override val data: Data<T>? get() = dataSet[prefix]
@@ -33,7 +36,7 @@ private class FlatDataTree<T>(
 }
 
 /**
- * A builder for static [DataTree].
+ * A builder for [DataTree].
  */
 private class DataTreeBuilder<T>(
     private val type: KType,
@@ -46,20 +49,13 @@ private class DataTreeBuilder<T>(
 
     private val updatesFlow = MutableSharedFlow<DataUpdate<T>>()
 
-    override fun put(name: Name, data: Data<T>?) {
-        if (data == null) {
-            map.remove(name)
-        } else {
-            map[name] = data
-        }
-    }
 
-    override suspend fun update(name: Name, data: Data<T>?) {
+    override suspend fun put(name: Name, data: Data<T>?) {
         mutex.withLock {
             if (data == null) {
                 map.remove(name)
             } else {
-                map.put(name, data)
+                map[name] = data
             }
         }
         updatesFlow.emit(DataUpdate(data?.type ?: type, name, data))
@@ -74,16 +70,24 @@ private class DataTreeBuilder<T>(
 @UnsafeKType
 public fun <T> DataTree(
     dataType: KType,
-    generator: DataSink<T>.() -> Unit,
-): DataTree<T> = DataTreeBuilder<T>(dataType).apply(generator).build()
+    scope: CoroutineScope,
+    initialData: Map<Name, Data<T>> = emptyMap(),
+    updater: suspend DataSink<T>.() -> Unit,
+): DataTree<T> = DataTreeBuilder<T>(dataType, initialData).apply {
+    scope.launch{
+        updater()
+    }
+}.build()
 
 /**
  * Create and a data tree.
  */
 @OptIn(UnsafeKType::class)
 public inline fun <reified T> DataTree(
-    noinline generator: DataSink<T>.() -> Unit,
-): DataTree<T> = DataTree(typeOf<T>(), generator)
+    scope: CoroutineScope,
+    initialData: Map<Name, Data<T>> = emptyMap(),
+    noinline updater: suspend DataSink<T>.() -> Unit,
+): DataTree<T> = DataTree(typeOf<T>(), scope, initialData, updater)
 
 
 /**
diff --git a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
index 760aeec2..1f78b36c 100644
--- a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
+++ b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
@@ -1,8 +1,7 @@
 package space.kscience.dataforge.data
 
 import kotlinx.coroutines.Job
-import kotlinx.coroutines.flow.collect
-import kotlinx.coroutines.flow.take
+import kotlinx.coroutines.launch
 import kotlinx.coroutines.test.runTest
 import space.kscience.dataforge.names.asName
 import kotlin.test.Test
@@ -57,17 +56,18 @@ internal class DataTreeBuilderTest {
         val subNode = MutableDataTree<Int>()
 
         val rootNode = MutableDataTree<Int>() {
-            job = putAllAndWatch(this@runTest, "sub".asName(), subNode)
+            job = launch {  putAllAndWatch(subNode,"sub".asName())}
         }
 
         repeat(10) {
             subNode.updateValue("value[$it]", it)
         }
 
-        rootNode.updates.take(10).collect()
-        assertEquals(9, rootNode["sub.value[9]"]?.await())
-        assertEquals(8, rootNode["sub.value[8]"]?.await())
-
+        assertEquals(9, subNode.awaitData("value[9]").await())
+        assertEquals(8, subNode.awaitData("value[8]").await())
+        assertEquals(9, rootNode.awaitData("sub.value[9]").await())
+        assertEquals(8, rootNode.awaitData("sub.value[8]").await())
+        println("finished")
         job?.cancel()
     }
 }
\ No newline at end of file
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
index 8ce50a22..f9374974 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
@@ -1,3 +1,5 @@
+@file:Suppress("CONTEXT_RECEIVERS_DEPRECATED")
+
 package space.kscience.dataforge.data
 
 import space.kscience.dataforge.names.Name
@@ -7,14 +9,14 @@ import space.kscience.dataforge.names.Name
  * Append data to node
  */
 context(DataSink<T>)
-public infix fun <T : Any> String.put(data: Data<T>): Unit =
+public suspend infix fun <T : Any> String.put(data: Data<T>): Unit =
     put(Name.parse(this), data)
 
 /**
  * Append node
  */
 context(DataSink<T>)
-public infix fun <T : Any> String.putAll(dataSet: DataTree<T>): Unit =
+public suspend infix fun <T : Any> String.putAll(dataSet: DataTree<T>): Unit =
     putAll(this, dataSet)
 
 /**
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index 2e790caa..e9ec343c 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -1,11 +1,8 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.ExperimentalCoroutinesApi
-import kotlinx.coroutines.delay
 import kotlinx.coroutines.test.advanceUntilIdle
 import kotlinx.coroutines.test.runTest
-import kotlinx.coroutines.withContext
 import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.actions.invoke
 import space.kscience.dataforge.actions.mapping
@@ -18,22 +15,20 @@ import kotlin.time.Duration.Companion.milliseconds
 internal class ActionsTest {
     @Test
     fun testStaticMapAction() = runTest(timeout = 500.milliseconds) {
-        withContext(Dispatchers.Default) {
-            val plusOne = Action.mapping<Int, Int> {
-                result { it + 1 }
-            }
-
-            val data: DataTree<Int> = DataTree {
-                repeat(10) {
-                    putValue(it.toString(), it)
-                }
-            }
-
-            val result = plusOne(data)
-
-            advanceUntilIdle()
-            assertEquals(2, result["1"]?.await())
+        val plusOne = Action.mapping<Int, Int> {
+            result { it + 1 }
         }
+
+        val data: DataTree<Int> = DataTree {
+            repeat(10) {
+                putValue(it.toString(), it)
+            }
+        }
+
+        val result = plusOne(data)
+
+        advanceUntilIdle()
+        assertEquals(2, result["1"]?.await())
     }
 
     @Test
@@ -44,19 +39,13 @@ internal class ActionsTest {
 
         val source: MutableDataTree<Int> = MutableDataTree()
 
-        val result = plusOne(source)
-
-
+        val result: DataTree<Int> = plusOne(source)
 
         repeat(10) {
             source.updateValue(it.toString(), it)
         }
 
-        delay(10)
-
-//        result.updates.take(10).onEach { println(it.name) }.collect()
-
-        assertEquals(2, result["1"]?.await())
+        assertEquals(2, result.awaitData("1").await())
     }
 
 }
\ No newline at end of file
diff --git a/dataforge-meta/build.gradle.kts b/dataforge-meta/build.gradle.kts
index d150ef98..831aea72 100644
--- a/dataforge-meta/build.gradle.kts
+++ b/dataforge-meta/build.gradle.kts
@@ -16,4 +16,10 @@ description = "Meta definition and basic operations on meta"
 
 readme{
     maturity = space.kscience.gradle.Maturity.DEVELOPMENT
+
+    feature("metadata"){
+        """
+            
+        """.trimIndent()
+    }
 }
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/valueExtensions.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/valueExtensions.kt
index 73464305..0c87bcc2 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/valueExtensions.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/valueExtensions.kt
@@ -11,9 +11,18 @@ public fun Value.isNull(): Boolean = this == Null
 public fun Value.isList(): Boolean = this.type == ValueType.LIST
 
 public val Value.boolean: Boolean
-    get() = this == True
-            || this.list.firstOrNull() == True
-            || (type == ValueType.STRING && string.toBoolean())
+    get() = when (type) {
+        ValueType.NUMBER -> int > 0
+        ValueType.STRING -> string.toBoolean()
+        ValueType.BOOLEAN -> this === True
+        ValueType.LIST -> list.singleOrNull()?.boolean == true
+        ValueType.NULL -> false
+    }
+
+//        this == True
+//            || this.list.firstOrNull() == True
+//            || (type == ValueType.STRING && string.toBoolean())
+//            || (type == ValueType.)
 
 
 public val Value.int: Int get() = number.toInt()
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
index 5466da76..f526e194 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
@@ -54,7 +54,7 @@ class FileDataTest {
         val data = DataTree {
             files(io, Name.EMPTY, dir)
         }
-        val reconstructed = data.transform { (_, value) -> value.toByteArray().decodeToString() }
+        val reconstructed = data.map { (_, value) -> value.toByteArray().decodeToString() }
         assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
         assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
     }
@@ -69,7 +69,7 @@ class FileDataTest {
         io.writeZip(zip, dataNode, StringIOFormat)
         println(zip.toUri().toString())
         val reconstructed = DataTree { files(io, Name.EMPTY, zip) }
-            .transform { (_, value) -> value.toByteArray().decodeToString() }
+            .map { (_, value) -> value.toByteArray().decodeToString() }
         assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
         assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
 
diff --git a/docs/templates/README-TEMPLATE.md b/docs/templates/README-TEMPLATE.md
index 762e5d7e..6de55245 100644
--- a/docs/templates/README-TEMPLATE.md
+++ b/docs/templates/README-TEMPLATE.md
@@ -3,4 +3,65 @@
 
 ![Gradle build](https://github.com/mipt-npm/dataforge-core/workflows/Gradle%20build/badge.svg)
 
+## Publications
+
+* [A general overview](https://doi.org/10.1051/epjconf/201817705003)
+* [An application in "Troitsk nu-mass" experiment](https://doi.org/10.1088/1742-6596/1525/1/012024)
+
+## Video
+
+* [A presentation on the application of (an old version of) DataForge to Troitsk nu-mass analysis](https://youtu.be/OpWzLXUZnLI?si=3qn7EMruOHMJX3Bc)
+
+## Questions and Answers
+In this section, we will try to cover DataForge main ideas in the form of questions and answers.
+
+### General
+**Q**: I have a lot of data to analyze. The analysis process is complicated, requires a lot of stages and the data flow is not always obvious. To top it off, the data size is huge, so I don't want to perform operations I don't need (calculate something I won't need or calculate something twice). And yes, I need it to be performed in parallel and probably on a remote computer. By the way, I am sick and tired of scripts that modify other scripts that control scripts. Could you help me?
+
+**A**: Yes, that is precisely the problem DataForge was made to solve. It allows performing automated data manipulations with automatic optimization and parallelization. The important thing is that data processing recipes are made in a declarative way, so it is quite easy to perform computations on a remote station. Also, DataForge guarantees reproducibility of analysis results.
+
+**Q**: How does it work?
+
+**A**: At the core of DataForge lies the idea of a metadata processor. It utilizes the fact that in order to analyze something you need the data itself and some additional information about what that data represents and what the user wants as a result. This additional information is called metadata and can be organized in a regular structure (a tree of values not unlike XML or JSON). The important thing is that this distinction leaves no place for user instructions (or scripts). Indeed, the idea of DataForge logic is that one does not need imperative commands. The framework configures itself according to the input metadata and decides what operations should be performed in the most efficient way.
+
+**Q**: But where does it take algorithms to use?
+
+**A**: Of course algorithms must be written somewhere. No magic here. The logic is written in specialized modules. Some modules are provided out of the box at the system core, some need to be developed for specific problem.
+
+**Q**: So I still need to write the code? What is the difference then?
+
+**A**: Yes, someone still needs to write the code. But not necessarily you. Simple operations can be performed using the provided core logic. Also, your group can have one programmer writing the logic and all others using it without any real programming expertise. The framework is organized in such a way that when one writes some additional logic, they do not need to think about complicated things like parallel computing, resource handling, logging, caching etc. Most of these things are done by DataForge.
+
+### Platform
+
+**Q**: Which platform does DataForge use? Which operating system is it working on?
+
+**A**: DataForge is mostly written in Kotlin-multiplatform and can be used on JVM, JS and native targets. Some modules and functions are supported only on the JVM.
+
+**Q**: Can I use my C++/Fortran/Python code in DataForge?
+
+**A**: Yes, as long as the code can be called from Java. Most common languages have a bridge for Java access. There are no problems at all with compiled C/Fortran libraries. Python code can be called via one of the existing Python-Java interfaces. It is also planned to implement remote method invocation for common languages, so your Python, or, say, Julia, code could run in its native environment. The metadata processor paradigm makes it much easier to do so.
+
+### Features
+**Q**: What other features does DataForge provide?
+
+**A**: Alongside metadata processing (and a lot of tools for metadata manipulation and layering), DataForge has two additional important concepts:
+
+* **Modularisation**. Contrary to many other frameworks, DataForge is intrinsically modular. The mandatory part is a rather tiny core module. Everything else can be customized.
+
+* **Context encapsulation**. Every DataForge task is executed in some context. The context isolates environment for the task and also works as dependency injection base and specifies interaction of the task with the external world.
+
+### Misc
+**Q**: So everything looks great, can I replace my ROOT / other data analysis framework with DataForge?
+
+**A**: One must note that DataForge is made for analysis, not for visualisation. The visualisation and user interaction capabilities of DataForge are rather limited compared to frameworks like ROOT, JAS3 or DataMelt. The idea is to provide a reliable API and core functionality. In fact, JAS3 and DataMelt could be used as a frontend for DataForge mechanics.
+
+**Q**: How does DataForge compare to cluster computation frameworks like Apache Spark?
+
+**A**: Again, it is not the purpose of DataForge to replace cluster software. DataForge has some internal parallelism mechanics and implementations, but they are most certainly worse than specially developed programs. Still, DataForge is not fixed on one single implementation. Your favourite parallel processing tool could still be used as a back-end for DataForge, with the full benefit of configuration tools and integrations, and no performance overhead.
+
+**Q**: Is it possible to use DataForge in notebook mode?
+
+**A**: [Kotlin jupyter](https://github.com/Kotlin/kotlin-jupyter) allows using any JVM program in notebook mode. The dedicated module for DataForge is a work in progress.
+
 ${modules}
diff --git a/gradle.properties b/gradle.properties
index ffc318d9..1b920cd8 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -6,4 +6,4 @@ org.gradle.jvmargs=-Xmx4096m
 kotlin.mpp.stability.nowarn=true
 kotlin.native.ignoreDisabledTargets=true
 
-toolsVersion=0.15.4-kotlin-2.0.0
\ No newline at end of file
+toolsVersion=0.16.0-kotlin-2.1.0
\ No newline at end of file

From 6634ece349ddb36672b1967b005c4b40b28ae9e8 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Tue, 31 Dec 2024 13:58:21 +0300
Subject: [PATCH 19/29] Rework data. Split static state and dynamic state

---
 .../dataforge/actions/AbstractAction.kt       |  12 +-
 .../kscience/dataforge/actions/MapAction.kt   |  10 +-
 .../dataforge/actions/ReduceAction.kt         |   2 +-
 .../kscience/dataforge/actions/SplitAction.kt |   8 +-
 .../kscience/dataforge/data/DataFilter.kt     |  37 ++--
 .../space/kscience/dataforge/data/DataSink.kt |  21 +-
 .../kscience/dataforge/data/DataSource.kt     |  18 +-
 .../space/kscience/dataforge/data/Goal.kt     |   2 +-
 .../kscience/dataforge/data/NamedData.kt      |  33 +--
 .../dataforge/data/StaticDataBuilder.kt       |  63 ++++++
 .../kscience/dataforge/data/dataBuilders.kt   |  30 ++-
 .../kscience/dataforge/data/dataTransform.kt  |  28 +--
 .../dataforge/data/dataTreeBuilder.kt         |  24 ++-
 .../dataforge/data/DataTreeBuilderTest.kt     |  18 +-
 .../kscience/dataforge/data/dataFilterJvm.kt  |  20 +-
 .../kscience/dataforge/data/ActionsTest.kt    |   4 +-
 .../kscience/dataforge/workspace/Task.kt      |  43 ++--
 .../dataforge/workspace/TaskResult.kt         |   6 +-
 .../dataforge/workspace/WorkspaceBuilder.kt   |  29 ++-
 .../dataforge/workspace/taskBuilders.kt       |  82 ++++----
 .../dataforge/workspace/CachingAction.kt      |  20 +-
 .../dataforge/workspace/FileDataTree.kt       | 185 +++++++++++++++++
 .../workspace/InMemoryWorkspaceCache.kt       |   2 +-
 .../dataforge/workspace/readFileData.kt       | 188 ------------------
 .../dataforge/workspace/workspaceJvm.kt       |   4 +-
 .../dataforge/workspace/writeFileData.kt      |   8 +-
 .../workspace/CachingWorkspaceTest.kt         |  12 +-
 .../dataforge/workspace/FileDataTest.kt       |   4 +-
 .../workspace/FileWorkspaceCacheTest.kt       |   2 +-
 29 files changed, 486 insertions(+), 429 deletions(-)
 create mode 100644 dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataBuilder.kt
 create mode 100644 dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileDataTree.kt
 delete mode 100644 dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt

diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
index e8249f0b..af0aab00 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
@@ -35,16 +35,16 @@ public abstract class AbstractAction<T, R>(
      * Update part of the data set using provided data
      *
      * @param source the source data tree in case we need several data items to update
-     * @param meta the metadata used for the whole data tree
+     * @param actionMeta the metadata used for the whole data tree
      * @param updatedData an updated item
      */
     protected open suspend fun DataSink<R>.update(
         source: DataTree<T>,
-        meta: Meta,
-        updatedData: DataUpdate<T>,
+        actionMeta: Meta,
+        updateName: Name,
     ) {
         //by default regenerate the whole data set
-        putAll(generate(source, meta))
+        putAll(generate(source, actionMeta))
     }
 
     @OptIn(UnsafeKType::class)
@@ -64,8 +64,8 @@ public abstract class AbstractAction<T, R>(
         }
 
         with(updateSink) {
-            source.updates.collect { du: DataUpdate<T> ->
-                update(source, meta, du)
+            source.updates.collect {
+                update(source, meta, it)
             }
         }
     }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index fefa12d1..2e4b2ddc 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -85,8 +85,8 @@ public class MapAction<T, R>(
     }
 
     override fun DataBuilderScope<R>.generate(source: DataTree<T>, meta: Meta): Map<Name, Data<R>> = buildMap {
-        source.forEach {
-            val (name, data) = mapOne(it.name, it.data, meta)
+        source.forEach { data ->
+            val (name, data) = mapOne(data.name, data, meta)
             if (data != null) {
                 check(name !in keys) { "Data with key $name already exist in the result" }
                 put(name, data)
@@ -96,10 +96,10 @@ public class MapAction<T, R>(
 
     override suspend fun DataSink<R>.update(
         source: DataTree<T>,
-        meta: Meta,
-        updatedData: DataUpdate<T>,
+        actionMeta: Meta,
+        updateName: Name,
     ) {
-        val (name, data) = mapOne(updatedData.name, updatedData.data, meta)
+        val (name, data) = mapOne(updateName, source.read(updateName), actionMeta)
         put(name, data)
     }
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
index 73388fec..d7bacda5 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
@@ -95,7 +95,7 @@ internal class ReduceAction<T, R>(
         ReduceGroupBuilder<T, R>(meta, outputType).apply(action).buildGroups(source).forEach { group ->
             val dataFlow: Map<Name, Data<T>> = group.data.asSequence().fold(HashMap()) { acc, value ->
                 acc.apply {
-                    acc[value.name] = value.data
+                    acc[value.name] = value
                 }
             }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
index bf3284be..6dfbc7c9 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
@@ -80,7 +80,7 @@ internal class SplitAction<T, R>(
         meta: Meta
     ): Map<Name, Data<R>> = buildMap {
         source.forEach {
-            splitOne(it.name, it.data, meta).forEach { (name, data) ->
+            splitOne(it.name, it, meta).forEach { (name, data) ->
                 check(name !in keys) { "Data with key $name already exist in the result" }
                 if (data != null) {
                     put(name, data)
@@ -91,10 +91,10 @@ internal class SplitAction<T, R>(
 
     override suspend fun DataSink<R>.update(
         source: DataTree<T>,
-        meta: Meta,
-        updatedData: DataUpdate<T>,
+        actionMeta: Meta,
+        updateName: Name,
     ) {
-        putAll(splitOne(updatedData.name, updatedData.data, meta))
+        putAll(splitOne(updateName, source.read(updateName), actionMeta))
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
index 38174e50..5538cc28 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
@@ -18,23 +18,25 @@ public fun interface DataFilter {
 }
 
 
-public fun DataFilter.accepts(update: DataUpdate<*>): Boolean = accepts(update.name, update.data?.meta, update.type)
+//public fun DataFilter.accepts(update: DataUpdate<*>): Boolean = accepts(update.name, update.data?.meta, update.type)
 
-public fun <T, DU : DataUpdate<T>> Sequence<DU>.filterData(predicate: DataFilter): Sequence<DU> = filter { data ->
-    predicate.accepts(data)
-}
-
-public fun <T, DU : DataUpdate<T>> Flow<DU>.filterData(predicate: DataFilter): Flow<DU> = filter { data ->
-    predicate.accepts(data)
-}
+//public fun <T, DU : DataUpdate<T>> Sequence<DU>.filterData(predicate: DataFilter): Sequence<DU> = filter { data ->
+//    predicate.accepts(data)
+//}
+//
+//public fun <T, DU : DataUpdate<T>> Flow<DU>.filterData(predicate: DataFilter): Flow<DU> = filter { data ->
+//    predicate.accepts(data)
+//}
 
 public fun <T> DataSource<T>.filterData(
-    predicate: DataFilter,
+    dataFilter: DataFilter,
 ): DataSource<T> = object : DataSource<T> {
     override val dataType: KType get() = this@filterData.dataType
 
     override fun read(name: Name): Data<T>? =
-        this@filterData.read(name)?.takeIf { predicate.accepts(name, it.meta, it.type) }
+        this@filterData.read(name)?.takeIf {
+            dataFilter.accepts(name, it.meta, it.type)
+        }
 }
 
 /**
@@ -43,8 +45,12 @@ public fun <T> DataSource<T>.filterData(
 public fun <T> ObservableDataSource<T>.filterData(
     predicate: DataFilter,
 ): ObservableDataSource<T> = object : ObservableDataSource<T> {
-    override val updates: Flow<DataUpdate<T>>
-        get() = this@filterData.updates.filter { predicate.accepts(it) }
+
+    override val updates: Flow<Name>
+        get() = this@filterData.updates.filter {
+            val data = read(it)
+            predicate.accepts(it, data?.meta, data?.type ?: dataType)
+        }
 
     override val dataType: KType get() = this@filterData.dataType
 
@@ -70,8 +76,11 @@ internal class FilteredDataTree<T>(
             ?.filter { !it.value.isEmpty() }
             ?: emptyMap()
 
-    override val updates: Flow<DataUpdate<T>>
-        get() = source.updates.filter { filter.accepts(it) }
+    override val updates: Flow<Name>
+        get() = source.updates.filter {
+            val data = read(it)
+            filter.accepts(it, data?.meta, data?.type ?: dataType)
+        }
 }
 
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
index ccd37514..c8a0f2a7 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
@@ -8,14 +8,17 @@ import space.kscience.dataforge.names.*
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
-public interface DataBuilderScope<in T>{
-    public companion object: DataBuilderScope<Nothing>
+public interface DataBuilderScope<in T> {
+    public companion object : DataBuilderScope<Nothing>
 }
 
 @Suppress("UNCHECKED_CAST")
 public fun <T> DataBuilderScope(): DataBuilderScope<T> = DataBuilderScope as DataBuilderScope<T>
 
-public fun interface DataSink<in T>: DataBuilderScope<T> {
+/**
+ * Asynchronous data sink
+ */
+public fun interface DataSink<in T> : DataBuilderScope<T> {
     /**
      * Put data and notify listeners if needed
      */
@@ -59,7 +62,7 @@ private class MutableDataTreeRoot<T>(
 ) : MutableDataTree<T> {
 
     override val items = HashMap<NameToken, MutableDataTree<T>>()
-    override val updates = MutableSharedFlow<DataUpdate<T>>(extraBufferCapacity = 100)
+    override val updates = MutableSharedFlow<Name>(extraBufferCapacity = 100)
 
     inner class MutableDataTreeBranch(val branchName: Name) : MutableDataTree<T> {
 
@@ -67,10 +70,8 @@ private class MutableDataTreeRoot<T>(
 
         override val items = HashMap<NameToken, MutableDataTree<T>>()
 
-        override val updates: Flow<DataUpdate<T>> = this@MutableDataTreeRoot.updates.mapNotNull { update ->
-            update.name.removeFirstOrNull(branchName)?.let {
-                DataUpdate(update.data?.type ?: dataType, it, update.data)
-            }
+        override val updates: Flow<Name> = this@MutableDataTreeRoot.updates.mapNotNull { update ->
+            update.removeFirstOrNull(branchName)
         }
         override val dataType: KType get() = this@MutableDataTreeRoot.dataType
 
@@ -80,7 +81,7 @@ private class MutableDataTreeRoot<T>(
 
         override suspend fun put(token: NameToken, data: Data<T>?) {
             this.data = data
-            this@MutableDataTreeRoot.updates.emit(DataUpdate(data?.type ?: dataType, branchName + token, data))
+            this@MutableDataTreeRoot.updates.emit(branchName + token)
         }
     }
 
@@ -92,7 +93,7 @@ private class MutableDataTreeRoot<T>(
 
     override suspend fun put(token: NameToken, data: Data<T>?) {
         this.data = data
-        updates.emit(DataUpdate(data?.type ?: dataType, token.asName(), data))
+        updates.emit(token.asName())
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
index dfde4e0b..7ee87180 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
@@ -1,8 +1,6 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.flow.Flow
-import kotlinx.coroutines.flow.emptyFlow
-import kotlinx.coroutines.flow.first
+import kotlinx.coroutines.flow.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.names.*
 import kotlin.contracts.contract
@@ -31,13 +29,17 @@ public interface DataSource<out T> {
 public interface ObservableDataSource<out T> : DataSource<T> {
 
     /**
-     * Flow updates made to the data. Updates are considered critical. The producer will suspend unless all updates are consumed.
+     * Names of updated elements.
+     * Data updates with the same names could be glued together.
+     *
+     * Updates are considered critical.
+     * The producer will suspend unless all updates are consumed.
      */
-    public val updates: Flow<DataUpdate<T>>
+    public val updates: Flow<Name>
 }
 
 public suspend fun <T> ObservableDataSource<T>.awaitData(name: Name): Data<T> {
-    return read(name) ?: updates.first { it.name == name && it.data != null }.data!!
+    return read(name) ?: updates.filter { it == name }.map { read(name) }.filterNotNull().first()
 }
 
 public suspend fun <T> ObservableDataSource<T>.awaitData(name: String): Data<T> =
@@ -59,7 +61,7 @@ public interface DataTree<out T> : ObservableDataSource<T> {
     /**
      * Flow updates made to the data
      */
-    override val updates: Flow<DataUpdate<T>>
+    override val updates: Flow<Name>
 
     public companion object {
         private object EmptyDataTree : DataTree<Nothing> {
@@ -68,7 +70,7 @@ public interface DataTree<out T> : ObservableDataSource<T> {
             override val dataType: KType = typeOf<Unit>()
 
             override fun read(name: Name): Data<Nothing>? = null
-            override val updates: Flow<DataUpdate<Nothing>> get() = emptyFlow()
+            override val updates: Flow<Name> get() = emptyFlow()
         }
 
         public val EMPTY: DataTree<Nothing> = EmptyDataTree
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
index 95ddbbf7..e54710b1 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
@@ -32,7 +32,7 @@ public interface Goal<out T> {
     public companion object
 }
 
-public fun Goal<*>.launch(coroutineScope: CoroutineScope): Job = async(coroutineScope)
+public fun Goal<*>.launchIn(coroutineScope: CoroutineScope): Job = async(coroutineScope)
 
 public suspend fun <T> Goal<T>.await(): T = coroutineScope { async(this).await() }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
index 54a9715c..b20736ae 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
@@ -3,43 +3,16 @@ package space.kscience.dataforge.data
 import space.kscience.dataforge.meta.isEmpty
 import space.kscience.dataforge.misc.Named
 import space.kscience.dataforge.names.Name
-import kotlin.reflect.KType
-
-/**
- * An interface implementing a data update event.
- *
- * If [data] is null, then corresponding element should be removed.
- */
-public interface DataUpdate<out T> : Named {
-    public val type: KType
-    override val name: Name
-    public val data: Data<T>?
-}
-
-public fun <T> DataUpdate(type: KType, name: Name, data: Data<T>?): DataUpdate<T> = object : DataUpdate<T> {
-    override val type: KType = type
-    override val name: Name = name
-    override val data: Data<T>? = data
-
-    override fun toString(): String {
-        return "DataUpdate(type=$type, name=$name, data=$data)"
-    }
-
-}
 
 /**
  * A data coupled to a name.
  */
-public interface NamedData<out T> : DataUpdate<T>, Data<T> {
-    override val data: Data<T>
-}
+public interface NamedData<out T> : Data<T>, Named
 
-public operator fun NamedData<*>.component1(): Name = name
-public operator fun <T> NamedData<T>.component2(): Data<T> = data
 
 private class NamedDataImpl<T>(
     override val name: Name,
-    override val data: Data<T>,
+    val data: Data<T>,
 ) : Data<T> by data, NamedData<T> {
     override fun toString(): String = buildString {
         append("NamedData(name=\"$name\"")
@@ -54,7 +27,7 @@ private class NamedDataImpl<T>(
 }
 
 public fun <T> Data<T>.named(name: Name): NamedData<T> = if (this is NamedData) {
-    NamedDataImpl(name, this.data)
+    NamedDataImpl(name, this)
 } else {
     NamedDataImpl(name, this)
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataBuilder.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataBuilder.kt
new file mode 100644
index 00000000..0c1fe0b9
--- /dev/null
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataBuilder.kt
@@ -0,0 +1,63 @@
+package space.kscience.dataforge.data
+
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.MutableMeta
+import space.kscience.dataforge.misc.UnsafeKType
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.parseAsName
+import space.kscience.dataforge.names.plus
+import kotlin.reflect.KType
+import kotlin.reflect.typeOf
+
+
+public fun interface StaticDataBuilder<T> : DataBuilderScope<T> {
+    public fun put(name: Name, data: Data<T>)
+}
+
+private class DataMapBuilder<T> : StaticDataBuilder<T> {
+    val map = mutableMapOf<Name, Data<T>>()
+
+    override fun put(name: Name, data: Data<T>) {
+        if (map.containsKey(name)) {
+            error("Duplicate key '$name'")
+        } else {
+            map.put(name, data)
+        }
+    }
+}
+
+public fun <T> StaticDataBuilder<T>.put(name: String, data: Data<T>) {
+    put(name.parseAsName(), data)
+}
+
+public inline fun <T, reified T1 : T> StaticDataBuilder<T>.putValue(
+    name: String,
+    value: T1,
+    metaBuilder: MutableMeta.() -> Unit = {}
+) {
+    put(name, Data(value, Meta(metaBuilder)))
+}
+
+public fun <T> StaticDataBuilder<T>.putAll(prefix: Name, block: StaticDataBuilder<T>.() -> Unit) {
+    val map = DataMapBuilder<T>().apply(block).map
+    map.forEach { (name, data) ->
+        put(prefix + name, data)
+    }
+}
+
+public fun <T> StaticDataBuilder<T>.putAll(prefix: String, block: StaticDataBuilder<T>.() -> Unit) =
+    putAll(prefix.parseAsName(), block)
+
+public fun <T> StaticDataBuilder<T>.putAll(prefix: String, tree: DataTree<T>) {
+    tree.forEach { data ->
+        put(prefix + data.name, data)
+    }
+}
+
+@UnsafeKType
+public fun <T> DataTree.Companion.static(type: KType, block: StaticDataBuilder<T>.() -> Unit): DataTree<T> =
+    DataMapBuilder<T>().apply(block).map.asTree(type)
+
+@OptIn(UnsafeKType::class)
+public inline fun <reified T> DataTree.Companion.static(noinline block: StaticDataBuilder<T>.() -> Unit): DataTree<T> =
+    static(typeOf<T>(), block)
\ No newline at end of file
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
index b3433616..640e8541 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
@@ -9,7 +9,7 @@ import space.kscience.dataforge.names.plus
 
 
 public suspend fun <T> DataSink<T>.put(value: NamedData<T>) {
-    put(value.name, value.data)
+    put(value.name, value)
 }
 
 public inline fun <T> DataSink<T>.putAll(
@@ -89,7 +89,7 @@ public suspend inline fun <reified T> DataSink<T>.putValue(
 
 public suspend fun <T> DataSink<T>.putAll(sequence: Sequence<NamedData<T>>) {
     sequence.forEach {
-        put(it.name, it.data)
+        put(it)
     }
 }
 
@@ -99,19 +99,27 @@ public suspend fun <T> DataSink<T>.putAll(map: Map<Name, Data<T>?>) {
     }
 }
 
-public suspend fun <T> DataSink<T>.putAll(tree: DataTree<T>) {
-    putAll(tree.asSequence())
-}
+//public suspend fun <T> DataSink<T>.putAll(tree: DataTree<T>) {
+//    putAll(tree.asSequence())
+//}
 
 /**
- * Copy given data set and mirror its changes to this [DataSink]. Suspends indefinitely.
+ * Suspends indefinitely.
  */
-public suspend fun <T : Any> DataSink<T>.putAllAndWatch(
-    source: DataTree<T>,
-    branchName: Name = Name.EMPTY,
+public suspend fun <T : Any> DataSink<T>.watch(
+    source: ObservableDataSource<T>,
+    prefix: Name = Name.EMPTY,
 ) {
-    putAll(branchName, source)
+//    putAll(branchName, source)
     source.updates.collect {
-        put(branchName + it.name, it.data)
+        put(prefix + it, source.read(it))
     }
+}
+
+public suspend fun <T : Any> MutableDataTree<T>.putAllAndWatch(
+    source: DataTree<T>,
+    prefix: Name = Name.EMPTY,
+) {
+    putAll(prefix, source)
+    watch(source,prefix)
 }
\ No newline at end of file
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
index 80105fc6..a8d5ac20 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
@@ -200,40 +200,44 @@ public inline fun <T, reified R> Iterable<NamedData<T>>.foldNamedToData(
 
 
 @UnsafeKType
-public fun <T, R> DataTree<T>.map(
+public fun <T, R> DataTree<T>.transformEach(
     outputType: KType,
     scope: CoroutineScope,
-    metaTransform: MutableMeta.() -> Unit = {},
+    metaTransform: MutableMeta.(name: Name) -> Unit = {},
     compute: suspend (NamedValueWithMeta<T>) -> R,
 ): DataTree<R> = DataTree<R>(
     outputType,
     scope,
     initialData = asSequence().associate { namedData: NamedData<T> ->
-        val newMeta = namedData.meta.toMutableMeta().apply(metaTransform).seal()
+        val newMeta = namedData.meta.toMutableMeta().apply {
+            metaTransform(namedData.name)
+        }.seal()
         val newData = Data(outputType, newMeta, scope.coroutineContext, listOf(namedData)) {
             compute(namedData.awaitWithMeta())
         }
         namedData.name to newData
     }
 ) {
-    updates.collect { update ->
-        val data: Data<T>? = update.data
-        if (data == null) put(update.name, null) else {
-            val newMeta = data.meta.toMutableMeta().apply(metaTransform).seal()
+    updates.collect { name ->
+        val data: Data<T>? = read(name)
+        if (data == null) put(name, null) else {
+            val newMeta = data.meta.toMutableMeta().apply {
+                metaTransform(name)
+            }.seal()
             val d = Data(outputType, newMeta, scope.coroutineContext, listOf(data)) {
-                compute(NamedValueWithMeta(update.name, data.await(), data.meta))
+                compute(NamedValueWithMeta(name, data.await(), data.meta))
             }
-            put(update.name, d)
+            put(name, d)
         }
     }
 }
 
 @OptIn(UnsafeKType::class)
-public inline fun <T, reified R> DataTree<T>.map(
+public inline fun <T, reified R> DataTree<T>.transformEach(
     scope: CoroutineScope,
-    noinline metaTransform: MutableMeta.() -> Unit = {},
+    noinline metaTransform: MutableMeta.(name: Name) -> Unit = {},
     noinline block: suspend (NamedValueWithMeta<T>) -> R,
-): DataTree<R> = map(typeOf<R>(), scope, metaTransform, block)
+): DataTree<R> = transformEach(typeOf<R>(), scope, metaTransform, block)
 
 public inline fun <T> DataTree<T>.forEach(block: (NamedData<T>) -> Unit) {
     asSequence().forEach(block)
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
index 20afcc76..664f9904 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
@@ -17,7 +17,7 @@ import kotlin.reflect.typeOf
 private class FlatDataTree<T>(
     override val dataType: KType,
     private val dataSet: Map<Name, Data<T>>,
-    private val sourceUpdates: SharedFlow<DataUpdate<T>>,
+    private val sourceUpdates: SharedFlow<Name>,
     private val prefix: Name,
 ) : DataTree<T> {
     override val data: Data<T>? get() = dataSet[prefix]
@@ -29,10 +29,9 @@ private class FlatDataTree<T>(
 
     override fun read(name: Name): Data<T>? = dataSet[prefix + name]
 
-    override val updates: Flow<DataUpdate<T>> =
-        sourceUpdates.mapNotNull { update ->
-            update.name.removeFirstOrNull(prefix)?.let { DataUpdate(dataType, it, update.data) }
-        }
+    override val updates: Flow<Name> = sourceUpdates.mapNotNull { update ->
+        update.removeFirstOrNull(prefix)
+    }
 }
 
 /**
@@ -47,7 +46,7 @@ private class DataTreeBuilder<T>(
 
     private val mutex = Mutex()
 
-    private val updatesFlow = MutableSharedFlow<DataUpdate<T>>()
+    private val updatesFlow = MutableSharedFlow<Name>()
 
 
     override suspend fun put(name: Name, data: Data<T>?) {
@@ -58,7 +57,7 @@ private class DataTreeBuilder<T>(
                 map[name] = data
             }
         }
-        updatesFlow.emit(DataUpdate(data?.type ?: type, name, data))
+        updatesFlow.emit(name)
     }
 
     public fun build(): DataTree<T> = FlatDataTree(type, map, updatesFlow, Name.EMPTY)
@@ -74,7 +73,7 @@ public fun <T> DataTree(
     initialData: Map<Name, Data<T>> = emptyMap(),
     updater: suspend DataSink<T>.() -> Unit,
 ): DataTree<T> = DataTreeBuilder<T>(dataType, initialData).apply {
-    scope.launch{
+    scope.launch {
         updater()
     }
 }.build()
@@ -89,6 +88,13 @@ public inline fun <reified T> DataTree(
     noinline updater: suspend DataSink<T>.() -> Unit,
 ): DataTree<T> = DataTree(typeOf<T>(), scope, initialData, updater)
 
+@UnsafeKType
+public fun <T> DataTree(type: KType, data: Map<Name, Data<T>>): DataTree<T> =
+    DataTreeBuilder(type, data).build()
+
+@OptIn(UnsafeKType::class)
+public inline fun <reified T> DataTree(data: Map<Name, Data<T>>): DataTree<T> =
+    DataTree(typeOf<T>(), data)
 
 /**
  * Represent this flat data map as a [DataTree] without copying it
@@ -106,7 +112,7 @@ public inline fun <reified T> Map<Name, Data<T>>.asTree(): DataTree<T> = asTree(
 
 @UnsafeKType
 public fun <T> Sequence<NamedData<T>>.toTree(type: KType): DataTree<T> =
-    DataTreeBuilder(type, associate { it.name to it.data }).build()
+    DataTreeBuilder(type, associate { it.name to it }).build()
 
 
 /**
diff --git a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
index 1f78b36c..1d4d2ea4 100644
--- a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
+++ b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
@@ -12,7 +12,7 @@ import kotlin.time.Duration.Companion.milliseconds
 internal class DataTreeBuilderTest {
     @Test
     fun testTreeBuild() = runTest(timeout = 500.milliseconds) {
-        val node = DataTree<Any> {
+        val node = DataTree.static<Any> {
             putAll("primary") {
                 putValue("a", "a")
                 putValue("b", "b")
@@ -29,20 +29,18 @@ internal class DataTreeBuilderTest {
 
     @Test
     fun testDataUpdate() = runTest(timeout = 500.milliseconds) {
-        val updateData = DataTree<Any> {
-            putAll("update") {
-                put("a", Data.wrapValue("a"))
-                put("b", Data.wrapValue("b"))
-            }
+        val updateData = DataTree.static<Any> {
+            put("a", Data.wrapValue("a"))
+            put("b", Data.wrapValue("b"))
         }
 
-        val node = DataTree<Any> {
+        val node = DataTree.static<Any> {
             putAll("primary") {
                 putValue("a", "a")
                 putValue("b", "b")
             }
             putValue("root", "root")
-            putAll(updateData)
+            putAll("update", updateData)
         }
 
         assertEquals("a", node["update.a"]?.await())
@@ -56,11 +54,11 @@ internal class DataTreeBuilderTest {
         val subNode = MutableDataTree<Int>()
 
         val rootNode = MutableDataTree<Int>() {
-            job = launch {  putAllAndWatch(subNode,"sub".asName())}
+            job = launch { putAllAndWatch(subNode, "sub".asName()) }
         }
 
         repeat(10) {
-            subNode.updateValue("value[$it]", it)
+            subNode.putValue("value[$it]", it)
         }
 
         assertEquals(9, subNode.awaitData("value[9]").await())
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
index 0cc81f7a..ba7fdadf 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
@@ -1,7 +1,5 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.flow.Flow
-import kotlinx.coroutines.flow.filter
 import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
 import kotlin.reflect.KType
@@ -22,15 +20,15 @@ private fun <R> Data<*>.castOrNull(type: KType): Data<R>? =
         }
     }
 
-@Suppress("UNCHECKED_CAST")
-@DFInternal
-public fun <R> Sequence<DataUpdate<*>>.filterByDataType(type: KType): Sequence<NamedData<R>> =
-    filter { it.type.isSubtypeOf(type) } as Sequence<NamedData<R>>
-
-@Suppress("UNCHECKED_CAST")
-@DFInternal
-public fun <R> Flow<DataUpdate<*>>.filterByDataType(type: KType): Flow<NamedData<R>> =
-    filter { it.type.isSubtypeOf(type) } as Flow<NamedData<R>>
+//@Suppress("UNCHECKED_CAST")
+//@DFInternal
+//public fun <R> Sequence<DataUpdate<*>>.filterByDataType(type: KType): Sequence<NamedData<R>> =
+//    filter { it.type.isSubtypeOf(type) } as Sequence<NamedData<R>>
+//
+//@Suppress("UNCHECKED_CAST")
+//@DFInternal
+//public fun <R> Flow<DataUpdate<*>>.filterByDataType(type: KType): Flow<NamedData<R>> =
+//    filter { it.type.isSubtypeOf(type) } as Flow<NamedData<R>>
 
 /**
  * Select all data matching given type and filters. Does not modify paths
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index e9ec343c..6828b674 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -19,7 +19,7 @@ internal class ActionsTest {
             result { it + 1 }
         }
 
-        val data: DataTree<Int> = DataTree {
+        val data: DataTree<Int> = DataTree.static {
             repeat(10) {
                 putValue(it.toString(), it)
             }
@@ -42,7 +42,7 @@ internal class ActionsTest {
         val result: DataTree<Int> = plusOne(source)
 
         repeat(10) {
-            source.updateValue(it.toString(), it)
+            source.putValue(it.toString(), it)
         }
 
         assertEquals(2, result.awaitData("1").await())
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
index a1a754a4..f1e9130a 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
@@ -1,9 +1,9 @@
 package space.kscience.dataforge.workspace
 
 import kotlinx.coroutines.withContext
-import space.kscience.dataforge.data.DataSink
+import space.kscience.dataforge.data.DataBuilderScope
+import space.kscience.dataforge.data.DataTree
 import space.kscience.dataforge.data.GoalExecutionRestriction
-import space.kscience.dataforge.data.MutableDataTree
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MetaReader
 import space.kscience.dataforge.meta.MetaRepr
@@ -62,12 +62,12 @@ public interface TaskWithSpec<T, C : Any> : Task<T> {
 //    block: C.() -> Unit = {},
 //): TaskResult<T> = execute(workspace, taskName, spec(block))
 
-public class TaskResultBuilder<T>(
+public class TaskResultScope<T>(
+    public val resultType: KType,
     public val workspace: Workspace,
     public val taskName: Name,
     public val taskMeta: Meta,
-    private val dataSink: DataSink<T>,
-) : DataSink<T> by dataSink
+) : DataBuilderScope<T>
 
 /**
  * Create a [Task] that composes a result using [builder]. Only data from the workspace could be used.
@@ -77,10 +77,11 @@ public class TaskResultBuilder<T>(
  * @param descriptor of meta accepted by this task
  * @param builder for resulting data set
  */
+@UnsafeKType
 public fun <T : Any> Task(
     resultType: KType,
     descriptor: MetaDescriptor? = null,
-    builder: suspend TaskResultBuilder<T>.() -> Unit,
+    builder: suspend TaskResultScope<T>.() -> DataTree<T>,
 ): Task<T> = object : Task<T> {
 
     override val descriptor: MetaDescriptor? = descriptor
@@ -89,23 +90,19 @@ public fun <T : Any> Task(
         workspace: Workspace,
         taskName: Name,
         taskMeta: Meta,
-    ): TaskResult<T> {
+    ): TaskResult<T> = withContext(GoalExecutionRestriction() + workspace.goalLogger) {
         //TODO use safe builder and check for external data on add and detects cycles
-        @OptIn(UnsafeKType::class)
-        val dataset = MutableDataTree<T>(resultType).apply {
-            TaskResultBuilder(workspace, taskName, taskMeta, this).apply {
-                withContext(GoalExecutionRestriction() + workspace.goalLogger) {
-                    builder()
-                }
-            }
-        }
-        return workspace.wrapResult(dataset, taskName, taskMeta)
+        val dataset = TaskResultScope<T>(resultType, workspace, taskName, taskMeta).builder()
+
+
+        workspace.wrapResult(dataset, taskName, taskMeta)
     }
 }
 
+@OptIn(UnsafeKType::class)
 public inline fun <reified T : Any> Task(
     descriptor: MetaDescriptor? = null,
-    noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
+    noinline builder: suspend TaskResultScope<T>.() -> DataTree<T>,
 ): Task<T> = Task(typeOf<T>(), descriptor, builder)
 
 
@@ -117,13 +114,11 @@ public inline fun <reified T : Any> Task(
  * @param specification a specification for task configuration
  * @param builder for resulting data set
  */
-
-
 @Suppress("FunctionName")
 public fun <T : Any, C : MetaRepr> Task(
     resultType: KType,
     specification: MetaReader<C>,
-    builder: suspend TaskResultBuilder<T>.(C) -> Unit,
+    builder: suspend TaskResultScope<T>.(C) -> DataTree<T>,
 ): TaskWithSpec<T, C> = object : TaskWithSpec<T, C> {
     override val spec: MetaReader<C> = specification
 
@@ -134,15 +129,15 @@ public fun <T : Any, C : MetaRepr> Task(
     ): TaskResult<T> = withContext(GoalExecutionRestriction() + workspace.goalLogger) {
         //TODO use safe builder and check for external data on add and detects cycles
         val taskMeta = configuration.toMeta()
+
         @OptIn(UnsafeKType::class)
-        val dataset = MutableDataTree<T>(resultType).apply {
-            TaskResultBuilder(workspace, taskName, taskMeta, this).apply { builder(configuration) }
-        }
+        val dataset = TaskResultScope<T>(resultType, workspace, taskName, taskMeta).builder(configuration)
+
         workspace.wrapResult(dataset, taskName, taskMeta)
     }
 }
 
 public inline fun <reified T : Any, C : MetaRepr> Task(
     specification: MetaReader<C>,
-    noinline builder: suspend TaskResultBuilder<T>.(C) -> Unit,
+    noinline builder: suspend TaskResultScope<T>.(C) -> DataTree<T>,
 ): Task<T> = Task(typeOf<T>(), specification, builder)
\ No newline at end of file
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
index d4d4291a..aff438ca 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
@@ -6,7 +6,7 @@ import kotlinx.coroutines.joinAll
 import kotlinx.coroutines.launch
 import space.kscience.dataforge.data.DataTree
 import space.kscience.dataforge.data.asSequence
-import space.kscience.dataforge.data.launch
+import space.kscience.dataforge.data.launchIn
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.names.Name
 
@@ -33,9 +33,9 @@ public fun <T> Workspace.wrapResult(data: DataTree<T>, taskName: Name, taskMeta:
  * Start computation for all data elements of this node.
  * The resulting [Job] is completed only when all of them are completed.
  */
-public fun TaskResult<*>.launch(scope: CoroutineScope): Job {
+public fun TaskResult<*>.launchIn(scope: CoroutineScope): Job {
     val jobs = asSequence().map {
-        it.data.launch(scope)
+        it.launchIn(scope)
     }.toList()
     return scope.launch { jobs.joinAll() }
 }
\ No newline at end of file
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
index 013c0171..38d90b31 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
@@ -4,20 +4,17 @@ import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.ContextBuilder
 import space.kscience.dataforge.context.Global
-import space.kscience.dataforge.data.DataSink
 import space.kscience.dataforge.data.DataTree
-import space.kscience.dataforge.data.MutableDataTree
+import space.kscience.dataforge.data.StaticDataBuilder
+import space.kscience.dataforge.data.static
 import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
 import space.kscience.dataforge.misc.DFBuilder
-import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
-import kotlin.collections.set
 import kotlin.properties.PropertyDelegateProvider
 import kotlin.properties.ReadOnlyProperty
-import kotlin.reflect.typeOf
 
 public data class TaskReference<T>(public val taskName: Name, public val task: Task<T>) : DataSelector<T> {
 
@@ -42,7 +39,7 @@ public interface TaskContainer {
 public inline fun <reified T : Any> TaskContainer.registerTask(
     name: String,
     descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
-    noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
+    noinline builder: suspend TaskResultScope<T>.() -> DataTree<T>,
 ): Unit = registerTask(Name.parse(name), Task(MetaDescriptor(descriptorBuilder), builder))
 
 /**
@@ -51,7 +48,7 @@ public inline fun <reified T : Any> TaskContainer.registerTask(
 public inline fun <reified T : Any> TaskContainer.buildTask(
     name: String,
     descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
-    noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
+    noinline builder: suspend TaskResultScope<T>.() -> DataTree<T>,
 ): TaskReference<T> {
     val theName = Name.parse(name)
     val descriptor = MetaDescriptor(descriptorBuilder)
@@ -62,7 +59,7 @@ public inline fun <reified T : Any> TaskContainer.buildTask(
 
 public inline fun <reified T : Any> TaskContainer.task(
     descriptor: MetaDescriptor,
-    noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
+    noinline builder: suspend TaskResultScope<T>.() -> DataTree<T>,
 ): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> = PropertyDelegateProvider { _, property ->
     val taskName = Name.parse(property.name)
     val task = Task(descriptor, builder)
@@ -75,7 +72,7 @@ public inline fun <reified T : Any> TaskContainer.task(
  */
 public inline fun <reified T : Any, C : MetaRepr> TaskContainer.task(
     specification: MetaReader<C>,
-    noinline builder: suspend TaskResultBuilder<T>.(C) -> Unit,
+    noinline builder: suspend TaskResultScope<T>.(C) -> DataTree<T>,
 ): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> = PropertyDelegateProvider { _, property ->
     val taskName = Name.parse(property.name)
     val task = Task(specification, builder)
@@ -88,7 +85,7 @@ public inline fun <reified T : Any, C : MetaRepr> TaskContainer.task(
  */
 public inline fun <reified T : Any> TaskContainer.task(
     noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
-    noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
+    noinline builder: suspend TaskResultScope<T>.() -> DataTree<T>,
 ): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> =
     task(MetaDescriptor(descriptorBuilder), builder)
 
@@ -102,15 +99,15 @@ public inline fun <T : Any, reified R : Any> TaskContainer.action(
     noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
 ): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<R>>> =
     task(MetaDescriptor(descriptorBuilder)) {
-        result(action.execute(from(selector), taskMeta.copy(metaTransform), workspace))
+        action.execute(from(selector), taskMeta.copy(metaTransform), workspace)
     }
 
 public class WorkspaceBuilder(
     private val parentContext: Context = Global,
 ) : TaskContainer {
     private var context: Context? = null
-    @OptIn(UnsafeKType::class)
-    private val data = MutableDataTree<Any?>(typeOf<Any?>())
+
+    private var data: DataTree<Any?>? = null
     private val targets: HashMap<String, Meta> = HashMap()
     private val tasks = HashMap<Name, Task<*>>()
     private var cache: WorkspaceCache? = null
@@ -125,8 +122,8 @@ public class WorkspaceBuilder(
     /**
      * Define intrinsic data for the workspace
      */
-    public fun data(builder: DataSink<Any?>.() -> Unit) {
-        data.apply(builder)
+    public fun data(builder: StaticDataBuilder<Any?>.() -> Unit) {
+        data = DataTree.static(builder)
     }
 
     /**
@@ -152,7 +149,7 @@ public class WorkspaceBuilder(
         val postProcess: suspend (TaskResult<*>) -> TaskResult<*> = { result ->
             cache?.cache(result) ?: result
         }
-        return WorkspaceImpl(context ?: parentContext, data, targets, tasks, postProcess)
+        return WorkspaceImpl(context ?: parentContext, data ?: DataTree.EMPTY, targets, tasks, postProcess)
     }
 }
 
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
index 15565995..49b485e5 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
@@ -1,12 +1,13 @@
 package space.kscience.dataforge.workspace
 
-import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.context.PluginFactory
 import space.kscience.dataforge.data.DataTree
-import space.kscience.dataforge.data.forEach
-import space.kscience.dataforge.data.putAll
-import space.kscience.dataforge.data.transform
-import space.kscience.dataforge.meta.*
+import space.kscience.dataforge.data.NamedValueWithMeta
+import space.kscience.dataforge.data.transformEach
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.MutableMeta
+import space.kscience.dataforge.meta.copy
+import space.kscience.dataforge.meta.remove
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.plus
@@ -14,7 +15,7 @@ import space.kscience.dataforge.names.plus
 /**
  * A task meta without a node corresponding to the task itself (removing a node with name of the task).
  */
-public val TaskResultBuilder<*>.defaultDependencyMeta: Meta
+public val TaskResultScope<*>.defaultDependencyMeta: Meta
     get() = taskMeta.copy {
         remove(taskName)
     }
@@ -25,12 +26,12 @@ public val TaskResultBuilder<*>.defaultDependencyMeta: Meta
  * @param selector a workspace data selector. Could be either task selector or initial data selector.
  * @param dependencyMeta meta used for selector. The same meta is used for caching. By default, uses [defaultDependencyMeta].
  */
-public suspend fun <T> TaskResultBuilder<*>.from(
+public suspend fun <T> TaskResultScope<*>.from(
     selector: DataSelector<T>,
     dependencyMeta: Meta = defaultDependencyMeta,
 ): DataTree<T> = selector.select(workspace, dependencyMeta)
 
-public suspend inline fun <T, reified P : WorkspacePlugin> TaskResultBuilder<*>.from(
+public suspend inline fun <T, reified P : WorkspacePlugin> TaskResultScope<*>.from(
     plugin: P,
     dependencyMeta: Meta = defaultDependencyMeta,
     selectorBuilder: P.() -> TaskReference<T>,
@@ -50,7 +51,7 @@ public suspend inline fun <T, reified P : WorkspacePlugin> TaskResultBuilder<*>.
  * @param dependencyMeta meta used for selector. The same meta is used for caching. By default, uses [defaultDependencyMeta].
  * @param selectorBuilder a builder of task from the plugin.
  */
-public suspend inline fun <reified T, reified P : WorkspacePlugin> TaskResultBuilder<*>.from(
+public suspend inline fun <reified T, reified P : WorkspacePlugin> TaskResultScope<*>.from(
     pluginFactory: PluginFactory<P>,
     dependencyMeta: Meta = defaultDependencyMeta,
     selectorBuilder: P.() -> TaskReference<T>,
@@ -64,7 +65,7 @@ public suspend inline fun <reified T, reified P : WorkspacePlugin> TaskResultBui
     return res as TaskResult<T>
 }
 
-public val TaskResultBuilder<*>.allData: DataSelector<*>
+public val TaskResultScope<*>.allData: DataSelector<*>
     get() = DataSelector { workspace, _ -> workspace.data }
 
 /**
@@ -77,43 +78,38 @@ public val TaskResultBuilder<*>.allData: DataSelector<*>
  * @param action process individual data asynchronously.
  */
 @DFExperimental
-public suspend inline fun <T, reified R> TaskResultBuilder<R>.transformEach(
+public suspend inline fun <T, reified R> TaskResultScope<R>.transformEach(
     selector: DataSelector<T>,
     dependencyMeta: Meta = defaultDependencyMeta,
-    dataMetaTransform: MutableMeta.(name: Name) -> Unit = {},
-    crossinline action: suspend (arg: T, name: Name, meta: Meta) -> R,
-) {
-    from(selector, dependencyMeta).forEach { data ->
-        val meta = data.meta.toMutableMeta().apply {
-            taskMeta[taskName]?.let { taskName.put(it) }
-            dataMetaTransform(data.name)
-        }
-
-        val res = data.transform(meta, workspace.context.coroutineContext) {
-            action(it, data.name, meta)
-        }
-
-        put(data.name, res)
+    crossinline dataMetaTransform: MutableMeta.(name: Name) -> Unit = {},
+    crossinline action: suspend (NamedValueWithMeta<T>) -> R,
+): DataTree<R> = from(selector, dependencyMeta).transformEach<T, R>(
+    workspace.context,
+    metaTransform = { name ->
+        taskMeta[taskName]?.let { taskName put it }
+        dataMetaTransform(name)
     }
-}
-
-/**
- * Set given [dataSet] as a task result.
- */
-public fun <T> TaskResultBuilder<T>.result(dataSet: DataTree<T>) {
-    this.putAll(dataSet)
-}
-
-/**
- * Use provided [action] to fill the result
- */
-@DFExperimental
-public suspend inline fun <T, reified R> TaskResultBuilder<R>.actionFrom(
-    selector: DataSelector<T>,
-    action: Action<T, R>,
-    dependencyMeta: Meta = defaultDependencyMeta,
 ) {
-    putAll(action.execute(from(selector, dependencyMeta), dependencyMeta, workspace))
+    action(it)
 }
 
+///**
+// * Set given [dataSet] as a task result.
+// */
+//public fun <T> TaskResultBuilder<T>.result(dataSet: DataTree<T>) {
+//    putAll(dataSet)
+//}
+
+///**
+// * Use provided [action] to fill the result
+// */
+//@DFExperimental
+//public suspend inline fun <T, reified R> TaskResultScope<R>.actionFrom(
+//    selector: DataSelector<T>,
+//    action: Action<T, R>,
+//    dependencyMeta: Meta = defaultDependencyMeta,
+//) {
+//    putAll(action.execute(from(selector, dependencyMeta), dependencyMeta, workspace))
+//}
+
 
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt
index 5f88ab74..9fc91e33 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt
@@ -3,17 +3,25 @@ package space.kscience.dataforge.workspace
 import space.kscience.dataforge.actions.AbstractAction
 import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.names.Name
 import kotlin.reflect.KType
 
-internal class CachingAction<T>(type: KType, private val caching: (NamedData<T>) -> NamedData<T>) :
-    AbstractAction<T, T>(type) {
-    override fun DataSink<T>.generate(source: DataTree<T>, meta: Meta) {
+internal class CachingAction<T>(
+    type: KType, private val caching: (NamedData<T>) -> NamedData<T>
+) : AbstractAction<T, T>(type) {
+
+    override fun DataBuilderScope<T>.generate(
+        source: DataTree<T>,
+        meta: Meta
+    ): Map<Name, Data<T>> = buildMap {
         source.forEach {
-            put(caching(it))
+            val cached = caching(it)
+            put(cached.name, cached)
         }
     }
 
-    override suspend fun DataSink<T>.update(source: DataTree<T>, meta: Meta, updatedData: DataUpdate<T>) {
-        put(updatedData.name, updatedData.data?.named(updatedData.name)?.let(caching))
+    override suspend fun DataSink<T>.update(source: DataTree<T>, actionMeta: Meta, updateName: Name) {
+        val updatedData = source.read(updateName)
+        put(updateName, updatedData?.named(updateName)?.let(caching))
     }
 }
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileDataTree.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileDataTree.kt
new file mode 100644
index 00000000..c3cd3a0b
--- /dev/null
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileDataTree.kt
@@ -0,0 +1,185 @@
+package space.kscience.dataforge.workspace
+
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.Job
+import kotlinx.coroutines.channels.awaitClose
+import kotlinx.coroutines.flow.*
+import kotlinx.coroutines.isActive
+import kotlinx.coroutines.launch
+import space.kscience.dataforge.data.Data
+import space.kscience.dataforge.data.DataTree
+import space.kscience.dataforge.data.StaticData
+import space.kscience.dataforge.io.*
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.copy
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.NameToken
+import space.kscience.dataforge.names.asName
+import space.kscience.dataforge.names.plus
+import java.nio.file.*
+import java.nio.file.attribute.BasicFileAttributes
+import java.nio.file.spi.FileSystemProvider
+import kotlin.io.path.*
+import kotlin.reflect.KType
+import kotlin.reflect.typeOf
+
+
+public class FileDataTree(
+    public val io: IOPlugin,
+    public val path: Path,
+    private val monitor: Boolean = false
+) : DataTree<Binary> {
+    override val dataType: KType = typeOf<Binary>()
+
+    /**
+     * Read data with a supported envelope format and binary format. If the envelope format is null, read the binary directly from the file.
+     * The operation is blocking since it must read the meta header; the reading of the envelope body is lazy.
+     */
+    private fun readFileAsData(
+        path: Path,
+    ): Data<Binary> {
+        val envelope = io.readEnvelopeFile(path, true)
+        val updatedMeta = envelope.meta.copy {
+            FILE_PATH_KEY put path.toString()
+            FILE_EXTENSION_KEY put path.extension
+
+            val attributes = path.readAttributes<BasicFileAttributes>()
+            FILE_UPDATE_TIME_KEY put attributes.lastModifiedTime().toInstant().toString()
+            FILE_CREATE_TIME_KEY put attributes.creationTime().toInstant().toString()
+        }
+        return StaticData(
+            typeOf<Binary>(),
+            envelope.data ?: Binary.EMPTY,
+            updatedMeta
+        )
+    }
+
+    private fun readFilesFromDirectory(
+        path: Path
+    ): Map<NameToken, FileDataTree> = path.listDirectoryEntries().filterNot { it.name.startsWith("@") }.associate {
+        NameToken.parse(it.nameWithoutExtension) to FileDataTree(io, it)
+    }
+
+    override val data: Data<Binary>?
+        get() = when {
+            path.isRegularFile() -> {
+                //TODO process zip
+                readFileAsData(path)
+            }
+
+            path.isDirectory() -> {
+                val dataBinary: Binary? = path.resolve(IOPlugin.DATA_FILE_NAME)?.asBinary()
+                val meta: Meta? = path.find { it.fileName.startsWith(IOPlugin.META_FILE_NAME) }?.let {
+                    io.readMetaFileOrNull(it)
+                }
+                if (dataBinary != null || meta != null) {
+                    StaticData(
+                        typeOf<Binary>(),
+                        dataBinary ?: Binary.EMPTY,
+                        meta ?: Meta.EMPTY
+                    )
+                } else {
+                    null
+                }
+            }
+
+            else -> {
+                null
+            }
+        }
+
+
+    override val items: Map<NameToken, DataTree<Binary>>
+        get() = when {
+            path.isDirectory() -> readFilesFromDirectory(path)
+            path.isRegularFile() && path.extension == "zip" -> {
+                //Using an explicit Zip file system to avoid bizarre compatibility bugs
+                val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
+                    ?: error("Zip file system provider not found")
+                val fs = fsProvider.newFileSystem(path, emptyMap<String, Any>())
+                readFilesFromDirectory(fs.rootDirectories.single())
+            }
+
+            else -> emptyMap()
+        }
+
+
+    override val updates: Flow<Name> = if (monitor) {
+        callbackFlow<Name> {
+            val watchService: WatchService = path.fileSystem.newWatchService()
+
+            fun Path.toName() = Name(map { NameToken.parse(it.nameWithoutExtension) })
+
+            fun monitor(childPath: Path): Job {
+                val key: WatchKey = childPath.register(
+                    watchService, arrayOf(
+                        StandardWatchEventKinds.ENTRY_DELETE,
+                        StandardWatchEventKinds.ENTRY_MODIFY,
+                        StandardWatchEventKinds.ENTRY_CREATE,
+                    )
+                )
+
+                return launch {
+                    while (isActive) {
+                        for (event: WatchEvent<*> in key.pollEvents()) {
+                            val eventPath = event.context() as Path
+                            if (event.kind() === StandardWatchEventKinds.ENTRY_CREATE) {
+                                monitor(eventPath)
+                            } else {
+                                send(eventPath.relativeTo(path).toName())
+                            }
+                        }
+                        key.reset()
+                    }
+                }
+            }
+
+            monitor(path)
+
+            awaitClose {
+                watchService.close()
+            }
+
+        }.flowOn(Dispatchers.IO).shareIn(io.context, SharingStarted.WhileSubscribed())
+    } else {
+        emptyFlow()
+    }
+
+    public companion object {
+        public val FILE_KEY: Name = "file".asName()
+        public val FILE_PATH_KEY: Name = FILE_KEY + "path"
+        public val FILE_EXTENSION_KEY: Name = FILE_KEY + "extension"
+        public val FILE_CREATE_TIME_KEY: Name = FILE_KEY + "created"
+        public val FILE_UPDATE_TIME_KEY: Name = FILE_KEY + "updated"
+        public const val DF_FILE_EXTENSION: String = "df"
+        public val DEFAULT_IGNORE_EXTENSIONS: Set<String> = setOf(DF_FILE_EXTENSION)
+    }
+}
+
+
+///**
+// * @param resources The names of the resources to read.
+// * @param classLoader The class loader to use for loading the resources. By default, it uses the current thread's context class loader.
+// */
+//@DFExperimental
+//public fun DataSink<Binary>.resources(
+//    io: IOPlugin,
+//    resource: String,
+//    vararg otherResources: String,
+//    classLoader: ClassLoader = Thread.currentThread().contextClassLoader,
+//) {
+//    //create a file system if necessary
+//    val uri = Thread.currentThread().contextClassLoader.getResource("common")!!.toURI()
+//    try {
+//        uri.toPath()
+//    } catch (e: FileSystemNotFoundException) {
+//        FileSystems.newFileSystem(uri, mapOf("create" to "true"))
+//    }
+//
+//    listOf(resource, *otherResources).forEach { r ->
+//        val path = classLoader.getResource(r)?.toURI()?.toPath() ?: error(
+//            "Resource with name $r is not resolved"
+//        )
+//        io.readAsDataTree(r.asName(), path)
+//    }
+//}
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
index 8ba39ec1..9e986ba9 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
@@ -28,7 +28,7 @@ public class InMemoryWorkspaceCache : WorkspaceCache {
             val cachedData =  cache.getOrPut(TaskResultId(result.taskName, result.taskMeta)){
                 HashMap()
             }.getOrPut(data.name){
-                data.data
+                data
             }
             cachedData.checkType<T>(result.dataType).named(data.name)
         }
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt
deleted file mode 100644
index 37dafab9..00000000
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt
+++ /dev/null
@@ -1,188 +0,0 @@
-package space.kscience.dataforge.workspace
-
-import kotlinx.coroutines.*
-import space.kscience.dataforge.data.Data
-import space.kscience.dataforge.data.DataSink
-import space.kscience.dataforge.data.StaticData
-import space.kscience.dataforge.io.*
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.copy
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.NameToken
-import space.kscience.dataforge.names.asName
-import space.kscience.dataforge.names.plus
-import java.nio.file.*
-import java.nio.file.attribute.BasicFileAttributes
-import java.nio.file.spi.FileSystemProvider
-import kotlin.io.path.*
-import kotlin.reflect.typeOf
-
-
-public object FileData {
-    public val FILE_KEY: Name = "file".asName()
-    public val FILE_PATH_KEY: Name = FILE_KEY + "path"
-    public val FILE_EXTENSION_KEY: Name = FILE_KEY + "extension"
-    public val FILE_CREATE_TIME_KEY: Name = FILE_KEY + "created"
-    public val FILE_UPDATE_TIME_KEY: Name = FILE_KEY + "updated"
-    public const val DF_FILE_EXTENSION: String = "df"
-    public val DEFAULT_IGNORE_EXTENSIONS: Set<String> = setOf(DF_FILE_EXTENSION)
-
-}
-
-
-/**
- * Read data with supported envelope format and binary format. If the envelope format is null, then read binary directly from file.
- * The operation is blocking since it must read the meta header. The reading of envelope body is lazy
- */
-public fun IOPlugin.readFileData(
-    path: Path,
-): Data<Binary> {
-    val envelope = readEnvelopeFile(path, true)
-    val updatedMeta = envelope.meta.copy {
-        FileData.FILE_PATH_KEY put path.toString()
-        FileData.FILE_EXTENSION_KEY put path.extension
-
-        val attributes = path.readAttributes<BasicFileAttributes>()
-        FileData.FILE_UPDATE_TIME_KEY put attributes.lastModifiedTime().toInstant().toString()
-        FileData.FILE_CREATE_TIME_KEY put attributes.creationTime().toInstant().toString()
-    }
-    return StaticData(
-        typeOf<Binary>(),
-        envelope.data ?: Binary.EMPTY,
-        updatedMeta
-    )
-}
-
-public fun DataSink<Binary>.file(io: IOPlugin, name: Name, path: Path) {
-    if (!path.isRegularFile()) error("Only regular files could be handled by this function")
-    put(name, io.readFileData(path))
-}
-
-public fun DataSink<Binary>.directory(
-    io: IOPlugin,
-    name: Name,
-    path: Path,
-) {
-    if (!path.isDirectory()) error("Only directories could be handled by this function")
-    //process root data
-
-    var dataBinary: Binary? = null
-    var meta: Meta? = null
-    Files.list(path).forEach { childPath ->
-        val fileName = childPath.fileName.toString()
-        if (fileName == IOPlugin.DATA_FILE_NAME) {
-            dataBinary = childPath.asBinary()
-        } else if (fileName.startsWith(IOPlugin.META_FILE_NAME)) {
-            meta = io.readMetaFileOrNull(childPath)
-        } else if (!fileName.startsWith("@")) {
-            val token = if (childPath.isRegularFile() && childPath.extension in FileData.DEFAULT_IGNORE_EXTENSIONS) {
-                NameToken(childPath.nameWithoutExtension)
-            } else {
-                NameToken(childPath.name)
-            }
-
-            files(io, name + token, childPath)
-        }
-    }
-
-    //set data if it is relevant
-    if (dataBinary != null || meta != null) {
-        put(
-            name,
-            StaticData(
-                typeOf<Binary>(),
-                dataBinary ?: Binary.EMPTY,
-                meta ?: Meta.EMPTY
-            )
-        )
-    }
-}
-
-public fun DataSink<Binary>.files(
-    io: IOPlugin,
-    name: Name,
-    path: Path,
-) {
-    if (path.isRegularFile() && path.extension == "zip") {
-        //Using explicit Zip file system to avoid bizarre compatibility bugs
-        val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
-            ?: error("Zip file system provider not found")
-        val fs = fsProvider.newFileSystem(path, emptyMap<String, Any>())
-
-        files(io, name, fs.rootDirectories.first())
-    }
-    if (path.isRegularFile()) {
-        file(io, name, path)
-    } else {
-        directory(io, name, path)
-    }
-}
-
-
-private fun Path.toName() = Name(map { NameToken.parse(it.nameWithoutExtension) })
-
-public fun DataSink<Binary>.monitorFiles(
-    io: IOPlugin,
-    name: Name,
-    path: Path,
-    scope: CoroutineScope = io.context,
-): Job {
-    files(io, name, path)
-    return scope.launch(Dispatchers.IO) {
-        val watchService = path.fileSystem.newWatchService()
-
-        path.register(
-            watchService,
-            StandardWatchEventKinds.ENTRY_DELETE,
-            StandardWatchEventKinds.ENTRY_MODIFY,
-            StandardWatchEventKinds.ENTRY_CREATE
-        )
-
-        do {
-            val key = watchService.take()
-            if (key != null) {
-                for (event: WatchEvent<*> in key.pollEvents()) {
-                    val eventPath = event.context() as Path
-                    if (event.kind() == StandardWatchEventKinds.ENTRY_DELETE) {
-                        put(eventPath.toName(), null)
-                    } else {
-                        val fileName = eventPath.fileName.toString()
-                        if (!fileName.startsWith("@")) {
-                            files(io, name, eventPath)
-                        }
-                    }
-                }
-                key.reset()
-            }
-        } while (isActive && key != null)
-    }
-
-}
-
-/**
- * @param resources The names of the resources to read.
- * @param classLoader The class loader to use for loading the resources. By default, it uses the current thread's context class loader.
- */
-@DFExperimental
-public fun DataSink<Binary>.resources(
-    io: IOPlugin,
-    resource: String,
-    vararg otherResources: String,
-    classLoader: ClassLoader = Thread.currentThread().contextClassLoader,
-) {
-    //create a file system if necessary
-    val uri = Thread.currentThread().contextClassLoader.getResource("common")!!.toURI()
-    try {
-        uri.toPath()
-    } catch (e: FileSystemNotFoundException) {
-        FileSystems.newFileSystem(uri, mapOf("create" to "true"))
-    }
-
-    listOf(resource,*otherResources).forEach { r ->
-        val path = classLoader.getResource(r)?.toURI()?.toPath() ?: error(
-            "Resource with name $r is not resolved"
-        )
-        files(io, r.asName(), path)
-    }
-}
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
index 61caf7e0..688b5699 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
@@ -15,14 +15,14 @@ import space.kscience.dataforge.names.matches
  * Select the whole data set from the workspace filtered by type.
  */
 @OptIn(DFExperimental::class)
-public inline fun <reified T : Any> TaskResultBuilder<*>.dataByType(namePattern: Name? = null): DataSelector<T> =
+public inline fun <reified T : Any> TaskResultScope<*>.dataByType(namePattern: Name? = null): DataSelector<T> =
     DataSelector<T> { workspace, _ ->
         workspace.data.filterByType { name, _, _ ->
             namePattern == null || name.matches(namePattern)
         }
     }
 
-public suspend inline fun <reified T : Any> TaskResultBuilder<*>.fromTask(
+public suspend inline fun <reified T : Any> TaskResultScope<*>.fromTask(
     task: Name,
     taskMeta: Meta = Meta.EMPTY,
 ): DataTree<T> = workspace.produce(task, taskMeta).filterByType()
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/writeFileData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/writeFileData.kt
index 379a79dd..c65570df 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/writeFileData.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/writeFileData.kt
@@ -2,7 +2,9 @@ package space.kscience.dataforge.workspace
 
 import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.withContext
-import space.kscience.dataforge.data.*
+import space.kscience.dataforge.data.DataTree
+import space.kscience.dataforge.data.forEach
+import space.kscience.dataforge.data.meta
 import space.kscience.dataforge.io.*
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
@@ -32,8 +34,8 @@ public suspend fun <T : Any> IOPlugin.writeDataDirectory(
     } else if (!Files.isDirectory(path)) {
         error("Can't write a node into file")
     }
-    dataSet.forEach { (name, data) ->
-        val childPath = path.resolve(name.tokens.joinToString("/") { token -> token.toStringUnescaped() })
+    dataSet.forEach { data ->
+        val childPath = path.resolve(data.name.tokens.joinToString("/") { token -> token.toStringUnescaped() })
         childPath.parent.createDirectories()
         val envelope = data.toEnvelope(format)
         if (envelopeFormat != null) {
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
index 7a6a8202..1c43fba0 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
@@ -29,7 +29,7 @@ internal class CachingWorkspaceTest {
             inMemoryCache()
 
             val doFirst by task<Any> {
-                transformEach(allData) { _, name, _ ->
+                transformEach(allData) { (name, _, _) ->
                     firstCounter++
                     println("Done first on $name with flag=${taskMeta["flag"].boolean}")
                 }
@@ -39,7 +39,7 @@ internal class CachingWorkspaceTest {
                 transformEach(
                     doFirst,
                     dependencyMeta = if (taskMeta["flag"].boolean == true) taskMeta else Meta.EMPTY
-                ) { _, name, _ ->
+                ) { (name, _, _) ->
                     secondCounter++
                     println("Done second on $name with flag=${taskMeta["flag"].boolean ?: false}")
                 }
@@ -52,11 +52,11 @@ internal class CachingWorkspaceTest {
         val secondC = workspace.produce("doSecond")
         //use coroutineScope to wait for the result
         coroutineScope {
-            first.launch(this)
-            secondA.launch(this)
-            secondB.launch(this)
+            first.launchIn(this)
+            secondA.launchIn(this)
+            secondB.launchIn(this)
             //repeat to check caching
-            secondC.launch(this)
+            secondC.launchIn(this)
         }
 
         assertEquals(10, firstCounter)
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
index f526e194..d9fa9ae4 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
@@ -52,7 +52,7 @@ class FileDataTest {
         io.writeDataDirectory(dir, dataNode, StringIOFormat)
         println(dir.toUri().toString())
         val data = DataTree {
-            files(io, Name.EMPTY, dir)
+            io.readAsDataTree(Name.EMPTY, dir)
         }
         val reconstructed = data.map { (_, value) -> value.toByteArray().decodeToString() }
         assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
@@ -68,7 +68,7 @@ class FileDataTest {
         zip.deleteExisting()
         io.writeZip(zip, dataNode, StringIOFormat)
         println(zip.toUri().toString())
-        val reconstructed = DataTree { files(io, Name.EMPTY, zip) }
+        val reconstructed = DataTree { io.readAsDataTree(Name.EMPTY, zip) }
             .map { (_, value) -> value.toByteArray().decodeToString() }
         assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
         assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
index 0cf4f401..7d07481c 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
@@ -26,7 +26,7 @@ class FileWorkspaceCacheTest {
             }
         }
 
-        workspace.produce("echo").launch(this)
+        workspace.produce("echo").launchIn(this)
 
     }
 }
\ No newline at end of file

From 531f95d55f073408520c60bb0ce812c2718b9467 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Thu, 2 Jan 2025 10:09:00 +0300
Subject: [PATCH 20/29] Fix implementation of MutableDataTree. All tests pass

---
 .../space/kscience/dataforge/data/DataSink.kt | 81 ++++++++++++-------
 .../kscience/dataforge/data/DataSource.kt     |  5 +-
 .../dataforge/data/StaticDataBuilder.kt       | 24 +++---
 .../dataforge/data/DataTreeBuilderTest.kt     | 28 ++++---
 .../kscience/dataforge/data/ActionsTest.kt    |  6 +-
 .../kscience/dataforge/workspace/Task.kt      |  2 +-
 ...{taskBuilders.kt => taskResultBuilders.kt} | 22 +++--
 .../dataforge/workspace/FileDataTree.kt       | 15 +++-
 .../workspace/CachingWorkspaceTest.kt         |  8 +-
 .../workspace/DataPropagationTest.kt          | 12 ++-
 .../dataforge/workspace/FileDataTest.kt       | 20 ++---
 .../workspace/FileWorkspaceCacheTest.kt       |  6 +-
 .../workspace/SimpleWorkspaceTest.kt          | 54 +++++++------
 13 files changed, 162 insertions(+), 121 deletions(-)
 rename dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/{taskBuilders.kt => taskResultBuilders.kt} (87%)

diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
index c8a0f2a7..c9786244 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
@@ -8,6 +8,9 @@ import space.kscience.dataforge.names.*
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
+/**
+ * A marker scope for data builders
+ */
 public interface DataBuilderScope<in T> {
     public companion object : DataBuilderScope<Nothing>
 }
@@ -30,21 +33,19 @@ public fun interface DataSink<in T> : DataBuilderScope<T> {
  * A mutable version of [DataTree]
  */
 public interface MutableDataTree<T> : DataTree<T>, DataSink<T> {
-    override var data: Data<T>?
-
     override val items: Map<NameToken, MutableDataTree<T>>
-
-    public fun getOrCreateItem(token: NameToken): MutableDataTree<T>
-
-    public suspend fun put(token: NameToken, data: Data<T>?)
-
-    override suspend fun put(name: Name, data: Data<T>?): Unit {
-        when (name.length) {
-            0 -> this.data = data
-            1 -> put(name.first(), data)
-            else -> getOrCreateItem(name.first()).put(name.cutFirst(), data)
-        }
-    }
+//
+//    public fun getOrCreateItem(token: NameToken): MutableDataTree<T>
+//
+//    public suspend fun put(token: NameToken, data: Data<T>?)
+//
+//    override suspend fun put(name: Name, data: Data<T>?): Unit {
+//        when (name.length) {
+//            0 -> this.data = data
+//            1 -> put(name.first(), data)
+//            else -> getOrCreateItem(name.first()).put(name.cutFirst(), data)
+//        }
+//    }
 }
 
 /**
@@ -62,11 +63,12 @@ private class MutableDataTreeRoot<T>(
 ) : MutableDataTree<T> {
 
     override val items = HashMap<NameToken, MutableDataTree<T>>()
-    override val updates = MutableSharedFlow<Name>(extraBufferCapacity = 100)
+    override val updates = MutableSharedFlow<Name>()
 
     inner class MutableDataTreeBranch(val branchName: Name) : MutableDataTree<T> {
 
         override var data: Data<T>? = null
+            private set
 
         override val items = HashMap<NameToken, MutableDataTree<T>>()
 
@@ -75,26 +77,43 @@ private class MutableDataTreeRoot<T>(
         }
         override val dataType: KType get() = this@MutableDataTreeRoot.dataType
 
+        override suspend fun put(
+            name: Name,
+            data: Data<T>?
+        ) {
+            when (name.length) {
+                0 -> {
+                    this.data = data
+                    this@MutableDataTreeRoot.updates.emit(branchName)
+                }
 
-        override fun getOrCreateItem(token: NameToken): MutableDataTree<T> =
-            items.getOrPut(token) { MutableDataTreeBranch(branchName + token) }
+                else -> {
+                    val token = name.first()
+                    items.getOrPut(token) { MutableDataTreeBranch(branchName + token) }.put(name.cutFirst(), data)
+                }
+            }
+        }
+    }
+    override var data: Data<T>? = null
+        private set
 
-        override suspend fun put(token: NameToken, data: Data<T>?) {
-            this.data = data
-            this@MutableDataTreeRoot.updates.emit(branchName + token)
+    override suspend fun put(
+        name: Name,
+        data: Data<T>?
+    ) {
+        when (name.length) {
+            0 -> {
+                this.data = data
+                this@MutableDataTreeRoot.updates.emit(Name.EMPTY)
+            }
+
+            else -> {
+                val token = name.first()
+                items.getOrPut(token) { MutableDataTreeBranch(token.asName()) }.put(name.cutFirst(), data)
+            }
         }
     }
 
-    override var data: Data<T>? = null
-
-    override fun getOrCreateItem(token: NameToken): MutableDataTree<T> = items.getOrPut(token) {
-        MutableDataTreeBranch(token.asName())
-    }
-
-    override suspend fun put(token: NameToken, data: Data<T>?) {
-        this.data = data
-        updates.emit(token.asName())
-    }
 }
 
 /**
@@ -106,7 +125,7 @@ public fun <T> MutableDataTree(
 ): MutableDataTree<T> = MutableDataTreeRoot<T>(type)
 
 /**
- * Create and initialize a observable mutable data tree.
+ * Create and initialize an observable mutable data tree.
  */
 @OptIn(UnsafeKType::class)
 public inline fun <reified T> MutableDataTree(
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
index 7ee87180..531d37fd 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
@@ -38,9 +38,8 @@ public interface ObservableDataSource<out T> : DataSource<T> {
     public val updates: Flow<Name>
 }
 
-public suspend fun <T> ObservableDataSource<T>.awaitData(name: Name): Data<T> {
-    return read(name) ?: updates.filter { it == name }.map { read(name) }.filterNotNull().first()
-}
+public suspend fun <T> ObservableDataSource<T>.awaitData(name: Name): Data<T> =
+    read(name) ?: updates.filter { it == name }.mapNotNull { read(name) }.first()
 
 public suspend fun <T> ObservableDataSource<T>.awaitData(name: String): Data<T> =
     awaitData(name.parseAsName())
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataBuilder.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataBuilder.kt
index 0c1fe0b9..54bcf19c 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataBuilder.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataBuilder.kt
@@ -11,13 +11,13 @@ import kotlin.reflect.typeOf
 
 
 public fun interface StaticDataBuilder<T> : DataBuilderScope<T> {
-    public fun put(name: Name, data: Data<T>)
+    public fun data(name: Name, data: Data<T>)
 }
 
 private class DataMapBuilder<T> : StaticDataBuilder<T> {
     val map = mutableMapOf<Name, Data<T>>()
 
-    override fun put(name: Name, data: Data<T>) {
+    override fun data(name: Name, data: Data<T>) {
         if (map.containsKey(name)) {
             error("Duplicate key '$name'")
         } else {
@@ -26,31 +26,31 @@ private class DataMapBuilder<T> : StaticDataBuilder<T> {
     }
 }
 
-public fun <T> StaticDataBuilder<T>.put(name: String, data: Data<T>) {
-    put(name.parseAsName(), data)
+public fun <T> StaticDataBuilder<T>.data(name: String, data: Data<T>) {
+    data(name.parseAsName(), data)
 }
 
-public inline fun <T, reified T1 : T> StaticDataBuilder<T>.putValue(
+public inline fun <T, reified T1 : T> StaticDataBuilder<T>.value(
     name: String,
     value: T1,
     metaBuilder: MutableMeta.() -> Unit = {}
 ) {
-    put(name, Data(value, Meta(metaBuilder)))
+    data(name, Data(value, Meta(metaBuilder)))
 }
 
-public fun <T> StaticDataBuilder<T>.putAll(prefix: Name, block: StaticDataBuilder<T>.() -> Unit) {
+public fun <T> StaticDataBuilder<T>.node(prefix: Name, block: StaticDataBuilder<T>.() -> Unit) {
     val map = DataMapBuilder<T>().apply(block).map
     map.forEach { (name, data) ->
-        put(prefix + name, data)
+        data(prefix + name, data)
     }
 }
 
-public fun <T> StaticDataBuilder<T>.putAll(prefix: String, block: StaticDataBuilder<T>.() -> Unit) =
-    putAll(prefix.parseAsName(), block)
+public fun <T> StaticDataBuilder<T>.node(prefix: String, block: StaticDataBuilder<T>.() -> Unit) =
+    node(prefix.parseAsName(), block)
 
-public fun <T> StaticDataBuilder<T>.putAll(prefix: String, tree: DataTree<T>) {
+public fun <T> StaticDataBuilder<T>.node(prefix: String, tree: DataTree<T>) {
     tree.forEach { data ->
-        put(prefix + data.name, data)
+        data(prefix.parseAsName() + data.name, data)
     }
 }
 
diff --git a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
index 1d4d2ea4..6f1a7ed1 100644
--- a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
+++ b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
@@ -13,12 +13,12 @@ internal class DataTreeBuilderTest {
     @Test
     fun testTreeBuild() = runTest(timeout = 500.milliseconds) {
         val node = DataTree.static<Any> {
-            putAll("primary") {
-                putValue("a", "a")
-                putValue("b", "b")
+            node("primary") {
+                value("a", "a")
+                value("b", "b")
             }
-            putValue("c.d", "c.d")
-            putValue("c.f", "c.f")
+            value("c.d", "c.d")
+            value("c.f", "c.f")
         }
         assertEquals("a", node["primary.a"]?.await())
         assertEquals("b", node["primary.b"]?.await())
@@ -30,17 +30,17 @@ internal class DataTreeBuilderTest {
     @Test
     fun testDataUpdate() = runTest(timeout = 500.milliseconds) {
         val updateData = DataTree.static<Any> {
-            put("a", Data.wrapValue("a"))
-            put("b", Data.wrapValue("b"))
+            data("a", Data.wrapValue("a"))
+            data("b", Data.wrapValue("b"))
         }
 
         val node = DataTree.static<Any> {
-            putAll("primary") {
-                putValue("a", "a")
-                putValue("b", "b")
+            node("primary") {
+                value("a", "a")
+                value("b", "b")
             }
-            putValue("root", "root")
-            putAll("update", updateData)
+            value("root", "root")
+            node("update", updateData)
         }
 
         assertEquals("a", node["update.a"]?.await())
@@ -54,7 +54,9 @@ internal class DataTreeBuilderTest {
         val subNode = MutableDataTree<Int>()
 
         val rootNode = MutableDataTree<Int>() {
-            job = launch { putAllAndWatch(subNode, "sub".asName()) }
+            job = launch {
+                putAllAndWatch(subNode, "sub".asName())
+            }
         }
 
         repeat(10) {
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index 6828b674..1789237e 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -1,7 +1,6 @@
 package space.kscience.dataforge.data
 
 import kotlinx.coroutines.ExperimentalCoroutinesApi
-import kotlinx.coroutines.test.advanceUntilIdle
 import kotlinx.coroutines.test.runTest
 import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.actions.invoke
@@ -21,14 +20,13 @@ internal class ActionsTest {
 
         val data: DataTree<Int> = DataTree.static {
             repeat(10) {
-                putValue(it.toString(), it)
+                value(it.toString(), it)
             }
         }
 
         val result = plusOne(data)
 
-        advanceUntilIdle()
-        assertEquals(2, result["1"]?.await())
+        assertEquals(2, result.awaitData("1").await())
     }
 
     @Test
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
index f1e9130a..06134ce6 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
@@ -62,7 +62,7 @@ public interface TaskWithSpec<T, C : Any> : Task<T> {
 //    block: C.() -> Unit = {},
 //): TaskResult<T> = execute(workspace, taskName, spec(block))
 
-public class TaskResultScope<T>(
+public class TaskResultScope<in T>(
     public val resultType: KType,
     public val workspace: Workspace,
     public val taskName: Name,
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskResultBuilders.kt
similarity index 87%
rename from dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
rename to dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskResultBuilders.kt
index 49b485e5..9df49aba 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskResultBuilders.kt
@@ -1,14 +1,13 @@
 package space.kscience.dataforge.workspace
 
 import space.kscience.dataforge.context.PluginFactory
-import space.kscience.dataforge.data.DataTree
-import space.kscience.dataforge.data.NamedValueWithMeta
-import space.kscience.dataforge.data.transformEach
+import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
 import space.kscience.dataforge.meta.copy
 import space.kscience.dataforge.meta.remove
 import space.kscience.dataforge.misc.DFExperimental
+import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.plus
 
@@ -77,13 +76,15 @@ public val TaskResultScope<*>.allData: DataSelector<*>
  * @param dataMetaTransform additional transformation of individual data meta.
  * @param action process individual data asynchronously.
  */
+@OptIn(UnsafeKType::class)
 @DFExperimental
-public suspend inline fun <T, reified R> TaskResultScope<R>.transformEach(
+public suspend fun <T, R> TaskResultScope<R>.transformEach(
     selector: DataSelector<T>,
     dependencyMeta: Meta = defaultDependencyMeta,
-    crossinline dataMetaTransform: MutableMeta.(name: Name) -> Unit = {},
-    crossinline action: suspend (NamedValueWithMeta<T>) -> R,
+    dataMetaTransform: MutableMeta.(name: Name) -> Unit = {},
+    action: suspend NamedValueWithMeta<T>.() -> R,
 ): DataTree<R> = from(selector, dependencyMeta).transformEach<T, R>(
+    resultType,
     workspace.context,
     metaTransform = { name ->
         taskMeta[taskName]?.let { taskName put it }
@@ -93,6 +94,15 @@ public suspend inline fun <T, reified R> TaskResultScope<R>.transformEach(
     action(it)
 }
 
+@OptIn(UnsafeKType::class)
+public fun <R> TaskResultScope<R>.result(data: Data<R>): DataTree<R> = DataTree.static(resultType) {
+    data(Name.EMPTY, data)
+}
+
+@OptIn(UnsafeKType::class)
+public fun <R> TaskResultScope<R>.result(builder: StaticDataBuilder<R>.() -> Unit): DataTree<R> =
+    DataTree.static(resultType, builder)
+
 ///**
 // * Set given [dataSet] as a task result.
 // */
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileDataTree.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileDataTree.kt
index c3cd3a0b..049ec2ec 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileDataTree.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileDataTree.kt
@@ -68,10 +68,18 @@ public class FileDataTree(
             }
 
             path.isDirectory() -> {
-                val dataBinary: Binary? = path.resolve(IOPlugin.DATA_FILE_NAME)?.asBinary()
-                val meta: Meta? = path.find { it.fileName.startsWith(IOPlugin.META_FILE_NAME) }?.let {
+                //FIXME find data and meta in a single pass instead of two
+
+                val dataBinary: Binary? = path.listDirectoryEntries().find {
+                    it.fileName.nameWithoutExtension == IOPlugin.DATA_FILE_NAME
+                }?.asBinary()
+
+                val meta: Meta? = path.listDirectoryEntries().find {
+                    it.fileName.nameWithoutExtension == IOPlugin.META_FILE_NAME
+                }?.let {
                     io.readMetaFileOrNull(it)
                 }
+
                 if (dataBinary != null || meta != null) {
                     StaticData(
                         typeOf<Binary>(),
@@ -156,6 +164,9 @@ public class FileDataTree(
     }
 }
 
+public fun IOPlugin.readDirectory(path: Path, monitor: Boolean = false): FileDataTree =
+    FileDataTree(this, path, monitor)
+
 
 ///**
 // * @param resources The names of the resources to read.
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
index 1c43fba0..eb705e56 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
@@ -3,7 +3,7 @@ package space.kscience.dataforge.workspace
 import kotlinx.coroutines.coroutineScope
 import kotlinx.coroutines.test.runTest
 import org.junit.jupiter.api.Test
-import space.kscience.dataforge.data.putValue
+import space.kscience.dataforge.data.value
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.boolean
 import space.kscience.dataforge.meta.get
@@ -22,14 +22,14 @@ internal class CachingWorkspaceTest {
             data {
                 //statically initialize data
                 repeat(5) {
-                    putValue("myData[$it]", it)
+                    value("myData[$it]", it)
                 }
             }
 
             inMemoryCache()
 
             val doFirst by task<Any> {
-                transformEach(allData) { (name, _, _) ->
+                transformEach(allData) {
                     firstCounter++
                     println("Done first on $name with flag=${taskMeta["flag"].boolean}")
                 }
@@ -39,7 +39,7 @@ internal class CachingWorkspaceTest {
                 transformEach(
                     doFirst,
                     dependencyMeta = if (taskMeta["flag"].boolean == true) taskMeta else Meta.EMPTY
-                ) { (name, _, _) ->
+                ) {
                     secondCounter++
                     println("Done second on $name with flag=${taskMeta["flag"].boolean ?: false}")
                 }
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
index cd38f809..9cb040be 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
@@ -20,14 +20,12 @@ class DataPropagationTestPlugin : WorkspacePlugin() {
         val result: Data<Int> = selectedData.foldToData(0) { result, data ->
             result + data.value
         }
-        put("result", result)
+        result(result)
     }
 
 
     val singleData by task<Int> {
-        workspace.data.filterByType<Int>()["myData[12]"]?.let {
-            put("result", it)
-        }
+        result(workspace.data.filterByType<Int>()["myData[12]"]!!)
     }
 
 
@@ -47,7 +45,7 @@ class DataPropagationTest {
         }
         data {
             repeat(100) {
-                putValue("myData[$it]", it)
+                value("myData[$it]", it)
             }
         }
     }
@@ -55,12 +53,12 @@ class DataPropagationTest {
     @Test
     fun testAllData() = runTest {
         val node = testWorkspace.produce("Test.allData")
-        assertEquals(4950, node.content.asSequence().single().await())
+        assertEquals(4950, node.content.data?.await())
     }
 
     @Test
     fun testSingleData() = runTest {
         val node = testWorkspace.produce("Test.singleData")
-        assertEquals(12, node.content.asSequence().single().await())
+        assertEquals(12, node.content.data?.await())
     }
 }
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
index d9fa9ae4..c9e5ea5a 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
@@ -12,7 +12,6 @@ import space.kscience.dataforge.io.*
 import space.kscience.dataforge.io.yaml.YamlPlugin
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.Name
 import java.nio.file.Files
 import kotlin.io.path.deleteExisting
 import kotlin.io.path.fileSize
@@ -22,13 +21,13 @@ import kotlin.test.assertEquals
 
 
 class FileDataTest {
-    val dataNode = DataTree<String> {
-        putAll("dir") {
-            putValue("a", "Some string") {
+    val dataNode = DataTree.static<String> {
+        node("dir") {
+            value("a", "Some string") {
                 "content" put "Some string"
             }
         }
-        putValue("b", "root data")
+        value("b", "root data")
 //        meta {
 //            "content" put "This is root meta node"
 //        }
@@ -51,10 +50,10 @@ class FileDataTest {
         val dir = Files.createTempDirectory("df_data_node")
         io.writeDataDirectory(dir, dataNode, StringIOFormat)
         println(dir.toUri().toString())
-        val data = DataTree {
-            io.readAsDataTree(Name.EMPTY, dir)
+        val data = io.readDirectory(dir)
+        val reconstructed = data.transformEach(this) { (_, value) ->
+            value.toByteArray().decodeToString()
         }
-        val reconstructed = data.map { (_, value) -> value.toByteArray().decodeToString() }
         assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
         assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
     }
@@ -68,8 +67,9 @@ class FileDataTest {
         zip.deleteExisting()
         io.writeZip(zip, dataNode, StringIOFormat)
         println(zip.toUri().toString())
-        val reconstructed = DataTree { io.readAsDataTree(Name.EMPTY, zip) }
-            .map { (_, value) -> value.toByteArray().decodeToString() }
+        val reconstructed = io.readDirectory(zip).transformEach(this) { (_, value) ->
+            value.toByteArray().decodeToString()
+        }
         assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
         assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
 
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
index 7d07481c..7aa1fb0e 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
@@ -3,7 +3,7 @@ package space.kscience.dataforge.workspace
 import kotlinx.coroutines.ExperimentalCoroutinesApi
 import kotlinx.coroutines.test.runTest
 import org.junit.jupiter.api.Test
-import space.kscience.dataforge.data.putValue
+import space.kscience.dataforge.data.value
 import space.kscience.dataforge.misc.DFExperimental
 import java.nio.file.Files
 
@@ -16,13 +16,13 @@ class FileWorkspaceCacheTest {
             data {
                 //statically initialize data
                 repeat(5) {
-                    putValue("myData[$it]", it)
+                    value("myData[$it]", it)
                 }
             }
             fileCache(Files.createTempDirectory("dataforge-temporary-cache"))
 
             val echo by task<String> {
-                transformEach(dataByType<String>()) { arg, _, _ -> arg }
+                transformEach(dataByType<String>()) { value }
             }
         }
 
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
index 39837c15..111b3b89 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
@@ -37,9 +37,9 @@ internal object TestPlugin : WorkspacePlugin() {
 
     val test by task {
         // type is inferred
-        transformEach(dataByType<Int>()) { arg, _, _ ->
-            logger.info { "Test: $arg" }
-            arg
+        transformEach(dataByType<Int>()) {
+            logger.info { "Test: $value" }
+            value
         }
 
     }
@@ -62,42 +62,42 @@ internal class SimpleWorkspaceTest {
         data {
             //statically initialize data
             repeat(100) {
-                putValue("myData[$it]", it)
+                value("myData[$it]", it)
             }
         }
 
         val filterOne by task<Int> {
             val name by taskMeta.string { error("Name field not defined") }
-            from(testPluginFactory) { test }[name]?.let { source: Data<Int> ->
-                put(name, source)
-            }
+            result(from(testPluginFactory) { test }[name]!!)
         }
 
         val square by task<Int> {
-            transformEach(dataByType<Int>()) { arg, name, meta ->
+            transformEach(dataByType<Int>()) {
                 if (meta["testFlag"].boolean == true) {
                     println("Side effect")
                 }
                 workspace.logger.info { "Starting square on $name" }
-                arg * arg
+                value * value
             }
         }
 
         val linear by task<Int> {
-            transformEach(dataByType<Int>()) { arg, name, _ ->
+            transformEach(dataByType<Int>()) {
                 workspace.logger.info { "Starting linear on $name" }
-                arg * 2 + 1
+                value * 2 + 1
             }
         }
 
         val fullSquare by task<Int> {
             val squareData = from(square)
             val linearData = from(linear)
-            squareData.forEach { data ->
-                val newData: Data<Int> = data.combine(linearData[data.name]!!) { l, r ->
-                    l + r
+            result {
+                squareData.forEach { data ->
+                    val newData: Data<Int> = data.combine(linearData[data.name]!!) { l, r ->
+                        l + r
+                    }
+                    data(data.name, newData)
                 }
-                put(data.name, newData)
             }
         }
 
@@ -106,7 +106,7 @@ internal class SimpleWorkspaceTest {
             val res = from(square).foldToData(0) { l, r ->
                 l + r.value
             }
-            put("sum", res)
+            result(res)
         }
 
         val averageByGroup by task<Int> {
@@ -116,13 +116,15 @@ internal class SimpleWorkspaceTest {
                 l + r.value
             }
 
-            put("even", evenSum)
             val oddSum = workspace.data.filterByType<Int> { name, _, _ ->
                 name.toString().toInt() % 2 == 1
             }.foldToData(0) { l, r ->
                 l + r.value
             }
-            put("odd", oddSum)
+            result {
+                data("even", evenSum)
+                data("odd", oddSum)
+            }
         }
 
         val delta by task<Int> {
@@ -132,15 +134,17 @@ internal class SimpleWorkspaceTest {
             val res = even.combine(odd) { l, r ->
                 l - r
             }
-            put("res", res)
+            result(res)
         }
 
         val customPipe by task<Int> {
-            workspace.data.filterByType<Int>().forEach { data ->
-                val meta = data.meta.toMutableMeta().apply {
-                    "newValue" put 22
+            result {
+                workspace.data.filterByType<Int>().forEach { data ->
+                    val meta = data.meta.toMutableMeta().apply {
+                        "newValue" put 22
+                    }
+                    data(data.name + "new", data.transform { (data.meta["value"].int ?: 0) + it })
                 }
-                put(data.name + "new", data.transform { (data.meta["value"].int ?: 0) + it })
             }
         }
 
@@ -157,7 +161,7 @@ internal class SimpleWorkspaceTest {
     @Test
     fun testMetaPropagation() = runTest(timeout = 100.milliseconds) {
         val node = workspace.produce("sum") { "testFlag" put true }
-        val res = node["sum"]!!.await()
+        val res = node.data?.await()
     }
 
     @Test
@@ -175,7 +179,7 @@ internal class SimpleWorkspaceTest {
                 """
                 Name: ${it.name}
                 Meta: ${it.meta}
-                Data: ${it.data.await()}
+                Data: ${it.await()}
             """.trimIndent()
             )
         }

From c11007216c2f2154a9a6c0c598903432bf13bf57 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Thu, 2 Jan 2025 14:27:22 +0300
Subject: [PATCH 21/29] Update documentation

---
 README.md                                     | 88 ++++++++++++++++++-
 dataforge-context/README.md                   |  4 +-
 dataforge-data/README.md                      |  4 +-
 dataforge-io/README.md                        | 13 ++-
 dataforge-io/build.gradle.kts                 | 58 +++++++++++-
 dataforge-io/dataforge-io-proto/README.md     | 21 +++++
 dataforge-io/dataforge-io-yaml/README.md      |  4 +-
 .../dataforge-io-yaml/build.gradle.kts        |  6 +-
 .../space/kscience/dataforge/io/Responder.kt  | 12 ---
 dataforge-meta/README.md                      | 11 ++-
 dataforge-meta/build.gradle.kts               | 46 +++++++++-
 dataforge-scripting/README.md                 |  4 +-
 dataforge-scripting/build.gradle.kts          | 10 ++-
 dataforge-workspace/README.md                 |  4 +-
 docs/templates/README-TEMPLATE.md             | 26 +++---
 gradle.properties                             |  2 +-
 gradle/wrapper/gradle-wrapper.properties      |  2 +-
 17 files changed, 259 insertions(+), 56 deletions(-)
 create mode 100644 dataforge-io/dataforge-io-proto/README.md
 delete mode 100644 dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Responder.kt

diff --git a/README.md b/README.md
index da910804..be5591cf 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,70 @@
 [![JetBrains Research](https://jb.gg/badges/research.svg)](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub)
 [![DOI](https://zenodo.org/badge/148831678.svg)](https://zenodo.org/badge/latestdoi/148831678)
 
-![Gradle build](https://github.com/mipt-npm/dataforge-core/workflows/Gradle%20build/badge.svg)
+## Publications
+
+* [A general overview](https://doi.org/10.1051/epjconf/201817705003)
+* [An application in "Troitsk nu-mass" experiment](https://doi.org/10.1088/1742-6596/1525/1/012024)
+
+## Video
+
+* [A presentation on application of DataForge (legacy version) to Troitsk nu-mass analysis.](https://youtu.be/OpWzLXUZnLI?si=3qn7EMruOHMJX3Bc)
+
+## Questions and Answers
+
+In this section, we will try to cover DataForge main ideas in the form of questions and answers.
+
+### General
+
+**Q**: I have a lot of data to analyze. The analysis process is complicated, requires a lot of stages, and data flow is not always obvious. Also, the data size is huge, so I don't want to perform operations I don't need (calculate something I won't need or calculate something twice). I need it to be performed in parallel and probably on a remote computer. By the way, I am sick and tired of scripts that modify other scripts that control scripts. Could you help me?
+
+**A**: Yes, that is precisely the problem DataForge was made to solve. It allows performing some automated data manipulations with optimization and parallelization. The important thing is that data processing recipes are made in a declarative way, so it is quite easy to perform computations on a remote station. Also, DataForge guarantees reproducibility of analysis results.
+
+**Q**: How does it work?
+
+**A**: At the core of DataForge lies the idea of metadata processor. It utilizes the fact that to analyze something you need data itself and some additional information about what does that data represent and what does user want as a result. This additional information is called metadata and could be organized in a regular structure (a tree of values similar to XML or JSON). The important thing is that this distinction leaves no place for user instructions (or scripts). Indeed, the idea of DataForge logic is that one does not need imperative commands. The framework configures itself according to input meta-data and decides what operations should be performed in the most efficient way.
+
+**Q**: But where does it take algorithms to use?
+
+**A**: Of course algorithms must be written somewhere. No magic here. The logic is written in specialized modules. Some modules are provided out of the box at the system core, some need to be developed for a specific problem.
+
+**Q**: So I still need to write the code? What is the difference then?
+
+**A**: Yes, someone still needs to write the code. But not necessarily you. Simple operations could be performed using provided core logic. Also, your group can have one programmer writing the logic and all others using it without any real programming expertise. The framework is organized in such a way that when one writes some additional logic, they do not need to think about complicated things like parallel computing, resource handling, logging, caching, etc. Most of the things are done by DataForge.
+
+### Platform
+
+**Q**: Which platform does DataForge use? Which operating system is it working on?
+
+**A**: The DataForge is mostly written in Kotlin-multiplatform and could be used on JVM, JS and native targets. Some modules and functions are supported only on JVM
+
+**Q**: Can I use my C++/Fortran/Python code in DataForge?
+
+**A**: Yes, as long as the code could be called from Java. Most common languages have a bridge for Java access. There are completely no problems with compiled C/Fortran libraries. Python code could be called via one of existing python-java interfaces. It is also planned to implement remote method invocation for common languages, so your Python, or, say, Julia, code could run in its native environment. The metadata processor paradigm makes it much easier to do so.
+
+### Features
+
+**Q**: What other features does DataForge provide?
+
+**A**: Alongside metadata processing (and a lot of tools for metadata manipulation and layering), DataForge has two additional important concepts:
+
+* **Modularisation**. Contrary to a lot of other frameworks, DataForge is intrinsically modular. The mandatory part is a rather tiny core module. Everything else could be customized.
+
+* **Context encapsulation**. Every DataForge task is executed in some context. The context isolates environment for the task and also works as dependency injection base and specifies interaction of the task with the external world.
+
+### Misc
+
+**Q**: So everything looks great, can I replace my ROOT / other data analysis framework with DataForge?
+
+**A**: One must note that DataForge is made for analysis, not for visualization. The visualization and user interaction capabilities of DataForge are rather limited compared to frameworks like ROOT, JAS3 or DataMelt. The idea is to provide reliable API and core functionality. [VisionForge](https://git.sciprog.center/kscience/visionforge) project aims to provide tools for both 2D and 3D visualization both locally and remotely.
+
+**Q**: How does DataForge compare to cluster computation frameworks like Apache Spark?
+
+**A**: It is not the purpose of DataForge to replace cluster computing software. DataForge has some internal parallelism mechanics and implementations, but they are most certainly worse than specially developed programs. Still, DataForge is not fixed on one single implementation. Your favourite parallel processing tool could still be used as a back-end for DataForge, with full benefit of configuration tools, integrations, and no performance overhead.
+
+**Q**: Is it possible to use DataForge in notebook mode?
+
+**A**: [Kotlin jupyter](https://github.com/Kotlin/kotlin-jupyter) allows using any JVM program in a notebook mode. The dedicated module for DataForge is work in progress.
 
 
 ### [dataforge-context](dataforge-context)
@@ -14,14 +77,28 @@
 > **Maturity**: EXPERIMENTAL
 
 ### [dataforge-io](dataforge-io)
-> IO module
+> Serialization foundation for Meta objects and Envelope processing.
 >
 > **Maturity**: EXPERIMENTAL
+>
+> **Features:**
+> - [IO format](dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt) : A generic API for reading something from binary representation and writing it to Binary.
+> - [Binary](dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Binary.kt) : Multi-read random access binary.
+> - [Envelope](dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Envelope.kt) : API and implementations for combined data and metadata format.
+> - [Tagged envelope](dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/TaggedEnvelope.kt) : Implementation for binary-friendly envelope format with machine readable tag and forward size declaration.
+> - [Tagless envelope](dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/TaglessEnvelope.kt) : Implementation for text-friendly envelope format with text separators for sections.
+
 
 ### [dataforge-meta](dataforge-meta)
-> Meta definition and basic operations on meta
+> Core Meta and Name manipulation module
 >
 > **Maturity**: DEVELOPMENT
+>
+> **Features:**
+> - [Meta](dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt) : **Meta** is the representation of basic DataForge concept: Metadata, but it also could be called meta-value tree.
+> - [Value](dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Value.kt) : **Value** is a sum type for different meta values.
+> - [Name](dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt) : **Name** is an identifier to access tree-like structure.
+
 
 ### [dataforge-scripting](dataforge-scripting)
 >
@@ -31,6 +108,11 @@
 >
 > **Maturity**: EXPERIMENTAL
 
+### [dataforge-io/dataforge-io-proto](dataforge-io/dataforge-io-proto)
+> ProtoBuf Meta representation
+>
+> **Maturity**: PROTOTYPE
+
 ### [dataforge-io/dataforge-io-yaml](dataforge-io/dataforge-io-yaml)
 > YAML meta converters and Front Matter envelope format
 >
diff --git a/dataforge-context/README.md b/dataforge-context/README.md
index 2cd53fd1..894868fa 100644
--- a/dataforge-context/README.md
+++ b/dataforge-context/README.md
@@ -6,7 +6,7 @@ Context and provider definitions
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-context:0.9.0-dev-1`.
+The Maven coordinates of this project are `space.kscience:dataforge-context:0.10.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-context:0.9.0-dev-1")
+    implementation("space.kscience:dataforge-context:0.10.0")
 }
 ```
diff --git a/dataforge-data/README.md b/dataforge-data/README.md
index 35aaa4e6..3970fc31 100644
--- a/dataforge-data/README.md
+++ b/dataforge-data/README.md
@@ -6,7 +6,7 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-data:0.9.0-dev-1`.
+The Maven coordinates of this project are `space.kscience:dataforge-data:0.10.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-data:0.9.0-dev-1")
+    implementation("space.kscience:dataforge-data:0.10.0")
 }
 ```
diff --git a/dataforge-io/README.md b/dataforge-io/README.md
index 5a9979a9..9b56352e 100644
--- a/dataforge-io/README.md
+++ b/dataforge-io/README.md
@@ -2,11 +2,20 @@
 
 IO module
 
+## Features
+
+ - [IO format](src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt) : A generic API for reading something from binary representation and writing it to Binary.
+ - [Binary](src/commonMain/kotlin/space/kscience/dataforge/io/Binary.kt) : Multi-read random access binary.
+ - [Envelope](src/commonMain/kotlin/space/kscience/dataforge/io/Envelope.kt) : API and implementations for combined data and metadata format.
+ - [Tagged envelope](src/commonMain/kotlin/space/kscience/dataforge/io/TaggedEnvelope.kt) : Implementation for binary-friendly envelope format with machine readable tag and forward size declaration.
+ - [Tagless envelope](src/commonMain/kotlin/space/kscience/dataforge/io/TaglessEnvelope.kt) : Implementation for text-friendly envelope format with text separators for sections.
+
+
 ## Usage
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-io:0.9.0-dev-1`.
+The Maven coordinates of this project are `space.kscience:dataforge-io:0.10.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +25,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-io:0.9.0-dev-1")
+    implementation("space.kscience:dataforge-io:0.10.0")
 }
 ```
diff --git a/dataforge-io/build.gradle.kts b/dataforge-io/build.gradle.kts
index 16a6fbfc..52e47bb3 100644
--- a/dataforge-io/build.gradle.kts
+++ b/dataforge-io/build.gradle.kts
@@ -4,7 +4,7 @@ plugins {
 
 description = "IO module"
 
-val ioVersion = "0.4.0"
+val ioVersion = "0.6.0"
 
 kscience {
     jvm()
@@ -22,6 +22,60 @@ kscience {
     }
 }
 
-readme{
+readme {
     maturity = space.kscience.gradle.Maturity.EXPERIMENTAL
+
+    description = """
+        Serialization foundation for Meta objects and Envelope processing.
+    """.trimIndent()
+
+    feature(
+        "io-format",
+        ref = "src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt",
+        name = "IO format"
+    ) {
+        """
+            A generic API for reading something from binary representation and writing it to Binary.
+            
+            Similar to KSerializer, but without schema.
+        """.trimIndent()
+    }
+
+    feature(
+        "binary",
+        ref = "src/commonMain/kotlin/space/kscience/dataforge/io/Binary.kt",
+        name = "Binary"
+    ) {
+        "Multi-read random access binary."
+    }
+
+    feature(
+        "envelope",
+        ref = "src/commonMain/kotlin/space/kscience/dataforge/io/Envelope.kt",
+        name = "Envelope"
+    ) {
+        """
+            API and implementations for combined data and metadata format.
+        """.trimIndent()
+    }
+
+    feature(
+        "envelope.tagged",
+        ref = "src/commonMain/kotlin/space/kscience/dataforge/io/TaggedEnvelope.kt",
+        name = "Tagged envelope"
+    ) {
+        """
+            Implementation for binary-friendly envelope format with machine readable tag and forward size declaration.
+        """.trimIndent()
+    }
+
+    feature(
+        "envelope.tagless",
+        ref = "src/commonMain/kotlin/space/kscience/dataforge/io/TaglessEnvelope.kt",
+        name = "Tagless envelope"
+    ) {
+        """
+            Implementation for text-friendly envelope format with text separators for sections.
+        """.trimIndent()
+    }
 }
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-proto/README.md b/dataforge-io/dataforge-io-proto/README.md
new file mode 100644
index 00000000..31e48c15
--- /dev/null
+++ b/dataforge-io/dataforge-io-proto/README.md
@@ -0,0 +1,21 @@
+# Module dataforge-io-proto
+
+ProtoBuf meta IO
+
+## Usage
+
+## Artifact:
+
+The Maven coordinates of this project are `space.kscience:dataforge-io-proto:0.10.0`.
+
+**Gradle Kotlin DSL:**
+```kotlin
+repositories {
+    maven("https://repo.kotlin.link")
+    mavenCentral()
+}
+
+dependencies {
+    implementation("space.kscience:dataforge-io-proto:0.10.0")
+}
+```
diff --git a/dataforge-io/dataforge-io-yaml/README.md b/dataforge-io/dataforge-io-yaml/README.md
index f70a1490..e296e17e 100644
--- a/dataforge-io/dataforge-io-yaml/README.md
+++ b/dataforge-io/dataforge-io-yaml/README.md
@@ -6,7 +6,7 @@ YAML meta IO
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-io-yaml:0.9.0-dev-1`.
+The Maven coordinates of this project are `space.kscience:dataforge-io-yaml:0.10.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-io-yaml:0.9.0-dev-1")
+    implementation("space.kscience:dataforge-io-yaml:0.10.0")
 }
 ```
diff --git a/dataforge-io/dataforge-io-yaml/build.gradle.kts b/dataforge-io/dataforge-io-yaml/build.gradle.kts
index 505ce360..0ae7e9a2 100644
--- a/dataforge-io/dataforge-io-yaml/build.gradle.kts
+++ b/dataforge-io/dataforge-io-yaml/build.gradle.kts
@@ -11,14 +11,14 @@ kscience {
     dependencies {
         api(projects.dataforgeIo)
     }
-    useSerialization{
+    useSerialization {
         yamlKt()
     }
 }
 
-readme{
+readme {
     maturity = space.kscience.gradle.Maturity.PROTOTYPE
-    description ="""
+    description = """
         YAML meta converters and Front Matter envelope format
     """.trimIndent()
 }
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Responder.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Responder.kt
deleted file mode 100644
index 2a64966f..00000000
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Responder.kt
+++ /dev/null
@@ -1,12 +0,0 @@
-package space.kscience.dataforge.io
-
-/**
- * An object that could respond to external messages asynchronously
- */
-public interface Responder {
-    /**
-     * Send a request and wait for response for this specific request
-     */
-    public suspend fun respond(request: Envelope): Envelope
-}
-
diff --git a/dataforge-meta/README.md b/dataforge-meta/README.md
index e4fcacb1..7eb186ed 100644
--- a/dataforge-meta/README.md
+++ b/dataforge-meta/README.md
@@ -2,11 +2,18 @@
 
 Meta definition and basic operations on meta
 
+## Features
+
+ - [Meta](src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt) : **Meta** is the representation of basic DataForge concept: Metadata, but it also could be called meta-value tree.
+ - [Value](src/commonMain/kotlin/space/kscience/dataforge/meta/Value.kt) : **Value** is a sum type for different meta values.
+ - [Name](src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt) : **Name** is an identifier to access tree-like structure.
+
+
 ## Usage
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-meta:0.9.0-dev-1`.
+The Maven coordinates of this project are `space.kscience:dataforge-meta:0.10.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +23,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-meta:0.9.0-dev-1")
+    implementation("space.kscience:dataforge-meta:0.10.0")
 }
 ```
diff --git a/dataforge-meta/build.gradle.kts b/dataforge-meta/build.gradle.kts
index 831aea72..49d8bdc5 100644
--- a/dataforge-meta/build.gradle.kts
+++ b/dataforge-meta/build.gradle.kts
@@ -7,19 +7,57 @@ kscience {
     js()
     native()
     wasm()
-    useSerialization{
+    useSerialization {
         json()
     }
 }
 
 description = "Meta definition and basic operations on meta"
 
-readme{
+readme {
     maturity = space.kscience.gradle.Maturity.DEVELOPMENT
 
-    feature("metadata"){
+    description = """
+        Core Meta and Name manipulation module
+    """.trimIndent()
+
+    feature(
+        "meta",
+        ref = "src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt",
+        name = "Meta"
+    ) {
         """
-            
+        **Meta** is the representation of basic DataForge concept: Metadata, but it also could be called meta-value tree.
+        
+        Each Meta node could have a node Value as well as a map of named child items.
+                    
+        """.trimIndent()
+    }
+
+    feature(
+        "value",
+        ref = "src/commonMain/kotlin/space/kscience/dataforge/meta/Value.kt",
+        name = "Value"
+    ) {
+        """
+        **Value** is a sum type for different meta values.
+        
+        The following types are implemented in core (custom ones are also available):
+            * null
+            * boolean
+            * number
+            * string
+            * list of values
+        """.trimIndent()
+    }
+
+    feature(
+        "name",
+        ref = "src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt",
+        name = "Name"
+    ) {
+        """
+        **Name** is an identifier to access tree-like structure.
         """.trimIndent()
     }
 }
\ No newline at end of file
diff --git a/dataforge-scripting/README.md b/dataforge-scripting/README.md
index 1f650bea..e516b392 100644
--- a/dataforge-scripting/README.md
+++ b/dataforge-scripting/README.md
@@ -6,7 +6,7 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-scripting:0.9.0-dev-1`.
+The Maven coordinates of this project are `space.kscience:dataforge-scripting:0.10.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-scripting:0.9.0-dev-1")
+    implementation("space.kscience:dataforge-scripting:0.10.0")
 }
 ```
diff --git a/dataforge-scripting/build.gradle.kts b/dataforge-scripting/build.gradle.kts
index d9d87742..37bc2e11 100644
--- a/dataforge-scripting/build.gradle.kts
+++ b/dataforge-scripting/build.gradle.kts
@@ -2,22 +2,24 @@ plugins {
     id("space.kscience.gradle.mpp")
 }
 
-kscience{
+description = "Scripting definition for workspace generation"
+
+kscience {
     jvm()
     commonMain {
         api(projects.dataforgeWorkspace)
         implementation(kotlin("scripting-common"))
     }
-    jvmMain{
+    jvmMain {
         implementation(kotlin("scripting-jvm-host"))
         implementation(kotlin("scripting-jvm"))
     }
-    jvmTest{
+    jvmTest {
         implementation(spclibs.logback.classic)
     }
 }
 
 
-readme{
+readme {
     maturity = space.kscience.gradle.Maturity.PROTOTYPE
 }
\ No newline at end of file
diff --git a/dataforge-workspace/README.md b/dataforge-workspace/README.md
index 87b38c6e..8bb476a3 100644
--- a/dataforge-workspace/README.md
+++ b/dataforge-workspace/README.md
@@ -6,7 +6,7 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-workspace:0.9.0-dev-1`.
+The Maven coordinates of this project are `space.kscience:dataforge-workspace:0.10.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
@@ -16,6 +16,6 @@ repositories {
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-workspace:0.9.0-dev-1")
+    implementation("space.kscience:dataforge-workspace:0.10.0")
 }
 ```
diff --git a/docs/templates/README-TEMPLATE.md b/docs/templates/README-TEMPLATE.md
index 6de55245..f868eb59 100644
--- a/docs/templates/README-TEMPLATE.md
+++ b/docs/templates/README-TEMPLATE.md
@@ -1,8 +1,6 @@
 [![JetBrains Research](https://jb.gg/badges/research.svg)](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub)
 [![DOI](https://zenodo.org/badge/148831678.svg)](https://zenodo.org/badge/latestdoi/148831678)
 
-![Gradle build](https://github.com/mipt-npm/dataforge-core/workflows/Gradle%20build/badge.svg)
-
 ## Publications
 
 * [A general overview](https://doi.org/10.1051/epjconf/201817705003)
@@ -10,27 +8,29 @@
 
 ## Video
 
-* [A presentation on application of (old version of) DataForge to Troitsk nu-mass analysis.] (https://youtu.be/OpWzLXUZnLI?si=3qn7EMruOHMJX3Bc)
+* [A presentation on application of DataForge (legacy version) to Troitsk nu-mass analysis.](https://youtu.be/OpWzLXUZnLI?si=3qn7EMruOHMJX3Bc)
 
 ## Questions and Answers
+
 In this section, we will try to cover DataForge main ideas in the form of questions and answers.
 
 ### General
-**Q**: I have a lot of data to analyze. The analysis process is complicated, requires a lot of stages and data flow is not always obvious. To top it the data size is huge, so I don't want to perform operation I don't need (calculate something I won't need or calculate something twice). And yes, I need it to be performed in parallel and probably on remote computer. By the way, I am sick and tired of scripts that modify other scripts that control scripts. Could you help me?
 
-**A**: Yes, that is precisely the problem DataForge was made to solve. It allows to perform some automated data manipulations with automatic optimization and parallelization. The important thing that data processing recipes are made in the declarative way, so it is quite easy to perform computations on a remote station. Also, DataForge guarantees reproducibility of analysis results.
+**Q**: I have a lot of data to analyze. The analysis process is complicated, requires a lot of stages, and data flow is not always obvious. Also, the data size is huge, so I don't want to perform operations I don't need (calculate something I won't need or calculate something twice). I need it to be performed in parallel and probably on a remote computer. By the way, I am sick and tired of scripts that modify other scripts that control scripts. Could you help me?
+
+**A**: Yes, that is precisely the problem DataForge was made to solve. It allows performing some automated data manipulations with optimization and parallelization. The important thing is that data processing recipes are made in a declarative way, so it is quite easy to perform computations on a remote station. Also, DataForge guarantees reproducibility of analysis results.
 
 **Q**: How does it work?
 
-**A**: At the core of DataForge lies the idea of metadata processor. It utilizes the fact that in order to analyze something you need data itself and some additional information about what does that data represent and what does user want as a result. This additional information is called metadata and could be organized in a regular structure (a tree of values not unlike XML or JSON). The important thing is that this distinction leaves no place for user instructions (or scripts). Indeed, the idea of DataForge logic is that one do not need imperative commands. The framework configures itself according to input meta-data and decides what operations should be performed in the most efficient way.
+**A**: At the core of DataForge lies the idea of metadata processor. It utilizes the fact that to analyze something you need data itself and some additional information about what does that data represent and what does user want as a result. This additional information is called metadata and could be organized in a regular structure (a tree of values similar to XML or JSON). The important thing is that this distinction leaves no place for user instructions (or scripts). Indeed, the idea of DataForge logic is that one does not need imperative commands. The framework configures itself according to input meta-data and decides what operations should be performed in the most efficient way.
 
 **Q**: But where does it take algorithms to use?
 
-**A**: Of course algorithms must be written somewhere. No magic here. The logic is written in specialized modules. Some modules are provided out of the box at the system core, some need to be developed for specific problem.
+**A**: Of course algorithms must be written somewhere. No magic here. The logic is written in specialized modules. Some modules are provided out of the box at the system core, some need to be developed for a specific problem.
 
 **Q**: So I still need to write the code? What is the difference then?
 
-**A**: Yes, someone still needs to write the code. But not necessary you. Simple operations could be performed using provided core logic. Also, your group can have one programmer writing the logic and all other using it without any real programming expertise. The framework organized in a such way that one writes some additional logic, they do not need to think about complicated thing like parallel computing, resource handling, logging, caching etc. Most of the things are done by the DataForge.
+**A**: Yes, someone still needs to write the code. But not necessarily you. Simple operations could be performed using provided core logic. Also, your group can have one programmer writing the logic and all others using it without any real programming expertise. The framework is organized in such a way that when one writes some additional logic, they do not need to think about complicated things like parallel computing, resource handling, logging, caching, etc. Most of the things are done by DataForge.
 
 ### Platform
 
@@ -40,9 +40,10 @@ In this section, we will try to cover DataForge main ideas in the form of questi
 
 **Q**: Can I use my C++/Fortran/Python code in DataForge?
 
-A: Yes, as long as the code could be called from Java. Most of common languages have a bridge for Java access. There are completely no problems with compiled C/Fortran libraries. Python code could be called via one of existing python-java interfaces. It is also planned to implement remote method invocation for common languages, so your Python, or, say, Julia, code could run in its native environment. The metadata processor paradigm makes it much easier to do so.
+**A**: Yes, as long as the code can be called from Java. Most common languages have a bridge for Java access. There are no problems at all with compiled C/Fortran libraries. Python code could be called via one of the existing Python-Java interfaces. It is also planned to implement remote method invocation for common languages, so your Python, or, say, Julia, code could run in its native environment. The metadata processor paradigm makes it much easier to do so.
 
 ### Features
+
 **Q**: What other features does DataForge provide?
 
 **A**: Alongside metadata processing (and a lot of tools for metadata manipulation and layering), DataForge has two additional important concepts:
@@ -52,16 +53,17 @@ A: Yes, as long as the code could be called from Java. Most of common languages
 * **Context encapsulation**. Every DataForge task is executed in some context. The context isolates environment for the task and also works as dependency injection base and specifies interaction of the task with the external world.
 
 ### Misc
+
 **Q**: So everything looks great, can I replace my ROOT / other data analysis framework with DataForge?
 
-**A**: One must note, that DataForge is made for analysis, not for visualisation. The visualisation and user interaction capabilities of DataForge are rather limited compared to frameworks like ROOT, JAS3 or DataMelt. The idea is to provide reliable API and core functionality. In fact JAS3 and DataMelt could be used as a frontend for DataForge mechanics.
+**A**: One must note that DataForge is made for analysis, not for visualization. The visualization and user interaction capabilities of DataForge are rather limited compared to frameworks like ROOT, JAS3 or DataMelt. The idea is to provide reliable API and core functionality. [VisionForge](https://git.sciprog.center/kscience/visionforge) project aims to provide tools for both 2D and 3D visualization both locally and remotely.
 
 **Q**: How does DataForge compare to cluster computation frameworks like Apache Spark?
 
-**A**: Again, it is not the purpose of DataForge to replace cluster software. DataForge has some internal parallelism mechanics and implementations, but they are most certainly worse than specially developed programs. Still, DataForge is not fixed on one single implementation. Your favourite parallel processing tool could be still used as a back-end for the DataForge. With full benefit of configuration tools, integrations and no performance overhead.
+**A**: It is not the purpose of DataForge to replace cluster computing software. DataForge has some internal parallelism mechanics and implementations, but they are most certainly worse than specially developed programs. Still, DataForge is not fixed on one single implementation. Your favourite parallel processing tool could be still used as a back-end for the DataForge. With full benefit of configuration tools, integrations and no performance overhead.
 
 **Q**: Is it possible to use DataForge in notebook mode?
 
-**A**: [Kotlin jupyter](https://github.com/Kotlin/kotlin-jupyter) allows to use any JVM program in a notebook mode. The dedicated module for DataForge is work in progress.
+**A**: [Kotlin jupyter](https://github.com/Kotlin/kotlin-jupyter) allows using any JVM program in a notebook mode. The dedicated module for DataForge is work in progress.
 
 ${modules}
diff --git a/gradle.properties b/gradle.properties
index 1b920cd8..67ba7f8e 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -6,4 +6,4 @@ org.gradle.jvmargs=-Xmx4096m
 kotlin.mpp.stability.nowarn=true
 kotlin.native.ignoreDisabledTargets=true
 
-toolsVersion=0.16.0-kotlin-2.1.0
\ No newline at end of file
+toolsVersion=0.16.1-kotlin-2.1.0
\ No newline at end of file
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 17655d0e..d6e308a6 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.12-bin.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists

From 6a13182d1c50bf0c364c66f4dc1612329cc11746 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Fri, 3 Jan 2025 14:19:36 +0300
Subject: [PATCH 22/29] Rename `put` to `write` in `DataSink`

---
 CHANGELOG.md                                  |  2 +-
 .../dataforge/actions/AbstractAction.kt       |  4 +-
 .../kscience/dataforge/actions/MapAction.kt   |  2 +-
 .../kscience/dataforge/actions/SplitAction.kt |  2 +-
 .../space/kscience/dataforge/data/DataSink.kt | 10 +--
 .../kscience/dataforge/data/dataBuilders.kt   | 76 ++++++++-----------
 .../kscience/dataforge/data/dataTransform.kt  |  4 +-
 .../dataforge/data/dataTreeBuilder.kt         |  6 +-
 .../dataforge/data/DataTreeBuilderTest.kt     |  4 +-
 .../dataforge/data/dataSetBuilderInContext.kt |  6 +-
 .../kscience/dataforge/data/ActionsTest.kt    |  2 +-
 .../dataforge/workspace/CachingAction.kt      |  2 +-
 12 files changed, 54 insertions(+), 66 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3cb3daf0..5e4a3ce2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,7 +8,7 @@
 
 ### Changed
 - Simplify inheritance logic in `MutableTypedMeta`
-- API of DataSink.
+- Full rework of `DataTree` and associated interfaces (`DataSource`, `DataSink`, etc).
 
 ### Deprecated
 - MetaProvider `spec` is replaced by `readable`. `listOfSpec` replaced with `listOfReadable`
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
index af0aab00..1f1bbf2d 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
@@ -44,7 +44,7 @@ public abstract class AbstractAction<T, R>(
         updateName: Name,
     ) {
         //by default regenerate the whole data set
-        putAll(generate(source, actionMeta))
+        writeAll(generate(source, actionMeta))
     }
 
     @OptIn(UnsafeKType::class)
@@ -60,7 +60,7 @@ public abstract class AbstractAction<T, R>(
 
         //propagate updates
         val updateSink = DataSink<R> { name, data ->
-            put(name, data)
+            write(name, data)
         }
 
         with(updateSink) {
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index 2e4b2ddc..a763515c 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -100,7 +100,7 @@ public class MapAction<T, R>(
         updateName: Name,
     ) {
         val (name, data) = mapOne(updateName, source.read(updateName), actionMeta)
-        put(name, data)
+        write(name, data)
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
index 6dfbc7c9..acc1ba36 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
@@ -94,7 +94,7 @@ internal class SplitAction<T, R>(
         actionMeta: Meta,
         updateName: Name,
     ) {
-        putAll(splitOne(updateName, source.read(updateName), actionMeta))
+        writeAll(splitOne(updateName, source.read(updateName), actionMeta))
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
index c9786244..4345866e 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSink.kt
@@ -25,7 +25,7 @@ public fun interface DataSink<in T> : DataBuilderScope<T> {
     /**
      * Put data and notify listeners if needed
      */
-    public suspend fun put(name: Name, data: Data<T>?)
+    public suspend fun write(name: Name, data: Data<T>?)
 }
 
 
@@ -77,7 +77,7 @@ private class MutableDataTreeRoot<T>(
         }
         override val dataType: KType get() = this@MutableDataTreeRoot.dataType
 
-        override suspend fun put(
+        override suspend fun write(
             name: Name,
             data: Data<T>?
         ) {
@@ -89,7 +89,7 @@ private class MutableDataTreeRoot<T>(
 
                 else -> {
                     val token = name.first()
-                    items.getOrPut(token) { MutableDataTreeBranch(branchName + token) }.put(name.cutFirst(), data)
+                    items.getOrPut(token) { MutableDataTreeBranch(branchName + token) }.write(name.cutFirst(), data)
                 }
             }
         }
@@ -97,7 +97,7 @@ private class MutableDataTreeRoot<T>(
     override var data: Data<T>? = null
         private set
 
-    override suspend fun put(
+    override suspend fun write(
         name: Name,
         data: Data<T>?
     ) {
@@ -109,7 +109,7 @@ private class MutableDataTreeRoot<T>(
 
             else -> {
                 val token = name.first()
-                items.getOrPut(token) { MutableDataTreeBranch(token.asName()) }.put(name.cutFirst(), data)
+                items.getOrPut(token) { MutableDataTreeBranch(token.asName()) }.write(name.cutFirst(), data)
             }
         }
     }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
index 640e8541..80c5fca9 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
@@ -8,118 +8,106 @@ import space.kscience.dataforge.names.isEmpty
 import space.kscience.dataforge.names.plus
 
 
-public suspend fun <T> DataSink<T>.put(value: NamedData<T>) {
-    put(value.name, value)
+public suspend fun <T> DataSink<T>.write(value: NamedData<T>) {
+    write(value.name, value)
 }
 
-public inline fun <T> DataSink<T>.putAll(
+public inline fun <T> DataSink<T>.writeAll(
     prefix: Name,
     block: DataSink<T>.() -> Unit,
 ) {
     if (prefix.isEmpty()) {
         apply(block)
     } else {
-        val proxyDataSink = DataSink<T> { name, data -> this@putAll.put(prefix + name, data) }
+        val proxyDataSink = DataSink<T> { name, data -> this@writeAll.write(prefix + name, data) }
 
         proxyDataSink.apply(block)
     }
 }
 
 
-public inline fun <T> DataSink<T>.putAll(
+public inline fun <T> DataSink<T>.writeAll(
     prefix: String,
     block: DataSink<T>.() -> Unit,
-): Unit = putAll(prefix.asName(), block)
+): Unit = writeAll(prefix.asName(), block)
 
 
-public suspend fun <T> DataSink<T>.put(name: String, value: Data<T>) {
-    put(Name.parse(name), value)
+public suspend fun <T> DataSink<T>.write(name: String, value: Data<T>) {
+    write(Name.parse(name), value)
 }
 
-public suspend fun <T> DataSink<T>.putAll(name: Name, tree: DataTree<T>) {
-    putAll(name) { putAll(tree.asSequence()) }
+public suspend fun <T> DataSink<T>.writeAll(name: Name, tree: DataTree<T>) {
+    writeAll(name) { writeAll(tree.asSequence()) }
 }
 
 
-public suspend fun <T> DataSink<T>.putAll(name: String, tree: DataTree<T>) {
-    putAll(Name.parse(name)) { putAll(tree.asSequence()) }
+public suspend fun <T> DataSink<T>.writeAll(name: String, tree: DataTree<T>) {
+    writeAll(Name.parse(name)) { writeAll(tree.asSequence()) }
 }
 
 /**
  * Produce lazy [Data] and emit it into the [MutableDataTree]
  */
-public suspend inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.writeValue(
     name: String,
     meta: Meta = Meta.EMPTY,
     noinline producer: suspend () -> T,
 ) {
     val data = Data(meta, block = producer)
-    put(name, data)
+    write(name, data)
 }
 
-public suspend inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.writeValue(
     name: Name,
     meta: Meta = Meta.EMPTY,
     noinline producer: suspend () -> T,
 ) {
     val data = Data(meta, block = producer)
-    put(name, data)
+    write(name, data)
 }
 
 /**
  * Emit static data with the fixed value
  */
-public suspend inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.writeValue(
     name: Name,
     value: T,
     meta: Meta = Meta.EMPTY,
-): Unit = put(name, Data.wrapValue(value, meta))
+): Unit = write(name, Data.wrapValue(value, meta))
 
-public suspend inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.writeValue(
     name: String,
     value: T,
     meta: Meta = Meta.EMPTY,
-): Unit = put(name, Data.wrapValue(value, meta))
+): Unit = write(name, Data.wrapValue(value, meta))
 
-public suspend inline fun <reified T> DataSink<T>.putValue(
+public suspend inline fun <reified T> DataSink<T>.writeValue(
     name: String,
     value: T,
     metaBuilder: MutableMeta.() -> Unit,
-): Unit = put(Name.parse(name), Data.wrapValue(value, Meta(metaBuilder)))
+): Unit = write(Name.parse(name), Data.wrapValue(value, Meta(metaBuilder)))
 
-public suspend fun <T> DataSink<T>.putAll(sequence: Sequence<NamedData<T>>) {
+public suspend fun <T> DataSink<T>.writeAll(sequence: Sequence<NamedData<T>>) {
     sequence.forEach {
-        put(it)
+        write(it)
     }
 }
 
-public suspend fun <T> DataSink<T>.putAll(map: Map<Name, Data<T>?>) {
+public suspend fun <T> DataSink<T>.writeAll(map: Map<Name, Data<T>?>) {
     map.forEach { (name, data) ->
-        put(name, data)
+        write(name, data)
     }
 }
 
-//public suspend fun <T> DataSink<T>.putAll(tree: DataTree<T>) {
-//    putAll(tree.asSequence())
-//}
-
 /**
- * Suspends indefinitely.
+ * Copy all data from [this] and mirror changes if they appear. Suspends indefinitely.
  */
-public suspend fun <T : Any> DataSink<T>.watch(
-    source: ObservableDataSource<T>,
-    prefix: Name = Name.EMPTY,
-) {
-//    putAll(branchName, source)
-    source.updates.collect {
-        put(prefix + it, source.read(it))
-    }
-}
-
-public suspend fun <T : Any> MutableDataTree<T>.putAllAndWatch(
+public suspend fun <T : Any> MutableDataTree<T>.writeAllAndWatch(
     source: DataTree<T>,
     prefix: Name = Name.EMPTY,
 ) {
-    putAll(prefix, source)
-    watch(source,prefix)
+    writeAll(prefix, source)
+    source.updates.collect {
+        write(prefix + it, source.read(it))
+    }
 }
\ No newline at end of file
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
index a8d5ac20..868cb82f 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
@@ -220,14 +220,14 @@ public fun <T, R> DataTree<T>.transformEach(
 ) {
     updates.collect { name ->
         val data: Data<T>? = read(name)
-        if (data == null) put(name, null) else {
+        if (data == null) write(name, null) else {
             val newMeta = data.meta.toMutableMeta().apply {
                 metaTransform(name)
             }.seal()
             val d = Data(outputType, newMeta, scope.coroutineContext, listOf(data)) {
                 compute(NamedValueWithMeta(name, data.await(), data.meta))
             }
-            put(name, d)
+            write(name, d)
         }
     }
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
index 664f9904..069e60bc 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTreeBuilder.kt
@@ -49,7 +49,7 @@ private class DataTreeBuilder<T>(
     private val updatesFlow = MutableSharedFlow<Name>()
 
 
-    override suspend fun put(name: Name, data: Data<T>?) {
+    override suspend fun write(name: Name, data: Data<T>?) {
         mutex.withLock {
             if (data == null) {
                 map.remove(name)
@@ -60,7 +60,7 @@ private class DataTreeBuilder<T>(
         updatesFlow.emit(name)
     }
 
-    public fun build(): DataTree<T> = FlatDataTree(type, map, updatesFlow, Name.EMPTY)
+    fun build(): DataTree<T> = FlatDataTree(type, map, updatesFlow, Name.EMPTY)
 }
 
 /**
@@ -73,7 +73,7 @@ public fun <T> DataTree(
     initialData: Map<Name, Data<T>> = emptyMap(),
     updater: suspend DataSink<T>.() -> Unit,
 ): DataTree<T> = DataTreeBuilder<T>(dataType, initialData).apply {
-    scope.launch {
+    scope.launch(GoalExecutionRestriction(GoalExecutionRestrictionPolicy.ERROR)) {
         updater()
     }
 }.build()
diff --git a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
index 6f1a7ed1..6a90664a 100644
--- a/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
+++ b/dataforge-data/src/commonTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
@@ -55,12 +55,12 @@ internal class DataTreeBuilderTest {
 
         val rootNode = MutableDataTree<Int>() {
             job = launch {
-                putAllAndWatch(subNode, "sub".asName())
+                writeAllAndWatch(subNode, "sub".asName())
             }
         }
 
         repeat(10) {
-            subNode.putValue("value[$it]", it)
+            subNode.writeValue("value[$it]", it)
         }
 
         assertEquals(9, subNode.awaitData("value[9]").await())
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
index f9374974..2bcca5e4 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
@@ -10,14 +10,14 @@ import space.kscience.dataforge.names.Name
  */
 context(DataSink<T>)
 public suspend infix fun <T : Any> String.put(data: Data<T>): Unit =
-    put(Name.parse(this), data)
+    write(Name.parse(this), data)
 
 /**
  * Append node
  */
 context(DataSink<T>)
 public suspend infix fun <T : Any> String.putAll(dataSet: DataTree<T>): Unit =
-    putAll(this, dataSet)
+    writeAll(this, dataSet)
 
 /**
  * Build and append node
@@ -25,5 +25,5 @@ public suspend infix fun <T : Any> String.putAll(dataSet: DataTree<T>): Unit =
 context(DataSink<T>)
 public infix fun <T : Any> String.putAll(
     block: DataSink<T>.() -> Unit,
-): Unit = putAll(Name.parse(this), block)
+): Unit = writeAll(Name.parse(this), block)
 
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index 1789237e..4a0342dc 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -40,7 +40,7 @@ internal class ActionsTest {
         val result: DataTree<Int> = plusOne(source)
 
         repeat(10) {
-            source.putValue(it.toString(), it)
+            source.writeValue(it.toString(), it)
         }
 
         assertEquals(2, result.awaitData("1").await())
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt
index 9fc91e33..57b0746e 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/CachingAction.kt
@@ -22,6 +22,6 @@ internal class CachingAction<T>(
 
     override suspend fun DataSink<T>.update(source: DataTree<T>, actionMeta: Meta, updateName: Name) {
         val updatedData = source.read(updateName)
-        put(updateName, updatedData?.named(updateName)?.let(caching))
+        write(updateName, updatedData?.named(updateName)?.let(caching))
     }
 }
\ No newline at end of file

From 9d70ba96eba59dddd29c4efd58d7770aca582ee7 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Fri, 3 Jan 2025 15:44:10 +0300
Subject: [PATCH 23/29] Remove implicit IOFormat in IOPlugin

---
 CHANGELOG.md                                  |  3 +-
 .../space/kscience/dataforge/io/IOPlugin.kt   | 17 -------
 .../space/kscience/dataforge/io/fileIO.kt     | 10 ----
 .../space/kscience/dataforge/meta/Scheme.kt   |  2 +-
 .../dataforge/names/NameIndexComparator.kt    |  2 +-
 .../kscience/dataforge/names/NameToken.kt     |  8 +--
 .../kscience/dataforge/workspace/Task.kt      |  6 +--
 .../dataforge/workspace/FileWorkspaceCache.kt | 51 +++++++++++++------
 8 files changed, 46 insertions(+), 53 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5e4a3ce2..83462d70 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,12 +8,13 @@
 
 ### Changed
 - Simplify inheritance logic in `MutableTypedMeta`
-- Full rework of `DataTree` and associated interfaces (`DataSource`, `DataSink`, etc).
+- Full rework of `DataTree` and associated interfaces (`DataSource`, `DataSink`, etc.).
 
 ### Deprecated
 - MetaProvider `spec` is replaced by `readable`. `listOfSpec` replaced with `listOfReadable`
 
 ### Removed
+- Remove implicit IO format resolver in `IOPlugin` and `FileWorkspaceCache`. There are no guarantees that only one format is present in the context for each type.
 
 ### Fixed
 - Fixed NameToken parsing.
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt
index f431a731..11b5e5e3 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt
@@ -6,28 +6,11 @@ import space.kscience.dataforge.io.IOFormatFactory.Companion.IO_FORMAT_TYPE
 import space.kscience.dataforge.io.MetaFormatFactory.Companion.META_FORMAT_TYPE
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.string
-import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.Name
-import kotlin.reflect.KType
-import kotlin.reflect.typeOf
 
 public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
     override val tag: PluginTag get() = Companion.tag
 
-    public val ioFormatFactories: Collection<IOFormatFactory<*>> by lazy {
-        context.gather<IOFormatFactory<*>>(IO_FORMAT_TYPE).values
-    }
-
-    @Suppress("UNCHECKED_CAST")
-    @UnsafeKType
-    public fun <T> resolveIOFormat(type: KType, meta: Meta): IOFormat<T>? =
-        ioFormatFactories.singleOrNull { it.type == type }?.build(context, meta) as? IOFormat<T>
-
-    @OptIn(UnsafeKType::class)
-    public inline fun <reified T> resolveIOFormat(meta: Meta = Meta.EMPTY): IOFormat<T>? =
-        resolveIOFormat(typeOf<T>(), meta)
-
-
     public val metaFormatFactories: Collection<MetaFormatFactory> by lazy {
         context.gather<MetaFormatFactory>(META_FORMAT_TYPE).values
     }
diff --git a/dataforge-io/src/jvmMain/kotlin/space/kscience/dataforge/io/fileIO.kt b/dataforge-io/src/jvmMain/kotlin/space/kscience/dataforge/io/fileIO.kt
index 7df23eb5..2d54e061 100644
--- a/dataforge-io/src/jvmMain/kotlin/space/kscience/dataforge/io/fileIO.kt
+++ b/dataforge-io/src/jvmMain/kotlin/space/kscience/dataforge/io/fileIO.kt
@@ -15,8 +15,6 @@ import java.nio.file.Path
 import java.nio.file.StandardOpenOption
 import kotlin.io.path.inputStream
 import kotlin.math.min
-import kotlin.reflect.full.isSupertypeOf
-import kotlin.reflect.typeOf
 import kotlin.streams.asSequence
 
 
@@ -79,14 +77,6 @@ public fun Path.rewrite(block: Sink.() -> Unit): Unit {
 
 public fun EnvelopeFormat.readFile(path: Path): Envelope = readFrom(path.asBinary())
 
-/**
- * Resolve IOFormat based on type
- */
-@Suppress("UNCHECKED_CAST")
-public inline fun <reified T : Any> IOPlugin.resolveIOFormat(): IOFormat<T>? =
-    ioFormatFactories.find { it.type.isSupertypeOf(typeOf<T>()) } as IOFormat<T>?
-
-
 public val IOPlugin.Companion.META_FILE_NAME: String get() = "@meta"
 public val IOPlugin.Companion.DATA_FILE_NAME: String get() = "@data"
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
index bc05cb5d..12eb4c68 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
@@ -227,7 +227,7 @@ public fun <T : Scheme> MutableMetaProvider.scheme(
 ): ReadWriteProperty<Any?, T> = object : ReadWriteProperty<Any?, T> {
     override fun getValue(thisRef: Any?, property: KProperty<*>): T {
         val name = key ?: property.name.asName()
-        val node = get(name)?: MutableMeta().also { set(name,it) }
+        val node = get(name) ?: MutableMeta().also { set(name, it) }
         return spec.write(node)
     }
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameIndexComparator.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameIndexComparator.kt
index 742f8ebb..bb95cf65 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameIndexComparator.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameIndexComparator.kt
@@ -27,4 +27,4 @@ public object NameIndexComparator : Comparator<String?> {
 public fun Meta.getIndexedList(name: Name): List<Meta> = getIndexed(name).entries.sortedWith(
     //sort by index
     compareBy(space.kscience.dataforge.names.NameIndexComparator) { it.key }
-).map{it.value}
\ No newline at end of file
+).map { it.value }
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt
index 3994ef27..d6a760f1 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/NameToken.kt
@@ -82,13 +82,13 @@ public class NameToken(public val body: String, public val index: String? = null
                         else -> indexEnd = index
                     }
 
-                    else -> if(indexEnd>=0) error("Symbols not allowed after index in NameToken: $string")
+                    else -> if (indexEnd >= 0) error("Symbols not allowed after index in NameToken: $string")
                 }
             }
-            if(indexStart>=0 && indexEnd<0) error("Opening bracket without closing bracket not allowed in NameToken: $string")
+            if (indexStart >= 0 && indexEnd < 0) error("Opening bracket without closing bracket not allowed in NameToken: $string")
             return NameToken(
-                if(indexStart>=0) string.substring(0, indexStart) else string,
-                if(indexStart>=0) string.substring(indexStart + 1, indexEnd) else null
+                if (indexStart >= 0) string.substring(0, indexStart) else string,
+                if (indexStart >= 0) string.substring(indexStart + 1, indexEnd) else null
             )
         }
     }
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
index 06134ce6..5e0ff572 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
@@ -29,10 +29,10 @@ public interface Task<T> : Described {
     public val fingerprint: String get() = hashCode().toString(radix = 16)
 
     /**
-     * Compute a [TaskResult] using given meta. In general, the result is lazy and represents both computation model
-     * and a handler for actual result
+     * Compute a [TaskResult] using given meta. In general, the result is lazy and represents both the computation model
+     * and a handler for the actual result
      *
-     * @param workspace a workspace to run task in
+     * @param workspace a workspace to run the task in
      * @param taskName the name of the task in this workspace
      * @param taskMeta configuration for current stage computation
      */
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
index 4d2578e5..ce32848a 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
@@ -15,6 +15,7 @@ import space.kscience.dataforge.data.Data
 import space.kscience.dataforge.data.await
 import space.kscience.dataforge.data.named
 import space.kscience.dataforge.io.*
+import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.misc.UnsafeKType
 import space.kscience.dataforge.names.withIndex
@@ -24,11 +25,7 @@ import kotlin.io.path.div
 import kotlin.io.path.exists
 import kotlin.reflect.KType
 
-public class JsonIOFormat<T>(private val type: KType) : IOFormat<T> {
-
-    @Suppress("UNCHECKED_CAST")
-    private val serializer: KSerializer<T> = serializer(type) as KSerializer<T>
-
+public class JsonIOFormat<T>(public val serializer: KSerializer<T>) : IOFormat<T> {
     override fun readFrom(source: Source): T = Json.decodeFromString(serializer, source.readString())
 
     override fun writeTo(sink: Sink, obj: T) {
@@ -36,12 +33,11 @@ public class JsonIOFormat<T>(private val type: KType) : IOFormat<T> {
     }
 }
 
+/**
+ * An [IOFormat] based on Protobuf representation of the serializable object.
+ */
 @OptIn(ExperimentalSerializationApi::class)
-public class ProtobufIOFormat<T>(private val type: KType) : IOFormat<T> {
-
-    @Suppress("UNCHECKED_CAST")
-    private val serializer: KSerializer<T> = serializer(type) as KSerializer<T>
-
+public class ProtobufIOFormat<T>(public val serializer: KSerializer<T>) : IOFormat<T> {
     override fun readFrom(source: Source): T = ProtoBuf.decodeFromByteArray(serializer, source.readByteArray())
 
     override fun writeTo(sink: Sink, obj: T) {
@@ -49,19 +45,39 @@ public class ProtobufIOFormat<T>(private val type: KType) : IOFormat<T> {
     }
 }
 
+public interface IOFormatResolveStrategy {
+    public fun <T> resolve(type: KType, meta: Meta): IOFormat<T>
 
-public class FileWorkspaceCache(public val cacheDirectory: Path) : WorkspaceCache {
+    public companion object {
+        public val PROTOBUF: IOFormatResolveStrategy = object : IOFormatResolveStrategy {
+            @Suppress("UNCHECKED_CAST")
+            override fun <T> resolve(
+                type: KType,
+                meta: Meta
+            ): IOFormat<T> = ProtobufIOFormat(serializer(type) as KSerializer<T>)
+        }
 
-    //    private fun <T : Any> TaskData<*>.checkType(taskType: KType): TaskData<T> = this as TaskData<T>
+        public val JSON: IOFormatResolveStrategy = object : IOFormatResolveStrategy {
+            @Suppress("UNCHECKED_CAST")
+            override fun <T> resolve(
+                type: KType,
+                meta: Meta
+            ): IOFormat<T> = JsonIOFormat(serializer(type) as KSerializer<T>)
+        }
+    }
+}
+
+public class FileWorkspaceCache(
+    public val cacheDirectory: Path,
+    private val ioFormatResolveStrategy: IOFormatResolveStrategy,
+) : WorkspaceCache {
 
 
     @OptIn(DFExperimental::class, UnsafeKType::class)
     override suspend fun <T> cache(result: TaskResult<T>): TaskResult<T> {
         val io = result.workspace.context.request(IOPlugin)
 
-        val format: IOFormat<T> = io.resolveIOFormat(result.dataType, result.taskMeta)
-            ?: ProtobufIOFormat(result.dataType)
-            ?: error("Can't resolve IOFormat for ${result.dataType}")
+        val format: IOFormat<T> = ioFormatResolveStrategy.resolve<T>(result.dataType, result.taskMeta)
 
 
         val cachingAction: Action<T, T> = CachingAction(result.dataType) { data ->
@@ -104,4 +120,7 @@ public class FileWorkspaceCache(public val cacheDirectory: Path) : WorkspaceCach
     }
 }
 
-public fun WorkspaceBuilder.fileCache(cacheDir: Path): Unit = cache(FileWorkspaceCache(cacheDir))
\ No newline at end of file
+public fun WorkspaceBuilder.fileCache(
+    cacheDir: Path,
+    ioFormatResolveStrategy: IOFormatResolveStrategy = IOFormatResolveStrategy.PROTOBUF
+): Unit = cache(FileWorkspaceCache(cacheDir, ioFormatResolveStrategy))
\ No newline at end of file

From da0ecbe2e5f7151207263b9a187877cabb041924 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 18 Jan 2025 13:10:57 +0300
Subject: [PATCH 24/29] Build update

---
 CHANGELOG.md                                         |  3 ++-
 dataforge-context/build.gradle.kts                   |  2 +-
 dataforge-data/build.gradle.kts                      |  2 +-
 .../space/kscience/dataforge/actions/MapAction.kt    |  2 ++
 .../kotlin/space/kscience/dataforge/data/Goal.kt     | 12 ++++++++----
 gradle.properties                                    |  2 +-
 6 files changed, 15 insertions(+), 8 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 83462d70..60e5bfd9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,7 +4,8 @@
 
 ### Added
 - Coroutine exception logging in context
-- ObservableMutableMetaSerializer
+- `ObservableMutableMetaSerializer`
+- `MutableMetaView` - a Meta wrapper that creates nodes only when its or its children are changed. 
 
 ### Changed
 - Simplify inheritance logic in `MutableTypedMeta`
diff --git a/dataforge-context/build.gradle.kts b/dataforge-context/build.gradle.kts
index 72a41693..c411f592 100644
--- a/dataforge-context/build.gradle.kts
+++ b/dataforge-context/build.gradle.kts
@@ -13,7 +13,7 @@ kscience {
     useSerialization()
     commonMain {
         api(projects.dataforgeMeta)
-        api(spclibs.atomicfu)
+//        api(spclibs.atomicfu)
     }
     jvmMain{
         api(kotlin("reflect"))
diff --git a/dataforge-data/build.gradle.kts b/dataforge-data/build.gradle.kts
index 99314ea3..8c0a690e 100644
--- a/dataforge-data/build.gradle.kts
+++ b/dataforge-data/build.gradle.kts
@@ -9,7 +9,7 @@ kscience{
     wasm()
     useCoroutines()
     dependencies {
-        api(spclibs.atomicfu)
+//        api(spclibs.atomicfu)
         api(projects.dataforgeMeta)
         //Remove after subtype moved to stdlib
         api(kotlin("reflect"))
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index a763515c..ffa313f2 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -37,6 +37,7 @@ public class MapActionBuilder<T, R>(
     /**
      * Set unsafe [outputType] for the resulting data. Be sure that it is correct.
      */
+    @UnsafeKType
     public fun <R1 : R> result(outputType: KType, f: suspend ActionEnv.(T) -> R1) {
         this.outputType = outputType
         result = f;
@@ -45,6 +46,7 @@ public class MapActionBuilder<T, R>(
     /**
      * Calculate the result of goal
      */
+    @OptIn(UnsafeKType::class)
     public inline fun <reified R1 : R> result(noinline f: suspend ActionEnv.(T) -> R1): Unit = result(typeOf<R1>(), f)
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
index e54710b1..e9b022b2 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
@@ -64,11 +64,14 @@ public open class LazyGoal<T>(
     /**
      * Get ongoing computation or start a new one.
      * Does not guarantee thread safety. In case of multi-thread access, could create orphan computations.
-     * If [GoalExecutionRestriction] is present in the [coroutineScope] context, the call could produce a error a warning
+     * If [GoalExecutionRestriction] is present in the [coroutineScope] context, the call could produce an error or a warning
      * depending on the settings.
+     *
+     * If [Goal] is already started on a different scope, it is not restarted.
      */
     @OptIn(DFExperimental::class)
-    override fun async(coroutineScope: CoroutineScope): Deferred<T> {
+    override fun async(coroutineScope: CoroutineScope): Deferred<T> = deferred ?: run {
+
         val log = coroutineScope.coroutineContext[GoalLogger]
         // Check if context restricts goal computation
         coroutineScope.coroutineContext[GoalExecutionRestriction]?.let { restriction ->
@@ -85,13 +88,14 @@ public open class LazyGoal<T>(
         val startedDependencies = dependencies.map { goal ->
             goal.async(coroutineScope)
         }
-        return deferred ?: coroutineScope.async(
+
+        coroutineScope.async(
             coroutineContext
                     + CoroutineMonitor()
                     + Dependencies(startedDependencies)
                     + GoalExecutionRestriction(GoalExecutionRestrictionPolicy.NONE) // Remove restrictions on goal execution
         ) {
-            //cancel execution if error encountered in one of dependencies
+            //cancel execution if error encountered in one of the dependencies
             startedDependencies.forEach { deferred ->
                 deferred.invokeOnCompletion { error ->
                     if (error != null) this.cancel(CancellationException("Dependency $deferred failed with error: ${error.message}"))
diff --git a/gradle.properties b/gradle.properties
index 67ba7f8e..ea4473c1 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -6,4 +6,4 @@ org.gradle.jvmargs=-Xmx4096m
 kotlin.mpp.stability.nowarn=true
 kotlin.native.ignoreDisabledTargets=true
 
-toolsVersion=0.16.1-kotlin-2.1.0
\ No newline at end of file
+toolsVersion=0.16.0-kotlin-2.1.20-Beta1
\ No newline at end of file

From 27745802e159af4d45eccd61a1f11965f17ac2b1 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 18 Jan 2025 13:11:08 +0300
Subject: [PATCH 25/29] Add MutableMetaView

---
 .../dataforge/meta/MutableMetaView.kt         | 40 +++++++++++++++++++
 1 file changed, 40 insertions(+)
 create mode 100644 dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaView.kt

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaView.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaView.kt
new file mode 100644
index 00000000..6a601940
--- /dev/null
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaView.kt
@@ -0,0 +1,40 @@
+package space.kscience.dataforge.meta
+
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.NameToken
+import space.kscience.dataforge.names.plus
+
+/**
+ * A [Meta] child proxy that creates required nodes on write
+ */
+public class MutableMetaView(
+    public val origin: MutableMeta,
+    public val path: Name
+) : MutableMeta {
+
+    override val items: Map<NameToken, MutableMeta>
+        get() = origin[path]?.items ?: emptyMap()
+
+    override var value: Value?
+        get() = origin[path]?.value
+        set(value) {
+            origin[path] = value
+        }
+
+    override fun getOrCreate(name: Name): MutableMeta = MutableMetaView(origin, path + name)
+
+
+    override fun set(name: Name, node: Meta?) {
+        set(path + name, node)
+    }
+
+
+    override fun equals(other: Any?): Boolean = Meta.equals(this, other as? Meta)
+
+
+    override fun hashCode(): Int  = Meta.hashCode(this)
+
+    override fun toString(): String = Meta.toString(this)
+}
+
+public fun MutableMeta.view(name: Name): MutableMetaView = MutableMetaView(this, name)
\ No newline at end of file

From 79bbc6c76b00f60390d801a752d64c71dc0df785 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 18 Jan 2025 13:48:24 +0300
Subject: [PATCH 26/29] Fix MutableMetaView.kt

---
 .../dataforge/meta/MutableMetaView.kt         | 27 ++++++++++++-------
 .../dataforge/meta/ObservableMetaWrapper.kt   |  2 +-
 .../dataforge/meta/MutableMetaViewTest.kt     | 25 +++++++++++++++++
 3 files changed, 43 insertions(+), 11 deletions(-)
 create mode 100644 dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MutableMetaViewTest.kt

diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaView.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaView.kt
index 6a601940..2bc3f9aa 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaView.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaView.kt
@@ -2,14 +2,15 @@ package space.kscience.dataforge.meta
 
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.NameToken
+import space.kscience.dataforge.names.parseAsName
 import space.kscience.dataforge.names.plus
 
 /**
- * A [Meta] child proxy that creates required nodes on write
+ * A [Meta] child proxy that creates required nodes on value write
  */
-public class MutableMetaView(
-    public val origin: MutableMeta,
-    public val path: Name
+private class MutableMetaView(
+    val origin: MutableMeta,
+    val path: Name
 ) : MutableMeta {
 
     override val items: Map<NameToken, MutableMeta>
@@ -23,18 +24,24 @@ public class MutableMetaView(
 
     override fun getOrCreate(name: Name): MutableMeta = MutableMetaView(origin, path + name)
 
-
     override fun set(name: Name, node: Meta?) {
-        set(path + name, node)
+        if (origin[path + name] == null && node?.isEmpty() == true) return
+        origin[path + name] = node
     }
 
-
     override fun equals(other: Any?): Boolean = Meta.equals(this, other as? Meta)
 
-
-    override fun hashCode(): Int  = Meta.hashCode(this)
+    override fun hashCode(): Int = Meta.hashCode(this)
 
     override fun toString(): String = Meta.toString(this)
 }
 
-public fun MutableMeta.view(name: Name): MutableMetaView = MutableMetaView(this, name)
\ No newline at end of file
+/**
+ * Create a view of this [MutableMeta] node that creates child items only when their values are written.
+ *
+ * The difference between this method and the regular [getOrCreate] is that [getOrCreate] always creates and attaches a node
+ * even if it is empty.
+ */
+public fun MutableMeta.view(name: Name): MutableMeta = MutableMetaView(this, name)
+
+public fun MutableMeta.view(name: String): MutableMeta = view(name.parseAsName())
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt
index 7c62f692..c8780417 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMetaWrapper.kt
@@ -59,7 +59,7 @@ private class ObservableMetaWrapper(
 
     fun removeNode(name: Name): Meta? {
         val oldMeta = get(name)
-        //don't forget to remove listener
+        //remember to remove listener
         oldMeta?.removeListener(this)
 
         return oldMeta
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MutableMetaViewTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MutableMetaViewTest.kt
new file mode 100644
index 00000000..61af1d04
--- /dev/null
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MutableMetaViewTest.kt
@@ -0,0 +1,25 @@
+package space.kscience.dataforge.meta
+
+import space.kscience.dataforge.names.asName
+import kotlin.test.Test
+import kotlin.test.assertEquals
+import kotlin.test.assertTrue
+
+class MutableMetaViewTest {
+    @Test
+    fun metaView() {
+        val meta = MutableMeta()
+        val view = meta.view("a".asName())
+
+        view["b"] = Meta.EMPTY
+
+        assertTrue { meta.items.isEmpty() }
+
+        view["c"] = Meta {
+            "d" put 22
+        }
+
+        assertEquals(22, meta["a.c.d"].int)
+    }
+
+}
\ No newline at end of file

From de534dec2d89a8a6b3a0f924df3c2d6a46f38173 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sat, 18 Jan 2025 14:26:56 +0300
Subject: [PATCH 27/29] Update plugin version

---
 dataforge-meta/api/dataforge-meta.api | 5 +++++
 gradle.properties                     | 2 +-
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/dataforge-meta/api/dataforge-meta.api b/dataforge-meta/api/dataforge-meta.api
index 5bf71f94..b282c77f 100644
--- a/dataforge-meta/api/dataforge-meta.api
+++ b/dataforge-meta/api/dataforge-meta.api
@@ -520,6 +520,11 @@ public final class space/kscience/dataforge/meta/MutableMetaSerializer : kotlinx
 	public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/meta/MutableMeta;)V
 }
 
+public final class space/kscience/dataforge/meta/MutableMetaViewKt {
+	public static final fun view (Lspace/kscience/dataforge/meta/MutableMeta;Ljava/lang/String;)Lspace/kscience/dataforge/meta/MutableMeta;
+	public static final fun view (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
+}
+
 public abstract interface class space/kscience/dataforge/meta/MutableTypedMeta : space/kscience/dataforge/meta/MutableMeta, space/kscience/dataforge/meta/TypedMeta {
 	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
 	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/MutableMeta;
diff --git a/gradle.properties b/gradle.properties
index ea4473c1..67ba7f8e 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -6,4 +6,4 @@ org.gradle.jvmargs=-Xmx4096m
 kotlin.mpp.stability.nowarn=true
 kotlin.native.ignoreDisabledTargets=true
 
-toolsVersion=0.16.0-kotlin-2.1.20-Beta1
\ No newline at end of file
+toolsVersion=0.16.1-kotlin-2.1.0
\ No newline at end of file

From 99a053b978d09cd3c262b2e33a83a3c154db669b Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 19 Jan 2025 12:34:36 +0300
Subject: [PATCH 28/29] 0.10.0 release

---
 CHANGELOG.md                                  | 23 +++++-
 README.md                                     |  1 +
 build.gradle.kts                              |  6 ++
 dataforge-context/build.gradle.kts            |  5 +-
 .../dataforge/context/ClassLoaderPlugin.kt    |  2 +-
 .../kscience/dataforge/provider/dfType.kt     | 17 ++--
 dataforge-data/build.gradle.kts               |  3 -
 .../kscience/dataforge/data/DataFilter.kt     | 57 +++-----------
 .../kscience/dataforge/data/ActionsTest.kt    |  8 +-
 dataforge-output/api/dataforge-output.api     | 68 ----------------
 dataforge-output/build.gradle.kts             | 15 ----
 .../hep/dataforge/output/OutputManager.kt     | 75 ------------------
 .../kotlin/hep/dataforge/output/Renderer.kt   | 21 -----
 .../hep/dataforge/output/TextRenderer.kt      | 78 -------------------
 .../kotlin/hep/dataforge/output/outputJS.kt   |  7 --
 .../kotlin/hep/dataforge/output/outputJVM.kt  |  6 --
 .../hep/dataforge/output/outputNative.kt      |  6 --
 dataforge-scripting/README.md                 |  2 +-
 dataforge-workspace/build.gradle.kts          |  2 +
 .../dataforge/workspace}/dataFilterJvm.kt     | 17 +---
 .../dataforge/workspace/workspaceJvm.kt       |  1 -
 .../workspace/SimpleWorkspaceTest.kt          | 10 +--
 gradle.properties                             |  1 +
 23 files changed, 69 insertions(+), 362 deletions(-)
 delete mode 100644 dataforge-output/api/dataforge-output.api
 delete mode 100644 dataforge-output/build.gradle.kts
 delete mode 100644 dataforge-output/src/commonMain/kotlin/hep/dataforge/output/OutputManager.kt
 delete mode 100644 dataforge-output/src/commonMain/kotlin/hep/dataforge/output/Renderer.kt
 delete mode 100644 dataforge-output/src/commonMain/kotlin/hep/dataforge/output/TextRenderer.kt
 delete mode 100644 dataforge-output/src/jsMain/kotlin/hep/dataforge/output/outputJS.kt
 delete mode 100644 dataforge-output/src/jvmMain/kotlin/hep/dataforge/output/outputJVM.kt
 delete mode 100644 dataforge-output/src/nativeMain/kotlin/hep/dataforge/output/outputNative.kt
 rename {dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data => dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace}/dataFilterJvm.kt (77%)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 60e5bfd9..06dfa5bf 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,26 +3,45 @@
 ## Unreleased
 
 ### Added
+
+### Changed
+
+### Deprecated
+
+### Removed
+
+### Fixed
+
+### Security
+
+## 0.10.0 - 2025-01-19
+
+### Added
+
 - Coroutine exception logging in context
 - `ObservableMutableMetaSerializer`
 - `MutableMetaView` - a Meta wrapper that creates nodes only when its or its children are changed. 
 
 ### Changed
+
 - Simplify inheritance logic in `MutableTypedMeta`
 - Full rework of `DataTree` and associated interfaces (`DataSource`, `DataSink`, etc.).
+- Filter data by type is moved from `dataforge-data` to `dataforge-workspace` to avoid reflection dependency.
 
 ### Deprecated
+
 - MetaProvider `spec` is replaced by `readable`. `listOfSpec` replaced with `listOfReadable`
 
 ### Removed
+
 - Remove implicit io format resolver in `IOPlugin` and `FileWorkspaceCache`. There are no guarantees that only one format is present in the contrxt for each type.
+- Dependencies on `atomicfu` and `kotlin.reflect` from dataforge-data to improve performance.
 
 ### Fixed
+
 - Fixed NameToken parsing.
 - Top level string list meta conversion.
 
-### Security
-
 ## 0.9.0 - 2024-06-04
 
 ### Added
diff --git a/README.md b/README.md
index be5591cf..a3dd7b7b 100644
--- a/README.md
+++ b/README.md
@@ -101,6 +101,7 @@ In this section, we will try to cover DataForge main ideas in the form of questi
 
 
 ### [dataforge-scripting](dataforge-scripting)
+> Scripting definition for workspace generation
 >
 > **Maturity**: PROTOTYPE
 
diff --git a/build.gradle.kts b/build.gradle.kts
index 2d7cb095..22784656 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -22,6 +22,12 @@ subprojects {
     }
 }
 
+dependencies{
+    subprojects.forEach {
+        dokka(it)
+    }
+}
+
 readme {
     readmeTemplate = file("docs/templates/README-TEMPLATE.md")
 }
diff --git a/dataforge-context/build.gradle.kts b/dataforge-context/build.gradle.kts
index c411f592..ad7b76a9 100644
--- a/dataforge-context/build.gradle.kts
+++ b/dataforge-context/build.gradle.kts
@@ -13,11 +13,10 @@ kscience {
     useSerialization()
     commonMain {
         api(projects.dataforgeMeta)
-//        api(spclibs.atomicfu)
     }
     jvmMain{
-        api(kotlin("reflect"))
-        api("org.slf4j:slf4j-api:1.7.30")
+        api(spclibs.kotlin.reflect)
+        api(spclibs.slf4j)
     }
 }
 
diff --git a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/context/ClassLoaderPlugin.kt b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/context/ClassLoaderPlugin.kt
index b2c703e1..44ba290f 100644
--- a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/context/ClassLoaderPlugin.kt
+++ b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/context/ClassLoaderPlugin.kt
@@ -17,7 +17,7 @@ package space.kscience.dataforge.context
 
 import java.util.*
 import kotlin.reflect.KClass
-import kotlin.reflect.full.cast
+import kotlin.reflect.cast
 
 public class ClassLoaderPlugin(private val classLoader: ClassLoader) : AbstractPlugin() {
     override val tag: PluginTag = PluginTag("classLoader", PluginTag.DATAFORGE_GROUP)
diff --git a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt
index cccc3efb..d291f378 100644
--- a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt
+++ b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt
@@ -8,25 +8,30 @@ import space.kscience.dataforge.misc.DfType
 import space.kscience.dataforge.misc.Named
 import space.kscience.dataforge.names.Name
 import kotlin.reflect.KClass
+import kotlin.reflect.KType
 import kotlin.reflect.full.findAnnotation
-
+import kotlin.reflect.typeOf
 
 @DFExperimental
 public val KClass<*>.dfType: String
     get() = findAnnotation<DfType>()?.id ?: simpleName ?: ""
 
+@DFExperimental
+public val KType.dfType: String
+    get() = findAnnotation<DfType>()?.id ?: (classifier as? KClass<*>)?.simpleName ?: ""
+
 /**
  * Provide an object with given name inferring target from its type using [DfType] annotation
  */
 @DFExperimental
 public inline fun <reified T : Any> Provider.provideByType(name: String): T? {
-    val target = T::class.dfType
+    val target = typeOf<T>().dfType
     return provide(target, name)
 }
 
 @DFExperimental
 public inline fun <reified T : Any> Provider.top(): Map<Name, T> {
-    val target = T::class.dfType
+    val target = typeOf<T>().dfType
     return top(target)
 }
 
@@ -35,15 +40,15 @@ public inline fun <reified T : Any> Provider.top(): Map<Name, T> {
  */
 @DFExperimental
 public inline fun <reified T : Any> Context.gather(inherit: Boolean = true): Map<Name, T> =
-    gather<T>(T::class.dfType, inherit)
+    gather<T>(typeOf<T>().dfType, inherit)
 
 
 @DFExperimental
 public inline fun <reified T : Any> PluginBuilder.provides(items: Map<Name, T>) {
-    provides(T::class.dfType, items)
+    provides(typeOf<T>().dfType, items)
 }
 
 @DFExperimental
 public inline fun <reified T : Any> PluginBuilder.provides(vararg items: Named) {
-    provides(T::class.dfType, *items)
+    provides(typeOf<T>().dfType, *items)
 }
diff --git a/dataforge-data/build.gradle.kts b/dataforge-data/build.gradle.kts
index 8c0a690e..9451f59a 100644
--- a/dataforge-data/build.gradle.kts
+++ b/dataforge-data/build.gradle.kts
@@ -9,10 +9,7 @@ kscience{
     wasm()
     useCoroutines()
     dependencies {
-//        api(spclibs.atomicfu)
         api(projects.dataforgeMeta)
-        //Remove after subtype moved to stdlib
-        api(kotlin("reflect"))
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
index 5538cc28..e9392da8 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
@@ -3,6 +3,7 @@ package space.kscience.dataforge.data
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.filter
 import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.NameToken
 import space.kscience.dataforge.names.plus
@@ -17,17 +18,6 @@ public fun interface DataFilter {
     }
 }
 
-
-//public fun DataFilter.accepts(update: DataUpdate<*>): Boolean = accepts(update.name, update.data?.meta, update.type)
-
-//public fun <T, DU : DataUpdate<T>> Sequence<DU>.filterData(predicate: DataFilter): Sequence<DU> = filter { data ->
-//    predicate.accepts(data)
-//}
-//
-//public fun <T, DU : DataUpdate<T>> Flow<DU>.filterData(predicate: DataFilter): Flow<DU> = filter { data ->
-//    predicate.accepts(data)
-//}
-
 public fun <T> DataSource<T>.filterData(
     dataFilter: DataFilter,
 ): DataSource<T> = object : DataSource<T> {
@@ -58,10 +48,14 @@ public fun <T> ObservableDataSource<T>.filterData(
         this@filterData.read(name)?.takeIf { predicate.accepts(name, it.meta, it.type) }
 }
 
-internal class FilteredDataTree<T>(
-    val source: DataTree<T>,
-    val filter: DataFilter,
-    val branch: Name,
+/**
+ * A [DataTree] filtered by branch and some criterion, possibly changing resulting type
+ */
+@DFInternal
+public class FilteredDataTree<T>(
+    public val source: DataTree<T>,
+    public val filter: DataFilter,
+    public val branch: Name,
     override val dataType: KType = source.dataType,
 ) : DataTree<T> {
 
@@ -83,37 +77,6 @@ internal class FilteredDataTree<T>(
         }
 }
 
-
 public fun <T> DataTree<T>.filterData(
     predicate: DataFilter,
-): DataTree<T> = FilteredDataTree(this, predicate, Name.EMPTY)
-
-
-///**
-// * Generate a wrapper data set with a given name prefix appended to all names
-// */
-//public fun <T : Any> DataTree<T>.withNamePrefix(prefix: Name): DataSet<T> = if (prefix.isEmpty()) {
-//    this
-//} else object : DataSource<T> {
-//
-//    override val dataType: KType get() = this@withNamePrefix.dataType
-//
-//    override val coroutineContext: CoroutineContext
-//        get() = (this@withNamePrefix as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
-//
-//    override val meta: Meta get() = this@withNamePrefix.meta
-//
-//
-//    override fun iterator(): Iterator<NamedData<T>> = iterator {
-//        for (d in this@withNamePrefix) {
-//            yield(d.data.named(prefix + d.name))
-//        }
-//    }
-//
-//    override fun get(name: Name): Data<T>? =
-//        name.removeFirstOrNull(name)?.let { this@withNamePrefix.get(it) }
-//
-//    override val updates: Flow<Name> get() = this@withNamePrefix.updates.map { prefix + it }
-//}
-//
-
+): FilteredDataTree<T> = FilteredDataTree(this, predicate, Name.EMPTY)
\ No newline at end of file
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index 4a0342dc..fcc3e299 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -13,7 +13,7 @@ import kotlin.time.Duration.Companion.milliseconds
 @OptIn(DFExperimental::class, ExperimentalCoroutinesApi::class)
 internal class ActionsTest {
     @Test
-    fun testStaticMapAction() = runTest(timeout = 500.milliseconds) {
+    fun testStaticMapAction() = runTest(timeout = 200.milliseconds) {
         val plusOne = Action.mapping<Int, Int> {
             result { it + 1 }
         }
@@ -26,11 +26,11 @@ internal class ActionsTest {
 
         val result = plusOne(data)
 
-        assertEquals(2, result.awaitData("1").await())
+        assertEquals(5, result.awaitData("4").await())
     }
 
     @Test
-    fun testDynamicMapAction() = runTest(timeout = 500.milliseconds) {
+    fun testDynamicMapAction() = runTest(timeout = 200.milliseconds) {
         val plusOne = Action.mapping<Int, Int> {
             result { it + 1 }
         }
@@ -43,7 +43,7 @@ internal class ActionsTest {
             source.writeValue(it.toString(), it)
         }
 
-        assertEquals(2, result.awaitData("1").await())
+        assertEquals(5, result.awaitData("4").await())
     }
 
 }
\ No newline at end of file
diff --git a/dataforge-output/api/dataforge-output.api b/dataforge-output/api/dataforge-output.api
deleted file mode 100644
index f991b754..00000000
--- a/dataforge-output/api/dataforge-output.api
+++ /dev/null
@@ -1,68 +0,0 @@
-public final class hep/dataforge/output/ConsoleOutputManager : hep/dataforge/context/AbstractPlugin, hep/dataforge/output/OutputManager {
-	public static final field Companion Lhep/dataforge/output/ConsoleOutputManager$Companion;
-	public fun <init> ()V
-	public fun get (Lkotlin/reflect/KClass;Lhep/dataforge/names/Name;Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;)Lhep/dataforge/output/Renderer;
-	public fun getTag ()Lhep/dataforge/context/PluginTag;
-}
-
-public final class hep/dataforge/output/ConsoleOutputManager$Companion : hep/dataforge/context/PluginFactory {
-	public fun getTag ()Lhep/dataforge/context/PluginTag;
-	public fun getType ()Lkotlin/reflect/KClass;
-	public fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Lhep/dataforge/output/ConsoleOutputManager;
-	public synthetic fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Ljava/lang/Object;
-}
-
-public final class hep/dataforge/output/DefaultTextFormat : hep/dataforge/output/TextFormat {
-	public static final field INSTANCE Lhep/dataforge/output/DefaultTextFormat;
-	public fun getPriority ()I
-	public fun getType ()Lkotlin/reflect/KClass;
-	public fun render (Ljava/lang/Appendable;Ljava/lang/Object;Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
-}
-
-public final class hep/dataforge/output/OutputJVMKt {
-	public static final fun getOutput (Lkotlinx/coroutines/Dispatchers;)Lkotlinx/coroutines/CoroutineDispatcher;
-}
-
-public abstract interface class hep/dataforge/output/OutputManager {
-	public abstract fun get (Lkotlin/reflect/KClass;Lhep/dataforge/names/Name;Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;)Lhep/dataforge/output/Renderer;
-}
-
-public final class hep/dataforge/output/OutputManager$DefaultImpls {
-	public static synthetic fun get$default (Lhep/dataforge/output/OutputManager;Lkotlin/reflect/KClass;Lhep/dataforge/names/Name;Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)Lhep/dataforge/output/Renderer;
-}
-
-public final class hep/dataforge/output/OutputManagerKt {
-	public static final fun getCONSOLE_RENDERER ()Lhep/dataforge/output/Renderer;
-	public static final fun getOutput (Lhep/dataforge/context/Context;)Lhep/dataforge/output/OutputManager;
-	public static final fun render (Lhep/dataforge/output/OutputManager;Ljava/lang/Object;Lhep/dataforge/names/Name;Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;)V
-	public static synthetic fun render$default (Lhep/dataforge/output/OutputManager;Ljava/lang/Object;Lhep/dataforge/names/Name;Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)V
-}
-
-public abstract interface class hep/dataforge/output/Renderer : hep/dataforge/context/ContextAware {
-	public abstract fun render (Ljava/lang/Object;Lhep/dataforge/meta/Meta;)V
-}
-
-public final class hep/dataforge/output/Renderer$DefaultImpls {
-	public static fun getLogger (Lhep/dataforge/output/Renderer;)Lmu/KLogger;
-	public static synthetic fun render$default (Lhep/dataforge/output/Renderer;Ljava/lang/Object;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)V
-}
-
-public abstract interface class hep/dataforge/output/TextFormat {
-	public static final field Companion Lhep/dataforge/output/TextFormat$Companion;
-	public static final field TEXT_RENDERER_TYPE Ljava/lang/String;
-	public abstract fun getPriority ()I
-	public abstract fun getType ()Lkotlin/reflect/KClass;
-	public abstract fun render (Ljava/lang/Appendable;Ljava/lang/Object;Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
-}
-
-public final class hep/dataforge/output/TextFormat$Companion {
-	public static final field TEXT_RENDERER_TYPE Ljava/lang/String;
-}
-
-public final class hep/dataforge/output/TextRenderer : hep/dataforge/output/Renderer {
-	public fun <init> (Lhep/dataforge/context/Context;Ljava/lang/Appendable;)V
-	public fun getContext ()Lhep/dataforge/context/Context;
-	public fun getLogger ()Lmu/KLogger;
-	public fun render (Ljava/lang/Object;Lhep/dataforge/meta/Meta;)V
-}
-
diff --git a/dataforge-output/build.gradle.kts b/dataforge-output/build.gradle.kts
deleted file mode 100644
index a858ff93..00000000
--- a/dataforge-output/build.gradle.kts
+++ /dev/null
@@ -1,15 +0,0 @@
-plugins {
-    id("space.kscience.gradle.mpp")
-    id("space.kscience.gradle.native")
-}
-
-kotlin {
-    sourceSets {
-        val commonMain by getting{
-            dependencies {
-                api(project(":dataforge-context"))
-                //api(project(":dataforge-io"))
-            }
-        }
-    }
-}
\ No newline at end of file
diff --git a/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/OutputManager.kt b/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/OutputManager.kt
deleted file mode 100644
index c8580403..00000000
--- a/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/OutputManager.kt
+++ /dev/null
@@ -1,75 +0,0 @@
-package space.kscience.dataforge.output
-
-import space.kscience.dataforge.context.*
-import space.kscience.dataforge.context.PluginTag.Companion.DATAFORGE_GROUP
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.names.Name
-import kotlinx.coroutines.CoroutineDispatcher
-import kotlinx.coroutines.Dispatchers
-import kotlin.reflect.KClass
-
-/**
- * A manager for outputs
- */
-public interface OutputManager {
-
-    /**
-     * Get an output specialized for given type, name and stage.
-     * @param stage represents the node or directory for the output. Empty means root node.
-     * @param name represents the name inside the node.
-     * @param meta configuration for [Renderer] (not for rendered object)
-     */
-    public fun <T : Any> getOutputContainer(
-        type: KClass<out T>,
-        name: Name,
-        stage: Name = Name.EMPTY,
-        meta: Meta = Meta.EMPTY
-    ): Renderer<T>
-}
-
-/**
- * Get an output manager for a context
- */
-public val Context.output: OutputManager get() = plugins.get() ?: ConsoleOutputManager()
-
-/**
- * Get an output with given [name], [stage] and reified content type
- */
-public inline fun <reified T : Any> OutputManager.getOutputContainer(
-    name: Name,
-    stage: Name = Name.EMPTY,
-    meta: Meta = Meta.EMPTY
-): Renderer<T> {
-    return getOutputContainer(T::class, name, stage, meta)
-}
-
-/**
- * Directly render an object using the most suitable renderer
- */
-public fun OutputManager.render(obj: Any, name: Name, stage: Name = Name.EMPTY, meta: Meta = Meta.EMPTY): Unit =
-    getOutputContainer(obj::class, name, stage).render(obj, meta)
-
-/**
- * System console output.
- * The [CONSOLE_RENDERER] is used when no other [OutputManager] is provided.
- */
-public val CONSOLE_RENDERER: Renderer<Any> = Renderer { obj, meta -> println(obj) }
-
-public class ConsoleOutputManager : AbstractPlugin(), OutputManager {
-    override val tag: PluginTag get() = ConsoleOutputManager.tag
-
-    override fun <T : Any> getOutputContainer(type: KClass<out T>, name: Name, stage: Name, meta: Meta): Renderer<T> = CONSOLE_RENDERER
-
-    public companion object : PluginFactory<ConsoleOutputManager> {
-        override val tag: PluginTag = PluginTag("output.console", group = DATAFORGE_GROUP)
-
-        override val type: KClass<ConsoleOutputManager> = ConsoleOutputManager::class
-
-        override fun invoke(meta: Meta, context: Context): ConsoleOutputManager = ConsoleOutputManager()
-    }
-}
-
-/**
- * A dispatcher for output tasks.
- */
-public expect val Dispatchers.Output: CoroutineDispatcher
\ No newline at end of file
diff --git a/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/Renderer.kt b/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/Renderer.kt
deleted file mode 100644
index f6caaeb3..00000000
--- a/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/Renderer.kt
+++ /dev/null
@@ -1,21 +0,0 @@
-package space.kscience.dataforge.output
-
-import space.kscience.dataforge.context.ContextAware
-import space.kscience.dataforge.meta.Meta
-
-/**
- * A generic way to render any object in the output.
- *
- * An object could be rendered either in append or overlay mode. The mode is decided by the [Renderer]
- * based on its configuration and provided meta
- *
- */
-public fun interface Renderer<in T : Any> {
-    /**
-     * Render specific object with configuration.
-     *
-     * By convention actual render is called in asynchronous mode, so this method should never
-     * block execution
-     */
-    public fun render(obj: T, meta: Meta)
-}
diff --git a/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/TextRenderer.kt b/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/TextRenderer.kt
deleted file mode 100644
index 8b33241b..00000000
--- a/dataforge-output/src/commonMain/kotlin/hep/dataforge/output/TextRenderer.kt
+++ /dev/null
@@ -1,78 +0,0 @@
-package space.kscience.dataforge.output
-
-import space.kscience.dataforge.context.Context
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.output.TextFormat.Companion.TEXT_RENDERER_TYPE
-import space.kscience.dataforge.provider.Type
-import space.kscience.dataforge.provider.top
-import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.launch
-import kotlin.reflect.KClass
-import kotlin.reflect.KType
-
-
-/**
- * A text or binary renderer based on [Output]
- */
-@Type(TEXT_RENDERER_TYPE)
-@Deprecated("Bad design")
-public interface TextFormat {
-    /**
-     * The priority of this renderer compared to other renderers
-     */
-    public val priority: Int
-    /**
-     * The type of the content served by this renderer
-     */
-    public val type: KClass<*>
-
-    public suspend fun Appendable.render(obj: Any)
-
-    public companion object {
-        public const val TEXT_RENDERER_TYPE: String = "dataforge.textRenderer"
-    }
-}
-
-@Deprecated("Bad design")
-public object DefaultTextFormat : TextFormat {
-    override val priority: Int = Int.MAX_VALUE
-    override val type: KClass<*> = Any::class
-
-    override suspend fun Appendable.render(obj: Any) {
-        append(obj.toString() + "\n")
-    }
-}
-
-/**
- * A text-based renderer
- */
-@Deprecated("Bad design")
-public class TextRenderer(override val context: Context, private val output: Appendable) : Renderer<Any> {
-    private val cache = HashMap<KClass<*>, TextFormat>()
-
-    /**
-     * Find the first [TextFormat] matching the given object type.
-     */
-    override fun render(obj: Any, meta: Meta) {
-        val format: TextFormat = if (obj is CharSequence) {
-            DefaultTextFormat
-        } else {
-            val value = cache[obj::class]
-            if (value == null) {
-                val answer =
-                    context.top<TextFormat>(TEXT_RENDERER_TYPE).values.firstOrNull { it.type.isInstance(obj) }
-                if (answer != null) {
-                    cache[obj::class] = answer
-                    answer
-                } else {
-                    DefaultTextFormat
-                }
-            } else {
-                value
-            }
-        }
-        context.launch(Dispatchers.Output) {
-            format.run { output.render(obj) }
-        }
-    }
-}
\ No newline at end of file
diff --git a/dataforge-output/src/jsMain/kotlin/hep/dataforge/output/outputJS.kt b/dataforge-output/src/jsMain/kotlin/hep/dataforge/output/outputJS.kt
deleted file mode 100644
index 453d7351..00000000
--- a/dataforge-output/src/jsMain/kotlin/hep/dataforge/output/outputJS.kt
+++ /dev/null
@@ -1,7 +0,0 @@
-package space.kscience.dataforge.output
-
-import kotlinx.coroutines.CoroutineDispatcher
-import kotlinx.coroutines.Dispatchers
-
-
-public actual val Dispatchers.Output: CoroutineDispatcher get() = Default
\ No newline at end of file
diff --git a/dataforge-output/src/jvmMain/kotlin/hep/dataforge/output/outputJVM.kt b/dataforge-output/src/jvmMain/kotlin/hep/dataforge/output/outputJVM.kt
deleted file mode 100644
index d9ba0b2f..00000000
--- a/dataforge-output/src/jvmMain/kotlin/hep/dataforge/output/outputJVM.kt
+++ /dev/null
@@ -1,6 +0,0 @@
-package space.kscience.dataforge.output
-
-import kotlinx.coroutines.CoroutineDispatcher
-import kotlinx.coroutines.Dispatchers
-
-public actual val Dispatchers.Output: CoroutineDispatcher get() = IO
\ No newline at end of file
diff --git a/dataforge-output/src/nativeMain/kotlin/hep/dataforge/output/outputNative.kt b/dataforge-output/src/nativeMain/kotlin/hep/dataforge/output/outputNative.kt
deleted file mode 100644
index 2d59ae4e..00000000
--- a/dataforge-output/src/nativeMain/kotlin/hep/dataforge/output/outputNative.kt
+++ /dev/null
@@ -1,6 +0,0 @@
-package space.kscience.dataforge.output
-
-import kotlinx.coroutines.CoroutineDispatcher
-import kotlinx.coroutines.Dispatchers
-
-public actual val Dispatchers.Output: CoroutineDispatcher get() = Dispatchers.Default
\ No newline at end of file
diff --git a/dataforge-scripting/README.md b/dataforge-scripting/README.md
index e516b392..76b36b5b 100644
--- a/dataforge-scripting/README.md
+++ b/dataforge-scripting/README.md
@@ -1,6 +1,6 @@
 # Module dataforge-scripting
 
-
+Scripting definition for workspace generation
 
 ## Usage
 
diff --git a/dataforge-workspace/build.gradle.kts b/dataforge-workspace/build.gradle.kts
index 5fa555eb..8254ef7d 100644
--- a/dataforge-workspace/build.gradle.kts
+++ b/dataforge-workspace/build.gradle.kts
@@ -2,6 +2,8 @@ plugins {
     id("space.kscience.gradle.mpp")
 }
 
+description = "A framework for pull-based data processing"
+
 kscience {
     jvm()
     js()
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/dataFilterJvm.kt
similarity index 77%
rename from dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
rename to dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/dataFilterJvm.kt
index ba7fdadf..ba8e148c 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/dataFilterJvm.kt
@@ -1,5 +1,6 @@
-package space.kscience.dataforge.data
+package space.kscience.dataforge.workspace
 
+import space.kscience.dataforge.data.*
 import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
 import kotlin.reflect.KType
@@ -20,16 +21,6 @@ private fun <R> Data<*>.castOrNull(type: KType): Data<R>? =
         }
     }
 
-//@Suppress("UNCHECKED_CAST")
-//@DFInternal
-//public fun <R> Sequence<DataUpdate<*>>.filterByDataType(type: KType): Sequence<NamedData<R>> =
-//    filter { it.type.isSubtypeOf(type) } as Sequence<NamedData<R>>
-//
-//@Suppress("UNCHECKED_CAST")
-//@DFInternal
-//public fun <R> Flow<DataUpdate<*>>.filterByDataType(type: KType): Flow<NamedData<R>> =
-//    filter { it.type.isSubtypeOf(type) } as Flow<NamedData<R>>
-
 /**
  * Select all data matching given type and filters. Does not modify paths
  *
@@ -40,7 +31,7 @@ private fun <R> Data<*>.castOrNull(type: KType): Data<R>? =
 public fun <R> DataTree<*>.filterByType(
     type: KType,
     branch: Name = Name.EMPTY,
-    filter: DataFilter = DataFilter.EMPTY,
+    filter: DataFilter = DataFilter.Companion.EMPTY,
 ): DataTree<R> {
     val filterWithType = DataFilter { name, meta, dataType ->
         filter.accepts(name, meta, dataType) && dataType.isSubtypeOf(type)
@@ -54,7 +45,7 @@ public fun <R> DataTree<*>.filterByType(
 @OptIn(DFInternal::class)
 public inline fun <reified R : Any> DataTree<*>.filterByType(
     branch: Name = Name.EMPTY,
-    filter: DataFilter = DataFilter.EMPTY,
+    filter: DataFilter = DataFilter.Companion.EMPTY,
 ): DataTree<R> = filterByType(typeOf<R>(), branch, filter = filter)
 
 /**
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
index 688b5699..d9bc3e84 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
@@ -1,7 +1,6 @@
 package space.kscience.dataforge.workspace
 
 import space.kscience.dataforge.data.DataTree
-import space.kscience.dataforge.data.filterByType
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
index 111b3b89..eb1c17b7 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
@@ -152,14 +152,14 @@ internal class SimpleWorkspaceTest {
     }
 
     @Test
-    fun testWorkspace() = runTest(timeout = 100.milliseconds) {
+    fun testWorkspace() = runTest(timeout = 200.milliseconds) {
         val node = workspace.produce("sum")
-        val res = node.asSequence().single()
-        assertEquals(328350, res.await())
+        val res = node.data
+        assertEquals(328350, res?.await())
     }
 
     @Test
-    fun testMetaPropagation() = runTest(timeout = 100.milliseconds) {
+    fun testMetaPropagation() = runTest(timeout = 200.milliseconds) {
         val node = workspace.produce("sum") { "testFlag" put true }
         val res = node.data?.await()
     }
@@ -190,7 +190,7 @@ internal class SimpleWorkspaceTest {
         val node = workspace.produce("filterOne") {
             "name" put "myData[12]"
         }
-        assertEquals(12, node.asSequence().first().await())
+        assertEquals(12, node.data?.await())
     }
 
 }
\ No newline at end of file
diff --git a/gradle.properties b/gradle.properties
index 67ba7f8e..015d2c52 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -5,5 +5,6 @@ org.gradle.jvmargs=-Xmx4096m
 
 kotlin.mpp.stability.nowarn=true
 kotlin.native.ignoreDisabledTargets=true
+org.jetbrains.dokka.experimental.gradle.pluginMode=V2Enabled
 
 toolsVersion=0.16.1-kotlin-2.1.0
\ No newline at end of file

From ec2626e006a8109cb19b83e05c8c559e39897ed8 Mon Sep 17 00:00:00 2001
From: Alexander Nozik <altavir@gmail.com>
Date: Sun, 19 Jan 2025 12:35:31 +0300
Subject: [PATCH 29/29] Add central publishing

---
 build.gradle.kts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.gradle.kts b/build.gradle.kts
index 22784656..84c1bba7 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -38,7 +38,7 @@ ksciencePublish {
         useSPCTeam()
     }
     repository("spc", "https://maven.sciprog.center/kscience")
-    sonatype("https://oss.sonatype.org")
+    central()
 }
 
 apiValidation {