diff --git a/.gitignore b/.gitignore
index 17a319a4..53b55cd4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,5 +5,7 @@ out/
 .gradle
 build/
 
+.kotlin
+
 
 !gradle-wrapper.jar
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 87ccad00..b9d3b6a9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,7 +3,6 @@
 ## Unreleased
 
 ### Added
-- Wasm artifacts
 
 ### Changed
 
@@ -12,10 +11,34 @@
 ### Removed
 
 ### Fixed
-- Partially fixed a bug with `MutableMeta` observable wrappers.
 
 ### Security
 
+## 0.8.0 - 2024-02-03
+
+### Added
+
+- Wasm artifacts
+- Add automatic MetaConverter for serializable objects
+- Add Meta and MutableMeta delegates for convertible and serializable
+- Meta mapping for data.
+
+### Changed
+
+- Descriptor `children` renamed to `nodes`
+- `MetaConverter` now inherits `MetaSpec` (former `Specification`). So `MetaConverter` could be used more universally.
+- Meta copy and modification now use lightweight non-observable meta builders.
+- Full refactor of Data API. DataTree now works similar to Meta: contains optional anonymous root element and data items. Updates are available for `ObservableDataSource` and `ObservableDataTree` variants.
+
+### Deprecated
+
+- `node(key,converter)` in favor of `serializable` delegate
+
+### Fixed
+
+- Partially fixed a bug with `MutableMeta` observable wrappers.
+- `valueSequence` now includes the root value, so `meta.update` works properly.
+
 ## 0.7.0 - 2023-11-26
 
 ### Added
diff --git a/build.gradle.kts b/build.gradle.kts
index edeae557..b9349868 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -8,7 +8,7 @@ plugins {
 
 allprojects {
     group = "space.kscience"
-    version = "0.7.1"
+    version = "0.8.0"
 }
 
 subprojects {
diff --git a/dataforge-context/README.md b/dataforge-context/README.md
index 905171b1..f0aff459 100644
--- a/dataforge-context/README.md
+++ b/dataforge-context/README.md
@@ -6,18 +6,16 @@ Context and provider definitions
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-context:0.7.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-context:0.8.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
 repositories {
     maven("https://repo.kotlin.link")
-    //uncomment to access development builds
-    //maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
     mavenCentral()
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-context:0.7.0")
+    implementation("space.kscience:dataforge-context:0.8.0")
 }
 ```
diff --git a/dataforge-context/api/dataforge-context.api b/dataforge-context/api/dataforge-context.api
index eac8e52c..12bca176 100644
--- a/dataforge-context/api/dataforge-context.api
+++ b/dataforge-context/api/dataforge-context.api
@@ -249,10 +249,27 @@ public final class space/kscience/dataforge/context/SlfLogManager$Companion : sp
 	public fun getTag ()Lspace/kscience/dataforge/context/PluginTag;
 }
 
-public final class space/kscience/dataforge/properties/PropertyKt {
+public abstract interface annotation class space/kscience/dataforge/descriptors/Description : java/lang/annotation/Annotation {
+	public abstract fun value ()Ljava/lang/String;
 }
 
-public final class space/kscience/dataforge/properties/SchemePropertyKt {
+public abstract interface annotation class space/kscience/dataforge/descriptors/DescriptorResource : java/lang/annotation/Annotation {
+	public abstract fun resourceName ()Ljava/lang/String;
+}
+
+public abstract interface annotation class space/kscience/dataforge/descriptors/DescriptorUrl : java/lang/annotation/Annotation {
+	public abstract fun url ()Ljava/lang/String;
+}
+
+public abstract interface annotation class space/kscience/dataforge/descriptors/Multiple : java/lang/annotation/Annotation {
+}
+
+public final class space/kscience/dataforge/descriptors/ReflectiveDescriptorsKt {
+	public static final fun forClass (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor$Companion;Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
+	public static synthetic fun forClass$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor$Companion;Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
+}
+
+public final class space/kscience/dataforge/properties/MetaAsFlowKt {
 }
 
 public final class space/kscience/dataforge/provider/DfTypeKt {
diff --git a/dataforge-context/build.gradle.kts b/dataforge-context/build.gradle.kts
index b59abed0..72a41693 100644
--- a/dataforge-context/build.gradle.kts
+++ b/dataforge-context/build.gradle.kts
@@ -12,7 +12,7 @@ kscience {
     useCoroutines()
     useSerialization()
     commonMain {
-        api(project(":dataforge-meta"))
+        api(projects.dataforgeMeta)
         api(spclibs.atomicfu)
     }
     jvmMain{
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/MetaProperty.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/MetaProperty.kt
deleted file mode 100644
index e79ce931..00000000
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/MetaProperty.kt
+++ /dev/null
@@ -1,35 +0,0 @@
-package space.kscience.dataforge.properties
-
-
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.ObservableMutableMeta
-import space.kscience.dataforge.meta.transformations.MetaConverter
-import space.kscience.dataforge.meta.transformations.nullableMetaToObject
-import space.kscience.dataforge.meta.transformations.nullableObjectToMeta
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.startsWith
-
-@DFExperimental
-public class MetaProperty<T : Any>(
-    public val meta: ObservableMutableMeta,
-    public val name: Name,
-    public val converter: MetaConverter<T>,
-) : Property<T?> {
-
-    override var value: T?
-        get() = converter.nullableMetaToObject(meta[name])
-        set(value) {
-            meta[name] = converter.nullableObjectToMeta(value) ?: Meta.EMPTY
-        }
-
-    override fun onChange(owner: Any?, callback: (T?) -> Unit) {
-        meta.onChange(owner) { name ->
-            if (name.startsWith(this@MetaProperty.name)) callback(converter.nullableMetaToObject(this[name]))
-        }
-    }
-
-    override fun removeChangeListener(owner: Any?) {
-        meta.removeListener(owner)
-    }
-}
\ No newline at end of file
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/Property.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/Property.kt
deleted file mode 100644
index 7b0280eb..00000000
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/Property.kt
+++ /dev/null
@@ -1,47 +0,0 @@
-package space.kscience.dataforge.properties
-
-import kotlinx.coroutines.ExperimentalCoroutinesApi
-import kotlinx.coroutines.flow.MutableStateFlow
-import kotlinx.coroutines.flow.StateFlow
-import space.kscience.dataforge.misc.DFExperimental
-
-@DFExperimental
-public interface Property<T> {
-    public var value: T
-
-    public fun onChange(owner: Any? = null, callback: (T) -> Unit)
-    public fun removeChangeListener(owner: Any? = null)
-}
-
-@DFExperimental
-@OptIn(ExperimentalCoroutinesApi::class)
-public fun <T> Property<T>.toFlow(): StateFlow<T> = MutableStateFlow(value).also { stateFlow ->
-    onChange {
-        stateFlow.value = it
-    }
-}
-
-/**
- * Reflect all changes in the [source] property onto this property. Does not reflect changes back.
- *
- * @return a mirroring job
- */
-@DFExperimental
-public fun <T> Property<T>.mirror(source: Property<T>) {
-    source.onChange(this) {
-        this.value = it
-    }
-}
-
-/**
- * Bi-directional connection between properties
- */
-@DFExperimental
-public fun <T> Property<T>.bind(other: Property<T>) {
-    onChange(other) {
-        other.value = it
-    }
-    other.onChange {
-        this.value = it
-    }
-}
\ No newline at end of file
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/metaAsFlow.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/metaAsFlow.kt
new file mode 100644
index 00000000..90fafc5e
--- /dev/null
+++ b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/metaAsFlow.kt
@@ -0,0 +1,51 @@
+package space.kscience.dataforge.properties
+
+
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.Job
+import kotlinx.coroutines.channels.awaitClose
+import kotlinx.coroutines.flow.*
+import kotlinx.coroutines.launch
+import space.kscience.dataforge.meta.*
+import space.kscience.dataforge.misc.DFExperimental
+
+@DFExperimental
+public fun <T> ObservableMeta.asFlow(converter: MetaSpec<T>): Flow<T> = callbackFlow {
+    onChange(this){
+        trySend(converter.read(this))
+    }
+
+    awaitClose{
+        removeListener(this)
+    }
+}
+
+@DFExperimental
+public fun <T> MutableMeta.listenTo(
+    scope: CoroutineScope,
+    converter: MetaConverter<T>,
+    flow: Flow<T>,
+): Job = flow.onEach {
+    update(converter.convert(it))
+}.launchIn(scope)
+
+@DFExperimental
+public fun <T> ObservableMutableMeta.bind(
+    scope: CoroutineScope,
+    converter: MetaConverter<T>,
+    flow: MutableSharedFlow<T>,
+): Job = scope.launch{
+    listenTo(this, converter,flow)
+    onChange(flow){
+        launch {
+            flow.emit(converter.read(this@onChange))
+        }
+    }
+    flow.onCompletion {
+        removeListener(flow)
+    }
+}.also {
+    it.invokeOnCompletion {
+        removeListener(flow)
+    }
+}
diff --git a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/schemeProperty.kt b/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/schemeProperty.kt
deleted file mode 100644
index 3b4d948d..00000000
--- a/dataforge-context/src/commonMain/kotlin/space/kscience/dataforge/properties/schemeProperty.kt
+++ /dev/null
@@ -1,31 +0,0 @@
-package space.kscience.dataforge.properties
-
-
-import space.kscience.dataforge.meta.Scheme
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.parseAsName
-import space.kscience.dataforge.names.startsWith
-import kotlin.reflect.KMutableProperty1
-
-@DFExperimental
-public fun <S : Scheme, T : Any> S.property(property: KMutableProperty1<S, T?>): Property<T?> =
-    object : Property<T?> {
-        override var value: T?
-            get() = property.get(this@property)
-            set(value) {
-                property.set(this@property, value)
-            }
-
-        override fun onChange(owner: Any?, callback: (T?) -> Unit) {
-            this@property.meta.onChange(this) { name ->
-                if (name.startsWith(property.name.parseAsName(true))) {
-                    callback(property.get(this@property))
-                }
-            }
-        }
-
-        override fun removeChangeListener(owner: Any?) {
-            this@property.meta.removeListener(this@property)
-        }
-
-    }
\ No newline at end of file
diff --git a/dataforge-context/src/commonTest/kotlin/space/kscience/dataforge/properties/MetaPropertiesTest.kt b/dataforge-context/src/commonTest/kotlin/space/kscience/dataforge/properties/MetaPropertiesTest.kt
deleted file mode 100644
index 00b71673..00000000
--- a/dataforge-context/src/commonTest/kotlin/space/kscience/dataforge/properties/MetaPropertiesTest.kt
+++ /dev/null
@@ -1,28 +0,0 @@
-package space.kscience.dataforge.properties
-
-import space.kscience.dataforge.meta.Scheme
-import space.kscience.dataforge.meta.SchemeSpec
-import space.kscience.dataforge.meta.int
-import space.kscience.dataforge.misc.DFExperimental
-import kotlin.test.Test
-import kotlin.test.assertEquals
-
-internal class TestScheme : Scheme() {
-    var a by int()
-    var b by int()
-    companion object : SchemeSpec<TestScheme>(::TestScheme)
-}
-
-@DFExperimental
-class MetaPropertiesTest {
-    @Test
-    fun testBinding() {
-        val scheme = TestScheme.empty()
-        val a = scheme.property(TestScheme::a)
-        val b = scheme.property(TestScheme::b)
-        a.bind(b)
-        scheme.a = 2
-        assertEquals(2, scheme.b)
-        assertEquals(2, b.value)
-    }
-}
\ No newline at end of file
diff --git a/dataforge-context/src/jsMain/kotlin/space/kscience/dataforge/properties/bindings.kt b/dataforge-context/src/jsMain/kotlin/space/kscience/dataforge/properties/bindings.kt
deleted file mode 100644
index b4451c97..00000000
--- a/dataforge-context/src/jsMain/kotlin/space/kscience/dataforge/properties/bindings.kt
+++ /dev/null
@@ -1,32 +0,0 @@
-package space.kscience.dataforge.properties
-
-import org.w3c.dom.HTMLInputElement
-import space.kscience.dataforge.misc.DFExperimental
-
-@DFExperimental
-public fun HTMLInputElement.bindValue(property: Property<String>) {
-    if (this.onchange != null) error("Input element already bound")
-    this.onchange = {
-        property.value = this.value
-        Unit
-    }
-    property.onChange(this) {
-        if (value != it) {
-            value = it
-        }
-    }
-}
-
-@DFExperimental
-public fun HTMLInputElement.bindChecked(property: Property<Boolean>) {
-    if (this.onchange != null) error("Input element already bound")
-    this.onchange = {
-        property.value = this.checked
-        Unit
-    }
-    property.onChange(this) {
-        if (checked != it) {
-            checked = it
-        }
-    }
-}
\ No newline at end of file
diff --git a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/annotations.kt b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/annotations.kt
deleted file mode 100644
index f97cb28d..00000000
--- a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/annotations.kt
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright  2018 Alexander Nozik.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package space.kscience.dataforge.descriptors
-
-//@MustBeDocumented
-//annotation class Attribute(
-//    val key: String,
-//    val value: String
-//)
-//
-//@MustBeDocumented
-//annotation class Attributes(
-//    val attrs: Array<Attribute>
-//)
-//
-//@MustBeDocumented
-//annotation class ItemDef(
-//    val info: String = "",
-//    val multiple: Boolean = false,
-//    val required: Boolean = false
-//)
-//
-//@Target(AnnotationTarget.PROPERTY)
-//@MustBeDocumented
-//annotation class ValueDef(
-//    val type: Array<ValueType> = [ValueType.STRING],
-//    val def: String = "",
-//    val allowed: Array<String> = [],
-//    val enumeration: KClass<*> = Any::class
-//)
-
-///**
-// * Description text for meta property, node or whole object
-// */
-//@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class Description(val value: String)
-//
-///**
-// * Annotation for value property which states that lists are expected
-// */
-//@Target(AnnotationTarget.PROPERTY)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class Multiple
-//
-///**
-// * Descriptor target
-// * The DataForge path to the resource containing the description. Following targets are supported:
-// *  1. resource
-// *  1. file
-// *  1. class
-// *  1. method
-// *  1. property
-// *
-// *
-// * Does not work if [type] is provided
-// */
-//@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY, AnnotationTarget.VALUE_PARAMETER)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class Descriptor(val value: String)
-//
-//
-///**
-// * Aggregator class for descriptor nodes
-// */
-//@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY, AnnotationTarget.FUNCTION, AnnotationTarget.VALUE_PARAMETER)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class DescriptorNodes(vararg val nodes: NodeDef)
-//
-///**
-// * Aggregator class for descriptor values
-// */
-//@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY, AnnotationTarget.FUNCTION, AnnotationTarget.VALUE_PARAMETER)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class DescriptorValues(vararg val nodes: ValueDef)
-//
-///**
-// * Alternative name for property descriptor declaration
-// */
-//@Target(AnnotationTarget.PROPERTY, AnnotationTarget.VALUE_PARAMETER)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class DescriptorName(val name: String)
-//
-//@Target(AnnotationTarget.PROPERTY)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class DescriptorValue(val def: ValueDef)
-////TODO enter fields directly?
-//
-//@Target(AnnotationTarget.PROPERTY)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class ValueProperty(
-//    val name: String = "",
-//    val type: Array<ValueType> = arrayOf(ValueType.STRING),
-//    val multiple: Boolean = false,
-//    val def: String = "",
-//    val enumeration: KClass<*> = Any::class,
-//    val tags: Array<String> = emptyArray()
-//)
-//
-//
-//@Target(AnnotationTarget.PROPERTY)
-//@Retention(AnnotationRetention.RUNTIME)
-//@MustBeDocumented
-//annotation class NodeProperty(val name: String = "")
diff --git a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt
index 9de5e280..1f2db7fc 100644
--- a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt
+++ b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/descriptors/reflectiveDescriptors.kt
@@ -1,53 +1,124 @@
 package space.kscience.dataforge.descriptors
 
+import kotlinx.serialization.ExperimentalSerializationApi
+import kotlinx.serialization.json.Json
+import kotlinx.serialization.json.decodeFromStream
+import org.slf4j.LoggerFactory
+import space.kscience.dataforge.meta.Scheme
+import space.kscience.dataforge.meta.SchemeSpec
+import space.kscience.dataforge.meta.ValueType
+import space.kscience.dataforge.meta.descriptors.MetaDescriptor
+import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
+import space.kscience.dataforge.meta.descriptors.node
+import java.net.URL
+import kotlin.reflect.KClass
+import kotlin.reflect.full.isSubclassOf
+import kotlin.reflect.full.memberProperties
+import kotlin.reflect.typeOf
 
-//inline fun <reified T : Scheme> T.buildDescriptor(): NodeDescriptor = NodeDescriptor {
-//    T::class.apply {
-//        findAnnotation<ItemDef>()?.let { def ->
-//            info = def.info
-//            required = def.required
-//            multiple = def.multiple
-//        }
-//        findAnnotation<Attribute>()?.let { attr ->
-//            attributes {
-//                this[attr.key] = attr.value.parseValue()
-//            }
-//        }
-//        findAnnotation<Attributes>()?.attrs?.forEach { attr ->
-//            attributes {
-//                this[attr.key] = attr.value.parseValue()
-//            }
-//        }
-//    }
-//    T::class.memberProperties.forEach { property ->
-//        val delegate = property.getDelegate(this@buildDescriptor)
-//
-//        val descriptor: ItemDescriptor = when (delegate) {
-//            is ConfigurableDelegate -> buildPropertyDescriptor(property, delegate)
-//            is ReadWriteDelegateWrapper<*, *> -> {
-//                if (delegate.delegate is ConfigurableDelegate) {
-//                    buildPropertyDescriptor(property, delegate.delegate as ConfigurableDelegate)
-//                } else {
-//                    return@forEach
-//                }
-//            }
-//            else -> return@forEach
-//        }
-//        defineItem(property.name, descriptor)
-//    }
-//}
 
-//inline fun <T : Scheme, reified V : Any?> buildPropertyDescriptor(
-//    property: KProperty1<T, V>,
-//    delegate: ConfigurableDelegate
-//): ItemDescriptor {
-//    when {
-//        V::class.isSubclassOf(Scheme::class) -> NodeDescriptor {
-//            default = delegate.default.node
-//        }
-//        V::class.isSubclassOf(Meta::class) -> NodeDescriptor {
-//            default = delegate.default.node
-//        }
-//
-//    }
-//}
+/**
+ * Description text for meta property, node or whole object
+ */
+@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY)
+@Retention(AnnotationRetention.RUNTIME)
+@MustBeDocumented
+public annotation class Description(val value: String)
+
+@Target(AnnotationTarget.PROPERTY)
+@Retention(AnnotationRetention.RUNTIME)
+@MustBeDocumented
+public annotation class Multiple()
+
+@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY)
+@Retention(AnnotationRetention.RUNTIME)
+@MustBeDocumented
+public annotation class DescriptorResource(val resourceName: String)
+
+@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY)
+@Retention(AnnotationRetention.RUNTIME)
+@MustBeDocumented
+public annotation class DescriptorUrl(val url: String)
+
+
+@OptIn(ExperimentalSerializationApi::class)
+private fun MetaDescriptorBuilder.loadDescriptorFromUrl(url: URL) {
+    url.openStream().use {
+        from(Json.decodeFromStream(MetaDescriptor.serializer(), it))
+    }
+}
+
+private fun MetaDescriptorBuilder.loadDescriptorFromResource(resource: DescriptorResource) {
+    val url = {}.javaClass.getResource(resource.resourceName)
+    if (url != null) {
+        loadDescriptorFromUrl(url)
+    } else {
+        LoggerFactory.getLogger("System")
+            .error("Can't find descriptor resource with name ${resource.resourceName}")
+    }
+}
+
+
+public fun <T : Any> MetaDescriptor.Companion.forClass(
+    kClass: KClass<T>,
+    mod: MetaDescriptorBuilder.() -> Unit = {},
+): MetaDescriptor = MetaDescriptor {
+    when {
+        kClass.isSubclassOf(Number::class) -> valueType(ValueType.NUMBER)
+        kClass == String::class -> ValueType.STRING
+        kClass == Boolean::class -> ValueType.BOOLEAN
+        kClass == DoubleArray::class -> ValueType.LIST
+    }
+
+    kClass.annotations.forEach {
+        when (it) {
+            is Description -> description = it.value
+
+            is DescriptorResource -> loadDescriptorFromResource(it)
+
+            is DescriptorUrl -> loadDescriptorFromUrl(URL(it.url))
+        }
+    }
+    kClass.memberProperties.forEach { property ->
+
+        var flag = false
+
+        val descriptor = MetaDescriptor {
+            //use base type descriptor as a base
+            (property.returnType.classifier as? KClass<*>)?.let {
+                from(forClass(it))
+            }
+            property.annotations.forEach {
+                when (it) {
+                    is Description -> {
+                        description = it.value
+                        flag = true
+                    }
+
+                    is Multiple -> {
+                        multiple = true
+                        flag = true
+                    }
+
+                    is DescriptorResource -> {
+                        loadDescriptorFromResource(it)
+                        flag = true
+                    }
+
+                    is DescriptorUrl -> {
+                        loadDescriptorFromUrl(URL(it.url))
+                        flag = true
+                    }
+                }
+            }
+        }
+        if (flag) {
+            node(property.name, descriptor)
+        }
+    }
+    mod()
+}
+
+@Suppress("UNCHECKED_CAST")
+public inline fun <reified T : Scheme> SchemeSpec<T>.autoDescriptor( noinline mod: MetaDescriptorBuilder.() -> Unit = {}): MetaDescriptor =
+    MetaDescriptor.forClass(typeOf<T>().classifier as KClass<T>, mod)
\ No newline at end of file
diff --git a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt
index 04e681da..cccc3efb 100644
--- a/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt
+++ b/dataforge-context/src/jvmMain/kotlin/space/kscience/dataforge/provider/dfType.kt
@@ -12,7 +12,7 @@ import kotlin.reflect.full.findAnnotation
 
 
 @DFExperimental
-public val KClass<*>.dfId: String
+public val KClass<*>.dfType: String
     get() = findAnnotation<DfType>()?.id ?: simpleName ?: ""
 
 /**
@@ -20,13 +20,13 @@ public val KClass<*>.dfId: String
  */
 @DFExperimental
 public inline fun <reified T : Any> Provider.provideByType(name: String): T? {
-    val target = T::class.dfId
+    val target = T::class.dfType
     return provide(target, name)
 }
 
 @DFExperimental
 public inline fun <reified T : Any> Provider.top(): Map<Name, T> {
-    val target = T::class.dfId
+    val target = T::class.dfType
     return top(target)
 }
 
@@ -35,15 +35,15 @@ public inline fun <reified T : Any> Provider.top(): Map<Name, T> {
  */
 @DFExperimental
 public inline fun <reified T : Any> Context.gather(inherit: Boolean = true): Map<Name, T> =
-    gather<T>(T::class.dfId, inherit)
+    gather<T>(T::class.dfType, inherit)
 
 
 @DFExperimental
 public inline fun <reified T : Any> PluginBuilder.provides(items: Map<Name, T>) {
-    provides(T::class.dfId, items)
+    provides(T::class.dfType, items)
 }
 
 @DFExperimental
 public inline fun <reified T : Any> PluginBuilder.provides(vararg items: Named) {
-    provides(T::class.dfId, *items)
+    provides(T::class.dfType, *items)
 }
diff --git a/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt b/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt
new file mode 100644
index 00000000..617e85cc
--- /dev/null
+++ b/dataforge-context/src/jvmTest/kotlin/space/kscience/dataforge/descriptors/TestAutoDescriptors.kt
@@ -0,0 +1,31 @@
+package space.kscience.dataforge.descriptors
+
+import kotlinx.serialization.encodeToString
+import kotlinx.serialization.json.Json
+import org.junit.jupiter.api.Test
+import space.kscience.dataforge.meta.Scheme
+import space.kscience.dataforge.meta.SchemeSpec
+import space.kscience.dataforge.meta.descriptors.MetaDescriptor
+import space.kscience.dataforge.meta.int
+import space.kscience.dataforge.meta.string
+
+private class TestScheme: Scheme(){
+
+    @Description("A")
+    val a by string()
+
+    @Description("B")
+    val b by int()
+
+    companion object: SchemeSpec<TestScheme>(::TestScheme){
+        override val descriptor: MetaDescriptor = autoDescriptor()
+    }
+}
+
+class TestAutoDescriptors {
+    @Test
+    fun autoDescriptor(){
+        val autoDescriptor = MetaDescriptor.forClass(TestScheme::class)
+        println(Json{prettyPrint = true}.encodeToString(autoDescriptor))
+    }
+}
\ No newline at end of file
diff --git a/dataforge-data/README.md b/dataforge-data/README.md
index 5935af6e..d77ed1b9 100644
--- a/dataforge-data/README.md
+++ b/dataforge-data/README.md
@@ -6,18 +6,16 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-data:0.7.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-data:0.8.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
 repositories {
     maven("https://repo.kotlin.link")
-    //uncomment to access development builds
-    //maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
     mavenCentral()
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-data:0.7.0")
+    implementation("space.kscience:dataforge-data:0.8.0")
 }
 ```
diff --git a/dataforge-data/build.gradle.kts b/dataforge-data/build.gradle.kts
index ea542290..99314ea3 100644
--- a/dataforge-data/build.gradle.kts
+++ b/dataforge-data/build.gradle.kts
@@ -11,6 +11,7 @@ kscience{
     dependencies {
         api(spclibs.atomicfu)
         api(projects.dataforgeMeta)
+        // Remove after `subtype` is moved to the stdlib
         api(kotlin("reflect"))
     }
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
index e7bbe6f6..7cd1ced5 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/AbstractAction.kt
@@ -1,5 +1,7 @@
 package space.kscience.dataforge.actions
 
+import kotlinx.coroutines.flow.launchIn
+import kotlinx.coroutines.flow.onEach
 import kotlinx.coroutines.launch
 import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
@@ -19,46 +21,51 @@ internal fun MutableMap<Name, *>.removeWhatStartsWith(name: Name) {
 /**
  * An action that caches results on-demand and recalculates them on source push
  */
-public abstract class AbstractAction<in T : Any, R : Any>(
+public abstract class AbstractAction<T : Any, R : Any>(
     public val outputType: KType,
 ) : Action<T, R> {
 
     /**
      * Generate initial content of the output
      */
-    protected abstract fun DataSetBuilder<R>.generate(
-        data: DataSet<T>,
+    protected abstract fun DataSink<R>.generate(
+        data: DataTree<T>,
         meta: Meta,
     )
 
     /**
-     * Update part of the data set when given [updateKey] is triggered by the source
+     * Update part of the data set using provided data
+     *
+     * @param source the source data tree in case we need several data items to update
      */
-    protected open fun DataSourceBuilder<R>.update(
-        dataSet: DataSet<T>,
+    protected open fun DataSink<R>.update(
+        source: DataTree<T>,
         meta: Meta,
-        updateKey: Name,
-    ) {
-        // By default, recalculate the whole dataset
-        generate(dataSet, meta)
+        namedData: NamedData<T>,
+    ){
+        //by default regenerate the whole data set
+        generate(source,meta)
     }
 
     @OptIn(DFInternal::class)
     override fun execute(
-        dataSet: DataSet<T>,
+        dataSet: DataTree<T>,
         meta: Meta,
-    ): DataSet<R> = if (dataSet is DataSource) {
-        DataSource(outputType, dataSet){
+    ): DataTree<R> = if(dataSet.isObservable()) {
+        MutableDataTree<R>(outputType, dataSet.updatesScope).apply {
             generate(dataSet, meta)
+            dataSet.updates().onEach {
+                update(dataSet, meta, it)
+            }.launchIn(updatesScope)
 
-            launch {
-                dataSet.updates.collect { name ->
-                    update(dataSet, meta, name)
-                }
+            //close updates when the source is closed
+            updatesScope.launch {
+                dataSet.awaitClose()
+                close()
             }
         }
-    } else {
-        DataTree<R>(outputType) {
+    } else{
+        DataTree(outputType){
             generate(dataSet, meta)
         }
     }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
index 4fed8e51..5ed60db9 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/Action.kt
@@ -1,40 +1,43 @@
 package space.kscience.dataforge.actions
 
-import space.kscience.dataforge.data.DataSet
+import space.kscience.dataforge.data.DataTree
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.misc.DFExperimental
 
 /**
  * A simple data transformation on a data node. Actions should avoid doing actual dependency evaluation in [execute].
  */
-public interface Action<in T : Any, out R : Any> {
+public fun interface Action<T, R> {
 
     /**
      * Transform the data in the node, producing a new node. By default, it is assumed that all calculations are lazy
      * so not actual computation is started at this moment.
      */
-    public fun execute(dataSet: DataSet<T>, meta: Meta = Meta.EMPTY): DataSet<R>
+    public fun execute(dataSet: DataTree<T>, meta: Meta): DataTree<R>
 
     public companion object
 }
 
+/**
+ * A convenience method to transform data using given [action]
+ */
+public fun <T, R> DataTree<T>.transform(
+    action: Action<T, R>,
+    meta: Meta = Meta.EMPTY,
+): DataTree<R> = action.execute(this, meta)
+
 /**
  * Action composition. The result is terminal if one of its parts is terminal
  */
-public infix fun <T : Any, I : Any, R : Any> Action<T, I>.then(action: Action<I, R>): Action<T, R> {
-    // TODO introduce composite action and add optimize by adding action to the list
-    return object : Action<T, R> {
-
-        override fun execute(
-            dataSet: DataSet<T>,
-            meta: Meta,
-        ): DataSet<R> = action.execute(this@then.execute(dataSet, meta), meta)
-    }
+public infix fun <T, I, R> Action<T, I>.then(action: Action<I, R>): Action<T, R> = Action { dataSet, meta ->
+    action.execute(this@then.execute(dataSet, meta), meta)
 }
 
 @DFExperimental
-public operator fun <T : Any, R : Any> Action<T, R>.invoke(
-    dataSet: DataSet<T>,
+public operator fun <T, R> Action<T, R>.invoke(
+    dataSet: DataTree<T>,
     meta: Meta = Meta.EMPTY,
-): DataSet<R> = execute(dataSet, meta)
+): DataTree<R> = execute(dataSet, meta)
+
+
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
index 7b2c94f5..1f40ed73 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/MapAction.kt
@@ -29,6 +29,7 @@ public class MapActionBuilder<T, R>(
     public var name: Name,
     public var meta: MutableMeta,
     public val actionMeta: Meta,
+    public val dataType: KType,
     @PublishedApi internal var outputType: KType,
 ) {
 
@@ -45,19 +46,16 @@ public class MapActionBuilder<T, R>(
     /**
      * Calculate the result of goal
      */
-    public inline fun <reified R1 : R> result(noinline f: suspend ActionEnv.(T) -> R1) {
-        outputType = typeOf<R1>()
-        result = f;
-    }
+    public inline fun <reified R1 : R> result(noinline f: suspend ActionEnv.(T) -> R1): Unit = result(typeOf<R1>(), f)
 }
 
 @PublishedApi
-internal class MapAction<in T : Any, R : Any>(
+internal class MapAction<T : Any, R : Any>(
     outputType: KType,
     private val block: MapActionBuilder<T, R>.() -> Unit,
 ) : AbstractAction<T, R>(outputType) {
 
-    private fun DataSetBuilder<R>.mapOne(name: Name, data: Data<T>, meta: Meta) {
+    private fun DataSink<R>.mapOne(name: Name, data: Data<T>, meta: Meta) {
         // Creating a new environment for action using **old** name, old meta and task meta
         val env = ActionEnv(name, data.meta, meta)
 
@@ -66,6 +64,7 @@ internal class MapAction<in T : Any, R : Any>(
             name,
             data.meta.toMutableMeta(), // using data meta
             meta,
+            data.type,
             outputType
         ).apply(block)
 
@@ -80,16 +79,15 @@ internal class MapAction<in T : Any, R : Any>(
             builder.result(env, data.await())
         }
         //setting the data node
-        data(newName, newData)
+        put(newName, newData)
     }
 
-    override fun DataSetBuilder<R>.generate(data: DataSet<T>, meta: Meta) {
+    override fun DataSink<R>.generate(data: DataTree<T>, meta: Meta) {
         data.forEach { mapOne(it.name, it.data, meta) }
     }
 
-    override fun DataSourceBuilder<R>.update(dataSet: DataSet<T>, meta: Meta, updateKey: Name) {
-        remove(updateKey)
-        dataSet[updateKey]?.let { mapOne(updateKey, it, meta) }
+    override fun DataSink<R>.update(source: DataTree<T>, meta: Meta, namedData: NamedData<T>) {
+        mapOne(namedData.name, namedData.data, namedData.meta)
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
index a74cfad9..9440be55 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/ReduceAction.kt
@@ -14,7 +14,7 @@ import kotlin.reflect.typeOf
 
 public class JoinGroup<T : Any, R : Any>(
     public var name: String,
-    internal val set: DataSet<T>,
+    internal val set: DataTree<T>,
     @PublishedApi internal var outputType: KType,
 ) {
 
@@ -39,7 +39,7 @@ public class ReduceGroupBuilder<T : Any, R : Any>(
     public val actionMeta: Meta,
     private val outputType: KType,
 ) {
-    private val groupRules: MutableList<(DataSet<T>) -> List<JoinGroup<T, R>>> = ArrayList();
+    private val groupRules: MutableList<(DataTree<T>) -> List<JoinGroup<T, R>>> = ArrayList();
 
     /**
      * introduce grouping by meta value
@@ -54,12 +54,12 @@ public class ReduceGroupBuilder<T : Any, R : Any>(
 
     public fun group(
         groupName: String,
-        predicate: (Name, Meta) -> Boolean,
+        predicate: DataFilter,
         action: JoinGroup<T, R>.() -> Unit,
     ) {
         groupRules += { source ->
             listOf(
-                JoinGroup<T, R>(groupName, source.filter(predicate), outputType).apply(action)
+                JoinGroup<T, R>(groupName, source.filterData(predicate), outputType).apply(action)
             )
         }
     }
@@ -73,7 +73,7 @@ public class ReduceGroupBuilder<T : Any, R : Any>(
         }
     }
 
-    internal fun buildGroups(input: DataSet<T>): List<JoinGroup<T, R>> =
+    internal fun buildGroups(input: DataTree<T>): List<JoinGroup<T, R>> =
         groupRules.flatMap { it.invoke(input) }
 
 }
@@ -85,7 +85,7 @@ internal class ReduceAction<T : Any, R : Any>(
 ) : AbstractAction<T, R>(outputType) {
     //TODO optimize reduction. Currently, the whole action recalculates on push
 
-    override fun DataSetBuilder<R>.generate(data: DataSet<T>, meta: Meta) {
+    override fun DataSink<R>.generate(data: DataTree<T>, meta: Meta) {
         ReduceGroupBuilder<T, R>(meta, outputType).apply(action).buildGroups(data).forEach { group ->
             val dataFlow: Map<Name, Data<T>> = group.set.asSequence().fold(HashMap()) { acc, value ->
                 acc.apply {
@@ -103,7 +103,7 @@ internal class ReduceAction<T : Any, R : Any>(
                 meta = groupMeta
             ) { group.result.invoke(env, it) }
 
-            data(env.name, res)
+            put(env.name, res)
         }
     }
 }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
index 0ecde319..057419a7 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/actions/SplitAction.kt
@@ -49,7 +49,7 @@ internal class SplitAction<T : Any, R : Any>(
     private val action: SplitBuilder<T, R>.() -> Unit,
 ) : AbstractAction<T, R>(outputType) {
 
-    private fun DataSetBuilder<R>.splitOne(name: Name, data: Data<T>, meta: Meta) {
+    private fun DataSink<R>.splitOne(name: Name, data: Data<T>, meta: Meta) {
         val laminate = Laminate(data.meta, meta)
 
         val split = SplitBuilder<T, R>(name, data.meta).apply(action)
@@ -64,7 +64,7 @@ internal class SplitAction<T : Any, R : Any>(
             ).apply(rule)
             //data.map<R>(outputType, meta = env.meta) { env.result(it) }.named(fragmentName)
 
-            data(
+            put(
                 fragmentName,
                 @Suppress("OPT_IN_USAGE") Data(outputType, meta = env.meta, dependencies = listOf(data)) {
                     env.result(data.await())
@@ -73,13 +73,12 @@ internal class SplitAction<T : Any, R : Any>(
         }
     }
 
-    override fun DataSetBuilder<R>.generate(data: DataSet<T>, meta: Meta) {
+    override fun DataSink<R>.generate(data: DataTree<T>, meta: Meta) {
         data.forEach { splitOne(it.name, it.data, meta) }
     }
 
-    override fun DataSourceBuilder<R>.update(dataSet: DataSet<T>, meta: Meta, updateKey: Name) {
-        remove(updateKey)
-        dataSet[updateKey]?.let { splitOne(updateKey, it, meta) }
+    override fun DataSink<R>.update(source: DataTree<T>, meta: Meta, namedData: NamedData<T>) {
+        splitOne(namedData.name, namedData.data, namedData.meta)
     }
 }
 
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
index 4d883795..a0bb58ea 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Data.kt
@@ -41,7 +41,7 @@ public interface Data<out T> : Goal<T>, MetaRepr {
          */
         internal val TYPE_OF_NOTHING: KType = typeOf<Unit>()
 
-        public inline fun <reified T : Any> static(
+        public inline fun <reified T> static(
             value: T,
             meta: Meta = Meta.EMPTY,
         ): Data<T> = StaticData(typeOf<T>(), value, meta)
@@ -69,39 +69,37 @@ public interface Data<out T> : Goal<T>, MetaRepr {
  * A lazily computed variant of [Data] based on [LazyGoal]
  * One must ensure that proper [type] is used so this method should not be used
  */
-private class LazyData<T : Any>(
+private class LazyData<T>(
     override val type: KType,
     override val meta: Meta = Meta.EMPTY,
     additionalContext: CoroutineContext = EmptyCoroutineContext,
-    dependencies: Collection<Goal<*>> = emptyList(),
+    dependencies: Iterable<Goal<*>> = emptyList(),
     block: suspend () -> T,
 ) : Data<T>, LazyGoal<T>(additionalContext, dependencies, block)
 
-public class StaticData<T : Any>(
+public class StaticData<T>(
     override val type: KType,
     value: T,
     override val meta: Meta = Meta.EMPTY,
 ) : Data<T>, StaticGoal<T>(value)
 
 @Suppress("FunctionName")
-public inline fun <reified T : Any> Data(value: T, meta: Meta = Meta.EMPTY): StaticData<T> =
+public inline fun <reified T> Data(value: T, meta: Meta = Meta.EMPTY): StaticData<T> =
     StaticData(typeOf<T>(), value, meta)
 
-@Suppress("FunctionName")
 @DFInternal
-public fun <T : Any> Data(
+public fun <T> Data(
     type: KType,
     meta: Meta = Meta.EMPTY,
     context: CoroutineContext = EmptyCoroutineContext,
-    dependencies: Collection<Goal<*>> = emptyList(),
+    dependencies: Iterable<Goal<*>> = emptyList(),
     block: suspend () -> T,
 ): Data<T> = LazyData(type, meta, context, dependencies, block)
 
 @OptIn(DFInternal::class)
-@Suppress("FunctionName")
-public inline fun <reified T : Any> Data(
+public inline fun <reified T> Data(
     meta: Meta = Meta.EMPTY,
     context: CoroutineContext = EmptyCoroutineContext,
-    dependencies: Collection<Goal<*>> = emptyList(),
+    dependencies: Iterable<Goal<*>> = emptyList(),
     noinline block: suspend () -> T,
 ): Data<T> = Data(typeOf<T>(), meta, context, dependencies, block)
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
new file mode 100644
index 00000000..f45570ad
--- /dev/null
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataFilter.kt
@@ -0,0 +1,89 @@
+package space.kscience.dataforge.data
+
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.flow.filter
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.names.Name
+import kotlin.reflect.KType
+
+public fun interface DataFilter {
+
+    public fun accepts(name: Name, meta: Meta, type: KType): Boolean
+
+    public companion object {
+        public val EMPTY: DataFilter = DataFilter { _, _, _ -> true }
+    }
+}
+
+public fun DataFilter.accepts(data: NamedData<*>): Boolean = accepts(data.name, data.meta, data.type)
+
+public fun <T> Sequence<NamedData<T>>.filterData(predicate: DataFilter): Sequence<NamedData<T>> = filter { data ->
+    predicate.accepts(data)
+}
+
+public fun <T> Flow<NamedData<T>>.filterData(predicate: DataFilter): Flow<NamedData<T>> = filter { data ->
+    predicate.accepts(data)
+}
+
+public fun <T> DataSource<T>.filterData(
+    predicate: DataFilter,
+): DataSource<T> = object : DataSource<T> {
+    override val dataType: KType get() = this@filterData.dataType
+
+    override fun read(name: Name): Data<T>? =
+        this@filterData.read(name)?.takeIf { predicate.accepts(name, it.meta, it.type) }
+}
+
+/**
+ * Stateless filtered [ObservableDataSource]
+ */
+public fun <T> ObservableDataSource<T>.filterData(
+    predicate: DataFilter,
+): ObservableDataSource<T> = object : ObservableDataSource<T> {
+    override fun updates(): Flow<NamedData<T>> = this@filterData.updates().filter { predicate.accepts(it) }
+
+    override val dataType: KType get() = this@filterData.dataType
+
+    override fun read(name: Name): Data<T>? =
+        this@filterData.read(name)?.takeIf { predicate.accepts(name, it.meta, it.type) }
+}
+
+public fun <T> GenericDataTree<T, *>.filterData(
+    predicate: DataFilter,
+): DataTree<T> = asSequence().filterData(predicate).toTree(dataType)
+
+public fun <T> GenericObservableDataTree<T, *>.filterData(
+    scope: CoroutineScope,
+    predicate: DataFilter,
+): ObservableDataTree<T> = asSequence().filterData(predicate).toObservableTree(dataType, scope, updates().filterData(predicate))
+
+
+///**
+// * Generate a wrapper data set with a given name prefix appended to all names
+// */
+//public fun <T : Any> DataTree<T>.withNamePrefix(prefix: Name): DataSet<T> = if (prefix.isEmpty()) {
+//    this
+//} else object : DataSource<T> {
+//
+//    override val dataType: KType get() = this@withNamePrefix.dataType
+//
+//    override val coroutineContext: CoroutineContext
+//        get() = (this@withNamePrefix as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
+//
+//    override val meta: Meta get() = this@withNamePrefix.meta
+//
+//
+//    override fun iterator(): Iterator<NamedData<T>> = iterator {
+//        for (d in this@withNamePrefix) {
+//            yield(d.data.named(prefix + d.name))
+//        }
+//    }
+//
+//    override fun get(name: Name): Data<T>? =
+//        name.removeFirstOrNull(name)?.let { this@withNamePrefix.get(it) }
+//
+//    override val updates: Flow<Name> get() = this@withNamePrefix.updates.map { prefix + it }
+//}
+//
+
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSet.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSet.kt
deleted file mode 100644
index 44639653..00000000
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSet.kt
+++ /dev/null
@@ -1,124 +0,0 @@
-package space.kscience.dataforge.data
-
-import kotlinx.coroutines.*
-import kotlinx.coroutines.flow.Flow
-import kotlinx.coroutines.flow.emptyFlow
-import kotlinx.coroutines.flow.mapNotNull
-import space.kscience.dataforge.data.Data.Companion.TYPE_OF_NOTHING
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.asName
-import space.kscience.dataforge.names.endsWith
-import space.kscience.dataforge.names.parseAsName
-import kotlin.reflect.KType
-
-public interface DataSet<out T : Any> {
-
-    /**
-     * The minimal common ancestor to all data in the node
-     */
-    public val dataType: KType
-
-    /**
-     * Meta-data associated with this node. If no meta is provided, returns [Meta.EMPTY].
-     */
-    public val meta: Meta
-
-    /**
-     * Traverse this [DataSet] returning named data instances. The order is not guaranteed.
-     */
-    public operator fun iterator(): Iterator<NamedData<T>>
-
-    /**
-     * Get data with given name.
-     */
-    public operator fun get(name: Name): Data<T>?
-
-    public companion object {
-        public val META_KEY: Name = "@meta".asName()
-
-        /**
-         * An empty [DataSet] that suits all types
-         */
-        public val EMPTY: DataSet<Nothing> = object : DataSet<Nothing> {
-            override val dataType: KType = TYPE_OF_NOTHING
-            override val meta: Meta get() = Meta.EMPTY
-
-            override fun iterator(): Iterator<NamedData<Nothing>> = emptySequence<NamedData<Nothing>>().iterator()
-
-            override fun get(name: Name): Data<Nothing>? = null
-        }
-    }
-}
-
-public fun <T : Any> DataSet<T>.asSequence(): Sequence<NamedData<T>> = object : Sequence<NamedData<T>> {
-    override fun iterator(): Iterator<NamedData<T>> = this@asSequence.iterator()
-}
-
-/**
- * Return a single [Data] in this [DataSet]. Throw error if it is not single.
- */
-public fun <T : Any> DataSet<T>.single(): NamedData<T> = asSequence().single()
-
-public fun <T : Any> DataSet<T>.asIterable(): Iterable<NamedData<T>> = object : Iterable<NamedData<T>> {
-    override fun iterator(): Iterator<NamedData<T>> = this@asIterable.iterator()
-}
-
-public operator fun <T : Any> DataSet<T>.get(name: String): Data<T>? = get(name.parseAsName())
-
-/**
- * A [DataSet] with propagated updates.
- */
-public interface DataSource<out T : Any> : DataSet<T>, CoroutineScope {
-
-    /**
-     * A flow of updated item names. Updates are propagated in a form of [Flow] of names of updated nodes.
-     * Those can include new data items and replacement of existing ones. The replaced items could update existing data content
-     * and replace it completely, so they should be pulled again.
-     *
-     */
-    public val updates: Flow<Name>
-
-    /**
-     * Stop generating updates from this [DataSource]
-     */
-    public fun close() {
-        coroutineContext[Job]?.cancel()
-    }
-}
-
-public val <T : Any> DataSet<T>.updates: Flow<Name> get() = if (this is DataSource) updates else emptyFlow()
-//
-///**
-// * Flow all data nodes with names starting with [branchName]
-// */
-//public fun <T : Any> DataSet<T>.children(branchName: Name): Sequence<NamedData<T>> =
-//    this@children.asSequence().filter {
-//        it.name.startsWith(branchName)
-//    }
-
-/**
- * Start computation for all goals in data node and return a job for the whole node
- */
-public fun <T : Any> DataSet<T>.startAll(coroutineScope: CoroutineScope): Job = coroutineScope.launch {
-    asIterable().map {
-        it.launch(this@launch)
-    }.joinAll()
-}
-
-public suspend fun <T : Any> DataSet<T>.computeAndJoinAll(): Unit = coroutineScope { startAll(this).join() }
-
-public fun DataSet<*>.toMeta(): Meta = Meta {
-    forEach {
-        if (it.name.endsWith(DataSet.META_KEY)) {
-            set(it.name, it.meta)
-        } else {
-            it.name put {
-                "type" put it.type.toString()
-                "meta" put it.meta
-            }
-        }
-    }
-}
-
-public val <T : Any> DataSet<T>.updatesWithData: Flow<NamedData<T>> get() = updates.mapNotNull { get(it)?.named(it) }
\ No newline at end of file
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSetBuilder.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSetBuilder.kt
deleted file mode 100644
index f9f14f37..00000000
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSetBuilder.kt
+++ /dev/null
@@ -1,165 +0,0 @@
-package space.kscience.dataforge.data
-
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.MutableMeta
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.isEmpty
-import space.kscience.dataforge.names.plus
-import kotlin.reflect.KType
-
-public interface DataSetBuilder<in T : Any> {
-    public val dataType: KType
-
-    /**
-     * Remove all data items starting with [name]
-     */
-    public fun remove(name: Name)
-
-    public fun data(name: Name, data: Data<T>?)
-
-    /**
-     * Set a current state of given [dataSet] into a branch [name]. Does not propagate updates
-     */
-    public fun node(name: Name, dataSet: DataSet<T>) {
-        //remove previous items
-        if (name != Name.EMPTY) {
-            remove(name)
-        }
-
-        //Set new items
-        dataSet.forEach {
-            data(name + it.name, it.data)
-        }
-    }
-
-    /**
-     * Set meta for the given node
-     */
-    public fun meta(name: Name, meta: Meta)
-
-}
-
-/**
- * Define meta in this [DataSet]
- */
-public fun <T : Any> DataSetBuilder<T>.meta(value: Meta): Unit = meta(Name.EMPTY, value)
-
-/**
- * Define meta in this [DataSet]
- */
-public fun <T : Any> DataSetBuilder<T>.meta(mutableMeta: MutableMeta.() -> Unit): Unit = meta(Meta(mutableMeta))
-
-@PublishedApi
-internal class SubSetBuilder<in T : Any>(
-    private val parent: DataSetBuilder<T>,
-    private val branch: Name,
-) : DataSetBuilder<T> {
-    override val dataType: KType get() = parent.dataType
-
-    override fun remove(name: Name) {
-        parent.remove(branch + name)
-    }
-
-    override fun data(name: Name, data: Data<T>?) {
-        parent.data(branch + name, data)
-    }
-
-    override fun node(name: Name, dataSet: DataSet<T>) {
-        parent.node(branch + name, dataSet)
-    }
-
-    override fun meta(name: Name, meta: Meta) {
-        parent.meta(branch + name, meta)
-    }
-}
-
-public inline fun <T : Any> DataSetBuilder<T>.node(
-    name: Name,
-    crossinline block: DataSetBuilder<T>.() -> Unit,
-) {
-    if (name.isEmpty()) block() else SubSetBuilder(this, name).block()
-}
-
-
-public fun <T : Any> DataSetBuilder<T>.data(name: String, value: Data<T>) {
-    data(Name.parse(name), value)
-}
-
-public fun <T : Any> DataSetBuilder<T>.node(name: String, set: DataSet<T>) {
-    node(Name.parse(name), set)
-}
-
-public inline fun <T : Any> DataSetBuilder<T>.node(
-    name: String,
-    crossinline block: DataSetBuilder<T>.() -> Unit,
-): Unit = node(Name.parse(name), block)
-
-public fun <T : Any> DataSetBuilder<T>.set(value: NamedData<T>) {
-    data(value.name, value.data)
-}
-
-/**
- * Produce lazy [Data] and emit it into the [DataSetBuilder]
- */
-public inline fun <reified T : Any> DataSetBuilder<T>.produce(
-    name: String,
-    meta: Meta = Meta.EMPTY,
-    noinline producer: suspend () -> T,
-) {
-    val data = Data(meta, block = producer)
-    data(name, data)
-}
-
-public inline fun <reified T : Any> DataSetBuilder<T>.produce(
-    name: Name,
-    meta: Meta = Meta.EMPTY,
-    noinline producer: suspend () -> T,
-) {
-    val data = Data(meta, block = producer)
-    data(name, data)
-}
-
-/**
- * Emit a static data with the fixed value
- */
-public inline fun <reified T : Any> DataSetBuilder<T>.static(
-    name: String,
-    data: T,
-    meta: Meta = Meta.EMPTY,
-): Unit = data(name, Data.static(data, meta))
-
-public inline fun <reified T : Any> DataSetBuilder<T>.static(
-    name: Name,
-    data: T,
-    meta: Meta = Meta.EMPTY,
-): Unit = data(name, Data.static(data, meta))
-
-public inline fun <reified T : Any> DataSetBuilder<T>.static(
-    name: String,
-    data: T,
-    mutableMeta: MutableMeta.() -> Unit,
-): Unit = data(Name.parse(name), Data.static(data, Meta(mutableMeta)))
-
-/**
- * Update data with given node data and meta with node meta.
- */
-@DFExperimental
-public fun <T : Any> DataSetBuilder<T>.populateFrom(tree: DataSet<T>): Unit {
-    tree.forEach {
-        //TODO check if the place is occupied
-        data(it.name, it.data)
-    }
-}
-
-//public fun <T : Any> DataSetBuilder<T>.populateFrom(flow: Flow<NamedData<T>>) {
-//    flow.collect {
-//        data(it.name, it.data)
-//    }
-//}
-
-public fun <T : Any> DataSetBuilder<T>.populateFrom(sequence: Sequence<NamedData<T>>) {
-    sequence.forEach {
-        data(it.name, it.data)
-    }
-}
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
new file mode 100644
index 00000000..d379d027
--- /dev/null
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataSource.kt
@@ -0,0 +1,330 @@
+package space.kscience.dataforge.data
+
+import kotlinx.coroutines.*
+import kotlinx.coroutines.flow.*
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.misc.DFInternal
+import space.kscience.dataforge.names.*
+import kotlin.contracts.contract
+import kotlin.reflect.KType
+import kotlin.reflect.typeOf
+
+/**
+ * A generic data provider
+ */
+public interface DataSource<out T> {
+
+    /**
+     * The minimal common ancestor to all data in the node
+     */
+    public val dataType: KType
+
+    /**
+     * Get data with given name. Or null if it is not present
+     */
+    public fun read(name: Name): Data<T>?
+}
+
+/**
+ * A data provider with possible dynamic updates
+ */
+public interface ObservableDataSource<out T> : DataSource<T> {
+
+    /**
+     * Flow updates made to the data
+     */
+    public fun updates(): Flow<NamedData<T>>
+}
+
+/**
+ * A tree like structure for data holding
+ */
+public interface GenericDataTree<out T, out TR : GenericDataTree<T, TR>> : DataSource<T> {
+    public val self: TR
+
+    public val data: Data<T>?
+    public val items: Map<NameToken, TR>
+
+
+    override fun read(name: Name): Data<T>? = when (name.length) {
+        0 -> data
+        else -> items[name.first()]?.read(name.cutFirst())
+    }
+
+    public companion object {
+        private object EmptyDataTree : GenericDataTree<Nothing, EmptyDataTree> {
+            override val self: EmptyDataTree get() = this
+            override val data: Data<Nothing>? = null
+            override val items: Map<NameToken, EmptyDataTree> = emptyMap()
+            override val dataType: KType = typeOf<Unit>()
+
+            override fun read(name: Name): Data<Nothing>? = null
+
+        }
+
+        public val EMPTY: GenericDataTree<Nothing, *> = EmptyDataTree
+    }
+}
+
+public typealias DataTree<T> = GenericDataTree<T, GenericDataTree<T, *>>
+
+/**
+ * Return a single data in this tree. Throw error if it is not single.
+ */
+public fun <T> DataTree<T>.single(): NamedData<T> = asSequence().single()
+
+/**
+ * An alias for easier access to tree values
+ */
+public operator fun <T> DataTree<T>.get(name: Name): Data<T>? = read(name)
+
+public operator fun <T> DataTree<T>.get(name: String): Data<T>? = read(name.parseAsName())
+
+/**
+ * Return a sequence of all data items in this tree.
+ * This method does not take updates into account.
+ */
+public fun <T> DataTree<T>.asSequence(
+    namePrefix: Name = Name.EMPTY,
+): Sequence<NamedData<T>> = sequence {
+    data?.let { yield(it.named(namePrefix)) }
+    items.forEach { (token, tree) ->
+        yieldAll(tree.asSequence(namePrefix + token))
+    }
+}
+
+public val DataTree<*>.meta: Meta? get() = data?.meta
+
+/**
+ * Provide subtree if it exists
+ */
+public tailrec fun <T, TR : GenericDataTree<T, TR>> GenericDataTree<T, TR>.branch(name: Name): TR? =
+    when (name.length) {
+        0 -> self
+        1 -> items[name.first()]
+        else -> items[name.first()]?.branch(name.cutFirst())
+    }
+
+public fun <T, TR : GenericDataTree<T, TR>> GenericDataTree<T, TR>.branch(name: String): TR? =
+    branch(name.parseAsName())
+
+public fun GenericDataTree<*, *>.isEmpty(): Boolean = data == null && items.isEmpty()
+
+@PublishedApi
+internal class FlatDataTree<T>(
+    override val dataType: KType,
+    private val dataSet: Map<Name, Data<T>>,
+    private val prefix: Name,
+) : GenericDataTree<T, FlatDataTree<T>> {
+    override val self: FlatDataTree<T> get() = this
+    override val data: Data<T>? get() = dataSet[prefix]
+    override val items: Map<NameToken, FlatDataTree<T>>
+        get() = dataSet.keys
+            .filter { it.startsWith(prefix) && it.length > prefix.length }
+            .map { it.tokens[prefix.length] }
+            .associateWith { FlatDataTree(dataType, dataSet, prefix + it) }
+
+    override fun read(name: Name): Data<T>? = dataSet[prefix + name]
+}
+
+/**
+ * Represent this flat data map as a [DataTree] without copying it
+ */
+public inline fun <reified T> Map<Name, Data<T>>.asTree(): DataTree<T> = FlatDataTree(typeOf<T>(), this, Name.EMPTY)
+
+internal fun <T> Sequence<NamedData<T>>.toTree(type: KType): DataTree<T> =
+    FlatDataTree(type, associate { it.name to it.data }, Name.EMPTY)
+
+/**
+ * Collect a sequence of [NamedData] to a [DataTree]
+ */
+public inline fun <reified T> Sequence<NamedData<T>>.toTree(): DataTree<T> =
+    FlatDataTree(typeOf<T>(), associate { it.name to it.data }, Name.EMPTY)
+
+public interface GenericObservableDataTree<out T, out TR : GenericObservableDataTree<T, TR>> :
+    GenericDataTree<T, TR>, ObservableDataSource<T>, AutoCloseable {
+
+    /**
+     * A scope that is used to propagate updates. When this scope is closed, no new updates can arrive.
+     */
+    public val updatesScope: CoroutineScope
+
+    /**
+     * Close this data tree updates channel
+     */
+    override fun close() {
+        updatesScope.cancel()
+    }
+
+}
+
+public typealias ObservableDataTree<T> = GenericObservableDataTree<T, GenericObservableDataTree<T, *>>
+
+/**
+ * Check if the [DataTree] is observable
+ */
+public fun <T> DataTree<T>.isObservable(): Boolean {
+    contract {
+        returns(true) implies (this@isObservable is GenericObservableDataTree<T, *>)
+    }
+    return this is GenericObservableDataTree<T, *>
+}
+
+/**
+ * Wait for this data tree to stop spawning updates (updatesScope is closed).
+ * If this [DataTree] is not observable, return immediately.
+ */
+public suspend fun <T> DataTree<T>.awaitClose() {
+    if (isObservable()) {
+        updatesScope.coroutineContext[Job]?.join()
+    }
+}
+
+public fun <T> DataTree<T>.updates(): Flow<NamedData<T>> =
+    if (this is GenericObservableDataTree<T, *>) updates() else emptyFlow()
+
+public fun interface DataSink<in T> {
+    public fun put(name: Name, data: Data<T>?)
+}
+
+@DFInternal
+public class DataTreeBuilder<T>(private val type: KType) : DataSink<T> {
+    private val map = HashMap<Name, Data<T>>()
+    override fun put(name: Name, data: Data<T>?) {
+        if (data == null) {
+            map.remove(name)
+        } else {
+            map[name] = data
+        }
+    }
+
+    public fun build(): DataTree<T> = FlatDataTree(type, map, Name.EMPTY)
+}
+
+@DFInternal
+public inline fun <T> DataTree(
+    dataType: KType,
+    generator: DataSink<T>.() -> Unit,
+): DataTree<T> = DataTreeBuilder<T>(dataType).apply(generator).build()
+
+/**
+ * Create and initialize a data tree.
+ */
+@OptIn(DFInternal::class)
+public inline fun <reified T> DataTree(
+    generator: DataSink<T>.() -> Unit,
+): DataTree<T> = DataTreeBuilder<T>(typeOf<T>()).apply(generator).build()
+
+/**
+ * A mutable version of [GenericDataTree]
+ */
+public interface MutableDataTree<T> : GenericObservableDataTree<T, MutableDataTree<T>>, DataSink<T> {
+    override var data: Data<T>?
+
+    override val items: Map<NameToken, MutableDataTree<T>>
+
+    public fun getOrCreateItem(token: NameToken): MutableDataTree<T>
+
+    public operator fun set(token: NameToken, data: Data<T>?)
+
+    override fun put(name: Name, data: Data<T>?): Unit = set(name, data)
+}
+
+public tailrec operator fun <T> MutableDataTree<T>.set(name: Name, data: Data<T>?): Unit {
+    when (name.length) {
+        0 -> this.data = data
+        1 -> set(name.first(), data)
+        else -> getOrCreateItem(name.first())[name.cutFirst()] = data
+    }
+}
+
+private class MutableDataTreeImpl<T>(
+    override val dataType: KType,
+    override val updatesScope: CoroutineScope,
+) : MutableDataTree<T> {
+
+
+    private val updates = MutableSharedFlow<NamedData<T>>()
+
+    private val children = HashMap<NameToken, MutableDataTree<T>>()
+
+    override var data: Data<T>? = null
+        set(value) {
+            if (!updatesScope.isActive) error("Can't send updates to closed MutableDataTree")
+            field = value
+            if (value != null) {
+                updatesScope.launch {
+                    updates.emit(value.named(Name.EMPTY))
+                }
+            }
+        }
+
+    override val items: Map<NameToken, MutableDataTree<T>> get() = children
+
+    override fun getOrCreateItem(token: NameToken): MutableDataTree<T> = children.getOrPut(token){
+        MutableDataTreeImpl(dataType, updatesScope)
+    }
+
+    override val self: MutableDataTree<T> get() = this
+
+    override fun set(token: NameToken, data: Data<T>?) {
+        if (!updatesScope.isActive) error("Can't send updates to closed MutableDataTree")
+        val subTree = getOrCreateItem(token)
+        subTree.updates().onEach {
+            updates.emit(it.named(token + it.name))
+        }.launchIn(updatesScope)
+        subTree.data = data
+    }
+
+    override fun updates(): Flow<NamedData<T>> = updates
+}
+
+/**
+ * Create a new [MutableDataTree]
+ *
+ * @param parentScope a [CoroutineScope] to control data propagation. By default uses [GlobalScope]
+ */
+@OptIn(DelicateCoroutinesApi::class)
+public fun <T> MutableDataTree(
+    type: KType,
+    parentScope: CoroutineScope = GlobalScope,
+): MutableDataTree<T> = MutableDataTreeImpl<T>(
+    type,
+    CoroutineScope(parentScope.coroutineContext + Job(parentScope.coroutineContext[Job]))
+)
+
+/**
+ * Create and initialize an observable mutable data tree.
+ */
+@OptIn(DelicateCoroutinesApi::class)
+public inline fun <reified T> MutableDataTree(
+    parentScope: CoroutineScope = GlobalScope,
+    generator: MutableDataTree<T>.() -> Unit = {},
+): MutableDataTree<T> = MutableDataTree<T>(typeOf<T>(), parentScope).apply { generator() }
+
+//@DFInternal
+//public fun <T> ObservableDataTree(
+//    type: KType,
+//    scope: CoroutineScope,
+//    generator: suspend MutableDataTree<T>.() -> Unit = {},
+//): ObservableDataTree<T> = MutableDataTree<T>(type, scope.coroutineContext).apply(generator)
+
+public inline fun <reified T> ObservableDataTree(
+    parentScope: CoroutineScope,
+    generator: MutableDataTree<T>.() -> Unit = {},
+): ObservableDataTree<T> = MutableDataTree<T>(typeOf<T>(), parentScope).apply(generator)
+
+
+/**
+ * Collect a [Sequence] into an observable tree with additional [updates]
+ */
+public fun <T> Sequence<NamedData<T>>.toObservableTree(
+    dataType: KType,
+    parentScope: CoroutineScope,
+    updates: Flow<NamedData<T>>,
+): ObservableDataTree<T> = MutableDataTree<T>(dataType, parentScope).apply {
+    this.putAll(this@toObservableTree)
+    updates.onEach {
+        put(it.name, it.data)
+    }.launchIn(updatesScope)
+}
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTree.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTree.kt
deleted file mode 100644
index b9273c07..00000000
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTree.kt
+++ /dev/null
@@ -1,119 +0,0 @@
-package space.kscience.dataforge.data
-
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DFInternal
-import space.kscience.dataforge.misc.DfType
-import space.kscience.dataforge.names.*
-import kotlin.collections.component1
-import kotlin.collections.component2
-import kotlin.reflect.KType
-import kotlin.reflect.typeOf
-
-public sealed class DataTreeItem<out T : Any> {
-
-    public abstract val meta: Meta
-
-    public class Node<out T : Any>(public val tree: DataTree<T>) : DataTreeItem<T>() {
-        override val meta: Meta get() = tree.meta
-    }
-
-    public class Leaf<out T : Any>(public val data: Data<T>) : DataTreeItem<T>() {
-        override val meta: Meta get() = data.meta
-    }
-}
-
-public val <T : Any> DataTreeItem<T>.type: KType
-    get() = when (this) {
-        is DataTreeItem.Node -> tree.dataType
-        is DataTreeItem.Leaf -> data.type
-    }
-
-/**
- * A tree-like [DataSet] grouped into the node. All data inside the node must inherit its type
- */
-@DfType(DataTree.TYPE)
-public interface DataTree<out T : Any> : DataSet<T> {
-
-    /**
-     * Top-level children items of this [DataTree]
-     */
-    public val items: Map<NameToken, DataTreeItem<T>>
-
-    override val meta: Meta get() = items[META_ITEM_NAME_TOKEN]?.meta ?: Meta.EMPTY
-
-    override fun iterator(): Iterator<NamedData<T>> = iterator {
-        items.forEach { (token, childItem: DataTreeItem<T>) ->
-            if (!token.body.startsWith("@")) {
-                when (childItem) {
-                    is DataTreeItem.Leaf -> yield(childItem.data.named(token.asName()))
-                    is DataTreeItem.Node -> yieldAll(childItem.tree.asSequence().map { it.named(token + it.name) })
-                }
-            }
-        }
-    }
-
-    override fun get(name: Name): Data<T>? = when (name.length) {
-        0 -> null
-        1 -> items[name.firstOrNull()!!].data
-        else -> items[name.firstOrNull()!!].tree?.get(name.cutFirst())
-    }
-
-    public companion object {
-        public const val TYPE: String = "dataTree"
-
-        /**
-         * A name token used to designate tree node meta
-         */
-        public val META_ITEM_NAME_TOKEN: NameToken = NameToken("@meta")
-
-        @DFInternal
-        public fun <T : Any> emptyWithType(type: KType, meta: Meta = Meta.EMPTY): DataTree<T> = object : DataTree<T> {
-            override val items: Map<NameToken, DataTreeItem<T>> get() = emptyMap()
-            override val dataType: KType get() = type
-            override val meta: Meta get() = meta
-        }
-
-        @OptIn(DFInternal::class)
-        public inline fun <reified T : Any> empty(meta: Meta = Meta.EMPTY): DataTree<T> =
-            emptyWithType<T>(typeOf<T>(), meta)
-    }
-}
-
-public fun <T : Any> DataTree<T>.listChildren(prefix: Name): List<Name> =
-    getItem(prefix).tree?.items?.keys?.map { prefix + it } ?: emptyList()
-
-/**
- * Get a [DataTreeItem] with given [name] or null if the item does not exist
- */
-public tailrec fun <T : Any> DataTree<T>.getItem(name: Name): DataTreeItem<T>? = when (name.length) {
-    0 -> DataTreeItem.Node(this)
-    1 -> items[name.firstOrNull()]
-    else -> items[name.firstOrNull()!!].tree?.getItem(name.cutFirst())
-}
-
-public val <T : Any> DataTreeItem<T>?.tree: DataTree<T>? get() = (this as? DataTreeItem.Node<T>)?.tree
-public val <T : Any> DataTreeItem<T>?.data: Data<T>? get() = (this as? DataTreeItem.Leaf<T>)?.data
-
-/**
- * A [Sequence] of all children including nodes
- */
-public fun <T : Any> DataTree<T>.traverseItems(): Sequence<Pair<Name, DataTreeItem<T>>> = sequence {
-    items.forEach { (head, item) ->
-        yield(head.asName() to item)
-        if (item is DataTreeItem.Node) {
-            val subSequence = item.tree.traverseItems()
-                .map { (name, data) -> (head.asName() + name) to data }
-            yieldAll(subSequence)
-        }
-    }
-}
-
-/**
- * Get a branch of this [DataTree] with a given [branchName].
- * The difference from similar method for [DataSet] is that internal logic is more simple and the return value is a [DataTree]
- */
-@OptIn(DFInternal::class)
-public fun <T : Any> DataTree<T>.branch(branchName: Name): DataTree<T> =
-    getItem(branchName)?.tree ?: DataTree.emptyWithType(dataType)
-
-public fun <T : Any> DataTree<T>.branch(branchName: String): DataTree<T> = branch(branchName.parseAsName())
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTreeBuilder.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTreeBuilder.kt
deleted file mode 100644
index 303ba44e..00000000
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/DataTreeBuilder.kt
+++ /dev/null
@@ -1,127 +0,0 @@
-package space.kscience.dataforge.data
-
-import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.Job
-import kotlinx.coroutines.flow.MutableSharedFlow
-import kotlinx.coroutines.launch
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DFInternal
-import space.kscience.dataforge.misc.ThreadSafe
-import space.kscience.dataforge.names.*
-import kotlin.collections.set
-import kotlin.coroutines.CoroutineContext
-import kotlin.coroutines.coroutineContext
-import kotlin.reflect.KType
-import kotlin.reflect.typeOf
-
-public interface DataSourceBuilder<T : Any> : DataSetBuilder<T>, DataSource<T> {
-    override val updates: MutableSharedFlow<Name>
-}
-
-/**
- * A mutable [DataTree] that propagates updates
- */
-public class DataTreeBuilder<T : Any>(
-    override val dataType: KType,
-    coroutineContext: CoroutineContext,
-) : DataTree<T>, DataSourceBuilder<T> {
-
-    override val coroutineContext: CoroutineContext =
-        coroutineContext + Job(coroutineContext[Job]) + GoalExecutionRestriction()
-
-    private val treeItems = HashMap<NameToken, DataTreeItem<T>>()
-
-    override val items: Map<NameToken, DataTreeItem<T>>
-        get() = treeItems.filter { !it.key.body.startsWith("@") }
-
-    override val updates: MutableSharedFlow<Name> = MutableSharedFlow<Name>()
-
-    @ThreadSafe
-    private fun remove(token: NameToken) {
-        if (treeItems.remove(token) != null) {
-            launch {
-                updates.emit(token.asName())
-            }
-        }
-    }
-
-    override fun remove(name: Name) {
-        if (name.isEmpty()) error("Can't remove the root node")
-        (getItem(name.cutLast()).tree as? DataTreeBuilder)?.remove(name.lastOrNull()!!)
-    }
-
-    @ThreadSafe
-    private fun set(token: NameToken, data: Data<T>) {
-        treeItems[token] = DataTreeItem.Leaf(data)
-    }
-
-    @ThreadSafe
-    private fun set(token: NameToken, node: DataTree<T>) {
-        treeItems[token] = DataTreeItem.Node(node)
-    }
-
-    private fun getOrCreateNode(token: NameToken): DataTreeBuilder<T> =
-        (treeItems[token] as? DataTreeItem.Node<T>)?.tree as? DataTreeBuilder<T>
-            ?: DataTreeBuilder<T>(dataType, coroutineContext).also { set(token, it) }
-
-    private fun getOrCreateNode(name: Name): DataTreeBuilder<T> = when (name.length) {
-        0 -> this
-        1 -> getOrCreateNode(name.firstOrNull()!!)
-        else -> getOrCreateNode(name.firstOrNull()!!).getOrCreateNode(name.cutFirst())
-    }
-
-    override fun data(name: Name, data: Data<T>?) {
-        if (data == null) {
-            remove(name)
-        } else {
-            when (name.length) {
-                0 -> error("Can't add data with empty name")
-                1 -> set(name.firstOrNull()!!, data)
-                2 -> getOrCreateNode(name.cutLast()).set(name.lastOrNull()!!, data)
-            }
-        }
-        launch {
-            updates.emit(name)
-        }
-    }
-
-    override fun meta(name: Name, meta: Meta) {
-        val item = getItem(name)
-        if (item is DataTreeItem.Leaf) error("TODO: Can't change meta of existing leaf item.")
-        data(name + DataTree.META_ITEM_NAME_TOKEN, Data.empty(meta))
-    }
-}
-
-/**
- * Create a dynamic [DataSource]. Initial data is placed synchronously.
- */
-@DFInternal
-@Suppress("FunctionName")
-public fun <T : Any> DataSource(
-    type: KType,
-    parent: CoroutineScope,
-    block: DataSourceBuilder<T>.() -> Unit,
-): DataTreeBuilder<T> = DataTreeBuilder<T>(type, parent.coroutineContext).apply(block)
-
-@Suppress("OPT_IN_USAGE", "FunctionName")
-public inline fun <reified T : Any> DataSource(
-    parent: CoroutineScope,
-    crossinline block: DataSourceBuilder<T>.() -> Unit,
-): DataTreeBuilder<T> = DataSource(typeOf<T>(), parent) { block() }
-
-@Suppress("FunctionName")
-public suspend inline fun <reified T : Any> DataSource(
-    crossinline block: DataSourceBuilder<T>.() -> Unit = {},
-): DataTreeBuilder<T> = DataTreeBuilder<T>(typeOf<T>(), coroutineContext).apply { block() }
-
-public inline fun <reified T : Any> DataSourceBuilder<T>.emit(
-    name: Name,
-    parent: CoroutineScope,
-    noinline block: DataSourceBuilder<T>.() -> Unit,
-): Unit = node(name, DataSource(parent, block))
-
-public inline fun <reified T : Any> DataSourceBuilder<T>.emit(
-    name: String,
-    parent: CoroutineScope,
-    noinline block: DataSourceBuilder<T>.() -> Unit,
-): Unit = node(Name.parse(name), DataSource(parent, block))
\ No newline at end of file
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
index 678711c1..95ddbbf7 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/Goal.kt
@@ -9,7 +9,7 @@ import kotlin.coroutines.EmptyCoroutineContext
  * Lazy computation result with its dependencies to allowing to stat computing dependencies ahead of time
  */
 public interface Goal<out T> {
-    public val dependencies: Collection<Goal<*>>
+    public val dependencies: Iterable<Goal<*>>
 
     /**
      * Returns current running coroutine if the goal is started. Null if the computation is not started.
@@ -54,7 +54,7 @@ public open class StaticGoal<T>(public val value: T) : Goal<T> {
  */
 public open class LazyGoal<T>(
     private val coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    override val dependencies: Collection<Goal<*>> = emptyList(),
+    override val dependencies: Iterable<Goal<*>> = emptyList(),
     public val block: suspend () -> T,
 ) : Goal<T> {
 
@@ -82,8 +82,8 @@ public open class LazyGoal<T>(
         }
 
         log?.emit { "Starting dependencies computation for ${this@LazyGoal}" }
-        val startedDependencies = this.dependencies.map { goal ->
-            goal.run { async(coroutineScope) }
+        val startedDependencies = dependencies.map { goal ->
+            goal.async(coroutineScope)
         }
         return deferred ?: coroutineScope.async(
             coroutineContext
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
index 189087a3..90486d85 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/GroupRule.kt
@@ -15,13 +15,12 @@
  */
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.launch
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.string
 import space.kscience.dataforge.misc.DFInternal
 
 public interface GroupRule {
-    public fun <T : Any> gather(set: DataSet<T>): Map<String, DataSet<T>>
+    public fun <T : Any> gather(set: DataTree<T>): Map<String, DataTree<T>>
 
     public companion object {
         /**
@@ -39,39 +38,17 @@ public interface GroupRule {
         ): GroupRule = object : GroupRule {
 
             override fun <T : Any> gather(
-                set: DataSet<T>,
-            ): Map<String, DataSet<T>> {
-                val map = HashMap<String, DataSet<T>>()
+                set: DataTree<T>,
+            ): Map<String, DataTree<T>> {
+                val map = HashMap<String, DataTreeBuilder<T>>()
 
-                if (set is DataSource) {
-                    set.forEach { data ->
-                        val tagValue: String = data.meta[key]?.string ?: defaultTagValue
-                        (map.getOrPut(tagValue) { DataTreeBuilder(set.dataType, set.coroutineContext) } as DataTreeBuilder<T>)
-                            .data(data.name, data.data)
-
-                        set.launch {
-                            set.updates.collect { name ->
-                                val dataUpdate = set[name]
-
-                                val updateTagValue = dataUpdate?.meta?.get(key)?.string ?: defaultTagValue
-                                map.getOrPut(updateTagValue) {
-                                    DataSource(set.dataType, this) {
-                                        data(name, dataUpdate)
-                                    }
-                                }
-                            }
-                        }
-                    }
-                } else {
-                    set.forEach { data ->
-                        val tagValue: String = data.meta[key]?.string ?: defaultTagValue
-                        (map.getOrPut(tagValue) { StaticDataTree(set.dataType) } as StaticDataTree<T>)
-                            .data(data.name, data.data)
-                    }
+                set.forEach { data ->
+                    val tagValue: String = data.meta[key]?.string ?: defaultTagValue
+                    map.getOrPut(tagValue) { DataTreeBuilder(set.dataType) }.put(data.name, data.data)
                 }
 
 
-                return map
+                return map.mapValues { it.value.build() }
             }
         }
     }
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt
new file mode 100644
index 00000000..acf2410d
--- /dev/null
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/MetaMaskData.kt
@@ -0,0 +1,23 @@
+package space.kscience.dataforge.data
+
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.MutableMeta
+import space.kscience.dataforge.meta.copy
+
+
+private class MetaMaskData<T>(val origin: Data<T>, override val meta: Meta) : Data<T> by origin
+
+/**
+ * A data with overridden meta. It reflects the original data's computed state.
+ */
+public fun <T> Data<T>.withMeta(newMeta: Meta): Data<T> = if (this is MetaMaskData) {
+    MetaMaskData(origin, newMeta)
+} else {
+    MetaMaskData(this, newMeta)
+}
+
+/**
+ * Create a new [Data] with the same computation, but different meta. The meta is created by applying [block] to
+ * the existing data meta.
+ */
+public inline fun <T> Data<T>.mapMeta(block: MutableMeta.() -> Unit): Data<T> = withMeta(meta.copy(block))
\ No newline at end of file
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
index 4c9d4bb3..63e36a3f 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/NamedData.kt
@@ -4,7 +4,7 @@ import space.kscience.dataforge.meta.isEmpty
 import space.kscience.dataforge.misc.Named
 import space.kscience.dataforge.names.Name
 
-public interface NamedData<out T : Any> : Named, Data<T> {
+public interface NamedData<out T> : Named, Data<T> {
     override val name: Name
     public val data: Data<T>
 }
@@ -12,7 +12,7 @@ public interface NamedData<out T : Any> : Named, Data<T> {
 public operator fun NamedData<*>.component1(): Name = name
 public operator fun <T: Any> NamedData<T>.component2(): Data<T> = data
 
-private class NamedDataImpl<out T : Any>(
+private class NamedDataImpl<T>(
     override val name: Name,
     override val data: Data<T>,
 ) : Data<T> by data, NamedData<T> {
@@ -28,7 +28,7 @@ private class NamedDataImpl<out T : Any>(
     }
 }
 
-public fun <T : Any> Data<T>.named(name: Name): NamedData<T> = if (this is NamedData) {
+public fun <T> Data<T>.named(name: Name): NamedData<T> = if (this is NamedData) {
     NamedDataImpl(name, this.data)
 } else {
     NamedDataImpl(name, this)
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataTree.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataTree.kt
deleted file mode 100644
index 4f0f455e..00000000
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/StaticDataTree.kt
+++ /dev/null
@@ -1,82 +0,0 @@
-package space.kscience.dataforge.data
-
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.*
-import kotlin.reflect.KType
-import kotlin.reflect.typeOf
-
-@PublishedApi
-internal class StaticDataTree<T : Any>(
-    override val dataType: KType,
-) : DataSetBuilder<T>, DataTree<T> {
-
-    private val _items: MutableMap<NameToken, DataTreeItem<T>> = HashMap()
-
-    override val items: Map<NameToken, DataTreeItem<T>>
-        get() = _items.filter { !it.key.body.startsWith("@") }
-
-    override fun remove(name: Name) {
-        when (name.length) {
-            0 -> error("Can't remove root tree node")
-            1 -> _items.remove(name.firstOrNull()!!)
-            else -> (_items[name.firstOrNull()!!].tree as? StaticDataTree<T>)?.remove(name.cutFirst())
-        }
-    }
-
-    private fun getOrCreateNode(name: Name): StaticDataTree<T> = when (name.length) {
-        0 -> this
-        1 -> {
-            val itemName = name.firstOrNull()!!
-            (_items[itemName].tree as? StaticDataTree<T>) ?: StaticDataTree<T>(dataType).also {
-                _items[itemName] = DataTreeItem.Node(it)
-            }
-        }
-        else -> getOrCreateNode(name.cutLast()).getOrCreateNode(name.lastOrNull()!!.asName())
-    }
-
-    private fun set(name: Name, item: DataTreeItem<T>?) {
-        if (name.isEmpty()) error("Can't set top level tree node")
-        if (item == null) {
-            remove(name)
-        } else {
-            getOrCreateNode(name.cutLast())._items[name.lastOrNull()!!] = item
-        }
-    }
-
-    override fun data(name: Name, data: Data<T>?) {
-        set(name, data?.let { DataTreeItem.Leaf(it) })
-    }
-
-    override fun node(name: Name, dataSet: DataSet<T>) {
-        if (dataSet is StaticDataTree) {
-            set(name, DataTreeItem.Node(dataSet))
-        } else {
-            dataSet.forEach {
-                data(name + it.name, it.data)
-            }
-        }
-    }
-
-    override fun meta(name: Name, meta: Meta) {
-        val item = getItem(name)
-        if (item is DataTreeItem.Leaf) TODO("Can't change meta of existing leaf item.")
-        data(name + DataTree.META_ITEM_NAME_TOKEN, Data.empty(meta))
-    }
-}
-
-@Suppress("FunctionName")
-public inline fun <T : Any> DataTree(
-    dataType: KType,
-    block: DataSetBuilder<T>.() -> Unit,
-): DataTree<T> = StaticDataTree<T>(dataType).apply { block() }
-
-@Suppress("FunctionName")
-public inline fun <reified T : Any> DataTree(
-    noinline block: DataSetBuilder<T>.() -> Unit,
-): DataTree<T> = DataTree(typeOf<T>(), block)
-
-@OptIn(DFExperimental::class)
-public fun <T : Any> DataSet<T>.seal(): DataTree<T> = DataTree(dataType) {
-    populateFrom(this@seal)
-}
\ No newline at end of file
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
new file mode 100644
index 00000000..8e8b6eaa
--- /dev/null
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataBuilders.kt
@@ -0,0 +1,132 @@
+package space.kscience.dataforge.data
+
+import kotlinx.coroutines.Job
+import kotlinx.coroutines.flow.launchIn
+import kotlinx.coroutines.flow.onEach
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.MutableMeta
+import space.kscience.dataforge.misc.DFExperimental
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.asName
+import space.kscience.dataforge.names.isEmpty
+import space.kscience.dataforge.names.plus
+
+
+public fun <T> DataSink<T>.put(value: NamedData<T>) {
+    put(value.name, value.data)
+}
+
+public fun <T> DataSink<T>.branch(dataTree: DataTree<T>) {
+    putAll(dataTree.asSequence())
+}
+
+public inline fun <T> DataSink<T>.branch(
+    prefix: Name,
+    block: DataSink<T>.() -> Unit,
+) {
+    if (prefix.isEmpty()) {
+        apply(block)
+    } else {
+        val proxyDataSink = DataSink { nameWithoutPrefix, data ->
+            this.put(prefix + nameWithoutPrefix, data)
+        }
+
+        proxyDataSink.apply(block)
+    }
+}
+
+public inline fun <T> DataSink<T>.branch(
+    prefix: String,
+    block: DataSink<T>.() -> Unit,
+): Unit = branch(prefix.asName(), block)
+
+
+public fun <T> DataSink<T>.put(name: String, value: Data<T>) {
+    put(Name.parse(name), value)
+}
+
+public fun <T> DataSink<T>.branch(name: Name, set: DataTree<T>) {
+    branch(name) { putAll(set.asSequence()) }
+}
+
+public fun <T> DataSink<T>.branch(name: String, set: DataTree<T>) {
+    branch(Name.parse(name)) { putAll(set.asSequence()) }
+}
+
+/**
+ * Produce lazy [Data] and emit it into the [MutableDataTree]
+ */
+public inline fun <reified T> DataSink<T>.put(
+    name: String,
+    meta: Meta = Meta.EMPTY,
+    noinline producer: suspend () -> T,
+) {
+    val data = Data(meta, block = producer)
+    put(name, data)
+}
+
+public inline fun <reified T> DataSink<T>.put(
+    name: Name,
+    meta: Meta = Meta.EMPTY,
+    noinline producer: suspend () -> T,
+) {
+    val data = Data(meta, block = producer)
+    put(name, data)
+}
+
+/**
+ * Emit static data with the fixed value
+ */
+public inline fun <reified T> DataSink<T>.wrap(
+    name: String,
+    data: T,
+    meta: Meta = Meta.EMPTY,
+): Unit = put(name, Data.static(data, meta))
+
+public inline fun <reified T> DataSink<T>.wrap(
+    name: Name,
+    data: T,
+    meta: Meta = Meta.EMPTY,
+): Unit = put(name, Data.static(data, meta))
+
+public inline fun <reified T> DataSink<T>.wrap(
+    name: String,
+    data: T,
+    mutableMeta: MutableMeta.() -> Unit,
+): Unit = put(Name.parse(name), Data.static(data, Meta(mutableMeta)))
+
+
+public fun <T> DataSink<T>.putAll(sequence: Sequence<NamedData<T>>) {
+    sequence.forEach {
+        put(it.name, it.data)
+    }
+}
+
+public fun <T> DataSink<T>.putAll(tree: DataTree<T>) {
+    this.putAll(tree.asSequence())
+}
+
+
+/**
+ * Update data with given node data and meta with node meta.
+ */
+@DFExperimental
+public fun <T> MutableDataTree<T>.putAll(source: DataTree<T>) {
+    source.forEach {
+        put(it.name, it.data)
+    }
+}
+
+/**
+ * Copy the given data set and mirror its changes to this [DataSink]. Returns an update [Job].
+ */
+public fun <T : Any> DataSink<T>.watchBranch(
+    name: Name,
+    dataSet: ObservableDataTree<T>,
+): Job {
+    branch(name, dataSet)
+    return dataSet.updates().onEach {
+        put(name + it.name, it.data)
+    }.launchIn(dataSet.updatesScope)
+
+}
\ No newline at end of file
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataFilter.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataFilter.kt
deleted file mode 100644
index 3d2b6537..00000000
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataFilter.kt
+++ /dev/null
@@ -1,105 +0,0 @@
-package space.kscience.dataforge.data
-
-import kotlinx.coroutines.flow.Flow
-import kotlinx.coroutines.flow.filter
-import kotlinx.coroutines.flow.map
-import kotlinx.coroutines.flow.mapNotNull
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.*
-import kotlin.coroutines.CoroutineContext
-import kotlin.coroutines.EmptyCoroutineContext
-import kotlin.reflect.KType
-
-
-/**
- * A stateless filtered [DataSet]
- */
-public fun <T : Any> DataSet<T>.filter(
-    predicate: (Name, Meta) -> Boolean,
-): DataSource<T> = object : DataSource<T> {
-
-    override val dataType: KType get() = this@filter.dataType
-
-    override val coroutineContext: CoroutineContext
-        get() = (this@filter as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
-
-
-    override val meta: Meta get() = this@filter.meta
-
-    override fun iterator(): Iterator<NamedData<T>> = iterator {
-        for (d in this@filter) {
-            if (predicate(d.name, d.meta)) {
-                yield(d)
-            }
-        }
-    }
-
-    override fun get(name: Name): Data<T>? = this@filter.get(name)?.takeIf {
-        predicate(name, it.meta)
-    }
-
-    override val updates: Flow<Name> = this@filter.updates.filter flowFilter@{ name ->
-        val theData = this@filter[name] ?: return@flowFilter false
-        predicate(name, theData.meta)
-    }
-}
-
-/**
- * Generate a wrapper data set with a given name prefix appended to all names
- */
-public fun <T : Any> DataSet<T>.withNamePrefix(prefix: Name): DataSet<T> = if (prefix.isEmpty()) {
-    this
-} else object : DataSource<T> {
-
-    override val dataType: KType get() = this@withNamePrefix.dataType
-
-    override val coroutineContext: CoroutineContext
-        get() = (this@withNamePrefix as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
-
-    override val meta: Meta get() = this@withNamePrefix.meta
-
-
-    override fun iterator(): Iterator<NamedData<T>> = iterator {
-        for (d in this@withNamePrefix) {
-            yield(d.data.named(prefix + d.name))
-        }
-    }
-
-    override fun get(name: Name): Data<T>? =
-        name.removeFirstOrNull(name)?.let { this@withNamePrefix.get(it) }
-
-    override val updates: Flow<Name> get() = this@withNamePrefix.updates.map { prefix + it }
-}
-
-/**
- * Get a subset of data starting with a given [branchName]
- */
-public fun <T : Any> DataSet<T>.branch(branchName: Name): DataSet<T> = if (branchName.isEmpty()) {
-    this
-} else object : DataSource<T> {
-    override val dataType: KType get() = this@branch.dataType
-
-    override val coroutineContext: CoroutineContext
-        get() = (this@branch as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
-
-    override val meta: Meta get() = this@branch.meta
-
-    override fun iterator(): Iterator<NamedData<T>> = iterator {
-        for (d in this@branch) {
-            d.name.removeFirstOrNull(branchName)?.let { name ->
-                yield(d.data.named(name))
-            }
-        }
-    }
-
-    override fun get(name: Name): Data<T>? = this@branch.get(branchName + name)
-
-    override val updates: Flow<Name> get() = this@branch.updates.mapNotNull { it.removeFirstOrNull(branchName) }
-}
-
-public fun <T : Any> DataSet<T>.branch(branchName: String): DataSet<T> = this@branch.branch(branchName.parseAsName())
-
-@DFExperimental
-public suspend fun <T : Any> DataSet<T>.rootData(): Data<T>? = get(Name.EMPTY)
-
diff --git a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
index 76577346..8c7ce70e 100644
--- a/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
+++ b/dataforge-data/src/commonMain/kotlin/space/kscience/dataforge/data/dataTransform.kt
@@ -1,24 +1,22 @@
 package space.kscience.dataforge.data
 
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.MutableMeta
-import space.kscience.dataforge.meta.seal
-import space.kscience.dataforge.meta.toMutableMeta
+import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.NameToken
 import kotlin.coroutines.CoroutineContext
 import kotlin.coroutines.EmptyCoroutineContext
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
-public data class ValueWithMeta<T>(val meta: Meta, val value: T)
+public data class ValueWithMeta<T>(val value: T, val meta: Meta)
 
-public suspend fun <T : Any> Data<T>.awaitWithMeta(): ValueWithMeta<T> = ValueWithMeta(meta, await())
+public suspend fun <T> Data<T>.awaitWithMeta(): ValueWithMeta<T> = ValueWithMeta(await(), meta)
 
-public data class NamedValueWithMeta<T>(val name: Name, val meta: Meta, val value: T)
+public data class NamedValueWithMeta<T>(val name: Name, val value: T, val meta: Meta)
 
-public suspend fun <T : Any> NamedData<T>.awaitWithMeta(): NamedValueWithMeta<T> =
-    NamedValueWithMeta(name, meta, await())
+public suspend fun <T> NamedData<T>.awaitWithMeta(): NamedValueWithMeta<T> =
+    NamedValueWithMeta(name, await(), meta)
 
 
 /**
@@ -27,9 +25,9 @@ public suspend fun <T : Any> NamedData<T>.awaitWithMeta(): NamedValueWithMeta<T>
  * @param meta for the resulting data. By default equals input data.
  * @param block the transformation itself
  */
-public inline fun <T : Any, reified R : Any> Data<T>.map(
-    coroutineContext: CoroutineContext = EmptyCoroutineContext,
+public inline fun <T, reified R> Data<T>.transform(
     meta: Meta = this.meta,
+    coroutineContext: CoroutineContext = EmptyCoroutineContext,
     crossinline block: suspend (T) -> R,
 ): Data<R> = Data(meta, coroutineContext, listOf(this)) {
     block(await())
@@ -38,10 +36,10 @@ public inline fun <T : Any, reified R : Any> Data<T>.map(
 /**
  * Combine this data with the other data using [block]. See [Data::map] for other details
  */
-public inline fun <T1 : Any, T2 : Any, reified R : Any> Data<T1>.combine(
+public inline fun <T1, T2, reified R> Data<T1>.combine(
     other: Data<T2>,
-    coroutineContext: CoroutineContext = EmptyCoroutineContext,
     meta: Meta = this.meta,
+    coroutineContext: CoroutineContext = EmptyCoroutineContext,
     crossinline block: suspend (left: T1, right: T2) -> R,
 ): Data<R> = Data(meta, coroutineContext, listOf(this, other)) {
     block(await(), other.await())
@@ -50,26 +48,31 @@ public inline fun <T1 : Any, T2 : Any, reified R : Any> Data<T1>.combine(
 
 //data collection operations
 
-/**
- * Lazily reduce a collection of [Data] to a single data.
- */
-public inline fun <T : Any, reified R : Any> Collection<Data<T>>.reduceToData(
-    coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
-    crossinline block: suspend (List<ValueWithMeta<T>>) -> R,
-): Data<R> = Data(
-    meta,
-    coroutineContext,
-    this
-) {
-    block(map { it.awaitWithMeta() })
+@PublishedApi
+internal fun Iterable<Data<*>>.joinMeta(): Meta = Meta {
+    var counter = 0
+    forEach { data ->
+        val inputIndex = (data as? NamedData)?.name?.toString() ?: (counter++).toString()
+        val token = NameToken("data", inputIndex)
+        set(token, data.meta)
+    }
+}
+
+
+
+@PublishedApi
+internal fun Map<*, Data<*>>.joinMeta(): Meta = Meta {
+    forEach { (key, data) ->
+        val token = NameToken("data", key.toString())
+        set(token, data.meta)
+    }
 }
 
 @DFInternal
-public fun <K, T : Any, R : Any> Map<K, Data<T>>.reduceToData(
+public fun <K, T, R> Map<K, Data<T>>.reduceToData(
     outputType: KType,
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     block: suspend (Map<K, ValueWithMeta<T>>) -> R,
 ): Data<R> = Data(
     outputType,
@@ -86,9 +89,9 @@ public fun <K, T : Any, R : Any> Map<K, Data<T>>.reduceToData(
  * @param T type of the input goal
  * @param R type of the result goal
  */
-public inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduceToData(
+public inline fun <K, T, reified R> Map<K, Data<T>>.reduceToData(
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline block: suspend (Map<K, ValueWithMeta<T>>) -> R,
 ): Data<R> = Data(
     meta,
@@ -101,10 +104,10 @@ public inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduceToData(
 //Iterable operations
 
 @DFInternal
-public inline fun <T : Any, R : Any> Iterable<Data<T>>.reduceToData(
+public inline fun <T, R> Iterable<Data<T>>.reduceToData(
     outputType: KType,
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline transformation: suspend (Collection<ValueWithMeta<T>>) -> R,
 ): Data<R> = Data(
     outputType,
@@ -116,21 +119,21 @@ public inline fun <T : Any, R : Any> Iterable<Data<T>>.reduceToData(
 }
 
 @OptIn(DFInternal::class)
-public inline fun <T : Any, reified R : Any> Iterable<Data<T>>.reduceToData(
+public inline fun <T, reified R> Iterable<Data<T>>.reduceToData(
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline transformation: suspend (Collection<ValueWithMeta<T>>) -> R,
-): Data<R> = reduceToData(typeOf<R>(), coroutineContext, meta) {
+): Data<R> = reduceToData(typeOf<R>(), meta, coroutineContext) {
     transformation(it)
 }
 
-public inline fun <T : Any, reified R : Any> Iterable<Data<T>>.foldToData(
+public inline fun <T, reified R> Iterable<Data<T>>.foldToData(
     initial: R,
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline block: suspend (result: R, data: ValueWithMeta<T>) -> R,
 ): Data<R> = reduceToData(
-    coroutineContext, meta
+    meta, coroutineContext
 ) {
     it.fold(initial) { acc, t -> block(acc, t) }
 }
@@ -139,10 +142,10 @@ public inline fun <T : Any, reified R : Any> Iterable<Data<T>>.foldToData(
  * Transform an [Iterable] of [NamedData] to a single [Data].
  */
 @DFInternal
-public inline fun <T : Any, R : Any> Iterable<NamedData<T>>.reduceNamedToData(
+public inline fun <T, R> Iterable<NamedData<T>>.reduceNamedToData(
     outputType: KType,
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline transformation: suspend (Collection<NamedValueWithMeta<T>>) -> R,
 ): Data<R> = Data(
     outputType,
@@ -154,24 +157,24 @@ public inline fun <T : Any, R : Any> Iterable<NamedData<T>>.reduceNamedToData(
 }
 
 @OptIn(DFInternal::class)
-public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.reduceNamedToData(
+public inline fun <T, reified R> Iterable<NamedData<T>>.reduceNamedToData(
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline transformation: suspend (Collection<NamedValueWithMeta<T>>) -> R,
-): Data<R> = reduceNamedToData(typeOf<R>(), coroutineContext, meta) {
+): Data<R> = reduceNamedToData(typeOf<R>(), meta, coroutineContext) {
     transformation(it)
 }
 
 /**
  * Fold a [Iterable] of named data into a single [Data]
  */
-public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.foldNamedToData(
+public inline fun <T, reified R> Iterable<NamedData<T>>.foldNamedToData(
     initial: R,
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline block: suspend (result: R, data: NamedValueWithMeta<T>) -> R,
 ): Data<R> = reduceNamedToData(
-    coroutineContext, meta
+    meta, coroutineContext
 ) {
     it.fold(initial) { acc, t -> block(acc, t) }
 }
@@ -179,43 +182,52 @@ public inline fun <T : Any, reified R : Any> Iterable<NamedData<T>>.foldNamedToD
 //DataSet operations
 
 @DFInternal
-public suspend fun <T : Any, R : Any> DataSet<T>.map(
+public suspend fun <T, R> DataTree<T>.transform(
     outputType: KType,
-    coroutineContext: CoroutineContext = EmptyCoroutineContext,
     metaTransform: MutableMeta.() -> Unit = {},
+    coroutineContext: CoroutineContext = EmptyCoroutineContext,
     block: suspend (NamedValueWithMeta<T>) -> R,
-): DataTree<R> = DataTree<R>(outputType) {
-    forEach {
-        val newMeta = it.meta.toMutableMeta().apply(metaTransform).seal()
-        val d = Data(outputType, newMeta, coroutineContext, listOf(it)) {
-            block(it.awaitWithMeta())
+): DataTree<R> = DataTree<R>(outputType){
+    // quasi-synchronous processing of elements in the tree
+    asSequence().forEach { namedData: NamedData<T> ->
+        val newMeta = namedData.meta.toMutableMeta().apply(metaTransform).seal()
+        val d = Data(outputType, newMeta, coroutineContext, listOf(namedData)) {
+            block(namedData.awaitWithMeta())
         }
-        data(it.name, d)
+        put(namedData.name, d)
     }
 }
 
 @OptIn(DFInternal::class)
-public suspend inline fun <T : Any, reified R : Any> DataSet<T>.map(
-    coroutineContext: CoroutineContext = EmptyCoroutineContext,
+public suspend inline fun <T, reified R> DataTree<T>.transform(
     noinline metaTransform: MutableMeta.() -> Unit = {},
+    coroutineContext: CoroutineContext = EmptyCoroutineContext,
     noinline block: suspend (NamedValueWithMeta<T>) -> R,
-): DataTree<R> = map(typeOf<R>(), coroutineContext, metaTransform, block)
+): DataTree<R> = this@transform.transform(typeOf<R>(), metaTransform, coroutineContext, block)
 
-public inline fun <T : Any> DataSet<T>.forEach(block: (NamedData<T>) -> Unit) {
-    for (d in this) {
-        block(d)
+public inline fun <T> DataTree<T>.forEach(block: (NamedData<T>) -> Unit) {
+    asSequence().forEach(block)
+}
+
+// DataSet reduction
+
+@PublishedApi
+internal fun DataTree<*>.joinMeta(): Meta = Meta {
+    asSequence().forEach {
+        val token = NameToken("data", it.name.toString())
+        set(token, it.meta)
     }
 }
 
-public inline fun <T : Any, reified R : Any> DataSet<T>.reduceToData(
+public inline fun <T, reified R> DataTree<T>.reduceToData(
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline transformation: suspend (Iterable<NamedValueWithMeta<T>>) -> R,
-): Data<R> = asIterable().reduceNamedToData(coroutineContext, meta, transformation)
+): Data<R> = asSequence().asIterable().reduceNamedToData(meta, coroutineContext, transformation)
 
-public inline fun <T : Any, reified R : Any> DataSet<T>.foldToData(
+public inline fun <T, reified R> DataTree<T>.foldToData(
     initial: R,
+    meta: Meta = joinMeta(),
     coroutineContext: CoroutineContext = EmptyCoroutineContext,
-    meta: Meta = Meta.EMPTY,
     crossinline block: suspend (result: R, data: NamedValueWithMeta<T>) -> R,
-): Data<R> = asIterable().foldNamedToData(initial, coroutineContext, meta, block)
\ No newline at end of file
+): Data<R> = asSequence().asIterable().foldNamedToData(initial, meta, coroutineContext, block)
\ No newline at end of file
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
index 74d67d9d..48155595 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataFilterJvm.kt
@@ -1,12 +1,10 @@
 package space.kscience.dataforge.data
 
+import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.filter
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DFExperimental
+import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
-import kotlin.coroutines.CoroutineContext
-import kotlin.coroutines.EmptyCoroutineContext
 import kotlin.reflect.KType
 import kotlin.reflect.full.isSubtypeOf
 import kotlin.reflect.typeOf
@@ -16,7 +14,7 @@ import kotlin.reflect.typeOf
  * Cast the node to given type if the cast is possible or return null
  */
 @Suppress("UNCHECKED_CAST")
-private fun <R : Any> Data<*>.castOrNull(type: KType): Data<R>? =
+private fun <R> Data<*>.castOrNull(type: KType): Data<R>? =
     if (!this.type.isSubtypeOf(type)) {
         null
     } else {
@@ -25,61 +23,65 @@ private fun <R : Any> Data<*>.castOrNull(type: KType): Data<R>? =
         }
     }
 
+@Suppress("UNCHECKED_CAST")
+@DFInternal
+public fun <R> Sequence<NamedData<*>>.filterByDataType(type: KType): Sequence<NamedData<R>> =
+    filter { it.type.isSubtypeOf(type) } as Sequence<NamedData<R>>
+
+@Suppress("UNCHECKED_CAST")
+@DFInternal
+public fun <R> Flow<NamedData<*>>.filterByDataType(type: KType): Flow<NamedData<R>> =
+    filter { it.type.isSubtypeOf(type) } as Flow<NamedData<R>>
+
 /**
  * Select all data matching given type and filters. Does not modify paths
  *
- * @param predicate addition filtering condition based on item name and meta. By default, accepts all
+ * @param predicate additional filtering condition based on item name and meta. By default, accepts all.
  */
-@OptIn(DFExperimental::class)
-public fun <R : Any> DataSet<*>.filterByType(
+@DFInternal
+public fun <R> DataTree<*>.filterByType(
     type: KType,
-    predicate: (name: Name, meta: Meta) -> Boolean = { _, _ -> true },
-): DataSource<R> = object : DataSource<R> {
-    override val dataType = type
-
-    override val coroutineContext: CoroutineContext
-        get() = (this@filterByType as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
-
-    override val meta: Meta get() = this@filterByType.meta
-
-    private fun checkDatum(name: Name, datum: Data<*>): Boolean = datum.type.isSubtypeOf(type)
-            && predicate(name, datum.meta)
-
-    override fun iterator(): Iterator<NamedData<R>> = iterator {
-        for(d in this@filterByType){
-            if(checkDatum(d.name,d.data)){
-                @Suppress("UNCHECKED_CAST")
-                yield(d as NamedData<R>)
-            }
-        }
-    }
-
-    override fun get(name: Name): Data<R>? = this@filterByType[name]?.let { datum ->
-        if (checkDatum(name, datum)) datum.castOrNull(type) else null
-    }
-
-    override val updates: Flow<Name> = this@filterByType.updates.filter { name ->
-        get(name)?.let { datum ->
-            checkDatum(name, datum)
-        } ?: false
-    }
-}
+    predicate: DataFilter = DataFilter.EMPTY,
+): DataTree<R> = asSequence().filterByDataType<R>(type).filterData(predicate).toTree(type)
 
 /**
  * Select a single datum of the appropriate type
  */
-public inline fun <reified R : Any> DataSet<*>.filterByType(
-    noinline predicate: (name: Name, meta: Meta) -> Boolean = { _, _ -> true },
-): DataSet<R> = filterByType(typeOf<R>(), predicate)
+@OptIn(DFInternal::class)
+public inline fun <reified R : Any> DataTree<*>.filterByType(
+    predicate: DataFilter = DataFilter.EMPTY,
+): DataTree<R> = filterByType(typeOf<R>(), predicate)
 
 /**
  * Select a single datum if it is present and of given [type]
  */
-public fun <R : Any> DataSet<*>.getByType(type: KType, name: Name): NamedData<R>? =
+public fun <R> DataTree<*>.getByType(type: KType, name: Name): NamedData<R>? =
     get(name)?.castOrNull<R>(type)?.named(name)
 
-public inline fun <reified R : Any> DataSet<*>.getByType(name: Name): NamedData<R>? =
+public inline fun <reified R : Any> DataTree<*>.getByType(name: Name): NamedData<R>? =
     this@getByType.getByType(typeOf<R>(), name)
 
-public inline fun <reified R : Any> DataSet<*>.getByType(name: String): NamedData<R>? =
-    this@getByType.getByType(typeOf<R>(), Name.parse(name))
\ No newline at end of file
+public inline fun <reified R : Any> DataTree<*>.getByType(name: String): NamedData<R>? =
+    this@getByType.getByType(typeOf<R>(), Name.parse(name))
+
+/**
+ * Select all data matching given type and filters, also filtering the tree's update flow. Does not modify paths.
+ *
+ * @param predicate additional filtering condition based on item name and meta. By default, accepts all.
+ */
+@DFInternal
+public fun <R> ObservableDataTree<*>.filterByType(
+    type: KType,
+    scope: CoroutineScope,
+    predicate: DataFilter = DataFilter.EMPTY,
+): ObservableDataTree<R> = asSequence()
+    .filterByDataType<R>(type)
+    .filterData(predicate)
+    .toObservableTree(type, scope, updates().filterByDataType<R>(type).filterData(predicate))
+
+
+@OptIn(DFInternal::class)
+public inline fun <reified R> ObservableDataTree<*>.filterByType(
+    scope: CoroutineScope,
+    predicate: DataFilter = DataFilter.EMPTY,
+): ObservableDataTree<R> = filterByType(typeOf<R>(),scope,predicate)
\ No newline at end of file
diff --git a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
index cb222ea0..cfccb02b 100644
--- a/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
+++ b/dataforge-data/src/jvmMain/kotlin/space/kscience/dataforge/data/dataSetBuilderInContext.kt
@@ -1,40 +1,27 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.Job
-import kotlinx.coroutines.launch
 import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.plus
 
 
 /**
  * Append data to node
  */
-context(DataSetBuilder<T>) public infix fun <T : Any> String.put(data: Data<T>): Unit =
-    data(Name.parse(this), data)
+context(DataSink<T>)
+public infix fun <T : Any> String.put(data: Data<T>): Unit =
+    put(Name.parse(this), data)
 
 /**
  * Append node
  */
-context(DataSetBuilder<T>) public infix fun <T : Any> String.put(dataSet: DataSet<T>): Unit =
-    node(Name.parse(this), dataSet)
+context(DataSink<T>)
+public infix fun <T : Any> String.put(dataSet: DataTree<T>): Unit =
+    branch(this, dataSet)
 
 /**
  * Build and append node
  */
-context(DataSetBuilder<T>) public infix fun <T : Any> String.put(
-    block: DataSetBuilder<T>.() -> Unit,
-): Unit = node(Name.parse(this), block)
+context(DataSink<T>)
+public infix fun <T : Any> String.put(
+    block: DataSink<T>.() -> Unit,
+): Unit = branch(Name.parse(this), block)
 
-/**
- * Copy given data set and mirror its changes to this [DataTreeBuilder] in [this@setAndObserve]. Returns an update [Job]
- */
-context(DataSetBuilder<T>) public fun <T : Any> CoroutineScope.setAndWatch(
-    name: Name,
-    dataSet: DataSet<T>,
-): Job = launch {
-    node(name, dataSet)
-    dataSet.updates.collect { nameInBranch ->
-        data(name + nameInBranch, dataSet.get(nameInBranch))
-    }
-}
\ No newline at end of file
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
index b24c4f27..4aa6e6d4 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/ActionsTest.kt
@@ -1,7 +1,7 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.ExperimentalCoroutinesApi
 import kotlinx.coroutines.delay
+import kotlinx.coroutines.runBlocking
 import kotlinx.coroutines.test.runTest
 import org.junit.jupiter.api.Test
 import space.kscience.dataforge.actions.Action
@@ -10,13 +10,13 @@ import space.kscience.dataforge.actions.mapping
 import space.kscience.dataforge.misc.DFExperimental
 import kotlin.test.assertEquals
 
-@OptIn(DFExperimental::class, ExperimentalCoroutinesApi::class)
+@OptIn(DFExperimental::class)
 internal class ActionsTest {
     @Test
     fun testStaticMapAction() = runTest {
         val data: DataTree<Int> = DataTree {
             repeat(10) {
-                static(it.toString(), it)
+                wrap(it.toString(), it)
             }
         }
 
@@ -28,23 +28,26 @@ internal class ActionsTest {
     }
 
     @Test
-    fun testDynamicMapAction() = runTest {
-        val data: DataSourceBuilder<Int> = DataSource()
+    fun testDynamicMapAction() = runBlocking {
+        val source: MutableDataTree<Int> = MutableDataTree()
 
         val plusOne = Action.mapping<Int, Int> {
             result { it + 1 }
         }
 
-        val result = plusOne(data)
+        val result = plusOne(source)
+
 
         repeat(10) {
-            data.static(it.toString(), it)
+            source.wrap(it.toString(), it)
         }
 
-        delay(20)
+        delay(10)
+
+        source.close()
+        result.awaitClose()
 
         assertEquals(2, result["1"]?.await())
-        data.close()
     }
 
 }
\ No newline at end of file
diff --git a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
index b77f7ea2..95b7a7bd 100644
--- a/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
+++ b/dataforge-data/src/jvmTest/kotlin/space/kscience/dataforge/data/DataTreeBuilderTest.kt
@@ -1,6 +1,8 @@
 package space.kscience.dataforge.data
 
-import kotlinx.coroutines.*
+import kotlinx.coroutines.delay
+import kotlinx.coroutines.runBlocking
+import kotlinx.coroutines.test.runTest
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.asName
 import kotlin.test.Test
@@ -9,27 +11,26 @@ import kotlin.test.assertEquals
 
 internal class DataTreeBuilderTest {
     @Test
-    fun testTreeBuild() = runBlocking {
+    fun testTreeBuild() = runTest {
         val node = DataTree<Any> {
             "primary" put {
-                static("a", "a")
-                static("b", "b")
+                wrap("a", "a")
+                wrap("b", "b")
             }
-            static("c.d", "c.d")
-            static("c.f", "c.f")
-        }
-        runBlocking {
-            assertEquals("a", node["primary.a"]?.await())
-            assertEquals("b", node["primary.b"]?.await())
-            assertEquals("c.d", node["c.d"]?.await())
-            assertEquals("c.f", node["c.f"]?.await())
+            wrap("c.d", "c.d")
+            wrap("c.f", "c.f")
         }
+        assertEquals("a", node["primary.a"]?.await())
+        assertEquals("b", node["primary.b"]?.await())
+        assertEquals("c.d", node["c.d"]?.await())
+        assertEquals("c.f", node["c.f"]?.await())
+
     }
 
     @OptIn(DFExperimental::class)
     @Test
-    fun testDataUpdate() = runBlocking {
-        val updateData: DataTree<Any> = DataTree {
+    fun testDataUpdate() = runTest {
+        val updateData = DataTree<Any> {
             "update" put {
                 "a" put Data.static("a")
                 "b" put Data.static("b")
@@ -38,54 +39,30 @@ internal class DataTreeBuilderTest {
 
         val node = DataTree<Any> {
             "primary" put {
-                static("a", "a")
-                static("b", "b")
+                wrap("a", "a")
+                wrap("b", "b")
             }
-            static("root", "root")
-            populateFrom(updateData)
+            wrap("root", "root")
+            putAll(updateData)
         }
 
-        runBlocking {
-            assertEquals("a", node["update.a"]?.await())
-            assertEquals("a", node["primary.a"]?.await())
-        }
+        assertEquals("a", node["update.a"]?.await())
+        assertEquals("a", node["primary.a"]?.await())
     }
 
     @Test
     fun testDynamicUpdates() = runBlocking {
-        try {
-            lateinit var updateJob: Job
-            supervisorScope {
-                val subNode = DataSource<Int> {
-                    updateJob = launch {
-                        repeat(10) {
-                            delay(10)
-                            static("value", it)
-                        }
-                        delay(10)
-                    }
-                }
-                launch {
-                    subNode.updatesWithData.collect {
-                        println(it)
-                    }
-                }
-                val rootNode = DataSource<Int> {
-                    setAndWatch("sub".asName(), subNode)
-                }
+        val subNode = MutableDataTree<Int>()
 
-                launch {
-                    rootNode.updatesWithData.collect {
-                        println(it)
-                    }
-                }
-                updateJob.join()
-                assertEquals(9, rootNode["sub.value"]?.await())
-                cancel()
-            }
-        } catch (t: Throwable) {
-            if (t !is CancellationException) throw t
+        val rootNode = MutableDataTree<Int> {
+            watchBranch("sub".asName(), subNode)
         }
 
+        repeat(10) {
+            subNode.wrap("value[$it]", it)
+        }
+
+        delay(20)
+        assertEquals(9, rootNode["sub.value[9]"]?.await())
     }
 }
\ No newline at end of file
diff --git a/dataforge-io/README.md b/dataforge-io/README.md
index ec431a04..85e49e5a 100644
--- a/dataforge-io/README.md
+++ b/dataforge-io/README.md
@@ -6,18 +6,16 @@ IO module
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-io:0.7.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-io:0.8.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
 repositories {
     maven("https://repo.kotlin.link")
-    //uncomment to access development builds
-    //maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
     mavenCentral()
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-io:0.7.0")
+    implementation("space.kscience:dataforge-io:0.8.0")
 }
 ```
diff --git a/dataforge-io/build.gradle.kts b/dataforge-io/build.gradle.kts
index f7197197..5be52e61 100644
--- a/dataforge-io/build.gradle.kts
+++ b/dataforge-io/build.gradle.kts
@@ -4,7 +4,7 @@ plugins {
 
 description = "IO module"
 
-val ioVersion = "0.3.0"
+val ioVersion = "0.3.1"
 
 kscience {
     jvm()
diff --git a/dataforge-io/dataforge-io-yaml/README.md b/dataforge-io/dataforge-io-yaml/README.md
index 06a0efb4..20f5b4f6 100644
--- a/dataforge-io/dataforge-io-yaml/README.md
+++ b/dataforge-io/dataforge-io-yaml/README.md
@@ -6,18 +6,16 @@ YAML meta IO
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-io-yaml:0.7.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-io-yaml:0.8.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
 repositories {
     maven("https://repo.kotlin.link")
-    //uncomment to access development builds
-    //maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
     mavenCentral()
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-io-yaml:0.7.0")
+    implementation("space.kscience:dataforge-io-yaml:0.8.0")
 }
 ```
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Binary.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Binary.kt
index f85dd98e..de365519 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Binary.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/Binary.kt
@@ -71,6 +71,11 @@ internal class ByteArrayBinary(
 
     override fun view(offset: Int, binarySize: Int): ByteArrayBinary =
         ByteArrayBinary(array, start + offset, binarySize)
+
+    override fun toString(): String =
+        "ByteArrayBinary(array=$array, start=$start, size=$size)"
+
+
 }
 
 public fun ByteArray.asBinary(): Binary = ByteArrayBinary(this)
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/EnvelopeFormat.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/EnvelopeFormat.kt
index 0e998760..27a192d7 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/EnvelopeFormat.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/EnvelopeFormat.kt
@@ -10,10 +10,7 @@ import space.kscience.dataforge.names.asName
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf
 
-public interface EnvelopeFormat : IOFormat<Envelope> {
-
-    override val type: KType get() = typeOf<Envelope>()
-}
+public interface EnvelopeFormat : IOFormat<Envelope>
 
 public fun EnvelopeFormat.read(input: Source): Envelope = readFrom(input)
 
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt
index 390a8bf4..39fa2be1 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOFormat.kt
@@ -17,11 +17,7 @@ import kotlin.reflect.typeOf
 /**
  * Reader of a custom object from input
  */
-public interface IOReader<out T> {
-    /**
-     * The type of object being read
-     */
-    public val type: KType
+public fun interface IOReader<out T> {
 
     public fun readFrom(source: Source): T
 
@@ -32,7 +28,6 @@ public interface IOReader<out T> {
          * no-op reader for binaries.
          */
         public val binary: IOReader<Binary> = object : IOReader<Binary> {
-            override val type: KType = typeOf<Binary>()
 
             override fun readFrom(source: Source): Binary = source.readByteArray().asBinary()
 
@@ -42,8 +37,6 @@ public interface IOReader<out T> {
 }
 
 public inline fun <reified T> IOReader(crossinline read: Source.() -> T): IOReader<T> = object : IOReader<T> {
-    override val type: KType = typeOf<T>()
-
     override fun readFrom(source: Source): T = source.read()
 }
 
@@ -56,24 +49,24 @@ public fun interface IOWriter<in T> {
  */
 public interface IOFormat<T> : IOReader<T>, IOWriter<T>
 
-public fun <T : Any> Source.readWith(format: IOReader<T>): T = format.readFrom(this)
+public fun <T> Source.readWith(format: IOReader<T>): T = format.readFrom(this)
 
 /**
  * Read given binary as an object using given format
  */
-public fun <T : Any> Binary.readWith(format: IOReader<T>): T = read {
-    readWith(format)
+public fun <T> Binary.readWith(format: IOReader<T>): T = read {
+    this.readWith(format)
 }
 
 /**
  * Write an object to the [Sink] with given [format]
  */
-public fun <T : Any> Sink.writeWith(format: IOWriter<T>, obj: T): Unit =
+public fun <T> Sink.writeWith(format: IOWriter<T>, obj: T): Unit =
     format.writeTo(this, obj)
 
 
 @DfType(IO_FORMAT_TYPE)
-public interface IOFormatFactory<T : Any> : Factory<IOFormat<T>>, Named {
+public interface IOFormatFactory<T> : Factory<IOFormat<T>>, Named {
     /**
      * Explicit type for dynamic type checks
      */
@@ -86,7 +79,7 @@ public interface IOFormatFactory<T : Any> : Factory<IOFormat<T>>, Named {
     }
 }
 
-public fun <T : Any> Binary(obj: T, format: IOWriter<T>): Binary = Binary { format.writeTo(this, obj) }
+public fun <T> Binary(obj: T, format: IOWriter<T>): Binary = Binary { format.writeTo(this, obj) }
 
 public object FloatIOFormat : IOFormat<Float>, IOFormatFactory<Float> {
     override fun build(context: Context, meta: Meta): IOFormat<Float> = this
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt
index c3248021..0d79da4d 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/IOPlugin.kt
@@ -5,7 +5,6 @@ import space.kscience.dataforge.io.EnvelopeFormatFactory.Companion.ENVELOPE_FORM
 import space.kscience.dataforge.io.IOFormatFactory.Companion.IO_FORMAT_TYPE
 import space.kscience.dataforge.io.MetaFormatFactory.Companion.META_FORMAT_TYPE
 import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.string
 import space.kscience.dataforge.misc.DFInternal
 import space.kscience.dataforge.names.Name
@@ -21,11 +20,11 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
 
     @Suppress("UNCHECKED_CAST")
     @DFInternal
-    public fun <T : Any> resolveIOFormat(type: KType, meta: Meta): IOFormat<T>? =
+    public fun <T> resolveIOFormat(type: KType, meta: Meta): IOFormat<T>? =
         ioFormatFactories.singleOrNull { it.type == type }?.build(context, meta) as? IOFormat<T>
 
     @OptIn(DFInternal::class)
-    public inline fun <reified T : Any> resolveIOFormat(meta: Meta = Meta.EMPTY): IOFormat<T>? =
+    public inline fun <reified T> resolveIOFormat(meta: Meta = Meta.EMPTY): IOFormat<T>? =
         resolveIOFormat(typeOf<T>(), meta)
 
 
diff --git a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/MetaFormat.kt b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/MetaFormat.kt
index f864dd2f..b36ddc2a 100644
--- a/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/MetaFormat.kt
+++ b/dataforge-io/src/commonMain/kotlin/space/kscience/dataforge/io/MetaFormat.kt
@@ -21,8 +21,6 @@ import kotlin.reflect.typeOf
  */
 public interface MetaFormat : IOFormat<Meta> {
 
-    override val type: KType get() = typeOf<Meta>()
-
     override fun writeTo(sink: Sink, obj: Meta) {
         writeMeta(sink, obj, null)
     }
diff --git a/dataforge-meta/README.md b/dataforge-meta/README.md
index 5f214640..bd11ebf1 100644
--- a/dataforge-meta/README.md
+++ b/dataforge-meta/README.md
@@ -6,18 +6,16 @@ Meta definition and basic operations on meta
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-meta:0.7.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-meta:0.8.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
 repositories {
     maven("https://repo.kotlin.link")
-    //uncomment to access development builds
-    //maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
     mavenCentral()
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-meta:0.7.0")
+    implementation("space.kscience:dataforge-meta:0.8.0")
 }
 ```
diff --git a/dataforge-meta/api/dataforge-meta.api b/dataforge-meta/api/dataforge-meta.api
index 1700ca7d..49f28259 100644
--- a/dataforge-meta/api/dataforge-meta.api
+++ b/dataforge-meta/api/dataforge-meta.api
@@ -56,6 +56,20 @@ public final class space/kscience/dataforge/meta/JsonMetaKt {
 	public static final fun toValue (Lkotlinx/serialization/json/JsonPrimitive;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)Lspace/kscience/dataforge/meta/Value;
 }
 
+public final class space/kscience/dataforge/meta/KeepTransformationRule : space/kscience/dataforge/meta/TransformationRule {
+	public fun <init> (Lkotlin/jvm/functions/Function1;)V
+	public final fun component1 ()Lkotlin/jvm/functions/Function1;
+	public final fun copy (Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/KeepTransformationRule;
+	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/KeepTransformationRule;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/KeepTransformationRule;
+	public fun equals (Ljava/lang/Object;)Z
+	public final fun getSelector ()Lkotlin/jvm/functions/Function1;
+	public fun hashCode ()I
+	public fun matches (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
+	public fun selectItems (Lspace/kscience/dataforge/meta/Meta;)Lkotlin/sequences/Sequence;
+	public fun toString ()Ljava/lang/String;
+	public fun transformItem (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MutableMeta;)V
+}
+
 public final class space/kscience/dataforge/meta/Laminate : space/kscience/dataforge/meta/TypedMeta {
 	public static final field Companion Lspace/kscience/dataforge/meta/Laminate$Companion;
 	public fun equals (Ljava/lang/Object;)Z
@@ -159,6 +173,32 @@ public final class space/kscience/dataforge/meta/MetaBuilder : space/kscience/da
 public abstract interface annotation class space/kscience/dataforge/meta/MetaBuilderMarker : java/lang/annotation/Annotation {
 }
 
+public abstract interface class space/kscience/dataforge/meta/MetaConverter : space/kscience/dataforge/meta/MetaSpec {
+	public static final field Companion Lspace/kscience/dataforge/meta/MetaConverter$Companion;
+	public abstract fun convert (Ljava/lang/Object;)Lspace/kscience/dataforge/meta/Meta;
+	public fun getDescriptor ()Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
+	public fun read (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+	public abstract fun readOrNull (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+}
+
+public final class space/kscience/dataforge/meta/MetaConverter$Companion {
+	public final fun getBoolean ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getDouble ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getFloat ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getInt ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getLong ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getMeta ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getNumber ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getString ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun getValue ()Lspace/kscience/dataforge/meta/MetaConverter;
+	public final fun valueList (Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/MetaConverter;
+	public static synthetic fun valueList$default (Lspace/kscience/dataforge/meta/MetaConverter$Companion;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/MetaConverter;
+}
+
+public final class space/kscience/dataforge/meta/MetaConverterKt {
+	public static final fun convertNullable (Lspace/kscience/dataforge/meta/MetaConverter;Ljava/lang/Object;)Lspace/kscience/dataforge/meta/Meta;
+}
+
 public final class space/kscience/dataforge/meta/MetaDelegateKt {
 	public static final fun boolean (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun boolean (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lkotlin/properties/ReadOnlyProperty;
@@ -178,20 +218,24 @@ public final class space/kscience/dataforge/meta/MetaDelegateKt {
 	public static final fun int (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MetaProvider;ILspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
+	public static final fun listOfSpec (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
+	public static synthetic fun listOfSpec$default (Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun long (Lspace/kscience/dataforge/meta/MetaProvider;JLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun long (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MetaProvider;JLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun node (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
-	public static final fun node (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/transformations/MetaConverter;)Lkotlin/properties/ReadOnlyProperty;
+	public static final fun node (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaSpec;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
-	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/transformations/MetaConverter;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
+	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaSpec;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun number (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun number (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun number (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static synthetic fun number$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
+	public static final fun spec (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
+	public static synthetic fun spec$default (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Ljava/lang/String;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadOnlyProperty;
 	public static final fun string (Lspace/kscience/dataforge/meta/MetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lkotlin/properties/ReadOnlyProperty;
@@ -252,6 +296,47 @@ public final class space/kscience/dataforge/meta/MetaSerializer : kotlinx/serial
 	public fun serialize (Lkotlinx/serialization/encoding/Encoder;Lspace/kscience/dataforge/meta/Meta;)V
 }
 
+public abstract interface class space/kscience/dataforge/meta/MetaSpec : space/kscience/dataforge/meta/descriptors/Described {
+	public fun read (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+	public abstract fun readOrNull (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+}
+
+public final class space/kscience/dataforge/meta/MetaSpecKt {
+	public static final fun readNullable (Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+	public static final fun readValue (Lspace/kscience/dataforge/meta/MetaSpec;Lspace/kscience/dataforge/meta/Value;)Ljava/lang/Object;
+}
+
+public final class space/kscience/dataforge/meta/MetaTransformation {
+	public static final field Companion Lspace/kscience/dataforge/meta/MetaTransformation$Companion;
+	public static final fun apply-impl (Ljava/util/Collection;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/Meta;
+	public static final fun bind-impl (Ljava/util/Collection;Lspace/kscience/dataforge/meta/ObservableMeta;Lspace/kscience/dataforge/meta/MutableMeta;)V
+	public static final synthetic fun box-impl (Ljava/util/Collection;)Lspace/kscience/dataforge/meta/MetaTransformation;
+	public static fun constructor-impl (Ljava/util/Collection;)Ljava/util/Collection;
+	public fun equals (Ljava/lang/Object;)Z
+	public static fun equals-impl (Ljava/util/Collection;Ljava/lang/Object;)Z
+	public static final fun equals-impl0 (Ljava/util/Collection;Ljava/util/Collection;)Z
+	public static final fun generate-impl (Ljava/util/Collection;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/Meta;
+	public fun hashCode ()I
+	public static fun hashCode-impl (Ljava/util/Collection;)I
+	public fun toString ()Ljava/lang/String;
+	public static fun toString-impl (Ljava/util/Collection;)Ljava/lang/String;
+	public final synthetic fun unbox-impl ()Ljava/util/Collection;
+}
+
+public final class space/kscience/dataforge/meta/MetaTransformation$Companion {
+	public final fun make--mWxz5M (Lkotlin/jvm/functions/Function1;)Ljava/util/Collection;
+}
+
+public final class space/kscience/dataforge/meta/MetaTransformationBuilder {
+	public fun <init> ()V
+	public final fun build-m6Fha10 ()Ljava/util/Collection;
+	public final fun keep (Ljava/lang/String;)V
+	public final fun keep (Lkotlin/jvm/functions/Function1;)V
+	public final fun keep (Lspace/kscience/dataforge/names/Name;)V
+	public final fun move (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)V
+	public static synthetic fun move$default (Lspace/kscience/dataforge/meta/MetaTransformationBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
+}
+
 public abstract interface class space/kscience/dataforge/meta/MutableMeta : space/kscience/dataforge/meta/Meta, space/kscience/dataforge/meta/MutableMetaProvider {
 	public static final field Companion Lspace/kscience/dataforge/meta/MutableMeta$Companion;
 	public synthetic fun get (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/Meta;
@@ -293,6 +378,8 @@ public final class space/kscience/dataforge/meta/MutableMetaDelegateKt {
 	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun boolean$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;ZLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun convertable (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun convertable$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun double (Lspace/kscience/dataforge/meta/MutableMetaProvider;DLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun double (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun double$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;DLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
@@ -307,6 +394,8 @@ public final class space/kscience/dataforge/meta/MutableMetaDelegateKt {
 	public static final fun int (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;ILspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun int$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun listOfConvertable (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun listOfConvertable$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/MetaConverter;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun listValue (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun listValue$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun long (Lspace/kscience/dataforge/meta/MutableMetaProvider;JLspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
@@ -314,9 +403,9 @@ public final class space/kscience/dataforge/meta/MutableMetaDelegateKt {
 	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;JLspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun long$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun node (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun node (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/transformations/MetaConverter;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun node (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaConverter;)Lkotlin/properties/ReadWriteProperty;
 	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/transformations/MetaConverter;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/MetaConverter;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun number (Lspace/kscience/dataforge/meta/MutableMetaProvider;Ljava/lang/Number;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun number (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
 	public static final fun number (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function0;)Lkotlin/properties/ReadWriteProperty;
@@ -365,7 +454,7 @@ public final class space/kscience/dataforge/meta/MutableMetaKt {
 	public static final fun set (Lspace/kscience/dataforge/meta/MutableValueProvider;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Value;)V
 	public static final fun setIndexed (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Ljava/lang/Iterable;Lkotlin/jvm/functions/Function2;)V
 	public static synthetic fun setIndexed$default (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/names/Name;Ljava/lang/Iterable;Lkotlin/jvm/functions/Function2;ILjava/lang/Object;)V
-	public static final fun toMutableMeta (Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
+	public static final fun toMutableMeta (Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/MutableMeta;
 	public static final fun update (Lspace/kscience/dataforge/meta/MutableMetaProvider;Lspace/kscience/dataforge/meta/Meta;)V
 	public static final fun withDefault (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/MetaProvider;)Lspace/kscience/dataforge/meta/MutableMeta;
 }
@@ -419,11 +508,6 @@ public abstract interface class space/kscience/dataforge/meta/ObservableMeta : s
 	public abstract fun removeListener (Ljava/lang/Object;)V
 }
 
-public final class space/kscience/dataforge/meta/ObservableMetaKt {
-	public static final fun useProperty (Lspace/kscience/dataforge/meta/Scheme;Lkotlin/reflect/KProperty1;Ljava/lang/Object;Lkotlin/jvm/functions/Function2;)V
-	public static synthetic fun useProperty$default (Lspace/kscience/dataforge/meta/Scheme;Lkotlin/reflect/KProperty1;Ljava/lang/Object;Lkotlin/jvm/functions/Function2;ILjava/lang/Object;)V
-}
-
 public final class space/kscience/dataforge/meta/ObservableMetaWrapperKt {
 	public static final fun asObservable (Lspace/kscience/dataforge/meta/MutableMeta;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
 }
@@ -437,10 +521,19 @@ public abstract interface class space/kscience/dataforge/meta/ObservableMutableM
 	public abstract fun getOrCreate (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/meta/ObservableMutableMeta;
 }
 
-public abstract interface class space/kscience/dataforge/meta/ReadOnlySpecification : space/kscience/dataforge/meta/descriptors/Described {
-	public abstract fun empty ()Ljava/lang/Object;
-	public fun invoke (Lkotlin/jvm/functions/Function1;)Ljava/lang/Object;
-	public abstract fun read (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+public final class space/kscience/dataforge/meta/RegexItemTransformationRule : space/kscience/dataforge/meta/TransformationRule {
+	public fun <init> (Lkotlin/text/Regex;Lkotlin/jvm/functions/Function4;)V
+	public final fun component1 ()Lkotlin/text/Regex;
+	public final fun component2 ()Lkotlin/jvm/functions/Function4;
+	public final fun copy (Lkotlin/text/Regex;Lkotlin/jvm/functions/Function4;)Lspace/kscience/dataforge/meta/RegexItemTransformationRule;
+	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/RegexItemTransformationRule;Lkotlin/text/Regex;Lkotlin/jvm/functions/Function4;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/RegexItemTransformationRule;
+	public fun equals (Ljava/lang/Object;)Z
+	public final fun getFrom ()Lkotlin/text/Regex;
+	public final fun getTransform ()Lkotlin/jvm/functions/Function4;
+	public fun hashCode ()I
+	public fun matches (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
+	public fun toString ()Ljava/lang/String;
+	public fun transformItem (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MutableMeta;)V
 }
 
 public class space/kscience/dataforge/meta/Scheme : space/kscience/dataforge/meta/Configurable, space/kscience/dataforge/meta/MetaRepr, space/kscience/dataforge/meta/MutableMetaProvider, space/kscience/dataforge/meta/descriptors/Described {
@@ -454,6 +547,7 @@ public class space/kscience/dataforge/meta/Scheme : space/kscience/dataforge/met
 	public fun setValue (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Value;)V
 	public fun toMeta ()Lspace/kscience/dataforge/meta/Laminate;
 	public synthetic fun toMeta ()Lspace/kscience/dataforge/meta/Meta;
+	public fun toString ()Ljava/lang/String;
 	public fun validate (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
 }
 
@@ -461,20 +555,30 @@ public final class space/kscience/dataforge/meta/SchemeKt {
 	public static final fun copy (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Scheme;
 	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/Scheme;
 	public static final fun invoke (Lspace/kscience/dataforge/meta/Scheme;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Scheme;
-	public static final fun retarget (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/MutableMeta;)Lspace/kscience/dataforge/meta/Scheme;
+	public static final fun scheme (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun scheme (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun scheme$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun scheme$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun schemeOrNull (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun schemeOrNull (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun schemeOrNull$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static synthetic fun schemeOrNull$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
+	public static final fun updateWith (Lspace/kscience/dataforge/meta/Configurable;Lspace/kscience/dataforge/meta/SchemeSpec;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Scheme;
+	public static final fun updateWith (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/SchemeSpec;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Scheme;
+	public static final fun useProperty (Lspace/kscience/dataforge/meta/Scheme;Lkotlin/reflect/KProperty1;Ljava/lang/Object;Lkotlin/jvm/functions/Function2;)V
+	public static synthetic fun useProperty$default (Lspace/kscience/dataforge/meta/Scheme;Lkotlin/reflect/KProperty1;Ljava/lang/Object;Lkotlin/jvm/functions/Function2;ILjava/lang/Object;)V
 }
 
-public class space/kscience/dataforge/meta/SchemeSpec : space/kscience/dataforge/meta/Specification {
+public class space/kscience/dataforge/meta/SchemeSpec : space/kscience/dataforge/meta/MetaConverter {
 	public fun <init> (Lkotlin/jvm/functions/Function0;)V
-	public synthetic fun empty ()Ljava/lang/Object;
-	public fun empty ()Lspace/kscience/dataforge/meta/Scheme;
+	public synthetic fun convert (Ljava/lang/Object;)Lspace/kscience/dataforge/meta/Meta;
+	public fun convert (Lspace/kscience/dataforge/meta/Scheme;)Lspace/kscience/dataforge/meta/Meta;
+	public final fun empty ()Lspace/kscience/dataforge/meta/Scheme;
 	public fun getDescriptor ()Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
-	public synthetic fun invoke (Lkotlin/jvm/functions/Function1;)Ljava/lang/Object;
 	public final fun invoke (Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Scheme;
-	public synthetic fun read (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
-	public fun read (Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/Scheme;
-	public synthetic fun write (Lspace/kscience/dataforge/meta/MutableMeta;)Ljava/lang/Object;
-	public fun write (Lspace/kscience/dataforge/meta/MutableMeta;)Lspace/kscience/dataforge/meta/Scheme;
+	public synthetic fun readOrNull (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
+	public fun readOrNull (Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/Scheme;
+	public final fun write (Lspace/kscience/dataforge/meta/MutableMeta;)Lspace/kscience/dataforge/meta/Scheme;
 }
 
 public final class space/kscience/dataforge/meta/SealedMeta : space/kscience/dataforge/meta/TypedMeta {
@@ -515,21 +619,20 @@ public final class space/kscience/dataforge/meta/SealedMetaKt {
 	public static final fun seal (Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/SealedMeta;
 }
 
-public abstract interface class space/kscience/dataforge/meta/Specification : space/kscience/dataforge/meta/ReadOnlySpecification {
-	public abstract fun write (Lspace/kscience/dataforge/meta/MutableMeta;)Ljava/lang/Object;
-}
-
-public final class space/kscience/dataforge/meta/SpecificationKt {
-	public static final fun spec (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun spec (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun spec$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun spec$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun specOrNull (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun specOrNull (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun specOrNull$default (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static synthetic fun specOrNull$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/Specification;Lspace/kscience/dataforge/names/Name;ILjava/lang/Object;)Lkotlin/properties/ReadWriteProperty;
-	public static final fun updateWith (Lspace/kscience/dataforge/meta/Configurable;Lspace/kscience/dataforge/meta/Specification;Lkotlin/jvm/functions/Function1;)Ljava/lang/Object;
-	public static final fun updateWith (Lspace/kscience/dataforge/meta/MutableMeta;Lspace/kscience/dataforge/meta/Specification;Lkotlin/jvm/functions/Function1;)Ljava/lang/Object;
+public final class space/kscience/dataforge/meta/SingleItemTransformationRule : space/kscience/dataforge/meta/TransformationRule {
+	public fun <init> (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function3;)V
+	public final fun component1 ()Lspace/kscience/dataforge/names/Name;
+	public final fun component2 ()Lkotlin/jvm/functions/Function3;
+	public final fun copy (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function3;)Lspace/kscience/dataforge/meta/SingleItemTransformationRule;
+	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/SingleItemTransformationRule;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function3;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/SingleItemTransformationRule;
+	public fun equals (Ljava/lang/Object;)Z
+	public final fun getFrom ()Lspace/kscience/dataforge/names/Name;
+	public final fun getTransform ()Lkotlin/jvm/functions/Function3;
+	public fun hashCode ()I
+	public fun matches (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
+	public fun selectItems (Lspace/kscience/dataforge/meta/Meta;)Lkotlin/sequences/Sequence;
+	public fun toString ()Ljava/lang/String;
+	public fun transformItem (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MutableMeta;)V
 }
 
 public final class space/kscience/dataforge/meta/StringValue : space/kscience/dataforge/meta/Value {
@@ -550,6 +653,12 @@ public final class space/kscience/dataforge/meta/StringValue : space/kscience/da
 	public final synthetic fun unbox-impl ()Ljava/lang/String;
 }
 
+public abstract interface class space/kscience/dataforge/meta/TransformationRule {
+	public abstract fun matches (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
+	public fun selectItems (Lspace/kscience/dataforge/meta/Meta;)Lkotlin/sequences/Sequence;
+	public abstract fun transformItem (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MutableMeta;)V
+}
+
 public final class space/kscience/dataforge/meta/True : space/kscience/dataforge/meta/Value {
 	public static final field INSTANCE Lspace/kscience/dataforge/meta/True;
 	public fun equals (Ljava/lang/Object;)Z
@@ -692,6 +801,7 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptor {
 	public final fun getDescription ()Ljava/lang/String;
 	public final fun getIndexKey ()Ljava/lang/String;
 	public final fun getMultiple ()Z
+	public final fun getNodes ()Ljava/util/Map;
 	public final fun getValueRestriction ()Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
 	public final fun getValueTypes ()Ljava/util/List;
 	public fun hashCode ()I
@@ -720,6 +830,7 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorBuild
 	public final fun attributes (Lkotlin/jvm/functions/Function1;)V
 	public final fun build ()Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
 	public final fun default (Ljava/lang/Object;)V
+	public final fun from (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)V
 	public final fun getAllowedValues ()Ljava/util/List;
 	public final fun getAttributes ()Lspace/kscience/dataforge/meta/MutableMeta;
 	public final fun getChildren ()Ljava/util/Map;
@@ -730,10 +841,6 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorBuild
 	public final fun getMultiple ()Z
 	public final fun getValueRestriction ()Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
 	public final fun getValueTypes ()Ljava/util/List;
-	public final fun item (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
-	public static synthetic fun item$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
-	public final fun node (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
-	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
 	public final fun setAllowedValues (Ljava/util/List;)V
 	public final fun setAttributes (Lspace/kscience/dataforge/meta/MutableMeta;)V
 	public final fun setChildren (Ljava/util/Map;)V
@@ -751,16 +858,16 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorBuild
 	public static final fun MetaDescriptor (Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
 	public static final fun copy (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
 	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
-	public static final fun item (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
 	public static final fun node (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lkotlin/jvm/functions/Function1;)V
 	public static final fun node (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/descriptors/Described;Lkotlin/jvm/functions/Function1;)V
-	public static final fun node (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
+	public static final fun node (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;)V
+	public static final fun node (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)V
 	public static synthetic fun node$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/descriptors/Described;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
 	public static final fun required (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;)V
-	public static final fun value (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
-	public static final fun value (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
-	public static synthetic fun value$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
-	public static synthetic fun value$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;
+	public static final fun value (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;)V
+	public static final fun value (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;)V
+	public static synthetic fun value$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Ljava/lang/String;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
+	public static synthetic fun value$default (Lspace/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/ValueType;[Lspace/kscience/dataforge/meta/ValueType;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
 }
 
 public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorKt {
@@ -774,6 +881,7 @@ public final class space/kscience/dataforge/meta/descriptors/MetaDescriptorKt {
 
 public final class space/kscience/dataforge/meta/descriptors/ValueRestriction : java/lang/Enum {
 	public static final field ABSENT Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
+	public static final field Companion Lspace/kscience/dataforge/meta/descriptors/ValueRestriction$Companion;
 	public static final field NONE Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
 	public static final field REQUIRED Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
 	public static fun getEntries ()Lkotlin/enums/EnumEntries;
@@ -781,115 +889,8 @@ public final class space/kscience/dataforge/meta/descriptors/ValueRestriction :
 	public static fun values ()[Lspace/kscience/dataforge/meta/descriptors/ValueRestriction;
 }
 
-public final class space/kscience/dataforge/meta/transformations/KeepTransformationRule : space/kscience/dataforge/meta/transformations/TransformationRule {
-	public fun <init> (Lkotlin/jvm/functions/Function1;)V
-	public final fun component1 ()Lkotlin/jvm/functions/Function1;
-	public final fun copy (Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/transformations/KeepTransformationRule;
-	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/transformations/KeepTransformationRule;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/transformations/KeepTransformationRule;
-	public fun equals (Ljava/lang/Object;)Z
-	public final fun getSelector ()Lkotlin/jvm/functions/Function1;
-	public fun hashCode ()I
-	public fun matches (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
-	public fun selectItems (Lspace/kscience/dataforge/meta/Meta;)Lkotlin/sequences/Sequence;
-	public fun toString ()Ljava/lang/String;
-	public fun transformItem (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MutableMeta;)V
-}
-
-public abstract interface class space/kscience/dataforge/meta/transformations/MetaConverter {
-	public static final field Companion Lspace/kscience/dataforge/meta/transformations/MetaConverter$Companion;
-	public fun getDescriptor ()Lspace/kscience/dataforge/meta/descriptors/MetaDescriptor;
-	public abstract fun getType ()Lkotlin/reflect/KType;
-	public fun metaToObject (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
-	public abstract fun metaToObjectOrNull (Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
-	public abstract fun objectToMeta (Ljava/lang/Object;)Lspace/kscience/dataforge/meta/Meta;
-}
-
-public final class space/kscience/dataforge/meta/transformations/MetaConverter$Companion {
-	public final fun getBoolean ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun getDouble ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun getFloat ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun getInt ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun getLong ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun getMeta ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun getNumber ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun getString ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun getValue ()Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public final fun valueList (Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-	public static synthetic fun valueList$default (Lspace/kscience/dataforge/meta/transformations/MetaConverter$Companion;Lkotlin/jvm/functions/Function1;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/transformations/MetaConverter;
-}
-
-public final class space/kscience/dataforge/meta/transformations/MetaConverterKt {
-	public static final fun nullableMetaToObject (Lspace/kscience/dataforge/meta/transformations/MetaConverter;Lspace/kscience/dataforge/meta/Meta;)Ljava/lang/Object;
-	public static final fun nullableObjectToMeta (Lspace/kscience/dataforge/meta/transformations/MetaConverter;Ljava/lang/Object;)Lspace/kscience/dataforge/meta/Meta;
-	public static final fun valueToObject (Lspace/kscience/dataforge/meta/transformations/MetaConverter;Lspace/kscience/dataforge/meta/Value;)Ljava/lang/Object;
-}
-
-public final class space/kscience/dataforge/meta/transformations/MetaTransformation {
-	public static final field Companion Lspace/kscience/dataforge/meta/transformations/MetaTransformation$Companion;
-	public static final fun apply-impl (Ljava/util/Collection;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/Meta;
-	public static final fun bind-impl (Ljava/util/Collection;Lspace/kscience/dataforge/meta/ObservableMeta;Lspace/kscience/dataforge/meta/MutableMeta;)V
-	public static final synthetic fun box-impl (Ljava/util/Collection;)Lspace/kscience/dataforge/meta/transformations/MetaTransformation;
-	public static fun constructor-impl (Ljava/util/Collection;)Ljava/util/Collection;
-	public fun equals (Ljava/lang/Object;)Z
-	public static fun equals-impl (Ljava/util/Collection;Ljava/lang/Object;)Z
-	public static final fun equals-impl0 (Ljava/util/Collection;Ljava/util/Collection;)Z
-	public static final fun generate-impl (Ljava/util/Collection;Lspace/kscience/dataforge/meta/Meta;)Lspace/kscience/dataforge/meta/Meta;
-	public fun hashCode ()I
-	public static fun hashCode-impl (Ljava/util/Collection;)I
-	public fun toString ()Ljava/lang/String;
-	public static fun toString-impl (Ljava/util/Collection;)Ljava/lang/String;
-	public final synthetic fun unbox-impl ()Ljava/util/Collection;
-}
-
-public final class space/kscience/dataforge/meta/transformations/MetaTransformation$Companion {
-	public final fun make-XNaMui4 (Lkotlin/jvm/functions/Function1;)Ljava/util/Collection;
-}
-
-public final class space/kscience/dataforge/meta/transformations/MetaTransformationBuilder {
-	public fun <init> ()V
-	public final fun build-050menU ()Ljava/util/Collection;
-	public final fun keep (Ljava/lang/String;)V
-	public final fun keep (Lkotlin/jvm/functions/Function1;)V
-	public final fun keep (Lspace/kscience/dataforge/names/Name;)V
-	public final fun move (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)V
-	public static synthetic fun move$default (Lspace/kscience/dataforge/meta/transformations/MetaTransformationBuilder;Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
-}
-
-public final class space/kscience/dataforge/meta/transformations/RegexItemTransformationRule : space/kscience/dataforge/meta/transformations/TransformationRule {
-	public fun <init> (Lkotlin/text/Regex;Lkotlin/jvm/functions/Function4;)V
-	public final fun component1 ()Lkotlin/text/Regex;
-	public final fun component2 ()Lkotlin/jvm/functions/Function4;
-	public final fun copy (Lkotlin/text/Regex;Lkotlin/jvm/functions/Function4;)Lspace/kscience/dataforge/meta/transformations/RegexItemTransformationRule;
-	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/transformations/RegexItemTransformationRule;Lkotlin/text/Regex;Lkotlin/jvm/functions/Function4;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/transformations/RegexItemTransformationRule;
-	public fun equals (Ljava/lang/Object;)Z
-	public final fun getFrom ()Lkotlin/text/Regex;
-	public final fun getTransform ()Lkotlin/jvm/functions/Function4;
-	public fun hashCode ()I
-	public fun matches (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
-	public fun toString ()Ljava/lang/String;
-	public fun transformItem (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MutableMeta;)V
-}
-
-public final class space/kscience/dataforge/meta/transformations/SingleItemTransformationRule : space/kscience/dataforge/meta/transformations/TransformationRule {
-	public fun <init> (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function3;)V
-	public final fun component1 ()Lspace/kscience/dataforge/names/Name;
-	public final fun component2 ()Lkotlin/jvm/functions/Function3;
-	public final fun copy (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function3;)Lspace/kscience/dataforge/meta/transformations/SingleItemTransformationRule;
-	public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/transformations/SingleItemTransformationRule;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function3;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/transformations/SingleItemTransformationRule;
-	public fun equals (Ljava/lang/Object;)Z
-	public final fun getFrom ()Lspace/kscience/dataforge/names/Name;
-	public final fun getTransform ()Lkotlin/jvm/functions/Function3;
-	public fun hashCode ()I
-	public fun matches (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
-	public fun selectItems (Lspace/kscience/dataforge/meta/Meta;)Lkotlin/sequences/Sequence;
-	public fun toString ()Ljava/lang/String;
-	public fun transformItem (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MutableMeta;)V
-}
-
-public abstract interface class space/kscience/dataforge/meta/transformations/TransformationRule {
-	public abstract fun matches (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;)Z
-	public fun selectItems (Lspace/kscience/dataforge/meta/Meta;)Lkotlin/sequences/Sequence;
-	public abstract fun transformItem (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/meta/Meta;Lspace/kscience/dataforge/meta/MutableMeta;)V
+public final class space/kscience/dataforge/meta/descriptors/ValueRestriction$Companion {
+	public final fun serializer ()Lkotlinx/serialization/KSerializer;
 }
 
 public final class space/kscience/dataforge/misc/CastJvmKt {
@@ -972,6 +973,7 @@ public final class space/kscience/dataforge/names/NameKt {
 	public static final fun startsWith (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Z
 	public static final fun startsWith (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;)Z
 	public static final fun startsWith (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/NameToken;)Z
+	public static final fun toStringUnescaped (Lspace/kscience/dataforge/names/Name;)Ljava/lang/String;
 	public static final fun withIndex (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Lspace/kscience/dataforge/names/Name;
 }
 
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
index 36373582..8da8b2d3 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/JsonMeta.kt
@@ -31,7 +31,7 @@ private fun Meta.toJsonWithIndex(descriptor: MetaDescriptor?, index: String?): J
     val pairs: MutableList<Pair<String, JsonElement>> = items.entries.groupBy {
         it.key.body
     }.mapTo(ArrayList()) { (body, list) ->
-        val childDescriptor = descriptor?.children?.get(body)
+        val childDescriptor = descriptor?.nodes?.get(body)
         if (list.size == 1) {
             val (token, element) = list.first()
                 //do not add an empty element
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt
index 979c8782..5cf53c75 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Meta.kt
@@ -188,10 +188,12 @@ public operator fun <M : TypedMeta<M>> M?.get(key: String): M? = this?.get(key.p
 
 
 /**
- * Get a sequence of [Name]-[Value] pairs using top-down traversal of the tree
+ * Get a sequence of [Name]-[Value] pairs using top-down traversal of the tree.
+ * The sequence includes the root value with an empty name.
  */
 public fun Meta.valueSequence(): Sequence<Pair<Name, Value>> = sequence {
+    value?.let { yield(Name.EMPTY to it) }
     items.forEach { (key, item) ->
         item.value?.let { itemValue ->
             yield(key.asName() to itemValue)
         }
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
new file mode 100644
index 00000000..8959ae4a
--- /dev/null
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaConverter.kt
@@ -0,0 +1,158 @@
+package space.kscience.dataforge.meta
+
+import kotlinx.serialization.KSerializer
+import kotlinx.serialization.json.Json
+import kotlinx.serialization.json.encodeToJsonElement
+import kotlinx.serialization.serializer
+import space.kscience.dataforge.meta.descriptors.MetaDescriptor
+import space.kscience.dataforge.misc.DFExperimental
+
+
+/**
+ * A converter of generic object to and from [Meta]
+ */
+public interface MetaConverter<T>: MetaSpec<T> {
+
+    /**
+     * A descriptor for resulting meta
+     */
+    override val descriptor: MetaDescriptor? get() = null
+
+    /**
+     * Attempt conversion of [source] to an object or return null if conversion failed
+     */
+    override fun readOrNull(source: Meta): T?
+
+    override fun read(source: Meta): T =
+        readOrNull(source) ?: error("Meta $source could not be interpreted by $this")
+
+    public fun convert(obj: T): Meta
+
+    public companion object {
+
+        public val meta: MetaConverter<Meta> = object : MetaConverter<Meta> {
+            override fun readOrNull(source: Meta): Meta = source
+            override fun convert(obj: Meta): Meta = obj
+        }
+
+        public val value: MetaConverter<Value> = object : MetaConverter<Value> {
+            override fun readOrNull(source: Meta): Value? = source.value
+            override fun convert(obj: Value): Meta = Meta(obj)
+        }
+
+        public val string: MetaConverter<String> = object : MetaConverter<String> {
+            override val descriptor: MetaDescriptor = MetaDescriptor {
+                valueType(ValueType.STRING)
+            }
+
+
+            override fun readOrNull(source: Meta): String? = source.string
+            override fun convert(obj: String): Meta = Meta(obj.asValue())
+        }
+
+        public val boolean: MetaConverter<Boolean> = object : MetaConverter<Boolean> {
+            override val descriptor: MetaDescriptor = MetaDescriptor {
+                valueType(ValueType.BOOLEAN)
+            }
+
+            override fun readOrNull(source: Meta): Boolean? = source.boolean
+            override fun convert(obj: Boolean): Meta = Meta(obj.asValue())
+        }
+
+        public val number: MetaConverter<Number> = object : MetaConverter<Number> {
+            override val descriptor: MetaDescriptor = MetaDescriptor {
+                valueType(ValueType.NUMBER)
+            }
+
+            override fun readOrNull(source: Meta): Number? = source.number
+            override fun convert(obj: Number): Meta = Meta(obj.asValue())
+        }
+
+        public val double: MetaConverter<Double> = object : MetaConverter<Double> {
+            override val descriptor: MetaDescriptor = MetaDescriptor {
+                valueType(ValueType.NUMBER)
+            }
+
+            override fun readOrNull(source: Meta): Double? = source.double
+            override fun convert(obj: Double): Meta = Meta(obj.asValue())
+        }
+
+        public val float: MetaConverter<Float> = object : MetaConverter<Float> {
+            override val descriptor: MetaDescriptor = MetaDescriptor {
+                valueType(ValueType.NUMBER)
+            }
+
+            override fun readOrNull(source: Meta): Float? = source.float
+            override fun convert(obj: Float): Meta = Meta(obj.asValue())
+        }
+
+        public val int: MetaConverter<Int> = object : MetaConverter<Int> {
+            override val descriptor: MetaDescriptor = MetaDescriptor {
+                valueType(ValueType.NUMBER)
+            }
+
+            override fun readOrNull(source: Meta): Int? = source.int
+            override fun convert(obj: Int): Meta = Meta(obj.asValue())
+        }
+
+        public val long: MetaConverter<Long> = object : MetaConverter<Long> {
+            override val descriptor: MetaDescriptor = MetaDescriptor {
+                valueType(ValueType.NUMBER)
+            }
+
+            override fun readOrNull(source: Meta): Long? = source.long
+            override fun convert(obj: Long): Meta = Meta(obj.asValue())
+        }
+
+        public inline fun <reified E : Enum<E>> enum(): MetaConverter<E> = object : MetaConverter<E> {
+            override val descriptor: MetaDescriptor = MetaDescriptor {
+                valueType(ValueType.STRING)
+                allowedValues(enumValues<E>())
+            }
+
+            @Suppress("USELESS_CAST")
+            override fun readOrNull(source: Meta): E = source.enum<E>() as? E ?: error("The Item is not a Enum")
+
+            override fun convert(obj: E): Meta = Meta(obj.asValue())
+        }
+
+        public fun <T> valueList(
+            writer: (T) -> Value = { Value.of(it) },
+            reader: (Value) -> T,
+        ): MetaConverter<List<T>> = object : MetaConverter<List<T>> {
+            override val descriptor: MetaDescriptor = MetaDescriptor {
+                valueType(ValueType.LIST)
+            }
+
+            override fun readOrNull(source: Meta): List<T>? = source.value?.list?.map(reader)
+
+            override fun convert(obj: List<T>): Meta = Meta(obj.map(writer).asValue())
+        }
+
+        /**
+         * Automatically generate [MetaConverter] for a class using its serializer and optional [descriptor]
+         */
+        @DFExperimental
+        public inline fun <reified T> serializable(
+            descriptor: MetaDescriptor? = null,
+        ): MetaConverter<T> = object : MetaConverter<T> {
+            private val serializer: KSerializer<T> = serializer()
+
+            override fun readOrNull(source: Meta): T? {
+                val json = source.toJson(descriptor)
+                return Json.decodeFromJsonElement(serializer, json)
+            }
+
+            override fun convert(obj: T): Meta {
+                val json = Json.encodeToJsonElement(obj)
+                return json.toMeta(descriptor)
+            }
+
+        }
+
+    }
+}
+
+public fun <T : Any> MetaConverter<T>.convertNullable(obj: T?): Meta? = obj?.let { convert(it) }
+
+
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
index 73923d56..a24f7371 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaDelegate.kt
@@ -1,6 +1,7 @@
 package space.kscience.dataforge.meta
 
-import space.kscience.dataforge.meta.transformations.MetaConverter
+import space.kscience.dataforge.meta.descriptors.MetaDescriptor
+import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
 import kotlin.properties.ReadOnlyProperty
@@ -11,13 +12,48 @@ public fun MetaProvider.node(key: Name? = null): ReadOnlyProperty<Any?, Meta?> =
     get(key ?: property.name.asName())
 }
 
+/**
+ * Use [metaSpec] to read the Meta node
+ */
+public fun <T> MetaProvider.spec(
+    metaSpec: MetaSpec<T>,
+    key: Name? = null,
+): ReadOnlyProperty<Any?, T?> = ReadOnlyProperty { _, property ->
+    get(key ?: property.name.asName())?.let { metaSpec.read(it) }
+}
+
+/**
+ * Use object serializer to transform it to Meta and back
+ */
+@DFExperimental
+public inline fun <reified T> MetaProvider.serializable(
+    descriptor: MetaDescriptor? = null,
+    key: Name? = null,
+): ReadOnlyProperty<Any?, T?> = spec(MetaConverter.serializable(descriptor), key)
+
+@Deprecated("Use convertable", ReplaceWith("convertable(converter, key)"))
 public fun <T> MetaProvider.node(
     key: Name? = null,
-    converter: MetaConverter<T>
-): ReadOnlyProperty<Any?, T?> = ReadOnlyProperty { _, property ->
-    get(key ?: property.name.asName())?.let { converter.metaToObject(it) }
+    converter: MetaSpec<T>,
+): ReadOnlyProperty<Any?, T?> = spec(converter, key)
+
+/**
+ * Use [converter] to convert a list of same-name sibling Meta items to a list of objects
+ */
+public fun <T> Meta.listOfSpec(
+    converter: MetaSpec<T>,
+    key: Name? = null,
+): ReadOnlyProperty<Any?, List<T>> = ReadOnlyProperty{_, property ->
+    val name = key ?: property.name.asName()
+    getIndexed(name).values.map { converter.read(it) }
 }
 
+@DFExperimental
+public inline fun <reified T> Meta.listOfSerializable(
+    descriptor: MetaDescriptor? = null,
+    key: Name? = null,
+): ReadOnlyProperty<Any?, List<T>> = listOfSpec(MetaConverter.serializable(descriptor), key)
+
 /**
  * A property delegate that uses custom key
  */
@@ -27,7 +63,7 @@ public fun MetaProvider.value(key: Name? = null): ReadOnlyProperty<Any?, Value?>
 
 public fun <R> MetaProvider.value(
     key: Name? = null,
-    reader: (Value?) -> R
+    reader: (Value?) -> R,
 ): ReadOnlyProperty<Any?, R> = ReadOnlyProperty { _, property ->
     reader(get(key ?: property.name.asName())?.value)
 }
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
new file mode 100644
index 00000000..9918d504
--- /dev/null
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaSpec.kt
@@ -0,0 +1,21 @@
+package space.kscience.dataforge.meta
+
+import space.kscience.dataforge.meta.descriptors.Described
+
+public interface MetaSpec<out T> : Described {
+
+    /**
+     * Read the source meta into an object and return null if Meta could not be interpreted as a target type
+     */
+    public fun readOrNull(source: Meta): T?
+
+    /**
+     * Read generic read-only meta with this [MetaSpec] producing instance of the desired type.
+     * Throws an error if conversion could not be done.
+     */
+    public fun read(source: Meta): T = readOrNull(source) ?: error("Meta $source could not be interpreted by $this")
+}
+
+
+public fun <T : Any> MetaSpec<T>.readNullable(item: Meta?): T? = item?.let { read(it) }
+public fun <T> MetaSpec<T>.readValue(value: Value): T? = read(Meta(value))
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/transformations/MetaTransformation.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaTransformation.kt
similarity index 98%
rename from dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/transformations/MetaTransformation.kt
rename to dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaTransformation.kt
index d41365a6..dbf94f0f 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/transformations/MetaTransformation.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MetaTransformation.kt
@@ -1,6 +1,5 @@
-package space.kscience.dataforge.meta.transformations
+package space.kscience.dataforge.meta
 
-import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import kotlin.jvm.JvmInline
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
index 7e05d215..231f9e54 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMeta.kt
@@ -165,7 +165,7 @@ public fun MutableMetaProvider.remove(key: String) {
 
 // node setters
 
-public operator fun MutableMetaProvider.set(Key: NameToken, value: Meta): Unit = set(Key.asName(), value)
+public operator fun MutableMetaProvider.set(key: NameToken, value: Meta): Unit = set(key.asName(), value)
 public operator fun MutableMetaProvider.set(key: String, value: Meta): Unit = set(Name.parse(key), value)
 
 
@@ -324,8 +324,6 @@ private class MutableMetaImpl(
                     //remove child and invalidate if argument is null
                     if (node == null) {
                         children.remove(token)?.removeListener(this)
-                        // old item is not null otherwise we can't be here
-                        invalidate(name)
                     } else {
                         val newNode = wrapItem(node)
                         newNode.adoptBy(this, token)
@@ -335,7 +333,7 @@ private class MutableMetaImpl(
 
                 else -> {
                     val token = name.firstOrNull()!!
-                    //get existing or create new node.
+                    //get an existing node or create a new node.
                     if (items[token] == null) {
                         val newNode = MutableMetaImpl(null)
                         newNode.adoptBy(this, token)
@@ -372,7 +370,8 @@ public fun MutableMeta.append(key: String, value: Value): Unit = append(Name.par
 /**
  * Create a mutable copy of this meta. The copy is created even if the Meta is already mutable
  */
-public fun Meta.toMutableMeta(): ObservableMutableMeta = MutableMetaImpl(value, items)
+public fun Meta.toMutableMeta(): MutableMeta =
+    MutableMeta { update(this@toMutableMeta) } //MutableMetaImpl(value, items)
 
 public fun Meta.asMutableMeta(): MutableMeta = (this as? MutableMeta) ?: toMutableMeta()
 
@@ -387,12 +386,14 @@ public inline fun ObservableMutableMeta(builder: MutableMeta.() -> Unit = {}): O
 
 
 /**
- * Create a copy of this [Meta], optionally applying the given [block].
- * The listeners of the original Config are not retained.
+ * Create a read-only copy of this [Meta]. [modification] is an optional modification applied to [Meta] on copy.
+ *
+ * The copy does not reflect changes of the initial Meta.
  */
-public inline fun Meta.copy(block: MutableMeta.() -> Unit = {}): Meta =
-    toMutableMeta().apply(block)
-
+public inline fun Meta.copy(modification: MutableMeta.() -> Unit = {}): Meta = Meta {
+    update(this@copy)
+    modification()
+}
 
 private class MutableMetaWithDefault(
     val source: MutableMeta, val default: MetaProvider, val rootName: Name,
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
index 0f28523c..f6e96109 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/MutableMetaDelegate.kt
@@ -1,6 +1,7 @@
 package space.kscience.dataforge.meta
 
-import space.kscience.dataforge.meta.transformations.MetaConverter
+import space.kscience.dataforge.meta.descriptors.MetaDescriptor
+import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
 import kotlin.properties.ReadWriteProperty
@@ -20,18 +21,66 @@ public fun MutableMetaProvider.node(key: Name? = null): ReadWriteProperty<Any?,
         }
     }
 
-public fun <T> MutableMetaProvider.node(key: Name? = null, converter: MetaConverter<T>): ReadWriteProperty<Any?, T?> =
+/**
+ * Use [converter] to transform an object to Meta and back.
+ * Note that mutation of the object does not change Meta.
+ */
+public fun <T> MutableMetaProvider.convertable(
+    converter: MetaConverter<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, T?> =
     object : ReadWriteProperty<Any?, T?> {
         override fun getValue(thisRef: Any?, property: KProperty<*>): T? {
-            return get(key ?: property.name.asName())?.let { converter.metaToObject(it) }
+            val name = key ?: property.name.asName()
+            return get(name)?.let { converter.read(it) }
         }
 
         override fun setValue(thisRef: Any?, property: KProperty<*>, value: T?) {
             val name = key ?: property.name.asName()
-            set(name, value?.let { converter.objectToMeta(it) })
+            set(name, value?.let { converter.convert(it) })
         }
     }
 
+@Deprecated("Use convertable", ReplaceWith("convertable(converter, key)"))
+public fun <T> MutableMetaProvider.node(key: Name? = null, converter: MetaConverter<T>): ReadWriteProperty<Any?, T?> =
+    convertable(converter, key)
+
+/**
+ * Use object serializer to transform it to Meta and back.
+ * Note that mutation of the object does not change Meta.
+ */
+@DFExperimental
+public inline fun <reified T> MutableMetaProvider.serializable(
+    descriptor: MetaDescriptor? = null,
+    key: Name? = null,
+): ReadWriteProperty<Any?, T?> = convertable(MetaConverter.serializable(descriptor), key)
+
+/**
+ * Use [converter] to convert a list of same name siblings meta to object and back.
+ * Note that mutation of the object does not change Meta.
+ */
+public fun <T> MutableMeta.listOfConvertable(
+    converter: MetaConverter<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, List<T>> = object : ReadWriteProperty<Any?, List<T>> {
+    override fun getValue(thisRef: Any?, property: KProperty<*>): List<T> {
+        val name = key ?: property.name.asName()
+        return getIndexed(name).values.map { converter.read(it) }
+    }
+
+    override fun setValue(thisRef: Any?, property: KProperty<*>, value: List<T>) {
+        val name = key ?: property.name.asName()
+        setIndexed(name, value.map { converter.convert(it) })
+    }
+}
+
+@DFExperimental
+public inline fun <reified T> MutableMeta.listOfSerializable(
+    descriptor: MetaDescriptor? = null,
+    key: Name? = null,
+): ReadWriteProperty<Any?, List<T>> = listOfConvertable(MetaConverter.serializable(descriptor), key)
+
+
 public fun MutableMetaProvider.value(key: Name? = null): ReadWriteProperty<Any?, Value?> =
     object : ReadWriteProperty<Any?, Value?> {
         override fun getValue(thisRef: Any?, property: KProperty<*>): Value? =
@@ -45,7 +94,7 @@ public fun MutableMetaProvider.value(key: Name? = null): ReadWriteProperty<Any?,
 public fun <T> MutableMetaProvider.value(
     key: Name? = null,
     writer: (T) -> Value? = { Value.of(it) },
-    reader: (Value?) -> T
+    reader: (Value?) -> T,
 ): ReadWriteProperty<Any?, T> = object : ReadWriteProperty<Any?, T> {
     override fun getValue(thisRef: Any?, property: KProperty<*>): T =
         reader(get(key ?: property.name.asName())?.value)
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
index 5a2b05f7..b481962e 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/ObservableMeta.kt
@@ -1,8 +1,10 @@
 package space.kscience.dataforge.meta
 
 import space.kscience.dataforge.misc.ThreadSafe
-import space.kscience.dataforge.names.*
-import kotlin.reflect.KProperty1
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.cutFirst
+import space.kscience.dataforge.names.firstOrNull
+import space.kscience.dataforge.names.isEmpty
 
 
 internal data class MetaListener(
@@ -15,12 +17,15 @@ internal data class MetaListener(
  */
 public interface ObservableMeta : Meta {
     /**
-     * Add change listener to this meta. Owner is declared to be able to remove listeners later. Listener without owner could not be removed
+     * Add a change listener to this meta. The owner is declared to be able to remove listeners later.
+     * Listeners without an owner can only be removed all together.
+     *
+     * `this` object in the listener represents the current state of this meta. The name points to a changed node
      */
     public fun onChange(owner: Any?, callback: Meta.(name: Name) -> Unit)
 
     /**
-     * Remove all listeners belonging to given owner
+     * Remove all listeners belonging to the given [owner]. Passing null removes all listeners.
      */
     public fun removeListener(owner: Any?)
 
@@ -67,24 +72,4 @@ internal abstract class AbstractObservableMeta : ObservableMeta {
     override fun toString(): String = Meta.toString(this)
     override fun equals(other: Any?): Boolean = Meta.equals(this, other as? Meta)
     override fun hashCode(): Int = Meta.hashCode(this)
-}
-
-/**
- * Use the value of the property in a [callBack].
- * The callback is called once immediately after subscription to pass the initial value.
- *
- * Optional [owner] property is used for
- */
-public fun <S : Scheme, T> S.useProperty(
-    property: KProperty1<S, T>,
-    owner: Any? = null,
-    callBack: S.(T) -> Unit,
-) {
-    //Pass initial value.
-    callBack(property.get(this))
-    meta.onChange(owner) { name ->
-        if (name.startsWith(property.name.asName())) {
-            callBack(property.get(this@useProperty))
-        }
-    }
 }
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
index 90473286..2e9edc1d 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Scheme.kt
@@ -7,36 +7,49 @@ import space.kscience.dataforge.meta.descriptors.validate
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.misc.ThreadSafe
 import space.kscience.dataforge.names.*
+import kotlin.properties.ReadWriteProperty
+import kotlin.reflect.KProperty
+import kotlin.reflect.KProperty1
 
 /**
- * A base for delegate-based or descriptor-based scheme. [Scheme] has an empty constructor to simplify usage from [Specification].
+ * A base for delegate-based or descriptor-based scheme. [Scheme] has an empty constructor to simplify usage from [MetaSpec].
  * Default item provider and [MetaDescriptor] are optional
  */
 public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurable {
 
     /**
-     * Meta to be mutated by this schme
+     * Meta to be mutated by this scheme
      */
-    private var targetMeta: MutableMeta = MutableMeta()
+    private var target: MutableMeta? = null
+        get() {
+            // automatic initialization of target if it is missing
+            if (field == null) {
+                field = MutableMeta()
+            }
+            return field
+        }
 
     /**
      * Default values provided by this scheme
      */
-    private var defaultMeta: Meta? = null
+    private var prototype: Meta? = null
 
     final override val meta: ObservableMutableMeta = SchemeMeta(Name.EMPTY)
 
     final override var descriptor: MetaDescriptor? = null
-        internal set
+        private set
 
-    internal fun wrap(
-        newMeta: MutableMeta,
-        preserveDefault: Boolean = false,
+    /**
+     * This method must be called before the scheme can be used.
+     */
+    internal fun initialize(
+        target: MutableMeta,
+        prototype: Meta,
+        descriptor: MetaDescriptor?,
     ) {
-        if (preserveDefault) {
-            defaultMeta = targetMeta.seal()
-        }
-        targetMeta = newMeta
+        this.target = target
+        this.prototype = prototype
+        this.descriptor = descriptor
     }
 
     /**
@@ -47,11 +60,11 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
         return descriptor?.validate(meta) ?: true
     }
 
-    override fun get(name: Name): MutableMeta? = meta.get(name)
+    override fun get(name: Name): MutableMeta? = meta[name]
 
     override fun set(name: Name, node: Meta?) {
         if (validate(name, meta)) {
-            meta.set(name, node)
+            meta[name] = node
         } else {
             error("Validation failed for node $node at $name")
         }
@@ -68,14 +81,16 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
 
     private val listeners: MutableList<MetaListener> = mutableListOf()
 
+    override fun toString(): String = meta.toString()
+
     private inner class SchemeMeta(val pathName: Name) : ObservableMutableMeta {
         override var value: Value?
-            get() = targetMeta[pathName]?.value
-                ?: defaultMeta?.get(pathName)?.value
+            get() = target[pathName]?.value
+                ?: prototype?.get(pathName)?.value
                 ?: descriptor?.get(pathName)?.defaultValue
             set(value) {
-                val oldValue = targetMeta[pathName]?.value
-                targetMeta[pathName] = value
+                val oldValue = target[pathName]?.value
+                target!![pathName] = value
                 if (oldValue != value) {
                     invalidate(Name.EMPTY)
                 }
@@ -83,8 +98,8 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
 
         override val items: Map<NameToken, ObservableMutableMeta>
             get() {
-                val targetKeys = targetMeta[pathName]?.items?.keys ?: emptySet()
-                val defaultKeys = defaultMeta?.get(pathName)?.items?.keys ?: emptySet()
+                val targetKeys = target[pathName]?.items?.keys ?: emptySet()
+                val defaultKeys = prototype?.get(pathName)?.items?.keys ?: emptySet()
                 return (targetKeys + defaultKeys).associateWith { SchemeMeta(pathName + it) }
             }
 
@@ -111,7 +126,7 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
         override fun hashCode(): Int = Meta.hashCode(this)
 
         override fun set(name: Name, node: Meta?) {
-            targetMeta.set(name, node)
+            target!![name] = node
             invalidate(name)
         }
 
@@ -119,7 +134,6 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
 
         @DFExperimental
         override fun attach(name: Name, node: ObservableMutableMeta) {
-            //TODO implement zero-copy attachment
             set(name, node)
             node.onChange(this) { changeName ->
                 set(name + changeName, this[changeName])
@@ -131,10 +145,11 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
 
 /**
  * Relocate scheme target onto given [MutableMeta]. Old provider does not get updates anymore.
- * Current state of the scheme used as a default.
+ * The current state of the scheme is used as a default.
  */
+@DFExperimental
 public fun <T : Scheme> T.retarget(provider: MutableMeta): T = apply {
-    wrap(provider, true)
+    initialize(provider, meta.seal(), descriptor)
 }
 
 /**
@@ -151,26 +166,149 @@ public inline fun <T : Scheme> T.copy(spec: SchemeSpec<T>, block: T.() -> Unit =
 /**
  * A specification for simplified generation of wrappers
  */
-public open class SchemeSpec<out T : Scheme>(
+public open class SchemeSpec<T : Scheme>(
     private val builder: () -> T,
-) : Specification<T> {
+) : MetaConverter<T> {
 
-    override fun read(source: Meta): T = builder().also {
-        it.wrap(MutableMeta().withDefault(source))
-    }
-
-    override fun write(target: MutableMeta): T = empty().also {
-        it.wrap(target)
-    }
-
-    //TODO Generate descriptor from Scheme class
     override val descriptor: MetaDescriptor? get() = null
 
-    override fun empty(): T = builder().also {
-        it.descriptor = descriptor
+    override fun readOrNull(source: Meta): T = builder().also {
+        it.initialize(MutableMeta(), source, descriptor)
     }
 
-    @Suppress("OVERRIDE_BY_INLINE")
-    final override inline operator fun invoke(action: T.() -> Unit): T = empty().apply(action)
+    public fun write(target: MutableMeta): T = empty().also {
+        it.initialize(target, Meta.EMPTY, descriptor)
+    }
 
+    /**
+     * Generate an empty object
+     */
+    public fun empty(): T = builder().also {
+        it.initialize(MutableMeta(), Meta.EMPTY, descriptor)
+    }
+
+    override fun convert(obj: T): Meta  = obj.meta
+
+    /**
+     * A convenience method to use specifications in builders
+     */
+    public inline operator fun invoke(action: T.() -> Unit): T = empty().apply(action)
+
+}
+
+
+
+/**
+ * Update a [MutableMeta] using given specification
+ */
+public fun <T : Scheme> MutableMeta.updateWith(
+    spec: SchemeSpec<T>,
+    action: T.() -> Unit,
+): T = spec.write(this).apply(action)
+
+
+/**
+ * Update configuration using given specification
+ */
+public fun <T : Scheme> Configurable.updateWith(
+    spec: SchemeSpec<T>,
+    action: T.() -> Unit,
+): T = spec.write(meta).apply(action)
+
+
+/**
+ * A delegate that uses a [MetaSpec] to wrap a child of this provider
+ */
+public fun <T : Scheme> MutableMeta.scheme(
+    spec: SchemeSpec<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, T> = object : ReadWriteProperty<Any?, T> {
+    override fun getValue(thisRef: Any?, property: KProperty<*>): T {
+        val name = key ?: property.name.asName()
+        return spec.write(getOrCreate(name))
+    }
+
+    override fun setValue(thisRef: Any?, property: KProperty<*>, value: T) {
+        val name = key ?: property.name.asName()
+        set(name, value.toMeta())
+    }
+}
+
+public fun <T : Scheme> Scheme.scheme(
+    spec: SchemeSpec<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, T> = meta.scheme(spec, key)
+
+/**
+ * A delegate that uses a [MetaSpec] to wrap a child of this provider.
+ * Returns null if meta with given name does not exist.
+ */
+public fun <T : Scheme> MutableMeta.schemeOrNull(
+    spec: SchemeSpec<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, T?> = object : ReadWriteProperty<Any?, T?> {
+    override fun getValue(thisRef: Any?, property: KProperty<*>): T? {
+        val name = key ?: property.name.asName()
+        return if (get(name) == null) null else spec.write(getOrCreate(name))
+    }
+
+    override fun setValue(thisRef: Any?, property: KProperty<*>, value: T?) {
+        val name = key ?: property.name.asName()
+        if (value == null) remove(name)
+        else set(name, value.toMeta())
+    }
+}
+
+public fun <T : Scheme> Scheme.schemeOrNull(
+    spec: SchemeSpec<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, T?> = meta.schemeOrNull(spec, key)
+
+/**
+ * A delegate that uses a [MetaSpec] to wrap a list of child providers.
+ * If children are mutable, the changes in list elements are reflected on them.
+ * The list is a snapshot of the children's state, so changes in structure are not reflected in its composition.
+ */
+@DFExperimental
+public fun <T : Scheme> MutableMeta.listOfScheme(
+    spec: SchemeSpec<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, List<T>> = object : ReadWriteProperty<Any?, List<T>> {
+    override fun getValue(thisRef: Any?, property: KProperty<*>): List<T> {
+        val name = key ?: property.name.asName()
+        return getIndexed(name).values.map { spec.write(it as MutableMeta) }
+    }
+
+    override fun setValue(thisRef: Any?, property: KProperty<*>, value: List<T>) {
+        val name = key ?: property.name.asName()
+        setIndexed(name, value.map { it.toMeta() })
+    }
+}
+
+
+@DFExperimental
+public fun <T : Scheme> Scheme.listOfScheme(
+    spec: SchemeSpec<T>,
+    key: Name? = null,
+): ReadWriteProperty<Any?, List<T>> = meta.listOfScheme(spec, key)
+
+
+/**
+ * Use the value of the property in a [callBack].
+ * The callback is called once immediately after subscription to pass the initial value.
+ *
+ * Optional [owner] property is used to be able to remove the listener later via [ObservableMeta.removeListener].
+ */
+public fun <S : Scheme, T> S.useProperty(
+    property: KProperty1<S, T>,
+    owner: Any? = null,
+    callBack: S.(T) -> Unit,
+) {
+    //Pass initial value.
+    callBack(property.get(this))
+    meta.onChange(owner) { name ->
+        if (name.startsWith(property.name.asName())) {
+            callBack(property.get(this@useProperty))
+        }
+    }
 }
\ No newline at end of file
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/SealedMeta.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/SealedMeta.kt
index 217a6a04..e842b990 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/SealedMeta.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/SealedMeta.kt
@@ -101,11 +101,6 @@ internal class MetaBuilder(
     override fun hashCode(): Int = Meta.hashCode(this)
 }
 
-/**
- * Create a read-only meta.
- */
-public inline fun Meta(builder: MutableMeta.() -> Unit): Meta =
-    MetaBuilder().apply(builder).seal()
 
 /**
  * Create an immutable meta.
@@ -113,6 +108,11 @@ public inline fun Meta(builder: MutableMeta.() -> Unit): Meta =
 public inline fun SealedMeta(builder: MutableMeta.() -> Unit): SealedMeta =
     MetaBuilder().apply(builder).seal()
 
+/**
+ * Create a read-only meta.
+ */
+public inline fun Meta(builder: MutableMeta.() -> Unit): Meta = SealedMeta(builder)
+
 /**
  * Create an empty meta mutable meta.
  */
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Specification.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Specification.kt
deleted file mode 100644
index 6d3afbea..00000000
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/Specification.kt
+++ /dev/null
@@ -1,130 +0,0 @@
-package space.kscience.dataforge.meta
-
-import space.kscience.dataforge.meta.descriptors.Described
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.asName
-import kotlin.properties.ReadWriteProperty
-import kotlin.reflect.KProperty
-
-public interface ReadOnlySpecification<out T : Any>: Described {
-
-    /**
-     * Read generic read-only meta with this [Specification] producing instance of desired type.
-     * The source is not mutated even if it is in theory mutable
-     */
-    public fun read(source: Meta): T
-
-    /**
-     * Generate an empty object
-     */
-    public fun empty(): T
-
-    /**
-     * A convenience method to use specifications in builders
-     */
-    public operator fun invoke(action: T.() -> Unit): T = empty().apply(action)
-}
-
-
-/**
- * Allows to apply custom configuration in a type safe way to simple untyped configuration.
- * By convention [Scheme] companion should inherit this class
- *
- */
-public interface Specification<out T : Any> : ReadOnlySpecification<T> {
-    /**
-     * Wrap [MutableMeta], using it as inner storage (changes to [Specification] are reflected on [MutableMeta]
-     */
-    public fun write(target: MutableMeta): T
-}
-
-/**
- * Update a [MutableMeta] using given specification
- */
-public fun <T : Any> MutableMeta.updateWith(
-    spec: Specification<T>,
-    action: T.() -> Unit,
-): T = spec.write(this).apply(action)
-
-
-/**
- * Update configuration using given specification
- */
-public fun <T : Any> Configurable.updateWith(
-    spec: Specification<T>,
-    action: T.() -> Unit,
-): T = spec.write(meta).apply(action)
-
-//
-//public fun  <M : MutableTypedMeta<M>> MutableMeta.withSpec(spec: Specification<M>): M? =
-//    spec.write(it)
-
-/**
- * A delegate that uses a [Specification] to wrap a child of this provider
- */
-public fun <T : Scheme> MutableMeta.spec(
-    spec: Specification<T>,
-    key: Name? = null,
-): ReadWriteProperty<Any?, T> = object : ReadWriteProperty<Any?, T> {
-    override fun getValue(thisRef: Any?, property: KProperty<*>): T {
-        val name = key ?: property.name.asName()
-        return spec.write(getOrCreate(name))
-    }
-
-    override fun setValue(thisRef: Any?, property: KProperty<*>, value: T) {
-        val name = key ?: property.name.asName()
-        set(name, value.toMeta())
-    }
-}
-
-public fun <T : Scheme> Scheme.spec(
-    spec: Specification<T>,
-    key: Name? = null,
-): ReadWriteProperty<Any?, T> = meta.spec(spec, key)
-
-/**
- * A delegate that uses a [Specification] to wrap a child of this provider.
- * Returns null if meta with given name does not exist.
- */
-public fun <T : Scheme> MutableMeta.specOrNull(
-    spec: Specification<T>,
-    key: Name? = null,
-): ReadWriteProperty<Any?, T?> = object : ReadWriteProperty<Any?, T?> {
-    override fun getValue(thisRef: Any?, property: KProperty<*>): T? {
-        val name = key ?: property.name.asName()
-        return if (get(name) == null) null else spec.write(getOrCreate(name))
-    }
-
-    override fun setValue(thisRef: Any?, property: KProperty<*>, value: T?) {
-        val name = key ?: property.name.asName()
-        if (value == null) remove(name)
-        else set(name, value.toMeta())
-    }
-}
-
-public fun <T : Scheme> Scheme.specOrNull(
-    spec: Specification<T>,
-    key: Name? = null,
-): ReadWriteProperty<Any?, T?> = meta.specOrNull(spec, key)
-
-/**
- * A delegate that uses a [Specification] to wrap a list of child providers.
- * If children are mutable, the changes in list elements are reflected on them.
- * The list is a snapshot of children state, so change in structure is not reflected on its composition.
- */
-@DFExperimental
-public fun <T : Scheme> MutableMeta.listOfSpec(
-    spec: Specification<T>,
-    key: Name? = null,
-): ReadWriteProperty<Any?, List<T>> = object : ReadWriteProperty<Any?, List<T>> {
-    override fun getValue(thisRef: Any?, property: KProperty<*>): List<T> {
-        val name = key ?: property.name.asName()
-        return getIndexed(name).values.map { spec.write(it as MutableMeta) }
-    }
-
-    override fun setValue(thisRef: Any?, property: KProperty<*>, value: List<T>) {
-        val name = key ?: property.name.asName()
-        setIndexed(name, value.map { it.toMeta() })
-    }
-}
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptor.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptor.kt
index 742b89ed..45954985 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptor.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptor.kt
@@ -7,6 +7,7 @@ import space.kscience.dataforge.names.*
 /**
  * Restrictions on value in the node
  */
+@Serializable
 public enum class ValueRestriction {
     /**
      * No restrictions
@@ -27,7 +28,7 @@ public enum class ValueRestriction {
 /**
  * The descriptor for a meta
  * @param description description text
- * @param children child descriptors for this node
+ * @param nodes child descriptors for this node
  * @param multiple True if same name siblings with this name are allowed
  * @param valueRestriction The requirements for node content
  * @param valueTypes list of allowed types for [Meta.value], null if all values are allowed.
@@ -39,7 +40,7 @@ public enum class ValueRestriction {
 @Serializable
 public data class MetaDescriptor(
     public val description: String? = null,
-    public val children: Map<String, MetaDescriptor> = emptyMap(),
+    public val nodes: Map<String, MetaDescriptor> = emptyMap(),
     public val multiple: Boolean = false,
     public val valueRestriction: ValueRestriction = ValueRestriction.NONE,
     public val valueTypes: List<ValueType>? = null,
@@ -47,6 +48,9 @@ public data class MetaDescriptor(
     public val defaultValue: Value? = null,
     public val attributes: Meta = Meta.EMPTY,
 ) {
+    @Deprecated("Replace by nodes", ReplaceWith("nodes"))
+    public val children: Map<String, MetaDescriptor> get() = nodes
+
     /**
      * A node constructed of default values for this descriptor and its children
      */
@@ -55,7 +59,7 @@ public data class MetaDescriptor(
             defaultValue?.let { defaultValue ->
                 this.value = defaultValue
             }
-            children.forEach { (key, descriptor) ->
+            nodes.forEach { (key, descriptor) ->
                 set(key, descriptor.defaultNode)
             }
         }
@@ -67,13 +71,13 @@ public data class MetaDescriptor(
     }
 }
 
-public val MetaDescriptor.required: Boolean get() = valueRestriction == ValueRestriction.REQUIRED || children.values.any { required }
+public val MetaDescriptor.required: Boolean get() = valueRestriction == ValueRestriction.REQUIRED || nodes.values.any { required }
 
 public val MetaDescriptor.allowedValues: List<Value>? get() = attributes[MetaDescriptor.ALLOWED_VALUES_KEY]?.value?.list
 
 public operator fun MetaDescriptor.get(name: Name): MetaDescriptor? = when (name.length) {
     0 -> this
-    1 -> children[name.firstOrNull()!!.toString()]
+    1 -> nodes[name.firstOrNull()!!.toString()]
     else -> get(name.firstOrNull()!!.asName())?.get(name.cutFirst())
 }
 
@@ -95,7 +99,7 @@ public fun MetaDescriptor.validate(item: Meta?): Boolean {
     if (item == null) return !required
     if (!validate(item.value)) return false
 
-    children.forEach { (key, childDescriptor) ->
+    nodes.forEach { (key, childDescriptor) ->
         if (!childDescriptor.validate(item[key])) return false
     }
     return true
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
index 95949d03..5d4d81ad 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/MetaDescriptorBuilder.kt
@@ -44,38 +44,27 @@ public class MetaDescriptorBuilder @PublishedApi internal constructor() {
         attributes.apply(block)
     }
 
-    public fun item(name: Name, block: MetaDescriptorBuilder.() -> Unit = {}): MetaDescriptorBuilder {
-        return when (name.length) {
-            0 -> apply(block)
+    internal fun node(
+        name: Name,
+        descriptorBuilder: MetaDescriptorBuilder,
+    ): Unit {
+        when (name.length) {
+            0 -> error("Can't set descriptor to root")
             1 -> {
-                val target = MetaDescriptorBuilder().apply(block)
-                children[name.first().body] = target
-                target
+                children[name.first().body] = descriptorBuilder
             }
 
-            else -> {
-                children.getOrPut(name.first().body) { MetaDescriptorBuilder() }.item(name.cutFirst(), block)
-            }
+            else -> children.getOrPut(name.first().body) {
+                MetaDescriptorBuilder()
+            }.node(name.cutFirst(), descriptorBuilder)
         }
     }
 
-    public fun node(
+    internal fun node(
         name: Name,
-        descriptor: MetaDescriptor,
-        block: MetaDescriptorBuilder.() -> Unit = {},
-    ): MetaDescriptorBuilder = when (name.length) {
-        0 -> error("Can't set descriptor to root")
-        1 -> {
-            val item = descriptor.toBuilder().apply {
-                valueRestriction = ValueRestriction.ABSENT
-            }.apply(block)
-            children[name.first().body] = item
-            item
-        }
-
-        else -> children.getOrPut(name.first().body) {
-            MetaDescriptorBuilder()
-        }.node(name.cutFirst(), descriptor, block)
+        descriptorBuilder: MetaDescriptor,
+    ): Unit {
+        node(name, descriptorBuilder.toBuilder())
     }
 
     public var allowedValues: List<Value>
@@ -89,10 +78,21 @@ public class MetaDescriptorBuilder @PublishedApi internal constructor() {
         allowedValues = values.map { Value.of(it) }
     }
 
+    public fun from(descriptor: MetaDescriptor) {
+        description = descriptor.description
+        children.putAll(descriptor.nodes.mapValues { it.value.toBuilder() })
+        multiple = descriptor.multiple
+        valueRestriction = descriptor.valueRestriction
+        valueTypes = descriptor.valueTypes
+        indexKey = descriptor.indexKey
+        default = descriptor.defaultValue
+        attributes.update(descriptor.attributes)
+    }
+
     @PublishedApi
     internal fun build(): MetaDescriptor = MetaDescriptor(
         description = description,
-        children = children.mapValues { it.value.build() },
+        nodes = children.mapValues { it.value.build() },
         multiple = multiple,
         valueRestriction = valueRestriction,
         valueTypes = valueTypes,
@@ -102,12 +102,57 @@ public class MetaDescriptorBuilder @PublishedApi internal constructor() {
     )
 }
 
-public fun MetaDescriptorBuilder.item(name: String, block: MetaDescriptorBuilder.() -> Unit): MetaDescriptorBuilder =
-    item(Name.parse(name), block)
+//public fun MetaDescriptorBuilder.item(name: String, block: MetaDescriptorBuilder.() -> Unit): MetaDescriptorBuilder =
+//    item(Name.parse(name), block)
 
 public inline fun MetaDescriptor(block: MetaDescriptorBuilder.() -> Unit): MetaDescriptor =
     MetaDescriptorBuilder().apply(block).build()
 
+/**
+ * Create and configure child node descriptor
+ */
+public fun MetaDescriptorBuilder.node(
+    name: Name,
+    block: MetaDescriptorBuilder.() -> Unit,
+) {
+    node(
+        name,
+        MetaDescriptorBuilder().apply(block)
+    )
+}
+
+public fun MetaDescriptorBuilder.node(name: String, descriptor: MetaDescriptor) {
+    node(Name.parse(name), descriptor)
+}
+
+public fun MetaDescriptorBuilder.node(name: String, block: MetaDescriptorBuilder.() -> Unit) {
+    node(Name.parse(name), block)
+}
+
+public fun MetaDescriptorBuilder.node(
+    key: String,
+    base: Described,
+    block: MetaDescriptorBuilder.() -> Unit = {},
+) {
+    node(Name.parse(key), base.descriptor?.toBuilder()?.apply(block) ?: MetaDescriptorBuilder())
+}
+
+public fun MetaDescriptorBuilder.required() {
+    valueRestriction = ValueRestriction.REQUIRED
+}
+
+private fun MetaDescriptor.toBuilder(): MetaDescriptorBuilder = MetaDescriptorBuilder().apply {
+    description = this@toBuilder.description
+    children = this@toBuilder.nodes.mapValuesTo(LinkedHashMap()) { it.value.toBuilder() }
+    multiple = this@toBuilder.multiple
+    valueRestriction = this@toBuilder.valueRestriction
+    valueTypes = this@toBuilder.valueTypes
+    indexKey = this@toBuilder.indexKey
+    default = defaultValue
+    attributes = this@toBuilder.attributes.toMutableMeta()
+}
+
+
 /**
  * Create and configure child value descriptor
  */
@@ -116,7 +161,7 @@ public fun MetaDescriptorBuilder.value(
     type: ValueType,
     vararg additionalTypes: ValueType,
     block: MetaDescriptorBuilder.() -> Unit = {},
-): MetaDescriptorBuilder = item(name) {
+): Unit = node(name) {
     valueType(type, *additionalTypes)
     block()
 }
@@ -126,41 +171,14 @@ public fun MetaDescriptorBuilder.value(
     type: ValueType,
     vararg additionalTypes: ValueType,
     block: MetaDescriptorBuilder.() -> Unit = {},
-): MetaDescriptorBuilder = value(Name.parse(name), type, additionalTypes = additionalTypes, block)
+): Unit = value(Name.parse(name), type, additionalTypes = additionalTypes, block)
 
-/**
- * Create and configure child value descriptor
- */
-public fun MetaDescriptorBuilder.node(
-    name: Name, block: MetaDescriptorBuilder.() -> Unit,
-): MetaDescriptorBuilder = item(name) {
-    valueRestriction = ValueRestriction.ABSENT
-    block()
-}
-
-public fun MetaDescriptorBuilder.node(name: String, block: MetaDescriptorBuilder.() -> Unit) {
-    node(Name.parse(name), block)
-}
-
-public fun MetaDescriptorBuilder.node(
-    key: String,
-    described: Described,
-    block: MetaDescriptorBuilder.() -> Unit = {},
-) {
-    described.descriptor?.let {
-        node(Name.parse(key), it, block)
-    }
-}
-
-public fun MetaDescriptorBuilder.required() {
-    valueRestriction = ValueRestriction.REQUIRED
-}
 
 public inline fun <reified E : Enum<E>> MetaDescriptorBuilder.enum(
     key: Name,
     default: E?,
     crossinline modifier: MetaDescriptorBuilder.() -> Unit = {},
-): MetaDescriptorBuilder = value(key, ValueType.STRING) {
+): Unit = value(key, ValueType.STRING) {
     default?.let {
         this.default = default.asValue()
     }
@@ -168,17 +186,6 @@ public inline fun <reified E : Enum<E>> MetaDescriptorBuilder.enum(
     modifier()
 }
 
-private fun MetaDescriptor.toBuilder(): MetaDescriptorBuilder = MetaDescriptorBuilder().apply {
-    description = this@toBuilder.description
-    children = this@toBuilder.children.mapValuesTo(LinkedHashMap()) { it.value.toBuilder() }
-    multiple = this@toBuilder.multiple
-    valueRestriction = this@toBuilder.valueRestriction
-    valueTypes = this@toBuilder.valueTypes
-    indexKey = this@toBuilder.indexKey
-    default = defaultValue
-    attributes = this@toBuilder.attributes.toMutableMeta()
-}
-
 /**
  * Make a deep copy of this descriptor applying given transformation [block]
  */
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/schemeDescriptor.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/schemeDescriptor.kt
index 79fb6fdb..a2a77182 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/schemeDescriptor.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/descriptors/schemeDescriptor.kt
@@ -3,13 +3,15 @@ package space.kscience.dataforge.meta.descriptors
 import space.kscience.dataforge.meta.Scheme
 import space.kscience.dataforge.meta.SchemeSpec
 import space.kscience.dataforge.meta.ValueType
+import space.kscience.dataforge.misc.DFExperimental
 import kotlin.reflect.KProperty1
 import kotlin.reflect.typeOf
 
+@DFExperimental
 public inline fun <S : Scheme, reified T> MetaDescriptorBuilder.value(
     property: KProperty1<S, T>,
     noinline block: MetaDescriptorBuilder.() -> Unit = {},
-): MetaDescriptorBuilder = when (typeOf<T>()) {
+): Unit = when (typeOf<T>()) {
     typeOf<Number>(), typeOf<Int>(), typeOf<Double>(), typeOf<Short>(), typeOf<Long>(), typeOf<Float>() ->
         value(property.name, ValueType.NUMBER) {
             block()
@@ -34,9 +36,10 @@ public inline fun <S : Scheme, reified T> MetaDescriptorBuilder.value(
         multiple = true
         block()
     }
-    else -> item(property.name, block)
+    else -> node(property.name, block)
 }
 
+@DFExperimental
 public inline fun <S : Scheme, reified T : Scheme> MetaDescriptorBuilder.scheme(
     property: KProperty1<S, T>,
     spec: SchemeSpec<T>,
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/transformations/MetaConverter.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/transformations/MetaConverter.kt
deleted file mode 100644
index 0dfb63d7..00000000
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/meta/transformations/MetaConverter.kt
+++ /dev/null
@@ -1,163 +0,0 @@
-package space.kscience.dataforge.meta.transformations
-
-import space.kscience.dataforge.meta.*
-import space.kscience.dataforge.meta.descriptors.MetaDescriptor
-import kotlin.reflect.KType
-import kotlin.reflect.typeOf
-
-/**
- * A converter of generic object to and from [Meta]
- */
-public interface MetaConverter<T> {
-
-    /**
-     * Runtime type of [T]
-     */
-    public val type: KType
-
-    /**
-     * A descriptor for resulting meta
-     */
-    public val descriptor: MetaDescriptor get() = MetaDescriptor.EMPTY
-
-    /**
-     * Attempt conversion of [meta] to an object or return null if conversion failed
-     */
-    public fun metaToObjectOrNull(meta: Meta): T?
-
-    public fun metaToObject(meta: Meta): T =
-        metaToObjectOrNull(meta) ?: error("Meta $meta could not be interpreted by $this")
-
-    public fun objectToMeta(obj: T): Meta
-
-    public companion object {
-
-        public val meta: MetaConverter<Meta> = object : MetaConverter<Meta> {
-            override val type: KType = typeOf<Meta>()
-
-            override fun metaToObjectOrNull(meta: Meta): Meta = meta
-            override fun objectToMeta(obj: Meta): Meta = obj
-        }
-
-        public val value: MetaConverter<Value> = object : MetaConverter<Value> {
-            override val type: KType = typeOf<Value>()
-
-            override fun metaToObjectOrNull(meta: Meta): Value? = meta.value
-            override fun objectToMeta(obj: Value): Meta = Meta(obj)
-        }
-
-        public val string: MetaConverter<String> = object : MetaConverter<String> {
-            override val type: KType = typeOf<String>()
-
-            override val descriptor: MetaDescriptor = MetaDescriptor {
-                valueType(ValueType.STRING)
-            }
-
-
-            override fun metaToObjectOrNull(meta: Meta): String? = meta.string
-            override fun objectToMeta(obj: String): Meta = Meta(obj.asValue())
-        }
-
-        public val boolean: MetaConverter<Boolean> = object : MetaConverter<Boolean> {
-            override val type: KType = typeOf<Boolean>()
-
-            override val descriptor: MetaDescriptor = MetaDescriptor {
-                valueType(ValueType.BOOLEAN)
-            }
-
-            override fun metaToObjectOrNull(meta: Meta): Boolean? = meta.boolean
-            override fun objectToMeta(obj: Boolean): Meta = Meta(obj.asValue())
-        }
-
-        public val number: MetaConverter<Number> = object : MetaConverter<Number> {
-            override val type: KType = typeOf<Number>()
-
-            override val descriptor: MetaDescriptor = MetaDescriptor {
-                valueType(ValueType.NUMBER)
-            }
-
-            override fun metaToObjectOrNull(meta: Meta): Number? = meta.number
-            override fun objectToMeta(obj: Number): Meta = Meta(obj.asValue())
-        }
-
-        public val double: MetaConverter<Double> = object : MetaConverter<Double> {
-            override val type: KType = typeOf<Double>()
-
-            override val descriptor: MetaDescriptor = MetaDescriptor {
-                valueType(ValueType.NUMBER)
-            }
-
-            override fun metaToObjectOrNull(meta: Meta): Double? = meta.double
-            override fun objectToMeta(obj: Double): Meta = Meta(obj.asValue())
-        }
-
-        public val float: MetaConverter<Float> = object : MetaConverter<Float> {
-            override val type: KType = typeOf<Float>()
-
-            override val descriptor: MetaDescriptor = MetaDescriptor {
-                valueType(ValueType.NUMBER)
-            }
-
-            override fun metaToObjectOrNull(meta: Meta): Float? = meta.float
-            override fun objectToMeta(obj: Float): Meta = Meta(obj.asValue())
-        }
-
-        public val int: MetaConverter<Int> = object : MetaConverter<Int> {
-            override val type: KType = typeOf<Int>()
-
-            override val descriptor: MetaDescriptor = MetaDescriptor {
-                valueType(ValueType.NUMBER)
-            }
-
-            override fun metaToObjectOrNull(meta: Meta): Int? = meta.int
-            override fun objectToMeta(obj: Int): Meta = Meta(obj.asValue())
-        }
-
-        public val long: MetaConverter<Long> = object : MetaConverter<Long> {
-            override val type: KType = typeOf<Long>()
-
-            override val descriptor: MetaDescriptor = MetaDescriptor {
-                valueType(ValueType.NUMBER)
-            }
-
-            override fun metaToObjectOrNull(meta: Meta): Long? = meta.long
-            override fun objectToMeta(obj: Long): Meta = Meta(obj.asValue())
-        }
-
-        public inline fun <reified E : Enum<E>> enum(): MetaConverter<E> = object : MetaConverter<E> {
-            override val type: KType = typeOf<E>()
-
-            override val descriptor: MetaDescriptor = MetaDescriptor {
-                valueType(ValueType.STRING)
-                allowedValues(enumValues<E>())
-            }
-
-            @Suppress("USELESS_CAST")
-            override fun metaToObjectOrNull(meta: Meta): E = meta.enum<E>() as? E ?: error("The Item is not a Enum")
-
-            override fun objectToMeta(obj: E): Meta = Meta(obj.asValue())
-        }
-
-        public fun <T> valueList(
-            writer: (T) -> Value = { Value.of(it) },
-            reader: (Value) -> T,
-        ): MetaConverter<List<T>> =
-            object : MetaConverter<List<T>> {
-                override val type: KType = typeOf<List<T>>()
-
-                override val descriptor: MetaDescriptor = MetaDescriptor {
-                    valueType(ValueType.LIST)
-                }
-
-                override fun metaToObjectOrNull(meta: Meta): List<T>? = meta.value?.list?.map(reader)
-
-                override fun objectToMeta(obj: List<T>): Meta = Meta(obj.map(writer).asValue())
-            }
-
-    }
-}
-
-public fun <T : Any> MetaConverter<T>.nullableMetaToObject(item: Meta?): T? = item?.let { metaToObject(it) }
-public fun <T : Any> MetaConverter<T>.nullableObjectToMeta(obj: T?): Meta? = obj?.let { objectToMeta(it) }
-
-public fun <T> MetaConverter<T>.valueToObject(value: Value): T? = metaToObject(Meta(value))
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/DfType.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/DfType.kt
index 11f548ae..936e793a 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/DfType.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/misc/DfType.kt
@@ -6,6 +6,3 @@ package space.kscience.dataforge.misc
 @MustBeDocumented
 @Target(AnnotationTarget.CLASS)
 public annotation class DfType(val id: String)
-
-@Deprecated("Replace with DfType", replaceWith = ReplaceWith("DfType"))
-public typealias DfId = DfType
diff --git a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt
index 1c9a9cf3..b46a3507 100644
--- a/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt
+++ b/dataforge-meta/src/commonMain/kotlin/space/kscience/dataforge/names/Name.kt
@@ -16,12 +16,10 @@ public class Name(public val tokens: List<NameToken>) {
 
     override fun toString(): String = tokens.joinToString(separator = NAME_SEPARATOR) { it.toString() }
 
-    override fun equals(other: Any?): Boolean {
-        return when (other) {
-            is Name -> this.tokens == other.tokens
-            is NameToken -> this.length == 1 && this.tokens.first() == other
-            else -> false
-        }
+    override fun equals(other: Any?): Boolean = when (other) {
+        is Name -> this.tokens == other.tokens
+        is NameToken -> this.length == 1 && this.tokens.first() == other
+        else -> false
     }
 
     private val cachedHashCode = if (tokens.size == 1) {
@@ -115,6 +113,13 @@ public class Name(public val tokens: List<NameToken>) {
     }
 }
 
+/**
+ * Transform this [Name] to a string without escaping special characters in tokens.
+ *
+ * Parsing it back will produce a valid but different name
+ */
+public fun Name.toStringUnescaped(): String = tokens.joinToString(separator = Name.NAME_SEPARATOR) { it.toStringUnescaped() }
+
 public operator fun Name.get(i: Int): NameToken = tokens[i]
 
 /**
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/JsonMetaTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/JsonMetaTest.kt
index fba9b596..2ada3ade 100644
--- a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/JsonMetaTest.kt
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/JsonMetaTest.kt
@@ -2,7 +2,7 @@ package space.kscience.dataforge.meta
 
 import kotlinx.serialization.json.*
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
-import space.kscience.dataforge.meta.descriptors.item
+import space.kscience.dataforge.meta.descriptors.node
 import kotlin.test.Test
 import kotlin.test.assertEquals
 
@@ -32,7 +32,7 @@ class JsonMetaTest {
     }
 
     val descriptor = MetaDescriptor {
-        item("nodeArray") {
+        node("nodeArray") {
             indexKey = "index"
         }
     }
diff --git a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaDelegateTest.kt b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaDelegateTest.kt
index 7a2dbc22..4b99cc3b 100644
--- a/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaDelegateTest.kt
+++ b/dataforge-meta/src/commonTest/kotlin/space/kscience/dataforge/meta/MetaDelegateTest.kt
@@ -20,7 +20,7 @@ class MetaDelegateTest {
         var myValue by string()
         var safeValue by double(2.2)
         var enumValue by enum(TestEnum.YES)
-        var inner by spec(InnerScheme)
+        var inner by scheme(InnerScheme)
 
         companion object : SchemeSpec<TestScheme>(::TestScheme)
     }
diff --git a/dataforge-scripting/README.md b/dataforge-scripting/README.md
index af79cc8f..fbc5cb69 100644
--- a/dataforge-scripting/README.md
+++ b/dataforge-scripting/README.md
@@ -6,18 +6,16 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-scripting:0.7.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-scripting:0.8.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
 repositories {
     maven("https://repo.kotlin.link")
-    //uncomment to access development builds
-    //maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
     mavenCentral()
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-scripting:0.7.0")
+    implementation("space.kscience:dataforge-scripting:0.8.0")
 }
 ```
diff --git a/dataforge-workspace/README.md b/dataforge-workspace/README.md
index c096699f..cea37368 100644
--- a/dataforge-workspace/README.md
+++ b/dataforge-workspace/README.md
@@ -6,18 +6,16 @@
 
 ## Artifact:
 
-The Maven coordinates of this project are `space.kscience:dataforge-workspace:0.7.0`.
+The Maven coordinates of this project are `space.kscience:dataforge-workspace:0.8.0`.
 
 **Gradle Kotlin DSL:**
 ```kotlin
 repositories {
     maven("https://repo.kotlin.link")
-    //uncomment to access development builds
-    //maven("https://maven.pkg.jetbrains.space/spc/p/sci/dev")
     mavenCentral()
 }
 
 dependencies {
-    implementation("space.kscience:dataforge-workspace:0.7.0")
+    implementation("space.kscience:dataforge-workspace:0.8.0")
 }
 ```
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/EnvelopeTask.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/EnvelopeTask.kt
deleted file mode 100644
index a1588a54..00000000
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/EnvelopeTask.kt
+++ /dev/null
@@ -1,46 +0,0 @@
-package space.kscience.dataforge.workspace
-
-import space.kscience.dataforge.data.DataTree.Companion.META_ITEM_NAME_TOKEN
-import space.kscience.dataforge.io.Envelope
-import space.kscience.dataforge.io.IOReader
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.descriptors.MetaDescriptor
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.asName
-import kotlin.reflect.KType
-
-public abstract class EnvelopeTask<T : Any>(
-    override val descriptor: MetaDescriptor?,
-    private val reader: IOReader<T>,
-) : Task<T> {
-
-    public abstract suspend fun produceEnvelopes(
-        workspace: Workspace,
-        taskName: Name,
-        taskMeta: Meta,
-    ): Map<Name, Envelope>
-
-    override suspend fun execute(workspace: Workspace, taskName: Name, taskMeta: Meta): TaskResult<T> =
-        Result(workspace, taskName, taskMeta, reader, produceEnvelopes(workspace, taskName, taskMeta))
-
-    private class Result<T : Any>(
-        override val workspace: Workspace,
-        override val taskName: Name,
-        override val taskMeta: Meta,
-        val reader: IOReader<T>,
-        envelopes: Map<Name, Envelope>,
-    ) : TaskResult<T> {
-
-        private val dataMap = envelopes.mapValues {
-            workspace.wrapData(it.value.toData(reader), it.key, taskName, taskMeta)
-        }
-        override val meta: Meta get() = dataMap[META_ITEM_NAME_TOKEN.asName()]?.meta ?: Meta.EMPTY
-
-        override val dataType: KType get() = reader.type
-
-        override fun iterator(): Iterator<TaskData<T>> = dataMap.values.iterator()
-
-        override fun get(name: Name): TaskData<T>? = dataMap[name]
-    }
-}
-
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
index 329d9c5a..e636de49 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Task.kt
@@ -1,12 +1,12 @@
 package space.kscience.dataforge.workspace
 
 import kotlinx.coroutines.withContext
-import space.kscience.dataforge.data.DataSetBuilder
-import space.kscience.dataforge.data.DataTree
+import space.kscience.dataforge.data.DataSink
 import space.kscience.dataforge.data.GoalExecutionRestriction
+import space.kscience.dataforge.data.MutableDataTree
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MetaRepr
-import space.kscience.dataforge.meta.Specification
+import space.kscience.dataforge.meta.MetaSpec
 import space.kscience.dataforge.meta.descriptors.Described
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.misc.DfType
@@ -20,7 +20,7 @@ import kotlin.reflect.typeOf
  * In general no computations should be made until the result is called.
  */
 @DfType(TYPE)
-public interface Task<out T : Any> : Described {
+public interface Task<T> : Described {
 
     /**
      * A task identification string used to compare tasks and check task body for change
@@ -43,10 +43,10 @@ public interface Task<out T : Any> : Described {
 }
 
 /**
- * A [Task] with [Specification] for wrapping and unwrapping task configuration
+ * A [Task] with [MetaSpec] for wrapping and unwrapping task configuration
  */
-public interface TaskWithSpec<out T : Any, C : Any> : Task<T> {
-    public val spec: Specification<C>
+public interface TaskWithSpec<T, C : Any> : Task<T> {
+    public val spec: MetaSpec<C>
     override val descriptor: MetaDescriptor? get() = spec.descriptor
 
     public suspend fun execute(workspace: Workspace, taskName: Name, configuration: C): TaskResult<T>
@@ -55,18 +55,18 @@ public interface TaskWithSpec<out T : Any, C : Any> : Task<T> {
         execute(workspace, taskName, spec.read(taskMeta))
 }
 
-public suspend fun <T : Any, C : Any> TaskWithSpec<T, C>.execute(
-    workspace: Workspace,
-    taskName: Name,
-    block: C.() -> Unit = {},
-): TaskResult<T> = execute(workspace, taskName, spec(block))
+//public suspend fun <T : Any, C : Scheme> TaskWithSpec<T, C>.execute(
+//    workspace: Workspace,
+//    taskName: Name,
+//    block: C.() -> Unit = {},
+//): TaskResult<T> = execute(workspace, taskName, spec(block))
 
-public class TaskResultBuilder<in T : Any>(
+public class TaskResultBuilder<T>(
     public val workspace: Workspace,
     public val taskName: Name,
     public val taskMeta: Meta,
-    private val dataDrop: DataSetBuilder<T>,
-) : DataSetBuilder<T> by dataDrop
+    private val dataSink: DataSink<T>,
+) : DataSink<T> by dataSink
 
 /**
  * Create a [Task] that composes a result using [builder]. Only data from the workspace could be used.
@@ -76,7 +76,6 @@ public class TaskResultBuilder<in T : Any>(
  * @param descriptor of meta accepted by this task
  * @param builder for resulting data set
  */
-@Suppress("FunctionName")
 public fun <T : Any> Task(
     resultType: KType,
     descriptor: MetaDescriptor? = null,
@@ -89,16 +88,20 @@ public fun <T : Any> Task(
         workspace: Workspace,
         taskName: Name,
         taskMeta: Meta,
-    ): TaskResult<T> = withContext(GoalExecutionRestriction() + workspace.goalLogger) {
+    ): TaskResult<T> {
         //TODO use safe builder and check for external data on add and detects cycles
-        val dataset = DataTree<T>(resultType) {
-            TaskResultBuilder(workspace, taskName, taskMeta, this).apply { builder() }
+        val dataset = MutableDataTree<T>(resultType, workspace.context).apply {
+            TaskResultBuilder(workspace, taskName, taskMeta, this).apply {
+                withContext(GoalExecutionRestriction() + workspace.goalLogger) {
+                    builder()
+                }
+            }
         }
-        workspace.wrapResult(dataset, taskName, taskMeta)
+        return workspace.wrapResult(dataset, taskName, taskMeta)
+
     }
 }
 
-@Suppress("FunctionName")
 public inline fun <reified T : Any> Task(
     descriptor: MetaDescriptor? = null,
     noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
@@ -113,13 +116,14 @@ public inline fun <reified T : Any> Task(
  * @param specification a specification for task configuration
  * @param builder for resulting data set
  */
+
 @Suppress("FunctionName")
 public fun <T : Any, C : MetaRepr> Task(
     resultType: KType,
-    specification: Specification<C>,
+    specification: MetaSpec<C>,
     builder: suspend TaskResultBuilder<T>.(C) -> Unit,
 ): TaskWithSpec<T, C> = object : TaskWithSpec<T, C> {
-    override val spec: Specification<C> = specification
+    override val spec: MetaSpec<C> = specification
 
     override suspend fun execute(
         workspace: Workspace,
@@ -128,15 +132,14 @@ public fun <T : Any, C : MetaRepr> Task(
     ): TaskResult<T> = withContext(GoalExecutionRestriction() + workspace.goalLogger) {
         //TODO use safe builder and check for external data on add and detects cycles
         val taskMeta = configuration.toMeta()
-        val dataset = DataTree<T>(resultType) {
+        val dataset = MutableDataTree<T>(resultType, this).apply {
             TaskResultBuilder(workspace, taskName, taskMeta, this).apply { builder(configuration) }
         }
         workspace.wrapResult(dataset, taskName, taskMeta)
     }
 }
 
-@Suppress("FunctionName")
 public inline fun <reified T : Any, C : MetaRepr> Task(
-    specification: Specification<C>,
+    specification: MetaSpec<C>,
     noinline builder: suspend TaskResultBuilder<T>.(C) -> Unit,
 ): Task<T> = Task(typeOf<T>(), specification, builder)
\ No newline at end of file
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskData.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskData.kt
deleted file mode 100644
index 080ffec3..00000000
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskData.kt
+++ /dev/null
@@ -1,50 +0,0 @@
-package space.kscience.dataforge.workspace
-
-import space.kscience.dataforge.data.Data
-import space.kscience.dataforge.data.NamedData
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.names.Name
-
-/**
- * A [Workspace]-locked [NamedData], that serves as a computation model.
- */
-public interface TaskData<out T : Any> : NamedData<T> {
-    /**
-     * The [Workspace] this data belongs to
-     */
-    public val workspace: Workspace
-
-    /**
-     * The name of the stage that produced this data. [Name.EMPTY] if the workspace intrinsic data is used.
-     */
-    public val taskName: Name
-
-    /**
-     * Stage configuration used to produce this data.
-     */
-    public val taskMeta: Meta
-
-    /**
-     * Dependencies that allow to compute transitive dependencies as well.
-     */
-//    override val dependencies: Collection<TaskData<*>>
-}
-
-private class TaskDataImpl<out T : Any>(
-    override val workspace: Workspace,
-    override val data: Data<T>,
-    override val name: Name,
-    override val taskName: Name,
-    override val taskMeta: Meta,
-) : TaskData<T>, Data<T> by data {
-//    override val dependencies: Collection<TaskData<*>> = data.dependencies.map {
-//        it as? TaskData<*> ?: error("TaskData can't depend on external data")
-//    }
-}
-
-/**
- * Adopt data into this workspace
- */
-public fun <T : Any> Workspace.wrapData(data: Data<T>, name: Name, taskName: Name, taskMeta: Meta): TaskData<T> =
-    TaskDataImpl(this, data, name, taskName, taskMeta)
-
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
index d8db6417..7aa94101 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/TaskResult.kt
@@ -1,54 +1,41 @@
 package space.kscience.dataforge.workspace
 
-import space.kscience.dataforge.data.DataSet
-import space.kscience.dataforge.data.forEach
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.Job
+import kotlinx.coroutines.joinAll
+import kotlinx.coroutines.launch
+import space.kscience.dataforge.data.ObservableDataTree
+import space.kscience.dataforge.data.asSequence
+import space.kscience.dataforge.data.launch
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.names.Name
 
 /**
  * A result of a [Task]
+ * @param workspace the [Workspace] that produced the result
+ * @param taskName the name of the task that produced the result
+ * @param taskMeta the configuration of the task that produced the result
  */
-public interface TaskResult<out T : Any> : DataSet<T> {
-    /**
-     * The [Workspace] this [DataSet] belongs to
-     */
-    public val workspace: Workspace
-
-    /**
-     * The [Name] of the stage that produced this [DataSet]
-     */
-    public val taskName: Name
-
-    /**
-     * The configuration of the stage that produced this [DataSet]
-     */
-    public val taskMeta: Meta
-
-    override fun iterator(): Iterator<TaskData<T>>
-
-    override fun get(name: Name): TaskData<T>?
-}
-
-private class TaskResultImpl<out T : Any>(
-    override val workspace: Workspace,
-    override val taskName: Name,
-    override val taskMeta: Meta,
-    val dataSet: DataSet<T>,
-) : TaskResult<T>, DataSet<T> by dataSet {
-
-    override fun iterator(): Iterator<TaskData<T>> = iterator {
-        dataSet.forEach {
-            yield(workspace.wrapData(it, it.name, taskName, taskMeta))
-        }
-    }
-
-    override fun get(name: Name): TaskData<T>? = dataSet[name]?.let {
-        workspace.wrapData(it, name, taskName, taskMeta)
-    }
-}
+public data class TaskResult<T>(
+    public val content: ObservableDataTree<T>,
+    public val workspace: Workspace,
+    public val taskName: Name,
+    public val taskMeta: Meta,
+) : ObservableDataTree<T> by content
 
 /**
  * Wrap data into [TaskResult]
  */
-public fun <T : Any> Workspace.wrapResult(dataSet: DataSet<T>, taskName: Name, taskMeta: Meta): TaskResult<T> =
-    TaskResultImpl(this, taskName, taskMeta, dataSet)
\ No newline at end of file
+public fun <T> Workspace.wrapResult(data: ObservableDataTree<T>, taskName: Name, taskMeta: Meta): TaskResult<T> =
+    TaskResult(data, this, taskName, taskMeta)
+
+/**
+ * Start computation for all data elements of this node.
+ * The resulting [Job] is completed only when all of them are completed.
+ */
+public fun TaskResult<*>.launch(scope: CoroutineScope): Job {
+    val jobs = asSequence().map {
+        it.data.launch(scope)
+    }.toList()
+    return scope.launch { jobs.joinAll() }
+}
\ No newline at end of file
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
index 37b473db..f3ea322c 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/Workspace.kt
@@ -1,29 +1,32 @@
 package space.kscience.dataforge.workspace
 
+import kotlinx.coroutines.CoroutineScope
 import space.kscience.dataforge.context.ContextAware
-import space.kscience.dataforge.data.Data
-import space.kscience.dataforge.data.DataSet
-import space.kscience.dataforge.data.asSequence
+import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.MutableMeta
 import space.kscience.dataforge.misc.DfType
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.provider.Provider
+import kotlin.coroutines.CoroutineContext
 
 
-public interface DataSelector<T: Any>{
-    public suspend fun select(workspace: Workspace, meta: Meta): DataSet<T>
+public fun interface DataSelector<T> {
+    public suspend fun select(workspace: Workspace, meta: Meta): DataTree<T>
 }
 
 /**
  * An environment for pull-mode computation
  */
 @DfType(Workspace.TYPE)
-public interface Workspace : ContextAware, Provider {
+public interface Workspace : ContextAware, Provider, CoroutineScope {
+
+    override val coroutineContext: CoroutineContext get() = context.coroutineContext
+
     /**
      * The whole data node for current workspace
      */
-    public val data: TaskResult<*>
+    public val data: ObservableDataTree<*>
 
     /**
      * All targets associated with the workspace
@@ -37,7 +40,7 @@ public interface Workspace : ContextAware, Provider {
 
     override fun content(target: String): Map<Name, Any> {
         return when (target) {
-            "target", Meta.TYPE -> targets.mapKeys { Name.parse(it.key)}
+            "target", Meta.TYPE -> targets.mapKeys { Name.parse(it.key) }
             Task.TYPE -> tasks
             Data.TYPE -> data.asSequence().associateBy { it.name }
             else -> emptyMap()
@@ -49,7 +52,7 @@ public interface Workspace : ContextAware, Provider {
         return task.execute(this, taskName, taskMeta)
     }
 
-    public suspend fun produceData(taskName: Name, taskMeta: Meta, name: Name): TaskData<*>? =
+    public suspend fun produceData(taskName: Name, taskMeta: Meta, name: Name): Data<*>? =
         produce(taskName, taskMeta)[name]
 
     public companion object {
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
index 1538460f..4705c3b0 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceBuilder.kt
@@ -1,30 +1,30 @@
 package space.kscience.dataforge.workspace
 
 import kotlinx.coroutines.CoroutineScope
+import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.ContextBuilder
 import space.kscience.dataforge.context.Global
-import space.kscience.dataforge.data.*
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.MetaRepr
-import space.kscience.dataforge.meta.MutableMeta
-import space.kscience.dataforge.meta.Specification
+import space.kscience.dataforge.data.DataSink
+import space.kscience.dataforge.data.DataTree
+import space.kscience.dataforge.data.MutableDataTree
+import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
 import space.kscience.dataforge.misc.DFBuilder
-import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.asName
 import kotlin.collections.set
 import kotlin.properties.PropertyDelegateProvider
 import kotlin.properties.ReadOnlyProperty
+import kotlin.reflect.typeOf
 
-public data class TaskReference<T : Any>(public val taskName: Name, public val task: Task<T>) : DataSelector<T> {
+public data class TaskReference<T>(public val taskName: Name, public val task: Task<T>) : DataSelector<T> {
 
     @Suppress("UNCHECKED_CAST")
-    override suspend fun select(workspace: Workspace, meta: Meta): DataSet<T> {
+    override suspend fun select(workspace: Workspace, meta: Meta): DataTree<T> {
         if (workspace.tasks[taskName] == task) {
-            return workspace.produce(taskName, meta) as TaskResult<T>
+            return workspace.produce(taskName, meta) as DataTree<T>
         } else {
             error("Task $taskName does not belong to the workspace")
         }
@@ -45,6 +45,9 @@ public inline fun <reified T : Any> TaskContainer.registerTask(
     noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
 ): Unit = registerTask(Name.parse(name), Task(MetaDescriptor(descriptorBuilder), builder))
 
+/**
+ * Create and register a new task
+ */
 public inline fun <reified T : Any> TaskContainer.buildTask(
     name: String,
     descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
@@ -67,8 +70,11 @@ public inline fun <reified T : Any> TaskContainer.task(
     ReadOnlyProperty { _, _ -> TaskReference(taskName, task) }
 }
 
+/**
+ * Create a task based on [MetaSpec]
+ */
 public inline fun <reified T : Any, C : MetaRepr> TaskContainer.task(
-    specification: Specification<C>,
+    specification: MetaSpec<C>,
     noinline builder: suspend TaskResultBuilder<T>.(C) -> Unit,
 ): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> = PropertyDelegateProvider { _, property ->
     val taskName = Name.parse(property.name)
@@ -77,15 +83,34 @@ public inline fun <reified T : Any, C : MetaRepr> TaskContainer.task(
     ReadOnlyProperty { _, _ -> TaskReference(taskName, task) }
 }
 
+/**
+ * A delegate to create a custom task
+ */
 public inline fun <reified T : Any> TaskContainer.task(
     noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
     noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
 ): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> =
     task(MetaDescriptor(descriptorBuilder), builder)
 
-public class WorkspaceBuilder(private val parentContext: Context = Global) : TaskContainer {
+/**
+ * A delegate for creating a task based on [action]
+ */
+public inline fun <T : Any, reified R : Any> TaskContainer.action(
+    selector: DataSelector<T>,
+    action: Action<T, R>,
+    noinline metaTransform: MutableMeta.() -> Unit = {},
+    noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<R>>> =
+    task(MetaDescriptor(descriptorBuilder)) {
+        result(action.execute(from(selector), taskMeta.copy(metaTransform)))
+    }
+
+public class WorkspaceBuilder(
+    private val parentContext: Context = Global,
+    private val coroutineScope: CoroutineScope = parentContext,
+) : TaskContainer {
     private var context: Context? = null
-    private var data: DataSet<*>? = null
+    private val data = MutableDataTree<Any?>(typeOf<Any?>(), coroutineScope)
     private val targets: HashMap<String, Meta> = HashMap()
     private val tasks = HashMap<Name, Task<*>>()
     private var cache: WorkspaceCache? = null
@@ -100,13 +125,8 @@ public class WorkspaceBuilder(private val parentContext: Context = Global) : Tas
     /**
      * Define intrinsic data for the workspace
      */
-    public fun data(builder: DataSetBuilder<Any>.() -> Unit) {
-        data = DataTree(builder)
-    }
-
-    @DFExperimental
-    public fun data(scope: CoroutineScope, builder: DataSourceBuilder<Any>.() -> Unit) {
-        data = DataSource(scope, builder)
+    public fun data(builder: DataSink<Any?>.() -> Unit) {
+        data.apply(builder)
     }
 
     /**
@@ -130,9 +150,9 @@ public class WorkspaceBuilder(private val parentContext: Context = Global) : Tas
 
     public fun build(): Workspace {
         val postProcess: suspend (TaskResult<*>) -> TaskResult<*> = { result ->
-            cache?.evaluate(result) ?: result
+            cache?.cache(result) ?: result
         }
-        return WorkspaceImpl(context ?: parentContext, data ?: DataSet.EMPTY, targets, tasks, postProcess)
+        return WorkspaceImpl(context ?: parentContext, data, targets, tasks, postProcess)
     }
 }
 
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceCache.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceCache.kt
index 62df6744..42cb7b4f 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceCache.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceCache.kt
@@ -1,5 +1,5 @@
 package space.kscience.dataforge.workspace
 
 public interface WorkspaceCache {
-    public suspend fun <T : Any> evaluate(result: TaskResult<T>): TaskResult<T>
+    public suspend fun <T> cache(result: TaskResult<T>): TaskResult<T>
 }
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceImpl.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceImpl.kt
index dae9667a..21c5e8c2 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceImpl.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/WorkspaceImpl.kt
@@ -2,21 +2,19 @@ package space.kscience.dataforge.workspace
 
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.gather
-import space.kscience.dataforge.data.DataSet
+import space.kscience.dataforge.data.ObservableDataTree
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.names.Name
 
 
 internal class WorkspaceImpl internal constructor(
     override val context: Context,
-    data: DataSet<*>,
+    override val data: ObservableDataTree<*>,
     override val targets: Map<String, Meta>,
     tasks: Map<Name, Task<*>>,
     private val postProcess: suspend (TaskResult<*>) -> TaskResult<*>,
 ) : Workspace {
 
-    override val data: TaskResult<*> = wrapResult(data, Name.EMPTY, Meta.EMPTY)
-
     override val tasks: Map<Name, Task<*>> by lazy { context.gather<Task<*>>(Task.TYPE) + tasks }
 
     override suspend fun produce(taskName: Name, taskMeta: Meta): TaskResult<*> {
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/envelopeData.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/envelopeData.kt
index 39bb0726..d54ff510 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/envelopeData.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/envelopeData.kt
@@ -4,13 +4,14 @@ import space.kscience.dataforge.data.Data
 import space.kscience.dataforge.data.await
 import space.kscience.dataforge.io.*
 import space.kscience.dataforge.misc.DFInternal
+import kotlin.reflect.typeOf
 
 
 /**
  * Convert an [Envelope] to a data via given format. The actual parsing is done lazily.
  */
 @OptIn(DFInternal::class)
-public fun <T : Any> Envelope.toData(format: IOReader<T>): Data<T> = Data(format.type, meta) {
+public inline fun <reified T : Any> Envelope.toData(format: IOReader<T>): Data<T> = Data(typeOf<T>(), meta) {
     data?.readWith(format) ?: error("Can't convert envelope without data to Data")
 }
 
diff --git a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
index bf3d5921..1900ff23 100644
--- a/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/space/kscience/dataforge/workspace/taskBuilders.kt
@@ -1,9 +1,11 @@
 package space.kscience.dataforge.workspace
 
+import space.kscience.dataforge.actions.Action
 import space.kscience.dataforge.context.PluginFactory
-import space.kscience.dataforge.data.DataSet
+import space.kscience.dataforge.data.DataTree
+import space.kscience.dataforge.data.branch
 import space.kscience.dataforge.data.forEach
-import space.kscience.dataforge.data.map
+import space.kscience.dataforge.data.transform
 import space.kscience.dataforge.meta.*
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
@@ -23,22 +25,22 @@ public val TaskResultBuilder<*>.defaultDependencyMeta: Meta
  * @param selector a workspace data selector. Could be either task selector or initial data selector.
  * @param dependencyMeta meta used for selector. The same meta is used for caching. By default, uses [defaultDependencyMeta].
  */
-public suspend fun <T : Any> TaskResultBuilder<*>.from(
+public suspend fun <T> TaskResultBuilder<*>.from(
     selector: DataSelector<T>,
     dependencyMeta: Meta = defaultDependencyMeta,
-): DataSet<T> = selector.select(workspace, dependencyMeta)
+): DataTree<T> = selector.select(workspace, dependencyMeta)
 
-public suspend inline fun <T : Any, reified P : WorkspacePlugin> TaskResultBuilder<*>.from(
+public suspend inline fun <T, reified P : WorkspacePlugin> TaskResultBuilder<*>.from(
     plugin: P,
     dependencyMeta: Meta = defaultDependencyMeta,
     selectorBuilder: P.() -> TaskReference<T>,
-): DataSet<T> {
+): TaskResult<T> {
     require(workspace.context.plugins.contains(plugin)) { "Plugin $plugin is not loaded into $workspace" }
     val taskReference: TaskReference<T> = plugin.selectorBuilder()
     val res = workspace.produce(plugin.name + taskReference.taskName, dependencyMeta)
     //TODO add explicit check after https://youtrack.jetbrains.com/issue/KT-32956
     @Suppress("UNCHECKED_CAST")
-    return  res as TaskResult<T>
+    return res as TaskResult<T>
 }
 
 /**
@@ -48,11 +50,11 @@ public suspend inline fun <T : Any, reified P : WorkspacePlugin> TaskResultBuild
  * @param dependencyMeta meta used for selector. The same meta is used for caching. By default, uses [defaultDependencyMeta].
  * @param selectorBuilder a builder of task from the plugin.
  */
-public suspend inline fun <reified T : Any, reified P : WorkspacePlugin> TaskResultBuilder<*>.from(
+public suspend inline fun <reified T, reified P : WorkspacePlugin> TaskResultBuilder<*>.from(
     pluginFactory: PluginFactory<P>,
     dependencyMeta: Meta = defaultDependencyMeta,
     selectorBuilder: P.() -> TaskReference<T>,
-): DataSet<T> {
+): TaskResult<T> {
     val plugin = workspace.context.plugins[pluginFactory]
         ?: error("Plugin ${pluginFactory.tag} not loaded into workspace context")
     val taskReference: TaskReference<T> = plugin.selectorBuilder()
@@ -63,12 +65,10 @@ public suspend inline fun <reified T : Any, reified P : WorkspacePlugin> TaskRes
 }
 
 public val TaskResultBuilder<*>.allData: DataSelector<*>
-    get() = object : DataSelector<Any> {
-        override suspend fun select(workspace: Workspace, meta: Meta): DataSet<Any> = workspace.data
-    }
+    get() = DataSelector { workspace, _ -> workspace.data }
 
 /**
- * Perform a lazy mapping task using given [selector] and [action]. The meta of resulting
+ * Perform a lazy mapping task using given [selector] and one-to-one [action].
  * TODO move selector to receiver with multi-receivers
  *
  * @param selector a workspace data selector. Could be either task selector or initial data selector.
@@ -77,7 +77,7 @@ public val TaskResultBuilder<*>.allData: DataSelector<*>
  * @param action process individual data asynchronously.
  */
 @DFExperimental
-public suspend inline fun <T : Any, reified R : Any> TaskResultBuilder<R>.pipeFrom(
+public suspend inline fun <T, reified R> TaskResultBuilder<R>.transformEach(
     selector: DataSelector<T>,
     dependencyMeta: Meta = defaultDependencyMeta,
     dataMetaTransform: MutableMeta.(name: Name) -> Unit = {},
@@ -89,12 +89,31 @@ public suspend inline fun <T : Any, reified R : Any> TaskResultBuilder<R>.pipeFr
             dataMetaTransform(data.name)
         }
 
-        val res = data.map(workspace.context.coroutineContext, meta) {
+        val res = data.transform(meta, workspace.context.coroutineContext) {
             action(it, data.name, meta)
         }
 
-        data(data.name, res)
+        put(data.name, res)
     }
 }
 
+/**
+ * Set given [dataSet] as a task result.
+ */
+public fun <T> TaskResultBuilder<T>.result(dataSet: DataTree<T>) {
+    branch(dataSet)
+}
+
+/**
+ * Use provided [action] to fill the result
+ */
+@DFExperimental
+public suspend inline fun <T, reified R> TaskResultBuilder<R>.actionFrom(
+    selector: DataSelector<T>,
+    action: Action<T, R>,
+    dependencyMeta: Meta = defaultDependencyMeta,
+) {
+    branch(action.execute(from(selector, dependencyMeta), dependencyMeta))
+}
+
 
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
index 279e61a2..a43657f8 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCache.kt
@@ -1,5 +1,6 @@
 package space.kscience.dataforge.workspace
 
+import kotlinx.coroutines.flow.map
 import kotlinx.io.*
 import kotlinx.serialization.ExperimentalSerializationApi
 import kotlinx.serialization.KSerializer
@@ -9,12 +10,10 @@ import kotlinx.serialization.serializer
 import space.kscience.dataforge.context.error
 import space.kscience.dataforge.context.logger
 import space.kscience.dataforge.context.request
-import space.kscience.dataforge.data.Data
-import space.kscience.dataforge.data.await
+import space.kscience.dataforge.data.*
 import space.kscience.dataforge.io.*
 import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.misc.DFInternal
-import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.withIndex
 import java.nio.file.Path
 import kotlin.io.path.deleteIfExists
@@ -22,7 +21,7 @@ import kotlin.io.path.div
 import kotlin.io.path.exists
 import kotlin.reflect.KType
 
-public class JsonIOFormat<T : Any>(override val type: KType) : IOFormat<T> {
+public class JsonIOFormat<T>(private val type: KType) : IOFormat<T> {
 
     @Suppress("UNCHECKED_CAST")
     private val serializer: KSerializer<T> = serializer(type) as KSerializer<T>
@@ -35,7 +34,7 @@ public class JsonIOFormat<T : Any>(override val type: KType) : IOFormat<T> {
 }
 
 @OptIn(ExperimentalSerializationApi::class)
-public class ProtobufIOFormat<T : Any>(override val type: KType) : IOFormat<T> {
+public class ProtobufIOFormat<T>(private val type: KType) : IOFormat<T> {
 
     @Suppress("UNCHECKED_CAST")
     private val serializer: KSerializer<T> = serializer(type) as KSerializer<T>
@@ -53,14 +52,14 @@ public class FileWorkspaceCache(public val cacheDirectory: Path) : WorkspaceCach
     //    private fun <T : Any> TaskData<*>.checkType(taskType: KType): TaskData<T> = this as TaskData<T>
 
     @OptIn(DFExperimental::class, DFInternal::class)
-    override suspend fun <T : Any> evaluate(result: TaskResult<T>): TaskResult<T> {
+    override suspend fun <T> cache(result: TaskResult<T>): TaskResult<T> {
         val io = result.workspace.context.request(IOPlugin)
 
         val format: IOFormat<T> = io.resolveIOFormat(result.dataType, result.taskMeta)
             ?: ProtobufIOFormat(result.dataType)
             ?: error("Can't resolve IOFormat for ${result.dataType}")
 
-        fun evaluateDatum(data: TaskData<T>): TaskData<T> {
+        fun cacheOne(data: NamedData<T>): NamedData<T> {
 
             val path = cacheDirectory /
                     result.taskName.withIndex(result.taskMeta.hashCode().toString(16)).toString() /
@@ -92,15 +91,14 @@ public class FileWorkspaceCache(public val cacheDirectory: Path) : WorkspaceCach
                 }
 
             }
-            return data.workspace.wrapData(datum, data.name, data.taskName, data.taskMeta)
+            return datum.named(data.name)
         }
 
-        return object : TaskResult<T> by result {
-            override fun iterator(): Iterator<TaskData<T>> =
-                result.iterator().asSequence().map { evaluateDatum(it) }.iterator()
 
-            override fun get(name: Name): TaskData<T>? = result[name]?.let { evaluateDatum(it) }
-        }
+        val cachedTree = result.asSequence().map { cacheOne(it) }
+            .toObservableTree(result.dataType, result.workspace, result.updates().map { cacheOne(it) })
+
+        return result.workspace.wrapResult(cachedTree, result.taskName, result.taskMeta)
     }
 }
 
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
index bb8ea9a4..a3792231 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/InMemoryWorkspaceCache.kt
@@ -1,39 +1,39 @@
 package space.kscience.dataforge.workspace
 
+import kotlinx.coroutines.flow.map
+import space.kscience.dataforge.data.*
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.names.Name
 import kotlin.reflect.KType
 import kotlin.reflect.full.isSubtypeOf
 
-private typealias TaskResultId = Pair<Name, Meta>
+private data class TaskResultId(val name: Name, val meta: Meta)
 
 
 public class InMemoryWorkspaceCache : WorkspaceCache {
 
-    // never do that at home!
-    private val cache = HashMap<TaskResultId, HashMap<Name, TaskData<*>>>()
+    private val cache = HashMap<TaskResultId, HashMap<Name, Data<*>>>()
 
     @Suppress("UNCHECKED_CAST")
-    private fun <T : Any> TaskData<*>.checkType(taskType: KType): TaskData<T> =
-        if (type.isSubtypeOf(taskType)) this as TaskData<T>
+    private fun <T> Data<*>.checkType(taskType: KType): Data<T> =
+        if (type.isSubtypeOf(taskType)) this as Data<T>
         else error("Cached data type mismatch: expected $taskType but got $type")
 
-    override suspend fun <T : Any> evaluate(result: TaskResult<T>): TaskResult<T> {
-        for (d: TaskData<T> in result) {
-            cache.getOrPut(result.taskName to result.taskMeta) { HashMap() }.getOrPut(d.name) { d }
-        }
-
-        return object : TaskResult<T> by result {
-            override fun iterator(): Iterator<TaskData<T>> = (cache[result.taskName to result.taskMeta]
-                ?.values?.map { it.checkType<T>(result.dataType) }
-                ?: emptyList()).iterator()
-
-            override fun get(name: Name): TaskData<T>? {
-                val cached: TaskData<*> = cache[result.taskName to result.taskMeta]?.get(name) ?: return null
-                //TODO check types
-                return cached.checkType(result.dataType)
+    override suspend fun <T> cache(result: TaskResult<T>): TaskResult<T> {
+        fun cacheOne(data: NamedData<T>): NamedData<T> {
+            val cachedData = cache.getOrPut(TaskResultId(result.taskName, result.taskMeta)) {
+                HashMap()
+            }.getOrPut(data.name) {
+                data.data
             }
+            return cachedData.checkType<T>(result.dataType).named(data.name)
         }
+
+
+        val cachedTree = result.asSequence().map { cacheOne(it) }
+            .toObservableTree(result.dataType, result.workspace, result.updates().map { cacheOne(it) })
+
+        return result.workspace.wrapResult(cachedTree, result.taskName, result.taskMeta)
     }
 }
 
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt
deleted file mode 100644
index ce1b5152..00000000
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/fileData.kt
+++ /dev/null
@@ -1,318 +0,0 @@
-package space.kscience.dataforge.workspace
-
-import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.isActive
-import kotlinx.coroutines.launch
-import kotlinx.coroutines.withContext
-import space.kscience.dataforge.context.error
-import space.kscience.dataforge.context.logger
-import space.kscience.dataforge.context.warn
-import space.kscience.dataforge.data.*
-import space.kscience.dataforge.io.*
-import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.copy
-import space.kscience.dataforge.meta.string
-import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.misc.DFInternal
-import space.kscience.dataforge.names.Name
-import space.kscience.dataforge.names.NameToken
-import space.kscience.dataforge.names.asName
-import space.kscience.dataforge.names.plus
-import space.kscience.dataforge.workspace.FileData.Companion.DEFAULT_IGNORE_EXTENSIONS
-import java.nio.file.Files
-import java.nio.file.Path
-import java.nio.file.StandardWatchEventKinds
-import java.nio.file.WatchEvent
-import java.nio.file.attribute.BasicFileAttributes
-import java.nio.file.spi.FileSystemProvider
-import java.time.Instant
-import kotlin.io.path.*
-import kotlin.reflect.KType
-import kotlin.reflect.typeOf
-
-
-//public typealias FileFormatResolver<T> = (Path, Meta) -> IOFormat<T>
-
-public typealias FileFormatResolver<T> = (path: Path, meta: Meta) -> IOReader<T>?
-
-/**
- * A data based on a filesystem [Path]
- */
-public class FileData<T> internal constructor(private val data: Data<T>, public val path: Path) : Data<T> by data {
-
-    //    public val path: String? get() = meta[META_FILE_PATH_KEY].string
-//    public val extension: String? get() = meta[META_FILE_EXTENSION_KEY].string
-//
-    public val createdTime: Instant? get() = meta[FILE_CREATE_TIME_KEY].string?.let { Instant.parse(it) }
-    public val updatedTime: Instant? get() = meta[FILE_UPDATE_TIME_KEY].string?.let { Instant.parse(it) }
-
-    public companion object {
-        public val FILE_KEY: Name = "file".asName()
-        public val FILE_PATH_KEY: Name = FILE_KEY + "path"
-        public val FILE_EXTENSION_KEY: Name = FILE_KEY + "extension"
-        public val FILE_CREATE_TIME_KEY: Name = FILE_KEY + "created"
-        public val FILE_UPDATE_TIME_KEY: Name = FILE_KEY + "updated"
-        public const val DF_FILE_EXTENSION: String = "df"
-        public val DEFAULT_IGNORE_EXTENSIONS: Set<String> = setOf(DF_FILE_EXTENSION)
-    }
-}
-
-
-/**
- * Read data with supported envelope format and binary format. If envelope format is null, then read binary directly from file.
- * The operation is blocking since it must read meta header. The reading of envelope body is lazy
- */
-@OptIn(DFInternal::class)
-@DFExperimental
-public fun <T : Any> IOPlugin.readDataFile(
-    path: Path,
-    formatResolver: FileFormatResolver<T>,
-): FileData<T>? {
-    val envelope = readEnvelopeFile(path, true)
-    val format = formatResolver(path, envelope.meta) ?: return null
-    val updatedMeta = envelope.meta.copy {
-        FileData.FILE_PATH_KEY put path.toString()
-        FileData.FILE_EXTENSION_KEY put path.extension
-
-        val attributes = path.readAttributes<BasicFileAttributes>()
-        FileData.FILE_UPDATE_TIME_KEY put attributes.lastModifiedTime().toInstant().toString()
-        FileData.FILE_CREATE_TIME_KEY put attributes.creationTime().toInstant().toString()
-    }
-    return FileData(
-        Data(format.type, updatedMeta) {
-            (envelope.data ?: Binary.EMPTY).readWith(format)
-        },
-        path
-    )
-}
-
-
-context(IOPlugin) @DFExperimental
-public fun <T : Any> DataSetBuilder<T>.directory(
-    path: Path,
-    ignoreExtensions: Set<String>,
-    formatResolver: FileFormatResolver<T>,
-) {
-    Files.list(path).forEach { childPath ->
-        val fileName = childPath.fileName.toString()
-        if (fileName.startsWith(IOPlugin.META_FILE_NAME)) {
-            meta(readMetaFile(childPath))
-        } else if (!fileName.startsWith("@")) {
-            file(childPath, ignoreExtensions, formatResolver)
-        }
-    }
-}
-
-/**
- * Read the directory as a data node. If [path] is a zip archive, read it as directory
- */
-@DFExperimental
-@DFInternal
-public fun <T : Any> IOPlugin.readDataDirectory(
-    type: KType,
-    path: Path,
-    ignoreExtensions: Set<String> = DEFAULT_IGNORE_EXTENSIONS,
-    formatResolver: FileFormatResolver<T>,
-): DataTree<T> {
-    //read zipped data node
-    if (path.fileName != null && path.fileName.toString().endsWith(".zip")) {
-        //Using explicit Zip file system to avoid bizarre compatibility bugs
-        val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
-            ?: error("Zip file system provider not found")
-        val fs = fsProvider.newFileSystem(path, mapOf("create" to "true"))
-
-        return readDataDirectory(type, fs.rootDirectories.first(), ignoreExtensions, formatResolver)
-    }
-    if (!Files.isDirectory(path)) error("Provided path $path is not a directory")
-    return DataTree(type) {
-        meta {
-            FileData.FILE_PATH_KEY put path.toString()
-        }
-        directory(path, ignoreExtensions, formatResolver)
-    }
-}
-
-@OptIn(DFInternal::class)
-@DFExperimental
-public inline fun <reified T : Any> IOPlugin.readDataDirectory(
-    path: Path,
-    ignoreExtensions: Set<String> = DEFAULT_IGNORE_EXTENSIONS,
-    noinline formatResolver: FileFormatResolver<T>,
-): DataTree<T> = readDataDirectory(typeOf<T>(), path, ignoreExtensions, formatResolver)
-
-/**
- * Read a raw binary data tree from the directory. All files are read as-is (save for meta files).
- */
-@DFExperimental
-public fun IOPlugin.readRawDirectory(
-    path: Path,
-    ignoreExtensions: Set<String> = emptySet(),
-): DataTree<Binary> = readDataDirectory(path, ignoreExtensions) { _, _ -> IOReader.binary }
-
-
-private fun Path.toName() = Name(map { NameToken.parse(it.nameWithoutExtension) })
-
-@DFInternal
-@DFExperimental
-public fun <T : Any> IOPlugin.monitorDataDirectory(
-    type: KType,
-    path: Path,
-    ignoreExtensions: Set<String> = DEFAULT_IGNORE_EXTENSIONS,
-    formatResolver: FileFormatResolver<T>,
-): DataSource<T> {
-    if (path.fileName.toString().endsWith(".zip")) error("Monitoring not supported for ZipFS")
-    if (!Files.isDirectory(path)) error("Provided path $path is not a directory")
-    return DataSource(type, context) {
-        directory(path, ignoreExtensions, formatResolver)
-        launch(Dispatchers.IO) {
-            val watchService = path.fileSystem.newWatchService()
-
-            path.register(
-                watchService,
-                StandardWatchEventKinds.ENTRY_DELETE,
-                StandardWatchEventKinds.ENTRY_MODIFY,
-                StandardWatchEventKinds.ENTRY_CREATE
-            )
-
-            do {
-                val key = watchService.take()
-                if (key != null) {
-                    for (event: WatchEvent<*> in key.pollEvents()) {
-                        val eventPath = event.context() as Path
-                        if (event.kind() == StandardWatchEventKinds.ENTRY_DELETE) {
-                            remove(eventPath.toName())
-                        } else {
-                            val fileName = eventPath.fileName.toString()
-                            if (fileName.startsWith(IOPlugin.META_FILE_NAME)) {
-                                meta(readMetaFile(eventPath))
-                            } else if (!fileName.startsWith("@")) {
-                                file(eventPath, ignoreExtensions, formatResolver)
-                            }
-                        }
-                    }
-                    key.reset()
-                }
-            } while (isActive && key != null)
-        }
-    }
-}
-
-
-/**
- * Start monitoring given directory ([path]) as a [DataSource].
- */
-@OptIn(DFInternal::class)
-@DFExperimental
-public inline fun <reified T : Any> IOPlugin.monitorDataDirectory(
-    path: Path,
-    ignoreExtensions: Set<String> = DEFAULT_IGNORE_EXTENSIONS,
-    noinline formatResolver: FileFormatResolver<T>,
-): DataSource<T> = monitorDataDirectory(typeOf<T>(), path, ignoreExtensions, formatResolver)
-
-/**
- * Read and monitor raw binary data tree from the directory. All files are read as-is (save for meta files).
- */
-@DFExperimental
-public fun IOPlugin.monitorRawDirectory(
-    path: Path,
-    ignoreExtensions: Set<String> = DEFAULT_IGNORE_EXTENSIONS,
-): DataSource<Binary> = monitorDataDirectory(path, ignoreExtensions) { _, _ -> IOReader.binary }
-
-/**
- * Write data tree to existing directory or create a new one using default [java.nio.file.FileSystem] provider
- */
-@DFExperimental
-public suspend fun <T : Any> IOPlugin.writeDataDirectory(
-    path: Path,
-    tree: DataTree<T>,
-    format: IOWriter<T>,
-    envelopeFormat: EnvelopeFormat? = null,
-) {
-    withContext(Dispatchers.IO) {
-        if (!Files.exists(path)) {
-            Files.createDirectories(path)
-        } else if (!Files.isDirectory(path)) {
-            error("Can't write a node into file")
-        }
-        tree.items.forEach { (token, item) ->
-            val childPath = path.resolve(token.toString())
-            when (item) {
-                is DataTreeItem.Node -> {
-                    writeDataDirectory(childPath, item.tree, format, envelopeFormat)
-                }
-
-                is DataTreeItem.Leaf -> {
-                    val envelope = item.data.toEnvelope(format)
-                    if (envelopeFormat != null) {
-                        writeEnvelopeFile(childPath, envelope, envelopeFormat)
-                    } else {
-                        writeEnvelopeDirectory(childPath, envelope)
-                    }
-                }
-            }
-        }
-        val treeMeta = tree.meta
-        writeMetaFile(path, treeMeta)
-    }
-}
-
-/**
- * Reads the specified resources and returns a [DataTree] containing the data.
- *
- * @param resources The names of the resources to read.
- * @param classLoader The class loader to use for loading the resources. By default, it uses the current thread's context class loader.
- * @return A DataTree containing the data read from the resources.
- */
-@DFExperimental
-private fun IOPlugin.readResources(
-    vararg resources: String,
-    classLoader: ClassLoader = Thread.currentThread().contextClassLoader,
-): DataTree<Binary> {
-//    require(resource.isNotBlank()) {"Can't mount root resource tree as data root"}
-    return DataTree {
-        resources.forEach { resource ->
-            val path = classLoader.getResource(resource)?.toURI()?.toPath() ?: error(
-                "Resource with name $resource is not resolved"
-            )
-            node(resource, readRawDirectory(path))
-        }
-    }
-}
-
-/**
- * Add file/directory-based data tree item
- *
- * @param ignoreExtensions a list of file extensions for which extension should be cut from the resulting item name
- */
-context(IOPlugin)
-@OptIn(DFInternal::class)
-@DFExperimental
-public fun <T : Any> DataSetBuilder<T>.file(
-    path: Path,
-    ignoreExtensions: Set<String> = DEFAULT_IGNORE_EXTENSIONS,
-    formatResolver: FileFormatResolver<out T>,
-) {
-
-    fun defaultPath() = if (path.extension in ignoreExtensions) path.nameWithoutExtension else path.name
-
-    try {
-        //If path is a single file or a special directory, read it as single datum
-        if (!Files.isDirectory(path) || Files.list(path).allMatch { it.fileName.toString().startsWith("@") }) {
-            val data = readDataFile(path, formatResolver)
-            if (data == null) {
-                logger.warn { "File format is not resolved for $path. Skipping." }
-                return
-            }
-            val name: String = data.meta[Envelope.ENVELOPE_NAME_KEY].string ?: defaultPath()
-            data(name.asName(), data)
-        } else {
-            //otherwise, read as directory
-            val data: DataTree<T> = readDataDirectory(dataType, path, ignoreExtensions, formatResolver)
-            val name = data.meta[Envelope.ENVELOPE_NAME_KEY].string ?: defaultPath()
-            node(name.asName(), data)
-        }
-    } catch (ex: Exception) {
-        logger.error { "Failed to read file or directory at $path: ${ex.message}" }
-    }
-}
-
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt
new file mode 100644
index 00000000..1815c3e4
--- /dev/null
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/readFileData.kt
@@ -0,0 +1,186 @@
+package space.kscience.dataforge.workspace
+
+import kotlinx.coroutines.*
+import space.kscience.dataforge.data.Data
+import space.kscience.dataforge.data.DataSink
+import space.kscience.dataforge.data.StaticData
+import space.kscience.dataforge.io.*
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.copy
+import space.kscience.dataforge.misc.DFExperimental
+import space.kscience.dataforge.misc.DFInternal
+import space.kscience.dataforge.names.Name
+import space.kscience.dataforge.names.NameToken
+import space.kscience.dataforge.names.asName
+import space.kscience.dataforge.names.plus
+import java.nio.file.Files
+import java.nio.file.Path
+import java.nio.file.StandardWatchEventKinds
+import java.nio.file.WatchEvent
+import java.nio.file.attribute.BasicFileAttributes
+import java.nio.file.spi.FileSystemProvider
+import kotlin.io.path.*
+import kotlin.reflect.typeOf
+
+
+public object FileData {
+    public val FILE_KEY: Name = "file".asName()
+    public val FILE_PATH_KEY: Name = FILE_KEY + "path"
+    public val FILE_EXTENSION_KEY: Name = FILE_KEY + "extension"
+    public val FILE_CREATE_TIME_KEY: Name = FILE_KEY + "created"
+    public val FILE_UPDATE_TIME_KEY: Name = FILE_KEY + "updated"
+    public const val DF_FILE_EXTENSION: String = "df"
+    public val DEFAULT_IGNORE_EXTENSIONS: Set<String> = setOf(DF_FILE_EXTENSION)
+
+}
+
+
+/**
+ * Read data with supported envelope format and binary format. If the envelope format is null, then read binary directly from file.
+ * The operation is blocking since it must read the meta header. The reading of envelope body is lazy
+ */
+@OptIn(DFExperimental::class)
+public fun IOPlugin.readFileData(
+    path: Path,
+): Data<Binary> {
+    val envelope = readEnvelopeFile(path, true)
+    val updatedMeta = envelope.meta.copy {
+        FileData.FILE_PATH_KEY put path.toString()
+        FileData.FILE_EXTENSION_KEY put path.extension
+
+        val attributes = path.readAttributes<BasicFileAttributes>()
+        FileData.FILE_UPDATE_TIME_KEY put attributes.lastModifiedTime().toInstant().toString()
+        FileData.FILE_CREATE_TIME_KEY put attributes.creationTime().toInstant().toString()
+    }
+    return StaticData(
+        typeOf<Binary>(),
+        envelope.data ?: Binary.EMPTY,
+        updatedMeta
+    )
+}
+
+public fun DataSink<Binary>.file(io: IOPlugin, name: Name, path: Path) {
+    if (!path.isRegularFile()) error("Only regular files could be handled by this function")
+    put(name, io.readFileData(path))
+}
+
+public fun DataSink<Binary>.directory(
+    io: IOPlugin,
+    name: Name,
+    path: Path,
+) {
+    if (!path.isDirectory()) error("Only directories could be handled by this function")
+    //process root data
+
+    var dataBinary: Binary? = null
+    var meta: Meta? = null
+    path.listDirectoryEntries().forEach { childPath -> // eager listing closes the directory handle (Files.list leaked it)
+        val fileName = childPath.fileName.toString()
+        if (fileName == IOPlugin.DATA_FILE_NAME) {
+            dataBinary = childPath.asBinary()
+        } else if (fileName.startsWith(IOPlugin.META_FILE_NAME)) {
+            meta = io.readMetaFileOrNull(childPath)
+        } else if (!fileName.startsWith("@")) {
+            val token = if (childPath.isRegularFile() && childPath.extension in FileData.DEFAULT_IGNORE_EXTENSIONS) {
+                NameToken(childPath.nameWithoutExtension)
+            } else {
+                NameToken(childPath.name)
+            }
+
+            files(io, name + token, childPath)
+        }
+    }
+
+    //set data if it is relevant
+    if (dataBinary != null || meta != null) {
+        put(
+            name,
+            StaticData(
+                typeOf<Binary>(),
+                dataBinary ?: Binary.EMPTY,
+                meta ?: Meta.EMPTY
+            )
+        )
+    }
+}
+
+public fun DataSink<Binary>.files(
+    io: IOPlugin,
+    name: Name,
+    path: Path,
+) {
+    if (path.isRegularFile() && path.extension == "zip") {
+        //Using explicit Zip file system to avoid bizarre compatibility bugs
+        val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
+            ?: error("Zip file system provider not found")
+        val fs = fsProvider.newFileSystem(path, emptyMap<String, Any>())
+
+        files(io, name, fs.rootDirectories.first())
+        // the "else" below is required: a zip is a regular file, so falling through re-read the archive as one raw binary datum, overwriting the tree
+    } else if (path.isRegularFile()) {
+        file(io, name, path)
+    } else {
+        directory(io, name, path)
+    }
+}
+
+
+private fun Path.toName() = Name(map { NameToken.parse(it.nameWithoutExtension) })
+
+@DFInternal
+@DFExperimental
+public fun DataSink<Binary>.monitorFiles(
+    io: IOPlugin,
+    name: Name,
+    path: Path,
+    scope: CoroutineScope = io.context,
+): Job {
+    files(io, name, path)
+    return scope.launch(Dispatchers.IO) {
+        val watchService = path.fileSystem.newWatchService()
+
+        path.register(
+            watchService,
+            StandardWatchEventKinds.ENTRY_DELETE,
+            StandardWatchEventKinds.ENTRY_MODIFY,
+            StandardWatchEventKinds.ENTRY_CREATE
+        )
+
+        do {
+            val key = watchService.take()
+            if (key != null) {
+                for (event: WatchEvent<*> in key.pollEvents()) {
+                    val eventPath = event.context() as Path // relative to the watched directory per WatchService contract
+                    if (event.kind() == StandardWatchEventKinds.ENTRY_DELETE) {
+                        put(name + eventPath.toName(), null) // prefix with root name so the correct published item is removed
+                    } else {
+                        val fileName = eventPath.fileName.toString()
+                        if (!fileName.startsWith("@")) {
+                            files(io, name + eventPath.toName(), path.resolve(eventPath)) // resolve relative event path; child token avoids overwriting the root
+                        }
+                    }
+                }
+                key.reset()
+            }
+        } while (isActive && key != null)
+    }
+
+}
+
+/**
+ * @param resources The names of the resources to read.
+ * @param classLoader The class loader to use for loading the resources. By default, it uses the current thread's context class loader.
+ */
+@DFExperimental
+public fun DataSink<Binary>.resources(
+    io: IOPlugin,
+    vararg resources: String,
+    classLoader: ClassLoader = Thread.currentThread().contextClassLoader,
+) {
+    resources.forEach { resource ->
+        val path = classLoader.getResource(resource)?.toURI()?.toPath() ?: error(
+            "Resource with name $resource is not resolved"
+        )
+        files(io, resource.asName(), path)
+    }
+}
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
index ea6ffb85..61caf7e0 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/workspaceJvm.kt
@@ -1,6 +1,6 @@
 package space.kscience.dataforge.workspace
 
-import space.kscience.dataforge.data.DataSet
+import space.kscience.dataforge.data.DataTree
 import space.kscience.dataforge.data.filterByType
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.misc.DFExperimental
@@ -16,14 +16,13 @@ import space.kscience.dataforge.names.matches
  */
 @OptIn(DFExperimental::class)
 public inline fun <reified T : Any> TaskResultBuilder<*>.dataByType(namePattern: Name? = null): DataSelector<T> =
-    object : DataSelector<T> {
-        override suspend fun select(workspace: Workspace, meta: Meta): DataSet<T> =
-            workspace.data.filterByType { name, _ ->
-                namePattern == null || name.matches(namePattern)
-            }
+    DataSelector<T> { workspace, _ ->
+        workspace.data.filterByType { name, _, _ ->
+            namePattern == null || name.matches(namePattern)
+        }
     }
 
 public suspend inline fun <reified T : Any> TaskResultBuilder<*>.fromTask(
     task: Name,
     taskMeta: Meta = Meta.EMPTY,
-): DataSet<T> = workspace.produce(task, taskMeta).filterByType()
\ No newline at end of file
+): DataTree<T> = workspace.produce(task, taskMeta).filterByType()
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/writeFileData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/writeFileData.kt
new file mode 100644
index 00000000..379a79dd
--- /dev/null
+++ b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/writeFileData.kt
@@ -0,0 +1,72 @@
+package space.kscience.dataforge.workspace
+
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
+import space.kscience.dataforge.data.*
+import space.kscience.dataforge.io.*
+import space.kscience.dataforge.misc.DFExperimental
+import space.kscience.dataforge.names.Name
+import java.nio.file.Files
+import java.nio.file.Path
+import java.nio.file.spi.FileSystemProvider
+import kotlin.io.path.Path
+import kotlin.io.path.createDirectories
+import kotlin.io.path.exists
+import kotlin.io.path.extension
+
+
+/**
+ * Write the data tree to existing directory or create a new one using default [java.nio.file.FileSystem] provider
+ *
+ * @param nameToPath a [Name] to [Path] converter used to create
+ */
+@DFExperimental
+public suspend fun <T : Any> IOPlugin.writeDataDirectory(
+    path: Path,
+    dataSet: DataTree<T>,
+    format: IOWriter<T>,
+    envelopeFormat: EnvelopeFormat? = null,
+): Unit = withContext(Dispatchers.IO) {
+    if (!Files.exists(path)) {
+        Files.createDirectories(path)
+    } else if (!Files.isDirectory(path)) {
+        error("Can't write a node into file")
+    }
+    dataSet.forEach { (name, data) ->
+        val childPath = path.resolve(name.tokens.joinToString("/") { token -> token.toStringUnescaped() })
+        childPath.parent.createDirectories()
+        val envelope = data.toEnvelope(format)
+        if (envelopeFormat != null) {
+            writeEnvelopeFile(childPath, envelope, envelopeFormat)
+        } else {
+            writeEnvelopeDirectory(childPath, envelope)
+        }
+    }
+    dataSet.meta?.let { writeMetaFile(path, it) }
+
+}
+
+/**
+ * Write this [DataTree] as a zip archive
+ */
+@DFExperimental
+public suspend fun <T : Any> IOPlugin.writeZip(
+    path: Path,
+    dataSet: DataTree<T>,
+    format: IOWriter<T>,
+    envelopeFormat: EnvelopeFormat? = null,
+): Unit = withContext(Dispatchers.IO) {
+    val actualFile = if (path.extension == "zip") {
+        path
+    } else {
+        path.resolveSibling(path.fileName.toString() + ".zip")
+    }
+    if (actualFile.exists()) error("Can't override existing zip data file $actualFile") // guard the file actually written, not the pre-suffix path
+    val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
+        ?: error("Zip file system provider not found")
+    // use the provider explicitly: the "create" env option is a zipfs-provider feature, not honored by all FileSystems.newFileSystem overloads
+    val fs = fsProvider.newFileSystem(actualFile, mapOf("create" to true))
+    fs.use {
+        writeDataDirectory(fs.rootDirectories.first(), dataSet, format, envelopeFormat)
+    }
+}
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt b/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt
deleted file mode 100644
index 466552c2..00000000
--- a/dataforge-workspace/src/jvmMain/kotlin/space/kscience/dataforge/workspace/zipData.kt
+++ /dev/null
@@ -1,73 +0,0 @@
-package space.kscience.dataforge.workspace
-
-import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.withContext
-import space.kscience.dataforge.data.DataTree
-import space.kscience.dataforge.data.DataTreeItem
-import space.kscience.dataforge.io.*
-import space.kscience.dataforge.misc.DFExperimental
-import java.nio.file.Files
-import java.nio.file.Path
-import java.nio.file.StandardOpenOption
-import java.util.zip.ZipEntry
-import java.util.zip.ZipOutputStream
-
-
-private suspend fun <T : Any> ZipOutputStream.writeNode(
-    name: String,
-    treeItem: DataTreeItem<T>,
-    dataFormat: IOFormat<T>,
-    envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat,
-): Unit = withContext(Dispatchers.IO) {
-    when (treeItem) {
-        is DataTreeItem.Leaf -> {
-            //TODO add directory-based envelope writer
-            val envelope = treeItem.data.toEnvelope(dataFormat)
-            val entry = ZipEntry(name)
-            putNextEntry(entry)
-
-            //TODO remove additional copy
-            val bytes = ByteArray {
-                writeWith(envelopeFormat, envelope)
-            }
-            write(bytes)
-
-        }
-
-        is DataTreeItem.Node -> {
-            val entry = ZipEntry("$name/")
-            putNextEntry(entry)
-            closeEntry()
-            treeItem.tree.items.forEach { (token, item) ->
-                val childName = "$name/$token"
-                writeNode(childName, item, dataFormat, envelopeFormat)
-            }
-        }
-    }
-}
-
-/**
- * Write this [DataTree] as a zip archive
- */
-@DFExperimental
-public suspend fun <T : Any> DataTree<T>.writeZip(
-    path: Path,
-    format: IOFormat<T>,
-    envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat,
-): Unit = withContext(Dispatchers.IO) {
-    val actualFile = if (path.toString().endsWith(".zip")) {
-        path
-    } else {
-        path.resolveSibling(path.fileName.toString() + ".zip")
-    }
-    val fos = Files.newOutputStream(
-        actualFile,
-        StandardOpenOption.WRITE,
-        StandardOpenOption.CREATE,
-        StandardOpenOption.TRUNCATE_EXISTING
-    )
-    val zos = ZipOutputStream(fos)
-    zos.use {
-        it.writeNode("", DataTreeItem.Node(this@writeZip), format, envelopeFormat)
-    }
-}
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
index 4e1923bc..e5c2c230 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/CachingWorkspaceTest.kt
@@ -1,18 +1,16 @@
 package space.kscience.dataforge.workspace
 
-import kotlinx.coroutines.ExperimentalCoroutinesApi
 import kotlinx.coroutines.coroutineScope
 import kotlinx.coroutines.test.runTest
 import org.junit.jupiter.api.Test
-import space.kscience.dataforge.data.startAll
-import space.kscience.dataforge.data.static
+import space.kscience.dataforge.data.wrap
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.boolean
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.misc.DFExperimental
 import kotlin.test.assertEquals
 
-@OptIn(ExperimentalCoroutinesApi::class, DFExperimental::class)
+@OptIn(DFExperimental::class)
 internal class CachingWorkspaceTest {
 
     @Test
@@ -24,24 +22,23 @@ internal class CachingWorkspaceTest {
             data {
                 //statically initialize data
                 repeat(5) {
-                    static("myData[$it]", it)
+                    wrap("myData[$it]", it)
                 }
             }
 
             inMemoryCache()
 
             val doFirst by task<Any> {
-                pipeFrom(allData) { _, name, _ ->
+                transformEach(allData) { _, name, _ ->
                     firstCounter++
                     println("Done first on $name with flag=${taskMeta["flag"].boolean}")
                 }
             }
 
-            @Suppress("UNUSED_VARIABLE")
             val doSecond by task<Any> {
-                pipeFrom(
+                transformEach(
                     doFirst,
-                    dependencyMeta = if(taskMeta["flag"].boolean == true) taskMeta else Meta.EMPTY
+                    dependencyMeta = if (taskMeta["flag"].boolean == true) taskMeta else Meta.EMPTY
                 ) { _, name, _ ->
                     secondCounter++
                     println("Done second on $name with flag=${taskMeta["flag"].boolean ?: false}")
@@ -53,13 +50,15 @@ internal class CachingWorkspaceTest {
         val secondA = workspace.produce("doSecond")
         val secondB = workspace.produce("doSecond", Meta { "flag" put true })
         val secondC = workspace.produce("doSecond")
+        //use coroutineScope to wait for the result
         coroutineScope {
-            first.startAll(this)
-            secondA.startAll(this)
-            secondB.startAll(this)
+            first.launch(this)
+            secondA.launch(this)
+            secondB.launch(this)
             //repeat to check caching
-            secondC.startAll(this)
+            secondC.launch(this)
         }
+
         assertEquals(10, firstCounter)
         assertEquals(10, secondCounter)
     }
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
index 18086902..d611b1c8 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/DataPropagationTest.kt
@@ -20,13 +20,13 @@ class DataPropagationTestPlugin : WorkspacePlugin() {
         val result: Data<Int> = selectedData.foldToData(0) { result, data ->
             result + data.value
         }
-        data("result", result)
+        put("result", result)
     }
 
 
     val singleData by task<Int> {
         workspace.data.filterByType<Int>()["myData[12]"]?.let {
-            data("result", it)
+            put("result", it)
         }
     }
 
@@ -47,7 +47,7 @@ class DataPropagationTest {
         }
         data {
             repeat(100) {
-                static("myData[$it]", it)
+                wrap("myData[$it]", it)
             }
         }
     }
@@ -55,12 +55,12 @@ class DataPropagationTest {
     @Test
     fun testAllData() = runTest {
         val node = testWorkspace.produce("Test.allData")
-        assertEquals(4950, node.asSequence().single().await())
+        assertEquals(4950, node.content.asSequence().single().await())
     }
 
     @Test
     fun testSingleData() = runTest {
         val node = testWorkspace.produce("Test.singleData")
-        assertEquals(12, node.asSequence().single().await())
+        assertEquals(12, node.content.asSequence().single().await())
     }
 }
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
index 451c76f4..10a1c268 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileDataTest.kt
@@ -1,6 +1,5 @@
 package space.kscience.dataforge.workspace
 
-import kotlinx.coroutines.runBlocking
 import kotlinx.coroutines.test.runTest
 import kotlinx.io.Sink
 import kotlinx.io.Source
@@ -9,38 +8,34 @@ import kotlinx.io.writeString
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.Global
 import space.kscience.dataforge.data.*
-import space.kscience.dataforge.io.Envelope
-import space.kscience.dataforge.io.IOFormat
-import space.kscience.dataforge.io.io
-import space.kscience.dataforge.io.readEnvelopeFile
+import space.kscience.dataforge.io.*
 import space.kscience.dataforge.io.yaml.YamlPlugin
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.misc.DFExperimental
+import space.kscience.dataforge.names.Name
 import java.nio.file.Files
+import kotlin.io.path.deleteExisting
 import kotlin.io.path.fileSize
 import kotlin.io.path.toPath
-import kotlin.reflect.KType
-import kotlin.reflect.typeOf
 import kotlin.test.Test
 import kotlin.test.assertEquals
 
 
 class FileDataTest {
     val dataNode = DataTree<String> {
-        node("dir") {
-            static("a", "Some string") {
+        branch("dir") {
+            wrap("a", "Some string") {
                 "content" put "Some string"
             }
         }
-        static("b", "root data")
-        meta {
-            "content" put "This is root meta node"
-        }
+        wrap("b", "root data")
+//        meta {
+//            "content" put "This is root meta node"
+//        }
     }
 
 
     object StringIOFormat : IOFormat<String> {
-        override val type: KType get() = typeOf<String>()
 
         override fun writeTo(sink: Sink, obj: String) {
             sink.writeString(obj)
@@ -51,29 +46,33 @@ class FileDataTest {
 
     @Test
     @DFExperimental
-    fun testDataWriteRead() = with(Global.io) {
+    fun testDataWriteRead() = runTest {
+        val io = Global.io
         val dir = Files.createTempDirectory("df_data_node")
-        runBlocking {
-            writeDataDirectory(dir, dataNode, StringIOFormat)
-            println(dir.toUri().toString())
-            val reconstructed = readDataDirectory(dir) { _, _ -> StringIOFormat }
-            assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
-            assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
+        io.writeDataDirectory(dir, dataNode, StringIOFormat)
+        println(dir.toUri().toString())
+        val data = DataTree {
+            files(io, Name.EMPTY, dir)
         }
+        val reconstructed = data.transform { (_, value) -> value.toByteArray().decodeToString() }
+        assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
+        assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
     }
 
 
     @Test
     @DFExperimental
     fun testZipWriteRead() = runTest {
-        with(Global.io) {
-            val zip = Files.createTempFile("df_data_node", ".zip")
-            dataNode.writeZip(zip, StringIOFormat)
-            println(zip.toUri().toString())
-            val reconstructed = readDataDirectory(zip) { _, _ -> StringIOFormat }
-            assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
-            assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
-        }
+        val io = Global.io
+        val zip = Files.createTempFile("df_data_node", ".zip")
+        zip.deleteExisting()
+        io.writeZip(zip, dataNode, StringIOFormat)
+        println(zip.toUri().toString())
+        val reconstructed = DataTree { files(io, Name.EMPTY, zip) }
+            .transform { (_, value) -> value.toByteArray().decodeToString() }
+        assertEquals(dataNode["dir.a"]?.meta?.get("content"), reconstructed["dir.a"]?.meta?.get("content"))
+        assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
+
     }
 
     @OptIn(DFExperimental::class)
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
index 00ca67cb..0f16b1c8 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/FileWorkspaceCacheTest.kt
@@ -3,8 +3,7 @@ package space.kscience.dataforge.workspace
 import kotlinx.coroutines.ExperimentalCoroutinesApi
 import kotlinx.coroutines.test.runTest
 import org.junit.jupiter.api.Test
-import space.kscience.dataforge.data.startAll
-import space.kscience.dataforge.data.static
+import space.kscience.dataforge.data.wrap
 import space.kscience.dataforge.misc.DFExperimental
 import java.nio.file.Files
 
@@ -17,18 +16,17 @@ class FileWorkspaceCacheTest {
             data {
                 //statically initialize data
                 repeat(5) {
-                    static("myData[$it]", it)
+                    wrap("myData[$it]", it)
                 }
             }
             fileCache(Files.createTempDirectory("dataforge-temporary-cache"))
 
-            @Suppress("UNUSED_VARIABLE")
             val echo by task<String> {
-                pipeFrom(dataByType<String>()) { arg, _, _ -> arg }
+                transformEach(dataByType<String>()) { arg, _, _ -> arg }
             }
         }
 
-        workspace.produce("echo").startAll(this)
+        workspace.produce("echo").launch(this)
 
     }
 }
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
index 7bfe0927..b49b9d54 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/space/kscience/dataforge/workspace/SimpleWorkspaceTest.kt
@@ -27,8 +27,8 @@ public fun <P : Plugin> P.toFactory(): PluginFactory<P> = object : PluginFactory
     override val tag: PluginTag = this@toFactory.tag
 }
 
-public fun Workspace.produceBlocking(task: String, block: MutableMeta.() -> Unit = {}): DataSet<Any> = runBlocking {
-    produce(task, block)
+public fun Workspace.produceBlocking(task: String, block: MutableMeta.() -> Unit = {}): DataTree<*> = runBlocking {
+    produce(task, block).content
 }
 
 @OptIn(DFExperimental::class)
@@ -37,7 +37,7 @@ internal object TestPlugin : WorkspacePlugin() {
 
     val test by task {
         // type is inferred
-        pipeFrom(dataByType<Int>()) { arg, _, _ ->
+        transformEach(dataByType<Int>()) { arg, _, _ ->
             logger.info { "Test: $arg" }
             arg
         }
@@ -62,19 +62,19 @@ internal class SimpleWorkspaceTest {
         data {
             //statically initialize data
             repeat(100) {
-                static("myData[$it]", it)
+                wrap("myData[$it]", it)
             }
         }
 
         val filterOne by task<Int> {
             val name by taskMeta.string { error("Name field not defined") }
-            from(testPluginFactory) { test }.getByType<Int>(name)?.let { source ->
-                data(source.name, source.map { it })
+            from(testPluginFactory) { test }[name]?.let { source: Data<Int> ->
+                put(name, source)
             }
         }
 
         val square by task<Int> {
-            pipeFrom(dataByType<Int>()) { arg, name, meta ->
+            transformEach(dataByType<Int>()) { arg, name, meta ->
                 if (meta["testFlag"].boolean == true) {
                     println("Side effect")
                 }
@@ -84,7 +84,7 @@ internal class SimpleWorkspaceTest {
         }
 
         val linear by task<Int> {
-            pipeFrom(dataByType<Int>()) { arg, name, _ ->
+            transformEach(dataByType<Int>()) { arg, name, _ ->
                 workspace.logger.info { "Starting linear on $name" }
                 arg * 2 + 1
             }
@@ -97,7 +97,7 @@ internal class SimpleWorkspaceTest {
                 val newData: Data<Int> = data.combine(linearData[data.name]!!) { l, r ->
                     l + r
                 }
-                data(data.name, newData)
+                put(data.name, newData)
             }
         }
 
@@ -106,23 +106,23 @@ internal class SimpleWorkspaceTest {
             val res = from(square).foldToData(0) { l, r ->
                 l + r.value
             }
-            data("sum", res)
+            put("sum", res)
         }
 
         val averageByGroup by task<Int> {
-            val evenSum = workspace.data.filterByType<Int> { name, _ ->
+            val evenSum = workspace.data.filterByType<Int> { name, _, _ ->
                 name.toString().toInt() % 2 == 0
             }.foldToData(0) { l, r ->
                 l + r.value
             }
 
-            data("even", evenSum)
-            val oddSum = workspace.data.filterByType<Int> { name, _ ->
+            put("even", evenSum)
+            val oddSum = workspace.data.filterByType<Int> { name, _, _ ->
                 name.toString().toInt() % 2 == 1
             }.foldToData(0) { l, r ->
                 l + r.value
             }
-            data("odd", oddSum)
+            put("odd", oddSum)
         }
 
         val delta by task<Int> {
@@ -132,7 +132,7 @@ internal class SimpleWorkspaceTest {
             val res = even.combine(odd) { l, r ->
                 l - r
             }
-            data("res", res)
+            put("res", res)
         }
 
         val customPipe by task<Int> {
@@ -140,7 +140,7 @@ internal class SimpleWorkspaceTest {
                 val meta = data.meta.toMutableMeta().apply {
                     "newValue" put 22
                 }
-                data(data.name + "new", data.map { (data.meta["value"].int ?: 0) + it })
+                put(data.name + "new", data.transform { (data.meta["value"].int ?: 0) + it })
             }
         }
 
@@ -159,7 +159,7 @@ internal class SimpleWorkspaceTest {
     @Timeout(1)
     fun testMetaPropagation() = runTest {
         val node = workspace.produce("sum") { "testFlag" put true }
-        val res = node.asSequence().single().await()
+        val res = node.single().await()
     }
 
     @Test
@@ -170,20 +170,25 @@ internal class SimpleWorkspaceTest {
     }
 
     @Test
-    fun testFullSquare() {
-        runBlocking {
-            val node = workspace.produce("fullSquare")
-            println(node.toMeta())
+    fun testFullSquare() = runTest {
+        val result = workspace.produce("fullSquare")
+        result.forEach {
+            println(
+                """
+                Name: ${it.name}
+                Meta: ${it.meta}
+                Data: ${it.data.await()}
+            """.trimIndent()
+            )
         }
     }
 
     @Test
-    fun testFilter() {
-        runBlocking {
-            val node = workspace.produce("filterOne") {
-                "name" put "myData[12]"
-            }
-            assertEquals(12, node.single().await())
+    fun testFilter() = runTest {
+        val node = workspace.produce("filterOne") {
+            "name" put "myData[12]"
         }
+        assertEquals(12, node.single().await())
     }
+
 }
\ No newline at end of file