From 23fae9794f7f8241371416ad0a88b8ce24671841 Mon Sep 17 00:00:00 2001 From: Alexander Nozik Date: Sun, 10 Jan 2021 17:46:53 +0300 Subject: [PATCH] WIP full data refactor --- .../hep/dataforge/data/CachingAction.kt | 6 +- .../kotlin/hep/dataforge/data/DataSet.kt | 74 ++++--------------- .../kotlin/hep/dataforge/data/DataTree.kt | 6 +- .../kotlin/hep/dataforge/data/MapAction.kt | 2 +- .../hep/dataforge/data/MutableDataTree.kt | 4 +- .../kotlin/hep/dataforge/data/SplitAction.kt | 4 +- .../hep/dataforge/data/dataSetOperations.kt | 72 ++++++++++++++++++ .../kotlin/hep/dataforge/data/dataJVM.kt | 11 +-- .../kotlin/hep/dataforge/meta/MetaBuilder.kt | 6 +- .../transformations/MetaTransformation.kt | 2 +- .../hep/dataforge/workspace/DataPlacement.kt | 8 ++ .../hep/dataforge/workspace/Dependency.kt | 4 +- .../hep/dataforge/workspace/TaskModel.kt | 4 +- .../hep/dataforge/workspace/TaskBuilder.kt | 2 +- .../dataforge/workspace/WorkspaceBuilder.kt | 2 +- .../hep/dataforge/workspace/fileData.kt | 30 +++++--- .../hep/dataforge/workspace/tasksJVM.kt | 9 +++ .../workspace/DataPropagationTest.kt | 26 ++++--- .../hep/dataforge/workspace/FileDataTest.kt | 20 +++-- .../workspace/SimpleWorkspaceTest.kt | 24 +++--- 20 files changed, 190 insertions(+), 126 deletions(-) create mode 100644 dataforge-data/src/commonMain/kotlin/hep/dataforge/data/dataSetOperations.kt create mode 100644 dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/tasksJVM.kt diff --git a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/CachingAction.kt b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/CachingAction.kt index 52afc8bf..88d001ad 100644 --- a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/CachingAction.kt +++ b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/CachingAction.kt @@ -6,7 +6,7 @@ import hep.dataforge.names.startsWith import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.flow.Flow import kotlinx.coroutines.flow.collect -import kotlin.reflect.KType +import kotlin.reflect.KClass /** * Remove all values with keys starting with [name] @@ -20,7 +20,7 @@ internal fun MutableMap.removeWhatStartsWith(name: Name) { * An action that caches results on-demand and recalculates them on source push */ public abstract class CachingAction( - public val outputType: KType, + public val outputType: KClass, ) : Action { protected abstract fun CoroutineScope.transform( @@ -33,7 +33,7 @@ public abstract class CachingAction( set: DataSet, meta: Meta, scope: CoroutineScope, - ): DataSet = DataTree.dynamic(outputType, scope) { + ): DataSet = DataTree.dynamic(outputType,scope) { collectFrom(scope.transform(set, meta)) scope.let { set.updates.collect { diff --git a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataSet.kt b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataSet.kt index d14dec4d..c8373949 100644 --- a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataSet.kt +++ b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataSet.kt @@ -1,6 +1,7 @@ package hep.dataforge.data -import hep.dataforge.meta.DFExperimental +import hep.dataforge.meta.Meta +import hep.dataforge.meta.set import hep.dataforge.names.* import kotlinx.coroutines.* import kotlinx.coroutines.flow.* @@ -46,28 +47,6 @@ public interface DataSet { } } -/** - * A stateless filtered [DataSet] - */ -@DFExperimental -public fun DataSet.filter( - predicate: suspend (Name, Data) -> Boolean, -): DataSet = object : DataSet { - override val dataType: KClass get() = this@filter.dataType - - 
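CachingAction now keeps a `KClass` instead of a `KType` and rebuilds its output as a dynamic tree that re-collects the source's `updates` flow. A minimal sketch of that push-recalculation pattern, using only the members visible in this diff (`updates`, suspend `getData`); the generic parameters and the `recompute` callback are assumptions, not part of the API:

```kotlin
import hep.dataforge.data.Data
import hep.dataforge.data.DataSet
import hep.dataforge.names.Name
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.launch

// Sketch only: react to pushed names the way CachingAction does, assuming
// DataSet.updates emits names and getData() is suspend, as shown in this diff.
fun <T : Any> CoroutineScope.watchUpdates(
    source: DataSet<T>,
    recompute: suspend (Name, Data<T>) -> Unit,
): Job = launch {
    source.updates.collect { name ->
        // a pushed name may point to removed data; getData returns null in that case
        source.getData(name)?.let { recompute(name, it) }
    }
}
```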
override fun flow(): Flow> = - this@filter.flow().filter { predicate(it.name, it.data) } - - override suspend fun getData(name: Name): Data? = this@filter.getData(name)?.takeIf { - predicate(name, it) - } - - override val updates: Flow = this@filter.updates.filter flowFilter@{ name -> - val theData = this@filter.getData(name) ?: return@flowFilter false - predicate(name, theData) - } -} - /** * Flow all data nodes with names starting with [branchName] */ @@ -75,40 +54,6 @@ public fun DataSet.flowChildren(branchName: Name): Flow DataSet.branch(branchName: Name): DataSet = if (branchName.isEmpty()) this -else object : DataSet { - override val dataType: KClass get() = this@branch.dataType - - override fun flow(): Flow> = this@branch.flow().mapNotNull { - it.name.removeHeadOrNull(branchName)?.let { name -> - it.data.named(name) - } - } - - override suspend fun getData(name: Name): Data? = this@branch.getData(branchName + name) - - override val updates: Flow get() = this@branch.updates.mapNotNull { it.removeHeadOrNull(branchName) } -} - -/** - * Generate a wrapper data set with a given name prefix appended to all names - */ -public fun DataSet.withNamePrefix(prefix: Name): DataSet = if (prefix.isEmpty()) this -else object : DataSet { - override val dataType: KClass get() = this@withNamePrefix.dataType - - override fun flow(): Flow> = this@withNamePrefix.flow().map { it.data.named(prefix + it.name) } - - override suspend fun getData(name: Name): Data? = - name.removeHeadOrNull(name)?.let { this@withNamePrefix.getData(it) } - - override val updates: Flow get() = this@withNamePrefix.updates.map { prefix + it } - -} - /** * Start computation for all goals in data node and return a job for the whole node */ @@ -118,4 +63,17 @@ public fun DataSet.startAll(coroutineScope: CoroutineScope): Job = }.toList().joinAll() } -public suspend fun DataSet.join(): Unit = coroutineScope { startAll(this).join() } \ No newline at end of file +public suspend fun DataSet.join(): Unit = coroutineScope { startAll(this).join() } + +public suspend fun DataSet<*>.toMeta(): Meta = Meta { + flow().collect { + if (it.name.endsWith(DataSet.META_KEY)) { + set(it.name, it.meta) + } else { + it.name put { + "type" put it.type.simpleName + "meta" put it.meta + } + } + } +} \ No newline at end of file diff --git a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataTree.kt b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataTree.kt index 0cb383b0..5f6e198d 100644 --- a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataTree.kt +++ b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataTree.kt @@ -97,10 +97,10 @@ public fun DataTree.itemFlow(): Flow>> = * Get a branch of this [DataTree] with a given [branchName]. 
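The new `toMeta()` extension above renders a `DataSet` as a Meta tree, one node per item with its `type` and `meta`, while `startAll`/`join` force evaluation of every goal. A small sketch combining the two, assuming the suspend signatures shown here and that `DataSet` keeps its generic parameter:

```kotlin
import hep.dataforge.data.DataSet
import hep.dataforge.data.join
import hep.dataforge.data.toMeta

// Sketch: evaluate every goal in a DataSet, then print the Meta summary
// produced by the toMeta() extension added in this patch.
suspend fun <T : Any> DataSet<T>.dump() {
    join()            // start all goals and wait for them to complete
    println(toMeta()) // one node per data item: "type" and "meta"
}
```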
* The difference from similar method for [DataSet] is that internal logic is more simple and the return value is a [DataTree] */ -public fun DataTree.branch(branchName: Name): DataTree = object : DataTree { - override val dataType: KClass get() = this@branch.dataType +public operator fun DataTree.get(branchName: Name): DataTree = object : DataTree { + override val dataType: KClass get() = this@get.dataType - override val updates: Flow = this@branch.updates.mapNotNull { it.removeHeadOrNull(branchName) } + override val updates: Flow = this@get.updates.mapNotNull { it.removeHeadOrNull(branchName) } override suspend fun items(): Map> = getItem(branchName).tree?.items() ?: emptyMap() } diff --git a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/MapAction.kt b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/MapAction.kt index f89d6eeb..e51f5fb0 100644 --- a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/MapAction.kt +++ b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/MapAction.kt @@ -50,7 +50,7 @@ public class MapAction( //applying transformation from builder val builder = MapActionBuilder( data.name, - data.meta.builder(), // using data meta + data.meta.toMutableMeta(), // using data meta meta ).apply(block) diff --git a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/MutableDataTree.kt b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/MutableDataTree.kt index 62d7a80b..d46419c7 100644 --- a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/MutableDataTree.kt +++ b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/MutableDataTree.kt @@ -155,5 +155,5 @@ public suspend fun DataSet.toMutableTree( }.launchIn(scope) } -public fun MutableDataTree.branch(branchName: Name): MutableDataTree = - (this as DataTree).branch(branchName) as MutableDataTree +public fun MutableDataTree.get(branchName: Name): MutableDataTree = + (this as DataTree).get(branchName) as MutableDataTree diff --git a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/SplitAction.kt index f6ee51ad..851cc82f 100644 --- a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/SplitAction.kt +++ b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/SplitAction.kt @@ -3,7 +3,7 @@ package hep.dataforge.data import hep.dataforge.meta.Laminate import hep.dataforge.meta.Meta import hep.dataforge.meta.MetaBuilder -import hep.dataforge.meta.builder +import hep.dataforge.meta.toMutableMeta import hep.dataforge.names.Name import hep.dataforge.names.toName import kotlinx.coroutines.CoroutineScope @@ -58,7 +58,7 @@ public class SplitAction( // apply individual fragment rules to result return split.fragments.entries.asFlow().map { (fragmentName, rule) -> - val env = SplitBuilder.FragmentRule(fragmentName, laminate.builder()).apply(rule) + val env = SplitBuilder.FragmentRule(fragmentName, laminate.toMutableMeta()).apply(rule) data.map(outputType, meta = env.meta) { env.result(it) }.named(fragmentName) } } diff --git a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/dataSetOperations.kt b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/dataSetOperations.kt new file mode 100644 index 00000000..24846c30 --- /dev/null +++ b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/dataSetOperations.kt @@ -0,0 +1,72 @@ +package hep.dataforge.data + +import hep.dataforge.meta.DFExperimental +import hep.dataforge.names.* +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.flow.filter 
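`DataTree.branch` becomes an operator `get`, so sub-trees can be taken with bracket syntax (a `String` overload is added for `DataSet` in dataSetOperations.kt below). A one-line usage sketch, assuming `DataTree` keeps its generic parameter:

```kotlin
import hep.dataforge.data.DataTree
import hep.dataforge.data.get
import hep.dataforge.names.toName

// Sketch: bracket access to a DataTree branch via the new get() operator.
fun <T : Any> DataTree<T>.subTree(path: String): DataTree<T> = this[path.toName()]
```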
+import kotlinx.coroutines.flow.map +import kotlinx.coroutines.flow.mapNotNull +import kotlin.reflect.KClass + + +/** + * A stateless filtered [DataSet] + */ +@DFExperimental +public fun DataSet.filter( + predicate: suspend (Name, Data) -> Boolean, +): DataSet = object : DataSet { + override val dataType: KClass get() = this@filter.dataType + + override fun flow(): Flow> = + this@filter.flow().filter { predicate(it.name, it.data) } + + override suspend fun getData(name: Name): Data? = this@filter.getData(name)?.takeIf { + predicate(name, it) + } + + override val updates: Flow = this@filter.updates.filter flowFilter@{ name -> + val theData = this@filter.getData(name) ?: return@flowFilter false + predicate(name, theData) + } +} + + +/** + * Generate a wrapper data set with a given name prefix appended to all names + */ +public fun DataSet.withNamePrefix(prefix: Name): DataSet = if (prefix.isEmpty()) this +else object : DataSet { + override val dataType: KClass get() = this@withNamePrefix.dataType + + override fun flow(): Flow> = this@withNamePrefix.flow().map { it.data.named(prefix + it.name) } + + override suspend fun getData(name: Name): Data? = + name.removeHeadOrNull(name)?.let { this@withNamePrefix.getData(it) } + + override val updates: Flow get() = this@withNamePrefix.updates.map { prefix + it } +} + + +/** + * Get a subset of data starting with a given [branchName] + */ +public operator fun DataSet.get(branchName: Name): DataSet = if (branchName.isEmpty()) this +else object : DataSet { + override val dataType: KClass get() = this@get.dataType + + override fun flow(): Flow> = this@get.flow().mapNotNull { + it.name.removeHeadOrNull(branchName)?.let { name -> + it.data.named(name) + } + } + + override suspend fun getData(name: Name): Data? = this@get.getData(branchName + name) + + override val updates: Flow get() = this@get.updates.mapNotNull { it.removeHeadOrNull(branchName) } +} + +public operator fun DataSet.get(branchName: String): DataSet = this@get.get(branchName.toName()) + +@DFExperimental +public suspend fun DataSet.rootData(): Data? = getData(Name.EMPTY) diff --git a/dataforge-data/src/jvmMain/kotlin/hep/dataforge/data/dataJVM.kt b/dataforge-data/src/jvmMain/kotlin/hep/dataforge/data/dataJVM.kt index b1c75c7f..775adfc1 100644 --- a/dataforge-data/src/jvmMain/kotlin/hep/dataforge/data/dataJVM.kt +++ b/dataforge-data/src/jvmMain/kotlin/hep/dataforge/data/dataJVM.kt @@ -1,7 +1,5 @@ package hep.dataforge.data -import hep.dataforge.names.Name -import hep.dataforge.names.toName import kotlinx.coroutines.runBlocking import kotlin.reflect.KClass import kotlin.reflect.full.isSubclassOf @@ -60,11 +58,4 @@ public fun DataSet<*>.cast(type: KClass): DataSet = * Check that node is compatible with given type meaning that each element could be cast to the type */ internal fun DataSet<*>.canCast(type: KClass): Boolean = - type.isSubclassOf(this.dataType) - - -public operator fun DataTree.get(name: Name): DataTreeItem? = runBlocking { - getItem(name) -} - -public operator fun DataTree.get(name: String): DataTreeItem? 
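The relocated wrappers in dataSetOperations.kt compose naturally: take a branch, filter it, and re-prefix the result. A hedged sketch; the branch names and the predicate are illustrative, and only `get`, `filter` and `withNamePrefix` come from this patch:

```kotlin
import hep.dataforge.data.DataSet
import hep.dataforge.data.filter
import hep.dataforge.data.get
import hep.dataforge.data.withNamePrefix
import hep.dataforge.meta.DFExperimental
import hep.dataforge.names.toName

// Sketch: compose the stateless DataSet wrappers added in dataSetOperations.kt.
@DFExperimental
fun <T : Any> DataSet<T>.calibrated(): DataSet<T> =
    this["raw"]                                  // subset under the "raw" branch
        .filter { name, _ -> !name.toString().startsWith("tmp") }
        .withNamePrefix("calibrated".toName())   // re-prefix every remaining name
```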
= get(name.toName()) \ No newline at end of file + type.isSubclassOf(this.dataType) \ No newline at end of file diff --git a/dataforge-meta/src/commonMain/kotlin/hep/dataforge/meta/MetaBuilder.kt b/dataforge-meta/src/commonMain/kotlin/hep/dataforge/meta/MetaBuilder.kt index a977199f..f2016b80 100644 --- a/dataforge-meta/src/commonMain/kotlin/hep/dataforge/meta/MetaBuilder.kt +++ b/dataforge-meta/src/commonMain/kotlin/hep/dataforge/meta/MetaBuilder.kt @@ -12,7 +12,7 @@ import kotlin.jvm.JvmName */ @DFBuilder public class MetaBuilder : AbstractMutableMeta() { - override fun wrapNode(meta: Meta): MetaBuilder = if (meta is MetaBuilder) meta else meta.builder() + override fun wrapNode(meta: Meta): MetaBuilder = if (meta is MetaBuilder) meta else meta.toMutableMeta() override fun empty(): MetaBuilder = MetaBuilder() public infix fun String.put(item: MetaItem?) { @@ -121,13 +121,13 @@ public class MetaBuilder : AbstractMutableMeta() { /** * For safety, builder always copies the initial meta even if it is builder itself */ -public fun Meta.builder(): MetaBuilder { +public fun Meta.toMutableMeta(): MetaBuilder { return MetaBuilder().also { builder -> items.mapValues { entry -> val item = entry.value builder[entry.key.asName()] = when (item) { is MetaItemValue -> item.value - is MetaItemNode -> MetaItemNode(item.node.builder()) + is MetaItemNode -> MetaItemNode(item.node.toMutableMeta()) } } } diff --git a/dataforge-meta/src/commonMain/kotlin/hep/dataforge/meta/transformations/MetaTransformation.kt b/dataforge-meta/src/commonMain/kotlin/hep/dataforge/meta/transformations/MetaTransformation.kt index 865dd776..710998ef 100644 --- a/dataforge-meta/src/commonMain/kotlin/hep/dataforge/meta/transformations/MetaTransformation.kt +++ b/dataforge-meta/src/commonMain/kotlin/hep/dataforge/meta/transformations/MetaTransformation.kt @@ -116,7 +116,7 @@ public inline class MetaTransformation(public val transformations: Collection rule.selectItems(source).forEach { name -> remove(name) diff --git a/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/DataPlacement.kt b/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/DataPlacement.kt index 7bd607d2..04e3ecb4 100644 --- a/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/DataPlacement.kt +++ b/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/DataPlacement.kt @@ -22,6 +22,14 @@ public interface DataPlacement: MetaRepr { override fun toMeta(): Meta = Meta{"from" put "*"} } + public fun into(target: Name): DataPlacement = DataPlacementScheme{ + to = target.toString() + } + + public fun into(target: String): DataPlacement = DataPlacementScheme{ + to = target + } + } } diff --git a/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/Dependency.kt b/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/Dependency.kt index 53ebe5e0..83b2027c 100644 --- a/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/Dependency.kt +++ b/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/Dependency.kt @@ -3,7 +3,7 @@ package hep.dataforge.workspace import hep.dataforge.data.DataSet import hep.dataforge.meta.Meta import hep.dataforge.meta.MetaRepr -import hep.dataforge.meta.builder +import hep.dataforge.meta.toMutableMeta import hep.dataforge.names.Name import hep.dataforge.names.asName import hep.dataforge.names.plus @@ -48,7 +48,7 @@ public class ExternalTaskDependency( override val name: Name get() = EXTERNAL_TASK_NAME + task.name - override fun toMeta(): Meta = 
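`Meta.builder()` is renamed to `toMutableMeta()` throughout. A minimal sketch of the copy-modify-seal round trip, mirroring how `WorkspaceBuilder.target(name, base)` uses it below; the key name is illustrative:

```kotlin
import hep.dataforge.meta.*

// Sketch: copy an existing Meta, tweak it, and seal the result, using the
// renamed toMutableMeta() from this patch.
fun Meta.withDebugFlag(): Meta = toMutableMeta()
    .apply { "debug" put true }
    .seal()
```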
placement.toMeta().builder().apply { + override fun toMeta(): Meta = placement.toMeta().toMutableMeta().apply { "name" put name.toString() "task" put task.toString() "meta" put meta diff --git a/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/TaskModel.kt b/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/TaskModel.kt index 2ce74197..590d2e2a 100644 --- a/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/TaskModel.kt +++ b/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/TaskModel.kt @@ -123,8 +123,8 @@ public class TaskModelBuilder(public val name: Name, meta: Meta = Meta.EMPTY) : /** * Meta for current task. By default uses the whole input meta */ - public var meta: MetaBuilder = meta.builder() - private val dependencies: HashSet = HashSet() + public var meta: MetaBuilder = meta.toMutableMeta() + private val dependencies: HashSet = HashSet() override val defaultMeta: Meta get() = meta diff --git a/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/TaskBuilder.kt b/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/TaskBuilder.kt index f41cfa49..a34ca581 100644 --- a/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/TaskBuilder.kt +++ b/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/TaskBuilder.kt @@ -57,7 +57,7 @@ public class TaskBuilder(private val name: Name, public val type: KClas ) { dataTransforms += { context, model, data -> val env = TaskEnv(Name.EMPTY, model.meta, context, data) - val startData = data.branch(from) + val startData = data.get(from) env.block(startData).withNamePrefix(to) } } diff --git a/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/WorkspaceBuilder.kt b/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/WorkspaceBuilder.kt index 1575a4b5..b78c76ab 100644 --- a/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/WorkspaceBuilder.kt +++ b/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/WorkspaceBuilder.kt @@ -52,7 +52,7 @@ public fun WorkspaceBuilder.target(name: String, block: MetaBuilder.() -> Unit) */ public fun WorkspaceBuilder.target(name: String, base: String, block: MetaBuilder.() -> Unit) { val parentTarget = targets[base] ?: error("Base target with name $base not found") - targets[name] = parentTarget.builder() + targets[name] = parentTarget.toMutableMeta() .apply { "@baseTarget" put base } .apply(block) .seal() diff --git a/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/fileData.kt b/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/fileData.kt index 28a98f2a..d1601c91 100644 --- a/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/fileData.kt +++ b/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/fileData.kt @@ -15,16 +15,30 @@ import java.nio.file.StandardOpenOption import java.nio.file.spi.FileSystemProvider import java.util.zip.ZipEntry import java.util.zip.ZipOutputStream +import kotlin.reflect.KClass import kotlin.reflect.KType +import kotlin.reflect.typeOf import kotlin.streams.toList //public typealias FileFormatResolver = (Path, Meta) -> IOFormat -public interface FileFormatResolver{ +public interface FileFormatResolver { public val type: KType - public operator fun invoke (path: Path, meta: Meta): IOFormat + public operator fun invoke(path: Path, meta: Meta): IOFormat } +@PublishedApi +internal inline fun IOPlugin.formatResolver(): FileFormatResolver = + object : FileFormatResolver { + override val type: KType = 
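The derived-target overload above copies the base meta with `toMutableMeta()` and tags it with `@baseTarget`. A short usage sketch, assuming the `target(name, block)` and `target(name, base, block)` signatures shown here; target names and keys are illustrative:

```kotlin
import hep.dataforge.workspace.*

// Sketch: layering targets, where the second target inherits from the first.
fun WorkspaceBuilder.declarePlotTargets() {
    target("plot") { "plot.title" put "default" }
    target("plotLog", "plot") { "plot.logY" put true }
}
```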
typeOf() + + override fun invoke(path: Path, meta: Meta): IOFormat = + resolveIOFormat() ?: error("Can't resolve IO format for ${T::class}") + } + +private val FileFormatResolver.kClass: KClass + get() = type.classifier as? KClass ?: error("Format resolver actual type does not correspond to type parameter") + private fun newZFS(path: Path): FileSystem { val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" } ?: error("Zip file system provider not found") @@ -51,9 +65,7 @@ public fun IOPlugin.readDataFile( } @DFExperimental -public inline fun IOPlugin.readDataFile(path: Path): Data = readDataFile(path) { _, _ -> - resolveIOFormat() ?: error("Can't resolve IO format for ${T::class}") -} +public inline fun IOPlugin.readDataFile(path: Path): Data = readDataFile(path, formatResolver()) /** * Add file/directory-based data tree item @@ -98,7 +110,7 @@ public fun IOPlugin.readDataDirectory( return readDataDirectory(fs.rootDirectories.first(), formatResolver) } if (!Files.isDirectory(path)) error("Provided path $path is not a directory") - return DataTree.static(formatResolver.type) { + return DataTree.static(formatResolver.kClass) { Files.list(path).toList().forEach { path -> val fileName = path.fileName.toString() if (fileName.startsWith(IOPlugin.META_FILE_NAME)) { @@ -114,9 +126,7 @@ public fun IOPlugin.readDataDirectory( @DFExperimental public inline fun IOPlugin.readDataDirectory(path: Path): DataTree = - readDataDirectory(path) { _, _ -> - resolveIOFormat() ?: error("Can't resolve IO format for ${T::class}") - } + readDataDirectory(path, formatResolver()) /** * Write data tree to existing directory or create a new one using default [java.nio.file.FileSystem] provider @@ -138,7 +148,7 @@ public suspend fun IOPlugin.writeDataDirectory( tree.items().forEach { (token, item) -> val childPath = path.resolve(token.toString()) when (item) { - is DataItem.Node -> { + is DataTreeItem.Node -> { writeDataDirectory(childPath, item.tree, format, envelopeFormat) } is DataTreeItem.Leaf -> { diff --git a/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/tasksJVM.kt b/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/tasksJVM.kt new file mode 100644 index 00000000..6d4b29bd --- /dev/null +++ b/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/tasksJVM.kt @@ -0,0 +1,9 @@ +package hep.dataforge.workspace + +import hep.dataforge.data.DataSet +import hep.dataforge.meta.MetaBuilder +import kotlinx.coroutines.runBlocking + +public fun Workspace.runBlocking(task: String, block: MetaBuilder.() -> Unit = {}): DataSet = runBlocking{ + run(task, block) +} \ No newline at end of file diff --git a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/DataPropagationTest.kt b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/DataPropagationTest.kt index 8853e7f2..9a531731 100644 --- a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/DataPropagationTest.kt +++ b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/DataPropagationTest.kt @@ -20,10 +20,10 @@ class DataPropagationTestPlugin : WorkspacePlugin() { val testAllData = task("allData", Int::class) { model { - allData() + data() } transform { data -> - DataTree.dynamic { + DataTree.dynamic(context) { val result = data.flow().map { it.value() }.reduce { acc, pair -> acc + pair } data("result", result) } @@ -36,7 +36,7 @@ class DataPropagationTestPlugin : WorkspacePlugin() { data("myData\\[12\\]") } transform { data -> - DataTree.dynamic { + 
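With the `FileFormatResolver` interface and the suspend directory writer above, a data tree can be written to and read back from a directory. A sketch of the round trip, mirroring FileDataTest below; the generic parameters and the explicit resolver argument are assumptions based on the signatures in this diff:

```kotlin
import hep.dataforge.data.*
import hep.dataforge.io.*
import hep.dataforge.meta.DFExperimental
import hep.dataforge.workspace.*
import kotlinx.coroutines.runBlocking
import java.nio.file.Path

// Sketch: write a DataTree<String> to a directory and read it back with an
// explicit FileFormatResolver, as FileDataTest does.
@DFExperimental
fun roundTrip(
    io: IOPlugin,
    dir: Path,
    tree: DataTree<String>,
    format: IOFormat<String>,
    resolver: FileFormatResolver<String>,
): Unit = runBlocking {
    io.writeDataDirectory(dir, tree, format)
    val restored = io.readDataDirectory(dir, resolver)
    println(restored.toMeta())
}
```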
DataTree.dynamic(context) { val result = data.flow().map { it.value() }.reduce { acc, pair -> acc + pair } data("result", result) } @@ -48,7 +48,7 @@ class DataPropagationTestPlugin : WorkspacePlugin() { data(pattern = "myData.*") } transform { data -> - DataTree.dynamic { + DataTree.dynamic(context) { val result = data.flow().map { it.value() }.reduce { acc, pair -> acc + pair } data("result", result) } @@ -80,19 +80,25 @@ class DataPropagationTest { @Test fun testAllData() { - val node = testWorkspace.run("Test.allData") - assertEquals(4950, node.first()!!.value()) + runBlocking { + val node = testWorkspace.run("Test.allData") + assertEquals(4950, node.first()!!.value()) + } } @Test fun testAllRegexData() { - val node = testWorkspace.run("Test.allRegexData") - assertEquals(4950, node.first()!!.value()) + runBlocking { + val node = testWorkspace.run("Test.allRegexData") + assertEquals(4950, node.first()!!.value()) + } } @Test fun testSingleData() { - val node = testWorkspace.run("Test.singleData") - assertEquals(12, node.first()!!.value()) + runBlocking { + val node = testWorkspace.run("Test.singleData") + assertEquals(12, node.first()!!.value()) + } } } \ No newline at end of file diff --git a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/FileDataTest.kt b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/FileDataTest.kt index 5401adf2..a2a029d5 100644 --- a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/FileDataTest.kt +++ b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/FileDataTest.kt @@ -12,6 +12,7 @@ import kotlinx.io.Output import kotlinx.io.text.readUtf8String import kotlinx.io.text.writeUtf8String import java.nio.file.Files +import java.nio.file.Path import kotlin.reflect.KType import kotlin.reflect.typeOf import kotlin.test.Test @@ -49,6 +50,13 @@ class FileDataTest { } + object StringFormatResolver: FileFormatResolver{ + override val type: KType = typeOf() + + override fun invoke(path: Path, meta: Meta): IOFormat =StringIOFormat + + } + @Test @DFExperimental fun testDataWriteRead() { @@ -58,9 +66,9 @@ class FileDataTest { writeDataDirectory(dir, dataNode, StringIOFormat) } println(dir.toUri().toString()) - val reconstructed = readDataDirectory(dir, String::class) { _, _ -> StringIOFormat } - assertEquals(dataNode["dir.a"]?.meta, reconstructed["dir.a"]?.meta) - assertEquals(dataNode["b"]?.data?.get(), reconstructed["b"]?.data?.value()) + val reconstructed = readDataDirectory(dir,StringFormatResolver) + assertEquals(dataNode["dir.a"].data?.meta, reconstructed["dir.a"].data?.meta) + assertEquals(dataNode["b"]?.data?.value(), reconstructed["b"]?.data?.value()) } } @@ -74,9 +82,9 @@ class FileDataTest { writeZip(zip, dataNode, StringIOFormat) } println(zip.toUri().toString()) - val reconstructed = readDataDirectory(zip, String::class) { _, _ -> StringIOFormat } - assertEquals(dataNode["dir.a"]?.meta, reconstructed["dir.a"]?.meta) - assertEquals(dataNode["b"]?.data?.get(), reconstructed["b"]?.data?.value()) + val reconstructed = readDataDirectory(zip, StringFormatResolver) + assertEquals(dataNode["dir.a"].data?.meta, reconstructed["dir.a"].data?.meta) + assertEquals(dataNode["b"]?.data?.value(), reconstructed["b"]?.data?.value()) } } } \ No newline at end of file diff --git a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/SimpleWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/SimpleWorkspaceTest.kt index b58c32c2..680b0016 100644 --- 
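The tests above now wrap the suspending `Workspace.run` in `runBlocking`; the `Workspace.runBlocking` bridge from tasksJVM.kt does the same for plain callers. A small sketch, with the task name and meta key taken from the tests and purely illustrative:

```kotlin
import hep.dataforge.workspace.*

// Sketch: a blocking entry point for non-suspending code, built on the
// Workspace.runBlocking extension added in tasksJVM.kt.
fun computeSum(workspace: Workspace) =
    workspace.runBlocking("sum") { "testFlag" put true }
```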
a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/SimpleWorkspaceTest.kt +++ b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/SimpleWorkspaceTest.kt @@ -85,13 +85,13 @@ class SimpleWorkspaceTest { val fullSquare = task("fullsquare") { model { - val squareDep = dependsOn(square, placement = "square") - val linearDep = dependsOn(linear, placement = "linear") + val squareDep = dependsOn(square, placement = DataPlacement.into("square")) + val linearDep = dependsOn(linear, placement = DataPlacement.into("linear")) } transform { data -> - val squareNode = data["square"].tree!!.filterIsInstance() //squareDep() - val linearNode = data["linear"].tree!!.filterIsInstance() //linearDep() - DataTree.dynamic { + val squareNode = data["square"].filterIsInstance() //squareDep() + val linearNode = data["linear"].filterIsInstance() //linearDep() + DataTree.dynamic(context) { squareNode.flow().collect { val newData: Data = Data { val squareValue = squareNode.getData(it.name)!!.value() @@ -142,7 +142,7 @@ class SimpleWorkspaceTest { val customPipeTask = task("custom") { mapAction { - meta = meta.builder().apply { + meta = meta.toMutableMeta().apply { "newValue" put 22 } name += "new" @@ -157,14 +157,14 @@ class SimpleWorkspaceTest { @Test fun testWorkspace() { - val node = workspace.run("sum") + val node = workspace.runBlocking("sum") val res = node.first() assertEquals(328350, res?.value()) } @Test fun testMetaPropagation() { - val node = workspace.run("sum") { "testFlag" put true } + val node = workspace.runBlocking("sum") { "testFlag" put true } val res = node.first()?.value() } @@ -177,13 +177,15 @@ class SimpleWorkspaceTest { @Test fun testFullSquare() { - val node = workspace.run("fullsquare") - println(node.toMeta()) + runBlocking { + val node = workspace.run("fullsquare") + println(node.toMeta()) + } } @Test fun testGather() { - val node = workspace.run("filterOne") + val node = workspace.runBlocking("filterOne") runBlocking { assertEquals(12, node.first()?.value()) }
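Finally, a sketch of consuming a task result the way the refactored `fullsquare` transform does: bracket access into the placed branches, then `flow()` per item. The task name and branch name come from the test; the printing is illustrative and `value()` is assumed to be suspend, as it is used inside flows above:

```kotlin
import hep.dataforge.data.*
import hep.dataforge.workspace.Workspace
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.runBlocking

// Sketch: run the "fullsquare" task and inspect its result branch by branch.
fun inspectFullSquare(workspace: Workspace) = runBlocking {
    val node = workspace.run("fullsquare")
    println(node.toMeta())                       // Meta summary of the whole result
    node["square"].flow().collect { named ->
        println("${named.name} -> ${named.value()}")
    }
}
```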