diff --git a/build.gradle.kts b/build.gradle.kts
index 65e56e40..b13264c5 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -19,6 +19,10 @@ allprojects {
     }
 }
 
+apiValidation{
+    validationDisabled = true
+}
+
 subprojects {
     apply(plugin = "ru.mipt.npm.publish")
 }
diff --git a/dataforge-data/api/dataforge-data.api b/dataforge-data/api/dataforge-data.api
index 7b82acc4..709a8038 100644
--- a/dataforge-data/api/dataforge-data.api
+++ b/dataforge-data/api/dataforge-data.api
@@ -163,18 +163,15 @@ public abstract interface class hep/dataforge/data/DataNode : hep/dataforge/meta
     public abstract fun getItems ()Ljava/util/Map;
     public abstract fun getMeta ()Lhep/dataforge/meta/Meta;
     public abstract fun getType ()Lkotlin/reflect/KClass;
-    public abstract fun startAll (Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Job;
     public abstract fun toMeta ()Lhep/dataforge/meta/Meta;
 }
 
 public final class hep/dataforge/data/DataNode$Companion {
     public static final field TYPE Ljava/lang/String;
     public final fun builder (Lkotlin/reflect/KClass;)Lhep/dataforge/data/DataTreeBuilder;
-    public final fun invoke (Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)Lhep/dataforge/data/DataTree;
 }
 
 public final class hep/dataforge/data/DataNode$DefaultImpls {
-    public static fun startAll (Lhep/dataforge/data/DataNode;Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Job;
     public static fun toMeta (Lhep/dataforge/data/DataNode;)Lhep/dataforge/meta/Meta;
 }
 
@@ -189,13 +186,13 @@ public final class hep/dataforge/data/DataNodeKt {
     public static final fun itemSequence (Lhep/dataforge/data/DataNode;)Lkotlin/sequences/Sequence;
     public static final fun iterator (Lhep/dataforge/data/DataNode;)Ljava/util/Iterator;
     public static final fun join (Lhep/dataforge/data/DataNode;Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
+    public static final fun startAll (Lhep/dataforge/data/DataNode;Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Job;
 }
 
 public final class hep/dataforge/data/DataTree : hep/dataforge/data/DataNode {
     public fun getItems ()Ljava/util/Map;
     public fun getMeta ()Lhep/dataforge/meta/Meta;
     public fun getType ()Lkotlin/reflect/KClass;
-    public fun startAll (Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Job;
     public fun toMeta ()Lhep/dataforge/meta/Meta;
 }
 
@@ -219,6 +216,7 @@ public final class hep/dataforge/data/DataTreeBuilder {
 }
 
 public final class hep/dataforge/data/DataTreeBuilderKt {
+    public static final fun DataTree (Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)Lhep/dataforge/data/DataTree;
     public static final fun builder (Lhep/dataforge/data/DataNode;)Lhep/dataforge/data/DataTreeBuilder;
     public static final fun datum (Lhep/dataforge/data/DataTreeBuilder;Lhep/dataforge/names/Name;Lhep/dataforge/data/Data;)V
     public static final fun datum (Lhep/dataforge/data/DataTreeBuilder;Ljava/lang/String;Lhep/dataforge/data/Data;)V
@@ -303,7 +301,6 @@ public final class hep/dataforge/data/JoinGroup {
 
 public final class hep/dataforge/data/MapAction : hep/dataforge/data/Action {
     public fun <init> (Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)V
-    public final fun getOutputType ()Lkotlin/reflect/KClass;
     public fun invoke (Lhep/dataforge/data/DataNode;Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/DataNode;
     public fun isTerminal ()Z
 }
@@ -335,7 +332,6 @@ public final class hep/dataforge/data/NamedData : hep/dataforge/data/Data {
 
 public final class hep/dataforge/data/ReduceAction : hep/dataforge/data/Action {
     public fun <init> (Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)V
-    public final fun getOutputType ()Lkotlin/reflect/KClass;
     public fun invoke (Lhep/dataforge/data/DataNode;Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/DataNode;
     public fun isTerminal ()Z
 }
@@ -356,7 +352,6 @@ public final class hep/dataforge/data/ReduceGroupBuilder {
 
 public final class hep/dataforge/data/SplitAction : hep/dataforge/data/Action {
     public fun <init> (Lkotlin/reflect/KClass;Lkotlin/jvm/functions/Function1;)V
-    public final fun getOutputType ()Lkotlin/reflect/KClass;
     public fun invoke (Lhep/dataforge/data/DataNode;Lhep/dataforge/meta/Meta;)Lhep/dataforge/data/DataNode;
     public fun isTerminal ()Z
 }
@@ -391,7 +386,6 @@ public final class hep/dataforge/data/TypeFilteredDataNode : hep/dataforge/data/
     public fun getMeta ()Lhep/dataforge/meta/Meta;
     public final fun getOrigin ()Lhep/dataforge/data/DataNode;
     public fun getType ()Lkotlin/reflect/KClass;
-    public fun startAll (Lkotlinx/coroutines/CoroutineScope;)Lkotlinx/coroutines/Job;
     public fun toMeta ()Lhep/dataforge/meta/Meta;
 }
diff --git a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataNode.kt b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataNode.kt
index e8625f65..a1e8be73 100644
--- a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataNode.kt
+++ b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataNode.kt
@@ -44,8 +44,14 @@ public interface DataNode<out T : Any> : MetaRepr {
      */
     public val type: KClass<out T>
 
+    /**
+     * Children items of this data node
+     */
     public val items: Map<NameToken, DataItem<T>>
 
+    /**
+     * Meta for this node
+     */
     public val meta: Meta
 
     override fun toMeta(): Meta = Meta {
@@ -58,39 +64,33 @@ public interface DataNode<out T : Any> : MetaRepr {
         }
     }
 
-    /**
-     * Start computation for all goals in data node and return a job for the whole node
-     */
-    @Suppress("DeferredResultUnused")
-    public fun CoroutineScope.startAll(): Job = launch {
-        items.values.forEach {
-            when (it) {
-                is DataItem.Node<*> -> it.node.run { startAll() }
-                is DataItem.Leaf<*> -> it.data.run { startAsync(this@launch) }
-            }
-        }
-    }
-
     public companion object {
         public const val TYPE: String = "dataNode"
 
-        public operator fun <T : Any> invoke(type: KClass<out T>, block: DataTreeBuilder<T>.() -> Unit): DataTree<T> =
-            DataTreeBuilder(type).apply(block).build()
-
-        public inline operator fun <reified T : Any> invoke(noinline block: DataTreeBuilder<T>.() -> Unit): DataTree<T> =
-            DataTreeBuilder(T::class).apply(block).build()
-
         public fun <T : Any> builder(type: KClass<out T>): DataTreeBuilder<T> = DataTreeBuilder(type)
     }
 }
 
-public suspend fun DataNode<*>.join(): Unit = coroutineScope { startAll().join() }
+/**
+ * Start computation for all goals in data node and return a job for the whole node
+ */
+@Suppress("DeferredResultUnused")
+public fun DataNode<*>.startAll(coroutineScope: CoroutineScope): Job = coroutineScope.launch {
+    items.values.forEach {
+        when (it) {
+            is DataItem.Node<*> -> it.node.run { startAll(this@launch) }
+            is DataItem.Leaf<*> -> it.data.run { this.startAsync(this@launch) }
+        }
+    }
+}
+
+public suspend fun DataNode<*>.join(): Unit = coroutineScope { startAll(this).join() }
 
 public val <T : Any> DataItem<T>?.node: DataNode<T>? get() = (this as? DataItem.Node<T>)?.node
 public val <T : Any> DataItem<T>?.data: Data<T>? get() = (this as? DataItem.Leaf<T>)?.data
 
 public operator fun <T : Any> DataNode<T>.get(name: Name): DataItem<T>? = when (name.length) {
-    0 -> error("Empty name")
+    0 -> DataItem.Node(this)
     1 -> items[name.firstOrNull()]
     else -> get(name.firstOrNull()!!.asName()).node?.get(name.cutFirst())
 }
@@ -127,7 +127,8 @@ public fun <T : Any> DataNode<T>.dataSequence(): Sequence<Pair<Name, Data<T>>> =
     }
 }
 
-public fun <T : Any> DataNode<T>.filter(predicate: (Name, Data<T>) -> Boolean): DataNode<T> = DataNode.invoke(type) {
+@DFExperimental
+public fun <T : Any> DataNode<T>.filter(predicate: (Name, Data<T>) -> Boolean): DataNode<T> = DataTree(type) {
     dataSequence().forEach { (name, data) ->
         if (predicate(name, data)) {
             this[name] = data
@@ -137,6 +138,5 @@
 
 public fun <T : Any> DataNode<T>.first(): Data<T>? = dataSequence().firstOrNull()?.second
 
-
 public operator fun <T : Any> DataNode<T>.iterator(): Iterator<Pair<Name, DataItem<T>>> = itemSequence().iterator()
diff --git a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataTreeBuilder.kt b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataTreeBuilder.kt
index 238e862b..7529abe9 100644
--- a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataTreeBuilder.kt
+++ b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataTreeBuilder.kt
@@ -119,6 +119,14 @@ public class DataTreeBuilder<T : Any>(public val type: KClass<out T>) {
     }
 }
 
+@Suppress("FunctionName")
+public fun <T : Any> DataTree(type: KClass<out T>, block: DataTreeBuilder<T>.() -> Unit): DataTree<T> =
+    DataTreeBuilder(type).apply(block).build()
+
+@Suppress("FunctionName")
+public inline fun <reified T : Any> DataTree(noinline block: DataTreeBuilder<T>.() -> Unit): DataTree<T> =
+    DataTreeBuilder(T::class).apply(block).build()
+
 public fun <T : Any> DataTreeBuilder<T>.datum(name: Name, data: Data<T>) {
     this[name] = data
 }
@@ -149,11 +157,11 @@ public fun <T : Any> DataTreeBuilder<T>.node(name: String, node: DataNode<T>) {
 }
 
 public inline fun <reified T : Any> DataTreeBuilder<T>.node(name: Name, noinline block: DataTreeBuilder<T>.() -> Unit) {
-    this[name] = DataNode(T::class, block)
+    this[name] = DataTree(T::class, block)
 }
 
 public inline fun <reified T : Any> DataTreeBuilder<T>.node(name: String, noinline block: DataTreeBuilder<T>.() -> Unit) {
-    this[name.toName()] = DataNode(T::class, block)
+    this[name.toName()] = DataTree(T::class, block)
 }
 
 /**
diff --git a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/MapAction.kt b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/MapAction.kt
index c1ba2074..05ba5056 100644
--- a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/MapAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/MapAction.kt
@@ -30,11 +30,11 @@ public class MapActionBuilder(public var name: Name, public var meta: Meta
 
 public class MapAction<T : Any, R : Any>(
-    public val outputType: KClass<out R>,
+    private val outputType: KClass<out R>,
     private val block: MapActionBuilder<T, R>.() -> Unit
 ) : Action<T, R> {
 
-    override fun invoke(node: DataNode<T>, meta: Meta): DataNode<R> = DataNode(outputType) {
+    override fun invoke(node: DataNode<T>, meta: Meta): DataNode<R> = DataTree(outputType) {
         node.dataSequence().forEach { (name, data) ->
             /*
              * Creating a new environment for action using **old** name, old meta and task meta
              */
diff --git a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/ReduceAction.kt b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/ReduceAction.kt
index d9d9dec2..d92a87cf 100644
--- a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/ReduceAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/ReduceAction.kt
@@ -72,11 +72,11 @@ public class ReduceGroupBuilder(public val actionMeta: Meta) {
  * The same rules as for KPipe
  */
 public class ReduceAction<T : Any, R : Any>(
-    public val outputType: KClass<out R>,
+    private val outputType: KClass<out R>,
     private val action: ReduceGroupBuilder<T, R>.() -> Unit
 ) : Action<T, R> {
 
-    override fun invoke(node: DataNode<T>, meta: Meta): DataNode<R> = DataNode(outputType) {
+    override fun invoke(node: DataNode<T>, meta: Meta): DataNode<R> = DataTree(outputType) {
         ReduceGroupBuilder<T, R>(meta).apply(action).buildGroups(node).forEach { group ->
 
             //val laminate = Laminate(group.meta, meta)
diff --git a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/SplitAction.kt b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/SplitAction.kt
index 70fbec96..a4390849 100644
--- a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/SplitAction.kt
+++ b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/SplitAction.kt
@@ -33,11 +33,11 @@ public class SplitBuilder(public val name: Name, public val me
 }
 
 public class SplitAction<T : Any, R : Any>(
-    public val outputType: KClass<out R>,
+    private val outputType: KClass<out R>,
     private val action: SplitBuilder<T, R>.() -> Unit
 ) : Action<T, R> {
 
-    override fun invoke(node: DataNode<T>, meta: Meta): DataNode<R> = DataNode(outputType) {
+    override fun invoke(node: DataNode<T>, meta: Meta): DataNode<R> = DataTree(outputType) {
         node.dataSequence().forEach { (name, data) ->
 
             val laminate = Laminate(data.meta, meta)
diff --git a/dataforge-data/src/commonTest/kotlin/hep/dataforge/data/DataTreeBuilderTest.kt b/dataforge-data/src/commonTest/kotlin/hep/dataforge/data/DataTreeBuilderTest.kt
index 32bf1760..ca7a1d8f 100644
--- a/dataforge-data/src/commonTest/kotlin/hep/dataforge/data/DataTreeBuilderTest.kt
+++ b/dataforge-data/src/commonTest/kotlin/hep/dataforge/data/DataTreeBuilderTest.kt
@@ -7,14 +7,14 @@ import kotlin.test.assertTrue
 internal class DataTreeBuilderTest{
     @Test
     fun testDataUpdate(){
-        val updateData = DataNode<Any>{
+        val updateData = DataTree<Any>{
             "update" put {
                 "a" put Data.static("a")
                 "b" put Data.static("b")
             }
         }
 
-        val node = DataNode<Any>{
+        val node = DataTree<Any>{
             node("primary"){
                 static("a","a")
                 static("b","b")
diff --git a/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/Dependency.kt b/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/Dependency.kt
index e58a988b..f6961866 100644
--- a/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/Dependency.kt
+++ b/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/Dependency.kt
@@ -2,6 +2,7 @@ package hep.dataforge.workspace
 
 import hep.dataforge.data.DataFilter
 import hep.dataforge.data.DataNode
+import hep.dataforge.data.DataTree
 import hep.dataforge.data.filter
 import hep.dataforge.meta.Meta
 import hep.dataforge.meta.MetaBuilder
@@ -24,7 +25,7 @@ public class DataDependency(private val filter: DataFilter, private val placemen
         return if (placement.isEmpty()) {
             result
         } else {
-            DataNode.invoke(Any::class) { this[placement] = result }
+            DataTree(Any::class) { this[placement] = result }
         }
     }
 
@@ -38,7 +39,7 @@ public class AllDataDependency(private val placement: Name = Name.EMPTY) : Depen
     override fun apply(workspace: Workspace): DataNode<Any> = if (placement.isEmpty()) {
         workspace.data
     } else {
-        DataNode.invoke(Any::class) { this[placement] = workspace.data }
+        DataTree(Any::class) { this[placement] = workspace.data }
     }
 
     override fun toMeta(): MetaBuilder = Meta {
@@ -65,7 +66,7 @@ public abstract class TaskDependency<out T : Any>(
         return if (placement.isEmpty()) {
             result
         } else {
-            DataNode(task.type) { this[placement] = result }
+            DataTree(task.type) { this[placement] = result }
         }
     }
 
diff --git a/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/WorkspaceBuilder.kt b/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/WorkspaceBuilder.kt
index 1b2b0cdb..1289516f 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/WorkspaceBuilder.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/WorkspaceBuilder.kt
@@ -22,7 +22,6 @@ public interface WorkspaceBuilder {
     public fun build(): Workspace
 }
 
-
 /**
  * Set the context for future workspcace
  */
diff --git a/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/fileData.kt b/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/fileData.kt
index 8dfcc63d..2cf203d5 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/fileData.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/fileData.kt
@@ -37,7 +37,7 @@ private fun newZFS(path: Path): FileSystem {
 public fun <T : Any> IOPlugin.readDataFile(
     path: Path,
     type: KClass<out T>,
-    formatResolver: FileFormatResolver<T>
+    formatResolver: FileFormatResolver<T>,
 ): Data<T> {
     val envelope = readEnvelopeFile(path, true) ?: error("Can't read data from $path")
     val format = formatResolver(path, envelope.meta)
@@ -57,7 +57,7 @@ public inline fun <reified T : Any> IOPlugin.readDataFile(path: Path): Data<T> =
 public fun <T : Any> DataTreeBuilder<T>.file(
     plugin: IOPlugin,
     path: Path,
-    formatResolver: FileFormatResolver<T>
+    formatResolver: FileFormatResolver<T>,
 ) {
     //If path is a single file or a special directory, read it as single datum
     if (!Files.isDirectory(path) || Files.list(path).allMatch { it.fileName.toString().startsWith("@") }) {
@@ -85,7 +85,7 @@ public fun <T : Any> DataTreeBuilder<T>.file(
 public fun <T : Any> IOPlugin.readDataDirectory(
     path: Path,
     type: KClass<out T>,
-    formatResolver: FileFormatResolver<T>
+    formatResolver: FileFormatResolver<T>,
 ): DataNode<T> {
     //read zipped data node
     if (path.fileName != null && path.fileName.toString().endsWith(".zip")) {
@@ -94,7 +94,7 @@ public fun <T : Any> IOPlugin.readDataDirectory(
         return readDataDirectory(fs.rootDirectories.first(), type, formatResolver)
     }
     if (!Files.isDirectory(path)) error("Provided path $path is not a directory")
-    return DataNode(type) {
+    return DataTree(type) {
         Files.list(path).forEach { path ->
             val fileName = path.fileName.toString()
             if (fileName.startsWith(IOPlugin.META_FILE_NAME)) {
@@ -121,7 +121,7 @@ public suspend fun <T : Any> IOPlugin.writeDataDirectory(
     node: DataNode<T>,
     format: IOFormat<T>,
     envelopeFormat: EnvelopeFormat? = null,
-    metaFormat: MetaFormatFactory? = null
+    metaFormat: MetaFormatFactory? = null,
 ) {
     withContext(Dispatchers.IO) {
         if (!Files.exists(path)) {
@@ -156,7 +156,7 @@ private suspend fun <T : Any> ZipOutputStream.writeNode(
     name: String,
     item: DataItem<T>,
     dataFormat: IOFormat<T>,
-    envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat
+    envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat,
 ) {
     withContext(Dispatchers.IO) {
         when (item) {
@@ -187,7 +187,7 @@ suspend fun <T : Any> IOPlugin.writeZip(
     path: Path,
     node: DataNode<T>,
     format: IOFormat<T>,
-    envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat
+    envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat,
 ) {
     withContext(Dispatchers.IO) {
         val actualFile = if (path.toString().endsWith(".zip")) {
@@ -195,7 +195,10 @@ suspend fun <T : Any> IOPlugin.writeZip(
         } else {
             path.resolveSibling(path.fileName.toString() + ".zip")
         }
-        val fos = Files.newOutputStream(actualFile, StandardOpenOption.WRITE, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)
+        val fos = Files.newOutputStream(actualFile,
+            StandardOpenOption.WRITE,
+            StandardOpenOption.CREATE,
+            StandardOpenOption.TRUNCATE_EXISTING)
         val zos = ZipOutputStream(fos)
         zos.use {
             it.writeNode("", DataItem.Node(node), format, envelopeFormat)
diff --git a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/DataPropagationTest.kt b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/DataPropagationTest.kt
index 784a7441..6e0002ed 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/DataPropagationTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/DataPropagationTest.kt
@@ -19,7 +19,7 @@ class DataPropagationTestPlugin : WorkspacePlugin() {
             allData()
         }
         transform { data ->
-            return@transform DataNode {
+            DataTree {
                 val result = data.dataSequence().map { it.second.get() }.reduce { acc, pair -> acc + pair }
                 set("result".asName(), Data { result })
             }
@@ -32,7 +32,7 @@ class DataPropagationTestPlugin : WorkspacePlugin() {
             data("myData\\[12\\]")
         }
         transform { data ->
-            return@transform DataNode {
+            DataTree {
                 val result = data.dataSequence().map { it.second.get() }.reduce { acc, pair -> acc + pair }
                 set("result".asName(), Data { result })
             }
@@ -44,7 +44,7 @@ class DataPropagationTestPlugin : WorkspacePlugin() {
            data(pattern = "myData.*")
         }
         transform { data ->
-            return@transform DataNode {
+            DataTree{
                 val result = data.dataSequence().map { it.second.get() }.reduce { acc, pair -> acc + pair }
                 set("result".asName(), Data { result })
             }
diff --git a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/FileDataTest.kt b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/FileDataTest.kt
index 7357c5c5..66646359 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/FileDataTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/FileDataTest.kt
@@ -17,7 +17,7 @@ import kotlin.test.assertEquals
 
 class FileDataTest {
-    val dataNode = DataNode<String> {
+    val dataNode = DataTree<String> {
         node("dir") {
             static("a", "Some string") {
                 "content" put "Some string"
diff --git a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/SimpleWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/SimpleWorkspaceTest.kt
index 34bfd734..c894f6a0 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/SimpleWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/SimpleWorkspaceTest.kt
@@ -74,10 +74,10 @@ class SimpleWorkspaceTest {
             val squareDep = dependsOn(square, placement = "square")
"linear") } - transform { data -> + transform { data -> val squareNode = data["square"].node!!.cast()//squareDep() val linearNode = data["linear"].node!!.cast()//linearDep() - return@transform DataNode(Int::class) { + DataTree { squareNode.dataSequence().forEach { (name, _) -> val newData = Data { val squareValue = squareNode[name].data!!.get()