0.10.0 release

Alexander Nozik 2025-01-19 12:34:36 +03:00
parent de534dec2d
commit 99a053b978
23 changed files with 69 additions and 362 deletions
CHANGELOG.md
README.md
build.gradle.kts
dataforge-context
build.gradle.kts
src/jvmMain/kotlin/space/kscience/dataforge
dataforge-data
build.gradle.kts
src
commonMain/kotlin/space/kscience/dataforge/data
jvmTest/kotlin/space/kscience/dataforge/data
dataforge-output
api
build.gradle.kts
src
commonMain/kotlin/hep/dataforge/output
jsMain/kotlin/hep/dataforge/output
jvmMain/kotlin/hep/dataforge/output
nativeMain/kotlin/hep/dataforge/output
dataforge-scripting
dataforge-workspace
build.gradle.kts
src
jvmMain/kotlin/space/kscience/dataforge/workspace
jvmTest/kotlin/space/kscience/dataforge/workspace
gradle.properties

@ -3,26 +3,45 @@
## Unreleased
### Added
### Changed
### Deprecated
### Removed
### Fixed
### Security
## 0.10.0 - 2025-01-19
### Added
- Coroutine exception logging in context
- `ObservableMutableMetaSerializer`
- `MutableMetaView` - a `Meta` wrapper that creates nodes only when it or its children are changed.
### Changed
- Simplify inheritance logic in `MutableTypedMeta`
- Full rework of `DataTree` and associated interfaces (`DataSource`, `DataSink`, etc.).
- Filtering data by type is moved from `dataforge-data` to `dataforge-workspace` to avoid a reflection dependency.
### Deprecated
- MetaProvider `spec` is replaced by `readable`. `listOfSpec` is replaced with `listOfReadable`.
### Removed
- Remove the implicit IO format resolver in `IOPlugin` and `FileWorkspaceCache`: there is no guarantee that only one format is present in the context for each type.
- Dependencies on `atomicfu` and `kotlin.reflect` from dataforge-data to improve performance.
### Fixed
- `NameToken` parsing.
- Top level string list meta conversion.
### Security
## 0.9.0 - 2024-06-04
### Added

@ -101,6 +101,7 @@ In this section, we will try to cover DataForge main ideas in the form of questi
### [dataforge-scripting](dataforge-scripting)
> Scripting definition for workspace generation
>
> **Maturity**: PROTOTYPE

@ -22,6 +22,12 @@ subprojects {
}
}
dependencies{
subprojects.forEach {
dokka(it)
}
}
readme {
readmeTemplate = file("docs/templates/README-TEMPLATE.md")
}

@ -13,11 +13,10 @@ kscience {
useSerialization()
commonMain {
api(projects.dataforgeMeta)
// api(spclibs.atomicfu)
}
jvmMain{
api(kotlin("reflect"))
api("org.slf4j:slf4j-api:1.7.30")
api(spclibs.kotlin.reflect)
api(spclibs.slf4j)
}
}

@ -17,7 +17,7 @@ package space.kscience.dataforge.context
import java.util.*
import kotlin.reflect.KClass
import kotlin.reflect.full.cast
import kotlin.reflect.cast
public class ClassLoaderPlugin(private val classLoader: ClassLoader) : AbstractPlugin() {
override val tag: PluginTag = PluginTag("classLoader", PluginTag.DATAFORGE_GROUP)

@ -8,25 +8,30 @@ import space.kscience.dataforge.misc.DfType
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.names.Name
import kotlin.reflect.KClass
import kotlin.reflect.KType
import kotlin.reflect.full.findAnnotation
import kotlin.reflect.typeOf
@DFExperimental
public val KClass<*>.dfType: String
get() = findAnnotation<DfType>()?.id ?: simpleName ?: ""
@DFExperimental
public val KType.dfType: String
get() = findAnnotation<DfType>()?.id ?: (classifier as? KClass<*>)?.simpleName ?: ""
/**
* Provide an object with given name inferring target from its type using [DfType] annotation
*/
@DFExperimental
public inline fun <reified T : Any> Provider.provideByType(name: String): T? {
val target = T::class.dfType
val target = typeOf<T>().dfType
return provide(target, name)
}
@DFExperimental
public inline fun <reified T : Any> Provider.top(): Map<Name, T> {
val target = T::class.dfType
val target = typeOf<T>().dfType
return top(target)
}
@ -35,15 +40,15 @@ public inline fun <reified T : Any> Provider.top(): Map<Name, T> {
*/
@DFExperimental
public inline fun <reified T : Any> Context.gather(inherit: Boolean = true): Map<Name, T> =
gather<T>(T::class.dfType, inherit)
gather<T>(typeOf<T>().dfType, inherit)
@DFExperimental
public inline fun <reified T : Any> PluginBuilder.provides(items: Map<Name, T>) {
provides(T::class.dfType, items)
provides(typeOf<T>().dfType, items)
}
@DFExperimental
public inline fun <reified T : Any> PluginBuilder.provides(vararg items: Named) {
provides(T::class.dfType, *items)
provides(typeOf<T>().dfType, *items)
}
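Since the provider extensions above now derive the target string from `typeOf<T>()` instead of `T::class`, a reified call resolves the `@DfType` id through the `KType` classifier. Below is a minimal sketch of that behavior; the `Device` interface, the "device" id and the `findMotor` helper are hypothetical, and package names are assumed from the surrounding imports.

import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.misc.DfType
import space.kscience.dataforge.provider.Provider
import space.kscience.dataforge.provider.provideByType

// Hypothetical type used only for illustration.
@DfType("device")
interface Device

@OptIn(DFExperimental::class)
fun findMotor(provider: Provider): Device? {
    // typeOf<Device>().dfType inside provideByType resolves to "device"
    // via the @DfType annotation; without the annotation it falls back
    // to the classifier's simple name.
    return provider.provideByType("motor")
}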

@ -9,10 +9,7 @@ kscience{
wasm()
useCoroutines()
dependencies {
// api(spclibs.atomicfu)
api(projects.dataforgeMeta)
//Remove after subtype moved to stdlib
api(kotlin("reflect"))
}
}

@ -3,6 +3,7 @@ package space.kscience.dataforge.data
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.filter
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFInternal
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.NameToken
import space.kscience.dataforge.names.plus
@ -17,17 +18,6 @@ public fun interface DataFilter {
}
}
//public fun DataFilter.accepts(update: DataUpdate<*>): Boolean = accepts(update.name, update.data?.meta, update.type)
//public fun <T, DU : DataUpdate<T>> Sequence<DU>.filterData(predicate: DataFilter): Sequence<DU> = filter { data ->
// predicate.accepts(data)
//}
//
//public fun <T, DU : DataUpdate<T>> Flow<DU>.filterData(predicate: DataFilter): Flow<DU> = filter { data ->
// predicate.accepts(data)
//}
public fun <T> DataSource<T>.filterData(
dataFilter: DataFilter,
): DataSource<T> = object : DataSource<T> {
@ -58,10 +48,14 @@ public fun <T> ObservableDataSource<T>.filterData(
this@filterData.read(name)?.takeIf { predicate.accepts(name, it.meta, it.type) }
}
internal class FilteredDataTree<T>(
val source: DataTree<T>,
val filter: DataFilter,
val branch: Name,
/**
* A [DataTree] filtered by branch and some criterion, possibly changing resulting type
*/
@DFInternal
public class FilteredDataTree<T>(
public val source: DataTree<T>,
public val filter: DataFilter,
public val branch: Name,
override val dataType: KType = source.dataType,
) : DataTree<T> {
@ -83,37 +77,6 @@ internal class FilteredDataTree<T>(
}
}
public fun <T> DataTree<T>.filterData(
predicate: DataFilter,
): DataTree<T> = FilteredDataTree(this, predicate, Name.EMPTY)
///**
// * Generate a wrapper data set with a given name prefix appended to all names
// */
//public fun <T : Any> DataTree<T>.withNamePrefix(prefix: Name): DataSet<T> = if (prefix.isEmpty()) {
// this
//} else object : DataSource<T> {
//
// override val dataType: KType get() = this@withNamePrefix.dataType
//
// override val coroutineContext: CoroutineContext
// get() = (this@withNamePrefix as? DataSource)?.coroutineContext ?: EmptyCoroutineContext
//
// override val meta: Meta get() = this@withNamePrefix.meta
//
//
// override fun iterator(): Iterator<NamedData<T>> = iterator {
// for (d in this@withNamePrefix) {
// yield(d.data.named(prefix + d.name))
// }
// }
//
// override fun get(name: Name): Data<T>? =
// name.removeFirstOrNull(name)?.let { this@withNamePrefix.get(it) }
//
// override val updates: Flow<Name> get() = this@withNamePrefix.updates.map { prefix + it }
//}
//
): FilteredDataTree<T> = FilteredDataTree(this, predicate, Name.EMPTY)
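`filterData` now returns the concrete `FilteredDataTree` rather than a plain `DataTree`. A minimal usage sketch follows, assuming a `DataTree<Int>` built elsewhere; the `keepCalibrationBranch` helper and the "calibration" prefix are illustrative only.

import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.data.filterData
import space.kscience.dataforge.misc.DFInternal

// FilteredDataTree is @DFInternal, so the opt-in may be required at the call site.
@OptIn(DFInternal::class)
fun keepCalibrationBranch(tree: DataTree<Int>): DataTree<Int> =
    tree.filterData { name, _, _ -> name.toString().startsWith("calibration") }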

@ -13,7 +13,7 @@ import kotlin.time.Duration.Companion.milliseconds
@OptIn(DFExperimental::class, ExperimentalCoroutinesApi::class)
internal class ActionsTest {
@Test
fun testStaticMapAction() = runTest(timeout = 500.milliseconds) {
fun testStaticMapAction() = runTest(timeout = 200.milliseconds) {
val plusOne = Action.mapping<Int, Int> {
result { it + 1 }
}
@ -26,11 +26,11 @@ internal class ActionsTest {
val result = plusOne(data)
assertEquals(2, result.awaitData("1").await())
assertEquals(5, result.awaitData("4").await())
}
@Test
fun testDynamicMapAction() = runTest(timeout = 500.milliseconds) {
fun testDynamicMapAction() = runTest(timeout = 200.milliseconds) {
val plusOne = Action.mapping<Int, Int> {
result { it + 1 }
}
@ -43,7 +43,7 @@ internal class ActionsTest {
source.writeValue(it.toString(), it)
}
assertEquals(2, result.awaitData("1").await())
assertEquals(5, result.awaitData("4").await())
}
}

@ -1,68 +0,0 @@
public final class hep/dataforge/output/ConsoleOutputManager : hep/dataforge/context/AbstractPlugin, hep/dataforge/output/OutputManager {
public static final field Companion Lhep/dataforge/output/ConsoleOutputManager$Companion;
public fun <init> ()V
public fun get (Lkotlin/reflect/KClass;Lhep/dataforge/names/Name;Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;)Lhep/dataforge/output/Renderer;
public fun getTag ()Lhep/dataforge/context/PluginTag;
}
public final class hep/dataforge/output/ConsoleOutputManager$Companion : hep/dataforge/context/PluginFactory {
public fun getTag ()Lhep/dataforge/context/PluginTag;
public fun getType ()Lkotlin/reflect/KClass;
public fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Lhep/dataforge/output/ConsoleOutputManager;
public synthetic fun invoke (Lhep/dataforge/meta/Meta;Lhep/dataforge/context/Context;)Ljava/lang/Object;
}
public final class hep/dataforge/output/DefaultTextFormat : hep/dataforge/output/TextFormat {
public static final field INSTANCE Lhep/dataforge/output/DefaultTextFormat;
public fun getPriority ()I
public fun getType ()Lkotlin/reflect/KClass;
public fun render (Ljava/lang/Appendable;Ljava/lang/Object;Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
}
public final class hep/dataforge/output/OutputJVMKt {
public static final fun getOutput (Lkotlinx/coroutines/Dispatchers;)Lkotlinx/coroutines/CoroutineDispatcher;
}
public abstract interface class hep/dataforge/output/OutputManager {
public abstract fun get (Lkotlin/reflect/KClass;Lhep/dataforge/names/Name;Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;)Lhep/dataforge/output/Renderer;
}
public final class hep/dataforge/output/OutputManager$DefaultImpls {
public static synthetic fun get$default (Lhep/dataforge/output/OutputManager;Lkotlin/reflect/KClass;Lhep/dataforge/names/Name;Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)Lhep/dataforge/output/Renderer;
}
public final class hep/dataforge/output/OutputManagerKt {
public static final fun getCONSOLE_RENDERER ()Lhep/dataforge/output/Renderer;
public static final fun getOutput (Lhep/dataforge/context/Context;)Lhep/dataforge/output/OutputManager;
public static final fun render (Lhep/dataforge/output/OutputManager;Ljava/lang/Object;Lhep/dataforge/names/Name;Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;)V
public static synthetic fun render$default (Lhep/dataforge/output/OutputManager;Ljava/lang/Object;Lhep/dataforge/names/Name;Lhep/dataforge/names/Name;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)V
}
public abstract interface class hep/dataforge/output/Renderer : hep/dataforge/context/ContextAware {
public abstract fun render (Ljava/lang/Object;Lhep/dataforge/meta/Meta;)V
}
public final class hep/dataforge/output/Renderer$DefaultImpls {
public static fun getLogger (Lhep/dataforge/output/Renderer;)Lmu/KLogger;
public static synthetic fun render$default (Lhep/dataforge/output/Renderer;Ljava/lang/Object;Lhep/dataforge/meta/Meta;ILjava/lang/Object;)V
}
public abstract interface class hep/dataforge/output/TextFormat {
public static final field Companion Lhep/dataforge/output/TextFormat$Companion;
public static final field TEXT_RENDERER_TYPE Ljava/lang/String;
public abstract fun getPriority ()I
public abstract fun getType ()Lkotlin/reflect/KClass;
public abstract fun render (Ljava/lang/Appendable;Ljava/lang/Object;Lkotlin/coroutines/Continuation;)Ljava/lang/Object;
}
public final class hep/dataforge/output/TextFormat$Companion {
public static final field TEXT_RENDERER_TYPE Ljava/lang/String;
}
public final class hep/dataforge/output/TextRenderer : hep/dataforge/output/Renderer {
public fun <init> (Lhep/dataforge/context/Context;Ljava/lang/Appendable;)V
public fun getContext ()Lhep/dataforge/context/Context;
public fun getLogger ()Lmu/KLogger;
public fun render (Ljava/lang/Object;Lhep/dataforge/meta/Meta;)V
}

@ -1,15 +0,0 @@
plugins {
id("space.kscience.gradle.mpp")
id("space.kscience.gradle.native")
}
kotlin {
sourceSets {
val commonMain by getting{
dependencies {
api(project(":dataforge-context"))
//api(project(":dataforge-io"))
}
}
}
}

@ -1,75 +0,0 @@
package space.kscience.dataforge.output
import space.kscience.dataforge.context.*
import space.kscience.dataforge.context.PluginTag.Companion.DATAFORGE_GROUP
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.Name
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.Dispatchers
import kotlin.reflect.KClass
/**
* A manager for outputs
*/
public interface OutputManager {
/**
* Get an output specialized for given type, name and stage.
* @param stage represents the node or directory for the output. Empty means root node.
* @param name represents the name inside the node.
* @param meta configuration for [Renderer] (not for rendered object)
*/
public fun <T : Any> getOutputContainer(
type: KClass<out T>,
name: Name,
stage: Name = Name.EMPTY,
meta: Meta = Meta.EMPTY
): Renderer<T>
}
/**
* Get an output manager for a context
*/
public val Context.output: OutputManager get() = plugins.get() ?: ConsoleOutputManager()
/**
* Get an output with given [name], [stage] and reified content type
*/
public inline fun <reified T : Any> OutputManager.getOutputContainer(
name: Name,
stage: Name = Name.EMPTY,
meta: Meta = Meta.EMPTY
): Renderer<T> {
return getOutputContainer(T::class, name, stage, meta)
}
/**
* Directly render an object using the most suitable renderer
*/
public fun OutputManager.render(obj: Any, name: Name, stage: Name = Name.EMPTY, meta: Meta = Meta.EMPTY): Unit =
getOutputContainer(obj::class, name, stage).render(obj, meta)
/**
* System console output.
* The [CONSOLE_RENDERER] is used when no other [OutputManager] is provided.
*/
public val CONSOLE_RENDERER: Renderer<Any> = Renderer { obj, meta -> println(obj) }
public class ConsoleOutputManager : AbstractPlugin(), OutputManager {
override val tag: PluginTag get() = ConsoleOutputManager.tag
override fun <T : Any> getOutputContainer(type: KClass<out T>, name: Name, stage: Name, meta: Meta): Renderer<T> = CONSOLE_RENDERER
public companion object : PluginFactory<ConsoleOutputManager> {
override val tag: PluginTag = PluginTag("output.console", group = DATAFORGE_GROUP)
override val type: KClass<ConsoleOutputManager> = ConsoleOutputManager::class
override fun invoke(meta: Meta, context: Context): ConsoleOutputManager = ConsoleOutputManager()
}
}
/**
* A dispatcher for output tasks.
*/
public expect val Dispatchers.Output: CoroutineDispatcher

@ -1,21 +0,0 @@
package space.kscience.dataforge.output
import space.kscience.dataforge.context.ContextAware
import space.kscience.dataforge.meta.Meta
/**
* A generic way to render any object in the output.
*
* An object could be rendered either in append or overlay mode. The mode is decided by the [Renderer]
* based on its configuration and provided meta
*
*/
public fun interface Renderer<in T : Any> {
/**
* Render specific object with configuration.
*
* By convention actual render is called in asynchronous mode, so this method should never
* block execution
*/
public fun render(obj: T, meta: Meta)
}

@ -1,78 +0,0 @@
package space.kscience.dataforge.output
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.output.TextFormat.Companion.TEXT_RENDERER_TYPE
import space.kscience.dataforge.provider.Type
import space.kscience.dataforge.provider.top
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
import kotlin.reflect.KClass
import kotlin.reflect.KType
/**
* A text or binary renderer based on [Output]
*/
@Type(TEXT_RENDERER_TYPE)
@Deprecated("Bad design")
public interface TextFormat {
/**
* The priority of this renderer compared to other renderers
*/
public val priority: Int
/**
* The type of the content served by this renderer
*/
public val type: KClass<*>
public suspend fun Appendable.render(obj: Any)
public companion object {
public const val TEXT_RENDERER_TYPE: String = "dataforge.textRenderer"
}
}
@Deprecated("Bad design")
public object DefaultTextFormat : TextFormat {
override val priority: Int = Int.MAX_VALUE
override val type: KClass<*> = Any::class
override suspend fun Appendable.render(obj: Any) {
append(obj.toString() + "\n")
}
}
/**
* A text-based renderer
*/
@Deprecated("Bad design")
public class TextRenderer(override val context: Context, private val output: Appendable) : Renderer<Any> {
private val cache = HashMap<KClass<*>, TextFormat>()
/**
* Find the first [TextFormat] matching the given object type.
*/
override fun render(obj: Any, meta: Meta) {
val format: TextFormat = if (obj is CharSequence) {
DefaultTextFormat
} else {
val value = cache[obj::class]
if (value == null) {
val answer =
context.top<TextFormat>(TEXT_RENDERER_TYPE).values.firstOrNull { it.type.isInstance(obj) }
if (answer != null) {
cache[obj::class] = answer
answer
} else {
DefaultTextFormat
}
} else {
value
}
}
context.launch(Dispatchers.Output) {
format.run { output.render(obj) }
}
}
}

@ -1,7 +0,0 @@
package space.kscience.dataforge.output
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.Dispatchers
public actual val Dispatchers.Output: CoroutineDispatcher get() = Default

@ -1,6 +0,0 @@
package space.kscience.dataforge.output
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.Dispatchers
public actual val Dispatchers.Output: CoroutineDispatcher get() = IO

@ -1,6 +0,0 @@
package space.kscience.dataforge.output
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.Dispatchers
public actual val Dispatchers.Output: CoroutineDispatcher get() = Dispatchers.Default

@ -1,6 +1,6 @@
# Module dataforge-scripting
Scripting definition for workspace generation
## Usage

@ -2,6 +2,8 @@ plugins {
id("space.kscience.gradle.mpp")
}
description = "A framework for pull-based data processing"
kscience {
jvm()
js()

@ -1,5 +1,6 @@
package space.kscience.dataforge.data
package space.kscience.dataforge.workspace
import space.kscience.dataforge.data.*
import space.kscience.dataforge.misc.DFInternal
import space.kscience.dataforge.names.Name
import kotlin.reflect.KType
@ -20,16 +21,6 @@ private fun <R> Data<*>.castOrNull(type: KType): Data<R>? =
}
}
//@Suppress("UNCHECKED_CAST")
//@DFInternal
//public fun <R> Sequence<DataUpdate<*>>.filterByDataType(type: KType): Sequence<NamedData<R>> =
// filter { it.type.isSubtypeOf(type) } as Sequence<NamedData<R>>
//
//@Suppress("UNCHECKED_CAST")
//@DFInternal
//public fun <R> Flow<DataUpdate<*>>.filterByDataType(type: KType): Flow<NamedData<R>> =
// filter { it.type.isSubtypeOf(type) } as Flow<NamedData<R>>
/**
* Select all data matching given type and filters. Does not modify paths
*
@ -40,7 +31,7 @@ private fun <R> Data<*>.castOrNull(type: KType): Data<R>? =
public fun <R> DataTree<*>.filterByType(
type: KType,
branch: Name = Name.EMPTY,
filter: DataFilter = DataFilter.EMPTY,
filter: DataFilter = DataFilter.Companion.EMPTY,
): DataTree<R> {
val filterWithType = DataFilter { name, meta, dataType ->
filter.accepts(name, meta, dataType) && dataType.isSubtypeOf(type)
@ -54,7 +45,7 @@ public fun <R> DataTree<*>.filterByType(
@OptIn(DFInternal::class)
public inline fun <reified R : Any> DataTree<*>.filterByType(
branch: Name = Name.EMPTY,
filter: DataFilter = DataFilter.EMPTY,
filter: DataFilter = DataFilter.Companion.EMPTY,
): DataTree<R> = filterByType(typeOf<R>(), branch, filter = filter)
/**

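With `filterByType` now living in `dataforge-workspace`, selecting a typed sub-tree is a one-liner. A minimal sketch, assuming the `dataforge-workspace` artifact is on the classpath; the `selectInts` helper is illustrative.

import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.workspace.filterByType

// Keep only entries whose data type is a subtype of Int; names are not modified.
fun selectInts(tree: DataTree<*>): DataTree<Int> = tree.filterByType<Int>()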
@ -1,7 +1,6 @@
package space.kscience.dataforge.workspace
import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.data.filterByType
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name

@ -152,14 +152,14 @@ internal class SimpleWorkspaceTest {
}
@Test
fun testWorkspace() = runTest(timeout = 100.milliseconds) {
fun testWorkspace() = runTest(timeout = 200.milliseconds) {
val node = workspace.produce("sum")
val res = node.asSequence().single()
assertEquals(328350, res.await())
val res = node.data
assertEquals(328350, res?.await())
}
@Test
fun testMetaPropagation() = runTest(timeout = 100.milliseconds) {
fun testMetaPropagation() = runTest(timeout = 200.milliseconds) {
val node = workspace.produce("sum") { "testFlag" put true }
val res = node.data?.await()
}
@ -190,7 +190,7 @@ internal class SimpleWorkspaceTest {
val node = workspace.produce("filterOne") {
"name" put "myData[12]"
}
assertEquals(12, node.asSequence().first().await())
assertEquals(12, node.data?.await())
}
}
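The test changes above reflect the reworked task-result API: a produced node now exposes its single result through `data` instead of being iterated as a sequence. A hedged sketch of that access pattern, assuming a configured `Workspace`; the `readSum` helper is illustrative.

import space.kscience.dataforge.workspace.Workspace

// Produce the "sum" task and await its single result, if present.
suspend fun readSum(workspace: Workspace): Any? =
    workspace.produce("sum").data?.await()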

@ -5,5 +5,6 @@ org.gradle.jvmargs=-Xmx4096m
kotlin.mpp.stability.nowarn=true
kotlin.native.ignoreDisabledTargets=true
org.jetbrains.dokka.experimental.gradle.pluginMode=V2Enabled
toolsVersion=0.16.1-kotlin-2.1.0