Merge pull request #74 from mipt-npm/dev

0.5.2
This commit is contained in:
Alexander Nozik 2021-11-30 13:12:20 +03:00 committed by GitHub
commit 8c0bc05a9a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
63 changed files with 483 additions and 252 deletions

4
.space.kts Normal file
View File

@ -0,0 +1,4 @@
job("Build") {
gradlew("openjdk:11", "build")
}

View File

@ -12,10 +12,32 @@
### Fixed
### Security
## [0.5.2]
### Added
- Yaml plugin
- Partial fix to #53
### Changed
### Deprecated
### Removed
### Fixed
- MutableMetaImpl attachment and checks
- Listeners in observable meta are replaced by lists
- JS number comparison bug.
### Security
## [0.5.0]
### Added
- Experimental `listOfSpec` delegate.
### Changed
- **API breaking** Config is deprecated, use `ObservableMeta` instead.
- **API breaking** Descriptor now has a member property `defaultValue` instead of `defaultItem()` extension. It caches default value state on the first call. It is done because computing default on each call is too expensive.
@ -26,17 +48,18 @@
- **API breaking** `String.toName()` is replaced by `Name.parse()`
- **API breaking** Configurable `config` changed to `meta`
### Removed
- `Config`
- Public PluginManager mutability
- Tables and tables-exposed moved to the separate project `tables.kt`
- BinaryMetaFormat. Use CBOR encoding instead
### Fixed
- Proper json array index treatment.
- Proper json index for single-value array.
### Security
## [0.4.0]
### Added
- LogManager plugin
@ -44,6 +67,7 @@
- Context `withEnv` and `fetch` methods to manipulate plugins without changing plugins after creation.
- Split `ItemDescriptor` into builder and read-only part
### Changed
- Kotlin-logging moved from common to JVM and JS. Replaced by console for native.
- Package changed to `space.kscience`
@ -53,18 +77,19 @@
- Added blank builders for children context.
- Refactor loggers
### Deprecated
- Direct use of PluginManager
### Removed
- Common dependency on Kotlin-logging
- Kotlinx-io fork dependency. Replaced by Ktor-io.
### Fixed
- Scheme properties properly handle children property change.
### Security
## [0.3.0]
### Added
- Yaml meta format based on yaml.kt
@ -73,6 +98,7 @@
- `copy` method to descriptors
- Multiplatform yaml meta
### Changed
- `ListValue` and `DoubleArrayValue` implement `Iterable`.
- Changed the logic of `Value::isList` to check for type instead of size
@ -87,17 +113,7 @@
- \[Major breaking change\] Full refactor of DataTree/DataSource
- \[Major Breaking change\] Replace KClass with KType in data. Remove direct access to constructors with types.
### Deprecated
### Removed
### Fixed
### Security
## [0.2.0]
### Added
### Changed
- Context content resolution refactor
- Kotlin 1.4.10 (build tools 0.6.0)
@ -107,16 +123,17 @@
- Removed io dependency from `dataforge-output`. Replaced Output by Appendable.
- Configurable is no longer MutableItemProvider. All functionality moved to Scheme.
### Deprecated
- Context activation API
- TextRenderer
### Removed
- Functional server prototype
- `dataforge-output` module
### Fixed
- Global context CoroutineScope resolution
- Library mode compliance
### Security

View File

@ -4,7 +4,10 @@ plugins {
allprojects {
group = "space.kscience"
version = "0.5.0"
version = "0.5.2"
repositories{
mavenCentral()
}
}
subprojects {
@ -22,8 +25,5 @@ ksciencePublish {
}
apiValidation {
if(project.version.toString().contains("dev")) {
validationDisabled = true
}
nonPublicMarkers.add("space.kscience.dataforge.misc.DFExperimental")
}

View File

@ -33,8 +33,8 @@ public final class space/kscience/dataforge/context/ClassLoaderPluginKt {
public class space/kscience/dataforge/context/Context : kotlinx/coroutines/CoroutineScope, space/kscience/dataforge/meta/MetaRepr, space/kscience/dataforge/misc/Named, space/kscience/dataforge/provider/Provider {
public static final field Companion Lspace/kscience/dataforge/context/Context$Companion;
public static final field PROPERTY_TARGET Ljava/lang/String;
public final fun buildContext (Ljava/lang/String;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/context/Context;
public static synthetic fun buildContext$default (Lspace/kscience/dataforge/context/Context;Ljava/lang/String;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/context/Context;
public final fun buildContext (Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/context/Context;
public static synthetic fun buildContext$default (Lspace/kscience/dataforge/context/Context;Lspace/kscience/dataforge/names/Name;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/context/Context;
public fun close ()V
public fun content (Ljava/lang/String;)Ljava/util/Map;
public final fun content (Ljava/lang/String;Z)Ljava/util/Map;
@ -57,7 +57,6 @@ public abstract interface class space/kscience/dataforge/context/ContextAware {
public final class space/kscience/dataforge/context/ContextBuilder {
public final fun build ()Lspace/kscience/dataforge/context/Context;
public final fun getName ()Lspace/kscience/dataforge/names/Name;
public final fun name (Ljava/lang/String;)V
public final fun plugin (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lkotlin/jvm/functions/Function1;)V
public final fun plugin (Lspace/kscience/dataforge/context/Plugin;)V
public final fun plugin (Lspace/kscience/dataforge/context/PluginFactory;Lkotlin/jvm/functions/Function1;)V
@ -67,11 +66,9 @@ public final class space/kscience/dataforge/context/ContextBuilder {
public static synthetic fun plugin$default (Lspace/kscience/dataforge/context/ContextBuilder;Lspace/kscience/dataforge/context/PluginFactory;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
public static synthetic fun plugin$default (Lspace/kscience/dataforge/context/ContextBuilder;Lspace/kscience/dataforge/context/PluginTag;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)V
public final fun properties (Lkotlin/jvm/functions/Function1;)V
public final fun setName (Lspace/kscience/dataforge/names/Name;)V
}
public final class space/kscience/dataforge/context/ContextBuilderKt {
public static final fun withEnv (Lspace/kscience/dataforge/context/Context;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/context/Context;
}
public final class space/kscience/dataforge/context/DefaultLogManager : space/kscience/dataforge/context/AbstractPlugin, space/kscience/dataforge/context/LogManager {

View File

@ -4,6 +4,7 @@ import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job
import kotlinx.coroutines.SupervisorJob
import space.kscience.dataforge.meta.*
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.provider.Provider
@ -71,16 +72,16 @@ public open class Context internal constructor(
private val childrenContexts = HashMap<Name, Context>()
/**
* Build and register a child context
* Get and validate existing context or build and register a new child context.
* @param name the relative (tail) name of the new context. If null, uses context hash code as a marker.
*/
@OptIn(DFExperimental::class)
@Synchronized
public fun buildContext(name: String? = null, block: ContextBuilder.() -> Unit = {}): Context {
val newContext = ContextBuilder(this)
.apply { name?.let { name(it) } }
.apply(block)
.build()
childrenContexts[newContext.name] = newContext
return newContext
public fun buildContext(name: Name? = null, block: ContextBuilder.() -> Unit = {}): Context {
val existing = name?.let { childrenContexts[name] }
return existing?.modify(block)?: ContextBuilder(this, name).apply(block).build().also {
childrenContexts[it.name] = it
}
}
/**

View File

@ -20,7 +20,7 @@ import kotlin.collections.set
@DFBuilder
public class ContextBuilder internal constructor(
private val parent: Context,
public var name: Name? = null,
public val name: Name? = null,
meta: Meta = Meta.EMPTY,
) {
internal val factories = HashMap<PluginFactory<*>, Meta>()
@ -30,10 +30,6 @@ public class ContextBuilder internal constructor(
meta.action()
}
public fun name(string: String) {
this.name = Name.parse(string)
}
@OptIn(DFExperimental::class)
private fun findPluginFactory(tag: PluginTag): PluginFactory<*> =
parent.gatherInSequence<PluginFactory<*>>(PluginFactory.TYPE).values
@ -95,19 +91,21 @@ public class ContextBuilder internal constructor(
}
/**
* Check if current context contains all plugins required by the builder and return it it does or forks to a new context
* Check if current context contains all plugins required by the builder and return it if it does, or forks to a new context
* if it does not.
*/
public fun Context.withEnv(block: ContextBuilder.() -> Unit): Context {
@DFExperimental
public fun Context.modify(block: ContextBuilder.() -> Unit): Context {
fun Context.contains(factory: PluginFactory<*>, meta: Meta): Boolean {
val loaded = plugins[factory.tag] ?: return false
return loaded.meta == meta
}
val builder = ContextBuilder(this, name + "env", properties).apply(block)
val builder = ContextBuilder(this, name + "mod", properties).apply(block)
val requiresFork = builder.factories.any { (factory, meta) ->
!contains(factory, meta)
} || ((properties as Meta) == builder.meta)
return if (requiresFork) builder.build() else this
}

View File

@ -3,11 +3,13 @@ package space.kscience.dataforge.context
import kotlinx.coroutines.CoroutineName
import kotlinx.coroutines.Job
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.Name.Companion.parse
import space.kscience.dataforge.names.asName
import kotlin.coroutines.CoroutineContext
import kotlin.native.concurrent.ThreadLocal
internal expect val globalLoggerFactory: PluginFactory<out LogManager>
internal expect fun getGlobalLoggerFactory(): PluginFactory<out LogManager>
/**
* A global root context. Closing [Global] terminates the framework.
@ -20,4 +22,4 @@ private object GlobalContext : Context("GLOBAL".asName(), null, emptySet(), Meta
public val Global: Context get() = GlobalContext
public fun Context(name: String? = null, block: ContextBuilder.() -> Unit = {}): Context =
Global.buildContext(name, block)
Global.buildContext(name?.let(Name::parse), block)

View File

@ -75,7 +75,7 @@ public class DefaultLogManager : AbstractPlugin(), LogManager {
*/
public val Context.logger: LogManager
get() = plugins.find(inherit = true) { it is LogManager } as? LogManager
?: globalLoggerFactory(context = Global).apply { attach(Global) }
?: getGlobalLoggerFactory()(context = Global).apply { attach(Global) }
/**
* The named proxy logger for a context member

View File

@ -1,6 +1,7 @@
package space.kscience.dataforge.context
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.plus
import kotlin.reflect.KClass
@ -84,7 +85,7 @@ public inline fun <reified T : Plugin> Context.fetch(factory: PluginFactory<T>,
val existing = plugins[factory]
return if (existing != null && existing.meta == meta) existing
else {
buildContext {
buildContext(name = this@fetch.name + factory.tag.name) {
plugin(factory, meta)
}.plugins[factory]!!
}

View File

@ -20,8 +20,7 @@ class ContextTest {
@Test
fun testPluginManager() {
val context = Global.buildContext {
name("test")
val context = Context("test") {
plugin(DummyPlugin())
}
val members = context.gather<Name>("test")

View File

@ -29,4 +29,4 @@ public class ConsoleLogManager : AbstractPlugin(), LogManager {
}
}
internal actual val globalLoggerFactory: PluginFactory<out LogManager> = ConsoleLogManager
internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = ConsoleLogManager

View File

@ -31,4 +31,4 @@ public class SlfLogManager : AbstractPlugin(), LogManager {
}
}
internal actual val globalLoggerFactory: PluginFactory<out LogManager> = SlfLogManager
internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = SlfLogManager

View File

@ -1,4 +1,4 @@
package space.kscience.dataforge.context
internal actual val globalLoggerFactory: PluginFactory<out LogManager> = DefaultLogManager
internal actual fun getGlobalLoggerFactory(): PluginFactory<out LogManager> = DefaultLogManager

View File

@ -76,7 +76,7 @@ internal class MapAction<in T : Any, out R : Any>(
return newData.named(newName)
}
val flow = dataSet.flow().map(::mapOne)
val flow = dataSet.flowData().map(::mapOne)
return ActiveDataTree(outputType) {
populate(flow)

View File

@ -84,7 +84,7 @@ internal class ReduceAction<T : Any, R : Any>(
override fun CoroutineScope.transform(set: DataSet<T>, meta: Meta, key: Name): Flow<NamedData<R>> = flow {
ReduceGroupBuilder<T, R>(inputType, this@transform, meta).apply(action).buildGroups(set).forEach { group ->
val dataFlow: Map<Name, Data<T>> = group.set.flow().fold(HashMap()) { acc, value ->
val dataFlow: Map<Name, Data<T>> = group.set.flowData().fold(HashMap()) { acc, value ->
acc.apply {
acc[value.name] = value.data
}

View File

@ -72,7 +72,7 @@ internal class SplitAction<T : Any, R : Any>(
}
return ActiveDataTree<R>(outputType) {
populate(dataSet.flow().flatMapConcat(transform = ::splitOne))
populate(dataSet.flowData().flatMapConcat(transform = ::splitOne))
scope?.launch {
dataSet.updates.collect { name ->
//clear old nodes

View File

@ -17,10 +17,8 @@ public interface DataSet<out T : Any> {
/**
* Traverse this provider or its child. The order is not guaranteed.
* [root] points to a root name for traversal. If it is empty, traverse this source, if it points to a [Data],
* return flow, that contains single [Data], if it points to a node with children, return children.
*/
public fun flow(): Flow<NamedData<T>>
public fun flowData(): Flow<NamedData<T>>
/**
* Get data with given name.
@ -31,8 +29,8 @@ public interface DataSet<out T : Any> {
* Get a snapshot of names of top level children of given node. Empty if node does not exist or is a leaf.
*/
public suspend fun listTop(prefix: Name = Name.EMPTY): List<Name> =
flow().map { it.name }.filter { it.startsWith(prefix) && (it.length == prefix.length + 1) }.toList()
// By default traverses the whole tree. Could be optimized in descendants
flowData().map { it.name }.filter { it.startsWith(prefix) && (it.length == prefix.length + 1) }.toList()
// By default, traverses the whole tree. Could be optimized in descendants
public companion object {
public val META_KEY: Name = "@meta".asName()
@ -43,9 +41,9 @@ public interface DataSet<out T : Any> {
public val EMPTY: DataSet<Nothing> = object : DataSet<Nothing> {
override val dataType: KType = TYPE_OF_NOTHING
private val nothing: Nothing get() = error("this is nothing")
//private val nothing: Nothing get() = error("this is nothing")
override fun flow(): Flow<NamedData<Nothing>> = emptyFlow()
override fun flowData(): Flow<NamedData<Nothing>> = emptyFlow()
override suspend fun getData(name: Name): Data<Nothing>? = null
}
@ -67,7 +65,7 @@ public val <T : Any> DataSet<T>.updates: Flow<Name> get() = if (this is ActiveDa
/**
* Flow all data nodes with names starting with [branchName]
*/
public fun <T : Any> DataSet<T>.flowChildren(branchName: Name): Flow<NamedData<T>> = this@flowChildren.flow().filter {
public fun <T : Any> DataSet<T>.flowChildren(branchName: Name): Flow<NamedData<T>> = this@flowChildren.flowData().filter {
it.name.startsWith(branchName)
}
@ -75,7 +73,7 @@ public fun <T : Any> DataSet<T>.flowChildren(branchName: Name): Flow<NamedData<T
* Start computation for all goals in data node and return a job for the whole node
*/
public fun <T : Any> DataSet<T>.startAll(coroutineScope: CoroutineScope): Job = coroutineScope.launch {
flow().map {
flowData().map {
it.launch(this@launch)
}.toList().joinAll()
}
@ -83,7 +81,7 @@ public fun <T : Any> DataSet<T>.startAll(coroutineScope: CoroutineScope): Job =
public suspend fun <T : Any> DataSet<T>.join(): Unit = coroutineScope { startAll(this).join() }
public suspend fun DataSet<*>.toMeta(): Meta = Meta {
flow().collect {
flowData().collect {
if (it.name.endsWith(DataSet.META_KEY)) {
set(it.name, it.meta)
} else {

View File

@ -30,7 +30,7 @@ public interface DataSetBuilder<in T : Any> {
}
//Set new items
dataSet.flow().collect {
dataSet.flowData().collect {
emit(name + it.name, it.data)
}
}
@ -139,7 +139,7 @@ public suspend inline fun <reified T : Any> DataSetBuilder<T>.static(
*/
@DFExperimental
public suspend fun <T : Any> DataSetBuilder<T>.populate(tree: DataSet<T>): Unit = coroutineScope {
tree.flow().collect {
tree.flowData().collect {
//TODO check if the place is occupied
emit(it.name, it.data)
}

View File

@ -32,12 +32,12 @@ public interface DataTree<out T : Any> : DataSet<T> {
*/
public suspend fun items(): Map<NameToken, DataTreeItem<T>>
override fun flow(): Flow<NamedData<T>> = flow {
override fun flowData(): Flow<NamedData<T>> = flow {
items().forEach { (token, childItem: DataTreeItem<T>) ->
if(!token.body.startsWith("@")) {
when (childItem) {
is DataTreeItem.Leaf -> emit(childItem.data.named(token.asName()))
is DataTreeItem.Node -> emitAll(childItem.tree.flow().map { it.named(token + it.name) })
is DataTreeItem.Node -> emitAll(childItem.tree.flowData().map { it.named(token + it.name) })
}
}
}

View File

@ -67,7 +67,7 @@ public open class LazyGoal<T>(
* If [GoalExecutionRestriction] is present in the [coroutineScope] context, the call could produce an error or a warning
* depending on the settings.
*/
@DFExperimental
@OptIn(DFExperimental::class)
override fun async(coroutineScope: CoroutineScope): Deferred<T> {
val log = coroutineScope.coroutineContext[GoalLogger]
// Check if context restricts goal computation

View File

@ -44,7 +44,7 @@ public interface GroupRule {
): Map<String, DataSet<T>> {
val map = HashMap<String, ActiveDataTree<T>>()
set.flow().collect { data ->
set.flowData().collect { data ->
val tagValue = data.meta[key]?.string ?: defaultTagValue
map.getOrPut(tagValue) { ActiveDataTree(set.dataType) }.emit(data.name, data.data)
}

View File

@ -53,7 +53,7 @@ internal class StaticDataTree<T : Any>(
set(name, DataTreeItem.Node(dataSet))
} else {
coroutineScope {
dataSet.flow().collect {
dataSet.flowData().collect {
emit(name + it.name, it.data)
}
}

View File

@ -20,8 +20,8 @@ public fun <T : Any> DataSet<T>.filter(
): ActiveDataSet<T> = object : ActiveDataSet<T> {
override val dataType: KType get() = this@filter.dataType
override fun flow(): Flow<NamedData<T>> =
this@filter.flow().filter { predicate(it.name, it.data) }
override fun flowData(): Flow<NamedData<T>> =
this@filter.flowData().filter { predicate(it.name, it.data) }
override suspend fun getData(name: Name): Data<T>? = this@filter.getData(name)?.takeIf {
predicate(name, it)
@ -40,7 +40,7 @@ public fun <T : Any> DataSet<T>.withNamePrefix(prefix: Name): DataSet<T> = if (p
else object : ActiveDataSet<T> {
override val dataType: KType get() = this@withNamePrefix.dataType
override fun flow(): Flow<NamedData<T>> = this@withNamePrefix.flow().map { it.data.named(prefix + it.name) }
override fun flowData(): Flow<NamedData<T>> = this@withNamePrefix.flowData().map { it.data.named(prefix + it.name) }
override suspend fun getData(name: Name): Data<T>? =
name.removeHeadOrNull(name)?.let { this@withNamePrefix.getData(it) }
@ -56,7 +56,7 @@ public fun <T : Any> DataSet<T>.branch(branchName: Name): DataSet<T> = if (branc
} else object : ActiveDataSet<T> {
override val dataType: KType get() = this@branch.dataType
override fun flow(): Flow<NamedData<T>> = this@branch.flow().mapNotNull {
override fun flowData(): Flow<NamedData<T>> = this@branch.flowData().mapNotNull {
it.name.removeHeadOrNull(branchName)?.let { name ->
it.data.named(name)
}

View File

@ -144,7 +144,7 @@ public suspend fun <T : Any, R : Any> DataSet<T>.map(
block: suspend (T) -> R,
): DataTree<R> = DataTree<R>(outputType) {
populate(
flow().map {
flowData().map {
val newMeta = it.meta.toMutableMeta().apply(metaTransform).seal()
Data(outputType, newMeta, coroutineContext, listOf(it)) {
block(it.await())
@ -162,7 +162,7 @@ public suspend inline fun <T : Any, reified R : Any> DataSet<T>.map(
public suspend fun <T : Any> DataSet<T>.forEach(block: suspend (NamedData<T>) -> Unit) {
contract { callsInPlace(block, InvocationKind.EXACTLY_ONCE) }
flow().collect {
flowData().collect {
block(it)
}
}
@ -171,11 +171,11 @@ public suspend inline fun <T : Any, reified R : Any> DataSet<T>.reduceToData(
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline transformation: suspend (Flow<NamedData<T>>) -> R,
): Data<R> = flow().reduceToData(coroutineContext, meta, transformation)
): Data<R> = flowData().reduceToData(coroutineContext, meta, transformation)
public suspend inline fun <T : Any, reified R : Any> DataSet<T>.foldToData(
initial: R,
coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = Meta.EMPTY,
noinline block: suspend (result: R, data: NamedData<T>) -> R,
): Data<R> = flow().foldToData(initial, coroutineContext, meta, block)
): Data<R> = flowData().foldToData(initial, coroutineContext, meta, block)

View File

@ -3,6 +3,7 @@ package space.kscience.dataforge.data
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.filter
import kotlinx.coroutines.flow.map
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.matches
@ -16,48 +17,65 @@ import kotlin.reflect.typeOf
*/
@Suppress("UNCHECKED_CAST")
private fun <R : Any> Data<*>.castOrNull(type: KType): Data<R>? =
if (!this.type.isSubtypeOf(type)) null else object : Data<R> by (this as Data<R>) {
if (!this.type.isSubtypeOf(type)) {
null
} else {
object : Data<R> by (this as Data<R>) {
override val type: KType = type
}
}
/**
* Select all data matching given type and filters. Does not modify paths
*
* @param namePattern a name match patter according to [Name.matches]
* @param filter addition filtering condition based on item name and meta. By default, accepts all
*/
@OptIn(DFExperimental::class)
@PublishedApi
internal fun <R : Any> DataSet<*>.select(
public fun <R : Any> DataSet<*>.select(
type: KType,
namePattern: Name? = null,
filter: (name: Name, meta: Meta) -> Boolean = { _, _ -> true }
): ActiveDataSet<R> = object : ActiveDataSet<R> {
override val dataType = type
private fun checkDatum(name: Name, datum: Data<*>): Boolean = datum.type.isSubtypeOf(type)
&& (namePattern == null || name.matches(namePattern))
&& filter(name, datum.meta)
override fun flow(): Flow<NamedData<R>> = this@select.flow().filter { datum ->
datum.type.isSubtypeOf(type) && (namePattern == null || datum.name.matches(namePattern))
override fun flowData(): Flow<NamedData<R>> = this@select.flowData().filter {
checkDatum(it.name, it.data)
}.map {
@Suppress("UNCHECKED_CAST")
it as NamedData<R>
}
override suspend fun getData(name: Name): Data<R>? = this@select.getData(name)?.castOrNull(type)
override val updates: Flow<Name> = this@select.updates.filter {
val datum = this@select.getData(it)
datum?.type?.isSubtypeOf(type) ?: false
override suspend fun getData(name: Name): Data<R>? = this@select.getData(name)?.let { datum ->
if (checkDatum(name, datum)) datum.castOrNull(type) else null
}
override val updates: Flow<Name> = this@select.updates.filter {
val datum = this@select.getData(it) ?: return@filter false
checkDatum(it, datum)
}
}
/**
* Select a single datum of the appropriate type
*/
public inline fun <reified R : Any> DataSet<*>.select(namePattern: Name? = null): DataSet<R> =
select(typeOf<R>(), namePattern)
public inline fun <reified R : Any> DataSet<*>.select(
namePattern: Name? = null,
noinline filter: (name: Name, meta: Meta) -> Boolean = { _, _ -> true }
): DataSet<R> = select(typeOf<R>(), namePattern, filter)
/**
* Select a single datum if it is present and of given [type]
*/
public suspend fun <R : Any> DataSet<*>.selectOne(type: KType, name: Name): NamedData<R>? =
getData(name)?.castOrNull<R>(type)?.named(name)
public suspend inline fun <reified R : Any> DataSet<*>.selectOne(name: Name): NamedData<R>? = selectOne(typeOf<R>(), name)
public suspend inline fun <reified R : Any> DataSet<*>.selectOne(name: Name): NamedData<R>? =
selectOne(typeOf<R>(), name)
public suspend inline fun <reified R : Any> DataSet<*>.selectOne(name: String): NamedData<R>? =
selectOne(typeOf<R>(), Name.parse(name))

View File

@ -23,11 +23,11 @@ public class FrontMatterEnvelopeFormat(
do {
line = input.readUTF8Line() ?: error("Input does not contain front matter separator")
offset += line.encodeToByteArray().size.toUInt()
} while (!line.startsWith(space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.SEPARATOR))
} while (!line.startsWith(SEPARATOR))
val readMetaFormat =
space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.metaTypeRegex.matchEntire(line)?.groupValues?.first()
?.let { io.resolveMetaFormat(it) } ?: space.kscience.dataforge.io.yaml.YamlMetaFormat
metaTypeRegex.matchEntire(line)?.groupValues?.first()
?.let { io.resolveMetaFormat(it) } ?: YamlMetaFormat
//TODO replace by preview
val meta = Binary {
@ -35,7 +35,7 @@ public class FrontMatterEnvelopeFormat(
line = input.readSafeUtf8Line()
writeUtf8String(line + "\r\n")
offset += line.encodeToByteArray().size.toUInt()
} while (!line.startsWith(space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.SEPARATOR))
} while (!line.startsWith(SEPARATOR))
}.read {
readMetaFormat.readMeta(input)
@ -47,16 +47,16 @@ public class FrontMatterEnvelopeFormat(
var line: String
do {
line = input.readSafeUtf8Line() //?: error("Input does not contain front matter separator")
} while (!line.startsWith(space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.SEPARATOR))
} while (!line.startsWith(SEPARATOR))
val readMetaFormat =
space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.metaTypeRegex.matchEntire(line)?.groupValues?.first()
?.let { io.resolveMetaFormat(it) } ?: space.kscience.dataforge.io.yaml.YamlMetaFormat
metaTypeRegex.matchEntire(line)?.groupValues?.first()
?.let { io.resolveMetaFormat(it) } ?: YamlMetaFormat
val meta = Binary {
do {
writeUtf8String(input.readSafeUtf8Line() + "\r\n")
} while (!line.startsWith(space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.SEPARATOR))
} while (!line.startsWith(SEPARATOR))
}.read {
readMetaFormat.readMeta(input)
}
@ -72,9 +72,9 @@ public class FrontMatterEnvelopeFormat(
formatMeta: Meta,
) {
val metaFormat = metaFormatFactory(formatMeta, this@FrontMatterEnvelopeFormat.io.context)
output.writeRawString("${space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.SEPARATOR}\r\n")
output.writeRawString("$SEPARATOR\r\n")
metaFormat.run { this.writeObject(output, envelope.meta) }
output.writeRawString("${space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.SEPARATOR}\r\n")
output.writeRawString("$SEPARATOR\r\n")
//Printing data
envelope.data?.let { data ->
output.writeBinary(data)
@ -92,32 +92,32 @@ public class FrontMatterEnvelopeFormat(
private val metaTypeRegex = "---(\\w*)\\s*".toRegex()
override fun invoke(meta: Meta, context: Context): EnvelopeFormat {
return space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat(context.io, meta)
return FrontMatterEnvelopeFormat(context.io, meta)
}
override fun peekFormat(io: IOPlugin, binary: Binary): EnvelopeFormat? = binary.read {
val line = readSafeUtf8Line()
return@read if (line.startsWith("---")) {
space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.invoke()
invoke()
} else {
null
}
}
private val default by lazy { space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.invoke() }
private val default by lazy { invoke() }
override fun readPartial(input: Input): PartialEnvelope =
space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.default.readPartial(input)
default.readPartial(input)
override fun writeEnvelope(
output: Output,
envelope: Envelope,
metaFormatFactory: MetaFormatFactory,
formatMeta: Meta,
): Unit = space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.default.writeEnvelope(output, envelope, metaFormatFactory, formatMeta)
): Unit = FrontMatterEnvelopeFormat.default.writeEnvelope(output, envelope, metaFormatFactory, formatMeta)
override fun readObject(input: Input): Envelope = space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.Companion.default.readObject(input)
override fun readObject(input: Input): Envelope = default.readObject(input)
}
}

View File

@ -110,7 +110,7 @@ public class YamlMetaFormat(private val meta: Meta) : MetaFormat {
}
override fun toMeta(): Meta = Meta {
NAME_KEY put space.kscience.dataforge.io.yaml.FrontMatterEnvelopeFormat.name.toString()
NAME_KEY put FrontMatterEnvelopeFormat.name.toString()
META_KEY put meta
}

View File

@ -0,0 +1,32 @@
package space.kscience.dataforge.io.yaml
import space.kscience.dataforge.context.AbstractPlugin
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.PluginFactory
import space.kscience.dataforge.context.PluginTag
import space.kscience.dataforge.io.IOPlugin
import space.kscience.dataforge.io.MetaFormatFactory
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
import kotlin.reflect.KClass
@DFExperimental
public class YamlPlugin(meta: Meta) : AbstractPlugin(meta) {
public val io: IOPlugin by require(IOPlugin)
override val tag: PluginTag get() = Companion.tag
override fun content(target: String): Map<Name, Any> = when (target) {
MetaFormatFactory.META_FORMAT_TYPE -> mapOf("yaml".asName() to YamlMetaFormat)
else -> super.content(target)
}
public companion object : PluginFactory<YamlPlugin> {
override val tag: PluginTag = PluginTag("io.yaml", group = PluginTag.DATAFORGE_GROUP)
override val type: KClass<out YamlPlugin> = YamlPlugin::class
override fun invoke(meta: Meta, context: Context): YamlPlugin = YamlPlugin(meta)
}
}

View File

@ -54,6 +54,7 @@ public fun Binary.toByteArray(): ByteArray = if (this is ByteArrayBinary) {
}
}
//TODO optimize for file-based Inputs
public fun Input.readBinary(size: Int): Binary {
val array = readBytes(size)
return ByteArrayBinary(array)

View File

@ -10,8 +10,6 @@ import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.get
import space.kscience.dataforge.meta.string
import space.kscience.dataforge.names.Name
import kotlin.native.concurrent.ThreadLocal
import kotlin.reflect.KClass
public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
@ -31,7 +29,6 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
}
}
public val metaFormatFactories: Collection<MetaFormatFactory> by lazy {
context.gather<MetaFormatFactory>(META_FORMAT_TYPE).values
}
@ -55,13 +52,11 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
return resolveEnvelopeFormat(Name.parse(name), meta)
}
override fun content(target: String): Map<Name, Any> {
return when (target) {
override fun content(target: String): Map<Name, Any> = when (target) {
META_FORMAT_TYPE -> defaultMetaFormats.toMap()
ENVELOPE_FORMAT_TYPE -> defaultEnvelopeFormats.toMap()
else -> super.content(target)
}
}
public companion object : PluginFactory<IOPlugin> {
public val defaultMetaFormats: List<MetaFormatFactory> = listOf(JsonMetaFormat)
@ -75,10 +70,13 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
}
}
@ThreadLocal
internal val ioContext = Global.withEnv {
name("IO")
internal val ioContext = Context("IO") {
plugin(IOPlugin)
}
public val Context.io: IOPlugin get() = (if (this == Global) ioContext else this).fetch(IOPlugin)
public val Context.io: IOPlugin
get() = if (this == Global) {
ioContext.fetch(IOPlugin)
} else {
fetch(IOPlugin)
}

View File

@ -161,7 +161,7 @@ public class TaggedEnvelopeFormat(
}
}
private val default by lazy { invoke(context = ioContext) }
private val default by lazy { invoke() }
override fun readPartial(input: Input): PartialEnvelope =
default.run { readPartial(input) }

View File

@ -193,11 +193,9 @@ public class TaglessEnvelopeFormat(
override val name: Name = TAGLESS_ENVELOPE_TYPE.asName()
override fun invoke(meta: Meta, context: Context): EnvelopeFormat {
return TaglessEnvelopeFormat(context.io, meta)
}
override fun invoke(meta: Meta, context: Context): EnvelopeFormat = TaglessEnvelopeFormat(context.io, meta)
private val default by lazy { invoke(context = ioContext) }
private val default by lazy { invoke() }
override fun readPartial(input: Input): PartialEnvelope =
default.run { readPartial(input) }

View File

@ -365,6 +365,8 @@ public class space/kscience/dataforge/meta/Scheme : space/kscience/dataforge/met
}
public final class space/kscience/dataforge/meta/SchemeKt {
public static final fun copy (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Scheme;
public static synthetic fun copy$default (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/SchemeSpec;Lkotlin/jvm/functions/Function1;ILjava/lang/Object;)Lspace/kscience/dataforge/meta/Scheme;
public static final fun invoke (Lspace/kscience/dataforge/meta/Scheme;Lkotlin/jvm/functions/Function1;)Lspace/kscience/dataforge/meta/Scheme;
public static final fun retarget (Lspace/kscience/dataforge/meta/Scheme;Lspace/kscience/dataforge/meta/MutableMeta;)Lspace/kscience/dataforge/meta/Scheme;
}
@ -726,6 +728,7 @@ public final class space/kscience/dataforge/names/NameKt {
public static final fun getLength (Lspace/kscience/dataforge/names/Name;)I
public static final fun isEmpty (Lspace/kscience/dataforge/names/Name;)Z
public static final fun lastOrNull (Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/names/NameToken;
public static final fun parseAsName (Ljava/lang/String;)Lspace/kscience/dataforge/names/Name;
public static final fun plus (Lspace/kscience/dataforge/names/Name;Ljava/lang/String;)Lspace/kscience/dataforge/names/Name;
public static final fun plus (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/Name;)Lspace/kscience/dataforge/names/Name;
public static final fun plus (Lspace/kscience/dataforge/names/Name;Lspace/kscience/dataforge/names/NameToken;)Lspace/kscience/dataforge/names/Name;

View File

@ -155,7 +155,7 @@ public fun MutableMeta.getOrCreate(key: String): MutableMeta = getOrCreate(Name.
public interface MutableTypedMeta<M : MutableTypedMeta<M>> : TypedMeta<M>, MutableMeta {
/**
* Zero-copy attach or replace existing node. Node is used with any additional state, listeners, etc.
* Zero-copy (if possible) attach or replace existing node. Node is used with any additional state, listeners, etc.
* In some cases it is possible to have the same node as a child to several others
*/
@DFExperimental
@ -261,6 +261,13 @@ public operator fun <M : MutableTypedMeta<M>> MutableTypedMeta<M>.set(name: Name
}
}
private fun ObservableMeta.adoptBy(parent: MutableMetaImpl, key: NameToken) {
if (this === parent) error("Can't attach a node to itself")
onChange(parent) { name ->
parent.invalidate(key + name)
}
}
/**
* A general implementation of mutable [Meta] which implements both [MutableTypedMeta] and [ObservableMeta].
* The implementation uses blocking synchronization on mutation on JVM
@ -280,24 +287,16 @@ private class MutableMetaImpl(
private val children: LinkedHashMap<NameToken, ObservableMutableMeta> =
LinkedHashMap(children.mapValues { (key, meta) ->
MutableMetaImpl(meta.value, meta.items).apply { adoptBy(this, key) }
MutableMetaImpl(meta.value, meta.items).also { it.adoptBy(this, key) }
})
override val items: Map<NameToken, ObservableMutableMeta> get() = children
private fun ObservableMeta.adoptBy(parent: MutableMetaImpl, key: NameToken) {
onChange(parent) { name ->
parent.invalidate(key + name)
}
}
@DFExperimental
override fun attach(name: Name, node: ObservableMutableMeta) {
when (name.length) {
0 -> error("Can't set a meta with empty name")
1 -> {
replaceItem(name.first(), get(name), node)
}
1 -> replaceItem(name.first(), get(name), node)
else -> get(name.cutLast())?.attach(name.lastOrNull()!!.asName(), node)
}
}
@ -338,9 +337,14 @@ private class MutableMetaImpl(
}
private fun wrapItem(meta: Meta): MutableMetaImpl =
MutableMetaImpl(meta.value, meta.items.mapValuesTo(LinkedHashMap()) { wrapItem(it.value) })
meta as? MutableMetaImpl ?: MutableMetaImpl(
meta.value,
meta.items.mapValuesTo(LinkedHashMap()) {
wrapItem(it.value)
}
)
@Synchronized
override fun setMeta(name: Name, node: Meta?) {
val oldItem: ObservableMutableMeta? = get(name)
if (oldItem != node) {
@ -348,13 +352,24 @@ private class MutableMetaImpl(
0 -> error("Can't set a meta with empty name")
1 -> {
val token = name.firstOrNull()!!
replaceItem(token, oldItem, node?.let { wrapItem(node) })
//remove child and invalidate if argument is null
if (node == null) {
children.remove(token)?.removeListener(this)
// old item is not null otherwise we can't be here
invalidate(name)
} else {
val newNode = wrapItem(node)
newNode.adoptBy(this, token)
children[token] = newNode
}
}
else -> {
val token = name.firstOrNull()!!
//get existing or create new node. Index is ignored for new node
//get existing or create new node.
if (items[token] == null) {
replaceItem(token, null, MutableMetaImpl(null))
val newNode = MutableMetaImpl(null)
newNode.adoptBy(this, token)
children[token] = newNode
}
items[token]?.setMeta(name.cutFirst(), node)
}
@ -384,19 +399,6 @@ public fun MutableMeta.append(name: Name, value: Value): Unit = append(name, Met
public fun MutableMeta.append(key: String, value: Value): Unit = append(Name.parse(key), value)
///**
// * Apply existing node with given [builder] or create a new element with it.
// */
//@DFExperimental
//public fun MutableMeta.edit(name: Name, builder: MutableMeta.() -> Unit) {
// val item = when (val existingItem = get(name)) {
// null -> MutableMeta().also { set(name, it) }
// is MetaItemNode<MutableMeta> -> existingItem.node
// else -> error("Can't edit value meta item")
// }
// item.apply(builder)
//}
/**
* Create a mutable copy of this meta. The copy is created even if the Meta is already mutable
*/

View File

@ -48,7 +48,7 @@ public interface ObservableMutableMeta : ObservableMeta, MutableMeta, MutableTyp
}
internal abstract class AbstractObservableMeta : ObservableMeta {
private val listeners = HashSet<MetaListener>()
private val listeners: MutableList<MetaListener> = mutableListOf()
override fun invalidate(name: Name) {
listeners.forEach { it.callback(this, name) }

View File

@ -68,7 +68,7 @@ private class ObservableMetaWrapper(
override fun attach(name: Name, node: ObservableMutableMeta) {
set(name, node)
node.onChange(this) { changeName ->
setMeta(name + changeName, node[changeName])
setMeta(name + changeName, this[changeName])
}
}
}

View File

@ -1,6 +1,9 @@
package space.kscience.dataforge.meta
import space.kscience.dataforge.meta.descriptors.*
import space.kscience.dataforge.meta.descriptors.Described
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.meta.descriptors.get
import space.kscience.dataforge.meta.descriptors.validate
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.*
import space.kscience.dataforge.values.Value
@ -29,7 +32,7 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
internal fun wrap(
newMeta: MutableMeta,
preserveDefault: Boolean = false
preserveDefault: Boolean = false,
) {
if (preserveDefault) {
defaultMeta = targetMeta.seal()
@ -64,7 +67,7 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
override fun toMeta(): Laminate = Laminate(meta, descriptor?.defaultNode)
private val listeners = HashSet<MetaListener>()
private val listeners: MutableList<MetaListener> = mutableListOf()
private inner class SchemeMeta(val pathName: Name) : ObservableMutableMeta {
override var value: Value?
@ -117,9 +120,12 @@ public open class Scheme : Described, MetaRepr, MutableMetaProvider, Configurabl
@DFExperimental
override fun attach(name: Name, node: ObservableMutableMeta) {
TODO("Not yet implemented")
//TODO implement zero-copy attachment
setMeta(name, meta)
node.onChange(this) { changeName ->
setMeta(name + changeName, this[changeName])
}
}
}
}
@ -137,6 +143,12 @@ public fun <T : Scheme> T.retarget(provider: MutableMeta): T = apply {
*/
public inline operator fun <T : Scheme> T.invoke(block: T.() -> Unit): T = apply(block)
/**
* Create a copy of given [Scheme]
*/
public inline fun <T : Scheme> T.copy(spec: SchemeSpec<T>, block: T.() -> Unit = {}): T =
spec.read(meta.copy()).apply(block)
/**
* A specification for simplified generation of wrappers
*/

View File

@ -202,3 +202,5 @@ public fun Name.removeHeadOrNull(head: Name): Name? = if (startsWith(head)) {
} else {
null
}
public fun String.parseAsName(): Name = Name.parse(this)

View File

@ -140,14 +140,17 @@ public class NumberValue(public val number: Number) : Value {
val otherNumber = other.numberOrNull ?: return false
if(number == otherNumber) return true
//Do not change the order of comparison. On JS number is the instance of all types
return when (numberOrNull) {
is Short -> number.toShort() == otherNumber.toShort()
is Long -> number.toLong() == otherNumber.toLong()
is Byte -> number.toByte() == otherNumber.toByte()
is Int -> number.toInt() == otherNumber.toInt()
is Float -> number.toFloat() == otherNumber.toFloat()
is Double -> number.toDouble() == otherNumber.toDouble()
else -> number.toString() == otherNumber.toString()
is Float -> number.toFloat() == otherNumber.toFloat()
is Long -> number.toLong() == otherNumber.toLong()
is Short -> number.toShort() == otherNumber.toShort()
is Int -> number.toInt() == otherNumber.toInt()
is Byte -> number.toByte() == otherNumber.toByte()
else -> false
}
}
@ -166,9 +169,7 @@ public class EnumValue<E : Enum<*>>(override val value: E) : Value {
override fun toString(): String = value.toString()
override fun equals(other: Any?): Boolean {
return string == (other as? Value)?.string
}
override fun equals(other: Any?): Boolean = string == (other as? Value)?.string
override fun hashCode(): Int = value.hashCode()
}
@ -190,9 +191,7 @@ public class ListValue(override val list: List<Value>) : Value, Iterable<Value>
return list == other.list
}
override fun hashCode(): Int {
return list.hashCode()
}
override fun hashCode(): Int = list.hashCode()
public companion object {
public val EMPTY: ListValue = ListValue(emptyList())

View File

@ -36,9 +36,7 @@ public class DoubleArrayValue(override val value: DoubleArray) : Value, Iterable
}
}
override fun hashCode(): Int {
return value.contentHashCode()
}
override fun hashCode(): Int = value.contentHashCode()
override fun toString(): String = list.joinToString(prefix = "[", postfix = "]")

View File

@ -0,0 +1,16 @@
package space.kscience.dataforge.meta
import kotlinx.serialization.json.Json
import space.kscience.dataforge.values.string
import kotlin.test.Test
import kotlin.test.assertEquals
class MetaSerializationTest {
@Test
fun singleValueDeserialization(){
val string = "ddd"
val meta = Json.decodeFromString(MetaSerializer, string)
assertEquals(string, meta.value?.string)
}
}

View File

@ -5,7 +5,7 @@ import kotlin.test.assertEquals
class MutableMetaTest{
@Test
fun testRemove(){
fun remove(){
val meta = MutableMeta {
"aNode" put {
"innerNode" put {

View File

@ -1,8 +1,11 @@
package space.kscience.dataforge.meta
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.values.ListValue
import space.kscience.dataforge.values.Value
import kotlin.test.Test
import kotlin.test.assertEquals
import kotlin.test.assertNotNull
@DFExperimental
class SchemeTest {
@ -33,4 +36,16 @@ class SchemeTest {
scheme.a = 2
assertEquals(2, flag)
}
@Test
fun testListSubscription(){
val scheme = TestScheme.empty()
var value: Value? = null
scheme.v = ListValue(0.0,0.0,0.0)
scheme.useProperty(TestScheme::v){
value = it
}
scheme.v = ListValue(1.0, 2.0, 3.0)
assertNotNull(value)
}
}

View File

@ -9,6 +9,8 @@ internal class TestScheme : Scheme() {
var a by int()
var b by string()
var v by value()
companion object : SchemeSpec<TestScheme>(::TestScheme)
}

View File

@ -0,0 +1,32 @@
package space.kscience.dataforge.values
import kotlin.test.Test
import kotlin.test.assertEquals
import kotlin.test.assertNotEquals
class ValueEqualityTest {
@Test
fun numberValueNotEquals(){
val a = 0.33.asValue()
val b = 0.34.asValue()
println(a.number == b.number)
assertNotEquals(a,b)
}
@Test
fun arrayEqualsList() {
val v1 = doubleArrayOf(1.0, 2.0, 3.0).asValue()
val v2 = listOf(1, 2, 3).map { it.asValue() }.asValue()
assertEquals(v1, v2)
}
@Test
fun notEquals() {
val v1 = doubleArrayOf(1.0, 2.0, 3.0).asValue()
val v2 = listOf(1, 2, 6).map { it.asValue() }.asValue()
assertNotEquals(v1, v2)
}
}

View File

@ -0,0 +1,15 @@
package space.kscience.dataforge.meta
import org.junit.jupiter.api.Test
import kotlin.test.assertFails
class JvmMutableMetaTest {
@Test
fun recursiveMeta(){
val meta = MutableMeta {
"a" put 2
}
assertFails { meta["child.a"] = meta }
}
}

View File

@ -15,7 +15,7 @@ class BuildersKtTest {
Workspace(Global){
println("I am working")
context { name("test") }
//context { name("test") }
target("testTarget") {
"a" put 12
@ -28,8 +28,6 @@ class BuildersKtTest {
val script = """
println("I am working")
context{ name("test") }
target("testTarget"){
"a" put 12
}

View File

@ -17,7 +17,7 @@ public class SimpleWorkspace(
private val externalTasks: Map<Name, Task<*>>,
) : Workspace {
override val data: TaskResult<*> = internalize(data, Name.EMPTY, Meta.EMPTY)
override val data: TaskResult<*> = wrapResult(data, Name.EMPTY, Meta.EMPTY)
override val tasks: Map<Name, Task<*>>
get() = context.gather<Task<*>>(Task.TYPE) + externalTasks

View File

@ -28,7 +28,7 @@ public interface Task<out T : Any> : Described {
public suspend fun execute(workspace: Workspace, taskName: Name, taskMeta: Meta): TaskResult<T>
public companion object {
public const val TYPE: String = "workspace.stage"
public const val TYPE: String = "workspace.task"
}
}
@ -42,6 +42,10 @@ public class TaskResultBuilder<T : Any>(
/**
* Create a [Task] that composes a result using [builder]. Only data from the workspace could be used.
* Data dependency cycles are not allowed.
*
* @param resultType the type boundary for data produced by this task
* @param descriptor of meta accepted by this task
* @param builder for resulting data set
*/
@Suppress("FunctionName")
@DFInternal
@ -62,7 +66,7 @@ public fun <T : Any> Task(
val dataset = DataTree<T>(resultType) {
TaskResultBuilder(workspace, taskName, taskMeta, this).apply { builder() }
}
workspace.internalize(dataset, taskName, taskMeta)
workspace.wrapResult(dataset, taskName, taskMeta)
}
}

View File

@ -42,6 +42,6 @@ private class TaskDataImpl<out T : Any>(
// }
}
internal fun <T : Any> Workspace.internalize(data: Data<T>, name: Name, stage: Name, stageMeta: Meta): TaskData<T> =
TaskDataImpl(this, data, name, stage, stageMeta)
public fun <T : Any> Workspace.wrapData(data: Data<T>, name: Name, taskName: Name, stageMeta: Meta): TaskData<T> =
TaskDataImpl(this, data, name, taskName, stageMeta)

View File

@ -25,7 +25,7 @@ public interface TaskResult<out T : Any> : DataSet<T> {
*/
public val taskMeta: Meta
override fun flow(): Flow<TaskData<T>>
override fun flowData(): Flow<TaskData<T>>
override suspend fun getData(name: Name): TaskData<T>?
}
@ -36,14 +36,17 @@ private class TaskResultImpl<out T : Any>(
override val taskMeta: Meta,
) : TaskResult<T>, DataSet<T> by dataSet {
override fun flow(): Flow<TaskData<T>> = dataSet.flow().map {
workspace.internalize(it, it.name, taskName, taskMeta)
override fun flowData(): Flow<TaskData<T>> = dataSet.flowData().map {
workspace.wrapData(it, it.name, taskName, taskMeta)
}
override suspend fun getData(name: Name): TaskData<T>? = dataSet.getData(name)?.let {
workspace.internalize(it, name, taskName, taskMeta)
workspace.wrapData(it, name, taskName, taskMeta)
}
}
internal fun <T : Any> Workspace.internalize(dataSet: DataSet<T>, stage: Name, stageMeta: Meta): TaskResult<T> =
TaskResultImpl(this, dataSet, stage, stageMeta)
/**
* Wrap data into [TaskResult]
*/
public fun <T : Any> Workspace.wrapResult(dataSet: DataSet<T>, taskName: Name, taskMeta: Meta): TaskResult<T> =
TaskResultImpl(this, dataSet, taskName, taskMeta)

View File

@ -1,6 +1,7 @@
package space.kscience.dataforge.workspace
import space.kscience.dataforge.context.ContextAware
import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MutableMeta
import space.kscience.dataforge.misc.Type
@ -8,6 +9,10 @@ import space.kscience.dataforge.names.Name
import space.kscience.dataforge.provider.Provider
public interface DataSelector<T: Any>{
public suspend fun select(workspace: Workspace, meta: Meta): DataSet<T>
}
@Type(Workspace.TYPE)
public interface Workspace : ContextAware, Provider {
/**

View File

@ -14,16 +14,27 @@ import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
import space.kscience.dataforge.misc.DFBuilder
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
import kotlin.properties.PropertyDelegateProvider
import kotlin.properties.ReadOnlyProperty
public data class TaskReference<T: Any>(public val taskName: Name, public val task: Task<T>)
public data class TaskReference<T : Any>(public val taskName: Name, public val task: Task<T>) : DataSelector<T> {
@Suppress("UNCHECKED_CAST")
override suspend fun select(workspace: Workspace, meta: Meta): DataSet<T> {
if (workspace.tasks[taskName] == task) {
return workspace.produce(taskName, meta) as TaskResult<T>
} else {
error("Task $taskName does not belong to the workspace")
}
}
}
public interface TaskContainer {
public fun registerTask(taskName: Name, task: Task<*>)
}
public inline fun <reified T : Any> TaskContainer.registerTask(
name: String,
noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
@ -31,15 +42,20 @@ public inline fun <reified T : Any> TaskContainer.registerTask(
): Unit = registerTask(Name.parse(name), Task(MetaDescriptor(descriptorBuilder), builder))
public inline fun <reified T : Any> TaskContainer.task(
noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
descriptor: MetaDescriptor,
noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> = PropertyDelegateProvider { _, property ->
val taskName = Name.parse(property.name)
val task = Task(MetaDescriptor(descriptorBuilder), builder)
val task = Task(descriptor, builder)
registerTask(taskName, task)
ReadOnlyProperty { _, _ -> TaskReference(taskName, task) }
}
public inline fun <reified T : Any> TaskContainer.task(
noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> =
task(MetaDescriptor(descriptorBuilder), builder)
public class WorkspaceBuilder(private val parentContext: Context = Global) : TaskContainer {
private var context: Context? = null
@ -51,7 +67,7 @@ public class WorkspaceBuilder(private val parentContext: Context = Global) : Tas
* Define a context for the workspace
*/
public fun context(block: ContextBuilder.() -> Unit = {}) {
this.context = parentContext.buildContext("workspace", block)
this.context = parentContext.buildContext("workspace".asName(), block)
}
/**

View File

@ -0,0 +1,45 @@
package space.kscience.dataforge.workspace
import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.data.forEach
import space.kscience.dataforge.data.map
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.toMutableMeta
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
/**
* Select data using given [selector]
*/
public suspend fun <T : Any> TaskResultBuilder<*>.from(
selector: DataSelector<T>,
): DataSet<T> = selector.select(workspace, taskMeta)
public val TaskResultBuilder<*>.allData: DataSelector<*>
get() = object : DataSelector<Any> {
override suspend fun select(workspace: Workspace, meta: Meta): DataSet<Any> = workspace.data
}
/**
* Perform a lazy mapping task using given [selector] and [action]. The meta of resulting
* TODO move selector to receiver with multi-receivers
*/
@DFExperimental
public suspend inline fun <T : Any, reified R : Any> TaskResultBuilder<R>.pipeFrom(
selector: DataSelector<T>,
crossinline action: suspend (arg: T, name: Name, meta: Meta) -> R
) {
from(selector).forEach { data ->
val meta = data.meta.toMutableMeta().apply {
taskName put taskMeta
}
val res = data.map(workspace.context.coroutineContext, meta) {
action(it, data.name, meta)
}
emit(data.name, res)
}
}

View File

@ -1,24 +0,0 @@
package space.kscience.dataforge.workspace
import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.data.select
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.Name
public suspend inline fun <reified T : Any> TaskResultBuilder<T>.from(
task: Name,
taskMeta: Meta = Meta.EMPTY,
): DataSet<T> = workspace.produce(task, taskMeta).select()
@Suppress("UNCHECKED_CAST")
public suspend fun <R : Any> TaskResultBuilder<*>.from(
reference: TaskReference<R>,
taskMeta: Meta = Meta.EMPTY,
): DataSet<R> {
if (workspace.tasks[reference.taskName] == reference.task) {
return workspace.produce(reference.taskName, taskMeta) as TaskResult<R>
} else {
throw error("Task ${reference.taskName} does not belong to the workspace")
}
}

View File

@ -1,8 +0,0 @@
package space.kscience.dataforge.workspace
import kotlinx.coroutines.runBlocking
import space.kscience.dataforge.data.DataSetBuilder
public fun WorkspaceBuilder.data(builder: suspend DataSetBuilder<Any>.() -> Unit): Unit = runBlocking {
buildData(builder)
}

View File

@ -0,0 +1,21 @@
package space.kscience.dataforge.workspace
import kotlinx.coroutines.runBlocking
import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.data.DataSetBuilder
import space.kscience.dataforge.data.select
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.Name
public fun WorkspaceBuilder.data(builder: suspend DataSetBuilder<Any>.() -> Unit): Unit = runBlocking {
buildData(builder)
}
public inline fun <reified T: Any> TaskResultBuilder<*>.data(namePattern: Name? = null): DataSelector<T> = object : DataSelector<T> {
override suspend fun select(workspace: Workspace, meta: Meta): DataSet<T> = workspace.data.select(namePattern)
}
public suspend inline fun <reified T : Any> TaskResultBuilder<*>.fromTask(
task: Name,
taskMeta: Meta = Meta.EMPTY,
): DataSet<T> = workspace.produce(task, taskMeta).select()

View File

@ -16,7 +16,7 @@ class DataPropagationTestPlugin : WorkspacePlugin() {
val allData by task<Int> {
val selectedData = workspace.data.select<Int>()
val result: Data<Int> = selectedData.flow().foldToData(0) { result, data ->
val result: Data<Int> = selectedData.flowData().foldToData(0) { result, data ->
result + data.await()
}
emit("result", result)
@ -58,7 +58,7 @@ class DataPropagationTest {
fun testAllData() {
runBlocking {
val node = testWorkspace.produce("Test.allData")
assertEquals(4950, node.flow().single().await())
assertEquals(4950, node.flowData().single().await())
}
}
@ -66,7 +66,7 @@ class DataPropagationTest {
fun testSingleData() {
runBlocking {
val node = testWorkspace.produce("Test.singleData")
assertEquals(12, node.flow().single().await())
assertEquals(12, node.flowData().single().await())
}
}
}

View File

@ -71,21 +71,32 @@ class SimpleWorkspaceTest {
}
val square by task<Int> {
workspace.data.select<Int>().forEach { data ->
if (data.meta["testFlag"].boolean == true) {
pipeFrom(data<Int>()) { arg, name, meta ->
if (meta["testFlag"].boolean == true) {
println("flag")
}
val value = data.await()
workspace.logger.info { "Starting square on $value" }
emit(data.name, data.map { it * it })
workspace.logger.info { "Starting square on $name" }
arg * arg
}
// workspace.data.select<Int>().forEach { data ->
// if (data.meta["testFlag"].boolean == true) {
// println("flag")
// }
// val value = data.await()
// workspace.logger.info { "Starting square on $value" }
// emit(data.name, data.map { it * it })
// }
}
val linear by task<Int> {
workspace.data.select<Int>().forEach { data ->
workspace.logger.info { "Starting linear on $data" }
emit(data.name, data.data.map { it * 2 + 1 })
pipeFrom(data<Int>()) { arg, name, _ ->
workspace.logger.info { "Starting linear on $name" }
arg * 2 + 1
}
// workspace.data.select<Int>().forEach { data ->
// workspace.logger.info { "Starting linear on $data" }
// emit(data.name, data.data.map { it * 2 + 1 })
// }
}
val fullSquare by task<Int> {
@ -151,7 +162,7 @@ class SimpleWorkspaceTest {
fun testWorkspace() {
runBlocking {
val node = workspace.runBlocking("sum")
val res = node.flow().single()
val res = node.flowData().single()
assertEquals(328350, res.await())
}
}
@ -161,7 +172,7 @@ class SimpleWorkspaceTest {
fun testMetaPropagation() {
runBlocking {
val node = workspace.produce("sum") { "testFlag" put true }
val res = node.flow().single().await()
val res = node.flowData().single().await()
}
}
@ -184,7 +195,7 @@ class SimpleWorkspaceTest {
fun testFilter() {
runBlocking {
val node = workspace.produce("filterOne")
assertEquals(12, node.flow().first().await())
assertEquals(12, node.flowData().first().await())
}
}
}

View File

@ -4,6 +4,6 @@ org.gradle.parallel=true
kotlin.code.style=official
kotlin.parallel.tasks.in.project=true
kotlin.mpp.enableGranularSourceSetsMetadata=true
kotlin.native.enableDependencyPropagation=false
#kotlin.mpp.enableGranularSourceSetsMetadata=true
#kotlin.native.enableDependencyPropagation=false
kotlin.mpp.stability.nowarn=true

View File

@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.0-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-7.2-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

View File

@ -5,7 +5,7 @@ pluginManagement {
gradlePluginPortal()
}
val toolsVersion = "0.10.2"
val toolsVersion = "0.10.7"
plugins {
id("ru.mipt.npm.gradle.project") version toolsVersion