Merge branch 'dev' into kotlin-1.4.20

# Conflicts:
#	settings.gradle.kts
Alexander Nozik 2020-09-15 21:15:13 +03:00
commit 810f79b9ed
47 changed files with 437 additions and 302 deletions

.gitignore

@@ -6,5 +6,4 @@ out/
 build/
 !gradle-wrapper.jar
-gradle.properties


@@ -4,15 +4,20 @@
 ### Added
 ### Changed
+- Context content resolution refactor
+- Kotlin 1.4.10 (build tools 0.6.0)
 - Empty query in Name is null instead of ""
 - Provider provides an empty map instead of error by default
+- Hidden delegates hierarchy in favor of stdlib properties
 ### Deprecated
+- Context activation API
 ### Removed
 - Functional server prototype
 ### Fixed
 - Global context CoroutineScope resolution
+- Library mode compliance
 ### Security


@@ -5,7 +5,7 @@ plugins {
 apply(plugin = "org.jetbrains.dokka")
-val dataforgeVersion by extra("0.2.0-dev-1")
+val dataforgeVersion by extra("0.2.0-dev-2")
 val bintrayRepo by extra("dataforge")
 val githubProject by extra("dataforge-core")


@@ -10,16 +10,12 @@ kscience {
 useCoroutines()
 }
-repositories {
-maven("https://maven.pkg.github.com/altavir/kotlin-logging")
-}
 kotlin {
 sourceSets {
 val commonMain by getting {
 dependencies {
 api(project(":dataforge-meta"))
-api("io.github.microutils:kotlin-logging:1.9.0-dev-npm")
+api("io.github.microutils:kotlin-logging:1.9.0-dev-npm-2")
 }
 }
 val jvmMain by getting {


@@ -1,18 +1,18 @@
 package hep.dataforge.context
-import hep.dataforge.meta.*
+import hep.dataforge.meta.Laminate
+import hep.dataforge.meta.Meta
+import hep.dataforge.meta.MetaRepr
+import hep.dataforge.meta.sequence
 import hep.dataforge.names.Name
 import hep.dataforge.names.plus
 import hep.dataforge.provider.Provider
-import hep.dataforge.provider.top
-import hep.dataforge.values.Value
 import kotlinx.coroutines.CoroutineScope
 import kotlinx.coroutines.Job
 import kotlinx.coroutines.SupervisorJob
 import mu.KLogger
 import mu.KotlinLogging
 import kotlin.coroutines.CoroutineContext
-import kotlin.jvm.JvmName
 /**
 * The local environment for anything being done in DataForge framework. Contexts are organized into tree structure with [Global] at the top.
@@ -23,23 +23,20 @@ import kotlin.jvm.JvmName
 * different plugins with the same interface in different contexts in the hierarchy. The usual behaviour is to use nearest one, but it could
 * be overridden by plugin implementation.
 *
-* Since plugins could contain mutable state, context has two states: active and inactive. No changes are allowed to active context.
-* @author Alexander Nozik
 */
 public open class Context(
 final override val name: Name,
-public val parent: Context? = Global,
+public val parent: Context?,
+meta: Meta,
 ) : Named, MetaRepr, Provider, CoroutineScope {
-private val config = Config()
 /**
 * Context properties. Working as substitute for environment variables
 */
-private val properties: Meta = if (parent == null) {
-config
+private val properties: Laminate = if (parent == null) {
+Laminate(meta)
 } else {
-Laminate(config, parent.properties)
+Laminate(meta, parent.properties)
 }
 /**
@@ -50,28 +47,21 @@ public open class Context(
 /**
 * A [PluginManager] for current context
 */
-public val plugins: PluginManager by lazy { PluginManager(this) }
+public val plugins: PluginManager = PluginManager(this)
+@Deprecated("To be removed in favor of immutable plugins")
 private val activators = HashSet<Any>()
 /**
 * Defines if context is used in any kind of active computations. Active context properties and plugins could not be changed
 */
+@Deprecated("To be removed in favor of immutable plugins")
 public val isActive: Boolean = activators.isNotEmpty()
-override val defaultTarget: String get() = Plugin.PLUGIN_TARGET
-override fun provideTop(target: String): Map<Name, Any> {
-return when (target) {
-Value.TYPE -> properties.sequence().toMap()
-Plugin.PLUGIN_TARGET -> plugins.sequence(true).associateBy { it.name }
-else -> emptyMap()
-}
-}
 /**
 * Mark context as active and used by [activator]
 */
+@Deprecated("To be removed in favor of immutable plugins")
 public fun activate(activator: Any) {
 activators.add(activator)
 }
@@ -79,19 +69,32 @@ public open class Context(
 /**
 * Mark context unused by [activator]
 */
+@Deprecated("To be removed in favor of immutable plugins")
 public fun deactivate(activator: Any) {
 activators.remove(activator)
 }
-/**
-* Change the properties of the context. If active, throw an exception
-*/
-public fun configure(action: Config.() -> Unit) {
-if (isActive) error("Can't configure active context")
-config.action()
-}
-open override val coroutineContext: CoroutineContext by lazy {
+override val defaultTarget: String get() = Plugin.TARGET
+public fun content(target: String, inherit: Boolean): Map<Name, Any> {
+return if (inherit) {
+when (target) {
+PROPERTY_TARGET -> properties.sequence().toMap()
+Plugin.TARGET -> plugins.list(true).associateBy { it.name }
+else -> emptyMap()
+}
+} else {
+when (target) {
+PROPERTY_TARGET -> properties.layers.firstOrNull()?.sequence()?.toMap() ?: emptyMap()
+Plugin.TARGET -> plugins.list(false).associateBy { it.name }
+else -> emptyMap()
+}
+}
+}
+override fun content(target: String): Map<Name, Any> = content(target, true)
+override val coroutineContext: CoroutineContext by lazy {
 (parent ?: Global).coroutineContext.let { parenContext ->
 parenContext + SupervisorJob(parenContext[Job])
 }
@@ -101,6 +104,7 @@ public open class Context(
 * Detach all plugins and terminate context
 */
 public open fun close() {
+@Suppress("DEPRECATION")
 if (isActive) error("Can't close active context")
 //detach all plugins
 plugins.forEach { it.detach() }
@@ -108,22 +112,15 @@ public open class Context(
 override fun toMeta(): Meta = Meta {
 "parent" to parent?.name
-"properties" put properties.seal()
+"properties" put properties.layers.firstOrNull()
 "plugins" put plugins.map { it.toMeta() }
 }
+public companion object {
+public const val PROPERTY_TARGET: String = "context.property"
+}
 }
-/**
-* A map of all objects provided by plugins with given target and type
-*/
-@JvmName("typedContent")
-public inline fun <reified T : Any> Context.resolve(target: String): Map<Name, T> = plugins.flatMap { plugin ->
-plugin.top<T>(target).entries.map { (plugin.name + it.key) to it.value }
-}.associate { it }
-public fun Context.resolve(target: String): Map<Name, Any> = resolve<Any>(target)
 /**
 * The interface for something that encapsulated in context
 *
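For reference, a minimal usage sketch of the reworked `Context` shown above: the constructor now takes an explicit `Meta` instead of a mutable `Config`, and `content(target, inherit)` replaces `provideTop`. The property key is purely illustrative and not part of this commit.

```kotlin
import hep.dataforge.context.Context
import hep.dataforge.context.Global
import hep.dataforge.meta.Meta
import hep.dataforge.names.toName

fun main() {
    // Illustrative property; context properties are now an immutable Laminate over the parent
    val ctx = Context("demo".toName(), parent = Global, meta = Meta { "myProperty" put "value" })
    // Own properties only
    println(ctx.content(Context.PROPERTY_TARGET, inherit = false).keys)
    // Own properties plus those inherited from Global
    println(ctx.content(Context.PROPERTY_TARGET, inherit = true).keys)
}
```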


@@ -1,8 +1,6 @@
 package hep.dataforge.context
-import hep.dataforge.meta.DFBuilder
-import hep.dataforge.meta.Meta
-import hep.dataforge.meta.MetaBuilder
+import hep.dataforge.meta.*
 import hep.dataforge.names.toName
 /**
@@ -21,10 +19,11 @@ public class ContextBuilder(private val parent: Context = Global, public var nam
 plugins.add(plugin)
 }
-public fun plugin(tag: PluginTag, action: MetaBuilder.() -> Unit = {}) {
-val factory = parent.resolve<PluginFactory<*>>(PluginFactory.TYPE).values
+@OptIn(DFExperimental::class)
+public fun plugin(tag: PluginTag, metaBuilder: MetaBuilder.() -> Unit = {}) {
+val factory = parent.gatherInSequence<PluginFactory<*>>(PluginFactory.TYPE).values
 .find { it.tag.matches(tag) } ?: error("Can't resolve plugin factory for $tag")
-val plugin = factory.invoke(Meta(action), parent)
+val plugin = factory.invoke(Meta(metaBuilder), parent)
 plugins.add(plugin)
 }
@@ -37,7 +36,7 @@ public class ContextBuilder(private val parent: Context = Global, public var nam
 }
 public fun build(): Context {
-return Context(name.toName(), parent).apply {
+return Context(name.toName(), parent, meta.seal()).apply {
 this@ContextBuilder.plugins.forEach {
 plugins.load(it)
 }


@@ -1,14 +1,17 @@
 package hep.dataforge.context
+import hep.dataforge.meta.Meta
 import hep.dataforge.names.asName
 import kotlinx.coroutines.GlobalScope
 import kotlinx.coroutines.SupervisorJob
 import kotlin.coroutines.CoroutineContext
+import kotlin.native.concurrent.ThreadLocal
 /**
 * A global root context. Closing [Global] terminates the framework.
 */
-public object Global : Context("GLOBAL".asName(), null) {
+@ThreadLocal
+public object Global : Context("GLOBAL".asName(), null, Meta.EMPTY) {
 override val coroutineContext: CoroutineContext = GlobalScope.coroutineContext + SupervisorJob()
@@ -38,6 +41,8 @@ public object Global : Context("GLOBAL".asName(), null) {
 }
 public fun context(name: String, parent: Context = this, block: ContextBuilder.() -> Unit = {}): Context =
-ContextBuilder(parent, name).apply(block).build()
+ContextBuilder(parent, name).apply(block).build().also {
+contextRegistry[name] = it
+}
 }


@@ -1,27 +1,23 @@
 package hep.dataforge.context
+import hep.dataforge.context.Plugin.Companion.TARGET
 import hep.dataforge.meta.Meta
 import hep.dataforge.meta.MetaRepr
 import hep.dataforge.names.Name
 import hep.dataforge.names.toName
 import hep.dataforge.provider.Provider
+import hep.dataforge.provider.Type
 /**
 * The interface to define a Context plugin. A plugin stores all runtime features of a context.
 * The plugin is by default configurable and a Provider (both features could be ignored).
 * The plugin must in most cases have an empty constructor in order to be able to load it from library.
 *
-*
 * The plugin lifecycle is the following:
 *
-*
 * create - configure - attach - detach - destroy
-*
-*
-* Configuration of attached plugin is possible for a context which is not in a runtime mode, but it is not recommended.
-*
-* @author Alexander Nozik
 */
+@Type(TARGET)
 public interface Plugin : Named, ContextAware, Provider, MetaRepr {
 /**
@@ -64,8 +60,7 @@ public interface Plugin : Named, ContextAware, Provider, MetaRepr {
 }
 public companion object {
-public const val PLUGIN_TARGET = "plugin"
+public const val TARGET: String = "plugin"
 }
 }


@@ -2,14 +2,16 @@ package hep.dataforge.context
 import hep.dataforge.meta.Meta
 import hep.dataforge.meta.MetaBuilder
+import hep.dataforge.provider.Type
 import kotlin.reflect.KClass
+@Type(PluginFactory.TYPE)
 public interface PluginFactory<T : Plugin> : Factory<T> {
 public val tag: PluginTag
 public val type: KClass<out T>
-public companion object{
+public companion object {
 public const val TYPE: String = "pluginFactory"
 }
 }
@@ -23,6 +25,8 @@ public interface PluginFactory<T : Plugin> : Factory<T> {
 */
 public class PluginManager(override val context: Context) : ContextAware, Iterable<Plugin> {
+//TODO refactor to read-only container
 /**
 * A set of loaded plugins
 */
@@ -33,21 +37,24 @@ public class PluginManager(override val context: Context) : ContextAware, Iterab
 */
 private val parent: PluginManager? = context.parent?.plugins
-public fun sequence(recursive: Boolean): Sequence<Plugin> {
-return if (recursive && parent != null) {
-plugins.asSequence() + parent.sequence(true)
+/**
+ * List plugins stored in this [PluginManager]. If [inherit] is true, include parent plugins as well
+ */
+public fun list(inherit: Boolean): Collection<Plugin> {
+return if (inherit && parent != null) {
+plugins + parent.list(true)
 } else {
-plugins.asSequence()
+plugins
 }
 }
 /**
 * Get existing plugin or return null if not present. Only first matching plugin is returned.
-* @param recursive search for parent [PluginManager] plugins
+* @param inherit search for parent [PluginManager] plugins
 * @param predicate condition for the plugin
 */
-public fun find(recursive: Boolean = true, predicate: (Plugin) -> Boolean): Plugin? = sequence(recursive).find(predicate)
+public fun find(inherit: Boolean = true, predicate: (Plugin) -> Boolean): Plugin? =
+list(inherit).find(predicate)
 /**
@@ -56,7 +63,8 @@ public class PluginManager(override val context: Context) : ContextAware, Iterab
 * @param tag
 * @return
 */
-public operator fun get(tag: PluginTag, recursive: Boolean = true): Plugin? = find(recursive) { tag.matches(it.tag) }
+public operator fun get(tag: PluginTag, inherit: Boolean = true): Plugin? =
+find(inherit) { tag.matches(it.tag) }
 /**
@@ -142,7 +150,7 @@ public class PluginManager(override val context: Context) : ContextAware, Iterab
 public fun <T : Plugin> fetch(
 factory: PluginFactory<T>,
 recursive: Boolean = true,
-metaBuilder: MetaBuilder.() -> Unit
+metaBuilder: MetaBuilder.() -> Unit,
 ): T = fetch(factory, recursive, Meta(metaBuilder))
 override fun iterator(): Iterator<Plugin> = plugins.iterator()
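A small sketch of the renamed plugin queries above, where `list`, `find` and `get` now take an `inherit` flag instead of `recursive`. The tag used here is invented for illustration and is not expected to be loaded.

```kotlin
import hep.dataforge.context.Global
import hep.dataforge.context.PluginTag

fun main() {
    // Plugins owned by Global itself, without inherited ones
    val own = Global.plugins.list(inherit = false)
    println("Plugins loaded in Global: ${own.size}")

    // Indexed lookup by tag, searching parent managers as well; null when nothing matches
    val plugin = Global.plugins[PluginTag("illustrative.plugin"), true]
    println(plugin ?: "no plugin with that tag is loaded")
}
```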


@@ -0,0 +1,96 @@
package hep.dataforge.context
import hep.dataforge.meta.DFExperimental
import hep.dataforge.names.Name
import hep.dataforge.names.plus
import hep.dataforge.provider.Provider
import hep.dataforge.provider.top
import kotlin.reflect.KClass
import kotlin.reflect.cast
/**
* Resolve a specific element in top level elements of the provider and attempt to cast it to the given type
*/
private fun <T : Any> Provider.provide(target: String, name: Name, type: KClass<out T>): T? {
return content(target)[name]?.let { type.cast(it) }
}
/**
* Resolve a top level object with given [target] and [name] in a [Context] own scope or its plugins.
*/
public fun <T : Any> Context.resolve(target: String, name: Name, type: KClass<out T>): T? {
//Try searching for plugin an context property
provide(target, name, type)?.let { return it }
val pluginContent = plugins.mapNotNull { it.provide(target, name, type) }
return if (pluginContent.isEmpty()) {
parent?.resolve<T>(target, name, type)
} else {
pluginContent.single() // throws error in case of name/type conflicts
}
}
/**
* Resolve a top level object with given [target] and [name] in a [Context] own scope or its plugins.
*/
public inline fun <reified T : Any> Context.resolve(target: String, name: Name): T? =
resolve(target, name, T::class)
/**
* Gather a map of all top-level objects with given [target] from context plugins.
* Content from plugins is prefixed by plugin name so name conflicts are impossible
* This operation could be slow in case of large number of plugins
*/
public fun <T : Any> Context.gather(
target: String,
type: KClass<out T>,
inherit: Boolean = true,
): Map<Name, T> = buildMap {
putAll(top(target, type))
plugins.forEach { plugin ->
plugin.top(target, type).forEach { (name, value) ->
if (containsKey(name)) error("Name conflict during gather. An item with name $name could not be gathered from $plugin because key is already present.")
put(plugin.name + name, value)
}
}
if (inherit) {
parent?.gather(target, type, inherit)?.forEach {
//put all values from parent if they are not conflicting
if (!containsKey(it.key)) {
put(it.key, it.value)
}
}
}
}
public inline fun <reified T : Any> Context.gather(target: String, inherit: Boolean = true): Map<Name, T> =
gather(target, T::class, inherit)
/**
* Gather all content from context itself and its plugins in a form of sequence of name-value pairs. Ignores name conflicts.
*
* Adds parent context sequence as well if [inherit] is true
*/
@DFExperimental
public fun <T : Any> Context.gatherInSequence(
target: String,
type: KClass<out T>,
inherit: Boolean = true,
): Sequence<Map.Entry<Name, T>> = sequence {
yieldAll(top(target, type).entries)
plugins.forEach { plugin ->
yieldAll(plugin.top(target, type).mapKeys { plugin.name + it.key }.entries)
}
if (inherit) {
parent?.gather(target, type, inherit)?.let {
yieldAll(it.entries)
}
}
}
@DFExperimental
public inline fun <reified T : Any> Context.gatherInSequence(
target: String,
inherit: Boolean = true,
): Sequence<Map.Entry<Name, T>> = gatherInSequence(target, T::class, inherit)
public val <T> Sequence<Map.Entry<Name, T>>.values: Sequence<T> get() = map { it.value }
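A short usage sketch of the `gather` API added above. The target string is illustrative; unless a plugin providing that target is loaded, the resulting map is simply empty, and plugin content is prefixed by the plugin name to avoid conflicts.

```kotlin
import hep.dataforge.context.Global
import hep.dataforge.context.gather
import hep.dataforge.names.Name

fun main() {
    val ctx = Global.context("gatherDemo")
    // Everything provided under the illustrative "test" target by the context and its plugins
    val members: Map<Name, Any> = ctx.gather("test", inherit = true)
    members.forEach { (name, value) -> println("$name -> $value") }
}
```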


@@ -42,7 +42,7 @@ public inline class Path(public val tokens: List<PathToken>) : Iterable<PathToke
 override fun iterator(): Iterator<PathToken> = tokens.iterator()
 public companion object {
-public const val PATH_SEGMENT_SEPARATOR = "/"
+public const val PATH_SEGMENT_SEPARATOR: String = "/"
 public fun parse(path: String): Path {
 val head = path.substringBefore(PATH_SEGMENT_SEPARATOR)


@@ -16,6 +16,8 @@
 package hep.dataforge.provider
 import hep.dataforge.names.Name
+import kotlin.reflect.KClass
+import kotlin.reflect.safeCast
 /**
 * A marker utility interface for providers.
@@ -37,14 +39,14 @@ public interface Provider {
 /**
 * A map of direct children for specific target
 */
-public fun provideTop(target: String): Map<Name, Any> = emptyMap()
+public fun content(target: String): Map<Name, Any> = emptyMap()
 }
 public fun Provider.provide(path: Path, targetOverride: String? = null): Any? {
 if (path.length == 0) throw IllegalArgumentException("Can't provide by empty path")
 val first = path.first()
 val target = targetOverride ?: first.target ?: defaultTarget
-val res = provideTop(target)[first.name] ?: return null
+val res = content(target)[first.name] ?: return null
 return when (path.length) {
 1 -> res
 else -> {
@@ -66,17 +68,22 @@ public inline fun <reified T : Any> Provider.provide(path: String, targetOverrid
 //inline fun <reified T : Any> Provider.provide(target: String, name: Name): T? {
 // return provide(PathToken(name, target).toPath()) as? T
 //}
+//
 //inline fun <reified T : Any> Provider.provide(target: String, name: String): T? =
 // provide(target, name.toName())
 /**
 * Typed top level content
 */
-public inline fun <reified T : Any> Provider.top(target: String): Map<Name, T> {
-return provideTop(target).mapValues {
-it.value as? T ?: error("The type of element $it is ${it::class} but ${T::class} is expected")
+public fun <T : Any> Provider.top(target: String, type: KClass<out T>): Map<Name, T> {
+return content(target).mapValues {
+type.safeCast(it.value) ?: error("The type of element $it is ${it::class} but $type is expected")
 }
 }
+/**
+ * Typed top level content
+ */
+public inline fun <reified T : Any> Provider.top(target: String): Map<Name, T> = top(target, T::class)
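A sketch of the renamed `content`/`top` pair: `Global` is itself a `Provider`, and `Plugin.TARGET` is the target under which it serves plugins, so the typed `top` view can be taken directly.

```kotlin
import hep.dataforge.context.Global
import hep.dataforge.context.Plugin
import hep.dataforge.provider.top

fun main() {
    // Typed view over provider content; a wrong type argument would trigger the error branch
    val plugins = Global.top<Plugin>(Plugin.TARGET)
    plugins.forEach { (name, plugin) -> println("$name -> ${plugin.tag}") }
}
```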


@@ -11,7 +11,7 @@ class ContextTest {
 class DummyPlugin : AbstractPlugin() {
 override val tag get() = PluginTag("test")
-override fun provideTop(target: String): Map<Name, Any> {
+override fun content(target: String): Map<Name, Any> {
 return when(target){
 "test" -> listOf("a", "b", "c.d").associate { it.toName() to it.toName() }
 else -> emptyMap()
@@ -21,8 +21,11 @@ class ContextTest {
 @Test
 fun testPluginManager() {
-Global.plugins.load(DummyPlugin())
-val members = Global.resolve<Name>("test")
+val context = Global.context("test"){
+plugin(DummyPlugin())
+}
+//Global.plugins.load(DummyPlugin())
+val members = context.gather<Name>("test")
 assertEquals(3, members.count())
 members.forEach {
 assertEquals(it.key, it.value.appendLeft("test"))


@@ -1,44 +0,0 @@
package hep.dataforge.provider
import hep.dataforge.context.Context
import hep.dataforge.context.resolve
import hep.dataforge.names.Name
import kotlin.reflect.KClass
import kotlin.reflect.full.findAnnotation
/**
*
*/
object Types {
operator fun get(cl: KClass<*>): String {
return cl.findAnnotation<Type>()?.id ?: cl.simpleName ?: ""
}
operator fun get(obj: Any): String {
return get(obj::class)
}
}
/**
* Provide an object with given name inferring target from its type using [Type] annotation
*/
inline fun <reified T : Any> Provider.provideByType(name: String): T? {
val target = Types[T::class]
return provide(target, name)
}
//
//inline fun <reified T : Any> Provider.provideByType(name: Name): T? {
// val target = Types[T::class]
// return provide(target, name)
//}
inline fun <reified T : Any> Provider.top(): Map<Name, T> {
val target = Types[T::class]
return top(target)
}
/**
* A sequences of all objects provided by plugins with given target and type
*/
inline fun <reified T : Any> Context.content(): Map<Name, T> = resolve<T>(Types[T::class])


@@ -0,0 +1,36 @@
package hep.dataforge.provider
import hep.dataforge.context.Context
import hep.dataforge.context.gather
import hep.dataforge.meta.DFExperimental
import hep.dataforge.names.Name
import kotlin.reflect.KClass
import kotlin.reflect.full.findAnnotation
@DFExperimental
public val KClass<*>.dfType: String
get() = findAnnotation<Type>()?.id ?: simpleName ?: ""
/**
* Provide an object with given name inferring target from its type using [Type] annotation
*/
@DFExperimental
public inline fun <reified T : Any> Provider.provideByType(name: String): T? {
val target = T::class.dfType
return provide(target, name)
}
@DFExperimental
public inline fun <reified T : Any> Provider.top(): Map<Name, T> {
val target = T::class.dfType
return top(target)
}
/**
* All objects provided by plugins with given target and type
*/
@DFExperimental
public inline fun <reified T : Any> Context.gather(inherit: Boolean = true): Map<Name, T> =
gather<T>(T::class.dfType, inherit)
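A JVM-only sketch of the experimental `dfType` helper added above (it relies on `kotlin.reflect.full`). The annotated class and its id are invented for illustration, and the `DFExperimental` opt-in is assumed to be required.

```kotlin
import hep.dataforge.meta.DFExperimental
import hep.dataforge.provider.Type
import hep.dataforge.provider.dfType

// Purely illustrative class carrying a DataForge type id via the @Type annotation
@Type("demo.widget")
class Widget

@OptIn(DFExperimental::class)
fun main() {
    println(Widget::class.dfType)  // "demo.widget"
    println(String::class.dfType)  // "String" (falls back to the simple class name)
}
```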


@@ -1,7 +1,7 @@
 plugins {
 id("ru.mipt.npm.mpp")
 id("ru.mipt.npm.node")
-// id("ru.mipt.npm.native")
+id("ru.mipt.npm.native")
 }
 kscience{

@ -15,11 +15,12 @@ public interface Data<out T : Any> : Goal<T>, MetaRepr{
/** /**
* Type marker for the data. The type is known before the calculation takes place so it could be checked. * Type marker for the data. The type is known before the calculation takes place so it could be checked.
*/ */
val type: KClass<out T> public val type: KClass<out T>
/** /**
* Meta for the data * Meta for the data
*/ */
val meta: Meta public val meta: Meta
override fun toMeta(): Meta = Meta { override fun toMeta(): Meta = Meta {
"type" put (type.simpleName?:"undefined") "type" put (type.simpleName?:"undefined")
@ -28,10 +29,10 @@ public interface Data<out T : Any> : Goal<T>, MetaRepr{
} }
} }
companion object { public companion object {
const val TYPE = "data" public const val TYPE: String = "data"
operator fun <T : Any> invoke( public operator fun <T : Any> invoke(
type: KClass<out T>, type: KClass<out T>,
meta: Meta = Meta.EMPTY, meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext, context: CoroutineContext = EmptyCoroutineContext,
@ -39,14 +40,14 @@ public interface Data<out T : Any> : Goal<T>, MetaRepr{
block: suspend CoroutineScope.() -> T block: suspend CoroutineScope.() -> T
): Data<T> = DynamicData(type, meta, context, dependencies, block) ): Data<T> = DynamicData(type, meta, context, dependencies, block)
inline operator fun <reified T : Any> invoke( public inline operator fun <reified T : Any> invoke(
meta: Meta = Meta.EMPTY, meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext, context: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(), dependencies: Collection<Data<*>> = emptyList(),
noinline block: suspend CoroutineScope.() -> T noinline block: suspend CoroutineScope.() -> T
): Data<T> = invoke(T::class, meta, context, dependencies, block) ): Data<T> = invoke(T::class, meta, context, dependencies, block)
operator fun <T : Any> invoke( public operator fun <T : Any> invoke(
name: String, name: String,
type: KClass<out T>, type: KClass<out T>,
meta: Meta = Meta.EMPTY, meta: Meta = Meta.EMPTY,
@ -55,7 +56,7 @@ public interface Data<out T : Any> : Goal<T>, MetaRepr{
block: suspend CoroutineScope.() -> T block: suspend CoroutineScope.() -> T
): Data<T> = NamedData(name, invoke(type, meta, context, dependencies, block)) ): Data<T> = NamedData(name, invoke(type, meta, context, dependencies, block))
inline operator fun <reified T : Any> invoke( public inline operator fun <reified T : Any> invoke(
name: String, name: String,
meta: Meta = Meta.EMPTY, meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext, context: CoroutineContext = EmptyCoroutineContext,
@ -64,13 +65,13 @@ public interface Data<out T : Any> : Goal<T>, MetaRepr{
): Data<T> = ): Data<T> =
invoke(name, T::class, meta, context, dependencies, block) invoke(name, T::class, meta, context, dependencies, block)
fun <T : Any> static(value: T, meta: Meta = Meta.EMPTY): Data<T> = public fun <T : Any> static(value: T, meta: Meta = Meta.EMPTY): Data<T> =
StaticData(value, meta) StaticData(value, meta)
} }
} }
class DynamicData<T : Any>( public class DynamicData<T : Any>(
override val type: KClass<out T>, override val type: KClass<out T>,
override val meta: Meta = Meta.EMPTY, override val meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext, context: CoroutineContext = EmptyCoroutineContext,
@ -78,16 +79,16 @@ class DynamicData<T : Any>(
block: suspend CoroutineScope.() -> T block: suspend CoroutineScope.() -> T
) : Data<T>, DynamicGoal<T>(context, dependencies, block) ) : Data<T>, DynamicGoal<T>(context, dependencies, block)
class StaticData<T : Any>( public class StaticData<T : Any>(
value: T, value: T,
override val meta: Meta = Meta.EMPTY override val meta: Meta = Meta.EMPTY
) : Data<T>, StaticGoal<T>(value) { ) : Data<T>, StaticGoal<T>(value) {
override val type: KClass<out T> get() = value::class override val type: KClass<out T> get() = value::class
} }
class NamedData<out T : Any>(val name: String, data: Data<T>) : Data<T> by data public class NamedData<out T : Any>(public val name: String, data: Data<T>) : Data<T> by data
fun <T : Any, R : Any> Data<T>.map( public fun <T : Any, R : Any> Data<T>.map(
outputType: KClass<out R>, outputType: KClass<out R>,
coroutineContext: CoroutineContext = EmptyCoroutineContext, coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = this.meta, meta: Meta = this.meta,
@ -100,7 +101,7 @@ fun <T : Any, R : Any> Data<T>.map(
/** /**
* Create a data pipe * Create a data pipe
*/ */
inline fun <T : Any, reified R : Any> Data<T>.map( public inline fun <T : Any, reified R : Any> Data<T>.map(
coroutineContext: CoroutineContext = EmptyCoroutineContext, coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta = this.meta, meta: Meta = this.meta,
noinline block: suspend CoroutineScope.(T) -> R noinline block: suspend CoroutineScope.(T) -> R
@ -111,7 +112,7 @@ inline fun <T : Any, reified R : Any> Data<T>.map(
/** /**
* Create a joined data. * Create a joined data.
*/ */
inline fun <T : Any, reified R : Any> Collection<Data<T>>.reduce( public inline fun <T : Any, reified R : Any> Collection<Data<T>>.reduce(
coroutineContext: CoroutineContext = EmptyCoroutineContext, coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta, meta: Meta,
noinline block: suspend CoroutineScope.(Collection<T>) -> R noinline block: suspend CoroutineScope.(Collection<T>) -> R
@ -124,7 +125,7 @@ inline fun <T : Any, reified R : Any> Collection<Data<T>>.reduce(
block(map { run { it.await() } }) block(map { run { it.await() } })
} }
fun <K, T : Any, R : Any> Map<K, Data<T>>.reduce( public fun <K, T : Any, R : Any> Map<K, Data<T>>.reduce(
outputType: KClass<out R>, outputType: KClass<out R>,
coroutineContext: CoroutineContext = EmptyCoroutineContext, coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta, meta: Meta,
@ -145,7 +146,7 @@ fun <K, T : Any, R : Any> Map<K, Data<T>>.reduce(
* @param T type of the input goal * @param T type of the input goal
* @param R type of the result goal * @param R type of the result goal
*/ */
inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduce( public inline fun <K, T : Any, reified R : Any> Map<K, Data<T>>.reduce(
coroutineContext: CoroutineContext = EmptyCoroutineContext, coroutineContext: CoroutineContext = EmptyCoroutineContext,
meta: Meta, meta: Meta,
noinline block: suspend CoroutineScope.(Map<K, T>) -> R noinline block: suspend CoroutineScope.(Map<K, T>) -> R
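A small sketch of the now-public `Data` API from the file above: a static value, a lazy `map`, and explicit awaiting. It assumes a JVM target and uses `runBlocking` only for brevity.

```kotlin
import hep.dataforge.data.Data
import hep.dataforge.data.await
import hep.dataforge.data.map
import kotlinx.coroutines.runBlocking

fun main() {
    // Static data and a lazily computed transformation of it
    val two = Data.static(2)
    val doubled = two.map { it * 2 }

    // Nothing runs until the goal is awaited
    runBlocking { println(doubled.await()) } // prints 4
}
```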


@@ -4,31 +4,31 @@ import hep.dataforge.meta.*
 import hep.dataforge.names.toName
-class DataFilter : Scheme() {
+public class DataFilter : Scheme() {
 /**
 * A source node for the filter
 */
-var from by string()
+public var from: String? by string()
 /**
 * A target placement for the filtered node
 */
-var to by string()
+public var to: String? by string()
 /**
 * A regular expression pattern for the filter
 */
-var pattern by string(".*")
+public var pattern: String by string(".*")
 // val prefix by string()
 // val suffix by string()
-fun isEmpty(): Boolean = config.isEmpty()
+public fun isEmpty(): Boolean = config.isEmpty()
-companion object : SchemeSpec<DataFilter>(::DataFilter)
+public companion object : SchemeSpec<DataFilter>(::DataFilter)
 }
 /**
 * Apply meta-based filter to given data node
 */
-fun <T : Any> DataNode<T>.filter(filter: DataFilter): DataNode<T> {
+public fun <T : Any> DataNode<T>.filter(filter: DataFilter): DataNode<T> {
 val sourceNode = filter.from?.let { get(it.toName()).node } ?: this@filter
 val regex = filter.pattern.toRegex()
 val targetNode = DataTreeBuilder(type).apply {
@@ -46,10 +46,10 @@ fun <T : Any> DataNode<T>.filter(filter: DataFilter): DataNode<T> {
 /**
 * Filter data using [DataFilter] specification
 */
-fun <T : Any> DataNode<T>.filter(filter: Meta): DataNode<T> = filter(DataFilter.wrap(filter))
+public fun <T : Any> DataNode<T>.filter(filter: Meta): DataNode<T> = filter(DataFilter.wrap(filter))
 /**
 * Filter data using [DataFilter] builder
 */
-fun <T : Any> DataNode<T>.filter(filterBuilder: DataFilter.() -> Unit): DataNode<T> =
+public fun <T : Any> DataNode<T>.filter(filterBuilder: DataFilter.() -> Unit): DataNode<T> =
 filter(DataFilter(filterBuilder))
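For reference, a minimal sketch of applying the `DataFilter` scheme above to a small tree. All names, the branch selected via `from`, and the pattern are illustrative; the exact matching semantics of `pattern` follow the filter implementation, which is only partially shown in this hunk.

```kotlin
import hep.dataforge.data.DataFilter
import hep.dataforge.data.DataTreeBuilder
import hep.dataforge.data.dataSequence
import hep.dataforge.data.filter
import hep.dataforge.data.static

fun main() {
    // A small tree of static integers
    val node = DataTreeBuilder(Int::class).apply {
        static("numbers.one", 1)
        static("numbers.two", 2)
        static("other", 3)
    }.build()

    // Select the "numbers" branch and keep entries matching the pattern
    val filtered = node.filter(DataFilter {
        from = "numbers"
        pattern = "t.*"
    })
    println(filtered.dataSequence().map { it.first }.toList())
}
```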


@ -11,12 +11,12 @@ import kotlin.collections.component2
import kotlin.collections.set import kotlin.collections.set
import kotlin.reflect.KClass import kotlin.reflect.KClass
sealed class DataItem<out T : Any> : MetaRepr { public sealed class DataItem<out T : Any> : MetaRepr {
abstract val type: KClass<out T> public abstract val type: KClass<out T>
abstract val meta: Meta public abstract val meta: Meta
class Node<out T : Any>(val node: DataNode<T>) : DataItem<T>() { public class Node<out T : Any>(public val node: DataNode<T>) : DataItem<T>() {
override val type: KClass<out T> get() = node.type override val type: KClass<out T> get() = node.type
override fun toMeta(): Meta = node.toMeta() override fun toMeta(): Meta = node.toMeta()
@ -24,7 +24,7 @@ sealed class DataItem<out T : Any> : MetaRepr {
override val meta: Meta get() = node.meta override val meta: Meta get() = node.meta
} }
class Leaf<out T : Any>(val data: Data<T>) : DataItem<T>() { public class Leaf<out T : Any>(public val data: Data<T>) : DataItem<T>() {
override val type: KClass<out T> get() = data.type override val type: KClass<out T> get() = data.type
override fun toMeta(): Meta = data.toMeta() override fun toMeta(): Meta = data.toMeta()
@ -36,16 +36,16 @@ sealed class DataItem<out T : Any> : MetaRepr {
/** /**
* A tree-like data structure grouped into the node. All data inside the node must inherit its type * A tree-like data structure grouped into the node. All data inside the node must inherit its type
*/ */
interface DataNode<out T : Any> : MetaRepr { public interface DataNode<out T : Any> : MetaRepr {
/** /**
* The minimal common ancestor to all data in the node * The minimal common ancestor to all data in the node
*/ */
val type: KClass<out T> public val type: KClass<out T>
val items: Map<NameToken, DataItem<T>> public val items: Map<NameToken, DataItem<T>>
val meta: Meta public val meta: Meta
override fun toMeta(): Meta = Meta { override fun toMeta(): Meta = Meta {
"type" put (type.simpleName ?: "undefined") "type" put (type.simpleName ?: "undefined")
@ -60,7 +60,7 @@ interface DataNode<out T : Any> : MetaRepr {
* Start computation for all goals in data node and return a job for the whole node * Start computation for all goals in data node and return a job for the whole node
*/ */
@Suppress("DeferredResultUnused") @Suppress("DeferredResultUnused")
fun CoroutineScope.startAll(): Job = launch { public fun CoroutineScope.startAll(): Job = launch {
items.values.forEach { items.values.forEach {
when (it) { when (it) {
is DataItem.Node<*> -> it.node.run { startAll() } is DataItem.Node<*> -> it.node.run { startAll() }
@ -69,36 +69,36 @@ interface DataNode<out T : Any> : MetaRepr {
} }
} }
companion object { public companion object {
const val TYPE = "dataNode" public const val TYPE: String = "dataNode"
operator fun <T : Any> invoke(type: KClass<out T>, block: DataTreeBuilder<T>.() -> Unit) = public operator fun <T : Any> invoke(type: KClass<out T>, block: DataTreeBuilder<T>.() -> Unit): DataTree<T> =
DataTreeBuilder(type).apply(block).build() DataTreeBuilder(type).apply(block).build()
inline operator fun <reified T : Any> invoke(noinline block: DataTreeBuilder<T>.() -> Unit) = public inline operator fun <reified T : Any> invoke(noinline block: DataTreeBuilder<T>.() -> Unit): DataTree<T> =
DataTreeBuilder(T::class).apply(block).build() DataTreeBuilder(T::class).apply(block).build()
fun <T : Any> builder(type: KClass<out T>) = DataTreeBuilder(type) public fun <T : Any> builder(type: KClass<out T>): DataTreeBuilder<T> = DataTreeBuilder(type)
} }
} }
suspend fun <T: Any> DataNode<T>.join(): Unit = coroutineScope { startAll().join() } public suspend fun <T: Any> DataNode<T>.join(): Unit = coroutineScope { startAll().join() }
val <T : Any> DataItem<T>?.node: DataNode<T>? get() = (this as? DataItem.Node<T>)?.node public val <T : Any> DataItem<T>?.node: DataNode<T>? get() = (this as? DataItem.Node<T>)?.node
val <T : Any> DataItem<T>?.data: Data<T>? get() = (this as? DataItem.Leaf<T>)?.data public val <T : Any> DataItem<T>?.data: Data<T>? get() = (this as? DataItem.Leaf<T>)?.data
operator fun <T : Any> DataNode<T>.get(name: Name): DataItem<T>? = when (name.length) { public operator fun <T : Any> DataNode<T>.get(name: Name): DataItem<T>? = when (name.length) {
0 -> error("Empty name") 0 -> error("Empty name")
1 -> items[name.firstOrNull()] 1 -> items[name.firstOrNull()]
else -> get(name.firstOrNull()!!.asName()).node?.get(name.cutFirst()) else -> get(name.firstOrNull()!!.asName()).node?.get(name.cutFirst())
} }
operator fun <T : Any> DataNode<T>.get(name: String): DataItem<T>? = get(name.toName()) public operator fun <T : Any> DataNode<T>.get(name: String): DataItem<T>? = get(name.toName())
/** /**
* Sequence of all children including nodes * Sequence of all children including nodes
*/ */
fun <T : Any> DataNode<T>.asSequence(): Sequence<Pair<Name, DataItem<T>>> = sequence { public fun <T : Any> DataNode<T>.asSequence(): Sequence<Pair<Name, DataItem<T>>> = sequence {
items.forEach { (head, item) -> items.forEach { (head, item) ->
yield(head.asName() to item) yield(head.asName() to item)
if (item is DataItem.Node) { if (item is DataItem.Node) {
@ -112,7 +112,7 @@ fun <T : Any> DataNode<T>.asSequence(): Sequence<Pair<Name, DataItem<T>>> = sequ
/** /**
* Sequence of data entries * Sequence of data entries
*/ */
fun <T : Any> DataNode<T>.dataSequence(): Sequence<Pair<Name, Data<T>>> = sequence { public fun <T : Any> DataNode<T>.dataSequence(): Sequence<Pair<Name, Data<T>>> = sequence {
items.forEach { (head, item) -> items.forEach { (head, item) ->
when (item) { when (item) {
is DataItem.Leaf -> yield(head.asName() to item.data) is DataItem.Leaf -> yield(head.asName() to item.data)
@ -125,9 +125,9 @@ fun <T : Any> DataNode<T>.dataSequence(): Sequence<Pair<Name, Data<T>>> = sequen
} }
} }
operator fun <T : Any> DataNode<T>.iterator(): Iterator<Pair<Name, DataItem<T>>> = asSequence().iterator() public operator fun <T : Any> DataNode<T>.iterator(): Iterator<Pair<Name, DataItem<T>>> = asSequence().iterator()
class DataTree<out T : Any> internal constructor( public class DataTree<out T : Any> internal constructor(
override val type: KClass<out T>, override val type: KClass<out T>,
override val items: Map<NameToken, DataItem<T>>, override val items: Map<NameToken, DataItem<T>>,
override val meta: Meta override val meta: Meta
@ -142,17 +142,17 @@ private sealed class DataTreeBuilderItem<out T : Any> {
* A builder for a DataTree. * A builder for a DataTree.
*/ */
@DFBuilder @DFBuilder
class DataTreeBuilder<T : Any>(val type: KClass<out T>) { public class DataTreeBuilder<T : Any>(public val type: KClass<out T>) {
private val map = HashMap<NameToken, DataTreeBuilderItem<T>>() private val map = HashMap<NameToken, DataTreeBuilderItem<T>>()
private var meta = MetaBuilder() private var meta = MetaBuilder()
operator fun set(token: NameToken, node: DataTreeBuilder<out T>) { public operator fun set(token: NameToken, node: DataTreeBuilder<out T>) {
if (map.containsKey(token)) error("Tree entry with name $token is not empty") if (map.containsKey(token)) error("Tree entry with name $token is not empty")
map[token] = DataTreeBuilderItem.Node(node) map[token] = DataTreeBuilderItem.Node(node)
} }
operator fun set(token: NameToken, data: Data<T>) { public operator fun set(token: NameToken, data: Data<T>) {
if (map.containsKey(token)) error("Tree entry with name $token is not empty") if (map.containsKey(token)) error("Tree entry with name $token is not empty")
map[token] = DataTreeBuilderItem.Leaf(data) map[token] = DataTreeBuilderItem.Leaf(data)
} }
@ -173,7 +173,7 @@ class DataTreeBuilder<T : Any>(val type: KClass<out T>) {
} }
} }
operator fun set(name: Name, data: Data<T>) { public operator fun set(name: Name, data: Data<T>) {
when (name.length) { when (name.length) {
0 -> error("Can't add data with empty name") 0 -> error("Can't add data with empty name")
1 -> set(name.firstOrNull()!!, data) 1 -> set(name.firstOrNull()!!, data)
@ -181,7 +181,7 @@ class DataTreeBuilder<T : Any>(val type: KClass<out T>) {
} }
} }
operator fun set(name: Name, node: DataTreeBuilder<out T>) { public operator fun set(name: Name, node: DataTreeBuilder<out T>) {
when (name.length) { when (name.length) {
0 -> error("Can't add data with empty name") 0 -> error("Can't add data with empty name")
1 -> set(name.firstOrNull()!!, node) 1 -> set(name.firstOrNull()!!, node)
@ -189,9 +189,9 @@ class DataTreeBuilder<T : Any>(val type: KClass<out T>) {
} }
} }
operator fun set(name: Name, node: DataNode<T>) = set(name, node.builder()) public operator fun set(name: Name, node: DataNode<T>): Unit = set(name, node.builder())
operator fun set(name: Name, item: DataItem<T>) = when (item) { public operator fun set(name: Name, item: DataItem<T>): Unit = when (item) {
is DataItem.Node<T> -> set(name, item.node.builder()) is DataItem.Node<T> -> set(name, item.node.builder())
is DataItem.Leaf<T> -> set(name, item.data) is DataItem.Leaf<T> -> set(name, item.data)
} }
@ -199,25 +199,25 @@ class DataTreeBuilder<T : Any>(val type: KClass<out T>) {
/** /**
* Append data to node * Append data to node
*/ */
infix fun String.put(data: Data<T>) = set(toName(), data) public infix fun String.put(data: Data<T>): Unit = set(toName(), data)
/** /**
* Append node * Append node
*/ */
infix fun String.put(node: DataNode<T>) = set(toName(), node) public infix fun String.put(node: DataNode<T>): Unit = set(toName(), node)
infix fun String.put(item: DataItem<T>) = set(toName(), item) public infix fun String.put(item: DataItem<T>): Unit = set(toName(), item)
/** /**
* Build and append node * Build and append node
*/ */
infix fun String.put(block: DataTreeBuilder<T>.() -> Unit) = set(toName(), DataTreeBuilder(type).apply(block)) public infix fun String.put(block: DataTreeBuilder<T>.() -> Unit): Unit = set(toName(), DataTreeBuilder(type).apply(block))
/** /**
* Update data with given node data and meta with node meta. * Update data with given node data and meta with node meta.
*/ */
fun update(node: DataNode<T>) { public fun update(node: DataNode<T>) {
node.dataSequence().forEach { node.dataSequence().forEach {
//TODO check if the place is occupied //TODO check if the place is occupied
this[it.first] = it.second this[it.first] = it.second
@ -225,13 +225,13 @@ class DataTreeBuilder<T : Any>(val type: KClass<out T>) {
meta.update(node.meta) meta.update(node.meta)
} }
fun meta(block: MetaBuilder.() -> Unit) = meta.apply(block) public fun meta(block: MetaBuilder.() -> Unit): MetaBuilder = meta.apply(block)
fun meta(meta: Meta) { public fun meta(meta: Meta) {
this.meta = meta.builder() this.meta = meta.builder()
} }
fun build(): DataTree<T> { public fun build(): DataTree<T> {
val resMap = map.mapValues { (_, value) -> val resMap = map.mapValues { (_, value) ->
when (value) { when (value) {
is DataTreeBuilderItem.Leaf -> DataItem.Leaf(value.value) is DataTreeBuilderItem.Leaf -> DataItem.Leaf(value.value)
@ -242,50 +242,50 @@ class DataTreeBuilder<T : Any>(val type: KClass<out T>) {
} }
} }
fun <T : Any> DataTreeBuilder<T>.datum(name: Name, data: Data<T>) { public fun <T : Any> DataTreeBuilder<T>.datum(name: Name, data: Data<T>) {
this[name] = data this[name] = data
} }
fun <T : Any> DataTreeBuilder<T>.datum(name: String, data: Data<T>) { public fun <T : Any> DataTreeBuilder<T>.datum(name: String, data: Data<T>) {
this[name.toName()] = data this[name.toName()] = data
} }
fun <T : Any> DataTreeBuilder<T>.static(name: Name, data: T, meta: Meta = Meta.EMPTY) { public fun <T : Any> DataTreeBuilder<T>.static(name: Name, data: T, meta: Meta = Meta.EMPTY) {
this[name] = Data.static(data, meta) this[name] = Data.static(data, meta)
} }
fun <T : Any> DataTreeBuilder<T>.static(name: Name, data: T, block: MetaBuilder.() -> Unit = {}) { public fun <T : Any> DataTreeBuilder<T>.static(name: Name, data: T, block: MetaBuilder.() -> Unit = {}) {
this[name] = Data.static(data, Meta(block)) this[name] = Data.static(data, Meta(block))
} }
fun <T : Any> DataTreeBuilder<T>.static(name: String, data: T, block: MetaBuilder.() -> Unit = {}) { public fun <T : Any> DataTreeBuilder<T>.static(name: String, data: T, block: MetaBuilder.() -> Unit = {}) {
this[name.toName()] = Data.static(data, Meta(block)) this[name.toName()] = Data.static(data, Meta(block))
} }
fun <T : Any> DataTreeBuilder<T>.node(name: Name, node: DataNode<T>) { public fun <T : Any> DataTreeBuilder<T>.node(name: Name, node: DataNode<T>) {
this[name] = node this[name] = node
} }
fun <T : Any> DataTreeBuilder<T>.node(name: String, node: DataNode<T>) { public fun <T : Any> DataTreeBuilder<T>.node(name: String, node: DataNode<T>) {
this[name.toName()] = node this[name.toName()] = node
} }
inline fun <reified T : Any> DataTreeBuilder<T>.node(name: Name, noinline block: DataTreeBuilder<T>.() -> Unit) { public inline fun <reified T : Any> DataTreeBuilder<T>.node(name: Name, noinline block: DataTreeBuilder<T>.() -> Unit) {
this[name] = DataNode(T::class, block) this[name] = DataNode(T::class, block)
} }
inline fun <reified T : Any> DataTreeBuilder<T>.node(name: String, noinline block: DataTreeBuilder<T>.() -> Unit) { public inline fun <reified T : Any> DataTreeBuilder<T>.node(name: String, noinline block: DataTreeBuilder<T>.() -> Unit) {
this[name.toName()] = DataNode(T::class, block) this[name.toName()] = DataNode(T::class, block)
} }
/** /**
* Generate a mutable builder from this node. Node content is not changed * Generate a mutable builder from this node. Node content is not changed
*/ */
fun <T : Any> DataNode<T>.builder(): DataTreeBuilder<T> = DataTreeBuilder(type).apply { public fun <T : Any> DataNode<T>.builder(): DataTreeBuilder<T> = DataTreeBuilder(type).apply {
dataSequence().forEach { (name, data) -> this[name] = data } dataSequence().forEach { (name, data) -> this[name] = data }
} }
fun <T : Any> DataNode<T>.filter(predicate: (Name, Data<T>) -> Boolean): DataNode<T> = DataNode.invoke(type) { public fun <T : Any> DataNode<T>.filter(predicate: (Name, Data<T>) -> Boolean): DataNode<T> = DataNode.invoke(type) {
dataSequence().forEach { (name, data) -> dataSequence().forEach { (name, data) ->
if (predicate(name, data)) { if (predicate(name, data)) {
this[name] = data this[name] = data
@ -293,4 +293,4 @@ fun <T : Any> DataNode<T>.filter(predicate: (Name, Data<T>) -> Boolean): DataNod
} }
} }
fun <T : Any> DataNode<T>.first(): Data<T>? = dataSequence().first().second public fun <T : Any> DataNode<T>.first(): Data<T>? = dataSequence().first().second
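A sketch of the `DataTreeBuilder` DSL and name-based lookup made public in the file above. Names and values are illustrative, and the extensions are assumed to live in `hep.dataforge.data` like the surrounding files; `runBlocking` is used only to await results on the JVM.

```kotlin
import hep.dataforge.data.Data
import hep.dataforge.data.DataNode
import hep.dataforge.data.await
import hep.dataforge.data.data
import hep.dataforge.data.get
import kotlinx.coroutines.runBlocking

fun main() {
    // Build a typed tree with the String.put DSL
    val node = DataNode.builder(String::class).apply {
        "greeting" put Data.static("hello")
        "nested" put {
            "farewell" put Data.static("bye")
        }
    }.build()

    // Items are addressed by dot-separated names
    runBlocking {
        println(node["greeting"].data?.await())        // hello
        println(node["nested.farewell"].data?.await()) // bye
    }
}
```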


@@ -1,37 +1,36 @@
 package hep.dataforge.data
+import hep.dataforge.meta.DFExperimental
 import kotlinx.coroutines.*
 import kotlin.coroutines.CoroutineContext
 import kotlin.coroutines.EmptyCoroutineContext
-interface Goal<out T> {
+public interface Goal<out T> {
-val dependencies: Collection<Goal<*>>
+public val dependencies: Collection<Goal<*>>
 /**
 * Returns current running coroutine if the goal is started
 */
-val result: Deferred<T>?
+public val result: Deferred<T>?
 /**
 * Get ongoing computation or start a new one.
 * Does not guarantee thread safety. In case of multi-thread access, could create orphan computations.
 */
-fun CoroutineScope.startAsync(): Deferred<T>
+public fun CoroutineScope.startAsync(): Deferred<T>
 /**
 * Reset the computation
 */
-fun reset()
+public fun reset()
-companion object {
-}
+public companion object
 }
-suspend fun <T> Goal<T>.await(): T = coroutineScope { startAsync().await() }
+public suspend fun <T> Goal<T>.await(): T = coroutineScope { startAsync().await() }
-val Goal<*>.isComplete get() = result?.isCompleted ?: false
+public val Goal<*>.isComplete: Boolean get() = result?.isCompleted ?: false
-open class StaticGoal<T>(val value: T) : Goal<T> {
+public open class StaticGoal<T>(public val value: T) : Goal<T> {
 override val dependencies: Collection<Goal<*>> get() = emptyList()
 override val result: Deferred<T> = CompletableDeferred(value)
@@ -42,10 +41,10 @@ open class StaticGoal<T>(val value: T) : Goal<T> {
 }
 }
-open class DynamicGoal<T>(
-val coroutineContext: CoroutineContext = EmptyCoroutineContext,
+public open class DynamicGoal<T>(
+private val coroutineContext: CoroutineContext = EmptyCoroutineContext,
 override val dependencies: Collection<Goal<*>> = emptyList(),
-val block: suspend CoroutineScope.() -> T
+public val block: suspend CoroutineScope.() -> T
 ) : Goal<T> {
 final override var result: Deferred<T>? = null
@@ -55,6 +54,7 @@ open class DynamicGoal<T>(
 * Get ongoing computation or start a new one.
 * Does not guarantee thread safety. In case of multi-thread access, could create orphan computations.
 */
+@DFExperimental
 override fun CoroutineScope.startAsync(): Deferred<T> {
 val startedDependencies = this@DynamicGoal.dependencies.map { goal ->
 goal.run { startAsync() }
@@ -82,7 +82,7 @@ open class DynamicGoal<T>(
 /**
 * Create a one-to-one goal based on existing goal
 */
-fun <T, R> Goal<T>.map(
+public fun <T, R> Goal<T>.map(
 coroutineContext: CoroutineContext = EmptyCoroutineContext,
 block: suspend CoroutineScope.(T) -> R
 ): Goal<R> = DynamicGoal(coroutineContext, listOf(this)) {
@@ -92,7 +92,7 @@ fun <T, R> Goal<T>.map(
 /**
 * Create a joining goal.
 */
-fun <T, R> Collection<Goal<T>>.reduce(
+public fun <T, R> Collection<Goal<T>>.reduce(
 coroutineContext: CoroutineContext = EmptyCoroutineContext,
 block: suspend CoroutineScope.(Collection<T>) -> R
 ): Goal<R> = DynamicGoal(coroutineContext, this) {
@@ -105,7 +105,7 @@ fun <T, R> Collection<Goal<T>>.reduce(
 * @param T type of the input goal
 * @param R type of the result goal
 */
-fun <K, T, R> Map<K, Goal<T>>.reduce(
+public fun <K, T, R> Map<K, Goal<T>>.reduce(
 coroutineContext: CoroutineContext = EmptyCoroutineContext,
 block: suspend CoroutineScope.(Map<K, T>) -> R
 ): Goal<R> = DynamicGoal(coroutineContext, this.values) {
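A minimal sketch of composing the now-public `Goal` primitives above: a dynamic goal, a derived goal via `map`, and a joining `reduce`, awaited on the JVM with `runBlocking`. The values are illustrative.

```kotlin
import hep.dataforge.data.DynamicGoal
import hep.dataforge.data.await
import hep.dataforge.data.map
import hep.dataforge.data.reduce
import kotlinx.coroutines.runBlocking

fun main() {
    // A lazy computation, a derived one, and a reduction over both
    val source = DynamicGoal(block = { 21 })
    val doubled = source.map { it * 2 }
    val total = listOf(source, doubled).reduce { values -> values.sum() }

    // Goals only run when awaited
    runBlocking { println(total.await()) } // prints 63
}
```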


@@ -19,10 +19,10 @@ import hep.dataforge.meta.Meta
 import hep.dataforge.meta.get
 import hep.dataforge.meta.string
-interface GroupRule {
+public interface GroupRule {
-operator fun <T : Any> invoke(node: DataNode<T>): Map<String, DataNode<T>>
+public operator fun <T : Any> invoke(node: DataNode<T>): Map<String, DataNode<T>>
-companion object{
+public companion object{
 /**
 * Create grouping rule that creates groups for different values of value
 * field with name [key]
@@ -31,7 +31,7 @@ interface GroupRule {
 * @param defaultTagValue
 * @return
 */
-fun byValue(key: String, defaultTagValue: String): GroupRule = object :
+public fun byValue(key: String, defaultTagValue: String): GroupRule = object :
 GroupRule {
 override fun <T : Any> invoke(node: DataNode<T>): Map<String, DataNode<T>> {
 val map = HashMap<String, DataTreeBuilder<T>>()
@@ -52,7 +52,7 @@ interface GroupRule {
 // def = "default",
 // info = "Default value which should be used for content in which the grouping value is not presented"
 // )
-fun byMeta(config: Meta): GroupRule {
+public fun byMeta(config: Meta): GroupRule {
 //TODO expand grouping options
 return config["byValue"]?.string?.let {
 byValue(


@ -7,32 +7,31 @@ import kotlin.reflect.KClass
/** /**
* Action environment includes data name, data meta and action configuration meta * Action environment includes data name, data meta and action configuration meta
*/ */
data class ActionEnv( public data class ActionEnv(
val name: Name, val name: Name,
val meta: Meta, val meta: Meta,
val actionMeta: Meta val actionMeta: Meta
) )
/** /**
* Action environment * Action environment
*/ */
@DFBuilder @DFBuilder
class MapActionBuilder<T, R>(var name: Name, var meta: MetaBuilder, val actionMeta: Meta) { public class MapActionBuilder<T, R>(public var name: Name, public var meta: MetaBuilder, public val actionMeta: Meta) {
lateinit var result: suspend ActionEnv.(T) -> R public lateinit var result: suspend ActionEnv.(T) -> R
/** /**
* Calculate the result of goal * Calculate the result of goal
*/ */
fun result(f: suspend ActionEnv.(T) -> R) { public fun result(f: suspend ActionEnv.(T) -> R) {
result = f; result = f;
} }
} }
class MapAction<T : Any, out R : Any>( public class MapAction<T : Any, out R : Any>(
val inputType: KClass<T>, public val inputType: KClass<T>,
val outputType: KClass<out R>, public val outputType: KClass<out R>,
private val block: MapActionBuilder<T, R>.() -> Unit private val block: MapActionBuilder<T, R>.() -> Unit
) : Action<T, R> { ) : Action<T, R> {
@ -67,7 +66,7 @@ class MapAction<T : Any, out R : Any>(
} }
} }
inline fun <reified T : Any, reified R : Any> DataNode<T>.map( public inline fun <reified T : Any, reified R : Any> DataNode<T>.map(
meta: Meta, meta: Meta,
noinline action: MapActionBuilder<in T, out R>.() -> Unit noinline action: MapActionBuilder<in T, out R>.() -> Unit
): DataNode<R> = MapAction(T::class, R::class, action).invoke(this, meta) ): DataNode<R> = MapAction(T::class, R::class, action).invoke(this, meta)
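
MapActionBuilder above captures the transformation through a result { } call that assigns a lateinit property. A self-contained sketch of that builder shape with the Name/Meta machinery stripped away; TransformBuilder and transform are hypothetical names used only here:

import kotlinx.coroutines.runBlocking

// A builder that stores a suspend transformation assigned via result { ... }.
class TransformBuilder<T, R> {
    lateinit var result: suspend (T) -> R
    fun result(f: suspend (T) -> R) { result = f }
}

// Apply the builder block and return the captured transformation.
fun <T, R> transform(block: TransformBuilder<T, R>.() -> Unit): suspend (T) -> R =
    TransformBuilder<T, R>().apply(block).result

fun main() = runBlocking {
    val double = transform<Int, Int> { result { it * 2 } }
    println(double(21)) // prints 42
}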

View File

@ -7,25 +7,25 @@ import hep.dataforge.names.toName
import kotlin.reflect.KClass import kotlin.reflect.KClass
class JoinGroup<T : Any, R : Any>(var name: String, internal val node: DataNode<T>) { public class JoinGroup<T : Any, R : Any>(public var name: String, internal val node: DataNode<T>) {
var meta: MetaBuilder = MetaBuilder() public var meta: MetaBuilder = MetaBuilder()
lateinit var result: suspend ActionEnv.(Map<Name, T>) -> R public lateinit var result: suspend ActionEnv.(Map<Name, T>) -> R
fun result(f: suspend ActionEnv.(Map<Name, T>) -> R) { public fun result(f: suspend ActionEnv.(Map<Name, T>) -> R) {
this.result = f; this.result = f;
} }
} }
class ReduceGroupBuilder<T : Any, R : Any>(val actionMeta: Meta) { public class ReduceGroupBuilder<T : Any, R : Any>(public val actionMeta: Meta) {
private val groupRules: MutableList<(DataNode<T>) -> List<JoinGroup<T, R>>> = ArrayList(); private val groupRules: MutableList<(DataNode<T>) -> List<JoinGroup<T, R>>> = ArrayList();
/** /**
* introduce grouping by value name * introduce grouping by value name
*/ */
fun byValue(tag: String, defaultTag: String = "@default", action: JoinGroup<T, R>.() -> Unit) { public fun byValue(tag: String, defaultTag: String = "@default", action: JoinGroup<T, R>.() -> Unit) {
groupRules += { node -> groupRules += { node ->
GroupRule.byValue(tag, defaultTag).invoke(node).map { GroupRule.byValue(tag, defaultTag).invoke(node).map {
JoinGroup<T, R>(it.key, it.value).apply(action) JoinGroup<T, R>(it.key, it.value).apply(action)
@ -36,7 +36,7 @@ class ReduceGroupBuilder<T : Any, R : Any>(val actionMeta: Meta) {
/** /**
* Add a single fixed group to grouping rules * Add a single fixed group to grouping rules
*/ */
fun group(groupName: String, filter: DataFilter, action: JoinGroup<T, R>.() -> Unit) { public fun group(groupName: String, filter: DataFilter, action: JoinGroup<T, R>.() -> Unit) {
groupRules += { node -> groupRules += { node ->
listOf( listOf(
JoinGroup<T, R>(groupName, node.filter(filter)).apply(action) JoinGroup<T, R>(groupName, node.filter(filter)).apply(action)
@ -44,7 +44,7 @@ class ReduceGroupBuilder<T : Any, R : Any>(val actionMeta: Meta) {
} }
} }
fun group(groupName: String, filter: (Name, Data<T>) -> Boolean, action: JoinGroup<T, R>.() -> Unit) { public fun group(groupName: String, filter: (Name, Data<T>) -> Boolean, action: JoinGroup<T, R>.() -> Unit) {
groupRules += { node -> groupRules += { node ->
listOf( listOf(
JoinGroup<T, R>(groupName, node.filter(filter)).apply(action) JoinGroup<T, R>(groupName, node.filter(filter)).apply(action)
@ -55,7 +55,7 @@ class ReduceGroupBuilder<T : Any, R : Any>(val actionMeta: Meta) {
/** /**
* Apply transformation to the whole node * Apply transformation to the whole node
*/ */
fun result(resultName: String, f: suspend ActionEnv.(Map<Name, T>) -> R) { public fun result(resultName: String, f: suspend ActionEnv.(Map<Name, T>) -> R) {
groupRules += { node -> groupRules += { node ->
listOf(JoinGroup<T, R>(resultName, node).apply { result(f) }) listOf(JoinGroup<T, R>(resultName, node).apply { result(f) })
} }
@ -71,9 +71,9 @@ class ReduceGroupBuilder<T : Any, R : Any>(val actionMeta: Meta) {
/** /**
* The same rules as for KPipe * The same rules as for KPipe
*/ */
class ReduceAction<T : Any, R : Any>( public class ReduceAction<T : Any, R : Any>(
val inputType: KClass<T>, public val inputType: KClass<T>,
val outputType: KClass<out R>, public val outputType: KClass<out R>,
private val action: ReduceGroupBuilder<T, R>.() -> Unit private val action: ReduceGroupBuilder<T, R>.() -> Unit
) : Action<T, R> { ) : Action<T, R> {
@ -104,4 +104,4 @@ class ReduceAction<T : Any, R : Any>(
} }
} }
operator fun <T> Map<Name, T>.get(name: String) = get(name.toName()) public operator fun <T> Map<Name, T>.get(name: String): T? = get(name.toName())
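
The last hunk gives the String-keyed lookup an explicit T? return type, as explicit API rules require for public declarations. A small stand-alone sketch of the same convenience, with a hypothetical Key wrapper in place of Name:

// A String overload that delegates to the typed key, like Map<Name, T>.get(name: String).
data class Key(val value: String)

operator fun <T> Map<Key, T>.get(name: String): T? = get(Key(name))

fun main() {
    val map = mapOf(Key("x") to 1, Key("y") to 2)
    println(map["y"]) // prints 2
}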

View File

@ -10,16 +10,16 @@ import kotlin.collections.set
import kotlin.reflect.KClass import kotlin.reflect.KClass
class FragmentRule<T : Any, R : Any>(val name: Name, var meta: MetaBuilder) { public class FragmentRule<T : Any, R : Any>(public val name: Name, public var meta: MetaBuilder) {
lateinit var result: suspend (T) -> R public lateinit var result: suspend (T) -> R
fun result(f: suspend (T) -> R) { public fun result(f: suspend (T) -> R) {
result = f; result = f;
} }
} }
class SplitBuilder<T : Any, R : Any>(val name: Name, val meta: Meta) { public class SplitBuilder<T : Any, R : Any>(public val name: Name, public val meta: Meta) {
internal val fragments: MutableMap<Name, FragmentRule<T, R>.() -> Unit> = HashMap() internal val fragments: MutableMap<Name, FragmentRule<T, R>.() -> Unit> = HashMap()
/** /**
@ -27,14 +27,14 @@ class SplitBuilder<T : Any, R : Any>(val name: Name, val meta: Meta) {
* @param name the name of a fragment * @param name the name of a fragment
* @param rule the rule to transform fragment name and meta using * @param rule the rule to transform fragment name and meta using
*/ */
fun fragment(name: String, rule: FragmentRule<T, R>.() -> Unit) { public fun fragment(name: String, rule: FragmentRule<T, R>.() -> Unit) {
fragments[name.toName()] = rule fragments[name.toName()] = rule
} }
} }
class SplitAction<T : Any, R : Any>( public class SplitAction<T : Any, R : Any>(
val inputType: KClass<T>, public val inputType: KClass<T>,
val outputType: KClass<out R>, public val outputType: KClass<out R>,
private val action: SplitBuilder<T, R>.() -> Unit private val action: SplitBuilder<T, R>.() -> Unit
) : Action<T, R> { ) : Action<T, R> {
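
The blanket addition of public modifiers and explicit return types across these files matches Kotlin 1.4 explicit API mode. A hedged sketch of how a library module typically enables it in the Gradle Kotlin DSL; this project's build is driven by the ru.mipt.npm plugins, so the snippet is illustrative rather than its actual configuration:

// build.gradle.kts (illustrative, not this project's actual setup)
plugins {
    kotlin("multiplatform") version "1.4.20"
}

kotlin {
    explicitApi()           // missing visibility modifiers / return types become compile errors
    // explicitApiWarning() // or report them as warnings only
}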

View File

@ -6,7 +6,7 @@ import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Deferred import kotlinx.coroutines.Deferred
import kotlin.reflect.KClass import kotlin.reflect.KClass
fun <R : Any, T : R> Data<T>.upcast(type: KClass<out R>): Data<R> { public fun <R : Any, T : R> Data<T>.upcast(type: KClass<out R>): Data<R> {
return object : Data<R> by this { return object : Data<R> by this {
override val type: KClass<out R> = type override val type: KClass<out R> = type
} }
@ -15,7 +15,7 @@ fun <R : Any, T : R> Data<T>.upcast(type: KClass<out R>): Data<R> {
/** /**
* Safe upcast a [Data] to a supertype * Safe upcast a [Data] to a supertype
*/ */
inline fun <reified R : Any, T : R> Data<T>.upcast(): Data<R> = upcast(R::class) public inline fun <reified R : Any, T : R> Data<T>.upcast(): Data<R> = upcast(R::class)
/** /**
* Check if node could be safely cast to given class * Check if node could be safely cast to given class
@ -27,7 +27,7 @@ internal expect fun <R : Any> DataNode<*>.canCast(type: KClass<out R>): Boolean
*/ */
internal expect fun <R : Any> Data<*>.canCast(type: KClass<out R>): Boolean internal expect fun <R : Any> Data<*>.canCast(type: KClass<out R>): Boolean
fun <R : Any> DataItem<*>.canCast(type: KClass<out R>): Boolean = when (this) { public fun <R : Any> DataItem<*>.canCast(type: KClass<out R>): Boolean = when (this) {
is DataItem.Node -> node.canCast(type) is DataItem.Node -> node.canCast(type)
is DataItem.Leaf -> data.canCast(type) is DataItem.Leaf -> data.canCast(type)
} }
@ -36,7 +36,7 @@ fun <R : Any> DataItem<*>.canCast(type: KClass<out R>): Boolean = when (this) {
* Unsafe cast of data node * Unsafe cast of data node
*/ */
@Suppress("UNCHECKED_CAST") @Suppress("UNCHECKED_CAST")
fun <R : Any> Data<*>.cast(type: KClass<out R>): Data<R> { public fun <R : Any> Data<*>.cast(type: KClass<out R>): Data<R> {
return object : Data<R> { return object : Data<R> {
override val meta: Meta get() = this@cast.meta override val meta: Meta get() = this@cast.meta
override val dependencies: Collection<Goal<*>> get() = this@cast.dependencies override val dependencies: Collection<Goal<*>> get() = this@cast.dependencies
@ -47,10 +47,10 @@ fun <R : Any> Data<*>.cast(type: KClass<out R>): Data<R> {
} }
} }
inline fun <reified R : Any> Data<*>.cast(): Data<R> = cast(R::class) public inline fun <reified R : Any> Data<*>.cast(): Data<R> = cast(R::class)
@Suppress("UNCHECKED_CAST") @Suppress("UNCHECKED_CAST")
fun <R : Any> DataNode<*>.cast(type: KClass<out R>): DataNode<R> { public fun <R : Any> DataNode<*>.cast(type: KClass<out R>): DataNode<R> {
return object : DataNode<R> { return object : DataNode<R> {
override val meta: Meta get() = this@cast.meta override val meta: Meta get() = this@cast.meta
override val type: KClass<out R> = type override val type: KClass<out R> = type
@ -58,12 +58,12 @@ fun <R : Any> DataNode<*>.cast(type: KClass<out R>): DataNode<R> {
} }
} }
inline fun <reified R : Any> DataNode<*>.cast(): DataNode<R> = cast(R::class) public inline fun <reified R : Any> DataNode<*>.cast(): DataNode<R> = cast(R::class)
/** /**
* Check that node is compatible with given type meaning that each element could be cast to the type * Check that node is compatible with given type meaning that each element could be cast to the type
*/ */
fun <T : Any> DataNode<*>.ensureType(type: KClass<out T>) { public fun <T : Any> DataNode<*>.ensureType(type: KClass<out T>) {
if (!canCast(type)) { if (!canCast(type)) {
error("$type expected, but $type received") error("$type expected, but $type received")
} }
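
cast above narrows a Data or DataNode by wrapping it in an anonymous object that delegates everything except the type token. A toy, self-contained version of that wrapper pattern, where Box is a hypothetical stand-in for Data:

import kotlin.reflect.KClass

// A minimal container carrying a runtime type token, like Data<T>.
interface Box<out T : Any> {
    val type: KClass<out T>
    val value: Any
}

// Re-wrap with a narrower type token while delegating the payload, like cast(type).
fun <R : Any> Box<*>.cast(type: KClass<out R>): Box<R> = object : Box<R> {
    override val type: KClass<out R> = type
    override val value: Any get() = this@cast.value
}

inline fun <reified R : Any> Box<*>.cast(): Box<R> = cast(R::class)

fun main() {
    val raw: Box<*> = object : Box<Number> {
        override val type: KClass<out Number> = Int::class
        override val value: Any = 42
    }
    val ints: Box<Int> = raw.cast()
    println("${ints.type.simpleName}: ${ints.value}") // prints Int: 42
}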

View File

@ -6,11 +6,9 @@ import kotlin.reflect.KClass
* Check that node is compatible with given type meaning that each element could be cast to the type * Check that node is compatible with given type meaning that each element could be cast to the type
*/ */
internal actual fun <R : Any> DataNode<*>.canCast(type: KClass<out R>): Boolean { internal actual fun <R : Any> DataNode<*>.canCast(type: KClass<out R>): Boolean {
//Not supported in js yet return this.type == type
return true
} }
internal actual fun <R : Any> Data<*>.canCast(type: KClass<out R>): Boolean { internal actual fun <R : Any> Data<*>.canCast(type: KClass<out R>): Boolean {
//Not supported in js yet return this.type == type
return true
} }

View File

@ -8,7 +8,7 @@ import kotlin.reflect.KClass
/** /**
* A zero-copy data node wrapper that returns only children with appropriate type. * A zero-copy data node wrapper that returns only children with appropriate type.
*/ */
class TypeFilteredDataNode<out T : Any>(val origin: DataNode<*>, override val type: KClass<out T>) : DataNode<T> { public class TypeFilteredDataNode<out T : Any>(public val origin: DataNode<*>, override val type: KClass<out T>) : DataNode<T> {
override val meta: Meta get() = origin.meta override val meta: Meta get() = origin.meta
override val items: Map<NameToken, DataItem<T>> by lazy { override val items: Map<NameToken, DataItem<T>> by lazy {
origin.items.mapNotNull { (key, item) -> origin.items.mapNotNull { (key, item) ->

View File

@ -39,10 +39,10 @@ public fun <R : Any> DataNode<*>.filterIsInstance(type: KClass<out R>): DataNode
/** /**
* Filter all elements of given data item that could be cast to given type. If no elements are available, return null. * Filter all elements of given data item that could be cast to given type. If no elements are available, return null.
*/ */
fun <R : Any> DataItem<*>?.filterIsInstance(type: KClass<out R>): DataItem<R>? = when (this) { public fun <R : Any> DataItem<*>?.filterIsInstance(type: KClass<out R>): DataItem<R>? = when (this) {
null -> null null -> null
is DataItem.Node -> DataItem.Node(this.node.filterIsInstance(type)) is DataItem.Node -> DataItem.Node(this.node.filterIsInstance(type))
is DataItem.Leaf -> this.data.filterIsInstance(type)?.let { DataItem.Leaf(it) } is DataItem.Leaf -> this.data.filterIsInstance(type)?.let { DataItem.Leaf(it) }
} }
inline fun <reified R : Any> DataItem<*>?.filterIsInstance(): DataItem<R>? = this@filterIsInstance.filterIsInstance(R::class) public inline fun <reified R : Any> DataItem<*>?.filterIsInstance(): DataItem<R>? = this@filterIsInstance.filterIsInstance(R::class)
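
The node-level filterIsInstance mirrors the standard-library collection operation of the same name; a one-line reminder of that behaviour on plain lists:

fun main() {
    val mixed: List<Any> = listOf(1, "a", 2.0, "b")
    // Keep only the elements assignable to String, preserving order.
    println(mixed.filterIsInstance<String>()) // prints [a, b]
}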

View File

@ -0,0 +1,14 @@
package hep.dataforge.data
import kotlin.reflect.KClass
/**
* Check that node is compatible with given type meaning that each element could be cast to the type
*/
internal actual fun <R : Any> DataNode<*>.canCast(type: KClass<out R>): Boolean {
return this.type == type
}
internal actual fun <R : Any> Data<*>.canCast(type: KClass<out R>): Boolean {
return this.type == type
}
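
Both the JS actual above and this new native actual now compare KClass tokens for equality instead of returning true unconditionally. Exact equality is stricter than a subtype-aware check; a small self-contained illustration of the difference:

open class Base
class Derived : Base()

fun main() {
    val value: Any = Derived()
    println(Derived::class == Base::class)   // false: exact KClass equality
    println(Base::class.isInstance(value))   // true: subtype-aware check
}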

View File

@ -1,7 +1,7 @@
plugins { plugins {
id("ru.mipt.npm.mpp") id("ru.mipt.npm.mpp")
id("ru.mipt.npm.node") id("ru.mipt.npm.node")
// id("ru.mipt.npm.native") id("ru.mipt.npm.native")
} }
description = "IO module" description = "IO module"
@ -12,7 +12,7 @@ kscience {
} }
} }
val ioVersion by rootProject.extra("0.2.0-npm-dev-10") val ioVersion by rootProject.extra("0.2.0-npm-dev-11")
kotlin { kotlin {
sourceSets { sourceSets {

View File

@ -15,7 +15,7 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
override val tag: PluginTag get() = Companion.tag override val tag: PluginTag get() = Companion.tag
public val ioFormatFactories: Collection<IOFormatFactory<*>> by lazy { public val ioFormatFactories: Collection<IOFormatFactory<*>> by lazy {
context.resolve<IOFormatFactory<*>>(IO_FORMAT_TYPE).values context.gather<IOFormatFactory<*>>(IO_FORMAT_TYPE).values
} }
public fun <T : Any> resolveIOFormat(item: MetaItem<*>, type: KClass<out T>): IOFormat<T>? { public fun <T : Any> resolveIOFormat(item: MetaItem<*>, type: KClass<out T>): IOFormat<T>? {
@ -30,7 +30,7 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
public val metaFormatFactories: Collection<MetaFormatFactory> by lazy { public val metaFormatFactories: Collection<MetaFormatFactory> by lazy {
context.resolve<MetaFormatFactory>(META_FORMAT_TYPE).values context.gather<MetaFormatFactory>(META_FORMAT_TYPE).values
} }
public fun resolveMetaFormat(key: Short, meta: Meta = Meta.EMPTY): MetaFormat? = public fun resolveMetaFormat(key: Short, meta: Meta = Meta.EMPTY): MetaFormat? =
@ -40,7 +40,7 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
metaFormatFactories.find { it.shortName == name }?.invoke(meta) metaFormatFactories.find { it.shortName == name }?.invoke(meta)
public val envelopeFormatFactories: Collection<EnvelopeFormatFactory> by lazy { public val envelopeFormatFactories: Collection<EnvelopeFormatFactory> by lazy {
context.resolve<EnvelopeFormatFactory>(ENVELOPE_FORMAT_TYPE).values context.gather<EnvelopeFormatFactory>(ENVELOPE_FORMAT_TYPE).values
} }
private fun resolveEnvelopeFormat(name: Name, meta: Meta = Meta.EMPTY): EnvelopeFormat? = private fun resolveEnvelopeFormat(name: Name, meta: Meta = Meta.EMPTY): EnvelopeFormat? =
@ -52,11 +52,11 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
return resolveEnvelopeFormat(name.toName(), meta) return resolveEnvelopeFormat(name.toName(), meta)
} }
override fun provideTop(target: String): Map<Name, Any> { override fun content(target: String): Map<Name, Any> {
return when (target) { return when (target) {
META_FORMAT_TYPE -> defaultMetaFormats.toMap() META_FORMAT_TYPE -> defaultMetaFormats.toMap()
ENVELOPE_FORMAT_TYPE -> defaultEnvelopeFormats.toMap() ENVELOPE_FORMAT_TYPE -> defaultEnvelopeFormats.toMap()
else -> super.provideTop(target) else -> super.content(target)
} }
} }
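
The IOPlugin hunks rename provideTop to content and the context lookups from resolve to gather. A stripped-down sketch of that provider/gather pattern with plain strings in place of Name and typed factories; ContentProvider, FormatsPlugin and gather are hypothetical names used only here:

// A provider exposes named items per target; a context gathers them from all plugins.
interface ContentProvider {
    fun content(target: String): Map<String, Any>
}

class FormatsPlugin : ContentProvider {
    override fun content(target: String): Map<String, Any> = when (target) {
        "io.format" -> mapOf("json" to "JsonMetaFormat", "binary" to "BinaryMetaFormat")
        else -> emptyMap()
    }
}

fun gather(providers: List<ContentProvider>, target: String): Map<String, Any> =
    providers.flatMap { it.content(target).entries }.associate { (k, v) -> k to v }

fun main() {
    println(gather(listOf(FormatsPlugin()), "io.format").keys) // prints [json, binary]
}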

View File

@ -48,8 +48,10 @@ class MetaSerializerTest {
assertEquals(name, restored) assertEquals(name, restored)
} }
@OptIn(ExperimentalSerializationApi::class)
@Test @Test
fun testMetaItemDescriptor() { fun testMetaItemDescriptor() {
val descriptor = MetaItem.serializer(MetaSerializer).descriptor.getElementDescriptor(0) val descriptor = MetaItem.serializer(MetaSerializer).descriptor.getElementDescriptor(0)
println(descriptor)
} }
} }
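
The new @OptIn annotation is needed because serial descriptor introspection such as getElementDescriptor is experimental in kotlinx.serialization 1.0. Instead of annotating each test, the opt-in can also be granted per compilation; a hedged Gradle sketch using the Kotlin 1.4-era flag spelling:

// build.gradle.kts (illustrative): grant the opt-in for a whole compilation
// instead of annotating each declaration.
tasks.withType<org.jetbrains.kotlin.gradle.tasks.KotlinCompile>().configureEach {
    kotlinOptions.freeCompilerArgs += "-Xopt-in=kotlinx.serialization.ExperimentalSerializationApi"
}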

View File

@ -214,7 +214,7 @@ public fun IOPlugin.writeEnvelopeFile(
* Write separate meta and data files to given directory [path] * Write separate meta and data files to given directory [path]
*/ */
@DFExperimental @DFExperimental
fun IOPlugin.writeEnvelopeDirectory( public fun IOPlugin.writeEnvelopeDirectory(
path: Path, path: Path,
envelope: Envelope, envelope: Envelope,
metaFormat: MetaFormatFactory = JsonMetaFormat, metaFormat: MetaFormatFactory = JsonMetaFormat,

View File

@ -17,8 +17,6 @@ public class Laminate(layers: List<Meta>) : MetaBase() {
} }
} }
public constructor(vararg layers: Meta?) : this(layers.filterNotNull())
override val items: Map<NameToken, MetaItem<Meta>> by lazy { override val items: Map<NameToken, MetaItem<Meta>> by lazy {
layers.map { it.items.keys }.flatten().associateWith { key -> layers.map { it.items.keys }.flatten().associateWith { key ->
layers.asSequence().map { it.items[key] }.filterNotNull().let(replaceRule) layers.asSequence().map { it.items[key] }.filterNotNull().let(replaceRule)
@ -80,6 +78,9 @@ public class Laminate(layers: List<Meta>) : MetaBase() {
} }
} }
@Suppress("FunctionName")
public fun Laminate(vararg layers: Meta?): Laminate = Laminate(layers.filterNotNull())
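
Here the vararg secondary constructor of Laminate becomes a top-level factory function with the same name, which keeps call sites unchanged while the null filtering happens before the primary constructor runs. A toy version of that pattern, where Layers is a hypothetical class:

// The class keeps a single list-based constructor...
class Layers(val items: List<String>)

// ...and a factory function named like the class provides the null-filtering vararg entry point.
@Suppress("FunctionName")
fun Layers(vararg items: String?): Layers = Layers(items.filterNotNull())

fun main() {
    println(Layers("a", null, "b").items) // prints [a, b]
}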
/** /**
* Performance optimized version of get method * Performance optimized version of get method
*/ */

View File

@ -132,11 +132,12 @@ public interface Meta : MetaRepr, ItemProvider {
override fun toMeta(): Meta = seal() override fun toMeta(): Meta = seal()
override fun equals(other: Any?): Boolean //TODO to be restored on 1.4.30 after https://youtrack.jetbrains.com/issue/KT-41765 is fixed
// override fun equals(other: Any?): Boolean
override fun hashCode(): Int //
// override fun hashCode(): Int
override fun toString(): String //
// override fun toString(): String
public companion object { public companion object {
public const val TYPE: String = "meta" public const val TYPE: String = "meta"

View File

@ -138,8 +138,8 @@ public inline class MetaTransformation(public val transformations: Collection<Tr
} }
} }
companion object { public companion object {
fun make(block: MetaTransformationBuilder.() -> Unit): MetaTransformation = public fun make(block: MetaTransformationBuilder.() -> Unit): MetaTransformation =
MetaTransformationBuilder().apply(block).build() MetaTransformationBuilder().apply(block).build()
} }
} }

View File

@ -51,7 +51,7 @@ public interface Value {
override fun hashCode(): Int override fun hashCode(): Int
public companion object { public companion object {
public const val TYPE: String = "value" public const val TARGET: String = "value"
/** /**
* Convert object to value * Convert object to value

View File

@ -1,7 +1,7 @@
plugins { plugins {
id("ru.mipt.npm.mpp") id("ru.mipt.npm.mpp")
id("ru.mipt.npm.node") id("ru.mipt.npm.node")
// id("ru.mipt.npm.native") id("ru.mipt.npm.native")
} }
kotlin { kotlin {

View File

@ -0,0 +1,6 @@
package hep.dataforge.output
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.Dispatchers
public actual val Dispatchers.Output: CoroutineDispatcher get() = Dispatchers.Default

View File

@ -1,5 +1,7 @@
plugins { plugins {
id("ru.mipt.npm.mpp") id("ru.mipt.npm.mpp")
id("ru.mipt.npm.node")
id("ru.mipt.npm.native")
} }
kotlin { kotlin {

View File

@ -1,5 +1,7 @@
plugins { plugins {
id("ru.mipt.npm.mpp") id("ru.mipt.npm.mpp")
id("ru.mipt.npm.node")
id("ru.mipt.npm.native")
} }
kotlin { kotlin {

View File

@ -1,7 +1,7 @@
package hep.dataforge.workspace package hep.dataforge.workspace
import hep.dataforge.context.Context import hep.dataforge.context.Context
import hep.dataforge.context.resolve import hep.dataforge.context.gather
import hep.dataforge.context.toMap import hep.dataforge.context.toMap
import hep.dataforge.data.DataNode import hep.dataforge.data.DataNode
import hep.dataforge.meta.Meta import hep.dataforge.meta.Meta
@ -19,7 +19,7 @@ public class SimpleWorkspace(
) : Workspace { ) : Workspace {
override val tasks: Map<Name, Task<*>> by lazy { override val tasks: Map<Name, Task<*>> by lazy {
context.resolve<Task<*>>(Task.TYPE) + tasks.toMap() context.gather<Task<*>>(Task.TYPE) + tasks.toMap()
} }
public companion object { public companion object {

View File

@ -49,6 +49,6 @@ public interface Task<out R : Any> : Named, Described {
public fun run(workspace: Workspace, model: TaskModel): DataNode<R> public fun run(workspace: Workspace, model: TaskModel): DataNode<R>
public companion object { public companion object {
public const val TYPE = "task" public const val TYPE: String = "task"
} }
} }

View File

@ -46,7 +46,7 @@ public data class TaskModel(
} }
public companion object { public companion object {
public val MODEL_TARGET_KEY = "@target".asName() public val MODEL_TARGET_KEY: Name = "@target".asName()
} }
} }

View File

@ -31,7 +31,7 @@ public interface Workspace : ContextAware, Provider {
*/ */
public val tasks: Map<Name, Task<*>> public val tasks: Map<Name, Task<*>>
override fun provideTop(target: String): Map<Name, Any> { override fun content(target: String): Map<Name, Any> {
return when (target) { return when (target) {
"target", Meta.TYPE -> targets.mapKeys { it.key.toName() } "target", Meta.TYPE -> targets.mapKeys { it.key.toName() }
Task.TYPE -> tasks Task.TYPE -> tasks

View File

@ -13,7 +13,7 @@ public abstract class WorkspacePlugin : AbstractPlugin() {
private val _tasks = HashSet<Task<*>>() private val _tasks = HashSet<Task<*>>()
public val tasks: Collection<Task<*>> get() = _tasks public val tasks: Collection<Task<*>> get() = _tasks
override fun provideTop(target: String): Map<Name, Any> { override fun content(target: String): Map<Name, Any> {
return when (target) { return when (target) {
Task.TYPE -> tasks.toMap() Task.TYPE -> tasks.toMap()
else -> emptyMap() else -> emptyMap()

7
gradle.properties Normal file
View File

@ -0,0 +1,7 @@
kotlin.code.style=official
kotlin.parallel.tasks.in.project=true
kotlin.mpp.enableGranularSourceSetsMetadata=true
org.gradle.jvmargs=-XX:MaxMetaspaceSize=512m
org.gradle.parallel=true
systemProp.org.gradle.internal.publish.checksums.insecure=true