Merge branch 'dev' into distributed

Author: Andrey Stoyan
Date: 2022-05-25 14:08:56 +03:00
Commit: 9c55d26be5
38 changed files with 327 additions and 84 deletions

View File

@@ -2,8 +2,11 @@
## [Unreleased]
### Added
+- Add `specOrNull` delegate to meta and Scheme
+- Suspended read methods to the `Binary`
### Changed
+- `Factory` is now `fun interface` and uses `build` instead of `invoke`. `invoke` moved to an extension.
### Deprecated

View File

@@ -4,7 +4,7 @@ plugins {
allprojects {
group = "space.kscience"
-version = "0.5.2"
+version = "0.5.3-dev-4"
repositories{
mavenCentral()
}

View File

@@ -68,7 +68,7 @@ public class ContextBuilder internal constructor(
// Add if does not exist
if (existing == null) {
//TODO bypass if parent already has plugin with given meta?
-val plugin = factory(meta, parent)
+val plugin = factory.build(parent, meta)
for ((depFactory, deoMeta) in plugin.dependsOn()) {
addPlugin(depFactory, deoMeta)

View File

@@ -2,6 +2,11 @@ package space.kscience.dataforge.context
import space.kscience.dataforge.meta.Meta
-public interface Factory<out T : Any> {
+public fun interface Factory<out T> {
-public operator fun invoke(meta: Meta = Meta.EMPTY, context: Context = Global): T
+public fun build(context: Context, meta: Meta): T
}
+public operator fun <T> Factory<T>.invoke(
+meta: Meta = Meta.EMPTY,
+context: Context = Global,
+): T = build(context, meta)
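To illustrate the migration, a minimal sketch of the new call sites; MyService is a hypothetical class used only for this example:

import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.Factory
import space.kscience.dataforge.context.Global
import space.kscience.dataforge.context.invoke
import space.kscience.dataforge.meta.Meta

class MyService(val context: Context, val meta: Meta)

// SAM conversion is available because Factory is now a fun interface
val myFactory = Factory<MyService> { context, meta -> MyService(context, meta) }

val explicit = myFactory.build(Global, Meta.EMPTY) // the new primary entry point
val viaExtension = myFactory()                     // the invoke extension defaults to Global and Meta.EMPTY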

View File

@@ -63,7 +63,7 @@ public class DefaultLogManager : AbstractPlugin(), LogManager {
override val tag: PluginTag get() = Companion.tag
public companion object : PluginFactory<DefaultLogManager> {
-override fun invoke(meta: Meta, context: Context): DefaultLogManager = DefaultLogManager()
+override fun build(context: Context, meta: Meta): DefaultLogManager = DefaultLogManager()
override val tag: PluginTag = PluginTag(group = PluginTag.DATAFORGE_GROUP, name = "log.default")
override val type: KClass<out DefaultLogManager> = DefaultLogManager::class
@@ -75,7 +75,7 @@ public class DefaultLogManager : AbstractPlugin(), LogManager {
*/
public val Context.logger: LogManager
get() = plugins.find(inherit = true) { it is LogManager } as? LogManager
-?: getGlobalLoggerFactory()(context = Global).apply { attach(Global) }
+?: getGlobalLoggerFactory().build(context = Global, meta = Meta.EMPTY).apply { attach(Global) }
/**
* The named proxy logger for a context member

View File

@@ -18,7 +18,7 @@ public interface PluginFactory<T : Plugin> : Factory<T> {
* Plugin factory created for the specific actual plugin
*/
internal class DeFactoPluginFactory<T : Plugin>(val plugin: T) : PluginFactory<T> {
-override fun invoke(meta: Meta, context: Context): T = plugin
+override fun build(context: Context, meta: Meta): T = plugin
override val tag: PluginTag get() = plugin.tag
override val type: KClass<out T> get() = plugin::class
}

View File

@@ -22,7 +22,7 @@ public class ConsoleLogManager : AbstractPlugin(), LogManager {
override val tag: PluginTag get() = Companion.tag
public companion object : PluginFactory<ConsoleLogManager> {
-override fun invoke(meta: Meta, context: Context): ConsoleLogManager = ConsoleLogManager()
+override fun build(context: Context, meta: Meta): ConsoleLogManager = ConsoleLogManager()
override val tag: PluginTag = PluginTag(group = PluginTag.DATAFORGE_GROUP, name = "log.jsConsole")
override val type: KClass<out ConsoleLogManager> = ConsoleLogManager::class

View File

@@ -24,7 +24,7 @@ public class SlfLogManager : AbstractPlugin(), LogManager {
override val tag: PluginTag get() = Companion.tag
public companion object : PluginFactory<SlfLogManager> {
-override fun invoke(meta: Meta, context: Context): SlfLogManager = SlfLogManager()
+override fun build(context: Context, meta: Meta): SlfLogManager = SlfLogManager()
override val tag: PluginTag = PluginTag(group = PluginTag.DATAFORGE_GROUP, name = "log.kotlinLogging")
override val type: KClass<out SlfLogManager> = SlfLogManager::class

View File

@@ -3,11 +3,25 @@ package space.kscience.dataforge.data
import kotlin.coroutines.CoroutineContext
public enum class GoalExecutionRestrictionPolicy {
+/**
+ * Allow eager execution
+ */
NONE,
+/**
+ * Give warning on eager execution
+ */
WARNING,
+/**
+ * Throw error on eager execution
+ */
ERROR
}
+/**
+ * A special coroutine context key that allows or disallows goal execution during configuration time (eager execution).
+ */
public class GoalExecutionRestriction(
public val policy: GoalExecutionRestrictionPolicy = GoalExecutionRestrictionPolicy.ERROR,
) : CoroutineContext.Element {
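For illustration, a hedged sketch of installing the restriction element into a coroutine context (the configuration body is left empty):

import kotlinx.coroutines.runBlocking
import kotlinx.coroutines.withContext
import space.kscience.dataforge.data.GoalExecutionRestriction
import space.kscience.dataforge.data.GoalExecutionRestrictionPolicy

fun main() = runBlocking {
    // goals started inside this block are checked against the WARNING policy
    withContext(GoalExecutionRestriction(GoalExecutionRestrictionPolicy.WARNING)) {
        // configuration-time code that should not trigger eager goal execution
    }
}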

View File

@@ -2,6 +2,9 @@ package space.kscience.dataforge.data
import kotlin.coroutines.CoroutineContext
+/**
+ * Coroutine context element that provides logging capabilities
+ */
public interface GoalLogger : CoroutineContext.Element {
override val key: CoroutineContext.Key<*> get() = GoalLogger

View File

@@ -2,9 +2,11 @@ package space.kscience.dataforge.distributed
import io.lambdarpc.utils.Endpoint
import space.kscience.dataforge.context.AbstractPlugin
+import space.kscience.dataforge.context.Global
import space.kscience.dataforge.context.Plugin
import space.kscience.dataforge.context.PluginFactory
import space.kscience.dataforge.context.PluginTag
+import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.workspace.SerializableResultTask
import space.kscience.dataforge.workspace.Task
@@ -16,7 +18,8 @@ import space.kscience.dataforge.workspace.Task
*/
public class RemotePlugin<P : Plugin>(private val plugin: P, private val endpoint: String) : AbstractPlugin() {
-public constructor(factory: PluginFactory<P>, endpoint: String) : this(factory(), endpoint)
+// TODO
+public constructor(factory: PluginFactory<P>, endpoint: String) : this(factory.build(Global, Meta.EMPTY), endpoint)
override val tag: PluginTag
get() = plugin.tag

View File

@@ -28,13 +28,13 @@ internal class MyPlugin1 : WorkspacePlugin() {
}
companion object Factory : PluginFactory<MyPlugin1> {
-override fun invoke(meta: Meta, context: Context): MyPlugin1 = MyPlugin1()
override val tag: PluginTag
get() = PluginTag("Plg1")
override val type: KClass<out MyPlugin1>
get() = MyPlugin1::class
+override fun build(context: Context, meta: Meta): MyPlugin1 = MyPlugin1()
}
}
@@ -50,12 +50,12 @@ internal class MyPlugin2 : WorkspacePlugin() {
}
companion object Factory : PluginFactory<MyPlugin2> {
-override fun invoke(meta: Meta, context: Context): MyPlugin2 = MyPlugin2()
override val tag: PluginTag
get() = PluginTag("Plg2")
override val type: KClass<out MyPlugin2>
get() = MyPlugin2::class
+override fun build(context: Context, meta: Meta): MyPlugin2 = MyPlugin2()
}
}

View File

@@ -18,7 +18,7 @@ kotlin {
commonMain {
dependencies {
api(project(":dataforge-context"))
-api("io.ktor:ktor-io:${ru.mipt.npm.gradle.KScienceVersions.ktorVersion}")
+api(npmlibs.ktor.io)
}
}
}

View File

@@ -5,6 +5,7 @@ import io.ktor.utils.io.core.Output
import io.ktor.utils.io.core.readBytes
import io.ktor.utils.io.core.readUTF8Line
import space.kscience.dataforge.context.Context
+import space.kscience.dataforge.context.Global
import space.kscience.dataforge.io.*
import space.kscience.dataforge.io.IOFormat.Companion.META_KEY
import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
@@ -71,7 +72,7 @@ public class FrontMatterEnvelopeFormat(
metaFormatFactory: MetaFormatFactory,
formatMeta: Meta,
) {
-val metaFormat = metaFormatFactory(formatMeta, this@FrontMatterEnvelopeFormat.io.context)
+val metaFormat = metaFormatFactory.build(this@FrontMatterEnvelopeFormat.io.context, formatMeta)
output.writeRawString("$SEPARATOR\r\n")
metaFormat.run { this.writeObject(output, envelope.meta) }
output.writeRawString("$SEPARATOR\r\n")
@@ -91,20 +92,20 @@ public class FrontMatterEnvelopeFormat(
private val metaTypeRegex = "---(\\w*)\\s*".toRegex()
-override fun invoke(meta: Meta, context: Context): EnvelopeFormat {
+override fun build(context: Context, meta: Meta): EnvelopeFormat {
return FrontMatterEnvelopeFormat(context.io, meta)
}
override fun peekFormat(io: IOPlugin, binary: Binary): EnvelopeFormat? = binary.read {
val line = readSafeUtf8Line()
return@read if (line.startsWith("---")) {
-invoke()
+default
} else {
null
}
}
-private val default by lazy { invoke() }
+private val default by lazy { build(Global, Meta.EMPTY) }
override fun readPartial(input: Input): PartialEnvelope =
default.readPartial(input)

View File

@@ -115,13 +115,13 @@ public class YamlMetaFormat(private val meta: Meta) : MetaFormat {
}
public companion object : MetaFormatFactory {
-override fun invoke(meta: Meta, context: Context): MetaFormat = YamlMetaFormat(meta)
+override fun build(context: Context, meta: Meta): MetaFormat = YamlMetaFormat(meta)
override val shortName: String = "yaml"
override val key: Short = 0x594d //YM
-private val default = YamlMetaFormat()
+private val default = YamlMetaFormat(Meta.EMPTY)
override fun writeMeta(output: Output, meta: Meta, descriptor: MetaDescriptor?): Unit =
default.writeMeta(output, meta, descriptor)

View File

@@ -27,6 +27,6 @@ public class YamlPlugin(meta: Meta) : AbstractPlugin(meta) {
override val tag: PluginTag = PluginTag("io.yaml", group = PluginTag.DATAFORGE_GROUP)
override val type: KClass<out YamlPlugin> = YamlPlugin::class
-override fun invoke(meta: Meta, context: Context): YamlPlugin = YamlPlugin(meta)
+override fun build(context: Context, meta: Meta): YamlPlugin = YamlPlugin(meta)
}
}

View File

@@ -12,12 +12,15 @@ public interface Binary {
public val size: Int
/**
* Read maximum of [atMost] bytes as input from the binary, starting at [offset]. The generated input is always closed
* when leaving scope, so it could not be leaked outside of scope of [block].
*/
public fun <R> read(offset: Int = 0, atMost: Int = size - offset, block: Input.() -> R): R
+public suspend fun <R> readSuspend(offset: Int = 0, atMost: Int = size - offset, block: suspend Input.() -> R): R
public companion object {
public val EMPTY: Binary = ByteArrayBinary(ByteArray(0))
}
@@ -39,6 +42,21 @@ internal class ByteArrayBinary(
)
return input.use(block)
}
+override suspend fun <R> readSuspend(offset: Int, atMost: Int, block: suspend Input.() -> R): R {
+require(offset >= 0) { "Offset must be positive" }
+require(offset < array.size) { "Offset $offset is larger than array size" }
+val input = ByteReadPacket(
+array,
+offset + start,
+min(atMost, size - offset)
+)
+return try {
+block(input)
+} finally {
+input.close()
+}
+}
}
public fun ByteArray.asBinary(): Binary = ByteArrayBinary(this)
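A small usage sketch of the new suspending read; the byte values are illustrative only:

import io.ktor.utils.io.core.readBytes
import kotlinx.coroutines.runBlocking
import space.kscience.dataforge.io.asBinary

fun main() = runBlocking {
    val binary = byteArrayOf(1, 2, 3, 4, 5).asBinary()
    // suspending counterpart of read: take at most three bytes from the start of the binary
    val head = binary.readSuspend(offset = 0, atMost = 3) { readBytes() }
    println(head.toList()) // [1, 2, 3]
}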

View File

@@ -33,7 +33,7 @@ public class EnvelopeBuilder : Envelope {
/**
* Construct a data binary from given builder
*/
-public fun data(block: Output.() -> Unit) {
+public inline fun data(block: Output.() -> Unit) {
data = buildByteArray { block() }.asBinary()
}

View File

@@ -42,7 +42,7 @@ public interface EnvelopeFormatFactory : IOFormatFactory<Envelope>, EnvelopeForm
override val name: Name get() = "envelope".asName()
override val type: KType get() = typeOf<Envelope>()
-override fun invoke(meta: Meta, context: Context): EnvelopeFormat
+override fun build(context: Context, meta: Meta): EnvelopeFormat
/**
* Try to infer specific format from input and return null if the attempt is failed.

View File

@@ -102,7 +102,7 @@ public interface IOFormatFactory<T : Any> : Factory<IOFormat<T>>, Named, MetaRep
public fun <T : Any> IOFormat<T>.toBinary(obj: T): Binary = Binary { writeObject(this, obj) }
public object DoubleIOFormat : IOFormat<Double>, IOFormatFactory<Double> {
-override fun invoke(meta: Meta, context: Context): IOFormat<Double> = this
+override fun build(context: Context, meta: Meta): IOFormat<Double> = this
override val name: Name = "double".asName()

View File

@@ -25,7 +25,7 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
return ioFormatFactories.find { it.name == name }?.let {
@Suppress("UNCHECKED_CAST")
if (it.type != type) error("Format type ${it.type} is not the same as requested type $type")
-else it.invoke(item[META_KEY] ?: Meta.EMPTY, context) as IOFormat<T>
+else it.build(context, item[META_KEY] ?: Meta.EMPTY) as IOFormat<T>
}
}
@@ -34,17 +34,17 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
}
public fun resolveMetaFormat(key: Short, meta: Meta = Meta.EMPTY): MetaFormat? =
-metaFormatFactories.find { it.key == key }?.invoke(meta)
+metaFormatFactories.find { it.key == key }?.build(context, meta)
public fun resolveMetaFormat(name: String, meta: Meta = Meta.EMPTY): MetaFormat? =
-metaFormatFactories.find { it.shortName == name }?.invoke(meta)
+metaFormatFactories.find { it.shortName == name }?.build(context, meta)
public val envelopeFormatFactories: Collection<EnvelopeFormatFactory> by lazy {
context.gather<EnvelopeFormatFactory>(ENVELOPE_FORMAT_TYPE).values
}
private fun resolveEnvelopeFormat(name: Name, meta: Meta = Meta.EMPTY): EnvelopeFormat? =
-envelopeFormatFactories.find { it.name == name }?.invoke(meta, context)
+envelopeFormatFactories.find { it.name == name }?.build(context, meta)
public fun resolveEnvelopeFormat(item: Meta): EnvelopeFormat? {
val name = item.string ?: item[NAME_KEY]?.string ?: error("Envelope format name not defined")
@@ -66,7 +66,7 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
override val tag: PluginTag = PluginTag("io", group = PluginTag.DATAFORGE_GROUP)
override val type: KClass<out IOPlugin> = IOPlugin::class
-override fun invoke(meta: Meta, context: Context): IOPlugin = IOPlugin(meta)
+override fun build(context: Context, meta: Meta): IOPlugin = IOPlugin(meta)
}
}

View File

@@ -41,7 +41,7 @@ public class JsonMetaFormat(private val json: Json = DEFAULT_JSON) : MetaFormat
public companion object : MetaFormatFactory {
public val DEFAULT_JSON: Json = Json { prettyPrint = true }
-override fun invoke(meta: Meta, context: Context): MetaFormat = default
+override fun build(context: Context, meta: Meta): MetaFormat = default
override val shortName: String = "json"
override val key: Short = 0x4a53//"JS"

View File

@@ -5,6 +5,7 @@ import io.ktor.utils.io.core.Input
import io.ktor.utils.io.core.Output
import io.ktor.utils.io.core.use
import space.kscience.dataforge.context.Context
+import space.kscience.dataforge.context.Global
import space.kscience.dataforge.io.MetaFormatFactory.Companion.META_FORMAT_TYPE
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
@@ -46,7 +47,7 @@ public interface MetaFormatFactory : IOFormatFactory<Meta>, MetaFormat {
public val key: Short get() = name.hashCode().toShort()
-override operator fun invoke(meta: Meta, context: Context): MetaFormat
+override fun build(context: Context, meta: Meta): MetaFormat
public companion object {
public const val META_FORMAT_TYPE: String = "io.format.meta"
@@ -59,12 +60,12 @@ public fun Meta.toString(format: MetaFormat): String = buildByteArray {
}
}.decodeToString()
-public fun Meta.toString(formatFactory: MetaFormatFactory): String = toString(formatFactory())
+public fun Meta.toString(formatFactory: MetaFormatFactory): String = toString(formatFactory.build(Global, Meta.EMPTY))
public fun MetaFormat.parse(str: String): Meta {
return ByteReadPacket(str.encodeToByteArray()).use { readObject(it) }
}
-public fun MetaFormatFactory.parse(str: String, formatMeta: Meta): Meta = invoke(formatMeta).parse(str)
+public fun MetaFormatFactory.parse(str: String, formatMeta: Meta): Meta = build(Global, formatMeta).parse(str)

View File

@@ -2,6 +2,7 @@ package space.kscience.dataforge.io
import io.ktor.utils.io.core.*
import space.kscience.dataforge.context.Context
+import space.kscience.dataforge.context.Global
import space.kscience.dataforge.io.IOFormat.Companion.META_KEY
import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
import space.kscience.dataforge.meta.Meta
@@ -47,7 +48,7 @@ public class TaggedEnvelopeFormat(
metaFormatFactory: MetaFormatFactory,
formatMeta: Meta,
) {
-val metaFormat = metaFormatFactory.invoke(formatMeta, this@TaggedEnvelopeFormat.io.context)
+val metaFormat = metaFormatFactory.build(this@TaggedEnvelopeFormat.io.context, formatMeta)
val metaBytes = metaFormat.toBinary(envelope.meta)
val actualSize: ULong = (envelope.data?.size ?: 0).toULong()
val tag = Tag(metaFormatFactory.key, metaBytes.size.toUInt() + 2u, actualSize)
@@ -118,7 +119,7 @@ public class TaggedEnvelopeFormat(
override val name: Name = super.name + "tagged"
-override fun invoke(meta: Meta, context: Context): EnvelopeFormat {
+override fun build(context: Context, meta: Meta): EnvelopeFormat {
val io = context.io
val metaFormatName = meta["name"].string?.let { Name.parse(it) } ?: JsonMetaFormat.name
@@ -161,7 +162,7 @@ public class TaggedEnvelopeFormat(
}
}
-private val default by lazy { invoke() }
+private val default by lazy { build(Global, Meta.EMPTY) }
override fun readPartial(input: Input): PartialEnvelope =
default.run { readPartial(input) }

View File

@@ -2,6 +2,7 @@ package space.kscience.dataforge.io
import io.ktor.utils.io.core.*
import space.kscience.dataforge.context.Context
+import space.kscience.dataforge.context.Global
import space.kscience.dataforge.io.IOFormat.Companion.META_KEY
import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
import space.kscience.dataforge.meta.Meta
@@ -34,7 +35,7 @@ public class TaglessEnvelopeFormat(
metaFormatFactory: MetaFormatFactory,
formatMeta: Meta
) {
-val metaFormat = metaFormatFactory(formatMeta, this.io.context)
+val metaFormat = metaFormatFactory.build(this.io.context, formatMeta)
//printing header
output.writeRawString(TAGLESS_ENVELOPE_HEADER + "\r\n")
@@ -193,9 +194,9 @@ public class TaglessEnvelopeFormat(
override val name: Name = TAGLESS_ENVELOPE_TYPE.asName()
-override fun invoke(meta: Meta, context: Context): EnvelopeFormat = TaglessEnvelopeFormat(context.io, meta)
+override fun build(context: Context, meta: Meta): EnvelopeFormat = TaglessEnvelopeFormat(context.io, meta)
-private val default by lazy { invoke() }
+private val default by lazy { build(Global, Meta.EMPTY) }
override fun readPartial(input: Input): PartialEnvelope =
default.run { readPartial(input) }

View File

@@ -3,6 +3,8 @@ package space.kscience.dataforge.io
import io.ktor.utils.io.charsets.Charsets
import io.ktor.utils.io.charsets.decodeExactBytes
import io.ktor.utils.io.core.*
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.misc.DFExperimental
import kotlin.math.min
public fun Output.writeRawString(str: String) {
@@ -41,11 +43,58 @@ public class BinaryView(private val source: Binary, private val start: Int, over
require(start + size <= source.size) { "View boundary is outside source binary size" }
}
-override fun <R> read(offset: Int, atMost: Int, block: Input.() -> R): R {
-return source.read(start + offset, min(size, atMost), block)
-}
+override fun <R> read(offset: Int, atMost: Int, block: Input.() -> R): R =
+source.read(start + offset, min(size, atMost), block)
+override suspend fun <R> readSuspend(offset: Int, atMost: Int, block: suspend Input.() -> R): R =
+source.readSuspend(start + offset, min(size, atMost), block)
}
public fun Binary.view(start: Int, size: Int): BinaryView = BinaryView(this, start, size)
public operator fun Binary.get(range: IntRange): BinaryView = view(range.first, range.last - range.first)
+/**
+ * Return inferred [EnvelopeFormat] if only one format could read given file. If no format accepts the binary, return null. If
+ * multiple formats accepts binary, throw an error.
+ */
+public fun IOPlugin.peekBinaryEnvelopeFormat(binary: Binary): EnvelopeFormat? {
+val formats = envelopeFormatFactories.mapNotNull { factory ->
+factory.peekFormat(this@peekBinaryEnvelopeFormat, binary)
+}
+return when (formats.size) {
+0 -> null
+1 -> formats.first()
+else -> error("Envelope format binary recognition clash: $formats")
+}
+}
+/**
+ * Zero-copy read this binary as an envelope using given [this@toEnvelope]
+ */
+@DFExperimental
+public fun EnvelopeFormat.readBinary(binary: Binary): Envelope {
+val partialEnvelope: PartialEnvelope = binary.read {
+run {
+readPartial(this@read)
+}
+}
+val offset: Int = partialEnvelope.dataOffset.toInt()
+val size: Int = partialEnvelope.dataSize?.toInt() ?: (binary.size - offset)
+val envelopeBinary = BinaryView(binary, offset, size)
+return SimpleEnvelope(partialEnvelope.meta, envelopeBinary)
+}
+/**
+ * A zero-copy read from
+ */
+@DFExperimental
+public fun IOPlugin.readEnvelopeBinary(
+binary: Binary,
+readNonEnvelopes: Boolean = false,
+formatPicker: IOPlugin.(Binary) -> EnvelopeFormat? = IOPlugin::peekBinaryEnvelopeFormat,
+): Envelope = formatPicker(binary)?.readBinary(binary) ?: if (readNonEnvelopes) {
+// if no format accepts file, read it as binary
+SimpleEnvelope(Meta.EMPTY, binary)
+} else error("Can't infer format for $binary")
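A hypothetical call site for the new binary-based envelope reading; decode is an illustrative helper, not part of the library:

import space.kscience.dataforge.io.*
import space.kscience.dataforge.misc.DFExperimental

@OptIn(DFExperimental::class)
fun IOPlugin.decode(bytes: ByteArray): Envelope =
    // fall back to a plain binary envelope when no registered format recognizes the bytes
    readEnvelopeBinary(bytes.asBinary(), readNonEnvelopes = true)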

View File

@@ -2,6 +2,7 @@ package space.kscience.dataforge.io
import io.ktor.utils.io.core.*
import io.ktor.utils.io.streams.asOutput
+import kotlinx.coroutines.runBlocking
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.meta.isEmpty
@@ -9,7 +10,6 @@ import space.kscience.dataforge.misc.DFExperimental
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.StandardOpenOption
-import kotlin.io.path.ExperimentalPathApi
import kotlin.io.path.inputStream
import kotlin.math.min
import kotlin.reflect.full.isSupertypeOf
@@ -23,8 +23,11 @@ internal class PathBinary(
override val size: Int = Files.size(path).toInt() - fileOffset,
) : Binary {
-@OptIn(ExperimentalPathApi::class)
-override fun <R> read(offset: Int, atMost: Int, block: Input.() -> R): R {
+override fun <R> read(offset: Int, atMost: Int, block: Input.() -> R): R = runBlocking {
+readSuspend(offset, atMost, block)
+}
+override suspend fun <R> readSuspend(offset: Int, atMost: Int, block: suspend Input.() -> R): R {
val actualOffset = offset + fileOffset
val actualSize = min(atMost, size - offset)
val array = path.inputStream().use {
@@ -69,15 +72,14 @@ public fun Path.rewrite(block: Output.() -> Unit): Unit {
stream.asOutput().use(block)
}
-public fun Path.readEnvelope(format: EnvelopeFormat): Envelope {
-val partialEnvelope: PartialEnvelope = asBinary().read {
-format.run {
+@DFExperimental
+public fun EnvelopeFormat.readFile(path: Path): Envelope {
+val partialEnvelope: PartialEnvelope = path.asBinary().read {
readPartial(this@read)
-}
}
val offset: Int = partialEnvelope.dataOffset.toInt()
-val size: Int = partialEnvelope.dataSize?.toInt() ?: (Files.size(this).toInt() - offset)
-val binary = PathBinary(this, offset, size)
+val size: Int = partialEnvelope.dataSize?.toInt() ?: (Files.size(path).toInt() - offset)
+val binary = PathBinary(path, offset, size)
return SimpleEnvelope(partialEnvelope.meta, binary)
}
@@ -110,10 +112,8 @@ public fun IOPlugin.readMetaFile(
val extension = actualPath.fileName.toString().substringAfterLast('.')
val metaFormat = formatOverride ?: resolveMetaFormat(extension) ?: error("Can't resolve meta format $extension")
-return metaFormat.run {
-actualPath.read {
-readMeta(this, descriptor)
-}
+return actualPath.read {
+metaFormat.readMeta(this, descriptor)
}
}
@@ -145,15 +145,7 @@ public fun IOPlugin.writeMetaFile(
*/
public fun IOPlugin.peekFileEnvelopeFormat(path: Path): EnvelopeFormat? {
val binary = path.asBinary()
-val formats = envelopeFormatFactories.mapNotNull { factory ->
-factory.peekFormat(this@peekFileEnvelopeFormat, binary)
-}
-return when (formats.size) {
-0 -> null
-1 -> formats.first()
-else -> error("Envelope format binary recognition clash: $formats")
-}
+return peekBinaryEnvelopeFormat(binary)
}
public val IOPlugin.Companion.META_FILE_NAME: String get() = "@meta"
@@ -204,7 +196,7 @@ public fun IOPlugin.readEnvelopeFile(
}
return formatPicker(path)?.let { format ->
-path.readEnvelope(format)
+format.readFile(path)
} ?: if (readNonEnvelopes) { // if no format accepts file, read it as binary
SimpleEnvelope(Meta.EMPTY, path.asBinary())
} else error("Can't infer format for file $path")

View File

@@ -0,0 +1,9 @@
+package space.kscience.dataforge.io
+import io.ktor.utils.io.core.Input
+import io.ktor.utils.io.streams.asInput
+public fun IOPlugin.resource(name: String): Binary? = context.javaClass.getResource(name)?.readBytes()?.asBinary()
+public inline fun <R> IOPlugin.readResource(name: String, block: Input.() -> R): R =
+context.javaClass.getResource(name)?.openStream()?.asInput()?.block() ?: error("Can't read resource $name")
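A hypothetical JVM usage sketch; the resource name is an assumption for illustration:

import io.ktor.utils.io.core.readUTF8Line
import space.kscience.dataforge.io.IOPlugin
import space.kscience.dataforge.io.readResource

// reads the first line of an assumed classpath resource through the IO plugin
fun IOPlugin.firstConfigLine(): String? =
    readResource("/config/example.meta") { readUTF8Line() }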

View File

@@ -154,7 +154,7 @@ public inline fun <T : Scheme> T.copy(spec: SchemeSpec<T>, block: T.() -> Unit =
*/
public open class SchemeSpec<out T : Scheme>(
private val builder: () -> T,
-) : Specification<T>, Described {
+) : Specification<T> {
override fun read(source: Meta): T = builder().also {
it.wrap(MutableMeta().withDefault(source))

View File

@@ -1,12 +1,13 @@
package space.kscience.dataforge.meta
+import space.kscience.dataforge.meta.descriptors.Described
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
import kotlin.properties.ReadWriteProperty
import kotlin.reflect.KProperty
-public interface ReadOnlySpecification<out T : Any> {
+public interface ReadOnlySpecification<out T : Any> : Described {
/**
* Read generic read-only meta with this [Specification] producing instance of desired type.
@@ -43,7 +44,7 @@ public interface Specification<out T : Any> : ReadOnlySpecification<T> {
*/
public fun <T : Any> MutableMeta.updateWith(
spec: Specification<T>,
-action: T.() -> Unit
+action: T.() -> Unit,
): T = spec.write(this).apply(action)
@@ -82,6 +83,31 @@ public fun <T : Scheme> Scheme.spec(
key: Name? = null,
): ReadWriteProperty<Any?, T> = meta.spec(spec, key)
+/**
+ * A delegate that uses a [Specification] to wrap a child of this provider.
+ * Returns null if meta with given name does not exist.
+ */
+public fun <T : Scheme> MutableMeta.specOrNull(
+spec: Specification<T>,
+key: Name? = null,
+): ReadWriteProperty<Any?, T?> = object : ReadWriteProperty<Any?, T?> {
+override fun getValue(thisRef: Any?, property: KProperty<*>): T? {
+val name = key ?: property.name.asName()
+return if (get(name) == null) null else spec.write(getOrCreate(name))
+}
+override fun setValue(thisRef: Any?, property: KProperty<*>, value: T?) {
+val name = key ?: property.name.asName()
+if (value == null) remove(name)
+else set(name, value.toMeta())
+}
+}
+public fun <T : Scheme> Scheme.specOrNull(
+spec: Specification<T>,
+key: Name? = null,
+): ReadWriteProperty<Any?, T?> = meta.specOrNull(spec, key)
/**
* A delegate that uses a [Specification] to wrap a list of child providers.
* If children are mutable, the changes in list elements are reflected on them.
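For illustration, a hedged sketch of the new delegate on a pair of hypothetical schemes:

import space.kscience.dataforge.meta.*

class ChildScheme : Scheme() {
    companion object : SchemeSpec<ChildScheme>(::ChildScheme)
}

class ParentScheme : Scheme() {
    // resolves to null until a "child" node appears in the backing meta
    var child by specOrNull(ChildScheme)

    companion object : SchemeSpec<ParentScheme>(::ParentScheme)
}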

View File

@@ -10,7 +10,7 @@ import space.kscience.dataforge.values.ValueType
import space.kscience.dataforge.values.asValue
import kotlin.collections.set
-public class MetaDescriptorBuilder internal constructor() {
+public class MetaDescriptorBuilder @PublishedApi internal constructor() {
public var info: String? = null
public var children: MutableMap<String, MetaDescriptorBuilder> = linkedMapOf()
public var multiple: Boolean = false
@@ -78,6 +78,7 @@ public class MetaDescriptorBuilder internal constructor() {
allowedValues = values.map { Value.of(it) }
}
+@PublishedApi
internal fun build(): MetaDescriptor = MetaDescriptor(
info = info,
children = children.mapValues { it.value.build() },
@@ -93,7 +94,7 @@ public class MetaDescriptorBuilder internal constructor() {
public fun MetaDescriptorBuilder.item(name: String, block: MetaDescriptorBuilder.() -> Unit): MetaDescriptorBuilder =
item(Name.parse(name), block)
-public fun MetaDescriptor(block: MetaDescriptorBuilder.() -> Unit): MetaDescriptor =
+public inline fun MetaDescriptor(block: MetaDescriptorBuilder.() -> Unit): MetaDescriptor =
MetaDescriptorBuilder().apply(block).build()
/**
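The builder entry point is now inline; its call shape is unchanged. A small sketch with illustrative keys:

import space.kscience.dataforge.meta.descriptors.*

val sampleDescriptor = MetaDescriptor {
    info = "A hypothetical descriptor assembled with the inline builder"
    item("axis") {
        info = "Nested node description"
        multiple = true
    }
}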

View File

@@ -6,6 +6,10 @@ import space.kscience.dataforge.context.info
import space.kscience.dataforge.context.logger
import space.kscience.dataforge.data.GoalLogger
+/**
+ * A coroutine context key that injects a [Context] bound logger into the scope.
+ * The message body is computed asynchronously
+ */
public class ContextGoalLogger(public val context: Context) : GoalLogger {
override fun emit(vararg tags: String, message: suspend () -> String) {
context.launch {

View File

@@ -7,6 +7,8 @@ import space.kscience.dataforge.data.DataSetBuilder
import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.data.GoalExecutionRestriction
import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.MetaRepr
+import space.kscience.dataforge.meta.Specification
import space.kscience.dataforge.meta.descriptors.Described
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.misc.DFInternal
@@ -16,6 +18,10 @@ import space.kscience.dataforge.workspace.Task.Companion.TYPE
import kotlin.reflect.KType
import kotlin.reflect.typeOf
+/**
+ * A configurable task that could be executed on a workspace. The [TaskResult] represents a lazy result of the task.
+ * In general no computations should be made until the result is called.
+ */
@Type(TYPE)
public interface Task<out T : Any> : Described {
@@ -40,7 +46,26 @@ public interface SerializableResultTask<T : Any> : Task<T> {
public val resultSerializer: KSerializer<T>
}
-public class TaskResultBuilder<T : Any>(
+/**
+ * A [Task] with [Specification] for wrapping and unwrapping task configuration
+ */
+public interface TaskWithSpec<out T : Any, C : Any> : Task<T> {
+public val spec: Specification<C>
+override val descriptor: MetaDescriptor? get() = spec.descriptor
+public suspend fun execute(workspace: Workspace, taskName: Name, configuration: C): TaskResult<T>
+override suspend fun execute(workspace: Workspace, taskName: Name, taskMeta: Meta): TaskResult<T> =
+execute(workspace, taskName, spec.read(taskMeta))
+}
+public suspend fun <T : Any, C : Any> TaskWithSpec<T, C>.execute(
+workspace: Workspace,
+taskName: Name,
+block: C.() -> Unit = {},
+): TaskResult<T> = execute(workspace, taskName, spec(block))
+public class TaskResultBuilder<in T : Any>(
public val workspace: Workspace,
public val taskName: Name,
public val taskMeta: Meta,
@@ -56,7 +81,6 @@ public class TaskResultBuilder<T : Any>(
* @param builder for resulting data set
*/
@Suppress("FunctionName")
-@DFInternal
public fun <T : Any> Task(
resultType: KType,
descriptor: MetaDescriptor? = null,
@@ -89,7 +113,6 @@ public class SerializableResultTaskImpl<T : Any>(
builder: suspend TaskResultBuilder<T>.() -> Unit,
) : SerializableResultTask<T>, Task<T> by Task(resultType, descriptor, builder)
-@OptIn(DFInternal::class)
@Suppress("FunctionName")
public inline fun <reified T : Any> Task(
descriptor: MetaDescriptor? = null,
@@ -103,3 +126,39 @@ public inline fun <reified T : Any> SerializableResultTask(
descriptor: MetaDescriptor? = null,
noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
): Task<T> = SerializableResultTaskImpl(typeOf<T>(), resultSerializer, descriptor, builder)
+/**
+ * Create a [Task] that composes a result using [builder]. Only data from the workspace could be used.
+ * Data dependency cycles are not allowed.
+ *
+ * @param resultType the type boundary for data produced by this task
+ * @param specification a specification for task configuration
+ * @param builder for resulting data set
+ */
+@Suppress("FunctionName")
+public fun <T : Any, C : MetaRepr> Task(
+resultType: KType,
+specification: Specification<C>,
+builder: suspend TaskResultBuilder<T>.(C) -> Unit,
+): TaskWithSpec<T, C> = object : TaskWithSpec<T, C> {
+override val spec: Specification<C> = specification
+override suspend fun execute(
+workspace: Workspace,
+taskName: Name,
+configuration: C,
+): TaskResult<T> = withContext(GoalExecutionRestriction() + workspace.goalLogger) {
+//TODO use safe builder and check for external data on add and detects cycles
+val taskMeta = configuration.toMeta()
+val dataset = DataTree<T>(resultType) {
+TaskResultBuilder(workspace, taskName, taskMeta, this).apply { builder(configuration) }
+}
+workspace.wrapResult(dataset, taskName, taskMeta)
+}
+}
+@Suppress("FunctionName")
+public inline fun <reified T : Any, C : MetaRepr> Task(
+specification: Specification<C>,
+noinline builder: suspend TaskResultBuilder<T>.(C) -> Unit,
+): Task<T> = Task(typeOf<T>(), specification, builder)

View File

@@ -9,7 +9,9 @@ import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.data.DataSetBuilder
import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.MetaRepr
import space.kscience.dataforge.meta.MutableMeta
+import space.kscience.dataforge.meta.Specification
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
import space.kscience.dataforge.misc.DFBuilder
@@ -29,13 +31,16 @@ public data class TaskReference<T : Any>(public val taskName: Name, public val t
error("Task $taskName does not belong to the workspace")
}
}
}
public interface TaskContainer {
+/**
+ * Register task in container
+ */
public fun registerTask(taskName: Name, task: Task<*>)
}
+@Deprecated("use buildTask instead", ReplaceWith("buildTask(name, descriptorBuilder, builder)"))
public inline fun <reified T : Any> TaskContainer.registerTask(
name: String,
resultSerializer: KSerializer<T>? = null,
@@ -48,6 +53,18 @@ public inline fun <reified T : Any> TaskContainer.registerTask(
registerTask(Name.parse(name), task)
}
+public inline fun <reified T : Any> TaskContainer.buildTask(
+name: String,
+descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
+noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
+): TaskReference<T> {
+val theName = Name.parse(name)
+val descriptor = MetaDescriptor(descriptorBuilder)
+val task = Task(descriptor, builder)
+registerTask(theName, task)
+return TaskReference(theName, task)
+}
public inline fun <reified T : Any> TaskContainer.task(
descriptor: MetaDescriptor,
resultSerializer: KSerializer<T>? = null,
@@ -60,6 +77,16 @@ public inline fun <reified T : Any> TaskContainer.task(
ReadOnlyProperty { _, _ -> TaskReference(taskName, task) }
}
+public inline fun <reified T : Any, C : MetaRepr> TaskContainer.task(
+specification: Specification<C>,
+noinline builder: suspend TaskResultBuilder<T>.(C) -> Unit,
+): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> = PropertyDelegateProvider { _, property ->
+val taskName = Name.parse(property.name)
+val task = Task(specification, builder)
+registerTask(taskName, task)
+ReadOnlyProperty { _, _ -> TaskReference(taskName, task) }
+}
public inline fun <reified T : Any> TaskContainer.task(
resultSerializer: KSerializer<T>? = null,
noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},

View File

@@ -34,7 +34,7 @@ class DataPropagationTestPlugin : WorkspacePlugin() {
override val type: KClass<out DataPropagationTestPlugin> = DataPropagationTestPlugin::class
-override fun invoke(meta: Meta, context: Context): DataPropagationTestPlugin = DataPropagationTestPlugin()
+override fun build(context: Context, meta: Meta): DataPropagationTestPlugin = DataPropagationTestPlugin()
override val tag: PluginTag = PluginTag("Test")
}

View File

@@ -22,7 +22,7 @@ import kotlin.test.assertTrue
* Make a fake-factory for a one single plugin. Useful for unique or test plugins
*/
public inline fun <reified P : Plugin> P.toFactory(): PluginFactory<P> = object : PluginFactory<P> {
-override fun invoke(meta: Meta, context: Context): P = this@toFactory
+override fun build(context: Context, meta: Meta): P = this@toFactory
override val tag: PluginTag = this@toFactory.tag
override val type: KClass<out P> = P::class

View File

@@ -1,11 +1,13 @@
-org.gradle.jvmargs=-XX:MaxMetaspaceSize=2G
+org.gradle.jvmargs=-XX:MaxMetaspaceSize=1G
org.gradle.parallel=true
kotlin.code.style=official
-kotlin.parallel.tasks.in.project=true
#kotlin.mpp.enableGranularSourceSetsMetadata=true
#kotlin.native.enableDependencyPropagation=false
kotlin.mpp.stability.nowarn=true
+publishing.github=false
publishing.sonatype=false
+toolsVersion=0.11.1-kotlin-1.6.10

View File

@@ -1,12 +1,19 @@
+rootProject.name = "dataforge-core"
+enableFeaturePreview("TYPESAFE_PROJECT_ACCESSORS")
+enableFeaturePreview("VERSION_CATALOGS")
pluginManagement {
+val toolsVersion: String by extra
repositories {
+mavenLocal()
maven("https://repo.kotlin.link")
mavenCentral()
gradlePluginPortal()
}
-val toolsVersion = "0.10.7"
plugins {
id("ru.mipt.npm.gradle.project") version toolsVersion
id("ru.mipt.npm.gradle.mpp") version toolsVersion
@@ -15,6 +22,23 @@ pluginManagement {
}
}
+dependencyResolutionManagement {
+val toolsVersion: String by extra
+repositories {
+mavenLocal()
+maven("https://repo.kotlin.link")
+mavenCentral()
+}
+versionCatalogs {
+create("npmlibs") {
+from("ru.mipt.npm:version-catalog:$toolsVersion")
+}
+}
+}
include(
":dataforge-meta",
":dataforge-io",