0.5.3-dev-4

parent 91621864c2
commit 11143e4ba1
@@ -3,6 +3,7 @@

## [Unreleased]

### Added

- Add `specOrNull` delegate to meta and Scheme
- Suspended read methods to the `Binary`

### Changed

- `Factory` is now `fun interface` and uses `build` instead of `invoke`. `invoke` moved to an extension.
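Below is a minimal usage sketch (not part of this commit) of the suspended `Binary` read mentioned above; the byte array and the `readInt` call are only illustrative.

```kotlin
import io.ktor.utils.io.core.*
import space.kscience.dataforge.io.asBinary

// Read the first Int from an in-memory binary without blocking a thread.
// Uses the new readSuspend with its default offset/atMost arguments.
suspend fun firstInt(bytes: ByteArray): Int =
    bytes.asBinary().readSuspend { readInt() }
```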
@@ -4,7 +4,7 @@ plugins {

allprojects {
    group = "space.kscience"
    version = "0.5.3-dev-1"
    version = "0.5.3-dev-4"

    repositories{
        mavenCentral()
    }
@@ -3,11 +3,25 @@ package space.kscience.dataforge.data

import kotlin.coroutines.CoroutineContext

public enum class GoalExecutionRestrictionPolicy {
    /**
     * Allow eager execution
     */
    NONE,

    /**
     * Give warning on eager execution
     */
    WARNING,

    /**
     * Throw error on eager execution
     */
    ERROR
}

/**
 * A special coroutine context key that allows or disallows goal execution during configuration time (eager execution).
 */
public class GoalExecutionRestriction(
    public val policy: GoalExecutionRestrictionPolicy = GoalExecutionRestrictionPolicy.ERROR,
) : CoroutineContext.Element {
@@ -2,6 +2,9 @@ package space.kscience.dataforge.data

import kotlin.coroutines.CoroutineContext

/**
 * Coroutine context element that provides logging capabilities
 */
public interface GoalLogger : CoroutineContext.Element {
    override val key: CoroutineContext.Key<*> get() = GoalLogger
@@ -18,7 +18,7 @@ kotlin {
    commonMain {
        dependencies {
            api(project(":dataforge-context"))
            api("io.ktor:ktor-io:${ru.mipt.npm.gradle.KScienceVersions.ktorVersion}")
            api(npmlibs.ktor.io)
        }
    }
}
@@ -12,12 +12,15 @@ public interface Binary {

    public val size: Int

    /**
     * Read maximum of [atMost] bytes as input from the binary, starting at [offset]. The generated input is always closed
     * when leaving the scope, so it cannot be leaked outside the scope of [block].
     */
    public fun <R> read(offset: Int = 0, atMost: Int = size - offset, block: Input.() -> R): R

    public suspend fun <R> readSuspend(offset: Int = 0, atMost: Int = size - offset, block: suspend Input.() -> R): R

    public companion object {
        public val EMPTY: Binary = ByteArrayBinary(ByteArray(0))
    }
@@ -39,6 +42,21 @@ internal class ByteArrayBinary(
        )
        return input.use(block)
    }

    override suspend fun <R> readSuspend(offset: Int, atMost: Int, block: suspend Input.() -> R): R {
        require(offset >= 0) { "Offset must be non-negative" }
        require(offset < array.size) { "Offset $offset is larger than array size" }
        val input = ByteReadPacket(
            array,
            offset + start,
            min(atMost, size - offset)
        )
        return try {
            block(input)
        } finally {
            input.close()
        }
    }
}

public fun ByteArray.asBinary(): Binary = ByteArrayBinary(this)
@@ -3,6 +3,8 @@ package space.kscience.dataforge.io

import io.ktor.utils.io.charsets.Charsets
import io.ktor.utils.io.charsets.decodeExactBytes
import io.ktor.utils.io.core.*
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFExperimental
import kotlin.math.min

public fun Output.writeRawString(str: String) {
@@ -41,11 +43,58 @@ public class BinaryView(private val source: Binary, private val start: Int, over
        require(start + size <= source.size) { "View boundary is outside source binary size" }
    }

    override fun <R> read(offset: Int, atMost: Int, block: Input.() -> R): R {
        return source.read(start + offset, min(size, atMost), block)
    }
    override fun <R> read(offset: Int, atMost: Int, block: Input.() -> R): R =
        source.read(start + offset, min(size, atMost), block)

    override suspend fun <R> readSuspend(offset: Int, atMost: Int, block: suspend Input.() -> R): R =
        source.readSuspend(start + offset, min(size, atMost), block)
}

public fun Binary.view(start: Int, size: Int): BinaryView = BinaryView(this, start, size)

public operator fun Binary.get(range: IntRange): BinaryView = view(range.first, range.last - range.first)
public operator fun Binary.get(range: IntRange): BinaryView = view(range.first, range.last - range.first)

/**
 * Return the inferred [EnvelopeFormat] if exactly one format can read the given binary. If no format accepts the binary, return null. If
 * multiple formats accept the binary, throw an error.
 */
public fun IOPlugin.peekBinaryEnvelopeFormat(binary: Binary): EnvelopeFormat? {
    val formats = envelopeFormatFactories.mapNotNull { factory ->
        factory.peekFormat(this@peekBinaryEnvelopeFormat, binary)
    }

    return when (formats.size) {
        0 -> null
        1 -> formats.first()
        else -> error("Envelope format binary recognition clash: $formats")
    }
}

/**
 * Zero-copy read this binary as an envelope using the given format
 */
@DFExperimental
public fun EnvelopeFormat.readBinary(binary: Binary): Envelope {
    val partialEnvelope: PartialEnvelope = binary.read {
        run {
            readPartial(this@read)
        }
    }
    val offset: Int = partialEnvelope.dataOffset.toInt()
    val size: Int = partialEnvelope.dataSize?.toInt() ?: (binary.size - offset)
    val envelopeBinary = BinaryView(binary, offset, size)
    return SimpleEnvelope(partialEnvelope.meta, envelopeBinary)
}

/**
 * A zero-copy read of an envelope from a binary
 */
@DFExperimental
public fun IOPlugin.readEnvelopeBinary(
    binary: Binary,
    readNonEnvelopes: Boolean = false,
    formatPicker: IOPlugin.(Binary) -> EnvelopeFormat? = IOPlugin::peekBinaryEnvelopeFormat,
): Envelope = formatPicker(binary)?.readBinary(binary) ?: if (readNonEnvelopes) {
    // if no format accepts the binary, read it as binary
    SimpleEnvelope(Meta.EMPTY, binary)
} else error("Can't infer format for $binary")
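A hedged usage sketch (not in the diff) of the new binary envelope helpers; the wrapper function and the assumption that an `IOPlugin` instance is already available from a context are invented for illustration.

```kotlin
import space.kscience.dataforge.io.Envelope
import space.kscience.dataforge.io.IOPlugin
import space.kscience.dataforge.io.asBinary
import space.kscience.dataforge.io.readEnvelopeBinary
import space.kscience.dataforge.misc.DFExperimental

// Try every registered envelope format; fall back to a plain binary envelope
// when none of them recognizes the bytes.
@DFExperimental
fun readBytesAsEnvelope(io: IOPlugin, bytes: ByteArray): Envelope =
    io.readEnvelopeBinary(bytes.asBinary(), readNonEnvelopes = true)
```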
@@ -2,6 +2,7 @@ package space.kscience.dataforge.io

import io.ktor.utils.io.core.*
import io.ktor.utils.io.streams.asOutput
import kotlinx.coroutines.runBlocking
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.meta.isEmpty
@@ -9,7 +10,6 @@ import space.kscience.dataforge.misc.DFExperimental
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.StandardOpenOption
import kotlin.io.path.ExperimentalPathApi
import kotlin.io.path.inputStream
import kotlin.math.min
import kotlin.reflect.full.isSupertypeOf
@@ -23,8 +23,11 @@ internal class PathBinary(
    override val size: Int = Files.size(path).toInt() - fileOffset,
) : Binary {

    @OptIn(ExperimentalPathApi::class)
    override fun <R> read(offset: Int, atMost: Int, block: Input.() -> R): R {
    override fun <R> read(offset: Int, atMost: Int, block: Input.() -> R): R = runBlocking {
        readSuspend(offset, atMost, block)
    }

    override suspend fun <R> readSuspend(offset: Int, atMost: Int, block: suspend Input.() -> R): R {
        val actualOffset = offset + fileOffset
        val actualSize = min(atMost, size - offset)
        val array = path.inputStream().use {
@@ -69,15 +72,14 @@ public fun Path.rewrite(block: Output.() -> Unit): Unit {
    stream.asOutput().use(block)
}

public fun Path.readEnvelope(format: EnvelopeFormat): Envelope {
    val partialEnvelope: PartialEnvelope = asBinary().read {
        format.run {
            readPartial(this@read)
        }
@DFExperimental
public fun EnvelopeFormat.readFile(path: Path): Envelope {
    val partialEnvelope: PartialEnvelope = path.asBinary().read {
        readPartial(this@read)
    }
    val offset: Int = partialEnvelope.dataOffset.toInt()
    val size: Int = partialEnvelope.dataSize?.toInt() ?: (Files.size(this).toInt() - offset)
    val binary = PathBinary(this, offset, size)
    val size: Int = partialEnvelope.dataSize?.toInt() ?: (Files.size(path).toInt() - offset)
    val binary = PathBinary(path, offset, size)
    return SimpleEnvelope(partialEnvelope.meta, binary)
}

@@ -110,10 +112,8 @@ public fun IOPlugin.readMetaFile(
    val extension = actualPath.fileName.toString().substringAfterLast('.')

    val metaFormat = formatOverride ?: resolveMetaFormat(extension) ?: error("Can't resolve meta format $extension")
    return metaFormat.run {
        actualPath.read {
            readMeta(this, descriptor)
        }
    return actualPath.read {
        metaFormat.readMeta(this, descriptor)
    }
}

@@ -145,15 +145,7 @@ public fun IOPlugin.writeMetaFile(
 */
public fun IOPlugin.peekFileEnvelopeFormat(path: Path): EnvelopeFormat? {
    val binary = path.asBinary()
    val formats = envelopeFormatFactories.mapNotNull { factory ->
        factory.peekFormat(this@peekFileEnvelopeFormat, binary)
    }

    return when (formats.size) {
        0 -> null
        1 -> formats.first()
        else -> error("Envelope format binary recognition clash: $formats")
    }
    return peekBinaryEnvelopeFormat(binary)
}

public val IOPlugin.Companion.META_FILE_NAME: String get() = "@meta"
@@ -204,7 +196,7 @@ public fun IOPlugin.readEnvelopeFile(
    }

    return formatPicker(path)?.let { format ->
        path.readEnvelope(format)
        format.readFile(path)
    } ?: if (readNonEnvelopes) { // if no format accepts file, read it as binary
        SimpleEnvelope(Meta.EMPTY, path.asBinary())
    } else error("Can't infer format for file $path")
@@ -0,0 +1,9 @@
package space.kscience.dataforge.io

import io.ktor.utils.io.core.Input
import io.ktor.utils.io.streams.asInput

public fun IOPlugin.resource(name: String): Binary? = context.javaClass.getResource(name)?.readBytes()?.asBinary()

public inline fun <R> IOPlugin.readResource(name: String, block: Input.() -> R): R =
    context.javaClass.getResource(name)?.openStream()?.asInput()?.block() ?: error("Can't read resource $name")
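A sketch only (not part of the commit) of how these resource helpers could be used; the resource path is made up and `readText` is the standard ktor `Input` extension.

```kotlin
import io.ktor.utils.io.core.*
import space.kscience.dataforge.io.IOPlugin
import space.kscience.dataforge.io.readResource

// Load a classpath resource as text; "/defaults/config.json" is a hypothetical path.
fun IOPlugin.loadDefaultConfig(): String =
    readResource("/defaults/config.json") { readText() }
```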
@@ -154,7 +154,7 @@ public inline fun <T : Scheme> T.copy(spec: SchemeSpec<T>, block: T.() -> Unit =
 */
public open class SchemeSpec<out T : Scheme>(
    private val builder: () -> T,
) : Specification<T>, Described {
) : Specification<T> {

    override fun read(source: Meta): T = builder().also {
        it.wrap(MutableMeta().withDefault(source))
@@ -1,12 +1,13 @@
package space.kscience.dataforge.meta

import space.kscience.dataforge.meta.descriptors.Described
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
import kotlin.properties.ReadWriteProperty
import kotlin.reflect.KProperty

public interface ReadOnlySpecification<out T : Any> {
public interface ReadOnlySpecification<out T : Any>: Described {

    /**
     * Read a generic read-only meta with this [Specification], producing an instance of the desired type.
@@ -10,7 +10,7 @@ import space.kscience.dataforge.values.ValueType
import space.kscience.dataforge.values.asValue
import kotlin.collections.set

public class MetaDescriptorBuilder internal constructor() {
public class MetaDescriptorBuilder @PublishedApi internal constructor() {
    public var info: String? = null
    public var children: MutableMap<String, MetaDescriptorBuilder> = linkedMapOf()
    public var multiple: Boolean = false
@@ -78,6 +78,7 @@ public class MetaDescriptorBuilder internal constructor() {
        allowedValues = values.map { Value.of(it) }
    }

    @PublishedApi
    internal fun build(): MetaDescriptor = MetaDescriptor(
        info = info,
        children = children.mapValues { it.value.build() },
@@ -93,7 +94,7 @@ public class MetaDescriptorBuilder internal constructor() {
public fun MetaDescriptorBuilder.item(name: String, block: MetaDescriptorBuilder.() -> Unit): MetaDescriptorBuilder =
    item(Name.parse(name), block)

public fun MetaDescriptor(block: MetaDescriptorBuilder.() -> Unit): MetaDescriptor =
public inline fun MetaDescriptor(block: MetaDescriptorBuilder.() -> Unit): MetaDescriptor =
    MetaDescriptorBuilder().apply(block).build()

/**
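A small sketch (not in the diff) of the descriptor builder that this hunk makes usable from the now-inline factory; the item name and descriptions are invented.

```kotlin
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.meta.descriptors.item

// Build a descriptor with one hypothetical child item using the inline factory.
val exampleDescriptor: MetaDescriptor = MetaDescriptor {
    info = "Example descriptor"
    item("threshold") {
        info = "A made-up numeric option"
    }
}
```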
@@ -6,6 +6,10 @@ import space.kscience.dataforge.context.info
import space.kscience.dataforge.context.logger
import space.kscience.dataforge.data.GoalLogger

/**
 * A coroutine context key that injects a [Context]-bound logger into the scope.
 * The message body is computed asynchronously.
 */
public class ContextGoalLogger(public val context: Context) : GoalLogger {
    override fun emit(vararg tags: String, message: suspend () -> String) {
        context.launch {
@@ -5,15 +5,20 @@ import space.kscience.dataforge.data.DataSetBuilder
import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.data.GoalExecutionRestriction
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MetaRepr
import space.kscience.dataforge.meta.Specification
import space.kscience.dataforge.meta.descriptors.Described
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.misc.DFInternal
import space.kscience.dataforge.misc.Type
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.workspace.Task.Companion.TYPE
import kotlin.reflect.KType
import kotlin.reflect.typeOf

/**
 * A configurable task that can be executed on a workspace. The [TaskResult] represents a lazy result of the task.
 * In general, no computations should be performed until the result is requested.
 */
@Type(TYPE)
public interface Task<out T : Any> : Described {

@@ -32,7 +37,26 @@ public interface Task<out T : Any> : Described {
    }
}

public class TaskResultBuilder<T : Any>(
/**
 * A [Task] with [Specification] for wrapping and unwrapping task configuration
 */
public interface TaskWithSpec<out T : Any, C : Any> : Task<T> {
    public val spec: Specification<C>
    override val descriptor: MetaDescriptor? get() = spec.descriptor

    public suspend fun execute(workspace: Workspace, taskName: Name, configuration: C): TaskResult<T>

    override suspend fun execute(workspace: Workspace, taskName: Name, taskMeta: Meta): TaskResult<T> =
        execute(workspace, taskName, spec.read(taskMeta))
}

public suspend fun <T : Any, C : Any> TaskWithSpec<T, C>.execute(
    workspace: Workspace,
    taskName: Name,
    block: C.() -> Unit = {},
): TaskResult<T> = execute(workspace, taskName, spec(block))

public class TaskResultBuilder<in T : Any>(
    public val workspace: Workspace,
    public val taskName: Name,
    public val taskMeta: Meta,
@@ -48,7 +72,6 @@ public class TaskResultBuilder<T : Any>(
 * @param builder for resulting data set
 */
@Suppress("FunctionName")
@DFInternal
public fun <T : Any> Task(
    resultType: KType,
    descriptor: MetaDescriptor? = null,
@@ -70,9 +93,45 @@ public fun <T : Any> Task(
    }
}

@OptIn(DFInternal::class)
@Suppress("FunctionName")
public inline fun <reified T : Any> Task(
    descriptor: MetaDescriptor? = null,
    noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
): Task<T> = Task(typeOf<T>(), descriptor, builder)
): Task<T> = Task(typeOf<T>(), descriptor, builder)


/**
 * Create a [Task] that composes a result using [builder]. Only data from the workspace can be used.
 * Data dependency cycles are not allowed.
 *
 * @param resultType the type boundary for data produced by this task
 * @param specification a specification for task configuration
 * @param builder for resulting data set
 */
@Suppress("FunctionName")
public fun <T : Any, C : MetaRepr> Task(
    resultType: KType,
    specification: Specification<C>,
    builder: suspend TaskResultBuilder<T>.(C) -> Unit,
): TaskWithSpec<T, C> = object : TaskWithSpec<T, C> {
    override val spec: Specification<C> = specification

    override suspend fun execute(
        workspace: Workspace,
        taskName: Name,
        configuration: C,
    ): TaskResult<T> = withContext(GoalExecutionRestriction() + workspace.goalLogger) {
        //TODO use safe builder, check for external data on add and detect cycles
        val taskMeta = configuration.toMeta()
        val dataset = DataTree<T>(resultType) {
            TaskResultBuilder(workspace, taskName, taskMeta, this).apply { builder(configuration) }
        }
        workspace.wrapResult(dataset, taskName, taskMeta)
    }
}

@Suppress("FunctionName")
public inline fun <reified T : Any, C : MetaRepr> Task(
    specification: Specification<C>,
    noinline builder: suspend TaskResultBuilder<T>.(C) -> Unit,
): Task<T> = Task(typeOf<T>(), specification, builder)
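A hedged sketch (not from the commit) of declaring a specification-configured task with the new overloads. `ExampleConfig`, its `factor` property, and the empty builder body are invented, and the `Scheme` property-delegate API (`double`) is assumed to be available.

```kotlin
import space.kscience.dataforge.meta.Scheme
import space.kscience.dataforge.meta.SchemeSpec
import space.kscience.dataforge.meta.double
import space.kscience.dataforge.workspace.Task

// A made-up configuration scheme; its companion acts as the Specification.
class ExampleConfig : Scheme() {
    var factor by double(1.0)

    companion object : SchemeSpec<ExampleConfig>(::ExampleConfig)
}

// The builder receives an ExampleConfig decoded from the task meta.
val exampleTask = Task<Int, ExampleConfig>(ExampleConfig) { config ->
    // ... emit data into the result set, scaled by config.factor
}
```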
@@ -8,7 +8,9 @@ import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.data.DataSetBuilder
import space.kscience.dataforge.data.DataTree
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MetaRepr
import space.kscience.dataforge.meta.MutableMeta
import space.kscience.dataforge.meta.Specification
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
import space.kscience.dataforge.meta.descriptors.MetaDescriptorBuilder
import space.kscience.dataforge.misc.DFBuilder
@@ -28,19 +30,34 @@ public data class TaskReference<T : Any>(public val taskName: Name, public val t
        error("Task $taskName does not belong to the workspace")
    }
}

}

public interface TaskContainer {
    /**
     * Register task in container
     */
    public fun registerTask(taskName: Name, task: Task<*>)
}

@Deprecated("use buildTask instead", ReplaceWith("buildTask(name, descriptorBuilder, builder)"))
public inline fun <reified T : Any> TaskContainer.registerTask(
    name: String,
    noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
    noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
): Unit = registerTask(Name.parse(name), Task(MetaDescriptor(descriptorBuilder), builder))

public inline fun <reified T : Any> TaskContainer.buildTask(
    name: String,
    descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
    noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
): TaskReference<T> {
    val theName = Name.parse(name)
    val descriptor = MetaDescriptor(descriptorBuilder)
    val task = Task(descriptor, builder)
    registerTask(theName, task)
    return TaskReference(theName, task)
}

public inline fun <reified T : Any> TaskContainer.task(
    descriptor: MetaDescriptor,
    noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
@@ -51,6 +68,16 @@ public inline fun <reified T : Any> TaskContainer.task(
    ReadOnlyProperty { _, _ -> TaskReference(taskName, task) }
}

public inline fun <reified T : Any, C : MetaRepr> TaskContainer.task(
    specification: Specification<C>,
    noinline builder: suspend TaskResultBuilder<T>.(C) -> Unit,
): PropertyDelegateProvider<Any?, ReadOnlyProperty<Any?, TaskReference<T>>> = PropertyDelegateProvider { _, property ->
    val taskName = Name.parse(property.name)
    val task = Task(specification, builder)
    registerTask(taskName, task)
    ReadOnlyProperty { _, _ -> TaskReference(taskName, task) }
}

public inline fun <reified T : Any> TaskContainer.task(
    noinline descriptorBuilder: MetaDescriptorBuilder.() -> Unit = {},
    noinline builder: suspend TaskResultBuilder<T>.() -> Unit,
@@ -1,8 +1,7 @@
org.gradle.jvmargs=-XX:MaxMetaspaceSize=2G
org.gradle.jvmargs=-XX:MaxMetaspaceSize=1G
org.gradle.parallel=true

kotlin.code.style=official
kotlin.parallel.tasks.in.project=true

#kotlin.mpp.enableGranularSourceSetsMetadata=true
#kotlin.native.enableDependencyPropagation=false
@@ -10,3 +9,5 @@ kotlin.mpp.stability.nowarn=true

publishing.github=false
publishing.sonatype=false

toolsVersion=0.11.1-kotlin-1.6.10
@@ -1,12 +1,19 @@
rootProject.name = "dataforge-core"

enableFeaturePreview("TYPESAFE_PROJECT_ACCESSORS")
enableFeaturePreview("VERSION_CATALOGS")

pluginManagement {

    val toolsVersion: String by extra

    repositories {
        mavenLocal()
        maven("https://repo.kotlin.link")
        mavenCentral()
        gradlePluginPortal()
    }

    val toolsVersion = "0.10.7"

    plugins {
        id("ru.mipt.npm.gradle.project") version toolsVersion
        id("ru.mipt.npm.gradle.mpp") version toolsVersion
@@ -15,6 +22,23 @@ pluginManagement {
    }
}

dependencyResolutionManagement {

    val toolsVersion: String by extra

    repositories {
        mavenLocal()
        maven("https://repo.kotlin.link")
        mavenCentral()
    }

    versionCatalogs {
        create("npmlibs") {
            from("ru.mipt.npm:version-catalog:$toolsVersion")
        }
    }
}

include(
    ":dataforge-meta",
    ":dataforge-io",