v0.8.2 #80

Open
altavir wants to merge 63 commits from dev into master
20 changed files with 68 additions and 45 deletions
Showing only changes of commit 5461a83417

View File

@@ -8,7 +8,7 @@ plugins {
allprojects {
group = "space.kscience"
-version = "0.7.0"
+version = "0.7.1"
}
subprojects {
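For illustration, a sketch of how a downstream build might pick up the bumped version; the artifact name "dataforge-meta" is an assumption and is not shown in this diff.

    // Hypothetical consumer build script (Gradle Kotlin DSL); coordinates follow the group declared above.
    dependencies {
        api("space.kscience:dataforge-meta:0.7.1")
    }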

View File

@@ -3,7 +3,7 @@ package space.kscience.dataforge.context
import space.kscience.dataforge.context.Plugin.Companion.TARGET
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MetaRepr
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.parseAsName
@@ -18,7 +18,7 @@ import space.kscience.dataforge.provider.Provider
*
* create - configure - attach - detach - destroy
*/
-@DfId(TARGET)
+@DfType(TARGET)
public interface Plugin : Named, ContextAware, Provider, MetaRepr {
/**

View File

@@ -1,9 +1,9 @@
package space.kscience.dataforge.context
import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
-@DfId(PluginFactory.TYPE)
+@DfType(PluginFactory.TYPE)
public interface PluginFactory<T : Plugin> : Factory<T> {
public val tag: PluginTag

View File

@@ -4,7 +4,7 @@ import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.PluginBuilder
import space.kscience.dataforge.context.gather
import space.kscience.dataforge.misc.DFExperimental
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.names.Name
import kotlin.reflect.KClass
@@ -13,10 +13,10 @@ import kotlin.reflect.full.findAnnotation
@DFExperimental
public val KClass<*>.dfId: String
-get() = findAnnotation<DfId>()?.id ?: simpleName ?: ""
+get() = findAnnotation<DfType>()?.id ?: simpleName ?: ""
/**
-* Provide an object with given name inferring target from its type using [DfId] annotation
+* Provide an object with given name inferring target from its type using [DfType] annotation
*/
@DFExperimental
public inline fun <reified T : Any> Provider.provideByType(name: String): T? {

View File

@@ -98,8 +98,7 @@ internal class MapAction<in T : Any, R : Any>(
* A one-to-one mapping action
*/
@DFExperimental
-@Suppress("FunctionName")
-public inline fun <T : Any, reified R : Any> Action.Companion.map(
+public inline fun <T : Any, reified R : Any> Action.Companion.mapping(
noinline builder: MapActionBuilder<T, R>.() -> Unit,
): Action<T, R> = MapAction(typeOf<R>(), builder)
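For illustration, a sketch of how the renamed factory might be called; the reducing and splitting factories in the files below follow the same renaming pattern. The result { } configuration call and the import paths are assumptions based on the MapActionBuilder DSL, which is not shown in this diff.

    import space.kscience.dataforge.actions.Action
    import space.kscience.dataforge.actions.mapping
    import space.kscience.dataforge.misc.DFExperimental
    import kotlin.math.sqrt

    // Hypothetical one-to-one transformation: each Int datum becomes its square root as a Double.
    @OptIn(DFExperimental::class)
    val sqrtAction: Action<Int, Double> = Action.mapping<Int, Double> {
        // result { } (assumed builder API) sets the per-datum transformation
        result { value -> sqrt(value.toDouble()) }
    }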

View File

@@ -112,6 +112,6 @@ internal class ReduceAction<T : Any, R : Any>(
* A one-to-one mapping action
*/
@DFExperimental
-public inline fun <reified T : Any, reified R : Any> Action.Companion.reduce(
+public inline fun <reified T : Any, reified R : Any> Action.Companion.reducing(
noinline builder: ReduceGroupBuilder<T, R>.() -> Unit,
): Action<T, R> = ReduceAction(typeOf<R>(), builder)

View File

@@ -87,6 +87,6 @@ internal class SplitAction<T : Any, R : Any>(
* Action that splits each incoming element into a number of fragments defined in builder
*/
@DFExperimental
-public inline fun <T : Any, reified R : Any> Action.Companion.split(
+public inline fun <T : Any, reified R : Any> Action.Companion.splitting(
noinline builder: SplitBuilder<T, R>.() -> Unit,
): Action<T, R> = SplitAction(typeOf<R>(), builder)

View File

@@ -5,7 +5,7 @@ import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MetaRepr
import space.kscience.dataforge.meta.isEmpty
import space.kscience.dataforge.misc.DFInternal
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
import kotlin.coroutines.CoroutineContext
import kotlin.coroutines.EmptyCoroutineContext
import kotlin.reflect.KType
@@ -14,7 +14,7 @@ import kotlin.reflect.typeOf
/**
* A data element characterized by its meta
*/
-@DfId(Data.TYPE)
+@DfType(Data.TYPE)
public interface Data<out T> : Goal<T>, MetaRepr {
/**
* Type marker for the data. The type is known before the calculation takes place so it could be checked.

View File

@@ -2,7 +2,7 @@ package space.kscience.dataforge.data
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.misc.DFInternal
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
import space.kscience.dataforge.names.*
import kotlin.collections.component1
import kotlin.collections.component2
@@ -31,7 +31,7 @@ public val <T : Any> DataTreeItem<T>.type: KType
/**
* A tree-like [DataSet] grouped into the node. All data inside the node must inherit its type
*/
-@DfId(DataTree.TYPE)
+@DfType(DataTree.TYPE)
public interface DataTree<out T : Any> : DataSet<T> {
/**

View File

@@ -1,2 +0,0 @@
-package space.kscience.dataforge.data

View File

@@ -4,7 +4,7 @@ import kotlinx.io.Source
import space.kscience.dataforge.context.Context
import space.kscience.dataforge.io.EnvelopeFormatFactory.Companion.ENVELOPE_FORMAT_TYPE
import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
import kotlin.reflect.KType
@@ -17,7 +17,7 @@ public interface EnvelopeFormat : IOFormat<Envelope> {
public fun EnvelopeFormat.read(input: Source): Envelope = readFrom(input)
-@DfId(ENVELOPE_FORMAT_TYPE)
+@DfType(ENVELOPE_FORMAT_TYPE)
public interface EnvelopeFormatFactory : IOFormatFactory<Envelope>, EnvelopeFormat {
override val type: KType get() = typeOf<Envelope>()

View File

@@ -7,7 +7,7 @@ import space.kscience.dataforge.context.Context
import space.kscience.dataforge.context.Factory
import space.kscience.dataforge.io.IOFormatFactory.Companion.IO_FORMAT_TYPE
import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
import space.kscience.dataforge.misc.Named
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
@@ -72,7 +72,7 @@ public fun <T : Any> Sink.writeWith(format: IOWriter<T>, obj: T): Unit =
format.writeTo(this, obj)
-@DfId(IO_FORMAT_TYPE)
+@DfType(IO_FORMAT_TYPE)
public interface IOFormatFactory<T : Any> : Factory<IOFormat<T>>, Named {
/**
* Explicit type for dynamic type checks

View File

@@ -9,7 +9,7 @@ import space.kscience.dataforge.context.Global
import space.kscience.dataforge.io.MetaFormatFactory.Companion.META_FORMAT_TYPE
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.names.asName
import space.kscience.dataforge.names.plus
@@ -38,7 +38,7 @@ public interface MetaFormat : IOFormat<Meta> {
public fun readMeta(source: Source, descriptor: MetaDescriptor? = null): Meta
}
-@DfId(META_FORMAT_TYPE)
+@DfType(META_FORMAT_TYPE)
public interface MetaFormatFactory : IOFormatFactory<Meta>, MetaFormat {
public val shortName: String

View File

@@ -2,7 +2,7 @@ package space.kscience.dataforge.meta
import kotlinx.serialization.Serializable
import kotlinx.serialization.json.Json
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
import space.kscience.dataforge.misc.unsafeCast
import space.kscience.dataforge.names.*
import kotlin.jvm.JvmName
@@ -31,7 +31,7 @@ public fun interface MetaProvider : ValueProvider {
* TODO add documentation
* Same name siblings are supported via elements with the same [Name] but different indices.
*/
-@DfId(Meta.TYPE)
+@DfType(Meta.TYPE)
@Serializable(MetaSerializer::class)
public interface Meta : MetaRepr, MetaProvider {
public val value: Value?
@@ -248,7 +248,7 @@ public inline fun <reified E : Enum<E>> Meta?.enum(): E? = this?.value?.let {
}
}
-public val Meta.stringList: List<String>? get() = value?.list?.map { it.string }
+public val Meta?.stringList: List<String>? get() = this?.value?.list?.map { it.string }
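The receiver becomes nullable, so a missing node no longer needs a safe call; a minimal sketch, assuming the String-keyed get operator from this package (the "tags" key is hypothetical):

    import space.kscience.dataforge.meta.Meta
    import space.kscience.dataforge.meta.get
    import space.kscience.dataforge.meta.stringList

    // meta["tags"] returns Meta?; with the nullable receiver a miss simply yields null
    fun readTags(meta: Meta): List<String>? = meta["tags"].stringList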
/**
* Create a provider that uses given provider for default values if those are not found in this provider

View File

@@ -9,7 +9,11 @@ import space.kscience.dataforge.names.length
import kotlin.collections.set
public class MetaDescriptorBuilder @PublishedApi internal constructor() {
-public var info: String? = null
+public var description: String? = null
+@Deprecated("Replace by description", ReplaceWith("description"))
+public var info: String? by ::description
public var children: MutableMap<String, MetaDescriptorBuilder> = linkedMapOf()
public var multiple: Boolean = false
public var valueRestriction: ValueRestriction = ValueRestriction.NONE
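For illustration, a sketch of what the delegated deprecation means for existing callers, using the MetaDescriptor { } builder shown in the test change further down; writing the deprecated info still compiles and is forwarded to description by the `by ::description` delegate.

    import space.kscience.dataforge.meta.descriptors.MetaDescriptor

    @Suppress("DEPRECATION")
    val legacyDescriptor = MetaDescriptor {
        info = "legacy text"   // forwarded to `description` by the property delegate
    }
    // legacyDescriptor.description == "legacy text"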
@@ -87,7 +91,7 @@ public class MetaDescriptorBuilder @PublishedApi internal constructor() {
@PublishedApi
internal fun build(): MetaDescriptor = MetaDescriptor(
-description = info,
+description = description,
children = children.mapValues { it.value.build() },
multiple = multiple,
valueRestriction = valueRestriction,
@@ -165,7 +169,7 @@ public inline fun <reified E : Enum<E>> MetaDescriptorBuilder.enum(
}
private fun MetaDescriptor.toBuilder(): MetaDescriptorBuilder = MetaDescriptorBuilder().apply {
-info = this@toBuilder.description
+description = this@toBuilder.description
children = this@toBuilder.children.mapValuesTo(LinkedHashMap()) { it.value.toBuilder() }
multiple = this@toBuilder.multiple
valueRestriction = this@toBuilder.valueRestriction

View File

@@ -5,4 +5,7 @@ package space.kscience.dataforge.misc
*/
@MustBeDocumented
@Target(AnnotationTarget.CLASS)
-public annotation class DfId(val id: String)
+public annotation class DfType(val id: String)
+@Deprecated("Replace with DfType", replaceWith = ReplaceWith("DfType"))
+public typealias DfId = DfType
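For illustration, a sketch of the intended usage, mirroring the annotated interfaces elsewhere in this commit; MySolver and its TYPE constant are hypothetical, and code that still refers to DfId keeps compiling through the deprecated alias until it is removed.

    import space.kscience.dataforge.misc.DfType

    // Hypothetical provider target, following the same pattern as Data, Task and Workspace in this PR
    @DfType(MySolver.TYPE)
    public interface MySolver {
        public companion object {
            public const val TYPE: String = "solver"
        }
    }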

View File

@@ -11,14 +11,14 @@ class DescriptorTest {
val descriptor = MetaDescriptor {
node("aNode") {
-info = "A root demo node"
+description = "A root demo node"
value("b", ValueType.NUMBER) {
-info = "b number value"
+description = "b number value"
}
node("otherNode") {
value("otherValue", ValueType.BOOLEAN) {
default(false)
-info = "default value"
+description = "default value"
}
}
}

View File

@@ -9,7 +9,7 @@ import space.kscience.dataforge.meta.MetaRepr
import space.kscience.dataforge.meta.Specification
import space.kscience.dataforge.meta.descriptors.Described
import space.kscience.dataforge.meta.descriptors.MetaDescriptor
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.workspace.Task.Companion.TYPE
import kotlin.reflect.KType
@@ -19,7 +19,7 @@ import kotlin.reflect.typeOf
* A configurable task that could be executed on a workspace. The [TaskResult] represents a lazy result of the task.
* In general no computations should be made until the result is called.
*/
-@DfId(TYPE)
+@DfType(TYPE)
public interface Task<out T : Any> : Described {
/**

View File

@@ -6,7 +6,7 @@ import space.kscience.dataforge.data.DataSet
import space.kscience.dataforge.data.asSequence
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.MutableMeta
-import space.kscience.dataforge.misc.DfId
+import space.kscience.dataforge.misc.DfType
import space.kscience.dataforge.names.Name
import space.kscience.dataforge.provider.Provider
@@ -18,7 +18,7 @@ public interface DataSelector<T: Any>{
/**
* An environment for pull-mode computation
*/
-@DfId(Workspace.TYPE)
+@DfType(Workspace.TYPE)
public interface Workspace : ContextAware, Provider {
/**
* The whole data node for current workspace

View File

@@ -11,7 +11,6 @@ import space.kscience.dataforge.data.*
import space.kscience.dataforge.io.*
import space.kscience.dataforge.meta.Meta
import space.kscience.dataforge.meta.copy
import space.kscience.dataforge.meta.get
import space.kscience.dataforge.meta.string
import space.kscience.dataforge.misc.DFExperimental
import space.kscience.dataforge.misc.DFInternal
@@ -27,10 +26,7 @@ import java.nio.file.WatchEvent
import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.spi.FileSystemProvider
import java.time.Instant
-import kotlin.io.path.extension
-import kotlin.io.path.name
-import kotlin.io.path.nameWithoutExtension
-import kotlin.io.path.readAttributes
+import kotlin.io.path.*
import kotlin.reflect.KType
import kotlin.reflect.typeOf
@@ -92,7 +88,7 @@ public fun <T : Any> IOPlugin.readDataFile(
context(IOPlugin) @DFExperimental
-private fun <T : Any> DataSetBuilder<T>.directory(
+public fun <T : Any> DataSetBuilder<T>.directory(
path: Path,
ignoreExtensions: Set<String>,
formatResolver: FileFormatResolver<T>,
@@ -145,7 +141,7 @@ public inline fun <reified T : Any> IOPlugin.readDataDirectory(
): DataTree<T> = readDataDirectory(typeOf<T>(), path, ignoreExtensions, formatResolver)
/**
-* Read raw binary data tree from the directory. All files are read as-is (save for meta files).
+* Read a raw binary data tree from the directory. All files are read as-is (save for meta files).
*/
@DFExperimental
public fun IOPlugin.readRawDirectory(
@@ -260,6 +256,29 @@ public suspend fun <T : Any> IOPlugin.writeDataDirectory(
}
}
+/**
+ * Reads the specified resources and returns a [DataTree] containing the data.
+ *
+ * @param resources The names of the resources to read.
+ * @param classLoader The class loader to use for loading the resources. By default, it uses the current thread's context class loader.
+ * @return A DataTree containing the data read from the resources.
+ */
+@DFExperimental
+private fun IOPlugin.readResources(
+    vararg resources: String,
+    classLoader: ClassLoader = Thread.currentThread().contextClassLoader,
+): DataTree<Binary> {
+    // require(resource.isNotBlank()) {"Can't mount root resource tree as data root"}
+    return DataTree {
+        resources.forEach { resource ->
+            val path = classLoader.getResource(resource)?.toURI()?.toPath() ?: error(
+                "Resource with name $resource is not resolved"
+            )
+            node(resource, readRawDirectory(path))
+        }
+    }
+}
/**
* Add file/directory-based data tree item
*
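Since readResources above is private in this commit, a minimal sketch of reaching the same result through the public readRawDirectory, assuming a JVM target, an available IOPlugin instance, and a hypothetical classpath directory named "data"; the DataTree<Binary> return type of readRawDirectory is inferred from the readResources body.

    import space.kscience.dataforge.data.DataTree
    import space.kscience.dataforge.io.Binary
    import space.kscience.dataforge.io.IOPlugin
    import space.kscience.dataforge.misc.DFExperimental
    import kotlin.io.path.*
    // plus the import for readRawDirectory itself (its package is not shown in this diff)

    @OptIn(DFExperimental::class)
    fun loadResourceData(io: IOPlugin): DataTree<Binary> {
        val uri = Thread.currentThread().contextClassLoader
            .getResource("data")?.toURI()
            ?: error("Resource directory 'data' is not resolved on the classpath")
        // readRawDirectory reads every file as-is, as described in the hunk above
        return io.readRawDirectory(uri.toPath())
    }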