- Remove all unnecessary properties for `IOFormat`
- Separate interfaces for `IOReader` and `IOWriter`
parent a546552540
commit 0fc2198832
@@ -17,6 +17,8 @@
 - `ActiveDataSet` renamed to `DataSource`
 - `selectOne`->`getByType`
 - Data traversal in `DataSet` is done via iterator
+- Remove all unnecessary properties for `IOFormat`
+- Separate interfaces for `IOReader` and `IOWriter`

 ### Deprecated

@@ -1,3 +1,5 @@
+import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
+
 plugins {
     id("ru.mipt.npm.gradle.project")
 }

@@ -5,6 +7,12 @@ plugins {
 allprojects {
     group = "space.kscience"
     version = "0.6.0-dev-7"

+    tasks.withType<KotlinCompile>{
+        kotlinOptions{
+            freeCompilerArgs = freeCompilerArgs + "-Xcontext-receivers"
+        }
+    }
 }

 subprojects {
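
The `-Xcontext-receivers` flag enabled here is what allows the `context(IOPlugin)` declaration introduced later in this commit for `DataSetBuilder<Any>.file(...)`. A minimal, self-contained sketch of the feature under that flag (all names below are illustrative, not part of the commit):

```kotlin
// Requires -Xcontext-receivers; illustrative example only.
class Logger {
    fun log(message: String) = println(message)
}

// The Logger is supplied by the calling context instead of an explicit parameter.
context(Logger)
fun greet(name: String) {
    log("Hello, $name")
}

fun main() {
    with(Logger()) { // brings a Logger into scope as the context receiver
        greet("DataForge")
    }
}
```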
@@ -75,10 +75,8 @@ public inline fun <reified T : Any> Provider.provide(path: String, targetOverrid
 /**
  * Typed top level content
  */
-public fun <T : Any> Provider.top(target: String, type: KClass<out T>): Map<Name, T> {
-    return content(target).mapValues {
-        type.safeCast(it.value) ?: error("The type of element $it is ${it::class} but $type is expected")
-    }
-}
+public fun <T : Any> Provider.top(target: String, type: KClass<out T>): Map<Name, T> = content(target).mapValues {
+    type.safeCast(it.value) ?: error("The type of element ${it.value} is ${it.value::class} but $type is expected")
+}

 /**
@@ -7,10 +7,7 @@ import io.ktor.utils.io.core.readBytes
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.Global
 import space.kscience.dataforge.io.*
-import space.kscience.dataforge.io.IOFormat.Companion.META_KEY
-import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
 import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.Name
 import space.kscience.dataforge.names.plus

@@ -65,11 +62,6 @@ public class FrontMatterEnvelopeFormat(
         }
     }

-    override fun toMeta(): Meta = Meta {
-        NAME_KEY put name.toString()
-        META_KEY put meta
-    }
-
     public companion object : EnvelopeFormatFactory {
         public const val SEPARATOR: String = "---"

@@ -4,8 +4,6 @@ import io.ktor.utils.io.core.Input
 import io.ktor.utils.io.core.Output
 import net.mamoe.yamlkt.*
 import space.kscience.dataforge.context.Context
-import space.kscience.dataforge.io.IOFormat.Companion.META_KEY
-import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
 import space.kscience.dataforge.io.MetaFormat
 import space.kscience.dataforge.io.MetaFormatFactory
 import space.kscience.dataforge.io.readUtf8String
@@ -14,7 +12,6 @@ import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.meta.descriptors.get
 import space.kscience.dataforge.meta.isLeaf
-import space.kscience.dataforge.misc.DFExperimental
 import space.kscience.dataforge.names.NameToken
 import space.kscience.dataforge.names.withIndex
 import space.kscience.dataforge.values.ListValue
@@ -108,11 +105,6 @@ public class YamlMetaFormat(private val meta: Meta) : MetaFormat {
         return yaml.toMeta()
     }

-    override fun toMeta(): Meta = Meta {
-        NAME_KEY put FrontMatterEnvelopeFormat.name.toString()
-        META_KEY put meta
-    }
-
     public companion object : MetaFormatFactory {
         override fun build(context: Context, meta: Meta): MetaFormat = YamlMetaFormat(meta)

@@ -17,8 +17,6 @@ import kotlin.reflect.typeOf
 public data class PartialEnvelope(val meta: Meta, val dataOffset: Int, val dataSize: ULong?)

 public interface EnvelopeFormat : IOFormat<Envelope> {
-    override val type: KType get() = typeOf<Envelope>()
-
     public val defaultMetaFormat: MetaFormatFactory get() = JsonMetaFormat

     public fun readPartial(input: Input): PartialEnvelope
@@ -1,5 +1,6 @@
 package space.kscience.dataforge.io

+import space.kscience.dataforge.context.invoke
 import space.kscience.dataforge.io.Envelope.Companion.ENVELOPE_NODE_KEY
 import space.kscience.dataforge.io.PartDescriptor.Companion.DEFAULT_MULTIPART_DATA_SEPARATOR
 import space.kscience.dataforge.io.PartDescriptor.Companion.MULTIPART_DATA_TYPE
@@ -35,7 +36,7 @@ public typealias EnvelopeParts = List<EnvelopePart>

 public fun EnvelopeBuilder.multipart(
     parts: EnvelopeParts,
-    separator: String = DEFAULT_MULTIPART_DATA_SEPARATOR
+    separator: String = DEFAULT_MULTIPART_DATA_SEPARATOR,
 ) {
     dataType = MULTIPART_DATA_TYPE

@@ -67,17 +68,25 @@ public fun EnvelopeBuilder.multipart(
     }
 }

+/**
+ * Put a list of envelopes as parts of given envelope
+ */
 public fun EnvelopeBuilder.envelopes(
     envelopes: List<Envelope>,
-    format: EnvelopeFormat = TaggedEnvelopeFormat,
-    separator: String = DEFAULT_MULTIPART_DATA_SEPARATOR
+    formatFactory: EnvelopeFormatFactory = TaggedEnvelopeFormat,
+    formatMeta: Meta? = null,
+    separator: String = DEFAULT_MULTIPART_DATA_SEPARATOR,
 ) {
     val parts = envelopes.map {
-        val binary = format.toBinary(it)
+        val format = formatMeta?.let { formatFactory(formatMeta) } ?: formatFactory
+        val binary = Binary(it, format)
         EnvelopePart(binary, null)
     }
-    meta{
-        set(MULTIPART_KEY + PART_FORMAT_KEY, format.toMeta())
+    meta {
+        (MULTIPART_KEY + PART_FORMAT_KEY) put {
+            IOFormatFactory.NAME_KEY put formatFactory.name.toString()
+            formatMeta?.let { IOFormatFactory.META_KEY put formatMeta }
+        }
     }
     multipart(parts, separator)
 }
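
With this change the caller supplies an `EnvelopeFormatFactory` (plus optional `formatMeta`) instead of a prebuilt `EnvelopeFormat`, and the factory name is recorded under the part-format key in the envelope meta so readers can resolve it later. A hedged usage sketch, assuming the `Envelope { ... }` builder from dataforge-io (the `wrap` helper is illustrative):

```kotlin
import space.kscience.dataforge.io.Envelope
import space.kscience.dataforge.io.TaggedEnvelopeFormat
import space.kscience.dataforge.io.envelopes

// Illustrative helper: packs existing envelopes as multipart data,
// letting the builder record TaggedEnvelopeFormat by name in the meta.
fun wrap(parts: List<Envelope>): Envelope = Envelope {
    envelopes(parts, formatFactory = TaggedEnvelopeFormat)
}
```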
@@ -1,12 +1,13 @@
 package space.kscience.dataforge.io

-import io.ktor.utils.io.core.*
+import io.ktor.utils.io.core.Input
+import io.ktor.utils.io.core.Output
+import io.ktor.utils.io.core.readDouble
+import io.ktor.utils.io.core.writeDouble
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.Factory
-import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
 import space.kscience.dataforge.io.IOFormatFactory.Companion.IO_FORMAT_TYPE
 import space.kscience.dataforge.meta.Meta
-import space.kscience.dataforge.meta.MetaRepr
 import space.kscience.dataforge.misc.Named
 import space.kscience.dataforge.misc.Type
 import space.kscience.dataforge.names.Name
@@ -14,92 +15,53 @@ import space.kscience.dataforge.names.asName
 import kotlin.reflect.KType
 import kotlin.reflect.typeOf

+public fun interface IOReader<out T : Any> {
+
+    public fun readObject(input: Input): T
+}
+
+public fun interface IOWriter<in T : Any> {
+    public fun writeObject(output: Output, obj: T)
+}
+
 /**
  * And interface for reading and writing objects into with IO streams
  */
-public interface IOFormat<T : Any> : MetaRepr {
-    public val type: KType
-
-    public fun writeObject(output: Output, obj: T)
-    public fun readObject(input: Input): T
-
-    public companion object {
-        public val NAME_KEY: Name = "name".asName()
-        public val META_KEY: Name = "meta".asName()
-    }
-}
-
-public fun <T : Any> Input.readWith(format: IOFormat<T>): T = format.readObject(this@readWith)
-
-public fun <T: Any> IOFormat<T>.readObject(binary: Binary): T = binary.read {
+public interface IOFormat<T : Any> : IOReader<T>, IOWriter<T>
+
+public fun <T : Any> Input.readObject(format: IOReader<T>): T = format.readObject(this@readObject)
+
+public fun <T : Any> IOFormat<T>.readObjectFrom(binary: Binary): T = binary.read {
     readObject(this)
 }

 /**
  * Read given binary as object using given format
  */
-public fun <T : Any> Binary.readWith(format: IOFormat<T>): T = read {
-    readWith(format)
+public fun <T : Any> Binary.readWith(format: IOReader<T>): T = read {
+    readObject(format)
 }

-public fun <T : Any> Output.writeWith(format: IOFormat<T>, obj: T): Unit =
-    format.run { writeObject(this@writeWith, obj) }
+public fun <T : Any> Output.writeObject(format: IOWriter<T>, obj: T): Unit =
+    format.run { writeObject(this@writeObject, obj) }

-public inline fun <reified T : Any> IOFormat.Companion.listOf(
-    format: IOFormat<T>,
-): IOFormat<List<T>> = object : IOFormat<List<T>> {
-    override val type: KType = typeOf<List<T>>()
-
-    override fun writeObject(output: Output, obj: List<T>) {
-        output.writeInt(obj.size)
-        format.run {
-            obj.forEach {
-                writeObject(output, it)
-            }
-        }
-    }
-
-    override fun readObject(input: Input): List<T> {
-        val size = input.readInt()
-        return format.run {
-            List(size) { readObject(input) }
-        }
-    }
-
-    override fun toMeta(): Meta = Meta {
-        NAME_KEY put "list"
-        "contentFormat" put format.toMeta()
-    }
-}
-
-//public fun ObjectPool<Buffer>.fill(block: Buffer.() -> Unit): Buffer {
-//    val buffer = borrow()
-//    return try {
-//        buffer.apply(block)
-//    } catch (ex: Exception) {
-//        //recycle(buffer)
-//        throw ex
-//    }
-//}
-
 @Type(IO_FORMAT_TYPE)
-public interface IOFormatFactory<T : Any> : Factory<IOFormat<T>>, Named, MetaRepr {
+public interface IOFormatFactory<T : Any> : Factory<IOFormat<T>>, Named {
     /**
      * Explicit type for dynamic type checks
     */
     public val type: KType

-    override fun toMeta(): Meta = Meta {
-        NAME_KEY put name.toString()
-    }
-
     public companion object {
         public const val IO_FORMAT_TYPE: String = "io.format"
+        public val NAME_KEY: Name = "name".asName()
+        public val META_KEY: Name = "meta".asName()
     }
 }

-public fun <T : Any> IOFormat<T>.toBinary(obj: T): Binary = Binary { writeObject(this, obj) }
+public fun <T : Any> Binary(obj: T, format: IOWriter<T>): Binary = Binary { format.writeObject(this, obj) }

 public object DoubleIOFormat : IOFormat<Double>, IOFormatFactory<Double> {
     override fun build(context: Context, meta: Meta): IOFormat<Double> = this
@@ -113,21 +75,4 @@ public object DoubleIOFormat : IOFormat<Double>, IOFormatFactory<Double> {
     }

     override fun readObject(input: Input): Double = input.readDouble()
 }
-
-//public object ValueIOFormat : IOFormat<Value>, IOFormatFactory<Value> {
-//    override fun invoke(meta: Meta, context: Context): IOFormat<Value> = this
-//
-//    override val name: Name = "value".asName()
-//
-//    override val type: KType get() = typeOf<Value>()
-//
-//    override fun writeObject(output: Output, obj: Value) {
-//        BinaryMetaFormat.run { output.writeValue(obj) }
-//    }
-//
-//    override fun readObject(input: Input): Value {
-//        return (BinaryMetaFormat.run { input.readMetaItem() } as? MetaItemValue)?.value
-//            ?: error("The item is not a value")
-//    }
-//}
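
After this refactoring the whole surface is small: `IOReader`/`IOWriter` are single-method interfaces, `IOFormat` is just their intersection, `Binary(obj, format)` replaces the removed `toBinary`, and the `NAME_KEY`/`META_KEY` constants move to `IOFormatFactory`. A round-trip sketch using the new helpers; the `StringFormat` object is illustrative and mirrors the `StringIOFormat` test object further down in this commit:

```kotlin
import io.ktor.utils.io.core.Input
import io.ktor.utils.io.core.Output
import space.kscience.dataforge.io.*

// Illustrative format: only readObject and writeObject are required now;
// the old type property, toMeta() and companion keys no longer exist on IOFormat.
object StringFormat : IOFormat<String> {
    override fun writeObject(output: Output, obj: String) {
        output.writeUtf8String(obj)
    }

    override fun readObject(input: Input): String = input.readUtf8String()
}

fun roundTrip(text: String): String {
    val binary = Binary(text, StringFormat) // replaces StringFormat.toBinary(text)
    return binary.readWith(StringFormat)    // readWith now accepts any IOReader
}
```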
@@ -2,8 +2,6 @@ package space.kscience.dataforge.io

 import space.kscience.dataforge.context.*
 import space.kscience.dataforge.io.EnvelopeFormatFactory.Companion.ENVELOPE_FORMAT_TYPE
-import space.kscience.dataforge.io.IOFormat.Companion.META_KEY
-import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
 import space.kscience.dataforge.io.IOFormatFactory.Companion.IO_FORMAT_TYPE
 import space.kscience.dataforge.io.MetaFormatFactory.Companion.META_FORMAT_TYPE
 import space.kscience.dataforge.meta.Meta
@@ -20,12 +18,12 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
     }

     public fun <T : Any> resolveIOFormat(item: Meta, type: KClass<out T>): IOFormat<T>? {
-        val key = item.string ?: item[NAME_KEY]?.string ?: error("Format name not defined")
+        val key = item.string ?: item[IOFormatFactory.NAME_KEY]?.string ?: error("Format name not defined")
         val name = Name.parse(key)
         return ioFormatFactories.find { it.name == name }?.let {
             @Suppress("UNCHECKED_CAST")
             if (it.type != type) error("Format type ${it.type} is not the same as requested type $type")
-            else it.build(context, item[META_KEY] ?: Meta.EMPTY) as IOFormat<T>
+            else it.build(context, item[IOFormatFactory.META_KEY] ?: Meta.EMPTY) as IOFormat<T>
         }
     }

@@ -47,8 +45,8 @@ public class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
         envelopeFormatFactories.find { it.name == name }?.build(context, meta)

     public fun resolveEnvelopeFormat(item: Meta): EnvelopeFormat? {
-        val name = item.string ?: item[NAME_KEY]?.string ?: error("Envelope format name not defined")
-        val meta = item[META_KEY] ?: Meta.EMPTY
+        val name = item.string ?: item[IOFormatFactory.NAME_KEY]?.string ?: error("Envelope format name not defined")
+        val meta = item[IOFormatFactory.META_KEY] ?: Meta.EMPTY
         return resolveEnvelopeFormat(Name.parse(name), meta)
     }

@@ -8,30 +8,21 @@ import io.ktor.utils.io.core.Output
 import kotlinx.serialization.json.Json
 import kotlinx.serialization.json.JsonObject
 import space.kscience.dataforge.context.Context
-import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.descriptors.MetaDescriptor
 import space.kscience.dataforge.meta.toJson
 import space.kscience.dataforge.meta.toMeta
-import kotlin.reflect.KType
-import kotlin.reflect.typeOf

 /**
  * A Json format for Meta representation
  */
 public class JsonMetaFormat(private val json: Json = DEFAULT_JSON) : MetaFormat {

-    override val type: KType get() = typeOf<Meta>()
-
     override fun writeMeta(output: Output, meta: Meta, descriptor: MetaDescriptor?) {
         val jsonObject = meta.toJson(descriptor)
         output.writeUtf8String(json.encodeToString(JsonObject.serializer(), jsonObject))
     }

-    override fun toMeta(): Meta = Meta {
-        NAME_KEY put name.toString()
-    }
-
     override fun readMeta(input: Input, descriptor: MetaDescriptor?): Meta {
         val str = input.readUtf8String()//readByteArray().decodeToString()
         val jsonElement = json.parseToJsonElement(str)
@@ -20,7 +20,6 @@ import kotlin.reflect.typeOf
  * A format for meta serialization
  */
 public interface MetaFormat : IOFormat<Meta> {
-    override val type: KType get() = typeOf<Meta>()

     override fun writeObject(output: Output, obj: Meta) {
         writeMeta(output, obj, null)
@@ -3,8 +3,6 @@ package space.kscience.dataforge.io
 import io.ktor.utils.io.core.*
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.Global
-import space.kscience.dataforge.io.IOFormat.Companion.META_KEY
-import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.enum
 import space.kscience.dataforge.meta.get
@@ -49,7 +47,7 @@ public class TaggedEnvelopeFormat(
         formatMeta: Meta,
     ) {
         val metaFormat = metaFormatFactory.build(this@TaggedEnvelopeFormat.io.context, formatMeta)
-        val metaBytes = metaFormat.toBinary(envelope.meta)
+        val metaBytes = Binary(envelope.meta, metaFormat)
         val actualSize: ULong = (envelope.data?.size ?: 0).toULong()
         val tag = Tag(metaFormatFactory.key, metaBytes.size.toUInt() + 2u, actualSize)
         output.writeBinary(tag.toBinary())
@@ -74,7 +72,7 @@ public class TaggedEnvelopeFormat(

         val metaBinary = input.readBinary(tag.metaSize.toInt())

-        val meta: Meta = metaFormat.readObject(metaBinary)
+        val meta: Meta = metaFormat.readObjectFrom(metaBinary)

         val data = input.readBinary(tag.dataSize.toInt())

@@ -89,7 +87,7 @@ public class TaggedEnvelopeFormat(

         val metaBinary = input.readBinary(tag.metaSize.toInt())

-        val meta: Meta = metaFormat.readObject(metaBinary)
+        val meta: Meta = metaFormat.readObjectFrom(metaBinary)


         return PartialEnvelope(meta, (version.tagSize + tag.metaSize).toInt(), tag.dataSize)
@@ -106,13 +104,6 @@ public class TaggedEnvelopeFormat(
         DF03(24u)
     }

-    override fun toMeta(): Meta = Meta {
-        NAME_KEY put name.toString()
-        META_KEY put {
-            "version" put version
-        }
-    }
-
     public companion object : EnvelopeFormatFactory {
         private const val START_SEQUENCE = "#~"
         private const val END_SEQUENCE = "~#\r\n"
@@ -3,8 +3,6 @@ package space.kscience.dataforge.io
 import io.ktor.utils.io.core.*
 import space.kscience.dataforge.context.Context
 import space.kscience.dataforge.context.Global
-import space.kscience.dataforge.io.IOFormat.Companion.META_KEY
-import space.kscience.dataforge.io.IOFormat.Companion.NAME_KEY
 import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.meta.get
 import space.kscience.dataforge.meta.isEmpty
@@ -50,11 +48,11 @@ public class TaglessEnvelopeFormat(

         //Printing meta
         if (!envelope.meta.isEmpty()) {
-            val metaBytes = metaFormat.toBinary(envelope.meta)
+            val metaBinary = Binary(envelope.meta, metaFormat)
             output.writeProperty(META_LENGTH_PROPERTY,
-                metaBytes.size + 2)
+                metaBinary.size + 2)
             output.writeUtf8String(this.metaStart + "\r\n")
-            output.writeBinary(metaBytes)
+            output.writeBinary(metaBinary)
             output.writeRawString("\r\n")
         }

@@ -102,7 +100,7 @@ public class TaglessEnvelopeFormat(
         val metaFormat = properties[META_TYPE_PROPERTY]?.let { io.resolveMetaFormat(it) } ?: JsonMetaFormat
         val metaSize = properties[META_LENGTH_PROPERTY]?.toInt()
         meta = if (metaSize != null) {
-            metaFormat.readObject(input.readBinary(metaSize))
+            metaFormat.readObjectFrom(input.readBinary(metaSize))
         } else {
             error("Can't partially read an envelope with undefined meta size")
         }
@@ -170,7 +168,7 @@ public class TaglessEnvelopeFormat(
         val metaSize = properties[META_LENGTH_PROPERTY]?.toInt()
         meta = if (metaSize != null) {
             offset += metaSize
-            metaFormat.readObject(input.readBinary(metaSize))
+            metaFormat.readObjectFrom(input.readBinary(metaSize))
         } else {
             error("Can't partially read an envelope with undefined meta size")
         }
@@ -187,11 +185,6 @@ public class TaglessEnvelopeFormat(
         return PartialEnvelope(meta, offset, dataSize)
     }

-    override fun toMeta(): Meta = Meta {
-        NAME_KEY put name.toString()
-        META_KEY put meta
-    }
-
     public companion object : EnvelopeFormatFactory {

         private val propertyPattern = "#\\?\\s*([\\w.]*)\\s*:\\s*([^;]*);?".toRegex()
@@ -183,7 +183,7 @@ public fun Input.discardWithSeparator(
     atMost: Int = Int.MAX_VALUE,
     skipUntilEndOfLine: Boolean = false,
 ): Int {
-    val dummy: Output = object :Output(ChunkBuffer.Pool){
+    val dummy: Output = object : Output(ChunkBuffer.Pool) {
         override fun closeDestination() {
             // Do nothing
         }
@@ -32,18 +32,17 @@ class MultipartTest {

     @Test
     fun testParts() {
-        TaglessEnvelopeFormat.run {
-            val singleEnvelopeData = toBinary(envelopes[0])
+        val format = TaglessEnvelopeFormat
+        val singleEnvelopeData = Binary(envelopes[0], format)
         val singleEnvelopeSize = singleEnvelopeData.size
-            val bytes = toBinary(partsEnvelope)
+        val bytes = Binary(partsEnvelope, format)
         assertTrue(envelopes.size * singleEnvelopeSize < bytes.size)
-            val reconstructed = bytes.readWith(this)
+        val reconstructed = bytes.readWith(format)
         println(reconstructed.meta)
         val parts = reconstructed.parts()
         val envelope = parts[2].envelope(io)
         assertEquals(2, envelope.meta["value"].int)
         println(reconstructed.data!!.size)
-        }
     }

 }
@@ -97,7 +97,36 @@ public val IOPlugin.Companion.DATA_FILE_NAME: String get() = "@data"

 /**
  * Read file containing meta using given [formatOverride] or file extension to infer meta type.
- * If [path] is a directory search for file starting with `meta` in it
+ * If [path] is a directory search for file starting with `meta` in it.
+ *
+ * Returns null if meta could not be resolved
+ */
+public fun IOPlugin.readMetaFileOrNull(
+    path: Path,
+    formatOverride: MetaFormat? = null,
+    descriptor: MetaDescriptor? = null,
+): Meta? {
+    if (!Files.exists(path)) return null
+
+    val actualPath: Path = if (Files.isDirectory(path)) {
+        Files.list(path).asSequence().find { it.fileName.startsWith(IOPlugin.META_FILE_NAME) }
+            ?: return null
+    } else {
+        path
+    }
+    val extension = actualPath.fileName.toString().substringAfterLast('.')
+
+    val metaFormat = formatOverride ?: resolveMetaFormat(extension) ?: return null
+    return actualPath.read {
+        metaFormat.readMeta(this, descriptor)
+    }
+}
+
+/**
+ * Read file containing meta using given [formatOverride] or file extension to infer meta type.
+ * If [path] is a directory search for file starting with `meta` in it.
+ *
+ * Fails if nothing works.
  */
 public fun IOPlugin.readMetaFile(
     path: Path,
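
A short usage sketch of the new nullable variant (the `tryLoadMeta` helper and the file name are illustrative; import paths are assumed from the surrounding dataforge-io sources):

```kotlin
import space.kscience.dataforge.io.IOPlugin
import space.kscience.dataforge.io.readMetaFileOrNull
import space.kscience.dataforge.meta.Meta
import java.nio.file.Path

// Returns null instead of throwing when the file is missing or no MetaFormat
// can be resolved for its extension, unlike readMetaFile below.
fun IOPlugin.tryLoadMeta(workDir: Path): Meta? =
    readMetaFileOrNull(workDir.resolve("meta.json"))
```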
@@ -120,6 +149,7 @@ public fun IOPlugin.readMetaFile(
     }
 }

+
 /**
  * Write meta to file using [metaFormat]. If [path] is a directory, write a file with name equals name of [metaFormat].
  * Like "meta.json"
@@ -196,22 +226,11 @@ public fun IOPlugin.readEnvelopeFile(
         return SimpleEnvelope(meta, data)
     }

-    return formatPicker(path)?.let { format ->
-        format.readFile(path)
-    } ?: if (readNonEnvelopes) { // if no format accepts file, read it as binary
+    return formatPicker(path)?.readFile(path) ?: if (readNonEnvelopes) { // if no format accepts file, read it as binary
         SimpleEnvelope(Meta.EMPTY, path.asBinary())
     } else error("Can't infer format for file $path")
 }

-/**
- * Write a binary into file. Throws an error if file already exists
- */
-public fun <T : Any> IOFormat<T>.writeToFile(path: Path, obj: T) {
-    path.write {
-        writeObject(this, obj)
-    }
-}
-
 /**
  * Write envelope file to given [path] using [envelopeFormat] and optional [metaFormat]
  */
@@ -4,19 +4,18 @@ import space.kscience.dataforge.data.Data
 import space.kscience.dataforge.data.await
 import space.kscience.dataforge.io.*
 import space.kscience.dataforge.misc.DFInternal
+import kotlin.reflect.typeOf

 /**
  * Convert an [Envelope] to a data via given format. The actual parsing is done lazily.
  */
 @OptIn(DFInternal::class)
-public fun <T : Any> Envelope.toData(format: IOFormat<T>): Data<T> {
-    return Data(format.type, meta) {
-        data?.readWith(format) ?: error("Can't convert envelope without data to Data")
-    }
+public inline fun <reified T : Any> Envelope.toData(format: IOReader<T>): Data<T> = Data(typeOf<T>(), meta) {
+    data?.readWith(format) ?: error("Can't convert envelope without data to Data")
 }

-public suspend fun <T : Any> Data<T>.toEnvelope(format: IOFormat<T>): Envelope {
+public suspend fun <T : Any> Data<T>.toEnvelope(format: IOWriter<T>): Envelope {
     val obj = await()
-    val binary = format.toBinary(obj)
+    val binary = Binary(obj, format)
     return SimpleEnvelope(meta, binary)
 }
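
Because `toData` now accepts any `IOReader` and `toEnvelope` any `IOWriter`, a full `IOFormat` still works for both directions. A hedged sketch of a lazy round trip, assuming it sits in the same module as these extensions:

```kotlin
import space.kscience.dataforge.data.Data
import space.kscience.dataforge.io.Envelope
import space.kscience.dataforge.io.IOFormat

// Sketch: serialize a Data value into an envelope, then expose the envelope
// back as Data; the parsing on the way back stays lazy.
suspend fun roundTrip(data: Data<String>, format: IOFormat<String>): Data<String> {
    val envelope: Envelope = data.toEnvelope(format) // IOWriter side
    return envelope.toData(format)                   // IOReader side, reified to String
}
```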
@@ -2,11 +2,12 @@ package space.kscience.dataforge.workspace

 import io.ktor.utils.io.streams.asOutput
 import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.runBlocking
 import kotlinx.coroutines.withContext
 import space.kscience.dataforge.data.*
 import space.kscience.dataforge.io.*
-import space.kscience.dataforge.meta.*
+import space.kscience.dataforge.meta.Meta
+import space.kscience.dataforge.meta.get
+import space.kscience.dataforge.meta.string
 import space.kscience.dataforge.misc.DFExperimental
 import java.nio.file.FileSystem
 import java.nio.file.Files
@@ -15,28 +16,13 @@ import java.nio.file.StandardOpenOption
 import java.nio.file.spi.FileSystemProvider
 import java.util.zip.ZipEntry
 import java.util.zip.ZipOutputStream
-import kotlin.reflect.KType
-import kotlin.reflect.typeOf
 import kotlin.streams.toList

 //public typealias FileFormatResolver<T> = (Path, Meta) -> IOFormat<T>

-public interface FileFormatResolver<T : Any> {
-    public val type: KType
-    public operator fun invoke(path: Path, meta: Meta): IOFormat<T>
-}
-
-
-@PublishedApi
-internal inline fun <reified T : Any> IOPlugin.formatResolver(): FileFormatResolver<T> =
-    object : FileFormatResolver<T> {
-        override val type: KType = typeOf<T>()
-
-        @OptIn(DFExperimental::class)
-        override fun invoke(path: Path, meta: Meta): IOFormat<T> =
-            resolveIOFormat<T>() ?: error("Can't resolve IO format for ${T::class}")
-    }
+public typealias FileFormatResolver<T> = (path: Path, meta: Meta) -> IOReader<T>

 private fun newZFS(path: Path): FileSystem {
     val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
         ?: error("Zip file system provider not found")
@@ -46,14 +32,9 @@ private fun newZFS(path: Path): FileSystem {
 /**
  * Read data with supported envelope format and binary format. If envelope format is null, then read binary directly from file.
  * The operation is blocking since it must read meta header. The reading of envelope body is lazy
- * @param type explicit type of data read
- * @param dataFormat binary format
- * @param envelopeFormat the format of envelope. If null, file is read directly
- * @param metaFile the relative file for optional meta override
- * @param metaFileFormat the meta format for override
  */
 @DFExperimental
-public fun <T : Any> IOPlugin.readDataFile(
+public inline fun <reified T : Any> IOPlugin.readDataFile(
     path: Path,
     formatResolver: FileFormatResolver<T>,
 ): Data<T> {
@@ -62,34 +43,26 @@ public fun <T : Any> IOPlugin.readDataFile(
     return envelope.toData(format)
 }

-@DFExperimental
-public inline fun <reified T : Any> IOPlugin.readDataFile(path: Path): Data<T> = readDataFile(path, formatResolver())
-
 /**
  * Add file/directory-based data tree item
  */
-@DFExperimental
-public suspend fun <T : Any> DataSetBuilder<T>.file(
-    plugin: IOPlugin,
+context(IOPlugin) @DFExperimental
+public fun DataSetBuilder<Any>.file(
     path: Path,
-    formatResolver: FileFormatResolver<T>,
+    formatResolver: FileFormatResolver<Any>,
 ) {
     //If path is a single file or a special directory, read it as single datum
     if (!Files.isDirectory(path) || Files.list(path).allMatch { it.fileName.toString().startsWith("@") }) {
-        plugin.run {
-            val data = readDataFile(path, formatResolver)
-            val name = data.meta[Envelope.ENVELOPE_NAME_KEY].string
-                ?: path.fileName.toString().replace(".df", "")
-            data(name, data)
-        }
+        val data = readDataFile(path, formatResolver)
+        val name = data.meta[Envelope.ENVELOPE_NAME_KEY].string
+            ?: path.fileName.toString().replace(".df", "")
+        data(name, data)
     } else {
         //otherwise, read as directory
-        plugin.run {
-            val data = readDataDirectory(path, formatResolver)
-            val name = data.meta[Envelope.ENVELOPE_NAME_KEY].string
-                ?: path.fileName.toString().replace(".df", "")
-            node(name, data)
-        }
+        val data = readDataDirectory(path, formatResolver)
+        val name = data.meta[Envelope.ENVELOPE_NAME_KEY].string
+            ?: path.fileName.toString().replace(".df", "")
+        node(name, data)
     }
 }

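
`file` now receives the `IOPlugin` as a context receiver instead of an explicit `plugin` argument, so call sites bring the plugin into scope, for example with `with(io)`. A hedged sketch of a call site, assuming it lives in the dataforge-workspace sources so `file` and `FileFormatResolver` resolve without extra imports (the `addFile` name is illustrative):

```kotlin
import space.kscience.dataforge.data.DataSetBuilder
import space.kscience.dataforge.io.IOPlugin
import space.kscience.dataforge.misc.DFExperimental
import java.nio.file.Path

// Illustrative: add a single file to a data set being built, supplying the
// IOPlugin through with(...) to satisfy the context(IOPlugin) requirement.
@OptIn(DFExperimental::class)
fun DataSetBuilder<Any>.addFile(io: IOPlugin, path: Path, resolver: FileFormatResolver<Any>) {
    with(io) {
        file(path, resolver)
    }
}
```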
@@ -97,10 +70,10 @@ public suspend fun <T : Any> DataSetBuilder<T>.file(
  * Read the directory as a data node. If [path] is a zip archive, read it as directory
  */
 @DFExperimental
-public suspend fun <T : Any> IOPlugin.readDataDirectory(
+public fun IOPlugin.readDataDirectory(
     path: Path,
-    formatResolver: FileFormatResolver<T>,
-): DataTree<T> {
+    formatResolver: FileFormatResolver<Any>,
+): DataTree<Any> {
     //read zipped data node
     if (path.fileName != null && path.fileName.toString().endsWith(".zip")) {
         //Using explicit Zip file system to avoid bizarre compatibility bugs
@@ -108,24 +81,18 @@ public suspend fun <T : Any> IOPlugin.readDataDirectory(
         return readDataDirectory(fs.rootDirectories.first(), formatResolver)
     }
     if (!Files.isDirectory(path)) error("Provided path $path is not a directory")
-    return DataTree(formatResolver.type) {
+    return DataTree<Any> {
         Files.list(path).toList().forEach { path ->
             val fileName = path.fileName.toString()
             if (fileName.startsWith(IOPlugin.META_FILE_NAME)) {
                 meta(readMetaFile(path))
             } else if (!fileName.startsWith("@")) {
-                runBlocking {
-                    file(this@readDataDirectory, path, formatResolver)
-                }
+                file(path, formatResolver)
             }
         }
     }
 }

-@DFExperimental
-public suspend inline fun <reified T : Any> IOPlugin.readDataDirectory(path: Path): DataTree<T> =
-    readDataDirectory(path, formatResolver())
-
 /**
  * Write data tree to existing directory or create a new one using default [java.nio.file.FileSystem] provider
  */
@@ -133,7 +100,7 @@ public suspend inline fun <reified T : Any> IOPlugin.readDataDirectory(path: Pat
 public suspend fun <T : Any> IOPlugin.writeDataDirectory(
     path: Path,
     tree: DataTree<T>,
-    format: IOFormat<T>,
+    format: IOWriter<T>,
     envelopeFormat: EnvelopeFormat? = null,
     metaFormat: MetaFormatFactory? = null,
 ) {
@@ -179,11 +146,9 @@ private suspend fun <T : Any> ZipOutputStream.writeNode(
             val envelope = treeItem.data.toEnvelope(dataFormat)
             val entry = ZipEntry(name)
             putNextEntry(entry)
-            envelopeFormat.run {
-                asOutput().run {
-                    writeEnvelope(this, envelope)
-                    flush()
-                }
+            asOutput().run {
+                envelopeFormat.writeEnvelope(this, envelope)
+                flush()
             }
         }
         is DataTreeItem.Node -> {
@@ -9,12 +9,8 @@ import space.kscience.dataforge.io.IOFormat
 import space.kscience.dataforge.io.io
 import space.kscience.dataforge.io.readUtf8String
 import space.kscience.dataforge.io.writeUtf8String
-import space.kscience.dataforge.meta.Meta
 import space.kscience.dataforge.misc.DFExperimental
 import java.nio.file.Files
-import java.nio.file.Path
-import kotlin.reflect.KType
-import kotlin.reflect.typeOf
 import kotlin.test.Test
 import kotlin.test.assertEquals

@@ -35,8 +31,6 @@ class FileDataTest {

     object StringIOFormat : IOFormat<String> {

-        override val type: KType = typeOf<String>()
-
         override fun writeObject(output: Output, obj: String) {
             output.writeUtf8String(obj)
         }
@@ -45,17 +39,6 @@ class FileDataTest {
             return input.readUtf8String()
         }
-
-        override fun toMeta(): Meta = Meta {
-            IOFormat.NAME_KEY put "string"
-        }
-
-    }
-
-    object StringFormatResolver : FileFormatResolver<String> {
-        override val type: KType = typeOf<String>()
-
-        override fun invoke(path: Path, meta: Meta): IOFormat<String> = StringIOFormat
     }

     @Test
@@ -66,7 +49,7 @@ class FileDataTest {
         runBlocking {
             writeDataDirectory(dir, dataNode, StringIOFormat)
             println(dir.toUri().toString())
-            val reconstructed = readDataDirectory(dir, StringFormatResolver)
+            val reconstructed = readDataDirectory(dir) { _, _ -> StringIOFormat }
             assertEquals(dataNode["dir.a"]?.meta, reconstructed["dir.a"]?.meta)
             assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
         }
@@ -82,7 +65,7 @@ class FileDataTest {
         runBlocking {
             writeZip(zip, dataNode, StringIOFormat)
             println(zip.toUri().toString())
-            val reconstructed = readDataDirectory(zip, StringFormatResolver)
+            val reconstructed = readDataDirectory(zip) { _, _ -> StringIOFormat }
             assertEquals(dataNode["dir.a"]?.meta, reconstructed["dir.a"]?.meta)
             assertEquals(dataNode["b"]?.await(), reconstructed["b"]?.await())
         }
@@ -4,4 +4,4 @@ kotlin.code.style=official

 kotlin.mpp.stability.nowarn=true

-toolsVersion=0.11.4-kotlin-1.6.20
+toolsVersion=0.11.5-kotlin-1.6.21