Merge remote-tracking branch 'remotes/origin/dev' into dev-zelenyy-2

Mikhail Zelenyy 2019-11-19 18:05:34 +03:00
commit 0cf060e93d
31 changed files with 483 additions and 157 deletions

.github/workflows/gradle.yml vendored Normal file

@@ -0,0 +1,17 @@
+name: Gradle build
+on: [push]
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v1
+      - name: Set up JDK 11
+        uses: actions/setup-java@v1
+        with:
+          java-version: 11
+      - name: Build with Gradle
+        run: ./gradlew build


@@ -1,5 +1,5 @@
+[![DOI](https://zenodo.org/badge/148831678.svg)](https://zenodo.org/badge/latestdoi/148831678)
 # Questions and Answers #


@@ -1,12 +1,12 @@
 import scientifik.ScientifikExtension

 plugins {
-    id("scientifik.mpp") version "0.2.2" apply false
-    id("scientifik.jvm") version "0.2.2" apply false
-    id("scientifik.publish") version "0.2.2" apply false
+    id("scientifik.mpp") version "0.2.4" apply false
+    id("scientifik.jvm") version "0.2.4" apply false
+    id("scientifik.publish") version "0.2.4" apply false
 }

-val dataforgeVersion by extra("0.1.5-dev-2")
+val dataforgeVersion by extra("0.1.5-dev-3")
 val bintrayRepo by extra("dataforge")
 val githubProject by extra("dataforge-core")


@@ -15,20 +15,20 @@ sealed class DataItem<out T : Any> : MetaRepr {
     abstract val meta: Meta

-    class Node<out T : Any>(val value: DataNode<T>) : DataItem<T>() {
-        override val type: KClass<out T> get() = value.type
-        override fun toMeta(): Meta = value.toMeta()
-        override val meta: Meta get() = value.meta
+    class Node<out T : Any>(val node: DataNode<T>) : DataItem<T>() {
+        override val type: KClass<out T> get() = node.type
+        override fun toMeta(): Meta = node.toMeta()
+        override val meta: Meta get() = node.meta
     }

-    class Leaf<out T : Any>(val value: Data<T>) : DataItem<T>() {
-        override val type: KClass<out T> get() = value.type
-        override fun toMeta(): Meta = value.toMeta()
-        override val meta: Meta get() = value.meta
+    class Leaf<out T : Any>(val data: Data<T>) : DataItem<T>() {
+        override val type: KClass<out T> get() = data.type
+        override fun toMeta(): Meta = data.toMeta()
+        override val meta: Meta get() = data.meta
     }
 }
@@ -68,8 +68,8 @@ interface DataNode<out T : Any> : MetaRepr {
     }
 }

-val <T : Any> DataItem<T>?.node: DataNode<T>? get() = (this as? DataItem.Node<T>)?.value
-val <T : Any> DataItem<T>?.data: Data<T>? get() = (this as? DataItem.Leaf<T>)?.value
+val <T : Any> DataItem<T>?.node: DataNode<T>? get() = (this as? DataItem.Node<T>)?.node
+val <T : Any> DataItem<T>?.data: Data<T>? get() = (this as? DataItem.Leaf<T>)?.data

 /**
  * Start computation for all goals in data node and return a job for the whole node
@@ -77,8 +77,8 @@
 fun DataNode<*>.launchAll(scope: CoroutineScope): Job = scope.launch {
     items.values.forEach {
         when (it) {
-            is DataItem.Node<*> -> it.value.launchAll(scope)
-            is DataItem.Leaf<*> -> it.value.start(scope)
+            is DataItem.Node<*> -> it.node.launchAll(scope)
+            is DataItem.Leaf<*> -> it.data.start(scope)
         }
     }
 }
@@ -98,7 +98,7 @@
     items.forEach { (head, item) ->
         yield(head.asName() to item)
         if (item is DataItem.Node) {
-            val subSequence = item.value.asSequence()
+            val subSequence = item.node.asSequence()
                 .map { (name, data) -> (head.asName() + name) to data }
             yieldAll(subSequence)
         }
@@ -111,9 +111,9 @@
 fun <T : Any> DataNode<T>.dataSequence(): Sequence<Pair<Name, Data<T>>> = sequence {
     items.forEach { (head, item) ->
         when (item) {
-            is DataItem.Leaf -> yield(head.asName() to item.value)
+            is DataItem.Leaf -> yield(head.asName() to item.data)
             is DataItem.Node -> {
-                val subSequence = item.value.dataSequence()
+                val subSequence = item.node.dataSequence()
                     .map { (name, data) -> (head.asName() + name) to data }
                 yieldAll(subSequence)
             }
@@ -188,8 +188,8 @@ class DataTreeBuilder<T : Any>(val type: KClass<out T>) {
     operator fun set(name: Name, node: DataNode<T>) = set(name, node.builder())

     operator fun set(name: Name, item: DataItem<T>) = when (item) {
-        is DataItem.Node<T> -> set(name, item.value.builder())
-        is DataItem.Leaf<T> -> set(name, item.value)
+        is DataItem.Node<T> -> set(name, item.node.builder())
+        is DataItem.Leaf<T> -> set(name, item.data)
     }

     /**
@@ -223,6 +223,10 @@ class DataTreeBuilder<T : Any>(val type: KClass<out T>) {
     fun meta(block: MetaBuilder.() -> Unit) = meta.apply(block)

+    fun meta(meta: Meta) {
+        this.meta = meta.builder()
+    }
+
     fun build(): DataTree<T> {
         val resMap = map.mapValues { (_, value) ->
             when (value) {
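A minimal sketch of what call sites look like after this rename (not part of the commit; the describe function is illustrative): DataItem.Node now exposes the wrapped DataNode as node, and DataItem.Leaf exposes the wrapped Data as data.

fun describe(item: DataItem<*>?): String = when (item) {
    null -> "missing"
    // the wrapped DataNode is now reached through `node`
    is DataItem.Node -> "node with ${item.node.items.size} children"
    // the wrapped Data is now reached through `data`
    is DataItem.Leaf -> "leaf of type ${item.data.type}"
}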


@@ -28,8 +28,8 @@ expect fun <R : Any> DataNode<*>.canCast(type: KClass<out R>): Boolean
 expect fun <R : Any> Data<*>.canCast(type: KClass<out R>): Boolean

 fun <R : Any> DataItem<*>.canCast(type: KClass<out R>): Boolean = when (this) {
-    is DataItem.Node -> value.canCast(type)
-    is DataItem.Leaf -> value.canCast(type)
+    is DataItem.Node -> node.canCast(type)
+    is DataItem.Leaf -> data.canCast(type)
 }

 /**


@@ -14,12 +14,12 @@
         origin.items.mapNotNull { (key, item) ->
             when (item) {
                 is DataItem.Leaf -> {
-                    (item.value.filterIsInstance(type))?.let {
+                    (item.data.filterIsInstance(type))?.let {
                         key to DataItem.Leaf(it)
                     }
                 }
                 is DataItem.Node -> {
-                    key to DataItem.Node(item.value.filterIsInstance(type))
+                    key to DataItem.Node(item.node.filterIsInstance(type))
                 }
             }
         }.associate { it }


@@ -42,8 +42,8 @@ fun <R : Any> DataNode<*>.filterIsInstance(type: KClass<out R>): DataNode<R> {
  */
 fun <R : Any> DataItem<*>?.filterIsInstance(type: KClass<out R>): DataItem<R>? = when (this) {
     null -> null
-    is DataItem.Node -> DataItem.Node(this.value.filterIsInstance(type))
-    is DataItem.Leaf -> this.value.filterIsInstance(type)?.let { DataItem.Leaf(it) }
+    is DataItem.Node -> DataItem.Node(this.node.filterIsInstance(type))
+    is DataItem.Leaf -> this.data.filterIsInstance(type)?.let { DataItem.Leaf(it) }
 }

 inline fun <reified R : Any> DataItem<*>?.filterIsInstance(): DataItem<R>? = this@filterIsInstance.filterIsInstance(R::class)


@@ -7,6 +7,4 @@ description = "YAML meta IO"
 dependencies {
     api(project(":dataforge-io"))
     api("org.yaml:snakeyaml:1.25")
-    testImplementation(kotlin("test"))
-    testImplementation(kotlin("test-junit"))
 }


@@ -6,7 +6,7 @@ import hep.dataforge.meta.Meta
 import hep.dataforge.meta.buildMeta
 import hep.dataforge.meta.get
 import hep.dataforge.meta.seal
-import org.junit.Test
+import org.junit.jupiter.api.Test
 import kotlin.test.assertEquals


@@ -10,7 +10,7 @@ interface Binary {
     /**
      * The size of binary in bytes. [ULong.MAX_VALUE] if size is not defined and input should be read until its end is reached
      */
-    val size: ULong
+    val size: ULong get() = ULong.MAX_VALUE

     /**
      * Read continuous [Input] from this binary stating from the beginning.
@@ -41,7 +41,11 @@ interface RandomAccessBinary : Binary {
 }

 fun Binary.toBytes(): ByteArray = read {
-    this.readBytes()
+    readBytes()
+}
+
+fun Binary.contentToString(): String = read {
+    readText()
 }

 @ExperimentalUnsignedTypes
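With the new default in place, an ad-hoc Binary only has to supply read(); size falls back to ULong.MAX_VALUE and contentToString() reads the payload back as text. A minimal sketch (not part of the commit; stringBinary is illustrative and follows the same buildPacket pattern as the toEnvelope helper added later in this changeset, using kotlinx.io.core.Input, buildPacket and writeText):

fun stringBinary(text: String): Binary = object : Binary {
    // only read() is implemented; size uses the new ULong.MAX_VALUE default
    override fun <R> read(block: Input.() -> R): R {
        val packet = buildPacket { writeText(text) }
        return packet.block()
    }
}

// stringBinary("hello").contentToString() would yield "hello"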


@@ -83,4 +83,3 @@ fun Envelope.withMetaLayers(vararg layers: Meta): Envelope {
         else -> ProxyEnvelope(this, *layers)
     }
 }


@@ -23,11 +23,15 @@ interface EnvelopeFormat : IOFormat<Envelope> {
     fun Input.readPartial(): PartialEnvelope

-    fun Output.writeEnvelope(envelope: Envelope, metaFormatFactory: MetaFormatFactory, formatMeta: Meta = EmptyMeta)
+    fun Output.writeEnvelope(
+        envelope: Envelope,
+        metaFormatFactory: MetaFormatFactory = defaultMetaFormat,
+        formatMeta: Meta = EmptyMeta
+    )

     override fun Input.readObject(): Envelope

-    override fun Output.writeObject(obj: Envelope): Unit = writeEnvelope(obj, defaultMetaFormat)
+    override fun Output.writeObject(obj: Envelope): Unit = writeEnvelope(obj)
 }

 @Type(ENVELOPE_FORMAT_TYPE)


@@ -3,6 +3,7 @@ package hep.dataforge.io
 import hep.dataforge.context.Global
 import hep.dataforge.io.EnvelopeParts.FORMAT_META_KEY
 import hep.dataforge.io.EnvelopeParts.FORMAT_NAME_KEY
+import hep.dataforge.io.EnvelopeParts.INDEX_KEY
 import hep.dataforge.io.EnvelopeParts.MULTIPART_DATA_TYPE
 import hep.dataforge.io.EnvelopeParts.SIZE_KEY
 import hep.dataforge.meta.*
@@ -13,6 +14,7 @@ import hep.dataforge.names.toName
 object EnvelopeParts {
     val MULTIPART_KEY = "multipart".asName()
     val SIZE_KEY = Envelope.ENVELOPE_NODE_KEY + MULTIPART_KEY + "size"
+    val INDEX_KEY = Envelope.ENVELOPE_NODE_KEY + MULTIPART_KEY + "index"
     val FORMAT_NAME_KEY = Envelope.ENVELOPE_NODE_KEY + MULTIPART_KEY + "format"
     val FORMAT_META_KEY = Envelope.ENVELOPE_NODE_KEY + MULTIPART_KEY + "meta"
@@ -37,8 +39,37 @@
     }
 }

-fun EnvelopeBuilder.multipart(formatFactory: EnvelopeFormatFactory, builder: suspend SequenceScope<Envelope>.() -> Unit) =
-    multipart(formatFactory, sequence(builder).toList())
+/**
+ * Create a multipart partition in the envelope adding additional name-index mapping in meta
+ */
+@DFExperimental
+fun EnvelopeBuilder.multipart(format: EnvelopeFormatFactory, envelopes: Map<String, Envelope>) {
+    dataType = MULTIPART_DATA_TYPE
+    meta {
+        SIZE_KEY put envelopes.size
+        FORMAT_NAME_KEY put format.name.toString()
+    }
+    data {
+        format.run {
+            var counter = 0
+            envelopes.forEach { (key, envelope) ->
+                writeObject(envelope)
+                meta {
+                    append(INDEX_KEY, buildMeta {
+                        "key" put key
+                        "index" put counter
+                    })
+                }
+                counter++
+            }
+        }
+    }
+}
+
+fun EnvelopeBuilder.multipart(
+    formatFactory: EnvelopeFormatFactory,
+    builder: suspend SequenceScope<Envelope>.() -> Unit
+) = multipart(formatFactory, sequence(builder).toList())

 /**
  * If given envelope supports multipart data, return a sequence of those parts (could be empty). Otherwise return null.
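A sketch of how the new map-based overload might be called (not part of the commit; the namedParts helper and part names are illustrative, and the choice of TaggedEnvelopeFormat as the factory is an assumption). Each map entry becomes one part, with its key and running index recorded under INDEX_KEY in the envelope meta; callers opt in to @DFExperimental.

@DFExperimental
fun EnvelopeBuilder.namedParts(first: Envelope, second: Envelope) {
    // the keys become the name component of the name-index mapping
    multipart(TaggedEnvelopeFormat, mapOf("first" to first, "second" to second))
}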


@@ -79,8 +79,10 @@
 }

 fun <T : Any> IOFormat<T>.writePacket(obj: T): ByteReadPacket = buildPacket { writeObject(obj) }

+//TODO Double buffer copy. fix all that with IO-2
+@Deprecated("Not to be used outside tests due to double buffer write")
 fun <T : Any> IOFormat<T>.writeBytes(obj: T): ByteArray = buildPacket { writeObject(obj) }.readBytes()

+@Deprecated("Not to be used outside tests due to double buffer write")
 fun <T : Any> IOFormat<T>.readBytes(array: ByteArray): T = buildPacket { writeFully(array) }.readObject()

 object DoubleIOFormat : IOFormat<Double>, IOFormatFactory<Double> {


@@ -20,7 +20,7 @@ class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
         metaFormatFactories.find { it.key == key }?.invoke(meta)

     fun metaFormat(name: String, meta: Meta = EmptyMeta): MetaFormat? =
-        metaFormatFactories.find { it.name.toString() == name }?.invoke(meta)
+        metaFormatFactories.find { it.name.last().toString() == name }?.invoke(meta)

     val envelopeFormatFactories by lazy {
         context.content<EnvelopeFormatFactory>(ENVELOPE_FORMAT_TYPE).values
@@ -52,7 +52,7 @@ class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
     companion object : PluginFactory<IOPlugin> {
         val defaultMetaFormats: List<MetaFormatFactory> = listOf(JsonMetaFormat, BinaryMetaFormat)
-        val defaultEnvelopeFormats = listOf(TaggedEnvelopeFormat)
+        val defaultEnvelopeFormats = listOf(TaggedEnvelopeFormat, TaglessEnvelopeFormat)

         override val tag: PluginTag = PluginTag("io", group = PluginTag.DATAFORGE_GROUP)
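After the lookup change, metaFormat matches the last token of the factory name instead of the full qualified name, so a short name should resolve a format. An illustrative call (assuming the JSON meta format's qualified name ends in a "json" token; not part of the commit):

// previously the full qualified name string had to match; now the short suffix is enough
val jsonFormat: MetaFormat? = Global.io.metaFormat("json")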


@@ -38,15 +38,15 @@ class TaggedEnvelopeFormat(
     override fun Output.writeEnvelope(envelope: Envelope, metaFormatFactory: MetaFormatFactory, formatMeta: Meta) {
         val metaFormat = metaFormatFactory.invoke(formatMeta, io.context)
-        val metaBytes = metaFormat.writeBytes(envelope.meta)
+        val metaBytes = metaFormat.writePacket(envelope.meta)
         val actualSize: ULong = if (envelope.data == null) {
             0u
         } else {
             envelope.data?.size ?: ULong.MAX_VALUE
         }
-        val tag = Tag(metaFormatFactory.key, metaBytes.size.toUInt() + 2u, actualSize)
+        val tag = Tag(metaFormatFactory.key, metaBytes.remaining.toUInt() + 2u, actualSize)
         writePacket(tag.toBytes())
-        writeFully(metaBytes)
+        writePacket(metaBytes)
         writeText("\r\n")
         envelope.data?.read { copyTo(this@writeEnvelope) }
         flush()


@@ -37,10 +37,10 @@ class TaglessEnvelopeFormat(
         //Printing meta
         if (!envelope.meta.isEmpty()) {
-            val metaBytes = metaFormat.writeBytes(envelope.meta)
-            writeProperty(META_LENGTH_PROPERTY, metaBytes.size)
+            val metaBytes = metaFormat.writePacket(envelope.meta)
+            writeProperty(META_LENGTH_PROPERTY, metaBytes.remaining)
             writeText(metaStart + "\r\n")
-            writeFully(metaBytes)
+            writePacket(metaBytes)
             writeText("\r\n")
         }
@@ -192,7 +192,7 @@ class TaglessEnvelopeFormat(
         return try {
             val buffer = ByteArray(TAGLESS_ENVELOPE_HEADER.length)
             input.readFully(buffer)
-            return if (buffer.toString() == TAGLESS_ENVELOPE_HEADER) {
+            return if (String(buffer) == TAGLESS_ENVELOPE_HEADER) {
                 TaglessEnvelopeFormat(io)
             } else {
                 null


@@ -4,6 +4,9 @@ import hep.dataforge.descriptors.NodeDescriptor
 import hep.dataforge.meta.DFExperimental
 import hep.dataforge.meta.EmptyMeta
 import hep.dataforge.meta.Meta
+import hep.dataforge.meta.isEmpty
+import kotlinx.io.core.Output
+import kotlinx.io.core.copyTo
 import kotlinx.io.nio.asInput
 import kotlinx.io.nio.asOutput
 import java.nio.file.Files
@@ -12,10 +15,15 @@ import java.nio.file.StandardOpenOption
 import kotlin.reflect.full.isSuperclassOf
 import kotlin.streams.asSequence

+/**
+ * Resolve IOFormat based on type
+ */
+@DFExperimental
 inline fun <reified T : Any> IOPlugin.resolveIOFormat(): IOFormat<T>? {
     return ioFormats.values.find { it.type.isSuperclassOf(T::class) } as IOFormat<T>?
 }

 /**
  * Read file containing meta using given [formatOverride] or file extension to infer meta type.
  * If [path] is a directory search for file starting with `meta` in it
@@ -43,11 +51,12 @@
  */
 fun IOPlugin.writeMetaFile(
     path: Path,
+    meta: Meta,
     metaFormat: MetaFormatFactory = JsonMetaFormat,
     descriptor: NodeDescriptor? = null
 ) {
     val actualPath = if (Files.isDirectory(path)) {
-        path.resolve(metaFormat.name.toString())
+        path.resolve("@" + metaFormat.name.toString())
     } else {
         path
     }
@@ -58,6 +67,28 @@ fun IOPlugin.writeMetaFile(
     }
 }

+/**
+ * Return inferred [EnvelopeFormat] if only one format could read given file. If no format accepts file, return null. If
+ * multiple formats accepts file, throw an error.
+ */
+fun IOPlugin.peekBinaryFormat(path: Path): EnvelopeFormat? {
+    val binary = path.asBinary()
+    val formats = envelopeFormatFactories.mapNotNull { factory ->
+        binary.read {
+            factory.peekFormat(this@peekBinaryFormat, this@read)
+        }
+    }
+    return when (formats.size) {
+        0 -> null
+        1 -> formats.first()
+        else -> error("Envelope format binary recognition clash")
+    }
+}
+
+val IOPlugin.Companion.META_FILE_NAME: String get() = "@meta"
+val IOPlugin.Companion.DATA_FILE_NAME: String get() = "@data"
+
 /**
  * Read and envelope from file if the file exists, return null if file does not exist.
  *
@@ -72,13 +103,17 @@ fun IOPlugin.writeMetaFile(
  * Return null otherwise.
  */
 @DFExperimental
-fun IOPlugin.readEnvelopeFile(path: Path, readNonEnvelopes: Boolean = false): Envelope? {
+fun IOPlugin.readEnvelopeFile(
+    path: Path,
+    readNonEnvelopes: Boolean = false,
+    formatPeeker: IOPlugin.(Path) -> EnvelopeFormat? = IOPlugin::peekBinaryFormat
+): Envelope? {
     if (!Files.exists(path)) return null

     //read two-files directory
     if (Files.isDirectory(path)) {
         val metaFile = Files.list(path).asSequence()
-            .singleOrNull { it.fileName.toString().startsWith("meta") }
+            .singleOrNull { it.fileName.toString().startsWith(IOPlugin.META_FILE_NAME) }

         val meta = if (metaFile == null) {
             EmptyMeta
@@ -86,7 +121,7 @@
             readMetaFile(metaFile)
         }

-        val dataFile = path.resolve("data")
+        val dataFile = path.resolve(IOPlugin.DATA_FILE_NAME)

         val data: Binary? = if (Files.exists(dataFile)) {
             dataFile.asBinary()
@@ -97,41 +132,76 @@
         return SimpleEnvelope(meta, data)
     }

-    val binary = path.asBinary()
-
-    val formats = envelopeFormatFactories.mapNotNull { factory ->
-        binary.read {
-            factory.peekFormat(this@readEnvelopeFile, this@read)
-        }
-    }
-
-    return when (formats.size) {
-        0 -> if (readNonEnvelopes) {
-            SimpleEnvelope(Meta.empty, binary)
-        } else {
-            null
-        }
-        1 -> formats.first().run {
-            binary.read {
-                readObject()
-            }
-        }
-        else -> error("Envelope format file recognition clash")
-    }
+    return formatPeeker(path)?.let { format ->
+        FileEnvelope(path, format)
+    } ?: if (readNonEnvelopes) { // if no format accepts file, read it as binary
+        SimpleEnvelope(Meta.empty, path.asBinary())
+    } else null
+}
+
+private fun Path.useOutput(consumer: Output.() -> Unit) {
+    //TODO forbid rewrite?
+    Files.newByteChannel(
+        this,
+        StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING
+    ).asOutput().use {
+        it.consumer()
+        it.flush()
+    }
 }

+/**
+ * Write a binary into file. Throws an error if file already exists
+ */
+fun <T : Any> IOFormat<T>.writeToFile(path: Path, obj: T) {
+    path.useOutput {
+        writeObject(obj)
+        flush()
+    }
+}
+
+/**
+ * Write envelope file to given [path] using [envelopeFormat] and optional [metaFormat]
+ */
+@DFExperimental
 fun IOPlugin.writeEnvelopeFile(
     path: Path,
     envelope: Envelope,
-    format: EnvelopeFormat = TaggedEnvelopeFormat
+    envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat,
+    metaFormat: MetaFormatFactory? = null
 ) {
-    val output = Files.newByteChannel(
-        path,
-        StandardOpenOption.WRITE,
-        StandardOpenOption.CREATE,
-        StandardOpenOption.TRUNCATE_EXISTING
-    ).asOutput()
-
-    with(format) {
-        output.writeObject(envelope)
+    path.useOutput {
+        with(envelopeFormat) {
+            writeEnvelope(envelope, metaFormat ?: envelopeFormat.defaultMetaFormat)
+        }
+    }
+}
+
+/**
+ * Write separate meta and data files to given directory [path]
+ */
+@DFExperimental
+fun IOPlugin.writeEnvelopeDirectory(
+    path: Path,
+    envelope: Envelope,
+    metaFormat: MetaFormatFactory = JsonMetaFormat
+) {
+    if (!Files.exists(path)) {
+        Files.createDirectories(path)
+    }
+    if (!Files.isDirectory(path)) {
+        error("Can't write envelope directory to file")
+    }
+    if (!envelope.meta.isEmpty()) {
+        writeMetaFile(path, envelope.meta, metaFormat)
+    }
+    val dataFile = path.resolve(IOPlugin.DATA_FILE_NAME)
+    dataFile.useOutput {
+        envelope.data?.read {
+            val copied = copyTo(this@useOutput)
+            if (envelope.data?.size != ULong.MAX_VALUE && copied != envelope.data?.size?.toLong()) {
+                error("The number of copied bytes does not equal data size")
+            }
+        }
     }
 }
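A sketch of the two-file layout these functions introduce (not part of the commit; directoryRoundTrip and the dir parameter are illustrative, both functions are @DFExperimental): writeEnvelopeDirectory writes the @meta and @data files into a directory, and readEnvelopeFile reads such a directory back as an envelope.

@DFExperimental
fun IOPlugin.directoryRoundTrip(dir: Path, envelope: Envelope): Envelope? {
    writeEnvelopeDirectory(dir, envelope)   // emits @meta.* and @data under dir
    return readEnvelopeFile(dir)            // recognises the two-file directory layout
}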


@@ -26,12 +26,25 @@ class FileEnvelopeTest {
     @Test
     fun testFileWriteRead() {
-        val tmpPath = Files.createTempFile("dataforge_test", ".df")
-        Global.io.writeEnvelopeFile(tmpPath,envelope)
-        println(tmpPath.toUri())
-        val restored: Envelope = Global.io.readEnvelopeFile(tmpPath)!!
-        assertTrue { envelope.contentEquals(restored) }
+        Global.io.run {
+            val tmpPath = Files.createTempFile("dataforge_test", ".df")
+            writeEnvelopeFile(tmpPath, envelope)
+            println(tmpPath.toUri())
+            val restored: Envelope = readEnvelopeFile(tmpPath)!!
+            assertTrue { envelope.contentEquals(restored) }
+        }
+    }
+
+    @Test
+    fun testFileWriteReadTagless() {
+        Global.io.run {
+            val tmpPath = Files.createTempFile("dataforge_test_tagless", ".df")
+            writeEnvelopeFile(tmpPath, envelope, envelopeFormat = TaglessEnvelopeFormat)
+            println(tmpPath.toUri())
+            val restored: Envelope = readEnvelopeFile(tmpPath)!!
+            assertTrue { envelope.contentEquals(restored) }
+        }
     }

     @Test
     fun testFileWriteTagged() {


@@ -7,9 +7,9 @@ import hep.dataforge.io.TaggedEnvelopeFormat
 import hep.dataforge.io.writeBytes
 import kotlinx.coroutines.GlobalScope
 import kotlinx.coroutines.runBlocking
-import org.junit.AfterClass
-import org.junit.BeforeClass
-import org.junit.Test
+import org.junit.jupiter.api.AfterAll
+import org.junit.jupiter.api.BeforeAll
+import kotlin.test.Test
 import kotlin.test.assertEquals
 import kotlin.time.ExperimentalTime
@@ -30,13 +30,13 @@ class EnvelopeServerTest {
         @JvmStatic
         val echoEnvelopeServer = EnvelopeServer(Global, 7778, EchoResponder, GlobalScope)

-        @BeforeClass
+        @BeforeAll
         @JvmStatic
         fun start() {
            echoEnvelopeServer.start()
         }

-        @AfterClass
+        @AfterAll
         @JvmStatic
         fun close() {
            echoEnvelopeServer.stop()


@@ -19,8 +19,6 @@ kotlin {
         }
         val jvmTest by getting {
             dependencies {
-                implementation(kotlin("test"))
-                implementation(kotlin("test-junit"))
                 implementation("ch.qos.logback:logback-classic:1.2.3")
             }
         }


@@ -6,7 +6,7 @@ import hep.dataforge.meta.int
 import hep.dataforge.workspace.SimpleWorkspaceBuilder
 import hep.dataforge.workspace.context
 import hep.dataforge.workspace.target
-import org.junit.Test
+import kotlin.test.Test
 import kotlin.test.assertEquals


@@ -1,14 +0,0 @@
-package hep.dataforge.workspace
-
-import hep.dataforge.data.Data
-import hep.dataforge.io.Envelope
-import hep.dataforge.io.IOFormat
-import hep.dataforge.io.readWith
-import kotlin.reflect.KClass
-
-/**
- * Convert an [Envelope] to a data via given format. The actual parsing is done lazily.
- */
-fun <T : Any> Envelope.toData(type: KClass<out T>, format: IOFormat<T>): Data<T> = Data(type, meta) {
-    data?.readWith(format) ?: error("Can't convert envelope without data to Data")
-}


@@ -0,0 +1,33 @@
+package hep.dataforge.workspace
+
+import hep.dataforge.data.Data
+import hep.dataforge.data.await
+import hep.dataforge.io.*
+import kotlinx.coroutines.coroutineScope
+import kotlinx.io.core.Input
+import kotlinx.io.core.buildPacket
+import kotlin.reflect.KClass
+
+/**
+ * Convert an [Envelope] to a data via given format. The actual parsing is done lazily.
+ */
+fun <T : Any> Envelope.toData(type: KClass<out T>, format: IOFormat<T>): Data<T> = Data(type, meta) {
+    data?.readWith(format) ?: error("Can't convert envelope without data to Data")
+}
+
+suspend fun <T : Any> Data<T>.toEnvelope(format: IOFormat<T>): Envelope {
+    val obj = coroutineScope {
+        await(this)
+    }
+    val binary = object : Binary {
+        override fun <R> read(block: Input.() -> R): R {
+            //TODO optimize away additional copy by creating inputs that reads directly from output
+            val packet = buildPacket {
+                format.run { writeObject(obj) }
+            }
+            return packet.block()
+        }
+    }
+    return SimpleEnvelope(meta, binary)
+}
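An illustrative round trip between Data and Envelope using the two converters above (not part of the commit; stringRoundTrip is a hypothetical helper, and format is whatever IOFormat<String> the caller supplies):

suspend fun stringRoundTrip(data: Data<String>, format: IOFormat<String>): Data<String> {
    // serialize the awaited value into an in-memory envelope binary
    val envelope = data.toEnvelope(format)
    // parse it back lazily through the same format
    return envelope.toData(String::class, format)
}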


@@ -1,17 +1,27 @@
 package hep.dataforge.workspace

+//import jdk.nio.zipfs.ZipFileSystemProvider
 import hep.dataforge.data.*
-import hep.dataforge.io.Envelope
-import hep.dataforge.io.IOFormat
-import hep.dataforge.io.IOPlugin
-import hep.dataforge.io.readEnvelopeFile
-import hep.dataforge.meta.Meta
-import hep.dataforge.meta.get
-import hep.dataforge.meta.string
+import hep.dataforge.io.*
+import hep.dataforge.meta.*
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
+import java.nio.file.FileSystem
 import java.nio.file.Files
 import java.nio.file.Path
+import java.nio.file.StandardCopyOption
+import java.nio.file.spi.FileSystemProvider
 import kotlin.reflect.KClass

+typealias FileFormatResolver<T> = (Path, Meta) -> IOFormat<T>
+
+//private val zipFSProvider = ZipFileSystemProvider()
+
+private fun newZFS(path: Path): FileSystem {
+    val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
+        ?: error("Zip file system provider not found")
+    return fsProvider.newFileSystem(path, mapOf("create" to "true"))
+}

 /**
  * Read data with supported envelope format and binary format. If envelope format is null, then read binary directly from file.
@@ -22,53 +32,42 @@ import kotlin.reflect.KClass
  * @param metaFile the relative file for optional meta override
  * @param metaFileFormat the meta format for override
  */
+@DFExperimental
 fun <T : Any> IOPlugin.readDataFile(
     path: Path,
     type: KClass<out T>,
-    formatResolver: (Meta) -> IOFormat<T>
+    formatResolver: FileFormatResolver<T>
 ): Data<T> {
     val envelope = readEnvelopeFile(path, true) ?: error("Can't read data from $path")
-    val format = formatResolver(envelope.meta)
+    val format = formatResolver(path, envelope.meta)
     return envelope.toData(type, format)
 }

-//TODO wants multi-receiver
+@DFExperimental
+inline fun <reified T : Any> IOPlugin.readDataFile(path: Path): Data<T> =
+    readDataFile(path, T::class) { _, _ ->
+        resolveIOFormat<T>() ?: error("Can't resolve IO format for ${T::class}")
+    }
+
+/**
+ * Add file/directory-based data tree item
+ */
+@DFExperimental
 fun <T : Any> DataTreeBuilder<T>.file(
     plugin: IOPlugin,
     path: Path,
-    formatResolver: (Meta) -> IOFormat<T>
+    formatResolver: FileFormatResolver<T>
 ) {
+    //If path is a single file or a special directory, read it as single datum
+    if (!Files.isDirectory(path) || Files.list(path).allMatch { it.fileName.toString().startsWith("@") }) {
         plugin.run {
             val data = readDataFile(path, type, formatResolver)
             val name = data.meta[Envelope.ENVELOPE_NAME_KEY].string
                 ?: path.fileName.toString().replace(".df", "")
             datum(name, data)
         }
-}
-
-/**
- * Read the directory as a data node
- */
-fun <T : Any> IOPlugin.readDataDirectory(
-    path: Path,
-    type: KClass<out T>,
-    formatResolver: (Meta) -> IOFormat<T>
-): DataNode<T> {
-    if (!Files.isDirectory(path)) error("Provided path $this is not a directory")
-    return DataNode(type) {
-        Files.list(path).forEach { path ->
-            if (!path.fileName.toString().endsWith(".meta")) {
-                file(this@readDataDirectory, path, formatResolver)
-            }
-        }
-    }
-}
-
-fun <T : Any> DataTreeBuilder<T>.directory(
-    plugin: IOPlugin,
-    path: Path,
-    formatResolver: (Meta) -> IOFormat<T>
-) {
+    } else {
+        //otherwise, read as directory
         plugin.run {
             val data = readDataDirectory(path, type, formatResolver)
             val name = data.meta[Envelope.ENVELOPE_NAME_KEY].string
@@ -76,8 +75,109 @@ fun <T : Any> DataTreeBuilder<T>.directory(
             node(name, data)
         }
     }
+}
+
+/**
+ * Read the directory as a data node. If [path] is a zip archive, read it as directory
+ */
+@DFExperimental
+fun <T : Any> IOPlugin.readDataDirectory(
+    path: Path,
+    type: KClass<out T>,
+    formatResolver: FileFormatResolver<T>
+): DataNode<T> {
+    //read zipped data node
+    if (path.fileName != null && path.fileName.toString().endsWith(".zip")) {
+        //Using explicit Zip file system to avoid bizarre compatibility bugs
+        val fs = newZFS(path)
+        return readDataDirectory(fs.rootDirectories.first(), type, formatResolver)
+    }
+    if (!Files.isDirectory(path)) error("Provided path $path is not a directory")
+    return DataNode(type) {
+        Files.list(path).forEach { path ->
+            val fileName = path.fileName.toString()
+            if (fileName.startsWith(IOPlugin.META_FILE_NAME)) {
+                meta(readMetaFile(path))
+            } else if (!fileName.startsWith("@")) {
+                file(this@readDataDirectory, path, formatResolver)
+            }
+        }
+    }
+}
+
+@DFExperimental
+inline fun <reified T : Any> IOPlugin.readDataDirectory(path: Path): DataNode<T> =
+    readDataDirectory(path, T::class) { _, _ ->
+        resolveIOFormat<T>() ?: error("Can't resolve IO format for ${T::class}")
+    }
+
+/**
+ * Write data tree to existing directory or create a new one using default [java.nio.file.FileSystem] provider
+ */
+@DFExperimental
+suspend fun <T : Any> IOPlugin.writeDataDirectory(
+    path: Path,
+    node: DataNode<T>,
+    format: IOFormat<T>,
+    envelopeFormat: EnvelopeFormat? = null,
+    metaFormat: MetaFormatFactory? = null
+) {
+    withContext(Dispatchers.IO) {
+        if (!Files.exists(path)) {
+            Files.createDirectories(path)
+        } else if (!Files.isDirectory(path)) {
+            error("Can't write a node into file")
+        }
+        node.items.forEach { (token, item) ->
+            val childPath = path.resolve(token.toString())
+            when (item) {
+                is DataItem.Node -> {
+                    writeDataDirectory(childPath, item.node, format, envelopeFormat)
+                }
+                is DataItem.Leaf -> {
+                    val envelope = item.data.toEnvelope(format)
+                    if (envelopeFormat != null) {
+                        writeEnvelopeFile(childPath, envelope, envelopeFormat, metaFormat)
+                    } else {
+                        writeEnvelopeDirectory(childPath, envelope, metaFormat ?: JsonMetaFormat)
+                    }
+                }
+            }
+        }
+        if (!node.meta.isEmpty()) {
+            writeMetaFile(path, node.meta, metaFormat ?: JsonMetaFormat)
+        }
+    }
+}
+
+suspend fun <T : Any> IOPlugin.writeZip(
+    path: Path,
+    node: DataNode<T>,
+    format: IOFormat<T>,
+    envelopeFormat: EnvelopeFormat? = null,
+    metaFormat: MetaFormatFactory? = null
+) {
+    withContext(Dispatchers.IO) {
+        val actualFile = if (path.toString().endsWith(".zip")) {
+            path
+        } else {
+            path.resolveSibling(path.fileName.toString() + ".zip")
+        }
+        if (Files.exists(actualFile) && Files.size(path) == 0.toLong()) {
+            Files.delete(path)
+        }
+        //Files.createFile(actualFile)
+        newZFS(actualFile).use { zipfs ->
+            val internalTargetPath = zipfs.getPath("/")
+            Files.createDirectories(internalTargetPath)
+            val tmp = Files.createTempDirectory("df_zip")
+            writeDataDirectory(tmp, node, format, envelopeFormat, metaFormat)
+            Files.list(tmp).forEach { sourcePath ->
+                val targetPath = sourcePath.fileName.toString()
+                val internalTargetPath = internalTargetPath.resolve(targetPath)
+                Files.copy(sourcePath, internalTargetPath, StandardCopyOption.REPLACE_EXISTING)
+            }
+        }
+    }
+}


@@ -6,8 +6,8 @@ import hep.dataforge.context.PluginTag
 import hep.dataforge.data.*
 import hep.dataforge.meta.Meta
 import hep.dataforge.names.asName
-import org.junit.Test
 import kotlin.reflect.KClass
+import kotlin.test.Test
 import kotlin.test.assertEquals


@@ -0,0 +1,72 @@
+package hep.dataforge.workspace
+
+import hep.dataforge.context.Global
+import hep.dataforge.data.*
+import hep.dataforge.io.IOFormat
+import hep.dataforge.io.io
+import hep.dataforge.meta.DFExperimental
+import kotlinx.coroutines.runBlocking
+import kotlinx.io.core.Input
+import kotlinx.io.core.Output
+import kotlinx.io.core.readText
+import kotlinx.io.core.writeText
+import java.nio.file.Files
+import kotlin.test.Ignore
+import kotlin.test.Test
+import kotlin.test.assertEquals
+
+class FileDataTest {
+    val dataNode = DataNode<String> {
+        node("dir") {
+            static("a", "Some string") {
+                "content" put "Some string"
+            }
+        }
+        static("b", "root data")
+        meta {
+            "content" put "This is root meta node"
+        }
+    }
+
+    object StringIOFormat : IOFormat<String> {
+        override fun Output.writeObject(obj: String) {
+            writeText(obj)
+        }
+
+        override fun Input.readObject(): String {
+            return readText()
+        }
+    }
+
+    @Test
+    @DFExperimental
+    fun testDataWriteRead() {
+        Global.io.run {
+            val dir = Files.createTempDirectory("df_data_node")
+            runBlocking {
+                writeDataDirectory(dir, dataNode, StringIOFormat)
+            }
+            println(dir.toUri().toString())
+            val reconstructed = readDataDirectory(dir, String::class) { _, _ -> StringIOFormat }
+            assertEquals(dataNode["dir.a"]?.meta, reconstructed["dir.a"]?.meta)
+            assertEquals(dataNode["b"]?.data?.get(), reconstructed["b"]?.data?.get())
+        }
+    }
+
+    @Test
+    @Ignore
+    fun testZipWriteRead() {
+        Global.io.run {
+            val zip = Files.createTempFile("df_data_node", ".zip")
+            runBlocking {
+                writeZip(zip, dataNode, StringIOFormat)
+            }
+            println(zip.toUri().toString())
+            val reconstructed = readDataDirectory(zip, String::class) { _, _ -> StringIOFormat }
+            assertEquals(dataNode["dir.a"]?.meta, reconstructed["dir.a"]?.meta)
+            assertEquals(dataNode["b"]?.data?.get(), reconstructed["b"]?.data?.get())
+        }
+    }
+}


@@ -7,7 +7,7 @@ import hep.dataforge.meta.builder
 import hep.dataforge.meta.get
 import hep.dataforge.meta.int
 import hep.dataforge.names.plus
-import org.junit.Test
+import kotlin.test.Test
 import kotlin.test.assertEquals
 import kotlin.test.assertTrue

Binary file not shown.


@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.2-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.0-bin.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists

gradlew vendored Normal file → Executable file

@@ -154,19 +154,19 @@ if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
         else
             eval `echo args$i`="\"$arg\""
         fi
-        i=$((i+1))
+        i=`expr $i + 1`
     done
     case $i in
-        (0) set -- ;;
-        (1) set -- "$args0" ;;
-        (2) set -- "$args0" "$args1" ;;
-        (3) set -- "$args0" "$args1" "$args2" ;;
-        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
-        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
-        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
-        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
-        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
-        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+        0) set -- ;;
+        1) set -- "$args0" ;;
+        2) set -- "$args0" "$args1" ;;
+        3) set -- "$args0" "$args1" "$args2" ;;
+        4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+        5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+        6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+        7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+        8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+        9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
     esac
 fi
@@ -175,14 +175,9 @@ save () {
     for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
     echo " "
 }
-APP_ARGS=$(save "$@")
+APP_ARGS=`save "$@"`

 # Collect all arguments for the java command, following the shell quoting and substitution rules
 eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

-# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
-if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
-  cd "$(dirname "$0")"
-fi
-
 exec "$JAVACMD" "$@"