Merge pull request #46 from mipt-npm/dev

0.1.7
This commit is contained in:
Alexander Nozik 2020-04-08 09:53:22 +03:00 committed by GitHub
commit 572e5eea60
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
74 changed files with 851 additions and 645 deletions

View File

@ -1,12 +1,12 @@
plugins {
val toolsVersion = "0.4.0"
val toolsVersion = "0.4.2"
id("scientifik.mpp") version toolsVersion apply false
id("scientifik.jvm") version toolsVersion apply false
id("scientifik.publish") version toolsVersion apply false
}
val dataforgeVersion by extra("0.1.5")
val dataforgeVersion by extra("0.1.7")
val bintrayRepo by extra("dataforge")
val githubProject by extra("dataforge-core")

View File

@ -1,13 +1,12 @@
package hep.dataforge.context
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
import hep.dataforge.names.Name
import kotlin.properties.ReadOnlyProperty
import kotlin.reflect.KClass
import kotlin.reflect.KProperty
abstract class AbstractPlugin(override val meta: Meta = EmptyMeta) : Plugin {
abstract class AbstractPlugin(override val meta: Meta = Meta.EMPTY) : Plugin {
private var _context: Context? = null
private val dependencies = ArrayList<PluginFactory<*>>()

View File

@ -1,8 +1,7 @@
package hep.dataforge.context
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
interface Factory<out T : Any> {
operator fun invoke(meta: Meta = EmptyMeta, context: Context = Global): T
operator fun invoke(meta: Meta = Meta.EMPTY, context: Context = Global): T
}

View File

@ -1,9 +1,7 @@
package hep.dataforge.context
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.meta.buildMeta
import kotlin.reflect.KClass
/**
@ -98,7 +96,7 @@ class PluginManager(override val context: Context) : ContextAware, Iterable<Plug
/**
* Load a plugin using its factory
*/
fun <T : Plugin> load(factory: PluginFactory<T>, meta: Meta = EmptyMeta): T =
fun <T : Plugin> load(factory: PluginFactory<T>, meta: Meta = Meta.EMPTY): T =
load(factory(meta, context))
fun <T : Plugin> load(factory: PluginFactory<T>, metaBuilder: MetaBuilder.() -> Unit): T =
@ -121,7 +119,7 @@ class PluginManager(override val context: Context) : ContextAware, Iterable<Plug
* Get an existing plugin with given meta or load new one using provided factory
*
*/
fun <T : Plugin> fetch(factory: PluginFactory<T>, recursive: Boolean = true, meta: Meta = EmptyMeta): T {
fun <T : Plugin> fetch(factory: PluginFactory<T>, recursive: Boolean = true, meta: Meta = Meta.EMPTY): T {
val loaded = get(factory.type, factory.tag, recursive)
return when {
loaded == null -> load(factory(meta, context))

View File

@ -1,6 +1,5 @@
package hep.dataforge.context
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
import kotlin.reflect.KClass
@ -23,7 +22,7 @@ expect object PluginRepository {
/**
* Fetch specific plugin and instantiate it with given meta
*/
fun PluginRepository.fetch(tag: PluginTag, meta: Meta = EmptyMeta): Plugin =
fun PluginRepository.fetch(tag: PluginTag, meta: Meta = Meta.EMPTY): Plugin =
list().find { it.tag.matches(tag) }?.invoke(meta = meta)
?: error("Plugin with tag $tag not found in the repository")

View File

@ -1,17 +1,5 @@
package hep.dataforge.descriptors
import hep.dataforge.meta.*
import hep.dataforge.meta.descriptors.ItemDescriptor
import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.meta.descriptors.attributes
import hep.dataforge.meta.scheme.ConfigurableDelegate
import hep.dataforge.meta.scheme.Scheme
import hep.dataforge.values.parseValue
import kotlin.reflect.KProperty1
import kotlin.reflect.full.findAnnotation
import kotlin.reflect.full.isSubclassOf
import kotlin.reflect.full.memberProperties
//inline fun <reified T : Scheme> T.buildDescriptor(): NodeDescriptor = NodeDescriptor {
// T::class.apply {

View File

@ -1,6 +1,8 @@
package hep.dataforge.data
import hep.dataforge.meta.*
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaRepr
import hep.dataforge.meta.isEmpty
import kotlinx.coroutines.CoroutineScope
import kotlin.coroutines.CoroutineContext
import kotlin.coroutines.EmptyCoroutineContext
@ -31,14 +33,14 @@ interface Data<out T : Any> : Goal<T>, MetaRepr{
operator fun <T : Any> invoke(
type: KClass<out T>,
meta: Meta = EmptyMeta,
meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
block: suspend CoroutineScope.() -> T
): Data<T> = DynamicData(type, meta, context, dependencies, block)
inline operator fun <reified T : Any> invoke(
meta: Meta = EmptyMeta,
meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
noinline block: suspend CoroutineScope.() -> T
@ -47,7 +49,7 @@ interface Data<out T : Any> : Goal<T>, MetaRepr{
operator fun <T : Any> invoke(
name: String,
type: KClass<out T>,
meta: Meta = EmptyMeta,
meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
block: suspend CoroutineScope.() -> T
@ -55,14 +57,14 @@ interface Data<out T : Any> : Goal<T>, MetaRepr{
inline operator fun <reified T : Any> invoke(
name: String,
meta: Meta = EmptyMeta,
meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
noinline block: suspend CoroutineScope.() -> T
): Data<T> =
invoke(name, T::class, meta, context, dependencies, block)
fun <T : Any> static(value: T, meta: Meta = EmptyMeta): Data<T> =
fun <T : Any> static(value: T, meta: Meta = Meta.EMPTY): Data<T> =
StaticData(value, meta)
}
}
@ -70,7 +72,7 @@ interface Data<out T : Any> : Goal<T>, MetaRepr{
class DynamicData<T : Any>(
override val type: KClass<out T>,
override val meta: Meta = EmptyMeta,
override val meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
block: suspend CoroutineScope.() -> T
@ -78,7 +80,7 @@ class DynamicData<T : Any>(
class StaticData<T : Any>(
value: T,
override val meta: Meta = EmptyMeta
override val meta: Meta = Meta.EMPTY
) : Data<T>, StaticGoal<T>(value) {
override val type: KClass<out T> get() = value::class
}

View File

@ -1,9 +1,6 @@
package hep.dataforge.data
import hep.dataforge.meta.*
import hep.dataforge.meta.scheme.Scheme
import hep.dataforge.meta.scheme.SchemeSpec
import hep.dataforge.meta.scheme.string
import hep.dataforge.names.toName
@ -55,4 +52,4 @@ fun <T : Any> DataNode<T>.filter(filter: Meta): DataNode<T> = filter(DataFilter.
* Filter data using [DataFilter] builder
*/
fun <T : Any> DataNode<T>.filter(filterBuilder: DataFilter.() -> Unit): DataNode<T> =
filter(DataFilter.invoke(filterBuilder))
filter(DataFilter(filterBuilder))

View File

@ -250,7 +250,7 @@ fun <T : Any> DataTreeBuilder<T>.datum(name: String, data: Data<T>) {
this[name.toName()] = data
}
fun <T : Any> DataTreeBuilder<T>.static(name: Name, data: T, meta: Meta = EmptyMeta) {
fun <T : Any> DataTreeBuilder<T>.static(name: Name, data: T, meta: Meta = Meta.EMPTY) {
this[name] = Data.static(data, meta)
}

View File

@ -11,7 +11,7 @@ serialization(sourceSet = TEST){
cbor()
}
val ioVersion by rootProject.extra("0.2.0-npm-dev-4")
val ioVersion by rootProject.extra("0.2.0-npm-dev-7")
kotlin {
sourceSets {

View File

@ -2,19 +2,19 @@ package hep.dataforge.io.yaml
import hep.dataforge.context.Context
import hep.dataforge.io.*
import hep.dataforge.io.IOFormat.Companion.META_KEY
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
import kotlinx.io.*
import kotlinx.io.text.readUtf8Line
import kotlinx.io.text.writeRawString
import kotlinx.io.text.writeUtf8String
import kotlinx.serialization.toUtf8Bytes
@DFExperimental
class FrontMatterEnvelopeFormat(
val io: IOPlugin,
meta: Meta = EmptyMeta
val meta: Meta = Meta.EMPTY
) : EnvelopeFormat {
override fun Input.readPartial(): PartialEnvelope {
@ -27,9 +27,10 @@ class FrontMatterEnvelopeFormat(
val readMetaFormat =
metaTypeRegex.matchEntire(line)?.groupValues?.first()
?.let { io.metaFormat(it) } ?: YamlMetaFormat
?.let { io.resolveMetaFormat(it) } ?: YamlMetaFormat
val meta = buildBytes {
//TODO replace by preview
val meta = Binary {
do {
line = readUtf8Line()
writeUtf8String(line + "\r\n")
@ -51,18 +52,18 @@ class FrontMatterEnvelopeFormat(
val readMetaFormat =
metaTypeRegex.matchEntire(line)?.groupValues?.first()
?.let { io.metaFormat(it) } ?: YamlMetaFormat
?.let { io.resolveMetaFormat(it) } ?: YamlMetaFormat
val meta = buildBytes {
val meta = Binary {
do {
writeUtf8String(readUtf8Line() + "\r\n")
writeUtf8String(readUtf8Line() + "\r\n")
} while (!line.startsWith(SEPARATOR))
}.read {
readMetaFormat.run {
readMeta()
}
}
val bytes = readRemaining()
val bytes = readByteArray()
val data = bytes.asBinary()
return SimpleEnvelope(meta, data)
}
@ -78,6 +79,11 @@ class FrontMatterEnvelopeFormat(
}
}
override fun toMeta(): Meta = Meta {
NAME_KEY put name.toString()
META_KEY put meta
}
companion object : EnvelopeFormatFactory {
const val SEPARATOR = "---"
@ -88,11 +94,13 @@ class FrontMatterEnvelopeFormat(
}
override fun peekFormat(io: IOPlugin, input: Input): EnvelopeFormat? {
val line = input.readUtf8Line()
return if (line.startsWith("---")) {
invoke()
} else {
null
return input.preview {
val line = readUtf8Line()
return@preview if (line.startsWith("---")) {
invoke()
} else {
null
}
}
}

View File

@ -1,6 +1,8 @@
package hep.dataforge.io.yaml
import hep.dataforge.context.Context
import hep.dataforge.io.IOFormat.Companion.META_KEY
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.io.MetaFormat
import hep.dataforge.io.MetaFormatFactory
import hep.dataforge.meta.DFExperimental
@ -10,23 +12,9 @@ import hep.dataforge.meta.toMap
import hep.dataforge.meta.toMeta
import kotlinx.io.Input
import kotlinx.io.Output
import kotlinx.io.readUByte
import kotlinx.io.asInputStream
import kotlinx.io.text.writeUtf8String
import org.yaml.snakeyaml.Yaml
import java.io.InputStream
private class InputAsStream(val input: Input) : InputStream() {
override fun read(): Int {
if (input.eof()) return -1
return input.readUByte().toInt()
}
override fun close() {
input.close()
}
}
private fun Input.asStream() = InputAsStream(this)
@DFExperimental
class YamlMetaFormat(val meta: Meta) : MetaFormat {
@ -38,10 +26,15 @@ class YamlMetaFormat(val meta: Meta) : MetaFormat {
}
override fun Input.readMeta(descriptor: NodeDescriptor?): Meta {
val map: Map<String, Any?> = yaml.load(asStream())
val map: Map<String, Any?> = yaml.load(asInputStream())
return map.toMeta(descriptor)
}
override fun toMeta(): Meta = Meta{
NAME_KEY put FrontMatterEnvelopeFormat.name.toString()
META_KEY put meta
}
companion object : MetaFormatFactory {
override fun invoke(meta: Meta, context: Context): MetaFormat = YamlMetaFormat(meta)

View File

@ -2,6 +2,7 @@ package hep.dataforge.io.yaml
import hep.dataforge.io.parse
import hep.dataforge.io.toString
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.meta.get
import hep.dataforge.meta.seal
@ -9,6 +10,7 @@ import kotlin.test.Test
import kotlin.test.assertEquals
@DFExperimental
class YamlMetaFormatTest {
@Test
fun testYamlMetaFormat() {
@ -34,7 +36,7 @@ class YamlMetaFormatTest {
assertEquals<Meta>(meta, meta.seal())
meta.items.keys.forEach {
if (meta[it] != result[it]) error("${meta[it]} != ${result[it]}")
assertEquals(meta[it],result[it],"${meta[it]} != ${result[it]}")
}
assertEquals(meta, result)

View File

@ -1,10 +1,7 @@
package hep.dataforge.io
import hep.dataforge.meta.*
import kotlinx.io.ArrayBinary
import kotlinx.io.Binary
import kotlinx.io.ExperimentalIoApi
import kotlinx.io.Output
import kotlinx.io.*
class EnvelopeBuilder {
private val metaBuilder = MetaBuilder()
@ -34,9 +31,11 @@ class EnvelopeBuilder {
/**
* Construct a data binary from given builder
*/
@ExperimentalIoApi
@OptIn(ExperimentalIoApi::class)
fun data(block: Output.() -> Unit) {
data = ArrayBinary.write(builder = block)
val arrayBuilder = ByteArrayOutput()
arrayBuilder.block()
data = arrayBuilder.toByteArray().asBinary()
}
fun build() = SimpleEnvelope(metaBuilder.seal(), data)

View File

@ -2,7 +2,6 @@ package hep.dataforge.io
import hep.dataforge.context.Context
import hep.dataforge.io.EnvelopeFormatFactory.Companion.ENVELOPE_FORMAT_TYPE
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
import hep.dataforge.names.Name
import hep.dataforge.names.asName
@ -26,7 +25,7 @@ interface EnvelopeFormat : IOFormat<Envelope> {
fun Output.writeEnvelope(
envelope: Envelope,
metaFormatFactory: MetaFormatFactory = defaultMetaFormat,
formatMeta: Meta = EmptyMeta
formatMeta: Meta = Meta.EMPTY
)
override fun Input.readObject(): Envelope

View File

@ -1,125 +1,121 @@
package hep.dataforge.io
import hep.dataforge.context.Global
import hep.dataforge.io.EnvelopeParts.FORMAT_META_KEY
import hep.dataforge.io.EnvelopeParts.FORMAT_NAME_KEY
import hep.dataforge.io.EnvelopeParts.INDEX_KEY
import hep.dataforge.io.EnvelopeParts.MULTIPART_DATA_SEPARATOR
import hep.dataforge.io.EnvelopeParts.MULTIPART_DATA_TYPE
import hep.dataforge.io.EnvelopeParts.SIZE_KEY
import hep.dataforge.io.Envelope.Companion.ENVELOPE_NODE_KEY
import hep.dataforge.io.PartDescriptor.Companion.DEFAULT_MULTIPART_DATA_SEPARATOR
import hep.dataforge.io.PartDescriptor.Companion.MULTIPART_DATA_TYPE
import hep.dataforge.io.PartDescriptor.Companion.MULTIPART_KEY
import hep.dataforge.io.PartDescriptor.Companion.PARTS_KEY
import hep.dataforge.io.PartDescriptor.Companion.PART_FORMAT_KEY
import hep.dataforge.io.PartDescriptor.Companion.SEPARATOR_KEY
import hep.dataforge.meta.*
import hep.dataforge.names.asName
import hep.dataforge.names.plus
import hep.dataforge.names.toName
import kotlinx.io.text.readRawString
import kotlinx.io.text.writeRawString
import kotlinx.io.Binary
import kotlinx.io.writeBinary
object EnvelopeParts {
val MULTIPART_KEY = "multipart".asName()
val SIZE_KEY = Envelope.ENVELOPE_NODE_KEY + MULTIPART_KEY + "size"
val INDEX_KEY = Envelope.ENVELOPE_NODE_KEY + MULTIPART_KEY + "index"
val FORMAT_NAME_KEY = Envelope.ENVELOPE_NODE_KEY + MULTIPART_KEY + "format"
val FORMAT_META_KEY = Envelope.ENVELOPE_NODE_KEY + MULTIPART_KEY + "meta"
private class PartDescriptor : Scheme() {
var offset by int(0)
var size by int(0)
var meta by node()
const val MULTIPART_DATA_SEPARATOR = "\r\n#~PART~#\r\n"
companion object : SchemeSpec<PartDescriptor>(::PartDescriptor) {
val MULTIPART_KEY = ENVELOPE_NODE_KEY + "multipart"
val PARTS_KEY = MULTIPART_KEY + "parts"
val SEPARATOR_KEY = MULTIPART_KEY + "separator"
const val MULTIPART_DATA_TYPE = "envelope.multipart"
const val DEFAULT_MULTIPART_DATA_SEPARATOR = "\r\n#~PART~#\r\n"
val PART_FORMAT_KEY = "format".asName()
const val MULTIPART_DATA_TYPE = "envelope.multipart"
}
}
/**
* Append multiple serialized envelopes to the data block. Previous data is erased if it was present
*/
@DFExperimental
data class EnvelopePart(val binary: Binary, val description: Meta?)
typealias EnvelopeParts = List<EnvelopePart>
fun EnvelopeBuilder.multipart(
envelopes: Collection<Envelope>,
format: EnvelopeFormatFactory,
formatMeta: Meta = EmptyMeta
parts: EnvelopeParts,
separator: String = DEFAULT_MULTIPART_DATA_SEPARATOR
) {
dataType = MULTIPART_DATA_TYPE
meta {
SIZE_KEY put envelopes.size
FORMAT_NAME_KEY put format.name.toString()
if (!formatMeta.isEmpty()) {
FORMAT_META_KEY put formatMeta
var offsetCounter = 0
val separatorSize = separator.length
val partDescriptors = parts.map { (binary, description) ->
offsetCounter += separatorSize
PartDescriptor {
offset = offsetCounter
size = binary.size
meta = description
}.also {
offsetCounter += binary.size
}
}
meta {
if (separator != DEFAULT_MULTIPART_DATA_SEPARATOR) {
SEPARATOR_KEY put separator
}
setIndexed(PARTS_KEY, partDescriptors.map { it.toMeta() })
}
data {
format(formatMeta).run {
envelopes.forEach {
writeRawString(MULTIPART_DATA_SEPARATOR)
writeEnvelope(it)
}
parts.forEach {
writeRawString(separator)
writeBinary(it.binary)
}
}
}
/**
* Create a multipart partition in the envelope adding additional name-index mapping in meta
*/
@DFExperimental
fun EnvelopeBuilder.multipart(
envelopes: Map<String, Envelope>,
format: EnvelopeFormatFactory,
formatMeta: Meta = EmptyMeta
fun EnvelopeBuilder.envelopes(
envelopes: List<Envelope>,
format: EnvelopeFormat = TaggedEnvelopeFormat,
separator: String = DEFAULT_MULTIPART_DATA_SEPARATOR
) {
dataType = MULTIPART_DATA_TYPE
meta {
SIZE_KEY put envelopes.size
FORMAT_NAME_KEY put format.name.toString()
if (!formatMeta.isEmpty()) {
FORMAT_META_KEY put formatMeta
}
val parts = envelopes.map {
val binary = format.toBinary(it)
EnvelopePart(binary, null)
}
data {
format.run {
var counter = 0
envelopes.forEach { (key, envelope) ->
writeRawString(MULTIPART_DATA_SEPARATOR)
writeEnvelope(envelope)
meta {
append(INDEX_KEY, Meta {
"key" put key
"index" put counter
})
}
counter++
}
meta{
set(MULTIPART_KEY + PART_FORMAT_KEY, format.toMeta())
}
multipart(parts, separator)
}
fun Envelope.parts(): EnvelopeParts {
if (data == null) return emptyList()
//TODO add zip folder reader
val parts = meta.getIndexed(PARTS_KEY).values.mapNotNull { it.node }.map {
PartDescriptor.wrap(it)
}
return if (parts.isEmpty()) {
listOf(EnvelopePart(data!!, meta[MULTIPART_KEY].node))
} else {
parts.map {
val binary = data!!.view(it.offset, it.size)
val meta = Laminate(it.meta, meta[MULTIPART_KEY].node)
EnvelopePart(binary, meta)
}
}
}
@DFExperimental
fun EnvelopeBuilder.multipart(
formatFactory: EnvelopeFormatFactory,
formatMeta: Meta = EmptyMeta,
builder: suspend SequenceScope<Envelope>.() -> Unit
) = multipart(sequence(builder).toList(), formatFactory, formatMeta)
fun EnvelopePart.envelope(format: EnvelopeFormat): Envelope = binary.readWith(format)
val EnvelopePart.name: String? get() = description?.get("name").string
/**
* If given envelope supports multipart data, return a sequence of those parts (could be empty). Otherwise return null.
* Represent envelope part by an envelope
*/
@DFExperimental
fun Envelope.parts(io: IOPlugin = Global.plugins.fetch(IOPlugin)): Sequence<Envelope>? {
return when (dataType) {
MULTIPART_DATA_TYPE -> {
val size = meta[SIZE_KEY].int ?: error("Unsized parts not supported yet")
val formatName = meta[FORMAT_NAME_KEY].string?.toName()
?: error("Inferring parts format is not supported at the moment")
val formatMeta = meta[FORMAT_META_KEY].node ?: EmptyMeta
val format = io.envelopeFormat(formatName, formatMeta)
?: error("Format $formatName is not resolved by $io")
return format.run {
data?.read {
sequence {
repeat(size) {
val separator = readRawString(MULTIPART_DATA_SEPARATOR.length)
if(separator!= MULTIPART_DATA_SEPARATOR) error("Separator is expected, but $separator found")
yield(readObject())
}
}
} ?: emptySequence()
}
}
else -> null
fun EnvelopePart.envelope(plugin: IOPlugin): Envelope {
val formatItem = description?.get(PART_FORMAT_KEY)
return if (formatItem != null) {
val format: EnvelopeFormat = plugin.resolveEnvelopeFormat(formatItem)
?: error("Envelope format for $formatItem is not resolved")
binary.readWith(format)
} else {
error("Envelope description not found")
//SimpleEnvelope(description ?: Meta.EMPTY, binary)
}
}
}

View File

@ -3,9 +3,11 @@ package hep.dataforge.io
import hep.dataforge.context.Context
import hep.dataforge.context.Factory
import hep.dataforge.context.Named
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.io.IOFormatFactory.Companion.IO_FORMAT_TYPE
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaItem
import hep.dataforge.meta.MetaRepr
import hep.dataforge.names.Name
import hep.dataforge.names.asName
import hep.dataforge.provider.Type
@ -18,12 +20,25 @@ import kotlin.reflect.KClass
/**
* And interface for reading and writing objects into with IO streams
*/
interface IOFormat<T : Any> {
interface IOFormat<T : Any> : MetaRepr {
fun Output.writeObject(obj: T)
fun Input.readObject(): T
companion object{
val NAME_KEY = "name".asName()
val META_KEY = "meta".asName()
}
}
fun <T : Any> Input.readWith(format: IOFormat<T>): T = format.run { readObject() }
/**
* Read given binary as object using given format
*/
fun <T : Any> Binary.readWith(format: IOFormat<T>): T = read {
readWith(format)
}
fun <T : Any> Output.writeWith(format: IOFormat<T>, obj: T) = format.run { writeObject(obj) }
class ListIOFormat<T : Any>(val format: IOFormat<T>) : IOFormat<List<T>> {
@ -42,6 +57,11 @@ class ListIOFormat<T : Any>(val format: IOFormat<T>) : IOFormat<List<T>> {
List(size) { readObject() }
}
}
override fun toMeta(): Meta = Meta {
NAME_KEY put "list"
"contentFormat" put format.toMeta()
}
}
val <T : Any> IOFormat<T>.list get() = ListIOFormat(this)
@ -57,22 +77,22 @@ fun ObjectPool<Buffer>.fill(block: Buffer.() -> Unit): Buffer {
}
@Type(IO_FORMAT_TYPE)
interface IOFormatFactory<T : Any> : Factory<IOFormat<T>>, Named {
interface IOFormatFactory<T : Any> : Factory<IOFormat<T>>, Named, MetaRepr {
/**
* Explicit type for dynamic type checks
*/
val type: KClass<out T>
override fun toMeta(): Meta = Meta {
NAME_KEY put name.toString()
}
companion object {
const val IO_FORMAT_TYPE = "io.format"
}
}
fun <T : Any> IOFormat<T>.writeBytes(obj: T): Bytes = buildBytes { writeObject(obj) }
fun <T : Any> IOFormat<T>.writeByteArray(obj: T): ByteArray = buildBytes { writeObject(obj) }.toByteArray()
fun <T : Any> IOFormat<T>.readByteArray(array: ByteArray): T = array.asBinary().read { readObject() }
fun <T : Any> IOFormat<T>.toBinary(obj: T): Binary = Binary { writeObject(obj) }
object DoubleIOFormat : IOFormat<Double>, IOFormatFactory<Double> {
override fun invoke(meta: Meta, context: Context): IOFormat<Double> = this
@ -103,13 +123,4 @@ object ValueIOFormat : IOFormat<Value>, IOFormatFactory<Value> {
return (BinaryMetaFormat.run { readMetaItem() } as? MetaItem.ValueItem)?.value
?: error("The item is not a value")
}
}
/**
* Read given binary as object using given format
*/
fun <T : Any> Binary.readWith(format: IOFormat<T>): T = format.run {
read {
readObject()
}
}

View File

@ -2,33 +2,56 @@ package hep.dataforge.io
import hep.dataforge.context.*
import hep.dataforge.io.EnvelopeFormatFactory.Companion.ENVELOPE_FORMAT_TYPE
import hep.dataforge.io.IOFormat.Companion.META_KEY
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.io.IOFormatFactory.Companion.IO_FORMAT_TYPE
import hep.dataforge.io.MetaFormatFactory.Companion.META_FORMAT_TYPE
import hep.dataforge.meta.*
import hep.dataforge.names.Name
import hep.dataforge.names.get
import hep.dataforge.names.toName
import kotlin.reflect.KClass
class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
override val tag: PluginTag get() = Companion.tag
val ioFormatFactories by lazy {
context.content<IOFormatFactory<*>>(IO_FORMAT_TYPE).values
}
fun <T : Any> resolveIOFormat(item: MetaItem<*>, type: KClass<out T>): IOFormat<T>? {
val key = item.string ?: item.node[NAME_KEY]?.string ?: error("Format name not defined")
val name = key.toName()
return ioFormatFactories.find { it.name == name }?.let {
@Suppress("UNCHECKED_CAST")
if (it.type != type) error("Format type ${it.type} is not the same as requested type $type")
else it.invoke(item.node[META_KEY].node ?: Meta.EMPTY, context) as IOFormat<T>
}
}
val metaFormatFactories by lazy {
context.content<MetaFormatFactory>(META_FORMAT_TYPE).values
}
fun metaFormat(key: Short, meta: Meta = EmptyMeta): MetaFormat? =
fun resolveMetaFormat(key: Short, meta: Meta = Meta.EMPTY): MetaFormat? =
metaFormatFactories.find { it.key == key }?.invoke(meta)
fun metaFormat(name: String, meta: Meta = EmptyMeta): MetaFormat? =
fun resolveMetaFormat(name: String, meta: Meta = Meta.EMPTY): MetaFormat? =
metaFormatFactories.find { it.shortName == name }?.invoke(meta)
val envelopeFormatFactories by lazy {
context.content<EnvelopeFormatFactory>(ENVELOPE_FORMAT_TYPE).values
}
fun envelopeFormat(name: Name, meta: Meta = EmptyMeta) =
fun resolveEnvelopeFormat(name: Name, meta: Meta = Meta.EMPTY): EnvelopeFormat? =
envelopeFormatFactories.find { it.name == name }?.invoke(meta, context)
fun resolveEnvelopeFormat(item: MetaItem<*>): EnvelopeFormat? {
val name = item.string ?: item.node[NAME_KEY]?.string ?: error("Envelope format name not defined")
val meta = item.node[META_KEY].node ?: Meta.EMPTY
return resolveEnvelopeFormat(name.toName(), meta)
}
override fun provideTop(target: String): Map<Name, Any> {
return when (target) {
META_FORMAT_TYPE -> defaultMetaFormats.toMap()
@ -37,19 +60,6 @@ class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
}
}
val ioFormats: Map<Name, IOFormatFactory<*>> by lazy {
context.content<IOFormatFactory<*>>(IO_FORMAT_TYPE)
}
fun <T : Any> resolveIOFormat(item: MetaItem<*>, type: KClass<out T>): IOFormat<T>? {
val key = item.string ?: item.node["name"]?.string ?: error("Format name not defined")
return ioFormats[key]?.let {
@Suppress("UNCHECKED_CAST")
if (it.type != type) error("Format type ${it.type} is not the same as requested type $type")
else it.invoke(item.node["meta"].node ?: EmptyMeta, context) as IOFormat<T>
}
}
companion object : PluginFactory<IOPlugin> {
val defaultMetaFormats: List<MetaFormatFactory> = listOf(JsonMetaFormat, BinaryMetaFormat)
val defaultEnvelopeFormats = listOf(TaggedEnvelopeFormat, TaglessEnvelopeFormat)

View File

@ -4,6 +4,7 @@ package hep.dataforge.io
import hep.dataforge.context.Context
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.meta.Meta
import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.meta.node
@ -11,7 +12,7 @@ import hep.dataforge.meta.toJson
import hep.dataforge.meta.toMetaItem
import kotlinx.io.Input
import kotlinx.io.Output
import kotlinx.io.text.readUtf8String
import kotlinx.io.readByteArray
import kotlinx.io.text.writeUtf8String
import kotlinx.serialization.UnstableDefault
import kotlinx.serialization.json.Json
@ -25,8 +26,12 @@ class JsonMetaFormat(private val json: Json = DEFAULT_JSON) : MetaFormat {
writeUtf8String(json.stringify(JsonObjectSerializer, jsonObject))
}
override fun toMeta(): Meta = Meta{
NAME_KEY put name.toString()
}
override fun Input.readMeta(descriptor: NodeDescriptor?): Meta {
val str = readUtf8String()
val str = readByteArray().decodeToString()
val jsonElement = json.parseJson(str)
val item = jsonElement.toMetaItem(descriptor)
return item.node ?: Meta.EMPTY

View File

@ -1,14 +1,17 @@
package hep.dataforge.io
import hep.dataforge.context.Context
import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.io.MetaFormatFactory.Companion.META_FORMAT_TYPE
import hep.dataforge.meta.Meta
import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.names.Name
import hep.dataforge.names.asName
import hep.dataforge.names.plus
import hep.dataforge.provider.Type
import kotlinx.io.*
import kotlinx.io.ByteArrayInput
import kotlinx.io.Input
import kotlinx.io.Output
import kotlinx.io.use
import kotlin.reflect.KClass
/**
@ -44,14 +47,14 @@ interface MetaFormatFactory : IOFormatFactory<Meta>, MetaFormat {
}
}
fun Meta.toString(format: MetaFormat): String = buildBytes {
fun Meta.toString(format: MetaFormat): String = buildByteArray {
format.run { writeObject(this@toString) }
}.toByteArray().decodeToString()
}.decodeToString()
fun Meta.toString(formatFactory: MetaFormatFactory): String = toString(formatFactory())
fun MetaFormat.parse(str: String): Meta {
return str.encodeToByteArray().read { readObject() }
return ByteArrayInput(str.encodeToByteArray()).use { it.readObject() }
}
fun MetaFormatFactory.parse(str: String, formatMeta: Meta): Meta = invoke(formatMeta).parse(str)

View File

@ -1,17 +1,17 @@
package hep.dataforge.io
import hep.dataforge.context.Context
import hep.dataforge.io.IOFormat.Companion.META_KEY
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.meta.Meta
import hep.dataforge.meta.enum
import hep.dataforge.meta.get
import hep.dataforge.meta.string
import hep.dataforge.names.Name
import hep.dataforge.names.plus
import hep.dataforge.names.toName
import kotlinx.io.*
import kotlinx.io.text.readRawString
import kotlinx.io.text.writeRawString
@ExperimentalIoApi
class TaggedEnvelopeFormat(
val io: IOPlugin,
val version: VERSION = VERSION.DF02
@ -21,7 +21,7 @@ class TaggedEnvelopeFormat(
// ?: error("Meta format with key $metaFormatKey could not be resolved in $io")
private fun Tag.toBytes() = buildBytes(24) {
private fun Tag.toBinary() = Binary(24) {
writeRawString(START_SEQUENCE)
writeRawString(version.name)
writeShort(metaFormatKey)
@ -39,14 +39,10 @@ class TaggedEnvelopeFormat(
override fun Output.writeEnvelope(envelope: Envelope, metaFormatFactory: MetaFormatFactory, formatMeta: Meta) {
val metaFormat = metaFormatFactory.invoke(formatMeta, io.context)
val metaBytes = metaFormat.writeBytes(envelope.meta)
val actualSize: ULong = if (envelope.data == null) {
0
} else {
envelope.data?.size ?: Binary.INFINITE
}.toULong()
val metaBytes = metaFormat.toBinary(envelope.meta)
val actualSize: ULong = (envelope.data?.size ?: 0).toULong()
val tag = Tag(metaFormatFactory.key, metaBytes.size.toUInt() + 2u, actualSize)
writeBinary(tag.toBytes())
writeBinary(tag.toBinary())
writeBinary(metaBytes)
writeRawString("\r\n")
envelope.data?.let {
@ -64,7 +60,7 @@ class TaggedEnvelopeFormat(
override fun Input.readObject(): Envelope {
val tag = readTag(version)
val metaFormat = io.metaFormat(tag.metaFormatKey)
val metaFormat = io.resolveMetaFormat(tag.metaFormatKey)
?: error("Meta format with key ${tag.metaFormatKey} not found")
val meta: Meta = limit(tag.metaSize.toInt()).run {
@ -73,7 +69,7 @@ class TaggedEnvelopeFormat(
}
}
val data = ByteArray(tag.dataSize.toInt()).also { readArray(it) }.asBinary()
val data = readBinary(tag.dataSize.toInt())
return SimpleEnvelope(meta, data)
}
@ -81,7 +77,7 @@ class TaggedEnvelopeFormat(
override fun Input.readPartial(): PartialEnvelope {
val tag = readTag(version)
val metaFormat = io.metaFormat(tag.metaFormatKey)
val metaFormat = io.resolveMetaFormat(tag.metaFormatKey)
?: error("Meta format with key ${tag.metaFormatKey} not found")
val meta: Meta = limit(tag.metaSize.toInt()).run {
@ -104,6 +100,13 @@ class TaggedEnvelopeFormat(
DF03(24u)
}
override fun toMeta(): Meta = Meta {
NAME_KEY put name.toString()
META_KEY put {
"version" put version
}
}
companion object : EnvelopeFormatFactory {
private const val START_SEQUENCE = "#~"
private const val END_SEQUENCE = "~#\r\n"
@ -117,7 +120,9 @@ class TaggedEnvelopeFormat(
//Check if appropriate factory exists
io.metaFormatFactories.find { it.name == metaFormatName } ?: error("Meta format could not be resolved")
return TaggedEnvelopeFormat(io)
val version: VERSION = meta["version"].enum<VERSION>() ?: VERSION.DF02
return TaggedEnvelopeFormat(io, version)
}
private fun Input.readTag(version: VERSION): Tag {
@ -138,11 +143,13 @@ class TaggedEnvelopeFormat(
override fun peekFormat(io: IOPlugin, input: Input): EnvelopeFormat? {
return try {
val header = input.readRawString(6)
when (header.substring(2..5)) {
VERSION.DF02.name -> TaggedEnvelopeFormat(io, VERSION.DF02)
VERSION.DF03.name -> TaggedEnvelopeFormat(io, VERSION.DF03)
else -> null
input.preview {
val header = readRawString(6)
return@preview when (header.substring(2..5)) {
VERSION.DF02.name -> TaggedEnvelopeFormat(io, VERSION.DF02)
VERSION.DF03.name -> TaggedEnvelopeFormat(io, VERSION.DF03)
else -> null
}
}
} catch (ex: Exception) {
null

View File

@ -1,18 +1,21 @@
package hep.dataforge.io
import hep.dataforge.context.Context
import hep.dataforge.meta.*
import hep.dataforge.io.IOFormat.Companion.META_KEY
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.meta.Meta
import hep.dataforge.meta.get
import hep.dataforge.meta.isEmpty
import hep.dataforge.meta.string
import hep.dataforge.names.asName
import kotlinx.io.*
import kotlinx.io.text.readRawString
import kotlinx.io.text.readUtf8Line
import kotlinx.io.text.writeRawString
import kotlinx.io.text.writeUtf8String
import kotlin.collections.set
@ExperimentalIoApi
class TaglessEnvelopeFormat(
val io: IOPlugin,
meta: Meta = EmptyMeta
val meta: Meta = Meta.EMPTY
) : EnvelopeFormat {
private val metaStart = meta[META_START_PROPERTY].string ?: DEFAULT_META_START
@ -31,17 +34,13 @@ class TaglessEnvelopeFormat(
//printing all properties
writeProperty(META_TYPE_PROPERTY, metaFormatFactory.shortName)
//TODO add optional metaFormat properties
val actualSize: Int = if (envelope.data == null) {
0
} else {
envelope.data?.size ?: Binary.INFINITE
}
val actualSize: Int = envelope.data?.size ?: 0
writeProperty(DATA_LENGTH_PROPERTY, actualSize)
//Printing meta
if (!envelope.meta.isEmpty()) {
val metaBytes = metaFormat.writeBytes(envelope.meta)
val metaBytes = metaFormat.toBinary(envelope.meta)
writeProperty(META_LENGTH_PROPERTY, metaBytes.size + 2)
writeUtf8String(metaStart + "\r\n")
writeBinary(metaBytes)
@ -71,14 +70,14 @@ class TaglessEnvelopeFormat(
properties[key] = value
}
//If can't read line, return envelope without data
if (eof()) return SimpleEnvelope(Meta.EMPTY, null)
if (exhausted()) return SimpleEnvelope(Meta.EMPTY, null)
line = readUtf8Line()
}
var meta: Meta = EmptyMeta
var meta: Meta = Meta.EMPTY
if (line.startsWith(metaStart)) {
val metaFormat = properties[META_TYPE_PROPERTY]?.let { io.metaFormat(it) } ?: JsonMetaFormat
val metaFormat = properties[META_TYPE_PROPERTY]?.let { io.resolveMetaFormat(it) } ?: JsonMetaFormat
val metaSize = properties[META_LENGTH_PROPERTY]?.toInt()
meta = if (metaSize != null) {
limit(metaSize).run {
@ -101,12 +100,13 @@ class TaglessEnvelopeFormat(
} while (!line.startsWith(dataStart))
val data: Binary? = if (properties.containsKey(DATA_LENGTH_PROPERTY)) {
val bytes = ByteArray(properties[DATA_LENGTH_PROPERTY]!!.toInt())
readArray(bytes)
bytes.asBinary()
readBinary(properties[DATA_LENGTH_PROPERTY]!!.toInt())
// val bytes = ByteArray(properties[DATA_LENGTH_PROPERTY]!!.toInt())
// readByteArray(bytes)
// bytes.asBinary()
} else {
ArrayBinary.write {
writeInput(this@readObject)
Binary {
copyTo(this)
}
}
@ -138,10 +138,10 @@ class TaglessEnvelopeFormat(
}
}
var meta: Meta = EmptyMeta
var meta: Meta = Meta.EMPTY
if (line.startsWith(metaStart)) {
val metaFormat = properties[META_TYPE_PROPERTY]?.let { io.metaFormat(it) } ?: JsonMetaFormat
val metaFormat = properties[META_TYPE_PROPERTY]?.let { io.resolveMetaFormat(it) } ?: JsonMetaFormat
val metaSize = properties[META_LENGTH_PROPERTY]?.toInt()
meta = if (metaSize != null) {
offset += metaSize.toUInt()
@ -163,6 +163,11 @@ class TaglessEnvelopeFormat(
return PartialEnvelope(meta, offset, dataSize)
}
/** Represent this format as [Meta]: the format name plus the configuration meta this instance was built with. */
override fun toMeta(): Meta = Meta {
    NAME_KEY put name.toString()
    META_KEY put meta
}
companion object : EnvelopeFormatFactory {
private val propertyPattern = "#\\?\\s*(?<key>[\\w.]*)\\s*:\\s*(?<value>[^;]*);?".toRegex()
@ -201,11 +206,13 @@ class TaglessEnvelopeFormat(
override fun peekFormat(io: IOPlugin, input: Input): EnvelopeFormat? {
return try {
val string = input.readRawString(TAGLESS_ENVELOPE_HEADER.length)
return if (string == TAGLESS_ENVELOPE_HEADER) {
TaglessEnvelopeFormat(io)
} else {
null
input.preview {
val string = readRawString(TAGLESS_ENVELOPE_HEADER.length)
return@preview if (string == TAGLESS_ENVELOPE_HEADER) {
TaglessEnvelopeFormat(io)
} else {
null
}
}
} catch (ex: Exception) {
null

View File

@ -6,7 +6,6 @@ import hep.dataforge.io.*
import hep.dataforge.meta.Meta
import hep.dataforge.meta.get
import hep.dataforge.meta.int
import hep.dataforge.meta.scheme.int
import kotlin.reflect.KClass
class RemoteFunctionClient(override val context: Context, val responder: Responder) : FunctionServer, ContextAware {

View File

@ -8,7 +8,6 @@ import hep.dataforge.io.Responder
import hep.dataforge.io.type
import hep.dataforge.meta.get
import hep.dataforge.meta.int
import hep.dataforge.meta.scheme.int
class RemoteFunctionServer(
override val context: Context,

View File

@ -0,0 +1,39 @@
package hep.dataforge.io
import kotlinx.io.*
import kotlin.math.min
/**
 * Write the string as one byte per character with no charset encoding.
 * Each [Char] is truncated to its low byte, so this is only lossless for ASCII-range text.
 */
fun Output.writeRawString(str: String) {
    for (char in str) {
        writeByte(char.toByte())
    }
}
/**
 * Read exactly [size] bytes, interpreting each byte as a single character (no charset decoding).
 * Inverse of [writeRawString] for ASCII-range content.
 */
fun Input.readRawString(size: Int): String = buildString(size) {
    repeat(size) {
        append(readByte().toChar())
    }
}
/**
 * Build a [ByteArray] by writing into a temporary [ByteArrayOutput] of the given [expectedSize].
 */
inline fun buildByteArray(expectedSize: Int = 16, block: Output.() -> Unit): ByteArray {
    val output = ByteArrayOutput(expectedSize)
    output.block()
    return output.toByteArray()
}
/**
 * Factory function: build an in-memory [Binary] from the bytes produced by [block].
 */
@Suppress("FunctionName")
inline fun Binary(expectedSize: Int = 16, block: Output.() -> Unit): Binary {
    val bytes = buildByteArray(expectedSize, block)
    return bytes.asBinary()
}
/**
 * View a section of a [Binary] as an independent binary.
 *
 * @param source the backing binary; reads are delegated to it.
 * @param start offset of the view inside [source]; must be non-negative.
 * @param size length of the view; [start] + [size] must fit inside [source].
 */
class BinaryView(private val source: Binary, private val start: Int, override val size: Int) : Binary {
    init {
        // Bug fix: the original `require(start > 0)` rejected a perfectly valid view
        // starting at offset 0. A view offset only needs to be non-negative.
        require(start >= 0) { "View start offset must be non-negative" }
        require(start + size <= source.size) { "View boundary is outside source binary size" }
    }

    override fun <R> read(offset: Int, atMost: Int, block: Input.() -> R): R {
        // Delegate to the source, shifting by the view start and clamping the
        // readable amount to the view size.
        return source.read(start + offset, min(size, atMost), block)
    }
}
/** Create a [BinaryView] over [size] bytes of this binary starting at [start]. */
fun Binary.view(start: Int, size: Int) = BinaryView(this, start, size)

/**
 * Slice this binary by an inclusive [range].
 * Bug fix: [IntRange] is inclusive, so the element count is `last - first + 1`;
 * the original `last - first` silently dropped the final byte of the range.
 */
operator fun Binary.get(range: IntRange) = view(range.first, range.last - range.first + 1)

View File

@ -0,0 +1,20 @@
package hep.dataforge.io
import kotlinx.io.asBinary
import kotlinx.io.readByte
import kotlinx.io.readInt
import kotlin.test.Test
import kotlin.test.assertEquals
class BinaryTest {
    @Test
    fun testBinaryAccess(){
        // 128 bytes where each byte's value equals its index
        val binary = ByteArray(128){it.toByte()}.asBinary()
        // View starting at source byte 3: readInt consumes view bytes 0..3
        // (source bytes 3..6), so the next byte read is source byte 7 with value 7.
        binary[3..12].read {
            readInt()
            val res = readByte()
            assertEquals(7, res)
        }
    }
}

View File

@ -23,7 +23,7 @@ class EnvelopeFormatTest {
@Test
fun testTaggedFormat(){
TaggedEnvelopeFormat.run {
val byteArray = this.writeByteArray(envelope)
val byteArray = this.toByteArray(envelope)
//println(byteArray.decodeToString())
val res = readByteArray(byteArray)
assertEquals(envelope.meta,res.meta)
@ -37,7 +37,7 @@ class EnvelopeFormatTest {
@Test
fun testTaglessFormat(){
TaglessEnvelopeFormat.run {
val byteArray = writeByteArray(envelope)
val byteArray = toByteArray(envelope)
//println(byteArray.decodeToString())
val res = readByteArray(byteArray)
assertEquals(envelope.meta,res.meta)

View File

@ -1,20 +1,19 @@
package hep.dataforge.io
import hep.dataforge.meta.*
import kotlinx.io.Bytes
import kotlinx.io.buildBytes
import kotlinx.io.asBinary
import kotlinx.serialization.json.JsonPrimitive
import kotlinx.serialization.json.json
import kotlinx.serialization.json.jsonArray
import kotlin.test.Test
import kotlin.test.assertEquals
fun Meta.toBytes(format: MetaFormat = JsonMetaFormat): Bytes = buildBytes {
format.run { writeObject(this@toBytes) }
fun Meta.toByteArray(format: MetaFormat = JsonMetaFormat) = buildByteArray {
format.run { writeObject(this@toByteArray) }
}
fun MetaFormat.fromBytes(packet: Bytes): Meta {
return packet.read { readObject() }
fun MetaFormat.fromByteArray(packet: ByteArray): Meta {
return packet.asBinary().read { readObject() }
}
class MetaFormatTest {
@ -28,8 +27,8 @@ class MetaFormatTest {
"array" put doubleArrayOf(1.0, 2.0, 3.0)
}
}
val bytes = meta.toBytes(BinaryMetaFormat)
val result = BinaryMetaFormat.fromBytes(bytes)
val bytes = meta.toByteArray(BinaryMetaFormat)
val result = BinaryMetaFormat.fromByteArray(bytes)
assertEquals(meta, result)
}
@ -56,12 +55,12 @@ class MetaFormatTest {
}
@Test
fun testJsonToMeta(){
val json = jsonArray{
fun testJsonToMeta() {
val json = jsonArray {
//top level array
+jsonArray {
+JsonPrimitive(88)
+json{
+json {
"c" to "aasdad"
"d" to true
}
@ -77,7 +76,7 @@ class MetaFormatTest {
assertEquals(true, meta["@value[0].@value[1].d"].boolean)
assertEquals("value", meta["@value[1]"].string)
assertEquals(listOf(1.0,2.0,3.0),meta["@value[2"].value?.list?.map{it.number.toDouble()})
assertEquals(listOf(1.0, 2.0, 3.0), meta["@value[2"].value?.list?.map { it.number.toDouble() })
}
}

View File

@ -1,12 +1,9 @@
package hep.dataforge.io
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaItem
import hep.dataforge.meta.MetaSerializer
import hep.dataforge.meta.*
import hep.dataforge.names.Name
import hep.dataforge.names.toName
import kotlinx.serialization.cbor.Cbor
import kotlinx.serialization.json.Json
import kotlin.test.Test
import kotlin.test.assertEquals
@ -22,8 +19,8 @@ class MetaSerializerTest {
@Test
fun testMetaSerialization() {
val string = Json.indented.stringify(MetaSerializer, meta)
val restored = Json.plain.parse(MetaSerializer, string)
val string = JSON_PRETTY.stringify(MetaSerializer, meta)
val restored = JSON_PLAIN.parse(MetaSerializer, string)
assertEquals(restored, meta)
}
@ -38,8 +35,8 @@ class MetaSerializerTest {
@Test
fun testNameSerialization() {
val name = "a.b.c".toName()
val string = Json.indented.stringify(Name.serializer(), name)
val restored = Json.plain.parse(Name.serializer(), string)
val string = JSON_PRETTY.stringify(Name.serializer(), name)
val restored = JSON_PLAIN.parse(Name.serializer(), string)
assertEquals(restored, name)
}

View File

@ -1,19 +1,19 @@
package hep.dataforge.io
import hep.dataforge.context.Global
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.get
import hep.dataforge.meta.int
import hep.dataforge.meta.scheme.int
import kotlinx.io.text.writeRawString
import kotlinx.io.text.writeUtf8String
import kotlin.test.Test
import kotlin.test.assertEquals
import kotlin.test.assertTrue
@DFExperimental
class MultipartTest {
val envelopes = (0..5).map {
val io: IOPlugin = Global.io
val envelopes = (0 until 5).map {
Envelope {
meta {
"value" put it
@ -28,19 +28,21 @@ class MultipartTest {
}
val partsEnvelope = Envelope {
multipart(envelopes, TaggedEnvelopeFormat)
envelopes(envelopes, TaglessEnvelopeFormat)
}
@Test
fun testParts() {
TaggedEnvelopeFormat.run {
val singleEnvelopeData = writeBytes(envelopes[0])
TaglessEnvelopeFormat.run {
val singleEnvelopeData = toBinary(envelopes[0])
val singleEnvelopeSize = singleEnvelopeData.size
val bytes = writeBytes(partsEnvelope)
assertTrue(5*singleEnvelopeSize < bytes.size)
val bytes = toBinary(partsEnvelope)
assertTrue(envelopes.size * singleEnvelopeSize < bytes.size)
val reconstructed = bytes.readWith(this)
val parts = reconstructed.parts()?.toList() ?: emptyList()
assertEquals(2, parts[2].meta["value"].int)
println(reconstructed.meta)
val parts = reconstructed.parts()
val envelope = parts[2].envelope(io)
assertEquals(2, envelope.meta["value"].int)
println(reconstructed.data!!.size)
}
}

View File

@ -0,0 +1,7 @@
package hep.dataforge.io
import kotlinx.io.ByteArrayInput
import kotlinx.io.use
/** Serialize [obj] with this format into a freshly allocated [ByteArray]. */
fun <T : Any> IOFormat<T>.toByteArray(obj: T): ByteArray = buildByteArray { writeObject(obj) }

/** Deserialize an object of type [T] from [array] using this format; the input is closed after use. */
fun <T : Any> IOFormat<T>.readByteArray(array: ByteArray): T = ByteArrayInput(array).use { it.readObject() }

View File

@ -1,22 +0,0 @@
package hep.dataforge.io
import hep.dataforge.meta.Meta
import kotlinx.io.Binary
import kotlinx.io.ExperimentalIoApi
import kotlinx.io.FileBinary
import kotlinx.io.read
import java.nio.file.Path
@ExperimentalIoApi
class FileEnvelope internal constructor(val path: Path, val format: EnvelopeFormat) : Envelope {
//TODO do not like this constructor. Hope to replace it later
private val partialEnvelope: PartialEnvelope = path.read {
format.run { readPartial() }
}
override val meta: Meta get() = partialEnvelope.meta
override val data: Binary? = FileBinary(path, partialEnvelope.dataOffset.toInt(), partialEnvelope.dataSize?.toInt())
}

View File

@ -1,23 +1,65 @@
package hep.dataforge.io
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.meta.isEmpty
import kotlinx.io.*
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.StandardOpenOption
import kotlin.reflect.full.isSuperclassOf
import kotlin.streams.asSequence
/** Open this file as a binary and read it via [block]. */
fun <R> Path.read(block: Input.() -> R): R = asBinary().read(block = block)
/**
 * Write a live output to a newly created file.
 * Note: [StandardOpenOption.CREATE_NEW] means this throws if the file ALREADY exists
 * (the previous comment had this inverted).
 */
fun Path.write(block: Output.() -> Unit): Unit {
    val stream = Files.newOutputStream(this, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW)
    stream.asOutput().use(block)
}
/**
 * Create a new file or append to an existing one with the given output [block]
 */
fun Path.append(block: Output.() -> Unit): Unit {
    val stream = Files.newOutputStream(
        this,
        StandardOpenOption.WRITE, StandardOpenOption.APPEND, StandardOpenOption.CREATE
    )
    stream.asOutput().use(block)
}
/**
 * Create a new file or replace (truncate) an existing one using the given output [block]
 */
fun Path.rewrite(block: Output.() -> Unit): Unit {
    val stream = Files.newOutputStream(
        this,
        StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE
    )
    stream.asOutput().use(block)
}
/**
 * Read an [Envelope] from this file using the given [format].
 * The envelope header is parsed eagerly; the data section is wrapped as a
 * [FileBinary] over the corresponding file region.
 */
fun Path.readEnvelope(format: EnvelopeFormat): Envelope {
    val partialEnvelope: PartialEnvelope = asBinary().read {
        format.run { readPartial() }
    }
    val offset: Int = partialEnvelope.dataOffset.toInt()
    // If the header declares no data size, take the rest of the file after the offset
    val size: Int = partialEnvelope.dataSize?.toInt() ?: (Files.size(this).toInt() - offset)
    val binary = FileBinary(this, offset, size)
    return SimpleEnvelope(partialEnvelope.meta, binary)
}
/**
* Resolve IOFormat based on type
*/
@Suppress("UNCHECKED_CAST")
@DFExperimental
inline fun <reified T : Any> IOPlugin.resolveIOFormat(): IOFormat<T>? {
return ioFormats.values.find { it.type.isSuperclassOf(T::class) } as IOFormat<T>?
return ioFormatFactories.find { it.type.isSuperclassOf(T::class) } as IOFormat<T>?
}
/**
@ -35,9 +77,9 @@ fun IOPlugin.readMetaFile(path: Path, formatOverride: MetaFormat? = null, descri
}
val extension = actualPath.fileName.toString().substringAfterLast('.')
val metaFormat = formatOverride ?: metaFormat(extension) ?: error("Can't resolve meta format $extension")
val metaFormat = formatOverride ?: resolveMetaFormat(extension) ?: error("Can't resolve meta format $extension")
return metaFormat.run {
actualPath.read{
actualPath.read {
readMeta(descriptor)
}
}
@ -59,7 +101,7 @@ fun IOPlugin.writeMetaFile(
path
}
metaFormat.run {
actualPath.write{
actualPath.write {
writeMeta(meta, descriptor)
}
}
@ -114,7 +156,7 @@ fun IOPlugin.readEnvelopeFile(
.singleOrNull { it.fileName.toString().startsWith(IOPlugin.META_FILE_NAME) }
val meta = if (metaFile == null) {
EmptyMeta
Meta.EMPTY
} else {
readMetaFile(metaFile)
}
@ -131,7 +173,7 @@ fun IOPlugin.readEnvelopeFile(
}
return formatPeeker(path)?.let { format ->
FileEnvelope(path, format)
path.readEnvelope(format)
} ?: if (readNonEnvelopes) { // if no format accepts file, read it as binary
SimpleEnvelope(Meta.EMPTY, path.asBinary())
} else null
@ -156,7 +198,7 @@ fun IOPlugin.writeEnvelopeFile(
envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat,
metaFormat: MetaFormatFactory? = null
) {
path.write {
path.rewrite {
with(envelopeFormat) {
writeEnvelope(envelope, metaFormat ?: envelopeFormat.defaultMetaFormat)
}
@ -184,8 +226,8 @@ fun IOPlugin.writeEnvelopeDirectory(
val dataFile = path.resolve(IOPlugin.DATA_FILE_NAME)
dataFile.write {
envelope.data?.read {
val copied = writeInput(this)
if (envelope.data?.size != Binary.INFINITE && copied != envelope.data?.size) {
val copied = copyTo(this@write)
if (copied != envelope.data?.size) {
error("The number of copied bytes does not equal data size")
}
}

View File

@ -9,7 +9,7 @@ import kotlin.reflect.full.isSuperclassOf
fun IOPlugin.resolveIOFormatName(type: KClass<*>): Name {
return ioFormats.entries.find { it.value.type.isSuperclassOf(type) }?.key
return ioFormatFactories.find { it.type.isSuperclassOf(type) }?.name
?: error("Can't resolve IOFormat for type $type")
}

View File

@ -3,7 +3,6 @@ package hep.dataforge.io.tcp
import hep.dataforge.context.Context
import hep.dataforge.context.ContextAware
import hep.dataforge.io.*
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
import kotlinx.coroutines.asCoroutineDispatcher
import kotlinx.coroutines.withContext
@ -17,7 +16,7 @@ class EnvelopeClient(
val host: String,
val port: Int,
formatFactory: EnvelopeFormatFactory = TaggedEnvelopeFormat,
formatMeta: Meta = EmptyMeta
formatMeta: Meta = Meta.EMPTY
) : Responder, ContextAware {
private val dispatcher = Executors.newSingleThreadExecutor().asCoroutineDispatcher()

View File

@ -6,7 +6,6 @@ import hep.dataforge.io.EnvelopeFormatFactory
import hep.dataforge.io.Responder
import hep.dataforge.io.TaggedEnvelopeFormat
import hep.dataforge.io.type
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
import kotlinx.coroutines.*
import java.net.ServerSocket
@ -19,7 +18,7 @@ class EnvelopeServer(
val responder: Responder,
val scope: CoroutineScope,
formatFactory: EnvelopeFormatFactory = TaggedEnvelopeFormat,
formatMeta: Meta = EmptyMeta
formatMeta: Meta = Meta.EMPTY
) : ContextAware {
private var job: Job? = null
@ -78,7 +77,7 @@ class EnvelopeServer(
logger.debug { "Accepted request with type ${request.type} from ${socket.remoteSocketAddress}" }
if (request.type == SHUTDOWN_ENVELOPE_TYPE) {
//Echo shutdown command
outputStream.write{
outputStream.write {
writeObject(request)
}
logger.info { "Accepted graceful shutdown signal from ${socket.inetAddress}" }

View File

@ -1,62 +1,45 @@
package hep.dataforge.io.tcp
import kotlinx.io.Input
import kotlinx.io.Output
import kotlinx.io.asBinary
import kotlinx.io.*
import kotlinx.io.buffer.Buffer
import kotlinx.io.buffer.get
import kotlinx.io.buffer.set
import java.io.InputStream
import java.io.OutputStream
private class InputStreamInput(val source: InputStream, val waitForInput: Boolean = false) : Input() {
private class BlockingStreamInput(val source: InputStream) : Input() {
override fun closeSource() {
source.close()
}
override fun fill(buffer: Buffer): Int {
if (waitForInput) {
while (source.available() == 0) {
//block until input is available
}
override fun fill(buffer: Buffer, startIndex: Int, endIndex: Int): Int {
while (source.available() == 0) {
//block until input is available
}
var bufferPos = 0
do {
// Zero-copy attempt
if (buffer.buffer.hasArray()) {
val result = source.read(buffer.buffer.array(), startIndex, endIndex - startIndex)
return result.coerceAtLeast(0) // -1 when IS is closed
}
for (i in startIndex until endIndex) {
val byte = source.read()
buffer[bufferPos] = byte.toByte()
bufferPos++
} while (byte > 0 && bufferPos < buffer.size && source.available() > 0)
return bufferPos
}
}
private class OutputStreamOutput(val out: OutputStream) : Output() {
override fun flush(source: Buffer, length: Int) {
for (i in 0..length) {
out.write(source[i].toInt())
if (byte == -1) return (i - startIndex)
buffer[i] = byte.toByte()
}
out.flush()
}
override fun closeSource() {
out.flush()
out.close()
return endIndex - startIndex
}
}
fun <R> InputStream.read(size: Int, block: Input.() -> R): R {
val buffer = ByteArray(size)
read(buffer)
return buffer.asBinary().read(block)
return buffer.asBinary().read(block = block)
}
fun <R> InputStream.read(block: Input.() -> R): R =
InputStreamInput(this, false).block()
fun <R> InputStream.read(block: Input.() -> R): R = asInput().block()
fun <R> InputStream.readBlocking(block: Input.() -> R): R =
InputStreamInput(this, true).block()
fun <R> InputStream.readBlocking(block: Input.() -> R): R = BlockingStreamInput(this).block()
fun OutputStream.write(block: Output.() -> Unit) {
OutputStreamOutput(this).block()
inline fun OutputStream.write(block: Output.() -> Unit) {
asOutput().block()
}

View File

@ -1,6 +1,7 @@
package hep.dataforge.io
import hep.dataforge.context.Global
import hep.dataforge.meta.DFExperimental
import kotlinx.io.asBinary
import kotlinx.io.toByteArray
import kotlinx.io.writeDouble
@ -46,6 +47,7 @@ class FileBinaryTest {
}
@DFExperimental
@Test
fun testFileDataSizeRewriting() {
println(System.getProperty("user.dir"))
@ -53,6 +55,6 @@ class FileBinaryTest {
Global.io.writeEnvelopeFile(tmpPath, envelope)
val binary = Global.io.readEnvelopeFile(tmpPath)?.data!!
assertEquals(binary.size.toInt(), binary.toByteArray().size)
assertEquals(binary.size, binary.toByteArray().size)
}
}

View File

@ -1,12 +1,14 @@
package hep.dataforge.io
import hep.dataforge.context.Global
import hep.dataforge.meta.DFExperimental
import kotlinx.io.writeDouble
import java.nio.file.Files
import kotlin.test.Test
import kotlin.test.assertTrue
@DFExperimental
class FileEnvelopeTest {
val envelope = Envelope {
meta {

View File

@ -4,12 +4,13 @@ import hep.dataforge.context.Global
import hep.dataforge.io.Envelope
import hep.dataforge.io.Responder
import hep.dataforge.io.TaggedEnvelopeFormat
import hep.dataforge.io.writeByteArray
import hep.dataforge.io.toByteArray
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.runBlocking
import kotlinx.io.writeDouble
import org.junit.AfterClass
import org.junit.BeforeClass
import org.junit.jupiter.api.AfterAll
import org.junit.jupiter.api.BeforeAll
import org.junit.jupiter.api.Timeout
import kotlin.test.Test
import kotlin.test.assertEquals
import kotlin.time.ExperimentalTime
@ -17,7 +18,7 @@ import kotlin.time.ExperimentalTime
@ExperimentalStdlibApi
object EchoResponder : Responder {
override suspend fun respond(request: Envelope): Envelope {
val string = TaggedEnvelopeFormat().run { writeByteArray(request).decodeToString() }
val string = TaggedEnvelopeFormat().run { toByteArray(request).decodeToString() }
println("ECHO:")
println(string)
return request
@ -31,22 +32,23 @@ class EnvelopeServerTest {
@JvmStatic
val echoEnvelopeServer = EnvelopeServer(Global, 7778, EchoResponder, GlobalScope)
@BeforeClass
@BeforeAll
@JvmStatic
fun start() {
echoEnvelopeServer.start()
}
@AfterClass
@AfterAll
@JvmStatic
fun close() {
echoEnvelopeServer.stop()
}
}
@Test(timeout = 1000)
@Test
@Timeout(1)
fun doEchoTest() {
val request = Envelope.invoke {
val request = Envelope {
type = "test.echo"
meta {
"test.value" put 22

View File

@ -1,6 +1,5 @@
package hep.dataforge.meta.scheme
package hep.dataforge.meta
import hep.dataforge.meta.*
import hep.dataforge.meta.descriptors.*
import hep.dataforge.names.Name
import hep.dataforge.names.toName

View File

@ -1,6 +1,5 @@
package hep.dataforge.meta.scheme
package hep.dataforge.meta
import hep.dataforge.meta.*
import hep.dataforge.names.Name
import hep.dataforge.names.asName
import hep.dataforge.values.*
@ -168,9 +167,13 @@ fun Configurable.float(default: Float, key: Name? = null): ReadWriteProperty<Any
/**
* Enum delegate
*/
fun <E : Enum<E>> Configurable.enum(
default: E, key: Name? = null, resolve: MetaItem<*>.() -> E?
): ReadWriteProperty<Any?, E> = item(default, key).transform { it?.resolve() ?: default }
inline fun <reified E : Enum<E>> Configurable.enum(
default: E, key: Name? = null
): ReadWriteProperty<Any?, E> =
item(default, key).transform { item -> item?.string?.let {str->
@Suppress("USELESS_CAST")
enumValueOf<E>(str) as E
} ?: default }
/*
* Extra delegates for special cases
@ -225,7 +228,7 @@ fun <T : Configurable> Configurable.spec(
): ReadWriteProperty<Any?, T> = object : ReadWriteProperty<Any?, T> {
override fun getValue(thisRef: Any?, property: KProperty<*>): T {
val name = key ?: property.name.asName()
return config[name].node?.let { spec.wrap(it) }?:default
return config[name].node?.let { spec.wrap(it) } ?: default
}
override fun setValue(thisRef: Any?, property: KProperty<*>, value: T) {

View File

@ -1,3 +1,5 @@
@file:Suppress("UNUSED_PARAMETER")
package hep.dataforge.meta
import hep.dataforge.meta.descriptors.ItemDescriptor
@ -26,27 +28,75 @@ fun Value.toJson(descriptor: ValueDescriptor? = null): JsonElement {
}
//Use these methods to customize JSON key mapping
private fun NameToken.toJsonKey(descriptor: ItemDescriptor?) = toString()
private fun String.toJsonKey(descriptor: ItemDescriptor?) = descriptor?.getProperty("jsonName").string ?: toString()
//private fun NodeDescriptor?.getDescriptor(key: String) = this?.items?.get(key)
fun Meta.toJson(descriptor: NodeDescriptor? = null): JsonObject {
/**
* Convert given [Meta] to [JsonObject]. Primitives and nodes are copied as is, same name siblings are treated as json arrays
*/
fun Meta.toJson(descriptor: NodeDescriptor? = null, index: String? = null): JsonObject {
//TODO search for same name siblings and arrange them into arrays
val map = this.items.entries.associate { (name, item) ->
val itemDescriptor = descriptor?.items?.get(name.body)
val key = name.toJsonKey(itemDescriptor)
val value = when (item) {
is MetaItem.ValueItem -> {
item.value.toJson(itemDescriptor as? ValueDescriptor)
val elementMap = HashMap<String, JsonElement>()
fun MetaItem<*>.toJsonElement(itemDescriptor: ItemDescriptor?, index: String? = null): JsonElement = when (this) {
is MetaItem.ValueItem -> {
value.toJson(itemDescriptor as? ValueDescriptor)
}
is MetaItem.NodeItem -> {
node.toJson(itemDescriptor as? NodeDescriptor, index)
}
}
fun addElement(key: String) {
val itemDescriptor = descriptor?.items?.get(key)
val jsonKey = key.toJsonKey(itemDescriptor)
val items = getIndexed(key)
when (items.size) {
0 -> {
//do nothing
}
is MetaItem.NodeItem -> {
item.node.toJson(itemDescriptor as? NodeDescriptor)
1 -> {
elementMap[jsonKey] = items.values.first().toJsonElement(itemDescriptor)
}
else -> {
val array = jsonArray {
items.forEach { (index, item) ->
+item.toJsonElement(itemDescriptor, index)
}
}
elementMap[jsonKey] = array
}
}
key to value
}
return JsonObject(map)
((descriptor?.items?.keys ?: emptySet()) + items.keys.map { it.body }).forEach(::addElement)
if (index != null) {
elementMap["@index"] = JsonPrimitive(index)
}
return JsonObject(elementMap)
// // use descriptor keys in the order they are declared
// val keys = (descriptor?.items?.keys ?: emptySet()) + this.items.keys.map { it.body }
//
// //TODO search for same name siblings and arrange them into arrays
// val map = this.items.entries.associate { (name, item) ->
// val itemDescriptor = descriptor?.items?.get(name.body)
// val key = name.toJsonKey(itemDescriptor)
// val value = when (item) {
// is MetaItem.ValueItem -> {
// item.value.toJson(itemDescriptor as? ValueDescriptor)
// }
// is MetaItem.NodeItem -> {
// item.node.toJson(itemDescriptor as? NodeDescriptor)
// }
// }
// key to value
// }
// return JsonObject(map)
}
fun JsonObject.toMeta(descriptor: NodeDescriptor? = null): Meta = JsonMeta(this, descriptor)
@ -88,6 +138,9 @@ fun JsonElement.toMetaItem(descriptor: ItemDescriptor? = null): MetaItem<JsonMet
}
}
/**
* A meta wrapping json object
*/
class JsonMeta(val json: JsonObject, val descriptor: NodeDescriptor? = null) : MetaBase() {
@Suppress("UNCHECKED_CAST")
@ -95,7 +148,8 @@ class JsonMeta(val json: JsonObject, val descriptor: NodeDescriptor? = null) : M
val itemDescriptor = descriptor?.items?.get(key)
return when (value) {
is JsonPrimitive -> {
this[key] = MetaItem.ValueItem(value.toValue(itemDescriptor as? ValueDescriptor)) as MetaItem<JsonMeta>
this[key] =
MetaItem.ValueItem(value.toValue(itemDescriptor as? ValueDescriptor)) as MetaItem<JsonMeta>
}
is JsonObject -> {
this[key] = MetaItem.NodeItem(
@ -125,7 +179,7 @@ class JsonMeta(val json: JsonObject, val descriptor: NodeDescriptor? = null) : M
}
override val items: Map<NameToken, MetaItem<JsonMeta>> by lazy {
val map = HashMap<String, MetaItem<JsonMeta>>()
val map = LinkedHashMap<String, MetaItem<JsonMeta>>()
json.forEach { (key, value) -> map[key] = value }
map.mapKeys { it.key.toName().first()!! }
}

View File

@ -98,7 +98,9 @@ interface Meta : MetaRepr {
*/
const val VALUE_KEY = "@value"
val EMPTY: EmptyMeta = EmptyMeta
val EMPTY: Meta = object: MetaBase() {
override val items: Map<NameToken, MetaItem<*>> = emptyMap()
}
}
}
@ -188,7 +190,7 @@ abstract class MetaBase : Meta {
override fun hashCode(): Int = items.hashCode()
override fun toString(): String = toJson().toString()
override fun toString(): String = JSON_PRETTY.stringify(MetaSerializer, this)
}
/**
@ -216,10 +218,6 @@ fun MetaItem<*>.seal(): MetaItem<SealedMeta> = when (this) {
is NodeItem -> NodeItem(node.seal())
}
object EmptyMeta : MetaBase() {
override val items: Map<NameToken, MetaItem<*>> = emptyMap()
}
/**
* Unsafe methods to access values and nodes directly from [MetaItem]
*/
@ -251,4 +249,4 @@ val <M : Meta> MetaItem<M>?.node: M?
is NodeItem -> node
}
fun Meta.isEmpty() = this === EmptyMeta || this.items.isEmpty()
fun Meta.isEmpty() = this === Meta.EMPTY || this.items.isEmpty()

View File

@ -1,6 +1,5 @@
package hep.dataforge.meta
import hep.dataforge.meta.scheme.Configurable
import hep.dataforge.names.*
import hep.dataforge.values.Value
@ -17,7 +16,7 @@ interface MutableMeta<out M : MutableMeta<M>> : MetaNode<M> {
* Changes in Meta are not thread safe.
*/
abstract class AbstractMutableMeta<M : MutableMeta<M>> : AbstractMetaNode<M>(), MutableMeta<M> {
protected val _items: MutableMap<NameToken, MetaItem<M>> = HashMap()
protected val _items: MutableMap<NameToken, MetaItem<M>> = LinkedHashMap()
override val items: Map<NameToken, MetaItem<M>>
get() = _items

View File

@ -34,9 +34,7 @@ class ReadWriteDelegateWrapper<T, R>(
val reader: (T) -> R,
val writer: (R) -> T
) : ReadWriteProperty<Any?, R> {
override fun getValue(thisRef: Any?, property: KProperty<*>): R {
return reader(delegate.getValue(thisRef, property))
}
override fun getValue(thisRef: Any?, property: KProperty<*>): R = reader(delegate.getValue(thisRef, property))
override fun setValue(thisRef: Any?, property: KProperty<*>, value: R) {
delegate.setValue(thisRef, property, writer(value))

View File

@ -1,6 +1,5 @@
package hep.dataforge.meta.scheme
package hep.dataforge.meta
import hep.dataforge.meta.*
import hep.dataforge.meta.descriptors.*
import hep.dataforge.names.Name
import hep.dataforge.names.NameToken
@ -24,9 +23,6 @@ open class Scheme() : Configurable, Described, MetaRepr {
var defaultProvider: (Name) -> MetaItem<*>? = { null }
internal set
final override var descriptor: NodeDescriptor? = null
internal set
override fun getDefaultItem(name: Name): MetaItem<*>? {
return defaultProvider(name) ?: descriptor?.get(name)?.defaultItem()
}
@ -59,13 +55,19 @@ inline operator fun <T : Scheme> T.invoke(block: T.() -> Unit) = apply(block)
/**
* A specification for simplified generation of wrappers
*/
open class SchemeSpec<T : Scheme>(val builder: () -> T) : Specification<T> {
open class SchemeSpec<T : Scheme>(val builder: () -> T) :
Specification<T> {
override fun empty(): T = builder()
override fun wrap(config: Config, defaultProvider: (Name) -> MetaItem<*>?): T {
return builder().apply {
return empty().apply {
this.config = config
this.defaultProvider = defaultProvider
}
}
@Suppress("OVERRIDE_BY_INLINE")
final override inline operator fun invoke(action: T.() -> Unit) = empty().apply(action)
}
/**
@ -73,15 +75,10 @@ open class SchemeSpec<T : Scheme>(val builder: () -> T) : Specification<T> {
*/
open class MetaScheme(
val meta: Meta,
descriptor: NodeDescriptor? = null,
override val descriptor: NodeDescriptor? = null,
config: Config = Config()
) : Scheme(config, meta::get) {
init {
this.descriptor = descriptor
}
override val defaultLayer: Meta
get() = Laminate(meta, descriptor?.defaultItem().node)
override val defaultLayer: Meta get() = Laminate(meta, descriptor?.defaultItem().node)
}
fun Meta.asScheme() =

View File

@ -1,6 +1,5 @@
package hep.dataforge.meta.scheme
package hep.dataforge.meta
import hep.dataforge.meta.*
import hep.dataforge.names.Name
import kotlin.jvm.JvmName
@ -10,15 +9,6 @@ import kotlin.jvm.JvmName
*
*/
interface Specification<T : Configurable> {
/**
* Update given configuration using given type as a builder
*/
fun update(config: Config, action: T.() -> Unit): T {
return wrap(config).apply(action)
}
operator fun invoke(action: T.() -> Unit) = update(Config(), action)
fun empty() = wrap()
/**
@ -26,19 +16,29 @@ interface Specification<T : Configurable> {
*/
fun wrap(config: Config = Config(), defaultProvider: (Name) -> MetaItem<*>? = { null }): T
/**
* Wrap a configuration using static meta as default
*/
fun wrap(config: Config = Config(), default: Meta): T = wrap(config) { default[it] }
/**
* Wrap a configuration using static meta as default
*/
fun wrap(default: Meta): T = wrap(
Config()
) { default[it] }
operator fun invoke(action: T.() -> Unit): T = empty().apply(action)
}
/**
* Update given configuration using given type as a builder
*/
fun <T : Configurable> Specification<T>.update(config: Config, action: T.() -> Unit): T = wrap(config).apply(action)
/**
* Wrap a configuration using static meta as default
*/
fun <T : Configurable> Specification<T>.wrap(config: Config = Config(), default: Meta = Meta.EMPTY): T =
wrap(config) { default[it] }
/**
* Wrap a configuration using static meta as default
*/
fun <T : Configurable> Specification<T>.wrap(source: Meta): T {
val default = source.seal()
return wrap(source.asConfig(), default)
}
/**
* Apply specified configuration to configurable
*/

View File

@ -5,6 +5,9 @@ import hep.dataforge.meta.MetaItem
import hep.dataforge.names.NameToken
import hep.dataforge.values.Null
/**
* A [Meta] that wraps a descriptor node
*/
class DescriptorMeta(val descriptor: NodeDescriptor) : MetaBase() {
override val items: Map<NameToken, MetaItem<*>>
get() = descriptor.items.entries.associate { entry ->

View File

@ -1,16 +1,13 @@
package hep.dataforge.meta.descriptors
import hep.dataforge.meta.*
import hep.dataforge.meta.scheme.*
import hep.dataforge.names.Name
import hep.dataforge.names.NameToken
import hep.dataforge.names.asName
import hep.dataforge.names.isEmpty
import hep.dataforge.names.*
import hep.dataforge.values.False
import hep.dataforge.values.True
import hep.dataforge.values.Value
import hep.dataforge.values.ValueType
@DFBuilder
sealed class ItemDescriptor : Scheme() {
/**
@ -68,8 +65,7 @@ fun ItemDescriptor.validateItem(item: MetaItem<*>?): Boolean {
* @author Alexander Nozik
*/
@DFBuilder
class NodeDescriptor : ItemDescriptor() {
class NodeDescriptor private constructor() : ItemDescriptor() {
/**
* True if the node is required
*
@ -84,86 +80,98 @@ class NodeDescriptor : ItemDescriptor() {
*/
var default by node()
val items: Map<String, ItemDescriptor>
get() = config.getIndexed(ITEM_KEY).mapValues { (_, item) ->
val node = item.node ?: error("Node descriptor must be a node")
if (node[IS_NODE_KEY].boolean == true) {
NodeDescriptor.wrap(node)
} else {
ValueDescriptor.wrap(node)
}
}
/**
* The map of children node descriptors
*/
@Suppress("UNCHECKED_CAST")
val nodes: Map<String, NodeDescriptor>
get() = config.getIndexed(NODE_KEY.asName()).entries.associate { (name, node) ->
name to wrap(node.node ?: error("Node descriptor must be a node"))
get() = config.getIndexed(ITEM_KEY).entries.filter {
it.value.node[IS_NODE_KEY].boolean == true
}.associate { (name, item) ->
val node = item.node ?: error("Node descriptor must be a node")
name to NodeDescriptor.wrap(node)
}
/**
* Define a child item descriptor for this node
* The list of value descriptors
*/
fun defineItem(name: String, descriptor: ItemDescriptor) {
if (items.keys.contains(name)) error("The key $name already exists in descriptor")
val token = when (descriptor) {
is NodeDescriptor -> NameToken(NODE_KEY, name)
is ValueDescriptor -> NameToken(VALUE_KEY, name)
val values: Map<String, ValueDescriptor>
get() = config.getIndexed(ITEM_KEY).entries.filter {
it.value.node[IS_NODE_KEY].boolean != true
}.associate { (name, item) ->
val node = item.node ?: error("Node descriptor must be a node")
name to ValueDescriptor.wrap(node)
}
config[token] = descriptor.config
}
fun defineNode(name: String, block: NodeDescriptor.() -> Unit) {
val token = NameToken(NODE_KEY, name)
if (config[token] == null) {
config[token] = NodeDescriptor(block)
} else {
NodeDescriptor.update(config[token].node ?: error("Node expected"), block)
}
}
private fun buildNode(name: Name): NodeDescriptor {
return when (name.length) {
0 -> this
1 -> {
val token = NameToken(NODE_KEY, name.toString())
val config: Config = config[token].node ?: Config().also { config[token] = it }
val token = NameToken(ITEM_KEY.toString(), name.toString())
val config: Config = config[token].node ?: Config().also {
it[IS_NODE_KEY] = true
config[token] = it
}
wrap(config)
}
else -> buildNode(name.first()?.asName()!!).buildNode(name.cutFirst())
}
}
fun defineNode(name: Name, block: NodeDescriptor.() -> Unit) {
buildNode(name).apply(block)
/**
* Define a child item descriptor for this node
*/
private fun newItem(key: String, descriptor: ItemDescriptor) {
if (items.keys.contains(key)) error("The key $key already exists in descriptor")
val token = ITEM_KEY.withIndex(key)
config[token] = descriptor.config
}
/**
* The list of value descriptors
*/
val values: Map<String, ValueDescriptor>
get() = config.getIndexed(VALUE_KEY.asName()).entries.associate { (name, node) ->
name to ValueDescriptor.wrap(node.node ?: error("Value descriptor must be a node"))
}
fun defineItem(name: Name, descriptor: ItemDescriptor) {
buildNode(name.cutLast()).newItem(name.last().toString(), descriptor)
}
fun defineItem(name: String, descriptor: ItemDescriptor) {
defineItem(name.toName(), descriptor)
}
/**
* Add a value descriptor using block for
*/
fun defineValue(name: String, block: ValueDescriptor.() -> Unit) {
defineItem(name, ValueDescriptor(block))
fun defineNode(name: Name, block: NodeDescriptor.() -> Unit) {
defineItem(name, NodeDescriptor(block))
}
fun defineNode(name: String, block: NodeDescriptor.() -> Unit) {
defineNode(name.toName(), block)
}
fun defineValue(name: Name, block: ValueDescriptor.() -> Unit) {
require(name.length >= 1) { "Name length for value descriptor must be non-empty" }
buildNode(name.cutLast()).defineValue(name.last().toString(), block)
defineItem(name, ValueDescriptor(block))
}
val items: Map<String, ItemDescriptor> get() = nodes + values
//override val descriptor: NodeDescriptor = empty("descriptor")
fun defineValue(name: String, block: ValueDescriptor.() -> Unit) {
defineValue(name.toName(), block)
}
companion object : SchemeSpec<NodeDescriptor>(::NodeDescriptor) {
// const val ITEM_KEY = "item"
const val NODE_KEY = "node"
const val VALUE_KEY = "value"
val ITEM_KEY = "item".asName()
val IS_NODE_KEY = "@isNode".asName()
//override fun wrap(config: Config): NodeDescriptor = NodeDescriptor(config)
override fun empty(): NodeDescriptor {
return super.empty().apply {
config[IS_NODE_KEY] = true
}
}
//TODO infer descriptor from spec
}
@ -187,9 +195,9 @@ operator fun ItemDescriptor.get(name: Name): ItemDescriptor? {
*
* @author Alexander Nozik
*/
@DFBuilder
class ValueDescriptor : ItemDescriptor() {
/**
* True if the value is required
*
@ -255,68 +263,5 @@ class ValueDescriptor : ItemDescriptor() {
this.allowedValues = v.map { Value.of(it) }
}
companion object : SchemeSpec<ValueDescriptor>(::ValueDescriptor) {
// inline fun <reified E : Enum<E>> enum(name: String) = ValueDescriptor {
// type(ValueType.STRING)
// this.allowedValues = enumValues<E>().map { Value.of(it.name) }
// }
// /**
// * Build a value descriptor from annotation
// */
// fun build(def: ValueDef): ValueDescriptor {
// val builder = MetaBuilder("value")
// .setValue("name", def.key)
//
// if (def.type.isNotEmpty()) {
// builder.setValue("type", def.type)
// }
//
// if (def.multiple) {
// builder.setValue("multiple", def.multiple)
// }
//
// if (!def.info.isEmpty()) {
// builder.setValue("info", def.info)
// }
//
// if (def.allowed.isNotEmpty()) {
// builder.setValue("allowedValues", def.allowed)
// } else if (def.enumeration != Any::class) {
// if (def.enumeration.java.isEnum) {
// val values = def.enumeration.java.enumConstants
// builder.setValue("allowedValues", values.map { it.toString() })
// } else {
// throw RuntimeException("Only enumeration classes are allowed in 'enumeration' annotation property")
// }
// }
//
// if (def.def.isNotEmpty()) {
// builder.setValue("default", def.def)
// } else if (!def.required) {
// builder.setValue("required", def.required)
// }
//
// if (def.tags.isNotEmpty()) {
// builder.setValue("tags", def.tags)
// }
// return ValueDescriptor(builder)
// }
//
// /**
// * Build empty value descriptor
// */
// fun empty(valueName: String): ValueDescriptor {
// val builder = MetaBuilder("value")
// .setValue("name", valueName)
// return ValueDescriptor(builder)
// }
//
// /**
// * Merge two separate value descriptors
// */
// fun merge(primary: ValueDescriptor, secondary: ValueDescriptor): ValueDescriptor {
// return ValueDescriptor(Laminate(primary.meta, secondary.meta))
// }
}
companion object : SchemeSpec<ValueDescriptor>(::ValueDescriptor)
}

View File

@ -1,12 +1,13 @@
package hep.dataforge.meta
import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.names.toName
import hep.dataforge.values.ListValue
import hep.dataforge.values.Value
/**
* Convert meta to map of maps
*/
@DFExperimental
fun Meta.toMap(descriptor: NodeDescriptor? = null): Map<String, Any?> {
return items.entries.associate { (token, item) ->
token.toString() to when (item) {
@ -17,15 +18,29 @@ fun Meta.toMap(descriptor: NodeDescriptor? = null): Map<String, Any?> {
}
/**
* Convert map of maps to meta
* Convert map of maps to meta. This method will recognize [MetaItem], [Map]<String,Any?> and [List] of all mentioned above as value.
* All other values will be converted to values.
*/
@DFExperimental
fun Map<String, Any?>.toMeta(descriptor: NodeDescriptor? = null): Meta = Meta {
@Suppress("UNCHECKED_CAST")
fun toItem(value: Any?): MetaItem<*> = when (value) {
is MetaItem<*> -> value
is Meta -> MetaItem.NodeItem(value)
is Map<*, *> -> MetaItem.NodeItem((value as Map<String, Any?>).toMeta())
else -> MetaItem.ValueItem(Value.of(value))
}
entries.forEach { (key, value) ->
@Suppress("UNCHECKED_CAST")
when (value) {
is Map<*, *> -> setNode(key, (value as Map<String, Any?>).toMeta())
else -> setValue(key, Value.of(value))
if (value is List<*>) {
val items = value.map { toItem(it) }
if (items.all { it is MetaItem.ValueItem }) {
setValue(key, ListValue(items.map { it.value!! }))
} else {
setIndexedItems(key.toName(), value.map { toItem(it) })
}
} else {
setItem(key, toItem(value))
}
}
}

View File

@ -4,14 +4,14 @@ import hep.dataforge.names.Name
import hep.dataforge.names.toName
/**
* Get all items matching given name.
* Get all items matching given name. The index of the last element, if present is used as a [Regex],
* against which indexes of elements are matched.
*/
@DFExperimental
fun Meta.getIndexed(name: Name): Map<String, MetaItem<*>> {
val root = when (name.length) {
0 -> error("Can't use empty name for that")
0 -> error("Can't use empty name for 'getIndexed'")
1 -> this
else -> (this[name.cutLast()] as? MetaItem.NodeItem<*>)?.node
else -> this[name.cutLast()].node
}
val (body, index) = name.last()!!
@ -23,16 +23,13 @@ fun Meta.getIndexed(name: Name): Map<String, MetaItem<*>> {
?: emptyMap()
}
@DFExperimental
fun Meta.getIndexed(name: String): Map<String, MetaItem<*>> = this@getIndexed.getIndexed(name.toName())
/**
* Get all items matching given name.
*/
@Suppress("UNCHECKED_CAST")
@DFExperimental
fun <M : MetaNode<M>> M.getIndexed(name: Name): Map<String, MetaItem<M>> =
(this as Meta).getIndexed(name) as Map<String, MetaItem<M>>
@DFExperimental
fun <M : MetaNode<M>> M.getIndexed(name: String): Map<String, MetaItem<M>> = getIndexed(name.toName())

View File

@ -3,6 +3,8 @@ package hep.dataforge.meta
import kotlinx.serialization.*
import kotlinx.serialization.builtins.DoubleArraySerializer
import kotlinx.serialization.builtins.serializer
import kotlinx.serialization.json.Json
import kotlinx.serialization.json.JsonConfiguration
fun SerialDescriptorBuilder.boolean(name: String, isOptional: Boolean = false, vararg annotations: Annotation) =
element(name, Boolean.serializer().descriptor, isOptional = isOptional, annotations = annotations.toList())
@ -62,4 +64,9 @@ inline fun Encoder.encodeStructure(
val encoder = beginStructure(desc, *typeParams)
encoder.block()
encoder.endStructure(desc)
}
}
@OptIn(UnstableDefault::class)
val JSON_PRETTY = Json(JsonConfiguration(prettyPrint = true, useArrayPolymorphism = true))
@OptIn(UnstableDefault::class)
val JSON_PLAIN = Json(JsonConfiguration(prettyPrint = true, useArrayPolymorphism = true))

View File

@ -22,7 +22,7 @@ interface TransformationRule {
meta.sequence().filter { matches(it.first, it.second) }.map { it.first }
/**
* Apply transformation for a single item (Node or Value) and return resulting tree with absolute path
* Apply transformation for a single item (Node or Value) to the target
*/
fun <M : MutableMeta<M>> transformItem(name: Name, item: MetaItem<*>?, target: M): Unit
}
@ -89,7 +89,7 @@ inline class MetaTransformation(val transformations: Collection<TransformationRu
/**
* Produce new meta using only those items that match transformation rules
*/
fun transform(source: Meta): Meta =
fun generate(source: Meta): Meta =
Meta {
transformations.forEach { rule ->
rule.selectItems(source).forEach { name ->
@ -98,6 +98,20 @@ inline class MetaTransformation(val transformations: Collection<TransformationRu
}
}
/**
* Generate an observable configuration that contains only elements defined by transformation rules and changes with the source
*/
@DFExperimental
fun generate(source: Config): ObservableMeta = Config().apply {
transformations.forEach { rule ->
rule.selectItems(source).forEach { name ->
rule.transformItem(name, source[name], this)
}
}
bind(source, this)
}
/**
* Transform a meta, replacing all elements found in rules with transformed entries
*/

View File

@ -59,7 +59,8 @@ class Name(val tokens: List<NameToken>) {
val EMPTY = Name(emptyList())
override val descriptor: SerialDescriptor = PrimitiveDescriptor("hep.dataforge.names.Name", PrimitiveKind.STRING)
override val descriptor: SerialDescriptor =
PrimitiveDescriptor("hep.dataforge.names.Name", PrimitiveKind.STRING)
override fun deserialize(decoder: Decoder): Name {
return decoder.decodeString().toName()
@ -99,7 +100,8 @@ data class NameToken(val body: String, val index: String = "") {
@Serializer(NameToken::class)
companion object : KSerializer<NameToken> {
override val descriptor: SerialDescriptor = PrimitiveDescriptor("hep.dataforge.names.NameToken", PrimitiveKind.STRING)
override val descriptor: SerialDescriptor =
PrimitiveDescriptor("hep.dataforge.names.NameToken", PrimitiveKind.STRING)
override fun deserialize(decoder: Decoder): NameToken {
return decoder.decodeString().toName().first()!!
@ -188,8 +190,12 @@ fun Name.isEmpty(): Boolean = this.length == 0
* Set or replace last token index
*/
fun Name.withIndex(index: String): Name {
val tokens = ArrayList(tokens)
val last = NameToken(tokens.last().body, index)
if (length == 0) error("Can't add index to empty name")
if (length == 1) {
return last.asName()
}
val tokens = ArrayList(tokens)
tokens.removeAt(tokens.size - 1)
tokens.add(last)
return Name(tokens)

View File

@ -0,0 +1,39 @@
package hep.dataforge.meta
import kotlinx.serialization.json.int
import kotlinx.serialization.json.json
import kotlinx.serialization.json.jsonArray
import kotlin.test.Test
import kotlin.test.assertEquals
class JsonMetaTest {
val json = json {
"firstValue" to "a"
"secondValue" to "b"
"array" to jsonArray {
+"1"
+"2"
+"3"
}
"nodeArray" to jsonArray {
+json {
"index" to 1
}
+json {
"index" to 2
}
+json {
"index" to 3
}
}
}
@Test
fun jsonMetaConversion() {
val meta = json.toMeta()
val reconstructed = meta.toJson()
println(json)
println(reconstructed)
assertEquals(2, reconstructed["nodeArray"]?.jsonArray?.get(1)?.jsonObject?.get("index")?.int)
}
}

View File

@ -8,7 +8,7 @@ import kotlin.test.assertEquals
class MetaBuilderTest {
@Test
fun testBuilder() {
val meta = buildMeta {
val meta = Meta {
"a" put 22
"b" put listOf(1, 2, 3)
this["c"] = "myValue".asValue()
@ -25,7 +25,7 @@ class MetaBuilderTest {
@Test
fun testSNS(){
val meta = buildMeta {
val meta = Meta {
repeat(10){
"b.a[$it]" put it
}

View File

@ -1,6 +1,5 @@
package hep.dataforge.meta
import hep.dataforge.meta.scheme.*
import kotlin.test.Test
import kotlin.test.assertEquals
@ -20,7 +19,7 @@ class MetaDelegateTest {
class TestScheme : Scheme() {
var myValue by string()
var safeValue by double(2.2)
var enumValue by enum(TestEnum.YES) { enum<TestEnum>() }
var enumValue by enum(TestEnum.YES)
var inner by spec(InnerSpec)
companion object : SchemeSpec<TestScheme>(::TestScheme)

View File

@ -31,8 +31,9 @@ class MetaTest {
assertEquals<Meta>(meta1, meta2)
}
@OptIn(DFExperimental::class)
@Test
fun metaToMap(){
fun metaToMap() {
val meta = Meta {
"a" put 22
"b" put {
@ -47,6 +48,19 @@ class MetaTest {
val map = meta.toMap()
val reconstructed = map.toMeta()
assertEquals(meta,reconstructed)
assertEquals(meta, reconstructed)
}
@Test
fun indexed() {
val meta = Meta {
(0..20).forEach {
set("a[$it]", it)
}
}
val indexed = meta.getIndexed("a[1.]")
assertEquals(10, indexed.size)
assertEquals(null, indexed["8"])
assertEquals(12, indexed["12"].int)
}
}

View File

@ -1,7 +1,5 @@
package hep.dataforge.meta
import hep.dataforge.meta.scheme.asScheme
import hep.dataforge.meta.scheme.getProperty
import kotlin.test.Test
import kotlin.test.assertEquals

View File

@ -1,8 +1,5 @@
package hep.dataforge.meta
import hep.dataforge.meta.scheme.Scheme
import hep.dataforge.meta.scheme.Specification
import hep.dataforge.meta.scheme.numberList
import hep.dataforge.names.Name
import kotlin.test.Test
import kotlin.test.assertEquals

View File

@ -26,6 +26,6 @@ class DescriptorTest {
@Test
fun testAllowedValues() {
val allowed = descriptor.nodes["aNode"]?.values?.get("b")?.allowedValues
assertEquals(allowed, emptyList())
assertEquals(emptyList(), allowed)
}
}

View File

@ -26,7 +26,7 @@ class DynamicMetaTest {
@Test
fun testMetaToDynamic(){
val meta = buildMeta {
val meta = Meta {
"a" put 22
"array" put listOf(1, 2, 3)
"b" put "myString"

View File

@ -2,7 +2,6 @@ package hep.dataforge.output
import hep.dataforge.context.*
import hep.dataforge.context.PluginTag.Companion.DATAFORGE_GROUP
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
import hep.dataforge.names.Name
import kotlinx.coroutines.CoroutineDispatcher
@ -24,7 +23,7 @@ interface OutputManager {
type: KClass<out T>,
name: Name,
stage: Name = Name.EMPTY,
meta: Meta = EmptyMeta
meta: Meta = Meta.EMPTY
): Renderer<T>
}
@ -39,7 +38,7 @@ val Context.output: OutputManager get() = plugins.get() ?: ConsoleOutputManager(
inline operator fun <reified T : Any> OutputManager.get(
name: Name,
stage: Name = Name.EMPTY,
meta: Meta = EmptyMeta
meta: Meta = Meta.EMPTY
): Renderer<T> {
return get(T::class, name, stage, meta)
}
@ -47,7 +46,7 @@ inline operator fun <reified T : Any> OutputManager.get(
/**
* Directly render an object using the most suitable renderer
*/
fun OutputManager.render(obj: Any, name: Name, stage: Name = Name.EMPTY, meta: Meta = EmptyMeta) =
fun OutputManager.render(obj: Any, name: Name, stage: Name = Name.EMPTY, meta: Meta = Meta.EMPTY) =
get(obj::class, name, stage).render(obj, meta)
/**

View File

@ -1,7 +1,6 @@
package hep.dataforge.output
import hep.dataforge.context.ContextAware
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
/**
@ -18,5 +17,5 @@ interface Renderer<in T : Any> : ContextAware {
* By convention actual render is called in asynchronous mode, so this method should never
* block execution
*/
fun render(obj: T, meta: Meta = EmptyMeta)
fun render(obj: T, meta: Meta = Meta.EMPTY)
}

View File

@ -3,7 +3,6 @@ package hep.dataforge.scripting
import hep.dataforge.context.Global
import hep.dataforge.meta.get
import hep.dataforge.meta.int
import hep.dataforge.meta.scheme.int
import hep.dataforge.workspace.SimpleWorkspaceBuilder
import hep.dataforge.workspace.context
import hep.dataforge.workspace.target

View File

@ -1,10 +1,9 @@
package hep.dataforge.tables
import hep.dataforge.meta.Scheme
import hep.dataforge.meta.SchemeSpec
import hep.dataforge.meta.enum
import hep.dataforge.meta.scheme.Scheme
import hep.dataforge.meta.scheme.SchemeSpec
import hep.dataforge.meta.scheme.enum
import hep.dataforge.meta.scheme.string
import hep.dataforge.meta.string
import hep.dataforge.values.ValueType
open class ColumnScheme : Scheme() {
@ -14,5 +13,5 @@ open class ColumnScheme : Scheme() {
}
class ValueColumnScheme : ColumnScheme() {
var valueType by enum(ValueType.STRING){enum<ValueType>()}
var valueType by enum(ValueType.STRING)
}

View File

@ -9,7 +9,6 @@ import kotlinx.coroutines.flow.toList
import kotlinx.io.Binary
import kotlinx.io.ExperimentalIoApi
import kotlinx.io.Output
import kotlinx.io.RandomAccessBinary
import kotlinx.io.text.forEachUtf8Line
import kotlinx.io.text.readUtf8Line
import kotlinx.io.text.readUtf8StringUntilDelimiter
@ -76,7 +75,7 @@ suspend fun TextRows.buildRowIndex(): List<Int> = indexFlow().toList()
@ExperimentalIoApi
class TextTable(
override val header: ValueTableHeader,
val binary: RandomAccessBinary,
val binary: Binary,
val index: List<Int>
) : Table<Value> {
@ -99,7 +98,7 @@ class TextTable(
}
companion object {
suspend operator fun invoke(header: ValueTableHeader, binary: RandomAccessBinary): TextTable {
suspend operator fun invoke(header: ValueTableHeader, binary: Binary): TextTable {
val index = TextRows(header, binary).buildRowIndex()
return TextTable(header, binary, index)
}

View File

@ -5,8 +5,8 @@ import hep.dataforge.meta.*
import hep.dataforge.tables.SimpleColumnHeader
import hep.dataforge.tables.Table
import hep.dataforge.values.Value
import kotlinx.io.Binary
import kotlinx.io.ByteArrayOutput
import kotlinx.io.EmptyBinary
import kotlinx.io.ExperimentalIoApi
import kotlinx.io.asBinary
@ -38,5 +38,5 @@ fun TextRows.Companion.readEnvelope(envelope: Envelope): TextRows {
.map { (_, item) ->
SimpleColumnHeader(item.node["name"].string!!, Value::class, item.node["meta"].node ?: Meta.EMPTY)
}
return TextRows(header, envelope.data ?: EmptyBinary)
return TextRows(header, envelope.data ?: Binary.EMPTY)
}

View File

@ -124,7 +124,7 @@ fun TaskDependencyContainer.allData(to: Name = Name.EMPTY) = AllDataDependency(t
/**
* A builder for [TaskModel]
*/
class TaskModelBuilder(val name: Name, meta: Meta = EmptyMeta) : TaskDependencyContainer {
class TaskModelBuilder(val name: Name, meta: Meta = Meta.EMPTY) : TaskDependencyContainer {
/**
* Meta for current task. By default uses the whole input meta
*/

View File

@ -2,11 +2,7 @@ package hep.dataforge.workspace
import hep.dataforge.data.Data
import hep.dataforge.data.await
import hep.dataforge.io.Envelope
import hep.dataforge.io.IOFormat
import hep.dataforge.io.SimpleEnvelope
import hep.dataforge.io.readWith
import kotlinx.io.ArrayBinary
import hep.dataforge.io.*
import kotlin.reflect.KClass
/**
@ -18,8 +14,6 @@ fun <T : Any> Envelope.toData(type: KClass<out T>, format: IOFormat<T>): Data<T>
suspend fun <T : Any> Data<T>.toEnvelope(format: IOFormat<T>): Envelope {
val obj = await()
val binary = ArrayBinary.write {
format.run { writeObject(obj) }
}
val binary = format.toBinary(obj)
return SimpleEnvelope(meta, binary)
}

View File

@ -6,16 +6,19 @@ import hep.dataforge.io.*
import hep.dataforge.meta.*
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
import kotlinx.io.asOutput
import java.nio.file.FileSystem
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.StandardCopyOption
import java.nio.file.StandardOpenOption
import java.nio.file.spi.FileSystemProvider
import java.util.zip.ZipEntry
import java.util.zip.ZipOutputStream
import kotlin.reflect.KClass
typealias FileFormatResolver<T> = (Path, Meta) -> IOFormat<T>
//private val zipFSProvider = ZipFileSystemProvider()
private fun newZFS(path: Path): FileSystem {
val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
@ -150,12 +153,43 @@ suspend fun <T : Any> IOPlugin.writeDataDirectory(
}
}
private suspend fun <T : Any> ZipOutputStream.writeNode(
name: String,
item: DataItem<T>,
dataFormat: IOFormat<T>,
envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat
) {
withContext(Dispatchers.IO) {
when (item) {
is DataItem.Leaf -> {
//TODO add directory-based envelope writer
val envelope = item.data.toEnvelope(dataFormat)
val entry = ZipEntry(name)
putNextEntry(entry)
envelopeFormat.run {
asOutput().writeObject(envelope)
}
}
is DataItem.Node -> {
val entry = ZipEntry("$name/")
putNextEntry(entry)
closeEntry()
item.node.items.forEach { (token, item) ->
val childName = "$name/$token"
writeNode(childName, item, dataFormat, envelopeFormat)
}
}
}
}
}
@DFExperimental
suspend fun <T : Any> IOPlugin.writeZip(
path: Path,
node: DataNode<T>,
format: IOFormat<T>,
envelopeFormat: EnvelopeFormat? = null,
metaFormat: MetaFormatFactory? = null
envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat
) {
withContext(Dispatchers.IO) {
val actualFile = if (path.toString().endsWith(".zip")) {
@ -163,21 +197,27 @@ suspend fun <T : Any> IOPlugin.writeZip(
} else {
path.resolveSibling(path.fileName.toString() + ".zip")
}
if (Files.exists(actualFile) && Files.size(path) == 0.toLong()) {
Files.delete(path)
}
//Files.createFile(actualFile)
newZFS(actualFile).use { zipfs ->
val internalTargetPath = zipfs.getPath("/")
Files.createDirectories(internalTargetPath)
val tmp = Files.createTempDirectory("df_zip")
writeDataDirectory(tmp, node, format, envelopeFormat, metaFormat)
Files.list(tmp).forEach { sourcePath ->
val targetPath = sourcePath.fileName.toString()
val internalTargetPath = internalTargetPath.resolve(targetPath)
Files.copy(sourcePath, internalTargetPath, StandardCopyOption.REPLACE_EXISTING)
}
val fos = Files.newOutputStream(actualFile, StandardOpenOption.WRITE, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)
val zos = ZipOutputStream(fos)
zos.use {
it.writeNode("", DataItem.Node(node), format, envelopeFormat)
}
// if (Files.exists(actualFile) && Files.size(path) == 0.toLong()) {
// Files.delete(path)
// }
// //Files.createFile(actualFile)
// newZFS(actualFile).use { zipfs ->
// val zipRootPath = zipfs.getPath("/")
// Files.createDirectories(zipRootPath)
// val tmp = Files.createTempDirectory("df_zip")
// writeDataDirectory(tmp, node, format, envelopeFormat, metaFormat)
// Files.list(tmp).forEach { sourcePath ->
// val targetPath = sourcePath.fileName.toString()
// val internalTargetPath = zipRootPath.resolve(targetPath)
// Files.copy(sourcePath, internalTargetPath, StandardCopyOption.REPLACE_EXISTING)
// }
// }
}
}

View File

@ -5,16 +5,17 @@ import hep.dataforge.data.*
import hep.dataforge.io.IOFormat
import hep.dataforge.io.io
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import kotlinx.coroutines.runBlocking
import kotlinx.io.Input
import kotlinx.io.Output
import kotlinx.io.text.readUtf8String
import kotlinx.io.text.writeUtf8String
import java.nio.file.Files
import kotlin.test.Ignore
import kotlin.test.Test
import kotlin.test.assertEquals
class FileDataTest {
val dataNode = DataNode<String> {
node("dir") {
@ -37,6 +38,10 @@ class FileDataTest {
return readUtf8String()
}
override fun toMeta(): Meta = Meta {
IOFormat.NAME_KEY put "string"
}
}
@Test
@ -56,7 +61,7 @@ class FileDataTest {
@Test
@Ignore
@DFExperimental
fun testZipWriteRead() {
Global.io.run {
val zip = Files.createTempFile("df_data_node", ".zip")

View File

@ -6,7 +6,6 @@ import hep.dataforge.meta.boolean
import hep.dataforge.meta.builder
import hep.dataforge.meta.get
import hep.dataforge.meta.int
import hep.dataforge.meta.scheme.int
import hep.dataforge.names.plus
import kotlin.test.Test
import kotlin.test.assertEquals