Fix zip writer and name refactoring

Alexander Nozik 2020-04-06 22:50:51 +03:00
parent eebfe534cc
commit 6450696157
25 changed files with 132 additions and 84 deletions

View File

@ -1,9 +1,7 @@
package hep.dataforge.context
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.meta.buildMeta
import kotlin.reflect.KClass
/**
@ -98,7 +96,7 @@ class PluginManager(override val context: Context) : ContextAware, Iterable<Plug
/**
* Load a plugin using its factory
*/
fun <T : Plugin> load(factory: PluginFactory<T>, meta: Meta = EmptyMeta): T =
fun <T : Plugin> load(factory: PluginFactory<T>, meta: Meta = Meta.EMPTY): T =
load(factory(meta, context))
fun <T : Plugin> load(factory: PluginFactory<T>, metaBuilder: MetaBuilder.() -> Unit): T =
@ -121,7 +119,7 @@ class PluginManager(override val context: Context) : ContextAware, Iterable<Plug
* Get an existing plugin with given meta or load new one using provided factory
*
*/
fun <T : Plugin> fetch(factory: PluginFactory<T>, recursive: Boolean = true, meta: Meta = EmptyMeta): T {
fun <T : Plugin> fetch(factory: PluginFactory<T>, recursive: Boolean = true, meta: Meta = Meta.EMPTY): T {
val loaded = get(factory.type, factory.tag, recursive)
return when {
loaded == null -> load(factory(meta, context))
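
For reference, the new default only changes where the empty node comes from; call sites that omit the meta argument compile unchanged. A minimal call-site sketch, assuming the Global context and using the IOPlugin factory that appears later in this commit:

import hep.dataforge.context.Global
import hep.dataforge.io.IOPlugin

// meta now defaults to Meta.EMPTY instead of the removed EmptyMeta
val io = Global.plugins.fetch(IOPlugin)
// an explicit configuration still goes through the same parameter:
// Global.plugins.fetch(IOPlugin, meta = Meta { "custom" put true })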

View File

@ -1,6 +1,8 @@
package hep.dataforge.data
import hep.dataforge.meta.*
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaRepr
import hep.dataforge.meta.isEmpty
import kotlinx.coroutines.CoroutineScope
import kotlin.coroutines.CoroutineContext
import kotlin.coroutines.EmptyCoroutineContext
@ -31,14 +33,14 @@ interface Data<out T : Any> : Goal<T>, MetaRepr{
operator fun <T : Any> invoke(
type: KClass<out T>,
meta: Meta = EmptyMeta,
meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
block: suspend CoroutineScope.() -> T
): Data<T> = DynamicData(type, meta, context, dependencies, block)
inline operator fun <reified T : Any> invoke(
meta: Meta = EmptyMeta,
meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
noinline block: suspend CoroutineScope.() -> T
@ -47,7 +49,7 @@ interface Data<out T : Any> : Goal<T>, MetaRepr{
operator fun <T : Any> invoke(
name: String,
type: KClass<out T>,
meta: Meta = EmptyMeta,
meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
block: suspend CoroutineScope.() -> T
@ -55,14 +57,14 @@ interface Data<out T : Any> : Goal<T>, MetaRepr{
inline operator fun <reified T : Any> invoke(
name: String,
meta: Meta = EmptyMeta,
meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
noinline block: suspend CoroutineScope.() -> T
): Data<T> =
invoke(name, T::class, meta, context, dependencies, block)
fun <T : Any> static(value: T, meta: Meta = EmptyMeta): Data<T> =
fun <T : Any> static(value: T, meta: Meta = Meta.EMPTY): Data<T> =
StaticData(value, meta)
}
}
@ -70,7 +72,7 @@ interface Data<out T : Any> : Goal<T>, MetaRepr{
class DynamicData<T : Any>(
override val type: KClass<out T>,
override val meta: Meta = EmptyMeta,
override val meta: Meta = Meta.EMPTY,
context: CoroutineContext = EmptyCoroutineContext,
dependencies: Collection<Data<*>> = emptyList(),
block: suspend CoroutineScope.() -> T
@ -78,7 +80,7 @@ class DynamicData<T : Any>(
class StaticData<T : Any>(
value: T,
override val meta: Meta = EmptyMeta
override val meta: Meta = Meta.EMPTY
) : Data<T>, StaticGoal<T>(value) {
override val type: KClass<out T> get() = value::class
}
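
The Data factory overloads above keep their signatures; only the default meta changes. A small sketch of the static and lazy constructors (the values are made up, the API is the one shown in this diff):

import hep.dataforge.data.Data
import hep.dataforge.meta.Meta

// A static value wrapped with Meta.EMPTY by default
val staticData = Data.static(42)

// Lazily computed data with an explicit meta node
val lazyData = Data(meta = Meta { "origin" put "computed" }) {
6 * 7
}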

View File

@ -250,7 +250,7 @@ fun <T : Any> DataTreeBuilder<T>.datum(name: String, data: Data<T>) {
this[name.toName()] = data
}
fun <T : Any> DataTreeBuilder<T>.static(name: Name, data: T, meta: Meta = EmptyMeta) {
fun <T : Any> DataTreeBuilder<T>.static(name: Name, data: T, meta: Meta = Meta.EMPTY) {
this[name] = Data.static(data, meta)
}

View File

@ -11,7 +11,7 @@ serialization(sourceSet = TEST){
cbor()
}
val ioVersion by rootProject.extra("0.2.0-npm-dev-6")
val ioVersion by rootProject.extra("0.2.0-npm-dev-7")
kotlin {
sourceSets {

View File

@ -2,6 +2,8 @@ package hep.dataforge.io.yaml
import hep.dataforge.context.Context
import hep.dataforge.io.*
import hep.dataforge.io.IOFormat.Companion.META_KEY
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import kotlinx.io.*
@ -78,8 +80,8 @@ class FrontMatterEnvelopeFormat(
}
override fun toMeta(): Meta = Meta {
IOPlugin.IO_FORMAT_NAME_KEY put name.toString()
IOPlugin.IO_FORMAT_META_KEY put meta
NAME_KEY put name.toString()
META_KEY put meta
}
companion object : EnvelopeFormatFactory {

View File

@ -1,7 +1,8 @@
package hep.dataforge.io.yaml
import hep.dataforge.context.Context
import hep.dataforge.io.IOPlugin
import hep.dataforge.io.IOFormat.Companion.META_KEY
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.io.MetaFormat
import hep.dataforge.io.MetaFormatFactory
import hep.dataforge.meta.DFExperimental
@ -30,8 +31,8 @@ class YamlMetaFormat(val meta: Meta) : MetaFormat {
}
override fun toMeta(): Meta = Meta{
IOPlugin.IO_FORMAT_NAME_KEY put FrontMatterEnvelopeFormat.name.toString()
IOPlugin.IO_FORMAT_META_KEY put meta
NAME_KEY put FrontMatterEnvelopeFormat.name.toString()
META_KEY put meta
}
companion object : MetaFormatFactory {

View File

@ -2,6 +2,7 @@ package hep.dataforge.io.yaml
import hep.dataforge.io.parse
import hep.dataforge.io.toString
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.meta.get
import hep.dataforge.meta.seal
@ -9,6 +10,7 @@ import kotlin.test.Test
import kotlin.test.assertEquals
@DFExperimental
class YamlMetaFormatTest {
@Test
fun testYamlMetaFormat() {

View File

@ -3,8 +3,8 @@ package hep.dataforge.io
import hep.dataforge.context.Context
import hep.dataforge.context.Factory
import hep.dataforge.context.Named
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.io.IOFormatFactory.Companion.IO_FORMAT_TYPE
import hep.dataforge.io.IOPlugin.Companion.IO_FORMAT_NAME_KEY
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaItem
import hep.dataforge.meta.MetaRepr
@ -23,6 +23,11 @@ import kotlin.reflect.KClass
interface IOFormat<T : Any> : MetaRepr {
fun Output.writeObject(obj: T)
fun Input.readObject(): T
companion object{
val NAME_KEY = "name".asName()
val META_KEY = "meta".asName()
}
}
fun <T : Any> Input.readWith(format: IOFormat<T>): T = format.run { readObject() }
@ -54,7 +59,7 @@ class ListIOFormat<T : Any>(val format: IOFormat<T>) : IOFormat<List<T>> {
}
override fun toMeta(): Meta = Meta {
IO_FORMAT_NAME_KEY put "list"
NAME_KEY put "list"
"contentFormat" put format.toMeta()
}
}
@ -79,7 +84,7 @@ interface IOFormatFactory<T : Any> : Factory<IOFormat<T>>, Named, MetaRepr {
val type: KClass<out T>
override fun toMeta(): Meta = Meta {
IO_FORMAT_NAME_KEY put name.toString()
NAME_KEY put name.toString()
}
companion object {
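
With NAME_KEY and META_KEY now living in IOFormat.Companion, every toMeta() touched by this commit emits the same two-field descriptor: NAME_KEY identifies the format, META_KEY carries its options. A hedged sketch of such a descriptor for a hypothetical format called "my.format":

import hep.dataforge.io.IOFormat.Companion.META_KEY
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.meta.Meta

val descriptor = Meta {
NAME_KEY put "my.format" // hypothetical format name
META_KEY put { // hypothetical format-specific options
"charset" put "UTF-8"
}
}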

View File

@ -2,11 +2,12 @@ package hep.dataforge.io
import hep.dataforge.context.*
import hep.dataforge.io.EnvelopeFormatFactory.Companion.ENVELOPE_FORMAT_TYPE
import hep.dataforge.io.IOFormat.Companion.META_KEY
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.io.IOFormatFactory.Companion.IO_FORMAT_TYPE
import hep.dataforge.io.MetaFormatFactory.Companion.META_FORMAT_TYPE
import hep.dataforge.meta.*
import hep.dataforge.names.Name
import hep.dataforge.names.asName
import hep.dataforge.names.toName
import kotlin.reflect.KClass
@ -18,12 +19,12 @@ class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
}
fun <T : Any> resolveIOFormat(item: MetaItem<*>, type: KClass<out T>): IOFormat<T>? {
val key = item.string ?: item.node[IO_FORMAT_NAME_KEY]?.string ?: error("Format name not defined")
val key = item.string ?: item.node[NAME_KEY]?.string ?: error("Format name not defined")
val name = key.toName()
return ioFormatFactories.find { it.name == name }?.let {
@Suppress("UNCHECKED_CAST")
if (it.type != type) error("Format type ${it.type} is not the same as requested type $type")
else it.invoke(item.node[IO_FORMAT_META_KEY].node ?: Meta.EMPTY, context) as IOFormat<T>
else it.invoke(item.node[META_KEY].node ?: Meta.EMPTY, context) as IOFormat<T>
}
}
@ -46,8 +47,8 @@ class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
envelopeFormatFactories.find { it.name == name }?.invoke(meta, context)
fun resolveEnvelopeFormat(item: MetaItem<*>): EnvelopeFormat? {
val name = item.string ?: item.node[IO_FORMAT_NAME_KEY]?.string ?: error("Envelope format name not defined")
val meta = item.node[IO_FORMAT_META_KEY].node ?: Meta.EMPTY
val name = item.string ?: item.node[NAME_KEY]?.string ?: error("Envelope format name not defined")
val meta = item.node[META_KEY].node ?: Meta.EMPTY
return resolveEnvelopeFormat(name.toName(), meta)
}
@ -60,9 +61,6 @@ class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
}
companion object : PluginFactory<IOPlugin> {
val IO_FORMAT_NAME_KEY = "name".asName()
val IO_FORMAT_META_KEY = "meta".asName()
val defaultMetaFormats: List<MetaFormatFactory> = listOf(JsonMetaFormat, BinaryMetaFormat)
val defaultEnvelopeFormats = listOf(TaggedEnvelopeFormat, TaglessEnvelopeFormat)
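
Resolution reads the same descriptor back: resolveIOFormat and resolveEnvelopeFormat accept either a bare string item holding the format name or a node carrying NAME_KEY plus an optional META_KEY. A hedged sketch of driving resolveEnvelopeFormat from a meta node (the "envelope.tagged" name is illustrative; use whatever name the installed EnvelopeFormatFactory reports):

import hep.dataforge.context.Global
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.io.io
import hep.dataforge.meta.Meta
import hep.dataforge.meta.get

val request = Meta {
"format" put {
NAME_KEY put "envelope.tagged" // illustrative format name
}
}
// Returns null if no registered factory matches the requested name
val envelopeFormat = Global.io.resolveEnvelopeFormat(request["format"]!!)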

View File

@ -4,6 +4,7 @@ package hep.dataforge.io
import hep.dataforge.context.Context
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.meta.Meta
import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.meta.node
@ -26,7 +27,7 @@ class JsonMetaFormat(private val json: Json = DEFAULT_JSON) : MetaFormat {
}
override fun toMeta(): Meta = Meta{
IOPlugin.IO_FORMAT_NAME_KEY put name.toString()
NAME_KEY put name.toString()
}
override fun Input.readMeta(descriptor: NodeDescriptor?): Meta {

View File

@ -1,6 +1,8 @@
package hep.dataforge.io
import hep.dataforge.context.Context
import hep.dataforge.io.IOFormat.Companion.META_KEY
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.meta.Meta
import hep.dataforge.meta.enum
import hep.dataforge.meta.get
@ -99,8 +101,8 @@ class TaggedEnvelopeFormat(
}
override fun toMeta(): Meta = Meta {
IOPlugin.IO_FORMAT_NAME_KEY put name.toString()
IOPlugin.IO_FORMAT_META_KEY put {
NAME_KEY put name.toString()
META_KEY put {
"version" put version
}
}

View File

@ -1,6 +1,8 @@
package hep.dataforge.io
import hep.dataforge.context.Context
import hep.dataforge.io.IOFormat.Companion.META_KEY
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.meta.Meta
import hep.dataforge.meta.get
import hep.dataforge.meta.isEmpty
@ -162,8 +164,8 @@ class TaglessEnvelopeFormat(
}
override fun toMeta(): Meta = Meta {
IOPlugin.IO_FORMAT_NAME_KEY put name.toString()
IOPlugin.IO_FORMAT_META_KEY put meta
NAME_KEY put name.toString()
META_KEY put meta
}
companion object : EnvelopeFormatFactory {

View File

@ -19,9 +19,6 @@ inline fun buildByteArray(expectedSize: Int = 16, block: Output.() -> Unit): Byt
inline fun Binary(expectedSize: Int = 16, block: Output.() -> Unit): Binary =
buildByteArray(expectedSize, block).asBinary()
@Deprecated("To be replaced by Binary.EMPTY", level = DeprecationLevel.WARNING)
val EmptyBinary = ByteArrayBinary(ByteArray(0))
/**
* View section of a [Binary] as an independent binary
*/

View File

@ -1,12 +1,9 @@
package hep.dataforge.io
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaItem
import hep.dataforge.meta.MetaSerializer
import hep.dataforge.meta.*
import hep.dataforge.names.Name
import hep.dataforge.names.toName
import kotlinx.serialization.cbor.Cbor
import kotlinx.serialization.json.Json
import kotlin.test.Test
import kotlin.test.assertEquals
@ -22,8 +19,8 @@ class MetaSerializerTest {
@Test
fun testMetaSerialization() {
val string = Json.indented.stringify(MetaSerializer, meta)
val restored = Json.plain.parse(MetaSerializer, string)
val string = JSON_PRETTY.stringify(MetaSerializer, meta)
val restored = JSON_PLAIN.parse(MetaSerializer, string)
assertEquals(restored, meta)
}
@ -38,8 +35,8 @@ class MetaSerializerTest {
@Test
fun testNameSerialization() {
val name = "a.b.c".toName()
val string = Json.indented.stringify(Name.serializer(), name)
val restored = Json.plain.parse(Name.serializer(), string)
val string = JSON_PRETTY.stringify(Name.serializer(), name)
val restored = JSON_PLAIN.parse(Name.serializer(), string)
assertEquals(restored, name)
}

View File

@ -3,7 +3,6 @@ package hep.dataforge.io.tcp
import hep.dataforge.context.Context
import hep.dataforge.context.ContextAware
import hep.dataforge.io.*
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
import kotlinx.coroutines.asCoroutineDispatcher
import kotlinx.coroutines.withContext
@ -17,7 +16,7 @@ class EnvelopeClient(
val host: String,
val port: Int,
formatFactory: EnvelopeFormatFactory = TaggedEnvelopeFormat,
formatMeta: Meta = EmptyMeta
formatMeta: Meta = Meta.EMPTY
) : Responder, ContextAware {
private val dispatcher = Executors.newSingleThreadExecutor().asCoroutineDispatcher()

View File

@ -6,7 +6,6 @@ import hep.dataforge.io.EnvelopeFormatFactory
import hep.dataforge.io.Responder
import hep.dataforge.io.TaggedEnvelopeFormat
import hep.dataforge.io.type
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
import kotlinx.coroutines.*
import java.net.ServerSocket
@ -19,7 +18,7 @@ class EnvelopeServer(
val responder: Responder,
val scope: CoroutineScope,
formatFactory: EnvelopeFormatFactory = TaggedEnvelopeFormat,
formatMeta: Meta = EmptyMeta
formatMeta: Meta = Meta.EMPTY
) : ContextAware {
private var job: Job? = null
@ -78,7 +77,7 @@ class EnvelopeServer(
logger.debug { "Accepted request with type ${request.type} from ${socket.remoteSocketAddress}" }
if (request.type == SHUTDOWN_ENVELOPE_TYPE) {
//Echo shutdown command
outputStream.write{
outputStream.write {
writeObject(request)
}
logger.info { "Accepted graceful shutdown signal from ${socket.inetAddress}" }

View File

@ -188,7 +188,7 @@ abstract class MetaBase : Meta {
override fun hashCode(): Int = items.hashCode()
override fun toString(): String = PRETTY_JSON.stringify(MetaSerializer, this)
override fun toString(): String = JSON_PRETTY.stringify(MetaSerializer, this)
}
/**

View File

@ -66,4 +66,5 @@ inline fun Encoder.encodeStructure(
encoder.endStructure(desc)
}
val PRETTY_JSON = Json(JsonConfiguration(prettyPrint = true, useArrayPolymorphism = true))
val JSON_PRETTY = Json(JsonConfiguration(prettyPrint = true, useArrayPolymorphism = true))
val JSON_PLAIN = Json(JsonConfiguration(prettyPrint = false, useArrayPolymorphism = true))
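
The rename pairs the two configurations by purpose: JSON_PRETTY backs the human-readable MetaBase.toString() above, while JSON_PLAIN is meant for compact round-tripping. A small sketch mirroring the serializer tests in this commit (JSON_PRETTY and JSON_PLAIN are the vals declared above; import them from their declaring package):

import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaSerializer

val meta = Meta { "a" put 22 }
val pretty = JSON_PRETTY.stringify(MetaSerializer, meta) // indented, for logs and toString()
val plain = JSON_PLAIN.stringify(MetaSerializer, meta) // compact, for storage or transport
val restored = JSON_PLAIN.parse(MetaSerializer, pretty) // parsing accepts either form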

View File

@ -26,7 +26,7 @@ class DynamicMetaTest {
@Test
fun testMetaToDynamic(){
val meta = buildMeta {
val meta = Meta {
"a" put 22
"array" put listOf(1, 2, 3)
"b" put "myString"

View File

@ -2,7 +2,6 @@ package hep.dataforge.output
import hep.dataforge.context.*
import hep.dataforge.context.PluginTag.Companion.DATAFORGE_GROUP
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
import hep.dataforge.names.Name
import kotlinx.coroutines.CoroutineDispatcher
@ -24,7 +23,7 @@ interface OutputManager {
type: KClass<out T>,
name: Name,
stage: Name = Name.EMPTY,
meta: Meta = EmptyMeta
meta: Meta = Meta.EMPTY
): Renderer<T>
}
@ -39,7 +38,7 @@ val Context.output: OutputManager get() = plugins.get() ?: ConsoleOutputManager(
inline operator fun <reified T : Any> OutputManager.get(
name: Name,
stage: Name = Name.EMPTY,
meta: Meta = EmptyMeta
meta: Meta = Meta.EMPTY
): Renderer<T> {
return get(T::class, name, stage, meta)
}
@ -47,7 +46,7 @@ inline operator fun <reified T : Any> OutputManager.get(
/**
* Directly render an object using the most suitable renderer
*/
fun OutputManager.render(obj: Any, name: Name, stage: Name = Name.EMPTY, meta: Meta = EmptyMeta) =
fun OutputManager.render(obj: Any, name: Name, stage: Name = Name.EMPTY, meta: Meta = Meta.EMPTY) =
get(obj::class, name, stage).render(obj, meta)
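
A minimal rendering call with the new default, assuming a context whose OutputManager falls back to ConsoleOutputManager as described above (the "greeting" name is arbitrary):

import hep.dataforge.context.Global
import hep.dataforge.names.toName
import hep.dataforge.output.output
import hep.dataforge.output.render

fun main() {
// meta defaults to Meta.EMPTY; stage defaults to Name.EMPTY
Global.output.render("Hello, DataForge", "greeting".toName())
}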
/**

View File

@ -1,7 +1,6 @@
package hep.dataforge.output
import hep.dataforge.context.ContextAware
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
/**
@ -18,5 +17,5 @@ interface Renderer<in T : Any> : ContextAware {
* By convention actual render is called in asynchronous mode, so this method should never
* block execution
*/
fun render(obj: T, meta: Meta = EmptyMeta)
fun render(obj: T, meta: Meta = Meta.EMPTY)
}

View File

@ -1,12 +1,11 @@
package hep.dataforge.tables.io
import hep.dataforge.io.Binary
import hep.dataforge.io.EmptyBinary
import hep.dataforge.io.Envelope
import hep.dataforge.meta.*
import hep.dataforge.tables.SimpleColumnHeader
import hep.dataforge.tables.Table
import hep.dataforge.values.Value
import kotlinx.io.Binary
import kotlinx.io.ByteArrayOutput
import kotlinx.io.ExperimentalIoApi
import kotlinx.io.asBinary
@ -39,5 +38,5 @@ fun TextRows.Companion.readEnvelope(envelope: Envelope): TextRows {
.map { (_, item) ->
SimpleColumnHeader(item.node["name"].string!!, Value::class, item.node["meta"].node ?: Meta.EMPTY)
}
return TextRows(header, envelope.data ?: EmptyBinary)
return TextRows(header, envelope.data ?: Binary.EMPTY)
}

View File

@ -124,7 +124,7 @@ fun TaskDependencyContainer.allData(to: Name = Name.EMPTY) = AllDataDependency(t
/**
* A builder for [TaskModel]
*/
class TaskModelBuilder(val name: Name, meta: Meta = EmptyMeta) : TaskDependencyContainer {
class TaskModelBuilder(val name: Name, meta: Meta = Meta.EMPTY) : TaskDependencyContainer {
/**
* Meta for current task. By default uses the whole input meta
*/

View File

@ -6,16 +6,19 @@ import hep.dataforge.io.*
import hep.dataforge.meta.*
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
import kotlinx.io.asOutput
import java.nio.file.FileSystem
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.StandardCopyOption
import java.nio.file.StandardOpenOption
import java.nio.file.spi.FileSystemProvider
import java.util.zip.ZipEntry
import java.util.zip.ZipOutputStream
import kotlin.reflect.KClass
typealias FileFormatResolver<T> = (Path, Meta) -> IOFormat<T>
//private val zipFSProvider = ZipFileSystemProvider()
private fun newZFS(path: Path): FileSystem {
val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
@ -150,12 +153,43 @@ suspend fun <T : Any> IOPlugin.writeDataDirectory(
}
}
private suspend fun <T : Any> ZipOutputStream.writeNode(
name: String,
item: DataItem<T>,
dataFormat: IOFormat<T>,
envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat
) {
withContext(Dispatchers.IO) {
when (item) {
is DataItem.Leaf -> {
//TODO add directory-based envelope writer
val envelope = item.data.toEnvelope(dataFormat)
val entry = ZipEntry(name)
putNextEntry(entry)
envelopeFormat.run {
asOutput().writeObject(envelope)
}
}
is DataItem.Node -> {
val entry = ZipEntry("$name/")
putNextEntry(entry)
closeEntry()
item.node.items.forEach { (token, item) ->
val childName = "$name/$token"
writeNode(childName, item, dataFormat, envelopeFormat)
}
}
}
}
}
@DFExperimental
suspend fun <T : Any> IOPlugin.writeZip(
path: Path,
node: DataNode<T>,
format: IOFormat<T>,
envelopeFormat: EnvelopeFormat? = null,
metaFormat: MetaFormatFactory? = null
envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat
) {
withContext(Dispatchers.IO) {
val actualFile = if (path.toString().endsWith(".zip")) {
@ -163,21 +197,27 @@ suspend fun <T : Any> IOPlugin.writeZip(
} else {
path.resolveSibling(path.fileName.toString() + ".zip")
}
if (Files.exists(actualFile) && Files.size(path) == 0.toLong()) {
Files.delete(path)
}
//Files.createFile(actualFile)
newZFS(actualFile).use { zipfs ->
val internalTargetPath = zipfs.getPath("/")
Files.createDirectories(internalTargetPath)
val tmp = Files.createTempDirectory("df_zip")
writeDataDirectory(tmp, node, format, envelopeFormat, metaFormat)
Files.list(tmp).forEach { sourcePath ->
val targetPath = sourcePath.fileName.toString()
val internalTargetPath = internalTargetPath.resolve(targetPath)
Files.copy(sourcePath, internalTargetPath, StandardCopyOption.REPLACE_EXISTING)
}
val fos = Files.newOutputStream(actualFile, StandardOpenOption.WRITE, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)
val zos = ZipOutputStream(fos)
zos.use {
it.writeNode("", DataItem.Node(node), format, envelopeFormat)
}
// if (Files.exists(actualFile) && Files.size(path) == 0.toLong()) {
// Files.delete(path)
// }
// //Files.createFile(actualFile)
// newZFS(actualFile).use { zipfs ->
// val zipRootPath = zipfs.getPath("/")
// Files.createDirectories(zipRootPath)
// val tmp = Files.createTempDirectory("df_zip")
// writeDataDirectory(tmp, node, format, envelopeFormat, metaFormat)
// Files.list(tmp).forEach { sourcePath ->
// val targetPath = sourcePath.fileName.toString()
// val internalTargetPath = zipRootPath.resolve(targetPath)
// Files.copy(sourcePath, internalTargetPath, StandardCopyOption.REPLACE_EXISTING)
// }
// }
}
}
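
The rewritten writer streams envelopes straight into a java.util.zip.ZipOutputStream instead of staging a temporary directory inside a jar FileSystem. A hedged usage sketch in the spirit of the testZipWriteRead test below; StringFormat is a stand-in IOFormat<String> modeled on the one defined in FileDataTest:

import hep.dataforge.context.Global
import hep.dataforge.data.DataNode
import hep.dataforge.data.static
import hep.dataforge.io.IOFormat
import hep.dataforge.io.IOFormat.Companion.NAME_KEY
import hep.dataforge.io.io
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import hep.dataforge.names.toName
import kotlinx.coroutines.runBlocking
import kotlinx.io.Input
import kotlinx.io.Output
import kotlinx.io.text.readUtf8String
import kotlinx.io.text.writeUtf8String
import java.nio.file.Files

// Minimal UTF-8 string format, assumed for illustration
object StringFormat : IOFormat<String> {
override fun Output.writeObject(obj: String) = writeUtf8String(obj)
override fun Input.readObject(): String = readUtf8String()
override fun toMeta(): Meta = Meta { NAME_KEY put "string" }
}

@DFExperimental
fun main() = runBlocking {
val node = DataNode<String> {
static("a".toName(), "text A")
node("dir") {
static("b".toName(), "text B")
}
}
// writeZip appends ".zip" itself when the extension is missing
val zip = Files.createTempFile("df_data_node", ".zip")
Global.io.writeZip(zip, node, StringFormat) // envelopeFormat defaults to TaggedEnvelopeFormat
}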

View File

@ -5,16 +5,17 @@ import hep.dataforge.data.*
import hep.dataforge.io.IOFormat
import hep.dataforge.io.io
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta
import kotlinx.coroutines.runBlocking
import kotlinx.io.Input
import kotlinx.io.Output
import kotlinx.io.text.readUtf8String
import kotlinx.io.text.writeUtf8String
import java.nio.file.Files
import kotlin.test.Ignore
import kotlin.test.Test
import kotlin.test.assertEquals
class FileDataTest {
val dataNode = DataNode<String> {
node("dir") {
@ -37,6 +38,10 @@ class FileDataTest {
return readUtf8String()
}
override fun toMeta(): Meta = Meta {
IOFormat.NAME_KEY put "string"
}
}
@Test
@ -56,7 +61,7 @@ class FileDataTest {
@Test
@Ignore
@DFExperimental
fun testZipWriteRead() {
Global.io.run {
val zip = Files.createTempFile("df_data_node", ".zip")