diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml
new file mode 100644
index 00000000..adc74adf
--- /dev/null
+++ b/.github/workflows/gradle.yml
@@ -0,0 +1,17 @@
+name: Gradle build
+
+on: [push]
+
+jobs:
+  build:
+
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v1
+    - name: Set up JDK 11
+      uses: actions/setup-java@v1
+      with:
+        java-version: 11
+    - name: Build with Gradle
+      run: ./gradlew build
diff --git a/README.md b/README.md
index 79fa4cf1..b712501c 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
-
+[![DOI](https://zenodo.org/badge/148831678.svg)](https://zenodo.org/badge/latestdoi/148831678)

# Questions and Answers #

diff --git a/build.gradle.kts b/build.gradle.kts
index 7f190853..cf2f4387 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -1,12 +1,12 @@
import scientifik.ScientifikExtension

plugins {
-    id("scientifik.mpp") version "0.2.2" apply false
-    id("scientifik.jvm") version "0.2.2" apply false
-    id("scientifik.publish") version "0.2.2" apply false
+    id("scientifik.mpp") version "0.2.4" apply false
+    id("scientifik.jvm") version "0.2.4" apply false
+    id("scientifik.publish") version "0.2.4" apply false
}

-val dataforgeVersion by extra("0.1.5-dev-2")
+val dataforgeVersion by extra("0.1.5-dev-3")

val bintrayRepo by extra("dataforge")
val githubProject by extra("dataforge-core")
diff --git a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataNode.kt b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataNode.kt
index 65c07676..a673f0b7 100644
--- a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataNode.kt
+++ b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/DataNode.kt
@@ -15,20 +15,20 @@ sealed class DataItem : MetaRepr {

    abstract val meta: Meta

-    class Node(val value: DataNode) : DataItem() {
-        override val type: KClass get() = value.type
+    class Node(val node: DataNode) : DataItem() {
+        override val type: KClass get() = node.type

-        override fun toMeta(): Meta = value.toMeta()
+        override fun toMeta(): Meta = node.toMeta()

-        override val meta: Meta get() = value.meta
+        override val meta: Meta get() = node.meta
    }

-    class Leaf(val value: Data) : DataItem() {
-        override val type: KClass get() = value.type
+    class Leaf(val data: Data) : DataItem() {
+        override val type: KClass get() = data.type

-        override fun toMeta(): Meta = value.toMeta()
+        override fun toMeta(): Meta = data.toMeta()

-        override val meta: Meta get() = value.meta
+        override val meta: Meta get() = data.meta
    }
}

@@ -68,8 +68,8 @@ interface DataNode : MetaRepr {
    }
}

-val DataItem?.node: DataNode? get() = (this as? DataItem.Node)?.value
-val DataItem?.data: Data? get() = (this as? DataItem.Leaf)?.value
+val DataItem?.node: DataNode? get() = (this as? DataItem.Node)?.node
+val DataItem?.data: Data? get() = (this as? DataItem.Leaf)?.data

/**
 * Start computation for all goals in data node and return a job for the whole node
 */
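For orientation, a short Kotlin sketch (not part of the patch) of how the renamed `DataItem` members read at a call site. It assumes the usual generic signatures `DataItem<T>`, `DataNode<T>`, `Data<T>`, which the plain-text rendering of this diff strips.

```kotlin
import hep.dataforge.data.*

// Sketch only: walk one level of a DataNode and branch on the renamed members
// (`node`/`data` instead of the old `value`).
fun <T : Any> describeTopLevel(root: DataNode<T>) {
    root.items.forEach { (token, item) ->
        when (item) {
            is DataItem.Node -> println("$token -> subtree of type ${item.node.type}")
            is DataItem.Leaf -> println("$token -> datum of type ${item.data.type}")
        }
    }
    // The nullable extensions return null on a miss or on a kind mismatch.
    val firstData: Data<T>? = root.items.values.firstOrNull().data
    val firstNode: DataNode<T>? = root.items.values.firstOrNull().node
    println("first item: data=$firstData, node=$firstNode")
}
```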
@@ -77,8 +77,8 @@ val DataItem?.data: Data? get() = (this as? DataItem.Leaf)?.v
fun DataNode<*>.launchAll(scope: CoroutineScope): Job = scope.launch {
    items.values.forEach {
        when (it) {
-            is DataItem.Node<*> -> it.value.launchAll(scope)
-            is DataItem.Leaf<*> -> it.value.start(scope)
+            is DataItem.Node<*> -> it.node.launchAll(scope)
+            is DataItem.Leaf<*> -> it.data.start(scope)
        }
    }
}
@@ -98,7 +98,7 @@ fun DataNode.asSequence(): Sequence>> = sequ
    items.forEach { (head, item) ->
        yield(head.asName() to item)
        if (item is DataItem.Node) {
-            val subSequence = item.value.asSequence()
+            val subSequence = item.node.asSequence()
                .map { (name, data) -> (head.asName() + name) to data }
            yieldAll(subSequence)
        }
@@ -111,9 +111,9 @@ fun DataNode.asSequence(): Sequence>> = sequ
fun DataNode.dataSequence(): Sequence>> = sequence {
    items.forEach { (head, item) ->
        when (item) {
-            is DataItem.Leaf -> yield(head.asName() to item.value)
+            is DataItem.Leaf -> yield(head.asName() to item.data)
            is DataItem.Node -> {
-                val subSequence = item.value.dataSequence()
+                val subSequence = item.node.dataSequence()
                    .map { (name, data) -> (head.asName() + name) to data }
                yieldAll(subSequence)
            }
@@ -188,8 +188,8 @@ class DataTreeBuilder(val type: KClass) {
    operator fun set(name: Name, node: DataNode) = set(name, node.builder())

    operator fun set(name: Name, item: DataItem) = when (item) {
-        is DataItem.Node -> set(name, item.value.builder())
-        is DataItem.Leaf -> set(name, item.value)
+        is DataItem.Node -> set(name, item.node.builder())
+        is DataItem.Leaf -> set(name, item.data)
    }

    /**
@@ -223,6 +223,10 @@ class DataTreeBuilder(val type: KClass) {
    fun meta(block: MetaBuilder.() -> Unit) = meta.apply(block)

+    fun meta(meta: Meta) {
+        this.meta = meta.builder()
+    }
+
    fun build(): DataTree {
        val resMap = map.mapValues { (_, value) ->
            when (value) {
diff --git a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/dataCast.kt b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/dataCast.kt
index 0b9a4910..2bf8adde 100644
--- a/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/dataCast.kt
+++ b/dataforge-data/src/commonMain/kotlin/hep/dataforge/data/dataCast.kt
@@ -28,8 +28,8 @@ expect fun DataNode<*>.canCast(type: KClass): Boolean
expect fun Data<*>.canCast(type: KClass): Boolean

fun DataItem<*>.canCast(type: KClass): Boolean = when (this) {
-    is DataItem.Node -> value.canCast(type)
-    is DataItem.Leaf -> value.canCast(type)
+    is DataItem.Node -> node.canCast(type)
+    is DataItem.Leaf -> data.canCast(type)
}

/**
diff --git a/dataforge-data/src/jvmMain/kotlin/hep/dataforge/data/TypeFilteredDataNode.kt b/dataforge-data/src/jvmMain/kotlin/hep/dataforge/data/TypeFilteredDataNode.kt
index 331f3b0e..3590679c 100644
--- a/dataforge-data/src/jvmMain/kotlin/hep/dataforge/data/TypeFilteredDataNode.kt
+++ b/dataforge-data/src/jvmMain/kotlin/hep/dataforge/data/TypeFilteredDataNode.kt
@@ -14,12 +14,12 @@ class TypeFilteredDataNode(val origin: DataNode<*>, override val ty
    origin.items.mapNotNull { (key, item) ->
        when (item) {
            is DataItem.Leaf -> {
-                (item.value.filterIsInstance(type))?.let {
+                (item.data.filterIsInstance(type))?.let {
                    key to DataItem.Leaf(it)
                }
            }
            is DataItem.Node -> {
-                key to DataItem.Node(item.value.filterIsInstance(type))
+                key to DataItem.Node(item.node.filterIsInstance(type))
            }
        }
    }.associate { it }
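Below is a hedged sketch (not part of the patch) of the new `DataTreeBuilder.meta(Meta)` overload next to the existing block form, plus a `dataSequence()` traversal. The `DataNode(String::class) { ... }` entry point and the `static`/`node` helpers are taken from usages elsewhere in this diff; the generic parameters are assumed.

```kotlin
import hep.dataforge.data.*
import hep.dataforge.meta.Meta

// Sketch only: node-level meta can now be copied wholesale from an existing Meta
// instead of being rebuilt block by block.
fun sketchTree(existingMeta: Meta): DataNode<String> = DataNode(String::class) {
    meta(existingMeta)            // new overload: replaces the builder content
    meta { "revision" put 3 }     // the old block form still works for tweaks
    static("greeting", "hello")
    node("nested") {
        static("answer", "42")
    }
}

// dataSequence() flattens the tree into (Name, Data) pairs, descending into child nodes.
fun listAll(node: DataNode<String>) {
    node.dataSequence().forEach { (name, data) -> println("$name -> ${data.type}") }
}
```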
diff --git a/dataforge-data/src/jvmMain/kotlin/hep/dataforge/data/dataJVM.kt b/dataforge-data/src/jvmMain/kotlin/hep/dataforge/data/dataJVM.kt
index 29d048ed..f354c2f7 100644
--- a/dataforge-data/src/jvmMain/kotlin/hep/dataforge/data/dataJVM.kt
+++ b/dataforge-data/src/jvmMain/kotlin/hep/dataforge/data/dataJVM.kt
@@ -42,8 +42,8 @@ fun DataNode<*>.filterIsInstance(type: KClass): DataNode {
 */
fun DataItem<*>?.filterIsInstance(type: KClass): DataItem? = when (this) {
    null -> null
-    is DataItem.Node -> DataItem.Node(this.value.filterIsInstance(type))
-    is DataItem.Leaf -> this.value.filterIsInstance(type)?.let { DataItem.Leaf(it) }
+    is DataItem.Node -> DataItem.Node(this.node.filterIsInstance(type))
+    is DataItem.Leaf -> this.data.filterIsInstance(type)?.let { DataItem.Leaf(it) }
}

inline fun DataItem<*>?.filterIsInstance(): DataItem? = this@filterIsInstance.filterIsInstance(R::class)
\ No newline at end of file
diff --git a/dataforge-io/dataforge-io-yaml/build.gradle.kts b/dataforge-io/dataforge-io-yaml/build.gradle.kts
index 74ba43cf..d287d9ac 100644
--- a/dataforge-io/dataforge-io-yaml/build.gradle.kts
+++ b/dataforge-io/dataforge-io-yaml/build.gradle.kts
@@ -7,6 +7,4 @@ description = "YAML meta IO"
dependencies {
    api(project(":dataforge-io"))
    api("org.yaml:snakeyaml:1.25")
-    testImplementation(kotlin("test"))
-    testImplementation(kotlin("test-junit"))
}
diff --git a/dataforge-io/dataforge-io-yaml/src/test/kotlin/hep/dataforge/io/yaml/YamlMetaFormatTest.kt b/dataforge-io/dataforge-io-yaml/src/test/kotlin/hep/dataforge/io/yaml/YamlMetaFormatTest.kt
index 414162f7..5c5b3c18 100644
--- a/dataforge-io/dataforge-io-yaml/src/test/kotlin/hep/dataforge/io/yaml/YamlMetaFormatTest.kt
+++ b/dataforge-io/dataforge-io-yaml/src/test/kotlin/hep/dataforge/io/yaml/YamlMetaFormatTest.kt
@@ -6,7 +6,7 @@ import hep.dataforge.meta.Meta
import hep.dataforge.meta.buildMeta
import hep.dataforge.meta.get
import hep.dataforge.meta.seal
-import org.junit.Test
+import org.junit.jupiter.api.Test
import kotlin.test.assertEquals

diff --git a/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/Binary.kt b/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/Binary.kt
index bd1f2249..b671928b 100644
--- a/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/Binary.kt
+++ b/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/Binary.kt
@@ -10,7 +10,7 @@ interface Binary {
    /**
     * The size of binary in bytes. [ULong.MAX_VALUE] if size is not defined and input should be read until its end is reached
     */
-    val size: ULong
+    val size: ULong get() = ULong.MAX_VALUE

    /**
     * Read continuous [Input] from this binary stating from the beginning.
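A minimal sketch (not from the patch) of what the defaulted `size` buys: an ad-hoc `Binary` now only has to supply `read`, mirroring the anonymous `Binary` added in `envelopeData.kt` later in this diff. The `<R>` type parameter on `read` is assumed, since the plain-text diff drops generics.

```kotlin
import hep.dataforge.io.Binary
import kotlinx.io.core.Input
import kotlinx.io.core.buildPacket
import kotlinx.io.core.readText
import kotlinx.io.core.writeText

// Sketch only: size is inherited from the interface default (ULong.MAX_VALUE,
// i.e. "unknown length, read until the input ends").
val greetingBinary: Binary = object : Binary {
    override fun <R> read(block: Input.() -> R): R {
        val packet = buildPacket { writeText("hello, binary") }
        return packet.block()
    }
}

fun main() {
    println(greetingBinary.read { readText() }) // "hello, binary"
}
```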
@@ -41,7 +41,11 @@ interface RandomAccessBinary : Binary {
}

fun Binary.toBytes(): ByteArray = read {
-    this.readBytes()
+    readBytes()
+}
+
+fun Binary.contentToString(): String = read {
+    readText()
}

@ExperimentalUnsignedTypes
diff --git a/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/Envelope.kt b/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/Envelope.kt
index 7cb918df..abf8a504 100644
--- a/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/Envelope.kt
+++ b/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/Envelope.kt
@@ -83,4 +83,3 @@ fun Envelope.withMetaLayers(vararg layers: Meta): Envelope {
        else -> ProxyEnvelope(this, *layers)
    }
}
-
diff --git a/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/EnvelopeFormat.kt b/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/EnvelopeFormat.kt
index 4f747ea2..49a25919 100644
--- a/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/EnvelopeFormat.kt
+++ b/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/EnvelopeFormat.kt
@@ -23,11 +23,15 @@ interface EnvelopeFormat : IOFormat {

    fun Input.readPartial(): PartialEnvelope

-    fun Output.writeEnvelope(envelope: Envelope, metaFormatFactory: MetaFormatFactory, formatMeta: Meta = EmptyMeta)
+    fun Output.writeEnvelope(
+        envelope: Envelope,
+        metaFormatFactory: MetaFormatFactory = defaultMetaFormat,
+        formatMeta: Meta = EmptyMeta
+    )

    override fun Input.readObject(): Envelope

-    override fun Output.writeObject(obj: Envelope): Unit = writeEnvelope(obj, defaultMetaFormat)
+    override fun Output.writeObject(obj: Envelope): Unit = writeEnvelope(obj)
}

@Type(ENVELOPE_FORMAT_TYPE)
diff --git a/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/EnvelopeParts.kt b/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/EnvelopeParts.kt
index eb1dd696..d1b86195 100644
--- a/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/EnvelopeParts.kt
+++ b/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/EnvelopeParts.kt
@@ -3,6 +3,7 @@ package hep.dataforge.io
import hep.dataforge.context.Global
import hep.dataforge.io.EnvelopeParts.FORMAT_META_KEY
import hep.dataforge.io.EnvelopeParts.FORMAT_NAME_KEY
+import hep.dataforge.io.EnvelopeParts.INDEX_KEY
import hep.dataforge.io.EnvelopeParts.MULTIPART_DATA_TYPE
import hep.dataforge.io.EnvelopeParts.SIZE_KEY
import hep.dataforge.meta.*
@@ -13,6 +14,7 @@ import hep.dataforge.names.toName
object EnvelopeParts {
    val MULTIPART_KEY = "multipart".asName()
    val SIZE_KEY = Envelope.ENVELOPE_NODE_KEY + MULTIPART_KEY + "size"
+    val INDEX_KEY = Envelope.ENVELOPE_NODE_KEY + MULTIPART_KEY + "index"
    val FORMAT_NAME_KEY = Envelope.ENVELOPE_NODE_KEY + MULTIPART_KEY + "format"
    val FORMAT_META_KEY = Envelope.ENVELOPE_NODE_KEY + MULTIPART_KEY + "meta"

@@ -37,8 +39,37 @@ fun EnvelopeBuilder.multipart(format: EnvelopeFormatFactory, envelopes: Collecti
    }
}

-fun EnvelopeBuilder.multipart(formatFactory: EnvelopeFormatFactory, builder: suspend SequenceScope.() -> Unit) =
-    multipart(formatFactory, sequence(builder).toList())
+/**
+ * Create a multipart partition in the envelope adding additional name-index mapping in meta
+ */
+@DFExperimental
+fun EnvelopeBuilder.multipart(format: EnvelopeFormatFactory, envelopes: Map) {
+    dataType = MULTIPART_DATA_TYPE
+    meta {
+        SIZE_KEY put envelopes.size
+        FORMAT_NAME_KEY put format.name.toString()
+    }
+    data {
+        format.run {
+            var counter = 0
+            envelopes.forEach { (key, envelope) ->
+                writeObject(envelope)
+                meta {
+                    append(INDEX_KEY, buildMeta {
+                        "key" put key
+                        "index" put counter
+                    })
+                }
+                counter++
+            }
+        }
+    }
+}
+
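A hedged sketch of the new map-based `multipart` overload. `EnvelopeBuilder`, its `meta { }` block, and the overload itself come from this diff; treating the `TaggedEnvelopeFormat` companion as an `EnvelopeFormatFactory` argument is an assumption.

```kotlin
import hep.dataforge.io.*
import hep.dataforge.meta.DFExperimental

// Sketch only: fill an EnvelopeBuilder with named parts. The overload records a
// key-to-index mapping under EnvelopeParts.INDEX_KEY, so parts can later be located
// by name as well as by position.
@DFExperimental
fun EnvelopeBuilder.indexedArchive(parts: Map<String, Envelope>) {
    meta { "description" put "named multipart archive" }
    multipart(TaggedEnvelopeFormat, parts)
}
```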
+fun EnvelopeBuilder.multipart(
+    formatFactory: EnvelopeFormatFactory,
+    builder: suspend SequenceScope.() -> Unit
+) = multipart(formatFactory, sequence(builder).toList())

/**
 * If given envelope supports multipart data, return a sequence of those parts (could be empty). Otherwise return null.
diff --git a/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/IOFormat.kt b/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/IOFormat.kt
index bd1e54f4..e5497365 100644
--- a/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/IOFormat.kt
+++ b/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/IOFormat.kt
@@ -79,8 +79,10 @@ inline fun buildPacketWithoutPool(headerSizeHint: Int = 0, block: BytePacketBuil
}

fun IOFormat.writePacket(obj: T): ByteReadPacket = buildPacket { writeObject(obj) }
-//TODO Double buffer copy. fix all that with IO-2
+
+@Deprecated("Not to be used outside tests due to double buffer write")
fun IOFormat.writeBytes(obj: T): ByteArray = buildPacket { writeObject(obj) }.readBytes()
+@Deprecated("Not to be used outside tests due to double buffer write")
fun IOFormat.readBytes(array: ByteArray): T = buildPacket { writeFully(array) }.readObject()

object DoubleIOFormat : IOFormat, IOFormatFactory {
diff --git a/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/IOPlugin.kt b/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/IOPlugin.kt
index ae88e4f4..eb975029 100644
--- a/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/IOPlugin.kt
+++ b/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/IOPlugin.kt
@@ -20,7 +20,7 @@ class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
        metaFormatFactories.find { it.key == key }?.invoke(meta)

    fun metaFormat(name: String, meta: Meta = EmptyMeta): MetaFormat? =
-        metaFormatFactories.find { it.name.toString() == name }?.invoke(meta)
+        metaFormatFactories.find { it.name.last().toString() == name }?.invoke(meta)

    val envelopeFormatFactories by lazy {
        context.content(ENVELOPE_FORMAT_TYPE).values
@@ -52,7 +52,7 @@ class IOPlugin(meta: Meta) : AbstractPlugin(meta) {
    companion object : PluginFactory {
        val defaultMetaFormats: List = listOf(JsonMetaFormat, BinaryMetaFormat)
-        val defaultEnvelopeFormats = listOf(TaggedEnvelopeFormat)
+        val defaultEnvelopeFormats = listOf(TaggedEnvelopeFormat, TaglessEnvelopeFormat)

        override val tag: PluginTag = PluginTag("io", group = PluginTag.DATAFORGE_GROUP)
diff --git a/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/TaggedEnvelopeFormat.kt b/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/TaggedEnvelopeFormat.kt
index a95b7bfb..a461d257 100644
--- a/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/TaggedEnvelopeFormat.kt
+++ b/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/TaggedEnvelopeFormat.kt
@@ -38,15 +38,15 @@ class TaggedEnvelopeFormat(

    override fun Output.writeEnvelope(envelope: Envelope, metaFormatFactory: MetaFormatFactory, formatMeta: Meta) {
        val metaFormat = metaFormatFactory.invoke(formatMeta, io.context)
-        val metaBytes = metaFormat.writeBytes(envelope.meta)
+        val metaBytes = metaFormat.writePacket(envelope.meta)
        val actualSize: ULong = if (envelope.data == null) {
            0u
        } else {
            envelope.data?.size ?: ULong.MAX_VALUE
        }
-        val tag = Tag(metaFormatFactory.key, metaBytes.size.toUInt() + 2u, actualSize)
+        val tag = Tag(metaFormatFactory.key, metaBytes.remaining.toUInt() + 2u, actualSize)
        writePacket(tag.toBytes())
-        writeFully(metaBytes)
+        writePacket(metaBytes)
        writeText("\r\n")
        envelope.data?.read { copyTo(this@writeEnvelope) }
        flush()
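Since `writeEnvelope` now defaults its `MetaFormatFactory` to the format's own `defaultMetaFormat`, callers can omit the meta format entirely. A sketch (not from the patch); it relies on the `TaggedEnvelopeFormat` companion being usable as an `EnvelopeFormat`, as the `writeEnvelopeFile` default elsewhere in this diff suggests.

```kotlin
import hep.dataforge.io.Envelope
import hep.dataforge.io.TaggedEnvelopeFormat
import hep.dataforge.io.writeEnvelope
import kotlinx.io.core.ByteReadPacket
import kotlinx.io.core.buildPacket

// Sketch only: serialize an envelope into a packet; the envelope meta is encoded
// with the format's defaultMetaFormat because no factory is passed explicitly.
fun envelopeToPacket(envelope: Envelope): ByteReadPacket = buildPacket {
    TaggedEnvelopeFormat.run {
        writeEnvelope(envelope)
    }
}
```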
diff --git a/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/TaglessEnvelopeFormat.kt b/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/TaglessEnvelopeFormat.kt
index a8f84be0..1cc62a2b 100644
--- a/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/TaglessEnvelopeFormat.kt
+++ b/dataforge-io/src/commonMain/kotlin/hep/dataforge/io/TaglessEnvelopeFormat.kt
@@ -37,10 +37,10 @@ class TaglessEnvelopeFormat(

        //Printing meta
        if (!envelope.meta.isEmpty()) {
-            val metaBytes = metaFormat.writeBytes(envelope.meta)
-            writeProperty(META_LENGTH_PROPERTY, metaBytes.size)
+            val metaBytes = metaFormat.writePacket(envelope.meta)
+            writeProperty(META_LENGTH_PROPERTY, metaBytes.remaining)
            writeText(metaStart + "\r\n")
-            writeFully(metaBytes)
+            writePacket(metaBytes)
            writeText("\r\n")
        }

@@ -192,7 +192,7 @@ class TaglessEnvelopeFormat(
        return try {
            val buffer = ByteArray(TAGLESS_ENVELOPE_HEADER.length)
            input.readFully(buffer)
-            return if (buffer.toString() == TAGLESS_ENVELOPE_HEADER) {
+            return if (String(buffer) == TAGLESS_ENVELOPE_HEADER) {
                TaglessEnvelopeFormat(io)
            } else {
                null
diff --git a/dataforge-io/src/jvmMain/kotlin/hep/dataforge/io/fileIO.kt b/dataforge-io/src/jvmMain/kotlin/hep/dataforge/io/fileIO.kt
index 14e4c077..9203d306 100644
--- a/dataforge-io/src/jvmMain/kotlin/hep/dataforge/io/fileIO.kt
+++ b/dataforge-io/src/jvmMain/kotlin/hep/dataforge/io/fileIO.kt
@@ -4,6 +4,9 @@ import hep.dataforge.descriptors.NodeDescriptor
import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta
+import hep.dataforge.meta.isEmpty
+import kotlinx.io.core.Output
+import kotlinx.io.core.copyTo
import kotlinx.io.nio.asInput
import kotlinx.io.nio.asOutput
import java.nio.file.Files
@@ -12,10 +15,15 @@ import java.nio.file.StandardOpenOption
import kotlin.reflect.full.isSuperclassOf
import kotlin.streams.asSequence

+/**
+ * Resolve IOFormat based on type
+ */
+@DFExperimental
inline fun IOPlugin.resolveIOFormat(): IOFormat? {
    return ioFormats.values.find { it.type.isSuperclassOf(T::class) } as IOFormat?
}

+
/**
 * Read file containing meta using given [formatOverride] or file extension to infer meta type.
 * If [path] is a directory search for file starting with `meta` in it
@@ -43,11 +51,12 @@ fun IOPlugin.readMetaFile(path: Path, formatOverride: MetaFormat? = null, descri
 */
fun IOPlugin.writeMetaFile(
    path: Path,
+    meta: Meta,
    metaFormat: MetaFormatFactory = JsonMetaFormat,
    descriptor: NodeDescriptor? = null
) {
    val actualPath = if (Files.isDirectory(path)) {
-        path.resolve(metaFormat.name.toString())
+        path.resolve("@" + metaFormat.name.toString())
    } else {
        path
    }
@@ -58,6 +67,28 @@ fun IOPlugin.writeMetaFile(
    }
}

+/**
+ * Return inferred [EnvelopeFormat] if only one format could read given file. If no format accepts file, return null. If
+ * multiple formats accepts file, throw an error.
+ */
+fun IOPlugin.peekBinaryFormat(path: Path): EnvelopeFormat? {
+    val binary = path.asBinary()
+    val formats = envelopeFormatFactories.mapNotNull { factory ->
+        binary.read {
+            factory.peekFormat(this@peekBinaryFormat, this@read)
+        }
+    }
+
+    return when (formats.size) {
+        0 -> null
+        1 -> formats.first()
+        else -> error("Envelope format binary recognition clash")
+    }
+}
+
+val IOPlugin.Companion.META_FILE_NAME: String get() = "@meta"
+val IOPlugin.Companion.DATA_FILE_NAME: String get() = "@data"
+
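A short usage sketch (not from the patch) for the new `peekBinaryFormat` helper, probing a file against the registered envelope formats via the `Global.io` plugin that the tests in this diff also use.

```kotlin
import hep.dataforge.context.Global
import hep.dataforge.io.io
import hep.dataforge.io.peekBinaryFormat
import java.nio.file.Path

// Sketch only: peekBinaryFormat returns the single matching EnvelopeFormat,
// null when nothing matches, and fails on an ambiguous file.
fun describeEnvelopeFile(path: Path) {
    Global.io.run {
        when (val format = peekBinaryFormat(path)) {
            null -> println("$path is not a recognized envelope")
            else -> println("$path looks like ${format::class.simpleName}")
        }
    }
}
```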
/**
 * Read and envelope from file if the file exists, return null if file does not exist.
 *
 * @param metaFile the relative file for optional meta override
 * @param metaFileFormat the meta format for override
 * Return null otherwise.
 */
@DFExperimental
-fun IOPlugin.readEnvelopeFile(path: Path, readNonEnvelopes: Boolean = false): Envelope? {
+fun IOPlugin.readEnvelopeFile(
+    path: Path,
+    readNonEnvelopes: Boolean = false,
+    formatPeeker: IOPlugin.(Path) -> EnvelopeFormat? = IOPlugin::peekBinaryFormat
+): Envelope? {
    if (!Files.exists(path)) return null

    //read two-files directory
    if (Files.isDirectory(path)) {
        val metaFile = Files.list(path).asSequence()
-            .singleOrNull { it.fileName.toString().startsWith("meta") }
+            .singleOrNull { it.fileName.toString().startsWith(IOPlugin.META_FILE_NAME) }

        val meta = if (metaFile == null) {
            EmptyMeta
@@ -86,7 +121,7 @@ fun IOPlugin.readEnvelopeFile(path: Path, readNonEnvelopes: Boolean = false): En
            readMetaFile(metaFile)
        }

-        val dataFile = path.resolve("data")
+        val dataFile = path.resolve(IOPlugin.DATA_FILE_NAME)

        val data: Binary? = if (Files.exists(dataFile)) {
            dataFile.asBinary()
@@ -97,41 +132,76 @@ fun IOPlugin.readEnvelopeFile(path: Path, readNonEnvelopes: Boolean = false): En
        return SimpleEnvelope(meta, data)
    }

-    val binary = path.asBinary()
+    return formatPeeker(path)?.let { format ->
+        FileEnvelope(path, format)
+    } ?: if (readNonEnvelopes) { // if no format accepts file, read it as binary
+        SimpleEnvelope(Meta.empty, path.asBinary())
+    } else null
+}
-    val formats = envelopeFormatFactories.mapNotNull { factory ->
-        binary.read {
-            factory.peekFormat(this@readEnvelopeFile, this@read)
-        }
-    }
-    return when (formats.size) {
-        0 -> if (readNonEnvelopes) {
-            SimpleEnvelope(Meta.empty, binary)
-        } else {
-            null
-        }
-        1 -> formats.first().run {
-            binary.read {
-                readObject()
-            }
-        }
-        else -> error("Envelope format file recognition clash")
+private fun Path.useOutput(consumer: Output.() -> Unit) {
+    //TODO forbid rewrite?
+    Files.newByteChannel(
+        this,
+        StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING
+    ).asOutput().use {
+        it.consumer()
+        it.flush()
    }
}

+/**
+ * Write a binary into file. Throws an error if file already exists
+ */
+fun IOFormat.writeToFile(path: Path, obj: T) {
+    path.useOutput {
+        writeObject(obj)
+        flush()
+    }
+}
+
+/**
+ * Write envelope file to given [path] using [envelopeFormat] and optional [metaFormat]
+ */
+@DFExperimental
fun IOPlugin.writeEnvelopeFile(
    path: Path,
    envelope: Envelope,
-    format: EnvelopeFormat = TaggedEnvelopeFormat
+    envelopeFormat: EnvelopeFormat = TaggedEnvelopeFormat,
+    metaFormat: MetaFormatFactory? = null
) {
-    val output = Files.newByteChannel(
-        path,
-        StandardOpenOption.WRITE,
-        StandardOpenOption.CREATE,
-        StandardOpenOption.TRUNCATE_EXISTING
-    ).asOutput()
+    path.useOutput {
+        with(envelopeFormat) {
+            writeEnvelope(envelope, metaFormat ?: envelopeFormat.defaultMetaFormat)
+        }
+    }
+}
-    with(format) {
-        output.writeObject(envelope)
+/**
+ * Write separate meta and data files to given directory [path]
+ */
+@DFExperimental
+fun IOPlugin.writeEnvelopeDirectory(
+    path: Path,
+    envelope: Envelope,
+    metaFormat: MetaFormatFactory = JsonMetaFormat
+) {
+    if (!Files.exists(path)) {
+        Files.createDirectories(path)
+    }
+    if (!Files.isDirectory(path)) {
+        error("Can't write envelope directory to file")
+    }
+    if (!envelope.meta.isEmpty()) {
+        writeMetaFile(path, envelope.meta, metaFormat)
+    }
+    val dataFile = path.resolve(IOPlugin.DATA_FILE_NAME)
+    dataFile.useOutput {
+        envelope.data?.read {
+            val copied = copyTo(this@useOutput)
+            if (envelope.data?.size != ULong.MAX_VALUE && copied != envelope.data?.size?.toLong()) {
+                error("The number of copied bytes does not equal data size")
+            }
+        }
    }
}
\ No newline at end of file
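A hedged round-trip sketch (not from the patch) for the new `writeEnvelopeDirectory`: meta goes into an `@`-prefixed meta file, the payload into `@data`, and `readEnvelopeFile` already understands such a directory. `someEnvelope` is a placeholder for any envelope you already have.

```kotlin
import hep.dataforge.context.Global
import hep.dataforge.io.*
import hep.dataforge.meta.DFExperimental
import java.nio.file.Files

// Sketch only: write an envelope as a two-file directory and read it back.
@DFExperimental
fun directoryRoundTrip(someEnvelope: Envelope) {
    Global.io.run {
        val dir = Files.createTempDirectory("df_envelope")
        writeEnvelopeDirectory(dir, someEnvelope, metaFormat = JsonMetaFormat)
        val restored = readEnvelopeFile(dir)
        println(restored?.meta)
    }
}
```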
diff --git a/dataforge-io/src/jvmTest/kotlin/hep/dataforge/io/FileEnvelopeTest.kt b/dataforge-io/src/jvmTest/kotlin/hep/dataforge/io/FileEnvelopeTest.kt
index 595ca619..ccd9fb60 100644
--- a/dataforge-io/src/jvmTest/kotlin/hep/dataforge/io/FileEnvelopeTest.kt
+++ b/dataforge-io/src/jvmTest/kotlin/hep/dataforge/io/FileEnvelopeTest.kt
@@ -26,11 +26,24 @@ class FileEnvelopeTest {

    @Test
    fun testFileWriteRead() {
-        val tmpPath = Files.createTempFile("dataforge_test", ".df")
-        Global.io.writeEnvelopeFile(tmpPath,envelope)
-        println(tmpPath.toUri())
-        val restored: Envelope = Global.io.readEnvelopeFile(tmpPath)!!
-        assertTrue { envelope.contentEquals(restored) }
+        Global.io.run {
+            val tmpPath = Files.createTempFile("dataforge_test", ".df")
+            writeEnvelopeFile(tmpPath, envelope)
+            println(tmpPath.toUri())
+            val restored: Envelope = readEnvelopeFile(tmpPath)!!
+            assertTrue { envelope.contentEquals(restored) }
+        }
+    }
+
+    @Test
+    fun testFileWriteReadTagless() {
+        Global.io.run {
+            val tmpPath = Files.createTempFile("dataforge_test_tagless", ".df")
+            writeEnvelopeFile(tmpPath, envelope, envelopeFormat = TaglessEnvelopeFormat)
+            println(tmpPath.toUri())
+            val restored: Envelope = readEnvelopeFile(tmpPath)!!
+            assertTrue { envelope.contentEquals(restored) }
+        }
    }

    @Test
diff --git a/dataforge-io/src/jvmTest/kotlin/hep/dataforge/io/tcp/EnvelopeServerTest.kt b/dataforge-io/src/jvmTest/kotlin/hep/dataforge/io/tcp/EnvelopeServerTest.kt
index 37c35efc..64067dec 100644
--- a/dataforge-io/src/jvmTest/kotlin/hep/dataforge/io/tcp/EnvelopeServerTest.kt
+++ b/dataforge-io/src/jvmTest/kotlin/hep/dataforge/io/tcp/EnvelopeServerTest.kt
@@ -7,9 +7,9 @@ import hep.dataforge.io.TaggedEnvelopeFormat
import hep.dataforge.io.writeBytes
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.runBlocking
-import org.junit.AfterClass
-import org.junit.BeforeClass
-import org.junit.Test
+import org.junit.jupiter.api.AfterAll
+import org.junit.jupiter.api.BeforeAll
+import kotlin.test.Test
import kotlin.test.assertEquals
import kotlin.time.ExperimentalTime

@@ -30,13 +30,13 @@ class EnvelopeServerTest {
        @JvmStatic
        val echoEnvelopeServer = EnvelopeServer(Global, 7778, EchoResponder, GlobalScope)

-        @BeforeClass
+        @BeforeAll
        @JvmStatic
        fun start() {
            echoEnvelopeServer.start()
        }

-        @AfterClass
+        @AfterAll
        @JvmStatic
        fun close() {
            echoEnvelopeServer.stop()
diff --git a/dataforge-scripting/build.gradle.kts b/dataforge-scripting/build.gradle.kts
index 757f0c33..c848c1b1 100644
--- a/dataforge-scripting/build.gradle.kts
+++ b/dataforge-scripting/build.gradle.kts
@@ -19,8 +19,6 @@ kotlin {
        }
        val jvmTest by getting {
            dependencies {
-                implementation(kotlin("test"))
-                implementation(kotlin("test-junit"))
                implementation("ch.qos.logback:logback-classic:1.2.3")
            }
        }
diff --git a/dataforge-scripting/src/jvmTest/kotlin/hep/dataforge/scripting/BuildersKtTest.kt b/dataforge-scripting/src/jvmTest/kotlin/hep/dataforge/scripting/BuildersKtTest.kt
index 5a9ba56d..6dd61105 100644
--- a/dataforge-scripting/src/jvmTest/kotlin/hep/dataforge/scripting/BuildersKtTest.kt
+++ b/dataforge-scripting/src/jvmTest/kotlin/hep/dataforge/scripting/BuildersKtTest.kt
@@ -6,7 +6,7 @@ import hep.dataforge.meta.int
import hep.dataforge.workspace.SimpleWorkspaceBuilder
import hep.dataforge.workspace.context
import hep.dataforge.workspace.target
-import org.junit.Test
+import kotlin.test.Test
import kotlin.test.assertEquals

diff --git a/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/dataUtils.kt b/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/dataUtils.kt
deleted file mode 100644
index f6d27774..00000000
--- a/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/dataUtils.kt
+++ /dev/null
@@ -1,14 +0,0 @@
-package hep.dataforge.workspace
-
-import hep.dataforge.data.Data
-import hep.dataforge.io.Envelope
-import hep.dataforge.io.IOFormat
-import hep.dataforge.io.readWith
-import kotlin.reflect.KClass
-
-/**
- * Convert an [Envelope] to a data via given format. The actual parsing is done lazily.
- */
-fun Envelope.toData(type: KClass, format: IOFormat): Data = Data(type, meta) {
-    data?.readWith(format) ?: error("Can't convert envelope without data to Data")
-}
\ No newline at end of file
diff --git a/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/envelopeData.kt b/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/envelopeData.kt
new file mode 100644
index 00000000..111bba76
--- /dev/null
+++ b/dataforge-workspace/src/commonMain/kotlin/hep/dataforge/workspace/envelopeData.kt
@@ -0,0 +1,33 @@
+package hep.dataforge.workspace
+
+import hep.dataforge.data.Data
+import hep.dataforge.data.await
+import hep.dataforge.io.*
+import kotlinx.coroutines.coroutineScope
+import kotlinx.io.core.Input
+import kotlinx.io.core.buildPacket
+import kotlin.reflect.KClass
+
+/**
+ * Convert an [Envelope] to a data via given format. The actual parsing is done lazily.
+ */
+fun Envelope.toData(type: KClass, format: IOFormat): Data = Data(type, meta) {
+    data?.readWith(format) ?: error("Can't convert envelope without data to Data")
+}
+
+suspend fun Data.toEnvelope(format: IOFormat): Envelope {
+    val obj = coroutineScope {
+        await(this)
+    }
+    val binary = object : Binary {
+        override fun read(block: Input.() -> R): R {
+            //TODO optimize away additional copy by creating inputs that reads directly from output
+            val packet = buildPacket {
+                format.run { writeObject(obj) }
+            }
+            return packet.block()
+        }
+
+    }
+    return SimpleEnvelope(meta, binary)
+}
\ No newline at end of file
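A sketch (not from the patch) of the `toEnvelope`/`toData` pair added above, written with the generic signatures (`Data<T>`, `IOFormat<T>`) that the plain-text diff strips.

```kotlin
import hep.dataforge.data.Data
import hep.dataforge.io.IOFormat
import hep.dataforge.workspace.toData
import hep.dataforge.workspace.toEnvelope

// Sketch only: Data -> Envelope -> Data with any IOFormat<T>. The forward step
// materializes the value (hence suspend); the reverse step stays lazy and only
// parses the binary when the resulting Data is awaited.
suspend fun <T : Any> roundTrip(data: Data<T>, format: IOFormat<T>): Data<T> {
    val envelope = data.toEnvelope(format)      // meta is carried over as-is
    return envelope.toData(data.type, format)   // lazy re-parse from the binary
}
```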
diff --git a/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/fileData.kt b/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/fileData.kt
index b5ecb519..2b6d5454 100644
--- a/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/fileData.kt
+++ b/dataforge-workspace/src/jvmMain/kotlin/hep/dataforge/workspace/fileData.kt
@@ -1,17 +1,27 @@
package hep.dataforge.workspace

+//import jdk.nio.zipfs.ZipFileSystemProvider
import hep.dataforge.data.*
-import hep.dataforge.io.Envelope
-import hep.dataforge.io.IOFormat
-import hep.dataforge.io.IOPlugin
-import hep.dataforge.io.readEnvelopeFile
-import hep.dataforge.meta.Meta
-import hep.dataforge.meta.get
-import hep.dataforge.meta.string
+import hep.dataforge.io.*
+import hep.dataforge.meta.*
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
+import java.nio.file.FileSystem
import java.nio.file.Files
import java.nio.file.Path
+import java.nio.file.StandardCopyOption
+import java.nio.file.spi.FileSystemProvider
import kotlin.reflect.KClass

+typealias FileFormatResolver = (Path, Meta) -> IOFormat
+
+//private val zipFSProvider = ZipFileSystemProvider()
+
+private fun newZFS(path: Path): FileSystem {
+    val fsProvider = FileSystemProvider.installedProviders().find { it.scheme == "jar" }
+        ?: error("Zip file system provider not found")
+    return fsProvider.newFileSystem(path, mapOf("create" to "true"))
+}

/**
 * Read data with supported envelope format and binary format. If envelope format is null, then read binary directly from file.
@@ -22,62 +32,152 @@
 * @param metaFile the relative file for optional meta override
 * @param metaFileFormat the meta format for override
 */
+@DFExperimental
fun IOPlugin.readDataFile(
    path: Path,
    type: KClass,
-    formatResolver: (Meta) -> IOFormat
+    formatResolver: FileFormatResolver
): Data {
    val envelope = readEnvelopeFile(path, true) ?: error("Can't read data from $path")
-    val format = formatResolver(envelope.meta)
+    val format = formatResolver(path, envelope.meta)
    return envelope.toData(type, format)
}

-//TODO wants multi-receiver
+@DFExperimental
+inline fun IOPlugin.readDataFile(path: Path): Data =
+    readDataFile(path, T::class) { _, _ ->
+        resolveIOFormat() ?: error("Can't resolve IO format for ${T::class}")
+    }
+
+/**
+ * Add file/directory-based data tree item
+ */
+@DFExperimental
fun DataTreeBuilder.file(
    plugin: IOPlugin,
    path: Path,
-    formatResolver: (Meta) -> IOFormat
+    formatResolver: FileFormatResolver
) {
-    plugin.run {
-        val data = readDataFile(path, type, formatResolver)
-        val name = data.meta[Envelope.ENVELOPE_NAME_KEY].string
-            ?: path.fileName.toString().replace(".df", "")
-        datum(name, data)
+    //If path is a single file or a special directory, read it as single datum
+    if (!Files.isDirectory(path) || Files.list(path).allMatch { it.fileName.toString().startsWith("@") }) {
+        plugin.run {
+            val data = readDataFile(path, type, formatResolver)
+            val name = data.meta[Envelope.ENVELOPE_NAME_KEY].string
+                ?: path.fileName.toString().replace(".df", "")
+            datum(name, data)
+        }
+    } else {
+        //otherwise, read as directory
+        plugin.run {
+            val data = readDataDirectory(path, type, formatResolver)
+            val name = data.meta[Envelope.ENVELOPE_NAME_KEY].string
+                ?: path.fileName.toString().replace(".df", "")
+            node(name, data)
+        }
    }
}

/**
- * Read the directory as a data node
+ * Read the directory as a data node. If [path] is a zip archive, read it as directory
 */
+@DFExperimental
fun IOPlugin.readDataDirectory(
    path: Path,
    type: KClass,
-    formatResolver: (Meta) -> IOFormat
+    formatResolver: FileFormatResolver
): DataNode {
-    if (!Files.isDirectory(path)) error("Provided path $this is not a directory")
+    //read zipped data node
+    if (path.fileName != null && path.fileName.toString().endsWith(".zip")) {
+        //Using explicit Zip file system to avoid bizarre compatibility bugs
+        val fs = newZFS(path)
+        return readDataDirectory(fs.rootDirectories.first(), type, formatResolver)
+    }
+    if (!Files.isDirectory(path)) error("Provided path $path is not a directory")
    return DataNode(type) {
        Files.list(path).forEach { path ->
-            if (!path.fileName.toString().endsWith(".meta")) {
+            val fileName = path.fileName.toString()
+            if (fileName.startsWith(IOPlugin.META_FILE_NAME)) {
+                meta(readMetaFile(path))
+            } else if (!fileName.startsWith("@")) {
                file(this@readDataDirectory, path, formatResolver)
            }
        }
    }
}

-fun DataTreeBuilder.directory(
-    plugin: IOPlugin,
+@DFExperimental
+inline fun IOPlugin.readDataDirectory(path: Path): DataNode =
+    readDataDirectory(path, T::class) { _, _ ->
+        resolveIOFormat() ?: error("Can't resolve IO format for ${T::class}")
+    }
+
+/**
+ * Write data tree to existing directory or create a new one using default [java.nio.file.FileSystem] provider
+ */
+@DFExperimental
+suspend fun IOPlugin.writeDataDirectory(
    path: Path,
-    formatResolver: (Meta) -> IOFormat
+    node: DataNode,
+    format: IOFormat,
+    envelopeFormat: EnvelopeFormat? = null,
+    metaFormat: MetaFormatFactory? = null
) {
-    plugin.run {
-        val data = readDataDirectory(path, type, formatResolver)
-        val name = data.meta[Envelope.ENVELOPE_NAME_KEY].string
-            ?: path.fileName.toString().replace(".df", "")
-        node(name, data)
+    withContext(Dispatchers.IO) {
+        if (!Files.exists(path)) {
+            Files.createDirectories(path)
+        } else if (!Files.isDirectory(path)) {
+            error("Can't write a node into file")
+        }
+        node.items.forEach { (token, item) ->
+            val childPath = path.resolve(token.toString())
+            when (item) {
+                is DataItem.Node -> {
+                    writeDataDirectory(childPath, item.node, format, envelopeFormat)
+                }
+                is DataItem.Leaf -> {
+                    val envelope = item.data.toEnvelope(format)
+                    if (envelopeFormat != null) {
+                        writeEnvelopeFile(childPath, envelope, envelopeFormat, metaFormat)
+                    } else {
+                        writeEnvelopeDirectory(childPath, envelope, metaFormat ?: JsonMetaFormat)
+                    }
+                }
+            }
+        }
+        if (!node.meta.isEmpty()) {
+            writeMetaFile(path, node.meta, metaFormat ?: JsonMetaFormat)
+        }
    }
}
-
-
-
+suspend fun IOPlugin.writeZip(
+    path: Path,
+    node: DataNode,
+    format: IOFormat,
+    envelopeFormat: EnvelopeFormat? = null,
+    metaFormat: MetaFormatFactory? = null
+) {
+    withContext(Dispatchers.IO) {
+        val actualFile = if (path.toString().endsWith(".zip")) {
+            path
+        } else {
+            path.resolveSibling(path.fileName.toString() + ".zip")
+        }
+        if (Files.exists(actualFile) && Files.size(path) == 0.toLong()) {
+            Files.delete(path)
+        }
+        //Files.createFile(actualFile)
+        newZFS(actualFile).use { zipfs ->
+            val internalTargetPath = zipfs.getPath("/")
+            Files.createDirectories(internalTargetPath)
+            val tmp = Files.createTempDirectory("df_zip")
+            writeDataDirectory(tmp, node, format, envelopeFormat, metaFormat)
+            Files.list(tmp).forEach { sourcePath ->
+                val targetPath = sourcePath.fileName.toString()
+                val internalTargetPath = internalTargetPath.resolve(targetPath)
+                Files.copy(sourcePath, internalTargetPath, StandardCopyOption.REPLACE_EXISTING)
+            }
+        }
+    }
+}
diff --git a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/DataPropagationTest.kt b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/DataPropagationTest.kt
index c449ffc3..083d3f57 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/DataPropagationTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/DataPropagationTest.kt
@@ -6,8 +6,8 @@ import hep.dataforge.context.PluginTag
import hep.dataforge.data.*
import hep.dataforge.meta.Meta
import hep.dataforge.names.asName
-import org.junit.Test
import kotlin.reflect.KClass
+import kotlin.test.Test
import kotlin.test.assertEquals

diff --git a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/FileDataTest.kt b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/FileDataTest.kt
new file mode 100644
index 00000000..b73a4d59
--- /dev/null
+++ b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/FileDataTest.kt
@@ -0,0 +1,72 @@
+package hep.dataforge.workspace
+
+import hep.dataforge.context.Global
+import hep.dataforge.data.*
+import hep.dataforge.io.IOFormat
+import hep.dataforge.io.io
+import hep.dataforge.meta.DFExperimental
+import kotlinx.coroutines.runBlocking
+import kotlinx.io.core.Input
+import kotlinx.io.core.Output
+import kotlinx.io.core.readText
+import kotlinx.io.core.writeText
+import java.nio.file.Files
+import kotlin.test.Ignore
+import kotlin.test.Test
+import kotlin.test.assertEquals
+
+class FileDataTest {
+    val dataNode = DataNode {
+        node("dir") {
+            static("a", "Some string") {
+                "content" put "Some string"
+            }
+        }
+        static("b", "root data")
+        meta {
+            "content" put "This is root meta node"
+        }
+    }
+
+    object StringIOFormat : IOFormat {
+        override fun Output.writeObject(obj: String) {
+            writeText(obj)
+        }
+
+        override fun Input.readObject(): String {
+            return readText()
+        }
+
+    }
+
+    @Test
+    @DFExperimental
+    fun testDataWriteRead() {
+        Global.io.run {
+            val dir = Files.createTempDirectory("df_data_node")
+            runBlocking {
+                writeDataDirectory(dir, dataNode, StringIOFormat)
+            }
+            println(dir.toUri().toString())
+            val reconstructed = readDataDirectory(dir, String::class) { _, _ -> StringIOFormat }
+            assertEquals(dataNode["dir.a"]?.meta, reconstructed["dir.a"]?.meta)
+            assertEquals(dataNode["b"]?.data?.get(), reconstructed["b"]?.data?.get())
+        }
+    }
+
+
+    @Test
+    @Ignore
+    fun testZipWriteRead() {
+        Global.io.run {
+            val zip = Files.createTempFile("df_data_node", ".zip")
+            runBlocking {
+                writeZip(zip, dataNode, StringIOFormat)
+            }
+            println(zip.toUri().toString())
+            val reconstructed = readDataDirectory(zip, String::class) { _, _ -> StringIOFormat }
+            assertEquals(dataNode["dir.a"]?.meta, reconstructed["dir.a"]?.meta)
+            assertEquals(dataNode["b"]?.data?.get(), reconstructed["b"]?.data?.get())
+        }
+    }
+}
\ No newline at end of file
diff --git a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/SimpleWorkspaceTest.kt b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/SimpleWorkspaceTest.kt
index 3a40e783..a4df6a4b 100644
--- a/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/SimpleWorkspaceTest.kt
+++ b/dataforge-workspace/src/jvmTest/kotlin/hep/dataforge/workspace/SimpleWorkspaceTest.kt
@@ -7,7 +7,7 @@ import hep.dataforge.meta.builder
import hep.dataforge.meta.get
import hep.dataforge.meta.int
import hep.dataforge.names.plus
-import org.junit.Test
+import kotlin.test.Test
import kotlin.test.assertEquals
import kotlin.test.assertTrue

diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
index 5c2d1cf0..cc4fdc29 100644
Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 7c4388a9..6ce793f2 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.2-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.0-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
diff --git a/gradlew b/gradlew
old mode 100644
new mode 100755
index 83f2acfd..2fe81a7d
--- a/gradlew
+++ b/gradlew
@@ -154,19 +154,19 @@ if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
        else
            eval `echo args$i`="\"$arg\""
        fi
-        i=$((i+1))
+        i=`expr $i + 1`
    done
    case $i in
-        (0) set -- ;;
-        (1) set -- "$args0" ;;
-        (2) set -- "$args0" "$args1" ;;
-        (3) set -- "$args0" "$args1" "$args2" ;;
-        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
-        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
-        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
-        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
-        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
-        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+        0) set -- ;;
+        1) set -- "$args0" ;;
+        2) set -- "$args0" "$args1" ;;
+        3) set -- "$args0" "$args1" "$args2" ;;
+        4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+        5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+        6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+        7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+        8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+        9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

@@ -175,14 +175,9 @@ save () {
    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
    echo " "
}
-APP_ARGS=$(save "$@")
+APP_ARGS=`save "$@"`

# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

-# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
-if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
-    cd "$(dirname "$0")"
-fi
-
exec "$JAVACMD" "$@"