forked from NPM/numass-framework

Update dependencies and API

parent 91c732a09e
commit 4e590d3e14

build.gradle (deleted, 65 lines)
@@ -1,65 +0,0 @@
-buildscript {
-    ext.kotlin_version = "1.4.30"
-    repositories {
-        mavenCentral()
-        jcenter()
-    }
-
-    dependencies {
-        classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
-    }
-}
-
-plugins{
-    id 'org.openjfx.javafxplugin' version '0.0.9' apply false
-}
-
-allprojects {
-    apply plugin: 'java'
-    apply plugin: "org.jetbrains.kotlin.jvm"
-
-    group = 'inr.numass'
-    version = '1.0.0'
-
-    [compileJava, compileTestJava]*.options*.encoding = 'UTF-8'
-
-    repositories {
-        mavenCentral()
-        jcenter()
-    }
-
-    dependencies {
-        compile "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlin_version"
-        compile "org.jetbrains.kotlin:kotlin-reflect:$kotlin_version"
-        compile 'org.jetbrains:annotations:16.0.2'
-        testImplementation group: 'junit', name: 'junit', version: '4.+'
-
-        //Spock dependencies. To be removed later
-        testCompile 'org.codehaus.groovy:groovy-all:2.5.+'
-        testCompile "org.spockframework:spock-core:1.2-groovy-2.5"
-    }
-
-    compileKotlin {
-        kotlinOptions {
-            jvmTarget = "1.8"
-            javaParameters = true
-            freeCompilerArgs += [
-                    '-Xjvm-default=enable',
-                    "-progressive",
-                    "-Xuse-experimental=kotlin.Experimental"
-            ]
-        }
-    }
-
-    compileTestKotlin {
-        kotlinOptions {
-            jvmTarget = "1.8"
-            javaParameters = true
-            freeCompilerArgs += [
-                    '-Xjvm-default=enable',
-                    "-progressive",
-                    "-Xuse-experimental=kotlin.Experimental"
-            ]
-        }
-    }
-}
build.gradle.kts (new file, 62 lines)

@@ -0,0 +1,62 @@
+plugins {
+    kotlin("jvm") version "1.5.31"
+    id("org.openjfx.javafxplugin") version "0.0.9" apply false
+    id("com.github.johnrengelman.shadow") version "7.1.0" apply false
+}
+
+allprojects {
+    apply(plugin = "org.jetbrains.kotlin.jvm")
+
+    group = "inr.numass"
+    version = "1.1.0"
+
+    repositories {
+        mavenCentral()
+        jcenter()
+    }
+
+    dependencies {
+        api(kotlin("reflect"))
+        api("org.jetbrains:annotations:23.0.0")
+        testImplementation("junit:junit:4.13.2")
+
+        //Spock dependencies. To be removed later
+        // https://mvnrepository.com/artifact/org.spockframework/spock-core
+        testImplementation("org.spockframework:spock-core:2.0-groovy-3.0")
+    }
+
+    tasks {
+        compileJava {
+            options.encoding = "UTF-8"
+        }
+
+        compileTestJava {
+            options.encoding = "UTF-8"
+        }
+
+        compileKotlin {
+            kotlinOptions {
+                jvmTarget = "16"
+                javaParameters = true
+                freeCompilerArgs = freeCompilerArgs + listOf(
+                    "-Xjvm-default=all",
+                    "-progressive",
+                    "-Xuse-experimental=kotlin.Experimental"
+                )
+            }
+        }
+
+        compileTestKotlin {
+            kotlinOptions {
+                jvmTarget = "16"
+                javaParameters = true
+                freeCompilerArgs = freeCompilerArgs + listOf(
+                    "-Xjvm-default=all",
+                    "-progressive",
+                    "-Xuse-experimental=kotlin.Experimental"
+                )
+            }
+        }
+    }
+}
@@ -3,8 +3,8 @@ description = 'dataforge-control'
 
 dependencies {
     // Adding dependencies here will add the dependencies to each subproject.
-    compile project(':dataforge-core')
+    api project(':dataforge-core')
     //TODO consider removing storage dependency
-    compile project(':dataforge-storage')
+    api project(':dataforge-storage')
-    compile 'org.scream3r:jssc:2.8.0'
+    api 'org.scream3r:jssc:2.8.0'
 }
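Note: the compile configuration used in these scripts was removed in Gradle 7, which is why the dependency declarations move to api (and test dependencies to testImplementation). A minimal sketch of the distinction, written for a hypothetical library module and not taken from this commit:

    dependencies {
        // api: part of the module's ABI, visible on consumers' compile classpath
        api(project(":dataforge-core"))
        // implementation: internal only, hidden from consumers' compile classpath
        implementation("com.github.cliftonlabs:json-simple:3.0.2")
        // testImplementation: only on the test compile/runtime classpath
        testImplementation("junit:junit:4.13.2")
    }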
@@ -75,7 +75,6 @@ interface Device : AutoConnectible, Metoid, ContextAware, Named, Stateful {
      */
     val type: String
 
-    @JvmDefault
     override val logger: Logger
         get() = optConnection(LOGGER_ROLE, Logger::class.java).orElse(context.logger)
 
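The @JvmDefault removals in this and the following hunks pair with the compiler-flag change from -Xjvm-default=enable to -Xjvm-default=all in build.gradle.kts: in the "all" mode every non-abstract Kotlin interface member is compiled to a JVM default method, so the per-member annotation is no longer needed. A minimal sketch using a hypothetical interface rather than the project's own types:

    import org.slf4j.Logger
    import org.slf4j.LoggerFactory

    interface Logged {
        val name: String

        // compiled as a JVM default method when the module is built with -Xjvm-default=all;
        // previously this member had to carry @JvmDefault under -Xjvm-default=enable
        val logger: Logger
            get() = LoggerFactory.getLogger(name)
    }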
@@ -1,8 +1,7 @@
 description = 'dataforge-core'
 
 dependencies {
-    compile 'ch.qos.logback:logback-classic:1.2.3'
+    api 'ch.qos.logback:logback-classic:1.2.3'
-    compile 'org.jetbrains.kotlinx:kotlinx-coroutines-jdk8:1.5.0'
+    api 'org.jetbrains.kotlinx:kotlinx-coroutines-jdk8:1.5.0'
-    compile group: 'org.jetbrains.kotlin', name: 'kotlin-reflect', version: kotlin_version
-    compile group: 'javax.cache', name: 'cache-api', version: '1.1.0'
+    api group: 'javax.cache', name: 'cache-api', version: '1.1.0'
 }
@@ -17,6 +17,6 @@
 description = 'json meta type for dataforge'
 
 dependencies {
-    compile project(":dataforge-core")
+    api project(":dataforge-core")
-    compile 'com.github.cliftonlabs:json-simple:3.0.2'
+    api 'com.github.cliftonlabs:json-simple:3.0.2'
 }
@@ -22,6 +22,7 @@
 package hep.dataforge.io
 
 import hep.dataforge.io.envelopes.MetaType
+import java.util.*
 
 class JSONMetaType : MetaType {
     override val codes: List<Short> = listOf(0x4a53, 1)//JS
@@ -32,7 +33,7 @@ class JSONMetaType : MetaType {
 
     override val writer: MetaStreamWriter = JSONMetaWriter
 
-    override val fileNameFilter: (String) -> Boolean = { it.toLowerCase().endsWith(".json") }
+    override val fileNameFilter: (String) -> Boolean = { it.lowercase(Locale.getDefault()).endsWith(".json") }
 }
 
 val jsonMetaType = JSONMetaType()
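String.toLowerCase() was deprecated in Kotlin 1.5 in favour of lowercase(), which takes an explicit Locale; that is also why these files gain an import java.util.* line. A minimal sketch of the replacement, with a hypothetical helper name not taken from the commit:

    import java.util.Locale

    // locale-explicit replacement for the deprecated fileName.toLowerCase().endsWith(".json")
    fun isJsonFile(fileName: String): Boolean =
        fileName.lowercase(Locale.getDefault()).endsWith(".json")

    fun main() {
        println(isJsonFile("Config.JSON")) // true
    }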
@@ -60,7 +60,7 @@ public class Template implements Metoid, UnaryOperator<Meta> {
     }
 
     /**
-     * Compile template using given meta and value providers.
+     * api template using given meta and value providers.
      *
      * @param valueProvider
      * @param metaProvider
@@ -80,7 +80,7 @@ public class Template implements Metoid, UnaryOperator<Meta> {
                 parent.replaceChildNode(node, def.getMeta(includePath));
             } else {
                 LoggerFactory.getLogger(MetaUtils.class)
-                        .warn("Can't compile template meta node with name {} not provided", includePath);
+                        .warn("Can't api template meta node with name {} not provided", includePath);
             }
         }
     });
@@ -3,12 +3,6 @@ package hep.dataforge
 import hep.dataforge.meta.Meta
 import hep.dataforge.meta.MetaNode
 import hep.dataforge.names.Name
-import java.util.stream.Collectors
-import java.util.stream.Stream
-
-fun <T> Stream<T>.toList(): List<T> {
-    return collect(Collectors.toList())
-}
 
 fun String?.asName(): Name {
     return Name.of(this)
@@ -113,7 +113,7 @@ class CachePlugin(meta: Meta) : BasicPlugin(meta) {
 
     private fun evalData() {
         data.goal.run()
-        (data.goal as Goal<V>).onComplete { res, err ->
+        data.goal.onComplete { res, err ->
             if (err != null) {
                 result.completeExceptionally(err)
             } else {
@@ -36,7 +36,6 @@ interface ContextAware {
      */
     val context: Context
 
-    @JvmDefault
     val logger: Logger
         get() = if (this is Named) {
             LoggerFactory.getLogger(context.name + "." + (this as Named).name)
@@ -23,7 +23,6 @@ package hep.dataforge.context
 
 import hep.dataforge.meta.Meta
 import hep.dataforge.nullable
-import hep.dataforge.toList
 import hep.dataforge.utils.MetaFactory
 import java.util.*
 import java.util.stream.Stream
@@ -16,7 +16,6 @@ import hep.dataforge.meta.buildMeta
 import hep.dataforge.nullable
 import hep.dataforge.providers.Provider
 import hep.dataforge.providers.Provides
-import hep.dataforge.toList
 import java.util.concurrent.Executor
 import java.util.function.BiConsumer
 import java.util.function.Consumer
@@ -28,7 +28,6 @@ import hep.dataforge.io.MetaFileReader
 import hep.dataforge.io.envelopes.EnvelopeReader
 import hep.dataforge.meta.Laminate
 import hep.dataforge.meta.Meta
-import hep.dataforge.toList
 import java.io.IOException
 import java.nio.file.Files
 import java.nio.file.Path
@@ -32,7 +32,6 @@ import hep.dataforge.description.NodeDefs
 import hep.dataforge.meta.Laminate
 import hep.dataforge.meta.Meta
 import hep.dataforge.meta.MetaBuilder
-import hep.dataforge.toList
 import hep.dataforge.utils.NamingUtils.wildcardMatch
 import java.io.IOException
 import java.nio.file.Files
@@ -60,7 +60,6 @@ interface Binary : Serializable {
      */
     val size: Long
 
-    @JvmDefault
     fun stream(offset: Long): InputStream = stream.also { it.skip(offset) }
 
     /**
@@ -71,7 +70,7 @@ interface Binary : Serializable {
      * @return
      * @throws IOException
      */
-    @JvmDefault
     fun read(offset: Int, size: Int): ByteBuffer {
         return buffer.run {
             position(offset)
@@ -84,7 +83,7 @@ interface Binary : Serializable {
     /**
      *
      */
-    @JvmDefault
     fun read(start: Int): ByteBuffer {
         return read(start, (size - start).toInt())
     }
@@ -32,7 +32,7 @@ interface Described {
      *
      * @return
      */
-    @JvmDefault
     val descriptor: NodeDescriptor
         get() = Descriptors.forJavaType("node", this.javaClass)
 }
@@ -57,7 +57,7 @@ interface OutputManager : Plugin {
     /**
      *
      */
-    @JvmDefault
     operator fun get(stage: String, name: String, type: String? = null): Output {
         return get {
             OUTPUT_NAME_KEY to name
@@ -67,7 +67,7 @@ interface OutputManager : Plugin {
     }
 
 
-    @JvmDefault
     operator fun get(name: String): Output {
         return get {
             OUTPUT_NAME_KEY to name
@@ -21,6 +21,7 @@ import hep.dataforge.io.MetaStreamWriter
 import hep.dataforge.meta.Meta
 import hep.dataforge.meta.MetaUtils
 import java.io.*
+import java.util.*
 
 
 val binaryMetaType = BinaryMetaType()
@@ -35,7 +36,7 @@ class BinaryMetaType : MetaType {
 
     override val name: String = "binary"
 
-    override val fileNameFilter: (String)->Boolean = { str -> str.toLowerCase().endsWith(".meta") }
+    override val fileNameFilter: (String)->Boolean = { str -> str.lowercase(Locale.getDefault()).endsWith(".meta") }
 
 
     override val reader: MetaStreamReader = MetaStreamReader { stream, length ->
@@ -55,8 +56,8 @@ class BinaryMetaType : MetaType {
     @Throws(IOException::class)
     override fun write(stream: OutputStream, meta: Meta) {
         MetaUtils.writeMeta(ObjectOutputStream(stream), meta)
-        stream.write('\r'.toInt())
+        stream.write('\r'.code)
-        stream.write('\n'.toInt())
+        stream.write('\n'.code)
     }
 }
 
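Kotlin 1.5 also deprecated Char.toInt() and Char.toByte(); the Char.code property is the replacement used here and in the later serialization hunks for writing separator and tag bytes. A minimal sketch of the equivalence:

    fun main() {
        val cr = '\r'
        val asInt: Int = cr.code            // 13, replaces cr.toInt()
        val asByte: Byte = cr.code.toByte() // 13, replaces cr.toByte()
        println("$asInt $asByte")
    }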
@@ -56,7 +56,7 @@ open class DefaultEnvelopeType : EnvelopeType {
         /**
          * The set of symbols that separates tag from metadata and data
          */
-        val SEPARATOR = byteArrayOf('\r'.toByte(), '\n'.toByte())
+        val SEPARATOR = byteArrayOf('\r'.code.toByte(), '\n'.code.toByte())
     }
 
 }
@@ -45,7 +45,6 @@ interface EnvelopeReader {
     /**
      * Read the envelope from channel
      */
-    @JvmDefault
     fun read(channel: ReadableByteChannel): Envelope {
         return read(Channels.newInputStream(channel))
     }
@@ -53,7 +52,6 @@ interface EnvelopeReader {
     /**
      * Read the envelope from buffer (could produce lazy envelope)
      */
-    @JvmDefault
     fun read(buffer: ByteBuffer): Envelope {
         return read(BufferChannel(buffer))//read(ByteArrayInputStream(buffer.array()))
     }
@@ -61,7 +59,6 @@ interface EnvelopeReader {
     /**
      * Read the envelope from NIO file (could produce lazy envelope)
      */
-    @JvmDefault
     fun read(file: Path): Envelope {
         return Files.newByteChannel(file, READ).use { read(it) }
     }
@@ -33,11 +33,9 @@ interface EnvelopeType {
 
     val name: String
 
-    val reader: EnvelopeReader
-        get() = getReader(emptyMap())
+    val reader: EnvelopeReader get() = getReader(emptyMap())
 
-    val writer: EnvelopeWriter
-        get() = getWriter(emptyMap())
+    val writer: EnvelopeWriter get() = getWriter(emptyMap())
 
     fun description(): String
 
@@ -82,7 +80,8 @@ interface EnvelopeType {
                 }
             }
         } catch (ex: Exception) {
-            LoggerFactory.getLogger(EnvelopeType::class.java).warn("Could not infer envelope type of file {} due to exception: {}", path, ex)
+            LoggerFactory.getLogger(EnvelopeType::class.java)
+                .warn("Could not infer envelope type of file {} due to exception: {}", path, ex)
             null
         }
 
@@ -9,7 +9,6 @@ import hep.dataforge.context.Global
 import hep.dataforge.io.MetaStreamReader
 import hep.dataforge.io.MetaStreamWriter
 import hep.dataforge.io.envelopes.Envelope.Companion.META_TYPE_PROPERTY
-import hep.dataforge.toList
 
 /**
  *
@@ -18,7 +18,6 @@ package hep.dataforge.io.envelopes
 import hep.dataforge.data.binary.Binary
 import hep.dataforge.meta.Meta
 
-import java.io.IOException
 import java.io.ObjectInputStream
 import java.io.ObjectOutputStream
 
@@ -35,14 +34,12 @@ open class SimpleEnvelope(meta: Meta = Meta.empty(), data: Binary = Binary.EMPTY
     override var data: Binary = data
         protected set
 
-    @Throws(IOException::class)
-    private fun writeObject(out: ObjectOutputStream) {
-        DefaultEnvelopeWriter(DefaultEnvelopeType.INSTANCE, binaryMetaType).write(out, this)
+    private fun writeObject(output: ObjectOutputStream) {
+        DefaultEnvelopeWriter(DefaultEnvelopeType.INSTANCE, binaryMetaType).write(output, this)
     }
 
-    @Throws(IOException::class, ClassNotFoundException::class)
-    private fun readObject(`in`: ObjectInputStream) {
-        val envelope = DefaultEnvelopeReader.INSTANCE.read(`in`)
+    private fun readObject(input: ObjectInputStream) {
+        val envelope = DefaultEnvelopeReader.INSTANCE.read(input)
 
         this.meta = envelope.meta
         this.data = envelope.data
@@ -24,7 +24,6 @@ import java.nio.ByteBuffer
 import java.nio.channels.Channels
 import java.nio.channels.ReadableByteChannel
 import java.text.ParseException
-import java.util.*
 import java.util.regex.Pattern
 
 /**
@@ -40,13 +39,9 @@ class TaglessEnvelopeType : EnvelopeType {
         return "Tagless envelope. Text only. By default uses XML meta with utf encoding and data end auto-detection."
     }
 
-    override fun getReader(properties: Map<String, String>): EnvelopeReader {
-        return TaglessReader(properties)
-    }
+    override fun getReader(properties: Map<String, String>): EnvelopeReader = TaglessReader(properties)
 
-    override fun getWriter(properties: Map<String, String>): EnvelopeWriter {
-        return TaglessWriter(properties)
-    }
+    override fun getWriter(properties: Map<String, String>): EnvelopeWriter = TaglessWriter(properties)
 
     class TaglessWriter(var properties: Map<String, String> = emptyMap()) : EnvelopeWriter {
 
@@ -88,12 +83,8 @@ class TaglessEnvelopeType : EnvelopeType {
 
     class TaglessReader(private val override: Map<String, String>) : EnvelopeReader {
 
-        private val BUFFER_SIZE = 1024
-
         @Throws(IOException::class)
-        override fun read(stream: InputStream): Envelope {
-            return read(Channels.newChannel(stream))
-        }
+        override fun read(stream: InputStream): Envelope = read(Channels.newChannel(stream))
 
         override fun read(channel: ReadableByteChannel): Envelope {
             val properties = HashMap(override)
@@ -106,36 +97,38 @@ class TaglessEnvelopeType : EnvelopeType {
         /**
         * Read lines using provided channel and buffer. Buffer state is changed by this operation
         */
-        private fun readLines(channel: ReadableByteChannel, buffer: ByteBuffer): Sequence<String> {
-            return sequence {
+        private fun readLines(channel: ReadableByteChannel, buffer: ByteBuffer): Sequence<String> = sequence {
            val builder = ByteArrayOutputStream()
            while (true) {
                if (!buffer.hasRemaining()) {
                    if (!channel.isOpen) {
                        break
                    }
                    buffer.flip()
                    val count = channel.read(buffer)
                    buffer.flip()
                    if (count < BUFFER_SIZE) {
                        channel.close()
                    }
                }
                val b = buffer.get()
                builder.write(b.toInt())
-               if (b == '\n'.toByte()) {
+               if (b == '\n'.code.toByte()) {
                    yield(String(builder.toByteArray(), Charsets.UTF_8))
                    builder.reset()
                }
            }
-           }
        }
 
        @Throws(IOException::class)
-       private fun readMeta(channel: ReadableByteChannel, buffer: ByteBuffer, properties: MutableMap<String, String>): Meta {
+       private fun readMeta(
+           channel: ReadableByteChannel,
+           buffer: ByteBuffer,
+           properties: MutableMap<String, String>,
+       ): Meta {
            val sb = StringBuilder()
            val metaEnd = properties.getOrDefault(DATA_START_PROPERTY, DEFAULT_DATA_START)
-           readLines(channel, buffer).takeWhile { it.trim { it <= ' ' } != metaEnd }.forEach { line ->
+           readLines(channel, buffer).takeWhile { it.trim { char -> char <= ' ' } != metaEnd }.forEach { line ->
                if (line.startsWith("#?")) {
                    readProperty(line.trim(), properties)
                } else if (line.isEmpty() || line.startsWith("#~")) {
@@ -161,13 +154,19 @@ class TaglessEnvelopeType : EnvelopeType {
 
 
        @Throws(IOException::class)
-       private fun readData(channel: ReadableByteChannel, buffer: ByteBuffer, properties: Map<String, String>): ByteBuffer {
+       private fun readData(
+           channel: ReadableByteChannel,
+           buffer: ByteBuffer,
+           properties: Map<String, String>,
+       ): ByteBuffer {
            val array = ByteArray(buffer.remaining());
            buffer.get(array)
            if (properties.containsKey(DATA_LENGTH_PROPERTY)) {
                val result = ByteBuffer.allocate(Integer.parseInt(properties[DATA_LENGTH_PROPERTY]))
                result.put(array)//TODO fix it to not use direct array access
-               channel.read(result)
+               if(result.limit() < result.capacity()) {
+                   channel.read(result)
+               }
                return result
            } else {
                val baos = ByteArrayOutputStream()
@@ -209,7 +208,7 @@ class TaglessEnvelopeType : EnvelopeType {
        const val DEFAULT_META_START = "#~META~#"
        const val DATA_START_PROPERTY = "dataSeparator"
        const val DEFAULT_DATA_START = "#~DATA~#"
+       private val BUFFER_SIZE = 1024
        val INSTANCE = TaglessEnvelopeType()
    }
 
@@ -25,6 +25,7 @@ import hep.dataforge.io.MetaStreamReader
 import hep.dataforge.io.MetaStreamWriter
 import hep.dataforge.io.XMLMetaReader
 import hep.dataforge.io.XMLMetaWriter
+import java.util.*
 
 val xmlMetaType = XMLMetaType()
 
@@ -38,7 +39,7 @@ class XMLMetaType : MetaType {
 
     override val writer: MetaStreamWriter = XMLMetaWriter()
 
-    override val fileNameFilter: (String) -> Boolean = { str -> str.toLowerCase().endsWith(".xml") }
+    override val fileNameFilter: (String) -> Boolean = { str -> str.lowercase(Locale.getDefault()).endsWith(".xml") }
 
     companion object {
         const val XML_META_TYPE = "XML"
@@ -61,7 +61,7 @@ interface TextOutput : Output {
         render("", meta)
     }
 
-    @JvmDefault
     fun renderText(text: String, color: Color) {
         renderText(text, TextColor(color))
     }
@@ -17,7 +17,6 @@ package hep.dataforge.names
 
 import java.util.stream.Stream
 import java.util.stream.StreamSupport
-import kotlin.streams.toList
 
 /**
  *
@@ -129,7 +128,7 @@ interface Name : Comparable<Name> {
      * @param name
      * @return
      */
-    @JvmDefault
     operator fun plus(name: Name): Name {
         return join(this, name)
     }
@@ -140,14 +139,14 @@ interface Name : Comparable<Name> {
      * @param name
      * @return
      */
-    @JvmDefault
     operator fun plus(name: String): Name {
         return join(this, ofSingle(name))
     }
 
     fun asArray(): Array<String>
 
-    @JvmDefault
     fun equals(name: String): Boolean {
         return this.toString() == name
     }
 
@@ -79,7 +79,7 @@ sealed class State<T : Any>(
 
     init {
         if (def != null) {
-            channel.offer(def)
+            channel.trySend(def).isSuccess
             ref.set(def)
             valid = true
         }
@@ -91,7 +91,7 @@ sealed class State<T : Any>(
     private fun updateValue(value: T) {
         ref.set(value)
         //TODO evict on full
-        channel.offer(value)
+        channel.trySend(value).isSuccess
         valid = true
         logger.debug("State {} changed to {}", name, value)
     }
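Channel.offer was deprecated in kotlinx.coroutines 1.5 because it throws if the channel has been closed; trySend returns a ChannelResult instead, and .isSuccess keeps the old boolean-style result. A minimal sketch of the replacement, using a hypothetical helper and assuming a buffered channel:

    import kotlinx.coroutines.channels.Channel

    fun <T : Any> pushState(channel: Channel<T>, value: T): Boolean =
        // non-suspending; reports failure instead of throwing when the channel is closed or full
        channel.trySend(value).isSuccess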
@@ -33,7 +33,7 @@ interface Column : Named, Iterable<Value>, Serializable {
 
     val format: ColumnFormat
 
-    @JvmDefault
     override val name: String
         get() = format.name
 
@@ -62,7 +62,7 @@ interface Column : Named, Iterable<Value>, Serializable {
      * Get the values as a stream
      * @return
      */
-    @JvmDefault
     fun stream(): Stream<Value> {
         return StreamSupport.stream(spliterator(), false)
     }
@@ -4,7 +4,6 @@ import hep.dataforge.Named
 import hep.dataforge.meta.Meta
 import hep.dataforge.meta.MetaBuilder
 import hep.dataforge.meta.SimpleMetaMorph
-import hep.dataforge.toList
 import hep.dataforge.values.Value
 import hep.dataforge.values.ValueType
 import java.util.*
@@ -69,9 +68,9 @@ class ColumnFormat(meta: Meta) : SimpleMetaMorph(meta), Named {
         */
        fun build(name: String, type: ValueType, vararg tags: String): ColumnFormat {
            return ColumnFormat(MetaBuilder("column")
                    .putValue("name", name)
                    .putValue("type", type)
                    .putValue(TAG_KEY, Stream.of(*tags).toList())
            )
        }
 
@@ -18,7 +18,6 @@ package hep.dataforge.tables
 
 import hep.dataforge.exceptions.NamingException
 import hep.dataforge.nullable
-import hep.dataforge.toList
 import hep.dataforge.values.*
 import java.util.function.Predicate
 import java.util.stream.Stream
@@ -34,11 +33,11 @@ object Tables {
    @JvmStatic
    fun sort(table: Table, name: String, ascending: Boolean): Table {
        return sort(
            table,
            Comparator { o1: Values, o2: Values ->
                val signum = if (ascending) +1 else -1
                o1.getValue(name).compareTo(o2.getValue(name)) * signum
            }
        )
    }
 
@@ -132,7 +131,8 @@ fun Table.addColumn(format: ColumnFormat, transform: Values.() -> Any): Table {
     return ColumnTable.copy(this).buildColumn(format, transform)
 }
 
-fun Table.addColumn(name: String, type: ValueType, transform: Values.() -> Any): Table = addColumn(ColumnFormat.build(name, type), transform)
+fun Table.addColumn(name: String, type: ValueType, transform: Values.() -> Any): Table =
+    addColumn(ColumnFormat.build(name, type), transform)
 
 fun Table.replaceColumn(name: String, transform: Values.() -> Any): Table {
     return ColumnTable.copy(this).replaceColumn(name, transform)
@@ -149,19 +149,17 @@ fun Table.sort(comparator: Comparator<Values>): Table {
 }
 
 fun Table.sort(name: String = format.first().name, ascending: Boolean = true): Table {
-    return sort(
-            Comparator { o1: Values, o2: Values ->
-                val signum = if (ascending) +1 else -1
-                o1.getValue(name).compareTo(o2.getValue(name)) * signum
-            }
-    )
+    return sort { o1: Values, o2: Values ->
+        val signum = if (ascending) +1 else -1
+        o1.getValue(name).compareTo(o2.getValue(name)) * signum
+    }
 }
 
 
 /* Row reduction */
 
 fun <K> Table.reduceRows(format: TableFormat? = null, keySelector: (Values) -> K, mapper: (K, List<Values>) -> Values) =
     ListTable(format ?: this.format, this.groupBy(keySelector).map { (key, value) -> mapper(key, value) }, false)
 
 /**
  * A helper for table row reduction
@@ -177,15 +175,13 @@ class RowReducer(val default: (Iterable<Value>) -> Value) {
         reducers[key] = reducer
     }
 
-    fun sumByDouble(key: String) = rule(key) { rows -> rows.sumByDouble { it.double }.asValue() }
-    fun sumByInt(key: String) = rule(key) { rows -> rows.sumBy { it.int }.asValue() }
+    fun sumByDouble(key: String) = rule(key) { rows -> rows.sumOf { it.double }.asValue() }
+    fun sumByInt(key: String) = rule(key) { rows -> rows.sumOf { it.int }.asValue() }
 
     fun averageByDouble(key: String) = rule(key) { rows -> rows.map { it.double }.average().asValue() }
     fun averageByInt(key: String) = rule(key) { rows -> rows.map { it.int }.average().asValue() }
 
-    fun reduce(key: String, values: Iterable<Value>): Value {
-        return reducers.getOrDefault(key, default).invoke(values)
-    }
+    fun reduce(key: String, values: Iterable<Value>): Value = reducers.getOrDefault(key, default).invoke(values)
 
     /**
      * Reduce list of rows to a single row
@@ -206,7 +202,7 @@ class RowReducer(val default: (Iterable<Value>) -> Value) {
 fun Table.sumByStep(key: String, step: Double, customizer: (RowReducer) -> Unit = {}): Table {
     assert(step > 0) { "Step must be positive" }
 
-    val reducer = RowReducer { rows -> rows.sumByDouble { it.double }.asValue() }.apply {
+    val reducer = RowReducer { rows -> rows.sumOf { it.double }.asValue() }.apply {
         averageByDouble(key)
     }.apply(customizer)
 
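sumBy and sumByDouble were deprecated in Kotlin 1.5; sumOf replaces both, with the overload chosen by the selector's return type. A minimal sketch of the replacements used in the RowReducer changes above:

    fun main() {
        val doubles = listOf(1.5, 2.5, 3.0)
        val ints = listOf(1, 2, 3)
        println(doubles.sumOf { it }) // 7.0, replaces sumByDouble { it }
        println(ints.sumOf { it })    // 6, replaces sumBy { it }
    }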
@@ -33,7 +33,7 @@ import java.util.stream.StreamSupport
  */
 interface ValuesSource : Iterable<Values> {
 
-    @JvmDefault
     val rows: Stream<Values>
         get() = StreamSupport.stream(this.spliterator(), false)
 
@@ -24,7 +24,6 @@ import java.time.LocalDateTime
 import java.time.ZoneOffset
 import java.time.format.DateTimeParseException
 import java.util.stream.Stream
-import kotlin.streams.toList
 
 /**
  * The list of supported Value types.
@@ -61,15 +60,15 @@ interface Value : Serializable, Comparable<Value> {
      */
     val boolean: Boolean
 
-    @JvmDefault
     val double: Double
         get() = number.toDouble()
 
-    @JvmDefault
     val int: Int
         get() = number.toInt()
 
-    @JvmDefault
     val long: Long
         get() = number.toLong()
 
@@ -80,7 +79,7 @@ interface Value : Serializable, Comparable<Value> {
      */
     val time: Instant
 
-    @JvmDefault
     val binary: ByteBuffer
         get() = ByteBuffer.wrap(string.toByteArray())
 
@@ -100,11 +99,11 @@ interface Value : Serializable, Comparable<Value> {
      *
      * @return
      */
-    @JvmDefault
     val list: List<Value>
         get() = listOf(this)
 
-    @JvmDefault
     val isNull: Boolean
         get() = this.type == ValueType.NULL
 
@@ -113,7 +112,7 @@ interface Value : Serializable, Comparable<Value> {
      *
      * @return
      */
-    @JvmDefault
     val isList: Boolean
         get() = false
 
@@ -122,7 +121,7 @@ interface Value : Serializable, Comparable<Value> {
      */
     val value: Any
 
-    @JvmDefault
     override fun compareTo(other: Value): Int {
         return when (type) {
             ValueType.NUMBER -> ValueUtils.NUMBER_COMPARATOR.compare(number, other.number)
|
@ -22,7 +22,7 @@ import java.util.*
|
|||||||
|
|
||||||
interface ValueProvider {
|
interface ValueProvider {
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun hasValue(path: String): Boolean {
|
fun hasValue(path: String): Boolean {
|
||||||
return optValue(path).isPresent
|
return optValue(path).isPresent
|
||||||
}
|
}
|
||||||
@ -30,113 +30,113 @@ interface ValueProvider {
|
|||||||
@Provides(VALUE_TARGET)
|
@Provides(VALUE_TARGET)
|
||||||
fun optValue(path: String): Optional<Value>
|
fun optValue(path: String): Optional<Value>
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getValue(path: String): Value {
|
fun getValue(path: String): Value {
|
||||||
return optValue(path).orElseThrow<NameNotFoundException> { NameNotFoundException(path) }
|
return optValue(path).orElseThrow<NameNotFoundException> { NameNotFoundException(path) }
|
||||||
}
|
}
|
||||||
|
|
||||||
@Provides(BOOLEAN_TARGET)
|
@Provides(BOOLEAN_TARGET)
|
||||||
@JvmDefault
|
|
||||||
fun optBoolean(name: String): Optional<Boolean> {
|
fun optBoolean(name: String): Optional<Boolean> {
|
||||||
return optValue(name).map<Boolean> { it.boolean }
|
return optValue(name).map<Boolean> { it.boolean }
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getBoolean(name: String, def: Boolean): Boolean {
|
fun getBoolean(name: String, def: Boolean): Boolean {
|
||||||
return optValue(name).map<Boolean> { it.boolean }.orElse(def)
|
return optValue(name).map<Boolean> { it.boolean }.orElse(def)
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getBoolean(name: String, def: () -> Boolean): Boolean {
|
fun getBoolean(name: String, def: () -> Boolean): Boolean {
|
||||||
return optValue(name).map<Boolean> { it.boolean }.orElseGet(def)
|
return optValue(name).map<Boolean> { it.boolean }.orElseGet(def)
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getBoolean(name: String): Boolean {
|
fun getBoolean(name: String): Boolean {
|
||||||
return getValue(name).boolean
|
return getValue(name).boolean
|
||||||
}
|
}
|
||||||
|
|
||||||
@Provides(NUMBER_TARGET)
|
@Provides(NUMBER_TARGET)
|
||||||
@JvmDefault
|
|
||||||
fun optNumber(name: String): Optional<Number> {
|
fun optNumber(name: String): Optional<Number> {
|
||||||
return optValue(name).map<Number> { it.number }
|
return optValue(name).map<Number> { it.number }
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getDouble(name: String, def: Double): Double {
|
fun getDouble(name: String, def: Double): Double {
|
||||||
return optValue(name).map<Double> { it.double }.orElse(def)
|
return optValue(name).map<Double> { it.double }.orElse(def)
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getDouble(name: String, def: () -> Double): Double {
|
fun getDouble(name: String, def: () -> Double): Double {
|
||||||
return optValue(name).map<Double> { it.double }.orElseGet(def)
|
return optValue(name).map<Double> { it.double }.orElseGet(def)
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getDouble(name: String): Double {
|
fun getDouble(name: String): Double {
|
||||||
return getValue(name).double
|
return getValue(name).double
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getInt(name: String, def: Int): Int {
|
fun getInt(name: String, def: Int): Int {
|
||||||
return optValue(name).map<Int> { it.int }.orElse(def)
|
return optValue(name).map<Int> { it.int }.orElse(def)
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getInt(name: String, def: () -> Int): Int {
|
fun getInt(name: String, def: () -> Int): Int {
|
||||||
return optValue(name).map<Int> { it.int }.orElseGet(def)
|
return optValue(name).map<Int> { it.int }.orElseGet(def)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getInt(name: String): Int {
|
fun getInt(name: String): Int {
|
||||||
return getValue(name).int
|
return getValue(name).int
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
@Provides(STRING_TARGET)
|
@Provides(STRING_TARGET)
|
||||||
fun optString(name: String): Optional<String> {
|
fun optString(name: String): Optional<String> {
|
||||||
return optValue(name).map<String> { it.string }
|
return optValue(name).map<String> { it.string }
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getString(name: String, def: String): String {
|
fun getString(name: String, def: String): String {
|
||||||
return optString(name).orElse(def)
|
return optString(name).orElse(def)
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getString(name: String, def: () -> String): String {
|
fun getString(name: String, def: () -> String): String {
|
||||||
return optString(name).orElseGet(def)
|
return optString(name).orElseGet(def)
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getString(name: String): String {
|
fun getString(name: String): String {
|
||||||
return getValue(name).string
|
return getValue(name).string
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getValue(name: String, def: Any): Value {
|
fun getValue(name: String, def: Any): Value {
|
||||||
return optValue(name).orElse(Value.of(def))
|
return optValue(name).orElse(Value.of(def))
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getValue(name: String, def: () -> Value): Value {
|
fun getValue(name: String, def: () -> Value): Value {
|
||||||
return optValue(name).orElseGet(def)
|
return optValue(name).orElseGet(def)
|
||||||
}
|
}
|
||||||
|
|
||||||
@Provides(TIME_TARGET)
|
@Provides(TIME_TARGET)
|
||||||
@JvmDefault
|
|
||||||
fun optTime(name: String): Optional<Instant> {
|
fun optTime(name: String): Optional<Instant> {
|
||||||
return optValue(name).map { it.time }
|
return optValue(name).map { it.time }
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getStringArray(name: String): Array<String> {
|
fun getStringArray(name: String): Array<String> {
|
||||||
val vals = getValue(name).list
|
val vals = getValue(name).list
|
||||||
return Array(vals.size) { vals[it].string }
|
return Array(vals.size) { vals[it].string }
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getStringArray(name: String, def: () -> Array<String>): Array<String> {
|
fun getStringArray(name: String, def: () -> Array<String>): Array<String> {
|
||||||
return if (this.hasValue(name)) {
|
return if (this.hasValue(name)) {
|
||||||
getStringArray(name)
|
getStringArray(name)
|
||||||
@ -145,7 +145,7 @@ interface ValueProvider {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun getStringArray(name: String, def: Array<String>): Array<String> {
|
fun getStringArray(name: String, def: Array<String>): Array<String> {
|
||||||
return if (this.hasValue(name)) {
|
return if (this.hasValue(name)) {
|
||||||
getStringArray(name)
|
getStringArray(name)
|
||||||
|
@@ -137,40 +137,40 @@ object ValueUtils {
 @Throws(IOException::class)
 fun DataOutput.writeValue(value: Value) {
     if (value.isList) {
-        writeByte('*'.toInt()) // List designation
+        writeByte('*'.code) // List designation
         writeShort(value.list.size)
         for (subValue in value.list) {
             writeValue(subValue)
         }
     } else {
         when (value.type) {
-            ValueType.NULL -> writeChar('0'.toInt()) // null
+            ValueType.NULL -> writeChar('0'.code) // null
             ValueType.TIME -> {
-                writeByte('T'.toInt())//Instant
+                writeByte('T'.code)//Instant
                 writeLong(value.time.epochSecond)
                 writeLong(value.time.nano.toLong())
             }
             ValueType.STRING -> {
-                this.writeByte('S'.toInt())//String
+                this.writeByte('S'.code)//String
                 IOUtils.writeString(this, value.string)
             }
             ValueType.NUMBER -> {
                 val num = value.number
                 when (num) {
                     is Double -> {
-                        writeByte('D'.toInt()) // double
+                        writeByte('D'.code) // double
                         writeDouble(num.toDouble())
                     }
                     is Int -> {
-                        writeByte('I'.toInt()) // integer
+                        writeByte('I'.code) // integer
                         writeInt(num.toInt())
                     }
                     is Long -> {
-                        writeByte('L'.toInt())
+                        writeByte('L'.code)
                         writeLong(num.toLong())
                     }
                     else -> {
-                        writeByte('N'.toInt()) // BigDecimal
+                        writeByte('N'.code) // BigDecimal
                         val decimal = num.toBigDecimal()
                         val bigInt = decimal.unscaledValue().toByteArray()
                         val scale = decimal.scale()
@@ -181,13 +181,13 @@ fun DataOutput.writeValue(value: Value) {
                 }
             }
             ValueType.BOOLEAN -> if (value.boolean) {
-                writeByte('+'.toInt()) //true
+                writeByte('+'.code) //true
             } else {
-                writeByte('-'.toInt()) // false
+                writeByte('-'.code) // false
             }
             ValueType.BINARY -> {
                 val binary = value.binary
-                writeByte('X'.toInt())
+                writeByte('X'.code)
                 writeInt(binary.limit())
                 write(binary.array())
             }
@@ -199,7 +199,7 @@ fun DataOutput.writeValue(value: Value) {
  * Value deserialization
  */
 fun DataInput.readValue(): Value {
-    val type = readByte().toChar()
+    val type = readByte().toInt().toChar()
     when (type) {
         '*' -> {
             val listSize = readShort()
@@ -240,7 +240,7 @@ fun DataInput.readValue(): Value {
 
 
 fun ByteBuffer.getValue(): Value {
-    val type = get().toChar()
+    val type = get().toInt().toChar()
     when (type) {
         '*' -> {
             val listSize = getShort()
@@ -286,7 +286,7 @@ fun ByteBuffer.getValue(): Value {
 
 fun ByteBuffer.putValue(value: Value) {
     if (value.isList) {
-        put('*'.toByte()) // List designation
+        put('*'.code.toByte()) // List designation
         if (value.list.size > Short.MAX_VALUE) {
             throw RuntimeException("The array values of size more than ${Short.MAX_VALUE} could not be serialized")
         }
@@ -294,14 +294,14 @@ fun ByteBuffer.putValue(value: Value) {
         value.list.forEach { putValue(it) }
     } else {
         when (value.type) {
-            ValueType.NULL -> put('0'.toByte()) // null
+            ValueType.NULL -> put('0'.code.toByte()) // null
             ValueType.TIME -> {
-                put('T'.toByte())//Instant
+                put('T'.code.toByte())//Instant
                 putLong(value.time.epochSecond)
                 putLong(value.time.nano.toLong())
             }
             ValueType.STRING -> {
-                put('S'.toByte())//String
+                put('S'.code.toByte())//String
                 if (value.string.length > Int.MAX_VALUE) {
                     throw RuntimeException("The string valuse of size more than ${Int.MAX_VALUE} could not be serialized")
                 }
@@ -311,19 +311,19 @@ fun ByteBuffer.putValue(value: Value) {
                 val num = value.number
                 when (num) {
                     is Double -> {
-                        put('D'.toByte()) // double
+                        put('D'.code.toByte()) // double
                         putDouble(num.toDouble())
                     }
                     is Int -> {
-                        put('I'.toByte()) // integer
+                        put('I'.code.toByte()) // integer
                         putInt(num.toInt())
                     }
                     is Long -> {
-                        put('L'.toByte())
+                        put('L'.code.toByte())
                         putLong(num.toLong())
                     }
                     is BigDecimal -> {
-                        put('N'.toByte()) // BigDecimal
+                        put('N'.code.toByte()) // BigDecimal
                         val bigInt = num.unscaledValue().toByteArray()
                         val scale = num.scale()
                         if (bigInt.size > Short.MAX_VALUE) {
@@ -339,12 +339,12 @@ fun ByteBuffer.putValue(value: Value) {
             }
         }
             ValueType.BOOLEAN -> if (value.boolean) {
-                put('+'.toByte()) //true
+                put('+'.code.toByte()) //true
             } else {
-                put('-'.toByte()) // false
+                put('-'.code.toByte()) // false
            }
            ValueType.BINARY -> {
-                put('X'.toByte())
+                put('X'.code.toByte())
                val binary = value.binary
                putInt(binary.limit())
                put(binary.array())
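In the same Kotlin 1.5 clean-up, direct Byte-to-Char conversions (Byte.toChar()) were deprecated because the intended widening is ambiguous; the deserialization code above therefore goes through Int explicitly with readByte().toInt().toChar(). A minimal sketch:

    fun main() {
        val tagByte: Byte = 83                   // byte value of 'S'
        val tag: Char = tagByte.toInt().toChar() // replaces the deprecated tagByte.toChar()
        println(tag) // S
    }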
@@ -19,7 +19,6 @@ import hep.dataforge.meta.Meta
 import hep.dataforge.meta.MetaBuilder
 import hep.dataforge.meta.MetaMorph
 import hep.dataforge.names.NameSetContainer
-import java.util.*
 
 /**
  * A named set of values with fixed name list.
@@ -32,7 +31,7 @@ interface Values : NameSetContainer, ValueProvider, MetaMorph, Iterable<NamedVal
      * @param path
      * @return
      */
-    @JvmDefault
     override fun hasValue(path: String): Boolean {
         return this.names.contains(path)
     }
@@ -43,12 +42,12 @@ interface Values : NameSetContainer, ValueProvider, MetaMorph, Iterable<NamedVal
      * @param num
      * @return
      */
-    @JvmDefault
     operator fun get(num: Int): Value {
         return getValue(this.names.get(num))
     }
 
-    @JvmDefault
     operator fun get(key: String): Value {
         return getValue(key)
     }
@@ -57,7 +56,7 @@ interface Values : NameSetContainer, ValueProvider, MetaMorph, Iterable<NamedVal
      * Convert a DataPoint to a Map. Order is not guaranteed
      * @return
      */
-    @JvmDefault
     fun asMap(): Map<String, Value> {
         val res = HashMap<String, Value>()
         for (field in this.names) {
@@ -66,7 +65,7 @@ interface Values : NameSetContainer, ValueProvider, MetaMorph, Iterable<NamedVal
         return res
     }
 
-    @JvmDefault
     override fun iterator(): Iterator<NamedValue> {
         return names.map { NamedValue(it, get(it)) }.iterator()
     }
@@ -77,12 +76,12 @@ interface Values : NameSetContainer, ValueProvider, MetaMorph, Iterable<NamedVal
      * @param name
      * @return
      */
-    @JvmDefault
     fun hasTag(name: String): Boolean {
         return names.contains(name) && getValue(name).boolean
     }
 
-    @JvmDefault
     override fun toMeta(): Meta {
         val builder = MetaBuilder("point")
         for (name in namesAsArray()) {
@ -70,7 +70,7 @@ interface Workspace : ContextAware, Provider {
|
|||||||
* @param dataPath Fully qualified data name
|
* @param dataPath Fully qualified data name
|
||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
@JvmDefault
|
|
||||||
fun getData(dataPath: String): Data<*> {
|
fun getData(dataPath: String): Data<*> {
|
||||||
return data.getData(dataPath)
|
return data.getData(dataPath)
|
||||||
}
|
}
|
||||||
@ -90,7 +90,7 @@ interface Workspace : ContextAware, Provider {
|
|||||||
* @param taskName
|
* @param taskName
|
||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
@JvmDefault
|
|
||||||
fun getTask(taskName: String): Task<*> {
|
fun getTask(taskName: String): Task<*> {
|
||||||
return optTask(taskName) ?: throw NameNotFoundException(taskName)
|
return optTask(taskName) ?: throw NameNotFoundException(taskName)
|
||||||
}
|
}
|
||||||
@ -104,7 +104,7 @@ interface Workspace : ContextAware, Provider {
|
|||||||
* @param overlay use given meta as overaly for existing meta with the same name
|
* @param overlay use given meta as overaly for existing meta with the same name
|
||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
@JvmDefault
|
|
||||||
fun runTask(taskName: String, config: Meta, overlay: Boolean): DataNode<*> {
|
fun runTask(taskName: String, config: Meta, overlay: Boolean): DataNode<*> {
|
||||||
val task = getTask(taskName)
|
val task = getTask(taskName)
|
||||||
val taskConfig = if (overlay && hasTarget(config.name)) {
|
val taskConfig = if (overlay && hasTarget(config.name)) {
|
||||||
@ -116,7 +116,7 @@ interface Workspace : ContextAware, Provider {
|
|||||||
return runTask(model)
|
return runTask(model)
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun runTask(taskName: String, config: Meta): DataNode<*> {
|
fun runTask(taskName: String, config: Meta): DataNode<*> {
|
||||||
return this.runTask(taskName, config, true)
|
return this.runTask(taskName, config, true)
|
||||||
}
|
}
|
||||||
@ -127,7 +127,7 @@ interface Workspace : ContextAware, Provider {
|
|||||||
* @param config
|
* @param config
|
||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
@JvmDefault
|
|
||||||
fun runTask(config: Meta): DataNode<*> {
|
fun runTask(config: Meta): DataNode<*> {
|
||||||
return runTask(config.name, config)
|
return runTask(config.name, config)
|
||||||
}
|
}
|
||||||
@ -139,7 +139,7 @@ interface Workspace : ContextAware, Provider {
|
|||||||
* @param target
|
* @param target
|
||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
@JvmDefault
|
|
||||||
fun runTask(taskName: String, target: String = taskName): DataNode<*> {
|
fun runTask(taskName: String, target: String = taskName): DataNode<*> {
|
||||||
return runTask(taskName, optTarget(target) ?: Meta.empty())
|
return runTask(taskName, optTarget(target) ?: Meta.empty())
|
||||||
}
|
}
|
||||||
@ -150,7 +150,7 @@ interface Workspace : ContextAware, Provider {
|
|||||||
* @param model
|
* @param model
|
||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
@JvmDefault
|
|
||||||
fun runTask(model: TaskModel): DataNode<*> {
|
fun runTask(model: TaskModel): DataNode<*> {
|
||||||
return this.getTask(model.name).run(model)
|
return this.getTask(model.name).run(model)
|
||||||
}
|
}
|
||||||
@ -169,7 +169,7 @@ interface Workspace : ContextAware, Provider {
|
|||||||
* @param name
|
* @param name
|
||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
@JvmDefault
|
|
||||||
fun getTarget(name: String): Meta {
|
fun getTarget(name: String): Meta {
|
||||||
return optTarget(name) ?: throw NameNotFoundException(name)
|
return optTarget(name) ?: throw NameNotFoundException(name)
|
||||||
}
|
}
|
||||||
@ -180,7 +180,7 @@ interface Workspace : ContextAware, Provider {
|
|||||||
* @param name
|
* @param name
|
||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
@JvmDefault
|
|
||||||
fun hasTarget(name: String): Boolean {
|
fun hasTarget(name: String): Boolean {
|
||||||
return optTarget(name) != null
|
return optTarget(name) != null
|
||||||
}
|
}
|
||||||
@ -194,7 +194,7 @@ interface Workspace : ContextAware, Provider {
|
|||||||
|
|
||||||
override var context: Context
|
override var context: Context
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun loadFrom(meta: Meta): Workspace.Builder {
|
fun loadFrom(meta: Meta): Workspace.Builder {
|
||||||
if (meta.hasValue("context")) {
|
if (meta.hasValue("context")) {
|
||||||
context = Global.getContext(meta.getString("context"))
|
context = Global.getContext(meta.getString("context"))
|
||||||
@ -243,7 +243,7 @@ interface Workspace : ContextAware, Provider {
|
|||||||
* @param dataConfig
|
* @param dataConfig
|
||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
@JvmDefault
|
|
||||||
fun data(place: String, dataConfig: Meta): Workspace.Builder {
|
fun data(place: String, dataConfig: Meta): Workspace.Builder {
|
||||||
return data(place, DataLoader.SMART.build(context, dataConfig))
|
return data(place, DataLoader.SMART.build(context, dataConfig))
|
||||||
}
|
}
|
||||||
@ -256,7 +256,7 @@ interface Workspace : ContextAware, Provider {
|
|||||||
* @param dataConfig
|
* @param dataConfig
|
||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
@JvmDefault
|
|
||||||
fun data(place: String, factory: DataLoader<out Any>, dataConfig: Meta): Workspace.Builder {
|
fun data(place: String, factory: DataLoader<out Any>, dataConfig: Meta): Workspace.Builder {
|
||||||
return data(place, factory.build(context, dataConfig))
|
return data(place, factory.build(context, dataConfig))
|
||||||
}
|
}
|
||||||
@ -269,29 +269,29 @@ interface Workspace : ContextAware, Provider {
|
|||||||
* @param meta
|
* @param meta
|
||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
@JvmDefault
|
|
||||||
fun staticData(name: String, obj: Any, meta: Meta): Workspace.Builder {
|
fun staticData(name: String, obj: Any, meta: Meta): Workspace.Builder {
|
||||||
return data(name, Data.buildStatic(obj, meta))
|
return data(name, Data.buildStatic(obj, meta))
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun staticData(name: String, obj: Any): Workspace.Builder {
|
fun staticData(name: String, obj: Any): Workspace.Builder {
|
||||||
return data(name, Data.buildStatic(obj))
|
return data(name, Data.buildStatic(obj))
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun fileData(place: String, filePath: String, meta: Meta): Workspace.Builder {
|
fun fileData(place: String, filePath: String, meta: Meta): Workspace.Builder {
|
||||||
return data(place, DataUtils.readFile(context.getFile(filePath), meta))
|
return data(place, DataUtils.readFile(context.getFile(filePath), meta))
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun fileData(dataName: String, filePath: String): Workspace.Builder {
|
fun fileData(dataName: String, filePath: String): Workspace.Builder {
|
||||||
return fileData(dataName, filePath, Meta.empty())
|
return fileData(dataName, filePath, Meta.empty())
|
||||||
}
|
}
|
||||||
|
|
||||||
fun target(name: String, meta: Meta): Workspace.Builder
|
fun target(name: String, meta: Meta): Workspace.Builder
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
fun target(meta: Meta): Workspace.Builder {
|
fun target(meta: Meta): Workspace.Builder {
|
||||||
return target(meta.name, meta)
|
return target(meta.name, meta)
|
||||||
}
|
}
|
||||||
@ -299,7 +299,7 @@ interface Workspace : ContextAware, Provider {
|
|||||||
fun task(task: Task<*>): Workspace.Builder
|
fun task(task: Task<*>): Workspace.Builder
|
||||||
|
|
||||||
@Throws(IllegalAccessException::class, InstantiationException::class)
|
@Throws(IllegalAccessException::class, InstantiationException::class)
|
||||||
@JvmDefault
|
|
||||||
fun task(type: Class<Task<*>>): Workspace.Builder {
|
fun task(type: Class<Task<*>>): Workspace.Builder {
|
||||||
return task(type.getConstructor().newInstance())
|
return task(type.getConstructor().newInstance())
|
||||||
}
|
}
|
||||||
|
@ -56,7 +56,7 @@ interface Task<out R : Any> : Named, Described {
|
|||||||
*
|
*
|
||||||
* @param model
|
* @param model
|
||||||
*/
|
*/
|
||||||
@JvmDefault
|
|
||||||
fun validate(model: TaskModel) {
|
fun validate(model: TaskModel) {
|
||||||
//do nothing
|
//do nothing
|
||||||
}
|
}
|
||||||
|
@ -21,19 +21,17 @@ import org.junit.Assert.assertEquals
|
|||||||
import org.junit.Test
|
import org.junit.Test
|
||||||
import java.io.ByteArrayInputStream
|
import java.io.ByteArrayInputStream
|
||||||
import java.io.ByteArrayOutputStream
|
import java.io.ByteArrayOutputStream
|
||||||
import java.io.IOException
|
|
||||||
import java.nio.charset.Charset
|
import java.nio.charset.Charset
|
||||||
|
|
||||||
class TaglessEnvelopeTest {
|
class TaglessEnvelopeTest {
|
||||||
private val envelope = EnvelopeBuilder()
|
private val envelope = EnvelopeBuilder()
|
||||||
.meta(MetaBuilder()
|
.meta(MetaBuilder()
|
||||||
.putValue("myValue", 12)
|
.putValue("myValue", 12)
|
||||||
).data("Всем привет!".toByteArray(Charset.forName("UTF-8")))
|
).data("Всем привет!".toByteArray(Charset.forName("UTF-8")))
|
||||||
|
|
||||||
private val envelopeType = TaglessEnvelopeType.INSTANCE
|
private val envelopeType = TaglessEnvelopeType.INSTANCE
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Throws(IOException::class)
|
|
||||||
fun testWriteRead() {
|
fun testWriteRead() {
|
||||||
val baos = ByteArrayOutputStream()
|
val baos = ByteArrayOutputStream()
|
||||||
envelopeType.writer.write(baos, envelope)
|
envelopeType.writer.write(baos, envelope)
|
||||||
@ -47,7 +45,6 @@ class TaglessEnvelopeTest {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@Throws(IOException::class)
|
|
||||||
fun testShortForm() {
|
fun testShortForm() {
|
||||||
val envString = "<meta myValue=\"12\"/>\n" +
|
val envString = "<meta myValue=\"12\"/>\n" +
|
||||||
"#~DATA~#\n" +
|
"#~DATA~#\n" +
|
||||||
|
@ -8,13 +8,13 @@ description = "A tornadofx based kotlin library"
|
|||||||
|
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
compile project(':dataforge-plots')
|
api project(':dataforge-plots')
|
||||||
compile project(':dataforge-gui:dataforge-html')
|
api project(':dataforge-gui:dataforge-html')
|
||||||
compile 'org.controlsfx:controlsfx:8.40.14'
|
api 'org.controlsfx:controlsfx:8.40.14'
|
||||||
compile "no.tornado:tornadofx:1.7.19"
|
api "no.tornado:tornadofx:1.7.19"
|
||||||
compile 'no.tornado:tornadofx-controlsfx:0.1.1'
|
api 'no.tornado:tornadofx-controlsfx:0.1.1'
|
||||||
compile group: 'org.fxmisc.richtext', name: 'richtextfx', version: '0.10.2'
|
api group: 'org.fxmisc.richtext', name: 'richtextfx', version: '0.10.2'
|
||||||
compile 'org.jetbrains.kotlinx:kotlinx-coroutines-javafx:1.5.0'
|
api 'org.jetbrains.kotlinx:kotlinx-coroutines-javafx:1.5.0'
|
||||||
|
|
||||||
// optional dependency for JFreeChart
|
// optional dependency for JFreeChart
|
||||||
//compileOnly project(":dataforge-plots:plots-jfc")
|
//compileOnly project(":dataforge-plots:plots-jfc")
|
||||||
|
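The build-script hunks in this commit swap the compile configuration, removed in Gradle 7, for api. A hedged Gradle Kotlin DSL sketch of the distinction (module and artifact names are placeholders, not taken from this build):

plugins {
    `java-library` // the api configuration is provided by the java-library (or Kotlin JVM) plugin
}

dependencies {
    // api: re-exported, visible on the compile classpath of consumers (closest to the old compile)
    api(project(":some-core-module"))
    // implementation: internal detail, hidden from consumers' compile classpath
    implementation("org.slf4j:slf4j-api:1.7.32")
}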
@@ -1,6 +1,6 @@
 description = "An html rendering core and HTML output"

 dependencies {
-compile project(':dataforge-core')
+api project(':dataforge-core')
-compile 'org.jetbrains.kotlinx:kotlinx-html-jvm:0.6.11'
+api 'org.jetbrains.kotlinx:kotlinx-html-jvm:0.6.11'
 }

@@ -1,6 +1,6 @@
 plugins{
 id "application"
-id "com.github.johnrengelman.shadow" version "2.0.1"
+id "com.github.johnrengelman.shadow"
 }
 apply plugin: 'kotlin'

@@ -13,8 +13,8 @@ mainClassName = mainClass
 description = "A demonstration for plots capabilities"

 dependencies {
-compile project(':dataforge-plots:plots-jfc')
+api project(':dataforge-plots:plots-jfc')
-compile project(':dataforge-gui')
+api project(':dataforge-gui')
 }



@@ -24,7 +24,7 @@ apply plugin: 'kotlin'
 description = "A gui for workspace creation and manipulation"

 dependencies {
-compile project(':dataforge-gui')
+api project(':dataforge-gui')
 }

 application{

@@ -44,7 +44,7 @@ class FXPlugin(meta: Meta = Meta.empty()) : BasicPlugin(meta) {
  */
 override fun attach(context: Context) {
 super.attach(context)
-if (FX.getApplication(DefaultScope) == null) {
+if (FX.getApplication(FX.defaultScope) == null) {
 if (consoleMode) {
 Thread {
 context.logger.debug("Starting FX application surrogate")
@@ -70,7 +70,7 @@ class FXPlugin(meta: Meta = Meta.empty()) : BasicPlugin(meta) {
  * Define an application to use in this context
  */
 fun setApp(app: Application, stage: Stage) {
-FX.registerApplication(DefaultScope, app, stage)
+FX.registerApplication(FX.defaultScope, app, stage)
 }

 /**

@@ -18,7 +18,6 @@ package hep.dataforge.fx.meta

 import hep.dataforge.fx.dfIconView
 import hep.dataforge.meta.Meta
-import hep.dataforge.toList
 import hep.dataforge.values.Value
 import javafx.beans.property.SimpleStringProperty
 import javafx.beans.property.StringProperty
@@ -55,8 +54,8 @@ open class MetaViewer(val meta: Meta, title: String = "Meta viewer: ${meta.name}
 is MetaItem -> {
 val meta = value.meta
 Stream.concat(
 meta.nodeNames.flatMap { meta.getMetaList(it).stream() }.map { MetaItem(it) },
 meta.valueNames.map { ValueItem(it, meta.getValue(it)) }
 ).toList()
 }
 is ValueItem -> null

@@ -131,7 +131,7 @@ class OutputContainer(val context: Context, val meta: Meta) :
 }

 override fun computeValue(): ObservableList<String> {
-return outputs.keys.toList().observable()
+return outputs.keys.toList().asObservable()
 }
 }
 onUserSelect {
@@ -169,7 +169,7 @@ class FXOutputManager(
 viewConsumer: Context.(OutputContainer) -> Unit = { getOrLoad(FXPlugin::class.java).display(it) }
 ) : OutputManager, BasicPlugin(meta) {

-override val tag = PluginTag(name = "output.fx", dependsOn = *arrayOf("hep.dataforge:fx"))
+override val tag = PluginTag(name = "output.fx", dependsOn = arrayOf("hep.dataforge:fx"))

 override fun attach(context: Context) {
 super.attach(context)
@@ -152,7 +152,7 @@ class FXTextOutput(context: Context) : FXOutput(context), TextOutput {
 }

 private fun countLines(): Int {
-return textArea.text.chars().filter { value: Int -> value == '\n'.toInt() }.count().toInt()
+return textArea.text.chars().filter { value: Int -> value == '\n'.code }.count().toInt()
 }

 /**
@@ -58,8 +58,8 @@ class TableDisplay(title: String? = null) : Fragment(title = title, icon = dfIco

 (0 until table.size()).forEach { i ->
 rows += (0 until format.count())
 .map { j -> buildCell(i, j, table.get(format.names[j], i)) }
-.observable()
+.asObservable()
 }
 }
 }

@@ -1,5 +1,5 @@
 description = 'Commons math dependency and some useful tools'
 dependencies {
-compile 'org.apache.commons:commons-math3:3.+'
+api 'org.apache.commons:commons-math3:3.+'
-compile project(':dataforge-core')
+api project(':dataforge-core')
 }

@@ -14,9 +14,9 @@ class DSNumber(val ds: DerivativeStructure, nc: DSField) : FieldCompat<Number, D
 return ds.getPartialDerivative(*nc.names.map { orders[it] ?: 0 }.toIntArray())
 }

-override fun toByte(): Byte = ds.value.toByte()
+override fun toByte(): Byte = ds.value.toInt().toByte()

-override fun toChar(): Char = ds.value.toChar()
+override fun toChar(): Char = ds.value.toInt().toChar()

 override fun toDouble(): Double = ds.value

@@ -26,7 +26,7 @@ class DSNumber(val ds: DerivativeStructure, nc: DSField) : FieldCompat<Number, D

 override fun toLong(): Long = ds.value.toLong()

-override fun toShort(): Short = ds.value.toShort()
+override fun toShort(): Short = ds.value.toInt().toShort()

 /**
  * Return new DSNumber, obtained by applying given function to underlying ds

@@ -55,13 +55,7 @@ class ExpressionField<in T, R>(val names: NameList, private val field: Field<T,

 override val zero: Expression<R> = BasicExpression { field.one }

-override fun transform(n: T): Expression<R> {
-return if (n is Expression<*>) {
-n as Expression<R>
-} else {
-BasicExpression { field.transform(n) }
-}
-}
+override fun transform(n: T): Expression<R> = n as? Expression<R> ?: BasicExpression { field.transform(n) }

 override fun add(a: T, b: T): Expression<R> {
 TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
@@ -9,5 +9,5 @@
 description = 'dataforge-plots'

 dependencies {
-compile project(':dataforge-core')
+api project(':dataforge-core')
 }

@@ -8,9 +8,9 @@
 description = 'jFreeChart plugin'

 dependencies {
-compile 'org.jfree:jfreesvg:3.3'
+api 'org.jfree:jfreesvg:3.3'
 // https://mvnrepository.com/artifact/org.jfree/jfreechart-fx
-compile group: 'org.jfree', name: 'jfreechart-fx', version: '1.0.1'
+api group: 'org.jfree', name: 'jfreechart-fx', version: '1.0.1'

-compile project(":dataforge-plots")
+api project(":dataforge-plots")
 }
@@ -151,7 +151,7 @@ class JFreeChartFrame : XYPlotFrame(), FXPlotFrame, Serializable {
 }

 private fun getAxis(axisMeta: Meta): ValueAxis {
-return when (axisMeta.getString("type", "number").toLowerCase()) {
+return when (axisMeta.getString("type", "number").lowercase()) {
 "log" -> getLogAxis(axisMeta)
 "time" -> getDateAxis(axisMeta)
 else -> getNumberAxis(axisMeta)
@@ -273,7 +273,7 @@ class JFreeChartFrame : XYPlotFrame(), FXPlotFrame, Serializable {
 val render: XYLineAndShapeRenderer = if (config.getBoolean("showErrors", true)) {
 XYErrorRenderer()
 } else {
-when (config.getString("connectionType", "DEFAULT").toUpperCase()) {
+when (config.getString("connectionType", "DEFAULT").uppercase()) {
 "STEP" -> XYStepRenderer()
 "SPLINE" -> XYSplineRenderer()
 else -> XYLineAndShapeRenderer()

@@ -1,5 +1,5 @@
 plugins {
-id "com.github.johnrengelman.shadow" version "7.0.0"
+id "com.github.johnrengelman.shadow"
 id 'application'
 }

@@ -19,6 +19,6 @@ compileKotlin {
 }

 dependencies {
-compile project(':dataforge-plots:plots-jfc')
+api project(':dataforge-plots:plots-jfc')
-compile project(':dataforge-gui')
+api project(':dataforge-gui')
 }

@@ -11,10 +11,8 @@ import hep.dataforge.tables.Adapters
 import hep.dataforge.tables.ListTable
 import hep.dataforge.tables.MetaTableFormat
 import hep.dataforge.tables.Table
-import hep.dataforge.toList
 import hep.dataforge.values.Value
 import hep.dataforge.values.ValueMap
-import java.util.*

 /**
  * @author Alexander Nozik
@@ -28,19 +26,19 @@ object DataPlotUtils {
 names.add("x")

 frame.plots.stream().map { it.second }
-.filter {!visibleOnly || it.config.getBoolean("visible", true) }
+.filter { !visibleOnly || it.config.getBoolean("visible", true) }
 .forEach {
-(it as? Plot)?.let {plot->
+(it as? Plot)?.let { plot ->
 names.add(plot.title)
 plot.data.forEach { point ->
 val x = Adapters.getXValue(plot.adapter, point)
 val mdp: ValueMap.Builder = points.getOrPut(x) {
 ValueMap.Builder().apply { putValue("x", x) }
-}
-mdp.putValue(plot.title, Adapters.getYValue(plot.adapter, point))
 }
+mdp.putValue(plot.title, Adapters.getYValue(plot.adapter, point))
 }
 }
+}

 val res = ListTable.Builder(MetaTableFormat.forNames(names))
 res.rows(points.values.stream().map { it.build() }.toList())

@@ -24,7 +24,6 @@ import hep.dataforge.meta.*
 import hep.dataforge.tables.Adapters
 import hep.dataforge.tables.Adapters.DEFAULT_XY_ADAPTER
 import hep.dataforge.tables.Adapters.buildXYDataPoint
-import hep.dataforge.toList
 import hep.dataforge.values.Value
 import hep.dataforge.values.ValueType.BOOLEAN
 import hep.dataforge.values.ValueType.NUMBER
@@ -38,14 +37,15 @@ import kotlin.collections.set
  * @author Alexander Nozik
  */
 @ValueDefs(
 ValueDef(key = "showLine", type = arrayOf(BOOLEAN), def = "true", info = "Show the connecting line."),
 ValueDef(key = "showSymbol", type = arrayOf(BOOLEAN), def = "false", info = "Show symbols for data point."),
 ValueDef(key = "showErrors", type = arrayOf(BOOLEAN), def = "false", info = "Show errors for points."),
 ValueDef(key = "range.from", type = arrayOf(NUMBER), def = "0.0", info = "Lower boundary for calculation range"),
 ValueDef(key = "range.to", type = arrayOf(NUMBER), def = "1.0", info = "Upper boundary for calculation range"),
 ValueDef(key = "density", type = arrayOf(NUMBER), def = "200", info = "Minimal number of points per plot")
 )
-class XYFunctionPlot(name: String, meta: Meta = Meta.empty(), val function: (Double) -> Double) : XYPlot(name, meta, Adapters.DEFAULT_XY_ADAPTER) {
+class XYFunctionPlot(name: String, meta: Meta = Meta.empty(), val function: (Double) -> Double) :
+    XYPlot(name, meta, Adapters.DEFAULT_XY_ADAPTER) {

 private val cache = TreeMap<Double, Double>()

@@ -71,12 +71,12 @@ class XYFunctionPlot(name: String, meta: Meta = Meta.empty(), val function: (Dou
 }

 var range by config.mutableCustomNode(
 "range",
 read = { Pair(it.getDouble("from"), it.getDouble("to")) },
 write = {
 invalidateCache()
 buildMeta("range", "from" to it.first, "to" to it.second)
 }
 )

 override fun applyValueChange(name: String, oldValue: Value?, newValue: Value?) {
@@ -142,9 +142,9 @@ class XYFunctionPlot(name: String, meta: Meta = Meta.empty(), val function: (Dou

 override val descriptor: NodeDescriptor by lazy {
 Descriptors.forType("plot", this::class)
 .builder()
 .apply { setDefault("connectionType".asName(), ConnectionType.SPLINE) }
 .build()
 }

 override fun getRawData(query: Meta): List<Values> {
@@ -160,8 +160,8 @@ class XYFunctionPlot(name: String, meta: Meta = Meta.empty(), val function: (Dou
 }
 validateCache()
 return cache.entries.stream()
 .map { entry -> buildXYDataPoint(DEFAULT_XY_ADAPTER, entry.key, entry.value) }
 .toList()
 }

 companion object {
@@ -169,7 +169,14 @@ class XYFunctionPlot(name: String, meta: Meta = Meta.empty(), val function: (Dou
 const val DEFAULT_DENSITY = 200

 @JvmOverloads
-fun plot(name: String, from: Double, to: Double, numPoints: Int = DEFAULT_DENSITY, meta: Meta = Meta.empty(), function: (Double) -> Double): XYFunctionPlot {
+fun plot(
+    name: String,
+    from: Double,
+    to: Double,
+    numPoints: Int = DEFAULT_DENSITY,
+    meta: Meta = Meta.empty(),
+    function: (Double) -> Double,
+): XYFunctionPlot {
 val p = XYFunctionPlot(name, meta, function)
 p.range = Pair(from, to)
 p.density = numPoints

@@ -24,7 +24,6 @@ import hep.dataforge.meta.MetaBuilder
 import hep.dataforge.tables.Adapters
 import hep.dataforge.tables.ValuesAdapter
 import hep.dataforge.tables.ValuesAdapter.ADAPTER_KEY
-import hep.dataforge.toList
 import hep.dataforge.values.Value
 import hep.dataforge.values.ValueType
 import hep.dataforge.values.Values
@@ -39,14 +38,17 @@ import java.util.stream.Stream
 //@ValueDef(name = "symbolSize", type = "NUMBER", info = "The size of the symbols for scatterplot.")
 //@ValueDef(name = "lineType", info = "The type of the line fill.")
 @ValueDefs(
 ValueDef(key = "color", info = "The color of line or symbol.", tags = ["widget:color"]),
 ValueDef(key = "thickness", type = [ValueType.NUMBER], def = "1", info = "Thickness of the line if it is present"),
-ValueDef(key = "connectionType", def = "DEFAULT", enumeration = XYPlot.ConnectionType::class, info = "Connection line type")
+ValueDef(key = "connectionType",
+    def = "DEFAULT",
+    enumeration = XYPlot.ConnectionType::class,
+    info = "Connection line type")
 )
 @NodeDef(key = ADAPTER_KEY, info = "An adapter to interpret the dataset", tags = [FINAL_TAG])
 abstract class XYPlot(name: String, meta: Meta, adapter: ValuesAdapter?) : AbstractPlot(name, meta, adapter) {

-enum class ConnectionType{
+enum class ConnectionType {
 DEFAULT,
 STEP,
 SPLINE
@@ -57,7 +59,8 @@ abstract class XYPlot(name: String, meta: Meta, adapter: ValuesAdapter?) : Abstr
 }

 fun getData(from: Value, to: Value, numPoints: Int): List<Values> {
-return getData(MetaBuilder("").putValue("xRange.from", from).putValue("xRange.to", to).putValue("numPoints", numPoints))
+return getData(MetaBuilder("").putValue("xRange.from", from).putValue("xRange.to", to)
+    .putValue("numPoints", numPoints))
 }

 /**
@@ -1,5 +1,5 @@
 description = 'dataforge-fitting'

 dependencies {
-compile project(':dataforge-maths')
+api project(':dataforge-maths')
 }

@@ -1,5 +1,5 @@

 description = 'dataforge-minuit'
 dependencies {
-compile project(':dataforge-stat')
+api project(':dataforge-stat')
 }

@@ -33,7 +33,7 @@ interface Fitter : Named {

 fun run(state: FitState, parentLog: History?, meta: Meta): FitResult

-@JvmDefault
 fun run(state: FitState, parentLog: History? = null, meta: KMetaBuilder.() -> Unit): FitResult {
 return run(state, parentLog, buildMeta("fit", meta))
 }

@@ -1,5 +1,5 @@
 description = 'New API and implementation of dataforge-storage'

 dependencies {
-compile project(':dataforge-core')
+api project(':dataforge-core')
 }
@@ -54,7 +54,7 @@ interface StorageElement : Named, Metoid, Provider, ContextAware, AutoConnectibl
 /**
  * Full name relative to root storage
  */
-@JvmDefault
 val fullName: Name
 get() = if (parent == null) {
 Name.empty()
@@ -88,7 +88,7 @@ interface Storage : StorageElement {
  * Get storage element (name notation for recursive calls). Null if not present
  */
 @Provides(STORAGE_TARGET)
-@JvmDefault
 operator fun get(name: String): StorageElement? {
 return get(Name.of(name))
 }
@@ -96,7 +96,7 @@ interface Storage : StorageElement {
 /**
  * Resolve storage element by its fully qualified name
  */
-@JvmDefault
 operator fun get(name: Name): StorageElement? {
 return if (name.length == 1) {
 children.find { it.name == name.unescaped }
@@ -105,13 +105,13 @@ interface Storage : StorageElement {
 }
 }

-@JvmDefault
 override fun getDefaultTarget(): String = STORAGE_TARGET

 /**
  * By default closes all children on close. If overridden, children should be closed before parent.
  */
-@JvmDefault
 override fun close() {
 children.forEach { it.close() }
 }
@@ -201,7 +201,7 @@ interface StorageElementType : Named {
  */
 fun create(context: Context, meta: Meta, parent: StorageElement? = null): StorageElement

-@JvmDefault
 fun create(parent: StorageElement, meta: Meta): StorageElement {
 return create(parent.context, meta, parent)
 }

@@ -202,7 +202,7 @@ class AppendableFileTableLoader(
 private val textTableReader: (ByteBuffer, TableFormat) -> Values = { buffer, format ->
 val line = buildString {
 do {
-val char = buffer.get().toChar()
+val char = buffer.get().toInt().toChar()
 append(char)
 } while (char != '\n')
 }
@@ -213,7 +213,7 @@ private val textTableReader: (ByteBuffer, TableFormat) -> Values = { buffer, for
 private val binaryTableReader: (ByteBuffer, TableFormat) -> Values = { buffer, format ->
 ValueMap(format.names.associate { it to buffer.getValue() }.toMap()).also {
 do {
-val char = buffer.get().toChar()
+val char = buffer.get().toInt().toChar()
 } while (char != '\n')
 }
 }
@@ -238,7 +238,7 @@ class TableLoaderType : FileStorageElementType {
 format.names.map { values[it] }.forEach {
 stream.writeValue(it)
 }
-stream.writeByte('\n'.toInt())
+stream.writeByte('\n'.code)
 stream.flush()
 ByteBuffer.wrap(baos.toByteArray())
 }
gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.0.2-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.3-bin.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists

@@ -6,8 +6,10 @@ compileGroovy.dependsOn(compileKotlin)
 compileGroovy.classpath += files(compileKotlin.destinationDir)

 dependencies {
-compile project(":dataforge-core")
+api project(":dataforge-core")
-compile 'org.codehaus.groovy:groovy-all:2.5+'
+// https://mvnrepository.com/artifact/org.codehaus.groovy/groovy-all
+api 'org.codehaus.groovy:groovy-all:3.0.9'

-testCompile project(":dataforge-gui")
+testImplementation project(":dataforge-gui")
 }
@@ -1,7 +1,7 @@
 plugins {
 id 'groovy'
 id 'application'
-id "com.github.johnrengelman.shadow" version "7.0.0"
+id "com.github.johnrengelman.shadow"
 }


@@ -16,12 +16,12 @@ plugins {
 description = 'The grind plugin for dataforge framework'

 dependencies {
-compile project(':grind')
+api project(':grind')
-compile project(':dataforge-plots:plots-jfc')
+api project(':dataforge-plots:plots-jfc')
-compile project(':dataforge-gui')
+api project(':dataforge-gui')
-compile group: 'org.jline', name: 'jline', version: '3.5.1'
+api group: 'org.jline', name: 'jline', version: '3.5.1'
-// compile group: 'net.java.dev.jna', name: 'jna', version: '4.4.0'
+// api group: 'net.java.dev.jna', name: 'jna', version: '4.4.0'
-compile group: 'org.fusesource.jansi', name: 'jansi', version: '1.16'
+api group: 'org.fusesource.jansi', name: 'jansi', version: '1.16'
 }

 task shell(dependsOn: classes, type: JavaExec) {

@@ -1,6 +1,6 @@
 apply plugin: 'groovy'

 dependencies {
-compile project(':grind')
+api project(':grind')
-compile project(':dataforge-maths')
+api project(':dataforge-maths')
 }

@@ -7,6 +7,7 @@ import hep.dataforge.meta.Meta
 import hep.dataforge.workspace.Workspace
 import hep.dataforge.workspace.tasks.Task
 import org.jetbrains.annotations.NotNull
+import org.slf4j.Logger

 /**
  * Workspace wrapper that implements methodMissing for tasks and propertyMissing for targets
@@ -20,8 +21,6 @@ class GrindWorkspace implements Workspace {
 this.workspace = workspace
 }

-
-
 @Override
 DataNode<?> getData() {
 return workspace.getData()
@@ -57,6 +56,11 @@ class GrindWorkspace implements Workspace {
 return workspace.context
 }

+@Override
+Logger getLogger() {
+    return workspace.context.getLogger()
+}

 def methodMissing(String name, Object args) {
 String str = args.getClass().isArray() ? ((Object[]) args).join(" ") : args.toString()
 return runTask(name, str)

@@ -55,7 +55,7 @@ class WorkspaceSpec {
  * @return
  */
 @MethodDescription("Load data via closure")
-void data(@DelegatesTo(value = DataNodeSpec, strategy = Closure.DELEGATE_FIRST) Closure cl) {
+void data(@DelegatesTo(value = DataNodeSpec, strategy = Closure.DELEGATE_ONLY) Closure cl) {
 builder.data("", DataNodeSpec.buildNode(builder.context, cl))
 }


@@ -1,33 +1,10 @@
-allprojects {
-apply plugin: "kotlin"
-
-// apply plugin: 'org.openjfx.javafxplugin'
-//
-// javafx {
-// modules = [ 'javafx.controls' ]
-// }
-
-compileKotlin {
-kotlinOptions {
-jvmTarget = "1.8"
-javaParameters = true
-}
-}
-
-kotlin {
-experimental {
-coroutines "enable"
-}
-}
-}
-
 dependencies {
-compile project(':dataforge-plots:plots-jfc')
+api project(':dataforge-plots:plots-jfc')
-compile project(':dataforge-control')
+api project(':dataforge-control')
-compile project(':dataforge-gui')
+api project(':dataforge-gui')

 // https://mvnrepository.com/artifact/commons-cli/commons-cli
-compile group: 'commons-cli', name: 'commons-cli', version: '1.4'
+api group: 'commons-cli', name: 'commons-cli', version: '1.4'

 }


@@ -1,6 +1,6 @@
 plugins {
 id "application"
-id 'com.github.johnrengelman.shadow' version '2.0.1'
+id 'com.github.johnrengelman.shadow'
 }


@@ -22,8 +22,8 @@ configurations {

 dependencies {
 //DataForge dependencies
-compile project(':numass-control')
+api project(':numass-control')
-//compile project(':numass-server')
+//api project(':numass-server')

 // optional device classpath
 devices project(':numass-control:cryotemp')

@@ -8,7 +8,7 @@ version = "0.2.0";
 //mainClassName = "inr.numass.readvac.Main"

 dependencies {
-compile project(':numass-control')
+api project(':numass-control')
 }

 application{

@@ -31,7 +31,7 @@ internal fun createChannel(meta: Meta): PKT8Channel {
 val coefs = meta.getValue("coefs").list
 val r0 = meta.getDouble("r0", 1000.0)
 return PKT8Channel(meta) { r ->
-coefs.indices.sumByDouble { coefs[it].double * Math.pow(r0 / r, it.toDouble()) }
+coefs.indices.sumOf { coefs[it].double * Math.pow(r0 / r, it.toDouble()) }
 }
 }
 else -> throw RuntimeException("Unknown transformation type")
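The sumByDouble -> sumOf change in the PKT8Channel hunk above reflects the Kotlin 1.4+ replacement of the type-specific sumBy/sumByDouble extensions with a single overloaded sumOf. A minimal sketch of an equivalent polynomial evaluation (hypothetical coefficients, not the detector calibration data):

fun polynomial(coefs: List<Double>, x: Double): Double =
    // sumOf with a Double-returning selector replaces the deprecated sumByDouble
    coefs.indices.sumOf { i -> coefs[i] * Math.pow(x, i.toDouble()) }

// polynomial(listOf(1.0, 0.5, 0.25), 2.0) == 1.0 + 0.5 * 2.0 + 0.25 * 4.0 == 3.0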
@ -17,6 +17,6 @@
|
|||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
compile project(':numass-control')
|
api project(':numass-control')
|
||||||
compile project(':numass-core')
|
api project(':numass-core')
|
||||||
}
|
}
|
@ -31,7 +31,6 @@ import kotlinx.coroutines.*
|
|||||||
import kotlinx.coroutines.channels.Channel
|
import kotlinx.coroutines.channels.Channel
|
||||||
import java.io.DataInputStream
|
import java.io.DataInputStream
|
||||||
import java.io.OutputStream
|
import java.io.OutputStream
|
||||||
import java.lang.Math.pow
|
|
||||||
import java.net.Socket
|
import java.net.Socket
|
||||||
import java.nio.ByteBuffer
|
import java.nio.ByteBuffer
|
||||||
import java.nio.ByteOrder
|
import java.nio.ByteOrder
|
||||||
@ -39,8 +38,8 @@ import java.time.Duration
|
|||||||
import java.time.Instant
|
import java.time.Instant
|
||||||
import java.util.*
|
import java.util.*
|
||||||
import java.util.concurrent.atomic.AtomicLong
|
import java.util.concurrent.atomic.AtomicLong
|
||||||
import kotlin.collections.HashMap
|
|
||||||
import kotlin.math.ceil
|
import kotlin.math.ceil
|
||||||
|
import kotlin.math.pow
|
||||||
|
|
||||||
internal val Byte.positive
|
internal val Byte.positive
|
||||||
get() = toInt() and 0xFF
|
get() = toInt() and 0xFF
|
||||||
@ -254,7 +253,7 @@ class DanteClient(override val context: Context,val ip: String, chainLength: In
|
|||||||
}
|
}
|
||||||
|
|
||||||
return sequence {
|
return sequence {
|
||||||
val intBuffer = ByteBuffer.wrap(message!!.payload).asIntBuffer()
|
val intBuffer = ByteBuffer.wrap(message.payload).asIntBuffer()
|
||||||
while (intBuffer.hasRemaining()) {
|
while (intBuffer.hasRemaining()) {
|
||||||
yield(intBuffer.get())
|
yield(intBuffer.get())
|
||||||
}
|
}
|
||||||
@ -298,7 +297,7 @@ class DanteClient(override val context: Context,val ip: String, chainLength: In
|
|||||||
assert(en_fil_flattop in 1..15)
|
assert(en_fil_flattop in 1..15)
|
||||||
assert(fast_peak_time in 1..31)
|
assert(fast_peak_time in 1..31)
|
||||||
assert(fast_flattop in 1..31)
|
assert(fast_flattop in 1..31)
|
||||||
assert(recovery_time in (0.0..pow(2.0, 24.0) - 1))
|
assert(recovery_time.toDouble() in (0.0..2.0.pow(24.0) - 1))
|
||||||
assert(zero_peak_rate in 0..500)
|
assert(zero_peak_rate in 0..500)
|
||||||
assert(inverted_input in listOf(0, 1))
|
assert(inverted_input in listOf(0, 1))
|
||||||
assert((en_fil_peak_time + en_fil_flattop) * 2 < 1023)
|
assert((en_fil_peak_time + en_fil_flattop) * 2 < 1023)
|
||||||
|
@ -20,5 +20,5 @@ if (!hasProperty('mainClass')) {
|
|||||||
mainClassName = mainClass
|
mainClassName = mainClass
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
compile project(':numass-control')
|
api project(':numass-control')
|
||||||
}
|
}
|
@ -8,7 +8,7 @@ if (!hasProperty('mainClass')) {
|
|||||||
mainClassName = mainClass
|
mainClassName = mainClass
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
compile project(':numass-control')
|
api project(':numass-control')
|
||||||
}
|
}
|
||||||
|
|
||||||
task talkToServer(type: JavaExec) {
|
task talkToServer(type: JavaExec) {
|
||||||
|
@ -9,5 +9,5 @@ mainClassName = mainClass
|
|||||||
|
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
compile project(':numass-control')
|
api project(':numass-control')
|
||||||
}
|
}
|
@ -95,7 +95,7 @@ fun Indicator.bind(connection: DeviceDisplayFX<*>, state: String, transform: ((V
|
|||||||
fun EventTarget.deviceStateIndicator(connection: DeviceDisplayFX<*>, state: String, showName: Boolean = true, transform: ((Value) -> Paint)? = null) {
|
fun EventTarget.deviceStateIndicator(connection: DeviceDisplayFX<*>, state: String, showName: Boolean = true, transform: ((Value) -> Paint)? = null) {
|
||||||
if (connection.device.stateNames.contains(state)) {
|
if (connection.device.stateNames.contains(state)) {
|
||||||
if (showName) {
|
if (showName) {
|
||||||
text("${state.toUpperCase()}: ")
|
text("${state.uppercase()}: ")
|
||||||
}
|
}
|
||||||
indicator {
|
indicator {
|
||||||
bind(connection, state, transform);
|
bind(connection, state, transform);
|
||||||
|
@ -3,7 +3,7 @@ apply plugin: 'application'
|
|||||||
version = "0.6.0"
|
version = "0.6.0"
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
compile project(':numass-control')
|
api project(':numass-control')
|
||||||
}
|
}
|
||||||
|
|
||||||
application{
|
application{
|
||||||
|
@ -23,7 +23,7 @@ class ThyroContVacDevice(context: Context, meta: Meta) : PortSensor(context, met
|
|||||||
return GenericPortController(context, port) { it.endsWith("\r") }
|
return GenericPortController(context, port) { it.endsWith("\r") }
|
||||||
}
|
}
|
||||||
|
|
||||||
private fun String.checksum(): Char = (sumBy { it.toInt() } % 64 + 64).toChar()
|
private fun String.checksum(): Char = (sumBy { it.code } % 64 + 64).toChar()
|
||||||
|
|
||||||
private fun wrap(str: String): String = buildString {
|
private fun wrap(str: String): String = buildString {
|
||||||
append(str)
|
append(str)
|
||||||
|
@ -2,11 +2,11 @@ description = "A bse package with minimal dependencies for numass"
|
|||||||
|
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
compile project(":numass-core:numass-data-api")
|
api project(":numass-core:numass-data-api")
|
||||||
compile project(":numass-core:numass-data-proto")
|
api project(":numass-core:numass-data-proto")
|
||||||
compile project(":dataforge-storage")
|
api project(":dataforge-storage")
|
||||||
compile project(":dataforge-core:dataforge-json")
|
api project(":dataforge-core:dataforge-json")
|
||||||
|
|
||||||
// https://mvnrepository.com/artifact/com.github.robtimus/sftp-fs
|
// https://mvnrepository.com/artifact/com.github.robtimus/sftp-fs
|
||||||
compile group: 'com.github.robtimus', name: 'sftp-fs', version: '1.1.3'
|
api group: 'com.github.robtimus', name: 'sftp-fs', version: '1.1.3'
|
||||||
}
|
}
|
@ -71,7 +71,6 @@ interface NumassSet : Named, Metoid, Iterable<NumassPoint>, Provider {
|
|||||||
return optPoint(java.lang.Double.parseDouble(voltage))
|
return optPoint(java.lang.Double.parseDouble(voltage))
|
||||||
}
|
}
|
||||||
|
|
||||||
@JvmDefault
|
|
||||||
override fun getDefaultTarget(): String {
|
override fun getDefaultTarget(): String {
|
||||||
return NUMASS_POINT_PROVIDER_KEY
|
return NUMASS_POINT_PROVIDER_KEY
|
||||||
}
|
}
|
||||||
|
@ -14,7 +14,7 @@ repositories {
|
|||||||
}
|
}
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
implementation("com.google.protobuf:protobuf-java:3.17.1")
|
api("com.google.protobuf:protobuf-java:3.17.1")
|
||||||
api(project(":numass-core:numass-data-api"))
|
api(project(":numass-core:numass-data-api"))
|
||||||
api(project(":dataforge-storage"))
|
api(project(":dataforge-storage"))
|
||||||
}
|
}
|
||||||
|
@ -25,7 +25,6 @@ import java.nio.ByteBuffer
|
|||||||
import java.nio.channels.FileChannel
|
import java.nio.channels.FileChannel
|
||||||
import java.nio.file.Path
|
import java.nio.file.Path
|
||||||
import java.nio.file.StandardOpenOption
|
import java.nio.file.StandardOpenOption
|
||||||
import java.util.*
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* An envelope type for legacy numass tags. Reads legacy tag and writes DF02 tags
|
* An envelope type for legacy numass tags. Reads legacy tag and writes DF02 tags
|
||||||
@ -106,8 +105,9 @@ class NumassEnvelopeType : EnvelopeType {
|
|||||||
companion object {
|
companion object {
|
||||||
val INSTANCE = NumassEnvelopeType()
|
val INSTANCE = NumassEnvelopeType()
|
||||||
|
|
||||||
val LEGACY_START_SEQUENCE = byteArrayOf('#'.toByte(), '!'.toByte())
|
val LEGACY_START_SEQUENCE = byteArrayOf('#'.code.toByte(), '!'.code.toByte())
|
||||||
val LEGACY_END_SEQUENCE = byteArrayOf('!'.toByte(), '#'.toByte(), '\r'.toByte(), '\n'.toByte())
|
val LEGACY_END_SEQUENCE =
|
||||||
|
byteArrayOf('!'.code.toByte(), '#'.code.toByte(), '\r'.code.toByte(), '\n'.code.toByte())
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Replacement for standard type infer to include legacy type
|
* Replacement for standard type infer to include legacy type
|
||||||
@ -121,10 +121,10 @@ class NumassEnvelopeType : EnvelopeType {
|
|||||||
val buffer = it.map(FileChannel.MapMode.READ_ONLY, 0, 6)
|
val buffer = it.map(FileChannel.MapMode.READ_ONLY, 0, 6)
|
||||||
when {
|
when {
|
||||||
//TODO use templates from appropriate types
|
//TODO use templates from appropriate types
|
||||||
buffer.get(0) == '#'.toByte() && buffer.get(1) == '!'.toByte() -> INSTANCE
|
buffer.get(0) == '#'.code.toByte() && buffer.get(1) == '!'.code.toByte() -> INSTANCE
|
||||||
buffer.get(0) == '#'.toByte() && buffer.get(1) == '!'.toByte() &&
|
buffer.get(0) == '#'.code.toByte() && buffer.get(1) == '!'.code.toByte() &&
|
||||||
buffer.get(4) == 'T'.toByte() && buffer.get(5) == 'L'.toByte() -> TaglessEnvelopeType.INSTANCE
|
buffer.get(4) == 'T'.code.toByte() && buffer.get(5) == 'L'.code.toByte() -> TaglessEnvelopeType.INSTANCE
|
||||||
buffer.get(0) == '#'.toByte() && buffer.get(1) == '~'.toByte() -> DefaultEnvelopeType.INSTANCE
|
buffer.get(0) == '#'.code.toByte() && buffer.get(1) == '~'.code.toByte() -> DefaultEnvelopeType.INSTANCE
|
||||||
else -> null
|
else -> null
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -69,16 +69,16 @@ class ChernovProcessor(
 
            val timeInTicks = (pos + buffer.position() - 1)
 
-           val event = OrphanNumassEvent(amp.toShort(), (timeInTicks * tickSize).toLong())
+           val event = OrphanNumassEvent(amp.toInt().toShort(), (timeInTicks * tickSize).toLong())
            yield(event)
 
            //subtracting event from buffer copy
-           for (x in (signalRange.first + timeInTicks.toInt())..(signalRange.endInclusive + timeInTicks.toInt())) {
+           for (x in (signalRange.first + timeInTicks.toInt())..(signalRange.last + timeInTicks.toInt())) {
                //TODO check all roundings
                if (x >= 0 && x < buffer.limit()) {
                    val oldValue = buffer.get(x)
                    val newValue = oldValue - amp * signal(x - timeInTicks) / signalMax
-                   buffer.put(x, newValue.toShort())
+                   buffer.put(x, newValue.toInt().toShort())
                }
            }
            println(buffer.array().joinToString())
@@ -12,7 +12,7 @@ class ChernovProcessorTest {
    val events = mapOf<Double, Double>(10.0 to 1.0, 16.0 to 0.5)
 
    val buffer = ShortArray(40) { i ->
-       events.entries.sumByDouble { (pos, amp) -> amp * gaussian.value(pos - i.toDouble()) }.toInt().toShort()
+       events.entries.sumOf { (pos, amp) -> amp * gaussian.value(pos - i.toDouble()) }.toInt().toShort()
    }
 
    @Test
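Note: two API changes drive the hunks above. Converting a floating-point value straight to Short (amp.toShort(), newValue.toShort()) is deprecated, and the recommended spelling is an explicit toInt().toShort(); and on an IntRange the last property is the conventional name for the inclusive upper bound (equivalent to endInclusive). A small sketch, assuming amp is a Double and signalRange an IntRange as in the processor above:

    fun main() {
        val amp = 123.7
        // Double.toShort() is deprecated; narrow through Int explicitly.
        val channel: Short = amp.toInt().toShort() // 123

        val signalRange = -5..10
        // IntRange.last is the inclusive upper bound, same value as endInclusive.
        val shifted = (signalRange.first + 3)..(signalRange.last + 3)
        println("$channel $shifted") // 123 -2..13
    }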
@@ -22,7 +22,6 @@ import hep.dataforge.tables.ListTable
 import hep.dataforge.tables.Table
 import hep.dataforge.tables.TableFormat
 import hep.dataforge.tables.TableFormatBuilder
-import hep.dataforge.toList
 import inr.numass.data.api.NumassBlock
 import inr.numass.data.api.NumassEvent
 import inr.numass.data.api.NumassPoint.Companion.HV_KEY
@@ -172,8 +172,8 @@ fun Sequence<NumassEvent>.getAmplitudeSpectrum(
    }
 
 
-   val minChannel = config.getInt("window.lo") { spectrum.keys.min() ?: 0 }
-   val maxChannel = config.getInt("window.up") { spectrum.keys.max() ?: 4096 }
+   val minChannel = config.getInt("window.lo") { spectrum.keys.minOrNull() ?: 0 }
+   val maxChannel = config.getInt("window.up") { spectrum.keys.maxOrNull() ?: 4096 }
 
    return ListTable.Builder(format)
        .rows(IntStream.range(minChannel, maxChannel)
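Note: min()/max() on collections, like minBy/maxBy touched elsewhere in this commit, were deprecated in Kotlin 1.4 because they return null on empty input despite their non-null-sounding names; minOrNull()/maxOrNull() make that explicit, and the existing ?: fallbacks for the window bounds carry over unchanged. A short sketch, assuming a channel-to-count map similar to the spectrum here:

    fun main() {
        val spectrum: Map<Int, Long> = mapOf(12 to 4L, 57 to 9L, 203 to 1L)
        // minOrNull()/maxOrNull() return null for an empty collection,
        // so the default window bounds are applied explicitly.
        val lo = spectrum.keys.minOrNull() ?: 0
        val up = spectrum.keys.maxOrNull() ?: 4096
        println("window: $lo..$up") // window: 12..203
    }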
@@ -30,18 +30,8 @@ import inr.numass.data.analyzers.NumassAnalyzer.Companion.LENGTH_KEY
 import inr.numass.data.analyzers.TimeAnalyzer.AveragingMethod.*
 import inr.numass.data.api.*
 import inr.numass.data.api.NumassPoint.Companion.HV_KEY
-import java.util.*
 import java.util.concurrent.atomic.AtomicLong
-import kotlin.collections.List
-import kotlin.collections.asSequence
-import kotlin.collections.count
-import kotlin.collections.first
-import kotlin.collections.map
 import kotlin.collections.set
-import kotlin.collections.sortBy
-import kotlin.collections.sumBy
-import kotlin.collections.sumByDouble
-import kotlin.collections.toMutableList
 import kotlin.math.*
 import kotlin.streams.asSequence
 
@@ -163,20 +153,20 @@ open class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(p
                .build()
        }
 
-       val totalTime = sumByDouble { it.getDouble(LENGTH_KEY) }
+       val totalTime = sumOf { it.getDouble(LENGTH_KEY) }
 
        val (countRate, countRateDispersion) = when (method) {
            ARITHMETIC -> Pair(
-               sumByDouble { it.getDouble(COUNT_RATE_KEY) } / size,
-               sumByDouble { it.getDouble(COUNT_RATE_ERROR_KEY).pow(2.0) } / size / size
+               sumOf { it.getDouble(COUNT_RATE_KEY) } / size,
+               sumOf { it.getDouble(COUNT_RATE_ERROR_KEY).pow(2.0) } / size / size
            )
            WEIGHTED -> Pair(
-               sumByDouble { it.getDouble(COUNT_RATE_KEY) * it.getDouble(LENGTH_KEY) } / totalTime,
-               sumByDouble { (it.getDouble(COUNT_RATE_ERROR_KEY) * it.getDouble(LENGTH_KEY) / totalTime).pow(2.0) }
+               sumOf { it.getDouble(COUNT_RATE_KEY) * it.getDouble(LENGTH_KEY) } / totalTime,
+               sumOf { (it.getDouble(COUNT_RATE_ERROR_KEY) * it.getDouble(LENGTH_KEY) / totalTime).pow(2.0) }
            )
            GEOMETRIC -> {
-               val mean = exp(sumByDouble { ln(it.getDouble(COUNT_RATE_KEY)) } / size)
-               val variance = (mean / size).pow(2.0) * sumByDouble {
+               val mean = exp(sumOf { ln(it.getDouble(COUNT_RATE_KEY)) } / size)
+               val variance = (mean / size).pow(2.0) * sumOf {
                    (it.getDouble(COUNT_RATE_ERROR_KEY) / it.getDouble(
                        COUNT_RATE_KEY
                    )).pow(2.0)
@@ -187,7 +177,7 @@ open class TimeAnalyzer(processor: SignalProcessor? = null) : AbstractAnalyzer(p
 
        return ValueMap.Builder(first())
            .putValue(LENGTH_KEY, totalTime)
-           .putValue(COUNT_KEY, sumBy { it.getInt(COUNT_KEY) })
+           .putValue(COUNT_KEY, sumOf { it.getInt(COUNT_KEY) })
            .putValue(COUNT_RATE_KEY, countRate)
            .putValue(COUNT_RATE_ERROR_KEY, sqrt(countRateDispersion))
            .build()
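Note: sumBy and sumByDouble were deprecated in Kotlin 1.4 in favour of a single sumOf, which infers the accumulator type (Int, Double, ...) from the selector's return type, so the averaging math above is unchanged. A short sketch with a hypothetical Row type standing in for the analyzer's table rows:

    data class Row(val length: Double, val count: Int)

    fun main() {
        val rows = listOf(Row(10.0, 120), Row(12.5, 150))
        // sumOf replaces sumByDouble when the selector returns Double...
        val totalTime: Double = rows.sumOf { it.length }
        // ...and sumBy when it returns Int.
        val totalCount: Int = rows.sumOf { it.count }
        println("$totalTime s, $totalCount counts") // 22.5 s, 270 counts
    }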
@@ -5,46 +5,26 @@ plugins {
 
 apply plugin: 'kotlin'
 
-//apply plugin: 'org.openjfx.javafxplugin'
-//
-//javafx {
-//    modules = [ 'javafx.controls' ]
-//}
 
-//if (!hasProperty('mainClass')) {
-//    ext.mainClass = 'inr.numass.LaunchGrindShell'
-//}
 mainClassName = 'inr.numass.LaunchGrindShell'
 
 description = "Main numass project"
 
-compileKotlin {
-    kotlinOptions {
-        jvmTarget = "1.8"
-    }
-}
-compileTestKotlin {
-    kotlinOptions {
-        jvmTarget = "1.8"
-    }
-}
 
 compileGroovy.dependsOn(compileKotlin)
 compileGroovy.classpath += files(compileKotlin.destinationDir)
 
 dependencies {
-    compile group: 'commons-cli', name: 'commons-cli', version: '1.+'
-    compile group: 'commons-io', name: 'commons-io', version: '2.+'
-    compile project(':numass-core')
-    compile project(':numass-core:numass-signal-processing')
+    api group: 'commons-cli', name: 'commons-cli', version: '1.+'
+    api group: 'commons-io', name: 'commons-io', version: '2.+'
+    api project(':numass-core')
+    api project(':numass-core:numass-signal-processing')
     compileOnly "org.jetbrains.kotlin:kotlin-main-kts:1.3.21"
-    compile project(':dataforge-stat:dataforge-minuit')
-    compile project(':grind:grind-terminal')
-    compile project(":dataforge-gui")
-    //compile "hep.dataforge:dataforge-html"
+    api project(':dataforge-stat:dataforge-minuit')
+    api project(':grind:grind-terminal')
+    api project(":dataforge-gui")
+    //api "hep.dataforge:dataforge-html"
 
     // https://mvnrepository.com/artifact/org.ehcache/ehcache
-    //compile group: 'org.ehcache', name: 'ehcache', version: '3.4.0'
+    //api group: 'org.ehcache', name: 'ehcache', version: '3.4.0'
 
 }
 
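Note: the compile dependency configuration is gone in current Gradle releases; api is the closest replacement and, as before, exposes these modules' dependencies to their consumers (implementation would hide them instead). The per-module jvmTarget blocks are dropped because the target is now set once in the root build script. A sketch of equivalent declarations in the Gradle Kotlin DSL; the module paths are the ones from this diff, and the Kotlin/java-library plugin is assumed to be applied:

    // build.gradle.kts (sketch)
    dependencies {
        // api: visible on consumers' compile classpath, like the old compile.
        api("commons-cli:commons-cli:1.+")
        api(project(":numass-core"))
        // implementation: an internal dependency, hidden from consumers.
        // (This commit keeps everything on api.)
        implementation("commons-io:commons-io:2.+")
    }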
@@ -1,5 +1,6 @@
 package inr.numass
 
+import groovy.cli.picocli.CliBuilder
 import hep.dataforge.context.Context
 import hep.dataforge.context.Global
 import hep.dataforge.fx.output.FXOutputManager
@@ -8,7 +9,6 @@ import hep.dataforge.grind.workspace.GrindWorkspace
 import hep.dataforge.plots.jfreechart.JFreeChartPlugin
 import hep.dataforge.workspace.FileBasedWorkspace
 import hep.dataforge.workspace.Workspace
-import groovy.cli.commons.CliBuilder
 
 /**
  * Created by darksnake on 29-Aug-16.
@@ -31,7 +31,7 @@ suspend fun Sequence<OrphanNumassEvent>.generateBlock(start: Instant, length: Lo
 
 private class MergingState(private val chains: List<Chain<OrphanNumassEvent>>) {
    suspend fun poll(): OrphanNumassEvent {
-       val next = chains.minBy { it.value.timeOffset } ?: chains.first()
+       val next = chains.minByOrNull { it.value.timeOffset } ?: chains.first()
        val res = next.value
        next.next()
        return res
@@ -64,7 +64,8 @@ fun Chain<OrphanNumassEvent>.withDeadTime(deadTime: (OrphanNumassEvent) -> Long)
 
 object NumassGenerator {
 
-    val defaultAmplitudeGenerator: RandomGenerator.(OrphanNumassEvent?, Long) -> Short = { _, _ -> ((nextDouble() + 2.0) * 100).toShort() }
+    val defaultAmplitudeGenerator: RandomGenerator.(OrphanNumassEvent?, Long) -> Short =
+        { _, _ -> ((nextDouble() + 2.0) * 100).toInt().toShort() }
 
    /**
     * Generate an event chain with fixed count rate
@@ -133,6 +134,6 @@ object NumassGenerator {
        }
        val distribution = EnumeratedRealDistribution(channels, values)
 
-       return generateEvents(cr, rnd) { _, _ -> distribution.sample().toShort() }
+       return generateEvents(cr, rnd) { _, _ -> distribution.sample().toInt().toShort() }
    }
 }
@@ -30,7 +30,6 @@ import org.apache.commons.math3.random.RandomGenerator
 import java.lang.Math.max
 import java.time.Duration
 import java.time.Instant
-import java.util.*
 import java.util.concurrent.atomic.AtomicInteger
 import java.util.concurrent.atomic.AtomicReference
 
@@ -91,7 +90,7 @@ class PileUpSimulator {
            throw Error()
        }
 
-       return (prevChanel + coef * nextChanel).toShort()
+       return (prevChanel + coef * nextChanel).toInt().toShort()
    }
 
    /**
@@ -281,7 +281,7 @@ object LossCalculator {
            0.0
        } else {
            val probs = getLossProbabilities(x)
-           (1 until probs.size).sumByDouble { i ->
+           (1 until probs.size).sumOf { i ->
                probs[i] * getLossValue(i, Ei, Ef)
            }
        }
@@ -116,7 +116,7 @@ class SterileNeutrinoSpectrum @JvmOverloads constructor(
        return if (fss == null) {
            sourceFunction.value(0.0, eIn, set)
        } else {
-           (0 until fss.size()).sumByDouble { fss.getP(it) * sourceFunction.value(fss.getE(it), eIn, set) }
+           (0 until fss.size()).sumOf { fss.getP(it) * sourceFunction.value(fss.getE(it), eIn, set) }
        }
    }
 
@@ -84,20 +84,22 @@ fun main() {
        val rawSpectrum = analyzer.getAmplitudeSpectrum(point, meta).withBinning(20)
        group.add(DataPlot.plot("raw", rawSpectrum, AMPLITUDE_ADAPTER))
 
-       val rawNorm = rawSpectrum.getColumn(NumassAnalyzer.COUNT_RATE_KEY).maxBy { it.double }!!.double
+       val rawNorm = rawSpectrum.getColumn(NumassAnalyzer.COUNT_RATE_KEY).maxByOrNull { it.double }!!.double
        val normalizedSpectrum = ColumnTable.copy(rawSpectrum)
            .replaceColumn(NumassAnalyzer.COUNT_RATE_KEY) { it.getDouble(NumassAnalyzer.COUNT_RATE_KEY) / rawNorm }
        normalizedFrame.add(DataPlot.plot("${setName}_raw", normalizedSpectrum, AMPLITUDE_ADAPTER))
 
 
-       println("[$setName] Raw spectrum integral: ${rawSpectrum.getColumn(NumassAnalyzer.COUNT_RATE_KEY).sumByDouble { it.double }}")
+       println("[$setName] Raw spectrum integral: ${
+           rawSpectrum.getColumn(NumassAnalyzer.COUNT_RATE_KEY).sumOf { it.double }
+       }")
 
        group.add(DataPlot.plot("filtered", analyzer.getAmplitudeSpectrum(point, metaForChain).withBinning(20), AMPLITUDE_ADAPTER))
 
        val filteredSpectrum = analyzer.getAmplitudeSpectrum(point, metaForChainInverted).withBinning(20)
        group.add(DataPlot.plot("invertedFilter", filteredSpectrum, AMPLITUDE_ADAPTER))
 
-       val filteredNorm = filteredSpectrum.getColumn(NumassAnalyzer.COUNT_RATE_KEY).maxBy { it.double }!!.double
+       val filteredNorm = filteredSpectrum.getColumn(NumassAnalyzer.COUNT_RATE_KEY).maxByOrNull { it.double }!!.double
        val normalizedFilteredSpectrum = ColumnTable.copy(filteredSpectrum)
            .replaceColumn(NumassAnalyzer.COUNT_RATE_KEY) { it.getDouble(NumassAnalyzer.COUNT_RATE_KEY) / filteredNorm }
 
@@ -112,6 +114,8 @@ fun main() {
 
        group.add(DataPlot.plot("pileup", pileupSpectrum, AMPLITUDE_ADAPTER))
 
-       println("[$setName] Pileup spectrum integral: ${pileupSpectrum.getColumn(NumassAnalyzer.COUNT_RATE_KEY).sumByDouble { it.double }}")
+       println("[$setName] Pileup spectrum integral: ${
+           pileupSpectrum.getColumn(NumassAnalyzer.COUNT_RATE_KEY).sumOf { it.double }
+       }")
    }
 }
@@ -86,20 +86,22 @@ fun main() {
        val rawSpectrum = analyzer.getAmplitudeSpectrum(point, meta).withBinning(20)
        group.add(DataPlot.plot("raw", rawSpectrum, AMPLITUDE_ADAPTER))
 
-       val rawNorm = rawSpectrum.getColumn(NumassAnalyzer.COUNT_RATE_KEY).maxBy { it.double }!!.double
+       val rawNorm = rawSpectrum.getColumn(NumassAnalyzer.COUNT_RATE_KEY).maxByOrNull { it.double }!!.double
        val normalizedSpectrum = ColumnTable.copy(rawSpectrum)
            .replaceColumn(NumassAnalyzer.COUNT_RATE_KEY) { it.getDouble(NumassAnalyzer.COUNT_RATE_KEY) / rawNorm }
        normalizedFrame.add(DataPlot.plot("${setName}_raw", normalizedSpectrum, AMPLITUDE_ADAPTER))
 
 
-       println("[$setName] Raw spectrum integral: ${rawSpectrum.getColumn(NumassAnalyzer.COUNT_RATE_KEY).sumByDouble { it.double }}")
+       println("[$setName] Raw spectrum integral: ${
+           rawSpectrum.getColumn(NumassAnalyzer.COUNT_RATE_KEY).sumOf { it.double }
+       }")
 
        group.add(DataPlot.plot("filtered", analyzer.getAmplitudeSpectrum(point, metaForChain).withBinning(20), AMPLITUDE_ADAPTER))
 
        val filteredSpectrum = analyzer.getAmplitudeSpectrum(point, metaForChainInverted).withBinning(20)
        group.add(DataPlot.plot("invertedFilter", filteredSpectrum, AMPLITUDE_ADAPTER))
 
-       val filteredNorm = filteredSpectrum.getColumn(NumassAnalyzer.COUNT_RATE_KEY).maxBy { it.double }!!.double
+       val filteredNorm = filteredSpectrum.getColumn(NumassAnalyzer.COUNT_RATE_KEY).maxByOrNull { it.double }!!.double
        val normalizedFilteredSpectrum = ColumnTable.copy(filteredSpectrum)
            .replaceColumn(NumassAnalyzer.COUNT_RATE_KEY) { it.getDouble(NumassAnalyzer.COUNT_RATE_KEY) / filteredNorm }
 
@@ -114,6 +116,8 @@ fun main() {
 
        group.add(DataPlot.plot("pileup", pileupSpectrum, AMPLITUDE_ADAPTER))
 
-       println("[$setName] Pileup spectrum integral: ${pileupSpectrum.getColumn(NumassAnalyzer.COUNT_RATE_KEY).sumByDouble { it.double }}")
+       println("[$setName] Pileup spectrum integral: ${
+           pileupSpectrum.getColumn(NumassAnalyzer.COUNT_RATE_KEY).sumOf { it.double }
+       }")
    }
 }
Some files were not shown because too many files have changed in this diff.