Tables basics

A lot of refactoring
This commit is contained in:
Alexander Nozik 2020-01-29 21:34:51 +03:00
parent fe6760eee6
commit f906fbdb0a
47 changed files with 664 additions and 259 deletions

View File

@ -6,7 +6,7 @@ plugins {
id("scientifik.publish") version toolsVersion apply false id("scientifik.publish") version toolsVersion apply false
} }
val dataforgeVersion by extra("0.1.5-dev-7") val dataforgeVersion by extra("0.1.5-dev-8")
val bintrayRepo by extra("dataforge") val bintrayRepo by extra("dataforge")
val githubProject by extra("dataforge-core") val githubProject by extra("dataforge-core")

View File

@ -0,0 +1,130 @@
/*
* Copyright 2018 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hep.dataforge.descriptors
import hep.dataforge.meta.DFExperimental
import hep.dataforge.values.ValueType
import kotlin.reflect.KClass
@MustBeDocumented
annotation class Attribute(
val key: String,
val value: String
)
@MustBeDocumented
annotation class Attributes(
val attrs: Array<Attribute>
)
@MustBeDocumented
annotation class ItemDef(
val info: String = "",
val multiple: Boolean = false,
val required: Boolean = false
)
@Target(AnnotationTarget.PROPERTY)
@MustBeDocumented
annotation class ValueDef(
val type: Array<ValueType> = [ValueType.STRING],
val def: String = "",
val allowed: Array<String> = [],
val enumeration: KClass<*> = Any::class
)
///**
// * Description text for meta property, node or whole object
// */
//@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY)
//@Retention(AnnotationRetention.RUNTIME)
//@MustBeDocumented
//annotation class Description(val value: String)
//
///**
// * Annotation for value property which states that lists are expected
// */
//@Target(AnnotationTarget.PROPERTY)
//@Retention(AnnotationRetention.RUNTIME)
//@MustBeDocumented
//annotation class Multiple
//
///**
// * Descriptor target
// * The DataForge path to the resource containing the description. Following targets are supported:
// * 1. resource
// * 1. file
// * 1. class
// * 1. method
// * 1. property
// *
// *
// * Does not work if [type] is provided
// */
//@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY, AnnotationTarget.VALUE_PARAMETER)
//@Retention(AnnotationRetention.RUNTIME)
//@MustBeDocumented
//annotation class Descriptor(val value: String)
//
//
///**
// * Aggregator class for descriptor nodes
// */
//@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY, AnnotationTarget.FUNCTION, AnnotationTarget.VALUE_PARAMETER)
//@Retention(AnnotationRetention.RUNTIME)
//@MustBeDocumented
//annotation class DescriptorNodes(vararg val nodes: NodeDef)
//
///**
// * Aggregator class for descriptor values
// */
//@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY, AnnotationTarget.FUNCTION, AnnotationTarget.VALUE_PARAMETER)
//@Retention(AnnotationRetention.RUNTIME)
//@MustBeDocumented
//annotation class DescriptorValues(vararg val nodes: ValueDef)
//
///**
// * Alternative name for property descriptor declaration
// */
//@Target(AnnotationTarget.PROPERTY, AnnotationTarget.VALUE_PARAMETER)
//@Retention(AnnotationRetention.RUNTIME)
//@MustBeDocumented
//annotation class DescriptorName(val name: String)
//
//@Target(AnnotationTarget.PROPERTY)
//@Retention(AnnotationRetention.RUNTIME)
//@MustBeDocumented
//annotation class DescriptorValue(val def: ValueDef)
////TODO enter fields directly?
//
//@Target(AnnotationTarget.PROPERTY)
//@Retention(AnnotationRetention.RUNTIME)
//@MustBeDocumented
//annotation class ValueProperty(
// val name: String = "",
// val type: Array<ValueType> = arrayOf(ValueType.STRING),
// val multiple: Boolean = false,
// val def: String = "",
// val enumeration: KClass<*> = Any::class,
// val tags: Array<String> = emptyArray()
//)
//
//
//@Target(AnnotationTarget.PROPERTY)
//@Retention(AnnotationRetention.RUNTIME)
//@MustBeDocumented
//annotation class NodeProperty(val name: String = "")

View File

@ -0,0 +1,65 @@
package hep.dataforge.descriptors
import hep.dataforge.meta.*
import hep.dataforge.meta.descriptors.ItemDescriptor
import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.meta.descriptors.attributes
import hep.dataforge.meta.scheme.ConfigurableDelegate
import hep.dataforge.meta.scheme.Scheme
import hep.dataforge.values.parseValue
import kotlin.reflect.KProperty1
import kotlin.reflect.full.findAnnotation
import kotlin.reflect.full.isSubclassOf
import kotlin.reflect.full.memberProperties
//inline fun <reified T : Scheme> T.buildDescriptor(): NodeDescriptor = NodeDescriptor {
// T::class.apply {
// findAnnotation<ItemDef>()?.let { def ->
// info = def.info
// required = def.required
// multiple = def.multiple
// }
// findAnnotation<Attribute>()?.let { attr ->
// attributes {
// this[attr.key] = attr.value.parseValue()
// }
// }
// findAnnotation<Attributes>()?.attrs?.forEach { attr ->
// attributes {
// this[attr.key] = attr.value.parseValue()
// }
// }
// }
// T::class.memberProperties.forEach { property ->
// val delegate = property.getDelegate(this@buildDescriptor)
//
// val descriptor: ItemDescriptor = when (delegate) {
// is ConfigurableDelegate -> buildPropertyDescriptor(property, delegate)
// is ReadWriteDelegateWrapper<*, *> -> {
// if (delegate.delegate is ConfigurableDelegate) {
// buildPropertyDescriptor(property, delegate.delegate as ConfigurableDelegate)
// } else {
// return@forEach
// }
// }
// else -> return@forEach
// }
// defineItem(property.name, descriptor)
// }
//}
//inline fun <T : Scheme, reified V : Any?> buildPropertyDescriptor(
// property: KProperty1<T, V>,
// delegate: ConfigurableDelegate
//): ItemDescriptor {
// when {
// V::class.isSubclassOf(Scheme::class) -> NodeDescriptor {
// default = delegate.default.node
// }
// V::class.isSubclassOf(Meta::class) -> NodeDescriptor {
// default = delegate.default.node
// }
//
// }
//}

View File

@ -1,6 +1,9 @@
package hep.dataforge.data package hep.dataforge.data
import hep.dataforge.meta.* import hep.dataforge.meta.*
import hep.dataforge.meta.scheme.Scheme
import hep.dataforge.meta.scheme.SchemeSpec
import hep.dataforge.meta.scheme.string
import hep.dataforge.names.toName import hep.dataforge.names.toName

View File

@ -1,12 +1,13 @@
package hep.dataforge.io.yaml package hep.dataforge.io.yaml
import hep.dataforge.context.Context import hep.dataforge.context.Context
import hep.dataforge.descriptors.NodeDescriptor import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.io.MetaFormat import hep.dataforge.io.MetaFormat
import hep.dataforge.io.MetaFormatFactory import hep.dataforge.io.MetaFormatFactory
import hep.dataforge.meta.DFExperimental import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.Meta import hep.dataforge.meta.Meta
import hep.dataforge.meta.toMap import hep.dataforge.meta.toMap
import hep.dataforge.meta.scheme.toMeta
import hep.dataforge.meta.toMeta import hep.dataforge.meta.toMeta
import kotlinx.io.Input import kotlinx.io.Input
import kotlinx.io.Output import kotlinx.io.Output

View File

@ -1,7 +1,7 @@
package hep.dataforge.io package hep.dataforge.io
import hep.dataforge.context.Context import hep.dataforge.context.Context
import hep.dataforge.descriptors.NodeDescriptor import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.meta.* import hep.dataforge.meta.*
import hep.dataforge.values.* import hep.dataforge.values.*
import kotlinx.io.* import kotlinx.io.*

View File

@ -3,9 +3,9 @@
package hep.dataforge.io package hep.dataforge.io
import hep.dataforge.context.Context import hep.dataforge.context.Context
import hep.dataforge.descriptors.ItemDescriptor import hep.dataforge.meta.descriptors.ItemDescriptor
import hep.dataforge.descriptors.NodeDescriptor import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.descriptors.ValueDescriptor import hep.dataforge.meta.descriptors.ValueDescriptor
import hep.dataforge.meta.Meta import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBase import hep.dataforge.meta.MetaBase
import hep.dataforge.meta.MetaItem import hep.dataforge.meta.MetaItem

View File

@ -1,7 +1,7 @@
package hep.dataforge.io package hep.dataforge.io
import hep.dataforge.context.Context import hep.dataforge.context.Context
import hep.dataforge.descriptors.NodeDescriptor import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.io.MetaFormatFactory.Companion.META_FORMAT_TYPE import hep.dataforge.io.MetaFormatFactory.Companion.META_FORMAT_TYPE
import hep.dataforge.meta.Meta import hep.dataforge.meta.Meta
import hep.dataforge.names.Name import hep.dataforge.names.Name

View File

@ -6,6 +6,7 @@ import hep.dataforge.io.*
import hep.dataforge.meta.Meta import hep.dataforge.meta.Meta
import hep.dataforge.meta.get import hep.dataforge.meta.get
import hep.dataforge.meta.int import hep.dataforge.meta.int
import hep.dataforge.meta.scheme.int
import kotlin.reflect.KClass import kotlin.reflect.KClass
class RemoteFunctionClient(override val context: Context, val responder: Responder) : FunctionServer, ContextAware { class RemoteFunctionClient(override val context: Context, val responder: Responder) : FunctionServer, ContextAware {

View File

@ -8,6 +8,7 @@ import hep.dataforge.io.Responder
import hep.dataforge.io.type import hep.dataforge.io.type
import hep.dataforge.meta.get import hep.dataforge.meta.get
import hep.dataforge.meta.int import hep.dataforge.meta.int
import hep.dataforge.meta.scheme.int
class RemoteFunctionServer( class RemoteFunctionServer(
override val context: Context, override val context: Context,

View File

@ -18,7 +18,7 @@ object ValueSerializer : KSerializer<Value> {
private val valueTypeSerializer = EnumSerializer(ValueType::class) private val valueTypeSerializer = EnumSerializer(ValueType::class)
private val listSerializer by lazy { ArrayListSerializer(ValueSerializer) } private val listSerializer by lazy { ArrayListSerializer(ValueSerializer) }
override val descriptor: SerialDescriptor = descriptor("hep.dataforge.values.Value") { override val descriptor: SerialDescriptor = descriptor("Value") {
boolean("isList") boolean("isList")
enum<ValueType>("valueType") enum<ValueType>("valueType")
element("value", null) element("value", null)

View File

@ -3,6 +3,7 @@ package hep.dataforge.io
import hep.dataforge.meta.DFExperimental import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.get import hep.dataforge.meta.get
import hep.dataforge.meta.int import hep.dataforge.meta.int
import hep.dataforge.meta.scheme.int
import kotlinx.io.text.writeRawString import kotlinx.io.text.writeRawString
import kotlinx.io.text.writeUtf8String import kotlinx.io.text.writeUtf8String

View File

@ -1,6 +1,6 @@
package hep.dataforge.io package hep.dataforge.io
import hep.dataforge.descriptors.NodeDescriptor import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.meta.DFExperimental import hep.dataforge.meta.DFExperimental
import hep.dataforge.meta.EmptyMeta import hep.dataforge.meta.EmptyMeta
import hep.dataforge.meta.Meta import hep.dataforge.meta.Meta

View File

@ -1,149 +0,0 @@
/*
* Copyright 2018 Alexander Nozik.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hep.dataforge.descriptors
import hep.dataforge.values.ValueType
import kotlin.reflect.KClass
@Target(AnnotationTarget.PROPERTY)
@MustBeDocumented
annotation class ValueDef(
val key: String,
val type: Array<ValueType> = arrayOf(ValueType.STRING),
val multiple: Boolean = false,
val def: String = "",
val info: String = "",
val required: Boolean = true,
val allowed: Array<String> = emptyArray(),
val enumeration: KClass<*> = Any::class,
val tags: Array<String> = emptyArray()
)
@MustBeDocumented
annotation class NodeDef(
val key: String,
val info: String = "",
val multiple: Boolean = false,
val required: Boolean = false,
val tags: Array<String> = emptyArray(),
/**
* A list of child value descriptors
*/
val values: Array<ValueDef> = emptyArray(),
/**
* A target class for this node to describe
* @return
*/
val type: KClass<*> = Any::class,
/**
* The DataForge path to the resource containing the description. Following targets are supported:
*
* 1. resource
* 1. file
* 1. class
* 1. method
* 1. property
*
*
* Does not work if [type] is provided
*
* @return
*/
val descriptor: String = ""
)
/**
* Description text for meta property, node or whole object
*/
@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY)
@Retention(AnnotationRetention.RUNTIME)
@MustBeDocumented
annotation class Description(val value: String)
/**
* Annotation for value property which states that lists are expected
*/
@Target(AnnotationTarget.PROPERTY)
@Retention(AnnotationRetention.RUNTIME)
@MustBeDocumented
annotation class Multiple
/**
* Descriptor target
* The DataForge path to the resource containing the description. Following targets are supported:
* 1. resource
* 1. file
* 1. class
* 1. method
* 1. property
*
*
* Does not work if [type] is provided
*/
@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY, AnnotationTarget.VALUE_PARAMETER)
@Retention(AnnotationRetention.RUNTIME)
@MustBeDocumented
annotation class Descriptor(val value: String)
/**
* Aggregator class for descriptor nodes
*/
@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY, AnnotationTarget.FUNCTION, AnnotationTarget.VALUE_PARAMETER)
@Retention(AnnotationRetention.RUNTIME)
@MustBeDocumented
annotation class DescriptorNodes(vararg val nodes: NodeDef)
/**
* Aggregator class for descriptor values
*/
@Target(AnnotationTarget.CLASS, AnnotationTarget.PROPERTY, AnnotationTarget.FUNCTION, AnnotationTarget.VALUE_PARAMETER)
@Retention(AnnotationRetention.RUNTIME)
@MustBeDocumented
annotation class DescriptorValues(vararg val nodes: ValueDef)
/**
* Alternative name for property descriptor declaration
*/
@Target(AnnotationTarget.PROPERTY, AnnotationTarget.VALUE_PARAMETER)
@Retention(AnnotationRetention.RUNTIME)
@MustBeDocumented
annotation class DescriptorName(val name: String)
@Target(AnnotationTarget.PROPERTY)
@Retention(AnnotationRetention.RUNTIME)
@MustBeDocumented
annotation class DescriptorValue(val def: ValueDef)
//TODO enter fields directly?
@Target(AnnotationTarget.PROPERTY)
@Retention(AnnotationRetention.RUNTIME)
@MustBeDocumented
annotation class ValueProperty(
val name: String = "",
val type: Array<ValueType> = arrayOf(ValueType.STRING),
val multiple: Boolean = false,
val def: String = "",
val enumeration: KClass<*> = Any::class,
val tags: Array<String> = emptyArray()
)
@Target(AnnotationTarget.PROPERTY)
@Retention(AnnotationRetention.RUNTIME)
@MustBeDocumented
annotation class NodeProperty(val name: String = "")

View File

@ -12,10 +12,15 @@ data class MetaListener(
val action: (name: Name, oldItem: MetaItem<*>?, newItem: MetaItem<*>?) -> Unit val action: (name: Name, oldItem: MetaItem<*>?, newItem: MetaItem<*>?) -> Unit
) )
interface ObservableMeta : Meta {
fun onChange(owner: Any?, action: (Name, MetaItem<*>?, MetaItem<*>?) -> Unit)
fun removeListener(owner: Any?)
}
/** /**
* Mutable meta representing object state * Mutable meta representing object state
*/ */
class Config : AbstractMutableMeta<Config>() { class Config : AbstractMutableMeta<Config>(), ObservableMeta {
private val listeners = HashSet<MetaListener>() private val listeners = HashSet<MetaListener>()
@ -26,14 +31,14 @@ class Config : AbstractMutableMeta<Config>() {
/** /**
* Add change listener to this meta. Owner is declared to be able to remove listeners later. Listener without owner could not be removed * Add change listener to this meta. Owner is declared to be able to remove listeners later. Listener without owner could not be removed
*/ */
fun onChange(owner: Any?, action: (Name, MetaItem<*>?, MetaItem<*>?) -> Unit) { override fun onChange(owner: Any?, action: (Name, MetaItem<*>?, MetaItem<*>?) -> Unit) {
listeners.add(MetaListener(owner, action)) listeners.add(MetaListener(owner, action))
} }
/** /**
* Remove all listeners belonging to given owner * Remove all listeners belonging to given owner
*/ */
fun removeListener(owner: Any?) { override fun removeListener(owner: Any?) {
listeners.removeAll { it.owner === owner } listeners.removeAll { it.owner === owner }
} }

View File

@ -1,5 +1,6 @@
package hep.dataforge.meta package hep.dataforge.meta
import hep.dataforge.meta.scheme.Configurable
import hep.dataforge.names.* import hep.dataforge.names.*
import hep.dataforge.values.Value import hep.dataforge.values.Value
@ -160,7 +161,7 @@ operator fun MutableMeta<*>.set(name: String, metas: Iterable<Meta>): Unit = set
/** /**
* Append the node with a same-name-sibling, automatically generating numerical index * Append the node with a same-name-sibling, automatically generating numerical index
*/ */
fun MutableMeta<*>.append(name: Name, value: Any?) { fun <M: MutableMeta<M>> M.append(name: Name, value: Any?) {
require(!name.isEmpty()) { "Name could not be empty for append operation" } require(!name.isEmpty()) { "Name could not be empty for append operation" }
val newIndex = name.last()!!.index val newIndex = name.last()!!.index
if (newIndex.isNotEmpty()) { if (newIndex.isNotEmpty()) {
@ -171,4 +172,4 @@ fun MutableMeta<*>.append(name: Name, value: Any?) {
} }
} }
fun MutableMeta<*>.append(name: String, value: Any?) = append(name.toName(), value) fun <M: MutableMeta<M>> M.append(name: String, value: Any?) = append(name.toName(), value)

View File

@ -1,4 +1,4 @@
package hep.dataforge.descriptors package hep.dataforge.meta.descriptors
/** /**
* An object which provides its descriptor * An object which provides its descriptor

View File

@ -1,4 +1,4 @@
package hep.dataforge.descriptors package hep.dataforge.meta.descriptors
import hep.dataforge.meta.MetaBase import hep.dataforge.meta.MetaBase
import hep.dataforge.meta.MetaItem import hep.dataforge.meta.MetaItem

View File

@ -1,6 +1,7 @@
package hep.dataforge.descriptors package hep.dataforge.meta.descriptors
import hep.dataforge.meta.* import hep.dataforge.meta.*
import hep.dataforge.meta.scheme.*
import hep.dataforge.names.Name import hep.dataforge.names.Name
import hep.dataforge.names.NameToken import hep.dataforge.names.NameToken
import hep.dataforge.names.asName import hep.dataforge.names.asName
@ -41,6 +42,25 @@ sealed class ItemDescriptor : Scheme() {
abstract var required: Boolean abstract var required: Boolean
} }
/**
* Configure attributes of the descriptor
*/
fun ItemDescriptor.attributes(block: Config.() -> Unit) {
(attributes ?: Config().also { this.config = it }).apply(block)
}
/**
* Check if given item suits the descriptor
*/
fun ItemDescriptor.validateItem(item: MetaItem<*>?): Boolean {
return when (this) {
is ValueDescriptor -> isAllowedValue(item.value ?: return false)
is NodeDescriptor -> items.all { (key, d) ->
d.validateItem(item.node[key])
}
}
}
/** /**
* Descriptor for meta node. Could contain additional information for viewing * Descriptor for meta node. Could contain additional information for viewing
* and editing. * and editing.
@ -61,7 +81,7 @@ class NodeDescriptor : ItemDescriptor() {
* *
* @return * @return
*/ */
var default: Config? by config() var default by node()
/** /**
* The map of children node descriptors * The map of children node descriptors
@ -71,15 +91,21 @@ class NodeDescriptor : ItemDescriptor() {
name to wrap(node.node ?: error("Node descriptor must be a node")) name to wrap(node.node ?: error("Node descriptor must be a node"))
} }
/**
fun node(name: String, descriptor: NodeDescriptor) { * Define a child item descriptor for this node
*/
fun defineItem(name: String, descriptor: ItemDescriptor) {
if (items.keys.contains(name)) error("The key $name already exists in descriptor") if (items.keys.contains(name)) error("The key $name already exists in descriptor")
val token = NameToken(NODE_KEY, name) val token = when (descriptor) {
is NodeDescriptor -> NameToken(NODE_KEY, name)
is ValueDescriptor -> NameToken(VALUE_KEY, name)
}
config[token] = descriptor.config config[token] = descriptor.config
} }
fun node(name: String, block: NodeDescriptor.() -> Unit) { fun defineNode(name: String, block: NodeDescriptor.() -> Unit) {
val token = NameToken(NODE_KEY, name) val token = NameToken(NODE_KEY, name)
if (config[token] == null) { if (config[token] == null) {
config[token] = NodeDescriptor(block) config[token] = NodeDescriptor(block)
@ -100,7 +126,7 @@ class NodeDescriptor : ItemDescriptor() {
} }
} }
fun node(name: Name, block: NodeDescriptor.() -> Unit) { fun defineNode(name: Name, block: NodeDescriptor.() -> Unit) {
buildNode(name).apply(block) buildNode(name).apply(block)
} }
@ -112,22 +138,17 @@ class NodeDescriptor : ItemDescriptor() {
name to ValueDescriptor.wrap(node.node ?: error("Value descriptor must be a node")) name to ValueDescriptor.wrap(node.node ?: error("Value descriptor must be a node"))
} }
fun value(name: String, descriptor: ValueDescriptor) {
if (items.keys.contains(name)) error("The key $name already exists in descriptor")
val token = NameToken(VALUE_KEY, name)
config[token] = descriptor.config
}
/** /**
* Add a value descriptor using block for * Add a value descriptor using block for
*/ */
fun value(name: String, block: ValueDescriptor.() -> Unit) { fun defineValue(name: String, block: ValueDescriptor.() -> Unit) {
value(name, ValueDescriptor(block)) defineItem(name, ValueDescriptor(block))
} }
fun value(name: Name, block: ValueDescriptor.() -> Unit) { fun defineValue(name: Name, block: ValueDescriptor.() -> Unit) {
require(name.length >= 1) { "Name length for value descriptor must be non-empty" } require(name.length >= 1) { "Name length for value descriptor must be non-empty" }
buildNode(name.cutLast()).value(name.last().toString(), block) buildNode(name.cutLast()).defineValue(name.last().toString(), block)
} }
val items: Map<String, ItemDescriptor> get() = nodes + values val items: Map<String, ItemDescriptor> get() = nodes + values

View File

@ -1,6 +1,6 @@
package hep.dataforge.meta package hep.dataforge.meta
import hep.dataforge.descriptors.NodeDescriptor import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.values.Value import hep.dataforge.values.Value
/** /**

View File

@ -1,9 +1,7 @@
package hep.dataforge.meta package hep.dataforge.meta.scheme
import hep.dataforge.descriptors.Described import hep.dataforge.meta.*
import hep.dataforge.descriptors.NodeDescriptor import hep.dataforge.meta.descriptors.*
import hep.dataforge.descriptors.defaultItem
import hep.dataforge.descriptors.get
import hep.dataforge.names.Name import hep.dataforge.names.Name
import hep.dataforge.names.toName import hep.dataforge.names.toName
@ -24,6 +22,14 @@ interface Configurable : Described {
*/ */
fun getDefaultItem(name: Name): MetaItem<*>? = null fun getDefaultItem(name: Name): MetaItem<*>? = null
/**
* Check if property with given [name] could be assigned to [value]
*/
fun validateItem(name: Name, item: MetaItem<*>?): Boolean {
val descriptor = descriptor?.get(name)
return descriptor?.validateItem(item) ?: true
}
override val descriptor: NodeDescriptor? get() = null override val descriptor: NodeDescriptor? get() = null
} }
@ -39,7 +45,11 @@ fun Configurable.getProperty(key: String) = getProperty(key.toName())
* Set a configurable property * Set a configurable property
*/ */
fun Configurable.setProperty(name: Name, item: MetaItem<*>?) { fun Configurable.setProperty(name: Name, item: MetaItem<*>?) {
if(validateItem(name,item)) {
config[name] = item config[name] = item
} else {
error("Validation failed for property $name with value $item")
}
} }
fun Configurable.setProperty(key: String, item: MetaItem<*>?) { fun Configurable.setProperty(key: String, item: MetaItem<*>?) {

View File

@ -1,5 +1,6 @@
package hep.dataforge.meta package hep.dataforge.meta.scheme
import hep.dataforge.meta.*
import hep.dataforge.names.Name import hep.dataforge.names.Name
import hep.dataforge.names.asName import hep.dataforge.names.asName
import hep.dataforge.values.* import hep.dataforge.values.*
@ -41,23 +42,35 @@ class LazyConfigurableDelegate(
/** /**
* A property delegate that uses custom key * A property delegate that uses custom key
*/ */
fun Configurable.item(default: Any?, key: Name? = null): ConfigurableDelegate = fun Configurable.item(default: Any? = null, key: Name? = null): ConfigurableDelegate =
ConfigurableDelegate(this, key, default?.let { MetaItem.of(it) }) ConfigurableDelegate(
this,
key,
default?.let { MetaItem.of(it) })
/** /**
* Generation of item delegate with lazy default. * Generation of item delegate with lazy default.
* Lazy default could be used also for validation * Lazy default could be used also for validation
*/ */
fun Configurable.lazyItem(key: Name? = null, default: () -> Any?): ConfigurableDelegate = fun Configurable.lazyItem(key: Name? = null, default: () -> Any?): ConfigurableDelegate =
LazyConfigurableDelegate(this, key) { default()?.let { MetaItem.of(it) } } LazyConfigurableDelegate(this, key) {
default()?.let {
MetaItem.of(it)
}
}
fun <T> Configurable.item( fun <T> Configurable.item(
default: T? = null, default: T? = null,
key: Name? = null, key: Name? = null,
writer: (T) -> MetaItem<*>? = { MetaItem.of(it) }, writer: (T) -> MetaItem<*>? = {
MetaItem.of(it)
},
reader: (MetaItem<*>?) -> T reader: (MetaItem<*>?) -> T
): ReadWriteProperty<Any?, T> = ): ReadWriteProperty<Any?, T> =
ConfigurableDelegate(this, key, default?.let { MetaItem.of(it) }).map(reader = reader, writer = writer) ConfigurableDelegate(
this,
key,
default?.let { MetaItem.of(it) }).map(reader = reader, writer = writer)
fun Configurable.value(default: Any? = null, key: Name? = null): ReadWriteProperty<Any?, Value?> = fun Configurable.value(default: Any? = null, key: Name? = null): ReadWriteProperty<Any?, Value?> =
item(default, key).transform { it.value } item(default, key).transform { it.value }
@ -68,9 +81,13 @@ fun <T> Configurable.value(
writer: (T) -> Value? = { Value.of(it) }, writer: (T) -> Value? = { Value.of(it) },
reader: (Value?) -> T reader: (Value?) -> T
): ReadWriteProperty<Any?, T> = ): ReadWriteProperty<Any?, T> =
ConfigurableDelegate(this, key, default?.let { MetaItem.of(it) }).map( ConfigurableDelegate(
this,
key,
default?.let { MetaItem.of(it) }
).map(
reader = { reader(it.value) }, reader = { reader(it.value) },
writer = { writer(it)?.let { MetaItem.ValueItem(it) } } writer = { value -> writer(value)?.let { MetaItem.ValueItem(it) } }
) )
fun Configurable.string(default: String? = null, key: Name? = null): ReadWriteProperty<Any?, String?> = fun Configurable.string(default: String? = null, key: Name? = null): ReadWriteProperty<Any?, String?> =
@ -184,6 +201,10 @@ fun Configurable.doubleArray(vararg doubles: Double, key: Name? = null): ReadWri
fun Configurable.config(key: Name? = null): ReadWriteProperty<Any?, Config?> = fun Configurable.config(key: Name? = null): ReadWriteProperty<Any?, Config?> =
config.node(key) config.node(key)
fun Configurable.node(key: Name? = null): ReadWriteProperty<Any?, Meta?> = item().map(
reader = { it.node },
writer = { it?.let { MetaItem.NodeItem(it) } }
)
fun <T : Configurable> Configurable.spec(spec: Specification<T>, key: Name? = null): ReadWriteProperty<Any?, T?> = fun <T : Configurable> Configurable.spec(spec: Specification<T>, key: Name? = null): ReadWriteProperty<Any?, T?> =
object : ReadWriteProperty<Any?, T?> { object : ReadWriteProperty<Any?, T?> {

View File

@ -1,6 +1,7 @@
package hep.dataforge.meta package hep.dataforge.meta.scheme
import hep.dataforge.descriptors.* import hep.dataforge.meta.*
import hep.dataforge.meta.descriptors.*
import hep.dataforge.names.Name import hep.dataforge.names.Name
import hep.dataforge.names.NameToken import hep.dataforge.names.NameToken
import hep.dataforge.names.plus import hep.dataforge.names.plus
@ -8,7 +9,7 @@ import hep.dataforge.names.plus
/** /**
* A base for delegate-based or descriptor-based scheme. [Scheme] has an empty constructor to simplify usage from [Specification]. * A base for delegate-based or descriptor-based scheme. [Scheme] has an empty constructor to simplify usage from [Specification].
*/ */
open class Scheme() : Configurable, Described { open class Scheme() : Configurable, Described, MetaRepr {
constructor(config: Config, defaultProvider: (Name) -> MetaItem<*>?) : this() { constructor(config: Config, defaultProvider: (Name) -> MetaItem<*>?) : this() {
this.config = config this.config = config
this.defaultProvider = defaultProvider this.defaultProvider = defaultProvider
@ -17,7 +18,8 @@ open class Scheme() : Configurable, Described {
//constructor(config: Config, default: Meta) : this(config, { default[it] }) //constructor(config: Config, default: Meta) : this(config, { default[it] })
constructor(config: Config) : this(config, { null }) constructor(config: Config) : this(config, { null })
final override var config: Config = Config() final override var config: Config =
Config()
internal set internal set
lateinit var defaultProvider: (Name) -> MetaItem<*>? lateinit var defaultProvider: (Name) -> MetaItem<*>?
@ -37,6 +39,8 @@ open class Scheme() : Configurable, Described {
*/ */
open val defaultLayer: Meta get() = DefaultLayer(Name.EMPTY) open val defaultLayer: Meta get() = DefaultLayer(Name.EMPTY)
override fun toMeta(): Meta = config.seal()
private inner class DefaultLayer(val path: Name) : MetaBase() { private inner class DefaultLayer(val path: Name) : MetaBase() {
override val items: Map<NameToken, MetaItem<*>> = override val items: Map<NameToken, MetaItem<*>> =
(descriptor?.get(path) as? NodeDescriptor)?.items?.entries?.associate { (key, descriptor) -> (descriptor?.get(path) as? NodeDescriptor)?.items?.entries?.associate { (key, descriptor) ->
@ -55,7 +59,8 @@ open class Scheme() : Configurable, Described {
/** /**
* A specification for simplified generation of wrappers * A specification for simplified generation of wrappers
*/ */
open class SchemeSpec<T : Scheme>(val builder: () -> T) : Specification<T> { open class SchemeSpec<T : Scheme>(val builder: () -> T) :
Specification<T> {
override fun wrap(config: Config, defaultProvider: (Name) -> MetaItem<*>?): T { override fun wrap(config: Config, defaultProvider: (Name) -> MetaItem<*>?): T {
return builder().apply { return builder().apply {
this.config = config this.config = config
@ -75,14 +80,18 @@ open class MetaScheme(
init { init {
this.descriptor = descriptor this.descriptor = descriptor
} }
override val defaultLayer: Meta get() = Laminate(meta, descriptor?.defaultItem().node)
override val defaultLayer: Meta
get() = Laminate(meta, descriptor?.defaultItem().node)
} }
fun Meta.asScheme() = MetaScheme(this) fun Meta.asScheme() =
MetaScheme(this)
fun <T : Configurable> Meta.toScheme(spec: Specification<T>, block: T.() -> Unit) = spec.wrap(this).apply(block) fun <T : Configurable> Meta.toScheme(spec: Specification<T>, block: T.() -> Unit) = spec.wrap(this).apply(block)
/** /**
* Create a snapshot laminate * Create a snapshot laminate
*/ */
fun Scheme.toMeta(): Laminate = Laminate(config, defaultLayer) fun Scheme.toMeta(): Laminate =
Laminate(config, defaultLayer)

View File

@ -1,5 +1,6 @@
package hep.dataforge.meta package hep.dataforge.meta.scheme
import hep.dataforge.meta.*
import hep.dataforge.names.Name import hep.dataforge.names.Name
import kotlin.jvm.JvmName import kotlin.jvm.JvmName
@ -33,7 +34,9 @@ interface Specification<T : Configurable> {
/** /**
* Wrap a configuration using static meta as default * Wrap a configuration using static meta as default
*/ */
fun wrap(default: Meta): T = wrap(Config()){default[it]} fun wrap(default: Meta): T = wrap(
Config()
){default[it]}
} }
/** /**
@ -54,7 +57,8 @@ fun <C : Configurable, S : Specification<C>> Configurable.update(spec: S, action
fun <C : Configurable, S : Specification<C>> S.createStyle(action: C.() -> Unit): Meta = fun <C : Configurable, S : Specification<C>> S.createStyle(action: C.() -> Unit): Meta =
Config().also { update(it, action) } Config().also { update(it, action) }
fun <T : Configurable> MetaItem<*>.spec(spec: Specification<T>): T? = node?.let { spec.wrap(Config(), it) } fun <T : Configurable> MetaItem<*>.spec(spec: Specification<T>): T? = node?.let { spec.wrap(
Config(), it) }
@JvmName("configSpec") @JvmName("configSpec")
fun <T : Configurable> MetaItem<Config>.spec(spec: Specification<T>): T? = node?.let { spec.wrap(it) } fun <T : Configurable> MetaItem<Config>.spec(spec: Specification<T>): T? = node?.let { spec.wrap(it) }

View File

@ -1,5 +1,6 @@
package hep.dataforge.meta package hep.dataforge.meta.transformations
import hep.dataforge.meta.*
import hep.dataforge.names.Name import hep.dataforge.names.Name
/** /**
@ -8,7 +9,7 @@ import hep.dataforge.names.Name
interface TransformationRule { interface TransformationRule {
/** /**
* Check if this transformation * Check if this transformation should be applied to a node with given name and value
*/ */
fun matches(name: Name, item: MetaItem<*>?): Boolean fun matches(name: Name, item: MetaItem<*>?): Boolean
@ -29,7 +30,8 @@ interface TransformationRule {
/** /**
* A transformation which keeps all elements, matching [selector] unchanged. * A transformation which keeps all elements, matching [selector] unchanged.
*/ */
data class KeepTransformationRule(val selector: (Name) -> Boolean) : TransformationRule { data class KeepTransformationRule(val selector: (Name) -> Boolean) :
TransformationRule {
override fun matches(name: Name, item: MetaItem<*>?): Boolean { override fun matches(name: Name, item: MetaItem<*>?): Boolean {
return selector(name) return selector(name)
} }
@ -87,7 +89,8 @@ inline class MetaTransformation(val transformations: Collection<TransformationRu
/** /**
* Produce new meta using only those items that match transformation rules * Produce new meta using only those items that match transformation rules
*/ */
fun transform(source: Meta): Meta = buildMeta { fun transform(source: Meta): Meta =
buildMeta {
transformations.forEach { rule -> transformations.forEach { rule ->
rule.selectItems(source).forEach { name -> rule.selectItems(source).forEach { name ->
rule.transformItem(name, source[name], this) rule.transformItem(name, source[name], this)
@ -98,7 +101,8 @@ inline class MetaTransformation(val transformations: Collection<TransformationRu
/** /**
* Transform a meta, replacing all elements found in rules with transformed entries * Transform a meta, replacing all elements found in rules with transformed entries
*/ */
fun apply(source: Meta): Meta = buildMeta(source) { fun apply(source: Meta): Meta =
buildMeta(source) {
transformations.forEach { rule -> transformations.forEach { rule ->
rule.selectItems(source).forEach { name -> rule.selectItems(source).forEach { name ->
remove(name) remove(name)
@ -150,7 +154,8 @@ class MetaTransformationBuilder {
* Keep nodes by regex * Keep nodes by regex
*/ */
fun keep(regex: String) { fun keep(regex: String) {
transformations.add(RegexItemTransformationRule(regex.toRegex()) { name, _, metaItem -> transformations.add(
RegexItemTransformationRule(regex.toRegex()) { name, _, metaItem ->
setItem(name, metaItem) setItem(name, metaItem)
}) })
} }

View File

@ -1,5 +1,6 @@
package hep.dataforge.meta package hep.dataforge.meta
import hep.dataforge.meta.scheme.*
import kotlin.test.Test import kotlin.test.Test
import kotlin.test.assertEquals import kotlin.test.assertEquals

View File

@ -1,5 +1,8 @@
package hep.dataforge.meta package hep.dataforge.meta
import hep.dataforge.meta.scheme.asScheme
import hep.dataforge.meta.scheme.getProperty
import hep.dataforge.meta.scheme.toMeta
import kotlin.test.Test import kotlin.test.Test
import kotlin.test.assertEquals import kotlin.test.assertEquals

View File

@ -1,5 +1,8 @@
package hep.dataforge.meta package hep.dataforge.meta
import hep.dataforge.meta.scheme.Scheme
import hep.dataforge.meta.scheme.Specification
import hep.dataforge.meta.scheme.numberList
import hep.dataforge.names.Name import hep.dataforge.names.Name
import kotlin.test.Test import kotlin.test.Test
import kotlin.test.assertEquals import kotlin.test.assertEquals

View File

@ -1,4 +1,4 @@
package hep.dataforge.descriptors package hep.dataforge.meta.descriptors
import hep.dataforge.values.ValueType import hep.dataforge.values.ValueType
import kotlin.test.Test import kotlin.test.Test
@ -7,14 +7,14 @@ import kotlin.test.assertEquals
class DescriptorTest { class DescriptorTest {
val descriptor = NodeDescriptor { val descriptor = NodeDescriptor {
node("aNode") { defineNode("aNode") {
info = "A root demo node" info = "A root demo node"
value("b") { defineValue("b") {
info = "b number value" info = "b number value"
type(ValueType.NUMBER) type(ValueType.NUMBER)
} }
node("otherNode") { defineNode("otherNode") {
value("otherValue") { defineValue("otherValue") {
type(ValueType.BOOLEAN) type(ValueType.BOOLEAN)
default(false) default(false)
info = "default value" info = "default value"

View File

@ -3,6 +3,7 @@ package hep.dataforge.scripting
import hep.dataforge.context.Global import hep.dataforge.context.Global
import hep.dataforge.meta.get import hep.dataforge.meta.get
import hep.dataforge.meta.int import hep.dataforge.meta.int
import hep.dataforge.meta.scheme.int
import hep.dataforge.workspace.SimpleWorkspaceBuilder import hep.dataforge.workspace.SimpleWorkspaceBuilder
import hep.dataforge.workspace.context import hep.dataforge.workspace.context
import hep.dataforge.workspace.target import hep.dataforge.workspace.target

View File

@ -1,25 +0,0 @@
plugins {
id "org.jetbrains.kotlin.multiplatform"
}
repositories {
jcenter()
}
kotlin {
targets {
fromPreset(presets.jvm, 'jvm')
//fromPreset(presets.js, 'js')
// For ARM, preset should be changed to presets.iosArm32 or presets.iosArm64
// For Linux, preset should be changed to e.g. presets.linuxX64
// For MacOS, preset should be changed to e.g. presets.macosX64
//fromPreset(presets.iosX64, 'ios')
}
sourceSets {
commonMain {
dependencies {
api project(":dataforge-context")
}
}
}
}

View File

@ -0,0 +1,14 @@
// Multiplatform build for the dataforge-tables module, configured via the
// shared "scientifik.mpp" convention plugin.
plugins {
    id("scientifik.mpp")
}
kotlin {
    sourceSets {
        val commonMain by getting{
            dependencies {
                // Tables depend on the context (plugins, logging) and IO modules.
                api(project(":dataforge-context"))
                api(project(":dataforge-io"))
            }
        }
    }
}

View File

@ -0,0 +1,8 @@
package hep.dataforge.tables
import hep.dataforge.meta.scheme.Scheme
import hep.dataforge.meta.scheme.SchemeSpec
/**
 * A [Scheme] describing column metadata. Used by column factories (e.g. `ListColumn`,
 * `RealColumn`) to build the [hep.dataforge.meta.Meta] attached to a column via a builder block.
 */
class ColumnScheme : Scheme() {
    // Specification allowing `ColumnScheme { ... }` builder invocation and spec-based wrapping.
    companion object : SchemeSpec<ColumnScheme>(::ColumnScheme)
}

View File

@ -0,0 +1,35 @@
package hep.dataforge.tables
import kotlin.reflect.KClass
/**
 * A column-oriented [Table]: data is stored per [Column], rows are virtual views.
 *
 * All columns must have the same number of cells; this is validated at construction.
 */
class ColumnTable(override val columns: Map<String, Column<*>>) : Table {
    // Number of rows shared by all columns. `firstOrNull` (rather than `first`)
    // makes an empty column map a valid zero-row table instead of throwing
    // NoSuchElementException from the initializer.
    private val rowsNum = columns.values.firstOrNull()?.size ?: 0

    init {
        require(columns.values.all { it.size == rowsNum }) { "All columns must be of the same size" }
    }

    /** Lazily materialized row views; each row indexes back into the columns. */
    override val rows: List<Row>
        get() = (0 until rowsNum).map { VirtualRow(it) }

    /**
     * Returns the cell at ([row], [column]) checked against [type].
     *
     * @return the value, or null if the column is absent or the cell is null
     * @throws IllegalStateException if the stored value is not an instance of [type]
     */
    @Suppress("UNCHECKED_CAST")
    override fun <T : Any> getValue(row: Int, column: String, type: KClass<out T>): T? {
        val value = columns[column]?.get(row)
        return when {
            value == null -> null
            type.isInstance(value) -> value as T
            else -> error("Expected type is $type, but found ${value::class}")
        }
    }

    // A row that holds no data of its own — just an index into the parent table.
    private inner class VirtualRow(val index: Int) : Row {
        override fun <T : Any> getValue(column: String, type: KClass<out T>): T? = getValue(index, column, type)
    }
}
/**
 * Mutable builder for [ColumnTable].
 *
 * The original version exposed no way to populate [columns], so [build] could only
 * ever produce an empty table; [add] fixes that without changing the existing API.
 */
class ColumnTableBuilder {
    private val columns = ArrayList<Column<*>>()

    /**
     * Register a column. If several columns share a name, the one added last
     * wins in [build] (because `associateBy` keeps the last entry per key).
     */
    fun add(column: Column<*>) {
        columns.add(column)
    }

    /** Assemble the accumulated columns into an immutable [ColumnTable], keyed by name. */
    fun build() = ColumnTable(columns.associateBy { it.name })
}

View File

@ -0,0 +1,28 @@
package hep.dataforge.tables
import hep.dataforge.meta.Meta
import kotlin.reflect.KClass
/**
 * A general-purpose [Column] backed by a [List] of nullable values.
 *
 * @param name column name
 * @param data backing list; null entries represent empty cells
 * @param type runtime element class (needed because generics are erased)
 * @param meta metadata attached to the column
 */
class ListColumn<T : Any>(
    override val name: String,
    private val data: List<T?>,
    override val type: KClass<out T>,
    override val meta: Meta
) : Column<T> {
    override val size: Int get() = data.size

    // Throws IndexOutOfBoundsException for indices outside [0, size), like List.get.
    override fun get(index: Int): T? = data[index]

    companion object {
        /**
         * Reified factory: captures `T::class` automatically and builds the column
         * meta from a [ColumnScheme] configuration block.
         */
        inline operator fun <reified T : Any> invoke(
            name: String,
            data: List<T>,
            noinline metaBuilder: ColumnScheme.() -> Unit
        ): ListColumn<T> = ListColumn(name, data, T::class, ColumnScheme(metaBuilder).toMeta())
    }
}
/**
 * Transform each cell of this column with [block], producing a [ListColumn] of the results.
 * The source [meta] is carried over unless a replacement is supplied.
 */
inline fun <T : Any, reified R : Any> Column<T>.map(meta: Meta = this.meta, noinline block: (T?) -> R): Column<R> {
    val transformed = (0 until size).map { index -> block(get(index)) }
    return ListColumn(name, transformed, R::class, meta)
}

View File

@ -0,0 +1,19 @@
package hep.dataforge.tables
import kotlin.reflect.KClass
/**
 * A [Row] backed by a plain map from column name to value.
 */
class MapRow(val values: Map<String, Any>) : Row {
    /**
     * Returns the value stored under [column] checked against [type].
     *
     * @return null when the column is absent
     * @throws IllegalStateException when the stored value is not an instance of [type]
     */
    @Suppress("UNCHECKED_CAST")
    override fun <T : Any> getValue(column: String, type: KClass<out T>): T? {
        val value = values[column] ?: return null
        if (!type.isInstance(value)) {
            error("Expected type is $type, but found ${value::class}")
        }
        return value as T?
    }
}

View File

@ -0,0 +1,94 @@
package hep.dataforge.tables
import hep.dataforge.meta.Meta
import kotlin.reflect.KClass
//interface NumberColumn<N : Number> : Column<N>
/**
 * A column of unboxed doubles backed by a [DoubleArray], avoiding per-element boxing.
 */
data class RealColumn(
    override val name: String,
    val data: DoubleArray,
    override val meta: Meta = Meta.EMPTY
) : Column<Double> {
    override val type: KClass<out Double> get() = Double::class

    override val size: Int get() = data.size

    // Inlined so primitive access compiles to a direct array read.
    // Note: never returns null, unlike the nullable Column contract.
    @Suppress("OVERRIDE_BY_INLINE", "NOTHING_TO_INLINE")
    override inline fun get(index: Int): Double = data[index]

    // equals/hashCode are hand-written because the data-class-generated versions
    // compare DoubleArray by reference; contentEquals/contentHashCode compare contents.
    override fun equals(other: Any?): Boolean {
        if (this === other) return true
        if (other !is RealColumn) return false

        if (name != other.name) return false
        if (!data.contentEquals(other.data)) return false
        if (meta != other.meta) return false

        return true
    }

    override fun hashCode(): Int {
        var result = name.hashCode()
        result = 31 * result + data.contentHashCode()
        result = 31 * result + meta.hashCode()
        return result
    }

    companion object {
        // NOTE(review): the reified type parameter T is unused here — candidate for removal.
        /** Factory building the column meta from a [ColumnScheme] configuration block. */
        inline operator fun <reified T : Any> invoke(
            name: String,
            data: DoubleArray,
            noinline metaBuilder: ColumnScheme.() -> Unit
        ): RealColumn = RealColumn(name, data, ColumnScheme(metaBuilder).toMeta())
    }
}
/**
 * Map each cell to a [Double] and pack the results into an unboxed [RealColumn].
 */
fun <T : Any> Column<T>.map(meta: Meta = this.meta, block: (T?) -> Double): RealColumn =
    RealColumn(name, DoubleArray(size) { index -> block(get(index)) }, meta)
/**
 * A column of unboxed ints backed by an [IntArray], avoiding per-element boxing.
 */
data class IntColumn(
    override val name: String,
    val data: IntArray,
    override val meta: Meta = Meta.EMPTY
) : Column<Int> {
    override val type: KClass<out Int> get() = Int::class

    override val size: Int get() = data.size

    // Inlined so primitive access compiles to a direct array read.
    // Note: never returns null, unlike the nullable Column contract.
    @Suppress("OVERRIDE_BY_INLINE", "NOTHING_TO_INLINE")
    override inline fun get(index: Int): Int = data[index]

    // equals/hashCode are hand-written because the data-class-generated versions
    // compare IntArray by reference; contentEquals/contentHashCode compare contents.
    override fun equals(other: Any?): Boolean {
        if (this === other) return true
        if (other !is IntColumn) return false

        if (name != other.name) return false
        if (!data.contentEquals(other.data)) return false
        if (meta != other.meta) return false

        return true
    }

    override fun hashCode(): Int {
        var result = name.hashCode()
        result = 31 * result + data.contentHashCode()
        result = 31 * result + meta.hashCode()
        return result
    }

    companion object {
        // NOTE(review): the reified type parameter T is unused here — candidate for removal.
        /** Factory building the column meta from a [ColumnScheme] configuration block. */
        inline operator fun <reified T : Any> invoke(
            name: String,
            data: IntArray,
            noinline metaBuilder: ColumnScheme.() -> Unit
        ): IntColumn = IntColumn(name, data, ColumnScheme(metaBuilder).toMeta())
    }
}
/**
 * Map each cell to an [Int] and pack the results into an unboxed [IntColumn].
 */
fun <T : Any> Column<T>.map(meta: Meta = this.meta, block: (T?) -> Int): IntColumn =
    IntColumn(name, IntArray(size) { index -> block(get(index)) }, meta)

View File

@ -0,0 +1,26 @@
package hep.dataforge.tables
import hep.dataforge.meta.Meta
import kotlin.reflect.KClass
/** Static description of a column: its [name], element [type] and attached [meta]. */
data class ColumnDef<T : Any>(val name: String, val type: KClass<T>, val meta: Meta)
/**
 * A row-oriented [Table]: data lives in [rows]; columns are virtual views
 * synthesized from the supplied [columnDefs].
 */
class RowTable<R : Row>(override val rows: List<R>, private val columnDefs: List<ColumnDef<*>>) : Table {
    /** Delegates the lookup to the row itself. */
    override fun <T : Any> getValue(row: Int, column: String, type: KClass<out T>): T? =
        rows[row].getValue(column, type)

    // Rebuilt on every access; each virtual column reads straight through to the rows.
    override val columns: Map<String, Column<*>>
        get() = columnDefs.associateBy({ it.name }, { VirtualColumn(it) })

    // A column that stores nothing — it resolves every cell via the parent table's rows.
    private inner class VirtualColumn<T : Any>(val def: ColumnDef<T>) : Column<T> {
        override val name: String get() = def.name
        override val type: KClass<out T> get() = def.type
        override val meta: Meta get() = def.meta
        override val size: Int get() = rows.size

        override fun get(index: Int): T? = rows[index].getValue(name, type)
    }
}

View File

@ -0,0 +1,30 @@
package hep.dataforge.tables
import hep.dataforge.meta.Meta
import kotlin.reflect.KClass
/**
 * A finite two-dimensional collection of named, typed columns with a row-oriented view.
 */
interface Table {
    /** The value at ([row], [column]) checked against [type], or null for a missing cell. */
    fun <T : Any> getValue(row: Int, column: String, type: KClass<out T>): T?

    /** Columns keyed by their [Column.name]. */
    val columns: Map<String, Column<*>>

    /** Row-oriented view of the same data. */
    val rows: List<Row>
}
/**
 * A single named column of nullable cells with attached [meta].
 */
interface Column<out T : Any> {
    /** Column name, used as the lookup key in [Table.columns]. */
    val name: String

    /** Runtime class of the elements (generics are erased). */
    val type: KClass<out T>

    /** Metadata attached to this column. */
    val meta: Meta

    /** Number of cells. */
    val size: Int

    /** The value at [index], or null for an empty cell. */
    operator fun get(index: Int): T?
}
/** Valid index range of a column: `0 until size`. */
val Column<*>.indices: IntRange get() = 0 until size

/** Iterate the column's cells in index order, yielding nulls for empty cells. */
operator fun <T : Any> Column<T>.iterator() = iterator {
    for (index in 0 until size) yield(get(index))
}
/**
 * A single table row: values addressed by column name.
 */
interface Row {
    /** The value stored under [column] checked against [type], or null when absent. */
    fun <T : Any> getValue(column: String, type: KClass<out T>): T?
}

View File

@ -0,0 +1,37 @@
package hep.dataforge.tables
import hep.dataforge.meta.Meta
import kotlin.properties.ReadOnlyProperty
import kotlin.reflect.KClass
import kotlin.reflect.KProperty
import kotlin.reflect.full.cast
import kotlin.reflect.full.isSubclassOf
/**
 * A thin wrapper over [table] (delegating the whole [Table] contract) that adds
 * typed column access through property delegation.
 */
class TableAccessor(val table: Table) : Table by table {
    /** Delegate factory: `val myColumn by column<Double>()` resolves a column by property name. */
    inline fun <reified T : Any> column() = ColumnProperty(table, T::class)
}
/**
 * View this column as a `Column<T>`.
 *
 * The direct unchecked cast is only sound when every element is guaranteed to be a [T],
 * i.e. when the column's element type is [type] or a subtype of it (a covariant upcast —
 * `Column` is declared `out T`). The original condition was inverted: it direct-cast when
 * the *requested* type was a subtype of the stored one, silently allowing unchecked
 * downcasts (e.g. `Column<Any>` → `Column<Int>` with non-Int elements).
 * In every other case each element is checked on access via [ColumnWrapper].
 */
@Suppress("UNCHECKED_CAST")
fun <T : Any> Column<*>.cast(type: KClass<T>): Column<T> {
    return if (this.type.isSubclassOf(type)) {
        this as Column<T>
    } else {
        ColumnWrapper(this, type)
    }
}
/**
 * A checking view over [column]: every accessed element is cast to [type] on read.
 *
 * @throws kotlin.TypeCastException (via [KClass.cast]) if an element is not an instance of [type]
 */
class ColumnWrapper<T : Any>(val column: Column<*>, override val type: KClass<T>) : Column<T> {
    override val name: String get() = column.name
    override val meta: Meta get() = column.meta
    override val size: Int get() = column.size

    // Propagate nulls instead of crashing: Column cells are nullable by contract,
    // but KClass.cast throws on a null argument.
    override fun get(index: Int): T? = column[index]?.let { type.cast(it) }
}
/**
 * Read-only property delegate resolving a typed column from [table] by the
 * delegating property's name; null when no such column exists.
 */
class ColumnProperty<T : Any>(val table: Table, val type: KClass<T>) : ReadOnlyProperty<Any?, Column<T>?> {
    override fun getValue(thisRef: Any?, property: KProperty<*>): Column<T>? =
        table.columns[property.name]?.cast(type)
}

View File

@ -1,7 +1,7 @@
package hep.dataforge.workspace package hep.dataforge.workspace
import hep.dataforge.data.DataNode import hep.dataforge.data.DataNode
import hep.dataforge.descriptors.NodeDescriptor import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.meta.Meta import hep.dataforge.meta.Meta
import hep.dataforge.meta.get import hep.dataforge.meta.get
import hep.dataforge.meta.node import hep.dataforge.meta.node

View File

@ -2,7 +2,7 @@ package hep.dataforge.workspace
import hep.dataforge.context.Named import hep.dataforge.context.Named
import hep.dataforge.data.DataNode import hep.dataforge.data.DataNode
import hep.dataforge.descriptors.Described import hep.dataforge.meta.descriptors.Described
import hep.dataforge.meta.Meta import hep.dataforge.meta.Meta
import hep.dataforge.provider.Type import hep.dataforge.provider.Type
import hep.dataforge.workspace.Task.Companion.TYPE import hep.dataforge.workspace.Task.Companion.TYPE

View File

@ -2,7 +2,7 @@ package hep.dataforge.workspace
import hep.dataforge.context.Context import hep.dataforge.context.Context
import hep.dataforge.data.* import hep.dataforge.data.*
import hep.dataforge.descriptors.NodeDescriptor import hep.dataforge.meta.descriptors.NodeDescriptor
import hep.dataforge.meta.DFBuilder import hep.dataforge.meta.DFBuilder
import hep.dataforge.meta.Meta import hep.dataforge.meta.Meta
import hep.dataforge.meta.get import hep.dataforge.meta.get

View File

@ -6,6 +6,7 @@ import hep.dataforge.meta.boolean
import hep.dataforge.meta.builder import hep.dataforge.meta.builder
import hep.dataforge.meta.get import hep.dataforge.meta.get
import hep.dataforge.meta.int import hep.dataforge.meta.int
import hep.dataforge.meta.scheme.int
import hep.dataforge.names.plus import hep.dataforge.names.plus
import kotlin.test.Test import kotlin.test.Test
import kotlin.test.assertEquals import kotlin.test.assertEquals

View File

@ -28,7 +28,8 @@ include(
":dataforge-context", ":dataforge-context",
":dataforge-data", ":dataforge-data",
":dataforge-output", ":dataforge-output",
":dataforge-output-html", ":dataforge-output:dataforge-output-html",
":dataforge-tables",
":dataforge-workspace", ":dataforge-workspace",
":dataforge-scripting" ":dataforge-scripting"
) )