Update numass to match dev

Alexander Nozik 2018-03-27 15:58:24 +03:00
parent 7404e5fa67
commit a24e7c6963
20 changed files with 107 additions and 137 deletions

View File

@@ -1,11 +1,17 @@
buildscript {
ext.kotlin_version = "1.2.30"
ext.kotlin_version = "1.2.31"
repositories {
jcenter()
maven {
url "https://dl.bintray.com/kotlin/kotlin-eap"
}
}
plugins {
id "org.jetbrains.kotlin.jvm" version "1.2.30" apply false
dependencies {
classpath "org.jetbrains.dokka:dokka-gradle-plugin:0.9.16"
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
}
}
allprojects{
apply plugin: 'idea'
apply plugin: 'java'

View File

@@ -16,12 +16,12 @@
package inr.numass.control.cryotemp
import hep.dataforge.Named
import hep.dataforge.kodex.buildMeta
import hep.dataforge.kodex.stringValue
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.meta.Metoid
import hep.dataforge.names.Named
internal fun createChannel(name: String): PKT8Channel =
@@ -51,11 +51,7 @@ internal fun createChannel(meta: Meta): PKT8Channel {
*/
class PKT8Channel(override val meta: Meta, private val func: (Double) -> Double) : Named, Metoid {
private val _name: String by meta.stringValue()
override fun getName(): String {
return _name
}
override val name: String by meta.stringValue()
fun description(): String {
return meta.getString("description", "")

View File

@@ -19,7 +19,7 @@ import kotlin.streams.asSequence
* Created by darksnake on 30-Jan-17.
*/
object NumassDataUtils {
fun join(name: String, sets: Collection<NumassSet>): NumassSet {
fun join(setName: String, sets: Collection<NumassSet>): NumassSet {
return object : NumassSet {
override val points: Stream<out NumassPoint> by lazy {
val points = sets.stream().flatMap<NumassPoint> { it.points }
@@ -33,9 +33,7 @@ object NumassDataUtils {
metaBuilder
}
override fun getName(): String {
return name
}
override val name = setName
}
}

View File

@@ -5,9 +5,9 @@
*/
package inr.numass.data.api
import hep.dataforge.Named
import hep.dataforge.kodex.toList
import hep.dataforge.meta.Metoid
import hep.dataforge.names.Named
import hep.dataforge.providers.Provider
import hep.dataforge.providers.Provides
import hep.dataforge.providers.ProvidesNames

View File

@@ -23,7 +23,7 @@ import java.util.stream.Stream
* Created by darksnake on 08.07.2017.
*/
class NumassDatFile @Throws(IOException::class)
constructor(private val name: String, private val path: Path, meta: Meta) : NumassSet {
constructor(override val name: String, private val path: Path, meta: Meta) : NumassSet {
override val meta: Meta
private val hVdev: Double
@@ -57,10 +57,6 @@ constructor(private val name: String, private val path: Path, meta: Meta) : Numa
.build()
}
override fun getName(): String {
return name
}
private fun hasUset(): Boolean {
return meta.getBoolean("dat.uSet", true)
}

View File

@@ -2,7 +2,7 @@ package inr.numass.data.storage
import hep.dataforge.context.Context
import hep.dataforge.data.DataFactory
import hep.dataforge.data.DataTree
import hep.dataforge.data.DataNodeEditor
import hep.dataforge.meta.Meta
import hep.dataforge.storage.commons.StorageManager
import hep.dataforge.storage.commons.StorageUtils
@@ -13,12 +13,10 @@ import inr.numass.data.api.NumassSet
*/
class NumassDataFactory : DataFactory<NumassSet>(NumassSet::class.java) {
override fun getName(): String {
return "numass"
}
override val name= "numass"
override fun fill(builder: DataTree.Builder<NumassSet>, context: Context, meta: Meta) {
override fun fill(builder: DataNodeEditor<NumassSet>, context: Context, meta: Meta) {
val newMeta = meta.builder.setValue("type", "numass")
val storage = context.load(StorageManager::class.java, Meta.empty()).buildStorage(newMeta)
StorageUtils.loaderStream(storage).forEach { loader ->

View File

@@ -1,32 +0,0 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package inr.numass;
import hep.dataforge.meta.MetaBuilder;
import hep.dataforge.storage.commons.StorageDataFactory;
import hep.dataforge.workspace.BasicWorkspace;
import hep.dataforge.workspace.Workspace;
/**
*
* @author Alexander Nozik
*/
public class WorkspaceTest {
/**
* @param args the command line arguments
*/
public static void main(String[] args) {
String storagepath = "D:\\Work\\Numass\\data\\";
Workspace workspace = BasicWorkspace.builder()
.setContext(Numass.buildContext())
.data("", new StorageDataFactory(), new MetaBuilder("storage").putValue("path", storagepath))
.build();
}
}

View File

@@ -152,8 +152,8 @@ object NumassUtils {
* @return
*/
fun setToNode(set: NumassSet): DataNode<Any> {
val builder = DataSet.builder()
builder.setName(set.name)
val builder = DataSet.edit()
builder.name = set.name
set.points.forEach { point ->
val pointMeta = MetaBuilder("point")
.putValue("voltage", point.voltage)

View File

@@ -0,0 +1,32 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package inr.numass
import hep.dataforge.meta.MetaBuilder
import hep.dataforge.storage.commons.StorageDataFactory
import hep.dataforge.workspace.BasicWorkspace
/**
*
* @author Alexander Nozik
*/
object WorkspaceTest {
/**
* @param args the command line arguments
*/
@JvmStatic
fun main(args: Array<String>) {
val storagepath = "D:\\Work\\Numass\\data\\"
val workspace = BasicWorkspace.builder().apply {
this.context = Numass.buildContext()
data("", StorageDataFactory(), MetaBuilder("storage").putValue("path", storagepath))
}.build()
}
}

View File

@@ -37,9 +37,11 @@ import java.util.*
*/
@TypedActionDef(name = "summary", inputType = FitState::class, outputType = Table::class, info = "Generate summary for fit results of different datasets.")
@ValueDef(name = "parnames", multiple = true, required = true, info = "List of names of parameters for which summary should be done")
class SummaryAction : ManyToOneAction<FitState, Table>() {
object SummaryAction : ManyToOneAction<FitState, Table>() {
protected override fun buildGroups(context: Context, input: DataNode<FitState>, actionMeta: Meta): List<DataNode<FitState>> {
const val SUMMARY_NAME = "sumName"
override fun buildGroups(context: Context, input: DataNode<FitState>, actionMeta: Meta): List<DataNode<FitState>> {
val meta = inputMeta(context, input.meta, actionMeta)
val groups: List<DataNode<FitState>>
if (meta.hasValue("grouping.byValue")) {
@@ -108,9 +110,4 @@ class SummaryAction : ManyToOneAction<FitState, Table>() {
super.afterGroup(context, groupName, outputMeta, output)
}
companion object {
val SUMMARY_NAME = "sumName"
}
}

View File

@@ -63,7 +63,7 @@ class TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>() {
if (inputMeta.getBoolean("plotHist", true)) {
val histPlot = pm.getPlotFrame(getName(), "histogram");
val histPlot = pm.getPlotFrame(name, "histogram");
histPlot.configure {
node("xAxis") {
@@ -99,7 +99,7 @@ class TimeAnalyzerAction : OneToOneAction<NumassPoint, Table>() {
configure(inputMeta.getMetaOrEmpty("plot"))
}
pm.getPlotFrame(getName(), "stat-method").add(statPlot)
pm.getPlotFrame(name, "stat-method").add(statPlot)
(1..100).map { inputMeta.getDouble("t0Step", 1000.0) * it }.map { t ->
val result = analyzer.analyze(input, inputMeta.builder.setValue("t0", t))

View File

@@ -68,7 +68,7 @@ class TimeSpectrumAction : OneToOneAction<NumassPoint, Table>() {
if (inputMeta.getBoolean("plotHist", true)) {
val histPlot = pm.getPlotFrame(getName(), "histogram");
val histPlot = pm.getPlotFrame(name, "histogram");
histPlot.configure {
node("xAxis") {
@@ -105,7 +105,7 @@ class TimeSpectrumAction : OneToOneAction<NumassPoint, Table>() {
configure(inputMeta.getMetaOrEmpty("plot"))
}
pm.getPlotFrame(getName(), "stat-method").add(statPlot)
pm.getPlotFrame(name, "stat-method").add(statPlot)
(1..100).map { 1000 * it }.map { t ->
val result = analyzer.analyze(input, buildMeta {

View File

@@ -1,15 +1,17 @@
package inr.numass.actions
import hep.dataforge.Named
import hep.dataforge.actions.OneToOneAction
import hep.dataforge.context.Context
import hep.dataforge.description.NodeDef
import hep.dataforge.description.TypedActionDef
import hep.dataforge.description.ValueDef
import hep.dataforge.description.ValueDefs
import hep.dataforge.isAnonymous
import hep.dataforge.meta.Laminate
import hep.dataforge.meta.Meta
import hep.dataforge.meta.MetaUtils
import hep.dataforge.names.Named
import hep.dataforge.tables.ColumnFormat
import hep.dataforge.tables.ColumnTable
import hep.dataforge.tables.ListColumn
@@ -49,6 +51,8 @@ class TransformDataAction : OneToOneAction<Table, Table>() {
if (meta.hasValue("correction")) {
val correction = meta.getString("correction")
corrections.add(object : Correction {
override val name: String = ""
override fun corr(point: Values): Double {
return pointExpression(correction, point)
}
@@ -60,7 +64,7 @@ class TransformDataAction : OneToOneAction<Table, Table>() {
for (correction in corrections) {
//adding correction columns
if (!correction.isAnonimous) {
if (!correction.isAnonymous) {
table = table.buildColumn(ColumnFormat.build(correction.name, NUMBER)) { correction.corr(it) }
if (correction.hasError()) {
table = table.buildColumn(ColumnFormat.build(correction.name + ".err", NUMBER)) { correction.corrErr(it) }
@@ -69,8 +73,8 @@ class TransformDataAction : OneToOneAction<Table, Table>() {
}
// adding original count rate and error columns
table = table.addColumn(ListColumn(ColumnFormat.build(COUNT_RATE_KEY + ".orig", NUMBER), table.getColumn(COUNT_RATE_KEY).stream()))
table = table.addColumn(ListColumn(ColumnFormat.build(COUNT_RATE_ERROR_KEY + ".orig", NUMBER), table
table = table.addColumn(ListColumn(ColumnFormat.build("$COUNT_RATE_KEY.orig", NUMBER), table.getColumn(COUNT_RATE_KEY).stream()))
table = table.addColumn(ListColumn(ColumnFormat.build("$COUNT_RATE_ERROR_KEY.orig", NUMBER), table
.getColumn(COUNT_RATE_ERROR_KEY).stream()))
val cr = ArrayList<Double>()
@@ -99,7 +103,7 @@ class TransformDataAction : OneToOneAction<Table, Table>() {
val res = table.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_KEY).format, cr.stream()))
.addColumn(ListColumn.build(table.getColumn(COUNT_RATE_ERROR_KEY).format, crErr.stream()))
context.io.output(name, getName()).push(NumassUtils.wrap(res, meta))
context.io.output(name, name).push(NumassUtils.wrap(res, meta))
return res
}
@@ -112,9 +116,7 @@ class TransformDataAction : OneToOneAction<Table, Table>() {
val expr = corrMeta.getString("value")
val errExpr = corrMeta.getString("err", "")
return object : Correction {
override fun getName(): String {
return corrMeta.getString("name", corrMeta.name)
}
override val name=corrMeta.getString("name", corrMeta.name)
override fun corr(point: Values): Double {
return pointExpression(expr, point)
@@ -136,10 +138,6 @@ class TransformDataAction : OneToOneAction<Table, Table>() {
private interface Correction : Named {
override fun getName(): String {
return ""
}
/**
* correction coefficient
*

View File

@@ -45,7 +45,7 @@ fun main(args: Array<String>) {
val all = NumassDataUtils.join("sum", loaders)
val data = DataSet.builder(NumassPoint::class.java).apply {
val data = DataSet.edit(NumassPoint::class).apply {
hvs.forEach { hv ->
putStatic(
"point_${hv.toInt()}",

View File

@@ -46,7 +46,7 @@ object Threshold {
val analyzer = TimeAnalyzer();
val data = DataSet.builder(NumassPoint::class.java).also { dataBuilder ->
val data = DataSet.edit(NumassPoint::class).also { dataBuilder ->
sets.sortedBy { it.startTime }
.flatMap { set -> set.points.toList() }
.groupBy { it.voltage }

View File

@@ -27,12 +27,12 @@ import inr.numass.NumassUtils
class NumassFitScanSummaryTask : AbstractTask<Table>() {
override fun run(model: TaskModel, data: DataNode<*>): DataNode<Table> {
val builder = DataSet.builder(Table::class.java)
val builder = DataSet.edit(Table::class)
val action = FitSummaryAction()
val input = data.checked(FitResult::class.java)
input.nodeStream()
.filter { it -> it.getSize(false) > 0 }
.forEach { node -> builder.putData(node.name, action.run(model.context, node, model.meta).data) }
.forEach { node -> builder.putData(node.name, action.run(model.context, node, model.meta).data!!) }
return builder.build()
}
@@ -41,9 +41,7 @@ class NumassFitScanSummaryTask : AbstractTask<Table>() {
}
override fun getName(): String {
return "scansum"
}
override val name = "scansum"
@TypedActionDef(name = "sterileSummary", inputType = FitResult::class, outputType = Table::class)
private inner class FitSummaryAction : ManyToOneAction<FitResult, Table>() {

View File

@@ -21,7 +21,7 @@ import java.util.stream.Collectors
/**
* @author Alexander Nozik
*/
class NumassFitScanTask : AbstractTask<FitResult>() {
object NumassFitScanTask : AbstractTask<FitResult>() {
override fun run(model: TaskModel, data: DataNode<*>): DataNode<FitResult> {
@@ -39,7 +39,7 @@ class NumassFitScanTask : AbstractTask<FitResult>() {
}
val action = FitAction()
val resultBuilder = DataTree.builder(FitResult::class.java)
val resultBuilder = DataTree.edit(FitResult::class)
val sourceNode = data.checked(Table::class.java)
//do fit
@@ -53,7 +53,7 @@ class NumassFitScanTask : AbstractTask<FitResult>() {
val resultName = String.format("%s[%s=%s]", table.name, scanParameter, `val`.stringValue())
// overrideMeta.setValue("@resultName", String.format("%s[%s=%s]", table.getName(), scanParameter, val.stringValue()));
if (overrideMeta.hasMeta("params." + scanParameter)) {
if (overrideMeta.hasMeta("params.$scanParameter")) {
overrideMeta.setValue("params.$scanParameter.value", `val`)
} else {
overrideMeta.getMetaList("params.param").stream()
@@ -62,7 +62,7 @@ class NumassFitScanTask : AbstractTask<FitResult>() {
}
// Data<Table> newData = new Data<Table>(data.getGoal(),data.type(),overrideMeta);
val node = action.run(model.context, DataNode.of(resultName, table, Meta.empty()), overrideMeta)
resultBuilder.putData(table.name + ".fit_" + i, node.data)
resultBuilder.putData(table.name + ".fit_" + i, node.data!!)
}
}
@@ -76,8 +76,6 @@ class NumassFitScanTask : AbstractTask<FitResult>() {
model.dependsOn("filter", meta)
}
override fun getName(): String {
return "fitscan"
}
override val name = "fitscan"
}

View File

@@ -16,33 +16,24 @@
package inr.numass.tasks
import hep.dataforge.actions.Action
import hep.dataforge.data.DataNode
import hep.dataforge.meta.Meta
import hep.dataforge.stat.fit.FitState
import hep.dataforge.tables.Table
import hep.dataforge.workspace.tasks.SingleActionTask
import hep.dataforge.workspace.tasks.AbstractTask
import hep.dataforge.workspace.tasks.TaskModel
import inr.numass.actions.SummaryAction
/**
* Created by darksnake on 16-Sep-16.
*/
class NumassFitSummaryTask : SingleActionTask<FitState, Table>() {
override fun getName(): String {
return "summary"
}
object NumassFitSummaryTask : AbstractTask<Table>() {
override val name: String = "summary"
override fun getAction(model: TaskModel): Action<FitState, Table> {
return SummaryAction()
}
override fun gatherNode(data: DataNode<*>): DataNode<FitState> {
return data.getCheckedNode("fit", FitState::class.java)
}
override fun transformMeta(model: TaskModel): Meta {
return model.meta.getMeta("summary")
override fun run(model: TaskModel, data: DataNode<*>): DataNode<out Table> {
val actionMeta = model.meta.getMeta("summary")
val checkedData = data.getCheckedNode("fit", FitState::class.java)
return SummaryAction.run(model.context, checkedData, actionMeta)
}
override fun buildModel(model: TaskModel.Builder, meta: Meta) {

View File

@@ -42,7 +42,7 @@ val selectTask = task("select") {
data("*")
configure(meta.getMetaOrEmpty("data"))
}
transform<NumassSet, NumassSet> { data ->
transform<NumassSet> { data ->
logger.info("Starting selection from data node with size ${data.size}")
CustomDataFilter(meta).filter<NumassSet>(data.checked(NumassSet::class.java)).also {
logger.info("Selected ${it.size} elements")
@@ -139,8 +139,8 @@ val mergeEmptyTask = task("empty") {
.setValue("merge." + MERGE_NAME, meta.getString("merge." + MERGE_NAME, "") + "_empty");
dependsOn(mergeTask, newMeta)
}
transform<Table, Table> { data ->
val builder = DataSet.builder(Table::class.java)
transform<Table> { data ->
val builder = DataSet.edit(Table::class)
data.forEach {
builder.putData(it.name + "_empty", it.anonymize());
}
@@ -154,12 +154,13 @@ val subtractEmptyTask = task("dif") {
dependsOn(mergeTask, meta, "data")
dependsOn(mergeEmptyTask, meta, "empty")
}
transform<Table, Table> { data ->
val builder = DataTree.builder(Table::class.java)
val rootNode = data.getCheckedNode<Table>("data", Table::class.java)
val empty = data.getCheckedNode<Table>("empty", Table::class.java).data
transform<Table> { data ->
val builder = DataTree.edit(Table::class)
val rootNode = data.getCheckedNode("data", Table::class.java)
val empty = data.getCheckedNode("empty", Table::class.java).data
?: throw RuntimeException("No empty data found")
rootNode.forEachData(Table::class.java, { input ->
rootNode.visit(Table::class.java, { input ->
val resMeta = buildMeta {
putNode("data", input.meta)
putNode("empty", empty.meta)

View File

@@ -12,7 +12,7 @@ import java.util.stream.Stream
* Cached numass data
* Created by darksnake on 23-Jun-17.
*/
class NumassDataCache(val data: NumassSet) : NumassSet {
class NumassDataCache(private val data: NumassSet) : NumassSet {
//private val cachedDescription: String by lazy { data.description }
override val meta: Meta by lazy { data.meta }
private val cachedPoints: List<NumassPoint> by lazy { data.points.collect(Collectors.toList()) }
@@ -22,12 +22,5 @@ class NumassDataCache(val data: NumassSet) : NumassSet {
override val points: Stream<NumassPoint>
get() = cachedPoints.stream()
// override fun getDescription(): String {
// return cachedDescription
// }
override fun getName(): String {
return data.name;
}
override val name: String = data.name
}